Merge remote-tracking branch 'origin/develop' into dev-isf

Conflicts:
	webapp/rdf/displayDisplay/everytime/displayDisplay.n3
	webapp/web/i18n/all.properties
	webapp/web/templates/freemarker/lib/lib-properties.ftl
brianjlowe 2013-09-23 16:19:05 -04:00
commit 90512bdc1c
16 changed files with 893 additions and 409 deletions


@@ -9,9 +9,11 @@ import java.io.PrintWriter;
import java.io.Writer; import java.io.Writer;
import java.net.URLDecoder; import java.net.URLDecoder;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap; import java.util.HashMap;
import java.util.Iterator; import java.util.Iterator;
import java.util.List; import java.util.List;
import java.util.Map;
import javax.servlet.RequestDispatcher; import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException; import javax.servlet.ServletException;
@@ -40,6 +42,7 @@ import com.hp.hpl.jena.vocabulary.XSD;
import edu.cornell.mannlib.vedit.controller.BaseEditController; import edu.cornell.mannlib.vedit.controller.BaseEditController;
import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission; import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission;
import edu.cornell.mannlib.vitro.webapp.beans.Ontology; import edu.cornell.mannlib.vitro.webapp.beans.Ontology;
import edu.cornell.mannlib.vitro.webapp.controller.individual.IndividualController;
import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao; import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ModelSerializationFormat; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ModelSerializationFormat;
@@ -47,44 +50,36 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ResultFormat;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils; import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils; import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType;
/** /**
* Services a sparql query. This will return a simple error message and a 501 if * Services a SPARQL query. This will return a simple error message and a 501 if
* there is no Model. * there is no Model.
* *
*
* @author bdc34 * @author bdc34
* *
*/ */
public class SparqlQueryServlet extends BaseEditController { public class SparqlQueryServlet extends BaseEditController {
private static final Log log = LogFactory.getLog(SparqlQueryServlet.class.getName()); private static final Log log = LogFactory.getLog(SparqlQueryServlet.class.getName());
private final static boolean CONVERT = true;
/** /**
* format configurations for SELECT queries. * format configurations for SELECT queries.
*/ */
protected static HashMap<String,RSFormatConfig> rsFormats = new HashMap<String,RSFormatConfig>(); protected static HashMap<String,RSFormatConfig> rsFormats = new HashMap<String,RSFormatConfig>();
private static RSFormatConfig[] rsfs = {
new RSFormatConfig( "RS_XML", !CONVERT, ResultFormat.XML, null, "text/xml"),
new RSFormatConfig( "RS_TEXT", !CONVERT, ResultFormat.TEXT, null, "text/plain"),
new RSFormatConfig( "vitro:csv", !CONVERT, ResultFormat.CSV, null, "text/csv"),
new RSFormatConfig( "RS_JSON", !CONVERT, ResultFormat.JSON, null, "application/javascript") };
/** /**
* format configurations for CONSTRUCT/DESCRIBE queries. * format configurations for CONSTRUCT/DESCRIBE queries.
*/ */
protected static HashMap<String,ModelFormatConfig> modelFormats = protected static HashMap<String,ModelFormatConfig> modelFormats =
new HashMap<String,ModelFormatConfig>(); new HashMap<String,ModelFormatConfig>();
private static ModelFormatConfig[] fmts = { /**
new ModelFormatConfig("RDF/XML", !CONVERT, ModelSerializationFormat.RDFXML, null, "application/rdf+xml" ), * Use this map to decide which MIME type is suited for the "accept" header.
new ModelFormatConfig("RDF/XML-ABBREV", CONVERT, ModelSerializationFormat.N3, "RDF/XML-ABBREV", "application/rdf+xml" ), */
new ModelFormatConfig("N3", !CONVERT, ModelSerializationFormat.N3, null, "text/n3" ), public static final Map<String, Float> ACCEPTED_CONTENT_TYPES;
new ModelFormatConfig("N-TRIPLE", !CONVERT, ModelSerializationFormat.NTRIPLE, null, "text/plain" ),
new ModelFormatConfig("TTL", CONVERT, ModelSerializationFormat.N3, "TTL", "application/x-turtle" ),
new ModelFormatConfig("JSON-LD", CONVERT, ModelSerializationFormat.N3, "JSON-LD", "application/javascript" ) };
@Override @Override
protected void doPost(HttpServletRequest request, HttpServletResponse response) protected void doPost(HttpServletRequest request, HttpServletResponse response)
@@ -113,101 +108,86 @@ public class SparqlQueryServlet extends BaseEditController {
String queryParam = vreq.getParameter("query"); String queryParam = vreq.getParameter("query");
log.debug("queryParam was : " + queryParam); log.debug("queryParam was : " + queryParam);
String resultFormatParam = vreq.getParameter("resultFormat"); if( queryParam == null || "".equals(queryParam) ){
log.debug("resultFormat was: " + resultFormatParam);
String rdfResultFormatParam = vreq.getParameter("rdfResultFormat");
if (rdfResultFormatParam == null) {
rdfResultFormatParam = "RDF/XML-ABBREV";
}
log.debug("rdfResultFormat was: " + rdfResultFormatParam);
if( queryParam == null || "".equals(queryParam) ||
resultFormatParam == null || "".equals(resultFormatParam) ||
!rsFormats.containsKey( resultFormatParam ) ||
rdfResultFormatParam == null || "".equals(rdfResultFormatParam) ||
!modelFormats.containsKey( rdfResultFormatParam ) ) {
doHelp(request,response); doHelp(request,response);
return; return;
} }
executeQuery(response, resultFormatParam, rdfResultFormatParam, String contentType = checkForContentType(vreq.getHeader("Accept"));
queryParam, vreq.getUnfilteredRDFService());
Query query = SparqlQueryUtils.create(queryParam);
if( query.isSelectType() ){
String format = contentType!=null ? contentType:vreq.getParameter("resultFormat");
RSFormatConfig formatConf = rsFormats.get(format);
doSelect(response, queryParam, formatConf, vreq.getRDFService());
}else if( query.isAskType()){
doAsk( queryParam, vreq.getRDFService(), response );
}else if( query.isConstructType() ){
String format = contentType != null ? contentType : vreq.getParameter("rdfResultFormat");
if (format== null) {
format= "RDF/XML-ABBREV";
}
ModelFormatConfig formatConf = modelFormats.get(format);
doConstruct(response, query, formatConf, vreq.getRDFService());
}else{
doHelp(request,response);
}
return; return;
} }
private void executeQuery(HttpServletResponse response,
String resultFormatParam,
String rdfResultFormatParam,
String queryParam,
RDFService rdfService ) throws IOException {
/* BJL23 2008-11-06
* modified to support CSV output.
* Unfortunately, ARQ doesn't make it easy to
* do this by implementing a new ResultSetFormat, because
* ResultSetFormatter is hardwired with expected values.
* This slightly ugly approach will have to do for now.
*/
// if ( !("vitro:csv").equals(resultFormatParam) ) {
// rsf = selectFormatSymbols.get(resultFormatParam);
// }
// String mimeType = rdfFormatToMimeType.get(resultFormatParam);
try{ private void doAsk(String queryParam, RDFService rdfService,
Query query = SparqlQueryUtils.create(queryParam); HttpServletResponse response) throws ServletException, IOException {
if( query.isSelectType() ){
doSelectQuery( queryParam, rdfService, resultFormatParam, response); // Irrespective of the ResultFormatParam,
} else if(query.isAskType()){ // this always prints a boolean to the default OutputStream.
// Irrespective of the ResultFormatParam, String result;
// this always prints a boolean to the default OutputStream. try {
String result = (rdfService.sparqlAskQuery(queryParam) == true) result = (rdfService.sparqlAskQuery(queryParam) == true)
? "true" ? "true"
: "false"; : "false";
PrintWriter p = response.getWriter();
p.write(result);
return;
} else {
doModelResultQuery( query, rdfService, rdfResultFormatParam, response);
}
} catch (RDFServiceException e) { } catch (RDFServiceException e) {
throw new RuntimeException(e); throw new ServletException( "Could not execute ask query ", e );
} }
PrintWriter p = response.getWriter();
p.write(result);
return;
} }
/** /**
* Execute the query and send the result to out. Attempt to * Execute the query and send the result to out. Attempt to
* send the RDFService the same format as the rdfResultFormatParam * send the RDFService the same format as the rdfResultFormatParam
* so that the results from the RDFService can be directly piped to the client. * so that the results from the RDFService can be directly piped to the client.
* @param rdfService
* @throws IOException
* @throws RDFServiceException
*/ */
private void doSelectQuery( String queryParam, private void doSelect(HttpServletResponse response,
RDFService rdfService, String resultFormatParam, String queryParam,
HttpServletResponse response) throws IOException, RDFServiceException{ RSFormatConfig formatConf,
RSFormatConfig config = rsFormats.get( resultFormatParam ); RDFService rdfService
) throws ServletException {
try {
if( ! formatConf.converstionFromWireFormat ){
response.setContentType( formatConf.responseMimeType );
InputStream results;
results = rdfService.sparqlSelectQuery(queryParam, formatConf.wireFormat );
pipe( results, response.getOutputStream() );
}else{
//always use JSON when conversion is needed.
InputStream results = rdfService.sparqlSelectQuery(queryParam, ResultFormat.JSON );
if( ! config.converstionFromWireFormat ){ response.setContentType( formatConf.responseMimeType );
response.setContentType( config.responseMimeType );
InputStream results = rdfService.sparqlSelectQuery(queryParam, config.wireFormat );
pipe( results, response.getOutputStream() );
}else{
//always use JSON when conversion is needed.
InputStream results = rdfService.sparqlSelectQuery(queryParam, ResultFormat.JSON );
response.setContentType( config.responseMimeType ); ResultSet rs = ResultSetFactory.fromJSON( results );
OutputStream out = response.getOutputStream();
ResultSet rs = ResultSetFactory.fromJSON( results ); ResultSetFormatter.output(out, rs, formatConf.jenaResponseFormat);
OutputStream out = response.getOutputStream(); }
ResultSetFormatter.output(out, rs, config.jenaResponseFormat); } catch (RDFServiceException e) {
throw new ServletException("Cannot get result from the RDFService",e);
// } else { } catch (IOException e) {
// Writer out = response.getWriter(); throw new ServletException("Cannot perform SPARQL SELECT",e);
// toCsv(out, results);
//}
} }
} }
/** /**
* Execute the query and send the result to out. Attempt to * Execute the query and send the result to out. Attempt to
* send the RDFService the same format as the rdfResultFormatParam * send the RDFService the same format as the rdfResultFormatParam
@@ -217,40 +197,44 @@ public class SparqlQueryServlet extends BaseEditController {
* @throws RDFServiceException * @throws RDFServiceException
* @throws * @throws
*/ */
private void doModelResultQuery( Query query, private void doConstruct( HttpServletResponse response,
RDFService rdfService, String rdfResultFormatParam, Query query,
HttpServletResponse response) throws IOException, RDFServiceException{ ModelFormatConfig formatConfig,
RDFService rdfService
) throws ServletException{
try{
InputStream rawResult = null;
if( query.isConstructType() ){
rawResult= rdfService.sparqlConstructQuery( query.toString(), formatConfig.wireFormat );
}else if ( query.isDescribeType() ){
rawResult = rdfService.sparqlDescribeQuery( query.toString(), formatConfig.wireFormat );
}
//config drives what formats and conversions to use response.setContentType( formatConfig.responseMimeType );
ModelFormatConfig config = modelFormats.get( rdfResultFormatParam );
InputStream rawResult = null; if( formatConfig.converstionFromWireFormat ){
if( query.isConstructType() ){ Model resultModel = RDFServiceUtils.parseModel( rawResult, formatConfig.wireFormat );
rawResult= rdfService.sparqlConstructQuery( query.toString(), config.wireFormat ); if( "JSON-LD".equals( formatConfig.jenaResponseFormat )){
}else if ( query.isDescribeType() ){ //since jena 2.6.4 doesn't support JSON-LD we do it
rawResult = rdfService.sparqlDescribeQuery( query.toString(), config.wireFormat ); try {
} JenaRDFParser parser = new JenaRDFParser();
Object json = JSONLD.fromRDF(resultModel, parser);
response.setContentType( config.responseMimeType ); JSONUtils.write(response.getWriter(), json);
} catch (JSONLDProcessingError e) {
if( config.converstionFromWireFormat ){ throw new RDFServiceException("Could not convert from Jena model to JSON-LD", e);
Model resultModel = RDFServiceUtils.parseModel( rawResult, config.wireFormat ); }
if( "JSON-LD".equals( config.jenaResponseFormat )){ }else{
//since jena 2.6.4 doesn't support JSON-LD we do it OutputStream out = response.getOutputStream();
try { resultModel.write(out, formatConfig.jenaResponseFormat );
JenaRDFParser parser = new JenaRDFParser();
Object json = JSONLD.fromRDF(resultModel, parser);
JSONUtils.write(response.getWriter(), json);
} catch (JSONLDProcessingError e) {
throw new RDFServiceException("Could not convert from Jena model to JSON-LD", e);
} }
}else{ }else{
OutputStream out = response.getOutputStream(); OutputStream out = response.getOutputStream();
resultModel.write(out, config.jenaResponseFormat ); pipe( rawResult, out );
} }
}else{ }catch( IOException ex){
OutputStream out = response.getOutputStream(); throw new ServletException("could not run SPARQL CONSTRUCT",ex);
pipe( rawResult, out ); } catch (RDFServiceException ex) {
throw new ServletException("could not run SPARQL CONSTRUCT",ex);
} }
} }
@@ -362,13 +346,35 @@ public class SparqlQueryServlet extends BaseEditController {
rd.forward(req,res); rd.forward(req,res);
} }
/** Simple boolean value to improve the legibility of configurations. */
private final static boolean CONVERT = true;
public static class ModelFormatConfig{ /** Simple value to improve the legibility of configurations. */
private final static String NO_CONVERSION = null;
public static class FormatConfig{
public String valueFromForm; public String valueFromForm;
public boolean converstionFromWireFormat; public boolean converstionFromWireFormat;
public String responseMimeType;
}
private static ModelFormatConfig[] fmts = {
new ModelFormatConfig("RDF/XML",
!CONVERT, ModelSerializationFormat.RDFXML, NO_CONVERSION, "application/rdf+xml" ),
new ModelFormatConfig("RDF/XML-ABBREV",
CONVERT, ModelSerializationFormat.N3, "RDF/XML-ABBREV", "application/rdf+xml" ),
new ModelFormatConfig("N3",
!CONVERT, ModelSerializationFormat.N3, NO_CONVERSION, "text/n3" ),
new ModelFormatConfig("N-TRIPLE",
!CONVERT, ModelSerializationFormat.NTRIPLE, NO_CONVERSION, "text/plain" ),
new ModelFormatConfig("TTL",
CONVERT, ModelSerializationFormat.N3, "TTL", "application/x-turtle" ),
new ModelFormatConfig("JSON-LD",
CONVERT, ModelSerializationFormat.N3, "JSON-LD", "application/javascript" ) };
public static class ModelFormatConfig extends FormatConfig{
public RDFService.ModelSerializationFormat wireFormat; public RDFService.ModelSerializationFormat wireFormat;
public String jenaResponseFormat; public String jenaResponseFormat;
public String responseMimeType;
public ModelFormatConfig( String valueFromForm, public ModelFormatConfig( String valueFromForm,
boolean converstionFromWireFormat, boolean converstionFromWireFormat,
@@ -383,12 +389,20 @@ public class SparqlQueryServlet extends BaseEditController {
} }
} }
public static class RSFormatConfig{
public String valueFromForm; private static RSFormatConfig[] rsfs = {
public boolean converstionFromWireFormat; new RSFormatConfig( "RS_XML",
!CONVERT, ResultFormat.XML, null, "text/xml"),
new RSFormatConfig( "RS_TEXT",
!CONVERT, ResultFormat.TEXT, null, "text/plain"),
new RSFormatConfig( "vitro:csv",
!CONVERT, ResultFormat.CSV, null, "text/csv"),
new RSFormatConfig( "RS_JSON",
!CONVERT, ResultFormat.JSON, null, "application/javascript") };
public static class RSFormatConfig extends FormatConfig{
public ResultFormat wireFormat; public ResultFormat wireFormat;
public ResultSetFormat jenaResponseFormat; public ResultSetFormat jenaResponseFormat;
public String responseMimeType;
public RSFormatConfig( String valueFromForm, public RSFormatConfig( String valueFromForm,
boolean converstionFromWireFormat, boolean converstionFromWireFormat,
@@ -404,13 +418,47 @@ public class SparqlQueryServlet extends BaseEditController {
} }
static{ static{
/* move the lists of configs into maps for easy lookup */ HashMap<String, Float> map = new HashMap<String, Float>();
/* move the lists of configurations into maps for easy lookup
* by both MIME content type and the parameters from the form */
for( RSFormatConfig rsfc : rsfs ){ for( RSFormatConfig rsfc : rsfs ){
rsFormats.put( rsfc.valueFromForm, rsfc ); rsFormats.put( rsfc.valueFromForm, rsfc );
rsFormats.put( rsfc.responseMimeType, rsfc);
map.put(rsfc.responseMimeType, 1.0f);
} }
for( ModelFormatConfig mfc : fmts ){ for( ModelFormatConfig mfc : fmts ){
modelFormats.put( mfc.valueFromForm, mfc); modelFormats.put( mfc.valueFromForm, mfc);
modelFormats.put(mfc.responseMimeType, mfc);
map.put(mfc.responseMimeType, 1.0f);
} }
ACCEPTED_CONTENT_TYPES = Collections.unmodifiableMap(map);
} }
/**
* Get the content type based on content negotiation.
* Returns null if no content type can be agreed on or
* if there is no Accept header.
*/
protected String checkForContentType( String acceptHeader ) {
if (acceptHeader == null)
return null;
try {
Map<String, Float> typesAndQ = ContentType
.getTypesAndQ(acceptHeader);
String ctStr = ContentType
.getBestContentType(typesAndQ,ACCEPTED_CONTENT_TYPES);
if( ACCEPTED_CONTENT_TYPES.containsKey( ctStr )){
return ctStr;
}
} catch (Throwable th) {
log.error("Problem while checking accept header ", th);
}
return null;
}
} }
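The practical effect of the changes above: a client can now choose a result format purely through content negotiation. checkForContentType() matches the Accept header against ACCEPTED_CONTENT_TYPES, and the winning MIME type is then looked up in rsFormats or modelFormats exactly as the old resultFormat/rdfResultFormat parameters were. A minimal client sketch follows; it assumes the servlet is mapped at /admin/sparqlquery as in a stock VIVO/Vitro install, and the base URL and query are placeholders.

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class SparqlAcceptHeaderClient {
    public static void main(String[] args) throws Exception {
        // Placeholder endpoint; adjust host, port, and context path as needed.
        String endpoint = "http://localhost:8080/vivo/admin/sparqlquery";
        String query = "SELECT ?s WHERE { ?s ?p ?o } LIMIT 5";
        URL url = new URL(endpoint + "?query=" + URLEncoder.encode(query, "UTF-8"));

        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        // The Accept header alone selects the format; text/csv is one of the
        // MIME types registered in ACCEPTED_CONTENT_TYPES for SELECT results.
        conn.setRequestProperty("Accept", "text/csv");

        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}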


@@ -207,5 +207,13 @@ public class VitroRequest extends HttpServletRequestWrapper {
return _req.getParameterValues(name); return _req.getParameterValues(name);
} }
public void setLanguageNeutralUnionFullModel(OntModel model) {
setAttribute("languageNeutralUnionFullModel", model);
}
public OntModel getLanguageNeutralUnionFullModel() {
return (OntModel) getAttribute("languageNeutralUnionFullModel");
}
} }


@@ -0,0 +1,235 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.controller.individual;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.datatypes.TypeMapper;
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.VClass;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RdfResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.utils.jena.ExtendedLinkedDataUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaOutputUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType;
/**
* TODO Keep this around until release 1.7, in case anyone is relying on it.
*/
@Deprecated
public class ExtendedRdfAssembler {
private static final Log log = LogFactory
.getLog(ExtendedRdfAssembler.class);
private static final String RICH_EXPORT_ROOT = "/WEB-INF/rich-export/";
private static final String PERSON_CLASS_URI = "http://xmlns.com/foaf/0.1/Person";
private static final String INCLUDE_ALL = "all";
@SuppressWarnings("serial")
private static final Map<String, String> namespaces = new HashMap<String, String>() {{
put("display", VitroVocabulary.DISPLAY);
put("vitro", VitroVocabulary.vitroURI);
put("vitroPublic", VitroVocabulary.VITRO_PUBLIC);
}};
private static final Property extendedLinkedDataProperty = ResourceFactory.createProperty(namespaces.get("vitro") + "extendedLinkedData");
private static final Literal xsdTrue = ResourceFactory.createTypedLiteral("true", XSDDatatype.XSDboolean);
private final VitroRequest vreq;
private final ServletContext ctx;
private final Individual individual;
private final ContentType rdfFormat;
public ExtendedRdfAssembler(VitroRequest vreq, Individual individual,
ContentType rdfFormat) {
this.vreq = vreq;
this.ctx = vreq.getSession().getServletContext();
this.individual = individual;
this.rdfFormat = rdfFormat;
}
/**
* @return
*/
public ResponseValues assembleRdf() {
OntModel ontModel = vreq.getJenaOntModel();
String[] includes = vreq.getParameterValues("include");
Model newModel = getRDF(individual, ontModel, ModelFactory.createDefaultModel(), 0, includes);
JenaOutputUtils.setNameSpacePrefixes(newModel, vreq.getWebappDaoFactory());
return new RdfResponseValues(rdfFormat, newModel);
}
private Model getRDF(Individual entity, OntModel contextModel, Model newModel, int recurseDepth, String[] includes) {
Resource subj = newModel.getResource(entity.getURI());
List<DataPropertyStatement> dstates = entity.getDataPropertyStatements();
TypeMapper typeMapper = TypeMapper.getInstance();
for (DataPropertyStatement ds: dstates) {
Property dp = newModel.getProperty(ds.getDatapropURI());
Literal lit = null;
if ((ds.getLanguage()) != null && (ds.getLanguage().length()>0)) {
lit = newModel.createLiteral(ds.getData(),ds.getLanguage());
} else if ((ds.getDatatypeURI() != null) && (ds.getDatatypeURI().length()>0)) {
lit = newModel.createTypedLiteral(ds.getData(),typeMapper.getSafeTypeByName(ds.getDatatypeURI()));
} else {
lit = newModel.createLiteral(ds.getData());
}
newModel.add(newModel.createStatement(subj, dp, lit));
}
if (recurseDepth < 5) {
List<ObjectPropertyStatement> ostates = entity.getObjectPropertyStatements();
for (ObjectPropertyStatement os: ostates) {
Property prop = newModel.getProperty(os.getPropertyURI());
Resource obj = newModel.getResource(os.getObjectURI());
newModel.add(newModel.createStatement(subj, prop, obj));
if ( includeInLinkedData(obj, contextModel)) {
newModel.add(getRDF(os.getObject(), contextModel, newModel, recurseDepth + 1, includes));
} else {
contextModel.enterCriticalSection(Lock.READ);
try {
newModel.add(contextModel.listStatements(obj, RDFS.label, (RDFNode)null));
} finally {
contextModel.leaveCriticalSection();
}
}
}
}
newModel = getLabelAndTypes(entity, contextModel, newModel );
newModel = getStatementsWithUntypedProperties(subj, contextModel, vreq.getAssertionsOntModel(), newModel);
//bdc34: The following code adds all triples where entity is the Subject.
// contextModel.enterCriticalSection(Lock.READ);
// try {
// StmtIterator iter = contextModel.listStatements(subj, (Property) null, (RDFNode) null);
// while (iter.hasNext()) {
// Statement stmt = iter.next();
// if (!newModel.contains(stmt)) {
// newModel.add(stmt);
// }
// }
// } finally {
// contextModel.leaveCriticalSection();
// }
if (recurseDepth == 0 && includes != null && entity.isVClass(PERSON_CLASS_URI)) {
for (String include : includes) {
String rootDir = null;
if (INCLUDE_ALL.equals(include)) {
rootDir = RICH_EXPORT_ROOT;
} else {
rootDir = RICH_EXPORT_ROOT + include + "/";
}
long start = System.currentTimeMillis();
Model extendedModel = ExtendedLinkedDataUtils.createModelFromQueries(ctx, rootDir, contextModel, entity.getURI());
long elapsedTimeMillis = System.currentTimeMillis()-start;
log.info("Time to create rich export model: msecs = " + elapsedTimeMillis);
newModel.add(extendedModel);
}
}
return newModel;
}
public static boolean includeInLinkedData(Resource object, Model contextModel) {
boolean retval = false;
contextModel.enterCriticalSection(Lock.READ);
try {
StmtIterator iter = contextModel.listStatements(object, RDF.type, (RDFNode)null);
while (iter.hasNext()) {
Statement stmt = iter.next();
if (stmt.getObject().isResource() && contextModel.contains(stmt.getObject().asResource(), extendedLinkedDataProperty, xsdTrue)) {
retval = true;
break;
}
}
} finally {
contextModel.leaveCriticalSection();
}
return retval;
}
/* Get the properties that are difficult to get via a filtered WebappDaoFactory. */
private Model getLabelAndTypes(Individual entity, Model ontModel, Model newModel){
for( VClass vclass : entity.getVClasses()){
newModel.add(newModel.getResource(entity.getURI()), RDF.type, newModel.getResource(vclass.getURI()));
}
ontModel.enterCriticalSection(Lock.READ);
try {
newModel.add(ontModel.listStatements(ontModel.getResource(entity.getURI()), RDFS.label, (RDFNode)null));
} finally {
ontModel.leaveCriticalSection();
}
return newModel;
}
/* This method adds in statements in which the property does not
* have an rdf type in the asserted model.
* This was added for release 1.5 to handle cases such as the
* reasoning-plugin inferred dcterms:creator assertion
*/
private Model getStatementsWithUntypedProperties(Resource subject, OntModel contextModel, OntModel assertionsModel, Model newModel) {
contextModel.enterCriticalSection(Lock.READ);
try {
StmtIterator iter = contextModel.listStatements(subject, (Property) null, (RDFNode) null);
while (iter.hasNext()) {
Statement stmt = iter.next();
Property property = stmt.getPredicate();
assertionsModel.enterCriticalSection(Lock.READ);
try {
if (!assertionsModel.contains(property, RDF.type) && !newModel.contains(stmt)) {
newModel.add(stmt);
}
} finally {
assertionsModel.leaveCriticalSection();
}
}
} finally {
contextModel.leaveCriticalSection();
}
return newModel;
}
}


@@ -14,6 +14,7 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest; import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServlet; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServlet;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ExceptionResponseValues; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ExceptionResponseValues;
@@ -30,10 +31,14 @@ public class IndividualController extends FreemarkerHttpServlet {
private static final String TEMPLATE_HELP = "individual-help.ftl"; private static final String TEMPLATE_HELP = "individual-help.ftl";
@Deprecated
private static final String PROPERTY_EXTENDED_LOD = "serveExtendedLinkedData";
/** /**
* Use this map to decide which MIME type is suited for the "accept" header. * Use this map to decide which MIME type is suited for the "accept" header.
*/ */
public static final Map<String, Float> ACCEPTED_CONTENT_TYPES = initializeContentTypes(); public static final Map<String, Float> ACCEPTED_CONTENT_TYPES = initializeContentTypes();
private static Map<String, Float> initializeContentTypes() { private static Map<String, Float> initializeContentTypes() {
HashMap<String, Float> map = new HashMap<String, Float>(); HashMap<String, Float> map = new HashMap<String, Float>();
map.put(HTML_MIMETYPE, 0.5f); map.put(HTML_MIMETYPE, 0.5f);
@@ -82,9 +87,15 @@ public class IndividualController extends FreemarkerHttpServlet {
* If they are asking for RDF using the preferred URL, give it * If they are asking for RDF using the preferred URL, give it
* to them. * to them.
*/ */
return new IndividualRdfAssembler(vreq, if (useExtendedLOD(vreq)) {
requestInfo.getIndividual(), requestInfo.getRdfFormat()) return new ExtendedRdfAssembler(vreq,
.assembleRdf(); requestInfo.getIndividual(),
requestInfo.getRdfFormat()).assembleRdf();
} else {
return new IndividualRdfAssembler(vreq,
requestInfo.getIndividual().getURI(),
requestInfo.getRdfFormat()).assembleRdf();
}
default: default:
/* /*
* Otherwise, prepare an HTML response for the requested * Otherwise, prepare an HTML response for the requested
@@ -113,6 +124,11 @@
HttpServletResponse.SC_NOT_FOUND); HttpServletResponse.SC_NOT_FOUND);
} }
private boolean useExtendedLOD(HttpServletRequest req) {
ConfigurationProperties props = ConfigurationProperties.getBean(req);
return Boolean.valueOf(props.getProperty(PROPERTY_EXTENDED_LOD));
}
@Override @Override
public void doPost(HttpServletRequest request, HttpServletResponse response) public void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException { throws ServletException, IOException {
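The switch between the two assemblers is controlled by the serveExtendedLinkedData configuration property read through ConfigurationProperties; anything other than "true" falls through to the new IndividualRdfAssembler. A sketch of how a deployment might opt back into the deprecated behavior, assuming the property lives in the usual runtime.properties (deploy.properties on older releases):

# Hypothetical configuration entry: serve linked data with the deprecated
# ExtendedRdfAssembler (rich export style) instead of IndividualRdfAssembler.
serveExtendedLinkedData = true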


@@ -2,233 +2,270 @@
package edu.cornell.mannlib.vitro.webapp.controller.individual; package edu.cornell.mannlib.vitro.webapp.controller.individual;
import java.util.HashMap; import java.util.HashSet;
import java.util.List; import java.util.Set;
import java.util.Map;
import javax.servlet.ServletContext; import javax.servlet.ServletContext;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.datatypes.TypeMapper;
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.ontology.OntModel; import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Literal; import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.Model; import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode; import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource; import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement; import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator; import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.vocabulary.RDF; import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS; import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.auth.policy.PolicyHelper;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.display.DisplayDataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.display.DisplayObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.ifaces.RequestedAction;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement; import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.Individual; import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement; import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.VClass; import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest; import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RdfResponseValues; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RdfResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues; import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary; import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.ExtendedLinkedDataUtils; import edu.cornell.mannlib.vitro.webapp.utils.jena.ExtendedLinkedDataUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaOutputUtils; import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaOutputUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType; import edu.cornell.mannlib.vitro.webapp.web.ContentType;
/** /**
* TODO See where this can be improved. * Write a smaller set of Linked Data. It consists of:
*
* 1) The data properties of the entity
*
* 2) The object properties in which the entity is either subject or object
*
* 3) The labels and types of the objects that are linked by those properties.
*
* If the request comes with an Accept-language header, use an appropriately
* language-aware data source to filter the data properties and labels.
* Otherwise, show all triples, regardless of language.
*
* Filter the result based on the policy, removing any triples that should not
* be displayed to the public (or to the user, if logged in). Also remove any
* objects which can only be reached by excluded triples.
*
* ----------------
*
* This still permits the use of rich export, by "include" options on the
* request. The only difference from earlier implementations is that the result
* may be made language-aware.
*/ */
public class IndividualRdfAssembler { public class IndividualRdfAssembler {
private static final Log log = LogFactory private static final Log log = LogFactory
.getLog(IndividualRdfAssembler.class); .getLog(IndividualRdfAssembler.class);
private static final String RICH_EXPORT_ROOT = "/WEB-INF/rich-export/"; private static final String RICH_EXPORT_ROOT = "/WEB-INF/rich-export/";
private static final String PERSON_CLASS_URI = "http://xmlns.com/foaf/0.1/Person"; private static final String INCLUDE_ALL = "all";
private static final String INCLUDE_ALL = "all";
@SuppressWarnings("serial")
private static final Map<String, String> namespaces = new HashMap<String, String>() {{
put("display", VitroVocabulary.DISPLAY);
put("vitro", VitroVocabulary.vitroURI);
put("vitroPublic", VitroVocabulary.VITRO_PUBLIC);
}};
private static final Property extendedLinkedDataProperty = ResourceFactory.createProperty(namespaces.get("vitro") + "extendedLinkedData");
private static final Literal xsdTrue = ResourceFactory.createTypedLiteral("true", XSDDatatype.XSDboolean);
private final VitroRequest vreq; private final VitroRequest vreq;
private final ServletContext ctx; private final ServletContext ctx;
private final Individual individual; private final String individualUri;
private final ContentType rdfFormat; private final ContentType rdfFormat;
private final String[] richExportIncludes;
private final RDFService rdfService;
private final OntModel contentModel;
private final WebappDaoFactory wadf;
public IndividualRdfAssembler(VitroRequest vreq, Individual individual, public IndividualRdfAssembler(VitroRequest vreq, String individualUri,
ContentType rdfFormat) { ContentType rdfFormat) {
this.vreq = vreq; this.vreq = vreq;
this.ctx = vreq.getSession().getServletContext(); this.ctx = vreq.getSession().getServletContext();
this.individual = individual;
this.rdfFormat = rdfFormat;
}
/** this.individualUri = individualUri;
* @return this.rdfFormat = rdfFormat;
*/
public ResponseValues assembleRdf() {
OntModel ontModel = vreq.getJenaOntModel();
String[] includes = vreq.getParameterValues("include"); String[] includes = vreq.getParameterValues("include");
Model newModel = getRDF(individual, ontModel, ModelFactory.createDefaultModel(), 0, includes); this.richExportIncludes = (includes == null) ? new String[0] : includes;
JenaOutputUtils.setNameSpacePrefixes(newModel, vreq.getWebappDaoFactory());
if (isLanguageAware()) {
this.rdfService = vreq.getRDFService();
this.contentModel = vreq.getJenaOntModel();
} else {
this.rdfService = vreq.getUnfilteredRDFService();
this.contentModel = vreq.getLanguageNeutralUnionFullModel();
}
wadf = vreq.getWebappDaoFactory();
}
public ResponseValues assembleRdf() {
OntModel newModel = getRdf();
newModel.add(getRichExportRdf());
JenaOutputUtils.setNameSpacePrefixes(newModel, wadf);
return new RdfResponseValues(rdfFormat, newModel); return new RdfResponseValues(rdfFormat, newModel);
} }
private Model getRDF(Individual entity, OntModel contextModel, Model newModel, int recurseDepth, String[] includes) { private boolean isLanguageAware() {
return StringUtils.isNotEmpty(vreq.getHeader("Accept-Language"));
}
Resource subj = newModel.getResource(entity.getURI()); private OntModel getRdf() {
OntModel o = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
o.add(getStatementsAboutEntity());
o.add(getLabelsAndTypesOfRelatedObjects());
filterByPolicy(o);
return o;
}
List<DataPropertyStatement> dstates = entity.getDataPropertyStatements(); /**
TypeMapper typeMapper = TypeMapper.getInstance(); * Get all statements that have the entity as either the subject or the
for (DataPropertyStatement ds: dstates) { * object.
Property dp = newModel.getProperty(ds.getDatapropURI()); */
Literal lit = null; private Model getStatementsAboutEntity() {
if ((ds.getLanguage()) != null && (ds.getLanguage().length()>0)) { Model m = runConstructQuery(String
lit = newModel.createLiteral(ds.getData(),ds.getLanguage()); .format("CONSTRUCT { <%1$s> ?predicate ?object . } "
} else if ((ds.getDatatypeURI() != null) && (ds.getDatatypeURI().length()>0)) { + "WHERE { <%1$s> ?predicate ?object } ", individualUri));
lit = newModel.createTypedLiteral(ds.getData(),typeMapper.getSafeTypeByName(ds.getDatatypeURI())); m.add(runConstructQuery(String.format(
} else { "CONSTRUCT { ?s ?predicate <%1$s> . } "
lit = newModel.createLiteral(ds.getData()); + "WHERE { ?s ?predicate <%1$s> } ", individualUri)));
} return m;
newModel.add(newModel.createStatement(subj, dp, lit)); }
}
if (recurseDepth < 5) { /**
List<ObjectPropertyStatement> ostates = entity.getObjectPropertyStatements(); * Get the labels and types of all related objects.
*/
private Model getLabelsAndTypesOfRelatedObjects() {
Model m = runConstructQuery(String
.format("CONSTRUCT { ?object <%2$s> ?type . } "
+ "WHERE { <%1$s> ?predicate ?object ."
+ " ?object <%2$s> ?type . } ", individualUri, RDF.type));
m.add(runConstructQuery(String.format(
"CONSTRUCT { ?object <%2$s> ?label . } "
+ "WHERE { <%1$s> ?predicate ?object ."
+ " ?object <%2$s> ?label . } ", individualUri,
RDFS.label)));
return m;
}
for (ObjectPropertyStatement os: ostates) { /**
Property prop = newModel.getProperty(os.getPropertyURI()); * Remove any triples that we aren't allowed to see. Then remove any objects
Resource obj = newModel.getResource(os.getObjectURI()); * that we no longer have access to.
newModel.add(newModel.createStatement(subj, prop, obj)); */
if ( includeInLinkedData(obj, contextModel)) { private void filterByPolicy(OntModel o) {
newModel.add(getRDF(os.getObject(), contextModel, newModel, recurseDepth + 1, includes)); removeProhibitedTriples(o);
} else { Set<String> okObjects = determineAccessibleUris(o);
contextModel.enterCriticalSection(Lock.READ); removeOrphanedObjects(o, okObjects);
try { }
newModel.add(contextModel.listStatements(obj, RDFS.label, (RDFNode)null));
} finally {
contextModel.leaveCriticalSection();
}
}
}
}
newModel = getLabelAndTypes(entity, contextModel, newModel ); /**
newModel = getStatementsWithUntypedProperties(subj, contextModel, vreq.getAssertionsOntModel(), newModel); * Remove the triples that we aren't allowed to see.
*/
//bdc34: The following code adds all triples where entity is the Subject. private void removeProhibitedTriples(OntModel o) {
// contextModel.enterCriticalSection(Lock.READ); StmtIterator stmts = o.listStatements();
// try { while (stmts.hasNext()) {
// StmtIterator iter = contextModel.listStatements(subj, (Property) null, (RDFNode) null); Statement stmt = stmts.next();
// while (iter.hasNext()) { String subjectUri = stmt.getSubject().getURI();
// Statement stmt = iter.next(); String predicateUri = stmt.getPredicate().getURI();
// if (!newModel.contains(stmt)) { if (stmt.getObject().isLiteral()) {
// newModel.add(stmt); String value = stmt.getObject().asLiteral().getString();
// } DataPropertyStatement dps = new DataPropertyStatementImpl(
// } subjectUri, predicateUri, value);
// } finally { RequestedAction ddps = new DisplayDataPropertyStatement(dps);
// contextModel.leaveCriticalSection(); if (!PolicyHelper.isAuthorizedForActions(vreq, ddps)) {
// } log.debug("not authorized: " + ddps);
stmts.remove();
if (recurseDepth == 0 && includes != null && entity.isVClass(PERSON_CLASS_URI)) {
for (String include : includes) {
String rootDir = null;
if (INCLUDE_ALL.equals(include)) {
rootDir = RICH_EXPORT_ROOT;
} else {
rootDir = RICH_EXPORT_ROOT + include + "/";
}
long start = System.currentTimeMillis();
Model extendedModel = ExtendedLinkedDataUtils.createModelFromQueries(ctx, rootDir, contextModel, entity.getURI());
long elapsedTimeMillis = System.currentTimeMillis()-start;
log.info("Time to create rich export model: msecs = " + elapsedTimeMillis);
newModel.add(extendedModel);
}
}
return newModel;
}
public static boolean includeInLinkedData(Resource object, Model contextModel) {
boolean retval = false;
contextModel.enterCriticalSection(Lock.READ);
try {
StmtIterator iter = contextModel.listStatements(object, RDF.type, (RDFNode)null);
while (iter.hasNext()) {
Statement stmt = iter.next();
if (stmt.getObject().isResource() && contextModel.contains(stmt.getObject().asResource(), extendedLinkedDataProperty, xsdTrue)) {
retval = true;
break;
}
}
} finally {
contextModel.leaveCriticalSection();
}
return retval;
}
/* Get the properties that are difficult to get via a filtered WebappDaoFactory. */
private Model getLabelAndTypes(Individual entity, Model ontModel, Model newModel){
for( VClass vclass : entity.getVClasses()){
newModel.add(newModel.getResource(entity.getURI()), RDF.type, newModel.getResource(vclass.getURI()));
}
ontModel.enterCriticalSection(Lock.READ);
try {
newModel.add(ontModel.listStatements(ontModel.getResource(entity.getURI()), RDFS.label, (RDFNode)null));
} finally {
ontModel.leaveCriticalSection();
}
return newModel;
}
/* This method adds in statements in which the property does not
* have an rdf type in the asserted model.
* This was added for release 1.5 to handle cases such as the
* reasoning-plugin inferred dcterms:creator assertion
*/
private Model getStatementsWithUntypedProperties(Resource subject, OntModel contextModel, OntModel assertionsModel, Model newModel) {
contextModel.enterCriticalSection(Lock.READ);
try {
StmtIterator iter = contextModel.listStatements(subject, (Property) null, (RDFNode) null);
while (iter.hasNext()) {
Statement stmt = iter.next();
Property property = stmt.getPredicate();
assertionsModel.enterCriticalSection(Lock.READ);
try {
if (!assertionsModel.contains(property, RDF.type) && !newModel.contains(stmt)) {
newModel.add(stmt);
}
} finally {
assertionsModel.leaveCriticalSection();
} }
} else if (stmt.getObject().isURIResource()) {
String objectUri = stmt.getObject().asResource().getURI();
ObjectPropertyStatement ops = new ObjectPropertyStatementImpl(
subjectUri, predicateUri, objectUri);
RequestedAction dops = new DisplayObjectPropertyStatement(ops);
if (!PolicyHelper.isAuthorizedForActions(vreq, dops)) {
log.debug("not authorized: " + dops);
stmts.remove();
}
} else {
log.warn("blank node: + stmt");
stmts.remove();
}
}
}
/**
* Collect the URIs of all objects that are accessible through permitted
* triples.
*/
private Set<String> determineAccessibleUris(OntModel o) {
Resource i = o.getResource(individualUri);
Set<String> uris = new HashSet<>();
uris.add(individualUri);
StmtIterator stmts;
stmts = o.listStatements(i, null, (RDFNode) null);
while (stmts.hasNext()) {
Statement stmt = stmts.next();
if (stmt.getObject().isURIResource()) {
uris.add(stmt.getObject().asResource().getURI());
} }
} finally {
contextModel.leaveCriticalSection();
} }
return newModel; stmts = o.listStatements(null, null, i);
} while (stmts.hasNext()) {
Statement stmt = stmts.next();
uris.add(stmt.getSubject().getURI());
}
return uris;
}
/**
* Remove any statements about objects that cannot be reached through
* permitted triples.
*/
private void removeOrphanedObjects(OntModel o, Set<String> okObjects) {
StmtIterator stmts = o.listStatements();
while (stmts.hasNext()) {
Statement stmt = stmts.next();
if (!okObjects.contains(stmt.getSubject().getURI())) {
log.debug("removing orphan triple: " + stmt);
stmts.remove();
}
}
}
private Model runConstructQuery(String query) {
try {
return RDFServiceUtils.parseModel(rdfService.sparqlConstructQuery(
query, RDFService.ModelSerializationFormat.N3),
RDFService.ModelSerializationFormat.N3);
} catch (RDFServiceException e) {
throw new RuntimeException(e);
}
}
private Model getRichExportRdf() {
Model richExportModel = ModelFactory.createDefaultModel();
for (String include : richExportIncludes) {
String rootDir = RICH_EXPORT_ROOT;
if (!INCLUDE_ALL.equals(include)) {
rootDir += include + "/";
}
long start = System.currentTimeMillis();
richExportModel.add(ExtendedLinkedDataUtils.createModelFromQueries(
ctx, rootDir, contentModel, individualUri));
long elapsedTimeMillis = System.currentTimeMillis() - start;
log.debug("Time to create rich export model: msecs = "
+ elapsedTimeMillis);
}
return richExportModel;
}
} }
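To make the %1$s/%2$s substitutions in getStatementsAboutEntity() and getLabelsAndTypesOfRelatedObjects() concrete, this is roughly what the generated queries look like for a hypothetical individual URI (http://example.org/individual/n123 is a placeholder; RDF.type and RDFS.label expand to their full URIs when formatted into the string):

// Statements with the entity as subject; the second query in
// getStatementsAboutEntity() is the mirror image, with the entity as object.
String aboutEntity =
    "CONSTRUCT { <http://example.org/individual/n123> ?predicate ?object . } " +
    "WHERE { <http://example.org/individual/n123> ?predicate ?object } ";

// Types of related objects; the companion query does the same for rdfs:label.
String typesOfRelatedObjects =
    "CONSTRUCT { ?object <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?type . } " +
    "WHERE { <http://example.org/individual/n123> ?predicate ?object ." +
    "  ?object <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?type . } ";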


@@ -132,10 +132,11 @@ public class IndividualRequestAnalyzer {
* only provide a set of bytes. * only provide a set of bytes.
*/ */
protected ContentType checkAcceptHeaderForLinkedDataRequest() { protected ContentType checkAcceptHeaderForLinkedDataRequest() {
String acceptHeader = vreq.getHeader("accept"); String acceptHeader = vreq.getHeader("Accept");
if (acceptHeader == null) { if (acceptHeader == null)
acceptHeader = vreq.getHeader("accept");
if (acceptHeader == null)
return null; return null;
}
try { try {
Map<String, Float> typesAndQ = ContentType Map<String, Float> typesAndQ = ContentType


@@ -28,6 +28,14 @@ public interface WebappDaoFactory {
*/ */
public String checkURI(String uriStr, boolean checkUniqueness); public String checkURI(String uriStr, boolean checkUniqueness);
/**
* Check whether a given URI string is already in use in the system:
* returns true if the URI appears as the subject or object of any statement, or as a property.
* @param uriStr
* @return
*/
public boolean hasExistingURI(String uriStr);
public String getDefaultNamespace(); public String getDefaultNamespace();
public Set<String> getNonuserNamespaces(); public Set<String> getNonuserNamespaces();
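For illustration, a hypothetical caller of the new hasExistingURI() method; the class and the minting scheme are assumptions, and only hasExistingURI and getDefaultNamespace come from the interface above:

import java.util.UUID;

import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;

public class UnusedUriMinter {
    /**
     * Mint a URI in the default namespace that is not yet present anywhere in
     * the system, retrying until hasExistingURI reports the candidate as free.
     */
    public static String mintUnusedUri(WebappDaoFactory wadf) {
        while (true) {
            String candidate = wadf.getDefaultNamespace() + "n" + UUID.randomUUID();
            if (!wadf.hasExistingURI(candidate)) {
                return candidate;
            }
        }
    }
}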


@@ -83,6 +83,10 @@ public class WebappDaoFactoryFiltering implements WebappDaoFactory {
return innerWebappDaoFactory.checkURI(uriStr, checkUniqueness); return innerWebappDaoFactory.checkURI(uriStr, checkUniqueness);
} }
public boolean hasExistingURI(String uriStr) {
return innerWebappDaoFactory.hasExistingURI(uriStr);
}
public WebappDaoFactory getUserAwareDaoFactory(String userURI) { public WebappDaoFactory getUserAwareDaoFactory(String userURI) {
//TODO: need to clone the filtering factory //TODO: need to clone the filtering factory
return innerWebappDaoFactory.getUserAwareDaoFactory(userURI); return innerWebappDaoFactory.getUserAwareDaoFactory(userURI);


@@ -186,40 +186,58 @@ public class WebappDaoFactoryJena implements WebappDaoFactory {
errorMsg += (iri.violations(false).next()) errorMsg += (iri.violations(false).next())
.getShortMessage() + " "; .getShortMessage() + " ";
} else if (checkUniqueness) { } else if (checkUniqueness) {
OntModel ontModel = ontModelSelector.getFullModel(); boolean existingURI = this.hasExistingURI(uriStr);
ontModel.enterCriticalSection(Lock.READ); if(existingURI) {
try { errorMsg+="Not a valid URI. Please enter another URI. ";
Resource newURIAsRes = ResourceFactory.createResource(uriStr); errorMsg+=duplicateMsg;
Property newURIAsProp = ResourceFactory.createProperty(uriStr); //the original code included an extra line "Not a valid URI. Please enter another URI. "
StmtIterator closeIt = ontModel.listStatements( //in the error message in addition to the duplicate error message in the case where the uri
newURIAsRes, null, (RDFNode)null); //is in the subject position of any of the statements in the system - but not so where the
if (closeIt.hasNext()) { //uri was only in the object position or was a propery. In this code, the same error message
validURI = false; //is returned for all duplicate uris
errorMsg+="Not a valid URI. Please enter another URI. "; }
errorMsg+=duplicateMsg;
}
if (validURI) {
closeIt = ontModel.listStatements(null, null, newURIAsRes);
if (closeIt.hasNext()) {
validURI = false;
errorMsg+=duplicateMsg;
}
}
if (validURI) {
closeIt = ontModel.listStatements(
null, newURIAsProp, (RDFNode)null);
if (closeIt.hasNext()) {
validURI = false;
errorMsg+=duplicateMsg;
}
}
} finally {
ontModel.leaveCriticalSection();
}
} }
return (errorMsg.length()>0) ? errorMsg : null; return (errorMsg.length()>0) ? errorMsg : null;
} }
//Check whether the URI is already in use, either as a resource or as a property
public boolean hasExistingURI(String uriStr) {
boolean existingURI = false;
OntModel ontModel = ontModelSelector.getFullModel();
ontModel.enterCriticalSection(Lock.READ);
try {
Resource newURIAsRes = ResourceFactory.createResource(uriStr);
Property newURIAsProp = ResourceFactory.createProperty(uriStr);
StmtIterator closeIt = ontModel.listStatements(
newURIAsRes, null, (RDFNode)null);
if (closeIt.hasNext()) {
existingURI = true;
}
//if not in the subject position, check in object position
if (!existingURI) {
closeIt = ontModel.listStatements(null, null, newURIAsRes);
if (closeIt.hasNext()) {
existingURI= true;
}
}
//Check for property
if (!existingURI) {
closeIt = ontModel.listStatements(
null, newURIAsProp, (RDFNode)null);
if (closeIt.hasNext()) {
existingURI = true;
}
}
} finally {
ontModel.leaveCriticalSection();
}
return existingURI;
}
public WebappDaoFactory getUserAwareDaoFactory(String userURI) { public WebappDaoFactory getUserAwareDaoFactory(String userURI) {
return new WebappDaoFactoryJena(this, userURI); return new WebappDaoFactoryJena(this, userURI);
} }


@@ -2,6 +2,9 @@
package edu.cornell.mannlib.vitro.webapp.filters; package edu.cornell.mannlib.vitro.webapp.filters;
import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_DB_MODEL;
import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_INF_MODEL;
import java.io.IOException; import java.io.IOException;
import java.util.List; import java.util.List;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@@ -19,9 +22,12 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.BulkUpdateHandler;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.ontology.OntModel; import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec; import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset; import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory; import com.hp.hpl.jena.rdf.model.ModelFactory;
import edu.cornell.mannlib.vitro.webapp.auth.identifier.RequestIdentifiers; import edu.cornell.mannlib.vitro.webapp.auth.identifier.RequestIdentifiers;
@@ -36,8 +42,8 @@ import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig; import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.filtering.WebappDaoFactoryFiltering; import edu.cornell.mannlib.vitro.webapp.dao.filtering.WebappDaoFactoryFiltering;
import edu.cornell.mannlib.vitro.webapp.dao.filtering.filters.HideFromDisplayByPolicyFilter; import edu.cornell.mannlib.vitro.webapp.dao.filtering.filters.HideFromDisplayByPolicyFilter;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset; import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SpecialBulkUpdateHandlerGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB; import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB.SDBDatasetMode; import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB.SDBDatasetMode;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService; import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
@@ -61,7 +67,7 @@ public class RequestModelsPrep implements Filter {
* parameters, e.g. "/vitro/index.jsp" "/vitro/themes/enhanced/css/edit.css" * parameters, e.g. "/vitro/index.jsp" "/vitro/themes/enhanced/css/edit.css"
*/ */
private final static Pattern[] skipPatterns = { private final static Pattern[] skipPatterns = {
Pattern.compile(".*\\.(gif|GIF|jpg|jpeg)$"), Pattern.compile(".*\\.(gif|GIF|jpg|jpeg|png|PNG)$"),
Pattern.compile(".*\\.css$"), Pattern.compile(".*\\.js$"), Pattern.compile(".*\\.css$"), Pattern.compile(".*\\.js$"),
Pattern.compile("/.*/themes/.*/site_icons/.*"), Pattern.compile("/.*/themes/.*/site_icons/.*"),
Pattern.compile("/.*/images/.*") }; Pattern.compile("/.*/images/.*") };
@@ -130,28 +136,120 @@
HttpServletRequest req) { HttpServletRequest req) {
VitroRequest vreq = new VitroRequest(req); VitroRequest vreq = new VitroRequest(req);
setRdfServicesAndDatasets(rawRdfService, vreq);
RDFService rdfService = vreq.getRDFService();
Dataset dataset = vreq.getDataset();
setRawModels(vreq, dataset);
// We need access to the language-ignorant version of this model.
// Grab it before it gets wrapped in language awareness.
vreq.setLanguageNeutralUnionFullModel(ModelAccess.on(vreq).getOntModel(ModelID.UNION_FULL));
wrapModelsWithLanguageAwareness(vreq);
setWebappDaoFactories(vreq, rdfService);
}
/**
* Set language-neutral and language-aware versions of the RdfService and
* Dataset.
*/
private void setRdfServicesAndDatasets(RDFService rawRdfService,
VitroRequest vreq) {
vreq.setUnfilteredRDFService(rawRdfService); vreq.setUnfilteredRDFService(rawRdfService);
vreq.setUnfilteredDataset(new RDFServiceDataset(rawRdfService)); vreq.setUnfilteredDataset(new RDFServiceDataset(rawRdfService));
List<String> langs = getPreferredLanguages(req); RDFService rdfService = addLanguageAwareness(vreq, rawRdfService);
RDFService rdfService = addLanguageAwareness(langs, rawRdfService);
vreq.setRDFService(rdfService); vreq.setRDFService(rdfService);
Dataset dataset = new RDFServiceDataset(rdfService); Dataset dataset = new RDFServiceDataset(rdfService);
vreq.setDataset(dataset); vreq.setDataset(dataset);
}
WebappDaoFactoryConfig config = createWadfConfig(langs, req); private void setRawModels(VitroRequest vreq, Dataset dataset) {
// These are memory-mapped (fast), and read-mostly (low contention), so
// just use the ones from the context.
useModelFromContext(vreq, ModelID.APPLICATION_METADATA);
useModelFromContext(vreq, ModelID.USER_ACCOUNTS);
useModelFromContext(vreq, ModelID.DISPLAY);
useModelFromContext(vreq, ModelID.DISPLAY_DISPLAY);
useModelFromContext(vreq, ModelID.DISPLAY_TBOX);
useModelFromContext(vreq, ModelID.BASE_TBOX);
useModelFromContext(vreq, ModelID.INFERRED_TBOX);
useModelFromContext(vreq, ModelID.UNION_TBOX);
ModelAccess.on(vreq).setJenaOntModel( // Anything derived from the ABOX is not memory-mapped, so create
ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, // versions from the short-term RDF service.
dataset.getDefaultModel())); OntModel baseABoxModel = createNamedModelFromDataset(dataset,
JENA_DB_MODEL);
OntModel inferenceABoxModel = createNamedModelFromDataset(dataset,
JENA_INF_MODEL);
OntModel unionABoxModel = createCombinedBulkUpdatingModel(
baseABoxModel, inferenceABoxModel);
addLanguageAwarenessToRequestModel(req, ModelID.DISPLAY); OntModel baseFullModel = createCombinedBulkUpdatingModel(baseABoxModel,
addLanguageAwarenessToRequestModel(req, ModelID.APPLICATION_METADATA); ModelAccess.on(vreq).getOntModel(ModelID.BASE_TBOX));
addLanguageAwarenessToRequestModel(req, ModelID.UNION_TBOX); OntModel inferenceFullModel = createCombinedModel(inferenceABoxModel,
addLanguageAwarenessToRequestModel(req, ModelID.UNION_FULL); ModelAccess.on(vreq).getOntModel(ModelID.INFERRED_TBOX));
addLanguageAwarenessToRequestModel(req, ModelID.BASE_TBOX); OntModel unionFullModel = ModelFactory.createOntologyModel(
addLanguageAwarenessToRequestModel(req, ModelID.BASE_FULL); OntModelSpec.OWL_MEM, dataset.getDefaultModel());
ModelAccess.on(vreq).setOntModel(ModelID.BASE_ABOX, baseABoxModel);
ModelAccess.on(vreq).setOntModel(ModelID.INFERRED_ABOX, unionABoxModel);
ModelAccess.on(vreq)
.setOntModel(ModelID.UNION_ABOX, inferenceABoxModel);
ModelAccess.on(vreq).setOntModel(ModelID.BASE_FULL, baseFullModel);
ModelAccess.on(vreq).setOntModel(ModelID.INFERRED_FULL,
inferenceFullModel);
ModelAccess.on(vreq).setOntModel(ModelID.UNION_FULL, unionFullModel);
}
private void useModelFromContext(VitroRequest vreq, ModelID modelId) {
OntModel contextModel = ModelAccess.on(ctx).getOntModel(modelId);
ModelAccess.on(vreq).setOntModel(modelId, contextModel);
}
private OntModel createNamedModelFromDataset(Dataset dataset, String name) {
return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, dataset.getNamedModel(name));
}
private OntModel createCombinedModel(OntModel oneModel, OntModel otherModel) {
return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM,
ModelFactory.createUnion(oneModel, otherModel));
}
private OntModel createCombinedBulkUpdatingModel(OntModel baseModel,
OntModel otherModel) {
BulkUpdateHandler bulkUpdateHandler = baseModel.getGraph().getBulkUpdateHandler();
Graph unionGraph = ModelFactory.createUnion(baseModel, otherModel).getGraph();
Model unionModel = ModelFactory.createModelForGraph(
new SpecialBulkUpdateHandlerGraph(unionGraph, bulkUpdateHandler));
return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, unionModel);
}
private void wrapModelsWithLanguageAwareness(VitroRequest vreq) {
wrapModelWithLanguageAwareness(vreq, ModelID.DISPLAY);
wrapModelWithLanguageAwareness(vreq, ModelID.APPLICATION_METADATA);
wrapModelWithLanguageAwareness(vreq, ModelID.BASE_TBOX);
wrapModelWithLanguageAwareness(vreq, ModelID.UNION_TBOX);
wrapModelWithLanguageAwareness(vreq, ModelID.UNION_FULL);
wrapModelWithLanguageAwareness(vreq, ModelID.BASE_FULL);
}
private void wrapModelWithLanguageAwareness(HttpServletRequest req,
ModelID id) {
if (isLanguageAwarenessEnabled()) {
OntModel unaware = ModelAccess.on(req).getOntModel(id);
OntModel aware = LanguageFilteringUtils
.wrapOntModelInALanguageFilter(unaware, req);
ModelAccess.on(req).setOntModel(id, aware);
}
}
private void setWebappDaoFactories(VitroRequest vreq, RDFService rdfService) {
WebappDaoFactoryConfig config = createWadfConfig(vreq);
WebappDaoFactory unfilteredWadf = new WebappDaoFactorySDB(rdfService, WebappDaoFactory unfilteredWadf = new WebappDaoFactorySDB(rdfService,
ModelAccess.on(vreq).getUnionOntModelSelector(), config); ModelAccess.on(vreq).getUnionOntModelSelector(), config);
@@ -175,14 +273,15 @@ public class RequestModelsPrep implements Filter {
				.checkForModelSwitching(vreq, wadf);
		HideFromDisplayByPolicyFilter filter = new HideFromDisplayByPolicyFilter(
-				RequestIdentifiers.getIdBundleForRequest(req),
+				RequestIdentifiers.getIdBundleForRequest(vreq),
				ServletPolicyList.getPolicies(ctx));
		WebappDaoFactoryFiltering filteredWadf = new WebappDaoFactoryFiltering(
				switchedWadf, filter);
		ModelAccess.on(vreq).setWebappDaoFactory(FactoryID.UNION, filteredWadf);
	}
-	private WebappDaoFactoryConfig createWadfConfig(List<String> langs, HttpServletRequest req) {
+	private WebappDaoFactoryConfig createWadfConfig(HttpServletRequest req) {
+		List<String> langs = getPreferredLanguages(req);
		WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
		config.setDefaultNamespace(defaultNamespace);
		config.setPreferredLanguages(langs);
@@ -203,8 +302,9 @@ public class RequestModelsPrep implements Filter {
				"true"));
	}
-	private RDFService addLanguageAwareness(List<String> langs,
+	private RDFService addLanguageAwareness(HttpServletRequest req,
			RDFService rawRDFService) {
+		List<String> langs = getPreferredLanguages(req);
		if (isLanguageAwarenessEnabled()) {
			return new LanguageFilteringRDFService(rawRDFService, langs);
		} else {
@@ -212,15 +312,6 @@ public class RequestModelsPrep implements Filter {
		}
	}
-	private void addLanguageAwarenessToRequestModel(HttpServletRequest req, ModelID id) {
-		if (isLanguageAwarenessEnabled()) {
-			OntModel unaware = ModelAccess.on(req.getSession()).getOntModel(id);
-			OntModel aware = LanguageFilteringUtils
-					.wrapOntModelInALanguageFilter(unaware, req);
-			ModelAccess.on(req).setOntModel(id, aware);
-		}
-	}
	private boolean isStoreReasoned(ServletRequest req) {
		String isStoreReasoned = ConfigurationProperties.getBean(req).getProperty(
				"VitroConnection.DataSource.isStoreReasoned", "true");

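Note on the RequestModelsPrep hunks above: each request now carries a language-aware RDFService and Dataset, wired up once in setRdfServicesAndDatasets() and consumed by everything downstream. The sketch below is illustrative only and not part of this commit; it relies on the VitroRequest getter shown in the diff (getDataset()) and standard Jena 2.x query classes, while the class and method names here are placeholders of mine.

    // Illustrative sketch (not from this commit): consuming the per-request,
    // language-filtered Dataset that RequestModelsPrep now prepares.
    import com.hp.hpl.jena.query.Dataset;
    import com.hp.hpl.jena.query.QueryExecution;
    import com.hp.hpl.jena.query.QueryExecutionFactory;
    import com.hp.hpl.jena.query.ResultSet;
    import com.hp.hpl.jena.query.ResultSetFactory;

    import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;

    public class RequestModelsUsageSketch {
        /** Labels come back filtered to the request's preferred languages. */
        public ResultSet labelsFor(VitroRequest vreq, String individualUri) {
            Dataset dataset = vreq.getDataset(); // language-aware, per the filter above
            String query = "SELECT ?label WHERE { <" + individualUri + "> "
                    + "<http://www.w3.org/2000/01/rdf-schema#label> ?label }";
            QueryExecution qe = QueryExecutionFactory.create(query, dataset);
            try {
                // Copy the results so they stay usable after the execution closes.
                return ResultSetFactory.copyResults(qe.execSelect());
            } finally {
                qe.close();
            }
        }
    }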

@@ -44,6 +44,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.IndividualDao;
 import edu.cornell.mannlib.vitro.webapp.dao.VClassDao;
 import edu.cornell.mannlib.vitro.webapp.dao.VClassGroupDao;
 import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
+import edu.cornell.mannlib.vitro.webapp.i18n.I18n;
 import edu.cornell.mannlib.vitro.webapp.search.IndexConstants;
 import edu.cornell.mannlib.vitro.webapp.search.SearchException;
 import edu.cornell.mannlib.vitro.webapp.search.VitroSearchTermNames;
@@ -164,9 +165,9 @@ public class PagedSearchController extends FreemarkerHttpServlet {
		log.debug("Query text is \""+ queryText + "\"");
-		String badQueryMsg = badQueryText( queryText );
+		String badQueryMsg = badQueryText( queryText, vreq );
		if( badQueryMsg != null ){
-			return doFailedSearch(badQueryMsg, queryText, format);
+			return doFailedSearch(badQueryMsg, queryText, format, vreq);
		}
		SolrQuery query = getQuery(queryText, hitsPerPage, startIndex, vreq);
@@ -176,26 +177,26 @@ public class PagedSearchController extends FreemarkerHttpServlet {
		try {
			response = solr.query(query);
		} catch (Exception ex) {
-			String msg = makeBadSearchMessage(queryText, ex.getMessage());
+			String msg = makeBadSearchMessage(queryText, ex.getMessage(), vreq);
			log.error("could not run Solr query",ex);
-			return doFailedSearch(msg, queryText, format);
+			return doFailedSearch(msg, queryText, format, vreq);
		}
		if (response == null) {
			log.error("Search response was null");
-			return doFailedSearch("The search request contained errors.", queryText, format);
+			return doFailedSearch(I18n.text(vreq, "error_in_search_request"), queryText, format, vreq);
		}
		SolrDocumentList docs = response.getResults();
		if (docs == null) {
			log.error("Document list for a search was null");
-			return doFailedSearch("The search request contained errors.", queryText,format);
+			return doFailedSearch(I18n.text(vreq, "error_in_search_request"), queryText,format, vreq);
		}
		long hitCount = docs.getNumFound();
		log.debug("Number of hits = " + hitCount);
		if ( hitCount < 1 ) {
-			return doNoHits(queryText,format);
+			return doNoHits(queryText,format, vreq);
		}
		List<Individual> individuals = new ArrayList<Individual>(docs.size());
@@ -275,7 +276,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
		body.put("pagingLinks",
				getPagingLinks(startIndex, hitsPerPage, hitCount,
						vreq.getServletPath(),
-						pagingLinkParams));
+						pagingLinkParams, vreq));
		if (startIndex != 0) {
			body.put("prevPage", getPreviousPageLink(startIndex,
@@ -339,12 +340,12 @@ public class PagedSearchController extends FreemarkerHttpServlet {
		return startIndex;
	}
-	private String badQueryText(String qtxt) {
+	private String badQueryText(String qtxt, VitroRequest vreq) {
		if( qtxt == null || "".equals( qtxt.trim() ) )
-			return "Please enter a search term.";
+			return I18n.text(vreq, "enter_search_term");
		if( qtxt.equals("*:*") )
-			return "Search term was invalid" ;
+			return I18n.text(vreq, "invalid_search_term") ;
		return null;
	}
@@ -526,7 +527,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
		public String getCount() { return Long.toString(count); }
	}
-	protected static List<PagingLink> getPagingLinks(int startIndex, int hitsPerPage, long hitCount, String baseUrl, ParamMap params) {
+	protected static List<PagingLink> getPagingLinks(int startIndex, int hitsPerPage, long hitCount, String baseUrl, ParamMap params, VitroRequest vreq) {
		List<PagingLink> pagingLinks = new ArrayList<PagingLink>();
@@ -550,7 +551,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
				pagingLinks.add(new PagingLink(pageNumber, baseUrl, params));
			}
		} else {
-			pagingLinks.add(new PagingLink("more...", baseUrl, params));
+			pagingLinks.add(new PagingLink(I18n.text(vreq, "paging_link_more"), baseUrl, params));
			break;
		}
	}
@@ -591,20 +592,20 @@ public class PagedSearchController extends FreemarkerHttpServlet {
		return new ExceptionResponseValues(getTemplate(f,Result.ERROR), body, e);
	}
-	private TemplateResponseValues doFailedSearch(String message, String querytext, Format f) {
+	private TemplateResponseValues doFailedSearch(String message, String querytext, Format f, VitroRequest vreq) {
		Map<String, Object> body = new HashMap<String, Object>();
-		body.put("title", "Search for '" + querytext + "'");
+		body.put("title", I18n.text(vreq, "search_for", querytext));
		if ( StringUtils.isEmpty(message) ) {
-			message = "Search failed.";
+			message = I18n.text(vreq, "search_failed");
		}
		body.put("message", message);
		return new TemplateResponseValues(getTemplate(f,Result.ERROR), body);
	}
-	private TemplateResponseValues doNoHits(String querytext, Format f) {
+	private TemplateResponseValues doNoHits(String querytext, Format f, VitroRequest vreq) {
		Map<String, Object> body = new HashMap<String, Object>();
-		body.put("title", "Search for '" + querytext + "'");
-		body.put("message", "No matching results.");
+		body.put("title", I18n.text(vreq, "search_for", querytext));
+		body.put("message", I18n.text(vreq, "no_matching_results"));
		return new TemplateResponseValues(getTemplate(f,Result.ERROR), body);
	}
@@ -613,7 +614,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
	 * @param queryText
	 * @param exceptionMsg
	 */
-	private String makeBadSearchMessage(String querytext, String exceptionMsg){
+	private String makeBadSearchMessage(String querytext, String exceptionMsg, VitroRequest vreq){
		String rv = "";
		try{
			//try to get the column in the search term that is causing the problems
@@ -641,7 +642,8 @@ public class PagedSearchController extends FreemarkerHttpServlet {
			if (post > i)
				after = querytext.substring(i + 1, post);
-			rv = "The search term had an error near <span class='searchQuote'>"
+			rv = I18n.text(vreq, "search_term_error_near") +
+					" <span class='searchQuote'>"
					+ before + "<span class='searchError'>" + querytext.charAt(i)
					+ "</span>" + after + "</span>";
		} catch (Throwable ex) {
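The PagedSearchController hunks above replace hard-coded English strings with I18n.text(vreq, key, args...) lookups, which is why most of the private helpers now take the VitroRequest. A minimal sketch of that pattern follows, for orientation only; the wrapper class and method are placeholders, and only the I18n.text call and the "search_for" key come from the diff.

    // Illustrative sketch of the I18n.text(...) pattern used above; the class
    // and method are placeholders, not code from this commit.
    import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
    import edu.cornell.mannlib.vitro.webapp.i18n.I18n;

    public class I18nUsageSketch {
        /** Trailing arguments fill the placeholders in the bundle message. */
        public String searchTitle(VitroRequest vreq, String querytext) {
            // Resolved from the i18n bundles for the request's preferred locale.
            return I18n.text(vreq, "search_for", querytext);
        }
    }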


@@ -88,6 +88,8 @@ public class ContentModelSetup extends JenaDataSourceSetupBase
		} else {
			checkForNamespaceMismatch( applicationMetadataModel, ctx );
		}
+		RDFFilesLoader.loadEveryTimeFiles(ctx, "abox", baseABoxModel);
+		RDFFilesLoader.loadEveryTimeFiles(ctx, "tbox", baseTBoxModel);
		log.info("Setting up full models");
		OntModel baseFullModel = createCombinedBulkUpdatingModel(baseABoxModel, baseTBoxModel);
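For orientation, the two loadEveryTimeFiles() calls added above mean ContentModelSetup re-reads a set of "every time" RDF files into the base ABox and TBox models on each startup, presumably in contrast to files that are loaded only on first setup. The sketch below shows the gist with plain Jena; the directory layout, the N3 syntax, and the class name are assumptions of mine, not the RDFFilesLoader implementation.

    // Illustrative only: re-load every RDF file in a directory into a model at
    // startup. Assumes N3 files; a real loader would dispatch on file extension.
    import java.io.File;
    import java.io.FileInputStream;
    import java.io.IOException;
    import java.io.InputStream;

    import com.hp.hpl.jena.ontology.OntModel;

    public class EveryTimeLoadSketch {
        public void loadAll(File everytimeDir, OntModel target) throws IOException {
            File[] files = everytimeDir.listFiles();
            if (files == null) {
                return; // no such directory; nothing to load
            }
            for (File file : files) {
                InputStream in = new FileInputStream(file);
                try {
                    target.read(in, null, "N3"); // merge statements into the base model
                } finally {
                    in.close();
                }
            }
        }
    }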


@@ -11,18 +11,26 @@ import junit.framework.Assert;
 import org.junit.Before;
 import org.junit.Test;
+import stubs.edu.cornell.mannlib.vitro.webapp.i18n.I18nStub;
 import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder.ParamMap;
 import edu.cornell.mannlib.vitro.webapp.search.controller.PagedSearchController.PagingLink;
 public class PagedSearchControllerTest {
+	@SuppressWarnings("unused")
+	@Before
+	public void useI18nStubBundles() {
+		new I18nStub();
+	}
	@Test
	public void testGetPagingLinks() {
		ParamMap pm = new ParamMap();
		int hitsPerPage = 25;
		int totalHits = 500;
		int currentStartIndex = 0;
-		List<PagingLink> pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm);
+		List<PagingLink> pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm, null);
		Assert.assertNotNull(pageLinks);
		Assert.assertEquals(500 / 25, pageLinks.size());
@@ -30,7 +38,7 @@ public class PagedSearchControllerTest {
		hitsPerPage = 25;
		totalHits = 10;
		currentStartIndex = 0;
-		pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm);
+		pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm, null);
		Assert.assertNotNull(pageLinks);
		Assert.assertEquals(0, pageLinks.size());
	}
@@ -41,7 +49,7 @@ public class PagedSearchControllerTest {
		int hitsPerPage = 25;
		int totalHits = 349909;
		int currentStartIndex = 0;
-		List<PagingLink> pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm);
+		List<PagingLink> pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm, null);
		Assert.assertNotNull(pageLinks);
		Assert.assertEquals( PagedSearchController.DEFAULT_MAX_HIT_COUNT / hitsPerPage, pageLinks.size());
@@ -49,7 +57,7 @@ public class PagedSearchControllerTest {
		hitsPerPage = 25;
		totalHits = PagedSearchController.DEFAULT_MAX_HIT_COUNT + 20329;
		currentStartIndex = PagedSearchController.DEFAULT_MAX_HIT_COUNT + 5432;
-		pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm);
+		pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm, null);
		Assert.assertNotNull(pageLinks);
		Assert.assertEquals(
				(currentStartIndex / hitsPerPage) + //all the pages that are before the current page
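The test changes above follow from the new getPagingLinks() signature: installing an I18nStub in @Before lets the static I18n lookups inside the controller resolve without a servlet context, so the tests can simply pass null for the VitroRequest. Below is a standalone sketch of that pattern; the test class and assertion are illustrative, while the stub setup and the getPagingLinks call mirror the diff.

    // Illustrative test sketch mirroring the stub setup added above. It sits in
    // the controller's package because getPagingLinks() is protected.
    package edu.cornell.mannlib.vitro.webapp.search.controller;

    import java.util.List;

    import org.junit.Assert;
    import org.junit.Before;
    import org.junit.Test;

    import stubs.edu.cornell.mannlib.vitro.webapp.i18n.I18nStub;
    import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder.ParamMap;
    import edu.cornell.mannlib.vitro.webapp.search.controller.PagedSearchController.PagingLink;

    public class PagingLinksSketchTest {
        @Before
        public void useI18nStubBundles() {
            new I18nStub(); // route I18n.text(...) lookups to stub bundles
        }

        @Test
        public void nullRequestIsEnoughOnceStubbed() {
            // Any localized text (such as the "more..." link) resolves via the
            // stub, so no VitroRequest is needed here.
            List<PagingLink> links = PagedSearchController.getPagingLinks(
                    0, 25, 349909, "baseURL", new ParamMap(), null);
            Assert.assertNotNull(links);
        }
    }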


@@ -155,6 +155,12 @@ return this.objectPropertyStatementDao; }
				"WebappDaoFactory.checkURI() not implemented.");
	}
+	@Override
+	public boolean hasExistingURI(String uriStr) {
+		throw new RuntimeException(
+				"WebappDaoFactory.hasExistingURI() not implemented.");
+	}
	@Override
	public Set<String> getNonuserNamespaces() {
		throw new RuntimeException(
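The stub above keeps the usual fail-fast convention: any WebappDaoFactory method a test does not care about throws, so accidental use shows up immediately. A test that does need hasExistingURI() can override just that one method. The sketch below is hypothetical, including the stub's class name and package path; only the method signature is taken from the hunk.

    // Hypothetical sketch: override only the stubbed method a test needs.
    import stubs.edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryStub; // package path assumed

    public class HasExistingUriStubSketch {
        /** A stub that reports exactly one URI as already existing. */
        public WebappDaoFactoryStub knowing(final String knownUri) {
            return new WebappDaoFactoryStub() {
                @Override
                public boolean hasExistingURI(String uriStr) {
                    return knownUri.equals(uriStr);
                }
            };
        }
    }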


@@ -224,7 +224,7 @@ name will be used as the label. -->
 <#assign labelPropertyUri = ("http://www.w3.org/2000/01/rdf-schema#label"?url) />
 <#assign useEditLink = false />
 <#--edit link used if in edit mode and only one label and one language-->
-<#if labelCount = 1 && editable>
+<#if labelCount = 1 && editable && localesCount = 1 >
 	<#assign useEditLink = true/>
 </#if>
 <#local label = individual.nameStatement>