Merge branch 'develop' of https://github.com/vivo-project/Vitro into develop

hudajkhan 2013-09-16 14:08:34 -04:00
commit d5c1cc49c6
10 changed files with 660 additions and 237 deletions

View file

@ -508,6 +508,18 @@ previous = Anterior
page_link = enlace de la página
next_capitalized = Próximo
#
# search controller ( PagedSearchController.java )
#
error_in_search_request = La petición de búsqueda contenía errores.
enter_search_term = Por favor introduzca un término de búsqueda.
invalid_search_term = Criterio de búsqueda no es válido
paging_link_more = más ...
no_matching_results = No hay resultados que coincidan.
search_failed = Buscar falló.
search_term_error_near = El término de búsqueda tuvo un error cerca
search_for = Buscar ''{0}''
#
# shortview templates ( /templates/freemarker/body/partials/shortview )
#

View file

@ -207,5 +207,13 @@ public class VitroRequest extends HttpServletRequestWrapper {
return _req.getParameterValues(name);
}
public void setLanguageNeutralUnionFullModel(OntModel model) {
setAttribute("languageNeutralUnionFullModel", model);
}
public OntModel getLanguageNeutralUnionFullModel() {
return (OntModel) getAttribute("languageNeutralUnionFullModel");
}
}
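These two accessors simply stash an OntModel in a request attribute. RequestModelsPrep (later in this commit) calls the setter once per request, before the models are wrapped for language awareness, and IndividualRdfAssembler calls the getter when it needs a language-neutral view of the data. A minimal sketch of that round trip, assuming the filter has already populated ModelAccess for the request:

VitroRequest vreq = new VitroRequest(req);
// grab the union model before it is wrapped in language filtering
vreq.setLanguageNeutralUnionFullModel(ModelAccess.on(vreq).getOntModel(ModelID.UNION_FULL));
// ... later, in code that must ignore the Accept-Language header ...
OntModel neutral = vreq.getLanguageNeutralUnionFullModel();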

View file

@ -0,0 +1,235 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.controller.individual;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import javax.servlet.ServletContext;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.datatypes.TypeMapper;
import com.hp.hpl.jena.datatypes.xsd.XSDDatatype;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.Property;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.ResourceFactory;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.shared.Lock;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.VClass;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RdfResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.utils.jena.ExtendedLinkedDataUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaOutputUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType;
/**
* TODO Keep this around until release 1.7, in case anyone is relying on it.
*/
@Deprecated
public class ExtendedRdfAssembler {
private static final Log log = LogFactory
.getLog(ExtendedRdfAssembler.class);
private static final String RICH_EXPORT_ROOT = "/WEB-INF/rich-export/";
private static final String PERSON_CLASS_URI = "http://xmlns.com/foaf/0.1/Person";
private static final String INCLUDE_ALL = "all";
@SuppressWarnings("serial")
private static final Map<String, String> namespaces = new HashMap<String, String>() {{
put("display", VitroVocabulary.DISPLAY);
put("vitro", VitroVocabulary.vitroURI);
put("vitroPublic", VitroVocabulary.VITRO_PUBLIC);
}};
private static final Property extendedLinkedDataProperty = ResourceFactory.createProperty(namespaces.get("vitro") + "extendedLinkedData");
private static final Literal xsdTrue = ResourceFactory.createTypedLiteral("true", XSDDatatype.XSDboolean);
private final VitroRequest vreq;
private final ServletContext ctx;
private final Individual individual;
private final ContentType rdfFormat;
public ExtendedRdfAssembler(VitroRequest vreq, Individual individual,
ContentType rdfFormat) {
this.vreq = vreq;
this.ctx = vreq.getSession().getServletContext();
this.individual = individual;
this.rdfFormat = rdfFormat;
}
/**
* @return
*/
public ResponseValues assembleRdf() {
OntModel ontModel = vreq.getJenaOntModel();
String[] includes = vreq.getParameterValues("include");
Model newModel = getRDF(individual, ontModel, ModelFactory.createDefaultModel(), 0, includes);
JenaOutputUtils.setNameSpacePrefixes(newModel, vreq.getWebappDaoFactory());
return new RdfResponseValues(rdfFormat, newModel);
}
private Model getRDF(Individual entity, OntModel contextModel, Model newModel, int recurseDepth, String[] includes) {
Resource subj = newModel.getResource(entity.getURI());
List<DataPropertyStatement> dstates = entity.getDataPropertyStatements();
TypeMapper typeMapper = TypeMapper.getInstance();
for (DataPropertyStatement ds: dstates) {
Property dp = newModel.getProperty(ds.getDatapropURI());
Literal lit = null;
if ((ds.getLanguage()) != null && (ds.getLanguage().length()>0)) {
lit = newModel.createLiteral(ds.getData(),ds.getLanguage());
} else if ((ds.getDatatypeURI() != null) && (ds.getDatatypeURI().length()>0)) {
lit = newModel.createTypedLiteral(ds.getData(),typeMapper.getSafeTypeByName(ds.getDatatypeURI()));
} else {
lit = newModel.createLiteral(ds.getData());
}
newModel.add(newModel.createStatement(subj, dp, lit));
}
if (recurseDepth < 5) {
List<ObjectPropertyStatement> ostates = entity.getObjectPropertyStatements();
for (ObjectPropertyStatement os: ostates) {
Property prop = newModel.getProperty(os.getPropertyURI());
Resource obj = newModel.getResource(os.getObjectURI());
newModel.add(newModel.createStatement(subj, prop, obj));
if ( includeInLinkedData(obj, contextModel)) {
newModel.add(getRDF(os.getObject(), contextModel, newModel, recurseDepth + 1, includes));
} else {
contextModel.enterCriticalSection(Lock.READ);
try {
newModel.add(contextModel.listStatements(obj, RDFS.label, (RDFNode)null));
} finally {
contextModel.leaveCriticalSection();
}
}
}
}
newModel = getLabelAndTypes(entity, contextModel, newModel );
newModel = getStatementsWithUntypedProperties(subj, contextModel, vreq.getAssertionsOntModel(), newModel);
//bdc34: The following code adds all triples where entity is the Subject.
// contextModel.enterCriticalSection(Lock.READ);
// try {
// StmtIterator iter = contextModel.listStatements(subj, (Property) null, (RDFNode) null);
// while (iter.hasNext()) {
// Statement stmt = iter.next();
// if (!newModel.contains(stmt)) {
// newModel.add(stmt);
// }
// }
// } finally {
// contextModel.leaveCriticalSection();
// }
if (recurseDepth == 0 && includes != null && entity.isVClass(PERSON_CLASS_URI)) {
for (String include : includes) {
String rootDir = null;
if (INCLUDE_ALL.equals(include)) {
rootDir = RICH_EXPORT_ROOT;
} else {
rootDir = RICH_EXPORT_ROOT + include + "/";
}
long start = System.currentTimeMillis();
Model extendedModel = ExtendedLinkedDataUtils.createModelFromQueries(ctx, rootDir, contextModel, entity.getURI());
long elapsedTimeMillis = System.currentTimeMillis()-start;
log.info("Time to create rich export model: msecs = " + elapsedTimeMillis);
newModel.add(extendedModel);
}
}
return newModel;
}
public static boolean includeInLinkedData(Resource object, Model contextModel) {
boolean retval = false;
contextModel.enterCriticalSection(Lock.READ);
try {
StmtIterator iter = contextModel.listStatements(object, RDF.type, (RDFNode)null);
while (iter.hasNext()) {
Statement stmt = iter.next();
if (stmt.getObject().isResource() && contextModel.contains(stmt.getObject().asResource(), extendedLinkedDataProperty, xsdTrue)) {
retval = true;
break;
}
}
} finally {
contextModel.leaveCriticalSection();
}
return retval;
}
/* Get the properties that are difficult to get via a filtered WebappDaoFactory. */
private Model getLabelAndTypes(Individual entity, Model ontModel, Model newModel){
for( VClass vclass : entity.getVClasses()){
newModel.add(newModel.getResource(entity.getURI()), RDF.type, newModel.getResource(vclass.getURI()));
}
ontModel.enterCriticalSection(Lock.READ);
try {
newModel.add(ontModel.listStatements(ontModel.getResource(entity.getURI()), RDFS.label, (RDFNode)null));
} finally {
ontModel.leaveCriticalSection();
}
return newModel;
}
/* This method adds in statements in which the property does not
* have an rdf type in the asserted model.
* This was added for release 1.5 to handle cases such as the
* reasoning-plugin inferred dcterms:creator assertion
*/
private Model getStatementsWithUntypedProperties(Resource subject, OntModel contextModel, OntModel assertionsModel, Model newModel) {
contextModel.enterCriticalSection(Lock.READ);
try {
StmtIterator iter = contextModel.listStatements(subject, (Property) null, (RDFNode) null);
while (iter.hasNext()) {
Statement stmt = iter.next();
Property property = stmt.getPredicate();
assertionsModel.enterCriticalSection(Lock.READ);
try {
if (!assertionsModel.contains(property, RDF.type) && !newModel.contains(stmt)) {
newModel.add(stmt);
}
} finally {
assertionsModel.leaveCriticalSection();
}
}
} finally {
contextModel.leaveCriticalSection();
}
return newModel;
}
}
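For context, getRDF() above only recurses into a related object when includeInLinkedData() finds that one of the object's rdf:type classes carries the vitro:extendedLinkedData flag with a boolean true value. A minimal Turtle sketch of such an annotation, using a hypothetical ex:Grant class and assuming the usual value of VitroVocabulary.vitroURI for the vitro: prefix:

@prefix vitro: <http://vitro.mannlib.cornell.edu/ns/vitro/0.7#> .
@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .
@prefix ex: <http://example.org/ontology/> .   # hypothetical application ontology
ex:Grant vitro:extendedLinkedData "true"^^xsd:boolean .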

View file

@ -14,6 +14,7 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServlet;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ExceptionResponseValues;
@ -30,10 +31,14 @@ public class IndividualController extends FreemarkerHttpServlet {
private static final String TEMPLATE_HELP = "individual-help.ftl";
@Deprecated
private static final String PROPERTY_EXTENDED_LOD = "serveExtendedLinkedData";
/**
* Use this map to decide which MIME type is suited for the "accept" header.
*/
public static final Map<String, Float> ACCEPTED_CONTENT_TYPES = initializeContentTypes();
private static Map<String, Float> initializeContentTypes() {
HashMap<String, Float> map = new HashMap<String, Float>();
map.put(HTML_MIMETYPE, 0.5f);
@ -82,9 +87,15 @@ public class IndividualController extends FreemarkerHttpServlet {
* If they are asking for RDF using the preferred URL, give it
* to them.
*/
if (useExtendedLOD(vreq)) {
return new ExtendedRdfAssembler(vreq,
requestInfo.getIndividual(),
requestInfo.getRdfFormat()).assembleRdf();
} else {
return new IndividualRdfAssembler(vreq,
requestInfo.getIndividual().getURI(),
requestInfo.getRdfFormat()).assembleRdf();
}
default:
/*
* Otherwise, prepare an HTML response for the requested
@ -113,6 +124,11 @@ public class IndividualController extends FreemarkerHttpServlet {
HttpServletResponse.SC_NOT_FOUND);
}
private boolean useExtendedLOD(HttpServletRequest req) {
ConfigurationProperties props = ConfigurationProperties.getBean(req);
return Boolean.valueOf(props.getProperty(PROPERTY_EXTENDED_LOD));
}
@Override
public void doPost(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
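useExtendedLOD() reads the deprecated serveExtendedLinkedData switch through ConfigurationProperties, and Boolean.valueOf() makes the old behavior opt-in: when the property is missing, the new IndividualRdfAssembler is used. A sketch of the toggle, assuming it is set in the site's runtime.properties (any ConfigurationProperties source behaves the same way):

# Temporarily serve linked data with the deprecated ExtendedRdfAssembler
# instead of the new policy-filtered IndividualRdfAssembler.
serveExtendedLinkedData = true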

View file

@ -2,233 +2,270 @@
package edu.cornell.mannlib.vitro.webapp.controller.individual;
import java.util.HashSet;
import java.util.Set;
import javax.servlet.ServletContext;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import com.hp.hpl.jena.rdf.model.RDFNode;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.rdf.model.Statement;
import com.hp.hpl.jena.rdf.model.StmtIterator;
import com.hp.hpl.jena.vocabulary.RDF;
import com.hp.hpl.jena.vocabulary.RDFS;
import edu.cornell.mannlib.vitro.webapp.auth.policy.PolicyHelper;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.display.DisplayDataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.display.DisplayObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.ifaces.RequestedAction;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RdfResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.ExtendedLinkedDataUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaOutputUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType;
/**
* Write a smaller set of Linked Data. It consists of:
*
* 1) The data properties of the entity
*
* 2) The object properties in which the entity is either subject or object
*
* 3) The labels and types of the objects that are linked by those properties.
*
* If the request comes with an Accept-language header, use an appropriately
* language-aware data source to filter the data properties and labels.
* Otherwise, show all triples, regardless of language.
*
* Filter the result based on the policy, removing any triples that should not
* be displayed to the public (or to the user, if logged in). Also remove any
* objects which can only be reached by excluded triples.
*
* ----------------
*
* This still permits the use of rich export, by "include" options on the
* request. The only difference from earlier implementations is that the result
* may be made language-aware.
*/
public class IndividualRdfAssembler {
private static final Log log = LogFactory
.getLog(IndividualRdfAssembler.class);
private static final String RICH_EXPORT_ROOT = "/WEB-INF/rich-export/";
private static final String INCLUDE_ALL = "all";
private final VitroRequest vreq;
private final ServletContext ctx;
private final String individualUri;
private final ContentType rdfFormat;
private final String[] richExportIncludes;
private final RDFService rdfService;
private final OntModel contentModel;
private final WebappDaoFactory wadf;
public IndividualRdfAssembler(VitroRequest vreq, String individualUri,
ContentType rdfFormat) {
this.vreq = vreq;
this.ctx = vreq.getSession().getServletContext();
this.individualUri = individualUri;
this.rdfFormat = rdfFormat;
String[] includes = vreq.getParameterValues("include");
this.richExportIncludes = (includes == null) ? new String[0] : includes;
if (isLanguageAware()) {
this.rdfService = vreq.getRDFService();
this.contentModel = vreq.getJenaOntModel();
} else {
this.rdfService = vreq.getUnfilteredRDFService();
this.contentModel = vreq.getLanguageNeutralUnionFullModel();
}
wadf = vreq.getWebappDaoFactory();
}
public ResponseValues assembleRdf() {
OntModel newModel = getRdf();
newModel.add(getRichExportRdf());
JenaOutputUtils.setNameSpacePrefixes(newModel, wadf);
return new RdfResponseValues(rdfFormat, newModel);
}
private boolean isLanguageAware() {
return StringUtils.isNotEmpty(vreq.getHeader("Accept-Language"));
}
private OntModel getRdf() {
OntModel o = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
o.add(getStatementsAboutEntity());
o.add(getLabelsAndTypesOfRelatedObjects());
filterByPolicy(o);
return o;
}
/**
* Get all statements that have the entity as either the subject or the
* object.
*/
private Model getStatementsAboutEntity() {
Model m = runConstructQuery(String
.format("CONSTRUCT { <%1$s> ?predicate ?object . } "
+ "WHERE { <%1$s> ?predicate ?object } ", individualUri));
m.add(runConstructQuery(String.format(
"CONSTRUCT { ?s ?predicate <%1$s> . } "
+ "WHERE { ?s ?predicate <%1$s> } ", individualUri)));
return m;
}
/**
* Get the labels and types of all related objects.
*/
private Model getLabelsAndTypesOfRelatedObjects() {
Model m = runConstructQuery(String
.format("CONSTRUCT { ?object <%2$s> ?type . } "
+ "WHERE { <%1$s> ?predicate ?object ."
+ " ?object <%2$s> ?type . } ", individualUri, RDF.type));
m.add(runConstructQuery(String.format(
"CONSTRUCT { ?object <%2$s> ?label . } "
+ "WHERE { <%1$s> ?predicate ?object ."
+ " ?object <%2$s> ?label . } ", individualUri,
RDFS.label)));
return m;
}
/**
* Remove any triples that we aren't allowed to see. Then remove any
* objects that we no longer have access to.
*/
private void filterByPolicy(OntModel o) {
removeProhibitedTriples(o);
Set<String> okObjects = determineAccessibleUris(o);
removeOrphanedObjects(o, okObjects);
}
/**
* Remove the triples that we aren't allowed to see.
*/
private void removeProhibitedTriples(OntModel o) {
StmtIterator stmts = o.listStatements();
while (stmts.hasNext()) {
Statement stmt = stmts.next();
String subjectUri = stmt.getSubject().getURI();
String predicateUri = stmt.getPredicate().getURI();
if (stmt.getObject().isLiteral()) {
String value = stmt.getObject().asLiteral().getString();
DataPropertyStatement dps = new DataPropertyStatementImpl(
subjectUri, predicateUri, value);
RequestedAction ddps = new DisplayDataPropertyStatement(dps);
if (!PolicyHelper.isAuthorizedForActions(vreq, ddps)) {
log.debug("not authorized: " + ddps);
stmts.remove();
}
} else if (stmt.getObject().isURIResource()) {
String objectUri = stmt.getObject().asResource().getURI();
ObjectPropertyStatement ops = new ObjectPropertyStatementImpl(
subjectUri, predicateUri, objectUri);
RequestedAction dops = new DisplayObjectPropertyStatement(ops);
if (!PolicyHelper.isAuthorizedForActions(vreq, dops)) {
log.debug("not authorized: " + dops);
stmts.remove();
}
} else {
log.warn("blank node: " + stmt);
stmts.remove();
}
}
}
/**
* Collect the URIs of all objects that are accessible through permitted
* triples.
*/
private Set<String> determineAccessibleUris(OntModel o) {
Resource i = o.getResource(individualUri);
Set<String> uris = new HashSet<>();
uris.add(individualUri);
StmtIterator stmts;
stmts = o.listStatements(i, null, (RDFNode) null);
while (stmts.hasNext()) {
Statement stmt = stmts.next();
if (stmt.getObject().isURIResource()) {
uris.add(stmt.getObject().asResource().getURI());
}
}
stmts = o.listStatements(null, null, i);
while (stmts.hasNext()) {
Statement stmt = stmts.next();
uris.add(stmt.getSubject().getURI());
}
return uris;
}
/**
* Remove any statements about objects that cannot be reached through
* permitted triples.
*/
private void removeOrphanedObjects(OntModel o, Set<String> okObjects) {
StmtIterator stmts = o.listStatements();
while (stmts.hasNext()) {
Statement stmt = stmts.next();
if (!okObjects.contains(stmt.getSubject().getURI())) {
log.debug("removing orphan triple: " + stmt);
stmts.remove();
}
}
}
private Model runConstructQuery(String query) {
try {
return RDFServiceUtils.parseModel(rdfService.sparqlConstructQuery(
query, RDFService.ModelSerializationFormat.N3),
RDFService.ModelSerializationFormat.N3);
} catch (RDFServiceException e) {
throw new RuntimeException(e);
}
}
private Model getRichExportRdf() {
Model richExportModel = ModelFactory.createDefaultModel();
for (String include : richExportIncludes) {
String rootDir = RICH_EXPORT_ROOT;
if (!INCLUDE_ALL.equals(include)) {
rootDir += include + "/";
}
long start = System.currentTimeMillis();
richExportModel.add(ExtendedLinkedDataUtils.createModelFromQueries(
ctx, rootDir, contentModel, individualUri));
long elapsedTimeMillis = System.currentTimeMillis() - start;
log.debug("Time to create rich export model: msecs = "
+ elapsedTimeMillis);
}
return richExportModel;
}
}
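To make the format strings above concrete: for a hypothetical individual URI http://example.org/individual/n123, getStatementsAboutEntity() sends this pair of CONSTRUCT queries to the RDFService; getLabelsAndTypesOfRelatedObjects() builds the analogous pair with rdf:type and rdfs:label substituted for %2$s.

CONSTRUCT { <http://example.org/individual/n123> ?predicate ?object . }
WHERE { <http://example.org/individual/n123> ?predicate ?object }

CONSTRUCT { ?s ?predicate <http://example.org/individual/n123> . }
WHERE { ?s ?predicate <http://example.org/individual/n123> }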

View file

@ -2,6 +2,9 @@
package edu.cornell.mannlib.vitro.webapp.filters;
import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_DB_MODEL;
import static edu.cornell.mannlib.vitro.webapp.servlet.setup.JenaDataSourceSetupBase.JENA_INF_MODEL;
import java.io.IOException;
import java.util.List;
import java.util.regex.Pattern;
@ -19,9 +22,12 @@ import javax.servlet.http.HttpServletResponse;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.hp.hpl.jena.graph.BulkUpdateHandler;
import com.hp.hpl.jena.graph.Graph;
import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.Dataset;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;
import edu.cornell.mannlib.vitro.webapp.auth.identifier.RequestIdentifiers;
@ -36,8 +42,8 @@ import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactoryConfig;
import edu.cornell.mannlib.vitro.webapp.dao.filtering.WebappDaoFactoryFiltering;
import edu.cornell.mannlib.vitro.webapp.dao.filtering.filters.HideFromDisplayByPolicyFilter;
import edu.cornell.mannlib.vitro.webapp.dao.jena.OntModelSelector;
import edu.cornell.mannlib.vitro.webapp.dao.jena.RDFServiceDataset;
import edu.cornell.mannlib.vitro.webapp.dao.jena.SpecialBulkUpdateHandlerGraph;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB;
import edu.cornell.mannlib.vitro.webapp.dao.jena.WebappDaoFactorySDB.SDBDatasetMode;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
@ -61,7 +67,7 @@ public class RequestModelsPrep implements Filter {
* parameters, e.g. "/vitro/index.jsp" "/vitro/themes/enhanced/css/edit.css"
*/
private final static Pattern[] skipPatterns = {
Pattern.compile(".*\\.(gif|GIF|jpg|jpeg|png|PNG)$"),
Pattern.compile(".*\\.css$"), Pattern.compile(".*\\.js$"),
Pattern.compile("/.*/themes/.*/site_icons/.*"),
Pattern.compile("/.*/images/.*") };
@ -130,28 +136,120 @@ public class RequestModelsPrep implements Filter {
HttpServletRequest req) {
VitroRequest vreq = new VitroRequest(req);
setRdfServicesAndDatasets(rawRdfService, vreq);
RDFService rdfService = vreq.getRDFService();
Dataset dataset = vreq.getDataset();
setRawModels(vreq, dataset);
// We need access to the language-ignorant version of this model.
// Grab it before it gets wrapped in language awareness.
vreq.setLanguageNeutralUnionFullModel(ModelAccess.on(vreq).getOntModel(ModelID.UNION_FULL));
wrapModelsWithLanguageAwareness(vreq);
setWebappDaoFactories(vreq, rdfService);
}
/**
* Set language-neutral and language-aware versions of the RdfService and
* Dataset.
*/
private void setRdfServicesAndDatasets(RDFService rawRdfService,
VitroRequest vreq) {
vreq.setUnfilteredRDFService(rawRdfService);
vreq.setUnfilteredDataset(new RDFServiceDataset(rawRdfService));
RDFService rdfService = addLanguageAwareness(vreq, rawRdfService);
vreq.setRDFService(rdfService);
Dataset dataset = new RDFServiceDataset(rdfService);
vreq.setDataset(dataset);
}
private void setRawModels(VitroRequest vreq, Dataset dataset) {
// These are memory-mapped (fast), and read-mostly (low contention), so
// just use the ones from the context.
useModelFromContext(vreq, ModelID.APPLICATION_METADATA);
useModelFromContext(vreq, ModelID.USER_ACCOUNTS);
useModelFromContext(vreq, ModelID.DISPLAY);
useModelFromContext(vreq, ModelID.DISPLAY_DISPLAY);
useModelFromContext(vreq, ModelID.DISPLAY_TBOX);
useModelFromContext(vreq, ModelID.BASE_TBOX);
useModelFromContext(vreq, ModelID.INFERRED_TBOX);
useModelFromContext(vreq, ModelID.UNION_TBOX);
// Anything derived from the ABOX is not memory-mapped, so create
// versions from the short-term RDF service.
OntModel baseABoxModel = createNamedModelFromDataset(dataset,
JENA_DB_MODEL);
OntModel inferenceABoxModel = createNamedModelFromDataset(dataset,
JENA_INF_MODEL);
OntModel unionABoxModel = createCombinedBulkUpdatingModel(
baseABoxModel, inferenceABoxModel);
OntModel baseFullModel = createCombinedBulkUpdatingModel(baseABoxModel,
ModelAccess.on(vreq).getOntModel(ModelID.BASE_TBOX));
OntModel inferenceFullModel = createCombinedModel(inferenceABoxModel,
ModelAccess.on(vreq).getOntModel(ModelID.INFERRED_TBOX));
OntModel unionFullModel = ModelFactory.createOntologyModel(
OntModelSpec.OWL_MEM, dataset.getDefaultModel());
ModelAccess.on(vreq).setOntModel(ModelID.BASE_ABOX, baseABoxModel);
ModelAccess.on(vreq).setOntModel(ModelID.INFERRED_ABOX, unionABoxModel);
ModelAccess.on(vreq)
.setOntModel(ModelID.UNION_ABOX, inferenceABoxModel);
ModelAccess.on(vreq).setOntModel(ModelID.BASE_FULL, baseFullModel);
ModelAccess.on(vreq).setOntModel(ModelID.INFERRED_FULL,
inferenceFullModel);
ModelAccess.on(vreq).setOntModel(ModelID.UNION_FULL, unionFullModel);
}
private void useModelFromContext(VitroRequest vreq, ModelID modelId) {
OntModel contextModel = ModelAccess.on(ctx).getOntModel(modelId);
ModelAccess.on(vreq).setOntModel(modelId, contextModel);
}
private OntModel createNamedModelFromDataset(Dataset dataset, String name) {
return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, dataset.getNamedModel(name));
}
private OntModel createCombinedModel(OntModel oneModel, OntModel otherModel) {
return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM,
ModelFactory.createUnion(oneModel, otherModel));
}
private OntModel createCombinedBulkUpdatingModel(OntModel baseModel,
OntModel otherModel) {
BulkUpdateHandler bulkUpdateHandler = baseModel.getGraph().getBulkUpdateHandler();
Graph unionGraph = ModelFactory.createUnion(baseModel, otherModel).getGraph();
Model unionModel = ModelFactory.createModelForGraph(
new SpecialBulkUpdateHandlerGraph(unionGraph, bulkUpdateHandler));
return ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM, unionModel);
}
private void wrapModelsWithLanguageAwareness(VitroRequest vreq) {
wrapModelWithLanguageAwareness(vreq, ModelID.DISPLAY);
wrapModelWithLanguageAwareness(vreq, ModelID.APPLICATION_METADATA);
wrapModelWithLanguageAwareness(vreq, ModelID.BASE_TBOX);
wrapModelWithLanguageAwareness(vreq, ModelID.UNION_TBOX);
wrapModelWithLanguageAwareness(vreq, ModelID.UNION_FULL);
wrapModelWithLanguageAwareness(vreq, ModelID.BASE_FULL);
}
private void wrapModelWithLanguageAwareness(HttpServletRequest req,
ModelID id) {
if (isLanguageAwarenessEnabled()) {
OntModel unaware = ModelAccess.on(req).getOntModel(id);
OntModel aware = LanguageFilteringUtils
.wrapOntModelInALanguageFilter(unaware, req);
ModelAccess.on(req).setOntModel(id, aware);
}
}
private void setWebappDaoFactories(VitroRequest vreq, RDFService rdfService) {
WebappDaoFactoryConfig config = createWadfConfig(vreq);
WebappDaoFactory unfilteredWadf = new WebappDaoFactorySDB(rdfService,
ModelAccess.on(vreq).getUnionOntModelSelector(), config);
@ -175,14 +273,15 @@ public class RequestModelsPrep implements Filter {
.checkForModelSwitching(vreq, wadf);
HideFromDisplayByPolicyFilter filter = new HideFromDisplayByPolicyFilter(
RequestIdentifiers.getIdBundleForRequest(vreq),
ServletPolicyList.getPolicies(ctx));
WebappDaoFactoryFiltering filteredWadf = new WebappDaoFactoryFiltering(
switchedWadf, filter);
ModelAccess.on(vreq).setWebappDaoFactory(FactoryID.UNION, filteredWadf);
}
private WebappDaoFactoryConfig createWadfConfig(HttpServletRequest req) {
List<String> langs = getPreferredLanguages(req);
WebappDaoFactoryConfig config = new WebappDaoFactoryConfig();
config.setDefaultNamespace(defaultNamespace);
config.setPreferredLanguages(langs);
@ -203,8 +302,9 @@ public class RequestModelsPrep implements Filter {
"true")); "true"));
} }
private RDFService addLanguageAwareness(List<String> langs, private RDFService addLanguageAwareness(HttpServletRequest req,
RDFService rawRDFService) { RDFService rawRDFService) {
List<String> langs = getPreferredLanguages(req);
if (isLanguageAwarenessEnabled()) { if (isLanguageAwarenessEnabled()) {
return new LanguageFilteringRDFService(rawRDFService, langs); return new LanguageFilteringRDFService(rawRDFService, langs);
} else { } else {
@ -212,15 +312,6 @@ public class RequestModelsPrep implements Filter {
}
}
private void addLanguageAwarenessToRequestModel(HttpServletRequest req, ModelID id) {
if (isLanguageAwarenessEnabled()) {
OntModel unaware = ModelAccess.on(req.getSession()).getOntModel(id);
OntModel aware = LanguageFilteringUtils
.wrapOntModelInALanguageFilter(unaware, req);
ModelAccess.on(req).setOntModel(id, aware);
}
}
private boolean isStoreReasoned(ServletRequest req) {
String isStoreReasoned = ConfigurationProperties.getBean(req).getProperty(
"VitroConnection.DataSource.isStoreReasoned", "true");

View file

@ -44,6 +44,7 @@ import edu.cornell.mannlib.vitro.webapp.dao.IndividualDao;
import edu.cornell.mannlib.vitro.webapp.dao.VClassDao;
import edu.cornell.mannlib.vitro.webapp.dao.VClassGroupDao;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.i18n.I18n;
import edu.cornell.mannlib.vitro.webapp.search.IndexConstants;
import edu.cornell.mannlib.vitro.webapp.search.SearchException;
import edu.cornell.mannlib.vitro.webapp.search.VitroSearchTermNames;
@ -164,9 +165,9 @@ public class PagedSearchController extends FreemarkerHttpServlet {
log.debug("Query text is \""+ queryText + "\""); log.debug("Query text is \""+ queryText + "\"");
String badQueryMsg = badQueryText( queryText ); String badQueryMsg = badQueryText( queryText, vreq );
if( badQueryMsg != null ){ if( badQueryMsg != null ){
return doFailedSearch(badQueryMsg, queryText, format); return doFailedSearch(badQueryMsg, queryText, format, vreq);
} }
SolrQuery query = getQuery(queryText, hitsPerPage, startIndex, vreq); SolrQuery query = getQuery(queryText, hitsPerPage, startIndex, vreq);
@ -176,26 +177,26 @@ public class PagedSearchController extends FreemarkerHttpServlet {
try {
response = solr.query(query);
} catch (Exception ex) {
String msg = makeBadSearchMessage(queryText, ex.getMessage(), vreq);
log.error("could not run Solr query",ex);
return doFailedSearch(msg, queryText, format, vreq);
}
if (response == null) {
log.error("Search response was null");
return doFailedSearch(I18n.text(vreq, "error_in_search_request"), queryText, format, vreq);
}
SolrDocumentList docs = response.getResults();
if (docs == null) {
log.error("Document list for a search was null");
return doFailedSearch(I18n.text(vreq, "error_in_search_request"), queryText,format, vreq);
}
long hitCount = docs.getNumFound();
log.debug("Number of hits = " + hitCount);
if ( hitCount < 1 ) {
return doNoHits(queryText,format, vreq);
}
List<Individual> individuals = new ArrayList<Individual>(docs.size());
@ -275,7 +276,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
body.put("pagingLinks", body.put("pagingLinks",
getPagingLinks(startIndex, hitsPerPage, hitCount, getPagingLinks(startIndex, hitsPerPage, hitCount,
vreq.getServletPath(), vreq.getServletPath(),
pagingLinkParams)); pagingLinkParams, vreq));
if (startIndex != 0) { if (startIndex != 0) {
body.put("prevPage", getPreviousPageLink(startIndex, body.put("prevPage", getPreviousPageLink(startIndex,
@ -339,12 +340,12 @@ public class PagedSearchController extends FreemarkerHttpServlet {
return startIndex;
}
private String badQueryText(String qtxt, VitroRequest vreq) {
if( qtxt == null || "".equals( qtxt.trim() ) )
return I18n.text(vreq, "enter_search_term");
if( qtxt.equals("*:*") )
return I18n.text(vreq, "invalid_search_term") ;
return null;
}
@ -526,7 +527,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
public String getCount() { return Long.toString(count); }
}
protected static List<PagingLink> getPagingLinks(int startIndex, int hitsPerPage, long hitCount, String baseUrl, ParamMap params, VitroRequest vreq) {
List<PagingLink> pagingLinks = new ArrayList<PagingLink>();
@ -550,7 +551,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
pagingLinks.add(new PagingLink(pageNumber, baseUrl, params));
}
} else {
pagingLinks.add(new PagingLink(I18n.text(vreq, "paging_link_more"), baseUrl, params));
break;
}
}
@ -591,20 +592,20 @@ public class PagedSearchController extends FreemarkerHttpServlet {
return new ExceptionResponseValues(getTemplate(f,Result.ERROR), body, e);
}
private TemplateResponseValues doFailedSearch(String message, String querytext, Format f, VitroRequest vreq) {
Map<String, Object> body = new HashMap<String, Object>();
body.put("title", I18n.text(vreq, "search_for", querytext));
if ( StringUtils.isEmpty(message) ) {
message = I18n.text(vreq, "search_failed");
}
body.put("message", message);
return new TemplateResponseValues(getTemplate(f,Result.ERROR), body);
}
private TemplateResponseValues doNoHits(String querytext, Format f, VitroRequest vreq) {
Map<String, Object> body = new HashMap<String, Object>();
body.put("title", I18n.text(vreq, "search_for", querytext));
body.put("message", I18n.text(vreq, "no_matching_results"));
return new TemplateResponseValues(getTemplate(f,Result.ERROR), body);
}
@ -613,7 +614,7 @@ public class PagedSearchController extends FreemarkerHttpServlet {
* @param queryText
* @param exceptionMsg
*/
private String makeBadSearchMessage(String querytext, String exceptionMsg, VitroRequest vreq){
String rv = "";
try{
//try to get the column in the search term that is causing the problems
@ -641,7 +642,8 @@ public class PagedSearchController extends FreemarkerHttpServlet {
if (post > i)
after = querytext.substring(i + 1, post);
rv = I18n.text(vreq, "search_term_error_near") +
" <span class='searchQuote'>"
+ before + "<span class='searchError'>" + querytext.charAt(i)
+ "</span>" + after + "</span>";
} catch (Throwable ex) {

View file

@ -88,6 +88,8 @@ public class ContentModelSetup extends JenaDataSourceSetupBase
} else {
checkForNamespaceMismatch( applicationMetadataModel, ctx );
}
RDFFilesLoader.loadEveryTimeFiles(ctx, "abox", baseABoxModel);
RDFFilesLoader.loadEveryTimeFiles(ctx, "tbox", baseTBoxModel);
log.info("Setting up full models"); log.info("Setting up full models");
OntModel baseFullModel = createCombinedBulkUpdatingModel(baseABoxModel, baseTBoxModel); OntModel baseFullModel = createCombinedBulkUpdatingModel(baseABoxModel, baseTBoxModel);

View file

@ -11,18 +11,26 @@ import junit.framework.Assert;
import org.junit.Before;
import org.junit.Test;
import stubs.edu.cornell.mannlib.vitro.webapp.i18n.I18nStub;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder.ParamMap;
import edu.cornell.mannlib.vitro.webapp.search.controller.PagedSearchController.PagingLink;
public class PagedSearchControllerTest {
@SuppressWarnings("unused")
@Before
public void useI18nStubBundles() {
new I18nStub();
}
@Test
public void testGetPagingLinks() {
ParamMap pm = new ParamMap();
int hitsPerPage = 25;
int totalHits = 500;
int currentStartIndex = 0;
List<PagingLink> pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm, null);
Assert.assertNotNull(pageLinks);
Assert.assertEquals(500 / 25, pageLinks.size());
@ -30,7 +38,7 @@ public class PagedSearchControllerTest {
hitsPerPage = 25;
totalHits = 10;
currentStartIndex = 0;
pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm, null);
Assert.assertNotNull(pageLinks);
Assert.assertEquals(0, pageLinks.size());
}
@ -41,7 +49,7 @@ public class PagedSearchControllerTest {
int hitsPerPage = 25;
int totalHits = 349909;
int currentStartIndex = 0;
List<PagingLink> pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm, null);
Assert.assertNotNull(pageLinks);
Assert.assertEquals( PagedSearchController.DEFAULT_MAX_HIT_COUNT / hitsPerPage, pageLinks.size());
@ -49,7 +57,7 @@ public class PagedSearchControllerTest {
hitsPerPage = 25;
totalHits = PagedSearchController.DEFAULT_MAX_HIT_COUNT + 20329;
currentStartIndex = PagedSearchController.DEFAULT_MAX_HIT_COUNT + 5432;
pageLinks = PagedSearchController.getPagingLinks(currentStartIndex, hitsPerPage, totalHits, "baseURL", pm, null);
Assert.assertNotNull(pageLinks);
Assert.assertEquals(
(currentStartIndex / hitsPerPage) + //all the pages that are before the current page

View file

@ -105,7 +105,7 @@ updated_account_title = updated account
updated_account_notification = A confirmation email has been sent to {0} \
with instructions for resetting a password. \
The password will not be reset until the user follows the link provided in this email.
deleted_accounts = Deleted {0} {0, choice, 0#accounts|1#account|1<accounts}.
enter_new_password = Please enter your new password for {0}
error_no_email_address = Please enter your email address.
@ -520,6 +520,18 @@ previous = Previous
page_link = page link
next_capitalized = Next
#
# search controller ( PagedSearchController.java )
#
error_in_search_request = The search request contained errors.
enter_search_term = Please enter a search term.
invalid_search_term = Search term was invalid
paging_link_more = more...
no_matching_results = No matching results.
search_failed = Search failed.
search_term_error_near = The search term had an error near
search_for = Search for ''{0}''
#
# shortview templates ( /templates/freemarker/body/partials/shortview )
#
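Note that search_for = Search for ''{0}'' (and its Spanish counterpart above) is written in java.text.MessageFormat syntax, which the doubled single quotes imply: {0} is replaced by the query text and '' renders as one literal quote. So a call such as I18n.text(vreq, "search_for", "maize") — the argument here is a hypothetical example — would yield the page title Search for 'maize'.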