Merge branch 'develop' into feature/theming

Graham Triggs 2017-02-15 19:28:15 +00:00
commit 6d44d052f7
345 changed files with 15498 additions and 5977 deletions

.gitignore vendored
View file

@@ -17,3 +17,8 @@ utilities/sdb_to_tdb/.work
**/target
**/overlays
# Eclipse artifacts
**/.settings
**/.classpath
**/.project

View file

@@ -1,13 +1,20 @@
language: java
dist: trusty
sudo: false
jdk:
- openjdk8
- oraclejdk8
env:
# Give Maven 1GB of memory to work with
- MAVEN_OPTS=-Xmx1024M
cache:
directories:
- .autoconf
- $HOME/.m2
install:
- "echo 'Skipping install stage, dependencies will be downloaded during build and test stages.'"

View file

@@ -7,13 +7,13 @@
<groupId>org.vivoweb</groupId>
<artifactId>vitro-api</artifactId>
<version>1.9.0-SNAPSHOT</version>
<version>1.10.0-SNAPSHOT</version>
<packaging>jar</packaging>
<parent>
<groupId>org.vivoweb</groupId>
<artifactId>vitro-project</artifactId>
<version>1.9.0-SNAPSHOT</version>
<version>1.10.0-SNAPSHOT</version>
<relativePath>..</relativePath>
</parent>
@@ -39,7 +39,7 @@
<dependency>
<groupId>org.vivoweb</groupId>
<artifactId>vitro-dependencies</artifactId>
<version>1.9.0-SNAPSHOT</version>
<version>1.10.0-SNAPSHOT</version>
<type>pom</type>
</dependency>
<dependency>

View file

@@ -0,0 +1,25 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.semservices.bo;
public class BaseObject {
/**
* Simple JavaBean domain object with an id property.
* Used as a base class for objects needing this property.
*/
private Integer id;
public void setId(Integer id) {
this.id = id;
}
public Integer getId() {
return id;
}
public boolean isNew() {
return (this.id == null);
}
}
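
As a quick orientation (not part of the commit): any bean that needs a persistence id can extend BaseObject and call isNew() to see whether an id has been assigned yet. The Person class below is a hypothetical subclass, used only for illustration.

import edu.cornell.mannlib.semservices.bo.BaseObject;

// Hypothetical subclass illustrating the isNew() convention:
// an object without an id has not yet been assigned/persisted.
public class Person extends BaseObject {
    private String name;
    public String getName() { return name; }
    public void setName(String name) { this.name = name; }
}

// Usage sketch:
//   Person p = new Person();
//   p.isNew();     // true - no id yet
//   p.setId(42);   // Integer id assigned
//   p.isNew();     // false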

View file

@@ -0,0 +1,163 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.semservices.bo;
import java.util.ArrayList;
import java.util.List;
public class Concept {
private String definedBy;
private String conceptId;
private String bestMatch;
private String label;
private String type;
private String definition;
private String uri;
private String schemeURI;
private List<String> broaderURIList;
private List<String> narrowerURIList;
private List<String> exactMatchURIList;
private List<String> closeMatchURIList;
private List<String> altLabelList;
/**
* default constructor
*/
public Concept() {
this.broaderURIList = new ArrayList<String>();
this.narrowerURIList = new ArrayList<String>();
this.exactMatchURIList = new ArrayList<String>();
this.closeMatchURIList = new ArrayList<String>();
}
/**
* @return the conceptId
*/
public String getConceptId() {
return conceptId;
}
/**
* @param conceptId the conceptId to set
*/
public void setConceptId(String conceptId) {
this.conceptId = conceptId;
}
/**
* @return the label
*/
public String getLabel() {
return label;
}
/**
* @param label the label to set
*/
public void setLabel(String label) {
this.label = label;
}
/**
* @return the type
*/
public String getType() {
return type;
}
/**
* @param type the type to set
*/
public void setType(String type) {
this.type = type;
}
/**
* @return the definition
*/
public String getDefinition() {
return definition;
}
/**
* @param definition the definition to set
*/
public void setDefinition(String definition) {
this.definition = definition;
}
/**
* @return the uri
*/
public String getUri() {
return uri;
}
/**
* @param uri the uri to set
*/
public void setUri(String uri) {
this.uri = uri;
}
/**
* @return the definedBy
*/
public String getDefinedBy() {
return definedBy;
}
/**
* @param definedBy the definedBy to set
*/
public void setDefinedBy(String definedBy) {
this.definedBy = definedBy;
}
/**
* @return the schemeURI
*/
public String getSchemeURI() {
return schemeURI;
}
/**
* @param schemeURI the schemeURI to set
*/
public void setSchemeURI(String schemeURI) {
this.schemeURI = schemeURI;
}
/**
* @return the bestMatch
*/
public String getBestMatch() {
return bestMatch;
}
/**
* @param bestMatch the bestMatch to set
*/
public void setBestMatch(String bestMatch) {
this.bestMatch = bestMatch;
}
public List<String> getBroaderURIList() {
return broaderURIList;
}
public void setBroaderURIList(List<String> broaderURIList) {
this.broaderURIList = broaderURIList;
}
public List<String> getNarrowerURIList() {
return narrowerURIList;
}
public void setNarrowerURIList(List<String> narrowerURIList) {
this.narrowerURIList = narrowerURIList;
}
public List<String> getExactMatchURIList() {
return exactMatchURIList;
}
public void setExactMatchURIList(List<String> exactMatchURIList) {
this.exactMatchURIList = exactMatchURIList;
}
public List<String> getCloseMatchURIList() {
return closeMatchURIList;
}
public void setCloseMatchURIList(List<String> closeMatchURIList) {
this.closeMatchURIList = closeMatchURIList;
}
public List<String> getAltLabelList() {
return altLabelList;
}
public void setAltLabelList(List<String> altLabelList) {
this.altLabelList = altLabelList;
}
}

View file

@@ -0,0 +1,32 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.semservices.bo;
import java.util.List;
public class ConceptInfo extends SemanticServicesInfoBase {
private List<?> conceptList;
/**
*
*/
public ConceptInfo() {
super();
}
/**
* @return the conceptList
*/
public List<?> getConceptList() {
return conceptList;
}
/**
* @param conceptList the conceptList to set
*/
public void setConceptList(List<?> conceptList) {
this.conceptList = conceptList;
}
}

View file

@@ -0,0 +1,75 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.semservices.bo;
public class SemanticServicesError {
private String message;
private String exception;
private String severity;
/**
*
*/
public SemanticServicesError() {
super();
}
/**
* @param exception Exception description
* @param message Error message
* @param severity Severity
*/
public SemanticServicesError(String exception, String message, String severity) {
super();
this.exception = exception;
this.message = message;
this.severity = severity;
}
/**
* @return the message
*/
public String getMessage() {
return message;
}
/**
* @param message the message to set
*/
public void setMessage(String message) {
this.message = message;
}
/**
* @return the exception
*/
public String getException() {
return exception;
}
/**
* @param exception the exception to set
*/
public void setException(String exception) {
this.exception = exception;
}
/**
* @return the severity
*/
public String getSeverity() {
return severity;
}
/**
* @param severity the severity to set
*/
public void setSeverity(String severity) {
this.severity = severity;
}
}

View file

@@ -0,0 +1,29 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.semservices.bo;
public class SemanticServicesInfoBase {
private SemanticServicesError semanticServicesError;
/**
*
*/
public SemanticServicesInfoBase() {
super();
// TODO Auto-generated constructor stub
}
/**
* @return the semanticServicesError
*/
public SemanticServicesError getSemanticServicesError() {
return semanticServicesError;
}
/**
* @param semanticServicesError the semanticServicesError to set
*/
public void setSemanticServicesError(SemanticServicesError semanticServicesError) {
this.semanticServicesError = semanticServicesError;
}
}
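
To show how the three beans above fit together (illustrative only, not part of the commit): ConceptInfo extends SemanticServicesInfoBase, so a service response can carry either a concept list or a SemanticServicesError. The buildResponse helper, its class name, and its use of the ExternalConceptService interface (added later in this commit) are assumptions made for the sketch.

import java.util.List;

import edu.cornell.mannlib.semservices.bo.Concept;
import edu.cornell.mannlib.semservices.bo.ConceptInfo;
import edu.cornell.mannlib.semservices.bo.SemanticServicesError;
import edu.cornell.mannlib.semservices.service.ExternalConceptService;

public class ConceptResponseExample {
    // Hypothetical helper: wrap a lookup result (or its failure) in the beans above.
    static ConceptInfo buildResponse(ExternalConceptService service, String term) {
        ConceptInfo info = new ConceptInfo();
        try {
            List<Concept> concepts = service.getConcepts(term);
            info.setConceptList(concepts);
        } catch (Exception e) {
            info.setSemanticServicesError(new SemanticServicesError(
                    e.getClass().getName(),                      // exception description
                    "Could not retrieve concepts for " + term,   // message
                    "ERROR"));                                   // severity
        }
        return info;
    }
}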

View file

@@ -0,0 +1,15 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.semservices.exceptions;
public class ConceptsNotFoundException extends Exception {
/**
* An exception that indicates a service could not find a Concept
*/
private static final long serialVersionUID = -4729465393290022840L;
public ConceptsNotFoundException() { }
public ConceptsNotFoundException(String message) { super(message); }
public ConceptsNotFoundException(Throwable cause) { super(cause); }
public ConceptsNotFoundException(String message, Throwable cause) { super(message, cause); }
}

View file

@@ -0,0 +1,27 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.semservices.service;
import java.util.List;
import edu.cornell.mannlib.semservices.bo.Concept;
public interface ExternalConceptService {
/**
* @param term Term
*/
List<Concept> processResults(String term) throws Exception;
/**
* @param term Term
* @throws Exception
*/
List<Concept> getConcepts(String term) throws Exception;
/**
* @param uri URI
*/
List<Concept> getConceptsByURIWithSparql(String uri) throws Exception;
}
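
A skeletal implementation of the interface above, to show how a vocabulary lookup service is expected to plug in. The class name, placeholder URI, and the decision to delegate getConcepts to processResults are assumptions, not part of the commit; a real implementation would query an external vocabulary service and map its results to Concept beans.

import java.util.ArrayList;
import java.util.List;

import edu.cornell.mannlib.semservices.bo.Concept;
import edu.cornell.mannlib.semservices.service.ExternalConceptService;

public class ExampleConceptService implements ExternalConceptService {

    @Override
    public List<Concept> getConcepts(String term) throws Exception {
        // Placeholder: delegate to processResults rather than calling a remote service.
        return processResults(term);
    }

    @Override
    public List<Concept> processResults(String term) throws Exception {
        List<Concept> concepts = new ArrayList<Concept>();
        Concept c = new Concept();
        c.setLabel(term);                                   // placeholder values only
        c.setUri("http://example.org/concept/" + term);
        c.setBestMatch("true");
        concepts.add(c);
        return concepts;
    }

    @Override
    public List<Concept> getConceptsByURIWithSparql(String uri) throws Exception {
        // A real implementation would run a SPARQL query against the vocabulary endpoint.
        return new ArrayList<Concept>();
    }
}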

View file

@@ -0,0 +1,26 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.semservices.util;
import java.util.Iterator;
import javax.xml.XMLConstants;
import javax.xml.namespace.NamespaceContext;
public class MetadataNamespaceContext implements NamespaceContext {
public String getNamespaceURI(String prefix) {
if (prefix == null) throw new NullPointerException("Null prefix");
else if ("mix".equals(prefix)) return "http://www.loc.gov/mix/";
else if ("xml".equals(prefix)) return XMLConstants.XML_NS_URI;
return XMLConstants.NULL_NS_URI;
}
// This method isn't necessary for XPath processing.
public String getPrefix(String uri) {
throw new UnsupportedOperationException();
}
// This method isn't necessary for XPath processing either.
public Iterator getPrefixes(String uri) {
throw new UnsupportedOperationException();
}
}

View file

@@ -0,0 +1,266 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
/* We are no longer using the SKOS API, since Vitro has moved to version 4.0 of the OWL API, which does not appear to be compatible.
This file contains methods for reading SKOS as XML and parsing it for the properties
we want to extract. */
package edu.cornell.mannlib.semservices.util;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.StringReader;
import java.io.StringWriter;
import java.net.URL;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.NodeIterator;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.ResourceFactory;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import edu.cornell.mannlib.semservices.bo.Concept;
public class SKOSUtils {
protected final static Log log = LogFactory.getLog(SKOSUtils.class);
public static String getConceptXML(String conceptUriString) {
URL conceptURL = null;
try {
conceptURL = new URL(conceptUriString);
} catch (Exception e) {
log.error("Exception occurred in instantiating URL for "
+ conceptUriString, e);
// If the url is having trouble, just return null for the concept
return null;
}
log.debug("loading concept uri " + conceptUriString);
String results = null;
try {
StringWriter sw = new StringWriter();
BufferedReader in = new BufferedReader(new InputStreamReader(
conceptURL.openStream()));
String inputLine;
while ((inputLine = in.readLine()) != null) {
sw.write(inputLine);
}
in.close();
results = sw.toString();
log.debug(results);
} catch (Exception ex) {
log.error("Error occurred in getting concept from the URL "
+ conceptUriString, ex);
return null;
}
return results;
}
// Downloads the XML from the URI itself.
// No language tag support here, but it can be specified at this
// level as well if need be.
public static Concept createConceptUsingXMLFromURL(Concept concept,
String conceptURLString, String langTagValue, boolean addNotes) {
String results = getConceptXML(conceptURLString);
if (StringUtils.isEmpty(results)) {
return null;
}
// return createConceptUsingXML(concept, results, langTagValue);
return createConceptUsingXMLModel(concept, results, langTagValue,
addNotes);
}
// Because the XML returns matches by tag name, and the XML may
// look like <skos:narrower><skos:Concept ..><skos:broader
// rdf:resource="conceptURI">,
// where conceptURI is the concept that is the subject of skos:narrower, we
// need to ensure we are not returning the same URI as that of the main
// concept.
public static List<String> removeConceptURIFromList(List<String> uris,
String conceptURI) {
// remove() returns true if the value existed in the list and was
// removed; once it returns false, the URI is no longer in the list
while (uris.remove(conceptURI)) {
}
return uris;
}
/**
* The above code, although functional, does not take advantage of the fact
* that we can actually read and query the RDF in precisely the manner we
* wish.
*/
public static Concept createConceptUsingXMLModel(Concept concept,
String results, String langTagValue, boolean addNotes) {
try {
String conceptURI = concept.getUri();
// Load Model from RDF
StringReader reader = new StringReader(results);
Model model = ModelFactory.createDefaultModel();
model.read(reader, null, "RDF/XML");
// Execute the following query to get the information we want for
// this resource
// Preferred label
List<String> labelLiterals = getPrefLabelsFromModel(conceptURI,
model, langTagValue);
if (labelLiterals.size() > 0) {
concept.setLabel(labelLiterals.get(0));
} else {
// This is an error because there should be at least one label
// returned
log.debug("The number of preferred labels is not greater than zero");
}
// Alternate label
List<String> altLabelList = getAltLabelsFromModel(conceptURI,
model, langTagValue);
concept.setAltLabelList(altLabelList);
// Broader, narrower, exact match, and close match properties
List<String> broaderURIList = getBroaderURIsFromModel(conceptURI,
model);
// broaderURIList = removeConceptURIFromList(broaderURIList,
// conceptURI);
concept.setBroaderURIList(broaderURIList);
List<String> narrowerURIList = getNarrowerURIsFromModel(conceptURI,
model);
// narrowerURIList = removeConceptURIFromList(narrowerURIList,
// conceptURI);
concept.setNarrowerURIList(narrowerURIList);
List<String> exactMatchURIList = getExactMatchURIsFromModel(
conceptURI, model);
// exactMatchURIList = removeConceptURIFromList(exactMatchURIList,
// conceptURI);
concept.setExactMatchURIList(exactMatchURIList);
List<String> closeMatchURIList = getCloseMatchURIsFromModel(
conceptURI, model);
// closeMatchURIList = removeConceptURIFromList(closeMatchURIList,
// conceptURI);
concept.setCloseMatchURIList(closeMatchURIList);
// Notes may exist, in which case they should be employed
if (addNotes) {
List<String> notes = getNotesFromModel(conceptURI, model,
langTagValue);
if (notes.size() > 0) {
concept.setDefinition(notes.get(0));
}
}
} catch (Exception e) {
log.error("error occurred in parsing " + results, e);
}
return concept;
}
private static List<String> getPrefLabelsFromModel(String conceptURI,
Model model, String langTagValue) {
String propertyURI = "http://www.w3.org/2004/02/skos/core#prefLabel";
return getLabelsFromModel(conceptURI, propertyURI, model, langTagValue);
}
private static List<String> getAltLabelsFromModel(String conceptURI,
Model model, String langTagValue) {
String propertyURI = "http://www.w3.org/2004/02/skos/core#altLabel";
return getLabelsFromModel(conceptURI, propertyURI, model, langTagValue);
}
private static List<String> getLabelsFromModel(String conceptURI,
String propertyURI, Model model, String langTagValue) {
List<String> labels = new ArrayList<String>();
StmtIterator statements = model.listStatements(
ResourceFactory.createResource(conceptURI),
ResourceFactory.createProperty(propertyURI), (RDFNode) null);
while (statements.hasNext()) {
Statement statement = statements.nextStatement();
RDFNode node = statement.getObject();
if (node != null && node.isLiteral()) {
String label = node.asLiteral().getString();
if (StringUtils.isNotEmpty(langTagValue)) {
String language = node.asLiteral().getLanguage();
if (language != null && language.equals(langTagValue)) {
labels.add(label);
}
} else {
labels.add(label);
}
}
}
return labels;
}
private static List<String> getNotesFromModel(String conceptURI,
Model model, String langTagValue) {
String propertyURI = "http://www.w3.org/2004/02/skos/core#note";
return getLabelsFromModel(conceptURI, propertyURI, model, langTagValue);
}
private static List<String> getCloseMatchURIsFromModel(String conceptURI,
Model model) {
String propertyURI = "http://www.w3.org/2004/02/skos/core#closeMatch";
return getRelatedURIsFromModel(conceptURI, propertyURI, model);
}
private static List<String> getExactMatchURIsFromModel(String conceptURI,
Model model) {
String propertyURI = "http://www.w3.org/2004/02/skos/core#exactMatch";
return getRelatedURIsFromModel(conceptURI, propertyURI, model);
}
private static List<String> getNarrowerURIsFromModel(String conceptURI,
Model model) {
String propertyURI = "http://www.w3.org/2004/02/skos/core#narrower";
return getRelatedURIsFromModel(conceptURI, propertyURI, model);
}
private static List<String> getBroaderURIsFromModel(String conceptURI,
Model model) {
String propertyURI = "http://www.w3.org/2004/02/skos/core#broader";
return getRelatedURIsFromModel(conceptURI, propertyURI, model);
}
private static List<String> getRelatedURIsFromModel(String conceptURI,
String propertyURI, Model model) {
List<String> URIs = new ArrayList<String>();
NodeIterator nodeIterator = model.listObjectsOfProperty(
ResourceFactory.createResource(conceptURI),
ResourceFactory.createProperty(propertyURI));
while (nodeIterator.hasNext()) {
RDFNode node = nodeIterator.nextNode();
if (node.isResource() && node.asResource().getURI() != null) {
String URI = node.asResource().getURI();
URIs.add(URI);
}
}
return URIs;
}
}
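
A usage sketch for the class above (not part of the commit): given the URI of a SKOS concept served as RDF/XML, createConceptUsingXMLFromURL downloads the document and fills in the Concept bean. The URI below is a placeholder, and the method returns null if the download fails.

import edu.cornell.mannlib.semservices.bo.Concept;
import edu.cornell.mannlib.semservices.util.SKOSUtils;

public class SKOSUtilsExample {
    public static void main(String[] args) {
        Concept concept = new Concept();
        concept.setUri("http://example.org/vocabulary/C12345");  // placeholder concept URI
        concept = SKOSUtils.createConceptUsingXMLFromURL(
                concept, concept.getUri(),
                "en",     // keep only labels carrying an "en" language tag
                true);    // also copy a skos:note into the definition, if present
        if (concept != null) {
            // Label, alt labels, broader/narrower and exact/close match URI lists
            // are now populated from the downloaded RDF.
            System.out.println(concept.getLabel() + " broader: " + concept.getBroaderURIList());
        }
    }
}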

View file

@@ -0,0 +1,361 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.semservices.util;
import java.io.BufferedWriter;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import javax.xml.parsers.DocumentBuilder;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.parsers.ParserConfigurationException;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Source;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerConfigurationException;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.TransformerFactoryConfigurationError;
import javax.xml.transform.stream.StreamResult;
import javax.xml.transform.stream.StreamSource;
import javax.xml.xpath.XPath;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpressionException;
import javax.xml.xpath.XPathFactory;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
/**
* Convenience class for parsing XML strings into DOM Documents and
* retrieving their contents.
*/
public class XMLUtils {
private static DocumentBuilder parser;
public static Writer writer;
static private String indent = "";
protected static final Log logger = LogFactory.getLog(XMLUtils.class);
/**
* @throws ParserConfigurationException
*/
public static DocumentBuilder getDocumentBuilder()
throws ParserConfigurationException {
if (parser == null) {
// JPT: Remove xerces use
DocumentBuilderFactory documentBuilderFactory = DocumentBuilderFactory
.newInstance();
documentBuilderFactory.setNamespaceAware(true);
documentBuilderFactory.setValidating(false);
parser = documentBuilderFactory.newDocumentBuilder();
}
return parser;
}
/**
* @param xmlString XML String
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
*/
public synchronized static Document parse(String xmlString)
throws IOException, SAXException, ParserConfigurationException {
StringReader reader = new StringReader(xmlString);
InputSource inputSource = new InputSource(reader);
return getDocumentBuilder().parse(inputSource);
}
/**
* @param stream Input stream
* @throws IOException
* @throws SAXException
* @throws ParserConfigurationException
*/
public synchronized static Document parse(InputStream stream)
throws IOException, SAXException, ParserConfigurationException {
return getDocumentBuilder().parse(stream);
}
/**
* @param document DOM Document
* @param name Name
*/
public static String getElementByName(Document document, String name) {
NodeList nodes = document.getElementsByTagName(name);
String s = null;
for (int i=0; i < nodes.getLength() ; i++) {
Node node = nodes.item(i);
s = node.getTextContent().trim();
}
return s;
}
/**
* @param doc DOM Document
* @throws IOException
*/
@SuppressWarnings("deprecation")
public static void serializeDoc(Document doc) throws IOException {
org.apache.xml.serialize.XMLSerializer serializer = new org.apache.xml.serialize.XMLSerializer();
serializer.setOutputByteStream(System.out);
serializer.serialize(doc);
}
@SuppressWarnings("deprecation")
public static String serializeDoctoString(Document doc) throws IOException {
org.apache.xml.serialize.XMLSerializer serializer = new org.apache.xml.serialize.XMLSerializer();
ByteArrayOutputStream bout = new ByteArrayOutputStream();
serializer.setOutputByteStream(bout);
serializer.serialize(doc);
return bout.toString();
}
/**
* @param xml XML String
*/
public static void prettyPrint(String xml) {
Source xmlInput = new StreamSource(new StringReader(xml));
StreamResult xmlOutput = new StreamResult(new StringWriter());
Transformer transformer = null;
try {
transformer = TransformerFactory.newInstance().newTransformer();
} catch (TransformerConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (TransformerFactoryConfigurationError e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//transformer.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM, "testing.dtd");
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
try {
transformer.transform(xmlInput, xmlOutput);
} catch (TransformerException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
String formattedxml=xmlOutput.getWriter().toString();
System.out.println(formattedxml);
}
/**
* @param xml XML String
*/
public static String prettyPrintToString(String xml) {
Source xmlInput = new StreamSource(new StringReader(xml));
StreamResult xmlOutput = new StreamResult(new StringWriter());
Transformer transformer = null;
try {
transformer = TransformerFactory.newInstance().newTransformer();
} catch (TransformerConfigurationException e) {
// TODO Auto-generated catch block
e.printStackTrace();
} catch (TransformerFactoryConfigurationError e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//transformer.setOutputProperty(OutputKeys.DOCTYPE_SYSTEM, "testing.dtd");
transformer.setOutputProperty(OutputKeys.INDENT, "yes");
transformer.setOutputProperty("{http://xml.apache.org/xslt}indent-amount", "2");
try {
transformer.transform(xmlInput, xmlOutput);
} catch (TransformerException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
String formattedxml=xmlOutput.getWriter().toString();
return formattedxml;
}
/**
* @param node DOM Node
*/
public static void displayNodeInfo(Node node) {
switch (node.getNodeType()) {
case Node.DOCUMENT_NODE:
System.out.println("Document Node ");
break;
case Node.ELEMENT_NODE:
System.out.println("Element Node: "+ node.getNodeName());
break;
case Node.TEXT_NODE:
System.out.println("Text Node: "+ node.getNodeName());
break;
case Node.CDATA_SECTION_NODE:
System.out.println("CDATA Section Node: ");
break;
case Node.COMMENT_NODE:
System.out.println("Comment Node ");
break;
case Node.PROCESSING_INSTRUCTION_NODE:
System.out.println("Processing Instruction Node ");
break;
case Node.ENTITY_REFERENCE_NODE:
System.out.println("Entity Reference Node ");
break;
case Node.DOCUMENT_TYPE_NODE:
System.out.println("Document Type Node ");
break;
}
}
/**
* @param node DOM Node
* @throws IOException
*/
public static void serializeNode(Node node) throws IOException {
if (writer == null) writer = new BufferedWriter(new OutputStreamWriter(System.out));
switch (node.getNodeType()) {
case Node.DOCUMENT_NODE:
Document doc = (Document) node;
writer.write("<?xml version=\"");
writer.write(doc.getXmlVersion());
writer.write("\" encoding=\"UTF-8\" standalone=\"");
if (doc.getXmlStandalone())
writer.write("yes");
else
writer.write("no");
writer.write("\"?>\n");
NodeList nodes = node.getChildNodes();
if (nodes != null)
for (int i = 0; i < nodes.getLength(); i++)
serializeNode(nodes.item(i));
break;
case Node.ELEMENT_NODE:
String name = node.getNodeName();
writer.write("<" + name);
NamedNodeMap attributes = node.getAttributes();
for (int i = 0; i < attributes.getLength(); i++) {
Node current = attributes.item(i);
writer.write(" " + current.getNodeName() + "=\"");
print(current.getNodeValue());
writer.write("\"");
}
writer.write(">");
NodeList children = node.getChildNodes();
if (children != null) {
//if ((children.item(0) != null) && (children.item(0).getNodeType() == Node.ELEMENT_NODE))
// writer.write("\n");
for (int i = 0; i < children.getLength(); i++)
serializeNode(children.item(i));
if ((children.item(0) != null)
&& (children.item(children.getLength() - 1).getNodeType() == Node.ELEMENT_NODE))
writer.write("");
}
writer.write("</" + name + ">");
break;
case Node.TEXT_NODE:
print(node.getNodeValue());
break;
case Node.CDATA_SECTION_NODE:
writer.write("CDATA");
print(node.getNodeValue());
writer.write("");
break;
case Node.COMMENT_NODE:
writer.write("<!-- " + node.getNodeValue() + " -->\n");
break;
case Node.PROCESSING_INSTRUCTION_NODE:
writer.write("<?" + node.getNodeName() + " " + node.getNodeValue() + "?>\n");
break;
case Node.ENTITY_REFERENCE_NODE:
writer.write("&" + node.getNodeName() + ";");
break;
case Node.DOCUMENT_TYPE_NODE:
DocumentType docType = (DocumentType) node;
String publicId = docType.getPublicId();
String systemId = docType.getSystemId();
String internalSubset = docType.getInternalSubset();
writer.write("<!DOCTYPE " + docType.getName());
if (publicId != null)
writer.write(" PUBLIC \"" + publicId + "\" ");
else
writer.write(" SYSTEM ");
writer.write("\"" + systemId + "\"");
if (internalSubset != null)
writer.write(" [" + internalSubset + "]");
writer.write(">\n");
break;
}
writer.flush();
}
/**
* @param s String
* @throws IOException
*/
private static void print(String s) throws IOException {
if (s == null)
return;
for (int i = 0, len = s.length(); i < len; i++) {
char c = s.charAt(i);
switch (c) {
case '<':
writer.write("&lt;");
break;
case '>':
writer.write("&gt;");
break;
case '&':
writer.write("&amp;");
break;
case '\r':
writer.write("&#xD;");
break;
default:
writer.write(c);
}
}
}
/**
* @param obj (either a Document or a Node)
* @param expression Expression
* @return string contents
*/
public static Node getNodeWithXpath(Object obj, String expression) {
Object root = null;
if (obj instanceof Document) {
Document doc = (Document) obj;
root = doc.getDocumentElement();
} else {
root = (Node) obj;
}
XPath xpath = XPathFactory.newInstance().newXPath();
xpath.setNamespaceContext(new MetadataNamespaceContext());
Node result = null;
try {
result = ((Node) xpath.evaluate(expression, root, XPathConstants.NODE));
return result;
} catch (XPathExpressionException e) {
logger.error("XPathExpressionException ", e);
return null;
}
}
}
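
A brief usage sketch for the helpers above (not part of the commit); the XML string and XPath expression are illustrative only.

import org.w3c.dom.Document;
import org.w3c.dom.Node;

import edu.cornell.mannlib.semservices.util.XMLUtils;

public class XMLUtilsExample {
    public static void main(String[] args) throws Exception {
        String xml = "<response><status>ok</status></response>";   // illustrative input
        Document doc = XMLUtils.parse(xml);
        // Text content of the (last) <status> element found by tag name.
        String status = XMLUtils.getElementByName(doc, "status");
        // XPath lookup; prefixes resolve through MetadataNamespaceContext.
        Node node = XMLUtils.getNodeWithXpath(doc, "/response/status");
        System.out.println(status + " / " + (node == null ? "not found" : node.getTextContent()));
    }
}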

View file

@@ -27,7 +27,7 @@ import edu.cornell.mannlib.vedit.beans.FormObject;
import edu.cornell.mannlib.vedit.beans.DynamicField;
import edu.cornell.mannlib.vedit.beans.DynamicFieldRow;
import org.apache.commons.codec.binary.Base64;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import edu.cornell.mannlib.vedit.tags.EditTag;
public class DynamicFieldsTag extends EditTag {

View file

@@ -11,7 +11,7 @@ import javax.servlet.jsp.JspWriter;
import edu.cornell.mannlib.vedit.beans.EditProcessObject;
import edu.cornell.mannlib.vedit.beans.FormObject;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
public class EditTag extends TagSupport {
private String name = null;

View file

@@ -7,7 +7,7 @@ import javax.servlet.jsp.tagext.TagSupport;
import javax.servlet.jsp.JspWriter;
import edu.cornell.mannlib.vedit.beans.FormObject;
import edu.cornell.mannlib.vedit.tags.EditTag;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
/** This tag allows validation error messages to be displayed on a form JSP **/
public class ErrorTag extends EditTag {
@@ -29,7 +29,7 @@ public class ErrorTag extends EditTag {
}
if (errors != null){
out.print(StringEscapeUtils.escapeHtml((String) errors));
out.print(StringEscapeUtils.ESCAPE_HTML4.translate((String) errors));
}
} catch(Exception ex) {

View file

@@ -12,7 +12,7 @@ import javax.servlet.jsp.JspWriter;
import edu.cornell.mannlib.vedit.beans.Option;
import edu.cornell.mannlib.vedit.tags.EditTag;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
public class OptionTag extends EditTag {
private String name = null;
@@ -29,11 +29,11 @@ public class OptionTag extends EditTag {
opt.setValue("");
if (opt.getBody() == null)
opt.setBody("");
out.print("<option value=\""+StringEscapeUtils.escapeHtml(opt.getValue())+"\"");
out.print("<option value=\""+StringEscapeUtils.ESCAPE_HTML4.translate(opt.getValue())+"\"");
if (opt.getSelected())
out.print(" selected=\"selected\"");
out.print(">");
out.print(StringEscapeUtils.escapeHtml(opt.getBody()));
out.print(StringEscapeUtils.ESCAPE_HTML4.translate(opt.getBody()));
out.print("</option>\n");
}
}
@@ -54,7 +54,7 @@ public class OptionTag extends EditTag {
OrderedMapIterator ogKey = optGroups.orderedMapIterator();
while (ogKey.hasNext()) {
String optGroupName = (String) ogKey.next();
out.println("<optgroup label=\""+StringEscapeUtils.escapeHtml(optGroupName)+"\">");
out.println("<optgroup label=\""+StringEscapeUtils.ESCAPE_HTML4.translate(optGroupName)+"\">");
outputOptionsMarkup((List)optGroups.get(optGroupName),out);
out.println("</optgroup>");
}

View file

@@ -8,7 +8,7 @@ import javax.servlet.jsp.JspException;
import javax.servlet.jsp.tagext.TagSupport;
import javax.servlet.jsp.JspWriter;
import edu.cornell.mannlib.vedit.beans.FormObject;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import edu.cornell.mannlib.vedit.tags.EditTag;
public class ValueTag extends EditTag {
@@ -35,7 +35,7 @@ public class ValueTag extends EditTag {
if (values != null){
String value = (String) values.get(name);
if (value != null)
out.print(StringEscapeUtils.escapeHtml(value));
out.print(StringEscapeUtils.ESCAPE_HTML4.translate(value));
} else {
System.out.println("ValueTag unable to get HashMap of form values");
}
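
The tag classes above are part of the commons-lang 2 to commons-lang3 migration: StringEscapeUtils.escapeHtml(s) becomes StringEscapeUtils.ESCAPE_HTML4.translate(s). A minimal sketch of the equivalence, not part of the commit; the sample string is arbitrary.

import org.apache.commons.lang3.StringEscapeUtils;

public class EscapeExample {
    public static void main(String[] args) {
        String value = "<b>R&D \"quotes\"</b>";
        // ESCAPE_HTML4 is the translator used by the tag classes above;
        // escapeHtml4(value) is the convenience method that wraps it.
        System.out.println(StringEscapeUtils.ESCAPE_HTML4.translate(value));
        System.out.println(StringEscapeUtils.escapeHtml4(value));
    }
}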

View file

@@ -25,7 +25,6 @@ import edu.cornell.mannlib.vitro.webapp.startup.ComponentStartupStatusImpl;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
import edu.cornell.mannlib.vitro.webapp.triplesource.impl.BasicCombinedTripleSource;
import edu.cornell.mannlib.vitro.webapp.utils.configuration.Property;
import edu.cornell.mannlib.vitro.webapp.utils.configuration.Validation;
/**
* The basic implementation of the Application interface.
@@ -69,15 +68,9 @@ public class ApplicationImpl implements Application {
return searchEngine;
}
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasSearchEngine")
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasSearchEngine", minOccurs = 1, maxOccurs = 1)
public void setSearchEngine(SearchEngine se) {
if (searchEngine == null) {
searchEngine = se;
} else {
throw new IllegalStateException(
"Configuration includes multiple SearchEngine instances: "
+ searchEngine + ", and " + se);
}
}
@Override
@@ -85,15 +78,9 @@ public class ApplicationImpl implements Application {
return searchIndexer;
}
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasSearchIndexer")
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasSearchIndexer", minOccurs = 1, maxOccurs = 1)
public void setSearchIndexer(SearchIndexer si) {
if (searchIndexer == null) {
searchIndexer = si;
} else {
throw new IllegalStateException(
"Configuration includes multiple SearchIndexer instances: "
+ searchIndexer + ", and " + si);
}
}
@Override
@@ -101,15 +88,9 @@ public class ApplicationImpl implements Application {
return imageProcessor;
}
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasImageProcessor")
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasImageProcessor", minOccurs = 1, maxOccurs = 1)
public void setImageProcessor(ImageProcessor ip) {
if (imageProcessor == null) {
imageProcessor = ip;
} else {
throw new IllegalStateException(
"Configuration includes multiple ImageProcessor instances: "
+ imageProcessor + ", and " + ip);
}
}
@Override
@@ -117,15 +98,9 @@ public class ApplicationImpl implements Application {
return fileStorage;
}
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasFileStorage")
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasFileStorage", minOccurs = 1, maxOccurs = 1)
public void setFileStorage(FileStorage fs) {
if (fileStorage == null) {
fileStorage = fs;
} else {
throw new IllegalStateException(
"Configuration includes multiple FileStorage instances: "
+ fileStorage + ", and " + fs);
}
}
@Override
@@ -133,15 +108,9 @@ public class ApplicationImpl implements Application {
return contentTripleSource;
}
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasContentTripleSource")
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasContentTripleSource", minOccurs = 1, maxOccurs = 1)
public void setContentTripleSource(ContentTripleSource source) {
if (contentTripleSource == null) {
contentTripleSource = source;
} else {
throw new IllegalStateException(
"Configuration includes multiple instances of ContentTripleSource: "
+ contentTripleSource + ", and " + source);
}
}
@Override
@@ -149,15 +118,9 @@ public class ApplicationImpl implements Application {
return configurationTripleSource;
}
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasConfigurationTripleSource")
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasConfigurationTripleSource", minOccurs = 1, maxOccurs = 1)
public void setConfigurationTripleSource(ConfigurationTripleSource source) {
if (configurationTripleSource == null) {
configurationTripleSource = source;
} else {
throw new IllegalStateException(
"Configuration includes multiple instances of ConfigurationTripleSource: "
+ configurationTripleSource + ", and " + source);
}
}
@Override
@@ -165,47 +128,9 @@ public class ApplicationImpl implements Application {
return tboxReasonerModule;
}
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasTBoxReasonerModule")
@Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasTBoxReasonerModule", minOccurs = 1, maxOccurs = 1)
public void setTBoxReasonerModule(TBoxReasonerModule module) {
if (tboxReasonerModule == null) {
tboxReasonerModule = module;
} else {
throw new IllegalStateException(
"Configuration includes multiple instances of TBoxReasonerModule: "
+ tboxReasonerModule + ", and " + module);
}
}
@Validation
public void validate() throws Exception {
if (searchEngine == null) {
throw new IllegalStateException(
"Configuration did not include a SearchEngine.");
}
if (searchIndexer == null) {
throw new IllegalStateException(
"Configuration did not include a SearchIndexer.");
}
if (imageProcessor == null) {
throw new IllegalStateException(
"Configuration did not include an ImageProcessor.");
}
if (fileStorage == null) {
throw new IllegalStateException(
"Configuration did not include a FileStorage.");
}
if (contentTripleSource == null) {
throw new IllegalStateException(
"Configuration did not include a ContentTripleSource.");
}
if (configurationTripleSource == null) {
throw new IllegalStateException(
"Configuration did not include a ConfigurationTripleSource.");
}
if (tboxReasonerModule == null) {
throw new IllegalStateException(
"Configuration did not include a TBoxReasonerModule.");
}
}
@Override
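
The ApplicationImpl changes above replace the hand-written duplicate checks and the @Validation method with cardinality constraints declared on @Property itself (minOccurs = 1, maxOccurs = 1), so the configuration loader enforces that exactly one module of each kind is supplied. A hedged sketch of the pattern; the module type, property URI, and class name below are invented for illustration.

import edu.cornell.mannlib.vitro.webapp.utils.configuration.Property;

public class ExampleModuleHolder {
    // Placeholder type standing in for SearchEngine, SearchIndexer, FileStorage, etc.
    public interface ExampleModule { }

    private ExampleModule module;

    // The configuration loader, not the setter, now rejects configurations
    // that provide zero or more than one ExampleModule.
    @Property(uri = "http://vitro.mannlib.cornell.edu/ns/vitro/ApplicationSetup#hasExampleModule",
            minOccurs = 1, maxOccurs = 1)
    public void setExampleModule(ExampleModule m) {
        this.module = m;
    }

    public ExampleModule getExampleModule() {
        return module;
    }
}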

View file

@@ -12,7 +12,7 @@ import java.util.Set;
import javax.servlet.ServletContext;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@@ -10,7 +10,7 @@ import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@@ -1,713 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.controller;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.net.MalformedURLException;
import java.security.MessageDigest;
import java.text.SimpleDateFormat;
import java.util.List;
import java.util.Properties;
import javax.servlet.ServletContext;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import edu.cornell.mannlib.vitro.webapp.utils.JSPPageHandler;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTime;
import org.apache.jena.datatypes.xsd.XSDDatatype;
import org.apache.jena.ontology.DatatypeProperty;
import org.apache.jena.ontology.ObjectProperty;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.rdf.model.Resource;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.vocabulary.RDFS;
import org.apache.jena.vocabulary.XSD;
import com.ibm.icu.util.Calendar;
import edu.cornell.mannlib.vedit.beans.LoginStatusBean;
import edu.cornell.mannlib.vitro.webapp.beans.DataProperty;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.beans.DataPropertyStatementImpl;
import edu.cornell.mannlib.vitro.webapp.beans.Individual;
import edu.cornell.mannlib.vitro.webapp.beans.ObjectPropertyStatement;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.dao.IndividualDao;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.modelaccess.ModelAccess;
import fedora.client.FedoraClient;
import fedora.common.Constants;
import fedora.server.management.FedoraAPIM;
import fedora.server.types.gen.Datastream;
/**
* Handles a request to change a datastream in a fedora repository.
* Some of this code is copied from N3MultiPartUpload.java
*
* @author bdc34
*
*/
public class FedoraDatastreamController extends VitroHttpServlet implements Constants{
private static String FEDORA_PROPERTIES = "/WEB-INF/fedora.properties";
private static String DEFAULT_DSID = "DS1";
private String fedoraUrl = null;
private String adminUser = null;
private String adminPassword = null;
private String pidNamespace = null;
private String configurationStatus = "<p>Fedora configuration not yet loaded</p>";
private boolean configured = false;
private boolean connected = false;
private static final int DEFAULT_MAX_SIZE = 1024 * 1024 * 50;//Should this be changed to 1 GB to be consistent?
private static final String DEFAULT_BASE_DIR = "/usr/local/vitrofiles";
private static String baseDirectoryForFiles = DEFAULT_BASE_DIR;
private static int maxFileSize = DEFAULT_MAX_SIZE;
protected String contentTypeProperty = VitroVocabulary.CONTENT_TYPE;
protected String fileSizeProperty = VitroVocabulary.FILE_SIZE;
protected String fileNameProperty = VitroVocabulary.FILE_NAME;
protected String fileLocationProperty = VitroVocabulary.FILE_LOCATION;
protected String fileLabelProperty = RDFS.label.getURI();
protected String checksumNodeProperty = "";//Object property linking file to check sum node object
protected String checksumNodeDateTimeProperty = "";
protected String checksumNodeValueProperty = "";
protected String checksumDataProperty = ""; //is there a vitro equivalent?
protected String deleteNs = "";
protected String individualPrefix = "";
protected String fedoraNs = VitroVocabulary.VITRO_FEDORA;
/**
* The get will present a form to the user.
*/
@Override
public void doGet(HttpServletRequest req, HttpServletResponse res)
throws IOException, ServletException {
try {
super.doGet(req, res);
log.debug("In doGet");
VitroRequest vreq = new VitroRequest(req);
OntModel sessionOntModel = ModelAccess.on(getServletContext()).getOntModel();
synchronized (FedoraDatastreamController.class) {
if( fedoraUrl == null ){
setup( sessionOntModel, getServletContext() );
if( fedoraUrl == null )
throw new FdcException("Connection to the file repository is " +
"not setup correctly. Could not read fedora.properties file");
}else{
if( !canConnectToFedoraServer() ){
fedoraUrl = null;
throw new FdcException("Could not connect to Fedora.");
}
}
}
FedoraClient fedora;
try { fedora = new FedoraClient(fedoraUrl,adminUser,adminPassword); }
catch (MalformedURLException e) {
throw new FdcException("Malformed URL for fedora Repository location: " + fedoraUrl);
}
FedoraAPIM apim;
try { apim = fedora.getAPIM(); } catch (Exception e) {
throw new FdcException("could not create fedora APIM:" + e.getMessage());
}
//check if logged in
//get URI for file individual
if( req.getParameter("uri") == null || "".equals(req.getParameter("uri")))
throw new FdcException("No file uri specified in request");
boolean isDelete = (req.getParameter("delete") != null && "true".equals(req.getParameter("delete")));
String fileUri = req.getParameter("uri");
//check if file individual has a fedora:PID for a data stream
IndividualDao iwDao = vreq.getWebappDaoFactory().getIndividualDao();
Individual entity = iwDao.getIndividualByURI(fileUri);
if( entity == null )
throw new FdcException( "No entity found in system for file uri " + fileUri);
//System.out.println("Entity == null:" + (entity == null));
//get the fedora PID
//System.out.println("entity data property " + entity.getDataPropertyMap().get(VitroVocabulary.FEDORA_PID));
if( entity.getDataPropertyMap().get(VitroVocabulary.FEDORA_PID ) == null )
throw new FdcException( "No fedora:pid found in system for file uri " + fileUri);
List<DataPropertyStatement> stmts = entity.getDataPropertyMap().get(VitroVocabulary.FEDORA_PID).getDataPropertyStatements();
if( stmts == null || stmts.size() == 0)
throw new FdcException( "No fedora:pid found in system for file uri " + fileUri);
String pid = null;
for(DataPropertyStatement stmt : stmts){
if( stmt.getData() != null && stmt.getData().length() > 0){
pid = stmt.getData();
break;
}
}
//System.out.println("pid is " + pid + " and comparison is " + (pid == null));
if( pid == null )
throw new FdcException( "No fedora:pid found in system for file uri " + fileUri);
req.setAttribute("pid", pid);
req.setAttribute("fileUri", fileUri);
//get current file name to use on form
req.setAttribute("fileName", entity.getName());
if(isDelete)
{
//Execute a 'deletion', i.e. unlink dataset and file, without removing file
//Also save deletion as a deleteEvent entity which can later be queried
String datasetUri = null;
//Get dataset uri by getting the fromDataSet property
edu.cornell.mannlib.vitro.webapp.beans.ObjectProperty fromDataSet = entity.getObjectPropertyMap().get(fedoraNs + "fromDataSet");
if(fromDataSet != null)
{
List<ObjectPropertyStatement> fromDsStmts = fromDataSet.getObjectPropertyStatements();
if(fromDsStmts.size() > 0) {
datasetUri = fromDsStmts.get(0).getObjectURI();
//System.out.println("object uri should be " + datasetUri);
} else {
//System.out.println("No matching dataset uri could be found");
}
} else {
//System.out.println("From dataset is null");
}
req.setAttribute("dataseturi", datasetUri);
boolean success = deleteFile(req, entity, iwDao, sessionOntModel);
req.setAttribute("deletesuccess", (success)?"success":"error");
JSPPageHandler.renderBasicPage(req, res, "/edit/fileDeleteConfirm.jsp");
}
else{
//check if the data stream exists in the fedora repository
Datastream ds = apim.getDatastream(pid,DEFAULT_DSID,null);
if( ds == null )
throw new FdcException("There was no datastream in the " +
"repository for " + pid + " " + DEFAULT_DSID);
req.setAttribute("dsid", DEFAULT_DSID);
//forward to form
JSPPageHandler.renderBasicPage(req, res, "/fileupload/datastreamModification.jsp");
}
}catch(FdcException ex){
req.setAttribute("errors", ex.getMessage());
JSPPageHandler.renderPlainPage(req, res, "/edit/fileUploadError.jsp");
return;
}
}
@Override
public long maximumMultipartFileSize() {
return maxFileSize;
}
@Override
public boolean stashFileSizeException() {
return true;
}
@Override
public void doPost(HttpServletRequest rawRequest, HttpServletResponse res)
throws ServletException, IOException {
try{
VitroRequest req = new VitroRequest(rawRequest);
if (req.hasFileSizeException()) {
throw new FdcException("Size limit exceeded: " + req.getFileSizeException().getLocalizedMessage());
}
if (!req.isMultipart()) {
throw new FdcException("Must POST a multipart encoded request");
}
//check if fedora is on line
OntModel sessionOntModel = ModelAccess.on(getServletContext()).getOntModel();
synchronized (FedoraDatastreamController.class) {
if( fedoraUrl == null ){
setup( sessionOntModel, getServletContext() );
if( fedoraUrl == null )
throw new FdcException("Connection to the file repository is " +
"not setup correctly. Could not read fedora.properties file");
}else{
if( !canConnectToFedoraServer() ){
fedoraUrl = null;
throw new FdcException("Could not connect to Fedora.");
}
}
}
FedoraClient fedora;
try { fedora = new FedoraClient(fedoraUrl,adminUser,adminPassword); }
catch (MalformedURLException e) {
throw new FdcException("Malformed URL for fedora Repository location: " + fedoraUrl);
}
FedoraAPIM apim;
try { apim = fedora.getAPIM(); } catch (Exception e) {
throw new FdcException("could not create fedora APIM:" + e.getMessage());
}
//get the parameters from the request
String pId=req.getParameter("pid");
String dsId=req.getParameter("dsid");
String fileUri=req.getParameter("fileUri");
boolean useNewName=false;
if( "true".equals(req.getParameter("useNewName"))){
useNewName = true;
}
if( pId == null || pId.length() == 0 )
throw new FdcException("Your form submission did not contain " +
"enough information to complete your request.(Missing pid parameter)");
if( dsId == null || dsId.length() == 0 )
throw new FdcException("Your form submission did not contain " +
"enough information to complete your request.(Missing dsid parameter)");
if( fileUri == null || fileUri.length() == 0 )
throw new FdcException("Your form submission did not contain " +
"enough information to complete your request.(Missing fileUri parameter)");
FileItem fileRes = req.getFileItem("fileRes");
if( fileRes == null )
throw new FdcException("Your form submission did not contain " +
"enough information to complete your request.(Missing fileRes)");
//check if file individual has a fedora:PID for a data stream
VitroRequest vreq = new VitroRequest(rawRequest);
IndividualDao iwDao = vreq.getWebappDaoFactory().getIndividualDao();
Individual fileEntity = iwDao.getIndividualByURI(fileUri);
//check if logged in
//TODO: check if logged in
//check if user is allowed to edit datastream
//TODO:check if can edit datastream
//check if digital object and data stream exist in fedora
Datastream ds = apim.getDatastream(pId,dsId,null);
if( ds == null )
throw new FdcException("There was no datastream in the " +
"repository for " + pId + " " + DEFAULT_DSID);
//upload to temp holding area
String originalName = fileRes.getName();
String name = originalName.replaceAll("[,+\\\\/$%^&*#@!<>'\"~;]", "_");
name = name.replace("..", "_");
name = name.trim().toLowerCase();
String saveLocation = baseDirectoryForFiles + File.separator + name;
String savedName = name;
int next = 0;
boolean foundUnusedName = false;
while (!foundUnusedName) {
File test = new File(saveLocation);
if (test.exists()) {
next++;
savedName = name + '(' + next + ')';
saveLocation = baseDirectoryForFiles + File.separator + savedName;
} else {
foundUnusedName = true;
}
}
File uploadedFile = new File(saveLocation);
try {
fileRes.write(uploadedFile);
} catch (Exception ex) {
log.error("Unable to save POSTed file. " + ex.getMessage());
throw new FdcException("Unable to save file to the disk. "
+ ex.getMessage());
}
//upload to temp area on fedora
File file = new File(saveLocation);
String uploadFileUri = fedora.uploadFile( file );
// System.out.println("Fedora upload temp = upload file uri is " + uploadFileUri);
String md5 = md5hashForFile( file );
md5 = md5.toLowerCase();
//make change to data stream on fedora
apim.modifyDatastreamByReference(pId, dsId,
null, null,
fileRes.getContentType(), null,
uploadFileUri,
"MD5", null,
null, false);
String checksum =
apim.compareDatastreamChecksum(pId,dsId,null);
//update properties like checksum, file size, and content type
WebappDaoFactory wdf = vreq.getWebappDaoFactory();
DataPropertyStatement dps = null;
DataProperty contentType = wdf.getDataPropertyDao().getDataPropertyByURI(this.contentTypeProperty);
if(contentType != null)
{
wdf.getDataPropertyStatementDao().deleteDataPropertyStatementsForIndividualByDataProperty(fileEntity, contentType);
dps = new DataPropertyStatementImpl();
dps.setIndividualURI(fileEntity.getURI());
dps.setDatapropURI(contentType.getURI());
dps.setData(fileRes.getContentType());
wdf.getDataPropertyStatementDao().insertNewDataPropertyStatement(dps);
}
DataProperty fileSize = wdf.getDataPropertyDao().getDataPropertyByURI(this.fileSizeProperty);
if(fileSize != null)
{
wdf.getDataPropertyStatementDao().deleteDataPropertyStatementsForIndividualByDataProperty(fileEntity, fileSize);
dps = new DataPropertyStatementImpl();
dps.setIndividualURI(fileEntity.getURI());
dps.setDatapropURI(fileSize.getURI());
dps.setData(Long.toString(fileRes.getSize()));
wdf.getDataPropertyStatementDao().insertNewDataPropertyStatement(dps);
//System.out.println("Updated file size with " + fileRes.getSize());
}
DataProperty checksumDp = wdf.getDataPropertyDao().getDataPropertyByURI(this.checksumDataProperty);
if(checksumDp != null)
{
//System.out.println("Checksum data property is also not null");
wdf.getDataPropertyStatementDao().deleteDataPropertyStatementsForIndividualByDataProperty(fileEntity, checksumDp);
dps = new DataPropertyStatementImpl();
dps.setIndividualURI(fileEntity.getURI());
dps.setDatapropURI(checksumDp.getURI());
dps.setData(checksum);
wdf.getDataPropertyStatementDao().insertNewDataPropertyStatement(dps);
}
//I'm leaving if statement out for now as the above properties are obviously being replaced as well
//if( "true".equals(useNewName)){
//Do we need to encapsulate in this if OR is this path always for replacing a file
//TODO: Put in check to see if file name has changed and only execute these statements if file name has changed
DataProperty fileNameProperty = wdf.getDataPropertyDao().getDataPropertyByURI(this.fileNameProperty);
if(fileNameProperty != null) {
wdf.getDataPropertyStatementDao().deleteDataPropertyStatementsForIndividualByDataProperty(fileEntity, fileNameProperty);
dps = new DataPropertyStatementImpl();
dps.setIndividualURI(fileEntity.getURI());
dps.setDatapropURI(fileNameProperty.getURI());
dps.setData(originalName); //This follows the pattern of the original file upload - the name returned from the uploaded file object
wdf.getDataPropertyStatementDao().insertNewDataPropertyStatement(dps);
//System.out.println("File name property is not null = " + fileNameProperty.getURI() + " updating to " + originalName);
} else {
//System.out.println("file name property is null");
}
//Need to also update the check sum node - how would we do that
//Find checksum node related to this particular file uri, then go ahead and update two specific fields
List<ObjectPropertyStatement >csNodeStatements = fileEntity.getObjectPropertyMap().get(this.checksumNodeProperty).getObjectPropertyStatements();
if(csNodeStatements.size() == 0) {
System.out.println("No object property statements correspond to this property");
} else {
ObjectPropertyStatement cnodeStatement = csNodeStatements.get(0);
String cnodeUri = cnodeStatement.getObjectURI();
//System.out.println("Checksum node uri is " + cnodeUri);
Individual checksumNodeObject = iwDao.getIndividualByURI(cnodeUri);
DataProperty checksumDateTime = wdf.getDataPropertyDao().getDataPropertyByURI(this.checksumNodeDateTimeProperty);
if(checksumDateTime != null) {
String newDatetime = sessionOntModel.createTypedLiteral(new DateTime()).getString();
//Review how to update date time
wdf.getDataPropertyStatementDao().deleteDataPropertyStatementsForIndividualByDataProperty(checksumNodeObject, checksumDateTime);
dps = new DataPropertyStatementImpl();
dps.setIndividualURI(checksumNodeObject.getURI());
dps.setDatapropURI(checksumDateTime.getURI());
dps.setData(newDatetime);
wdf.getDataPropertyStatementDao().insertNewDataPropertyStatement(dps);
}
DataProperty checksumNodeValue = wdf.getDataPropertyDao().getDataPropertyByURI(this.checksumDataProperty);
if(checksumNodeValue != null) {
wdf.getDataPropertyStatementDao().deleteDataPropertyStatementsForIndividualByDataProperty(checksumNodeObject, checksumNodeValue);
dps = new DataPropertyStatementImpl();
dps.setIndividualURI(checksumNodeObject.getURI());
dps.setDatapropURI(checksumNodeValue.getURI());
dps.setData(checksum); //Same as fileName above - change if needed
wdf.getDataPropertyStatementDao().insertNewDataPropertyStatement(dps);
}
}
//Assumes original entity name is equal to the location - as occurs with regular file upload
String originalEntityName = fileEntity.getName();
if(originalEntityName != originalName) {
//System.out.println("Setting file entity to name of uploaded file");
fileEntity.setName(originalName);
} else {
//System.out.println("Conditional for file entity name and uploaded name is saying same");
}
iwDao.updateIndividual(fileEntity);
//}
req.setAttribute("fileUri", fileUri);
req.setAttribute("originalFileName", fileEntity.getName());
req.setAttribute("checksum", checksum);
if( "true".equals(useNewName)){
req.setAttribute("useNewName", "true");
req.setAttribute("newFileName", originalName);
}else{
req.setAttribute("newFileName", fileEntity.getName());
}
//forward to form
JSPPageHandler.renderBasicPage(req, res, "/fileupload/datastreamModificationSuccess.jsp");
}catch(FdcException ex){
rawRequest.setAttribute("errors", ex.getMessage());
JSPPageHandler.renderPlainPage(rawRequest, res, "/edit/fileUploadError.jsp");
return;
}
}
//Delete method
public boolean deleteFile(HttpServletRequest req, Individual entity, IndividualDao iwDao, OntModel sessionOntModel) {
boolean success = false;
String fileUri = entity.getURI();
//Create uri based on milliseconds etc.?
Calendar c = Calendar.getInstance();
long timeMs = c.getTimeInMillis();
//Current date
SimpleDateFormat dateTime = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS");
String formattedDeleteDate = dateTime.format(c.getTime());
String deleteEventName = "deleteEvent" + timeMs;
//System.out.println("Delete event name is " +deleteEventName + " - delete time is " + formattedDeleteDate);
//Get current user
String userURI = LoginStatusBean.getBean(req).getUserURI();
//System.out.println("Current logged in user uri is " + userURI);
//Update model
sessionOntModel.enterCriticalSection(true);
try {
//Dataset Uri
String datasetUri = (String) req.getAttribute("dataseturi");
//System.out.println("Dataset uri is " + datasetUri);
//Remove the actual relationships: dsr:hasFile and fedora:fromDataSet
ObjectProperty hasFileProperty = sessionOntModel.getObjectProperty(fedoraNs + "hasFile");
ObjectProperty fromDatasetProperty = sessionOntModel.getObjectProperty(fedoraNs + "fromDataSet");
if(hasFileProperty != null) {
//System.out.println("Has file property does exist");
sessionOntModel.remove(sessionOntModel.createStatement(sessionOntModel.getResource(datasetUri), hasFileProperty, sessionOntModel.getResource(fileUri)));
} else{
//System.out.println("Has file property does not exist");
}
if(fromDatasetProperty != null) {
//System.out.println("From dataset property exists ");
sessionOntModel.remove(sessionOntModel.createStatement(sessionOntModel.getResource(fileUri), fromDatasetProperty, sessionOntModel.getResource(datasetUri)));
} else{
//System.out.println("From dataset property does not exist");
}
//Create delete event entity and update with the correct information
//Type of Event
Resource deleteEventType = sessionOntModel.createResource(deleteNs + "DeleteEvent");
//Individual event
Resource eventIndividual = sessionOntModel.createResource(individualPrefix + deleteEventName);
//Event is of type DeleteEvent
Statement rType = sessionOntModel.createStatement(eventIndividual, org.apache.jena.vocabulary.RDF.type, deleteEventType);
sessionOntModel.add(rType);
//Add properties to individual - deleteDateTime, deletedBy, forDataSet, forFile
DatatypeProperty dateTimeProp = sessionOntModel.createDatatypeProperty(deleteNs + "deleteDateTime");
dateTimeProp.setRange(XSD.dateTime);
ObjectProperty deletedByProp = sessionOntModel.createObjectProperty(deleteNs + "deletedBy");
ObjectProperty forDatasetProp = sessionOntModel.createObjectProperty(deleteNs + "forDataset");
ObjectProperty forFileProp = sessionOntModel.createObjectProperty(deleteNs + "forFile");
//Need to make sure date time property is set to correct xsd:DateTime
//XSDDateTime now = new XSDDateTime(c);
//XSDDateTime now = new XSDDateTime(java.util.Calendar.getInstance());
eventIndividual.addProperty(dateTimeProp, sessionOntModel.createTypedLiteral(formattedDeleteDate, XSDDatatype.XSDdateTime));
//eventIndividual.addProperty(dateTimeProp, sessionOntModel.createTypedLiteral(now, XSDDatatype.XSDdateTime));
eventIndividual.addProperty(deletedByProp, sessionOntModel.getResource(userURI));
if(datasetUri != null){
//System.out.println("Dataset uri is " + datasetUri);
eventIndividual.addProperty(forDatasetProp, sessionOntModel.getResource(datasetUri));
}
eventIndividual.addProperty(forFileProp, sessionOntModel.getResource(fileUri));
success = true;
} finally {
sessionOntModel.leaveCriticalSection();
}
return success;
}
@Override
public void init() throws ServletException {
super.init();
ConfigurationProperties configProperties = ConfigurationProperties
.getBean(getServletContext());
baseDirectoryForFiles = configProperties.getProperty(
"n3.baseDirectoryForFiles", DEFAULT_BASE_DIR);
String maxSize = configProperties.getProperty("n3.maxSize", Long
.toString(DEFAULT_MAX_SIZE));
try {
maxFileSize = Integer.parseInt(maxSize);
} catch (NumberFormatException nfe) {
log.error(nfe);
maxFileSize = DEFAULT_MAX_SIZE;
}
}
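/**
 * Reads connection settings from the fedora.properties file found on the
 * servlet context path. The file is expected to supply fedoraUrl, adminUser,
 * adminPassword and pidNamespace; any missing value leaves the controller
 * unconfigured and is reported in configurationStatus.
 */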
public void setup(OntModel model, ServletContext context) {
this.configurationStatus = "";
StringBuffer status = new StringBuffer("");
if( connected && configured )
return;
Properties props = new Properties();
String path = context.getRealPath(FEDORA_PROPERTIES);
try{
InputStream in = new FileInputStream(new File( path ));
props.load( in );
fedoraUrl = props.getProperty("fedoraUrl");
adminUser = props.getProperty("adminUser");
adminPassword = props.getProperty("adminPassword");
pidNamespace = props.getProperty("pidNamespace");
if( fedoraUrl == null || adminUser == null || adminPassword == null ){
if( fedoraUrl == null ){
log.error("'fedoraUrl' not found in properties file");
status.append("<p>'fedoraUrl' not found in properties file.</p>\n");
}
if( adminUser == null ) {
log.error("'adminUser' was not found in properties file, the " +
"user name of the fedora admin is needed to access the " +
"fedora API-M services.");
status.append("<p>'adminUser' was not found in properties file, the " +
"user name of the fedora admin is needed to access the " +
"fedora API-M services.</p>\n");
}
if( adminPassword == null ){
log.error("'adminPassword' was not found in properties file, the " +
"admin password is needed to access the fedora API-M services.");
status.append("<p>'adminPassword' was not found in properties file, the " +
"admin password is needed to access the fedora API-M services.</p>\n");
}
if( pidNamespace == null ){
log.error("'pidNamespace' was not found in properties file, the " +
"PID namespace indicates which namespace to use when creating " +
"new fedor digital objects.");
status.append("<p>'pidNamespace' was not found in properties file, the " +
"PID namespace indicates which namespace to use when creating " +
"new fedor digital objects.</p>\n");
}
fedoraUrl = null; adminUser = null; adminPassword = null;
configured = false;
} else {
configured = true;
}
}catch(FileNotFoundException e) {
log.error("No fedora.properties file found,"+
"it should be located at " + path);
status.append("<h1>Fedora configuration failed.</h1>\n");
status.append("<p>No fedora.properties file found,"+
"it should be located at " + path + "</p>\n");
configured = false;
return;
}catch(Exception ex){
status.append("<p>Fedora configuration failed.</p>\n");
status.append("<p>Exception while loading" + path + "</p>\n");
status.append("<p>" + ex.getMessage() + "</p>\n");
log.error("could not load fedora properties", ex);
fedoraUrl = null; adminUser = null; adminPassword = null;
configured = false;
return;
}
status.append(RELOAD_MSG);
this.configurationStatus += status.toString();
// else{
// status.append("<h2>Fedora configuration file ").append(path).append(" was loaded</h2>");
// status.append("<p>fedoraUrl: ").append(fedoraUrl).append("</p>\n");
// checkFedoraServer();
// }
}
private boolean canConnectToFedoraServer( ){
try{
FedoraClient fc = new FedoraClient(fedoraUrl,adminUser, adminPassword);
String fedoraVersion = fc.getServerVersion();
if( fedoraVersion != null && fedoraVersion.length() > 0 ){
configurationStatus += "<p>Fedora server is live and is running " +
"fedora version " + fedoraVersion + "</p>\n";
connected = true;
return true;
} else {
configurationStatus += "<p>Unable to reach fedora server</p>\n";
connected = false;
return false;
}
}catch (Exception e) {
configurationStatus += "<p>There was an error while checking the " +
"fedora server version</p>\n<p>"+ e.getMessage() + "</p>\n";
connected = false;
return false;
}
}
public boolean isConfigured(){ return configured; }
public boolean isConnected(){ return connected; }
private class FdcException extends Exception {
public FdcException(String message) {
super(message);
}
}
private static final String RELOAD_MSG =
"<p>The fedora configuartion file will be reloaded if " +
"you edit the properties file and check the status.</p>\n";
public static String md5hashForFile(File file){
try {
InputStream fin = new FileInputStream(file);
java.security.MessageDigest md5er =
MessageDigest.getInstance("MD5");
byte[] buffer = new byte[1024];
int read;
do {
read = fin.read(buffer);
if (read > 0)
md5er.update(buffer, 0, read);
} while (read != -1);
fin.close();
byte[] digest = md5er.digest();
if (digest == null)
return null;
String strDigest = "0x";
for (int i = 0; i < digest.length; i++) {
strDigest += Integer.toString((digest[i] & 0xff)
+ 0x100, 16).substring(1);
}
return strDigest;
} catch (Exception e) {
return null;
}
}
private static final Log log = LogFactory.getLog(FedoraDatastreamController.class.getName());
}

View file

@ -20,9 +20,9 @@ import org.apache.jena.rdf.model.Resource;
import edu.cornell.mannlib.vitro.webapp.beans.UserAccount;
import edu.cornell.mannlib.vitro.webapp.beans.UserAccount.Status;
import edu.cornell.mannlib.vitro.webapp.controller.accounts.UserAccountsOrdering.Field;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner.QueryParser;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.sparql.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.ResultSetParser;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.SparqlQueryRunner;
/**
* Pull some UserAccounts from the model, based on a set of criteria.
@ -111,8 +111,9 @@ public class UserAccountsSelector {
.replace("%offset%", offset());
log.debug("main query: " + qString);
List<UserAccount> accounts = new SparqlQueryRunner(model)
.executeSelect(new MainQueryParser(), qString);
List<UserAccount> accounts = SparqlQueryRunner
.createSelectQueryContext(model, qString).execute()
.parse(new MainQueryParser());
log.debug("query returns: " + accounts);
return accounts;
}
@ -126,8 +127,8 @@ public class UserAccountsSelector {
.replace("%filterClauses%", filterClauses());
log.debug("count query: " + qString);
int count = new SparqlQueryRunner(model).executeSelect(
new CountQueryParser(), qString);
int count = SparqlQueryRunner.createSelectQueryContext(model, qString)
.execute().parse(new CountQueryParser());
log.debug("result count: " + count);
return count;
}
@ -139,8 +140,9 @@ public class UserAccountsSelector {
PREFIX_LINES).replace("%uri%", uri);
log.debug("permissions query: " + qString);
Set<String> permissions = new SparqlQueryRunner(model)
.executeSelect(new PermissionsQueryParser(), qString);
Set<String> permissions = SparqlQueryRunner
.createSelectQueryContext(model, qString).execute()
.parse(new PermissionsQueryParser());
log.debug("permissions for '" + uri + "': " + permissions);
account.setPermissionSetUris(permissions);
}
@ -214,7 +216,8 @@ public class UserAccountsSelector {
return String.valueOf(offset);
}
private static class MainQueryParser extends QueryParser<List<UserAccount>> {
private static class MainQueryParser extends
ResultSetParser<List<UserAccount>> {
@Override
protected List<UserAccount> defaultValue() {
return Collections.emptyList();
@ -274,7 +277,7 @@ public class UserAccountsSelector {
}
}
private static class CountQueryParser extends QueryParser<Integer> {
private static class CountQueryParser extends ResultSetParser<Integer> {
@Override
protected Integer defaultValue() {
return 0;
@ -299,7 +302,7 @@ public class UserAccountsSelector {
}
private static class PermissionsQueryParser extends
QueryParser<Set<String>> {
ResultSetParser<Set<String>> {
@Override
protected Set<String> defaultValue() {
return Collections.emptySet();
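The selector changes above all follow one pattern: the old new SparqlQueryRunner(model).executeSelect(parser, query) call becomes the fluent createSelectQueryContext(...).execute().parse(...) chain, and the inner parsers extend ResultSetParser instead of SparqlQueryRunner.QueryParser. Below is a minimal sketch of the new shape, assuming the parser hooks (a defaultValue() fallback plus a parseResults(queryStr, results) method) carry over unchanged from the old QueryParser; CountingParser and SelectQuerySketch are illustrative names, not part of this commit.

// Sketch only, not part of this commit: illustrates the fluent select pattern
// adopted above. The ResultSetParser hooks (defaultValue, parseResults) are
// assumed to mirror the old SparqlQueryRunner.QueryParser contract.
import org.apache.jena.query.ResultSet;
import org.apache.jena.rdf.model.Model;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.ResultSetParser;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.SparqlQueryRunner;

public class SelectQuerySketch {
    // Hypothetical parser that just counts solution rows.
    private static class CountingParser extends ResultSetParser<Integer> {
        @Override
        protected Integer defaultValue() {
            return 0;
        }

        @Override
        protected Integer parseResults(String queryStr, ResultSet results) {
            int count = 0;
            while (results.hasNext()) {
                results.next();
                count++;
            }
            return count;
        }
    }

    public static int countRows(Model model, String queryString) {
        return SparqlQueryRunner
                .createSelectQueryContext(model, queryString)
                .execute()
                .parse(new CountingParser());
    }
}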

View file

@ -16,9 +16,9 @@ import org.apache.jena.query.ResultSet;
import edu.cornell.mannlib.vitro.webapp.controller.accounts.manageproxies.ProxyRelationshipSelectionBuilder.ItemInfo;
import edu.cornell.mannlib.vitro.webapp.controller.accounts.manageproxies.ProxyRelationshipSelectionBuilder.Relationship;
import edu.cornell.mannlib.vitro.webapp.controller.accounts.manageproxies.ProxyRelationshipSelectionCriteria.ProxyRelationshipView;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner.QueryParser;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.sparql.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.ResultSetParser;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.SparqlQueryRunner;
/**
* A class which will accept a ProxyRelationshipSelectionCriteria and produce a
@ -94,8 +94,9 @@ public class ProxyRelationshipSelector {
PREFIX_LINES);
qString = replaceFilterClauses(qString);
int count = new SparqlQueryRunner(context.userAccountsModel)
.executeSelect(new CountQueryParser(), qString);
int count = SparqlQueryRunner
.createSelectQueryContext(context.userAccountsModel, qString)
.execute().parse(new CountQueryParser());
log.debug("result count: " + count);
builder.count = count;
@ -136,9 +137,9 @@ public class ProxyRelationshipSelector {
.replace("%offset%", offset());
qString = replaceFilterClauses(qString);
List<Relationship> relationships = new SparqlQueryRunner(
context.userAccountsModel).executeSelect(
new ProxyBasicsParser(), qString);
List<Relationship> relationships = SparqlQueryRunner
.createSelectQueryContext(context.userAccountsModel, qString)
.execute().parse(new ProxyBasicsParser());
log.debug("getProxyBasics returns: " + relationships);
builder.relationships.addAll(relationships);
}
@ -177,8 +178,9 @@ public class ProxyRelationshipSelector {
.replace("%matchingProperty%", context.matchingProperty)
.replace("%externalAuthId%", proxy.externalAuthId);
ItemInfo expansion = new SparqlQueryRunner(context.unionModel)
.executeSelect(new ExpandProxyParser(), qString);
ItemInfo expansion = SparqlQueryRunner
.createSelectQueryContext(context.unionModel, qString)
.execute().parse(new ExpandProxyParser());
proxy.classLabel = expansion.classLabel;
proxy.imageUrl = expansion.imageUrl;
}
@ -199,9 +201,10 @@ public class ProxyRelationshipSelector {
String qString = QUERY_RELATIONSHIPS.replace("%prefixes%",
PREFIX_LINES).replace("%proxyUri%", proxy.uri);
List<String> profileUris = new SparqlQueryRunner(
context.userAccountsModel).executeSelect(
new RelationshipsParser(), qString);
List<String> profileUris = SparqlQueryRunner
.createSelectQueryContext(context.userAccountsModel,
qString).execute()
.parse(new RelationshipsParser());
for (String profileUri : profileUris) {
r.profileInfos
@ -235,8 +238,9 @@ public class ProxyRelationshipSelector {
String qString = QUERY_EXPAND_PROFILE.replace("%prefixes%",
PREFIX_LINES).replace("%profileUri%", profile.uri);
ItemInfo expansion = new SparqlQueryRunner(context.unionModel)
.executeSelect(new ExpandProfileParser(), qString);
ItemInfo expansion = SparqlQueryRunner
.createSelectQueryContext(context.unionModel, qString)
.execute().parse(new ExpandProfileParser());
profile.label = expansion.label;
profile.classLabel = expansion.classLabel;
profile.imageUrl = expansion.imageUrl;
@ -285,7 +289,7 @@ public class ProxyRelationshipSelector {
// ----------------------------------------------------------------------
private static class ProxyBasicsParser extends
QueryParser<List<Relationship>> {
ResultSetParser<List<Relationship>> {
@Override
protected List<Relationship> defaultValue() {
return Collections.emptyList();
@ -318,7 +322,7 @@ public class ProxyRelationshipSelector {
}
}
private static class CountQueryParser extends QueryParser<Integer> {
private static class CountQueryParser extends ResultSetParser<Integer> {
@Override
protected Integer defaultValue() {
return 0;
@ -342,7 +346,7 @@ public class ProxyRelationshipSelector {
}
}
private static class ExpandProxyParser extends QueryParser<ItemInfo> {
private static class ExpandProxyParser extends ResultSetParser<ItemInfo> {
@Override
protected ItemInfo defaultValue() {
return new ItemInfo();
@ -367,7 +371,8 @@ public class ProxyRelationshipSelector {
}
}
private static class RelationshipsParser extends QueryParser<List<String>> {
private static class RelationshipsParser extends
ResultSetParser<List<String>> {
@Override
protected List<String> defaultValue() {
return Collections.emptyList();
@ -388,7 +393,7 @@ public class ProxyRelationshipSelector {
}
}
private static class ExpandProfileParser extends QueryParser<ItemInfo> {
private static class ExpandProfileParser extends ResultSetParser<ItemInfo> {
@Override
protected ItemInfo defaultValue() {
return new ItemInfo();

View file

@ -24,8 +24,8 @@ import edu.cornell.mannlib.vitro.webapp.controller.ajax.AbstractAjaxResponder;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.modelaccess.ModelAccess;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.sparql.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.SparqlQueryRunner;
import edu.cornell.mannlib.vitro.webapp.web.images.PlaceholderUtil;
/**
@ -76,10 +76,10 @@ public class BasicProxiesGetter extends AbstractAjaxResponder {
String cleanTerm = SparqlQueryUtils.escapeForRegex(term);
String queryStr = QUERY_BASIC_PROXIES.replace("%term%", cleanTerm);
JSONArray jsonArray = new SparqlQueryRunner(userAccountsModel)
.executeSelect(
new BasicProxyInfoParser(placeholderImageUrl),
queryStr);
JSONArray jsonArray = SparqlQueryRunner
.createSelectQueryContext(userAccountsModel, queryStr)
.execute()
.parse(new BasicProxyInfoParser(placeholderImageUrl));
String response = jsonArray.toString();
log.debug(response);

View file

@ -9,7 +9,7 @@ import java.util.Map;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import edu.cornell.mannlib.vitro.webapp.auth.permissions.PermissionSets;
import edu.cornell.mannlib.vitro.webapp.beans.UserAccount;

View file

@ -16,6 +16,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServ
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.i18n.I18n;
/**
* Offer the user the ability to apply a RestrictedAuthenticator or revert to a
@ -80,7 +81,7 @@ public class RestrictLoginsController extends FreemarkerHttpServlet {
boolean restricted = figureCurrentlyState() == State.RESTRICTED;
Map<String, Object> body = new HashMap<String, Object>();
body.put("title", "Restrict Logins");
body.put("title", I18n.text(vreq, "restrict_logins"));
body.put("restricted", restricted);
if (!MESSAGE_NO_MESSAGE.equals(messageCode)) {
body.put(messageCode, Boolean.TRUE);

View file

@ -16,7 +16,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -36,9 +36,9 @@ import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao;
import edu.cornell.mannlib.vitro.webapp.modelaccess.ModelAccess;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;
import edu.cornell.mannlib.vitro.webapp.utils.sparql.SparqlQueryUtils;
/**
* Present the SPARQL Query form, and execute the queries.

View file

@ -11,6 +11,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServlet;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.i18n.I18n;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
/**
@ -27,7 +28,7 @@ public class StartupStatusController extends FreemarkerHttpServlet {
protected ResponseValues processRequest(VitroRequest vreq) {
Map<String, Object> body = new HashMap<String, Object>();
body.put("title", "Startup Status");
body.put("title", I18n.text(vreq, "startup_status"));
body.put("status", StartupStatus.getBean(getServletContext()));
body.put("contextPath", getContextPath());
body.put("applicationName", getApplicationName(vreq));

View file

@ -24,7 +24,7 @@ import org.apache.jena.query.ResultSet;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.dao.IndividualDao;
import edu.cornell.mannlib.vitro.webapp.dao.UserAccountsDao;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.ResultSetParser;
/**
* A base class for AJAX responder objects, to be instantiated and invoked by
@ -82,7 +82,7 @@ public abstract class AbstractAjaxResponder {
*/
protected String assembleJsonResponse(List<Map<String, String>> maps) {
JSONArray jsonArray = new JSONArray();
for (Map<String, String> map: maps) {
for (Map<String, String> map : maps) {
jsonArray.put(map);
}
return jsonArray.toString();
@ -93,7 +93,7 @@ public abstract class AbstractAjaxResponder {
* implement "parseSolutionRow()"
*/
protected abstract static class JsonArrayParser extends
SparqlQueryRunner.QueryParser<JSONArray> {
ResultSetParser<JSONArray> {
@Override
protected JSONArray defaultValue() {
return new JSONArray();

View file

@ -14,7 +14,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -10,9 +10,9 @@ import org.apache.jena.query.QueryParseException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;
import edu.cornell.mannlib.vitro.webapp.utils.sparql.SparqlQueryUtils;
/**
* The base class for the SPARQL query API.

View file

@ -11,7 +11,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -11,7 +11,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission;
import edu.cornell.mannlib.vitro.webapp.auth.policy.PolicyHelper;

View file

@ -6,7 +6,7 @@ import static javax.servlet.http.HttpServletResponse.SC_UNAUTHORIZED;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -32,6 +32,8 @@ import edu.cornell.mannlib.vitro.webapp.dao.DataPropertyStatementDao;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;
import edu.cornell.mannlib.vitro.webapp.dao.jena.event.EditEvent;
import java.io.IOException;
/**
* This controller receives Ajax requests for reordering a list of individuals.
* Parameters:
@ -103,9 +105,12 @@ public class ReorderController extends VitroAjaxController {
// we only hit the database once?
reorderIndividuals(individualUris, vreq, rankPredicate);
response.setStatus(SC_OK);
try {
response.getWriter().write("{}");
} catch (IOException e) {
e.printStackTrace();
}
}
private void reorderIndividuals(String[] individualUris, VitroRequest vreq, String rankPredicate) {

View file

@ -11,7 +11,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import edu.cornell.mannlib.vitro.webapp.utils.JSPPageHandler;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import edu.cornell.mannlib.vedit.controller.BaseEditController;
import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission;

View file

@ -14,7 +14,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import edu.cornell.mannlib.vitro.webapp.utils.JSPPageHandler;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -5,7 +5,7 @@ package edu.cornell.mannlib.vitro.webapp.controller.freemarker;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -13,7 +13,7 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.commons.lang.time.DateUtils;
import org.apache.commons.lang3.time.DateUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -7,7 +7,7 @@ import java.util.Map;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -17,7 +17,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -9,7 +9,7 @@ import java.util.List;
import java.util.Map;
import edu.cornell.mannlib.vitro.webapp.web.templatemodels.individuallist.ListedIndividualBuilder;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -22,6 +22,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.Res
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.individuallist.IndividualListResults;
import edu.cornell.mannlib.vitro.webapp.dao.IndividualDao;
import edu.cornell.mannlib.vitro.webapp.i18n.I18n;
import edu.cornell.mannlib.vitro.webapp.modules.searchEngine.SearchEngineException;
import edu.cornell.mannlib.vitro.webapp.modules.searchEngine.SearchQuery;
import edu.cornell.mannlib.vitro.webapp.utils.searchengine.SearchQueryUtils;
@ -94,7 +95,7 @@ public class IndividualListController extends FreemarkerHttpServlet {
vclass.getURI(),
page,
alpha,
vreq.getWebappDaoFactory().getIndividualDao());
vreq);
body.putAll(vcResults.asFreemarkerMap());
List<Individual> inds = vcResults.getEntities();
@ -148,12 +149,12 @@ public class IndividualListController extends FreemarkerHttpServlet {
return SearchQueryUtils.getPageParameter(request);
}
public static IndividualListResults getResultsForVClass(String vclassURI, int page, String alpha, IndividualDao indDao)
public static IndividualListResults getResultsForVClass(String vclassURI, int page, String alpha, VitroRequest vreq)
throws SearchException{
try{
List<String> classUris = Collections.singletonList(vclassURI);
IndividualListQueryResults results = buildAndExecuteVClassQuery(classUris, alpha, page, INDIVIDUALS_PER_PAGE, indDao);
return getResultsForVClassQuery(results, page, INDIVIDUALS_PER_PAGE, alpha);
IndividualListQueryResults results = buildAndExecuteVClassQuery(classUris, alpha, page, INDIVIDUALS_PER_PAGE, vreq.getWebappDaoFactory().getIndividualDao());
return getResultsForVClassQuery(results, page, INDIVIDUALS_PER_PAGE, alpha, vreq);
} catch (SearchEngineException e) {
String msg = "An error occurred retrieving results for vclass query";
log.error(msg, e);
@ -165,31 +166,31 @@ public class IndividualListController extends FreemarkerHttpServlet {
}
}
public static IndividualListResults getResultsForVClassIntersections(List<String> vclassURIs, int page, int pageSize, String alpha, IndividualDao indDao) {
public static IndividualListResults getResultsForVClassIntersections(List<String> vclassURIs, int page, int pageSize, String alpha, VitroRequest vreq) {
try{
IndividualListQueryResults results = buildAndExecuteVClassQuery(vclassURIs, alpha, page, pageSize, indDao);
return getResultsForVClassQuery(results, page, pageSize, alpha);
IndividualListQueryResults results = buildAndExecuteVClassQuery(vclassURIs, alpha, page, pageSize, vreq.getWebappDaoFactory().getIndividualDao());
return getResultsForVClassQuery(results, page, pageSize, alpha, vreq);
} catch(Throwable th) {
log.error("Error retrieving individuals corresponding to intersection multiple classes." + vclassURIs.toString(), th);
return IndividualListResults.EMPTY;
}
}
public static IndividualListResults getRandomResultsForVClass(String vclassURI, int page, int pageSize, IndividualDao indDao) {
public static IndividualListResults getRandomResultsForVClass(String vclassURI, int page, int pageSize, VitroRequest vreq) {
try{
List<String> classUris = Collections.singletonList(vclassURI);
IndividualListQueryResults results = buildAndExecuteRandomVClassQuery(classUris, page, pageSize, indDao);
return getResultsForVClassQuery(results, page, pageSize, "");
IndividualListQueryResults results = buildAndExecuteRandomVClassQuery(classUris, page, pageSize, vreq.getWebappDaoFactory().getIndividualDao());
return getResultsForVClassQuery(results, page, pageSize, "", vreq);
} catch(Throwable th) {
log.error("An error occurred retrieving random results for vclass query", th);
return IndividualListResults.EMPTY;
}
}
private static IndividualListResults getResultsForVClassQuery(IndividualListQueryResults results, int page, int pageSize, String alpha) {
private static IndividualListResults getResultsForVClassQuery(IndividualListQueryResults results, int page, int pageSize, String alpha, VitroRequest vreq) {
long hitCount = results.getHitCount();
if ( hitCount > pageSize ){
return new IndividualListResults(hitCount, results.getIndividuals(), alpha, true, makePagesList(hitCount, pageSize, page));
return new IndividualListResults(hitCount, results.getIndividuals(), alpha, true, makePagesList(hitCount, pageSize, page, vreq));
}else{
return new IndividualListResults(hitCount, results.getIndividuals(), alpha, false, Collections.<PageRecord>emptyList());
}
@ -221,7 +222,7 @@ public class IndividualListController extends FreemarkerHttpServlet {
}
public static List<PageRecord> makePagesList( long size, int pageSize, int selectedPage ) {
public static List<PageRecord> makePagesList( long size, int pageSize, int selectedPage , VitroRequest vreq) {
List<PageRecord> records = new ArrayList<PageRecord>( MAX_PAGES + 1 );
int requiredPages = (int) (size/pageSize) ;
@ -234,7 +235,7 @@ public class IndividualListController extends FreemarkerHttpServlet {
for(int page = 1; page < requiredPages && page <= MAX_PAGES ; page++ ){
records.add( new PageRecord( "page=" + page, Integer.toString(page), Integer.toString(page), selectedPage == page ) );
}
records.add( new PageRecord( "page="+ (MAX_PAGES+1), Integer.toString(MAX_PAGES+1), "more...", false));
records.add( new PageRecord( "page="+ (MAX_PAGES+1), Integer.toString(MAX_PAGES+1), I18n.text(vreq, "paging_link_more"), false));
}else if( requiredPages > MAX_PAGES && selectedPage+1 > MAX_PAGES && selectedPage < requiredPages - MAX_PAGES){
//the selected page is in the middle of the list of pages
int startPage = selectedPage - MAX_PAGES / 2;
@ -242,7 +243,7 @@ public class IndividualListController extends FreemarkerHttpServlet {
for(int page = startPage; page <= endPage ; page++ ){
records.add( new PageRecord( "page=" + page, Integer.toString(page), Integer.toString(page), selectedPage == page ) );
}
records.add( new PageRecord( "page="+ (endPage+1), Integer.toString(endPage+1), "more...", false));
records.add( new PageRecord( "page="+ (endPage+1), Integer.toString(endPage+1), I18n.text(vreq, "paging_link_more"), false));
}else if ( requiredPages > MAX_PAGES && selectedPage > requiredPages - MAX_PAGES ){
//the selected page is at the end of the list
int startPage = requiredPages - MAX_PAGES;

View file

@ -9,7 +9,7 @@ import java.util.Map;
import net.sf.json.util.JSONUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -10,7 +10,7 @@ import java.util.Map;
import net.sf.json.util.JSONUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -14,7 +14,7 @@ import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -27,6 +27,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.DisplayVocabulary;
import edu.cornell.mannlib.vitro.webapp.i18n.I18n;
import edu.cornell.mannlib.vitro.webapp.utils.dataGetter.DataGetter;
import edu.cornell.mannlib.vitro.webapp.utils.dataGetter.DataGetterUtils;
/**
@ -188,23 +189,23 @@ public class PageController extends FreemarkerHttpServlet{
private ResponseValues doError(VitroRequest vreq) {
Map<String, Object> body = new HashMap<String, Object>();
body.put("title","Page could not be created");
body.put("errorMessage", "There was an error while creating the page, please check the logs.");
body.put("title", I18n.text(vreq, "page_not_created"));
body.put("errorMessage", I18n.text(vreq, "page_not_created_msg"));
return new TemplateResponseValues(Template.TITLED_ERROR_MESSAGE.toString(), body, HttpServletResponse.SC_INTERNAL_SERVER_ERROR);
}
private ResponseValues doNotFound(VitroRequest vreq) {
Map<String, Object> body = new HashMap<String, Object>();
body.put("title","Page Not Found");
body.put("errorMessage", "The page was not found in the system.");
body.put("title", I18n.text(vreq, "page_not_found"));
body.put("errorMessage", I18n.text(vreq, "page_not_found_msg"));
return new TemplateResponseValues(Template.TITLED_ERROR_MESSAGE.toString(), body, HttpServletResponse.SC_NOT_FOUND);
}
private ResponseValues doNoPageSpecified(VitroRequest vreq) {
Map<String, Object> body = new HashMap<String, Object>();
body.put("title","No page URI specified");
body.put("errorMessage", "Could not generate page beacause it was unclear what page was being requested. A URL mapping may be missing.");
body.put("title",I18n.text(vreq, "page_uri_missing"));
body.put("errorMessage", I18n.text(vreq, "page_uri_missing_msg"));
return new TemplateResponseValues(Template.TITLED_ERROR_MESSAGE.toString(), body, HttpServletResponse.SC_NOT_FOUND);
}

View file

@ -5,12 +5,14 @@ package edu.cornell.mannlib.vitro.webapp.controller.freemarker;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.i18n.I18n;
/*
* Servlet that only specifies a template, without putting any data
@ -33,7 +35,7 @@ public class StaticPageController extends FreemarkerHttpServlet {
String requestedUrl = vreq.getServletPath();
String title = null;
if (requestedUrl.equals("/login")) {
title = "Log in to " + siteName;
title = StringUtils.capitalize(I18n.text(vreq, "log_in")) + " - " + siteName;
}
return title;
}

View file

@ -71,7 +71,7 @@ public class UrlBuilder {
public enum Css {
CUSTOM_FORM("/edit/forms/css/customForm.css"),
JQUERY_UI("/js/jquery-ui/css/smoothness/jquery-ui-1.8.9.custom.css");
JQUERY_UI("/js/jquery-ui/css/smoothness/jquery-ui-1.12.1.css");
private final String path;
@ -90,8 +90,9 @@ public class UrlBuilder {
public enum JavaScript {
CUSTOM_FORM_UTILS("/js/customFormUtils.js"),
JQUERY("/js/jquery.js"),
JQUERY_UI("/js/jquery-ui/js/jquery-ui-1.8.9.custom.min.js"),
JQUERY("/js/jquery-1.12.4.min.js"),
JQUERY_MIGRATE("/js/jquery-migrate-1.4.1.js"),
JQUERY_UI("/js/jquery-ui/js/jquery-ui-1.12.1.min.js"),
UTILS("/js/utils.js");
private final String path;

View file

@ -15,7 +15,7 @@ import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONArray;
@ -470,18 +470,10 @@ public class JSONReconcileServlet extends VitroHttpServlet {
return label;
}
public String getJsonLabel() {
return JSONObject.quote(label);
}
public String getUri() {
return uri;
}
public String getJsonUri() {
return JSONObject.quote(uri);
}
Map<String, String> toMap() {
Map<String, String> map = new HashMap<String, String>();
map.put("label", label);

View file

@ -21,6 +21,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.Exc
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.RedirectResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.i18n.I18n;
/**
* Handles requests for entity information.
@ -74,7 +75,7 @@ public class IndividualController extends FreemarkerHttpServlet {
* If we can't figure out what individual you want, or if there
* is no such individual, show an informative error page.
*/
return doNotFound();
return doNotFound(vreq);
case BYTESTREAM_REDIRECT:
/*
* If the Individual requested is a FileBytestream, redirect
@ -116,10 +117,10 @@ public class IndividualController extends FreemarkerHttpServlet {
new IndividualRequestAnalysisContextImpl(vreq)).analyze();
}
private ResponseValues doNotFound() {
private ResponseValues doNotFound(VitroRequest vreq) {
Map<String, Object> body = new HashMap<String, Object>();
body.put("title", "Individual Not Found");
body.put("errorMessage", "The individual was not found in the system.");
body.put("title", I18n.text(vreq, "individual_not_found"));
body.put("errorMessage", I18n.text(vreq, "individual_not_found_msg"));
return new TemplateResponseValues(TEMPLATE_HELP, body,
HttpServletResponse.SC_NOT_FOUND);

View file

@ -10,7 +10,7 @@ import java.util.Set;
import javax.servlet.ServletContext;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -8,7 +8,7 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.ArrayUtils;
import org.apache.commons.lang3.ArrayUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONArray;

View file

@ -141,7 +141,7 @@ public class JenaCsv2RdfController extends JenaIngestController {
}
public Model doExecuteCsv2Rdf(VitroRequest vreq, FileItem fileStream, String filePath) throws Exception {
char[] quoteChars = {'"'};
char quoteChar = '"';
String namespace = "";
String tboxNamespace = vreq.getParameter("tboxNamespace");
String typeName = vreq.getParameter("typeName");
@ -162,7 +162,7 @@ public class JenaCsv2RdfController extends JenaIngestController {
separatorChar = '\t';
}
Csv2Rdf c2r = new Csv2Rdf(separatorChar, quoteChars,namespace,tboxNamespace,typeName);
Csv2Rdf c2r = new Csv2Rdf(separatorChar, quoteChar,namespace,tboxNamespace,typeName);
InputStream is = null;

View file

@ -33,7 +33,7 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import edu.cornell.mannlib.vitro.webapp.utils.JSPPageHandler;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.semanticweb.owlapi.reasoner.InconsistentOntologyException;
@ -73,12 +73,12 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.adapters.VitroModelFactory;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaIngestUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaIngestUtils.MergeResult;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaIngestWorkflowProcessor;
import edu.cornell.mannlib.vitro.webapp.utils.jena.JenaOutputUtils;
import edu.cornell.mannlib.vitro.webapp.utils.jena.WorkflowOntology;
import edu.cornell.mannlib.vitro.webapp.utils.sparql.SparqlQueryUtils;
public class JenaIngestController extends BaseEditController {
private static final Log log = LogFactory.getLog(JenaIngestController.class);
@ -861,7 +861,7 @@ public class JenaIngestController extends BaseEditController {
Individual ind = jenaOntModel.getIndividual(savedQueryURIStr);
log.debug("Using query "+savedQueryURIStr);
queryStr = ( (Literal) ind.getPropertyValue(queryStrProp)).getLexicalForm();
queryStr = StringEscapeUtils.unescapeHtml(queryStr); // !!! We need to turn off automatic HTML-escaping for data property editing.
queryStr = StringEscapeUtils.UNESCAPE_HTML4.translate(queryStr); // !!! We need to turn off automatic HTML-escaping for data property editing.
} finally {
jenaOntModel.leaveCriticalSection();
}
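The switch from commons-lang to commons-lang3 in this controller also changes how saved queries are unescaped: unescapeHtml(queryStr) becomes the UNESCAPE_HTML4 translator. A minimal sketch of the equivalent call in lang3 follows; the class and method names are illustrative only.

// Sketch only: HTML unescaping after the move to commons-lang3.
import org.apache.commons.lang3.StringEscapeUtils;

public class UnescapeSketch {
    public static String unescapeSavedQuery(String queryStr) {
        // commons-lang:  StringEscapeUtils.unescapeHtml(queryStr)
        // commons-lang3: the same behaviour sits behind a translator object;
        // StringEscapeUtils.unescapeHtml4(queryStr) is the shorthand form.
        return StringEscapeUtils.UNESCAPE_HTML4.translate(queryStr);
    }
}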

View file

@ -7,7 +7,7 @@ import java.util.List;
import javax.servlet.ServletException;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.json.JSONArray;

View file

@ -109,7 +109,7 @@ public class JsonServlet extends VitroHttpServlet {
vclassURIs,
page, INDIVIDUALS_PER_PAGE,
alpha,
vreq.getWebappDaoFactory().getIndividualDao());
vreq);
} catch(Exception ex) {
log.error("Error in retrieval of search results for VClass " + vclassURIs.toString(), ex);
return IndividualListResults.EMPTY;
@ -144,7 +144,7 @@ public class JsonServlet extends VitroHttpServlet {
vclassURI,
page,
pageSize,
vreq.getWebappDaoFactory().getIndividualDao());
vreq);
} catch(Exception ex) {
log.error("Error in retrieval of search results for VClass " + vclassURI, ex);
return IndividualListResults.EMPTY;

View file

@ -33,7 +33,7 @@ public class VitroVocabulary {
public static final String OWL_ONTOLOGY = OWL+"Ontology";
public static final String OWL_THING = OWL+"Thing";
public static final String AFN = "http://jena.hpl.hp.com/ARQ/function#";
public static final String AFN = "http://jena.apache.org/ARQ/function#";
public static final String label = vitroURI + "label";

View file

@ -13,7 +13,7 @@ import java.util.List;
import java.util.Map;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -2,9 +2,9 @@
package edu.cornell.mannlib.vitro.webapp.dao.jena;
import static edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.SparqlQueryRunner.createSelectQueryContext;
import static edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.SparqlQueryRunner.queryHolder;
import static org.apache.jena.rdf.model.ResourceFactory.createResource;
import static edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner.bindValues;
import static edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner.uriValue;
import java.util.ArrayList;
import java.util.Collection;
@ -16,7 +16,6 @@ import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.ontology.ObjectProperty;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.ontology.OntModelSpec;
@ -33,11 +32,11 @@ import edu.cornell.mannlib.vitro.webapp.beans.FauxProperty;
import edu.cornell.mannlib.vitro.webapp.dao.FauxPropertyDao;
import edu.cornell.mannlib.vitro.webapp.dao.InsertException;
import edu.cornell.mannlib.vitro.webapp.dao.VitroVocabulary;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryRunner.QueryParser;
import edu.cornell.mannlib.vitro.webapp.utils.jena.criticalsection.LockableOntModel;
import edu.cornell.mannlib.vitro.webapp.utils.jena.criticalsection.LockableOntModelSelector;
import edu.cornell.mannlib.vitro.webapp.utils.jena.criticalsection.LockedOntModel;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.QueryHolder;
import edu.cornell.mannlib.vitro.webapp.utils.sparqlrunner.ResultSetParser;
/**
* TODO
@ -522,7 +521,7 @@ public class FauxPropertyDaoJena extends JenaBaseDao implements FauxPropertyDao
+ "} \n"; //
private static class ParserLocateConfigContext extends
QueryParser<Set<ConfigContext>> {
ResultSetParser<Set<ConfigContext>> {
private final String domainUri;
private final String baseUri;
private final String rangeUri;
@ -561,30 +560,29 @@ public class FauxPropertyDaoJena extends JenaBaseDao implements FauxPropertyDao
LockableOntModel lockableDisplayModel, String domainUri,
String baseUri, String rangeUri) {
try (LockedOntModel displayModel = lockableDisplayModel.read()) {
String queryString;
QueryHolder qHolder;
if (domainUri == null || domainUri.trim().isEmpty()
|| domainUri.equals(OWL.Thing.getURI())) {
queryString = bindValues(
QUERY_LOCATE_CONFIG_CONTEXT_WITH_NO_DOMAIN,
uriValue("baseUri", baseUri),
uriValue("rangeUri", rangeUri));
qHolder = queryHolder(
QUERY_LOCATE_CONFIG_CONTEXT_WITH_NO_DOMAIN)
.bindToUri("baseUri", baseUri).bindToUri(
"rangeUri", rangeUri);
} else {
queryString = bindValues(
QUERY_LOCATE_CONFIG_CONTEXT_WITH_DOMAIN,
uriValue("baseUri", baseUri),
uriValue("rangeUri", rangeUri),
uriValue("domainUri", domainUri));
qHolder = queryHolder(
QUERY_LOCATE_CONFIG_CONTEXT_WITH_DOMAIN)
.bindToUri("baseUri", baseUri)
.bindToUri("rangeUri", rangeUri)
.bindToUri("domainUri", domainUri);
}
if (log.isDebugEnabled()) {
log.debug("domainUri=" + domainUri + ", baseUri=" + baseUri
+ ", rangeUri=" + rangeUri + ", queryString="
+ queryString);
+ ", rangeUri=" + rangeUri + ", qHolder=" + qHolder);
}
ParserLocateConfigContext parser = new ParserLocateConfigContext(
domainUri, baseUri, rangeUri);
Set<ConfigContext> contexts = new SparqlQueryRunner(
displayModel).executeSelect(parser, queryString);
Set<ConfigContext> contexts = createSelectQueryContext(
displayModel, qHolder).execute().parse(parser);
log.debug("found " + contexts.size() + " contexts: " + contexts);
return contexts;

View file

@ -13,7 +13,7 @@ import java.util.List;
import java.util.Random;
import java.util.Set;
import org.apache.commons.lang.NotImplementedException;
import org.apache.commons.lang3.NotImplementedException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -605,12 +605,12 @@ public class IndividualDaoJena extends JenaBaseDao implements IndividualDao {
public Collection<String> getAllIndividualUris() {
//this is implemented in IndividualSDB
throw new NotImplementedException();
throw new NotImplementedException("");
}
public Iterator<String> getUpdatedSinceIterator(long updatedSince){
//this is implemented in IndividualSDB
throw new NotImplementedException();
throw new NotImplementedException("");
}
public boolean isIndividualOfClass(String vclassURI, String indURI) {

View file

@ -13,7 +13,7 @@ import java.util.LinkedList;
import java.util.List;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTime;

View file

@ -868,6 +868,28 @@ public class JenaBaseDao extends JenaBaseDaoCon {
return label;
}
/**
* Searches for a literal in one of the preferred languages.
* @param labels
* the literals to search; must not be null
* @return the literal in the preferred language if the list contains one;
* otherwise the first entry is returned; returns null if the list is empty
*/
protected Literal tryLiteralForPreferredLanguages(List<Literal> labels) {
// search for literal of preferred language
for (Literal literal : labels) {
for (String lang : PREFERRED_LANGUAGES) {
if (lang.equals(literal.getLanguage())) {
return literal;
}
}
}
// return first literal as last resort
return 0 == labels.size() ? null : labels.get(0);
}
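// Example (assuming PREFERRED_LANGUAGES lists "en" first): given the labels
// "Stuhl"@de and "chair"@en, the @en literal is returned; if no label matches
// a preferred language the first entry is used, and an empty list yields null.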
private Literal tryPropertyForPreferredLanguages( OntResource r, Property p, boolean alsoTryNoLang ) {
Literal label = null;
List<RDFNode> labels = r.listPropertyValues(p).toList();
@ -1149,7 +1171,7 @@ public class JenaBaseDao extends JenaBaseDaoCon {
String describeQueryStr = "DESCRIBE <" + res.getURI() + ">" ;
// ? "PREFIX afn: <http://jena.hpl.hp.com/ARQ/function#> \n\n" +
// ? "PREFIX afn: <http://jena.apache.org/ARQ/function#> \n\n" +
// "DESCRIBE ?bnode \n" +
// "WHERE { \n" +
// " FILTER(afn:bnode(?bnode) = \"" + res.getId().toString() + "\")\n" +

View file

@ -262,7 +262,7 @@ public class JenaModelUtils {
dataset.getLock().enterCriticalSection(Lock.READ);
try {
StringBuffer buff = new StringBuffer();
buff.append("PREFIX afn: <http://jena.hpl.hp.com/ARQ/function#> \n")
buff.append("PREFIX afn: <http://jena.apache.org/ARQ/function#> \n")
.append("CONSTRUCT { \n")
.append(" ?res <" + property.getURI() + "> ?o } WHERE { \n");
if (graphURI != null) {
@ -294,7 +294,7 @@ public class JenaModelUtils {
StringBuffer describeQueryStrBuff = new StringBuffer()
.append("PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> \n")
.append("PREFIX afn: <http://jena.hpl.hp.com/ARQ/function#> \n")
.append("PREFIX afn: <http://jena.apache.org/ARQ/function#> \n")
.append("DESCRIBE ?res WHERE { \n");
if (graphURI != null) {
describeQueryStrBuff

View file

@ -11,7 +11,7 @@ import java.util.List;
import java.util.Map;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -7,16 +7,19 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.ontology.OntModel;
import org.apache.jena.ontology.OntResource;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
@ -26,6 +29,7 @@ import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.QuerySolutionMap;
import org.apache.jena.query.ResultSet;
import org.apache.jena.query.Syntax;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.Property;
@ -503,7 +507,8 @@ public class ObjectPropertyStatementDaoJena extends JenaBaseDao implements Objec
return Collections.emptyMap();
}
Map<String, String> types = new LinkedHashMap<String, String>();
Map<String, String> result = new LinkedHashMap<String, String>();
Map<String, List<Literal>> types = new LinkedHashMap<String, List<Literal>>();
DatasetWrapper w = dwf.getDatasetWrapper();
Dataset dataset = w.getDataset();
dataset.getLock().enterCriticalSection(Lock.READ);
@ -521,16 +526,24 @@ public class ObjectPropertyStatementDaoJena extends JenaBaseDao implements Objec
}
RDFNode labelNode = soln.get("label");
String label = null;
if (labelNode.isLiteral()) {
label = labelNode.asLiteral().getLexicalForm();
if (StringUtils.isNotBlank(type) && labelNode.isLiteral()) {
List<Literal> langLabels = types.get(type);
if (null == langLabels) {
types.put(type, langLabels = new ArrayList<Literal>());
}
langLabels.add(labelNode.asLiteral());
}
}
if (StringUtils.isNotBlank(type) && StringUtils.isNotBlank(label)) {
types.put(type, label);
// choose labels corresponding to preferred languages
Set<Entry<String, List<Literal>>> typeEntries = types.entrySet();
for (Entry<String, List<Literal>> current : typeEntries) {
result.put(current.getKey(), tryLiteralForPreferredLanguages(current.getValue()).getLexicalForm());
}
}
return types;
return result;
} catch (Exception e) {
log.error("Error getting most specific types for subject " + subjectUri);

View file

@ -9,8 +9,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.ObjectUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.ObjectUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -7,6 +7,7 @@ import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.function.Supplier;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -524,6 +525,16 @@ public class RDFServiceGraph implements GraphWithPerform {
return null;
}
@Override
public void execute(Runnable runnable) {
}
@Override
public <T> T calculate(Supplier<T> supplier) {
return null;
}
@Override
public boolean transactionsSupported() {
return true;

View file

@ -8,7 +8,7 @@ import java.util.Comparator;
import java.util.LinkedList;
import java.util.List;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -3,7 +3,7 @@ package edu.cornell.mannlib.vitro.webapp.dao.jena;
import javax.servlet.ServletContext;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelReader;

View file

@ -16,8 +16,8 @@ import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpSession;
import org.apache.commons.fileupload.servlet.ServletFileUpload;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.DateTime;

View file

@ -11,7 +11,7 @@ import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -68,7 +68,7 @@ public class EditN3GeneratorVTwo {
}
log.debug("The original value String is " + values.toString());
String valueString = org.apache.commons.lang.StringUtils.join(values,
String valueString = org.apache.commons.lang3.StringUtils.join(values,
">, <");
valueString = "<" + valueString + ">";
log.debug("The multiUri value String is " + valueString);
@ -233,7 +233,7 @@ public class EditN3GeneratorVTwo {
log.debug("value of literal for " + var + " was null");
}
}
String valueString = org.apache.commons.lang.StringUtils.join(n3Values, ",");
String valueString = org.apache.commons.lang3.StringUtils.join(n3Values, ",");
//Substitute it in to n3
String varRegex = "\\?" + var + "(?=\\p{Punct}|\\p{Space}|$)";

View file

@ -11,9 +11,9 @@ import java.util.List;
import java.util.Map;
import org.apache.commons.fileupload.FileItem;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.joda.time.format.DateTimeFormat;

View file

@ -7,7 +7,7 @@ import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.jena.vocabulary.OWL;

View file

@ -5,8 +5,8 @@ package edu.cornell.mannlib.vitro.webapp.edit.n3editing.VTwo.fields;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.lang.builder.ToStringBuilder;
import org.apache.commons.lang.builder.ToStringStyle;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import edu.cornell.mannlib.vitro.webapp.edit.n3editing.VTwo.BaseEditElementVTwo;
import edu.cornell.mannlib.vitro.webapp.edit.n3editing.VTwo.EditElementVTwo;

View file

@ -14,7 +14,7 @@ import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -6,7 +6,7 @@ import java.util.List;
import javax.servlet.http.HttpSession;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.jena.ontology.OntModel;

View file

@ -11,7 +11,7 @@ import java.util.Map;
import javax.servlet.http.HttpSession;
import edu.cornell.mannlib.vitro.webapp.web.templatemodels.searchresult.IndividualSearchResult;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@ -226,7 +226,7 @@ public class DefaultAddMissingIndividualFormGenerator implements EditConfigurati
return prefixes;
}
private String getN3ForName() {
protected String getN3ForName() {
return "?" + objectVarName + " rdfs:label ?name .";
}

View file

@ -7,7 +7,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.owasp.validator.html.AntiSamy;
import org.owasp.validator.html.CleanResults;
import org.owasp.validator.html.PolicyException;

View file

@ -6,7 +6,7 @@ import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -8,7 +8,7 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -15,7 +15,7 @@ import static edu.cornell.mannlib.vitro.webapp.modelaccess.ModelNames.DISPLAY_TB
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -18,7 +18,7 @@ import javax.servlet.ServletResponse;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import edu.cornell.mannlib.vitro.webapp.beans.ApplicationBean;
import edu.cornell.mannlib.vitro.webapp.dao.WebappDaoFactory;

View file

@ -12,7 +12,7 @@ import java.util.List;
import java.util.regex.Pattern;
import java.util.regex.Matcher;
import org.apache.commons.lang.StringEscapeUtils;
import org.apache.commons.lang3.StringEscapeUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -13,7 +13,7 @@ import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import javax.servlet.http.HttpServletRequest;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -18,7 +18,7 @@ import java.util.TreeSet;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -11,8 +11,8 @@ import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.commons.lang.LocaleUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.LocaleUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

View file

@ -10,8 +10,8 @@ import javax.servlet.ServletContext;
import javax.servlet.ServletContextEvent;
import javax.servlet.ServletContextListener;
import org.apache.commons.lang.LocaleUtils;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.LocaleUtils;
import org.apache.commons.lang3.StringUtils;
import edu.cornell.mannlib.vitro.webapp.config.ConfigurationProperties;
import edu.cornell.mannlib.vitro.webapp.startup.StartupStatus;
@ -132,6 +132,12 @@ public class LocaleSelectionSetup implements ServletContextListener {
private Locale buildLocale(String localeString)
throws IllegalArgumentException {
// Replicate the exception that commons-lang 2.x threw for empty strings
if ("".equals(localeString)) {
throw new IllegalArgumentException("Invalid locale format");
}
Locale locale = LocaleUtils.toLocale(localeString);
if (!"es_GO".equals(localeString) && // No complaint about bogus locale

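As the comment in this hunk notes, commons-lang3's LocaleUtils.toLocale no longer throws for an empty string (it reportedly returns an empty Locale), so the guard has to recreate the lang 2.x behaviour. A minimal, self-contained sketch of that validation on its own; the class and method names here are illustrative, not part of the commit:

import java.util.Locale;
import org.apache.commons.lang3.LocaleUtils;

public class LocaleParsingSketch {
    /** Parse a configured locale string, preserving the lang 2.x failure mode for "". */
    static Locale parse(String localeString) {
        if (localeString == null || localeString.isEmpty()) {
            // lang3 would hand back an empty Locale here; keep throwing as lang 2.x did
            throw new IllegalArgumentException("Invalid locale format");
        }
        return LocaleUtils.toLocale(localeString); // still throws for malformed strings
    }
}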
View file

@ -2,7 +2,6 @@
package edu.cornell.mannlib.vitro.webapp.imageprocessor.imageio;
import com.sun.media.jai.codec.MemoryCacheSeekableStream;
import edu.cornell.mannlib.vitro.webapp.modules.Application;
import edu.cornell.mannlib.vitro.webapp.modules.ComponentStartupStatus;
import edu.cornell.mannlib.vitro.webapp.modules.imageProcessor.ImageProcessor;
@ -10,11 +9,8 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import javax.imageio.ImageIO;
import javax.media.jai.JAI;
import javax.media.jai.RenderedOp;
import javax.media.jai.operator.BandSelectDescriptor;
import javax.media.jai.operator.StreamDescriptor;
import javax.media.jai.util.ImagingListener;
import javax.imageio.stream.ImageInputStream;
import javax.imageio.stream.MemoryCacheImageInputStream;
import java.awt.geom.AffineTransform;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
@ -55,8 +51,8 @@ public class IIOImageProcessor implements ImageProcessor {
*/
@Override
public void startup(Application application, ComponentStartupStatus ss) {
JAI.getDefaultInstance().setImagingListener(
new NonNoisyImagingListener());
// JAI.getDefaultInstance().setImagingListener(
// new NonNoisyImagingListener());
}
@Override
@ -66,7 +62,7 @@ public class IIOImageProcessor implements ImageProcessor {
@Override
public Dimensions getDimensions(InputStream imageStream) throws ImageProcessorException, IOException {
MemoryCacheSeekableStream stream = new MemoryCacheSeekableStream(imageStream);
ImageInputStream stream = new MemoryCacheImageInputStream(imageStream);
BufferedImage image = ImageIO.read(stream);
return new Dimensions(image.getWidth(), image.getHeight());
}
@ -80,7 +76,7 @@ public class IIOImageProcessor implements ImageProcessor {
CropRectangle crop, Dimensions limits)
throws ImageProcessorException, IOException {
try {
MemoryCacheSeekableStream stream = new MemoryCacheSeekableStream(mainImageStream);
ImageInputStream stream = new MemoryCacheImageInputStream(mainImageStream);
BufferedImage mainImage = ImageIO.read(stream);
BufferedImage bufferedImage = new BufferedImage(mainImage.getWidth(), mainImage.getHeight(), BufferedImage.TYPE_3BYTE_BGR); // BufferedImage.TYPE_INT_RGB
@ -176,33 +172,4 @@ public class IIOImageProcessor implements ImageProcessor {
ImageIO.write(image, "JPG", bytes);
return bytes.toByteArray();
}
/**
* This ImagingListener means that Java Advanced Imaging won't dump an
* exception log to System.out. It writes to the log, instead.
*
* Further, since the lack of native accelerator classes isn't an error, it
* is written as a simple log message.
*/
static class NonNoisyImagingListener implements ImagingListener {
@Override
public boolean errorOccurred(String message, Throwable thrown,
Object where, boolean isRetryable) throws RuntimeException {
if (thrown instanceof RuntimeException) {
throw (RuntimeException) thrown;
}
if ((thrown instanceof NoClassDefFoundError)
&& (thrown.getMessage()
.contains("com/sun/medialib/mlib/Image"))) {
log.info("Java Advanced Imaging: Could not find mediaLib "
+ "accelerator wrapper classes. "
+ "Continuing in pure Java mode.");
return false;
}
log.error(thrown, thrown);
return false;
}
}
}
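These hunks replace JAI's MemoryCacheSeekableStream with the standard javax.imageio stream types; ImageIO plugins such as the TwelveMonkeys readers referenced later in this commit are discovered through the ImageIO service-provider mechanism, so no explicit registration appears in the diff. A stripped-down sketch of the same read pattern in isolation (class and method names are illustrative):

import java.awt.image.BufferedImage;
import java.io.IOException;
import java.io.InputStream;
import javax.imageio.ImageIO;
import javax.imageio.stream.ImageInputStream;
import javax.imageio.stream.MemoryCacheImageInputStream;

public class DimensionsSketch {
    /** Read width and height by decoding the stream with whatever ImageIO readers are on the classpath. */
    static int[] dimensionsOf(InputStream in) throws IOException {
        ImageInputStream stream = new MemoryCacheImageInputStream(in);
        BufferedImage image = ImageIO.read(stream); // additional readers register via the ImageIO SPI
        return new int[] { image.getWidth(), image.getHeight() };
    }
}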

View file

@ -2,234 +2,44 @@
package edu.cornell.mannlib.vitro.webapp.imageprocessor.jai;
import java.awt.geom.AffineTransform;
import java.awt.image.AffineTransformOp;
import java.awt.image.BufferedImage;
import java.awt.image.ColorConvertOp;
import java.awt.image.ColorModel;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import javax.imageio.ImageIO;
import javax.media.jai.JAI;
import javax.media.jai.RenderedOp;
import javax.media.jai.operator.BandSelectDescriptor;
import javax.media.jai.operator.StreamDescriptor;
import javax.media.jai.util.ImagingListener;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import com.sun.media.jai.codec.MemoryCacheSeekableStream;
import edu.cornell.mannlib.vitro.webapp.imageprocessor.imageio.IIOImageProcessor;
import edu.cornell.mannlib.vitro.webapp.modules.Application;
import edu.cornell.mannlib.vitro.webapp.modules.ComponentStartupStatus;
import edu.cornell.mannlib.vitro.webapp.modules.imageProcessor.ImageProcessor;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.io.IOException;
import java.io.InputStream;
/**
* Crop the main image as specified, and scale it to the correct size for a
* thumbnail.
* Re-implemented on ImageIO with the TwelveMonkeys plugins, for better OpenJDK compatibility and to
* remove the JAI codec dependencies that are not distributed via Maven.
*
* Use the JAI library to read the file because the javax.imageio package
* doesn't read extended JPEG properly. Use JAI to remove transparency from
* JPEGs and PNGs, simply by removing the alpha channel. Annoyingly, this will
* not work with GIFs with transparent pixels.
*
* The transforms in the JAI library are buggy, so standard AWT operations do
* the scaling and cropping. The most obvious problem in the JAI library is the
* refusal to crop after scaling an image.
*
* Scale first to avoid the boundary error that produces black lines along the
* edge of the image.
*
* Use the javax.imageio package to write the thumbnail image as a JPEG file.
* This transitional stub extends the new class and logs warnings prompting users to update their configuration.
*/
public class JaiImageProcessor implements ImageProcessor {
@Deprecated
public class JaiImageProcessor extends IIOImageProcessor {
private static final Log log = LogFactory.getLog(JaiImageProcessor.class);
/** If an image has 3 color bands and 1 alpha band, we want these. */
private static final int[] COLOR_BAND_INDEXES = new int[] { 0, 1, 2 };
/**
* Prevent Java Advanced Imaging from complaining about the lack of
* accelerator classes.
*/
@Deprecated
@Override
public void startup(Application application, ComponentStartupStatus ss) {
JAI.getDefaultInstance().setImagingListener(
new NonNoisyImagingListener());
log.warn("JaiImageProcessor is deprecated and will be removed - please update config/applicationSetup.n3 to use edu.cornell.mannlib.vitro.webapp.imageprocessor.imageio.IIOImageProcessor");
super.startup(application, ss);
}
@Deprecated
@Override
public void shutdown(Application application) {
// Nothing to tear down.
public Dimensions getDimensions(InputStream imageStream) throws ImageProcessorException, IOException {
log.warn("JaiImageProcessor is deprecated and will be removed - please update config/applicationSetup.n3 to use edu.cornell.mannlib.vitro.webapp.imageprocessor.imageio.IIOImageProcessor");
return super.getDimensions(imageStream);
}
@Deprecated
@Override
public Dimensions getDimensions(InputStream imageStream)
throws ImageProcessorException, IOException {
MemoryCacheSeekableStream stream = new MemoryCacheSeekableStream(
imageStream);
RenderedOp image = JAI.create("stream", stream);
return new Dimensions(image.getWidth(), image.getHeight());
public InputStream cropAndScale(InputStream mainImageStream, CropRectangle crop, Dimensions limits) throws ImageProcessorException, IOException {
log.warn("JaiImageProcessor is deprecated and will be removed - please update config/applicationSetup.n3 to use edu.cornell.mannlib.vitro.webapp.imageprocessor.imageio.IIOImageProcessor");
return super.cropAndScale(mainImageStream, crop, limits);
}
/**
* Crop the main image according to this rectangle, and scale it to the
* correct size for a thumbnail.
*/
@Override
public InputStream cropAndScale(InputStream mainImageStream,
CropRectangle crop, Dimensions limits)
throws ImageProcessorException, IOException {
try {
RenderedOp mainImage = loadImage(mainImageStream);
BufferedImage bufferedImage = new BufferedImage(mainImage.getWidth(), mainImage.getHeight(), BufferedImage.TYPE_3BYTE_BGR); // BufferedImage.TYPE_INT_RGB
new ColorConvertOp(null).filter(mainImage.getAsBufferedImage(), bufferedImage);
log.debug("initial image: " + imageSize(bufferedImage));
log.debug("initial crop: " + crop);
CropRectangle boundedCrop = limitCropRectangleToImageBounds(
bufferedImage, crop);
log.debug("bounded crop: " + boundedCrop);
float scaleFactor = figureScaleFactor(boundedCrop, limits);
log.debug("scale factor: " + scaleFactor);
BufferedImage scaledImage = scaleImage(bufferedImage, scaleFactor);
log.debug("scaled image: " + imageSize(scaledImage));
CropRectangle rawScaledCrop = adjustCropRectangleToScaledImage(
boundedCrop, scaleFactor);
log.debug("scaled crop: " + rawScaledCrop);
CropRectangle scaledCrop = limitCropRectangleToImageBounds(
scaledImage, rawScaledCrop);
log.debug("bounded scaled crop: " + scaledCrop);
BufferedImage croppedImage = cropImage(scaledImage, scaledCrop);
log.debug("cropped image: " + imageSize(croppedImage));
byte[] jpegBytes = encodeAsJpeg(croppedImage);
return new ByteArrayInputStream(jpegBytes);
} catch (Exception e) {
throw new IllegalStateException("Failed to scale the image", e);
}
}
private RenderedOp loadImage(InputStream imageStream) {
return StreamDescriptor.create(new MemoryCacheSeekableStream(
imageStream), null, null);
}
private RenderedOp makeImageOpaque(RenderedOp image) {
ColorModel colorModel = image.getColorModel();
if (!colorModel.hasAlpha()) {
// The image is already opaque.
return image;
}
if (image.getNumBands() == 4) {
// The image has a separate alpha channel. Drop the alpha channel.
return BandSelectDescriptor.create(image, COLOR_BAND_INDEXES, null);
}
// Don't know how to handle it. Probably a GIF with a transparent
// background. Give up.
return image;
}
private String imageSize(BufferedImage image) {
return image.getWidth() + " by " + image.getHeight();
}
private CropRectangle limitCropRectangleToImageBounds(BufferedImage image,
CropRectangle crop) {
int imageWidth = image.getWidth();
int imageHeight = image.getHeight();
// Ensure that x and y are at least zero, but not big enough to push the
// crop rectangle out of the image.
int greatestX = imageWidth - MINIMUM_CROP_SIZE;
int greatestY = imageHeight - MINIMUM_CROP_SIZE;
int x = Math.max(0, Math.min(greatestX, Math.abs(crop.x)));
int y = Math.max(0, Math.min(greatestY, Math.abs(crop.y)));
// Ensure that width and height are at least as big as the minimum, but
// not so big as to extend beyond the image.
int greatestW = imageWidth - x;
int greatestH = imageHeight - y;
int w = Math.max(MINIMUM_CROP_SIZE, Math.min(greatestW, crop.width));
int h = Math.max(MINIMUM_CROP_SIZE, Math.min(greatestH, crop.height));
return new CropRectangle(x, y, h, w);
}
private float figureScaleFactor(CropRectangle boundedCrop, Dimensions limits) {
float horizontalScale = ((float) limits.width)
/ ((float) boundedCrop.width);
float verticalScale = ((float) limits.height)
/ ((float) boundedCrop.height);
return Math.min(horizontalScale, verticalScale);
}
private BufferedImage scaleImage(BufferedImage image, float scaleFactor) {
AffineTransform transform = AffineTransform.getScaleInstance(
scaleFactor, scaleFactor);
AffineTransformOp atoOp = new AffineTransformOp(transform, null);
return atoOp.filter(image, null);
}
private CropRectangle adjustCropRectangleToScaledImage(CropRectangle crop,
float scaleFactor) {
int newX = (int) (crop.x * scaleFactor);
int newY = (int) (crop.y * scaleFactor);
int newHeight = (int) (crop.height * scaleFactor);
int newWidth = (int) (crop.width * scaleFactor);
return new CropRectangle(newX, newY, newHeight, newWidth);
}
private BufferedImage cropImage(BufferedImage image, CropRectangle crop) {
return image.getSubimage(crop.x, crop.y, crop.width, crop.height);
}
private byte[] encodeAsJpeg(BufferedImage image) throws IOException {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
ImageIO.write(image, "JPG", bytes);
return bytes.toByteArray();
}
/**
* This ImagingListener means that Java Advanced Imaging won't dump an
* exception log to System.out. It writes to the log, instead.
*
* Further, since the lack of native accelerator classes isn't an error, it
* is written as a simple log message.
*/
static class NonNoisyImagingListener implements ImagingListener {
@Override
public boolean errorOccurred(String message, Throwable thrown,
Object where, boolean isRetryable) throws RuntimeException {
if (thrown instanceof RuntimeException) {
throw (RuntimeException) thrown;
}
if ((thrown instanceof NoClassDefFoundError)
&& (thrown.getMessage()
.contains("com/sun/medialib/mlib/Image"))) {
log.info("Java Advanced Imaging: Could not find mediaLib "
+ "accelerator wrapper classes. "
+ "Continuing in pure Java mode.");
return false;
}
log.error(thrown, thrown);
return false;
}
}
}

View file

@ -5,14 +5,16 @@ package edu.cornell.mannlib.vitro.webapp.ontology.update;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.List;
import java.util.StringTokenizer;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.skife.csv.CSVReader;
import org.skife.csv.SimpleReader;
import edu.cornell.mannlib.vitro.webapp.ontology.update.AtomicOntologyChange.AtomicChangeType;
@ -35,7 +37,6 @@ public class OntologyChangeParser {
/**
* @param diffPath Diff path
* @throws IOException
*/
@SuppressWarnings({ "unchecked", "null", "static-access" })
@ -50,52 +51,53 @@ public class OntologyChangeParser {
String destinationURI = null;
StringTokenizer stArr = null;
FileInputStream in = new FileInputStream(new File(diffPath));
CSVReader readFile = new SimpleReader();
readFile.setSeperator('\t');
List<String[]> rows = readFile.parse(in);
CSVParser readFile = new CSVParser(new InputStreamReader(in),
CSVFormat.DEFAULT.withDelimiter('\t'));
for(int rowNum = 0; rowNum < rows.size(); rowNum++){
String[] cols = rows.get(rowNum);
if (cols.length != 5) {
logger.logError("Invalid PromptDiff data at row " + (rowNum + 1)
+ ". Expected 5 columns; found " + cols.length );
int rowNum = 0;
for (CSVRecord record : readFile) {
rowNum++;
if (record.size() != 5) {
logger.logError("Invalid PromptDiff data at row " + (rowNum)
+ ". Expected 5 columns; found " + record.size() );
} else {
String col = null;
changeObj = new AtomicOntologyChange();
if (cols[0] != null && cols[0].length() > 0) {
changeObj.setSourceURI(cols[0]);
col = record.get(0);
if (col != null && col.length() > 0) {
changeObj.setSourceURI(col);
}
if (cols[1] != null && cols[1].length() > 0) {
changeObj.setDestinationURI(cols[1]);
col = record.get(1);
if (col != null && col.length() > 0) {
changeObj.setDestinationURI(col);
}
if (cols[4] != null && cols[4].length() > 0) {
changeObj.setNotes(cols[4]);
col = record.get(4);
if (col != null && col.length() > 0) {
changeObj.setNotes(col);
}
if ("Yes".equals(cols[2])) {
if ("Yes".equals(record.get(2))) {
changeObj.setAtomicChangeType(AtomicChangeType.RENAME);
} else if ("Delete".equals(cols[3])) {
} else if ("Delete".equals(record.get(3))) {
changeObj.setAtomicChangeType(AtomicChangeType.DELETE);
} else if ("Add".equals(cols[3])) {
} else if ("Add".equals(record.get(3))) {
changeObj.setAtomicChangeType(AtomicChangeType.ADD);
} else {
logger.logError("Invalid rename or change type data: '" +
cols[2] + " " + cols[3] + "'");
record.get(2) + " " + record.get(3) + "'");
}
log.debug(changeObj);
changeObjects.add(changeObj);
}
}
}
readFile.close();
if (changeObjects.size() == 0) {
logger.log("No ABox updates are required.");

View file

@ -8,6 +8,7 @@ import java.util.List;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelChangedListener;
import org.apache.jena.rdf.model.RDFNode;
/**
* Interface for API to write, read, and update Vitro's RDF store, with support
@ -249,6 +250,10 @@ public interface RDFService {
*/
public ChangeSet manufactureChangeSet();
public long countTriples(RDFNode subject, RDFNode predicate, RDFNode object) throws RDFServiceException;
public Model getTriples(RDFNode subject, RDFNode predicate, RDFNode object, long limit, long offset) throws RDFServiceException;
/**
* Frees any resources held by this RDFService object
*

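The two methods added to the interface above lend themselves to a simple count-then-page pattern for bulk reads. A hypothetical caller might look like the following sketch (the page size and output format are illustrative; null pattern arguments are assumed to act as wildcards, as in the implementations later in this diff):

import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.RDFNode;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;

public class TriplePagerSketch {
    /** Stream all triples matching a pattern in fixed-size pages. */
    static void dump(RDFService rdfService, RDFNode s, RDFNode p, RDFNode o)
            throws RDFServiceException {
        long pageSize = 1000; // illustrative page size
        long total = rdfService.countTriples(s, p, o);
        for (long offset = 0; offset < total; offset += pageSize) {
            Model page = rdfService.getTriples(s, p, o, pageSize, offset);
            page.write(System.out, "TTL"); // placeholder for real processing
        }
    }
}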
View file

@ -10,6 +10,7 @@ import java.util.Calendar;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
import org.apache.jena.datatypes.RDFDatatype;
import org.apache.jena.graph.Graph;
@ -1078,4 +1079,33 @@ public abstract class AbstractModelDecorator implements Model {
return inner.isClosed();
}
@Override
public Statement getRequiredProperty(Resource resource, Property property, String s) {
return inner.getRequiredProperty(resource, property, s);
}
@Override
public Statement getProperty(Resource resource, Property property, String s) {
return inner.getProperty(resource, property, s);
}
@Override
public void executeInTxn(Runnable runnable) {
inner.executeInTxn(runnable);
}
@Override
public <T> T calculateInTxn(Supplier<T> supplier) {
return inner.calculateInTxn(supplier);
}
@Override
public PrefixMapping clearNsPrefixMap() {
return inner.clearNsPrefixMap();
}
@Override
public int numPrefixes() {
return inner.numPrefixes();
}
}
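The new executeInTxn / calculateInTxn overrides simply forward Jena's Model transaction helpers to the wrapped model. A hypothetical caller, assuming the decorated Model is backed by a transaction-capable store, could use them like this:

import org.apache.jena.rdf.model.Model;

public class TxnSketch {
    /** Count statements, running inside a transaction when the backing store supports them. */
    static long countInTxn(Model m) {
        return m.calculateInTxn(() -> m.size());
    }

    /** Remove all statements inside a transaction. */
    static void clearInTxn(Model m) {
        m.executeInTxn(() -> m.removeAll());
    }
}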

View file

@ -11,6 +11,7 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.Supplier;
import org.apache.jena.datatypes.RDFDatatype;
import org.apache.jena.graph.Graph;
@ -1775,4 +1776,34 @@ public abstract class AbstractOntModelDecorator implements OntModel {
public Model writeAll(OutputStream out, String lang, String base) {
return inner.writeAll(out, lang, base);
}
@Override
public Statement getRequiredProperty(Resource resource, Property property, String s) {
return inner.getRequiredProperty(resource, property, s);
}
@Override
public Statement getProperty(Resource resource, Property property, String s) {
return inner.getProperty(resource, property, s);
}
@Override
public void executeInTxn(Runnable runnable) {
inner.executeInTxn(runnable);
}
@Override
public <T> T calculateInTxn(Supplier<T> supplier) {
return inner.calculateInTxn(supplier);
}
@Override
public PrefixMapping clearNsPrefixMap() {
return inner.clearNsPrefixMap();
}
@Override
public int numPrefixes() {
return inner.numPrefixes();
}
}

View file

@ -468,6 +468,16 @@ public class LanguageFilteringRDFService implements RDFService {
return s.manufactureChangeSet();
}
@Override
public long countTriples(RDFNode subject, RDFNode predicate, RDFNode object) throws RDFServiceException {
return s.countTriples(subject, predicate, object);
}
@Override
public Model getTriples(RDFNode subject, RDFNode predicate, RDFNode object, long limit, long offset) throws RDFServiceException {
return s.getTriples(subject, predicate, object, limit, offset);
}
@Override
public void close() {
s.close();

View file

@ -16,6 +16,7 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceFactory;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import edu.cornell.mannlib.vitro.webapp.utils.logging.ToString;
import org.apache.jena.rdf.model.RDFNode;
/**
* An RDFServiceFactory that always returns the same RDFService object
@ -192,6 +193,16 @@ public class RDFServiceFactorySingle implements RDFServiceFactory {
return s.manufactureChangeSet();
}
@Override
public long countTriples(RDFNode subject, RDFNode predicate, RDFNode object) throws RDFServiceException {
return s.countTriples(subject, predicate, object);
}
@Override
public Model getTriples(RDFNode subject, RDFNode predicate, RDFNode object, long limit, long offset) throws RDFServiceException {
return s.getTriples(subject, predicate, object, limit, offset);
}
@Override
public void close() {
// Don't close s. It's being used by everybody.

View file

@ -12,18 +12,24 @@ import java.util.concurrent.CopyOnWriteArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.atlas.io.StringWriterI;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.graph.Triple;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryFactory;
import org.apache.jena.query.QueryParseException;
import org.apache.jena.query.QuerySolution;
import org.apache.jena.query.Syntax;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelChangedListener;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.rdf.model.Statement;
import org.apache.jena.rdf.model.StmtIterator;
import org.apache.jena.riot.out.NodeFormatter;
import org.apache.jena.riot.out.NodeFormatterTTL;
import org.apache.jena.vocabulary.RDF;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeListener;
@ -34,6 +40,7 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import edu.cornell.mannlib.vitro.webapp.utils.logging.ToString;
import org.vivoweb.linkeddatafragments.datasource.rdfservice.RDFServiceBasedRequestProcessorForTPFs;
public abstract class RDFServiceImpl implements RDFService {
@ -341,4 +348,120 @@ public abstract class RDFServiceImpl implements RDFService {
return ToString.simpleName(this) + "[" + ToString.hashHex(this) + "]";
}
@Override
public long countTriples(RDFNode subject, RDFNode predicate, RDFNode object) throws RDFServiceException {
StringBuilder whereClause = new StringBuilder();
StringBuilder orderBy = new StringBuilder();
if ( subject != null ) {
appendNode(whereClause.append(' '), subject);
} else {
whereClause.append(" ?s");
orderBy.append(" ?s");
}
if ( predicate != null ) {
appendNode(whereClause.append(' '), predicate);
} else {
whereClause.append(" ?p");
orderBy.append(" ?p");
}
if ( object != null ) {
appendNode(whereClause.append(' '), object);
} else {
whereClause.append(" ?o");
orderBy.append(" ?o");
}
long estimate = -1;
StringBuilder count = new StringBuilder();
count.append("SELECT (COUNT(*) AS ?count) WHERE { ");
count.append(whereClause.toString());
count.append(" . ");
count.append(" }");
CountConsumer countConsumer = new CountConsumer();
this.sparqlSelectQuery(count.toString(), countConsumer);
return countConsumer.count;
}
@Override
public Model getTriples(RDFNode subject, RDFNode predicate, RDFNode object, long limit, long offset) throws RDFServiceException {
StringBuilder whereClause = new StringBuilder();
StringBuilder orderBy = new StringBuilder();
if ( subject != null ) {
appendNode(whereClause.append(' '), subject);
} else {
whereClause.append(" ?s");
orderBy.append(" ?s");
}
if ( predicate != null ) {
appendNode(whereClause.append(' '), predicate);
} else {
whereClause.append(" ?p");
orderBy.append(" ?p");
}
if ( object != null ) {
appendNode(whereClause.append(' '), object);
} else {
whereClause.append(" ?o");
orderBy.append(" ?o");
}
StringBuilder constructQuery = new StringBuilder();
constructQuery.append("CONSTRUCT { ");
constructQuery.append(whereClause.toString());
constructQuery.append(" } WHERE { ");
constructQuery.append(whereClause.toString()).append(" . ");
constructQuery.append(" }");
if (orderBy.length() > 0) {
constructQuery.append(" ORDER BY").append(orderBy.toString());
}
if (limit > 0) {
constructQuery.append(" LIMIT ").append(limit);
}
if (offset > 0) {
constructQuery.append(" OFFSET ").append(offset);
}
Model triples = ModelFactory.createDefaultModel();
this.sparqlConstructQuery(constructQuery.toString(), triples);
return triples;
}
private void appendNode(StringBuilder builder, RDFNode node) {
if (node.isLiteral()) {
builder.append(literalToString(node.asLiteral()));
} else if (node.isURIResource()) {
builder.append('<' + node.asResource().getURI() + '>');
}
}
private String literalToString(Literal l) {
StringWriterI sw = new StringWriterI();
NodeFormatter fmt = new NodeFormatterTTL(null, null);
fmt.formatLiteral(sw, l.asNode());
return sw.toString();
}
class CountConsumer extends ResultSetConsumer {
public long count = -1;
@Override
protected void processQuerySolution(QuerySolution qs) {
if (count == -1) {
Literal literal = qs.getLiteral("count");
count = literal.getLong();
}
}
}
}
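To make the string building above concrete: a call such as getTriples(null, RDF.type, null, 100, 0) should produce roughly the following query (hand-traced from the code, with whitespace normalized, not captured output):

    CONSTRUCT { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?o }
    WHERE { ?s <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> ?o . }
    ORDER BY ?s ?o LIMIT 100

Fixed terms are inlined (URIs in angle brackets, literals in Turtle form via NodeFormatterTTL), while each wildcard position contributes both a variable and an ORDER BY key, which keeps LIMIT/OFFSET paging stable across calls.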

View file

@ -16,6 +16,9 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import org.apache.commons.io.IOUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.query.QuerySolutionMap;
import org.apache.jena.query.Syntax;
import org.apache.jena.rdf.model.Literal;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.log4j.lf5.util.StreamUtils;
@ -612,6 +615,71 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic
return graph.isIsomorphicWith(fromTripleStoreModel);
}
@Override
public long countTriples(RDFNode subject, RDFNode predicate, RDFNode object) throws RDFServiceException {
Query countQuery = QueryFactory.create("SELECT (COUNT(?s) AS ?count) WHERE { ?s ?p ?o } ORDER BY ?s ?p ?o", Syntax.syntaxSPARQL_11);
QuerySolutionMap map = new QuerySolutionMap();
if ( subject != null ) {
map.add("s", subject);
}
if ( predicate != null ) {
map.add("p", predicate);
}
if ( object != null ) {
map.add("o", object);
}
DatasetWrapper dw = getDatasetWrapper();
try {
Dataset d = dw.getDataset();
try (QueryExecution qexec = QueryExecutionFactory.create(countQuery, d, map)) {
ResultSet results = qexec.execSelect();
if (results.hasNext()) {
QuerySolution soln = results.nextSolution() ;
Literal literal = soln.getLiteral("count");
return literal.getLong();
}
}
} finally {
dw.close();
}
return 0;
}
@Override
public Model getTriples(RDFNode subject, RDFNode predicate, RDFNode object, long limit, long offset) throws RDFServiceException {
Query query = QueryFactory.create("CONSTRUCT WHERE { ?s ?p ?o }", Syntax.syntaxSPARQL_11);
QuerySolutionMap map = new QuerySolutionMap();
if ( subject != null ) {
map.add("s", subject);
}
if ( predicate != null ) {
map.add("p", predicate);
}
if ( object != null ) {
map.add("o", object);
}
query.setOffset(offset);
query.setLimit(limit);
Model triples = ModelFactory.createDefaultModel();
DatasetWrapper dw = getDatasetWrapper();
try {
Dataset d = dw.getDataset();
try (QueryExecution qexec = QueryExecutionFactory.create(query, d, map)) {
qexec.execConstruct(triples);
}
return triples;
} finally {
dw.close();
}
}
@Override
public void close() {
// nothing
@ -620,5 +688,4 @@ public abstract class RDFServiceJena extends RDFServiceImpl implements RDFServic
protected QueryExecution createQueryExecution(String queryString, Query q, Dataset d) {
return QueryExecutionFactory.create(q, d);
}
}
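Rather than splicing the fixed terms into the query string, the Jena-based implementation above supplies them as an initial binding via QuerySolutionMap. A self-contained sketch of that pattern against an in-memory dataset (the property URI is a placeholder, not something from the commit):

import org.apache.jena.query.Dataset;
import org.apache.jena.query.DatasetFactory;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.query.QueryFactory;
import org.apache.jena.query.QuerySolutionMap;
import org.apache.jena.query.ResultSet;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.ResourceFactory;

public class InitialBindingSketch {
    public static void main(String[] args) {
        Dataset d = DatasetFactory.create(ModelFactory.createDefaultModel());
        Query q = QueryFactory.create("SELECT (COUNT(?s) AS ?count) WHERE { ?s ?p ?o }");
        QuerySolutionMap binding = new QuerySolutionMap();
        // Pin the predicate; unbound positions stay as wildcards
        binding.add("p", ResourceFactory.createProperty("http://example.org/p"));
        try (QueryExecution qexec = QueryExecutionFactory.create(q, d, binding)) {
            ResultSet results = qexec.execSelect();
            System.out.println(results.next().getLiteral("count").getLong());
        }
    }
}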

View file

@ -4,20 +4,34 @@ package edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.sdb;
import java.io.IOException;
import java.sql.Connection;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;
import javax.sql.DataSource;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.jena.datatypes.RDFDatatype;
import org.apache.jena.datatypes.TypeMapper;
import org.apache.jena.datatypes.xsd.XSDDatatype;
import org.apache.jena.graph.Node;
import org.apache.jena.graph.NodeFactory;
import org.apache.jena.graph.Triple;
import org.apache.jena.query.Dataset;
import org.apache.jena.query.Query;
import org.apache.jena.query.QueryExecution;
import org.apache.jena.query.QueryExecutionFactory;
import org.apache.jena.rdf.model.Model;
import org.apache.jena.rdf.model.ModelFactory;
import org.apache.jena.rdf.model.RDFNode;
import org.apache.jena.sdb.SDBFactory;
import org.apache.jena.sdb.Store;
import org.apache.jena.sdb.StoreDesc;
import org.apache.jena.sdb.layout2.NodeLayout2;
import org.apache.jena.sdb.layout2.ValueType;
import org.apache.jena.sdb.sql.SDBConnection;
import edu.cornell.mannlib.vitro.webapp.dao.jena.DatasetWrapper;
@ -28,6 +42,8 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.RDFServiceJena;
import org.apache.jena.sdb.store.DatabaseType;
import org.apache.jena.sdb.store.LayoutType;
public class RDFServiceSDB extends RDFServiceJena implements RDFService {
@ -148,6 +164,116 @@ public class RDFServiceSDB extends RDFServiceJena implements RDFService {
// However, in recent Jena this turns out to be much slower than executing against the dataset directly
}
@Override
public long countTriples(RDFNode subject, RDFNode predicate, RDFNode object) throws RDFServiceException {
if (LayoutType.LayoutTripleNodesHash.equals(storeDesc.getLayout())) {
if (DatabaseType.MySQL.equals(storeDesc.getDbType()) ||
DatabaseType.PostgreSQL.equals(storeDesc.getDbType())) {
SDBConnection sdbConn = getSDBConnection();
try {
String whereClause = makeWhereClause(subject, predicate, object);
Statement stmt = sdbConn.getSqlConnection().createStatement();
ResultSet rs = stmt.executeQuery("SELECT count(DISTINCT s,p,o) AS tcount FROM Quads" + (StringUtils.isEmpty(whereClause) ? "" : " WHERE " + whereClause));
try {
while (rs.next()) {
return rs.getLong("tcount");
}
} finally {
rs.close();
}
} catch (SQLException sqle) {
throw new RDFServiceException("Unable to retrieve triples", sqle);
} finally {
close(sdbConn);
}
}
} else {
return super.countTriples(subject, predicate, object);
}
return super.countTriples(subject, predicate, object);
}
@Override
public Model getTriples(RDFNode subject, RDFNode predicate, RDFNode object, long limit, long offset) throws RDFServiceException {
if (LayoutType.LayoutTripleNodesHash.equals(storeDesc.getLayout())) {
if (DatabaseType.MySQL.equals(storeDesc.getDbType()) ||
DatabaseType.PostgreSQL.equals(storeDesc.getDbType())) {
Model triples = ModelFactory.createDefaultModel();
SDBConnection sdbConn = getSDBConnection();
try {
String whereClause = makeWhereClause(subject, predicate, object);
Statement stmt = sdbConn.getSqlConnection().createStatement();
ResultSet rs = stmt.executeQuery("SELECT \n" +
"N1.lex AS s_lex,\n" +
"N1.lang AS s_lang,\n" +
"N1.datatype AS s_datatype,\n" +
"N1.type AS s_type,\n" +
"N2.lex AS p_lex,\n" +
"N2.lang AS p_lang,\n" +
"N2.datatype AS p_datatype,\n" +
"N2.type AS p_type,\n" +
"N3.lex AS o_lex,\n" +
"N3.lang AS o_lang,\n" +
"N3.datatype AS o_datatype,\n" +
"N3.type AS o_type\n" +
"FROM\n" +
"(SELECT DISTINCT s,p,o FROM Quads" +
(StringUtils.isEmpty(whereClause) ? "" : " WHERE " + whereClause) +
" ORDER BY s,p,o " +
(limit > 0 ? "LIMIT " + limit : "") +
(offset > 0 ? " OFFSET " + offset : "") + ") Q\n" +
"LEFT OUTER JOIN\n" +
"\tNodes AS N1\n" +
"ON ( Q.s = N1.hash )\n" +
"LEFT OUTER JOIN\n" +
"\tNodes AS N2\n" +
"ON ( Q.p = N2.hash )\n" +
"LEFT OUTER JOIN\n" +
"\tNodes AS N3\n" +
"ON ( Q.o = N3.hash )");
try {
while (rs.next()) {
Node subjectNode = makeNode(
rs.getString("s_lex"),
rs.getString("s_datatype"),
rs.getString("s_lang"),
ValueType.lookup(rs.getInt("s_type")));
Node predicateNode = makeNode(
rs.getString("p_lex"),
rs.getString("p_datatype"),
rs.getString("p_lang"),
ValueType.lookup(rs.getInt("p_type")));
Node objectNode = makeNode(
rs.getString("o_lex"),
rs.getString("o_datatype"),
rs.getString("o_lang"),
ValueType.lookup(rs.getInt("o_type")));
triples.add(
triples.asStatement(Triple.create(subjectNode, predicateNode, objectNode))
);
}
} finally {
rs.close();
}
} catch (SQLException sqle) {
throw new RDFServiceException("Unable to retrieve triples", sqle);
} finally {
close(sdbConn);
}
return triples;
}
}
return super.getTriples(subject, predicate, object, limit, offset);
}
@Override
public void close() {
if (conn != null) {
@ -159,4 +285,55 @@ public class RDFServiceSDB extends RDFServiceJena implements RDFService {
}
}
// Copied from Jena SQLBridge2
private static Node makeNode(String lex, String datatype, String lang, ValueType vType) {
switch(vType) {
case BNODE:
return NodeFactory.createBlankNode(lex);
case URI:
return NodeFactory.createURI(lex);
case STRING:
return NodeFactory.createLiteral(lex, lang);
case XSDSTRING:
return NodeFactory.createLiteral(lex, XSDDatatype.XSDstring);
case INTEGER:
return NodeFactory.createLiteral(lex, XSDDatatype.XSDinteger);
case DOUBLE:
return NodeFactory.createLiteral(lex, XSDDatatype.XSDdouble);
case DATETIME:
return NodeFactory.createLiteral(lex, XSDDatatype.XSDdateTime);
case OTHER:
RDFDatatype dt = TypeMapper.getInstance().getSafeTypeByName(datatype);
return NodeFactory.createLiteral(lex, dt);
default:
log.warn("Unrecognized: (" + lex + ", " + lang + ", " + vType + ")");
return NodeFactory.createLiteral("UNRECOGNIZED");
}
}
private String makeWhereClause(RDFNode subject, RDFNode predicate, RDFNode object) {
StringBuilder whereClause = new StringBuilder();
if (subject != null) {
if (whereClause.length() > 0) {
whereClause.append(" AND ");
}
whereClause.append("s=").append(NodeLayout2.hash(subject.asNode()));
}
if (predicate != null) {
if (whereClause.length() > 0) {
whereClause.append(" AND ");
}
whereClause.append("p=").append(NodeLayout2.hash(predicate.asNode()));
}
if (object != null) {
if (whereClause.length() > 0) {
whereClause.append(" AND ");
}
whereClause.append("o=").append(NodeLayout2.hash(object.asNode()));
}
return whereClause.length() > 0 ? whereClause.toString() : null;
}
}

View file

@ -14,6 +14,7 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.ChangeSet;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.ResultSetConsumer;
import org.apache.jena.rdf.model.RDFNode;
/**
* This RDFService wrapper adds instrumentation to the time-consuming methods of
@ -182,6 +183,16 @@ public class LoggingRDFService implements RDFService {
return innerService.manufactureChangeSet();
}
@Override
public long countTriples(RDFNode subject, RDFNode predicate, RDFNode object) throws RDFServiceException {
return innerService.countTriples(subject, predicate, object);
}
@Override
public Model getTriples(RDFNode subject, RDFNode predicate, RDFNode object, long limit, long offset) throws RDFServiceException {
return innerService.getTriples(subject, predicate, object, limit, offset);
}
@Override
public void close() {
innerService.close();

View file

@ -7,7 +7,7 @@ import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.commons.lang.StringUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

Some files were not shown because too many files have changed in this diff.