VIVO-731 Replace SparqlQueryServlet with SparqlQueryController
SparqlQueryServlet was JSP-based, so the JSP is deleted as well. SparqlQueryController is Freemarker-based and is a thin shell around the SparqlQueryApiExecutor.
parent 0c0915ef65
commit de32d53791
7 changed files with 264 additions and 1560 deletions
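Both the old servlet and the new controller read a "query" parameter plus a "resultFormat" parameter (SELECT/ASK) or an "rdfResultFormat" parameter (CONSTRUCT/DESCRIBE), and both require the USE_SPARQL_QUERY_PAGE permission. A minimal client sketch against the new endpoint follows; the base URL, the "text/csv" format value, and the absence of authentication handling are assumptions for illustration only, not part of this commit (the accepted format values are defined by SparqlQueryApiExecutor, which is not shown in this diff).

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

public class SparqlQueryPageClientSketch {
    public static void main(String[] args) throws Exception {
        // Assumed deployment URL; substitute the real host, port, and context path.
        String base = "http://localhost:8080/vivo/admin/sparqlquery";
        String query = "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 5";

        // SELECT and ASK use "resultFormat"; CONSTRUCT and DESCRIBE use "rdfResultFormat".
        // "text/csv" is an assumed format value.
        String url = base
                + "?query=" + URLEncoder.encode(query, "UTF-8")
                + "&resultFormat=" + URLEncoder.encode("text/csv", "UTF-8");

        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), "UTF-8"))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);
            }
        }
    }
}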
edu/cornell/mannlib/vitro/webapp/controller/SparqlQueryServlet.java (deleted)

@@ -1,476 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.PrintWriter;
import java.io.Writer;
import java.net.URLDecoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;

import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.ServletOutputStream;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.github.jsonldjava.core.JSONLD;
import com.github.jsonldjava.core.JSONLDProcessingError;
import com.github.jsonldjava.impl.JenaRDFParser;
import com.github.jsonldjava.utils.JSONUtils;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.ResultSetFactory;
import com.hp.hpl.jena.query.ResultSetFormatter;
import com.hp.hpl.jena.rdf.model.Literal;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.Resource;
import com.hp.hpl.jena.sparql.resultset.ResultSetFormat;
import com.hp.hpl.jena.vocabulary.XSD;

import edu.cornell.mannlib.vedit.controller.BaseEditController;
import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission;
import edu.cornell.mannlib.vitro.webapp.auth.policy.PolicyHelper;
import edu.cornell.mannlib.vitro.webapp.beans.Ontology;
import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ModelSerializationFormat;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ResultFormat;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.web.ContentType;

/**
 * Services a SPARQL query. This will return a simple error message and a 501 if
 * there is no Model.
 *
 * @author bdc34
 */
public class SparqlQueryServlet extends BaseEditController {
    private static final Log log = LogFactory.getLog(SparqlQueryServlet.class.getName());

    /**
     * format configurations for SELECT queries.
     */
    protected static HashMap<String,RSFormatConfig> rsFormats = new HashMap<String,RSFormatConfig>();

    /**
     * format configurations for CONSTRUCT/DESCRIBE queries.
     */
    protected static HashMap<String,ModelFormatConfig> modelFormats =
            new HashMap<String,ModelFormatConfig>();

    /**
     * Use this map to decide which MIME type is suited for the "accept" header.
     */
    public static final Map<String, Float> ACCEPTED_CONTENT_TYPES;

    @Override
    protected void doPost(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        this.doGet(request, response);
    }

    @Override
    protected void doGet(HttpServletRequest request, HttpServletResponse response)
            throws ServletException, IOException {
        VitroRequest vreq = new VitroRequest(request);

        // first check if the email and password are just in the request
        String email = vreq.getParameter("email");
        String password = vreq.getParameter("password");
        boolean isAuth = PolicyHelper.isAuthorizedForActions(vreq,
                email, password, SimplePermission.USE_SPARQL_QUERY_PAGE.ACTIONS);

        // otherwise use the normal auth mechanism
        if (!isAuth &&
                !isAuthorizedToDisplayPage(request, response,
                        SimplePermission.USE_SPARQL_QUERY_PAGE.ACTIONS)) {
            return;
        }

        Model model = vreq.getJenaOntModel();
        if (model == null) {
            doNoModelInContext(response);
            return;
        }

        // Use RDFService from context to avoid language filtering
        RDFService rdfService = RDFServiceUtils.getRDFServiceFactory(
                getServletContext()).getRDFService();

        String queryParam = vreq.getParameter("query");
        log.debug("queryParam was : " + queryParam);

        if (queryParam == null || "".equals(queryParam)) {
            doHelp(request, response);
            return;
        }

        String contentType = checkForContentType(vreq.getHeader("Accept"));

        Query query = SparqlQueryUtils.create(queryParam);
        if (query.isSelectType()) {
            String format = contentType != null ? contentType : vreq.getParameter("resultFormat");
            RSFormatConfig formatConf = rsFormats.get(format);
            doSelect(response, queryParam, formatConf, rdfService);
        } else if (query.isAskType()) {
            doAsk(queryParam, rdfService, response);
        } else if (query.isConstructType() || query.isDescribeType()) {
            String format = contentType != null ? contentType : vreq.getParameter("rdfResultFormat");
            if (format == null) {
                format = "RDF/XML-ABBREV";
            }
            ModelFormatConfig formatConf = modelFormats.get(format);
            doConstruct(response, query, formatConf, rdfService);
        } else {
            doHelp(request, response);
        }
        return;
    }

    private void doAsk(String queryParam, RDFService rdfService,
            HttpServletResponse response) throws ServletException, IOException {
        // Irrespective of the ResultFormatParam,
        // this always prints a boolean to the default OutputStream.
        String result;
        try {
            result = (rdfService.sparqlAskQuery(queryParam) == true) ? "true" : "false";
        } catch (RDFServiceException e) {
            throw new ServletException("Could not execute ask query ", e);
        }
        PrintWriter p = response.getWriter();
        p.write(result);
        return;
    }

    /**
     * Execute the query and send the result to out. Attempt to
     * send the RDFService the same format as the rdfResultFormatParam
     * so that the results from the RDFService can be directly piped to the client.
     */
    private void doSelect(HttpServletResponse response,
            String queryParam,
            RSFormatConfig formatConf,
            RDFService rdfService) throws ServletException {
        try {
            if (!formatConf.converstionFromWireFormat) {
                response.setContentType(formatConf.responseMimeType);
                InputStream results;
                results = rdfService.sparqlSelectQuery(queryParam, formatConf.wireFormat);
                pipe(results, response.getOutputStream());
            } else {
                // always use JSON when conversion is needed.
                InputStream results = rdfService.sparqlSelectQuery(queryParam, ResultFormat.JSON);

                response.setContentType(formatConf.responseMimeType);

                ResultSet rs = ResultSetFactory.fromJSON(results);
                OutputStream out = response.getOutputStream();
                ResultSetFormatter.output(out, rs, formatConf.jenaResponseFormat);
            }
        } catch (RDFServiceException e) {
            throw new ServletException("Cannot get result from the RDFService", e);
        } catch (IOException e) {
            throw new ServletException("Cannot perform SPARQL SELECT", e);
        }
    }

    /**
     * Execute the query and send the result to out. Attempt to
     * send the RDFService the same format as the rdfResultFormatParam
     * so that the results from the RDFService can be directly piped to the client.
     * @param rdfService
     * @throws IOException
     * @throws RDFServiceException
     */
    private void doConstruct(HttpServletResponse response,
            Query query,
            ModelFormatConfig formatConfig,
            RDFService rdfService) throws ServletException {
        try {
            InputStream rawResult = null;
            if (query.isConstructType()) {
                rawResult = rdfService.sparqlConstructQuery(query.toString(), formatConfig.wireFormat);
            } else if (query.isDescribeType()) {
                rawResult = rdfService.sparqlDescribeQuery(query.toString(), formatConfig.wireFormat);
            }

            response.setContentType(formatConfig.responseMimeType);

            if (formatConfig.converstionFromWireFormat) {
                Model resultModel = RDFServiceUtils.parseModel(rawResult, formatConfig.wireFormat);
                if ("JSON-LD".equals(formatConfig.jenaResponseFormat)) {
                    // since jena 2.6.4 doesn't support JSON-LD we do it
                    try {
                        JenaRDFParser parser = new JenaRDFParser();
                        Object json = JSONLD.fromRDF(resultModel, parser);
                        JSONUtils.write(response.getWriter(), json);
                    } catch (JSONLDProcessingError e) {
                        throw new RDFServiceException("Could not convert from Jena model to JSON-LD", e);
                    }
                } else {
                    OutputStream out = response.getOutputStream();
                    resultModel.write(out, formatConfig.jenaResponseFormat);
                }
            } else {
                OutputStream out = response.getOutputStream();
                pipe(rawResult, out);
            }
        } catch (IOException ex) {
            throw new ServletException("could not run SPARQL CONSTRUCT", ex);
        } catch (RDFServiceException ex) {
            throw new ServletException("could not run SPARQL CONSTRUCT", ex);
        }
    }

    private void pipe(InputStream in, OutputStream out) throws IOException {
        int size;
        byte[] buffer = new byte[4096];
        while ((size = in.read(buffer)) > -1) {
            out.write(buffer, 0, size);
        }
    }

    private void doNoModelInContext(HttpServletResponse res) {
        try {
            res.setStatus(HttpServletResponse.SC_NOT_IMPLEMENTED);
            ServletOutputStream sos = res.getOutputStream();
            sos.println("<html><body>this service is not supporeted by the current " +
                    "webapp configuration. A jena model is required in the servlet context.</body></html>");
        } catch (IOException e) {
            log.error("Could not write to ServletOutputStream");
        }
    }

    private void toCsv(Writer out, ResultSet results) {
        // The Skife library wouldn't quote and escape the normal way,
        // so I'm trying it manually.
        List<String> varNames = results.getResultVars();
        int width = varNames.size();
        while (results.hasNext()) {
            QuerySolution solution = (QuerySolution) results.next();
            String[] valueArray = new String[width];
            Iterator<String> varNameIt = varNames.iterator();
            int index = 0;
            while (varNameIt.hasNext()) {
                String varName = varNameIt.next();
                String value = null;
                try {
                    Literal lit = solution.getLiteral(varName);
                    if (lit != null) {
                        value = lit.getLexicalForm();
                        if (XSD.anyURI.getURI().equals(lit.getDatatypeURI())) {
                            value = URLDecoder.decode(value, "UTF-8");
                        }
                    }
                } catch (Exception e) {
                    try {
                        Resource res = solution.getResource(varName);
                        if (res != null) {
                            if (res.isAnon()) {
                                value = res.getId().toString();
                            } else {
                                value = res.getURI();
                            }
                        }
                    } catch (Exception f) {}
                }
                valueArray[index] = value;
                index++;
            }

            StringBuffer rowBuff = new StringBuffer();
            for (int i = 0; i < valueArray.length; i++) {
                String value = valueArray[i];
                if (value != null) {
                    value.replaceAll("\"", "\\\"");
                    rowBuff.append("\"").append(value).append("\"");
                }
                if (i + 1 < width) {
                    rowBuff.append(",");
                }
            }
            rowBuff.append("\n");
            try {
                out.write(rowBuff.toString());
            } catch (IOException ioe) {
                log.error(ioe);
            }
        }
        try {
            out.flush();
        } catch (IOException ioe) {
            log.error(ioe);
        }
    }

    private void doHelp(HttpServletRequest req, HttpServletResponse res) throws ServletException, IOException {
        VitroRequest vreq = new VitroRequest(req);

        OntologyDao daoObj = vreq.getUnfilteredWebappDaoFactory().getOntologyDao();
        List<Ontology> ontologiesObj = daoObj.getAllOntologies();
        ArrayList<String> prefixList = new ArrayList<String>();

        if (ontologiesObj != null && ontologiesObj.size() > 0) {
            for (Ontology ont : ontologiesObj) {
                prefixList.add(ont.getPrefix() == null ? "(not yet specified)" : ont.getPrefix());
                prefixList.add(ont.getURI() == null ? "" : ont.getURI());
            }
        } else {
            prefixList.add("<strong>" + "No Ontologies added" + "</strong>");
            prefixList.add("<strong>" + "Load Ontologies" + "</strong>");
        }

        req.setAttribute("prefixList", prefixList);

        req.setAttribute("title", "SPARQL Query");
        req.setAttribute("bodyJsp", "/admin/sparqlquery/sparqlForm.jsp");

        RequestDispatcher rd = req.getRequestDispatcher("/" + Controllers.BASIC_JSP);
        rd.forward(req, res);
    }

    /** Simple boolean vaule to improve the legibility of confiugrations. */
    private final static boolean CONVERT = true;

    /** Simple vaule to improve the legibility of confiugrations. */
    private final static String NO_CONVERSION = null;

    public static class FormatConfig {
        public String valueFromForm;
        public boolean converstionFromWireFormat;
        public String responseMimeType;
    }

    private static ModelFormatConfig[] fmts = {
            new ModelFormatConfig("RDF/XML",
                    !CONVERT, ModelSerializationFormat.RDFXML, NO_CONVERSION, "application/rdf+xml"),
            new ModelFormatConfig("RDF/XML-ABBREV",
                    CONVERT, ModelSerializationFormat.N3, "RDF/XML-ABBREV", "application/rdf+xml"),
            new ModelFormatConfig("N3",
                    !CONVERT, ModelSerializationFormat.N3, NO_CONVERSION, "text/n3"),
            new ModelFormatConfig("N-TRIPLE",
                    !CONVERT, ModelSerializationFormat.NTRIPLE, NO_CONVERSION, "text/plain"),
            new ModelFormatConfig("TTL",
                    CONVERT, ModelSerializationFormat.N3, "TTL", "application/x-turtle"),
            new ModelFormatConfig("JSON-LD",
                    CONVERT, ModelSerializationFormat.N3, "JSON-LD", "application/javascript") };

    public static class ModelFormatConfig extends FormatConfig {
        public RDFService.ModelSerializationFormat wireFormat;
        public String jenaResponseFormat;

        public ModelFormatConfig(String valueFromForm,
                boolean converstionFromWireFormat,
                RDFService.ModelSerializationFormat wireFormat,
                String jenaResponseFormat,
                String responseMimeType) {
            this.valueFromForm = valueFromForm;
            this.converstionFromWireFormat = converstionFromWireFormat;
            this.wireFormat = wireFormat;
            this.jenaResponseFormat = jenaResponseFormat;
            this.responseMimeType = responseMimeType;
        }
    }

    private static RSFormatConfig[] rsfs = {
            new RSFormatConfig("RS_XML",
                    !CONVERT, ResultFormat.XML, null, "text/xml"),
            new RSFormatConfig("RS_TEXT",
                    !CONVERT, ResultFormat.TEXT, null, "text/plain"),
            new RSFormatConfig("vitro:csv",
                    !CONVERT, ResultFormat.CSV, null, "text/csv"),
            new RSFormatConfig("RS_JSON",
                    !CONVERT, ResultFormat.JSON, null, "application/javascript") };

    public static class RSFormatConfig extends FormatConfig {
        public ResultFormat wireFormat;
        public ResultSetFormat jenaResponseFormat;

        public RSFormatConfig(String valueFromForm,
                boolean converstionFromWireFormat,
                ResultFormat wireFormat,
                ResultSetFormat jenaResponseFormat,
                String responseMimeType) {
            this.valueFromForm = valueFromForm;
            this.converstionFromWireFormat = converstionFromWireFormat;
            this.wireFormat = wireFormat;
            this.jenaResponseFormat = jenaResponseFormat;
            this.responseMimeType = responseMimeType;
        }
    }

    static {
        HashMap<String, Float> map = new HashMap<String, Float>();

        /* move the lists of configurations into maps for easy lookup
         * by both MIME content type and the parameters from the form */
        for (RSFormatConfig rsfc : rsfs) {
            rsFormats.put(rsfc.valueFromForm, rsfc);
            rsFormats.put(rsfc.responseMimeType, rsfc);
            map.put(rsfc.responseMimeType, 1.0f);
        }
        for (ModelFormatConfig mfc : fmts) {
            modelFormats.put(mfc.valueFromForm, mfc);
            modelFormats.put(mfc.responseMimeType, mfc);
            map.put(mfc.responseMimeType, 1.0f);
        }

        ACCEPTED_CONTENT_TYPES = Collections.unmodifiableMap(map);
    }

    /**
     * Get the content type based on content negotiation.
     * Returns null of no content type can be agreed on or
     * if there is no accept header.
     */
    protected String checkForContentType(String acceptHeader) {
        if (acceptHeader == null)
            return null;

        try {
            Map<String, Float> typesAndQ = ContentType
                    .getTypesAndQ(acceptHeader);

            String ctStr = ContentType
                    .getBestContentType(typesAndQ, ACCEPTED_CONTENT_TYPES);

            if (ACCEPTED_CONTENT_TYPES.containsKey(ctStr)) {
                return ctStr;
            }
        } catch (Throwable th) {
            log.error("Problem while checking accept header ", th);
        }
        return null;
    }
}
edu/cornell/mannlib/vitro/webapp/controller/admin/SparqlQueryController.java (new)

@@ -0,0 +1,228 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.admin;

import java.io.IOException;
import java.io.PrintWriter;
import java.io.StringWriter;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryParseException;

import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission;
import edu.cornell.mannlib.vitro.webapp.beans.Ontology;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery.InvalidQueryTypeException;
import edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery.ResultSetMediaType;
import edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery.SparqlQueryApiExecutor;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.FreemarkerHttpServlet;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

/**
 * Present the SPARQL Query form, and execute the queries.
 */
public class SparqlQueryController extends FreemarkerHttpServlet {
    private static final Log log = LogFactory
            .getLog(SparqlQueryController.class);

    private static final String TEMPLATE_NAME = "admin-sparqlQueryForm.ftl";

    /**
     * Always show these prefixes, even though they don't appear in the list of
     * ontologies.
     */
    private static final List<Prefix> DEFAULT_PREFIXES = buildDefaults();

    private static List<Prefix> buildDefaults() {
        Prefix[] array = new Prefix[] {
                new Prefix("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#"),
                new Prefix("rdfs", "http://www.w3.org/2000/01/rdf-schema#"),
                new Prefix("xsd", "http://www.w3.org/2001/XMLSchema#"),
                new Prefix("owl", "http://www.w3.org/2002/07/owl#"),
                new Prefix("swrl", "http://www.w3.org/2003/11/swrl#"),
                new Prefix("swrlb", "http://www.w3.org/2003/11/swrlb#"),
                new Prefix("vitro",
                        "http://vitro.mannlib.cornell.edu/ns/vitro/0.7#") };
        return Collections.unmodifiableList(Arrays.asList(array));
    }

    private static final String[] SAMPLE_QUERY = { //
            "", //
            "#", //
            "# This example query gets 20 geographic locations", //
            "# and (if available) their labels", //
            "#", //
            "SELECT ?geoLocation ?label", //
            "WHERE", //
            "{", //
            "     ?geoLocation rdf:type vivo:GeographicLocation",
            "     OPTIONAL { ?geoLocation rdfs:label ?label } ", //
            "}", //
            "LIMIT 20" //
    };

    /**
     * If a query has been provided, we answer it directly, bypassing the
     * Freemarker mechanisms.
     */
    @Override
    public void doGet(HttpServletRequest req, HttpServletResponse resp)
            throws IOException, ServletException {
        if (!isAuthorizedToDisplayPage(req, resp,
                SimplePermission.USE_SPARQL_QUERY_PAGE.ACTIONS)) {
            return;
        }
        if (req.getParameterMap().containsKey("query")) {
            respondToQuery(req, resp);
        } else {
            super.doGet(req, resp);
        }
    }

    private void respondToQuery(HttpServletRequest req, HttpServletResponse resp)
            throws IOException {
        RDFService rdfService = RDFServiceUtils.getRDFServiceFactory(
                getServletContext()).getRDFService();

        String queryString = req.getParameter("query");
        try {
            String format = interpretRequestedFormats(req, queryString);
            SparqlQueryApiExecutor core = SparqlQueryApiExecutor.instance(
                    rdfService, queryString, format);
            resp.setContentType(core.getMediaType());
            core.executeAndFormat(resp.getOutputStream());
        } catch (InvalidQueryTypeException e) {
            do400BadRequest("Query type is not SELECT, ASK, CONSTRUCT, "
                    + "or DESCRIBE: '" + queryString + "'", resp);
        } catch (QueryParseException e) {
            do400BadRequest("Failed to parse query: '" + queryString + "''", e,
                    resp);
        } catch (NotAcceptableException | AcceptHeaderParsingException e) {
            do500InternalServerError("Problem with the page fields: the "
                    + "selected fields do not include an "
                    + "acceptable content type.", e, resp);
        } catch (RDFServiceException e) {
            do500InternalServerError("Problem executing the query.", e, resp);
        }
    }

    private String interpretRequestedFormats(HttpServletRequest req,
            String queryString) throws NotAcceptableException {
        Query query = SparqlQueryUtils.create(queryString);
        String parameterName = (query.isSelectType() || query.isAskType()) ? "resultFormat"
                : "rdfResultFormat";
        String parameterValue = req.getParameter(parameterName);
        if (StringUtils.isBlank(parameterValue)) {
            throw new NotAcceptableException("Parameter '" + parameterName
                    + "' was '" + parameterValue + "'.");
        } else {
            return parameterValue;
        }
    }

    private void do400BadRequest(String message, HttpServletResponse resp)
            throws IOException {
        resp.setStatus(400);
        resp.getWriter().println(message);
    }

    private void do400BadRequest(String message, Exception e,
            HttpServletResponse resp) throws IOException {
        resp.setStatus(400);
        PrintWriter w = resp.getWriter();
        w.println(message);
        e.printStackTrace(w);
    }

    private void do500InternalServerError(String message, Exception e,
            HttpServletResponse resp) throws IOException {
        resp.setStatus(500);
        PrintWriter w = resp.getWriter();
        w.println(message);
        e.printStackTrace(w);
    }

    @Override
    protected ResponseValues processRequest(VitroRequest vreq) throws Exception {
        Map<String, Object> bodyMap = new HashMap<>();
        bodyMap.put("sampleQuery", buildSampleQuery(buildPrefixList(vreq)));
        bodyMap.put("title", "SPARQL Query");
        bodyMap.put("submitUrl", UrlBuilder.getUrl("admin/sparqlquery"));
        return new TemplateResponseValues(TEMPLATE_NAME, bodyMap);
    }

    private List<Prefix> buildPrefixList(VitroRequest vreq) {
        List<Prefix> prefixList = new ArrayList<>(DEFAULT_PREFIXES);

        OntologyDao dao = vreq.getUnfilteredWebappDaoFactory().getOntologyDao();
        List<Ontology> ontologies = dao.getAllOntologies();
        if (ontologies == null) {
            ontologies = Collections.emptyList();
        }

        int unnamedOntologyIndex = 1;

        for (Ontology ont : ontologies) {
            String prefix = ont.getPrefix();
            if (prefix == null) {
                prefix = "p" + unnamedOntologyIndex++;
            }
            prefixList.add(new Prefix(prefix, ont.getURI()));
        }

        return prefixList;
    }

    private String buildSampleQuery(List<Prefix> prefixList) {
        StringWriter sw = new StringWriter();
        PrintWriter writer = new PrintWriter(sw);

        for (Prefix p : prefixList) {
            writer.println(p);
        }
        for (String line : SAMPLE_QUERY) {
            writer.println(line);
        }

        return sw.toString();
    }

    public static class Prefix {
        private final String prefix;
        private final String uri;

        public Prefix(String prefix, String uri) {
            this.prefix = prefix;
            this.uri = uri;
        }

        @Override
        public String toString() {
            return String.format("PREFIX %-9s <%s>", prefix + ":", uri);
        }
    }
}
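As the commit message says, the controller is a thin shell: format negotiation and serialization are delegated to SparqlQueryApiExecutor. A minimal sketch of that delegation outside the servlet follows, using only the calls that appear above (instance, getMediaType, executeAndFormat); the RDFService is assumed to be obtained elsewhere (for example from RDFServiceUtils, as in the controller), and "text/csv" is an assumed format value.

import java.io.ByteArrayOutputStream;

import edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery.SparqlQueryApiExecutor;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;

public class SparqlExecutorSketch {
    /** Run a query and return the serialized result; exceptions are left to the caller. */
    public static String run(RDFService rdfService, String queryString) throws Exception {
        // "text/csv" is an assumed media type for SELECT results.
        SparqlQueryApiExecutor core = SparqlQueryApiExecutor.instance(
                rdfService, queryString, "text/csv");

        ByteArrayOutputStream out = new ByteArrayOutputStream();
        core.executeAndFormat(out); // writes the formatted results to the stream
        System.out.println("media type: " + core.getMediaType());
        return out.toString("UTF-8");
    }
}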
edu/cornell/mannlib/vitro/webapp/controller/SparqlQueryServletOldTest.java (deleted)

@@ -1,76 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller;

import static org.junit.Assert.*;

import java.io.ByteArrayInputStream;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

import org.junit.Test;

import com.github.jsonldjava.core.JSONLD;
import com.github.jsonldjava.core.JSONLDProcessingError;
import com.github.jsonldjava.impl.JenaRDFParser;
import com.github.jsonldjava.utils.JSONUtils;
import com.hp.hpl.jena.rdf.model.Model;
import com.hp.hpl.jena.rdf.model.ModelFactory;

public class SparqlQueryServletOldTest {

    @Test
    public void testJSONLD() throws JSONLDProcessingError {
        // just check if we can use JSONLD-JAVA

        final String turtle = "@prefix const: <http://foo.com/> .\n"
                + "@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n"
                + "<http://localhost:8080/foo1> const:code \"123\" .\n"
                + "<http://localhost:8080/foo2> const:code \"ABC\"^^xsd:string .\n";

        final List<Map<String, Object>> expected = new ArrayList<Map<String, Object>>() {
            {
                add(new LinkedHashMap<String, Object>() {
                    {
                        put("@id", "http://localhost:8080/foo1");
                        put("http://foo.com/code", new ArrayList<Object>() {
                            {
                                add(new LinkedHashMap<String, Object>() {
                                    {
                                        put("@value", "123");
                                    }
                                });
                            }
                        });
                    }
                });
                add(new LinkedHashMap<String, Object>() {
                    {
                        put("@id", "http://localhost:8080/foo2");
                        put("http://foo.com/code", new ArrayList<Object>() {
                            {
                                add(new LinkedHashMap<String, Object>() {
                                    {
                                        put("@value", "ABC");
                                    }
                                });
                            }
                        });
                    }
                });
            }
        };

        final Model modelResult = ModelFactory.createDefaultModel().read(
                new ByteArrayInputStream(turtle.getBytes()), "", "TURTLE");
        final JenaRDFParser parser = new JenaRDFParser();
        final Object json = JSONLD.fromRDF(modelResult, parser);

        assertTrue(JSONUtils.equals(json, expected));
    }
}
@ -1,930 +0,0 @@
|
||||||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
|
||||||
|
|
||||||
package edu.cornell.mannlib.vitro.webapp.controller;
|
|
||||||
|
|
||||||
import static org.junit.Assert.*;
|
|
||||||
|
|
||||||
import java.io.IOException;
|
|
||||||
import java.io.StringReader;
|
|
||||||
import java.net.MalformedURLException;
|
|
||||||
import java.net.URL;
|
|
||||||
|
|
||||||
import javax.servlet.ServletException;
|
|
||||||
import javax.servlet.http.HttpServletRequest;
|
|
||||||
|
|
||||||
import org.junit.Before;
|
|
||||||
import org.junit.Ignore;
|
|
||||||
import org.junit.Test;
|
|
||||||
|
|
||||||
import stubs.edu.cornell.mannlib.vitro.webapp.i18n.I18nStub;
|
|
||||||
import stubs.javax.servlet.ServletConfigStub;
|
|
||||||
import stubs.javax.servlet.ServletContextStub;
|
|
||||||
import stubs.javax.servlet.http.HttpServletRequestStub;
|
|
||||||
import stubs.javax.servlet.http.HttpServletResponseStub;
|
|
||||||
import stubs.javax.servlet.http.HttpSessionStub;
|
|
||||||
|
|
||||||
import com.hp.hpl.jena.ontology.OntModel;
|
|
||||||
import com.hp.hpl.jena.ontology.OntModelSpec;
|
|
||||||
import com.hp.hpl.jena.rdf.model.ModelFactory;
|
|
||||||
|
|
||||||
import edu.cornell.mannlib.vedit.beans.LoginStatusBean;
|
|
||||||
import edu.cornell.mannlib.vedit.beans.LoginStatusBean.AuthenticationSource;
|
|
||||||
import edu.cornell.mannlib.vitro.testing.AbstractTestClass;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.identifier.ActiveIdentifierBundleFactories;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.identifier.ArrayIdentifierBundle;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.identifier.Identifier;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.identifier.IdentifierBundle;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.identifier.UserBasedIdentifierBundleFactory;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.policy.BasicPolicyDecision;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.policy.PolicyList;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.policy.ServletPolicyList;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.policy.ifaces.Authorization;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.policy.ifaces.PolicyDecision;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.policy.ifaces.PolicyIface;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.auth.requestedAction.ifaces.RequestedAction;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.beans.UserAccount;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.controller.authenticate.Authenticator;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.controller.authenticate.AuthenticatorStub;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.controller.authenticate.AuthenticatorStub.Factory;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.dao.ModelAccess;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceFactorySingle;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
|
|
||||||
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.model.RDFServiceModel;
|
|
||||||
|
|
||||||
/**
|
|
||||||
* TODO
|
|
||||||
*/
|
|
||||||
public class SparqlQueryServletTest extends AbstractTestClass {
|
|
||||||
private static final String MODEL_CONTENTS_N3 = "<http://here.edu/subject> \n"
|
|
||||||
+ " <http://here.edu/predicate> <http://here.edu/object> .";
|
|
||||||
private static final String BASE_URI = "http://here.edu";
|
|
||||||
|
|
||||||
private static final String SELECT_ALL_QUERY = "SELECT ?s ?p ?o WHERE {?s ?p ?o}";
|
|
||||||
private static final String SELECT_RESULT_TEXT = ""
|
|
||||||
+ "--------------------------------------------------------------------------------------\n"
|
|
||||||
+ "| s | p | o |\n"
|
|
||||||
+ "======================================================================================\n"
|
|
||||||
+ "| <http://here.edu/subject> | <http://here.edu/predicate> | <http://here.edu/object> |\n"
|
|
||||||
+ "--------------------------------------------------------------------------------------\n";
|
|
||||||
private static final String SELECT_RESULT_CSV = "s,p,o\r\n"
|
|
||||||
+ "http://here.edu/subject,http://here.edu/predicate,http://here.edu/object\r\n";
|
|
||||||
private static final String SELECT_RESULT_TSV = "s\tp\to\r\n"
|
|
||||||
+ "http://here.edu/subject\thttp://here.edu/predicate\thttp://here.edu/object\r\n";
|
|
||||||
private static final String SELECT_RESULT_XML = "<?xml version=\"1.0\"?>"
|
|
||||||
+ "<sparql xmlns=\"http://www.w3.org/2005/sparql-results#\">"
|
|
||||||
+ "<head>" + "<variable name=\"x\"/>"
|
|
||||||
+ "<variable name=\"hpage\"/>" + "</head>" + "<results>"
|
|
||||||
+ "<result>" + "<binding name=\"x\"> ... </binding>"
|
|
||||||
+ "<binding name=\"hpage\"> ... </binding>" + "</result>"
|
|
||||||
+ "</results>" + "</sparql>";
|
|
||||||
private static final String SELECT_RESULT_JSON = "{\"head\": { \"vars\": [ \"s\" , \"o\", \"p\" ] } ,"
|
|
||||||
+ "\"results\": {"
|
|
||||||
+ "\"bindings\": ["
|
|
||||||
+ " {"
|
|
||||||
+ "\"s\": { \"type\": \"uri\" , \"value\": \"http://here.edu/subject\" } ,"
|
|
||||||
+ "\"p\": { \"type\": \"uri\" , \"value\": \"http://here.edu/predicate\" } ,"
|
|
||||||
+ "\"o\": { \"type\": \"uri\" , \"value\": \"http://here.edu/object\" } ,"
|
|
||||||
+ "}" + "]" + "}" + "}";
|
|
||||||
private static final String ACCEPTABLE_FOR_SELECT = "For SELECT queries, Accept header must be one of "
|
|
||||||
+ "'text/plain', 'text/csv', 'text/tsv', 'application/sparql-results+xml', "
|
|
||||||
+ "or 'application/sparql-results+json'";
|
|
||||||
|
|
||||||
private static final String ASK_ALL_QUERY = "ASK WHERE {?s ?p ?o}";
|
|
||||||
private static final String ASK_RESULT_TEXT = "true";
|
|
||||||
private static final String ASK_RESULT_CSV = "true";
|
|
||||||
private static final String ASK_RESULT_TSV = "true";
|
|
||||||
private static final String ASK_RESULT_XML = "<?xml version=\"1.0\"?>"
|
|
||||||
+ "<sparql xmlns=\"http://www.w3.org/2005/sparql-results#\">"
|
|
||||||
+ "<head></head><boolean>true</boolean></sparql>";
|
|
||||||
private static final String ASK_RESULT_JSON = "{\"head\" : { } , \"boolean\" : true}";
|
|
||||||
private static final String ACCEPTABLE_FOR_ASK = "For ASK queries, Accept header must be one of "
|
|
||||||
+ "'text/plain', 'text/csv', 'text/tsv', 'application/sparql-results+xml', "
|
|
||||||
+ "or 'application/sparql-results+json'";
|
|
||||||
|
|
||||||
private static final String CONSTRUCT_ALL_QUERY = "CONSTRUCT {?s ?p ?o} WHERE {?s ?p ?o}";
|
|
||||||
private static final String CONSTRUCT_RESULT_TEXT = "<http://here.edu/subject> "
|
|
||||||
+ "<http://here.edu/predicate> <http://here.edu/object> .\n";
|
|
||||||
private static final String CONSTRUCT_RESULT_TURTLE = "<rdf:RDF\n"
|
|
||||||
+ " xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n"
|
|
||||||
+ " xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n"
|
|
||||||
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n"
|
|
||||||
+ " xmlns:j.0=\"http://here.edu/\"\n"
|
|
||||||
+ " xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\">\n"
|
|
||||||
+ " <rdf:Description rdf:about=\"http://here.edu/subject\">\n"
|
|
||||||
+ " <j.0:predicate rdf:resource=\"http://here.edu/object\"/>\n"
|
|
||||||
+ " </rdf:Description>\n" + "</rdf:RDF>\n";
|
|
||||||
private static final String CONSTRUCT_RESULT_N3 = "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n"
|
|
||||||
+ "@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n"
|
|
||||||
+ "@prefix owl: <http://www.w3.org/2002/07/owl#> .\n"
|
|
||||||
+ "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n"
|
|
||||||
+ "\n"
|
|
||||||
+ "<http://here.edu/subject>\n"
|
|
||||||
+ " <http://here.edu/predicate>\n"
|
|
||||||
+ " <http://here.edu/object> .\n";
|
|
||||||
private static final String CONSTRUCT_RESULT_RDFXML = "<rdf:RDF\n"
|
|
||||||
+ " xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n"
|
|
||||||
+ " xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n"
|
|
||||||
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n"
|
|
||||||
+ " xmlns:j.0=\"http://here.edu/\"\n"
|
|
||||||
+ " xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\">\n"
|
|
||||||
+ " <rdf:Description rdf:about=\"http://here.edu/subject\">\n"
|
|
||||||
+ " <j.0:predicate rdf:resource=\"http://here.edu/object\"/>\n"
|
|
||||||
+ " </rdf:Description>\n" + "</rdf:RDF>\n";
|
|
||||||
private static final String CONSTRUCT_RESULT_JSONLD = "["
|
|
||||||
+ "{\"@id\":\"http://here.edu/object\"},"
|
|
||||||
+ "{\"@id\":\"http://here.edu/subject\",\"http://here.edu/predicate\":[{\"@id\":\"http://here.edu/object\"}]}"
|
|
||||||
+ "]";
|
|
||||||
private static final String ACCEPTABLE_FOR_CONSTRUCT = "For CONSTRUCT queries, Accept header must be one of "
|
|
||||||
+ "'text/plain', 'application/rdf+xml', 'text/n3', 'text/turtle', or 'application/json'";
|
|
||||||
|
|
||||||
private static final String DESCRIBE_ALL_QUERY = "DESCRIBE <http://here.edu/subject>";
|
|
||||||
private static final String DESCRIBE_RESULT_TEXT = "<http://here.edu/subject> "
|
|
||||||
+ "<http://here.edu/predicate> <http://here.edu/object> .\n";
|
|
||||||
private static final String DESCRIBE_RESULT_RDFXML = "<rdf:RDF\n"
|
|
||||||
+ " xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n"
|
|
||||||
+ " xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n"
|
|
||||||
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n"
|
|
||||||
+ " xmlns:j.0=\"http://here.edu/\"\n"
|
|
||||||
+ " xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\">\n"
|
|
||||||
+ " <rdf:Description rdf:about=\"http://here.edu/subject\">\n"
|
|
||||||
+ " <j.0:predicate rdf:resource=\"http://here.edu/object\"/>\n"
|
|
||||||
+ " </rdf:Description>\n" + "</rdf:RDF>\n";
|
|
||||||
private static final String DESCRIBE_RESULT_N3 = "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n"
|
|
||||||
+ "@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n"
|
|
||||||
+ "@prefix owl: <http://www.w3.org/2002/07/owl#> .\n"
|
|
||||||
+ "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n"
|
|
||||||
+ "\n"
|
|
||||||
+ "<http://here.edu/subject>\n"
|
|
||||||
+ " <http://here.edu/predicate>\n"
|
|
||||||
+ " <http://here.edu/object> .\n";
|
|
||||||
private static final String DESCRIBE_RESULT_TURTLE = "<rdf:RDF\n"
|
|
||||||
+ " xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n"
|
|
||||||
+ " xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n"
|
|
||||||
+ " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n"
|
|
||||||
+ " xmlns:j.0=\"http://here.edu/\"\n"
|
|
||||||
+ " xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\">\n"
|
|
||||||
+ " <rdf:Description rdf:about=\"http://here.edu/subject\">\n"
|
|
||||||
+ " <j.0:predicate rdf:resource=\"http://here.edu/object\"/>\n"
|
|
||||||
+ " </rdf:Description>\n" + "</rdf:RDF>\n";
|
|
||||||
private static final String DESCRIBE_RESULT_JSONLD = "["
|
|
||||||
+ "{\"@id\":\"http://here.edu/object\"},"
|
|
||||||
+ "{\"@id\":\"http://here.edu/subject\",\"http://here.edu/predicate\":[{\"@id\":\"http://here.edu/object\"}]}"
|
|
||||||
+ "]";
|
|
||||||
private static final String ACCEPTABLE_FOR_DESCRIBE = "For DESCRIBE queries, Accept header must be one of "
|
|
||||||
+ "'text/plain', 'application/rdf+xml', 'text/n3', 'text/turtle', or 'application/json'";
|
|
||||||
|
|
||||||
private static final String SERVLET_PATH_INFO = "/admin/sparqlquery";
|
|
||||||
private static final String REDIRECT_TO_LOGIN_URL = "/authenticate?";
|
|
||||||
private static final String REDIRECT_TO_HOME_URL = "";
|
|
||||||
|
|
||||||
public static final String KILROY_URI = "http://here.edu/kilroyUser";
|
|
||||||
public static final String KILROY_EMAIL = "kilroy_email";
|
|
||||||
public static final String KILROY_PASSWORD = "kilroy_password";
|
|
||||||
public static final UserAccount KILROY = createUserAccount(KILROY_URI,
|
|
||||||
KILROY_EMAIL, KILROY_PASSWORD);
|
|
||||||
|
|
||||||
public static final String BONZO_URI = "http://here.edu/bonzoUser";
|
|
||||||
public static final String BONZO_EMAIL = "bonzo_email";
|
|
||||||
public static final String BONZO_PASSWORD = "bonzo_password";
|
|
||||||
public static final UserAccount BONZO = createUserAccount(BONZO_URI,
|
|
||||||
BONZO_EMAIL, BONZO_PASSWORD);
|
|
||||||
|
|
||||||
private static UserAccount createUserAccount(String uri, String name,
|
|
||||||
String password) {
|
|
||||||
UserAccount user = new UserAccount();
|
|
||||||
user.setEmailAddress(name);
|
|
||||||
user.setUri(uri);
|
|
||||||
user.setMd5Password(Authenticator.applyMd5Encoding(password));
|
|
||||||
user.setLoginCount(10);
|
|
||||||
user.setPasswordChangeRequired(false);
|
|
||||||
return user;
|
|
||||||
}
|
|
||||||
|
|
||||||
private ServletContextStub ctx;
|
|
||||||
private ServletConfigStub servletConfig;
|
|
||||||
private HttpSessionStub session;
|
|
||||||
private SparqlQueryServlet servlet;
|
|
||||||
private HttpServletResponseStub resp;
|
|
||||||
private HttpServletRequestStub req;
|
|
||||||
|
|
||||||
private OntModel model;
|
|
||||||
|
|
||||||
@Before
|
|
||||||
public void setup() throws ServletException, MalformedURLException {
|
|
||||||
ctx = new ServletContextStub();
|
|
||||||
|
|
||||||
servletConfig = new ServletConfigStub();
|
|
||||||
servletConfig.setServletContext(ctx);
|
|
||||||
|
|
||||||
servlet = new SparqlQueryServlet();
|
|
||||||
servlet.init(servletConfig);
|
|
||||||
|
|
||||||
session = new HttpSessionStub();
|
|
||||||
session.setServletContext(ctx);
|
|
||||||
|
|
||||||
resp = new HttpServletResponseStub();
|
|
||||||
|
|
||||||
req = new HttpServletRequestStub();
|
|
||||||
req.setSession(session);
|
|
||||||
req.setRequestUrl(new URL(BASE_URI + SERVLET_PATH_INFO));
|
|
||||||
|
|
||||||
I18nStub.setup();
|
|
||||||
|
|
||||||
initializePolicyList(new AuthorizeEveryone());
|
|
||||||
|
|
||||||
model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
|
|
||||||
model.read(new StringReader(MODEL_CONTENTS_N3), BASE_URI, "N3");
|
|
||||||
ModelAccess.on(ctx).setJenaOntModel(model);
|
|
||||||
RDFServiceUtils.setRDFServiceFactory(ctx, new RDFServiceFactorySingle(
|
|
||||||
new RDFServiceModel(model)));
|
|
||||||
}
|
|
||||||
|
|
||||||
// ----------------------------------------------------------------------
|
|
||||||
// invalid requests
|
|
||||||
// ----------------------------------------------------------------------
|
|
||||||
|
|
||||||
// Currently throws a NullPointerException
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void noQueryParameter() {
|
|
||||||
req.setHeader("Accept", "text/plain");
|
|
||||||
runTheServlet();
|
|
||||||
assertEquals("no query parameter", 400, resp.getStatus());
|
|
||||||
}
|
|
||||||
|
|
||||||
// ----------------------------------------------------------------------
|
|
||||||
// query-response tests by Accept header
|
|
||||||
// ----------------------------------------------------------------------
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void selectToTextByHeader() {
|
|
||||||
executeWithAcceptHeader("select to text by header", SELECT_ALL_QUERY,
|
|
||||||
"text/plain", SELECT_RESULT_TEXT);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void selectToCsvByHeader() {
|
|
||||||
executeWithAcceptHeader("select to csv by header", SELECT_ALL_QUERY,
|
|
||||||
"text/csv", SELECT_RESULT_CSV);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Not yet supported
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void selectToTsvByHeader() {
|
|
||||||
executeWithAcceptHeader("select to tsv by header", SELECT_ALL_QUERY,
|
|
||||||
"text/tab-separated-values", SELECT_RESULT_TSV);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Not yet supported
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void selectToXmlByHeader() {
|
|
||||||
executeWithAcceptHeader("select to xml by header", SELECT_ALL_QUERY,
|
|
||||||
"application/sparql-results+xml", SELECT_RESULT_XML);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Not yet supported
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void selectToJsonByHeader() {
|
|
||||||
executeWithAcceptHeader("select to json by header", SELECT_ALL_QUERY,
|
|
||||||
"application/sparql-results+json", SELECT_RESULT_JSON);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Currently throws a null pointer exception
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void selectWithInvalidAcceptHeader() {
|
|
||||||
executeWithInvalidAcceptHeader("select with application/rdf+xml",
|
|
||||||
SELECT_ALL_QUERY, "application/rdf+xml", ACCEPTABLE_FOR_SELECT);
|
|
||||||
executeWithInvalidAcceptHeader("select with text/n3", SELECT_ALL_QUERY,
|
|
||||||
"text/n3", ACCEPTABLE_FOR_SELECT);
|
|
||||||
executeWithInvalidAcceptHeader("select with text/turtle",
|
|
||||||
SELECT_ALL_QUERY, "text/turtle", ACCEPTABLE_FOR_SELECT);
|
|
||||||
executeWithInvalidAcceptHeader("select with application/json",
|
|
||||||
SELECT_ALL_QUERY, "application/json", ACCEPTABLE_FOR_SELECT);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ----------------------------------------------------------------------
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void askToTextByHeader() {
|
|
||||||
executeWithAcceptHeader("ask to text by header", ASK_ALL_QUERY,
|
|
||||||
"text/plain", ASK_RESULT_TEXT);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void askToCsvByHeader() {
|
|
||||||
executeWithAcceptHeader("ask to csv by header", ASK_ALL_QUERY,
|
|
||||||
"text/csv", ASK_RESULT_CSV);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void askToTsvByHeader() {
|
|
||||||
executeWithAcceptHeader("ask to tsv by header", ASK_ALL_QUERY,
|
|
||||||
"text/tab-separated-values", ASK_RESULT_TSV);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Not yet supported
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void askToXmlByHeader() {
|
|
||||||
executeWithAcceptHeader("ask to xml by header", ASK_ALL_QUERY,
|
|
||||||
"application/sparql-results+xml", ASK_RESULT_XML);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Not yet supported
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void askToJsonByHeader() {
|
|
||||||
executeWithAcceptHeader("ask to json by header", ASK_ALL_QUERY,
|
|
||||||
"application/sparql-results+json", ASK_RESULT_JSON);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Not yet supported
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void askWithInvalidAcceptHeader() {
|
|
||||||
executeWithInvalidAcceptHeader("ask with application/rdf+xml",
|
|
||||||
ASK_ALL_QUERY, "application/rdf+xml", ACCEPTABLE_FOR_ASK);
|
|
||||||
executeWithInvalidAcceptHeader("ask with text/n3", ASK_ALL_QUERY,
|
|
||||||
"text/n3", ACCEPTABLE_FOR_ASK);
|
|
||||||
executeWithInvalidAcceptHeader("ask with text/turtle", ASK_ALL_QUERY,
|
|
||||||
"text/turtle", ACCEPTABLE_FOR_ASK);
|
|
||||||
executeWithInvalidAcceptHeader("ask with application/json",
|
|
||||||
ASK_ALL_QUERY, "application/json", ACCEPTABLE_FOR_ASK);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ----------------------------------------------------------------------
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void constructToTextByHeader() {
|
|
||||||
executeWithAcceptHeader("construct to text by header",
|
|
||||||
CONSTRUCT_ALL_QUERY, "text/plain", CONSTRUCT_RESULT_TEXT);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void constructToRdfXmlByHeader() {
|
|
||||||
executeWithAcceptHeader("construct to rdf/xml by header",
|
|
||||||
CONSTRUCT_ALL_QUERY, "application/rdf+xml",
|
|
||||||
CONSTRUCT_RESULT_RDFXML);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void constructToN3ByHeader() {
|
|
||||||
executeWithAcceptHeader("construct to n3 by header",
|
|
||||||
CONSTRUCT_ALL_QUERY, "text/n3", CONSTRUCT_RESULT_N3);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void constructToTurtleByHeader() {
|
|
||||||
executeWithAcceptHeader("construct to turtle by header",
|
|
||||||
CONSTRUCT_ALL_QUERY, "text/turtle", CONSTRUCT_RESULT_TURTLE);
|
|
||||||
}
|
|
||||||
|
|
||||||
// The servlet only recognizes "application/javascript", which is incorrect.
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void constructToJsonldByHeader() {
|
|
||||||
executeWithAcceptHeader("construct to JSON-LD by header",
|
|
||||||
CONSTRUCT_ALL_QUERY, "application/json",
|
|
||||||
CONSTRUCT_RESULT_JSONLD);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Currently throws a null pointer exception
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void constructWithInvalidAcceptHeader() {
|
|
||||||
executeWithInvalidAcceptHeader("construct with text/csv",
|
|
||||||
CONSTRUCT_ALL_QUERY, "text/csv", ACCEPTABLE_FOR_CONSTRUCT);
|
|
||||||
executeWithInvalidAcceptHeader("construct with text/tsv",
|
|
||||||
CONSTRUCT_ALL_QUERY, "text/tsv", ACCEPTABLE_FOR_CONSTRUCT);
|
|
||||||
executeWithInvalidAcceptHeader(
|
|
||||||
"construct with application/sparql-results+xml",
|
|
||||||
CONSTRUCT_ALL_QUERY, "application/sparql-results+xml",
|
|
||||||
ACCEPTABLE_FOR_CONSTRUCT);
|
|
||||||
executeWithInvalidAcceptHeader(
|
|
||||||
"construct with application/sparql-results+json",
|
|
||||||
CONSTRUCT_ALL_QUERY, "application/sparql-results+json",
|
|
||||||
ACCEPTABLE_FOR_CONSTRUCT);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ----------------------------------------------------------------------
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void describeToTextByHeader() {
|
|
||||||
executeWithAcceptHeader("describe to text by header",
|
|
||||||
DESCRIBE_ALL_QUERY, "text/plain", DESCRIBE_RESULT_TEXT);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void describeToRdfXmlByHeader() {
|
|
||||||
executeWithAcceptHeader("describe to rdf/xml by header",
|
|
||||||
DESCRIBE_ALL_QUERY, "application/rdf+xml",
|
|
||||||
DESCRIBE_RESULT_RDFXML);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void describeToN3ByHeader() {
|
|
||||||
executeWithAcceptHeader("describe to n3 by header", DESCRIBE_ALL_QUERY,
|
|
||||||
"text/n3", DESCRIBE_RESULT_N3);
|
|
||||||
}
|
|
||||||
|
|
||||||
@Test
|
|
||||||
public void describeToTurtleByHeader() {
|
|
||||||
executeWithAcceptHeader("describe to turtle by header",
|
|
||||||
DESCRIBE_ALL_QUERY, "text/turtle", DESCRIBE_RESULT_TURTLE);
|
|
||||||
}
|
|
||||||
|
|
||||||
// The servlet only recognizes "application/javascript", which is incorrect.
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void describeToJsonldByHeader() {
|
|
||||||
executeWithAcceptHeader("describe to JSON-LD by header",
|
|
||||||
DESCRIBE_ALL_QUERY, "application/json", DESCRIBE_RESULT_JSONLD);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Currently throws a null pointer exception
|
|
||||||
@Ignore
|
|
||||||
@Test
|
|
||||||
public void describeWithInvalidAcceptHeader() {
|
|
||||||
executeWithInvalidAcceptHeader("describe with text/csv",
|
|
||||||
DESCRIBE_ALL_QUERY, "text/csv", ACCEPTABLE_FOR_DESCRIBE);
|
|
||||||
executeWithInvalidAcceptHeader("describe with text/tsv",
|
|
||||||
DESCRIBE_ALL_QUERY, "text/tsv", ACCEPTABLE_FOR_DESCRIBE);
|
|
||||||
executeWithInvalidAcceptHeader(
|
|
||||||
"describe with application/sparql-results+xml",
|
|
||||||
DESCRIBE_ALL_QUERY, "application/sparql-results+xml",
|
|
||||||
ACCEPTABLE_FOR_DESCRIBE);
|
|
||||||
executeWithInvalidAcceptHeader(
|
|
||||||
"describe with application/sparql-results+json",
|
|
||||||
DESCRIBE_ALL_QUERY, "application/sparql-results+json",
|
|
||||||
ACCEPTABLE_FOR_DESCRIBE);
|
|
||||||
}
|
|
||||||
|
|
||||||
// ----------------------------------------------------------------------
// query-response tests by format parameter
// ----------------------------------------------------------------------

@Test
public void selectToTextByResultFormat() {
    executeWithResultFormat("select to text by result format",
            SELECT_ALL_QUERY, "RS_TEXT", SELECT_RESULT_TEXT);
}

@Test
public void selectToCsvByResultFormat() {
    executeWithResultFormat("select to csv by result format",
            SELECT_ALL_QUERY, "vitro:csv", SELECT_RESULT_CSV);
}

// Not yet supported
@Ignore
@Test
public void selectToTsvByResultFormat() {
    executeWithResultFormat("select to tsv by result format",
            SELECT_ALL_QUERY, "text/tab-separated-values",
            SELECT_RESULT_TSV);
}

// Not yet supported
@Ignore
@Test
public void selectToXmlByResultFormat() {
    executeWithResultFormat("select to xml by result format",
            SELECT_ALL_QUERY, "application/sparql-results+xml",
            SELECT_RESULT_XML);
}

// Not yet supported
@Ignore
@Test
public void selectToJsonByResultFormat() {
    executeWithResultFormat("select to json by result format",
            SELECT_ALL_QUERY, "application/sparql-results+json",
            SELECT_RESULT_JSON);
}

// Currently throws a null pointer exception
@Ignore
@Test
public void selectWithInvalidResultFormat() {
    executeWithInvalidResultFormat("select with N-TRIPLE",
            SELECT_ALL_QUERY, "N-TRIPLE", ACCEPTABLE_FOR_SELECT);
    executeWithInvalidResultFormat("select with RDF/XML", SELECT_ALL_QUERY,
            "RDF/XML", ACCEPTABLE_FOR_SELECT);
    executeWithInvalidResultFormat("select with N3", SELECT_ALL_QUERY,
            "N3", ACCEPTABLE_FOR_SELECT);
    executeWithInvalidResultFormat("select with TTL", SELECT_ALL_QUERY,
            "TTL", ACCEPTABLE_FOR_SELECT);
    executeWithInvalidResultFormat("select with JSON-LD", SELECT_ALL_QUERY,
            "JSON-LD", ACCEPTABLE_FOR_SELECT);
}

// ----------------------------------------------------------------------

@Test
public void askToTextByResultFormat() {
    executeWithResultFormat("ask to text by result format", ASK_ALL_QUERY,
            "RS_TEXT", ASK_RESULT_TEXT);
}

@Test
public void askToCsvByResultFormat() {
    executeWithResultFormat("ask to csv by result format", ASK_ALL_QUERY,
            "vitro:csv", ASK_RESULT_CSV);
}

@Test
public void askToTsvByResultFormat() {
    executeWithResultFormat("ask to tsv by result format", ASK_ALL_QUERY,
            "text/tab-separated-values", ASK_RESULT_TSV);
}

// Not yet supported
@Ignore
@Test
public void askToXmlByResultFormat() {
    executeWithResultFormat("ask to xml by result format", ASK_ALL_QUERY,
            "application/sparql-results+xml", ASK_RESULT_XML);
}

// Not yet supported
@Ignore
@Test
public void askToJsonByResultFormat() {
    executeWithResultFormat("ask to json by result format", ASK_ALL_QUERY,
            "application/sparql-results+json", ASK_RESULT_JSON);
}

// Not yet supported
@Ignore
@Test
public void askWithInvalidResultFormat() {
    executeWithInvalidResultFormat("ask with N-TRIPLE", ASK_ALL_QUERY,
            "N-TRIPLE", ACCEPTABLE_FOR_ASK);
    executeWithInvalidResultFormat("ask with RDF/XML", ASK_ALL_QUERY,
            "RDF/XML", ACCEPTABLE_FOR_ASK);
    executeWithInvalidResultFormat("ask with N3", ASK_ALL_QUERY, "N3",
            ACCEPTABLE_FOR_ASK);
    executeWithInvalidResultFormat("ask with TTL", ASK_ALL_QUERY, "TTL",
            ACCEPTABLE_FOR_ASK);
    executeWithInvalidResultFormat("ask with JSON-LD", ASK_ALL_QUERY,
            "JSON-LD", ACCEPTABLE_FOR_ASK);
}

// ----------------------------------------------------------------------

@Test
public void constructToTextByRdfResultFormat() {
    executeWithRdfResultFormat("construct to text by rdf result format",
            CONSTRUCT_ALL_QUERY, "N-TRIPLE", CONSTRUCT_RESULT_TEXT);
}

// Differs by white space?
@Ignore
@Test
public void constructToRdfXmlByRdfResultFormat() {
    executeWithRdfResultFormat("construct to rdf/xml by rdf result format",
            CONSTRUCT_ALL_QUERY, "RDF/XML", CONSTRUCT_RESULT_RDFXML);
}

@Test
public void constructToN3ByRdfResultFormat() {
    executeWithRdfResultFormat("construct to n3 by rdf result format",
            CONSTRUCT_ALL_QUERY, "N3", CONSTRUCT_RESULT_N3);
}

// Either this or constructToTurtleByHeader is wrong.
@Ignore
@Test
public void constructToTurtleByRdfResultFormat() {
    executeWithRdfResultFormat("construct to turtle by rdf result format",
            CONSTRUCT_ALL_QUERY, "TTL", CONSTRUCT_RESULT_TURTLE);
}

@Test
public void constructToJsonldByRdfResultFormat() {
    executeWithRdfResultFormat("construct to JSON-LD by rdf result format",
            CONSTRUCT_ALL_QUERY, "JSON-LD", CONSTRUCT_RESULT_JSONLD);
}

// Currently throws a null pointer exception
@Ignore
@Test
public void constructWithInvalidAcceptRdfResultFormat() {
    executeWithInvalidRdfResultFormat("construct with RS_TEXT",
            CONSTRUCT_ALL_QUERY, "RS_TEXT", ACCEPTABLE_FOR_CONSTRUCT);
    executeWithInvalidRdfResultFormat("construct with vitro:csv",
            CONSTRUCT_ALL_QUERY, "vitro:csv", ACCEPTABLE_FOR_CONSTRUCT);
    executeWithInvalidRdfResultFormat("construct with text/tsv",
            CONSTRUCT_ALL_QUERY, "text/tsv", ACCEPTABLE_FOR_CONSTRUCT);
    executeWithInvalidRdfResultFormat(
            "construct with application/sparql-results+xml",
            CONSTRUCT_ALL_QUERY, "application/sparql-results+xml",
            ACCEPTABLE_FOR_CONSTRUCT);
    executeWithInvalidRdfResultFormat(
            "construct with application/sparql-results+json",
            CONSTRUCT_ALL_QUERY, "application/sparql-results+json",
            ACCEPTABLE_FOR_CONSTRUCT);
}

// ----------------------------------------------------------------------

@Test
public void describeToTextByRdfResultFormat() {
    executeWithRdfResultFormat("describe to text by rdf result format",
            DESCRIBE_ALL_QUERY, "N-TRIPLE", DESCRIBE_RESULT_TEXT);
}

// Differs by white space?
@Ignore
@Test
public void describeToRdfXmlByRdfResultFormat() {
    executeWithRdfResultFormat("describe to rdf/xml by rdf result format",
            DESCRIBE_ALL_QUERY, "RDF/XML", DESCRIBE_RESULT_RDFXML);
}

@Test
public void describeToN3ByRdfResultFormat() {
    executeWithRdfResultFormat("describe to n3 by rdf result format",
            DESCRIBE_ALL_QUERY, "N3", DESCRIBE_RESULT_N3);
}

// Either this or describeToTurtleByHeader is wrong.
@Ignore
@Test
public void describeToTurtleByRdfResultFormat() {
    executeWithRdfResultFormat("describe to turtle by rdf result format",
            DESCRIBE_ALL_QUERY, "TTL", DESCRIBE_RESULT_TURTLE);
}

@Test
public void describeToJsonldByRdfResultFormat() {
    executeWithRdfResultFormat("describe to JSON-LD by rdf result format",
            DESCRIBE_ALL_QUERY, "JSON-LD", DESCRIBE_RESULT_JSONLD);
}

// Currently throws a null pointer exception
@Ignore
@Test
public void describeWithInvalidAcceptRdfResultFormat() {
    executeWithInvalidRdfResultFormat("describe with RS_TEXT",
            DESCRIBE_ALL_QUERY, "RS_TEXT", ACCEPTABLE_FOR_DESCRIBE);
    executeWithInvalidRdfResultFormat("describe with vitro:csv",
            DESCRIBE_ALL_QUERY, "vitro:csv", ACCEPTABLE_FOR_DESCRIBE);
    executeWithInvalidRdfResultFormat("describe with text/tsv",
            DESCRIBE_ALL_QUERY, "text/tsv", ACCEPTABLE_FOR_DESCRIBE);
    executeWithInvalidRdfResultFormat(
            "describe with application/sparql-results+xml",
            DESCRIBE_ALL_QUERY, "application/sparql-results+xml",
            ACCEPTABLE_FOR_DESCRIBE);
    executeWithInvalidRdfResultFormat(
            "describe with application/sparql-results+json",
            DESCRIBE_ALL_QUERY, "application/sparql-results+json",
            ACCEPTABLE_FOR_DESCRIBE);
}

// ----------------------------------------------------------------------
// Authentication tests
// ----------------------------------------------------------------------

@Test
public void authNoParmsNotLoggedIn() {
    initializeForAuthTest();
    executeToRedirect("invalid parms", SELECT_ALL_QUERY,
            REDIRECT_TO_LOGIN_URL);
}

@Test
public void authNoParmsNotLoggedInEnough() {
    initializeForAuthTest();
    setLoggedInUser(BONZO_URI);
    executeToRedirect("redirect to home", SELECT_ALL_QUERY,
            REDIRECT_TO_HOME_URL);
}

@Test
public void authNoParmsLoggedIn() {
    initializeForAuthTest();
    setLoggedInUser(KILROY_URI);
    executeWithAcceptHeader("logged in properly", SELECT_ALL_QUERY,
            "text/plain", SELECT_RESULT_TEXT);
}

@Test
public void authInvalidParms() {
    initializeForAuthTest();
    setParms(KILROY_EMAIL, "bogus_password");
    executeToRedirect("invalid parms", SELECT_ALL_QUERY,
            REDIRECT_TO_LOGIN_URL);
}

@Test
public void authWrongValidParms() {
    initializeForAuthTest();
    setParms(BONZO_EMAIL, BONZO_PASSWORD);
    executeToRedirect("logged in to wrong user", SELECT_ALL_QUERY,
            REDIRECT_TO_LOGIN_URL);
}

@Test
public void authCorrectParms() {
    initializeForAuthTest();
    setParms(KILROY_EMAIL, KILROY_PASSWORD);
    executeWithAcceptHeader("logged in properly", SELECT_ALL_QUERY,
            "text/plain", SELECT_RESULT_TEXT);
}

// ----------------------------------------------------------------------
// Helper methods
// ----------------------------------------------------------------------

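// NOTE (added commentary, not part of the commit): the helpers below drive
// the servlet through test fixtures. The req, resp, ctx, session, and
// servlet fields are assumed to be the request/response/context/session
// stubs and the servlet under test created in this class's setup code,
// which is not shown in this hunk; methods such as req.addParameter(),
// req.setHeader(), resp.getOutput(), and resp.getRedirectLocation() belong
// to those stubs rather than to the standard servlet API.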
private void initializePolicyList(PolicyIface policy) {
    PolicyList policyList = new PolicyList();
    policyList.add(policy);
    ctx.setAttribute(ServletPolicyList.class.getName(), policyList);
}

private void executeWithAcceptHeader(String message, String query,
        String acceptHeader, String expectedResult) {
    req.addParameter("query", query);
    req.setHeader("Accept", acceptHeader);
    runAndCheckResult(message, expectedResult);
}

private void executeWithResultFormat(String message, String query,
        String resultFormat, String expectedResult) {
    req.addParameter("query", query);
    req.addParameter("resultFormat", resultFormat);
    runAndCheckResult(message, expectedResult);
}

private void executeWithRdfResultFormat(String message, String query,
        String rdfResultFormat, String expectedResult) {
    req.addParameter("query", query);
    req.addParameter("rdfResultFormat", rdfResultFormat);
    runAndCheckResult(message, expectedResult);
}

private void runAndCheckResult(String message, String expectedOutput) {
    runTheServlet();
    assertNormalResponse();
    assertEquals(message, expectedOutput, resp.getOutput());
}

private void executeToRedirect(String message, String query,
        String redirectUrl) {
    req.addParameter("query", query);
    runTheServlet();
    assertResponseIsRedirect(message, redirectUrl);
}

private void assertNormalResponse() {
    int status = resp.getStatus();
    String redirect = resp.getRedirectLocation();
    if ((status != 200) || (redirect != null)) {
        fail("Not a normal response, status=" + status + ", redirect="
                + redirect);
    }
}

private void assertResponseIsRedirect(String message, String redirectUrl) {
    assertEquals(message + " status", 200, resp.getStatus());

    String redirect = resp.getRedirectLocation();
    if (!redirect.startsWith(redirectUrl)) {
        fail(message + ", expected redirect to start with '" + redirectUrl
                + "', but redirect was '" + redirect + "'");
    }

    assertEquals(message + " output", "", resp.getOutput());
}

private void runTheServlet() {
    try {
        servlet.doGet(req, resp);
    } catch (ServletException | IOException e) {
        throw new RuntimeException(e);
    }
}

private void executeWithInvalidAcceptHeader(String message, String query,
        String acceptHeader, String expectedOutput) {
    req.addParameter("query", query);
    req.setHeader("Accept", acceptHeader);
    runTheServlet();
    assertEquals(message + " - status", 406, resp.getStatus());
    assertEquals(message + " - output", expectedOutput, resp.getOutput());
}

private void executeWithInvalidResultFormat(String message, String query,
        String resultFormat, String expectedOutput) {
    req.addParameter("query", query);
    req.addParameter("resultFormat", resultFormat);
    runTheServlet();
    assertEquals(message + " - status", 406, resp.getStatus());
    assertEquals(message + " - output", expectedOutput, resp.getOutput());
}

private void executeWithInvalidRdfResultFormat(String message,
        String query, String rdfResultFormat, String expectedOutput) {
    req.addParameter("query", query);
    req.addParameter("rdfResultFormat", rdfResultFormat);
    runTheServlet();
    assertEquals(message + " - status", 406, resp.getStatus());
    assertEquals(message + " - output", expectedOutput, resp.getOutput());
}

private void setLoggedInUser(String userUri) {
    LoginStatusBean.setBean(session, new LoginStatusBean(userUri,
            AuthenticationSource.INTERNAL));
}

private void setParms(String email, String password) {
    req.addParameter("email", email);
    req.addParameter("password", password);
}

private void initializeForAuthTest() {
    initializePolicyList(new AuthorizeKilroyOnly());
    initializeAuthenticator();
    initializeIdentifierBundleFactories();
}

private void initializeIdentifierBundleFactories() {
    ActiveIdentifierBundleFactories.addFactory(ctx,
            new UserUriIdentifierBundleFactory());
}

private void initializeAuthenticator() {
    Factory factory = new AuthenticatorStub.Factory();
    AuthenticatorStub auth = factory.getInstance(req);
    auth.addUser(KILROY);
    auth.addUser(BONZO);
    ctx.setAttribute(AuthenticatorStub.FACTORY_ATTRIBUTE_NAME, factory);
}

// ----------------------------------------------------------------------
// Helper classes
// ----------------------------------------------------------------------

private static class AuthorizeEveryone implements PolicyIface {
    @Override
    public PolicyDecision isAuthorized(IdentifierBundle whoToAuth,
            RequestedAction whatToAuth) {
        return new BasicPolicyDecision(Authorization.AUTHORIZED,
                "Everybody is a winner");
    }
}

private static class AuthorizeKilroyOnly implements PolicyIface {
    @Override
    public PolicyDecision isAuthorized(IdentifierBundle whoToAuth,
            RequestedAction whatToAuth) {
        for (Identifier id : whoToAuth) {
            if (id instanceof UserUriIdentifier) {
                UserUriIdentifier uuId = (UserUriIdentifier) id;
                if (uuId.userUri.equals(KILROY_URI)) {
                    return new BasicPolicyDecision(
                            Authorization.AUTHORIZED, "Kilroy is a winner");
                }
            }
        }
        return new BasicPolicyDecision(Authorization.INCONCLUSIVE,
                "Everybody else is a loser");
    }
}

private static class UserUriIdentifier implements Identifier {
    public final String userUri;

    public UserUriIdentifier(String userUri) {
        this.userUri = userUri;
    }
}

private static class UserUriIdentifierBundleFactory implements
        UserBasedIdentifierBundleFactory {
    @Override
    public IdentifierBundle getIdentifierBundle(HttpServletRequest req) {
        LoginStatusBean lsb = LoginStatusBean.getBean(req);
        return getIdentifierBundle(lsb.getUserURI());
    }

    @Override
    public IdentifierBundle getIdentifierBundleForUser(UserAccount user) {
        return getIdentifierBundle(user.getUri());
    }

    private IdentifierBundle getIdentifierBundle(String userUri) {
        IdentifierBundle bundle = new ArrayIdentifierBundle();
        if (!userUri.isEmpty()) {
            bundle.add(new UserUriIdentifier(userUri));
        }
        return bundle;
    }
}
}

@@ -1026,7 +1026,7 @@

     <servlet>
         <servlet-name>SparqlQuery</servlet-name>
-        <servlet-class>edu.cornell.mannlib.vitro.webapp.controller.SparqlQueryServlet</servlet-class>
+        <servlet-class>edu.cornell.mannlib.vitro.webapp.controller.admin.SparqlQueryController</servlet-class>
     </servlet>

     <servlet-mapping>

@@ -1,77 +0,0 @@
<%-- $This file is distributed under the terms of the license in /doc/license.txt$ --%>

<%@page import="com.hp.hpl.jena.rdf.model.ModelMaker"%>
<%@page import="java.util.Iterator"%>
<%@page import="java.util.ArrayList"%>
<%@page import="java.util.List"%>

<%@taglib prefix="vitro" uri="/WEB-INF/tlds/VitroUtils.tld" %>
<%@page import="edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission" %>
<% request.setAttribute("requestedActions", SimplePermission.USE_SPARQL_QUERY_PAGE.ACTION); %>
<vitro:confirmAuthorization />

<body>
<div id="content" class="sparqlform">
<h2>SPARQL Query</h2>
<form action='sparqlquery' method="get">
<h3>Query:</h3>
<div>
<textarea name='query' rows ='30' cols='100' class="span-23 maxWidth">
PREFIX rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#>
PREFIX rdfs: <http://www.w3.org/2000/01/rdf-schema#>
PREFIX xsd: <http://www.w3.org/2001/XMLSchema#>
PREFIX owl: <http://www.w3.org/2002/07/owl#>
PREFIX swrl: <http://www.w3.org/2003/11/swrl#>
PREFIX swrlb: <http://www.w3.org/2003/11/swrlb#>
PREFIX vitro: <http://vitro.mannlib.cornell.edu/ns/vitro/0.7#><%List prefixes = (List)request.getAttribute("prefixList");
    if(prefixes != null){
        Iterator prefixItr = prefixes.iterator();
        Integer count = 0;
        while (prefixItr.hasNext()){
            String prefixText = (String) prefixItr.next();
            if(prefixText.equals("(not yet specified)")){
                count++;
                prefixText = "p." + count.toString();
            }
            String urlText = (String) prefixItr.next();%>
PREFIX <%=prefixText%>: <<%=urlText%>><%}}%>

#
# This example query gets 20 geographic locations
# and (if available) their labels
#
SELECT ?geoLocation ?label
WHERE
{
?geoLocation rdf:type vivo:GeographicLocation
OPTIONAL { ?geoLocation rdfs:label ?label }
}
LIMIT 20
</textarea>
</div>

<div>
<h3>Format for SELECT query results:</h3>

<input id='RS_XML_BUTTON' type='radio' name='resultFormat' value='RS_XML'> <label for='RS_XML_BUTTON'>RS_XML</label>
<input id='RS_TEXT_BUTTON' type='radio' name='resultFormat' value='RS_TEXT' checked='checked'> <label for='RS_TEXT_BUTTON'>RS_TEXT</label>
<input id='RS_CSV_BUTTON' type='radio' name='resultFormat' value='vitro:csv'> <label for='RS_CSV_BUTTON'>CSV</label>
<input id='RS_JSON_BUTTON' type='radio' name='resultFormat' value='RS_JSON'> <label for='RS_JSON_BUTTON'>RS_JSON</label>
<input id='RS_RDF_BUTTON' type='radio' name='resultFormat' value='RS_RDF'> <label for='RS_RDF_BUTTON'>RS_RDF</label>
</div>

<div>
<h3>Format for CONSTRUCT and DESCRIBE query results:</h3>
<input id='RR_RDFXML_BUTTON' type='radio' name='rdfResultFormat' value='RDF/XML'> <label for='RR_RDFXML_BUTTON'>RDF/XML</label>
<input id='RR_RDFXMLABBREV_BUTTON' type='radio' name='rdfResultFormat' value='RDF/XML-ABBREV' checked='checked'> <label for='RR_RDFXMLABBREV_BUTTON'>RDF/XML-ABBREV</label>
<input id='RR_N3_BUTTON' type='radio' name='rdfResultFormat' value='N3'> <label for='RR_N3_BUTTON'>N3</label>
<input id='RR_NTRIPLE_BUTTON' type='radio' name='rdfResultFormat' value='N-TRIPLE'> <label for='RR_NTRIPLE_BUTTON'>N-Triples</label>
<input id='RR_TURTLE_BUTTON' type='radio' name='rdfResultFormat' value='TTL'> <label for='RR_TURTLE_BUTTON'>Turtle</label>
<input id='RR_JSON_LD_BUTTON' type='radio' name='rdfResultFormat' value='JSON-LD'> <label for='RR_JSON_LD_BUTTON'>JSON-LD</label>
</div>

<input class="submit" type="submit" value="Run Query" />
</form>
</div><!-- content -->
</body></html>

@@ -0,0 +1,35 @@
<#-- $This file is distributed under the terms of the license in /doc/license.txt$ -->

<#-- Template that presents the SPARQL query form. -->

<div id="content" class="sparqlform">
<h2>SPARQL Query</h2>
<form action='${submitUrl}' method="get">
<h3>Query:</h3>
<div>
<textarea name='query' rows ='30' cols='100' class="span-23 maxWidth">
${sampleQuery}
</textarea>
</div>

<div>
<h3>Format for SELECT and ASK query results:</h3>
<label><input type='radio' name='resultFormat' value='text/plain' checked>RS_TEXT</label>
<label><input type='radio' name='resultFormat' value='text/csv'>CSV</label>
<label><input type='radio' name='resultFormat' value='text/tab-separated-values'>TSV</label>
<label><input type='radio' name='resultFormat' value='application/sparql-results+xml'>RS_XML</label>
<label><input type='radio' name='resultFormat' value='application/sparql-results+json'>RS_JSON</label>
</div>

<div>
<h3>Format for CONSTRUCT and DESCRIBE query results:</h3>
<label><input type='radio' name='rdfResultFormat' value='text/plain'>N-Triples</label>
<label><input type='radio' name='rdfResultFormat' value='application/rdf+xml' checked>RDF/XML</label>
<label><input type='radio' name='rdfResultFormat' value='text/n3'>N3</label>
<label><input type='radio' name='rdfResultFormat' value='text/turtle'>Turtle</label>
<label><input type='radio' name='rdfResultFormat' value='application/json'>JSON-LD</label>
</div>

<input class="submit" type="submit" value="Run Query" />
</form>
</div><!-- content -->
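
Because the form above submits a plain GET with a query parameter plus either resultFormat or rdfResultFormat, the endpoint can also be exercised outside the browser. The following is a minimal client sketch, not part of this commit: the endpoint URL is an assumption (substitute your VIVO context path and whatever pattern web.xml maps to the SparqlQuery servlet), and it presumes a user who is authorized to use the SPARQL query page, since the tests above show unauthorized requests being redirected to the login page.

import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;

// Sketch only: calls the SPARQL query page the same way the form does,
// i.e. a GET with "query" and "resultFormat" parameters.
public class SparqlQueryPageClient {
    public static void main(String[] args) throws Exception {
        // Assumed endpoint; use the servlet-mapping actually defined in web.xml.
        String endpoint = "http://localhost:8080/vivo/admin/sparqlquery";
        String query = "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10";

        // Same parameters the Freemarker form submits.
        String url = endpoint
                + "?query=" + URLEncoder.encode(query, "UTF-8")
                + "&resultFormat=" + URLEncoder.encode("text/csv", "UTF-8");

        HttpURLConnection conn = (HttpURLConnection) new URL(url).openConnection();
        conn.setRequestProperty("Accept", "text/csv"); // header and parameter agree

        // Copy the response body (the CSV result set) to standard output.
        try (InputStream in = conn.getInputStream()) {
            byte[] buffer = new byte[8192];
            int length;
            while ((length = in.read(buffer)) != -1) {
                System.out.write(buffer, 0, length);
            }
        }
        System.out.flush();
    }
}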