VIVO-731 Create SparqlQueryApiExecutor with tests.
parent 7b849ace9b
commit 0c0915ef65
11 changed files with 1064 additions and 0 deletions
@@ -0,0 +1,12 @@ InvalidQueryTypeException.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

/**
 * Indicates that the API can't process this type of query.
 */
public class InvalidQueryTypeException extends Exception {
    public InvalidQueryTypeException(String message) {
        super(message);
    }
}
@@ -0,0 +1,105 @@ RdfResultMediaType.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * The supported media types for SPARQL queries that return RDF (i.e., CONSTRUCT
 * and DESCRIBE).
 */
public enum RdfResultMediaType {
    TEXT("text/plain", true, "NTRIPLE", null),

    RDF_XML("application/rdf+xml", true, "RDFXML", null),

    N3("text/n3", true, "N3", null),

    TTL("text/turtle", false, "N3", "TTL"),

    JSON("application/json", false, "N3", "JSON");

    // ----------------------------------------------------------------------
    // Keep a map of content types, for easy conversion back and forth
    // ----------------------------------------------------------------------

    private final static Map<String, RdfResultMediaType> contentTypesMap = buildMap();

    private static Map<String, RdfResultMediaType> buildMap() {
        Map<String, RdfResultMediaType> map = new LinkedHashMap<>();
        for (RdfResultMediaType value : values()) {
            map.put(value.contentType, value);
        }
        return Collections.unmodifiableMap(map);
    }

    public static Collection<String> contentTypes() {
        return contentTypesMap.keySet();
    }

    public static RdfResultMediaType fromContentType(String contentType)
            throws IllegalArgumentException {
        RdfResultMediaType type = contentTypesMap.get(contentType);
        if (type == null) {
            throw new IllegalArgumentException(
                    "No RdfResultMediaType has contentType='" + contentType
                            + "'");
        } else {
            return type;
        }
    }

    // ----------------------------------------------------------------------
    // The instance
    // ----------------------------------------------------------------------

    /**
     * The MIME type as it would appear in an HTTP Accept or Content-Type
     * header.
     */
    private final String contentType;

    /**
     * Is this a format that is supported directly by the RDFService?
     */
    private final boolean nativeFormat;

    /**
     * What format shall we ask the RDFService to supply?
     */
    private final String serializationFormat;

    /**
     * What format shall we ask the resulting OntModel to write? (Applies only
     * to non-native formats)
     */
    private final String jenaResponseFormat;

    private RdfResultMediaType(String contentType, boolean nativeFormat,
            String serializationFormat, String jenaResponseFormat) {
        this.contentType = contentType;
        this.nativeFormat = nativeFormat;
        this.serializationFormat = serializationFormat;
        this.jenaResponseFormat = jenaResponseFormat;
    }

    public String getContentType() {
        return contentType;
    }

    public boolean isNativeFormat() {
        return nativeFormat;
    }

    public String getSerializationFormat() {
        return serializationFormat;
    }

    public String getJenaResponseFormat() {
        return jenaResponseFormat;
    }

}
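A minimal usage sketch (not part of this commit) of how the lookup methods behave; ResultSetMediaType below follows the same pattern. The content-type string here stands in for the winner of Accept-header negotiation.

    import edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery.RdfResultMediaType;

    public class RdfResultMediaTypeSketch {
        public static void main(String[] args) {
            // "text/turtle" maps to TTL, which the RDFService cannot produce directly:
            // the producer asks the RDFService for N3 and has Jena rewrite it as TTL.
            RdfResultMediaType type = RdfResultMediaType.fromContentType("text/turtle");
            System.out.println(type);                          // TTL
            System.out.println(type.isNativeFormat());         // false
            System.out.println(type.getSerializationFormat()); // N3
            System.out.println(type.getJenaResponseFormat());  // TTL

            // All supported content types, in declaration order, for negotiation.
            System.out.println(RdfResultMediaType.contentTypes());
        }
    }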
@@ -0,0 +1,105 @@ ResultSetMediaType.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.Map;

/**
 * The supported media types for SPARQL queries that return a Result Set (i.e.,
 * SELECT and ASK).
 */
public enum ResultSetMediaType {
    TEXT("text/plain", true, "TEXT", null),

    CSV("text/csv", true, "CSV", null),

    TSV("text/tab-separated-values", false, "CSV", "tsv"),

    XML("application/sparql-results+xml", true, "XML", null),

    JSON("application/sparql-results+json", true, "JSON", null);

    // ----------------------------------------------------------------------
    // Keep a map of content types, for easy conversion back and forth
    // ----------------------------------------------------------------------

    private final static Map<String, ResultSetMediaType> contentTypesMap = buildMap();

    private static Map<String, ResultSetMediaType> buildMap() {
        Map<String, ResultSetMediaType> map = new LinkedHashMap<>();
        for (ResultSetMediaType value : values()) {
            map.put(value.contentType, value);
        }
        return Collections.unmodifiableMap(map);
    }

    public static Collection<String> contentTypes() {
        return contentTypesMap.keySet();
    }

    public static ResultSetMediaType fromContentType(String contentType)
            throws IllegalArgumentException {
        ResultSetMediaType type = contentTypesMap.get(contentType);
        if (type == null) {
            throw new IllegalArgumentException(
                    "No ResultSetMediaType has contentType='" + contentType
                            + "'");
        } else {
            return type;
        }
    }

    // ----------------------------------------------------------------------
    // The instance
    // ----------------------------------------------------------------------

    /**
     * The MIME type as it would appear in an HTTP Accept or Content-Type
     * header.
     */
    private final String contentType;

    /**
     * Is this a format that is supported directly by the RDFService?
     */
    private final boolean nativeFormat;

    /**
     * What format shall we ask the RDFService to supply?
     */
    private final String rdfServiceFormat;

    /**
     * What format shall we ask the ResultSetFormatter to output? (Applies only
     * to non-native formats)
     */
    private final String jenaResponseFormat;

    private ResultSetMediaType(String contentType, boolean nativeFormat,
            String rdfServiceFormat, String jenaResponseFormat) {
        this.contentType = contentType;
        this.nativeFormat = nativeFormat;
        this.rdfServiceFormat = rdfServiceFormat;
        this.jenaResponseFormat = jenaResponseFormat;
    }

    public String getContentType() {
        return contentType;
    }

    public boolean isNativeFormat() {
        return nativeFormat;
    }

    public String getRdfServiceFormat() {
        return rdfServiceFormat;
    }

    public String getJenaResponseFormat() {
        return jenaResponseFormat;
    }

}
@@ -0,0 +1,52 @@ SparqlQueryApiAskExecutor.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import java.io.ByteArrayInputStream;
import java.io.InputStream;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

/**
 * Process ASK queries.
 */
public class SparqlQueryApiAskExecutor extends SparqlQueryApiResultSetProducer {
    public SparqlQueryApiAskExecutor(RDFService rdfService, String queryString,
            String acceptHeader) throws AcceptHeaderParsingException,
            NotAcceptableException {
        super(rdfService, queryString, acceptHeader);
    }

    /**
     * The RDFService returns a boolean from an ASK query, without regard to a
     * requested format.
     *
     * For TEXT, CSV and TSV, we can simply return the String value of the
     * boolean as an InputStream. For XML and JSON, however, the W3C
     * specifications require a more structured response.
     */
    @Override
    protected InputStream getRawResultStream() throws RDFServiceException {
        boolean queryResult = rdfService.sparqlAskQuery(queryString);

        String resultString;
        if (mediaType == ResultSetMediaType.XML) {
            resultString = String
                    .format("<?xml version=\"1.0\"?>\n" //
                            + "<sparql xmlns=\"http://www.w3.org/2005/sparql-results#\">\n" //
                            + " <head></head>\n" //
                            + " <boolean>%b</boolean>\n" //
                            + "</sparql>", queryResult);
        } else if (mediaType == ResultSetMediaType.JSON) {
            resultString = String.format(
                    "{\n \"head\" : { } ,\n \"boolean\" : %b\n}\n",
                    queryResult);
        } else {
            resultString = String.valueOf(queryResult);
        }
        return new ByteArrayInputStream(resultString.getBytes());
    }

}
@@ -0,0 +1,30 @@ SparqlQueryApiConstructExecutor.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import java.io.InputStream;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ModelSerializationFormat;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

/**
 * Process CONSTRUCT queries.
 */
public class SparqlQueryApiConstructExecutor extends SparqlQueryApiRdfProducer {

    public SparqlQueryApiConstructExecutor(RDFService rdfService,
            String queryString, String acceptHeader)
            throws AcceptHeaderParsingException, NotAcceptableException {
        super(rdfService, queryString, acceptHeader);
    }

    @Override
    protected InputStream getRawResultStream() throws RDFServiceException {
        ModelSerializationFormat format = ModelSerializationFormat
                .valueOf(mediaType.getSerializationFormat());
        return rdfService.sparqlConstructQuery(queryString, format);
    }
}
@@ -0,0 +1,31 @@ SparqlQueryApiDescribeExecutor.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import java.io.InputStream;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ModelSerializationFormat;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

/**
 * Process DESCRIBE queries.
 */
public class SparqlQueryApiDescribeExecutor extends SparqlQueryApiRdfProducer {

    public SparqlQueryApiDescribeExecutor(RDFService rdfService,
            String queryString, String acceptHeader)
            throws AcceptHeaderParsingException, NotAcceptableException {
        super(rdfService, queryString, acceptHeader);
    }

    @Override
    protected InputStream getRawResultStream() throws RDFServiceException {
        ModelSerializationFormat format = ModelSerializationFormat
                .valueOf(mediaType.getSerializationFormat());
        return rdfService.sparqlDescribeQuery(queryString, format);
    }

}
@@ -0,0 +1,87 @@ SparqlQueryApiExecutor.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import java.io.IOException;
import java.io.OutputStream;

import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryParseException;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

/**
 * The base class for the SPARQL query API.
 */
public abstract class SparqlQueryApiExecutor {
    /**
     * Get an instance that is appropriate to the query and the acceptable
     * types.
     *
     * @throws AcceptHeaderParsingException
     *             if the accept header was not in a valid format
     * @throws NotAcceptableException
     *             if the accept header did not contain a content type that is
     *             supported by the query
     * @throws QueryParseException
     *             if the query was not syntactically valid
     * @throws InvalidQueryTypeException
     *             if the query was not SELECT, ASK, CONSTRUCT, or DESCRIBE
     */
    public static SparqlQueryApiExecutor instance(RDFService rdfService,
            String queryString, String acceptHeader)
            throws NotAcceptableException, QueryParseException,
            InvalidQueryTypeException, AcceptHeaderParsingException {
        if (rdfService == null) {
            throw new NullPointerException("rdfService may not be null.");
        }
        if (queryString == null) {
            throw new NullPointerException("queryString may not be null.");
        }

        Query query = SparqlQueryUtils.create(queryString);

        if (query.isSelectType()) {
            return new SparqlQueryApiSelectExecutor(rdfService, queryString,
                    acceptHeader);
        } else if (query.isAskType()) {
            return new SparqlQueryApiAskExecutor(rdfService, queryString,
                    acceptHeader);
        } else if (query.isConstructType()) {
            return new SparqlQueryApiConstructExecutor(rdfService, queryString,
                    acceptHeader);
        } else if (query.isDescribeType()) {
            return new SparqlQueryApiDescribeExecutor(rdfService, queryString,
                    acceptHeader);
        } else {
            throw new InvalidQueryTypeException("The API only accepts SELECT, "
                    + "ASK, CONSTRUCT, or DESCRIBE queries: '" + queryString
                    + "'");
        }
    }

    protected final RDFService rdfService;
    protected final String queryString;

    protected SparqlQueryApiExecutor(RDFService rdfService, String queryString) {
        this.rdfService = rdfService;
        this.queryString = queryString;
    }

    /**
     * What media type was selected, based on the Accept header?
     */
    public abstract String getMediaType();

    /**
     * Execute the query and write it to the output stream, in the selected
     * format.
     */
    public abstract void executeAndFormat(OutputStream out)
            throws RDFServiceException, IOException;

}
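A minimal sketch (not part of this commit) of how a caller might drive the factory. In VIVO the RDFService, query string, and Accept header would come from the enclosing controller; here they are plain parameters so the sketch stays self-contained. Only instance(), getMediaType(), and executeAndFormat() come from the class above.

    import java.io.IOException;
    import java.io.OutputStream;

    import com.hp.hpl.jena.query.QueryParseException;

    import edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery.InvalidQueryTypeException;
    import edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery.SparqlQueryApiExecutor;
    import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
    import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
    import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
    import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

    public class SparqlQueryApiUsageSketch {
        /**
         * Run a query, write the formatted result to 'out', and return the
         * negotiated content type so the caller can set a response header.
         */
        public static String runQuery(RDFService rdfService, String queryString,
                String acceptHeader, OutputStream out)
                throws AcceptHeaderParsingException, NotAcceptableException,
                InvalidQueryTypeException, QueryParseException,
                RDFServiceException, IOException {
            // Pick the executor that matches the query type and the Accept header.
            SparqlQueryApiExecutor executor = SparqlQueryApiExecutor.instance(
                    rdfService, queryString, acceptHeader);

            // The executor has already negotiated the media type.
            String contentType = executor.getMediaType();

            // Execute and serialize in the negotiated format.
            executor.executeAndFormat(out);

            return contentType;
        }
    }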
@@ -0,0 +1,86 @@ SparqlQueryApiRdfProducer.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.Collection;

import org.apache.commons.io.IOUtils;

import com.github.jsonldjava.core.JSONLD;
import com.github.jsonldjava.core.JSONLDProcessingError;
import com.github.jsonldjava.impl.JenaRDFParser;
import com.github.jsonldjava.utils.JSONUtils;
import com.hp.hpl.jena.rdf.model.Model;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ModelSerializationFormat;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.ContentTypeUtil;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

/**
 * Base class for processing SPARQL queries that produce RDF: CONSTRUCT and
 * DESCRIBE.
 */
abstract class SparqlQueryApiRdfProducer extends SparqlQueryApiExecutor {
    protected final RdfResultMediaType mediaType;

    public SparqlQueryApiRdfProducer(RDFService rdfService, String queryString,
            String acceptHeader) throws AcceptHeaderParsingException,
            NotAcceptableException {
        super(rdfService, queryString);

        Collection<String> contentTypes = RdfResultMediaType.contentTypes();
        String bestType = ContentTypeUtil.bestContentType(acceptHeader,
                contentTypes);
        this.mediaType = RdfResultMediaType.fromContentType(bestType);
    }

    @Override
    public String getMediaType() {
        return mediaType.getContentType();
    }

    @Override
    public void executeAndFormat(OutputStream out) throws RDFServiceException,
            IOException {

        InputStream rawResult = getRawResultStream();

        if (mediaType.isNativeFormat()) {
            IOUtils.copy(rawResult, out);
        } else if (mediaType == RdfResultMediaType.JSON) {
            // JSON-LD is a special case, since jena 2.6.4 doesn't support it.
            try {
                JenaRDFParser parser = new JenaRDFParser();
                Object json = JSONLD.fromRDF(parseToModel(rawResult), parser);
                JSONUtils.write(new OutputStreamWriter(out, "UTF-8"), json);
            } catch (JSONLDProcessingError e) {
                throw new RDFServiceException(
                        "Could not convert from Jena model to JSON-LD", e);
            }
        } else {
            parseToModel(rawResult).write(out,
                    mediaType.getJenaResponseFormat());
        }
    }

    private Model parseToModel(InputStream rawResult) {
        ModelSerializationFormat format = ModelSerializationFormat
                .valueOf(mediaType.getSerializationFormat());
        return RDFServiceUtils.parseModel(rawResult, format);
    }

    /**
     * Ask the RDFService to run the query, and get the resulting stream.
     */
    protected abstract InputStream getRawResultStream()
            throws RDFServiceException;

}
@@ -0,0 +1,84 @@ SparqlQueryApiResultSetProducer.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import static edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery.ResultSetMediaType.TSV;

import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collection;

import org.apache.commons.io.IOUtils;

import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.ResultSetFactory;
import com.hp.hpl.jena.query.ResultSetFormatter;
import com.hp.hpl.jena.sparql.resultset.ResultSetFormat;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.ContentTypeUtil;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

/**
 * Base class for processing SPARQL queries that produce Result Sets: SELECT and
 * ASK.
 */
abstract class SparqlQueryApiResultSetProducer extends SparqlQueryApiExecutor {
    protected final ResultSetMediaType mediaType;

    public SparqlQueryApiResultSetProducer(RDFService rdfService,
            String queryString, String acceptHeader)
            throws AcceptHeaderParsingException, NotAcceptableException {
        super(rdfService, queryString);

        Collection<String> contentTypes = ResultSetMediaType.contentTypes();
        String bestType = ContentTypeUtil.bestContentType(acceptHeader,
                contentTypes);
        this.mediaType = ResultSetMediaType.fromContentType(bestType);
    }

    @Override
    public String getMediaType() {
        return mediaType.getContentType();
    }

    @Override
    public void executeAndFormat(OutputStream out) throws RDFServiceException,
            IOException {
        InputStream rawResult = getRawResultStream();
        if (mediaType.isNativeFormat()) {
            IOUtils.copy(rawResult, out);
        } else if (mediaType == TSV) {
            // ARQ doesn't support TSV, so we will do the translation.
            pipeWithReplacement(rawResult, out);
        } else {
            ResultSet rs = ResultSetFactory.fromJSON(rawResult);
            ResultSetFormat format = ResultSetFormat.lookup(mediaType
                    .getJenaResponseFormat());
            ResultSetFormatter.output(out, rs, format);
        }
    }

    private void pipeWithReplacement(InputStream in, OutputStream out)
            throws IOException {
        int size;
        byte[] buffer = new byte[4096];
        while ((size = in.read(buffer)) > -1) {
            for (int i = 0; i < size; i++) {
                if (buffer[i] == ',') {
                    buffer[i] = '\t';
                }
            }
            out.write(buffer, 0, size);
        }
    }

    /**
     * Ask the RDFService to run the query, and get the resulting stream.
     */
    protected abstract InputStream getRawResultStream()
            throws RDFServiceException;
}
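The TSV branch simply re-streams the RDFService's CSV output, replacing each comma byte with a tab, so it assumes cell values themselves contain no commas. A standalone illustration of that translation (not part of this commit), using a row shaped like the test data below:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;

    public class CsvToTsvSketch {
        // Same byte-for-byte translation as pipeWithReplacement() above.
        static void pipe(InputStream in, OutputStream out) throws IOException {
            int size;
            byte[] buffer = new byte[4096];
            while ((size = in.read(buffer)) > -1) {
                for (int i = 0; i < size; i++) {
                    if (buffer[i] == ',') {
                        buffer[i] = '\t';
                    }
                }
                out.write(buffer, 0, size);
            }
        }

        public static void main(String[] args) throws IOException {
            String csv = "s,p,o\r\nhttp://here.edu/s2,http://here.edu/p2,http://here.edu/o2\r\n";
            ByteArrayOutputStream out = new ByteArrayOutputStream();
            pipe(new ByteArrayInputStream(csv.getBytes()), out);
            System.out.print(out); // same rows, now tab-separated
        }
    }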
@@ -0,0 +1,32 @@ SparqlQueryApiSelectExecutor.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import java.io.InputStream;

import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ResultFormat;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
import edu.cornell.mannlib.vitro.webapp.utils.http.AcceptHeaderParsingException;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

/**
 * Process SELECT queries.
 */
public class SparqlQueryApiSelectExecutor extends
        SparqlQueryApiResultSetProducer {

    public SparqlQueryApiSelectExecutor(RDFService rdfService,
            String queryString, String acceptHeader)
            throws AcceptHeaderParsingException, NotAcceptableException {
        super(rdfService, queryString, acceptHeader);
    }

    @Override
    protected InputStream getRawResultStream() throws RDFServiceException {
        ResultFormat format = ResultFormat.valueOf(mediaType
                .getRdfServiceFormat());
        return rdfService.sparqlSelectQuery(queryString, format);
    }

}
@@ -0,0 +1,440 @@ SparqlQueryApiExecutorTest.java
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery;

import static org.junit.Assert.*;

import java.io.ByteArrayOutputStream;
import java.io.StringReader;

import org.junit.Before;
import org.junit.Test;

import com.hp.hpl.jena.ontology.OntModel;
import com.hp.hpl.jena.ontology.OntModelSpec;
import com.hp.hpl.jena.query.QueryParseException;
import com.hp.hpl.jena.rdf.model.ModelFactory;

import edu.cornell.mannlib.vitro.testing.AbstractTestClass;
import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.jena.model.RDFServiceModel;
import edu.cornell.mannlib.vitro.webapp.utils.http.NotAcceptableException;

/**
 * Test that the SparqlQueryApiExecutor can handle all query types and all
 * formats.
 */
public class SparqlQueryApiExecutorTest extends AbstractTestClass {
    private static final String MODEL_CONTENTS_N3 = "" //
            + "<http://here.edu/subject> \n"
            + " <http://here.edu/predicate> <http://here.edu/object> ."
            + "<http://here.edu/s2> \n"
            + " <http://here.edu/p2> <http://here.edu/o2> .";
    private static final String BASE_URI = "http://here.edu";

    private static final String SELECT_ALL_QUERY = "SELECT ?s ?p ?o WHERE {?s ?p ?o} ORDER BY DESC(?s)";
    private static final String SELECT_RESULT_TEXT = ""
            + "--------------------------------------------------------------------------------------\n"
            + "| s | p | o |\n"
            + "======================================================================================\n"
            + "| <http://here.edu/subject> | <http://here.edu/predicate> | <http://here.edu/object> |\n"
            + "| <http://here.edu/s2> | <http://here.edu/p2> | <http://here.edu/o2> |\n"
            + "--------------------------------------------------------------------------------------\n";
    private static final String SELECT_RESULT_CSV = "s,p,o\r\n"
            + "http://here.edu/subject,http://here.edu/predicate,http://here.edu/object\r\n"
            + "http://here.edu/s2,http://here.edu/p2,http://here.edu/o2\r\n";
    private static final String SELECT_RESULT_TSV = "s\tp\to\r\n"
            + "http://here.edu/subject\thttp://here.edu/predicate\thttp://here.edu/object\r\n"
            + "http://here.edu/s2\thttp://here.edu/p2\thttp://here.edu/o2\r\n";
    private static final String SELECT_RESULT_XML = "" //
            + "<?xml version=\"1.0\"?>\n" //
            + "<sparql xmlns=\"http://www.w3.org/2005/sparql-results#\">\n" //
            + " <head>\n" //
            + " <variable name=\"s\"/>\n" //
            + " <variable name=\"p\"/>\n" //
            + " <variable name=\"o\"/>\n" //
            + " </head>\n" //
            + " <results>\n" //
            + " <result>\n" //
            + " <binding name=\"s\">\n" //
            + " <uri>http://here.edu/subject</uri>\n" //
            + " </binding>\n" //
            + " <binding name=\"p\">\n" //
            + " <uri>http://here.edu/predicate</uri>\n" //
            + " </binding>\n" //
            + " <binding name=\"o\">\n" //
            + " <uri>http://here.edu/object</uri>\n" //
            + " </binding>\n" //
            + " </result>\n" //
            + " <result>\n" //
            + " <binding name=\"s\">\n" //
            + " <uri>http://here.edu/s2</uri>\n" //
            + " </binding>\n" //
            + " <binding name=\"p\">\n" //
            + " <uri>http://here.edu/p2</uri>\n" //
            + " </binding>\n" //
            + " <binding name=\"o\">\n" //
            + " <uri>http://here.edu/o2</uri>\n" //
            + " </binding>\n" //
            + " </result>\n" //
            + " </results>\n" //
            + "</sparql>\n";
    private static final String SELECT_RESULT_JSON = "" //
            + "{\n" //
            + " \"head\": {\n" //
            + " \"vars\": [ \"s\" , \"p\" , \"o\" ]\n" //
            + " } ,\n" //
            + " \"results\": {\n" //
            + " \"bindings\": [\n" //
            + " {\n" //
            + " \"s\": { \"type\": \"uri\" , \"value\": \"http://here.edu/subject\" } ,\n"
            + " \"p\": { \"type\": \"uri\" , \"value\": \"http://here.edu/predicate\" } ,\n"
            + " \"o\": { \"type\": \"uri\" , \"value\": \"http://here.edu/object\" }\n"
            + " } ,\n" //
            + " {\n" //
            + " \"s\": { \"type\": \"uri\" , \"value\": \"http://here.edu/s2\" } ,\n"
            + " \"p\": { \"type\": \"uri\" , \"value\": \"http://here.edu/p2\" } ,\n"
            + " \"o\": { \"type\": \"uri\" , \"value\": \"http://here.edu/o2\" }\n"
            + " }\n" //
            + " ]\n" //
            + " }\n" //
            + "}\n";

    private static final String ASK_ALL_QUERY = "ASK WHERE {?s ?p ?o}";
    private static final String ASK_RESULT_TEXT = "true";
    private static final String ASK_RESULT_CSV = "true";
    private static final String ASK_RESULT_TSV = "true";
    private static final String ASK_RESULT_XML = "" //
            + "<?xml version=\"1.0\"?>\n" //
            + "<sparql xmlns=\"http://www.w3.org/2005/sparql-results#\">\n" //
            + " <head></head>\n" //
            + " <boolean>true</boolean>\n" //
            + "</sparql>";
    private static final String ASK_RESULT_JSON = "" //
            + "{\n" //
            + " \"head\" : { } ,\n" //
            + " \"boolean\" : true\n" //
            + "}\n";

    private static final String CONSTRUCT_ALL_QUERY = "CONSTRUCT {?s ?p ?o} WHERE { LET (?s := <http://here.edu/subject>) <http://here.edu/subject> ?p ?o}";
    private static final String CONSTRUCT_RESULT_TEXT = "" //
            + "<http://here.edu/subject> <http://here.edu/predicate> <http://here.edu/object> .\n";
    private static final String CONSTRUCT_RESULT_TURTLE = "" //
            + "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n" //
            + "@prefix owl: <http://www.w3.org/2002/07/owl#> .\n" //
            + "@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n" //
            + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n" //
            + "\n" //
            + "<http://here.edu/subject>\n" //
            + " <http://here.edu/predicate>\n" //
            + " <http://here.edu/object> .\n";
    private static final String CONSTRUCT_RESULT_N3 = "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n"
            + "@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n"
            + "@prefix owl: <http://www.w3.org/2002/07/owl#> .\n"
            + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n"
            + "\n"
            + "<http://here.edu/subject>\n"
            + " <http://here.edu/predicate>\n"
            + " <http://here.edu/object> .\n";
    private static final String CONSTRUCT_RESULT_RDFXML = "<rdf:RDF\n"
            + " xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n"
            + " xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n"
            + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n"
            + " xmlns:j.0=\"http://here.edu/\"\n"
            + " xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\" > \n"
            + " <rdf:Description rdf:about=\"http://here.edu/subject\">\n"
            + " <j.0:predicate rdf:resource=\"http://here.edu/object\"/>\n"
            + " </rdf:Description>\n" //
            + "</rdf:RDF>\n";
    private static final String CONSTRUCT_RESULT_JSONLD = "["
            + "{\"@id\":\"http://here.edu/object\"},"
            + "{\"@id\":\"http://here.edu/subject\",\"http://here.edu/predicate\":[{\"@id\":\"http://here.edu/object\"}]}"
            + "]";

    private static final String DESCRIBE_ALL_QUERY = "DESCRIBE <http://here.edu/subject>";
    private static final String DESCRIBE_RESULT_TEXT = "<http://here.edu/subject> "
            + "<http://here.edu/predicate> <http://here.edu/object> .\n";
    private static final String DESCRIBE_RESULT_RDFXML = "<rdf:RDF\n"
            + " xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\"\n"
            + " xmlns:owl=\"http://www.w3.org/2002/07/owl#\"\n"
            + " xmlns:xsd=\"http://www.w3.org/2001/XMLSchema#\"\n"
            + " xmlns:j.0=\"http://here.edu/\"\n"
            + " xmlns:rdfs=\"http://www.w3.org/2000/01/rdf-schema#\" > \n"
            + " <rdf:Description rdf:about=\"http://here.edu/subject\">\n"
            + " <j.0:predicate rdf:resource=\"http://here.edu/object\"/>\n"
            + " </rdf:Description>\n" + "</rdf:RDF>\n";
    private static final String DESCRIBE_RESULT_N3 = "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n"
            + "@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n"
            + "@prefix owl: <http://www.w3.org/2002/07/owl#> .\n"
            + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n"
            + "\n"
            + "<http://here.edu/subject>\n"
            + " <http://here.edu/predicate>\n"
            + " <http://here.edu/object> .\n";
    private static final String DESCRIBE_RESULT_TURTLE = "" //
            + "@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .\n" //
            + "@prefix owl: <http://www.w3.org/2002/07/owl#> .\n" //
            + "@prefix xsd: <http://www.w3.org/2001/XMLSchema#> .\n" //
            + "@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .\n" //
            + "\n" //
            + "<http://here.edu/subject>\n" //
            + " <http://here.edu/predicate>\n" //
            + " <http://here.edu/object> .\n";
    private static final String DESCRIBE_RESULT_JSONLD = "["
            + "{\"@id\":\"http://here.edu/object\"},"
            + "{\"@id\":\"http://here.edu/subject\",\"http://here.edu/predicate\":[{\"@id\":\"http://here.edu/object\"}]}"
            + "]";

    private OntModel model;
    private RDFService rdfService;

    @Before
    public void setup() {
        model = ModelFactory.createOntologyModel(OntModelSpec.OWL_MEM);
        model.read(new StringReader(MODEL_CONTENTS_N3), BASE_URI, "N3");
        rdfService = new RDFServiceModel(model);
    }

    // ----------------------------------------------------------------------
    // Tests
    // ----------------------------------------------------------------------

    @Test(expected = NullPointerException.class)
    public void nullRdfService() throws Exception {
        SparqlQueryApiExecutor.instance(null, SELECT_ALL_QUERY, "text/plain");
    }

    @Test(expected = NullPointerException.class)
    public void nullQuery() throws Exception {
        SparqlQueryApiExecutor.instance(rdfService, null, "text/plain");
        fail("nullQuery not implemented");
    }

    @Test(expected = QueryParseException.class)
    public void emptyQuery() throws Exception {
        SparqlQueryApiExecutor.instance(rdfService, "", "text/plain");
        fail("emptyQuery not implemented");
    }

    @Test(expected = QueryParseException.class)
    public void cantParseQuery() throws Exception {
        SparqlQueryApiExecutor.instance(rdfService, "BOGUS", "text/plain");
        fail("cantParseQuery not implemented");
    }

    // Can't figure out how to create a Query of a type other than SELECT, ASK,
    // CONSTRUCT and DESCRIBE.

    // Null accept header is treated as "*/*"

    @Test(expected = NotAcceptableException.class)
    public void noAcceptableContentType() throws Exception {
        SparqlQueryApiExecutor.instance(rdfService, SELECT_ALL_QUERY, "bogus");
        fail("noAcceptableContentType not implemented");
    }

    // ----------------------------------------------------------------------

    @Test
    public void selectToText() throws Exception {
        executeQuery("select to text", SELECT_ALL_QUERY, "text/plain",
                SELECT_RESULT_TEXT);
    }

    @Test
    public void selectToCsv() throws Exception {
        executeQuery("select to csv", SELECT_ALL_QUERY, "text/csv",
                SELECT_RESULT_CSV);
    }

    @Test
    public void selectToTsv() throws Exception {
        executeQuery("select to tsv", SELECT_ALL_QUERY,
                "text/tab-separated-values", SELECT_RESULT_TSV);
    }

    @Test
    public void selectToXml() throws Exception {
        executeQuery("select to xml", SELECT_ALL_QUERY,
                "application/sparql-results+xml", SELECT_RESULT_XML);
    }

    @Test
    public void selectToJson() throws Exception {
        executeQuery("select to json", SELECT_ALL_QUERY,
                "application/sparql-results+json", SELECT_RESULT_JSON);
    }

    @Test
    public void selectWithInvalidContentType() throws Exception {
        executeWithInvalidAcceptHeader("select with application/rdf+xml",
                SELECT_ALL_QUERY, "application/rdf+xml");
        executeWithInvalidAcceptHeader("select with text/n3", SELECT_ALL_QUERY,
                "text/n3");
        executeWithInvalidAcceptHeader("select with text/turtle",
                SELECT_ALL_QUERY, "text/turtle");
        executeWithInvalidAcceptHeader("select with application/json",
                SELECT_ALL_QUERY, "application/json");
    }

    // ----------------------------------------------------------------------

    @Test
    public void askToText() throws Exception {
        executeQuery("ask to text", ASK_ALL_QUERY, "text/plain",
                ASK_RESULT_TEXT);
    }

    @Test
    public void askToCsv() throws Exception {
        executeQuery("ask to csv", ASK_ALL_QUERY, "text/csv", ASK_RESULT_CSV);
    }

    @Test
    public void askToTsv() throws Exception {
        executeQuery("ask to tsv", ASK_ALL_QUERY, "text/tab-separated-values",
                ASK_RESULT_TSV);
    }

    @Test
    public void askToXml() throws Exception {
        executeQuery("ask to xml", ASK_ALL_QUERY,
                "application/sparql-results+xml", ASK_RESULT_XML);
    }

    @Test
    public void askToJson() throws Exception {
        executeQuery("ask to json", ASK_ALL_QUERY,
                "application/sparql-results+json", ASK_RESULT_JSON);
    }

    @Test
    public void askWithInvalidAcceptHeader() throws Exception {
        executeWithInvalidAcceptHeader("ask with application/rdf+xml",
                ASK_ALL_QUERY, "application/rdf+xml");
        executeWithInvalidAcceptHeader("ask with text/n3", ASK_ALL_QUERY,
                "text/n3");
        executeWithInvalidAcceptHeader("ask with text/turtle", ASK_ALL_QUERY,
                "text/turtle");
        executeWithInvalidAcceptHeader("ask with application/json",
                ASK_ALL_QUERY, "application/json");
    }

    // ----------------------------------------------------------------------

    @Test
    public void constructToText() throws Exception {
        executeQuery("construct to text", CONSTRUCT_ALL_QUERY, "text/plain",
                CONSTRUCT_RESULT_TEXT);
    }

    @Test
    public void constructToRdfXml() throws Exception {
        executeQuery("construct to rdf/xml", CONSTRUCT_ALL_QUERY,
                "application/rdf+xml", CONSTRUCT_RESULT_RDFXML);
    }

    @Test
    public void constructToN3() throws Exception {
        executeQuery("construct to n3", CONSTRUCT_ALL_QUERY, "text/n3",
                CONSTRUCT_RESULT_N3);
    }

    @Test
    public void constructToTurtle() throws Exception {
        executeQuery("construct to turtle", CONSTRUCT_ALL_QUERY, "text/turtle",
                CONSTRUCT_RESULT_TURTLE);
    }

    @Test
    public void constructToJsonld() throws Exception {
        executeQuery("construct to JSON-LD", CONSTRUCT_ALL_QUERY,
                "application/json", CONSTRUCT_RESULT_JSONLD);
    }

    @Test
    public void constructWithInvalidAcceptHeader() throws Exception {
        executeWithInvalidAcceptHeader("construct with text/csv",
                CONSTRUCT_ALL_QUERY, "text/csv");
        executeWithInvalidAcceptHeader("construct with text/tsv",
                CONSTRUCT_ALL_QUERY, "text/tsv");
        executeWithInvalidAcceptHeader(
                "construct with application/sparql-results+xml",
                CONSTRUCT_ALL_QUERY, "application/sparql-results+xml");
        executeWithInvalidAcceptHeader(
                "construct with application/sparql-results+json",
                CONSTRUCT_ALL_QUERY, "application/sparql-results+json");
    }

    // ----------------------------------------------------------------------

    @Test
    public void describeToText() throws Exception {
        executeQuery("describe to text", DESCRIBE_ALL_QUERY, "text/plain",
                DESCRIBE_RESULT_TEXT);
    }

    @Test
    public void describeToRdfXml() throws Exception {
        executeQuery("describe to rdf/xml", DESCRIBE_ALL_QUERY,
                "application/rdf+xml", DESCRIBE_RESULT_RDFXML);
    }

    @Test
    public void describeToN3() throws Exception {
        executeQuery("describe to n3", DESCRIBE_ALL_QUERY, "text/n3",
                DESCRIBE_RESULT_N3);
    }

    @Test
    public void describeToTurtle() throws Exception {
        executeQuery("describe to turtle", DESCRIBE_ALL_QUERY, "text/turtle",
                DESCRIBE_RESULT_TURTLE);
    }

    @Test
    public void describeToJsonld() throws Exception {
        executeQuery("describe to JSON-LD", DESCRIBE_ALL_QUERY,
                "application/json", DESCRIBE_RESULT_JSONLD);
    }

    @Test
    public void describeWithInvalidAcceptHeader() throws Exception {
        executeWithInvalidAcceptHeader("describe with text/csv",
                DESCRIBE_ALL_QUERY, "text/csv");
        executeWithInvalidAcceptHeader("describe with text/tsv",
                DESCRIBE_ALL_QUERY, "text/tsv");
        executeWithInvalidAcceptHeader(
                "describe with application/sparql-results+xml",
                DESCRIBE_ALL_QUERY, "application/sparql-results+xml");
        executeWithInvalidAcceptHeader(
                "describe with application/sparql-results+json",
                DESCRIBE_ALL_QUERY, "application/sparql-results+json");
    }

    // ----------------------------------------------------------------------
    // Helper methods
    // ----------------------------------------------------------------------

    private void executeQuery(String message, String queryString,
            String acceptHeader, String expected) throws Exception {
        ByteArrayOutputStream out = new ByteArrayOutputStream();

        SparqlQueryApiExecutor executor = SparqlQueryApiExecutor.instance(
                rdfService, queryString, acceptHeader);
        executor.executeAndFormat(out);

        assertEquals(message, expected, out.toString());
    }

    private void executeWithInvalidAcceptHeader(String message,
            String queryString, String acceptHeader) throws Exception {
        try {
            SparqlQueryApiExecutor.instance(rdfService, queryString,
                    acceptHeader);
            fail(message + " - Expected a NotAcceptableException");
        } catch (NotAcceptableException e) {
            // expected
        }
    }
}
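The tests are plain JUnit 4, so any IDE or build can run them; a minimal command-line runner (an illustration, not part of this commit, assuming the test class and its dependencies are on the classpath):

    import org.junit.runner.JUnitCore;
    import org.junit.runner.Result;
    import org.junit.runner.notification.Failure;

    public class RunSparqlQueryApiExecutorTest {
        public static void main(String[] args) {
            // Run the whole test class and report any failures to stdout.
            Result result = JUnitCore.runClasses(
                    edu.cornell.mannlib.vitro.webapp.controller.api.sparqlquery.SparqlQueryApiExecutorTest.class);
            for (Failure failure : result.getFailures()) {
                System.out.println(failure.toString());
            }
            System.out.println(result.wasSuccessful()
                    ? "All tests passed" : "There were failures");
        }
    }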