Merge branch 'issue-VIVO-282-content-neogitation' into develop

commit 03ceec5d81
Author: Brian Caruso
Date:   2013-09-23 11:57:38 -04:00
2 changed files with 189 additions and 140 deletions
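
In short, SparqlQueryServlet now dispatches on the query type (SELECT, ASK, CONSTRUCT/DESCRIBE) and negotiates the response format from the HTTP Accept header, falling back to the resultFormat/rdfResultFormat parameters when no acceptable type is requested. As a rough usage sketch only — the host, webapp name, and the /admin/sparqlquery path are assumptions about a typical VIVO/Vitro deployment, not part of this commit — a client could ask for CSV results like this:

import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

/**
 * Posts a SPARQL SELECT query and asks for CSV via the Accept header,
 * exercising the content negotiation added in this merge.
 */
public class SparqlClientSketch {
    public static void main(String[] args) throws Exception {
        // Hypothetical endpoint; adjust host, webapp name, and path to your deployment.
        URL endpoint = new URL("http://localhost:8080/vivo/admin/sparqlquery");
        String form = "query=" + URLEncoder.encode(
                "SELECT ?s ?p ?o WHERE { ?s ?p ?o } LIMIT 10", "UTF-8");

        HttpURLConnection conn = (HttpURLConnection) endpoint.openConnection();
        conn.setRequestMethod("POST");
        conn.setDoOutput(true);
        conn.setRequestProperty("Content-Type", "application/x-www-form-urlencoded");
        // With this merge, the Accept header (not only the resultFormat
        // parameter) decides the response format for SELECT queries.
        conn.setRequestProperty("Accept", "text/csv");

        try (OutputStream out = conn.getOutputStream()) {
            out.write(form.getBytes(StandardCharsets.UTF_8));
        }
        try (BufferedReader in = new BufferedReader(
                new InputStreamReader(conn.getInputStream(), StandardCharsets.UTF_8))) {
            String line;
            while ((line = in.readLine()) != null) {
                System.out.println(line);   // CSV rows from the endpoint
            }
        }
    }
}

Swapping the Accept value for application/rdf+xml or text/n3 would do the same for CONSTRUCT and DESCRIBE queries, per the format tables in the diff below.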

File: SparqlQueryServlet.java

@@ -9,9 +9,11 @@ import java.io.PrintWriter;
 import java.io.Writer;
 import java.net.URLDecoder;
 import java.util.ArrayList;
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.List;
+import java.util.Map;
 
 import javax.servlet.RequestDispatcher;
 import javax.servlet.ServletException;
@@ -40,6 +42,7 @@ import com.hp.hpl.jena.vocabulary.XSD;
 import edu.cornell.mannlib.vedit.controller.BaseEditController;
 import edu.cornell.mannlib.vitro.webapp.auth.permissions.SimplePermission;
 import edu.cornell.mannlib.vitro.webapp.beans.Ontology;
+import edu.cornell.mannlib.vitro.webapp.controller.individual.IndividualController;
 import edu.cornell.mannlib.vitro.webapp.dao.OntologyDao;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ModelSerializationFormat;
@@ -47,44 +50,36 @@ import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFService.ResultFormat;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.RDFServiceException;
 import edu.cornell.mannlib.vitro.webapp.rdfservice.impl.RDFServiceUtils;
 import edu.cornell.mannlib.vitro.webapp.utils.SparqlQueryUtils;
+import edu.cornell.mannlib.vitro.webapp.web.ContentType;
 
 /**
- * Services a sparql query. This will return a simple error message and a 501 if
+ * Services a SPARQL query. This will return a simple error message and a 501 if
  * there is no Model.
  *
  *
 * @author bdc34
 *
 */
 public class SparqlQueryServlet extends BaseEditController {
     private static final Log log = LogFactory.getLog(SparqlQueryServlet.class.getName());
 
-    private final static boolean CONVERT = true;
-
     /**
      * format configurations for SELECT queries.
      */
     protected static HashMap<String,RSFormatConfig> rsFormats = new HashMap<String,RSFormatConfig>();
 
-    private static RSFormatConfig[] rsfs = {
-        new RSFormatConfig( "RS_XML", !CONVERT, ResultFormat.XML, null, "text/xml"),
-        new RSFormatConfig( "RS_TEXT", !CONVERT, ResultFormat.TEXT, null, "text/plain"),
-        new RSFormatConfig( "vitro:csv", !CONVERT, ResultFormat.CSV, null, "text/csv"),
-        new RSFormatConfig( "RS_JSON", !CONVERT, ResultFormat.JSON, null, "application/javascript") };
-
     /**
      * format configurations for CONSTRUCT/DESCRIBE queries.
      */
     protected static HashMap<String,ModelFormatConfig> modelFormats =
         new HashMap<String,ModelFormatConfig>();
 
-    private static ModelFormatConfig[] fmts = {
-        new ModelFormatConfig("RDF/XML", !CONVERT, ModelSerializationFormat.RDFXML, null, "application/rdf+xml" ),
-        new ModelFormatConfig("RDF/XML-ABBREV", CONVERT, ModelSerializationFormat.N3, "RDF/XML-ABBREV", "application/rdf+xml" ),
-        new ModelFormatConfig("N3", !CONVERT, ModelSerializationFormat.N3, null, "text/n3" ),
-        new ModelFormatConfig("N-TRIPLE", !CONVERT, ModelSerializationFormat.NTRIPLE, null, "text/plain" ),
-        new ModelFormatConfig("TTL", CONVERT, ModelSerializationFormat.N3, "TTL", "application/x-turtle" ),
-        new ModelFormatConfig("JSON-LD", CONVERT, ModelSerializationFormat.N3, "JSON-LD", "application/javascript" ) };
+    /**
+     * Use this map to decide which MIME type is suited for the "accept" header.
+     */
+    public static final Map<String, Float> ACCEPTED_CONTENT_TYPES;
 
     @Override
     protected void doPost(HttpServletRequest request, HttpServletResponse response)
@@ -113,100 +108,85 @@ public class SparqlQueryServlet extends BaseEditController {
         String queryParam = vreq.getParameter("query");
         log.debug("queryParam was : " + queryParam);
 
-        String resultFormatParam = vreq.getParameter("resultFormat");
-        log.debug("resultFormat was: " + resultFormatParam);
-
-        String rdfResultFormatParam = vreq.getParameter("rdfResultFormat");
-        if (rdfResultFormatParam == null) {
-            rdfResultFormatParam = "RDF/XML-ABBREV";
-        }
-        log.debug("rdfResultFormat was: " + rdfResultFormatParam);
-
-        if( queryParam == null || "".equals(queryParam) ||
-            resultFormatParam == null || "".equals(resultFormatParam) ||
-            !rsFormats.containsKey( resultFormatParam ) ||
-            rdfResultFormatParam == null || "".equals(rdfResultFormatParam) ||
-            !modelFormats.containsKey( rdfResultFormatParam ) ) {
+        if( queryParam == null || "".equals(queryParam) ){
             doHelp(request,response);
             return;
         }
 
-        executeQuery(response, resultFormatParam, rdfResultFormatParam,
-            queryParam, vreq.getUnfilteredRDFService());
+        String contentType = checkForContentType(vreq.getHeader("Accept"));
+
+        Query query = SparqlQueryUtils.create(queryParam);
+        if( query.isSelectType() ){
+            String format = contentType!=null ? contentType:vreq.getParameter("resultFormat");
+            RSFormatConfig formatConf = rsFormats.get(format);
+            doSelect(response, queryParam, formatConf, vreq.getRDFService());
+        }else if( query.isAskType()){
+            doAsk( queryParam, vreq.getRDFService(), response );
+        }else if( query.isConstructType() ){
+            String format = contentType != null ? contentType : vreq.getParameter("rdfResultFormat");
+            if (format== null) {
+                format= "RDF/XML-ABBREV";
+            }
+            ModelFormatConfig formatConf = modelFormats.get(format);
+            doConstruct(response, query, formatConf, vreq.getRDFService());
+        }else{
+            doHelp(request,response);
+        }
         return;
     }
 
-    private void executeQuery(HttpServletResponse response,
-                              String resultFormatParam,
-                              String rdfResultFormatParam,
-                              String queryParam,
-                              RDFService rdfService ) throws IOException {
-        /* BJL23 2008-11-06
-         * modified to support CSV output.
-         * Unfortunately, ARQ doesn't make it easy to
-         * do this by implementing a new ResultSetFormat, because
-         * ResultSetFormatter is hardwired with expected values.
-         * This slightly ugly approach will have to do for now.
-         */
-//        if ( !("vitro:csv").equals(resultFormatParam) ) {
-//            rsf = selectFormatSymbols.get(resultFormatParam);
-//        }
-//        String mimeType = rdfFormatToMimeType.get(resultFormatParam);
-        try{
-            Query query = SparqlQueryUtils.create(queryParam);
-            if( query.isSelectType() ){
-                doSelectQuery( queryParam, rdfService, resultFormatParam, response);
-            } else if(query.isAskType()){
+    private void doAsk(String queryParam, RDFService rdfService,
+            HttpServletResponse response) throws ServletException, IOException {
         // Irrespective of the ResultFormatParam,
         // this always prints a boolean to the default OutputStream.
-        String result = (rdfService.sparqlAskQuery(queryParam) == true)
+        String result;
+        try {
+            result = (rdfService.sparqlAskQuery(queryParam) == true)
                 ? "true"
                 : "false";
+        } catch (RDFServiceException e) {
+            throw new ServletException( "Could not execute ask query ", e );
+        }
         PrintWriter p = response.getWriter();
         p.write(result);
         return;
-            } else {
-                doModelResultQuery( query, rdfService, rdfResultFormatParam, response);
-            }
-        } catch (RDFServiceException e) {
-            throw new RuntimeException(e);
-        }
     }
 
     /**
      * Execute the query and send the result to out. Attempt to
      * send the RDFService the same format as the rdfResultFormatParam
      * so that the results from the RDFService can be directly piped to the client.
-     * @param rdfService
-     * @throws IOException
-     * @throws RDFServiceException
      */
-    private void doSelectQuery( String queryParam,
-            RDFService rdfService, String resultFormatParam,
-            HttpServletResponse response) throws IOException, RDFServiceException{
-        RSFormatConfig config = rsFormats.get( resultFormatParam );
-
-        if( ! config.converstionFromWireFormat ){
-            response.setContentType( config.responseMimeType );
-            InputStream results = rdfService.sparqlSelectQuery(queryParam, config.wireFormat );
+    private void doSelect(HttpServletResponse response,
+            String queryParam,
+            RSFormatConfig formatConf,
+            RDFService rdfService
+            ) throws ServletException {
+        try {
+            if( ! formatConf.converstionFromWireFormat ){
+                response.setContentType( formatConf.responseMimeType );
+                InputStream results;
+                results = rdfService.sparqlSelectQuery(queryParam, formatConf.wireFormat );
                 pipe( results, response.getOutputStream() );
             }else{
                 //always use JSON when conversion is needed.
                 InputStream results = rdfService.sparqlSelectQuery(queryParam, ResultFormat.JSON );
-            response.setContentType( config.responseMimeType );
+                response.setContentType( formatConf.responseMimeType );
                 ResultSet rs = ResultSetFactory.fromJSON( results );
                 OutputStream out = response.getOutputStream();
-            ResultSetFormatter.output(out, rs, config.jenaResponseFormat);
-//        } else {
-//            Writer out = response.getWriter();
-//            toCsv(out, results);
-        //}
-        }
-    }
+                ResultSetFormatter.output(out, rs, formatConf.jenaResponseFormat);
+            }
+        } catch (RDFServiceException e) {
+            throw new ServletException("Cannot get result from the RDFService",e);
+        } catch (IOException e) {
+            throw new ServletException("Cannot perform SPARQL SELECT",e);
+        }
+    }
 
     /**
      * Execute the query and send the result to out. Attempt to
@@ -217,25 +197,24 @@ public class SparqlQueryServlet extends BaseEditController {
      * @throws RDFServiceException
      * @throws
      */
-    private void doModelResultQuery( Query query,
-            RDFService rdfService, String rdfResultFormatParam,
-            HttpServletResponse response) throws IOException, RDFServiceException{
-
-        //config drives what formats and conversions to use
-        ModelFormatConfig config = modelFormats.get( rdfResultFormatParam );
+    private void doConstruct( HttpServletResponse response,
+            Query query,
+            ModelFormatConfig formatConfig,
+            RDFService rdfService
+            ) throws ServletException{
+        try{
         InputStream rawResult = null;
         if( query.isConstructType() ){
-            rawResult= rdfService.sparqlConstructQuery( query.toString(), config.wireFormat );
+            rawResult= rdfService.sparqlConstructQuery( query.toString(), formatConfig.wireFormat );
         }else if ( query.isDescribeType() ){
-            rawResult = rdfService.sparqlDescribeQuery( query.toString(), config.wireFormat );
+            rawResult = rdfService.sparqlDescribeQuery( query.toString(), formatConfig.wireFormat );
         }
 
-        response.setContentType( config.responseMimeType );
+        response.setContentType( formatConfig.responseMimeType );
 
-        if( config.converstionFromWireFormat ){
-            Model resultModel = RDFServiceUtils.parseModel( rawResult, config.wireFormat );
-            if( "JSON-LD".equals( config.jenaResponseFormat )){
+        if( formatConfig.converstionFromWireFormat ){
+            Model resultModel = RDFServiceUtils.parseModel( rawResult, formatConfig.wireFormat );
+            if( "JSON-LD".equals( formatConfig.jenaResponseFormat )){
                 //since jena 2.6.4 doesn't support JSON-LD we do it
                 try {
                     JenaRDFParser parser = new JenaRDFParser();
@@ -246,12 +225,17 @@
                 }
             }else{
                 OutputStream out = response.getOutputStream();
-                resultModel.write(out, config.jenaResponseFormat );
+                resultModel.write(out, formatConfig.jenaResponseFormat );
             }
         }else{
             OutputStream out = response.getOutputStream();
             pipe( rawResult, out );
         }
+        }catch( IOException ex){
+            throw new ServletException("could not run SPARQL CONSTRUCT",ex);
+        } catch (RDFServiceException ex) {
+            throw new ServletException("could not run SPARQL CONSTRUCT",ex);
+        }
     }
 
     private void pipe( InputStream in, OutputStream out) throws IOException{
@@ -362,13 +346,35 @@ public class SparqlQueryServlet extends BaseEditController {
         rd.forward(req,res);
     }
 
-    public static class ModelFormatConfig{
+    /** Simple boolean value to improve the legibility of configurations. */
+    private final static boolean CONVERT = true;
+
+    /** Simple value to improve the legibility of configurations. */
+    private final static String NO_CONVERSION = null;
+
+    public static class FormatConfig{
         public String valueFromForm;
         public boolean converstionFromWireFormat;
+        public String responseMimeType;
+    }
+
+    private static ModelFormatConfig[] fmts = {
+        new ModelFormatConfig("RDF/XML",
+            !CONVERT, ModelSerializationFormat.RDFXML, NO_CONVERSION, "application/rdf+xml" ),
+        new ModelFormatConfig("RDF/XML-ABBREV",
+            CONVERT, ModelSerializationFormat.N3, "RDF/XML-ABBREV", "application/rdf+xml" ),
+        new ModelFormatConfig("N3",
+            !CONVERT, ModelSerializationFormat.N3, NO_CONVERSION, "text/n3" ),
+        new ModelFormatConfig("N-TRIPLE",
+            !CONVERT, ModelSerializationFormat.NTRIPLE, NO_CONVERSION, "text/plain" ),
+        new ModelFormatConfig("TTL",
+            CONVERT, ModelSerializationFormat.N3, "TTL", "application/x-turtle" ),
+        new ModelFormatConfig("JSON-LD",
+            CONVERT, ModelSerializationFormat.N3, "JSON-LD", "application/javascript" ) };
+
+    public static class ModelFormatConfig extends FormatConfig{
         public RDFService.ModelSerializationFormat wireFormat;
         public String jenaResponseFormat;
-        public String responseMimeType;
 
         public ModelFormatConfig( String valueFromForm,
                 boolean converstionFromWireFormat,
@@ -383,12 +389,20 @@ public class SparqlQueryServlet extends BaseEditController {
         }
     }
 
-    public static class RSFormatConfig{
-        public String valueFromForm;
-        public boolean converstionFromWireFormat;
+    private static RSFormatConfig[] rsfs = {
+        new RSFormatConfig( "RS_XML",
+            !CONVERT, ResultFormat.XML, null, "text/xml"),
+        new RSFormatConfig( "RS_TEXT",
+            !CONVERT, ResultFormat.TEXT, null, "text/plain"),
+        new RSFormatConfig( "vitro:csv",
+            !CONVERT, ResultFormat.CSV, null, "text/csv"),
+        new RSFormatConfig( "RS_JSON",
+            !CONVERT, ResultFormat.JSON, null, "application/javascript") };
+
+    public static class RSFormatConfig extends FormatConfig{
         public ResultFormat wireFormat;
         public ResultSetFormat jenaResponseFormat;
-        public String responseMimeType;
 
         public RSFormatConfig( String valueFromForm,
                 boolean converstionFromWireFormat,
@@ -404,13 +418,47 @@ public class SparqlQueryServlet extends BaseEditController {
     }
 
     static{
-        /* move the lists of configs into maps for easy lookup */
+        HashMap<String, Float> map = new HashMap<String, Float>();
+
+        /* move the lists of configurations into maps for easy lookup
+         * by both MIME content type and the parameters from the form */
         for( RSFormatConfig rsfc : rsfs ){
             rsFormats.put( rsfc.valueFromForm, rsfc );
+            rsFormats.put( rsfc.responseMimeType, rsfc);
+            map.put(rsfc.responseMimeType, 1.0f);
         }
         for( ModelFormatConfig mfc : fmts ){
             modelFormats.put( mfc.valueFromForm, mfc);
+            modelFormats.put(mfc.responseMimeType, mfc);
+            map.put(mfc.responseMimeType, 1.0f);
         }
+        ACCEPTED_CONTENT_TYPES = Collections.unmodifiableMap(map);
     }
+
+    /**
+     * Get the content type based on content negotiation.
+     * Returns null if no content type can be agreed on or
+     * if there is no accept header.
+     */
+    protected String checkForContentType( String acceptHeader ) {
+        if (acceptHeader == null)
+            return null;
+
+        try {
+            Map<String, Float> typesAndQ = ContentType
+                    .getTypesAndQ(acceptHeader);
+
+            String ctStr = ContentType
+                    .getBestContentType(typesAndQ,ACCEPTED_CONTENT_TYPES);
+
+            if( ACCEPTED_CONTENT_TYPES.containsKey( ctStr )){
+                return ctStr;
+            }
+        } catch (Throwable th) {
+            log.error("Problem while checking accept header ", th);
+        }
+        return null;
+    }
 }
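
The negotiation itself is delegated to Vitro's ContentType.getTypesAndQ() and getBestContentType(), as used above. For readers who want the idea without digging into that helper, here is a minimal standalone sketch of Accept-header q-value matching — my own illustration under simplifying assumptions (no wildcards, no media-type parameters other than q), not the ContentType implementation:

import java.util.HashMap;
import java.util.Map;

/**
 * Minimal, standalone sketch of Accept-header negotiation in the spirit of
 * checkForContentType() above. It is NOT Vitro's ContentType implementation;
 * it only shows how q-values pick one of the server's offered MIME types.
 */
public class AcceptNegotiationSketch {

    /** Returns the offered type with the highest client q-value, or null. */
    public static String bestContentType(String acceptHeader, Map<String, Float> offered) {
        if (acceptHeader == null) {
            return null;
        }
        String best = null;
        float bestQ = 0f;
        for (String clause : acceptHeader.split(",")) {
            String[] parts = clause.trim().split(";");
            String type = parts[0].trim();
            float q = 1.0f;                       // default quality per the HTTP spec
            for (int i = 1; i < parts.length; i++) {
                String p = parts[i].trim();
                if (p.startsWith("q=")) {
                    try {
                        q = Float.parseFloat(p.substring(2));
                    } catch (NumberFormatException e) {
                        q = 0f;                   // ignore malformed q-values
                    }
                }
            }
            // Wildcards such as */* are not handled in this sketch.
            if (offered.containsKey(type) && q > bestQ) {
                best = type;
                bestQ = q;
            }
        }
        return best;
    }

    public static void main(String[] args) {
        Map<String, Float> offered = new HashMap<String, Float>();
        offered.put("application/rdf+xml", 1.0f);
        offered.put("text/csv", 1.0f);
        // Prints "application/rdf+xml": it carries the higher q-value of the two offered types.
        System.out.println(bestContentType("text/csv;q=0.5, application/rdf+xml", offered));
    }
}

Given the ACCEPTED_CONTENT_TYPES map built in the static block above, a header like "text/csv;q=0.5, application/rdf+xml" would resolve the same way in the servlet.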

File: IndividualRequestAnalyzer.java

@@ -132,10 +132,11 @@ public class IndividualRequestAnalyzer {
      * only provide a set of bytes.
      */
     protected ContentType checkAcceptHeaderForLinkedDataRequest() {
-        String acceptHeader = vreq.getHeader("accept");
-        if (acceptHeader == null) {
+        String acceptHeader = vreq.getHeader("Accept");
+        if (acceptHeader == null)
+            acceptHeader = vreq.getHeader("accept");
+        if (acceptHeader == null)
             return null;
-        }
 
         try {
             Map<String, Float> typesAndQ = ContentType