1. Attempting to delete old visualization back-end & front-end
parent 3b8ac290a7
commit fecd8d938d
19 changed files with 0 additions and 6023 deletions
CoAuthorshipGraphMLWriter.java
@@ -1,332 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.coauthorship;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Set;

import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationController;
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoAuthorshipData;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Edge;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Node;

public class CoAuthorshipGraphMLWriter {

    private StringBuilder coAuthorshipGraphMLContent;

    private final String GRAPHML_HEADER = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
            + " <graphml xmlns=\"http://graphml.graphdrawing.org/xmlns\"\n"
            + " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n"
            + " xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns\n"
            + " http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd\">\n\n";

    private final String GRAPHML_FOOTER = "</graphml>";

    public CoAuthorshipGraphMLWriter(CoAuthorshipData visVOContainer) {
        coAuthorshipGraphMLContent = createCoAuthorshipGraphMLContent(visVOContainer);
    }

    public StringBuilder getCoAuthorshipGraphMLContent() {
        return coAuthorshipGraphMLContent;
    }

    private StringBuilder createCoAuthorshipGraphMLContent(
            CoAuthorshipData coAuthorshipData) {

        StringBuilder graphMLContent = new StringBuilder();

        graphMLContent.append(GRAPHML_HEADER);

        /*
         * We are side-effecting "graphMLContent" object in this method since creating
         * another String object to hold key definition data will be redundant & will
         * not serve the purpose.
         * */
        generateKeyDefinitionContent(coAuthorshipData, graphMLContent);

        /*
         * Used to generate graph content. It will contain both the nodes & edge information.
         * We are side-effecting "graphMLContent".
         * */
        generateGraphContent(coAuthorshipData, graphMLContent);

        graphMLContent.append(GRAPHML_FOOTER);

        return graphMLContent;
    }

    private void generateGraphContent(CoAuthorshipData coAuthorshipData,
            StringBuilder graphMLContent) {

        graphMLContent.append("\n<graph edgedefault=\"undirected\">\n");

        if (coAuthorshipData.getNodes() != null && coAuthorshipData.getNodes().size() > 0) {
            generateNodeSectionContent(coAuthorshipData, graphMLContent);
        }

        if (coAuthorshipData.getEdges() != null && coAuthorshipData.getEdges().size() > 0) {
            generateEdgeSectionContent(coAuthorshipData, graphMLContent);
        }

        graphMLContent.append("</graph>\n");
    }

    private void generateEdgeSectionContent(CoAuthorshipData coAuthorshipData,
            StringBuilder graphMLContent) {

        graphMLContent.append("<!-- edges -->\n");

        Set<Edge> edges = coAuthorshipData.getEdges();

        List<Edge> orderedEdges = new ArrayList<Edge>(edges);

        Collections.sort(orderedEdges, new EdgeComparator());

        for (Edge currentEdge : orderedEdges) {
            /*
             * This method actually creates the XML code for a single edge. "graphMLContent"
             * is being side-effected.
             * */
            getEdgeContent(graphMLContent, currentEdge);
        }
    }

    private void getEdgeContent(StringBuilder graphMLContent, Edge currentEdge) {

        graphMLContent.append("<edge "
                + "id=\"" + currentEdge.getEdgeID() + "\" "
                + "source=\"" + currentEdge.getSourceNode().getNodeID() + "\" "
                + "target=\"" + currentEdge.getTargetNode().getNodeID() + "\" "
                + ">\n");

        graphMLContent.append("\t<data key=\"collaborator1\">"
                + currentEdge.getSourceNode().getNodeName()
                + "</data>\n");

        graphMLContent.append("\t<data key=\"collaborator2\">"
                + currentEdge.getTargetNode().getNodeName()
                + "</data>\n");

        graphMLContent.append("\t<data key=\"number_of_coauthored_works\">"
                + currentEdge.getNumOfCoAuthoredWorks()
                + "</data>\n");

        if (currentEdge.getEarliestCollaborationYearCount() != null) {
            /*
             * There is no clean way of getting the map contents in java even though
             * we are sure to have only one entry on the map. So using the for loop.
             * */
            for (Map.Entry<String, Integer> publicationInfo
                    : currentEdge.getEarliestCollaborationYearCount().entrySet()) {

                graphMLContent.append("\t<data key=\"earliest_collaboration\">"
                        + publicationInfo.getKey()
                        + "</data>\n");

                graphMLContent.append("\t<data key=\"num_earliest_collaboration\">"
                        + publicationInfo.getValue()
                        + "</data>\n");
            }
        }

        if (currentEdge.getLatestCollaborationYearCount() != null) {
            for (Map.Entry<String, Integer> publicationInfo
                    : currentEdge.getLatestCollaborationYearCount().entrySet()) {

                graphMLContent.append("\t<data key=\"latest_collaboration\">"
                        + publicationInfo.getKey()
                        + "</data>\n");

                graphMLContent.append("\t<data key=\"num_latest_collaboration\">"
                        + publicationInfo.getValue()
                        + "</data>\n");
            }
        }

        if (currentEdge.getUnknownCollaborationYearCount() != null) {
            graphMLContent.append("\t<data key=\"num_unknown_collaboration\">"
                    + currentEdge.getUnknownCollaborationYearCount()
                    + "</data>\n");
        }

        graphMLContent.append("</edge>\n");
    }

    private void generateNodeSectionContent(CoAuthorshipData coAuthorshipData,
            StringBuilder graphMLContent) {

        graphMLContent.append("<!-- nodes -->\n");

        Node egoNode = coAuthorshipData.getEgoNode();
        Set<Node> authorNodes = coAuthorshipData.getNodes();

        /*
         * This method actually creates the XML code for a single node. "graphMLContent"
         * is being side-effected. The egoNode is added first because this is the "requirement"
         * of the co-author vis. Ego should always come first.
         * */
        getNodeContent(graphMLContent, egoNode);

        List<Node> orderedAuthorNodes = new ArrayList<Node>(authorNodes);
        orderedAuthorNodes.remove(egoNode);

        Collections.sort(orderedAuthorNodes, new NodeComparator());

        for (Node currNode : orderedAuthorNodes) {
            /*
             * We have already printed the Ego Node info.
             * */
            if (currNode != egoNode) {
                getNodeContent(graphMLContent, currNode);
            }
        }
    }

    private void getNodeContent(StringBuilder graphMLContent, Node node) {

        String profileURL = null;
        try {
            profileURL = VisualizationFrameworkConstants.INDIVIDUAL_URL_PREFIX + "?"
                    + VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY
                    + "=" + URLEncoder.encode(node.getNodeURI(),
                            VisualizationController.URL_ENCODING_SCHEME).toString();
        } catch (UnsupportedEncodingException e) {
            System.err.println("URL encoding error. Move this to use log.error ASAP");
        }

        graphMLContent.append("<node id=\"" + node.getNodeID() + "\">\n");
        graphMLContent.append("\t<data key=\"url\">" + node.getNodeURI() + "</data>\n");
        graphMLContent.append("\t<data key=\"label\">" + node.getNodeName() + "</data>\n");

        if (profileURL != null) {
            graphMLContent.append("\t<data key=\"profile_url\">" + profileURL + "</data>\n");
        }

        graphMLContent.append("\t<data key=\"number_of_authored_works\">"
                + node.getNumOfAuthoredWorks()
                + "</data>\n");

        if (node.getEarliestPublicationYearCount() != null) {
            /*
             * There is no clean way of getting the map contents in java even though
             * we are sure to have only one entry on the map. So using the for loop.
             * I am feeling dirty just about now.
             * */
            for (Map.Entry<String, Integer> publicationInfo
                    : node.getEarliestPublicationYearCount().entrySet()) {

                graphMLContent.append("\t<data key=\"earliest_publication\">"
                        + publicationInfo.getKey()
                        + "</data>\n");

                graphMLContent.append("\t<data key=\"num_earliest_publication\">"
                        + publicationInfo.getValue()
                        + "</data>\n");
            }
        }

        if (node.getLatestPublicationYearCount() != null) {
            for (Map.Entry<String, Integer> publicationInfo
                    : node.getLatestPublicationYearCount().entrySet()) {

                graphMLContent.append("\t<data key=\"latest_publication\">"
                        + publicationInfo.getKey()
                        + "</data>\n");

                graphMLContent.append("\t<data key=\"num_latest_publication\">"
                        + publicationInfo.getValue()
                        + "</data>\n");
            }
        }

        if (node.getUnknownPublicationYearCount() != null) {
            graphMLContent.append("\t<data key=\"num_unknown_publication\">"
                    + node.getUnknownPublicationYearCount()
                    + "</data>\n");
        }

        graphMLContent.append("</node>\n");
    }

    private void generateKeyDefinitionContent(CoAuthorshipData visVOContainer,
            StringBuilder graphMLContent) {

        /*
         * Generate the key definition content for node.
         * */
        getKeyDefinitionFromSchema(visVOContainer.getNodeSchema(), graphMLContent);

        /*
         * Generate the key definition content for edge.
         * */
        getKeyDefinitionFromSchema(visVOContainer.getEdgeSchema(), graphMLContent);
    }

    private void getKeyDefinitionFromSchema(Set<Map<String, String>> schema,
            StringBuilder graphMLContent) {

        for (Map<String, String> currentNodeSchemaAttribute : schema) {

            graphMLContent.append("\n<key ");

            for (Map.Entry<String, String> currentAttributeKey
                    : currentNodeSchemaAttribute.entrySet()) {

                graphMLContent.append(currentAttributeKey.getKey()
                        + "=\"" + currentAttributeKey.getValue()
                        + "\" ");
            }

            if (currentNodeSchemaAttribute.containsKey("default")) {
                graphMLContent.append(">\n");
                graphMLContent.append("<default>");
                graphMLContent.append(currentNodeSchemaAttribute.get("default"));
                graphMLContent.append("</default>\n");
                graphMLContent.append("</key>\n");
            } else {
                graphMLContent.append("/>\n");
            }
        }
    }
}
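For orientation, the fragment below sketches the kind of GraphML this writer emitted between the key definitions and the footer; the IDs, names, URIs and counts are placeholders, and the optional keys (profile_url, earliest/latest publication and collaboration data) are omitted:

<graph edgedefault="undirected">
<!-- nodes -->
<node id="1">
	<data key="url">http://vivo.example.edu/individual/n123</data>
	<data key="label">Doe, Jane</data>
	<data key="number_of_authored_works">7</data>
</node>
<node id="2">
	<data key="url">http://vivo.example.edu/individual/n456</data>
	<data key="label">Smith, John</data>
	<data key="number_of_authored_works">4</data>
</node>
<!-- edges -->
<edge id="1" source="1" target="2" >
	<data key="collaborator1">Doe, Jane</data>
	<data key="collaborator2">Smith, John</data>
	<data key="number_of_coauthored_works">3</data>
</edge>
</graph>
</graphml>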
CoAuthorshipQueryRunner.java
@@ -1,475 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.coauthorship;

import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;

import com.hp.hpl.jena.iri.IRI;
import com.hp.hpl.jena.iri.IRIFactory;
import com.hp.hpl.jena.iri.Violation;
import com.hp.hpl.jena.query.DataSource;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.RDFNode;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.BiboDocument;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoAuthorshipData;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Edge;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Node;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.UniqueIDGenerator;

/**
 * This query runner is used to execute a sparql query to get all the publications
 * for a particular individual. It will also fetch all the authors that worked
 * on that particular publication.
 *
 * @author cdtank
 */
public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {

    private static final int MAX_AUTHORS_PER_PAPER_ALLOWED = 100;

    protected static final Syntax SYNTAX = Syntax.syntaxARQ;

    private String egoURI;

    private DataSource dataSource;

    private Log log;

    private UniqueIDGenerator nodeIDGenerator;

    private UniqueIDGenerator edgeIDGenerator;

    public CoAuthorshipQueryRunner(String egoURI,
            DataSource dataSource, Log log) {

        this.egoURI = egoURI;
        this.dataSource = dataSource;
        this.log = log;

        this.nodeIDGenerator = new UniqueIDGenerator();
        this.edgeIDGenerator = new UniqueIDGenerator();
    }

    private CoAuthorshipData createQueryResult(ResultSet resultSet) {

        Set<Node> nodes = new HashSet<Node>();

        Map<String, BiboDocument> biboDocumentURLToVO = new HashMap<String, BiboDocument>();
        Map<String, Set<Node>> biboDocumentURLToCoAuthors = new HashMap<String, Set<Node>>();
        Map<String, Node> nodeURLToVO = new HashMap<String, Node>();
        Map<String, Edge> edgeUniqueIdentifierToVO = new HashMap<String, Edge>();

        Node egoNode = null;

        Set<Edge> edges = new HashSet<Edge>();

        while (resultSet.hasNext()) {
            QuerySolution solution = resultSet.nextSolution();

            /*
             * We want to create only ONE ego node.
             * */
            RDFNode egoAuthorURLNode = solution.get(QueryFieldLabels.AUTHOR_URL);
            if (nodeURLToVO.containsKey(egoAuthorURLNode.toString())) {
                egoNode = nodeURLToVO.get(egoAuthorURLNode.toString());
            } else {
                egoNode = new Node(egoAuthorURLNode.toString(), nodeIDGenerator);
                nodes.add(egoNode);
                nodeURLToVO.put(egoAuthorURLNode.toString(), egoNode);

                RDFNode authorLabelNode = solution.get(QueryFieldLabels.AUTHOR_LABEL);
                if (authorLabelNode != null) {
                    egoNode.setNodeName(authorLabelNode.toString());
                }
            }

            RDFNode documentNode = solution.get(QueryFieldLabels.DOCUMENT_URL);
            BiboDocument biboDocument;

            if (biboDocumentURLToVO.containsKey(documentNode.toString())) {
                biboDocument = biboDocumentURLToVO.get(documentNode.toString());
            } else {
                biboDocument = createDocumentVO(solution, documentNode.toString());
                biboDocumentURLToVO.put(documentNode.toString(), biboDocument);
            }

            egoNode.addAuthorDocument(biboDocument);

            /*
             * After some discussion we concluded that for the purpose of this visualization
             * we do not want a co-author node or edge if the publication has only one
             * author and that happens to be the ego.
             * */
            if (solution.get(QueryFieldLabels.AUTHOR_URL).toString().equalsIgnoreCase(
                    solution.get(QueryFieldLabels.CO_AUTHOR_URL).toString())) {
                continue;
            }

            Node coAuthorNode;

            RDFNode coAuthorURLNode = solution.get(QueryFieldLabels.CO_AUTHOR_URL);
            if (nodeURLToVO.containsKey(coAuthorURLNode.toString())) {
                coAuthorNode = nodeURLToVO.get(coAuthorURLNode.toString());
            } else {
                coAuthorNode = new Node(coAuthorURLNode.toString(), nodeIDGenerator);
                nodes.add(coAuthorNode);
                nodeURLToVO.put(coAuthorURLNode.toString(), coAuthorNode);

                RDFNode coAuthorLabelNode = solution.get(QueryFieldLabels.CO_AUTHOR_LABEL);
                if (coAuthorLabelNode != null) {
                    coAuthorNode.setNodeName(coAuthorLabelNode.toString());
                }
            }

            coAuthorNode.addAuthorDocument(biboDocument);

            Set<Node> coAuthorsForCurrentBiboDocument;

            if (biboDocumentURLToCoAuthors.containsKey(biboDocument.getDocumentURL())) {
                coAuthorsForCurrentBiboDocument = biboDocumentURLToCoAuthors
                        .get(biboDocument.getDocumentURL());
            } else {
                coAuthorsForCurrentBiboDocument = new HashSet<Node>();
                biboDocumentURLToCoAuthors.put(biboDocument.getDocumentURL(),
                        coAuthorsForCurrentBiboDocument);
            }

            coAuthorsForCurrentBiboDocument.add(coAuthorNode);

            Edge egoCoAuthorEdge = getExistingEdge(egoNode, coAuthorNode, edgeUniqueIdentifierToVO);

            /*
             * If "egoCoAuthorEdge" is not null, an edge already exists between the egoNode
             * & the current coAuthorNode, so we just attach the collaborator document to it.
             * Else we create a new edge, add it to the edges set & add the collaborator
             * document to it.
             * */
            if (egoCoAuthorEdge != null) {
                egoCoAuthorEdge.addCollaboratorDocument(biboDocument);
            } else {
                egoCoAuthorEdge = new Edge(egoNode, coAuthorNode, biboDocument, edgeIDGenerator);
                edges.add(egoCoAuthorEdge);
                edgeUniqueIdentifierToVO.put(
                        getEdgeUniqueIdentifier(egoNode.getNodeID(),
                                coAuthorNode.getNodeID()),
                        egoCoAuthorEdge);
            }
        }

        /*
         * This method takes out all the authors & edges between authors that belong to documents
         * that have more than 100 authors. We conjecture that these papers do not provide much
         * insight. However, we have left the documents be.
         *
         * This method side-effects "nodes" & "edges".
         * */
        removeLowQualityNodesAndEdges(nodes,
                biboDocumentURLToVO,
                biboDocumentURLToCoAuthors,
                edges);

        /*
         * We need to create edges between 2 co-authors. E.g. if a paper has 3 authors -
         * ego, A & B - then we have already created the edges
         * ego - A
         * ego - B
         * The sub-routine below takes care of
         * A - B
         *
         * We are side-effecting "edges" here because we are adding edges en masse for all
         * the co-authors on all the publications considered so far, and because we don't
         * want to compare against 2 sets of edges (edges created before & co-author edges
         * created during the course of this method) when we are creating a new edge.
         * */
        createCoAuthorEdges(biboDocumentURLToVO,
                biboDocumentURLToCoAuthors,
                edges,
                edgeUniqueIdentifierToVO);

        return new CoAuthorshipData(egoNode, nodes, edges);
    }

    private void removeLowQualityNodesAndEdges(Set<Node> nodes,
            Map<String, BiboDocument> biboDocumentURLToVO,
            Map<String, Set<Node>> biboDocumentURLToCoAuthors,
            Set<Edge> edges) {

        Set<Node> nodesToBeRemoved = new HashSet<Node>();
        for (Map.Entry<String, Set<Node>> currentBiboDocumentEntry
                : biboDocumentURLToCoAuthors.entrySet()) {

            if (currentBiboDocumentEntry.getValue().size() > MAX_AUTHORS_PER_PAPER_ALLOWED) {

                BiboDocument currentBiboDocument = biboDocumentURLToVO
                        .get(currentBiboDocumentEntry.getKey());

                Set<Edge> edgesToBeRemoved = new HashSet<Edge>();

                for (Edge currentEdge : edges) {
                    Set<BiboDocument> currentCollaboratorDocuments =
                            currentEdge.getCollaboratorDocuments();

                    if (currentCollaboratorDocuments.contains(currentBiboDocument)) {
                        currentCollaboratorDocuments.remove(currentBiboDocument);
                        if (currentCollaboratorDocuments.isEmpty()) {
                            edgesToBeRemoved.add(currentEdge);
                        }
                    }
                }

                edges.removeAll(edgesToBeRemoved);

                for (Node currentCoAuthor : currentBiboDocumentEntry.getValue()) {
                    currentCoAuthor.getAuthorDocuments().remove(currentBiboDocument);
                    if (currentCoAuthor.getAuthorDocuments().isEmpty()) {
                        nodesToBeRemoved.add(currentCoAuthor);
                    }
                }
            }
        }
        nodes.removeAll(nodesToBeRemoved);
    }

    private void createCoAuthorEdges(
            Map<String, BiboDocument> biboDocumentURLToVO,
            Map<String, Set<Node>> biboDocumentURLToCoAuthors, Set<Edge> edges,
            Map<String, Edge> edgeUniqueIdentifierToVO) {

        for (Map.Entry<String, Set<Node>> currentBiboDocumentEntry
                : biboDocumentURLToCoAuthors.entrySet()) {

            /*
             * If there was only one co-author (other than ego) then we don't have to create
             * any edges, so the condition below takes care of that.
             *
             * We also restrict edges between co-authors if a particular document has more than
             * 100 co-authors. Our conjecture is that such edges do not provide any good insight
             * & cause unnecessary computation, causing the server to time out.
             * */
            if (currentBiboDocumentEntry.getValue().size() > 1
                    && currentBiboDocumentEntry.getValue().size()
                            <= MAX_AUTHORS_PER_PAPER_ALLOWED) {

                Set<Edge> newlyAddedEdges = new HashSet<Edge>();

                /*
                 * In order to leverage the nested "for loop" for making edges between all the
                 * co-authors we need to create a list out of the set first.
                 * */
                List<Node> coAuthorNodes = new ArrayList<Node>(currentBiboDocumentEntry.getValue());
                Collections.sort(coAuthorNodes, new NodeComparator());

                int numOfCoAuthors = coAuthorNodes.size();

                for (int ii = 0; ii < numOfCoAuthors - 1; ii++) {
                    for (int jj = ii + 1; jj < numOfCoAuthors; jj++) {

                        Node coAuthor1 = coAuthorNodes.get(ii);
                        Node coAuthor2 = coAuthorNodes.get(jj);

                        Edge coAuthor1_2Edge = getExistingEdge(coAuthor1,
                                coAuthor2,
                                edgeUniqueIdentifierToVO);

                        BiboDocument currentBiboDocument = biboDocumentURLToVO
                                .get(currentBiboDocumentEntry.getKey());

                        if (coAuthor1_2Edge != null) {
                            coAuthor1_2Edge.addCollaboratorDocument(currentBiboDocument);
                        } else {
                            coAuthor1_2Edge = new Edge(coAuthor1,
                                    coAuthor2,
                                    currentBiboDocument,
                                    edgeIDGenerator);
                            newlyAddedEdges.add(coAuthor1_2Edge);
                            edgeUniqueIdentifierToVO.put(
                                    getEdgeUniqueIdentifier(coAuthor1.getNodeID(),
                                            coAuthor2.getNodeID()),
                                    coAuthor1_2Edge);
                        }
                    }
                }
                edges.addAll(newlyAddedEdges);
            }
        }
    }

    private Edge getExistingEdge(
            Node collaboratingNode1,
            Node collaboratingNode2,
            Map<String, Edge> edgeUniqueIdentifierToVO) {

        String edgeUniqueIdentifier = getEdgeUniqueIdentifier(collaboratingNode1.getNodeID(),
                collaboratingNode2.getNodeID());

        return edgeUniqueIdentifierToVO.get(edgeUniqueIdentifier);
    }

    private String getEdgeUniqueIdentifier(int nodeID1, int nodeID2) {

        String separator = "*";

        if (nodeID1 < nodeID2) {
            return nodeID1 + separator + nodeID2;
        } else {
            return nodeID2 + separator + nodeID1;
        }
    }

//  public Map<String, VivoCollegeOrSchool> getCollegeURLToVO() {
//      return collegeURLToVO;
//  }

    private BiboDocument createDocumentVO(QuerySolution solution, String documentURL) {

        BiboDocument biboDocument = new BiboDocument(documentURL);

        RDFNode documentLabelNode = solution.get(QueryFieldLabels.DOCUMENT_LABEL);
        if (documentLabelNode != null) {
            biboDocument.setDocumentLabel(documentLabelNode.toString());
        }

        RDFNode documentBlurbNode = solution.get(QueryFieldLabels.DOCUMENT_BLURB);
        if (documentBlurbNode != null) {
            biboDocument.setDocumentBlurb(documentBlurbNode.toString());
        }

        RDFNode documentMonikerNode = solution.get(QueryFieldLabels.DOCUMENT_MONIKER);
        if (documentMonikerNode != null) {
            biboDocument.setDocumentMoniker(documentMonikerNode.toString());
        }

        RDFNode publicationYearNode = solution.get(QueryFieldLabels.DOCUMENT_PUBLICATION_YEAR);
        if (publicationYearNode != null) {
            biboDocument.setPublicationYear(publicationYearNode.toString());
        }

        RDFNode publicationYearMonthNode = solution.get(QueryFieldLabels
                .DOCUMENT_PUBLICATION_YEAR_MONTH);
        if (publicationYearMonthNode != null) {
            biboDocument.setPublicationYearMonth(publicationYearMonthNode.toString());
        }

        RDFNode publicationDateNode = solution.get(QueryFieldLabels.DOCUMENT_PUBLICATION_DATE);
        if (publicationDateNode != null) {
            biboDocument.setPublicationDate(publicationDateNode.toString());
        }

        return biboDocument;
    }

    private ResultSet executeQuery(String queryText,
            DataSource dataSource) {

        QueryExecution queryExecution = null;
        Query query = QueryFactory.create(queryText, SYNTAX);

        queryExecution = QueryExecutionFactory.create(query, dataSource);
        return queryExecution.execSelect();
    }

    private String generateEgoCoAuthorshipSparqlQuery(String queryURI) {
//      Resource uri1 = ResourceFactory.createResource(queryURI);

        String sparqlQuery = QueryConstants.getSparqlPrefixQuery()
                + "SELECT "
                + " (str(<" + queryURI + ">) as ?" + QueryFieldLabels.AUTHOR_URL + ") "
                + " (str(?authorLabel) as ?" + QueryFieldLabels.AUTHOR_LABEL + ") "
                + " (str(?coAuthorPerson) as ?" + QueryFieldLabels.CO_AUTHOR_URL + ") "
                + " (str(?coAuthorPersonLabel) as ?" + QueryFieldLabels.CO_AUTHOR_LABEL + ") "
                + " (str(?document) as ?" + QueryFieldLabels.DOCUMENT_URL + ") "
                + " (str(?documentLabel) as ?" + QueryFieldLabels.DOCUMENT_LABEL + ") "
                + " (str(?documentMoniker) as ?" + QueryFieldLabels.DOCUMENT_MONIKER + ") "
                + " (str(?documentBlurb) as ?" + QueryFieldLabels.DOCUMENT_BLURB + ") "
                + " (str(?publicationYear) as ?" + QueryFieldLabels.DOCUMENT_PUBLICATION_YEAR + ") "
                + " (str(?publicationYearMonth) as ?"
                        + QueryFieldLabels.DOCUMENT_PUBLICATION_YEAR_MONTH + ") "
                + " (str(?publicationDate) as ?"
                        + QueryFieldLabels.DOCUMENT_PUBLICATION_DATE + ") "
                + "WHERE { "
                + "<" + queryURI + "> rdf:type foaf:Person ;"
                + " rdfs:label ?authorLabel ;"
                + " core:authorInAuthorship ?authorshipNode . "
                + "?authorshipNode rdf:type core:Authorship ;"
                + " core:linkedInformationResource ?document . "
                + "?document rdfs:label ?documentLabel . "
                + "?document core:informationResourceInAuthorship ?coAuthorshipNode . "
                + "?coAuthorshipNode core:linkedAuthor ?coAuthorPerson . "
                + "?coAuthorPerson rdfs:label ?coAuthorPersonLabel . "
                + "OPTIONAL { ?document core:year ?publicationYear } . "
                + "OPTIONAL { ?document core:yearMonth ?publicationYearMonth } . "
                + "OPTIONAL { ?document core:date ?publicationDate } . "
                + "OPTIONAL { ?document vitro:moniker ?documentMoniker } . "
                + "OPTIONAL { ?document vitro:blurb ?documentBlurb } . "
                + "OPTIONAL { ?document vitro:description ?documentDescription } "
                + "} "
                + "ORDER BY ?document ?coAuthorPerson";

        log.debug("COAUTHORSHIP QUERY - " + sparqlQuery);

        return sparqlQuery;
    }

    public CoAuthorshipData getQueryResult()
            throws MalformedQueryParametersException {

        if (StringUtils.isNotBlank(this.egoURI)) {
            /*
             * To test for the validity of the URI submitted.
             * */
            IRIFactory iRIFactory = IRIFactory.jenaImplementation();
            IRI iri = iRIFactory.create(this.egoURI);
            if (iri.hasViolation(false)) {
                String errorMsg = ((Violation) iri.violations(false).next()).getShortMessage();
                log.error("Ego Co-Authorship Vis Query " + errorMsg);
                throw new MalformedQueryParametersException(
                        "URI provided for an individual is malformed.");
            }
        } else {
            throw new MalformedQueryParametersException("URI parameter is either null or empty.");
        }

        ResultSet resultSet = executeQuery(generateEgoCoAuthorshipSparqlQuery(this.egoURI),
                this.dataSource);
        return createQueryResult(resultSet);
    }

}
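A minimal usage sketch for this runner, mirroring the call made by CoAuthorshipRequestHandler below; egoURI, dataSource and log are assumed to be supplied by the calling servlet:

QueryRunner<CoAuthorshipData> queryManager =
        new CoAuthorshipQueryRunner(egoURI, dataSource, log);

// Throws MalformedQueryParametersException for a blank or malformed ego URI.
CoAuthorshipData authorNodesAndEdges = queryManager.getQueryResult();

Set<Node> nodes = authorNodesAndEdges.getNodes(); // ego node plus all co-author nodes
Set<Edge> edges = authorNodesAndEdges.getEdges(); // ego-co-author and co-author-co-author edges

Note that the edge lookup key built by getEdgeUniqueIdentifier() is order-independent: getEdgeUniqueIdentifier(7, 3) and getEdgeUniqueIdentifier(3, 7) both return "3*7", which is what prevents the same collaboration from being recorded as two separate edges.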
CoAuthorshipRequestHandler.java
@@ -1,278 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.coauthorship;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.Map.Entry;

import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;

import com.hp.hpl.jena.query.DataSource;

import edu.cornell.mannlib.vitro.webapp.beans.Portal;
import edu.cornell.mannlib.vitro.webapp.controller.Controllers;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoAuthorshipData;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Node;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.UtilityFunctions;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.VisualizationRequestHandler;

/**
 * This request handler is used when information related to the co-authorship network
 * for an individual is requested. It currently provides 2 outputs,
 * 1. GraphML content representing the individual's co-authorship network
 * 2. CSV file containing the list (& count) of unique co-authors with whom
 * the individual has worked over the years. This data powers the related sparkline.
 *
 * @author cdtank
 */
public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {

    public void generateVisualization(VitroRequest vitroRequest,
            HttpServletRequest request,
            HttpServletResponse response,
            Log log,
            DataSource dataSource) {

        String egoURI = vitroRequest.getParameter(
                VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY);

        String renderMode = vitroRequest.getParameter(
                VisualizationFrameworkConstants.RENDER_MODE_KEY);

        String visMode = vitroRequest.getParameter(
                VisualizationFrameworkConstants.VIS_MODE_KEY);

        QueryRunner<CoAuthorshipData> queryManager =
                new CoAuthorshipQueryRunner(egoURI, dataSource, log);

        try {
            CoAuthorshipData authorNodesAndEdges =
                    queryManager.getQueryResult();

            if (VisualizationFrameworkConstants.DATA_RENDER_MODE
                    .equalsIgnoreCase(renderMode)) {

                /*
                 * We will be using the same visualization package for both sparkline & coauthorship
                 * flash vis. We will use "VIS_MODE_KEY" as a modifier to differentiate
                 * between these two. The default will be to render the coauthorship network vis.
                 * */
                if (VisualizationFrameworkConstants.SPARKLINE_VIS_MODE
                        .equalsIgnoreCase(visMode)) {
                    /*
                     * When the csv file is required - based on which sparkline visualization will
                     * be rendered.
                     * */
                    prepareSparklineDataResponse(authorNodesAndEdges,
                            response);
                    return;

                } else {
                    /*
                     * When the graphML file is required - based on which coauthorship network
                     * visualization will be rendered.
                     * */
                    prepareNetworkDataResponse(authorNodesAndEdges, response);
                    return;
                }
            } else {

                RequestDispatcher requestDispatcher = null;

                prepareStandaloneResponse(
                        egoURI,
                        authorNodesAndEdges,
                        vitroRequest,
                        request);

                requestDispatcher = request.getRequestDispatcher(Controllers.BASIC_JSP);

                try {
                    requestDispatcher.forward(request, response);
                } catch (Exception e) {
                    log.error("EntityEditController could not forward to view.");
                    log.error(e.getMessage());
                    log.error(e.getStackTrace());
                }
            }

        } catch (MalformedQueryParametersException e) {
            try {
                UtilityFunctions.handleMalformedParameters(
                        e.getMessage(),
                        "Visualization Query Error - Co-authorship Network",
                        vitroRequest,
                        request,
                        response,
                        log);
            } catch (ServletException e1) {
                log.error(e1.getStackTrace());
            } catch (IOException e1) {
                log.error(e1.getStackTrace());
            }
            return;
        }
    }

    private void writeCoauthorsPerYearCSV(Map<String, Set<Node>> yearToCoauthors,
            PrintWriter printWriter) {

//      printWriter.append("\"Year\", \"Count\", \"Co-Author(s)\"\n");
        printWriter.append("Year, Count, Co-Author(s)\n");

        for (Entry<String, Set<Node>> currentEntry : yearToCoauthors.entrySet()) {
            printWriter.append("\"" + currentEntry.getKey() + "\","
                    + "\"" + currentEntry.getValue().size() + "\","
                    + "\"" + getCoauthorNamesAsString(currentEntry.getValue())
                    + "\"\n");
        }

        printWriter.flush();
    }

    private String getCoauthorNamesAsString(Set<Node> coAuthors) {

        StringBuilder coAuthorsMerged = new StringBuilder();

        String coAuthorSeparator = "; ";
        for (Node currCoAuthor : coAuthors) {
            coAuthorsMerged.append(currCoAuthor.getNodeName() + coAuthorSeparator);
        }

        return StringUtils.removeEnd(coAuthorsMerged.toString(), coAuthorSeparator);
    }

    /**
     * Provides response when a csv file containing number & names of unique co-authors per
     * year is requested.
     * @param authorNodesAndEdges
     * @param response
     */
    private void prepareSparklineDataResponse(CoAuthorshipData authorNodesAndEdges,
            HttpServletResponse response) {

        String outputFileName;
        Map<String, Set<Node>> yearToCoauthors = new TreeMap<String, Set<Node>>();

        if (authorNodesAndEdges.getNodes() != null && authorNodesAndEdges.getNodes().size() > 0) {

            outputFileName = UtilityFunctions.slugify(authorNodesAndEdges
                    .getEgoNode().getNodeName())
                    + "_coauthors-per-year" + ".csv";

            yearToCoauthors = UtilityFunctions.getPublicationYearToCoAuthors(authorNodesAndEdges);

        } else {
            outputFileName = "no_coauthors-per-year" + ".csv";
        }

        response.setContentType("application/octet-stream");
        response.setHeader("Content-Disposition",
                "attachment;filename=" + outputFileName);

        try {
            PrintWriter responseWriter = response.getWriter();

            /*
             * We are side-effecting responseWriter since we are directly manipulating the response
             * object of the servlet.
             * */
            writeCoauthorsPerYearCSV(yearToCoauthors, responseWriter);

            responseWriter.close();

        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Provides a response when graphml formatted co-authorship network is requested, typically by
     * the flash vis.
     * @param authorNodesAndEdges
     * @param response
     */
    private void prepareNetworkDataResponse(
            CoAuthorshipData authorNodesAndEdges, HttpServletResponse response) {

        response.setContentType("text/xml");

        try {
            PrintWriter responseWriter = response.getWriter();

            /*
             * We are side-effecting responseWriter since we are directly manipulating the response
             * object of the servlet.
             * */
            CoAuthorshipGraphMLWriter coAuthorshipGraphMLWriter =
                    new CoAuthorshipGraphMLWriter(authorNodesAndEdges);

            responseWriter.append(coAuthorshipGraphMLWriter.getCoAuthorshipGraphMLContent());

            responseWriter.close();

        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * When the page for person level visualization is requested.
     * @param egoURI
     * @param coAuthorshipVO
     * @param vitroRequest
     * @param request
     */
    private void prepareStandaloneResponse(
            String egoURI,
            CoAuthorshipData coAuthorshipVO,
            VitroRequest vitroRequest,
            HttpServletRequest request) {

        Portal portal = vitroRequest.getPortal();

        request.setAttribute("egoURIParam", egoURI);

        String title = "";
        if (coAuthorshipVO.getNodes() != null && coAuthorshipVO.getNodes().size() > 0) {
            request.setAttribute("numOfAuthors", coAuthorshipVO.getNodes().size());
            title = coAuthorshipVO.getEgoNode().getNodeName() + " - ";
        }

        if (coAuthorshipVO.getEdges() != null && coAuthorshipVO.getEdges().size() > 0) {
            request.setAttribute("numOfCoAuthorShips", coAuthorshipVO.getEdges().size());
        }

        request.setAttribute("title", title + "Co-Authorship Visualization");
        request.setAttribute("portalBean", portal);
        request.setAttribute("scripts", "/templates/visualization/person_level_inject_head.jsp");
        request.setAttribute("bodyJsp", "/templates/visualization/co_authorship.jsp");
    }

}
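For reference, an illustrative sample of the CSV streamed back by prepareSparklineDataResponse; the years, counts and names are placeholders, and co-author names within a year are joined with "; ":

Year, Count, Co-Author(s)
"2007","2","Doe, Jane; Smith, John"
"2008","1","Smith, John"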
CoAuthorshipVisCodeGenerator.java
@@ -1,633 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.coauthorship;

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;
import java.util.Calendar;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.NoSuchElementException;
import java.util.Set;
import java.util.Map.Entry;

import org.apache.commons.logging.Log;

import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationController;
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VisConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Node;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.SparklineData;

@SuppressWarnings("serial")
public class CoAuthorshipVisCodeGenerator {

    /*
     * There are 2 modes of sparkline that are available via this visualization.
     * 1. Short Sparkline - This sparkline will render all the data points (or sparks),
     * which in this case are the coauthors over the years, from the last 10 years.
     *
     * 2. Full Sparkline - This sparkline will render all the data points (or sparks)
     * spanning the career of the person & the last 10 years at the minimum, in case
     * the person started their career within the last 10 years.
     * */
    private static final Map<String, String> VIS_DIV_NAMES = new HashMap<String, String>() { {

        put("SHORT_SPARK", "unique_coauthors_short_sparkline_vis");
        put("FULL_SPARK", "unique_coauthors_full_sparkline_vis");

    } };

    private static final String VISUALIZATION_STYLE_CLASS = "sparkline_style";

    private static final String DEFAULT_VISCONTAINER_DIV_ID = "unique_coauthors_vis_container";

    private Map<String, Set<Node>> yearToUniqueCoauthors;

    private Log log;

    private SparklineData sparklineData;

    private String contextPath;

    private String individualURI;

    public CoAuthorshipVisCodeGenerator(String contextPath,
            String individualURI,
            String visMode,
            String visContainer,
            Map<String, Set<Node>> yearToUniqueCoauthors,
            Log log) {

        this.contextPath = contextPath;
        this.individualURI = individualURI;

        this.yearToUniqueCoauthors = yearToUniqueCoauthors;
        this.sparklineData = new SparklineData();

        this.log = log;

        generateVisualizationCode(visMode, visContainer);
    }

    /**
     * This method is used to generate the visualization code (HTML, CSS & JavaScript).
     * There are 2 parts to it - 1. Actual Content Code & 2. Context Code.
     * 1. Actual Content Code in this case is the sparkline image, text related to
     * the data and the wrapping tables. This is generated via a call to the Google
     * Visualization API through JavaScript.
     * 2. Context Code is generally optional but contains code pertaining to tabulated
     * data & links to download files etc.
     * @param visMode
     * @param visContainer
     */
    private void generateVisualizationCode(String visMode,
            String visContainer) {

        sparklineData.setSparklineContent(getMainVisualizationCode(visMode,
                visContainer));

        sparklineData.setSparklineContext(getVisualizationContextCode(visMode));
    }

    private String getMainVisualizationCode(String visMode,
            String providedVisContainerID) {

        int numOfYearsToBeRendered = 0;
        int currentYear = Calendar.getInstance().get(Calendar.YEAR);
        int shortSparkMinYear = currentYear
                - VisConstants.MINIMUM_YEARS_CONSIDERED_FOR_SPARKLINE
                + 1;

        /*
         * This is required because, when deciding the range of years over which the vis
         * is rendered, we don't want to be influenced by the "DEFAULT_PUBLICATION_YEAR".
         * */
        Set<String> publishedYears = new HashSet<String>(yearToUniqueCoauthors.keySet());
        publishedYears.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);

        /*
         * We are setting the default value of minPublishedYear to be 10 years before
         * the current year (which is suitably represented by the shortSparkMinYear),
         * in case we run into an invalid set of published years.
         * */
        int minPublishedYear = shortSparkMinYear;

        String visContainerID = null;

        StringBuilder visualizationCode = new StringBuilder();

        if (yearToUniqueCoauthors.size() > 0) {
            try {
                minPublishedYear = Integer.parseInt(Collections.min(publishedYears));
            } catch (NoSuchElementException e1) {
                log.debug("vis: " + e1.getMessage() + " error occurred for "
                        + yearToUniqueCoauthors.toString());
            } catch (NumberFormatException e2) {
                log.debug("vis: " + e2.getMessage() + " error occurred for "
                        + yearToUniqueCoauthors.toString());
            }
        }

        int minPubYearConsidered = 0;

        /*
         * There might be a case that the author has made his first publication within the
         * last 10 years but we want to make sure that the sparkline is representative of
         * at least the last 10 years, so we will set the minPubYearConsidered to
         * "currentYear - 10" which is also given by "shortSparkMinYear".
         * */
        if (minPublishedYear > shortSparkMinYear) {
            minPubYearConsidered = shortSparkMinYear;
        } else {
            minPubYearConsidered = minPublishedYear;
        }

        numOfYearsToBeRendered = currentYear - minPubYearConsidered + 1;

        visualizationCode.append("<style type='text/css'>"
                + "." + VISUALIZATION_STYLE_CLASS + " table{"
                + " margin: 0;"
                + " padding: 0;"
                + " width: auto;"
                + " border-collapse: collapse;"
                + " border-spacing: 0;"
                + " vertical-align: inherit;"
                + "}"
                + ".incomplete-data-holder {"
                + ""
                + "}"
                + "td.sparkline_number { text-align:right; "
                + "padding-right:5px; }"
                + "td.sparkline_text {text-align:left;}"
                + "</style>\n");

        visualizationCode.append("<script type=\"text/javascript\">\n"
                + "function drawUniqueCoauthorCountVisualization(providedSparklineImgTD) {\n"
                + "var data = new google.visualization.DataTable();\n"
                + "data.addColumn('string', 'Year');\n"
                + "data.addColumn('number', 'Unique co-authors');\n"
                + "data.addRows(" + numOfYearsToBeRendered + ");\n");

        int uniqueCoAuthorCounter = 0;
        int renderedFullSparks = 0;
        Set<Node> allCoAuthorsWithKnownAuthorshipYears = new HashSet<Node>();

        for (int publicationYear = minPubYearConsidered;
                publicationYear <= currentYear;
                publicationYear++) {

            String publicationYearAsString = String.valueOf(publicationYear);
            Set<Node> currentCoAuthors = yearToUniqueCoauthors.get(publicationYearAsString);

            Integer currentUniqueCoAuthors = null;

            if (currentCoAuthors != null) {
                currentUniqueCoAuthors = currentCoAuthors.size();
                allCoAuthorsWithKnownAuthorshipYears.addAll(currentCoAuthors);
            } else {
                currentUniqueCoAuthors = 0;
            }

            visualizationCode.append("data.setValue("
                    + uniqueCoAuthorCounter
                    + ", 0, '"
                    + publicationYearAsString
                    + "');\n");

            visualizationCode.append("data.setValue("
                    + uniqueCoAuthorCounter
                    + ", 1, "
                    + currentUniqueCoAuthors
                    + ");\n");
            uniqueCoAuthorCounter++;
        }

        /*
         * For the purpose of this visualization I have come up with the term "Sparks", which
         * essentially means data points. Sparks that will be rendered in full mode will always
         * be the ones which have a year associated with them.
         * */
        renderedFullSparks = allCoAuthorsWithKnownAuthorshipYears.size();

        /*
         * The total count will also include co-authors from publications that have no year
         * associated with them.
         * */
        Integer unknownYearCoauthors = 0;
        if (yearToUniqueCoauthors.get(VOConstants.DEFAULT_PUBLICATION_YEAR) != null) {
            unknownYearCoauthors = yearToUniqueCoauthors
                    .get(VOConstants.DEFAULT_PUBLICATION_YEAR).size();
        }

        String sparklineDisplayOptions = "{width: 150, height: 30, showAxisLines: false, "
                + "showValueLabels: false, labelPosition: 'none'}";

        if (providedVisContainerID != null) {
            visContainerID = providedVisContainerID;
        } else {
            visContainerID = DEFAULT_VISCONTAINER_DIV_ID;
        }

        /*
         * By default these represent the range of the rendered sparks. Only in the case of the
         * "short" sparkline mode do we set the earliest rendered publication year to
         * "currentYear - 10".
         * */
        sparklineData.setEarliestRenderedPublicationYear(minPublishedYear);
        sparklineData.setLatestRenderedPublicationYear(currentYear);

        /*
         * The Full Sparkline will be rendered by default. We render the short sparkline only if
         * the url specifically mentions SHORT_SPARKLINE_MODE_KEY.
         * */

        /*
         * Since building the StringBuilder object that stores the vis code is essentially a
         * side-effecting process, both generator methods below are side-effecting as well.
         * They both side-effect "visualizationCode".
         * */
        if (VisualizationFrameworkConstants.SHORT_SPARKLINE_VIS_MODE.equalsIgnoreCase(visMode)) {

            sparklineData.setEarliestRenderedPublicationYear(shortSparkMinYear);
            generateShortSparklineVisualizationContent(currentYear,
                    shortSparkMinYear,
                    visContainerID,
                    visualizationCode,
                    unknownYearCoauthors,
                    sparklineDisplayOptions);
        } else {
            generateFullSparklineVisualizationContent(currentYear,
                    minPubYearConsidered,
                    visContainerID,
                    visualizationCode,
                    unknownYearCoauthors,
                    renderedFullSparks,
                    sparklineDisplayOptions);
        }

        log.debug(visualizationCode);

        return visualizationCode.toString();
    }

    private void generateShortSparklineVisualizationContent(int currentYear,
            int shortSparkMinYear,
            String visContainerID,
            StringBuilder visualizationCode,
            int unknownYearCoauthors,
            String sparklineDisplayOptions) {

        /*
         * Create a view of the data containing only the column pertaining to publication count.
         * */
        visualizationCode.append("var shortSparklineView = "
                + "new google.visualization.DataView(data);\n"
                + "shortSparklineView.setColumns([1]);\n");

        /*
         * For the short view we only want the last 10 years' worth of publication counts,
         * hence we filter the data down to what we actually want to use for rendering.
         * */
        visualizationCode.append("shortSparklineView.setRows("
                + "data.getFilteredRows([{column: 0, "
                + "minValue: '" + shortSparkMinYear + "', "
                + "maxValue: '" + currentYear + "'}])"
                + ");\n");

        /*
         * Create the vis object and draw it in the div pertaining to short-sparkline.
         * */
        visualizationCode.append("var short_spark = new google.visualization.ImageSparkLine("
                + "providedSparklineImgTD[0]"
                + ");\n"
                + "short_spark.draw(shortSparklineView, "
                + sparklineDisplayOptions + ");\n");

        /*
         * We want to display how many publication counts were considered, so the following
         * is used to calculate that number.
         * */
        visualizationCode.append("var shortSparkRows = shortSparklineView.getViewRows();\n"
                + "var renderedShortSparks = 0;\n"
                + "$.each(shortSparkRows, function(index, value) {"
                + "renderedShortSparks += data.getValue(value, 1);"
                + "});\n");

        /*
         * Generate the text introducing the vis.
         * */
        String imcompleteDataText = "This information is based solely on publications which "
                + "have been loaded into the VIVO system. "
                + "This may only be a small sample of the person\\'s "
                + "total work.";

        visualizationCode.append("$('#" + VIS_DIV_NAMES.get("SHORT_SPARK")
                + " td.sparkline_number')" + ".text("
                + "parseInt(renderedShortSparks) + "
                + "parseInt(" + unknownYearCoauthors + "));");

        visualizationCode.append("var shortSparksText = ''"
                + "+ ' co-author(s) within the last 10 years '"
                + "<span class=\"incomplete-data-holder\" title=\""
                + imcompleteDataText + "\">incomplete data</span>'"
                + "+ '';"
                + "$('#" + VIS_DIV_NAMES.get("SHORT_SPARK")
                + " td.sparkline_text').html(shortSparksText);");

        visualizationCode.append("}\n ");

        /*
         * Generate the code that will activate the visualization. It takes care of creating div
         * elements to hold the actual sparkline image and then calling the
         * drawUniqueCoauthorCountVisualization function.
         * */
        visualizationCode.append(generateVisualizationActivator(VIS_DIV_NAMES.get("SHORT_SPARK"),
                visContainerID));
    }

    private void generateFullSparklineVisualizationContent(int currentYear,
            int minPubYearConsidered,
            String visContainerID,
            StringBuilder visualizationCode,
            int unknownYearCoauthors,
            int renderedFullSparks,
            String sparklineDisplayOptions) {

        String csvDownloadURLHref = "";

        try {
            if (getCSVDownloadURL() != null) {
                csvDownloadURLHref = "<a href=\"" + getCSVDownloadURL()
                        + "\" class=\"inline_href\">(.CSV File)</a>";
            } else {
                csvDownloadURLHref = "";
            }

        } catch (UnsupportedEncodingException e) {
            csvDownloadURLHref = "";
        }

        visualizationCode.append("var fullSparklineView = "
                + "new google.visualization.DataView(data);\n"
                + "fullSparklineView.setColumns([1]);\n");

        visualizationCode.append("var full_spark = new google.visualization.ImageSparkLine("
                + "providedSparklineImgTD[0]"
                + ");\n"
                + "full_spark.draw(fullSparklineView, "
                + sparklineDisplayOptions + ");\n");

        visualizationCode.append("$('#" + VIS_DIV_NAMES.get("FULL_SPARK")
                + " td.sparkline_number')"
                + ".text('" + (renderedFullSparks
                        + unknownYearCoauthors) + "').css('font-weight', 'bold');");

        visualizationCode.append("var allSparksText = ''"
                + "+ ' <h3>co-author(s)</h3> '"
                + "+ ' <span class=\"sparkline_range\">"
                + "from " + minPubYearConsidered + " to " + currentYear + ""
                + "</span> '"
                + "+ ' " + csvDownloadURLHref + " ';"
                + "$('#" + VIS_DIV_NAMES.get("FULL_SPARK")
                + " td.sparkline_text').html(allSparksText);");

        visualizationCode.append("}\n ");

        visualizationCode.append(generateVisualizationActivator(VIS_DIV_NAMES.get("FULL_SPARK"),
                visContainerID));
    }

    private String generateVisualizationActivator(String sparklineID, String visContainerID) {

        String sparklineTableWrapper = "\n"
                + "var table = $('<table>');"
                + "table.attr('class', 'sparkline_wrapper_table');"
                + "var row = $('<tr>');"
                + "sparklineImgTD = $('<td>');"
                + "sparklineImgTD.attr('id', '" + sparklineID + "_img');"
                + "sparklineImgTD.attr('width', '65');"
//              + "sparklineImgTD.attr('align', 'right');"
                + "sparklineImgTD.attr('class', '" + VISUALIZATION_STYLE_CLASS + "');"
                + "row.append(sparklineImgTD);"
                + "var row2 = $('<tr>');"
                + "var sparklineNumberTD = $('<td>');"
//              + "sparklineNumberTD.attr('width', '30');"
//              + "sparklineNumberTD.attr('align', 'right');"
                + "sparklineNumberTD.attr('class', 'sparkline_number');"
                + "sparklineNumberTD.css('text-align', 'center');"
                + "row2.append(sparklineNumberTD);"
                + "var row3 = $('<tr>');"
                + "var sparklineTextTD = $('<td>');"
//              + "sparklineTextTD.attr('width', '450');"
                + "sparklineTextTD.attr('class', 'sparkline_text');"
                + "row3.append(sparklineTextTD);"
                + "table.append(row);"
                + "table.append(row2);"
                + "table.append(row3);"
                + "table.prependTo('#" + sparklineID + "');\n";

        return "$(document).ready(function() {"
                + "var sparklineImgTD; "
/*
* This is a nuclear option (creating the container in which everything goes):
* it is only ever used when the API user never submitted a container ID.
* The alternative was to let the vis not appear in the calling page at all,
* so now the vis at least appears, albeit appended at the bottom of the body.
* */
|
||||
+ "if ($('#" + visContainerID + "').length === 0) {"
|
||||
+ " $('<div/>', {'id': '" + visContainerID + "'"
|
||||
+ " }).appendTo('body');"
|
||||
+ "}"
|
||||
+ "if ($('#" + sparklineID + "').length === 0) {"
|
||||
+ "$('<div/>', {'id': '" + sparklineID + "',"
|
||||
+ "'class': '" + VISUALIZATION_STYLE_CLASS + "'"
|
||||
+ "}).prependTo('#" + visContainerID + "');"
|
||||
+ sparklineTableWrapper
|
||||
+ "}"
|
||||
+ "drawUniqueCoauthorCountVisualization(sparklineImgTD);"
|
||||
+ "});"
|
||||
+ "</script>\n";
|
||||
}
|
||||
|
||||
private String getVisualizationContextCode(String visMode) {
|
||||
|
||||
String visualizationContextCode = "";
|
||||
if (VisualizationFrameworkConstants.SHORT_SPARKLINE_VIS_MODE.equalsIgnoreCase(visMode)) {
|
||||
visualizationContextCode = generateShortVisContext();
|
||||
} else {
|
||||
visualizationContextCode = generateFullVisContext();
|
||||
}
|
||||
|
||||
log.debug(visualizationContextCode);
|
||||
|
||||
return visualizationContextCode;
|
||||
}
|
||||
|
||||
private String generateFullVisContext() {
|
||||
|
||||
StringBuilder divContextCode = new StringBuilder();
|
||||
|
||||
String csvDownloadURLHref = "";
|
||||
|
||||
if (yearToUniqueCoauthors.size() > 0) {
|
||||
|
||||
try {
|
||||
if (getCSVDownloadURL() != null) {
|
||||
|
||||
csvDownloadURLHref = "Download data as <a href='"
|
||||
+ getCSVDownloadURL() + "'>.csv</a> file.<br />";
|
||||
sparklineData.setDownloadDataLink(getCSVDownloadURL());
|
||||
|
||||
} else {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
} else {
|
||||
csvDownloadURLHref = "No data available to export.<br />";
|
||||
}
|
||||
|
||||
String tableCode = generateDataTable();
|
||||
|
||||
divContextCode.append("<p>" + tableCode + csvDownloadURLHref + "</p>");
|
||||
|
||||
sparklineData.setTable(tableCode);
|
||||
|
||||
return divContextCode.toString();
|
||||
}
|
||||
|
||||
private String getCSVDownloadURL() throws UnsupportedEncodingException {
|
||||
|
||||
if (yearToUniqueCoauthors.size() > 0) {
|
||||
|
||||
String secondaryContextPath = "";
|
||||
if (!contextPath.contains(VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX)) {
|
||||
secondaryContextPath = VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX;
|
||||
}
|
||||
|
||||
|
||||
String downloadURL = contextPath
|
||||
+ secondaryContextPath
|
||||
+ "?" + VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY
|
||||
+ "=" + URLEncoder.encode(individualURI,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&" + VisualizationFrameworkConstants.VIS_TYPE_KEY
|
||||
+ "=" + URLEncoder.encode(VisualizationFrameworkConstants
|
||||
.COAUTHORSHIP_VIS,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&" + VisualizationFrameworkConstants.VIS_MODE_KEY
|
||||
+ "=" + URLEncoder.encode("sparkline",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&" + VisualizationFrameworkConstants.RENDER_MODE_KEY
|
||||
+ "=" + URLEncoder.encode(VisualizationFrameworkConstants.DATA_RENDER_MODE,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString();
|
||||
|
||||
return downloadURL;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String generateShortVisContext() {
|
||||
|
||||
StringBuilder divContextCode = new StringBuilder();
|
||||
|
||||
try {
|
||||
|
||||
String fullTimelineLink;
|
||||
if (yearToUniqueCoauthors.size() > 0) {
|
||||
|
||||
String secondaryContextPath = "";
|
||||
if (!contextPath.contains(VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX)) {
|
||||
secondaryContextPath = VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX;
|
||||
}
|
||||
|
||||
String fullTimelineNetworkURL = contextPath
|
||||
+ secondaryContextPath
|
||||
+ "?"
|
||||
+ VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY
|
||||
+ "=" + URLEncoder.encode(individualURI,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_TYPE_KEY
|
||||
+ "=" + URLEncoder.encode("person_level",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_CONTAINER_KEY
|
||||
+ "=" + URLEncoder.encode("ego_sparkline",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.RENDER_MODE_KEY
|
||||
+ "=" + URLEncoder.encode(
|
||||
VisualizationFrameworkConstants
|
||||
.STANDALONE_RENDER_MODE,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString();
|
||||
|
||||
fullTimelineLink = "<a href='" + fullTimelineNetworkURL
|
||||
+ "'>View full timeline and co-author network.</a>";
|
||||
|
||||
sparklineData.setFullTimelineNetworkLink(fullTimelineNetworkURL);
|
||||
|
||||
} else {
|
||||
fullTimelineLink = "No data available to render full timeline.<br />";
|
||||
}
|
||||
|
||||
divContextCode.append("<p>" + fullTimelineLink + "</p>");
|
||||
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
log.error(e);
|
||||
}
|
||||
|
||||
return divContextCode.toString();
|
||||
}
|
||||
|
||||
|
||||
private String generateDataTable() {
|
||||
|
||||
StringBuilder dataTable = new StringBuilder();
|
||||
|
||||
dataTable.append("<table id='sparkline_data_table'>"
|
||||
+ "<caption>Unique Co-Authors per year</caption>"
|
||||
+ "<thead>"
|
||||
+ "<tr>"
|
||||
+ "<th>Year</th>"
|
||||
+ "<th>Count</th>"
|
||||
+ "</tr>"
|
||||
+ "</thead>"
|
||||
+ "<tbody>");
|
||||
|
||||
for (Entry<String, Set<Node>> currentEntry : yearToUniqueCoauthors.entrySet()) {
|
||||
dataTable.append("<tr>"
|
||||
+ "<td>" + currentEntry.getKey() + "</td>"
|
||||
+ "<td>" + currentEntry.getValue().size() + "</td>"
|
||||
+ "</tr>");
|
||||
}
|
||||
|
||||
dataTable.append("</tbody>\n </table>\n");
|
||||
|
||||
return dataTable.toString();
|
||||
}
|
||||
|
||||
public SparklineData getValueObjectContainer() {
|
||||
return sparklineData;
|
||||
}
|
||||
}
@@ -1,22 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.coauthorship;

import java.util.Comparator;

import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Edge;

/**
 * This Comparator is used to sort the edges based on their IDs in ascending order.
 * @author cdtank
 *
 */
public class EdgeComparator implements Comparator<Edge> {

    @Override
    public int compare(Edge arg0, Edge arg1) {
        return arg0.getEdgeID() - arg1.getEdgeID();
    }

}
@@ -1,21 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.coauthorship;

import java.util.Comparator;

import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Node;

/**
 * This Comparator is used to sort the nodes based on their IDs in ascending order.
 * @author cdtank
 */
public class NodeComparator implements Comparator<Node> {

    @Override
    public int compare(Node arg0, Node arg1) {
        return arg0.getNodeID() - arg1.getNodeID();
    }

}
@@ -1,22 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.coprincipalinvestigator;

import java.util.Comparator;

import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPIEdge;

/**
 * This Comparator is used to sort the edges based on their IDs in ascending order.
 * @author bkoniden
 *
 */
public class CoPIEdgeComparator implements Comparator<CoPIEdge> {

    @Override
    public int compare(CoPIEdge arg0, CoPIEdge arg1) {
        return arg0.getEdgeID() - arg1.getEdgeID();
    }

}
@@ -1,517 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.coprincipalinvestigator;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
|
||||
import com.hp.hpl.jena.iri.IRI;
|
||||
import com.hp.hpl.jena.iri.IRIFactory;
|
||||
import com.hp.hpl.jena.iri.Violation;
|
||||
import com.hp.hpl.jena.query.DataSource;
|
||||
import com.hp.hpl.jena.query.Query;
|
||||
import com.hp.hpl.jena.query.QueryExecution;
|
||||
import com.hp.hpl.jena.query.QueryExecutionFactory;
|
||||
import com.hp.hpl.jena.query.QueryFactory;
|
||||
import com.hp.hpl.jena.query.QuerySolution;
|
||||
import com.hp.hpl.jena.query.ResultSet;
|
||||
import com.hp.hpl.jena.query.Syntax;
|
||||
import com.hp.hpl.jena.rdf.model.RDFNode;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPIData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPIEdge;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Grant;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPINode;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.UniqueIDGenerator;
|
||||
/**
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
||||
|
||||
private static final int MAX_PI_PER_GRANT_ALLOWED = 100;
|
||||
|
||||
protected static final Syntax SYNTAX = Syntax.syntaxARQ;
|
||||
|
||||
private String egoURI;
|
||||
|
||||
private DataSource dataSource;
|
||||
|
||||
private Log log;
|
||||
|
||||
private UniqueIDGenerator nodeIDGenerator;
|
||||
|
||||
private UniqueIDGenerator edgeIDGenerator;
|
||||
|
||||
public CoPIGrantCountQueryRunner(String egoURI,
|
||||
DataSource dataSource, Log log) {
|
||||
|
||||
this.egoURI = egoURI;
|
||||
this.dataSource = dataSource;
|
||||
this.log = log;
|
||||
|
||||
this.nodeIDGenerator = new UniqueIDGenerator();
|
||||
this.edgeIDGenerator = new UniqueIDGenerator();
|
||||
|
||||
}
|
||||
|
||||
private String generateEgoCoPIquery(String queryURI) {
|
||||
|
||||
|
||||
String sparqlQuery = QueryConstants.getSparqlPrefixQuery()
|
||||
+ "SELECT "
|
||||
+ " (str(<" + queryURI + ">) as ?" + QueryFieldLabels.PI_URL + ") "
|
||||
+ " (str(?PILabel) as ?" + QueryFieldLabels.PI_LABEL + ") "
|
||||
+ " (str(?Grant) as ?" + QueryFieldLabels.GRANT_URL + ") "
|
||||
+ " (str(?GrantLabel) as ?" + QueryFieldLabels.GRANT_LABEL + ") "
|
||||
+ " (str(?GrantStartDate) as ?" + QueryFieldLabels.GRANT_START_DATE + ") "
|
||||
+ " (str(?GrantEndDate) as ?" + QueryFieldLabels.GRANT_END_DATE + ") "
|
||||
+ " (str(?CoPI) as ?" + QueryFieldLabels.CO_PI_URL + ") "
|
||||
+ " (str(?CoPILabel) as ?" + QueryFieldLabels.CO_PI_LABEL + ") "
|
||||
+ "WHERE "
|
||||
+ "{ "
|
||||
+ "<" + queryURI + "> rdfs:label ?PILabel . "
|
||||
+ "{ "
|
||||
|
||||
+ "<" + queryURI + "> core:hasCo-PrincipalInvestigatorRole ?Role . "
|
||||
|
||||
+ "?Role core:roleIn ?Grant . "
|
||||
|
||||
+ "?Grant rdfs:label ?GrantLabel ; "
|
||||
|
||||
+ "core:relatedRole ?RelatedRole . "
|
||||
|
||||
+ "?RelatedRole core:principalInvestigatorRoleOf ?CoPI . "
|
||||
|
||||
+ "?CoPI rdfs:label ?CoPILabel . "
|
||||
|
||||
+ "OPTIONAL { ?Grant core:startDate ?GrantStartDate } . "
|
||||
|
||||
+ "OPTIONAL { ?Grant core:endDate ?GrantEndDate } . "
|
||||
|
||||
+ "} "
|
||||
|
||||
+ "UNION "
|
||||
|
||||
+ "{ "
|
||||
|
||||
+ "<" + queryURI + "> core:hasPrincipalInvestigatorRole ?Role . "
|
||||
|
||||
+ "?Role core:roleIn ?Grant . "
|
||||
|
||||
+ "?Grant rdfs:label ?GrantLabel ; "
|
||||
|
||||
+ "core:relatedRole ?RelatedRole . "
|
||||
|
||||
+ "?RelatedRole core:principalInvestigatorRoleOf ?CoPI . "
|
||||
|
||||
+ "?CoPI rdfs:label ?CoPILabel . "
|
||||
|
||||
+ "OPTIONAL { ?Grant core:startDate ?GrantStartDate } . "
|
||||
|
||||
+ "OPTIONAL { ?Grant core:endDate ?GrantEndDate } . "
|
||||
|
||||
+ "} "
|
||||
|
||||
+ "UNION "
|
||||
|
||||
+ "{ "
|
||||
|
||||
+ "<" + queryURI + "> core:hasCo-PrincipalInvestigatorRole ?Role . "
|
||||
|
||||
+ "?Role core:roleIn ?Grant . "
|
||||
|
||||
+ "?Grant rdfs:label ?GrantLabel ; "
|
||||
|
||||
+ "core:relatedRole ?RelatedRole . "
|
||||
|
||||
+ "?RelatedRole core:co-PrincipalInvestigatorRoleOf ?CoPI . "
|
||||
|
||||
+ "?CoPI rdfs:label ?CoPILabel . "
|
||||
|
||||
+ "OPTIONAL { ?Grant core:startDate ?GrantStartDate } . "
|
||||
|
||||
+ "OPTIONAL { ?Grant core:endDate ?GrantEndDate } . "
|
||||
|
||||
+ "} "
|
||||
|
||||
+ "UNION "
|
||||
|
||||
+ "{ "
|
||||
|
||||
+ "<" + queryURI + "> core:hasPrincipalInvestigatorRole ?Role . "
|
||||
|
||||
+ "?Role core:roleIn ?Grant . "
|
||||
|
||||
+ "?Grant rdfs:label ?GrantLabel ; "
|
||||
|
||||
+ "core:relatedRole ?RelatedRole . "
|
||||
|
||||
+ "?RelatedRole core:co-PrincipalInvestigatorRoleOf ?CoPI . "
|
||||
|
||||
+ "?CoPI rdfs:label ?CoPILabel . "
|
||||
|
||||
+ "OPTIONAL { ?Grant core:startDate ?GrantStartDate } . "
|
||||
|
||||
+ "OPTIONAL { ?Grant core:endDate ?GrantEndDate } . "
|
||||
+ "} "
|
||||
|
||||
+ "} ";
|
||||
|
||||
log.debug("COPI QUERY - " + sparqlQuery);
|
||||
|
||||
//System.out.println("\n\nCOPI QUERY - " + sparqlQuery + "\n\n");
|
||||
|
||||
return sparqlQuery;
|
||||
}
|
||||
|
||||
private ResultSet executeQuery(String queryText, DataSource dataSource) {

    Query query = QueryFactory.create(queryText, SYNTAX);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, dataSource);

    return queryExecution.execSelect();
}
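A note on resource handling: the method above returns a live ResultSet without ever closing the QueryExecution. Below is a minimal, illustrative sketch (not part of the deleted file) of a variant that copies the results into memory so the execution can be closed eagerly; it assumes Jena's ResultSetFactory from the same com.hp.hpl.jena.query package.

private ResultSet executeQueryAndClose(String queryText, DataSource dataSource) {
    Query query = QueryFactory.create(queryText, SYNTAX);
    QueryExecution queryExecution = QueryExecutionFactory.create(query, dataSource);
    try {
        // Copy the rows so they remain usable after the execution is closed.
        return ResultSetFactory.copyResults(queryExecution.execSelect());
    } finally {
        queryExecution.close();
    }
}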
|
||||
|
||||
public CoPIData getQueryResult()
|
||||
throws MalformedQueryParametersException {
|
||||
|
||||
if (StringUtils.isNotBlank(this.egoURI)) {
|
||||
/*
|
||||
* To test for the validity of the URI submitted.
|
||||
* */
|
||||
IRIFactory iRIFactory = IRIFactory.jenaImplementation();
|
||||
IRI iri = iRIFactory.create(this.egoURI);
|
||||
if (iri.hasViolation(false)) {
|
||||
String errorMsg = ((Violation) iri.violations(false).next()).getShortMessage();
|
||||
log.error("Ego Co-PI Vis Query " + errorMsg);
|
||||
throw new MalformedQueryParametersException(
|
||||
"URI provided for an individual is malformed.");
|
||||
}
|
||||
} else {
|
||||
throw new MalformedQueryParametersException("URI parameter is either null or empty.");
|
||||
}
|
||||
|
||||
ResultSet resultSet = executeQuery(generateEgoCoPIquery(this.egoURI),
|
||||
this.dataSource);
|
||||
return createQueryResult(resultSet);
|
||||
}
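For orientation, a minimal usage sketch of this runner, mirroring how the Co-PI request handler later in this changeset invokes it; the variable names here are illustrative only.

// Illustrative only: build the runner and fetch the co-PI network for one person.
// getQueryResult() throws MalformedQueryParametersException for a blank or malformed URI.
QueryRunner<CoPIData> queryRunner = new CoPIGrantCountQueryRunner(egoURI, dataSource, log);
CoPIData coPINetwork = queryRunner.getQueryResult();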
|
||||
|
||||
|
||||
private CoPIEdge getExistingEdge(
        CoPINode collaboratingNode1,
        CoPINode collaboratingNode2,
        Map<String, CoPIEdge> edgeUniqueIdentifierToVO) {

    String edgeUniqueIdentifier = getEdgeUniqueIdentifier(collaboratingNode1.getNodeID(),
            collaboratingNode2.getNodeID());

    return edgeUniqueIdentifierToVO.get(edgeUniqueIdentifier);

}

private String getEdgeUniqueIdentifier(int nodeID1, int nodeID2) {

    String separator = "*";

    if (nodeID1 < nodeID2) {
        return nodeID1 + separator + nodeID2;
    } else {
        return nodeID2 + separator + nodeID1;
    }

}
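The identifier above is deliberately order-independent; a small illustrative example (not part of the original file):

// Illustrative only: both calls yield "3*5", so an edge looked up as (ego, coPI)
// and the same edge looked up as (coPI, ego) resolve to the same map key.
String keyA = getEdgeUniqueIdentifier(3, 5);
String keyB = getEdgeUniqueIdentifier(5, 3);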
|
||||
|
||||
private CoPIData createQueryResult(ResultSet resultSet) {
|
||||
|
||||
Set<CoPINode> nodes = new HashSet<CoPINode>();
|
||||
|
||||
Map<String, Grant> grantURLToVO = new HashMap<String, Grant>();
|
||||
Map<String, Set<CoPINode>> grantURLToCoPIs = new HashMap<String, Set<CoPINode>>();
|
||||
Map<String, CoPINode> nodeURLToVO = new HashMap<String, CoPINode>();
|
||||
Map<String, CoPIEdge> edgeUniqueIdentifierToVO = new HashMap<String, CoPIEdge>();
|
||||
|
||||
CoPINode egoNode = null;
|
||||
|
||||
Set<CoPIEdge> edges = new HashSet<CoPIEdge>();
|
||||
|
||||
while (resultSet.hasNext()) {
|
||||
QuerySolution solution = resultSet.nextSolution();
|
||||
|
||||
/*
|
||||
* We want to create only ONE ego node.
|
||||
* */
|
||||
RDFNode egoPIURLNode = solution.get(QueryFieldLabels.PI_URL);
|
||||
if (nodeURLToVO.containsKey(egoPIURLNode.toString())) {
|
||||
|
||||
egoNode = nodeURLToVO.get(egoPIURLNode.toString());
|
||||
|
||||
} else {
|
||||
|
||||
egoNode = new CoPINode(egoPIURLNode.toString(), nodeIDGenerator);
|
||||
nodes.add(egoNode);
|
||||
nodeURLToVO.put(egoPIURLNode.toString(), egoNode);
|
||||
|
||||
|
||||
RDFNode authorLabelNode = solution.get(QueryFieldLabels.PI_LABEL);
|
||||
if (authorLabelNode != null) {
|
||||
egoNode.setNodeName(authorLabelNode.toString());
|
||||
}
|
||||
}
|
||||
log.debug("PI: "+ egoNode.getIndividualLabel());
|
||||
|
||||
RDFNode grantNode = solution.get(QueryFieldLabels.GRANT_URL);
|
||||
Grant grant;
|
||||
|
||||
if (grantURLToVO.containsKey(grantNode.toString())) {
|
||||
grant = grantURLToVO.get(grantNode.toString());
|
||||
} else {
|
||||
grant = createGrantVO(solution, grantNode.toString());
|
||||
grantURLToVO.put(grantNode.toString(), grant);
|
||||
}
|
||||
|
||||
egoNode.addGrant(grant);
|
||||
log.debug("Adding grant: "+ grant.getIndividualLabel());
|
||||
|
||||
/*
|
||||
* After some discussion we concluded that for the purpose of this visualization
|
||||
* we do not want a co-pi node or edge if the grant has only one
|
||||
* pi and that happens to be the ego.
|
||||
* */
|
||||
if (solution.get(QueryFieldLabels.PI_URL).toString().equalsIgnoreCase(
|
||||
solution.get(QueryFieldLabels.CO_PI_URL).toString())) {
|
||||
continue;
|
||||
}
|
||||
|
||||
CoPINode coPINode;
|
||||
|
||||
RDFNode coPIURLNode = solution.get(QueryFieldLabels.CO_PI_URL);
|
||||
if (nodeURLToVO.containsKey(coPIURLNode.toString())) {
|
||||
|
||||
coPINode = nodeURLToVO.get(coPIURLNode.toString());
|
||||
|
||||
} else {
|
||||
|
||||
coPINode = new CoPINode(coPIURLNode.toString(), nodeIDGenerator);
|
||||
nodes.add(coPINode);
|
||||
nodeURLToVO.put(coPIURLNode.toString(), coPINode);
|
||||
|
||||
RDFNode coPILabelNode = solution.get(QueryFieldLabels.CO_PI_LABEL);
|
||||
if (coPILabelNode != null) {
|
||||
coPINode.setNodeName(coPILabelNode.toString());
|
||||
}
|
||||
}
|
||||
|
||||
log.debug("Adding CO-PI: "+ coPINode.getIndividualLabel());
|
||||
coPINode.addGrant(grant);
|
||||
|
||||
Set<CoPINode> coPIsForCurrentGrant;
|
||||
|
||||
if (grantURLToCoPIs.containsKey(grant.getGrantURL())) {
|
||||
coPIsForCurrentGrant = grantURLToCoPIs
|
||||
.get(grant.getGrantURL());
|
||||
} else {
|
||||
coPIsForCurrentGrant = new HashSet<CoPINode>();
|
||||
grantURLToCoPIs.put(grant.getGrantURL(),
|
||||
coPIsForCurrentGrant);
|
||||
}
|
||||
|
||||
coPIsForCurrentGrant.add(coPINode);
|
||||
log.debug("Co-PI for current grant : "+ coPINode.getIndividualLabel());
|
||||
|
||||
CoPIEdge egoCoPIEdge = getExistingEdge(egoNode, coPINode, edgeUniqueIdentifierToVO);
|
||||
/*
|
||||
* If "egoCoPIEdge" is null it means that no edge exists in between the egoNode
|
||||
* & current coPINode. Else create a new edge, add it to the edges set & add
|
||||
* the collaborator grant to it.
|
||||
* */
|
||||
if (egoCoPIEdge != null) {
|
||||
egoCoPIEdge.addCollaboratorGrant(grant);
|
||||
} else {
|
||||
egoCoPIEdge = new CoPIEdge(egoNode, coPINode, grant, edgeIDGenerator);
|
||||
edges.add(egoCoPIEdge);
|
||||
edgeUniqueIdentifierToVO.put(
|
||||
getEdgeUniqueIdentifier(egoNode.getNodeID(),
|
||||
coPINode.getNodeID()),
|
||||
egoCoPIEdge);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* This method takes out all the PIs & edges between PIs that belong to grants
* that have more than 100 PIs. We conjecture that such grants do not provide much
* insight. However, we have left the grants themselves in place.
|
||||
*
|
||||
* This method side-effects "nodes" & "edges".
|
||||
* */
|
||||
removeLowQualityNodesAndEdges(nodes,
|
||||
grantURLToVO,
|
||||
grantURLToCoPIs,
|
||||
edges);
|
||||
/*
|
||||
* We need to create edges between 2 co-PIs. E.g. On a grant there were 3 PI
|
||||
* ego, A & B then we have already created edges like,
|
||||
* ego - A
|
||||
* ego - B
|
||||
* The below sub-routine will take care of,
|
||||
* A - B
|
||||
*
|
||||
* We are side-effecting "edges" here. The only reason to do this is because we are adding
|
||||
* edges en masse for all the co-PIs on all the grants considered so far. The
|
||||
* other reason being we dont want to compare against 2 sets of edges (edges created before
|
||||
* & co-PI edges created during the course of this method) when we are creating a new
|
||||
* edge.
|
||||
* */
|
||||
createCoPIEdges(grantURLToVO,
|
||||
grantURLToCoPIs,
|
||||
edges,
|
||||
edgeUniqueIdentifierToVO);
|
||||
|
||||
|
||||
return new CoPIData(egoNode, nodes, edges);
|
||||
}
|
||||
|
||||
private void createCoPIEdges(Map<String, Grant> grantURLToVO,
|
||||
Map<String, Set<CoPINode>> grantURLToCoPIs, Set<CoPIEdge> edges,
|
||||
Map<String, CoPIEdge> edgeUniqueIdentifierToVO) {
|
||||
|
||||
for (Map.Entry<String, Set<CoPINode>> currentGrantEntry
|
||||
: grantURLToCoPIs.entrySet()) {
|
||||
|
||||
/*
* If there was only one co-PI (other than the ego) then we don't have to create any
* edges, so the condition below takes care of that.
*
* We also restrict edges between co-PIs if a particular grant has more than
* 100 co-PIs. Our conjecture is that such edges do not provide any good insight
* & cause unnecessary computation that can make the server time out.
* */
|
||||
if (currentGrantEntry.getValue().size() > 1
|
||||
&& currentGrantEntry.getValue().size()
|
||||
<= MAX_PI_PER_GRANT_ALLOWED) {
|
||||
|
||||
|
||||
Set<CoPIEdge> newlyAddedEdges = new HashSet<CoPIEdge>();
|
||||
|
||||
/*
|
||||
* In order to leverage the nested "for loop" for making edges between all the
|
||||
* co-PIs we need to create a list out of the set first.
|
||||
* */
|
||||
List<CoPINode> coPINodes = new ArrayList<CoPINode>(currentGrantEntry.getValue());
|
||||
Collections.sort(coPINodes, new CoPINodeComparator());
|
||||
|
||||
int numOfCoPIs = coPINodes.size();
|
||||
|
||||
for (int ii = 0; ii < numOfCoPIs - 1; ii++) {
|
||||
for (int jj = ii + 1; jj < numOfCoPIs; jj++) {
|
||||
|
||||
CoPINode coPI1 = coPINodes.get(ii);
|
||||
CoPINode coPI2 = coPINodes.get(jj);
|
||||
|
||||
CoPIEdge coPI1_2Edge = getExistingEdge(coPI1,
|
||||
coPI2,
|
||||
edgeUniqueIdentifierToVO);
|
||||
|
||||
Grant currentGrant = grantURLToVO.get(currentGrantEntry.getKey());
|
||||
|
||||
if (coPI1_2Edge != null) {
|
||||
coPI1_2Edge.addCollaboratorGrant(currentGrant);
|
||||
} else {
|
||||
coPI1_2Edge = new CoPIEdge(coPI1,
|
||||
coPI2,
|
||||
currentGrant,
|
||||
edgeIDGenerator);
|
||||
newlyAddedEdges.add(coPI1_2Edge);
|
||||
edgeUniqueIdentifierToVO.put(
|
||||
getEdgeUniqueIdentifier(coPI1.getNodeID(),
|
||||
coPI2.getNodeID()),
|
||||
coPI1_2Edge);
|
||||
}
|
||||
}
|
||||
}
|
||||
edges.addAll(newlyAddedEdges);
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void removeLowQualityNodesAndEdges(Set<CoPINode> nodes,
|
||||
Map<String, Grant> grantURLToVO,
|
||||
Map<String, Set<CoPINode>> grantURLToCoPIs, Set<CoPIEdge> edges) {
|
||||
|
||||
Set<CoPINode> nodesToBeRemoved = new HashSet<CoPINode>();
|
||||
for (Map.Entry<String, Set<CoPINode>> currentGrantEntry
|
||||
: grantURLToCoPIs.entrySet()) {
|
||||
|
||||
if (currentGrantEntry.getValue().size() > MAX_PI_PER_GRANT_ALLOWED) {
|
||||
|
||||
Grant currentGrant = grantURLToVO.get(currentGrantEntry.getKey());
|
||||
|
||||
Set<CoPIEdge> edgesToBeRemoved = new HashSet<CoPIEdge>();
|
||||
|
||||
for (CoPIEdge currentEdge : edges) {
|
||||
Set<Grant> currentCollaboratorGrants =
|
||||
currentEdge.getCollaboratorGrants();
|
||||
|
||||
if (currentCollaboratorGrants.contains(currentGrant)) {
|
||||
currentCollaboratorGrants.remove(currentGrant);
|
||||
if (currentCollaboratorGrants.isEmpty()) {
|
||||
edgesToBeRemoved.add(currentEdge);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
edges.removeAll(edgesToBeRemoved);
|
||||
|
||||
for (CoPINode currentCoPI : currentGrantEntry.getValue()) {
|
||||
currentCoPI.getInvestigatedGrants().remove(currentGrant);
|
||||
if (currentCoPI.getInvestigatedGrants().isEmpty()) {
|
||||
nodesToBeRemoved.add(currentCoPI);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
nodes.removeAll(nodesToBeRemoved);
|
||||
|
||||
}
|
||||
|
||||
private Grant createGrantVO(QuerySolution solution, String grantURL) {
|
||||
|
||||
Grant grant = new Grant(grantURL);
|
||||
|
||||
RDFNode grantLabelNode = solution.get(QueryFieldLabels.GRANT_LABEL);
|
||||
if (grantLabelNode != null) {
|
||||
grant.setIndividualLabel(grantLabelNode.toString());
|
||||
}
|
||||
|
||||
|
||||
RDFNode grantStartYear = solution.get(QueryFieldLabels.GRANT_START_DATE);
|
||||
if (grantStartYear != null) {
|
||||
grant.setGrantStartDate(grantStartYear.toString());
|
||||
}
|
||||
|
||||
RDFNode grantEndDate = solution.get(QueryFieldLabels
|
||||
.GRANT_END_DATE);
|
||||
if (grantEndDate != null) {
|
||||
grant.setGrantEndDate(grantEndDate.toString());
|
||||
}
|
||||
|
||||
return grant;
|
||||
}
|
||||
}
@@ -1,246 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.coprincipalinvestigator;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import javax.servlet.RequestDispatcher;
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
|
||||
|
||||
import com.hp.hpl.jena.query.DataSource;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.beans.Portal;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.Controllers;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPIData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPINode;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.UtilityFunctions;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.VisualizationRequestHandler;
|
||||
/**
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler{
|
||||
|
||||
public void generateVisualization(VitroRequest vitroRequest, HttpServletRequest request,
        HttpServletResponse response, Log log, DataSource dataSource) {
|
||||
|
||||
String egoURI = vitroRequest.getParameter(VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY);
|
||||
String renderMode = vitroRequest.getParameter(VisualizationFrameworkConstants.RENDER_MODE_KEY);
|
||||
String visMode = vitroRequest.getParameter(VisualizationFrameworkConstants.VIS_MODE_KEY);
|
||||
|
||||
QueryRunner<CoPIData> queryManager = new CoPIGrantCountQueryRunner(egoURI, dataSource, log);
|
||||
|
||||
try {
|
||||
CoPIData PINodesAndEdges = queryManager.getQueryResult();
|
||||
|
||||
// PINodesAndEdges.print();
|
||||
|
||||
if (VisualizationFrameworkConstants.DATA_RENDER_MODE
|
||||
.equalsIgnoreCase(renderMode)) {
|
||||
|
||||
if (VisualizationFrameworkConstants.COPI_VIS_MODE
|
||||
.equalsIgnoreCase(visMode)) {
|
||||
|
||||
prepareCoPIDataResponse(PINodesAndEdges, response);
|
||||
return;
|
||||
|
||||
} else {
|
||||
/*
|
||||
* When the GraphML file is required - based on it the co-PI network
* visualization will be rendered.
|
||||
* */
|
||||
prepareNetworkDataResponse(PINodesAndEdges, response);
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
|
||||
RequestDispatcher requestDispatcher = null;
|
||||
|
||||
prepareStandaloneResponse(
|
||||
egoURI,
|
||||
PINodesAndEdges,
|
||||
vitroRequest,
|
||||
request);
|
||||
|
||||
requestDispatcher = request.getRequestDispatcher(Controllers.BASIC_JSP);
|
||||
|
||||
try {
|
||||
requestDispatcher.forward(request, response);
|
||||
} catch (Exception e) {
|
||||
log.error("EntityEditController could not forward to view.");
|
||||
log.error(e.getMessage());
|
||||
log.error(e.getStackTrace());
|
||||
}
|
||||
}
|
||||
|
||||
} catch (MalformedQueryParametersException e) {
|
||||
try {
|
||||
UtilityFunctions.handleMalformedParameters(
|
||||
e.getMessage(),
|
||||
"Visualization Query Error - Co-authorship Network",
|
||||
vitroRequest,
|
||||
request,
|
||||
response,
|
||||
log);
|
||||
} catch (ServletException e1) {
|
||||
log.error(e1.getStackTrace());
|
||||
} catch (IOException e1) {
|
||||
log.error(e1.getStackTrace());
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* When the page for person level visualization is requested.
|
||||
* @param egoURI
|
||||
* @param coPIVO
|
||||
* @param vitroRequest
|
||||
* @param request
|
||||
*/
|
||||
private void prepareStandaloneResponse(String egoURI,
|
||||
CoPIData pINodesAndEdges, VitroRequest vitroRequest,
|
||||
HttpServletRequest request) {
|
||||
|
||||
Portal portal = vitroRequest.getPortal();
|
||||
|
||||
request.setAttribute("egoURIParam", egoURI);
|
||||
|
||||
String title = "";
|
||||
if (pINodesAndEdges.getNodes() != null
|
||||
&& pINodesAndEdges.getNodes().size() > 0) {
|
||||
request.setAttribute("numOfAuthors", pINodesAndEdges.getNodes()
|
||||
.size());
|
||||
title = pINodesAndEdges.getEgoNode().getNodeName() + " - ";
|
||||
}
|
||||
|
||||
if (pINodesAndEdges.getEdges() != null
|
||||
&& pINodesAndEdges.getEdges().size() > 0) {
|
||||
request.setAttribute("numOfCoPIs", pINodesAndEdges
|
||||
.getEdges().size());
|
||||
}
|
||||
|
||||
request.setAttribute("title", title + "Co-PI Visualization");
|
||||
request.setAttribute("portalBean", portal);
|
||||
request.setAttribute("scripts",
|
||||
"/templates/visualization/person_level_inject_head.jsp");
|
||||
request.setAttribute("bodyJsp",
|
||||
"/templates/visualization/co_authorship.jsp");
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a response when graphml formatted co-pi network is requested, typically by
|
||||
* the flash vis.
|
||||
* @param pINodesAndEdges
|
||||
* @param response
|
||||
*/
|
||||
private void prepareNetworkDataResponse(CoPIData pINodesAndEdges,
|
||||
HttpServletResponse response) {
|
||||
|
||||
response.setContentType("text/xml");
|
||||
|
||||
try {
|
||||
|
||||
PrintWriter responseWriter = response.getWriter();
|
||||
|
||||
/*
|
||||
* We are side-effecting responseWriter since we are directly manipulating the response
|
||||
* object of the servlet.
|
||||
* */
|
||||
CoPIGraphMLWriter coPIGraphMLWriter =
|
||||
new CoPIGraphMLWriter(pINodesAndEdges);
|
||||
|
||||
responseWriter.append(coPIGraphMLWriter.getCoPIGraphMLContent());
|
||||
|
||||
responseWriter.close();
|
||||
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides response when a csv file containing number & names of unique co-pis per
|
||||
* year is requested.
|
||||
* @param pINodesAndEdges
|
||||
* @param response
|
||||
*/
|
||||
private void prepareCoPIDataResponse(CoPIData pINodesAndEdges,
|
||||
HttpServletResponse response) {
|
||||
|
||||
String outputFileName;
|
||||
Map<String, Set<CoPINode>> yearToCoPI = new TreeMap<String, Set<CoPINode>>();
|
||||
|
||||
if (pINodesAndEdges.getNodes() != null && pINodesAndEdges.getNodes().size() > 0) {
|
||||
|
||||
outputFileName = UtilityFunctions.slugify(pINodesAndEdges
|
||||
.getEgoNode().getNodeName())
|
||||
+ "_copis-per-year" + ".csv";
|
||||
|
||||
yearToCoPI = UtilityFunctions.getGrantYearToCoPI(pINodesAndEdges);
|
||||
|
||||
} else {
|
||||
|
||||
outputFileName = "no_copis-per-year" + ".csv";
|
||||
}
|
||||
|
||||
response.setContentType("application/octet-stream");
|
||||
response.setHeader("Content-Disposition",
|
||||
"attachment;filename=" + outputFileName);
|
||||
|
||||
try {
|
||||
|
||||
PrintWriter responseWriter = response.getWriter();
|
||||
|
||||
/*
|
||||
* We are side-effecting responseWriter since we are directly manipulating the response
|
||||
* object of the servlet.
|
||||
* */
|
||||
writeCoPIsPerYearCSV(yearToCoPI, responseWriter);
|
||||
|
||||
responseWriter.close();
|
||||
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void writeCoPIsPerYearCSV(Map<String, Set<CoPINode>> yearToCoPI,
|
||||
PrintWriter responseWriter) {
|
||||
responseWriter.append("Year, Count, Co-PI(s)\n");
|
||||
for (Map.Entry<String, Set<CoPINode>> currentEntry : yearToCoPI.entrySet()) {
|
||||
|
||||
responseWriter.append("\"" + currentEntry.getKey() + "\","
|
||||
+ "\"" + currentEntry.getValue().size() + "\","
|
||||
+ "\"" + getCoPINamesAsString(currentEntry.getValue())
|
||||
+ "\"\n");
|
||||
}
|
||||
|
||||
}
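For reference, a hedged sketch of the CSV this method emits, assuming a single year with two co-PIs; the names and year are made up, and the order of names within a row may vary with set iteration order.

// Header written once, before the loop: Year, Count, Co-PI(s)
// A row for 2009 with co-PIs "Smith, A" and "Jones, B" is appended as:
// "2009","2","Smith, A;Jones, B"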
|
||||
|
||||
private String getCoPINamesAsString(Set<CoPINode> coPIs) {

    StringBuilder coPIsMerged = new StringBuilder();

    String coPISeparator = ";";
    for (CoPINode currentCoPI : coPIs) {
        coPIsMerged.append(currentCoPI.getNodeName() + coPISeparator);
    }

    return StringUtils.removeEnd(coPIsMerged.toString(), coPISeparator);
}
|
||||
}
@@ -1,329 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.coprincipalinvestigator;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationController;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPIData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPIEdge;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPINode;
|
||||
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Collections;
|
||||
import java.util.List;
|
||||
import java.util.Set;
|
||||
import java.util.Map;
|
||||
/**
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
public class CoPIGraphMLWriter {
|
||||
|
||||
private StringBuilder coPIGraphMLContent;
|
||||
|
||||
private final String GRAPHML_HEADER = "<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n"
|
||||
+ " <graphml xmlns=\"http://graphml.graphdrawing.org/xmlns\"\n"
|
||||
+ " xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n"
|
||||
+ " xsi:schemaLocation=\"http://graphml.graphdrawing.org/xmlns\n"
|
||||
+ " http://graphml.graphdrawing.org/xmlns/1.0/graphml.xsd\">\n\n";
|
||||
|
||||
private final String GRAPHML_FOOTER = "</graphml>";
|
||||
|
||||
public CoPIGraphMLWriter(CoPIData coPIData){
|
||||
coPIGraphMLContent = createCoPIGraphMLContent(coPIData);
|
||||
}
|
||||
|
||||
private StringBuilder createCoPIGraphMLContent(CoPIData coPIData) {
|
||||
|
||||
StringBuilder graphMLContent = new StringBuilder();
|
||||
|
||||
graphMLContent.append(GRAPHML_HEADER);
|
||||
|
||||
/*
|
||||
* We are side-effecting "graphMLContent" object in this method since creating
|
||||
* another String object to hold key definition data will be redundant & will
|
||||
* not serve the purpose.
|
||||
* */
|
||||
generateKeyDefinitionContent(coPIData, graphMLContent);
|
||||
|
||||
/*
|
||||
* Used to generate graph content. It will contain both the nodes & edge information.
|
||||
* We are side-effecting "graphMLContent".
|
||||
* */
|
||||
generateGraphContent(coPIData, graphMLContent);
|
||||
|
||||
graphMLContent.append(GRAPHML_FOOTER);
|
||||
|
||||
return graphMLContent;
|
||||
}
|
||||
|
||||
public StringBuilder getCoPIGraphMLContent(){
|
||||
return coPIGraphMLContent;
|
||||
}
|
||||
|
||||
private void generateGraphContent(CoPIData coPIData,
|
||||
StringBuilder graphMLContent) {
|
||||
|
||||
graphMLContent.append("\n<graph edgedefault=\"undirected\">\n");
|
||||
|
||||
// Short-circuit (&&) so the size() check is skipped when the collection is null.
if (coPIData.getNodes() != null && coPIData.getNodes().size() > 0) {
    generateNodeSectionContent(coPIData, graphMLContent);
}

if (coPIData.getEdges() != null && coPIData.getEdges().size() > 0) {
    generateEdgeSectionContent(coPIData, graphMLContent);
}
|
||||
|
||||
graphMLContent.append("</graph>\n");
|
||||
|
||||
|
||||
}
|
||||
|
||||
private void generateEdgeSectionContent(CoPIData coPIData,
|
||||
StringBuilder graphMLContent) {
|
||||
|
||||
graphMLContent.append("<!-- edges -->\n");
|
||||
|
||||
Set<CoPIEdge> edges = coPIData.getEdges();
|
||||
|
||||
List<CoPIEdge> orderedEdges = new ArrayList<CoPIEdge>(edges);
|
||||
|
||||
Collections.sort(orderedEdges, new CoPIEdgeComparator());
|
||||
|
||||
for (CoPIEdge currentEdge : orderedEdges) {
|
||||
|
||||
/*
|
||||
* This method actually creates the XML code for a single edge. "graphMLContent"
|
||||
* is being side-effected.
|
||||
* */
|
||||
getEdgeContent(graphMLContent, currentEdge);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void getEdgeContent(StringBuilder graphMLContent, CoPIEdge currentEdge) {
|
||||
|
||||
graphMLContent.append("<edge "
|
||||
+ "id=\"" + currentEdge.getEdgeID() + "\" "
|
||||
+ "source=\"" + currentEdge.getSourceNode().getNodeID() + "\" "
|
||||
+ "target=\"" + currentEdge.getTargetNode().getNodeID() + "\" "
|
||||
+ ">\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"collaborator1\">"
|
||||
+ currentEdge.getSourceNode().getNodeName()
|
||||
+ "</data>\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"collaborator2\">"
|
||||
+ currentEdge.getTargetNode().getNodeName()
|
||||
+ "</data>\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"number_of_coinvestigated_grants\">"
|
||||
+ currentEdge.getNumberOfCoInvestigatedGrants()
|
||||
+ "</data>\n");
|
||||
|
||||
if (currentEdge.getEarliestCollaborationYearCount() != null) {
|
||||
|
||||
/*
|
||||
* There is no clean way of getting the map contents in Java, even though
* we are sure to have only one entry in the map, so we use the for loop.
|
||||
* */
|
||||
for (Map.Entry<String, Integer> publicationInfo
|
||||
: currentEdge.getEarliestCollaborationYearCount().entrySet()) {
|
||||
|
||||
graphMLContent.append("\t<data key=\"earliest_collaboration\">"
|
||||
+ publicationInfo.getKey()
|
||||
+ "</data>\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"num_earliest_collaboration\">"
|
||||
+ publicationInfo.getValue()
|
||||
+ "</data>\n");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (currentEdge.getLatestCollaborationYearCount() != null) {
|
||||
|
||||
for (Map.Entry<String, Integer> publicationInfo
|
||||
: currentEdge.getLatestCollaborationYearCount().entrySet()) {
|
||||
|
||||
graphMLContent.append("\t<data key=\"latest_collaboration\">"
|
||||
+ publicationInfo.getKey()
|
||||
+ "</data>\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"num_latest_collaboration\">"
|
||||
+ publicationInfo.getValue()
|
||||
+ "</data>\n");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (currentEdge.getUnknownCollaborationYearCount() != null) {
|
||||
|
||||
graphMLContent.append("\t<data key=\"num_unknown_collaboration\">"
|
||||
+ currentEdge.getUnknownCollaborationYearCount()
|
||||
+ "</data>\n");
|
||||
|
||||
}
|
||||
|
||||
graphMLContent.append("</edge>\n");
|
||||
}
|
||||
|
||||
|
||||
private void generateNodeSectionContent(CoPIData coPIData,
|
||||
StringBuilder graphMLContent) {
|
||||
|
||||
graphMLContent.append("<!-- nodes -->\n");
|
||||
|
||||
CoPINode egoNode = coPIData.getEgoNode();
|
||||
Set<CoPINode> piNodes = coPIData.getNodes();
|
||||
|
||||
/*
|
||||
* This method actually creates the XML code for a single node. "graphMLContent"
|
||||
* is being side-effected. The egoNode is added first because this is the "requirement"
|
||||
* of the co-pi vis. Ego should always come first.
|
||||
*
|
||||
* */
|
||||
getNodeContent(graphMLContent, egoNode);
|
||||
|
||||
List<CoPINode> orderedPINodes = new ArrayList<CoPINode>(piNodes);
|
||||
orderedPINodes.remove(egoNode);
|
||||
|
||||
Collections.sort(orderedPINodes, new CoPINodeComparator());
|
||||
|
||||
|
||||
for (CoPINode currNode : orderedPINodes) {
|
||||
|
||||
/*
|
||||
* We have already printed the Ego Node info.
|
||||
* */
|
||||
if (currNode != egoNode) {
|
||||
|
||||
getNodeContent(graphMLContent, currNode);
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
private void getNodeContent(StringBuilder graphMLContent, CoPINode node) {
|
||||
|
||||
String profileURL = null;
|
||||
try {
|
||||
profileURL = VisualizationFrameworkConstants.INDIVIDUAL_URL_PREFIX + "?"
|
||||
+ VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY
|
||||
+ "=" + URLEncoder.encode(node.getNodeURI(),
|
||||
VisualizationController
|
||||
.URL_ENCODING_SCHEME).toString();
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
System.err.println("URL Encoding Error. Move this to use log.error ASAP");
|
||||
}
|
||||
|
||||
graphMLContent.append("<node id=\"" + node.getNodeID() + "\">\n");
|
||||
graphMLContent.append("\t<data key=\"url\">" + node.getNodeURI() + "</data>\n");
|
||||
graphMLContent.append("\t<data key=\"label\">" + node.getNodeName() + "</data>\n");
|
||||
|
||||
if (profileURL != null) {
|
||||
graphMLContent.append("\t<data key=\"profile_url\">" + profileURL + "</data>\n");
|
||||
}
|
||||
|
||||
|
||||
graphMLContent.append("\t<data key=\"number_of_investigated_grants\">"
|
||||
+ node.getNumberOfInvestigatedGrants()
|
||||
+ "</data>\n");
|
||||
|
||||
if (node.getEarliestGrantYearCount() != null) {
|
||||
|
||||
/*
|
||||
* There is no clean way of getting the map contents in Java, even though
* we are sure to have only one entry in the map, so we use the for loop.
|
||||
* I am feeling dirty just about now.
|
||||
* */
|
||||
for (Map.Entry<String, Integer> publicationInfo
|
||||
: node.getEarliestGrantYearCount().entrySet()) {
|
||||
|
||||
graphMLContent.append("\t<data key=\"earliest_grant\">"
|
||||
+ publicationInfo.getKey()
|
||||
+ "</data>\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"num_earliest_grant\">"
|
||||
+ publicationInfo.getValue()
|
||||
+ "</data>\n");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (node.getLatestGrantYearCount() != null) {
|
||||
|
||||
for (Map.Entry<String, Integer> publicationInfo
|
||||
: node.getLatestGrantYearCount().entrySet()) {
|
||||
|
||||
graphMLContent.append("\t<data key=\"latest_grant\">"
|
||||
+ publicationInfo.getKey()
|
||||
+ "</data>\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"num_latest_grant\">"
|
||||
+ publicationInfo.getValue()
|
||||
+ "</data>\n");
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (node.getUnknownGrantYearCount() != null) {
|
||||
|
||||
graphMLContent.append("\t<data key=\"num_unknown_grant\">"
|
||||
+ node.getUnknownGrantYearCount()
|
||||
+ "</data>\n");
|
||||
|
||||
}
|
||||
|
||||
graphMLContent.append("</node>\n");
|
||||
}
|
||||
|
||||
private void generateKeyDefinitionContent(CoPIData coPIData,
|
||||
StringBuilder graphMLContent) {
|
||||
/*
|
||||
* Generate the key definition content for node.
|
||||
* */
|
||||
getKeyDefinitionFromSchema(coPIData.getNodeSchema(), graphMLContent);
|
||||
|
||||
/*
|
||||
* Generate the key definition content for edge.
|
||||
* */
|
||||
getKeyDefinitionFromSchema(coPIData.getEdgeSchema(), graphMLContent);
|
||||
}
|
||||
|
||||
private void getKeyDefinitionFromSchema(Set<Map<String, String>> schema,
|
||||
StringBuilder graphMLContent) {
|
||||
|
||||
for (Map<String, String> currentNodeSchemaAttribute : schema) {
|
||||
|
||||
graphMLContent.append("\n<key ");
|
||||
|
||||
for (Map.Entry<String, String> currentAttributeKey
|
||||
: currentNodeSchemaAttribute.entrySet()) {
|
||||
|
||||
graphMLContent.append(currentAttributeKey.getKey()
|
||||
+ "=\"" + currentAttributeKey.getValue()
|
||||
+ "\" ");
|
||||
|
||||
}
|
||||
|
||||
if (currentNodeSchemaAttribute.containsKey("default")) {
|
||||
|
||||
graphMLContent.append(">\n");
|
||||
graphMLContent.append("<default>");
|
||||
graphMLContent.append(currentNodeSchemaAttribute.get("default"));
|
||||
graphMLContent.append("</default>\n");
|
||||
graphMLContent.append("</key>\n");
|
||||
|
||||
} else {
|
||||
graphMLContent.append("/>\n");
|
||||
}
|
||||
}
|
||||
}
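A hedged illustration of the key-definition output: the schema map below uses GraphML-style attribute names purely as an example, since the real entries come from CoPIData's node/edge schema, which is defined outside this file; map iteration order may also vary, so the attribute order in the emitted tag is not guaranteed.

// Illustrative only.
Map<String, String> labelKey = new HashMap<String, String>();
labelKey.put("id", "label");
labelKey.put("for", "node");
labelKey.put("attr.name", "label");
labelKey.put("attr.type", "string");
// Passed through getKeyDefinitionFromSchema, this would append something like:
// <key id="label" for="node" attr.name="label" attr.type="string" />
// An entry containing a "default" key would instead wrap a nested <default>...</default>
// element before closing the key.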
|
||||
|
||||
}
@@ -1,20 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.coprincipalinvestigator;

import java.util.Comparator;

import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPINode;

/**
 * This Comparator is used to sort the CoPINodes based on their IDs in ascending order.
 * @author bkoniden
 * Deepak Konidena
 */
public class CoPINodeComparator implements Comparator<CoPINode> {

    @Override
    public int compare(CoPINode arg0, CoPINode arg1) {
        return arg0.getNodeID() - arg1.getNodeID();
    }
}
@@ -1,628 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.coprincipalinvestigator;
|
||||
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.Calendar;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Set;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationController;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VisConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPINode;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.SparklineData;
|
||||
|
||||
/**
|
||||
* This class contains code for rendering sparklines and displaying tables for
|
||||
* Co-PI visualization.
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
@SuppressWarnings("serial")
|
||||
public class CoPIVisCodeGenerator {
|
||||
|
||||
/*
|
||||
* There are 2 modes of sparkline that are available via this visualization.
* 1. Short Sparkline - This sparkline will render all the data points (or sparks),
* which in this case are the co-PI(s) over the years, from the last 10 years.
*
* 2. Full Sparkline - This sparkline will render all the data points (or sparks)
* spanning the career of the person, and at least the last 10 years in case
* the person started their career within the last 10 years.
* */
|
||||
private static final Map<String, String> VIS_DIV_NAMES = new HashMap<String, String>() { {
|
||||
|
||||
put("SHORT_SPARK", "unique_copis_short_sparkline_vis");
|
||||
put("FULL_SPARK", "unique_copis_full_sparkline_vis");
|
||||
|
||||
} };
|
||||
|
||||
private static final String VISUALIZATION_STYLE_CLASS = "sparkline_style";
|
||||
|
||||
private static final String DEFAULT_VISCONTAINER_DIV_ID = "unique_copis_vis_container";
|
||||
|
||||
private Map<String, Set<CoPINode>> yearToUniqueCoPIs;
|
||||
|
||||
private Log log;
|
||||
|
||||
private SparklineData sparklineData;
|
||||
|
||||
private String contextPath;
|
||||
|
||||
private String individualURI;
|
||||
|
||||
public CoPIVisCodeGenerator(String contextPath,
|
||||
String individualURI,
|
||||
String visMode,
|
||||
String visContainer,
|
||||
Map<String, Set<CoPINode>> yearToUniqueCoPIs,
|
||||
Log log){
|
||||
|
||||
this.contextPath = contextPath;
|
||||
this.individualURI = individualURI;
|
||||
|
||||
this.yearToUniqueCoPIs = yearToUniqueCoPIs;
|
||||
this.sparklineData = new SparklineData();
|
||||
|
||||
this.log = log;
|
||||
|
||||
generateVisualizationCode(visMode, visContainer);
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is used to generate the visualization code (HTML, CSS &
* JavaScript). There are 2 parts to it - 1. Actual Content Code & 2. Context
* Code. 1. Actual Content Code in this case is the sparkline image, text
* related to the data and the wrapping tables. This is generated via a call to
* the Google Vis API through JavaScript. 2. Context Code is generally optional
* but contains code pertaining to tabulated data & links to download files
* etc.
|
||||
*
|
||||
* @param visMode
|
||||
* @param visContainer
|
||||
*/
|
||||
private void generateVisualizationCode(String visMode, String visContainer) {
|
||||
|
||||
sparklineData.setSparklineContent(getMainVisualizationCode(visMode,
|
||||
visContainer));
|
||||
|
||||
sparklineData.setSparklineContext(getVisualizationContextCode(visMode));
|
||||
|
||||
}
|
||||
|
||||
private String getMainVisualizationCode(String visMode,
|
||||
String providedVisContainerID) {
|
||||
|
||||
int numOfYearsToBeRendered = 0;
|
||||
int currentYear = Calendar.getInstance().get(Calendar.YEAR);
|
||||
int shortSparkMinYear = currentYear
|
||||
- VisConstants.MINIMUM_YEARS_CONSIDERED_FOR_SPARKLINE + 1;
|
||||
|
||||
/*
|
||||
* This is required because, when deciding the range of years over which
* the vis is rendered, we don't want to be influenced by the
* "DEFAULT_GRANT_YEAR".
|
||||
*/
|
||||
Set<String> investigatedYears = new HashSet<String>(yearToUniqueCoPIs
|
||||
.keySet());
|
||||
investigatedYears.remove(VOConstants.DEFAULT_GRANT_YEAR);
|
||||
|
||||
/*
|
||||
* We are setting the default value of minGrantYear to be 10 years
|
||||
* before the current year (which is suitably represented by the
|
||||
* shortSparkMinYear), this in case we run into invalid set of investigated
|
||||
* years.
|
||||
*/
|
||||
int minGrantYear = shortSparkMinYear;
|
||||
|
||||
String visContainerID = null;
|
||||
|
||||
StringBuilder visualizationCode = new StringBuilder();
|
||||
|
||||
if (yearToUniqueCoPIs.size() > 0) {
|
||||
try {
|
||||
minGrantYear = Integer.parseInt(Collections
|
||||
.min(investigatedYears));
|
||||
} catch (NoSuchElementException e1) {
|
||||
log.debug("vis: " + e1.getMessage() + " error occurred for "
|
||||
+ yearToUniqueCoPIs.toString());
|
||||
} catch (NumberFormatException e2) {
|
||||
log.debug("vis: " + e2.getMessage() + " error occurred for "
|
||||
+ yearToUniqueCoPIs.toString());
|
||||
}
|
||||
}
|
||||
|
||||
int minGrantYearConsidered = 0;
|
||||
|
||||
/*
|
||||
* There might be a case that the person investigated his first grant
|
||||
* within the last 10 years but we want to make sure that the sparkline
|
||||
* is representative of at least the last 10 years, so we will set the
|
||||
* minGrantYearConsidered to "currentYear - 10" which is also given by
|
||||
* "shortSparkMinYear".
|
||||
*/
|
||||
if (minGrantYear > shortSparkMinYear) {
|
||||
minGrantYearConsidered = shortSparkMinYear;
|
||||
} else {
|
||||
minGrantYearConsidered = minGrantYear;
|
||||
}
|
||||
|
||||
numOfYearsToBeRendered = currentYear - minGrantYearConsidered + 1;
|
||||
|
||||
visualizationCode.append("<style type='text/css'>" + "."
|
||||
+ VISUALIZATION_STYLE_CLASS + " table{" + " margin: 0;"
|
||||
+ " padding: 0;" + " width: auto;"
|
||||
+ " border-collapse: collapse;" + " border-spacing: 0;"
|
||||
+ " vertical-align: inherit;" + "}"
|
||||
+ ".incomplete-data-holder {" + "" + "}"
|
||||
+ "td.sparkline_number { text-align:right; "
|
||||
+ "padding-right:5px; }"
|
||||
+ "td.sparkline_text {text-align:left;}" + "</style>\n");
|
||||
|
||||
visualizationCode
|
||||
.append("<script type=\"text/javascript\">\n"
|
||||
+ "function drawUniqueCoPICountVisualization(providedSparklineImgTD) {\n"
|
||||
+ "var data = new google.visualization.DataTable();\n"
|
||||
+ "data.addColumn('string', 'Year');\n"
|
||||
+ "data.addColumn('number', 'Unique Co-PI(s)');\n"
|
||||
+ "data.addRows(" + numOfYearsToBeRendered + ");\n");
|
||||
|
||||
int uniqueCoPICounter = 0;
|
||||
int renderedFullSparks = 0;
|
||||
Set<CoPINode> allCoPIsWithKnownGrantShipYears = new HashSet<CoPINode>();
|
||||
|
||||
for (int grantYear = minGrantYearConsidered; grantYear <= currentYear; grantYear++) {
|
||||
|
||||
String grantYearAsString = String.valueOf(grantYear);
|
||||
Set<CoPINode> currentCoPIs = yearToUniqueCoPIs
|
||||
.get(grantYearAsString);
|
||||
|
||||
Integer currentUniqueCoPIs = null;
|
||||
|
||||
if (currentCoPIs != null) {
|
||||
currentUniqueCoPIs = currentCoPIs.size();
|
||||
allCoPIsWithKnownGrantShipYears.addAll(currentCoPIs);
|
||||
} else {
|
||||
currentUniqueCoPIs = 0;
|
||||
}
|
||||
|
||||
visualizationCode.append("data.setValue(" + uniqueCoPICounter
|
||||
+ ", 0, '" + grantYearAsString + "');\n");
|
||||
|
||||
visualizationCode.append("data.setValue(" + uniqueCoPICounter
|
||||
+ ", 1, " + currentUniqueCoPIs + ");\n");
|
||||
uniqueCoPICounter++;
|
||||
}
|
||||
|
||||
/*
|
||||
* For the purpose of this visualization I have come up with the term
* "Sparks", which essentially means data points. Sparks that will be
* rendered in full mode will always be the ones which have a year
* associated with them.
|
||||
*/
|
||||
renderedFullSparks = allCoPIsWithKnownGrantShipYears.size();
|
||||
|
||||
/*
|
||||
* The total count will also consider grants that have no year
* associated with them.
|
||||
*/
|
||||
Integer unknownYearCoPIs = 0;
|
||||
if (yearToUniqueCoPIs.get(VOConstants.DEFAULT_GRANT_YEAR) != null) {
|
||||
unknownYearCoPIs = yearToUniqueCoPIs.get(
|
||||
VOConstants.DEFAULT_GRANT_YEAR).size();
|
||||
}
|
||||
|
||||
String sparklineDisplayOptions = "{width: 150, height: 30, showAxisLines: false, "
|
||||
+ "showValueLabels: false, labelPosition: 'none'}";
|
||||
|
||||
if (providedVisContainerID != null) {
|
||||
visContainerID = providedVisContainerID;
|
||||
} else {
|
||||
visContainerID = DEFAULT_VISCONTAINER_DIV_ID;
|
||||
}
|
||||
|
||||
/*
|
||||
* By default these represents the range of the rendered sparks. Only in
|
||||
* case of "short" sparkline mode we will set the Earliest
|
||||
* RenderedGrant year to "currentYear - 10".
|
||||
*/
|
||||
sparklineData.setEarliestRenderedGrantYear(minGrantYear);
|
||||
sparklineData.setLatestRenderedGrantYear(currentYear);
|
||||
|
||||
/*
|
||||
* The Full Sparkline will be rendered by default. Only if the url has
|
||||
* specific mention of SHORT_SPARKLINE_MODE_KEY then we render the short
|
||||
* sparkline and not otherwise.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Since building StringBuilder objects (which is being used to store
|
||||
* the vis code) is essentially a side-effecting process, we have both
|
||||
* the activators method as side-effecting. They both side-effect
|
||||
* "visualizationCode"
|
||||
*/
|
||||
if (VisualizationFrameworkConstants.SHORT_SPARKLINE_VIS_MODE
|
||||
.equalsIgnoreCase(visMode)) {
|
||||
|
||||
sparklineData.setEarliestRenderedGrantYear(shortSparkMinYear);
|
||||
|
||||
generateShortSparklineVisualizationContent(currentYear,
|
||||
shortSparkMinYear, visContainerID, visualizationCode,
|
||||
unknownYearCoPIs, sparklineDisplayOptions);
|
||||
} else {
|
||||
generateFullSparklineVisualizationContent(currentYear,
|
||||
minGrantYearConsidered, visContainerID, visualizationCode,
|
||||
unknownYearCoPIs, renderedFullSparks,
|
||||
sparklineDisplayOptions);
|
||||
}
|
||||
|
||||
log.debug(visualizationCode);
|
||||
|
||||
return visualizationCode.toString();
|
||||
}
|
||||
|
||||
private void generateShortSparklineVisualizationContent(int currentYear,
|
||||
int shortSparkMinYear, String visContainerID,
|
||||
StringBuilder visualizationCode, int unknownYearGrants,
|
||||
String sparklineDisplayOptions) {
|
||||
|
||||
/*
|
||||
* Create a view of the data containing only the column pertaining to
|
||||
* grant count.
|
||||
*/
|
||||
visualizationCode.append("var shortSparklineView = "
|
||||
+ "new google.visualization.DataView(data);\n"
|
||||
+ "shortSparklineView.setColumns([1]);\n");
|
||||
|
||||
/*
|
||||
* For the short view we only want the last 10 year's view of
|
||||
* grant count, hence we filter the data we actually want to use
|
||||
* for render.
|
||||
*/
|
||||
visualizationCode.append("shortSparklineView.setRows("
|
||||
+ "data.getFilteredRows([{column: 0, " + "minValue: '"
|
||||
+ shortSparkMinYear + "', " + "maxValue: '" + currentYear
|
||||
+ "'}])" + ");\n");
|
||||
|
||||
/*
|
||||
* Create the vis object and draw it in the div pertaining to
|
||||
* short-sparkline.
|
||||
*/
|
||||
visualizationCode
|
||||
.append("var short_spark = new google.visualization.ImageSparkLine("
|
||||
+ "providedSparklineImgTD[0]"
|
||||
+ ");\n"
|
||||
+ "short_spark.draw(shortSparklineView, "
|
||||
+ sparklineDisplayOptions + ");\n");
|
||||
|
||||
/*
|
||||
* We want to display how many grant counts were considered, so
|
||||
* this is used to calculate this.
|
||||
*/
|
||||
visualizationCode
|
||||
.append("var shortSparkRows = shortSparklineView.getViewRows();\n"
|
||||
+ "var renderedShortSparks = 0;\n"
|
||||
+ "$.each(shortSparkRows, function(index, value) {"
|
||||
+ "renderedShortSparks += data.getValue(value, 1);"
|
||||
+ "});\n");
|
||||
|
||||
/*
|
||||
* Generate the text introducing the vis.
|
||||
*/
|
||||
|
||||
String imcompleteDataText = "This information is based solely on grants which "
|
||||
+ "have been loaded into the VIVO system. "
|
||||
+ "This may only be a small sample of the person\\'s "
|
||||
+ "total work.";
|
||||
|
||||
visualizationCode.append("$('#" + VIS_DIV_NAMES.get("SHORT_SPARK")
|
||||
+ " td.sparkline_number').text("
|
||||
+ "parseInt(renderedShortSparks) " + "+ parseInt("
|
||||
+ unknownYearGrants + "));");
|
||||
|
||||
visualizationCode.append("var shortSparksText = ''"
|
||||
+ "+ ' grant(s) within the last 10 years "
|
||||
+ "<span class=\"incomplete-data-holder\" title=\""
|
||||
+ imcompleteDataText + "\">incomplete data</span>'" + "+ '';"
|
||||
+ "$('#" + VIS_DIV_NAMES.get("SHORT_SPARK") + " "
|
||||
+ "td.sparkline_text').html(shortSparksText);");
|
||||
|
||||
visualizationCode.append("}\n ");
|
||||
|
||||
/*
|
||||
* Generate the code that will activate the visualization. It takes care
|
||||
* of creating div elements to hold the actual sparkline image and then
|
||||
* calling the drawUniqueCoPICountVisualization function.
|
||||
*/
|
||||
visualizationCode.append(generateVisualizationActivator(VIS_DIV_NAMES
|
||||
.get("SHORT_SPARK"), visContainerID));
|
||||
|
||||
}
|
||||
|
||||
private void generateFullSparklineVisualizationContent(
|
||||
int currentYear,
|
||||
int minGrantYearConsidered,
|
||||
String visContainerID,
|
||||
StringBuilder visualizationCode,
|
||||
int unknownYearGrants,
|
||||
int renderedFullSparks,
|
||||
String sparklineDisplayOptions) {
|
||||
|
||||
String csvDownloadURLHref = "";
|
||||
|
||||
try {
|
||||
if (getCSVDownloadURL() != null) {
|
||||
|
||||
csvDownloadURLHref = "<a href=\"" + getCSVDownloadURL()
|
||||
+ "\" class=\"inline_href\">(.CSV File)</a>";
|
||||
|
||||
} else {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
|
||||
visualizationCode.append("var fullSparklineView = "
|
||||
+ "new google.visualization.DataView(data);\n"
|
||||
+ "fullSparklineView.setColumns([1]);\n");
|
||||
|
||||
visualizationCode.append("var full_spark = new google.visualization.ImageSparkLine("
|
||||
+ "providedSparklineImgTD[0]"
|
||||
+ ");\n"
|
||||
+ "full_spark.draw(fullSparklineView, "
|
||||
+ sparklineDisplayOptions + ");\n");
|
||||
|
||||
visualizationCode.append("$('#" + VIS_DIV_NAMES.get("FULL_SPARK")
|
||||
+ " td.sparkline_number').text('" + (renderedFullSparks
|
||||
+ unknownYearGrants) + "').css('font-weight', 'bold');");
|
||||
|
||||
visualizationCode.append("var allSparksText = ''"
|
||||
+ "+ ' <h3>Co-Principal Investigator(s)</h3> '"
|
||||
+ "+ '"
|
||||
+ "<span class=\"sparkline_range\">"
|
||||
+ " from " + minGrantYearConsidered + " to " + currentYear + ""
|
||||
+ "</span> '"
|
||||
+ "+ ' " + csvDownloadURLHref + " ';"
|
||||
+ "$('#" + VIS_DIV_NAMES.get("FULL_SPARK")
|
||||
+ " td.sparkline_text').html(allSparksText);");
|
||||
|
||||
visualizationCode.append("}\n ");
|
||||
|
||||
visualizationCode.append(generateVisualizationActivator(VIS_DIV_NAMES.get("FULL_SPARK"),
|
||||
visContainerID));
|
||||
|
||||
}
|
||||
|
||||
private String generateVisualizationActivator(String sparklineID, String visContainerID) {
|
||||
|
||||
String sparklineTableWrapper = "\n"
|
||||
+ "var table = $('<table>');"
|
||||
+ "table.attr('class', 'sparkline_wrapper_table');"
|
||||
+ "var row = $('<tr>');"
|
||||
+ "sparklineImgTD = $('<td>');"
|
||||
+ "sparklineImgTD.attr('id', '" + sparklineID + "_img');"
|
||||
+ "sparklineImgTD.attr('width', '65');"
|
||||
// + "sparklineImgTD.attr('align', 'right');"
|
||||
+ "sparklineImgTD.attr('class', '" + VISUALIZATION_STYLE_CLASS + "');"
|
||||
+ "row.append(sparklineImgTD);"
|
||||
+ "var row2 = $('<tr>');"
|
||||
+ "var sparklineNumberTD = $('<td>');"
|
||||
// + "sparklineNumberTD.attr('width', '30');"
|
||||
// + "sparklineNumberTD.attr('align', 'right');"
|
||||
+ "sparklineNumberTD.attr('class', 'sparkline_number');"
|
||||
+ "sparklineNumberTD.css('text-align', 'center');"
|
||||
+ "row2.append(sparklineNumberTD);"
|
||||
+ "var row3 = $('<tr>');"
|
||||
+ "var sparklineTextTD = $('<td>');"
|
||||
// + "sparklineTextTD.attr('width', '450');"
|
||||
+ "sparklineTextTD.attr('class', 'sparkline_text');"
|
||||
+ "row3.append(sparklineTextTD);"
|
||||
+ "table.append(row);"
|
||||
+ "table.append(row2);"
|
||||
+ "table.append(row3);"
|
||||
+ "table.prependTo('#" + sparklineID + "');\n";
|
||||
|
||||
return "$(document).ready(function() {"
|
||||
+ "var sparklineImgTD; "
|
||||
|
||||
/*
|
||||
* This is a nuclear option (creating the container in which everything goes)
|
||||
* the only reason this will be ever used is the API user never submitted a
|
||||
* container ID in which everything goes. The alternative was to let the
|
||||
* vis not appear in the calling page at all. So now atleast vis appears but
|
||||
* appended at the bottom of the body.
|
||||
* */
|
||||
|
||||
+ "if ($('#" + visContainerID + "').length === 0) {"
|
||||
+ " $('<div/>', {'id': '" + visContainerID + "'"
|
||||
+ " }).appendTo('body');"
|
||||
+ "}"
|
||||
+ "if ($('#" + sparklineID + "').length === 0) {"
|
||||
+ "$('<div/>', {'id': '" + sparklineID + "',"
|
||||
+ "'class': '" + VISUALIZATION_STYLE_CLASS + "'"
|
||||
+ "}).prependTo('#" + visContainerID + "');"
|
||||
+ sparklineTableWrapper
|
||||
+ "}"
|
||||
+ "drawUniqueCoPICountVisualization(sparklineImgTD);"
|
||||
+ "});"
|
||||
+ "</script>\n";
|
||||
}
|
||||
|
||||
private String getVisualizationContextCode(String visMode) {
|
||||
|
||||
String visualizationContextCode = "";
|
||||
if (VisualizationFrameworkConstants.SHORT_SPARKLINE_VIS_MODE.equalsIgnoreCase(visMode)) {
|
||||
visualizationContextCode = generateShortVisContext();
|
||||
} else {
|
||||
visualizationContextCode = generateFullVisContext();
|
||||
}
|
||||
|
||||
log.debug(visualizationContextCode);
|
||||
|
||||
return visualizationContextCode;
|
||||
}
|
||||
|
||||
private String generateFullVisContext() {
|
||||
|
||||
StringBuilder divContextCode = new StringBuilder();
|
||||
|
||||
String csvDownloadURLHref = "";
|
||||
|
||||
if (yearToUniqueCoPIs.size() > 0) {
|
||||
|
||||
try {
|
||||
if (getCSVDownloadURL() != null) {
|
||||
|
||||
csvDownloadURLHref = "Download data as <a href='"
|
||||
+ getCSVDownloadURL() + "'>.csv</a> file.<br />";
|
||||
sparklineData.setDownloadDataLink(getCSVDownloadURL());
|
||||
|
||||
} else {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
} else {
|
||||
csvDownloadURLHref = "No data available to export.<br />";
|
||||
}
|
||||
|
||||
String tableCode = generateDataTable();
|
||||
|
||||
divContextCode.append("<p>" + tableCode + csvDownloadURLHref + "</p>");
|
||||
|
||||
sparklineData.setTable(tableCode);
|
||||
|
||||
return divContextCode.toString();
|
||||
}
|
||||
|
||||
private String getCSVDownloadURL() throws UnsupportedEncodingException {
|
||||
|
||||
if (yearToUniqueCoPIs.size() > 0) {
|
||||
|
||||
String secondaryContextPath = "";
|
||||
if (!contextPath.contains(VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX)) {
|
||||
secondaryContextPath = VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX;
|
||||
}
|
||||
|
||||
|
||||
String downloadURL = contextPath
|
||||
+ secondaryContextPath
|
||||
+ "?" + VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY
|
||||
+ "=" + URLEncoder.encode(individualURI,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&" + VisualizationFrameworkConstants.VIS_TYPE_KEY
|
||||
+ "=" + URLEncoder.encode(VisualizationFrameworkConstants
|
||||
.CO_PI_VIS,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&" + VisualizationFrameworkConstants.VIS_MODE_KEY
|
||||
+ "=" + URLEncoder.encode("sparkline",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&" + VisualizationFrameworkConstants.RENDER_MODE_KEY
|
||||
+ "=" + URLEncoder.encode(VisualizationFrameworkConstants.DATA_RENDER_MODE,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString();
|
||||
|
||||
return downloadURL;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
private String generateShortVisContext() {
|
||||
|
||||
StringBuilder divContextCode = new StringBuilder();
|
||||
|
||||
try {
|
||||
|
||||
String fullTimelineLink;
|
||||
if (yearToUniqueCoPIs.size() > 0) {
|
||||
|
||||
String secondaryContextPath = "";
|
||||
if (!contextPath.contains(VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX)) {
|
||||
secondaryContextPath = VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX;
|
||||
}
|
||||
|
||||
String fullTimelineNetworkURL = contextPath
|
||||
+ secondaryContextPath
|
||||
+ "?"
|
||||
+ VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY
|
||||
+ "=" + URLEncoder.encode(individualURI,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_TYPE_KEY
|
||||
+ "=" + URLEncoder.encode("person_level",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_CONTAINER_KEY
|
||||
+ "=" + URLEncoder.encode("ego_sparkline",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.RENDER_MODE_KEY
|
||||
+ "=" + URLEncoder.encode(
|
||||
VisualizationFrameworkConstants
|
||||
.STANDALONE_RENDER_MODE,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString();
|
||||
|
||||
fullTimelineLink = "<a href='" + fullTimelineNetworkURL
|
||||
+ "'>View full timeline and co-pi network.</a>";
|
||||
|
||||
sparklineData.setFullTimelineNetworkLink(fullTimelineNetworkURL);
|
||||
|
||||
} else {
|
||||
fullTimelineLink = "No data available to render full timeline.<br />";
|
||||
}
|
||||
|
||||
divContextCode.append("<p>" + fullTimelineLink + "</p>");
|
||||
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
log.error(e);
|
||||
}
|
||||
|
||||
return divContextCode.toString();
|
||||
}
|
||||
|
||||
private String generateDataTable() {
|
||||
|
||||
StringBuilder dataTable = new StringBuilder();
|
||||
|
||||
dataTable.append("<table id='sparkline_data_table'>"
|
||||
+ "<caption>Unique Co-PIs per year</caption>"
|
||||
+ "<thead>"
|
||||
+ "<tr>"
|
||||
+ "<th>Year</th>"
|
||||
+ "<th>Count</th>"
|
||||
+ "</tr>"
|
||||
+ "</thead>"
|
||||
+ "<tbody>");
|
||||
|
||||
for (Entry<String, Set<CoPINode>> currentEntry : yearToUniqueCoPIs.entrySet()) {
|
||||
dataTable.append("<tr>"
|
||||
+ "<td>" + currentEntry.getKey() + "</td>"
|
||||
+ "<td>" + currentEntry.getValue().size() + "</td>"
|
||||
+ "</tr>");
|
||||
}
|
||||
|
||||
dataTable.append("</tbody>\n </table>\n");
|
||||
|
||||
return dataTable.toString();
|
||||
}
|
||||
|
||||
public SparklineData getValueObjectContainer() {
|
||||
return sparklineData;
|
||||
}
|
||||
}
|
|
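For orientation, when the string pieces above are assembled in the short sparkline mode, the emitted script block looks roughly like the following. This is only an illustration stitched together from the string literals above: the number of rows, the years and the counts are made-up example values, and the activator body is abbreviated to a comment.

    <script type="text/javascript">
    function drawUniqueCoPICountVisualization(providedSparklineImgTD) {
        var data = new google.visualization.DataTable();
        data.addColumn('string', 'Year');
        data.addColumn('number', 'Unique Co-PI(s)');
        data.addRows(3);
        data.setValue(0, 0, '2008'); data.setValue(0, 1, 2);
        data.setValue(1, 0, '2009'); data.setValue(1, 1, 5);
        data.setValue(2, 0, '2010'); data.setValue(2, 1, 4);
        var shortSparklineView = new google.visualization.DataView(data);
        shortSparklineView.setColumns([1]);
        shortSparklineView.setRows(data.getFilteredRows([{column: 0, minValue: '2000', maxValue: '2010'}]));
        var short_spark = new google.visualization.ImageSparkLine(providedSparklineImgTD[0]);
        short_spark.draw(shortSparklineView, {width: 150, height: 30, showAxisLines: false, showValueLabels: false, labelPosition: 'none'});
    }
    $(document).ready(function() { /* activator: creates the wrapper table and calls the function above */ });
    </script>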
@ -1,306 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.entitycomparison;

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;

import com.hp.hpl.jena.iri.IRI;
import com.hp.hpl.jena.iri.IRIFactory;
import com.hp.hpl.jena.iri.Violation;
import com.hp.hpl.jena.query.DataSource;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.RDFNode;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.BiboDocument;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Entity;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.SubEntity;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;

/**
 * This query runner is used to execute a sparql query that will fetch all the
 * publications defined by bibo:Document property for a particular
 * department/school/university.
 *
 * @author bkoniden
 */
public class EntityPublicationCountQueryRunner implements QueryRunner<Entity> {

    protected static final Syntax SYNTAX = Syntax.syntaxARQ;

    private String entityURI;
    private DataSource dataSource;
    private Log log;
    private String visMode;

    private static final String SPARQL_QUERY_COMMON_SELECT_CLAUSE = ""
            + " (str(?Person) as ?personLit) "
            + " (str(?PersonLabel) as ?personLabelLit) "
            + " (str(?SecondaryPositionLabel) as ?SecondaryPositionLabelLit)"
            + " (str(?Document) as ?documentLit) "
            + " (str(?DocumentLabel) as ?documentLabelLit) "
            + " (str(?publicationYear) as ?publicationYearLit) "
            + " (str(?publicationYearMonth) as ?publicationYearMonthLit) "
            + " (str(?publicationDate) as ?publicationDateLit) "
            + " (str(?StartYear) as ?StartYearLit)";

    private static final String SPARQL_QUERY_COMMON_WHERE_CLAUSE = ""
            + "?Document rdf:type bibo:Document ;"
            + " rdfs:label ?DocumentLabel ."
            + "OPTIONAL { ?Document core:year ?publicationYear } ."
            + "OPTIONAL { ?Document core:yearMonth ?publicationYearMonth } ."
            + "OPTIONAL { ?Document core:date ?publicationDate } ."
            + "OPTIONAL { ?SecondaryPosition core:startYear ?StartYear } .";

    private static String ENTITY_LABEL;
    private static String ENTITY_URL;
    private static String SUBENTITY_LABEL;
    private static String SUBENTITY_URL;

    public EntityPublicationCountQueryRunner(String entityURI,
            DataSource dataSource, Log log, String visMode) {

        this.entityURI = entityURI;
        this.dataSource = dataSource;
        this.log = log;
        this.visMode = visMode;

    }

    private Entity createJavaValueObjects(ResultSet resultSet) {

        Entity entity = null;
        Map<String, BiboDocument> biboDocumentURLToVO = new HashMap<String, BiboDocument>();
        Map<String, SubEntity> subentityURLToVO = new HashMap<String, SubEntity>();

        while (resultSet.hasNext()) {

            QuerySolution solution = resultSet.nextSolution();

            if (entity == null) {
                entity = new Entity(solution.get(ENTITY_URL).toString(),
                        solution.get(ENTITY_LABEL).toString());
            }

            RDFNode documentNode = solution.get(QueryFieldLabels.DOCUMENT_URL);
            BiboDocument biboDocument;

            if (biboDocumentURLToVO.containsKey(documentNode.toString())) {
                biboDocument = biboDocumentURLToVO.get(documentNode.toString());

            } else {

                biboDocument = new BiboDocument(documentNode.toString());
                biboDocumentURLToVO.put(documentNode.toString(), biboDocument);

                RDFNode documentLabelNode = solution
                        .get(QueryFieldLabels.DOCUMENT_LABEL);
                if (documentLabelNode != null) {
                    biboDocument.setDocumentLabel(documentLabelNode.toString());
                }

                RDFNode publicationYearNode = solution
                        .get(QueryFieldLabels.DOCUMENT_PUBLICATION_YEAR);
                if (publicationYearNode != null) {
                    biboDocument.setPublicationYear(publicationYearNode.toString());
                }

                RDFNode publicationYearMonthNode = solution
                        .get(QueryFieldLabels.DOCUMENT_PUBLICATION_YEAR_MONTH);
                if (publicationYearMonthNode != null) {
                    biboDocument.setPublicationYearMonth(publicationYearMonthNode.toString());
                }

                RDFNode publicationDateNode = solution
                        .get(QueryFieldLabels.DOCUMENT_PUBLICATION_DATE);
                if (publicationDateNode != null) {
                    biboDocument.setPublicationDate(publicationDateNode.toString());
                }

            }

            RDFNode subEntityURLNode = solution.get(SUBENTITY_URL);

            if (subEntityURLNode != null) {
                SubEntity subEntity;
                if (subentityURLToVO.containsKey(subEntityURLNode.toString())) {
                    subEntity = subentityURLToVO.get(subEntityURLNode.toString());
                } else {
                    subEntity = new SubEntity(subEntityURLNode.toString());
                    subentityURLToVO.put(subEntityURLNode.toString(), subEntity);
                }

                RDFNode subEntityLabelNode = solution.get(SUBENTITY_LABEL);
                if (subEntityLabelNode != null) {
                    subEntity.setIndividualLabel(subEntityLabelNode.toString());
                }
                entity.addSubEntity(subEntity);
                subEntity.addPublications(biboDocument);
            }

            entity.addPublications(biboDocument);
        }

        return entity;
    }

    private ResultSet executeQuery(String queryURI, DataSource dataSource) {

        QueryExecution queryExecution = null;
        Query query = QueryFactory.create(
                getSparqlQuery(queryURI, this.visMode), SYNTAX);
        queryExecution = QueryExecutionFactory.create(query, dataSource);
        return queryExecution.execSelect();
    }

    private String getSparqlQuery(String queryURI, String visMode) {
        String result = "";

        if (visMode.equals("DEPARTMENT")) {
            // result = getSparqlQueryForDepartment(queryURI);
            ENTITY_URL = QueryFieldLabels.DEPARTMENT_URL;
            ENTITY_LABEL = QueryFieldLabels.DEPARTMENT_LABEL;
            SUBENTITY_URL = QueryFieldLabels.PERSON_URL;
            SUBENTITY_LABEL = QueryFieldLabels.PERSON_LABEL;
        } else {
            // result = getSparqlQueryForOrganization(queryURI);
            ENTITY_URL = QueryFieldLabels.ORGANIZATION_URL;
            ENTITY_LABEL = QueryFieldLabels.ORGANIZATION_LABEL;
            SUBENTITY_URL = QueryFieldLabels.SUBORGANIZATION_URL;
            SUBENTITY_LABEL = QueryFieldLabels.SUBORGANIZATION_LABEL;
        }
        result = getSparqlQueryForOrganization(queryURI);

        return result;
    }

    // private String getSparqlQueryForDepartment(String queryURI) {
    //
    //     String sparqlQuery = QueryConstants.getSparqlPrefixQuery()
    //             + "SELECT (str(?DepartmentLabel) as ?departmentLabelLit) "
    //             + SPARQL_QUERY_COMMON_SELECT_CLAUSE + " (str(<" + queryURI
    //             + ">) as ?" + QueryFieldLabels.DEPARTMENT_URL + ") "
    //             + "WHERE { " + "<" + queryURI + "> rdf:type core:Department ;"
    //             + " rdfs:label ?DepartmentLabel ;"
    //             + " core:organizationForPosition ?Position . "
    //             + " ?Position rdf:type core:Position ;"
    //             + " core:positionForPerson ?Person . "
    //             + " ?Person core:authorInAuthorship ?Resource ; "
    //             + " rdfs:label ?PersonLabel ; core:personInPosition ?SecondaryPosition . "
    //             + " ?Resource core:linkedInformationResource ?Document ."
    //             + " ?SecondaryPosition rdfs:label ?SecondaryPositionLabel ."
    //             + SPARQL_QUERY_COMMON_WHERE_CLAUSE + "}"
    //             + " ORDER BY ?DocumentLabel";
    //     System.out.println("\nThe sparql query is :\n" + sparqlQuery);
    //     return sparqlQuery;
    //
    // }

    // private String getSparqlQueryForOrganization(String queryURI) {
    //
    //     String sparqlQuery = QueryConstants.getSparqlPrefixQuery()
    //             + "SELECT (str(?organizationLabel) as ?organizationLabelLit) "
    //             + " (str(?subOrganization) as ?subOrganizationLit) "
    //             + " (str(?subOrganizationLabel) as ?subOrganizationLabelLit) "
    //             + SPARQL_QUERY_COMMON_SELECT_CLAUSE + " (str(<" + queryURI
    //             + ">) as ?" + QueryFieldLabels.ORGANIZATION_URL + ") "
    //             + "WHERE { " + "<" + queryURI + "> rdf:type foaf:Organization ;"
    //             + " rdfs:label ?organizationLabel ;"
    //             + " core:hasSubOrganization ?subOrganization ."
    //             + " ?subOrganization rdfs:label ?subOrganizationLabel ;"
    //             + " core:organizationForPosition ?Position . "
    //             + " ?Position rdf:type core:Position ;"
    //             + " core:positionForPerson ?Person . "
    //             + " ?Person core:authorInAuthorship ?Resource ; "
    //             + " rdfs:label ?PersonLabel ; core:personInPosition ?SecondaryPosition . "
    //             + " ?Resource core:linkedInformationResource ?Document ."
    //             + " ?SecondaryPosition rdfs:label ?SecondaryPositionLabel ."
    //             + SPARQL_QUERY_COMMON_WHERE_CLAUSE + "}"
    //             + " ORDER BY ?DocumentLabel";
    //     System.out.println("\nThe sparql query is :\n" + sparqlQuery);
    //     return sparqlQuery;
    //
    // }

    private String getSparqlQueryForOrganization(String queryURI) {

        String sparqlQuery = QueryConstants.getSparqlPrefixQuery()
                + "SELECT (str(?organizationLabel) as ?organizationLabelLit) "
                + " (str(?subOrganization) as ?subOrganizationLit) "
                + " (str(?subOrganizationLabel) as ?subOrganizationLabelLit) "
                + " (str(?DepartmentLabel) as ?departmentLabelLit) "
                + SPARQL_QUERY_COMMON_SELECT_CLAUSE + " (str(<" + queryURI
                + ">) as ?" + ENTITY_URL + ") "
                + "WHERE { " + "<" + queryURI + "> rdf:type foaf:Organization ;"
                + " rdfs:label ?organizationLabel ."
                + "{ "
                + "<" + queryURI + "> core:hasSubOrganization ?subOrganization ."
                + "?subOrganization rdfs:label ?subOrganizationLabel ; core:organizationForPosition ?Position . "
                + " ?Position rdf:type core:Position ; core:positionForPerson ?Person ."
                + " ?Person core:authorInAuthorship ?Resource ; rdfs:label ?PersonLabel ; core:personInPosition ?SecondaryPosition . "
                + " ?Resource core:linkedInformationResource ?Document . "
                + " ?SecondaryPosition rdfs:label ?SecondaryPositionLabel ."
                + SPARQL_QUERY_COMMON_WHERE_CLAUSE + "}"
                + "UNION "
                + "{ "
                + "<" + queryURI + "> rdf:type core:Department ; rdfs:label ?DepartmentLabel ; core:organizationForPosition ?Position ."
                + " ?Position rdf:type core:Position ; core:positionForPerson ?Person ."
                + " ?Person core:authorInAuthorship ?Resource ; rdfs:label ?PersonLabel ; core:personInPosition ?SecondaryPosition . "
                + " ?Resource core:linkedInformationResource ?Document ."
                + " ?SecondaryPosition rdfs:label ?SecondaryPositionLabel ."
                + SPARQL_QUERY_COMMON_WHERE_CLAUSE + "}"
                + "}";

        log.debug("\nThe sparql query is :\n" + sparqlQuery);

        return sparqlQuery;

    }

    public Entity getQueryResult() throws MalformedQueryParametersException {

        if (StringUtils.isNotBlank(this.entityURI)) {

            /*
             * To test for the validity of the URI submitted.
             */
            IRIFactory iRIFactory = IRIFactory.jenaImplementation();
            IRI iri = iRIFactory.create(this.entityURI);
            if (iri.hasViolation(false)) {
                String errorMsg = ((Violation) iri.violations(false).next())
                        .getShortMessage();
                log.error("Entity Comparison vis Query " + errorMsg);
                throw new MalformedQueryParametersException(
                        "URI provided for an entity is malformed.");
            }

        } else {
            throw new MalformedQueryParametersException(
                    "URL parameter is either null or empty.");
        }

        ResultSet resultSet = executeQuery(this.entityURI, this.dataSource);

        return createJavaValueObjects(resultSet);
    }

}
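A minimal usage sketch for the runner above (hypothetical caller code, not part of the deleted file): it assumes an entityURI, a Jena DataSource and a commons-logging Log are already in hand, and that any vis mode other than "DEPARTMENT" falls through to the organization branch of getSparqlQuery().

    QueryRunner<Entity> runner =
            new EntityPublicationCountQueryRunner(entityURI, dataSource, log, "ORGANIZATION");
    try {
        Entity entity = runner.getQueryResult();
        for (SubEntity subEntity : entity.getSubEntities()) {
            // Each sub-entity (person, department or school) carries its own set of bibo:Documents.
            log.debug(subEntity.getIndividualLabel() + " -> "
                    + subEntity.getDocuments().size() + " publications");
        }
    } catch (MalformedQueryParametersException e) {
        log.error(e.getMessage());
    }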
@ -1,269 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.entitycomparison;

import java.io.IOException;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import javax.servlet.RequestDispatcher;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;

import org.apache.commons.logging.Log;

import com.google.gson.Gson;
import com.hp.hpl.jena.query.DataSource;

import edu.cornell.mannlib.vitro.webapp.beans.Portal;
import edu.cornell.mannlib.vitro.webapp.controller.Controllers;
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Entity;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.JsonObject;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.SubEntity;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.UtilityFunctions;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.VisualizationRequestHandler;

public class EntityPublicationCountRequestHandler implements
        VisualizationRequestHandler {

    /*
     * Vis container holds the "id" of the div on the final response html page
     * that the visualization actually appears on.
     */
    public static String ENTITY_VIS_MODE;
    public static String SUB_ENTITY_VIS_MODE;

    public void generateVisualization(VitroRequest vitroRequest,
            HttpServletRequest request, HttpServletResponse response, Log log,
            DataSource dataSource) {

        String entityURI = vitroRequest
                .getParameter(VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY);

        String renderMode = vitroRequest
                .getParameter(VisualizationFrameworkConstants.RENDER_MODE_KEY);

        ENTITY_VIS_MODE = vitroRequest
                .getParameter(VisualizationFrameworkConstants.VIS_MODE_KEY);

        String visContainer = vitroRequest
                .getParameter(VisualizationFrameworkConstants.VIS_CONTAINER_KEY);

        QueryRunner<Entity> queryManager = new EntityPublicationCountQueryRunner(
                entityURI, dataSource, log, ENTITY_VIS_MODE);

        try {
            Entity entity = queryManager.getQueryResult();

            if (ENTITY_VIS_MODE.equalsIgnoreCase("DEPARTMENT")) {
                SUB_ENTITY_VIS_MODE = "PERSON";
            } else if (ENTITY_VIS_MODE.equalsIgnoreCase("SCHOOL")) {
                SUB_ENTITY_VIS_MODE = "DEPARTMENT";
            } else {
                SUB_ENTITY_VIS_MODE = "SCHOOL";
            }

            QueryRunner<Map<String, Set<String>>> queryManagerForsubOrganisationTypes =
                    new EntitySubOrganizationTypesQueryRunner(
                            entityURI, dataSource, log, ENTITY_VIS_MODE);

            Map<String, Set<String>> subOrganizationTypesResult =
                    queryManagerForsubOrganisationTypes.getQueryResult();

            RequestDispatcher requestDispatcher = null;

            if (VisualizationFrameworkConstants.DATA_RENDER_MODE
                    .equalsIgnoreCase(renderMode)) {

                prepareDataResponse(entity, entity.getSubEntities(),
                        subOrganizationTypesResult, response, log);

            } else if (VisualizationFrameworkConstants.STANDALONE_RENDER_MODE
                    .equalsIgnoreCase(renderMode)) {

                prepareStandaloneResponse(request, response, vitroRequest,
                        entity, entityURI, subOrganizationTypesResult, log);
                requestDispatcher = request
                        .getRequestDispatcher(Controllers.BASIC_JSP);
            }

            try {
                requestDispatcher.forward(request, response);
            } catch (Exception e) {
                log.error("EntityEditController could not forward to view.");
                log.error(e.getMessage());
                log.error(e.getStackTrace());
            }

        } catch (MalformedQueryParametersException e) {
            try {
                UtilityFunctions
                        .handleMalformedParameters(
                                e.getMessage(),
                                "Visualization Query Error - Individual Publication Count",
                                vitroRequest, request, response, log);
            } catch (ServletException e1) {
                log.error(e1.getStackTrace());
            } catch (IOException e1) {
                log.error(e1.getStackTrace());
            }
            return;
        }
    }

    /**
     * Provides response when a json file containing the publication count over
     * the years is requested.
     *
     * @param entity
     * @param subentities
     * @param subOrganizationTypesResult
     * @param response
     * @param log
     */
    private void prepareDataResponse(Entity entity, Set<SubEntity> subentities,
            Map<String, Set<String>> subOrganizationTypesResult,
            HttpServletResponse response, Log log) {

        String entityLabel = entity.getEntityLabel();

        String outputFileName = UtilityFunctions.slugify(entityLabel)
                + "_publications-per-year" + ".json";

        response.setContentType("text/plain");
        response.setHeader("Content-Disposition", "attachment;filename="
                + outputFileName);

        try {

            PrintWriter responseWriter = response.getWriter();

            /*
             * We are side-effecting responseWriter since we are directly
             * manipulating the response object of the servlet.
             */
            responseWriter.append(writePublicationsOverTimeJSON(subentities,
                    subOrganizationTypesResult, log));

            responseWriter.flush();
            responseWriter.close();

        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    /**
     * Provides response when an entire page dedicated to publication sparkline
     * is requested.
     *
     * @param request
     * @param response
     * @param vreq
     * @param entity
     * @param entityURI
     * @param subOrganizationTypesResult
     * @param log
     */
    private void prepareStandaloneResponse(HttpServletRequest request,
            HttpServletResponse response, VitroRequest vreq, Entity entity,
            String entityURI, Map<String, Set<String>> subOrganizationTypesResult,
            Log log) {

        Portal portal = vreq.getPortal();
        String jsonContent = "";
        /*
         * We are side-effecting the request since we are directly setting
         * attributes on the request object of the servlet.
         */
        jsonContent = writePublicationsOverTimeJSON(entity.getSubEntities(),
                subOrganizationTypesResult, log);

        request.setAttribute("OrganizationURI", entityURI);
        request.setAttribute("OrganizationLabel", entity.getEntityLabel());
        request.setAttribute("JsonContent", jsonContent);

        request.setAttribute("bodyJsp",
                "/templates/visualization/entity_comparison.jsp");
        request.setAttribute("portalBean", portal);
        request.setAttribute("title", "Entity Comparison visualization");
        request.setAttribute("scripts",
                "/templates/visualization/entity_comparison_inject_head.jsp");

    }

    /**
     * Generates the json content for the year <-> publication count mapping.
     *
     * @param subentities
     * @param subOrganizationTypesResult
     * @param log
     */
    private String writePublicationsOverTimeJSON(Set<SubEntity> subentities,
            Map<String, Set<String>> subOrganizationTypesResult, Log log) {
        // System.out.println("\nsub entity vis mode ------>"
        // + SUB_ENTITY_VIS_MODE + "\n");
        log.debug("Creating JSONObject \n-----------------------");
        Gson json = new Gson();
        Set<JsonObject> subEntitiesJson = new HashSet<JsonObject>();

        for (SubEntity subentity : subentities) {
            JsonObject entityJson = new JsonObject(
                    subentity.getIndividualLabel());

            List<List<Integer>> yearPubCount = new ArrayList<List<Integer>>();

            for (Map.Entry<String, Integer> pubEntry : UtilityFunctions
                    .getYearToPublicationCount(subentity.getDocuments())
                    .entrySet()) {

                List<Integer> currentPubYear = new ArrayList<Integer>();
                if (pubEntry.getKey().equals(
                        VOConstants.DEFAULT_PUBLICATION_YEAR)) {
                    currentPubYear.add(-1);
                } else {
                    currentPubYear.add(Integer.parseInt(pubEntry.getKey()));
                }
                currentPubYear.add(pubEntry.getValue());
                yearPubCount.add(currentPubYear);
            }

            entityJson.setYearToPublicationCount(yearPubCount);
            entityJson.getOrganizationType().addAll(
                    subOrganizationTypesResult.get(entityJson.getLabel()));

            entityJson.setEntityURI(subentity.getIndividualURI());
            setEntityVisMode(entityJson);
            //entityJson.setVisMode(SUB_ENTITY_VIS_MODE);
            log.debug("Adding object with uri: "
                    + entityJson.getEntityURI() + " vismode: "
                    + entityJson.getVisMode() + " label: "
                    + entityJson.getLabel() + " type: "
                    + entityJson.getOrganizationType().toString());
            subEntitiesJson.add(entityJson);
        }

        // System.out.println("\nStopWords are " + EntitySubOrganizationTypesQueryRunner.stopWords.toString() + "\n");
        return json.toJson(subEntitiesJson);

    }

    private void setEntityVisMode(JsonObject entityJson) {
        if (entityJson.getOrganizationType().contains("Department")) {
            entityJson.setVisMode("DEPARTMENT");
        } else if (entityJson.getOrganizationType().contains("School")) {
            entityJson.setVisMode("SCHOOL");
        } else {
            entityJson.setVisMode(SUB_ENTITY_VIS_MODE);
        }

    }
}
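For reference, the Gson output returned by writePublicationsOverTimeJSON() is an array with one element per sub-entity. Judging from the setters used above, each element carries roughly the shape sketched below; the exact field names live in the JsonObject value object (not shown in this diff), and all values here are purely illustrative. A year of -1 stands for publications whose year is unknown (VOConstants.DEFAULT_PUBLICATION_YEAR).

    [ {
        "label": "Department of Biology",
        "yearToPublicationCount": [[2008, 12], [2009, 17], [-1, 3]],
        "organizationType": ["Department"],
        "entityURI": "http://vivo.example.edu/individual/n123",
        "visMode": "DEPARTMENT"
    } ]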
@ -1,212 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.visualization.entitycomparison;

import java.util.HashMap;
import java.util.Map;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;

import com.hp.hpl.jena.iri.IRI;
import com.hp.hpl.jena.iri.IRIFactory;
import com.hp.hpl.jena.iri.Violation;
import com.hp.hpl.jena.query.DataSource;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.RDFNode;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;

import java.util.Set;
import java.util.HashSet;

/**
 * @author bkoniden
 * Deepak Konidena
 */
public class EntitySubOrganizationTypesQueryRunner implements QueryRunner<Map<String, Set<String>>> {

    protected static final Syntax SYNTAX = Syntax.syntaxARQ;

    private String entityURI;
    private DataSource dataSource;
    private Log log;
    private String visMode;
    static String SUBORGANISATION_LABEL;
    static String SUBORGANISATION_TYPE_LABEL;
    // public static Map<String, Integer> subOrganizationTypesToCount = new HashMap<String, Integer>();
    // public static Set<String> stopWords = new HashSet<String>();
    // public static Set<String> subOrganizations = new HashSet<String>();
    // public static Set<String> STOP_WORDS = new HashSet<String>() {
    //     {
    //         add("Person");
    //         add("Organization");
    //     }
    // };

    private static final String SPARQL_QUERY_SELECT_CLAUSE = ""
            + " (str(?organizationLabel) as ?" + QueryFieldLabels.ORGANIZATION_LABEL + ") "
            + " (str(?subOrganizationLabel) as ?" + QueryFieldLabels.SUBORGANIZATION_LABEL + ") "
            + " (str(?subOrganizationType) as ?" + QueryFieldLabels.SUBORGANIZATION_TYPE + ")"
            + " (str(?subOrganizationTypeLabel) as ?" + QueryFieldLabels.SUBORGANIZATION_TYPE_LABEL + ") ";

    public EntitySubOrganizationTypesQueryRunner(String entityURI,
            DataSource dataSource, Log log, String visMode) {

        this.entityURI = entityURI;
        this.dataSource = dataSource;
        this.log = log;
        this.visMode = visMode;
        // stopWords.clear();
        // subOrganizations.clear();
        // subOrganizationTypesToCount.clear();
    }

    private ResultSet executeQuery(String queryURI, DataSource dataSource) {

        QueryExecution queryExecution = null;
        Query query = QueryFactory.create(
                getSparqlQuery(queryURI), SYNTAX);
        queryExecution = QueryExecutionFactory.create(query, dataSource);
        return queryExecution.execSelect();
    }

    private String getSparqlQuery(String queryURI) {
        String sparqlQuery = "";

        if (!this.visMode.equals("DEPARTMENT")) {

            SUBORGANISATION_LABEL = QueryFieldLabels.SUBORGANIZATION_LABEL;
            SUBORGANISATION_TYPE_LABEL = QueryFieldLabels.SUBORGANIZATION_TYPE_LABEL;
            sparqlQuery = QueryConstants.getSparqlPrefixQuery()
                    + "SELECT "
                    + SPARQL_QUERY_SELECT_CLAUSE
                    + " WHERE { "
                    + "<"
                    + queryURI
                    + "> rdf:type foaf:Organization ;"
                    + " rdfs:label ?organizationLabel ;"
                    + " core:hasSubOrganization ?subOrganization . "
                    + " ?subOrganization rdfs:label ?subOrganizationLabel ;"
                    + " rdf:type ?subOrganizationType . "
                    + " ?subOrganizationType rdfs:label ?subOrganizationTypeLabel ."
                    + "}";

        } else {

            SUBORGANISATION_LABEL = QueryFieldLabels.PERSON_LABEL;
            SUBORGANISATION_TYPE_LABEL = QueryFieldLabels.PERSON_TYPE_LABEL;
            sparqlQuery = QueryConstants.getSparqlPrefixQuery()
                    + "SELECT "
                    + " (str(?departmentLabel) as ?" + QueryFieldLabels.DEPARTMENT_LABEL + ") "
                    + " (str(?personLabel) as ?" + QueryFieldLabels.PERSON_LABEL + ") "
                    + " (str(?personType) as ?" + QueryFieldLabels.PERSON_TYPE + ")"
                    + " (str(?personTypeLabel) as ?" + QueryFieldLabels.PERSON_TYPE_LABEL + ") "
                    + " WHERE { "
                    + "<"
                    + queryURI
                    + "> rdf:type core:Department ;"
                    + " rdfs:label ?departmentLabel ;"
                    + " core:organizationForPosition ?position . "
                    + " ?position rdf:type core:Position ; core:positionForPerson ?person . "
                    + " ?person rdfs:label ?personLabel ;"
                    + " rdf:type ?personType . "
                    + " ?personType rdfs:label ?personTypeLabel ."
                    + "}";
        }
        log.debug("\nThe sparql query is :\n" + sparqlQuery);
        return sparqlQuery;

    }

    private Map<String, Set<String>> createJavaValueObjects(ResultSet resultSet) {

        Map<String, Set<String>> subOrganizationLabelToTypes = new HashMap<String, Set<String>>();

        while (resultSet.hasNext()) {

            QuerySolution solution = resultSet.nextSolution();

            RDFNode subOrganizationLabel = solution.get(SUBORGANISATION_LABEL);

            if (subOrganizationLabelToTypes.containsKey(subOrganizationLabel.toString())) {
                RDFNode subOrganizationType = solution.get(SUBORGANISATION_TYPE_LABEL);
                if (subOrganizationType != null) {
                    subOrganizationLabelToTypes.get(subOrganizationLabel.toString())
                            .add(subOrganizationType.toString());
                    // updateSubOrganizationTypesToCount(subOrganizationType.toString());
                    // subOrganizations.add(subOrganizationLabel.toString());
                }
            } else {
                RDFNode subOrganizationType = solution.get(SUBORGANISATION_TYPE_LABEL);
                if (subOrganizationType != null) {
                    subOrganizationLabelToTypes.put(subOrganizationLabel.toString(),
                            new HashSet<String>());
                    subOrganizationLabelToTypes.get(subOrganizationLabel.toString())
                            .add(subOrganizationType.toString());
                    // updateSubOrganizationTypesToCount(subOrganizationType.toString());
                    // subOrganizations.add(subOrganizationLabel.toString());
                }
            }
        }

        // collectStopWords();

        return subOrganizationLabelToTypes;
    }

    // private void collectStopWords() {
    //     System.out.println("Inside collectStopWords \n-----------------------------\n");
    //     for(Map.Entry<String, Integer> typesCount : subOrganizationTypesToCount.entrySet()){
    //         System.out.println(typesCount.getKey() + ": "+ typesCount.getValue());
    //         if(typesCount.getValue() >= subOrganizations.size()){
    //             stopWords.add(typesCount.getKey());
    //         }
    //     }
    // }
    //
    // private void updateSubOrganizationTypesToCount(String typeLabel) {
    //     int count = 0;
    //     if(subOrganizationTypesToCount.containsKey(typeLabel)){
    //         count = subOrganizationTypesToCount.get(typeLabel);
    //         subOrganizationTypesToCount.put(typeLabel, ++count);
    //     }else{
    //         subOrganizationTypesToCount.put(typeLabel, 1);
    //     }
    // }

    public Map<String, Set<String>> getQueryResult() throws MalformedQueryParametersException {

        if (StringUtils.isNotBlank(this.entityURI)) {

            /*
             * To test for the validity of the URI submitted.
             */
            IRIFactory iRIFactory = IRIFactory.jenaImplementation();
            IRI iri = iRIFactory.create(this.entityURI);
            if (iri.hasViolation(false)) {
                String errorMsg = ((Violation) iri.violations(false).next())
                        .getShortMessage();
                log.error("Entity Comparison sub organization types query " + errorMsg);
                throw new MalformedQueryParametersException(
                        "URI provided for an entity is malformed.");
            }

        } else {
            throw new MalformedQueryParametersException(
                    "URL parameter is either null or empty.");
        }

        ResultSet resultSet = executeQuery(this.entityURI, this.dataSource);

        return createJavaValueObjects(resultSet);
    }

}
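A hypothetical caller (again, not part of the deleted file) would use this runner to look up the rdf:type labels of each sub-organization, keyed by the sub-organization's label, mirroring how the request handler above consumes the result; entityURI, dataSource, log and visMode are assumed to be in scope.

    QueryRunner<Map<String, Set<String>>> typesRunner =
            new EntitySubOrganizationTypesQueryRunner(entityURI, dataSource, log, visMode);
    try {
        // e.g. "Department of Biology" -> {"Department", "Organization"} (illustrative values)
        Map<String, Set<String>> labelToTypes = typesRunner.getQueryResult();
        log.debug(labelToTypes);
    } catch (MalformedQueryParametersException e) {
        log.error(e.getMessage());
    }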
@ -1,193 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.persongrantcount;

import java.util.HashSet;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.apache.commons.logging.Log;

import com.hp.hpl.jena.iri.IRI;
import com.hp.hpl.jena.iri.IRIFactory;
import com.hp.hpl.jena.iri.Violation;
import com.hp.hpl.jena.query.DataSource;
import com.hp.hpl.jena.query.Query;
import com.hp.hpl.jena.query.QueryExecution;
import com.hp.hpl.jena.query.QueryExecutionFactory;
import com.hp.hpl.jena.query.QueryFactory;
import com.hp.hpl.jena.query.QuerySolution;
import com.hp.hpl.jena.query.ResultSet;
import com.hp.hpl.jena.query.Syntax;
import com.hp.hpl.jena.rdf.model.RDFNode;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Grant;
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Individual;
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;

/**
 * This query runner is used to execute a SPARQL query that will fetch all the
 * grants for an individual.
 *
 * @author bkoniden
 * Deepak Konidena
 */
public class PersonGrantCountQueryRunner implements QueryRunner<Set<Grant>> {

    protected static final Syntax SYNTAX = Syntax.syntaxARQ;

    private String personURI;
    private DataSource dataSource;
    private Individual principalInvestigator;

    public Individual getPrincipalInvestigator() {
        return principalInvestigator;
    }

    private Log log;

    private static final String SPARQL_QUERY_COMMON_SELECT_CLAUSE = ""
            + "SELECT (str(?PILabel) as ?PILabelLit) "
            + "(str(?Grant) as ?grantLit)"
            + "(str(?GrantLabel) as ?grantLabelLit)"
            + "(str(?GrantStartDate) as ?grantStartDateLit)"
            + "(str(?GrantEndDate) as ?grantEndDateLit)";

    public PersonGrantCountQueryRunner(String personURI, DataSource dataSource, Log log) {

        this.personURI = personURI;
        this.dataSource = dataSource;
        this.log = log;
    }

    private Set<Grant> createJavaValueObjects(ResultSet resultSet) {
        Set<Grant> PIGrant = new HashSet<Grant>();

        while (resultSet.hasNext()) {
            QuerySolution solution = resultSet.nextSolution();

            Grant grant = new Grant(solution.get(QueryFieldLabels.GRANT_URL).toString());

            RDFNode grantLabelNode = solution.get(QueryFieldLabels.GRANT_LABEL);
            if (grantLabelNode != null) {
                grant.setIndividualLabel(grantLabelNode.toString());
            }

            RDFNode grantStartDateNode = solution.get(QueryFieldLabels.GRANT_START_DATE);
            if (grantStartDateNode != null) {
                grant.setGrantStartDate(grantStartDateNode.toString());
            }

            RDFNode grantEndDateNode = solution.get(QueryFieldLabels.GRANT_END_DATE);
            if (grantEndDateNode != null) {
                grant.setGrantEndDate(grantEndDateNode.toString());
            }

            /*
             * Since we are getting the grant count for just one PI at a time we
             * need to create only one "Individual" instance. We test against
             * null for "PI" to make sure that it has not already been
             * instantiated.
             */
            RDFNode PIURLNode = solution.get(QueryFieldLabels.PI_URL);
            if (PIURLNode != null && principalInvestigator == null) {
                principalInvestigator = new Individual(PIURLNode.toString());
                RDFNode PILabelNode = solution.get(QueryFieldLabels.PI_LABEL);
                if (PILabelNode != null) {
                    principalInvestigator.setIndividualLabel(PILabelNode.toString());
                }
            }

            PIGrant.add(grant);
        }
        return PIGrant;
    }

    private ResultSet executeQuery(String queryURI, DataSource dataSource) {

        QueryExecution queryExecution = null;

        Query query = QueryFactory.create(getSparqlQuery(queryURI), SYNTAX);
        queryExecution = QueryExecutionFactory.create(query, dataSource);

        return queryExecution.execSelect();
    }

    private String getSparqlQuery(String queryURI) {

        String sparqlQuery = QueryConstants.getSparqlPrefixQuery()
                + SPARQL_QUERY_COMMON_SELECT_CLAUSE
                + "(str(<" + queryURI + ">) as ?PILit) "
                + "WHERE "
                + "{ "
                + "<" + queryURI + "> rdfs:label ?PILabel . "
                + "{ "
                + "<" + queryURI + "> core:hasCo-PrincipalInvestigatorRole ?Role . "
                + "?Role core:roleIn ?Grant . "
                + "?Grant rdfs:label ?GrantLabel . "
                + "OPTIONAL { ?Grant core:startDate ?GrantStartDate } . "
                + "OPTIONAL { ?Grant core:endDate ?GrantEndDate } . "
                + "} "
                + "UNION "
                + "{ "
                + "<" + queryURI + "> core:hasPrincipalInvestigatorRole ?Role . "
                + "?Role core:roleIn ?Grant . "
                + "?Grant rdfs:label ?GrantLabel . "
                + "OPTIONAL { ?Grant core:startDate ?GrantStartDate } . "
                + "OPTIONAL { ?Grant core:endDate ?GrantEndDate } . "
                + "} "
                + "} ";

        log.debug("SPARQL query for person grant count -> \n" + sparqlQuery);

        return sparqlQuery;
    }

    public Set<Grant> getQueryResult() throws MalformedQueryParametersException {

        if (StringUtils.isNotBlank(this.personURI)) {

            /*
             * To test the validity of the URI submitted.
             */
            IRIFactory iriFactory = IRIFactory.jenaImplementation();
            IRI iri = iriFactory.create(this.personURI);

            if (iri.hasViolation(false)) {
                String errorMsg = ((Violation) iri.violations(false).next()).getShortMessage();
                log.error("Grant Count vis Query " + errorMsg);
                throw new MalformedQueryParametersException(
                        "URI provided for an individual is malformed.");
            }
        } else {
            throw new MalformedQueryParametersException("URL parameter is either null or empty.");
        }

        ResultSet resultSet = executeQuery(this.personURI, this.dataSource);

        return createJavaValueObjects(resultSet);
    }

}
@ -1,374 +0,0 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.persongrantcount;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import javax.servlet.RequestDispatcher;
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.ServletOutputStream;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.skife.csv.CSVWriter;
|
||||
import org.skife.csv.SimpleWriter;
|
||||
|
||||
import com.hp.hpl.jena.query.DataSource;
|
||||
import com.itextpdf.text.Document;
|
||||
import com.itextpdf.text.DocumentException;
|
||||
import com.itextpdf.text.pdf.PdfWriter;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.beans.Portal;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.Controllers;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Individual;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Grant;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.SparklineData;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.PDFDocument;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.UtilityFunctions;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.VisualizationRequestHandler;
|
||||
/**
|
||||
*
|
||||
* This request handler is used to serve the content related to an individual's
|
||||
* grants over the years like,
|
||||
* 1. Sparkline representing this
|
||||
* 2. An entire page dedicated to the sparkline vis which will also have links to
|
||||
* download the data using which the sparkline was rendered & its tabular representation etc.
|
||||
* 3. Downloadable CSV file containing number of grants over the years.
|
||||
* 4. Downloadable PDf file containing the grant content, among other things.
|
||||
* Currently this is disabled because the feature is half-baked. We plan to activate this in
|
||||
* the next major release.
|
||||
*
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
public class PersonGrantCountRequestHandler implements VisualizationRequestHandler {
|
||||
|
||||
public void generateVisualization(VitroRequest vitroRequest,
|
||||
HttpServletRequest request,
|
||||
HttpServletResponse response,
|
||||
Log log,
|
||||
DataSource dataSource) {
|
||||
|
||||
String personURI = vitroRequest.getParameter(
|
||||
VisualizationFrameworkConstants
|
||||
.INDIVIDUAL_URI_KEY);
|
||||
|
||||
String renderMode = vitroRequest.getParameter(
|
||||
VisualizationFrameworkConstants
|
||||
.RENDER_MODE_KEY);
|
||||
|
||||
String visMode = vitroRequest.getParameter(
|
||||
VisualizationFrameworkConstants
|
||||
.VIS_MODE_KEY);
|
||||
|
||||
String visContainer = vitroRequest.getParameter(
|
||||
VisualizationFrameworkConstants
|
||||
.VIS_CONTAINER_KEY);
|
||||
|
||||
QueryRunner<Set<Grant>> queryManager =
|
||||
new PersonGrantCountQueryRunner(personURI, dataSource, log);
|
||||
|
||||
try {
|
||||
Set<Grant> PIGrants = queryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of grants. Use the Grant's
|
||||
* parsedPublicationYear to populate the data.
|
||||
* */
|
||||
Map<String, Integer> yearToGrantCount =
|
||||
UtilityFunctions.getYearToGrantCount(PIGrants);
|
||||
|
||||
Individual investigator = ((PersonGrantCountQueryRunner) queryManager).getPrincipalInvestigator();
|
||||
|
||||
if (VisualizationFrameworkConstants.DATA_RENDER_MODE
|
||||
.equalsIgnoreCase(renderMode)) {
|
||||
|
||||
prepareDataResponse(investigator,
|
||||
PIGrants,
|
||||
yearToGrantCount,
|
||||
response);
|
||||
return;
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* For now we are disabling the capability to render pdf file.
|
||||
* */
|
||||
/*
|
||||
if (VisualizationFrameworkConstants.PDF_RENDER_MODE
|
||||
.equalsIgnoreCase(renderMode)) {
|
||||
|
||||
preparePDFResponse(author,
|
||||
authorDocuments,
|
||||
yearToPublicationCount,
|
||||
response);
|
||||
return;
|
||||
}
|
||||
*/
|
||||
|
||||
/*
|
||||
* Computations required to generate HTML for the sparkline & related context.
|
||||
* */
|
||||
PersonGrantCountVisCodeGenerator visualizationCodeGenerator =
|
||||
new PersonGrantCountVisCodeGenerator(vitroRequest.getContextPath(),
|
||||
personURI,
|
||||
visMode,
|
||||
visContainer,
|
||||
PIGrants,
|
||||
yearToGrantCount,
|
||||
log);
|
||||
|
||||
SparklineData sparklineData = visualizationCodeGenerator
|
||||
.getValueObjectContainer();
|
||||
|
||||
/*
|
||||
* This is side-effecting because the response of this method is just to redirect to
|
||||
* a page with visualization on it.
|
||||
* */
|
||||
RequestDispatcher requestDispatcher = null;
|
||||
|
||||
if (VisualizationFrameworkConstants.DYNAMIC_RENDER_MODE
|
||||
.equalsIgnoreCase(renderMode)) {
|
||||
|
||||
prepareDynamicResponse(request,
|
||||
response,
|
||||
vitroRequest,
|
||||
sparklineData,
|
||||
yearToGrantCount);
|
||||
requestDispatcher = request.getRequestDispatcher("/templates/page/blankPage.jsp");
|
||||
|
||||
} else {
|
||||
prepareStandaloneResponse(request,
|
||||
response,
|
||||
vitroRequest,
|
||||
sparklineData);
|
||||
requestDispatcher = request.getRequestDispatcher(Controllers.BASIC_JSP);
|
||||
}
|
||||
|
||||
try {
|
||||
requestDispatcher.forward(request, response);
|
||||
} catch (Exception e) {
|
||||
log.error("EntityEditController could not forward to view.");
|
||||
log.error(e.getMessage());
|
||||
log.error(e.getStackTrace());
|
||||
}
|
||||
|
||||
} catch (MalformedQueryParametersException e) {
|
||||
try {
|
||||
UtilityFunctions.handleMalformedParameters(
|
||||
e.getMessage(),
|
||||
"Visualization Query Error - Individual Publication Count",
|
||||
vitroRequest,
|
||||
request,
|
||||
response,
|
||||
log);
|
||||
} catch (ServletException e1) {
|
||||
log.error(e1.getStackTrace());
|
||||
} catch (IOException e1) {
|
||||
log.error(e1.getStackTrace());
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
private void writeGrantsOverTimeCSV(
|
||||
Map<String, Integer> yearToGrantCount,
|
||||
PrintWriter responseWriter) {
|
||||
|
||||
CSVWriter csvWriter = new SimpleWriter(responseWriter);
|
||||
|
||||
try {
|
||||
csvWriter.append(new String[] { "Year", "Grants" });
|
||||
for (Entry<String, Integer> currentEntry : yearToGrantCount
|
||||
.entrySet()) {
|
||||
csvWriter.append(new Object[] { currentEntry.getKey(),
|
||||
currentEntry.getValue() });
|
||||
}
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
responseWriter.flush();
|
||||
|
||||
}
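/*
 * Illustrative note (editor's sketch, not part of the original source): assuming the
 * SimpleWriter joins each appended array with commas and a newline, the method above
 * would emit CSV along the lines of:
 *
 *   Year,Grants
 *   2008,3
 *   2009,2
 */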
|
||||
|
||||
/**
|
||||
* Provides response when csv file containing the grant count over the years
|
||||
* is requested.
|
||||
* @param investigator
|
||||
* @param piGrants
|
||||
* @param yearToGrantCount
|
||||
* @param response
|
||||
*/
|
||||
private void prepareDataResponse(
|
||||
Individual investigator,
|
||||
Set<Grant> piGrants,
|
||||
Map<String, Integer> yearToGrantCount,
|
||||
HttpServletResponse response) {
|
||||
|
||||
String investigatorName = null;
|
||||
|
||||
/*
|
||||
* To protect against cases where there are no grants associated with the
|
||||
* individual.
|
||||
* */
|
||||
if (piGrants.size() > 0) {
|
||||
investigatorName = investigator.getIndividualLabel();
|
||||
}
|
||||
|
||||
/*
|
||||
* To make sure that null/empty records for investigator names do not cause any mischief.
|
||||
* */
|
||||
if (StringUtils.isBlank(investigatorName)) {
|
||||
investigatorName = "no-investigator";
|
||||
}
|
||||
|
||||
String outputFileName = UtilityFunctions.slugify(investigatorName)
|
||||
+ "_grants-per-year" + ".csv";
|
||||
|
||||
response.setContentType("application/octet-stream");
|
||||
response.setHeader("Content-Disposition", "attachment;filename=" + outputFileName);
|
||||
|
||||
try {
|
||||
|
||||
PrintWriter responseWriter = response.getWriter();
|
||||
|
||||
/*
|
||||
* We are side-effecting responseWriter since we are directly manipulating the response
|
||||
* object of the servlet.
|
||||
* */
|
||||
writeGrantsOverTimeCSV(yearToGrantCount, responseWriter);
|
||||
|
||||
responseWriter.close();
|
||||
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides response when an entire page dedicated to the grant sparkline is requested.
|
||||
* @param request
|
||||
* @param response
|
||||
* @param vreq
|
||||
* @param valueObjectContainer
|
||||
*/
|
||||
private void prepareStandaloneResponse(HttpServletRequest request,
|
||||
HttpServletResponse response, VitroRequest vreq,
|
||||
SparklineData valueObjectContainer) {
|
||||
|
||||
Portal portal = vreq.getPortal();
|
||||
|
||||
request.setAttribute("sparklineVO", valueObjectContainer);
|
||||
|
||||
request.setAttribute("bodyJsp", "/templates/visualization/grant_count.jsp");
|
||||
request.setAttribute("portalBean", portal);
|
||||
request.setAttribute("title", "Individual Grant Count visualization");
|
||||
request.setAttribute("scripts", "/templates/visualization/visualization_scripts.jsp");
|
||||
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides response when the grant sparkline has to be rendered in already existing
|
||||
* page, e.g. profile page.
|
||||
* @param request
|
||||
* @param response
|
||||
* @param vreq
|
||||
* @param valueObjectContainer
|
||||
* @param yearToGrantCount
|
||||
*/
|
||||
private void prepareDynamicResponse(
|
||||
HttpServletRequest request,
|
||||
HttpServletResponse response,
|
||||
VitroRequest vreq,
|
||||
SparklineData valueObjectContainer,
|
||||
Map<String, Integer> yearToGrantCount) {
|
||||
|
||||
Portal portal = vreq.getPortal();
|
||||
|
||||
request.setAttribute("sparklineVO", valueObjectContainer);
|
||||
|
||||
if (yearToGrantCount.size() > 0) {
|
||||
request.setAttribute("shouldVIVOrenderVis", true);
|
||||
} else {
|
||||
request.setAttribute("shouldVIVOrenderVis", false);
|
||||
}
|
||||
|
||||
request.setAttribute("portalBean", portal);
|
||||
request.setAttribute("bodyJsp", "/templates/visualization/ajax_vis_content.jsp");
|
||||
}
|
||||
|
||||
private void preparePDFResponse(Individual investigator,
|
||||
Set<Grant> piGrants,
|
||||
Map<String, Integer> yearToGrantCount,
|
||||
HttpServletResponse response) {
|
||||
|
||||
String investigatorName = null;
|
||||
|
||||
/*
|
||||
* To protect against cases where there are no PI Grants
|
||||
* associated with the individual.
|
||||
*/
|
||||
if (piGrants.size() > 0) {
|
||||
investigatorName = investigator.getIndividualLabel();
|
||||
}
|
||||
|
||||
/*
|
||||
* To make sure that null/empty records for PI names do not cause
|
||||
* any mischief.
|
||||
*/
|
||||
if (StringUtils.isBlank(investigatorName)) {
|
||||
investigatorName = "no-investigator";
|
||||
}
|
||||
|
||||
String outputFileName = UtilityFunctions.slugify(investigatorName)
|
||||
+ "_report" + ".pdf";
|
||||
|
||||
response.setContentType("application/pdf");
|
||||
response.setHeader("Content-Disposition", "attachment;filename="
|
||||
+ outputFileName);
|
||||
|
||||
ServletOutputStream responseOutputStream;
|
||||
try {
|
||||
responseOutputStream = response.getOutputStream();
|
||||
|
||||
Document document = new Document();
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
PdfWriter pdfWriter = PdfWriter.getInstance(document, baos);
|
||||
document.open();
|
||||
|
||||
PDFDocument pdfDocument = new PDFDocument(investigatorName,
|
||||
yearToGrantCount, document, pdfWriter);
|
||||
|
||||
document.close();
|
||||
|
||||
// setting some response headers & content type
|
||||
response.setHeader("Expires", "0");
|
||||
response.setHeader("Cache-Control",
|
||||
"must-revalidate, post-check=0, pre-check=0");
|
||||
response.setHeader("Pragma", "public");
|
||||
response.setContentLength(baos.size());
|
||||
// write ByteArrayOutputStream to the ServletOutputStream
|
||||
baos.writeTo(responseOutputStream);
|
||||
responseOutputStream.flush();
|
||||
responseOutputStream.close();
|
||||
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
} catch (DocumentException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
}
|
|
@ -1,680 +0,0 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.persongrantcount;
|
||||
|
||||
import java.io.UnsupportedEncodingException;
|
||||
import java.net.URLEncoder;
|
||||
import java.util.Calendar;
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.NoSuchElementException;
|
||||
import java.util.Set;
|
||||
import java.util.Map.Entry;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationController;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VisConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Grant;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.SparklineData;
|
||||
|
||||
/**
|
||||
* Class for rendering sparklines of grants over time for a person
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
@SuppressWarnings("serial")
|
||||
public class PersonGrantCountVisCodeGenerator {
|
||||
|
||||
/*
|
||||
* There are 2 modes of sparkline that are available via this visualization.
|
||||
* 1. Short Sparkline - This sparkline will render all the data points (or sparks),
|
||||
* which in this case are the grants over the years, from the last 10 years.
|
||||
*
|
||||
* 2. Full Sparkline - This sparkline will render all the data points (or sparks)
|
||||
* spanning the career of the person, or the last 10 years at a minimum in case
|
||||
* the person started their career within the last 10 years.
|
||||
* */
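/*
 * Illustrative example (editor's note, not part of the original source): assuming
 * VisConstants.MINIMUM_YEARS_CONSIDERED_FOR_SPARKLINE is 10 and the current year is
 * 2010, an investigator whose earliest grant year is 1995 gets a Full Sparkline
 * spanning 1995-2010, while the Short Sparkline covers only 2001-2010.
 */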
|
||||
|
||||
private static final Map<String, String> VIS_DIV_NAMES = new HashMap<String, String>() { {
|
||||
|
||||
put("SHORT_SPARK", "grant_count_short_sparkline_vis");
|
||||
put("FULL_SPARK", "grant_count_full_sparkline_vis");
|
||||
|
||||
} };
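/*
 * Editor's note: the map above is built via double-brace initialization, i.e. an
 * anonymous subclass of HashMap with an instance initializer block. That anonymous
 * class declares no serialVersionUID, which is presumably why the enclosing class
 * carries @SuppressWarnings("serial").
 */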
|
||||
|
||||
private static final String VISUALIZATION_STYLE_CLASS = "sparkline_style";
|
||||
|
||||
private static final String DEFAULT_VIS_CONTAINER_DIV_ID = "grant_count_vis_container";
|
||||
|
||||
private Map<String, Integer> yearToGrantCount;
|
||||
|
||||
private Log log;
|
||||
|
||||
private SparklineData sparklineData;
|
||||
|
||||
private String contextPath;
|
||||
|
||||
private String individualURI;
|
||||
|
||||
public PersonGrantCountVisCodeGenerator(String contextPath,
|
||||
String individualURIParam, String visMode, String visContainer,
|
||||
Set<Grant> piGrants,
|
||||
Map<String, Integer> yearToGrantCount, Log log) {
|
||||
|
||||
this.contextPath = contextPath;
|
||||
this.individualURI = individualURIParam;
|
||||
|
||||
this.yearToGrantCount = yearToGrantCount;
|
||||
this.sparklineData = new SparklineData();
|
||||
|
||||
this.log = log;
|
||||
|
||||
generateVisualizationCode(visMode, visContainer, piGrants);
|
||||
}
|
||||
|
||||
/**
|
||||
* This method is used to generate the visualization code (HTML, CSS & JavaScript).
|
||||
* There are 2 parts to it - 1. Actual Content Code & 2. Context Code.
|
||||
* 1. Actual Content code in this case is the sparkline image, text related to
|
||||
* data and the wrapping tables. This is generated via call to google vis API through
|
||||
* JavaScript.
|
||||
* 2. Context code is generally optional but contains code pertaining to tabulated
|
||||
* data & links to download files etc.
|
||||
* @param visMode
|
||||
* @param visContainer
|
||||
* @param piGrants
|
||||
*/
|
||||
private void generateVisualizationCode(String visMode,
|
||||
String visContainer,
|
||||
Set<Grant> piGrants) {
|
||||
|
||||
sparklineData.setSparklineContent(getMainVisualizationCode(piGrants,
|
||||
visMode,
|
||||
visContainer));
|
||||
|
||||
|
||||
sparklineData.setSparklineContext(getVisualizationContextCode(visMode));
|
||||
|
||||
}
|
||||
|
||||
private String getMainVisualizationCode(Set<Grant> piGrants,
|
||||
String visMode, String providedVisContainerID) {
|
||||
|
||||
int numOfYearsToBeRendered = 0;
|
||||
int currentYear = Calendar.getInstance().get(Calendar.YEAR);
|
||||
int shortSparkMinYear = currentYear
|
||||
- VisConstants.MINIMUM_YEARS_CONSIDERED_FOR_SPARKLINE + 1;
|
||||
|
||||
/*
|
||||
* This is required because when deciding the range of years over which
|
||||
* the vis will be rendered we don't want to be influenced by the
|
||||
* "DEFAULT_GRANT_YEAR".
|
||||
*/
|
||||
Set<String> investigatedYears = new HashSet<String>(yearToGrantCount
|
||||
.keySet());
|
||||
investigatedYears.remove(VOConstants.DEFAULT_GRANT_YEAR);
|
||||
|
||||
/*
|
||||
* We are setting the default value of minInvestigatedYear to be 10 years
|
||||
* before the current year (which is suitably represented by the
|
||||
* shortSparkMinYear), in case we run into an invalid set of investigated
|
||||
* years.
|
||||
*/
|
||||
int minInvestigatedYear = shortSparkMinYear;
|
||||
|
||||
String visContainerID = null;
|
||||
|
||||
StringBuilder visualizationCode = new StringBuilder();
|
||||
|
||||
if (yearToGrantCount.size() > 0) {
|
||||
try {
|
||||
minInvestigatedYear = Integer.parseInt(Collections
|
||||
.min(investigatedYears));
|
||||
} catch (NoSuchElementException e1) {
|
||||
log.debug("vis: " + e1.getMessage() + " error occurred for "
|
||||
+ yearToGrantCount.toString());
|
||||
} catch (NumberFormatException e2) {
|
||||
log.debug("vis: " + e2.getMessage() + " error occurred for "
|
||||
+ yearToGrantCount.toString());
|
||||
}
|
||||
}
|
||||
|
||||
int minInvestigatedYearConsidered = 0;
|
||||
|
||||
/*
|
||||
* There might be a case where the investigator received their first grant
|
||||
* within the last 10 years but we want to make sure that the sparkline
|
||||
* is representative of at least the last 10 years, so we will set the
|
||||
* minInvestigatedYearConsidered to "currentYear - 10" which is also given by
|
||||
* "shortSparkMinYear".
|
||||
*/
|
||||
if (minInvestigatedYear > shortSparkMinYear) {
|
||||
minInvestigatedYearConsidered = shortSparkMinYear;
|
||||
} else {
|
||||
minInvestigatedYearConsidered = minInvestigatedYear;
|
||||
}
|
||||
|
||||
numOfYearsToBeRendered = currentYear - minInvestigatedYearConsidered + 1;
|
||||
|
||||
visualizationCode.append("<style type='text/css'>" + "."
|
||||
+ VISUALIZATION_STYLE_CLASS + " table{" + " margin: 0;"
|
||||
+ " padding: 0;" + " width: auto;"
|
||||
+ " border-collapse: collapse;" + " border-spacing: 0;"
|
||||
+ " vertical-align: inherit;" + "}"
|
||||
+ "table.sparkline_wrapper_table td, th {"
|
||||
+ " vertical-align: bottom;" + "}" + ".vis_link a{"
|
||||
+ " padding-top: 5px;" + "}"
|
||||
+ "td.sparkline_number { text-align:right; "
|
||||
+ "padding-right:5px; }"
|
||||
+ "td.sparkline_text {text-align:left;}"
|
||||
+ ".incomplete-data-holder {" + "" + "}" + "</style>\n");
|
||||
|
||||
visualizationCode.append("<script type=\"text/javascript\">\n"
|
||||
+ "function drawGrantCountVisualization(providedSparklineImgTD) "
|
||||
+ "{\n" + "var data = new google.visualization.DataTable();\n"
|
||||
+ "data.addColumn('string', 'Year');\n"
|
||||
+ "data.addColumn('number', 'Publications');\n"
|
||||
+ "data.addRows(" + numOfYearsToBeRendered + ");\n");
|
||||
|
||||
int grantCounter = 0;
|
||||
|
||||
/*
|
||||
* For the purpose of this visualization I have come up with a term
|
||||
* "Sparks" which essentially means data points. Sparks that will be
|
||||
* rendered in full mode will always be the ones which have a year
|
||||
* associated with them, hence the renderedFullSparks counter below.
|
||||
*/
|
||||
int renderedFullSparks = 0;
|
||||
|
||||
for (int grantYear = minInvestigatedYearConsidered; grantYear <= currentYear; grantYear++) {
|
||||
|
||||
String stringInvestigatedYear = String.valueOf(grantYear);
|
||||
Integer currentGrants = yearToGrantCount
|
||||
.get(stringInvestigatedYear);
|
||||
|
||||
if (currentGrants == null) {
|
||||
currentGrants = 0;
|
||||
}
|
||||
|
||||
visualizationCode.append("data.setValue(" + grantCounter
|
||||
+ ", 0, '" + stringInvestigatedYear + "');\n");
|
||||
|
||||
visualizationCode.append("data.setValue(" + grantCounter
|
||||
+ ", 1, " + currentGrants + ");\n");
|
||||
|
||||
/*
|
||||
* Sparks that will be rendered in full mode will always be the
|
||||
* ones which have a year associated with them.
|
||||
*/
|
||||
renderedFullSparks += currentGrants;
|
||||
grantCounter++;
|
||||
}
|
||||
|
||||
/*
|
||||
* Total grants will also consider grants that have no year
|
||||
* associated with them, hence the unknownYearGrants count below.
|
||||
*/
|
||||
Integer unknownYearGrants = 0;
|
||||
if (yearToGrantCount.get(VOConstants.DEFAULT_GRANT_YEAR) != null) {
|
||||
unknownYearGrants = yearToGrantCount
|
||||
.get(VOConstants.DEFAULT_GRANT_YEAR);
|
||||
}
|
||||
|
||||
String sparklineDisplayOptions = "{width: 150, height: 30, showAxisLines: false, "
|
||||
+ "showValueLabels: false, labelPosition: 'none'}";
|
||||
|
||||
if (providedVisContainerID != null) {
|
||||
visContainerID = providedVisContainerID;
|
||||
} else {
|
||||
visContainerID = DEFAULT_VIS_CONTAINER_DIV_ID;
|
||||
}
|
||||
|
||||
/*
|
||||
* By default these represent the range of the rendered sparks. Only in
|
||||
* case of "short" sparkline mode we will set the Earliest
|
||||
* RenderedGrant year to "currentYear - 10".
|
||||
*/
|
||||
sparklineData.setEarliestRenderedGrantYear(minInvestigatedYear);
|
||||
sparklineData.setLatestRenderedGrantYear(currentYear);
|
||||
|
||||
/*
|
||||
* The Full Sparkline will be rendered by default. Only if the URL makes
|
||||
* specific mention of SHORT_SPARKLINE_MODE_URL_HANDLE do we render
|
||||
* the short sparkline instead.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Since building StringBuilder objects (which are used to store
|
||||
* the vis code) is essentially a side-effecting process, we have made both
|
||||
* of the generator methods side-effecting. They both side-effect
|
||||
* "visualizationCode"
|
||||
*/
|
||||
if (VisualizationFrameworkConstants.SHORT_SPARKLINE_VIS_MODE
|
||||
.equalsIgnoreCase(visMode)) {
|
||||
|
||||
sparklineData.setEarliestRenderedGrantYear(shortSparkMinYear);
|
||||
generateShortSparklineVisualizationContent(currentYear,
|
||||
shortSparkMinYear, visContainerID, visualizationCode,
|
||||
unknownYearGrants, sparklineDisplayOptions);
|
||||
} else {
|
||||
generateFullSparklineVisualizationContent(currentYear,
|
||||
minInvestigatedYearConsidered, visContainerID, visualizationCode,
|
||||
unknownYearGrants, renderedFullSparks,
|
||||
sparklineDisplayOptions);
|
||||
}
|
||||
log.debug(visualizationCode);
|
||||
return visualizationCode.toString();
|
||||
}
|
||||
|
||||
|
||||
private void generateShortSparklineVisualizationContent(int currentYear,
|
||||
int shortSparkMinYear, String visContainerID,
|
||||
StringBuilder visualizationCode, int unknownYearGrants,
|
||||
String sparklineDisplayOptions) {
|
||||
|
||||
/*
|
||||
* Create a view of the data containing only the column pertaining to
|
||||
* grant count.
|
||||
*/
|
||||
visualizationCode.append("var shortSparklineView = "
|
||||
+ "new google.visualization.DataView(data);\n"
|
||||
+ "shortSparklineView.setColumns([1]);\n");
|
||||
|
||||
/*
|
||||
* For the short view we only want the last 10 year's view of
|
||||
* grant count, hence we filter the data we actually want to use
|
||||
* for render.
|
||||
*/
|
||||
visualizationCode.append("shortSparklineView.setRows("
|
||||
+ "data.getFilteredRows([{column: 0, " + "minValue: '"
|
||||
+ shortSparkMinYear + "', " + "maxValue: '" + currentYear
|
||||
+ "'}])" + ");\n");
|
||||
|
||||
/*
|
||||
* Create the vis object and draw it in the div pertaining to
|
||||
* short-sparkline.
|
||||
*/
|
||||
visualizationCode
|
||||
.append("var short_spark = new google.visualization.ImageSparkLine("
|
||||
+ "providedSparklineImgTD[0]"
|
||||
+ ");\n"
|
||||
+ "short_spark.draw(shortSparklineView, "
|
||||
+ sparklineDisplayOptions + ");\n");
|
||||
|
||||
/*
|
||||
* We want to display how many grant counts were considered, so
|
||||
* the following snippet calculates that number.
|
||||
*/
|
||||
visualizationCode
|
||||
.append("var shortSparkRows = shortSparklineView.getViewRows();\n"
|
||||
+ "var renderedShortSparks = 0;\n"
|
||||
+ "$.each(shortSparkRows, function(index, value) {"
|
||||
+ "renderedShortSparks += data.getValue(value, 1);"
|
||||
+ "});\n");
|
||||
|
||||
/*
|
||||
* Generate the text introducing the vis.
|
||||
*/
|
||||
|
||||
String incompleteDataText = "This information is based solely on grants which "
|
||||
+ "have been loaded into the VIVO system. "
|
||||
+ "This may only be a small sample of the person\\'s "
|
||||
+ "total work.";
|
||||
|
||||
visualizationCode.append("$('#" + VIS_DIV_NAMES.get("SHORT_SPARK")
|
||||
+ " td.sparkline_number').text("
|
||||
+ "parseInt(renderedShortSparks) " + "+ parseInt("
|
||||
+ unknownYearGrants + "));");
|
||||
|
||||
visualizationCode.append("var shortSparksText = ''"
|
||||
+ "+ ' grant(s) within the last 10 years "
|
||||
+ "<span class=\"incomplete-data-holder\" title=\""
|
||||
+ incompleteDataText + "\">incomplete data</span>'" + "+ '';"
|
||||
+ "$('#" + VIS_DIV_NAMES.get("SHORT_SPARK") + " "
|
||||
+ "td.sparkline_text').html(shortSparksText);");
|
||||
|
||||
visualizationCode.append("}\n ");
|
||||
|
||||
/*
|
||||
* Generate the code that will activate the visualization. It takes care
|
||||
* of creating div elements to hold the actual sparkline image and then
|
||||
* calling the drawGrantCountVisualization function.
|
||||
*/
|
||||
visualizationCode.append(generateVisualizationActivator(VIS_DIV_NAMES
|
||||
.get("SHORT_SPARK"), visContainerID));
|
||||
|
||||
}
|
||||
|
||||
private void generateFullSparklineVisualizationContent(
|
||||
int currentYear,
|
||||
int minGrantYearConsidered,
|
||||
String visContainerID,
|
||||
StringBuilder visualizationCode,
|
||||
int unknownYearGrants,
|
||||
int renderedFullSparks,
|
||||
String sparklineDisplayOptions) {
|
||||
|
||||
String csvDownloadURLHref = "";
|
||||
|
||||
try {
|
||||
if (getCSVDownloadURL() != null) {
|
||||
|
||||
csvDownloadURLHref = "<a href=\"" + getCSVDownloadURL()
|
||||
+ "\" class=\"inline_href\">(.CSV File)</a>";
|
||||
|
||||
} else {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
|
||||
visualizationCode.append("var fullSparklineView = "
|
||||
+ "new google.visualization.DataView(data);\n"
|
||||
+ "fullSparklineView.setColumns([1]);\n");
|
||||
|
||||
visualizationCode.append("var full_spark = new google.visualization.ImageSparkLine("
|
||||
+ "providedSparklineImgTD[0]"
|
||||
+ ");\n"
|
||||
+ "full_spark.draw(fullSparklineView, "
|
||||
+ sparklineDisplayOptions + ");\n");
|
||||
|
||||
visualizationCode.append("$('#" + VIS_DIV_NAMES.get("FULL_SPARK")
|
||||
+ " td.sparkline_number').text('" + (renderedFullSparks
|
||||
+ unknownYearGrants) + "').css('font-weight', 'bold');");
|
||||
|
||||
visualizationCode.append("var allSparksText = ''"
|
||||
+ "+ ' <h3>grant(s)</h3> '"
|
||||
+ "+ ' "
|
||||
+ "<span class=\"sparkline_range\">"
|
||||
+ "from " + minGrantYearConsidered + " to " + currentYear + ""
|
||||
+ "</span> '"
|
||||
+ "+ ' " + csvDownloadURLHref + " ';"
|
||||
+ "$('#" + VIS_DIV_NAMES.get("FULL_SPARK")
|
||||
+ " td.sparkline_text').html(allSparksText);");
|
||||
|
||||
visualizationCode.append("}\n ");
|
||||
|
||||
visualizationCode.append(generateVisualizationActivator(VIS_DIV_NAMES.get("FULL_SPARK"),
|
||||
visContainerID));
|
||||
|
||||
}
|
||||
|
||||
private String generateVisualizationActivator(String sparklineID, String visContainerID) {
|
||||
|
||||
String sparklineTableWrapper = "\n"
|
||||
+ "var table = $('<table>');"
|
||||
+ "table.attr('class', 'sparkline_wrapper_table');"
|
||||
+ "var row = $('<tr>');"
|
||||
+ "sparklineImgTD = $('<td>');"
|
||||
+ "sparklineImgTD.attr('id', '" + sparklineID + "_img');"
|
||||
+ "sparklineImgTD.attr('width', '65');"
|
||||
// + "sparklineImgTD.attr('align', 'right');"
|
||||
+ "sparklineImgTD.attr('class', '" + VISUALIZATION_STYLE_CLASS + "');"
|
||||
+ "row.append(sparklineImgTD);"
|
||||
+ "var row2 = $('<tr>');"
|
||||
+ "var sparklineNumberTD = $('<td>');"
|
||||
// + "sparklineNumberTD.attr('width', '30');"
|
||||
// + "sparklineNumberTD.attr('align', 'right');"
|
||||
+ "sparklineNumberTD.attr('class', 'sparkline_number');"
|
||||
+ "sparklineNumberTD.css('text-align', 'center');"
|
||||
+ "row2.append(sparklineNumberTD);"
|
||||
+ "var row3 = $('<tr>');"
|
||||
+ "var sparklineTextTD = $('<td>');"
|
||||
// + "sparklineTextTD.attr('width', '450');"
|
||||
+ "sparklineTextTD.attr('class', 'sparkline_text');"
|
||||
+ "row3.append(sparklineTextTD);"
|
||||
+ "table.append(row);"
|
||||
+ "table.append(row2);"
|
||||
+ "table.append(row3);"
|
||||
+ "table.prependTo('#" + sparklineID + "');\n";
|
||||
|
||||
return "$(document).ready(function() {"
|
||||
+ "var sparklineImgTD; "
|
||||
|
||||
/*
|
||||
* This is a nuclear option (creating the container in which everything goes):
|
||||
* the only reason it will ever be used is that the API user never submitted a
|
||||
* container ID in which everything goes. The alternative was to let the
|
||||
* vis not appear in the calling page at all. So now at least the vis appears, but
|
||||
* appended at the bottom of the body.
|
||||
* */
|
||||
|
||||
+ "if ($('#" + visContainerID + "').length === 0) {"
|
||||
+ " $('<div/>', {'id': '" + visContainerID + "'"
|
||||
+ " }).appendTo('body');"
|
||||
+ "}"
|
||||
+ "if ($('#" + sparklineID + "').length === 0) {"
|
||||
+ "$('<div/>', {'id': '" + sparklineID + "',"
|
||||
+ "'class': '" + VISUALIZATION_STYLE_CLASS + "'"
|
||||
+ "}).prependTo('#" + visContainerID + "');"
|
||||
+ sparklineTableWrapper
|
||||
+ "}"
|
||||
+ "drawGrantCountVisualization(sparklineImgTD);"
|
||||
+ "});"
|
||||
+ "</script>\n";
|
||||
}
|
||||
|
||||
private String getVisualizationContextCode(String visMode) {
|
||||
|
||||
String visualizationContextCode = "";
|
||||
if (VisualizationFrameworkConstants.SHORT_SPARKLINE_VIS_MODE.equalsIgnoreCase(visMode)) {
|
||||
visualizationContextCode = generateShortVisContext();
|
||||
} else {
|
||||
visualizationContextCode = generateFullVisContext();
|
||||
}
|
||||
|
||||
log.debug(visualizationContextCode);
|
||||
|
||||
return visualizationContextCode;
|
||||
}
|
||||
|
||||
private String generateFullVisContext() {
|
||||
|
||||
StringBuilder divContextCode = new StringBuilder();
|
||||
|
||||
String csvDownloadURLHref = "";
|
||||
|
||||
if (yearToGrantCount.size() > 0) {
|
||||
|
||||
try {
|
||||
if (getCSVDownloadURL() != null) {
|
||||
|
||||
csvDownloadURLHref = "Download data as <a href='"
|
||||
+ getCSVDownloadURL() + "'>.csv</a> file.<br />";
|
||||
sparklineData.setDownloadDataLink(getCSVDownloadURL());
|
||||
|
||||
} else {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
} else {
|
||||
csvDownloadURLHref = "No data available to export.<br />";
|
||||
}
|
||||
|
||||
String tableCode = generateDataTable();
|
||||
|
||||
divContextCode.append("<p>" + tableCode + csvDownloadURLHref + "</p>");
|
||||
|
||||
sparklineData.setTable(tableCode);
|
||||
|
||||
return divContextCode.toString();
|
||||
}
|
||||
|
||||
private String getCSVDownloadURL() throws UnsupportedEncodingException {
|
||||
|
||||
if (yearToGrantCount.size() > 0) {
|
||||
|
||||
String secondaryContextPath = "";
|
||||
if (!contextPath
|
||||
.contains(VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX)) {
|
||||
secondaryContextPath = VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX;
|
||||
}
|
||||
|
||||
String downloadURL = contextPath
|
||||
+ secondaryContextPath
|
||||
+ "?"
|
||||
+ VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY
|
||||
+ "="
|
||||
+ URLEncoder.encode(individualURI,
|
||||
VisualizationController.URL_ENCODING_SCHEME)
|
||||
.toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_TYPE_KEY
|
||||
+ "="
|
||||
+ URLEncoder
|
||||
.encode(
|
||||
VisualizationFrameworkConstants.PERSON_GRANT_COUNT_VIS,
|
||||
VisualizationController.URL_ENCODING_SCHEME)
|
||||
.toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.RENDER_MODE_KEY
|
||||
+ "="
|
||||
+ URLEncoder.encode(
|
||||
VisualizationFrameworkConstants.DATA_RENDER_MODE,
|
||||
VisualizationController.URL_ENCODING_SCHEME)
|
||||
.toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_MODE_KEY
|
||||
+ "="
|
||||
+ URLEncoder.encode("copi",
|
||||
VisualizationController.URL_ENCODING_SCHEME)
|
||||
.toString();
|
||||
return downloadURL;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String generateShortVisContext() {
|
||||
|
||||
StringBuilder divContextCode = new StringBuilder();
|
||||
|
||||
try {
|
||||
|
||||
String fullTimelineLink, fullTimelineCoPILink;
|
||||
if (yearToGrantCount.size() > 0) {
|
||||
|
||||
String secondaryContextPath = "";
|
||||
if (!contextPath.contains(VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX)) {
|
||||
secondaryContextPath = VisualizationFrameworkConstants.VISUALIZATION_URL_PREFIX;
|
||||
}
|
||||
|
||||
String fullTimelineNetworkURL = contextPath
|
||||
+ secondaryContextPath
|
||||
+ "?"
|
||||
+ VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY
|
||||
+ "=" + URLEncoder.encode(individualURI,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_TYPE_KEY
|
||||
+ "=" + URLEncoder.encode("person_level",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.RENDER_MODE_KEY
|
||||
+ "=" + URLEncoder.encode(VisualizationFrameworkConstants
|
||||
.STANDALONE_RENDER_MODE,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_MODE_KEY
|
||||
+ "=" + URLEncoder.encode("copi",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString();
|
||||
|
||||
fullTimelineLink = "<a href='" + fullTimelineNetworkURL + "'>View all VIVO "
|
||||
+ "grants and corresponding co-pi network.</a>";
|
||||
|
||||
sparklineData.setFullTimelineNetworkLink(fullTimelineNetworkURL);
|
||||
|
||||
String fullTimelineCoPINetworkURL = contextPath
|
||||
+ secondaryContextPath
|
||||
+ "?"
|
||||
+ VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY
|
||||
+ "=" + URLEncoder.encode(individualURI,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_TYPE_KEY
|
||||
+ "=" + URLEncoder.encode("person_level",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.RENDER_MODE_KEY
|
||||
+ "=" + URLEncoder.encode(VisualizationFrameworkConstants
|
||||
.STANDALONE_RENDER_MODE,
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString()
|
||||
+ "&"
|
||||
+ VisualizationFrameworkConstants.VIS_MODE_KEY
|
||||
+ "=" + URLEncoder.encode("copi",
|
||||
VisualizationController.URL_ENCODING_SCHEME).toString();
|
||||
|
||||
fullTimelineCoPILink = "<a href='" + fullTimelineCoPINetworkURL + "'>View all "
|
||||
+ "grants and corresponding co-pi network.</a>";
|
||||
|
||||
sparklineData.setFullTimelineCoPINetworkLink(fullTimelineCoPINetworkURL);
|
||||
|
||||
} else {
|
||||
fullTimelineLink = "No data available to render full timeline.<br />";
|
||||
fullTimelineCoPILink = "No data available to render full timeline.<br />";
|
||||
|
||||
}
|
||||
|
||||
divContextCode.append("<span class=\"vis_link\">" + fullTimelineLink + "</span>");
|
||||
divContextCode.append("<br/><br/><span class=\"vis_link_copi\">" + fullTimelineCoPILink + "</span>");
|
||||
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
log.error(e);
|
||||
}
|
||||
return divContextCode.toString();
|
||||
}
|
||||
|
||||
private String generateDataTable() {
|
||||
|
||||
String csvDownloadURLHref = "";
|
||||
|
||||
try {
|
||||
if (getCSVDownloadURL() != null) {
|
||||
csvDownloadURLHref = "<a href=\"" + getCSVDownloadURL() + "\">(.CSV File)</a>";
|
||||
} else {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
} catch (UnsupportedEncodingException e) {
|
||||
csvDownloadURLHref = "";
|
||||
}
|
||||
|
||||
StringBuilder dataTable = new StringBuilder();
|
||||
|
||||
dataTable.append("<table id='sparkline_data_table'>"
|
||||
+ "<caption>Grants per year " + csvDownloadURLHref + "</caption>"
|
||||
+ "<thead>"
|
||||
+ "<tr>"
|
||||
+ "<th>Year</th>"
|
||||
+ "<th>Grants</th>"
|
||||
+ "</tr>"
|
||||
+ "</thead>"
|
||||
+ "<tbody>");
|
||||
|
||||
for (Entry<String, Integer> currentEntry : yearToGrantCount.entrySet()) {
|
||||
dataTable.append("<tr>"
|
||||
+ "<td>" + currentEntry.getKey() + "</td>"
|
||||
+ "<td>" + currentEntry.getValue() + "</td>"
|
||||
+ "</tr>");
|
||||
}
|
||||
|
||||
dataTable.append("</tbody>\n </table>\n");
|
||||
|
||||
return dataTable.toString();
|
||||
}
|
||||
|
||||
public SparklineData getValueObjectContainer() {
|
||||
return sparklineData;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,466 +0,0 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.personlevel;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.PrintWriter;
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import javax.servlet.RequestDispatcher;
|
||||
import javax.servlet.ServletException;
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
|
||||
import com.hp.hpl.jena.query.DataSource;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.beans.Portal;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.Controllers;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.coauthorship.CoAuthorshipGraphMLWriter;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.coauthorship.CoAuthorshipQueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.coauthorship.CoAuthorshipVisCodeGenerator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.coprincipalinvestigator.CoPIGrantCountQueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.coprincipalinvestigator.CoPIVisCodeGenerator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.persongrantcount.PersonGrantCountQueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.persongrantcount.PersonGrantCountVisCodeGenerator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.personpubcount.PersonPublicationCountQueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.personpubcount.PersonPublicationCountVisCodeGenerator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.BiboDocument;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoAuthorshipData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.CoPIData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Grant;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.Node;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.valueobjects.SparklineData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.UtilityFunctions;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.visutils.VisualizationRequestHandler;
|
||||
|
||||
/**
|
||||
* This request handler is used to serve content rendered on the person level vis page
|
||||
* like,
|
||||
* 1. Front end of the vis including the co-author & publication sparkline.
|
||||
* 2. Downloadable file having the co-author network in graphml format.
|
||||
* 3. Downloadable file having the list of co-authors that the individual has
|
||||
* worked with & count of such co-authorships.
|
||||
*
|
||||
* @author cdtank
|
||||
*/
|
||||
public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
||||
|
||||
private static final String EGO_PUB_SPARKLINE_VIS_CONTAINER_ID = "ego_pub_sparkline";
|
||||
private static final String UNIQUE_COAUTHORS_SPARKLINE_VIS_CONTAINER_ID =
|
||||
"unique_coauthors_sparkline";
|
||||
private static final String EGO_GRANT_SPARKLINE_VIS_CONTAINER_ID = "ego_grant_sparkline";
|
||||
private static final String UNIQUE_COPIS_SPARKLINE_VIS_CONTAINER_ID =
|
||||
"unique_copis_sparkline";
|
||||
|
||||
|
||||
public void generateVisualization(VitroRequest vitroRequest,
|
||||
HttpServletRequest request,
|
||||
HttpServletResponse response,
|
||||
Log log,
|
||||
DataSource dataSource) {
|
||||
|
||||
String egoURI = vitroRequest.getParameter(
|
||||
VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY);
|
||||
|
||||
String renderMode = vitroRequest.getParameter(
|
||||
VisualizationFrameworkConstants.RENDER_MODE_KEY);
|
||||
|
||||
String visMode = vitroRequest.getParameter(
|
||||
VisualizationFrameworkConstants.VIS_MODE_KEY);
|
||||
|
||||
QueryRunner<CoAuthorshipData> coAuthorshipQueryManager =
|
||||
new CoAuthorshipQueryRunner(egoURI, dataSource, log);
|
||||
|
||||
QueryRunner<Set<BiboDocument>> publicationQueryManager =
|
||||
new PersonPublicationCountQueryRunner(egoURI, dataSource, log);
|
||||
|
||||
QueryRunner<CoPIData> coPIQueryManager = new CoPIGrantCountQueryRunner(egoURI, dataSource, log);
|
||||
|
||||
|
||||
QueryRunner<Set<Grant>> grantQueryManager =
|
||||
new PersonGrantCountQueryRunner(egoURI, dataSource, log);
|
||||
|
||||
try {
|
||||
|
||||
CoAuthorshipData coAuthorshipData = coAuthorshipQueryManager.getQueryResult();
|
||||
|
||||
CoPIData coPIData = coPIQueryManager.getQueryResult();
|
||||
|
||||
if (VisualizationFrameworkConstants.DATA_RENDER_MODE
|
||||
.equalsIgnoreCase(renderMode)) {
|
||||
|
||||
/*
|
||||
* We will be using the same visualization package for providing data for both
|
||||
* list of unique coauthors & network of coauthors (used in the flash vis).
|
||||
* We will use "VIS_MODE_KEY" as a modifier to differentiate between
|
||||
* these two. The default will be to provide data used to render the co-
|
||||
* authorship network vis.
|
||||
* */
|
||||
if (VisualizationFrameworkConstants.COAUTHORSLIST_VIS_MODE
|
||||
.equalsIgnoreCase(visMode)) {
|
||||
/*
|
||||
* When the csv file is required - containing the co-authors & how
|
||||
* many times they have co-authored with the ego.
|
||||
* */
|
||||
prepareListCoauthorsDataResponse(coAuthorshipData,
|
||||
response);
|
||||
return;
|
||||
|
||||
} else {
|
||||
/*
|
||||
* When the graphML file is required - based on which co-authorship
|
||||
* network visualization will be rendered.
|
||||
* */
|
||||
prepareNetworkDataResponse(coAuthorshipData,
|
||||
response);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
/*
|
||||
* When the front-end for the person level vis has to be displayed we render a couple of
|
||||
* sparklines. This will prepare all the data for the sparklines & other requested
|
||||
* files.
|
||||
* */
|
||||
|
||||
Set<BiboDocument> authorDocuments = publicationQueryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of publications. Use the BiboDocument's
|
||||
* parsedPublicationYear to populate the data.
|
||||
* */
|
||||
Map<String, Integer> yearToPublicationCount =
|
||||
UtilityFunctions.getYearToPublicationCount(authorDocuments);
|
||||
|
||||
/*
|
||||
* Computations required to generate HTML for the sparklines & related context.
|
||||
* */
|
||||
PersonPublicationCountVisCodeGenerator personPubCountVisCodeGenerator =
|
||||
new PersonPublicationCountVisCodeGenerator(
|
||||
vitroRequest.getRequestURI(),
|
||||
egoURI,
|
||||
VisualizationFrameworkConstants.FULL_SPARKLINE_VIS_MODE,
|
||||
EGO_PUB_SPARKLINE_VIS_CONTAINER_ID,
|
||||
authorDocuments,
|
||||
yearToPublicationCount,
|
||||
log);
|
||||
|
||||
SparklineData publicationSparklineVO = personPubCountVisCodeGenerator
|
||||
.getValueObjectContainer();
|
||||
|
||||
CoAuthorshipVisCodeGenerator uniqueCoauthorsVisCodeGenerator =
|
||||
new CoAuthorshipVisCodeGenerator(
|
||||
vitroRequest.getRequestURI(),
|
||||
egoURI,
|
||||
VisualizationFrameworkConstants.FULL_SPARKLINE_VIS_MODE,
|
||||
UNIQUE_COAUTHORS_SPARKLINE_VIS_CONTAINER_ID,
|
||||
UtilityFunctions.getPublicationYearToCoAuthors(coAuthorshipData),
|
||||
log);
|
||||
|
||||
SparklineData uniqueCoauthorsSparklineVO = uniqueCoauthorsVisCodeGenerator
|
||||
.getValueObjectContainer();
|
||||
|
||||
/*
|
||||
* grants over time sparkline
|
||||
*/
|
||||
|
||||
Set<Grant> piGrants = grantQueryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of grants. Use the Grant's
|
||||
* parsedGrantYear to populate the data.
|
||||
* */
|
||||
Map<String, Integer> yearToGrantCount =
|
||||
UtilityFunctions.getYearToGrantCount(piGrants);
|
||||
|
||||
PersonGrantCountVisCodeGenerator personGrantCountVisCodeGenerator =
|
||||
new PersonGrantCountVisCodeGenerator(
|
||||
vitroRequest.getRequestURI(),
|
||||
egoURI,
|
||||
VisualizationFrameworkConstants.FULL_SPARKLINE_VIS_MODE,
|
||||
EGO_GRANT_SPARKLINE_VIS_CONTAINER_ID,
|
||||
piGrants,
|
||||
yearToGrantCount,
|
||||
log);
|
||||
|
||||
SparklineData grantSparklineVO = personGrantCountVisCodeGenerator
|
||||
.getValueObjectContainer();
|
||||
|
||||
|
||||
/*
|
||||
* Co-PI's over time sparkline
|
||||
*/
|
||||
CoPIVisCodeGenerator uniqueCopisVisCodeGenerator =
|
||||
new CoPIVisCodeGenerator(
|
||||
vitroRequest.getRequestURI(),
|
||||
egoURI,
|
||||
VisualizationFrameworkConstants.FULL_SPARKLINE_VIS_MODE,
|
||||
UNIQUE_COPIS_SPARKLINE_VIS_CONTAINER_ID,
|
||||
UtilityFunctions.getGrantYearToCoPI(coPIData),
|
||||
log);
|
||||
|
||||
SparklineData uniqueCopisSparklineVO = uniqueCopisVisCodeGenerator
|
||||
.getValueObjectContainer();
|
||||
|
||||
|
||||
RequestDispatcher requestDispatcher = null;
|
||||
|
||||
prepareStandaloneResponse(
|
||||
egoURI,
|
||||
publicationSparklineVO,
|
||||
uniqueCoauthorsSparklineVO,
|
||||
grantSparklineVO,
|
||||
uniqueCopisSparklineVO,
|
||||
coAuthorshipData,
|
||||
coPIData,
|
||||
EGO_PUB_SPARKLINE_VIS_CONTAINER_ID,
|
||||
UNIQUE_COAUTHORS_SPARKLINE_VIS_CONTAINER_ID,
|
||||
vitroRequest,
|
||||
request, visMode);
|
||||
|
||||
requestDispatcher = request.getRequestDispatcher(Controllers.BASIC_JSP);
|
||||
|
||||
try {
|
||||
requestDispatcher.forward(request, response);
|
||||
} catch (Exception e) {
|
||||
log.error("EntityEditController could not forward to view.");
|
||||
log.error(e.getMessage());
|
||||
log.error(e.getStackTrace());
|
||||
}
|
||||
|
||||
} catch (MalformedQueryParametersException e) {
|
||||
try {
|
||||
UtilityFunctions.handleMalformedParameters(
|
||||
e.getMessage(),
|
||||
"Visualization Query Error - Person Level Visualization",
|
||||
vitroRequest,
|
||||
request,
|
||||
response,
|
||||
log);
|
||||
|
||||
} catch (ServletException e1) {
|
||||
log.error(e1.getStackTrace());
|
||||
} catch (IOException e1) {
|
||||
log.error(e1.getStackTrace());
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
private Map<String, Integer> getCoAuthorsList(CoAuthorshipData coAuthorsipVO) {
|
||||
|
||||
Map<String, Integer> coAuthorsToCount = new TreeMap<String, Integer>();
|
||||
|
||||
for (Node currNode : coAuthorshipVO.getNodes()) {
|
||||
|
||||
/*
|
||||
* We have already printed the Ego Node info.
|
||||
* */
|
||||
if (currNode != coAuthorshipVO.getEgoNode()) {
|
||||
|
||||
coAuthorsToCount.put(currNode.getNodeName(), currNode.getNumOfAuthoredWorks());
|
||||
|
||||
}
|
||||
}
|
||||
return coAuthorsToCount;
|
||||
}
|
||||
|
||||
private void writeCoAuthorsToWorksCSV(Map<String, Integer> coAuthorsToCount,
|
||||
PrintWriter printWriter) {
|
||||
|
||||
// printWriter.append("\"Co-Author\", \"Count\"\n");
|
||||
printWriter.append("Co-Author, Count\n");
|
||||
|
||||
for (Entry<String, Integer> currentEntry : coAuthorsToCount.entrySet()) {
|
||||
|
||||
printWriter.append("\"" + currentEntry.getKey() + "\","
|
||||
+ "\"" + currentEntry.getValue() + "\"\n"
|
||||
);
|
||||
}
|
||||
|
||||
printWriter.flush();
|
||||
}
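/*
 * Illustrative output (editor's sketch; the names are hypothetical): for an ego with
 * two co-authors the writer above produces roughly
 *
 *   Co-Author, Count
 *   "Doe, Jane","4"
 *   "Smith, John","2"
 */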
|
||||
|
||||
/**
|
||||
* Provides response when the graphml file for the co-authorship network is requested.
|
||||
* @param coAuthorshipData
|
||||
* @param response
|
||||
*/
|
||||
private void prepareNetworkDataResponse(
|
||||
CoAuthorshipData coAuthorshipData, HttpServletResponse response) {
|
||||
|
||||
String outputFileName = "";
|
||||
|
||||
if (coAuthorshipData.getNodes() != null && coAuthorshipData.getNodes().size() > 0) {
|
||||
|
||||
outputFileName = UtilityFunctions.slugify(coAuthorshipData.getEgoNode().getNodeName())
|
||||
+ "_coauthor-network.graphml" + ".xml";
|
||||
|
||||
} else {
|
||||
outputFileName = "no_coauthor-network.graphml" + ".xml";
|
||||
}
|
||||
|
||||
response.setContentType("application/octet-stream");
|
||||
response.setHeader("Content-Disposition", "attachment;filename=" + outputFileName);
|
||||
|
||||
try {
|
||||
|
||||
PrintWriter responseWriter = response.getWriter();
|
||||
|
||||
/*
|
||||
* We are side-effecting responseWriter since we are directly manipulating the response
|
||||
* object of the servlet.
|
||||
* */
|
||||
CoAuthorshipGraphMLWriter coAuthorShipGraphMLWriter =
|
||||
new CoAuthorshipGraphMLWriter(coAuthorshipData);
|
||||
|
||||
responseWriter.append(coAuthorShipGraphMLWriter.getCoAuthorshipGraphMLContent());
|
||||
responseWriter.close();
|
||||
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides response when a csv file containing co-author names & number of co-authored works
|
||||
* is requested.
|
||||
* @param coAuthorshipData
|
||||
* @param response
|
||||
*/
|
||||
private void prepareListCoauthorsDataResponse(
|
||||
CoAuthorshipData coAuthorshipData, HttpServletResponse response) {
|
||||
|
||||
String outputFileName = "";
|
||||
Map<String, Integer> coAuthorsToCount = new TreeMap<String, Integer>();
|
||||
|
||||
if (coAuthorshipData.getNodes() != null && coAuthorshipData.getNodes().size() > 0) {
|
||||
|
||||
outputFileName = UtilityFunctions.slugify(coAuthorshipData.getEgoNode().getNodeName())
|
||||
+ "_coauthors" + ".csv";
|
||||
|
||||
coAuthorsToCount = getCoAuthorsList(coAuthorshipData);
|
||||
|
||||
} else {
|
||||
outputFileName = "no_coauthors" + ".csv";
|
||||
}
|
||||
|
||||
response.setContentType("application/octet-stream");
|
||||
response.setHeader("Content-Disposition", "attachment;filename=" + outputFileName);
|
||||
|
||||
try {
|
||||
|
||||
PrintWriter responseWriter = response.getWriter();
|
||||
|
||||
/*
|
||||
* We are side-effecting responseWriter since we are directly manipulating the response
|
||||
* object of the servlet.
|
||||
* */
|
||||
writeCoAuthorsToWorksCSV(coAuthorsToCount, responseWriter);
|
||||
|
||||
responseWriter.close();
|
||||
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* When the page for person level visualization is requested.
|
||||
* @param egoURI
|
||||
* @param egoPubSparklineVO
|
||||
* @param uniqueCoauthorsSparklineVO
|
||||
* @param uniqueCopisSparklineVO
|
||||
* @param grantSparklineVO
|
||||
* @param coAuthorshipVO
|
||||
* @param coPIVO
|
||||
* @param egoPubSparklineVisContainer
|
||||
* @param uniqueCoauthorsSparklineVisContainer
|
||||
* @param vitroRequest
|
||||
* @param request
|
||||
* @param visMode
|
||||
*/
|
||||
private void prepareStandaloneResponse (
|
||||
String egoURI,
|
||||
SparklineData egoPubSparklineVO,
|
||||
SparklineData uniqueCoauthorsSparklineVO,
|
||||
SparklineData egoGrantSparklineVO, SparklineData uniqueCopisSparklineVO, CoAuthorshipData coAuthorshipVO,
|
||||
CoPIData coPIVO, String egoPubSparklineVisContainer,
|
||||
String uniqueCoauthorsSparklineVisContainer,
|
||||
VitroRequest vitroRequest,
|
||||
HttpServletRequest request, String visMode) {
|
||||
|
||||
String completeURL = "";
|
||||
Portal portal = vitroRequest.getPortal();
|
||||
|
||||
request.setAttribute("egoURIParam", egoURI);
|
||||
|
||||
String title = "";
|
||||
if (coAuthorshipVO.getNodes() != null && coAuthorshipVO.getNodes().size() > 0) {
|
||||
request.setAttribute("numOfAuthors", coAuthorshipVO.getNodes().size());
|
||||
title = coAuthorshipVO.getEgoNode().getNodeName() + " - ";
|
||||
}
|
||||
|
||||
if (coAuthorshipVO.getEdges() != null && coAuthorshipVO.getEdges().size() > 0) {
|
||||
request.setAttribute("numOfCoAuthorShips", coAuthorshipVO.getEdges().size());
|
||||
}
|
||||
|
||||
if (coPIVO.getNodes() != null && coPIVO.getNodes().size() > 0) {
|
||||
request.setAttribute("numOfInvestigators", coPIVO.getNodes().size());
|
||||
//title = coPIVO.getEgoNode().getNodeName() + " - ";
|
||||
}
|
||||
|
||||
if (coPIVO.getEdges() != null && coPIVO.getEdges().size() > 0) {
|
||||
request.setAttribute("numOfCoPIs", coPIVO.getEdges().size());
|
||||
}
|
||||
|
||||
|
||||
try {
|
||||
completeURL = getCompleteURL(request);
|
||||
} catch (MalformedURLException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
|
||||
request.setAttribute("visMode", visMode);
|
||||
request.setAttribute("completeURL", completeURL);
|
||||
request.setAttribute("egoPubSparklineVO", egoPubSparklineVO);
|
||||
request.setAttribute("egoGrantSparklineVO", egoGrantSparklineVO);
|
||||
request.setAttribute("uniqueCoauthorsSparklineVO", uniqueCoauthorsSparklineVO);
|
||||
request.setAttribute("uniqueCopisSparklineVO", uniqueCopisSparklineVO);
|
||||
|
||||
request.setAttribute("egoPubSparklineContainerID", egoPubSparklineVisContainer);
|
||||
request.setAttribute("uniqueCoauthorsSparklineVisContainerID",
|
||||
uniqueCoauthorsSparklineVisContainer);
|
||||
|
||||
request.setAttribute("title", title + "Person Level Visualization");
|
||||
request.setAttribute("portalBean", portal);
|
||||
request.setAttribute("scripts", "/templates/visualization/person_level_inject_head.jsp");
|
||||
request.setAttribute("bodyJsp", "/templates/visualization/person_level.jsp");
|
||||
}
|
||||
|
||||
private String getCompleteURL(HttpServletRequest request) throws MalformedURLException {
|
||||
|
||||
String file = request.getRequestURI();
|
||||
// System.out.println("\ngetRequestURI() --> "+ file + "\ngetQueryString() --> "+request.getQueryString()+ "\ngetScheme() --> "+ request.getScheme());
|
||||
// System.out.println("\ngetServerName() --> "+ request.getServerName() + "\ngetServerPort() --> "+request.getServerPort());
|
||||
|
||||
URL reconstructedURL = new URL(request.getScheme(), request.getServerName(), request.getServerPort(), file);
|
||||
|
||||
// System.out.println("\nReconstructed URL is --> " + reconstructedURL);
|
||||
|
||||
return reconstructedURL.toString();
|
||||
}
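/*
 * Editor's sketch (hypothetical host and path): a request to
 * http://localhost:8080/vivo/visualization?uri=... is reconstructed by the method
 * above as "http://localhost:8080/vivo/visualization", i.e. scheme, server name,
 * port and request URI; the query string is not included.
 */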
|
||||
|
||||
}
|