1. Major refactor of all the VOs related to publications & grants.
2. Removed some redundant code re: overridden equals & hashCode.
parent 9006db1126
commit 187bcc1d9a
40 changed files with 1117 additions and 2086 deletions
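
For orientation, here is a rough sketch (not part of the commit) of the shape the refactor converges on: Node becomes Collaborator, Edge becomes Collaboration, BiboDocument becomes Activity, and CoAuthorshipData / CoInvestigationData now extend a shared CollaborationData base. The base class itself does not appear in this excerpt, so its exact definition, including whether the GraphML schema initializers are abstract, is assumed here.

import java.util.Map;
import java.util.Set;

// Hypothetical stand-ins for the renamed VOs (Collaborator replaces Node, Collaboration replaces Edge).
class Collaborator { /* ... */ }
class Collaboration { /* ... */ }

// Assumed shape of the shared base class; the real CollaborationData source is not shown in this excerpt.
abstract class CollaborationData {

    private final Collaborator egoCollaborator;
    private final Set<Collaborator> collaborators;
    private final Set<Collaboration> collaborations;

    CollaborationData(Collaborator egoCollaborator,
                      Set<Collaborator> collaborators,
                      Set<Collaboration> collaborations) {
        this.egoCollaborator = egoCollaborator;
        this.collaborators = collaborators;
        this.collaborations = collaborations;
    }

    // Accessors used throughout the rewritten writers and request handlers below.
    public Collaborator getEgoCollaborator() { return egoCollaborator; }
    public Set<Collaborator> getCollaborators() { return collaborators; }
    public Set<Collaboration> getCollaborations() { return collaborations; }

    // GraphML schema hooks that each concrete subclass (CoAuthorshipData, CoInvestigationData) supplies.
    public abstract Set<Map<String, String>> initializeNodeSchema();
    public abstract Set<Map<String, String>> initializeEdgeSchema();
}
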
@@ -15,6 +15,7 @@ import org.joda.time.format.ISODateTimeFormat;
  */
 public class VOConstants {
 
+    public static final String DEFAULT_ACTIVITY_YEAR = "Unknown";
     public static final String DEFAULT_PUBLICATION_YEAR = "Unknown";
     public static final String DEFAULT_GRANT_YEAR = "Unknown";
 

@@ -11,9 +11,11 @@ import java.util.Set;
 import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder;
 import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder.ParamMap;
 import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
-import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoAuthorshipData;
-import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Edge;
-import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Node;
+import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationComparator;
+import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationData;
+import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaboratorComparator;
+import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
+import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaboration;
 
 public class CoAuthorshipGraphMLWriter {
 
@@ -27,12 +29,12 @@ public class CoAuthorshipGraphMLWriter {
 
     private final String GRAPHML_FOOTER = "</graphml>";
 
-    public CoAuthorshipGraphMLWriter(CoAuthorshipData visVOContainer) {
+    public CoAuthorshipGraphMLWriter(CollaborationData visVOContainer) {
         coAuthorshipGraphMLContent = createCoAuthorshipGraphMLContent(visVOContainer);
     }
 
     private StringBuilder createCoAuthorshipGraphMLContent(
-            CoAuthorshipData coAuthorshipData) {
+            CollaborationData coAuthorshipData) {
 
         StringBuilder graphMLContent = new StringBuilder();
 
@@ -60,61 +62,61 @@ public class CoAuthorshipGraphMLWriter {
         return coAuthorshipGraphMLContent;
     }
 
-    private void generateGraphContent(CoAuthorshipData coAuthorshipData,
+    private void generateGraphContent(CollaborationData coAuthorshipData,
             StringBuilder graphMLContent) {
 
         graphMLContent.append("\n<graph edgedefault=\"undirected\">\n");
 
-        if (coAuthorshipData.getNodes() != null & coAuthorshipData.getNodes().size() > 0) {
+        if (coAuthorshipData.getCollaborators() != null & coAuthorshipData.getCollaborators().size() > 0) {
             generateNodeSectionContent(coAuthorshipData, graphMLContent);
         }
 
-        if (coAuthorshipData.getEdges() != null & coAuthorshipData.getEdges().size() > 0) {
+        if (coAuthorshipData.getCollaborations() != null & coAuthorshipData.getCollaborations().size() > 0) {
             generateEdgeSectionContent(coAuthorshipData, graphMLContent);
         }
 
         graphMLContent.append("</graph>\n");
     }
 
-    private void generateEdgeSectionContent(CoAuthorshipData coAuthorshipData,
+    private void generateEdgeSectionContent(CollaborationData coAuthorshipData,
             StringBuilder graphMLContent) {
 
         graphMLContent.append("<!-- edges -->\n");
 
-        Set<Edge> edges = coAuthorshipData.getEdges();
+        Set<Collaboration> edges = coAuthorshipData.getCollaborations();
 
-        List<Edge> orderedEdges = new ArrayList<Edge>(edges);
+        List<Collaboration> orderedEdges = new ArrayList<Collaboration>(edges);
 
-        Collections.sort(orderedEdges, new EdgeComparator());
+        Collections.sort(orderedEdges, new CollaborationComparator());
 
-        for (Edge currentEdge : orderedEdges) {
+        for (Collaboration currentEdge : orderedEdges) {
 
             /*
-             * This method actually creates the XML code for a single edge. "graphMLContent"
+             * This method actually creates the XML code for a single Collaboration. "graphMLContent"
              * is being side-effected.
              * */
             getEdgeContent(graphMLContent, currentEdge);
         }
     }
 
-    private void getEdgeContent(StringBuilder graphMLContent, Edge currentEdge) {
+    private void getEdgeContent(StringBuilder graphMLContent, Collaboration currentEdge) {
 
         graphMLContent.append("<edge "
-            + "id=\"" + currentEdge.getEdgeID() + "\" "
-            + "source=\"" + currentEdge.getSourceNode().getNodeID() + "\" "
-            + "target=\"" + currentEdge.getTargetNode().getNodeID() + "\" "
+            + "id=\"" + currentEdge.getCollaborationID() + "\" "
+            + "source=\"" + currentEdge.getSourceCollaborator().getCollaboratorID() + "\" "
+            + "target=\"" + currentEdge.getTargetCollaborator().getCollaboratorID() + "\" "
             + ">\n");
 
         graphMLContent.append("\t<data key=\"collaborator1\">"
-            + currentEdge.getSourceNode().getNodeName()
+            + currentEdge.getSourceCollaborator().getCollaboratorName()
             + "</data>\n");
 
         graphMLContent.append("\t<data key=\"collaborator2\">"
-            + currentEdge.getTargetNode().getNodeName()
+            + currentEdge.getTargetCollaborator().getCollaboratorName()
            + "</data>\n");
 
         graphMLContent.append("\t<data key=\"number_of_coauthored_works\">"
-            + currentEdge.getNumOfCoAuthoredWorks()
+            + currentEdge.getNumOfCollaborations()
             + "</data>\n");
 
         if (currentEdge.getEarliestCollaborationYearCount() != null) {
@@ -164,32 +166,32 @@ public class CoAuthorshipGraphMLWriter {
         graphMLContent.append("</edge>\n");
     }
 
-    private void generateNodeSectionContent(CoAuthorshipData coAuthorshipData,
+    private void generateNodeSectionContent(CollaborationData coAuthorshipData,
             StringBuilder graphMLContent) {
 
         graphMLContent.append("<!-- nodes -->\n");
 
-        Node egoNode = coAuthorshipData.getEgoNode();
-        Set<Node> authorNodes = coAuthorshipData.getNodes();
+        Collaborator egoNode = coAuthorshipData.getEgoCollaborator();
+        Set<Collaborator> authorNodes = coAuthorshipData.getCollaborators();
 
         /*
-         * This method actually creates the XML code for a single node. "graphMLContent"
+         * This method actually creates the XML code for a single Collaborator. "graphMLContent"
         * is being side-effected. The egoNode is added first because this is the "requirement"
         * of the co-author vis. Ego should always come first.
         *
         * */
         getNodeContent(graphMLContent, egoNode);
 
-        List<Node> orderedAuthorNodes = new ArrayList<Node>(authorNodes);
+        List<Collaborator> orderedAuthorNodes = new ArrayList<Collaborator>(authorNodes);
         orderedAuthorNodes.remove(egoNode);
 
-        Collections.sort(orderedAuthorNodes, new NodeComparator());
+        Collections.sort(orderedAuthorNodes, new CollaboratorComparator());
 
 
-        for (Node currNode : orderedAuthorNodes) {
+        for (Collaborator currNode : orderedAuthorNodes) {
 
             /*
-             * We have already printed the Ego Node info.
+             * We have already printed the Ego Collaborator info.
             * */
             if (currNode != egoNode) {
 
@@ -201,17 +203,17 @@ public class CoAuthorshipGraphMLWriter {
 
     }
 
-    private void getNodeContent(StringBuilder graphMLContent, Node node) {
+    private void getNodeContent(StringBuilder graphMLContent, Collaborator node) {
 
         ParamMap individualProfileURLParams = new ParamMap(VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY,
-            node.getNodeURI());
+            node.getCollaboratorURI());
 
         String profileURL = UrlBuilder.getUrl(VisualizationFrameworkConstants.INDIVIDUAL_URL_PREFIX,
             individualProfileURLParams);
 
-        graphMLContent.append("<node id=\"" + node.getNodeID() + "\">\n");
-        graphMLContent.append("\t<data key=\"url\">" + node.getNodeURI() + "</data>\n");
-        graphMLContent.append("\t<data key=\"label\">" + node.getNodeName() + "</data>\n");
+        graphMLContent.append("<node id=\"" + node.getCollaboratorID() + "\">\n");
+        graphMLContent.append("\t<data key=\"url\">" + node.getCollaboratorURI() + "</data>\n");
+        graphMLContent.append("\t<data key=\"label\">" + node.getCollaboratorName() + "</data>\n");
 
         if (profileURL != null) {
             graphMLContent.append("\t<data key=\"profile_url\">" + profileURL + "</data>\n");
@@ -219,10 +221,10 @@ public class CoAuthorshipGraphMLWriter {
 
 
         graphMLContent.append("\t<data key=\"number_of_authored_works\">"
-            + node.getNumOfAuthoredWorks()
+            + node.getNumOfActivities()
             + "</data>\n");
 
-        if (node.getEarliestPublicationYearCount() != null) {
+        if (node.getEarliestActivityYearCount() != null) {
 
             /*
             * There is no clean way of getting the map contents in java even though
@@ -230,7 +232,7 @@ public class CoAuthorshipGraphMLWriter {
             * I am feeling dirty just about now.
             * */
             for (Map.Entry<String, Integer> publicationInfo
-                    : node.getEarliestPublicationYearCount().entrySet()) {
+                    : node.getEarliestActivityYearCount().entrySet()) {
 
                 graphMLContent.append("\t<data key=\"earliest_publication\">"
                     + publicationInfo.getKey()
@@ -243,10 +245,10 @@ public class CoAuthorshipGraphMLWriter {
 
         }
 
-        if (node.getLatestPublicationYearCount() != null) {
+        if (node.getLatestActivityYearCount() != null) {
 
             for (Map.Entry<String, Integer> publicationInfo
-                    : node.getLatestPublicationYearCount().entrySet()) {
+                    : node.getLatestActivityYearCount().entrySet()) {
 
                 graphMLContent.append("\t<data key=\"latest_publication\">"
                     + publicationInfo.getKey()
@@ -259,10 +261,10 @@ public class CoAuthorshipGraphMLWriter {
 
         }
 
-        if (node.getUnknownPublicationYearCount() != null) {
+        if (node.getUnknownActivityYearCount() != null) {
 
             graphMLContent.append("\t<data key=\"num_unknown_publication\">"
-                + node.getUnknownPublicationYearCount()
+                + node.getUnknownActivityYearCount()
                 + "</data>\n");
 
         }
@@ -270,7 +272,7 @@ public class CoAuthorshipGraphMLWriter {
         graphMLContent.append("</node>\n");
     }
 
-    private void generateKeyDefinitionContent(CoAuthorshipData visVOContainer,
+    private void generateKeyDefinitionContent(CollaborationData visVOContainer,
             StringBuilder graphMLContent) {
 
         /*
 
@ -29,10 +29,12 @@ import com.hp.hpl.jena.rdf.model.RDFNode;
|
|||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.BiboDocument;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoAuthorshipData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Edge;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Node;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CoAuthorshipData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaboratorComparator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaboration;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UniqueIDGenerator;
|
||||
|
||||
|
@ -43,7 +45,7 @@ import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.Unique
|
|||
*
|
||||
* @author cdtank
|
||||
*/
|
||||
public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
||||
public class CoAuthorshipQueryRunner implements QueryRunner<CollaborationData> {
|
||||
|
||||
private static final int MAX_AUTHORS_PER_PAPER_ALLOWED = 100;
|
||||
|
||||
|
@ -71,18 +73,18 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
|
||||
}
|
||||
|
||||
private CoAuthorshipData createQueryResult(ResultSet resultSet) {
|
||||
private CollaborationData createQueryResult(ResultSet resultSet) {
|
||||
|
||||
Set<Node> nodes = new HashSet<Node>();
|
||||
Set<Collaborator> nodes = new HashSet<Collaborator>();
|
||||
|
||||
Map<String, BiboDocument> biboDocumentURLToVO = new HashMap<String, BiboDocument>();
|
||||
Map<String, Set<Node>> biboDocumentURLToCoAuthors = new HashMap<String, Set<Node>>();
|
||||
Map<String, Node> nodeURLToVO = new HashMap<String, Node>();
|
||||
Map<String, Edge> edgeUniqueIdentifierToVO = new HashMap<String, Edge>();
|
||||
Map<String, Activity> biboDocumentURLToVO = new HashMap<String, Activity>();
|
||||
Map<String, Set<Collaborator>> biboDocumentURLToCoAuthors = new HashMap<String, Set<Collaborator>>();
|
||||
Map<String, Collaborator> nodeURLToVO = new HashMap<String, Collaborator>();
|
||||
Map<String, Collaboration> edgeUniqueIdentifierToVO = new HashMap<String, Collaboration>();
|
||||
|
||||
Node egoNode = null;
|
||||
Collaborator egoNode = null;
|
||||
|
||||
Set<Edge> edges = new HashSet<Edge>();
|
||||
Set<Collaboration> edges = new HashSet<Collaboration>();
|
||||
|
||||
while (resultSet.hasNext()) {
|
||||
QuerySolution solution = resultSet.nextSolution();
|
||||
|
@ -97,18 +99,18 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
|
||||
} else {
|
||||
|
||||
egoNode = new Node(egoAuthorURLNode.toString(), nodeIDGenerator);
|
||||
egoNode = new Collaborator(egoAuthorURLNode.toString(), nodeIDGenerator);
|
||||
nodes.add(egoNode);
|
||||
nodeURLToVO.put(egoAuthorURLNode.toString(), egoNode);
|
||||
|
||||
RDFNode authorLabelNode = solution.get(QueryFieldLabels.AUTHOR_LABEL);
|
||||
if (authorLabelNode != null) {
|
||||
egoNode.setNodeName(authorLabelNode.toString());
|
||||
egoNode.setCollaboratorName(authorLabelNode.toString());
|
||||
}
|
||||
}
|
||||
|
||||
RDFNode documentNode = solution.get(QueryFieldLabels.DOCUMENT_URL);
|
||||
BiboDocument biboDocument;
|
||||
Activity biboDocument;
|
||||
|
||||
if (biboDocumentURLToVO.containsKey(documentNode.toString())) {
|
||||
biboDocument = biboDocumentURLToVO.get(documentNode.toString());
|
||||
|
@ -117,11 +119,11 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
biboDocumentURLToVO.put(documentNode.toString(), biboDocument);
|
||||
}
|
||||
|
||||
egoNode.addAuthorDocument(biboDocument);
|
||||
egoNode.addActivity(biboDocument);
|
||||
|
||||
/*
|
||||
* After some discussion we concluded that for the purpose of this visualization
|
||||
* we do not want a co-author node or edge if the publication has only one
|
||||
* we do not want a co-author node or Collaboration if the publication has only one
|
||||
* author and that happens to be the ego.
|
||||
* */
|
||||
if (solution.get(QueryFieldLabels.AUTHOR_URL).toString().equalsIgnoreCase(
|
||||
|
@ -129,7 +131,7 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
continue;
|
||||
}
|
||||
|
||||
Node coAuthorNode;
|
||||
Collaborator coAuthorNode;
|
||||
|
||||
RDFNode coAuthorURLNode = solution.get(QueryFieldLabels.CO_AUTHOR_URL);
|
||||
if (nodeURLToVO.containsKey(coAuthorURLNode.toString())) {
|
||||
|
@ -138,46 +140,46 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
|
||||
} else {
|
||||
|
||||
coAuthorNode = new Node(coAuthorURLNode.toString(), nodeIDGenerator);
|
||||
coAuthorNode = new Collaborator(coAuthorURLNode.toString(), nodeIDGenerator);
|
||||
nodes.add(coAuthorNode);
|
||||
nodeURLToVO.put(coAuthorURLNode.toString(), coAuthorNode);
|
||||
|
||||
RDFNode coAuthorLabelNode = solution.get(QueryFieldLabels.CO_AUTHOR_LABEL);
|
||||
if (coAuthorLabelNode != null) {
|
||||
coAuthorNode.setNodeName(coAuthorLabelNode.toString());
|
||||
coAuthorNode.setCollaboratorName(coAuthorLabelNode.toString());
|
||||
}
|
||||
}
|
||||
|
||||
coAuthorNode.addAuthorDocument(biboDocument);
|
||||
coAuthorNode.addActivity(biboDocument);
|
||||
|
||||
Set<Node> coAuthorsForCurrentBiboDocument;
|
||||
Set<Collaborator> coAuthorsForCurrentBiboDocument;
|
||||
|
||||
if (biboDocumentURLToCoAuthors.containsKey(biboDocument.getDocumentURL())) {
|
||||
if (biboDocumentURLToCoAuthors.containsKey(biboDocument.getActivityURI())) {
|
||||
coAuthorsForCurrentBiboDocument = biboDocumentURLToCoAuthors
|
||||
.get(biboDocument.getDocumentURL());
|
||||
.get(biboDocument.getActivityURI());
|
||||
} else {
|
||||
coAuthorsForCurrentBiboDocument = new HashSet<Node>();
|
||||
biboDocumentURLToCoAuthors.put(biboDocument.getDocumentURL(),
|
||||
coAuthorsForCurrentBiboDocument = new HashSet<Collaborator>();
|
||||
biboDocumentURLToCoAuthors.put(biboDocument.getActivityURI(),
|
||||
coAuthorsForCurrentBiboDocument);
|
||||
}
|
||||
|
||||
coAuthorsForCurrentBiboDocument.add(coAuthorNode);
|
||||
|
||||
Edge egoCoAuthorEdge = getExistingEdge(egoNode, coAuthorNode, edgeUniqueIdentifierToVO);
|
||||
Collaboration egoCoAuthorEdge = getExistingEdge(egoNode, coAuthorNode, edgeUniqueIdentifierToVO);
|
||||
|
||||
/*
|
||||
* If "egoCoAuthorEdge" is null it means that no edge exists in between the egoNode
|
||||
* & current coAuthorNode. Else create a new edge, add it to the edges set & add
|
||||
* If "egoCoAuthorEdge" is null it means that no Collaboration exists in between the egoNode
|
||||
* & current coAuthorNode. Else create a new Collaboration, add it to the edges set & add
|
||||
* the collaborator document to it.
|
||||
* */
|
||||
if (egoCoAuthorEdge != null) {
|
||||
egoCoAuthorEdge.addCollaboratorDocument(biboDocument);
|
||||
egoCoAuthorEdge.addActivity(biboDocument);
|
||||
} else {
|
||||
egoCoAuthorEdge = new Edge(egoNode, coAuthorNode, biboDocument, edgeIDGenerator);
|
||||
egoCoAuthorEdge = new Collaboration(egoNode, coAuthorNode, biboDocument, edgeIDGenerator);
|
||||
edges.add(egoCoAuthorEdge);
|
||||
edgeUniqueIdentifierToVO.put(
|
||||
getEdgeUniqueIdentifier(egoNode.getNodeID(),
|
||||
coAuthorNode.getNodeID()),
|
||||
getEdgeUniqueIdentifier(egoNode.getCollaboratorID(),
|
||||
coAuthorNode.getCollaboratorID()),
|
||||
egoCoAuthorEdge);
|
||||
}
|
||||
|
||||
|
@ -210,7 +212,7 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
* edges en masse for all the co-authors on all the publications considered so far. The
|
||||
* other reason being we dont want to compare against 2 sets of edges (edges created before
|
||||
* & co-author edges created during the course of this method) when we are creating a new
|
||||
* edge.
|
||||
* Collaboration.
|
||||
* */
|
||||
createCoAuthorEdges(biboDocumentURLToVO,
|
||||
biboDocumentURLToCoAuthors,
|
||||
|
@ -221,25 +223,25 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
return new CoAuthorshipData(egoNode, nodes, edges);
|
||||
}
|
||||
|
||||
private void removeLowQualityNodesAndEdges(Set<Node> nodes,
|
||||
Map<String, BiboDocument> biboDocumentURLToVO,
|
||||
Map<String, Set<Node>> biboDocumentURLToCoAuthors,
|
||||
Set<Edge> edges) {
|
||||
private void removeLowQualityNodesAndEdges(Set<Collaborator> nodes,
|
||||
Map<String, Activity> biboDocumentURLToVO,
|
||||
Map<String, Set<Collaborator>> biboDocumentURLToCoAuthors,
|
||||
Set<Collaboration> edges) {
|
||||
|
||||
Set<Node> nodesToBeRemoved = new HashSet<Node>();
|
||||
for (Map.Entry<String, Set<Node>> currentBiboDocumentEntry
|
||||
Set<Collaborator> nodesToBeRemoved = new HashSet<Collaborator>();
|
||||
for (Map.Entry<String, Set<Collaborator>> currentBiboDocumentEntry
|
||||
: biboDocumentURLToCoAuthors.entrySet()) {
|
||||
|
||||
if (currentBiboDocumentEntry.getValue().size() > MAX_AUTHORS_PER_PAPER_ALLOWED) {
|
||||
|
||||
BiboDocument currentBiboDocument = biboDocumentURLToVO
|
||||
Activity currentBiboDocument = biboDocumentURLToVO
|
||||
.get(currentBiboDocumentEntry.getKey());
|
||||
|
||||
Set<Edge> edgesToBeRemoved = new HashSet<Edge>();
|
||||
Set<Collaboration> edgesToBeRemoved = new HashSet<Collaboration>();
|
||||
|
||||
for (Edge currentEdge : edges) {
|
||||
Set<BiboDocument> currentCollaboratorDocuments =
|
||||
currentEdge.getCollaboratorDocuments();
|
||||
for (Collaboration currentEdge : edges) {
|
||||
Set<Activity> currentCollaboratorDocuments =
|
||||
currentEdge.getCollaborationActivities();
|
||||
|
||||
if (currentCollaboratorDocuments.contains(currentBiboDocument)) {
|
||||
currentCollaboratorDocuments.remove(currentBiboDocument);
|
||||
|
@ -251,9 +253,9 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
|
||||
edges.removeAll(edgesToBeRemoved);
|
||||
|
||||
for (Node currentCoAuthor : currentBiboDocumentEntry.getValue()) {
|
||||
currentCoAuthor.getAuthorDocuments().remove(currentBiboDocument);
|
||||
if (currentCoAuthor.getAuthorDocuments().isEmpty()) {
|
||||
for (Collaborator currentCoAuthor : currentBiboDocumentEntry.getValue()) {
|
||||
currentCoAuthor.getCollaboratorActivities().remove(currentBiboDocument);
|
||||
if (currentCoAuthor.getCollaboratorActivities().isEmpty()) {
|
||||
nodesToBeRemoved.add(currentCoAuthor);
|
||||
}
|
||||
}
|
||||
|
@ -263,11 +265,11 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
}
|
||||
|
||||
private void createCoAuthorEdges(
|
||||
Map<String, BiboDocument> biboDocumentURLToVO,
|
||||
Map<String, Set<Node>> biboDocumentURLToCoAuthors, Set<Edge> edges,
|
||||
Map<String, Edge> edgeUniqueIdentifierToVO) {
|
||||
Map<String, Activity> biboDocumentURLToVO,
|
||||
Map<String, Set<Collaborator>> biboDocumentURLToCoAuthors, Set<Collaboration> edges,
|
||||
Map<String, Collaboration> edgeUniqueIdentifierToVO) {
|
||||
|
||||
for (Map.Entry<String, Set<Node>> currentBiboDocumentEntry
|
||||
for (Map.Entry<String, Set<Collaborator>> currentBiboDocumentEntry
|
||||
: biboDocumentURLToCoAuthors.entrySet()) {
|
||||
|
||||
/*
|
||||
|
@ -283,42 +285,42 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
<= MAX_AUTHORS_PER_PAPER_ALLOWED) {
|
||||
|
||||
|
||||
Set<Edge> newlyAddedEdges = new HashSet<Edge>();
|
||||
Set<Collaboration> newlyAddedEdges = new HashSet<Collaboration>();
|
||||
|
||||
/*
|
||||
* In order to leverage the nested "for loop" for making edges between all the
|
||||
* co-authors we need to create a list out of the set first.
|
||||
* */
|
||||
List<Node> coAuthorNodes = new ArrayList<Node>(currentBiboDocumentEntry.getValue());
|
||||
Collections.sort(coAuthorNodes, new NodeComparator());
|
||||
List<Collaborator> coAuthorNodes = new ArrayList<Collaborator>(currentBiboDocumentEntry.getValue());
|
||||
Collections.sort(coAuthorNodes, new CollaboratorComparator());
|
||||
|
||||
int numOfCoAuthors = coAuthorNodes.size();
|
||||
|
||||
for (int ii = 0; ii < numOfCoAuthors - 1; ii++) {
|
||||
for (int jj = ii + 1; jj < numOfCoAuthors; jj++) {
|
||||
|
||||
Node coAuthor1 = coAuthorNodes.get(ii);
|
||||
Node coAuthor2 = coAuthorNodes.get(jj);
|
||||
Collaborator coAuthor1 = coAuthorNodes.get(ii);
|
||||
Collaborator coAuthor2 = coAuthorNodes.get(jj);
|
||||
|
||||
Edge coAuthor1_2Edge = getExistingEdge(coAuthor1,
|
||||
Collaboration coAuthor1_2Edge = getExistingEdge(coAuthor1,
|
||||
coAuthor2,
|
||||
edgeUniqueIdentifierToVO);
|
||||
|
||||
BiboDocument currentBiboDocument = biboDocumentURLToVO
|
||||
Activity currentBiboDocument = biboDocumentURLToVO
|
||||
.get(currentBiboDocumentEntry
|
||||
.getKey());
|
||||
|
||||
if (coAuthor1_2Edge != null) {
|
||||
coAuthor1_2Edge.addCollaboratorDocument(currentBiboDocument);
|
||||
coAuthor1_2Edge.addActivity(currentBiboDocument);
|
||||
} else {
|
||||
coAuthor1_2Edge = new Edge(coAuthor1,
|
||||
coAuthor1_2Edge = new Collaboration(coAuthor1,
|
||||
coAuthor2,
|
||||
currentBiboDocument,
|
||||
edgeIDGenerator);
|
||||
newlyAddedEdges.add(coAuthor1_2Edge);
|
||||
edgeUniqueIdentifierToVO.put(
|
||||
getEdgeUniqueIdentifier(coAuthor1.getNodeID(),
|
||||
coAuthor2.getNodeID()),
|
||||
getEdgeUniqueIdentifier(coAuthor1.getCollaboratorID(),
|
||||
coAuthor2.getCollaboratorID()),
|
||||
coAuthor1_2Edge);
|
||||
}
|
||||
}
|
||||
|
@ -329,13 +331,13 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
}
|
||||
}
|
||||
|
||||
private Edge getExistingEdge(
|
||||
Node collaboratingNode1,
|
||||
Node collaboratingNode2,
|
||||
Map<String, Edge> edgeUniqueIdentifierToVO) {
|
||||
private Collaboration getExistingEdge(
|
||||
Collaborator collaboratingNode1,
|
||||
Collaborator collaboratingNode2,
|
||||
Map<String, Collaboration> edgeUniqueIdentifierToVO) {
|
||||
|
||||
String edgeUniqueIdentifier = getEdgeUniqueIdentifier(collaboratingNode1.getNodeID(),
|
||||
collaboratingNode2.getNodeID());
|
||||
String edgeUniqueIdentifier = getEdgeUniqueIdentifier(collaboratingNode1.getCollaboratorID(),
|
||||
collaboratingNode2.getCollaboratorID());
|
||||
|
||||
return edgeUniqueIdentifierToVO.get(edgeUniqueIdentifier);
|
||||
|
||||
|
@ -353,27 +355,15 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
|
||||
}
|
||||
|
||||
// public Map<String, VivoCollegeOrSchool> getCollegeURLToVO() {
|
||||
// return collegeURLToVO;
|
||||
// }
|
||||
private Activity createDocumentVO(QuerySolution solution, String documentURL) {
|
||||
|
||||
private BiboDocument createDocumentVO(QuerySolution solution, String documentURL) {
|
||||
|
||||
BiboDocument biboDocument = new BiboDocument(documentURL);
|
||||
Activity biboDocument = new Activity(documentURL);
|
||||
|
||||
RDFNode publicationDateNode = solution.get(QueryFieldLabels.DOCUMENT_PUBLICATION_DATE);
|
||||
if (publicationDateNode != null) {
|
||||
biboDocument.setPublicationDate(publicationDateNode.toString());
|
||||
biboDocument.setActivityDate(publicationDateNode.toString());
|
||||
}
|
||||
|
||||
/*
|
||||
* This is being used so that date in the data from pre-1.2 ontology can be captured.
|
||||
* */
|
||||
// RDFNode publicationYearUsing_1_1_PropertyNode = solution.get(QueryFieldLabels.DOCUMENT_PUBLICATION_YEAR_USING_1_1_PROPERTY);
|
||||
// if (publicationYearUsing_1_1_PropertyNode != null) {
|
||||
// biboDocument.setPublicationYear(publicationYearUsing_1_1_PropertyNode.toString());
|
||||
// }
|
||||
|
||||
return biboDocument;
|
||||
}
|
||||
|
||||
|
@ -420,7 +410,7 @@ public class CoAuthorshipQueryRunner implements QueryRunner<CoAuthorshipData> {
|
|||
}
|
||||
|
||||
|
||||
public CoAuthorshipData getQueryResult()
|
||||
public CollaborationData getQueryResult()
|
||||
throws MalformedQueryParametersException {
|
||||
|
||||
if (StringUtils.isNotBlank(this.egoURI)) {
|
||||
|
|
|
@ -21,8 +21,8 @@ import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.Tem
|
|||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.DataVisualizationController;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoAuthorshipData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Node;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.VisualizationRequestHandler;
|
||||
|
@ -58,10 +58,10 @@ public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {
|
|||
VisualizationFrameworkConstants
|
||||
.VIS_MODE_KEY);
|
||||
|
||||
QueryRunner<CoAuthorshipData> queryManager =
|
||||
QueryRunner<CollaborationData> queryManager =
|
||||
new CoAuthorshipQueryRunner(egoURI, Dataset, log);
|
||||
|
||||
CoAuthorshipData authorNodesAndEdges =
|
||||
CollaborationData authorNodesAndEdges =
|
||||
queryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
|
@ -133,42 +133,40 @@ public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {
|
|||
|
||||
|
||||
|
||||
private String getCoauthorsListCSVContent(CoAuthorshipData coAuthorshipData) {
|
||||
private String getCoauthorsListCSVContent(CollaborationData coAuthorshipData) {
|
||||
|
||||
StringBuilder csvFileContent = new StringBuilder();
|
||||
|
||||
csvFileContent.append("Co-author, Count\n");
|
||||
|
||||
//for (Entry<String, Integer> currentEntry : coAuthorsToCount.entrySet()) {
|
||||
for (Node currNode : coAuthorshipData.getNodes()) {
|
||||
for (Collaborator currNode : coAuthorshipData.getCollaborators()) {
|
||||
|
||||
/*
|
||||
* We have already printed the Ego Node info.
|
||||
* */
|
||||
if (currNode != coAuthorshipData.getEgoNode()) {
|
||||
if (currNode != coAuthorshipData.getEgoCollaborator()) {
|
||||
|
||||
|
||||
csvFileContent.append(StringEscapeUtils.escapeCsv(currNode.getNodeName()));
|
||||
csvFileContent.append(StringEscapeUtils.escapeCsv(currNode.getCollaboratorName()));
|
||||
csvFileContent.append(",");
|
||||
csvFileContent.append(currNode.getNumOfAuthoredWorks());
|
||||
csvFileContent.append(currNode.getNumOfActivities());
|
||||
csvFileContent.append("\n");
|
||||
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
||||
return csvFileContent.toString();
|
||||
|
||||
}
|
||||
|
||||
private String getCoauthorsPerYearCSVContent(Map<String, Set<Node>> yearToCoauthors) {
|
||||
private String getCoauthorsPerYearCSVContent(Map<String, Set<Collaborator>> yearToCoauthors) {
|
||||
|
||||
StringBuilder csvFileContent = new StringBuilder();
|
||||
|
||||
csvFileContent.append("Year, Count, Co-author(s)\n");
|
||||
|
||||
for (Entry<String, Set<Node>> currentEntry : yearToCoauthors.entrySet()) {
|
||||
for (Entry<String, Set<Collaborator>> currentEntry : yearToCoauthors.entrySet()) {
|
||||
csvFileContent.append(StringEscapeUtils.escapeCsv(currentEntry.getKey()));
|
||||
csvFileContent.append(",");
|
||||
csvFileContent.append(currentEntry.getValue().size());
|
||||
|
@ -181,13 +179,13 @@ public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {
|
|||
|
||||
}
|
||||
|
||||
private String getCoauthorNamesAsString(Set<Node> coAuthors) {
|
||||
private String getCoauthorNamesAsString(Set<Collaborator> coAuthors) {
|
||||
|
||||
StringBuilder coAuthorsMerged = new StringBuilder();
|
||||
|
||||
String coAuthorSeparator = "; ";
|
||||
for (Node currCoAuthor : coAuthors) {
|
||||
coAuthorsMerged.append(currCoAuthor.getNodeName() + coAuthorSeparator);
|
||||
for (Collaborator currCoAuthor : coAuthors) {
|
||||
coAuthorsMerged.append(currCoAuthor.getCollaboratorName() + coAuthorSeparator);
|
||||
}
|
||||
|
||||
return StringUtils.removeEnd(coAuthorsMerged.toString(), coAuthorSeparator);
|
||||
|
@ -199,18 +197,18 @@ public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {
|
|||
* @param authorNodesAndEdges
|
||||
* @param response
|
||||
*/
|
||||
private Map<String, String> prepareCoauthorsCountPerYearDataResponse(CoAuthorshipData authorNodesAndEdges) {
|
||||
private Map<String, String> prepareCoauthorsCountPerYearDataResponse(CollaborationData authorNodesAndEdges) {
|
||||
|
||||
String outputFileName;
|
||||
Map<String, Set<Node>> yearToCoauthors = new TreeMap<String, Set<Node>>();
|
||||
Map<String, Set<Collaborator>> yearToCoauthors = new TreeMap<String, Set<Collaborator>>();
|
||||
|
||||
if (authorNodesAndEdges.getNodes() != null && authorNodesAndEdges.getNodes().size() > 0) {
|
||||
if (authorNodesAndEdges.getCollaborators() != null && authorNodesAndEdges.getCollaborators().size() > 0) {
|
||||
|
||||
outputFileName = UtilityFunctions.slugify(authorNodesAndEdges
|
||||
.getEgoNode().getNodeName())
|
||||
.getEgoCollaborator().getCollaboratorName())
|
||||
+ "_co-authors-per-year" + ".csv";
|
||||
|
||||
yearToCoauthors = UtilityFunctions.getPublicationYearToCoAuthors(authorNodesAndEdges);
|
||||
yearToCoauthors = UtilityFunctions.getActivityYearToCollaborators(authorNodesAndEdges);
|
||||
|
||||
} else {
|
||||
|
||||
|
@ -234,13 +232,13 @@ public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {
|
|||
* @param authorNodesAndEdges
|
||||
* @param response
|
||||
*/
|
||||
private Map<String, String> prepareCoauthorsListDataResponse(CoAuthorshipData coAuthorshipData) {
|
||||
private Map<String, String> prepareCoauthorsListDataResponse(CollaborationData coAuthorshipData) {
|
||||
|
||||
String outputFileName = "";
|
||||
|
||||
if (coAuthorshipData.getNodes() != null && coAuthorshipData.getNodes().size() > 0) {
|
||||
if (coAuthorshipData.getCollaborators() != null && coAuthorshipData.getCollaborators().size() > 0) {
|
||||
|
||||
outputFileName = UtilityFunctions.slugify(coAuthorshipData.getEgoNode().getNodeName())
|
||||
outputFileName = UtilityFunctions.slugify(coAuthorshipData.getEgoCollaborator().getCollaboratorName())
|
||||
+ "_co-authors" + ".csv";
|
||||
} else {
|
||||
outputFileName = "no_co-authors" + ".csv";
|
||||
|
@ -263,7 +261,7 @@ public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {
|
|||
* @param authorNodesAndEdges
|
||||
* @param response
|
||||
*/
|
||||
private Map<String, String> prepareNetworkStreamDataResponse(CoAuthorshipData authorNodesAndEdges) {
|
||||
private Map<String, String> prepareNetworkStreamDataResponse(CollaborationData authorNodesAndEdges) {
|
||||
|
||||
CoAuthorshipGraphMLWriter coAuthorshipGraphMLWriter =
|
||||
new CoAuthorshipGraphMLWriter(authorNodesAndEdges);
|
||||
|
@ -278,13 +276,13 @@ public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {
|
|||
|
||||
}
|
||||
|
||||
private Map<String, String> prepareNetworkDownloadDataResponse(CoAuthorshipData authorNodesAndEdges) {
|
||||
private Map<String, String> prepareNetworkDownloadDataResponse(CollaborationData authorNodesAndEdges) {
|
||||
|
||||
String outputFileName = "";
|
||||
|
||||
if (authorNodesAndEdges.getNodes() != null && authorNodesAndEdges.getNodes().size() > 0) {
|
||||
if (authorNodesAndEdges.getCollaborators() != null && authorNodesAndEdges.getCollaborators().size() > 0) {
|
||||
|
||||
outputFileName = UtilityFunctions.slugify(authorNodesAndEdges.getEgoNode().getNodeName())
|
||||
outputFileName = UtilityFunctions.slugify(authorNodesAndEdges.getEgoCollaborator().getCollaboratorName())
|
||||
+ "_co-author-network.graphml" + ".xml";
|
||||
|
||||
} else {
|
||||
|
@ -316,7 +314,7 @@ public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {
|
|||
*/
|
||||
private TemplateResponseValues prepareStandaloneResponse(
|
||||
String egoURI,
|
||||
CoAuthorshipData coAuthorshipVO,
|
||||
CollaborationData coAuthorshipVO,
|
||||
VitroRequest vitroRequest) {
|
||||
|
||||
Portal portal = vitroRequest.getPortal();
|
||||
|
@ -324,13 +322,13 @@ public class CoAuthorshipRequestHandler implements VisualizationRequestHandler {
|
|||
String title = "";
|
||||
Map<String, Object> body = new HashMap<String, Object>();
|
||||
|
||||
if (coAuthorshipVO.getNodes() != null && coAuthorshipVO.getNodes().size() > 0) {
|
||||
title = coAuthorshipVO.getEgoNode().getNodeName() + " - ";
|
||||
body.put("numOfAuthors", coAuthorshipVO.getNodes().size());
|
||||
if (coAuthorshipVO.getCollaborators() != null && coAuthorshipVO.getCollaborators().size() > 0) {
|
||||
title = coAuthorshipVO.getEgoCollaborator().getCollaboratorName() + " - ";
|
||||
body.put("numOfAuthors", coAuthorshipVO.getCollaborators().size());
|
||||
}
|
||||
|
||||
if (coAuthorshipVO.getEdges() != null && coAuthorshipVO.getEdges().size() > 0) {
|
||||
body.put("numOfCoAuthorShips", coAuthorshipVO.getEdges().size());
|
||||
if (coAuthorshipVO.getCollaborations() != null && coAuthorshipVO.getCollaborations().size() > 0) {
|
||||
body.put("numOfCoAuthorShips", coAuthorshipVO.getCollaborations().size());
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -17,7 +17,7 @@ import org.apache.commons.logging.Log;
|
|||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VisConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Node;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.SparklineData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.YearToEntityCountDataElement;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
|
@ -36,7 +36,7 @@ public class CoAuthorshipVisCodeGenerator {
|
|||
* */
|
||||
private static final String DEFAULT_VISCONTAINER_DIV_ID = "unique_coauthors_vis_container";
|
||||
|
||||
private Map<String, Set<Node>> yearToUniqueCoauthors;
|
||||
private Map<String, Set<Collaborator>> yearToUniqueCoauthors;
|
||||
|
||||
private Log log;
|
||||
|
||||
|
@ -47,7 +47,7 @@ public class CoAuthorshipVisCodeGenerator {
|
|||
public CoAuthorshipVisCodeGenerator(String individualURI,
|
||||
String visMode,
|
||||
String visContainer,
|
||||
Map<String, Set<Node>> yearToUniqueCoauthors,
|
||||
Map<String, Set<Collaborator>> yearToUniqueCoauthors,
|
||||
Log log) {
|
||||
|
||||
this.individualURI = individualURI;
|
||||
|
@ -123,7 +123,7 @@ public class CoAuthorshipVisCodeGenerator {
|
|||
sparklineData.setNumOfYearsToBeRendered(numOfYearsToBeRendered);
|
||||
|
||||
int uniqueCoAuthorCounter = 0;
|
||||
Set<Node> allCoAuthorsWithKnownAuthorshipYears = new HashSet<Node>();
|
||||
Set<Collaborator> allCoAuthorsWithKnownAuthorshipYears = new HashSet<Collaborator>();
|
||||
List<YearToEntityCountDataElement> yearToUniqueCoauthorsCountDataTable = new ArrayList<YearToEntityCountDataElement>();
|
||||
|
||||
for (int publicationYear = minPubYearConsidered;
|
||||
|
@ -131,7 +131,7 @@ public class CoAuthorshipVisCodeGenerator {
|
|||
publicationYear++) {
|
||||
|
||||
String publicationYearAsString = String.valueOf(publicationYear);
|
||||
Set<Node> currentCoAuthors = yearToUniqueCoauthors.get(publicationYearAsString);
|
||||
Set<Collaborator> currentCoAuthors = yearToUniqueCoauthors.get(publicationYearAsString);
|
||||
|
||||
Integer currentUniqueCoAuthors = null;
|
||||
|
||||
|
@ -164,7 +164,7 @@ public class CoAuthorshipVisCodeGenerator {
|
|||
* with known & unknown year. We do not want to repeat the count for this collaborator when we present
|
||||
* it in the front-end.
|
||||
* */
|
||||
Set<Node> totalUniqueCoInvestigators = new HashSet<Node>(allCoAuthorsWithKnownAuthorshipYears);
|
||||
Set<Collaborator> totalUniqueCoInvestigators = new HashSet<Collaborator>(allCoAuthorsWithKnownAuthorshipYears);
|
||||
|
||||
/*
|
||||
* Total publications will also consider publications that have no year associated with
|
||||
|
@ -226,7 +226,7 @@ public class CoAuthorshipVisCodeGenerator {
|
|||
|
||||
Map<String, Integer> yearToUniqueCoauthorsCount = new HashMap<String, Integer>();
|
||||
|
||||
for (Map.Entry<String, Set<Node>> currentYearToCoAuthors : yearToUniqueCoauthors.entrySet()) {
|
||||
for (Map.Entry<String, Set<Collaborator>> currentYearToCoAuthors : yearToUniqueCoauthors.entrySet()) {
|
||||
yearToUniqueCoauthorsCount.put(currentYearToCoAuthors.getKey(),
|
||||
currentYearToCoAuthors.getValue().size());
|
||||
}
|
||||
|
|
|
@ -1,229 +1,187 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
public class CoAuthorshipData {
|
||||
|
||||
private Set<Node> nodes;
|
||||
private Set<Edge> edges;
|
||||
private Node egoNode;
|
||||
private Set<Map<String, String>> NODE_SCHEMA;
|
||||
private Set<Map<String, String>> EDGE_SCHEMA;
|
||||
|
||||
public CoAuthorshipData(Node egoNode, Set<Node> nodes, Set<Edge> edges) {
|
||||
this.egoNode = egoNode;
|
||||
this.nodes = nodes;
|
||||
this.edges = edges;
|
||||
}
|
||||
|
||||
public Set<Node> getNodes() {
|
||||
return nodes;
|
||||
}
|
||||
|
||||
public Set<Edge> getEdges() {
|
||||
return edges;
|
||||
}
|
||||
|
||||
public Node getEgoNode() {
|
||||
return egoNode;
|
||||
}
|
||||
|
||||
/*
|
||||
* Node Schema for graphML
|
||||
* */
|
||||
public Set<Map<String, String>> getNodeSchema() {
|
||||
|
||||
if (NODE_SCHEMA == null) {
|
||||
NODE_SCHEMA = initializeNodeSchema();
|
||||
}
|
||||
|
||||
return NODE_SCHEMA;
|
||||
}
|
||||
|
||||
/*
|
||||
* Edge Schema for graphML
|
||||
* */
|
||||
public Set<Map<String, String>> getEdgeSchema() {
|
||||
|
||||
if (EDGE_SCHEMA == null) {
|
||||
EDGE_SCHEMA = initializeEdgeSchema();
|
||||
}
|
||||
|
||||
return EDGE_SCHEMA;
|
||||
}
|
||||
|
||||
private Set<Map<String, String>> initializeEdgeSchema() {
|
||||
|
||||
Set<Map<String, String>> edgeSchema = new HashSet<Map<String, String>>();
|
||||
|
||||
Map<String, String> schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "collaborator1");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "collaborator1");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "collaborator2");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "collaborator2");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "number_of_coauthored_works");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "number_of_coauthored_works");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "earliest_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "earliest_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_earliest_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "num_earliest_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "latest_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "latest_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_latest_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "num_latest_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_unknown_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "num_unknown_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
return edgeSchema;
|
||||
}
|
||||
|
||||
|
||||
private Set<Map<String, String>> initializeNodeSchema() {
|
||||
|
||||
Set<Map<String, String>> nodeSchema = new HashSet<Map<String, String>>();
|
||||
|
||||
Map<String, String> schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "url");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "url");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "label");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "label");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "profile_url");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "profile_url");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "number_of_authored_works");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "number_of_authored_works");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "earliest_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "earliest_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_earliest_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "num_earliest_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "latest_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "latest_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_latest_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "num_latest_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_unknown_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "num_unknown_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
|
||||
return nodeSchema;
|
||||
}
|
||||
|
||||
}
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaboration;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
|
||||
public class CoAuthorshipData extends CollaborationData {
|
||||
|
||||
public CoAuthorshipData(Collaborator egoCollaborator,
|
||||
Set<Collaborator> collaborators, Set<Collaboration> collaborations) {
|
||||
super(egoCollaborator, collaborators, collaborations);
|
||||
}
|
||||
|
||||
public Set<Map<String, String>> initializeEdgeSchema() {
|
||||
|
||||
Set<Map<String, String>> edgeSchema = new HashSet<Map<String, String>>();
|
||||
|
||||
Map<String, String> schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "collaborator1");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "collaborator1");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "collaborator2");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "collaborator2");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "number_of_coauthored_works");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "number_of_coauthored_works");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "earliest_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "earliest_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_earliest_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "num_earliest_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "latest_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "latest_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_latest_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "num_latest_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_unknown_collaboration");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "num_unknown_collaboration");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
edgeSchema.add(schemaAttributes);
|
||||
|
||||
return edgeSchema;
|
||||
}
|
||||
|
||||
|
||||
public Set<Map<String, String>> initializeNodeSchema() {
|
||||
|
||||
Set<Map<String, String>> nodeSchema = new HashSet<Map<String, String>>();
|
||||
|
||||
Map<String, String> schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "url");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "url");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "label");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "label");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "profile_url");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "profile_url");
|
||||
schemaAttributes.put("attr.type", "string");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "number_of_authored_works");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "number_of_authored_works");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "earliest_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "earliest_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_earliest_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "num_earliest_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "latest_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "latest_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_latest_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "num_latest_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "num_unknown_publication");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "num_unknown_publication");
|
||||
schemaAttributes.put("attr.type", "int");
|
||||
|
||||
nodeSchema.add(schemaAttributes);
|
||||
|
||||
|
||||
return nodeSchema;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,90 +1,26 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils;
|
||||
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
public class CoPIData {
|
||||
|
||||
private Set<CoPINode> nodes;
|
||||
private Set<CoPIEdge> edges;
|
||||
private CoPINode egoNode;
|
||||
private Set<Map<String, String>> NODE_SCHEMA;
|
||||
private Set<Map<String, String>> EDGE_SCHEMA;
|
||||
|
||||
public CoPIData(CoPINode egoNode, Set<CoPINode> nodes, Set<CoPIEdge> edges) {
|
||||
this.egoNode = egoNode;
|
||||
this.nodes = nodes;
|
||||
this.edges = edges;
|
||||
}
|
||||
|
||||
public Set<CoPINode> getNodes() {
|
||||
return nodes;
|
||||
}
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaboration;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
|
||||
public Set<CoPIEdge> getEdges() {
|
||||
return edges;
|
||||
}
|
||||
|
||||
public CoPINode getEgoNode() {
|
||||
return egoNode;
|
||||
public class CoInvestigationData extends CollaborationData {
|
||||
|
||||
public CoInvestigationData(Collaborator egoCollaborator,
|
||||
Set<Collaborator> collaborators, Set<Collaboration> collaborations) {
|
||||
super(egoCollaborator, collaborators, collaborations);
|
||||
}
|
||||
|
||||
/*
|
||||
* Node Schema for graphML
|
||||
* */
|
||||
public Set<Map<String, String>> getNodeSchema() {
|
||||
|
||||
if (NODE_SCHEMA == null) {
|
||||
NODE_SCHEMA = initializeNodeSchema();
|
||||
}
|
||||
|
||||
return NODE_SCHEMA;
|
||||
}
|
||||
|
||||
/*
|
||||
* Edge Schema for graphML
|
||||
* */
|
||||
public Set<Map<String, String>> getEdgeSchema() {
|
||||
|
||||
if (EDGE_SCHEMA == null) {
|
||||
EDGE_SCHEMA = initializeEdgeSchema();
|
||||
}
|
||||
|
||||
return EDGE_SCHEMA;
|
||||
}
|
||||
|
||||
public void print(){
|
||||
|
||||
System.out.println("\n-----------------------------");
|
||||
|
||||
System.out.println("Ego node is "+ this.getEgoNode().getNodeName());
|
||||
|
||||
System.out.println("\nNodes are: ");
|
||||
|
||||
for(CoPINode node : this.getNodes()){
|
||||
System.out.println(node.getNodeName());
|
||||
}
|
||||
|
||||
System.out.println("\nEdges are: ");
|
||||
|
||||
for(CoPIEdge edge : this.getEdges()){
|
||||
System.out.println(edge.getSourceNode() + "-->" + edge.getTargetNode());
|
||||
}
|
||||
|
||||
System.out.println("\n-----------------------------");
|
||||
|
||||
}
|
||||
|
||||
private Set<Map<String, String>> initializeEdgeSchema() {
|
||||
public Set<Map<String, String>> initializeEdgeSchema() {
|
||||
|
||||
Set<Map<String, String>> edgeSchema = new HashSet<Map<String, String>>();
|
||||
|
||||
Map<String, String> schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
Map<String, String> schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "collaborator1");
|
||||
schemaAttributes.put("for", "edge");
|
||||
schemaAttributes.put("attr.name", "collaborator1");
|
||||
|
@ -159,12 +95,12 @@ public class CoPIData {
|
|||
}
|
||||
|
||||
|
||||
private Set<Map<String, String>> initializeNodeSchema() {
|
||||
public Set<Map<String, String>> initializeNodeSchema() {
|
||||
|
||||
Set<Map<String, String>> nodeSchema = new HashSet<Map<String, String>>();
|
||||
|
||||
Map<String, String> schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
Map<String, String> schemaAttributes = new LinkedHashMap<String, String>();
|
||||
|
||||
schemaAttributes.put("id", "url");
|
||||
schemaAttributes.put("for", "node");
|
||||
schemaAttributes.put("attr.name", "url");
|
||||
|
@ -247,5 +183,5 @@ public class CoPIData {
|
|||
|
||||
return nodeSchema;
|
||||
}
|
||||
|
||||
|
||||
}
|
|
@ -1,10 +1,10 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coauthorship;
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils;
|
||||
|
||||
import java.util.Comparator;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Edge;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaboration;
|
||||
|
||||
|
||||
/**
|
||||
|
@ -12,11 +12,11 @@ import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Ed
|
|||
* @author cdtank
|
||||
*
|
||||
*/
|
||||
public class EdgeComparator implements Comparator<Edge> {
|
||||
public class CollaborationComparator implements Comparator<Collaboration> {
|
||||
|
||||
@Override
|
||||
public int compare(Edge arg0, Edge arg1) {
|
||||
return arg0.getEdgeID() - arg1.getEdgeID();
|
||||
public int compare(Collaboration arg0, Collaboration arg1) {
|
||||
return arg0.getCollaborationID() - arg1.getCollaborationID();
|
||||
}
|
||||
|
||||
}
|
|
@ -0,0 +1,66 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils;
|
||||
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaboration;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
|
||||
public abstract class CollaborationData {
|
||||
|
||||
private Set<Collaborator> collaborators;
|
||||
private Set<Collaboration> collaborations;
|
||||
private Collaborator egoCollaborator;
|
||||
private Set<Map<String, String>> NODE_SCHEMA;
|
||||
private Set<Map<String, String>> EDGE_SCHEMA;
|
||||
|
||||
public CollaborationData(Collaborator egoCollaborator,
|
||||
Set<Collaborator> collaborators,
|
||||
Set<Collaboration> collaborations) {
|
||||
this.egoCollaborator = egoCollaborator;
|
||||
this.collaborators = collaborators;
|
||||
this.collaborations = collaborations;
|
||||
}
|
||||
|
||||
public Set<Collaborator> getCollaborators() {
|
||||
return collaborators;
|
||||
}
|
||||
|
||||
public Set<Collaboration> getCollaborations() {
|
||||
return collaborations;
|
||||
}
|
||||
|
||||
public Collaborator getEgoCollaborator() {
|
||||
return egoCollaborator;
|
||||
}
|
||||
|
||||
/*
|
||||
* Node Schema for graphML
|
||||
* */
|
||||
public Set<Map<String, String>> getNodeSchema() {
|
||||
|
||||
if (NODE_SCHEMA == null) {
|
||||
NODE_SCHEMA = initializeNodeSchema();
|
||||
}
|
||||
|
||||
return NODE_SCHEMA;
|
||||
}
|
||||
|
||||
/*
|
||||
* Edge Schema for graphML
|
||||
* */
|
||||
public Set<Map<String, String>> getEdgeSchema() {
|
||||
|
||||
if (EDGE_SCHEMA == null) {
|
||||
EDGE_SCHEMA = initializeEdgeSchema();
|
||||
}
|
||||
|
||||
return EDGE_SCHEMA;
|
||||
}
|
||||
|
||||
abstract Set<Map<String, String>> initializeEdgeSchema();
|
||||
|
||||
abstract Set<Map<String, String>> initializeNodeSchema();
|
||||
}
|
|
@ -1,21 +1,21 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coauthorship;
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils;
|
||||
|
||||
import java.util.Comparator;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Node;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
|
||||
|
||||
/**
|
||||
* This Comparator is used to sort the nodes based on their IDs in ascending order.
|
||||
* @author cdtank
|
||||
*/
|
||||
public class NodeComparator implements Comparator<Node> {
|
||||
public class CollaboratorComparator implements Comparator<Collaborator> {
|
||||
|
||||
@Override
|
||||
public int compare(Node arg0, Node arg1) {
|
||||
return arg0.getNodeID() - arg1.getNodeID();
|
||||
public int compare(Collaborator arg0, Collaborator arg1) {
|
||||
return arg0.getCollaboratorID() - arg1.getCollaboratorID();
|
||||
}
|
||||
|
||||
}
|
|
@ -1,22 +0,0 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coprincipalinvestigator;
|
||||
|
||||
import java.util.Comparator;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPIEdge;
|
||||
|
||||
|
||||
/**
|
||||
* This Comparator is used to sort the edges based on their IDs in ascending order.
|
||||
* @author bkoniden
|
||||
*
|
||||
*/
|
||||
public class CoPIEdgeComparator implements Comparator<CoPIEdge> {
|
||||
|
||||
@Override
|
||||
public int compare(CoPIEdge arg0, CoPIEdge arg1) {
|
||||
return arg0.getEdgeID() - arg1.getEdgeID();
|
||||
}
|
||||
|
||||
}
|
|
@ -17,7 +17,6 @@ import org.apache.commons.logging.LogFactory;
|
|||
import com.hp.hpl.jena.iri.IRI;
|
||||
import com.hp.hpl.jena.iri.IRIFactory;
|
||||
import com.hp.hpl.jena.iri.Violation;
|
||||
import com.hp.hpl.jena.query.DataSource;
|
||||
import com.hp.hpl.jena.query.Query;
|
||||
import com.hp.hpl.jena.query.QueryExecution;
|
||||
import com.hp.hpl.jena.query.QueryExecutionFactory;
|
||||
|
@ -31,17 +30,19 @@ import com.hp.hpl.jena.rdf.model.RDFNode;
|
|||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPIData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPIEdge;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Grant;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPINode;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CoInvestigationData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaboratorComparator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaboration;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UniqueIDGenerator;
|
||||
/**
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
||||
public class CoPIGrantCountQueryRunner implements QueryRunner<CollaborationData> {
|
||||
|
||||
private static final int MAX_PI_PER_GRANT_ALLOWED = 100;
|
||||
|
||||
|
@ -334,7 +335,7 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
return queryExecution.execSelect();
|
||||
}
|
||||
|
||||
public CoPIData getQueryResult()
|
||||
public CollaborationData getQueryResult()
|
||||
throws MalformedQueryParametersException {
|
||||
|
||||
if (StringUtils.isNotBlank(this.egoURI)) {
|
||||
|
@ -365,13 +366,13 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
}
|
||||
|
||||
|
||||
private CoPIEdge getExistingEdge(
|
||||
CoPINode collaboratingNode1,
|
||||
CoPINode collaboratingNode2,
|
||||
Map<String, CoPIEdge> edgeUniqueIdentifierToVO) {
|
||||
private Collaboration getExistingEdge(
|
||||
Collaborator collaboratingNode1,
|
||||
Collaborator collaboratingNode2,
|
||||
Map<String, Collaboration> edgeUniqueIdentifierToVO) {
|
||||
|
||||
String edgeUniqueIdentifier = getEdgeUniqueIdentifier(collaboratingNode1.getNodeID(),
|
||||
collaboratingNode2.getNodeID());
|
||||
String edgeUniqueIdentifier = getEdgeUniqueIdentifier(collaboratingNode1.getCollaboratorID(),
|
||||
collaboratingNode2.getCollaboratorID());
|
||||
|
||||
return edgeUniqueIdentifierToVO.get(edgeUniqueIdentifier);
|
||||
|
||||
|
@ -389,18 +390,18 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
|
||||
}
|
||||
|
||||
private CoPIData createQueryResult(ResultSet resultSet) {
|
||||
private CollaborationData createQueryResult(ResultSet resultSet) {
|
||||
|
||||
Set<CoPINode> nodes = new HashSet<CoPINode>();
|
||||
Set<Collaborator> nodes = new HashSet<Collaborator>();
|
||||
|
||||
Map<String, Grant> grantURLToVO = new HashMap<String, Grant>();
|
||||
Map<String, Set<CoPINode>> grantURLToCoPIs = new HashMap<String, Set<CoPINode>>();
|
||||
Map<String, CoPINode> nodeURLToVO = new HashMap<String, CoPINode>();
|
||||
Map<String, CoPIEdge> edgeUniqueIdentifierToVO = new HashMap<String, CoPIEdge>();
|
||||
Map<String, Activity> grantURLToVO = new HashMap<String, Activity>();
|
||||
Map<String, Set<Collaborator>> grantURLToCoPIs = new HashMap<String, Set<Collaborator>>();
|
||||
Map<String, Collaborator> nodeURLToVO = new HashMap<String, Collaborator>();
|
||||
Map<String, Collaboration> edgeUniqueIdentifierToVO = new HashMap<String, Collaboration>();
|
||||
|
||||
CoPINode egoNode = null;
|
||||
Collaborator egoNode = null;
|
||||
|
||||
Set<CoPIEdge> edges = new HashSet<CoPIEdge>();
|
||||
Set<Collaboration> edges = new HashSet<Collaboration>();
|
||||
|
||||
before = System.currentTimeMillis();
|
||||
|
||||
|
@ -417,20 +418,20 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
|
||||
} else {
|
||||
|
||||
egoNode = new CoPINode(egoPIURLNode.toString(), nodeIDGenerator);
|
||||
egoNode = new Collaborator(egoPIURLNode.toString(), nodeIDGenerator);
|
||||
nodes.add(egoNode);
|
||||
nodeURLToVO.put(egoPIURLNode.toString(), egoNode);
|
||||
|
||||
|
||||
RDFNode authorLabelNode = solution.get(QueryFieldLabels.PI_LABEL);
|
||||
if (authorLabelNode != null) {
|
||||
egoNode.setNodeName(authorLabelNode.toString());
|
||||
egoNode.setCollaboratorName(authorLabelNode.toString());
|
||||
}
|
||||
}
|
||||
log.debug("PI: "+ egoNode.getIndividualLabel());
|
||||
|
||||
RDFNode grantNode = solution.get(QueryFieldLabels.GRANT_URL);
|
||||
Grant grant;
|
||||
Activity grant;
|
||||
|
||||
if (grantURLToVO.containsKey(grantNode.toString())) {
|
||||
grant = grantURLToVO.get(grantNode.toString());
|
||||
|
@ -439,7 +440,7 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
grantURLToVO.put(grantNode.toString(), grant);
|
||||
}
|
||||
|
||||
egoNode.addGrant(grant);
|
||||
egoNode.addActivity(grant);
|
||||
log.debug("Adding grant: "+ grant.getIndividualLabel());
|
||||
|
||||
/*
|
||||
|
@ -452,7 +453,7 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
continue;
|
||||
}
|
||||
|
||||
CoPINode coPINode;
|
||||
Collaborator coPINode;
|
||||
|
||||
RDFNode coPIURLNode = solution.get(QueryFieldLabels.CO_PI_URL);
|
||||
if (nodeURLToVO.containsKey(coPIURLNode.toString())) {
|
||||
|
@ -461,47 +462,46 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
|
||||
} else {
|
||||
|
||||
coPINode = new CoPINode(coPIURLNode.toString(), nodeIDGenerator);
|
||||
coPINode = new Collaborator(coPIURLNode.toString(), nodeIDGenerator);
|
||||
nodes.add(coPINode);
|
||||
nodeURLToVO.put(coPIURLNode.toString(), coPINode);
|
||||
|
||||
RDFNode coPILabelNode = solution.get(QueryFieldLabels.CO_PI_LABEL);
|
||||
if (coPILabelNode != null) {
|
||||
coPINode.setNodeName(coPILabelNode.toString());
|
||||
coPINode.setCollaboratorName(coPILabelNode.toString());
|
||||
}
|
||||
}
|
||||
|
||||
log.debug("Adding CO-PI: "+ coPINode.getIndividualLabel());
|
||||
coPINode.addGrant(grant);
|
||||
coPINode.addActivity(grant);
|
||||
|
||||
Set<CoPINode> coPIsForCurrentGrant;
|
||||
Set<Collaborator> coPIsForCurrentGrant;
|
||||
|
||||
if (grantURLToCoPIs.containsKey(grant.getGrantURL())) {
|
||||
coPIsForCurrentGrant = grantURLToCoPIs
|
||||
.get(grant.getGrantURL());
|
||||
if (grantURLToCoPIs.containsKey(grant.getActivityURI())) {
|
||||
coPIsForCurrentGrant = grantURLToCoPIs.get(grant.getActivityURI());
|
||||
} else {
|
||||
coPIsForCurrentGrant = new HashSet<CoPINode>();
|
||||
grantURLToCoPIs.put(grant.getGrantURL(),
|
||||
coPIsForCurrentGrant = new HashSet<Collaborator>();
|
||||
grantURLToCoPIs.put(grant.getActivityURI(),
|
||||
coPIsForCurrentGrant);
|
||||
}
|
||||
|
||||
coPIsForCurrentGrant.add(coPINode);
|
||||
log.debug("Co-PI for current grant : "+ coPINode.getIndividualLabel());
|
||||
|
||||
CoPIEdge egoCoPIEdge = getExistingEdge(egoNode, coPINode, edgeUniqueIdentifierToVO);
|
||||
Collaboration egoCoPIEdge = getExistingEdge(egoNode, coPINode, edgeUniqueIdentifierToVO);
|
||||
/*
|
||||
* If "egoCoPIEdge" is null it means that no edge exists in between the egoNode
|
||||
* & current coPINode. Else create a new edge, add it to the edges set & add
|
||||
* the collaborator grant to it.
|
||||
* */
|
||||
if (egoCoPIEdge != null) {
|
||||
egoCoPIEdge.addCollaboratorGrant(grant);
|
||||
egoCoPIEdge.addActivity(grant);
|
||||
} else {
|
||||
egoCoPIEdge = new CoPIEdge(egoNode, coPINode, grant, edgeIDGenerator);
|
||||
egoCoPIEdge = new Collaboration(egoNode, coPINode, grant, edgeIDGenerator);
|
||||
edges.add(egoCoPIEdge);
|
||||
edgeUniqueIdentifierToVO.put(
|
||||
getEdgeUniqueIdentifier(egoNode.getNodeID(),
|
||||
coPINode.getNodeID()),
|
||||
getEdgeUniqueIdentifier(egoNode.getCollaboratorID(),
|
||||
coPINode.getCollaboratorID()),
|
||||
egoCoPIEdge);
|
||||
}
|
||||
|
||||
|
@ -542,14 +542,14 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
after = System.currentTimeMillis();
|
||||
log.debug("Time taken to iterate through the ResultSet of SELECT queries is in milliseconds: " + (after - before) );
|
||||
|
||||
return new CoPIData(egoNode, nodes, edges);
|
||||
return new CoInvestigationData(egoNode, nodes, edges);
|
||||
}
|
||||
|
||||
private void createCoPIEdges(Map<String, Grant> grantURLToVO,
|
||||
Map<String, Set<CoPINode>> grantURLToCoPIs, Set<CoPIEdge> edges,
|
||||
Map<String, CoPIEdge> edgeUniqueIdentifierToVO) {
|
||||
private void createCoPIEdges(Map<String, Activity> grantURLToVO,
|
||||
Map<String, Set<Collaborator>> grantURLToCoPIs, Set<Collaboration> edges,
|
||||
Map<String, Collaboration> edgeUniqueIdentifierToVO) {
|
||||
|
||||
for (Map.Entry<String, Set<CoPINode>> currentGrantEntry
|
||||
for (Map.Entry<String, Set<Collaborator>> currentGrantEntry
|
||||
: grantURLToCoPIs.entrySet()) {
|
||||
|
||||
/*
|
||||
|
@ -565,40 +565,40 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
<= MAX_PI_PER_GRANT_ALLOWED) {
|
||||
|
||||
|
||||
Set<CoPIEdge> newlyAddedEdges = new HashSet<CoPIEdge>();
|
||||
Set<Collaboration> newlyAddedEdges = new HashSet<Collaboration>();
|
||||
|
||||
/*
|
||||
* In order to leverage the nested "for loop" for making edges between all the
|
||||
* co-PIs we need to create a list out of the set first.
|
||||
* */
|
||||
List<CoPINode> coPINodes = new ArrayList<CoPINode>(currentGrantEntry.getValue());
|
||||
Collections.sort(coPINodes, new CoPINodeComparator());
|
||||
List<Collaborator> coPINodes = new ArrayList<Collaborator>(currentGrantEntry.getValue());
|
||||
Collections.sort(coPINodes, new CollaboratorComparator());
|
||||
|
||||
int numOfCoPIs = coPINodes.size();
|
||||
|
||||
for (int ii = 0; ii < numOfCoPIs - 1; ii++) {
|
||||
for (int jj = ii + 1; jj < numOfCoPIs; jj++) {
|
||||
|
||||
CoPINode coPI1 = coPINodes.get(ii);
|
||||
CoPINode coPI2 = coPINodes.get(jj);
|
||||
Collaborator coPI1 = coPINodes.get(ii);
|
||||
Collaborator coPI2 = coPINodes.get(jj);
|
||||
|
||||
CoPIEdge coPI1_2Edge = getExistingEdge(coPI1,
|
||||
Collaboration coPI1_2Edge = getExistingEdge(coPI1,
|
||||
coPI2,
|
||||
edgeUniqueIdentifierToVO);
|
||||
|
||||
Grant currentGrant = grantURLToVO.get(currentGrantEntry.getKey());
|
||||
Activity currentGrant = grantURLToVO.get(currentGrantEntry.getKey());
|
||||
|
||||
if (coPI1_2Edge != null) {
|
||||
coPI1_2Edge.addCollaboratorGrant(currentGrant);
|
||||
coPI1_2Edge.addActivity(currentGrant);
|
||||
} else {
|
||||
coPI1_2Edge = new CoPIEdge(coPI1,
|
||||
coPI1_2Edge = new Collaboration(coPI1,
|
||||
coPI2,
|
||||
currentGrant,
|
||||
edgeIDGenerator);
|
||||
newlyAddedEdges.add(coPI1_2Edge);
|
||||
edgeUniqueIdentifierToVO.put(
|
||||
getEdgeUniqueIdentifier(coPI1.getNodeID(),
|
||||
coPI2.getNodeID()),
|
||||
getEdgeUniqueIdentifier(coPI1.getCollaboratorID(),
|
||||
coPI2.getCollaboratorID()),
|
||||
coPI1_2Edge);
|
||||
}
|
||||
}
|
||||
|
@ -610,23 +610,23 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
|
||||
}
|
||||
|
||||
private void removeLowQualityNodesAndEdges(Set<CoPINode> nodes,
|
||||
Map<String, Grant> grantURLToVO,
|
||||
Map<String, Set<CoPINode>> grantURLToCoPIs, Set<CoPIEdge> edges) {
|
||||
private void removeLowQualityNodesAndEdges(Set<Collaborator> nodes,
|
||||
Map<String, Activity> grantURLToVO,
|
||||
Map<String, Set<Collaborator>> grantURLToCoPIs, Set<Collaboration> edges) {
|
||||
|
||||
Set<CoPINode> nodesToBeRemoved = new HashSet<CoPINode>();
|
||||
for (Map.Entry<String, Set<CoPINode>> currentGrantEntry
|
||||
Set<Collaborator> nodesToBeRemoved = new HashSet<Collaborator>();
|
||||
for (Map.Entry<String, Set<Collaborator>> currentGrantEntry
|
||||
: grantURLToCoPIs.entrySet()) {
|
||||
|
||||
if (currentGrantEntry.getValue().size() > MAX_PI_PER_GRANT_ALLOWED) {
|
||||
|
||||
Grant currentGrant = grantURLToVO.get(currentGrantEntry.getKey());
|
||||
Activity currentGrant = grantURLToVO.get(currentGrantEntry.getKey());
|
||||
|
||||
Set<CoPIEdge> edgesToBeRemoved = new HashSet<CoPIEdge>();
|
||||
Set<Collaboration> edgesToBeRemoved = new HashSet<Collaboration>();
|
||||
|
||||
for (CoPIEdge currentEdge : edges) {
|
||||
Set<Grant> currentCollaboratorGrants =
|
||||
currentEdge.getCollaboratorGrants();
|
||||
for (Collaboration currentEdge : edges) {
|
||||
Set<Activity> currentCollaboratorGrants =
|
||||
currentEdge.getCollaborationActivities();
|
||||
|
||||
if (currentCollaboratorGrants.contains(currentGrant)) {
|
||||
currentCollaboratorGrants.remove(currentGrant);
|
||||
|
@ -638,9 +638,9 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
|
||||
edges.removeAll(edgesToBeRemoved);
|
||||
|
||||
for (CoPINode currentCoPI : currentGrantEntry.getValue()) {
|
||||
currentCoPI.getInvestigatedGrants().remove(currentGrant);
|
||||
if (currentCoPI.getInvestigatedGrants().isEmpty()) {
|
||||
for (Collaborator currentCoPI : currentGrantEntry.getValue()) {
|
||||
currentCoPI.getCollaboratorActivities().remove(currentGrant);
|
||||
if (currentCoPI.getCollaboratorActivities().isEmpty()) {
|
||||
nodesToBeRemoved.add(currentCoPI);
|
||||
}
|
||||
}
|
||||
|
@ -650,9 +650,9 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
|
||||
}
|
||||
|
||||
private Grant createGrantVO(QuerySolution solution, String grantURL) {
|
||||
private Activity createGrantVO(QuerySolution solution, String grantURL) {
|
||||
|
||||
Grant grant = new Grant(grantURL);
|
||||
Activity grant = new Activity(grantURL);
|
||||
|
||||
RDFNode grantLabelNode = solution.get(QueryFieldLabels.GRANT_LABEL);
|
||||
if (grantLabelNode != null) {
|
||||
|
@ -662,14 +662,16 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
|
||||
RDFNode grantStartYear = solution.get(QueryFieldLabels.ROLE_START_DATE);
|
||||
if (grantStartYear != null) {
|
||||
grant.setGrantStartDate(grantStartYear.toString());
|
||||
grant.setActivityDate(grantStartYear.toString());
|
||||
}else{
|
||||
grantStartYear = solution.get(QueryFieldLabels.GRANT_START_DATE);
|
||||
if(grantStartYear != null){
|
||||
grant.setGrantStartDate(grantStartYear.toString());
|
||||
grant.setActivityDate(grantStartYear.toString());
|
||||
}
|
||||
}
|
||||
|
||||
//TODO: Verify that grant end date is not required.
|
||||
/*
|
||||
RDFNode grantEndDate = solution.get(QueryFieldLabels.ROLE_END_DATE);
|
||||
if (grantEndDate != null) {
|
||||
grant.setGrantEndDate(grantEndDate.toString());
|
||||
|
@ -679,6 +681,7 @@ public class CoPIGrantCountQueryRunner implements QueryRunner<CoPIData> {
|
|||
grant.setGrantEndDate(grantEndDate.toString());
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
return grant;
|
||||
}
|
||||
|
|
|
@ -3,7 +3,6 @@ package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coprincipalinv
|
|||
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
|
@ -11,7 +10,6 @@ import org.apache.commons.lang.StringEscapeUtils;
|
|||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
|
||||
import com.hp.hpl.jena.query.Dataset;
|
||||
import com.hp.hpl.jena.query.Dataset;
|
||||
import com.hp.hpl.jena.rdf.model.Model;
|
||||
|
||||
|
@ -19,11 +17,11 @@ import edu.cornell.mannlib.vitro.webapp.beans.Portal;
|
|||
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.ResponseValues;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.TemplateResponseValues;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.DataVisualizationController;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPIData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPINode;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.VisualizationRequestHandler;
|
||||
|
@ -51,9 +49,9 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
CoPIGrantCountConstructQueryRunner constructQueryRunner = new CoPIGrantCountConstructQueryRunner(egoURI, Dataset, log);
|
||||
Model constructedModel = constructQueryRunner.getConstructedModel();
|
||||
|
||||
QueryRunner<CoPIData> queryManager = new CoPIGrantCountQueryRunner(egoURI, constructedModel, log);
|
||||
QueryRunner<CollaborationData> queryManager = new CoPIGrantCountQueryRunner(egoURI, constructedModel, log);
|
||||
|
||||
CoPIData PINodesAndEdges = queryManager.getQueryResult();
|
||||
CollaborationData PINodesAndEdges = queryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* We will be using the same visualization package for both sparkline & co-pi
|
||||
|
@ -102,15 +100,6 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
* Support for this has ceased to exist. Standalone mode was created only for demo
|
||||
* purposes for VIVO Conf.
|
||||
* */
|
||||
/* String egoURI = vitroRequest.getParameter(VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY);
|
||||
|
||||
QueryRunner<CoPIData> queryManager = new CoPIGrantCountQueryRunner(egoURI, Dataset, log);
|
||||
|
||||
CoPIData PINodesAndEdges = queryManager.getQueryResult();
|
||||
|
||||
return prepareStandaloneResponse(egoURI,
|
||||
PINodesAndEdges,
|
||||
vitroRequest); */
|
||||
throw new UnsupportedOperationException("CoPI does not provide Standalone Response.");
|
||||
|
||||
}
|
||||
|
@ -122,7 +111,7 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
* @param coPIVO
|
||||
*/
|
||||
private TemplateResponseValues prepareStandaloneResponse(String egoURI,
|
||||
CoPIData coPIVO, VitroRequest vitroRequest) {
|
||||
CollaborationData coPIVO, VitroRequest vitroRequest) {
|
||||
|
||||
Portal portal = vitroRequest.getPortal();
|
||||
|
||||
|
@ -130,17 +119,17 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
Map<String, Object> body = new HashMap<String, Object>();
|
||||
|
||||
|
||||
if (coPIVO.getNodes() != null
|
||||
&& coPIVO.getNodes().size() > 0) {
|
||||
title = coPIVO.getEgoNode().getNodeName() + " - ";
|
||||
body.put("numOfInvestigators", coPIVO.getNodes().size());
|
||||
if (coPIVO.getCollaborators() != null
|
||||
&& coPIVO.getCollaborators().size() > 0) {
|
||||
title = coPIVO.getEgoCollaborator().getCollaboratorName() + " - ";
|
||||
body.put("numOfInvestigators", coPIVO.getCollaborators().size());
|
||||
|
||||
title = coPIVO.getEgoNode().getNodeName() + " - ";
|
||||
title = coPIVO.getEgoCollaborator().getCollaboratorName() + " - ";
|
||||
}
|
||||
|
||||
if (coPIVO.getEdges() != null
|
||||
&& coPIVO.getEdges().size() > 0) {
|
||||
body.put("numOfCoInvestigations", coPIVO.getEdges().size());
|
||||
if (coPIVO.getCollaborations() != null
|
||||
&& coPIVO.getCollaborations().size() > 0) {
|
||||
body.put("numOfCoInvestigations", coPIVO.getCollaborations().size());
|
||||
}
|
||||
|
||||
String standaloneTemplate = "coInvestigation.ftl";
|
||||
|
@ -153,23 +142,23 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
}
|
||||
|
||||
|
||||
private String getCoPIsListCSVContent(CoPIData coPIData) {
|
||||
private String getCoPIsListCSVContent(CollaborationData coPIData) {
|
||||
|
||||
StringBuilder csvFileContent = new StringBuilder();
|
||||
|
||||
csvFileContent.append("Co-investigator, Count\n");
|
||||
|
||||
// for (Entry<String, Integer> currentEntry : coPIData.entrySet()) {
|
||||
for (CoPINode currNode : coPIData.getNodes()) {
|
||||
for (Collaborator currNode : coPIData.getCollaborators()) {
|
||||
|
||||
/*
|
||||
* We have already printed the Ego Node info.
|
||||
* */
|
||||
if (currNode != coPIData.getEgoNode()) {
|
||||
if (currNode != coPIData.getEgoCollaborator()) {
|
||||
|
||||
csvFileContent.append(StringEscapeUtils.escapeCsv(currNode.getNodeName()));
|
||||
csvFileContent.append(StringEscapeUtils.escapeCsv(currNode.getCollaboratorName()));
|
||||
csvFileContent.append(",");
|
||||
csvFileContent.append(currNode.getNumberOfInvestigatedGrants());
|
||||
csvFileContent.append(currNode.getNumOfActivities());
|
||||
csvFileContent.append("\n");
|
||||
|
||||
}
|
||||
|
@ -180,13 +169,13 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
}
|
||||
|
||||
|
||||
private String getCoPIsPerYearCSVContent(Map<String, Set<CoPINode>> yearToCoPI) {
|
||||
private String getCoPIsPerYearCSVContent(Map<String, Set<Collaborator>> yearToCoPI) {
|
||||
|
||||
StringBuilder csvFileContent = new StringBuilder();
|
||||
|
||||
csvFileContent.append("Year, Count, Co-investigator(s)\n");
|
||||
|
||||
for (Map.Entry<String, Set<CoPINode>> currentEntry : yearToCoPI.entrySet()) {
|
||||
for (Map.Entry<String, Set<Collaborator>> currentEntry : yearToCoPI.entrySet()) {
|
||||
|
||||
csvFileContent.append(StringEscapeUtils.escapeCsv(currentEntry.getKey()));
|
||||
csvFileContent.append(",");
|
||||
|
@ -199,13 +188,13 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
return csvFileContent.toString();
|
||||
}
|
||||
|
||||
private String getCoPINamesAsString(Set<CoPINode> CoPIs) {
|
||||
private String getCoPINamesAsString(Set<Collaborator> CoPIs) {
|
||||
|
||||
StringBuilder coPIsMerged = new StringBuilder();
|
||||
|
||||
String coPISeparator = ";";
|
||||
for(CoPINode currentCoPI : CoPIs){
|
||||
coPIsMerged.append(currentCoPI.getNodeName() + coPISeparator);
|
||||
for(Collaborator currentCoPI : CoPIs){
|
||||
coPIsMerged.append(currentCoPI.getCollaboratorName() + coPISeparator);
|
||||
}
|
||||
|
||||
return StringUtils.removeEnd(coPIsMerged.toString(), coPISeparator);
|
||||
|
@ -218,18 +207,18 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
* @param piNodesAndEdges
|
||||
* @param response
|
||||
*/
|
||||
private Map<String, String> prepareCoPIsCountPerYearDataResponse(CoPIData piNodesAndEdges) {
|
||||
private Map<String, String> prepareCoPIsCountPerYearDataResponse(CollaborationData piNodesAndEdges) {
|
||||
|
||||
String outputFileName;
|
||||
Map<String, Set<CoPINode>> yearToCoPIs = new TreeMap<String, Set<CoPINode>>();
|
||||
Map<String, Set<Collaborator>> yearToCoPIs = new TreeMap<String, Set<Collaborator>>();
|
||||
|
||||
if (piNodesAndEdges.getNodes() != null && piNodesAndEdges.getNodes().size() > 0) {
|
||||
if (piNodesAndEdges.getCollaborators() != null && piNodesAndEdges.getCollaborators().size() > 0) {
|
||||
|
||||
outputFileName = UtilityFunctions.slugify(piNodesAndEdges
|
||||
.getEgoNode().getNodeName())
|
||||
.getEgoCollaborator().getCollaboratorName())
|
||||
+ "_co-investigators-per-year" + ".csv";
|
||||
|
||||
yearToCoPIs = UtilityFunctions.getGrantYearToCoPI(piNodesAndEdges);
|
||||
yearToCoPIs = UtilityFunctions.getActivityYearToCollaborators(piNodesAndEdges);
|
||||
|
||||
} else {
|
||||
|
||||
|
@ -253,13 +242,13 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
* @param coPIData
|
||||
* @param response
|
||||
*/
|
||||
private Map<String, String> prepareCoPIsListDataResponse(CoPIData coPIData) {
|
||||
private Map<String, String> prepareCoPIsListDataResponse(CollaborationData coPIData) {
|
||||
|
||||
String outputFileName = "";
|
||||
|
||||
if (coPIData.getNodes() != null && coPIData.getNodes().size() > 0) {
|
||||
if (coPIData.getCollaborators() != null && coPIData.getCollaborators().size() > 0) {
|
||||
|
||||
outputFileName = UtilityFunctions.slugify(coPIData.getEgoNode().getNodeName())
|
||||
outputFileName = UtilityFunctions.slugify(coPIData.getEgoCollaborator().getCollaboratorName())
|
||||
+ "_co-investigators" + ".csv";
|
||||
|
||||
} else {
|
||||
|
@ -277,31 +266,13 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
return fileData;
|
||||
}
|
||||
|
||||
private Map<String, Integer> getCoPIsList(CoPIData coPIVO) {
|
||||
|
||||
Map<String, Integer> coPIsToCount = new TreeMap<String, Integer>();
|
||||
|
||||
for (CoPINode currNode : coPIVO.getNodes()) {
|
||||
|
||||
/*
|
||||
* We have already printed the Ego Node info.
|
||||
* */
|
||||
if (currNode != coPIVO.getEgoNode()) {
|
||||
|
||||
coPIsToCount.put(currNode.getNodeName(), currNode.getNumberOfInvestigatedGrants());
|
||||
|
||||
}
|
||||
}
|
||||
return coPIsToCount;
|
||||
}
|
||||
|
||||
/**
|
||||
* Provides a response when graphml formatted co-pi network is requested, typically by
|
||||
* the flash vis.
|
||||
* @param coPIData
|
||||
* @param response
|
||||
*/
|
||||
private Map<String, String> prepareNetworkStreamDataResponse(CoPIData coPIData) {
|
||||
private Map<String, String> prepareNetworkStreamDataResponse(CollaborationData coPIData) {
|
||||
|
||||
CoPIGraphMLWriter coPIGraphMLWriter =
|
||||
new CoPIGraphMLWriter(coPIData);
|
||||
|
@ -316,13 +287,13 @@ public class CoPIGrantCountRequestHandler implements VisualizationRequestHandler
|
|||
|
||||
}
|
||||
|
||||
private Map<String, String> prepareNetworkDownloadDataResponse(CoPIData coPIData) {
|
||||
private Map<String, String> prepareNetworkDownloadDataResponse(CollaborationData coPIData) {
|
||||
|
||||
String outputFileName = "";
|
||||
|
||||
if (coPIData.getNodes() != null && coPIData.getNodes().size() > 0) {
|
||||
if (coPIData.getCollaborators() != null && coPIData.getCollaborators().size() > 0) {
|
||||
|
||||
outputFileName = UtilityFunctions.slugify(coPIData.getEgoNode().getNodeName())
|
||||
outputFileName = UtilityFunctions.slugify(coPIData.getEgoCollaborator().getCollaboratorName())
|
||||
+ "_co-investigator-network.graphml" + ".xml";
|
||||
|
||||
} else {
|
||||
|
|
|
@ -11,9 +11,11 @@ import java.util.Set;
|
|||
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.freemarker.UrlBuilder.ParamMap;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPIData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPIEdge;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPINode;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationComparator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaboratorComparator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaboration;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
/**
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
|
@ -30,11 +32,11 @@ public class CoPIGraphMLWriter {
|
|||
|
||||
private final String GRAPHML_FOOTER = "</graphml>";
|
||||
|
||||
public CoPIGraphMLWriter(CoPIData coPIData){
|
||||
public CoPIGraphMLWriter(CollaborationData coPIData){
|
||||
coPIGraphMLContent = createCoPIGraphMLContent(coPIData);
|
||||
}
|
||||
|
||||
private StringBuilder createCoPIGraphMLContent(CoPIData coPIData) {
|
||||
private StringBuilder createCoPIGraphMLContent(CollaborationData coPIData) {
|
||||
|
||||
StringBuilder graphMLContent = new StringBuilder();
|
||||
|
||||
|
@ -62,34 +64,34 @@ public class CoPIGraphMLWriter {
|
|||
return coPIGraphMLContent;
|
||||
}
|
||||
|
||||
private void generateGraphContent(CoPIData coPIData,
|
||||
private void generateGraphContent(CollaborationData coPIData,
|
||||
StringBuilder graphMLContent) {
|
||||
|
||||
graphMLContent.append("\n<graph edgedefault=\"undirected\">\n");
|
||||
|
||||
if (coPIData.getNodes() != null & coPIData.getNodes().size() > 0) {
|
||||
if (coPIData.getCollaborators() != null & coPIData.getCollaborators().size() > 0) {
|
||||
generateNodeSectionContent(coPIData, graphMLContent);
|
||||
}
|
||||
|
||||
if (coPIData.getEdges() != null & coPIData.getEdges().size() > 0) {
|
||||
if (coPIData.getCollaborations() != null & coPIData.getCollaborations().size() > 0) {
|
||||
generateEdgeSectionContent(coPIData, graphMLContent);
|
||||
}
|
||||
|
||||
graphMLContent.append("</graph>\n");
|
||||
}
|
||||
|
||||
private void generateEdgeSectionContent(CoPIData coPIData,
|
||||
private void generateEdgeSectionContent(CollaborationData coPIData,
|
||||
StringBuilder graphMLContent) {
|
||||
|
||||
graphMLContent.append("<!-- edges -->\n");
|
||||
|
||||
Set<CoPIEdge> edges = coPIData.getEdges();
|
||||
Set<Collaboration> edges = coPIData.getCollaborations();
|
||||
|
||||
List<CoPIEdge> orderedEdges = new ArrayList<CoPIEdge>(edges);
|
||||
List<Collaboration> orderedEdges = new ArrayList<Collaboration>(edges);
|
||||
|
||||
Collections.sort(orderedEdges, new CoPIEdgeComparator());
|
||||
Collections.sort(orderedEdges, new CollaborationComparator());
|
||||
|
||||
for (CoPIEdge currentEdge : orderedEdges) {
|
||||
for (Collaboration currentEdge : orderedEdges) {
|
||||
|
||||
/*
|
||||
* This method actually creates the XML code for a single edge. "graphMLContent"
|
||||
|
@ -99,24 +101,24 @@ public class CoPIGraphMLWriter {
|
|||
}
|
||||
}
|
||||
|
||||
private void getEdgeContent(StringBuilder graphMLContent, CoPIEdge currentEdge) {
|
||||
private void getEdgeContent(StringBuilder graphMLContent, Collaboration currentEdge) {
|
||||
|
||||
graphMLContent.append("<edge "
|
||||
+ "id=\"" + currentEdge.getEdgeID() + "\" "
|
||||
+ "source=\"" + currentEdge.getSourceNode().getNodeID() + "\" "
|
||||
+ "target=\"" + currentEdge.getTargetNode().getNodeID() + "\" "
|
||||
+ "id=\"" + currentEdge.getCollaborationID() + "\" "
|
||||
+ "source=\"" + currentEdge.getSourceCollaborator().getCollaboratorID() + "\" "
|
||||
+ "target=\"" + currentEdge.getTargetCollaborator().getCollaboratorID() + "\" "
|
||||
+ ">\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"collaborator1\">"
|
||||
+ currentEdge.getSourceNode().getNodeName()
|
||||
+ currentEdge.getSourceCollaborator().getCollaboratorName()
|
||||
+ "</data>\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"collaborator2\">"
|
||||
+ currentEdge.getTargetNode().getNodeName()
|
||||
+ currentEdge.getTargetCollaborator().getCollaboratorName()
|
||||
+ "</data>\n");
|
||||
|
||||
graphMLContent.append("\t<data key=\"number_of_coinvestigated_grants\">"
|
||||
+ currentEdge.getNumberOfCoInvestigatedGrants()
|
||||
+ currentEdge.getNumOfCollaborations()
|
||||
+ "</data>\n");
|
||||
|
||||
if (currentEdge.getEarliestCollaborationYearCount() != null) {
|
||||
|
@ -167,13 +169,13 @@ public class CoPIGraphMLWriter {
|
|||
}
|
||||
|
||||
|
||||
private void generateNodeSectionContent(CoPIData coPIData,
|
||||
private void generateNodeSectionContent(CollaborationData coPIData,
|
||||
StringBuilder graphMLContent) {
|
||||
|
||||
graphMLContent.append("<!-- nodes -->\n");
|
||||
|
||||
CoPINode egoNode = coPIData.getEgoNode();
|
||||
Set<CoPINode> piNodes = coPIData.getNodes();
|
||||
Collaborator egoNode = coPIData.getEgoCollaborator();
|
||||
Set<Collaborator> piNodes = coPIData.getCollaborators();
|
||||
|
||||
/*
|
||||
* This method actually creates the XML code for a single node. "graphMLContent"
|
||||
|
@ -183,13 +185,13 @@ public class CoPIGraphMLWriter {
|
|||
* */
|
||||
getNodeContent(graphMLContent, egoNode);
|
||||
|
||||
List<CoPINode> orderedPINodes = new ArrayList<CoPINode>(piNodes);
|
||||
List<Collaborator> orderedPINodes = new ArrayList<Collaborator>(piNodes);
|
||||
orderedPINodes.remove(egoNode);
|
||||
|
||||
Collections.sort(orderedPINodes, new CoPINodeComparator());
|
||||
Collections.sort(orderedPINodes, new CollaboratorComparator());
|
||||
|
||||
|
||||
for (CoPINode currNode : orderedPINodes) {
|
||||
for (Collaborator currNode : orderedPINodes) {
|
||||
|
||||
/*
|
||||
* We have already printed the Ego Node info.
|
||||
|
@ -204,17 +206,17 @@ public class CoPIGraphMLWriter {
|
|||
|
||||
}
|
||||
|
||||
private void getNodeContent(StringBuilder graphMLContent, CoPINode node) {
|
||||
private void getNodeContent(StringBuilder graphMLContent, Collaborator node) {
|
||||
|
||||
ParamMap individualProfileURLParams = new ParamMap(VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY,
|
||||
node.getNodeURI());
|
||||
node.getCollaboratorURI());
|
||||
|
||||
String profileURL = UrlBuilder.getUrl(VisualizationFrameworkConstants.INDIVIDUAL_URL_PREFIX,
|
||||
individualProfileURLParams);
|
||||
|
||||
graphMLContent.append("<node id=\"" + node.getNodeID() + "\">\n");
|
||||
graphMLContent.append("\t<data key=\"url\">" + node.getNodeURI() + "</data>\n");
|
||||
graphMLContent.append("\t<data key=\"label\">" + node.getNodeName() + "</data>\n");
|
||||
graphMLContent.append("<node id=\"" + node.getCollaboratorID() + "\">\n");
|
||||
graphMLContent.append("\t<data key=\"url\">" + node.getCollaboratorURI() + "</data>\n");
|
||||
graphMLContent.append("\t<data key=\"label\">" + node.getCollaboratorName() + "</data>\n");
|
||||
|
||||
if (profileURL != null) {
|
||||
graphMLContent.append("\t<data key=\"profile_url\">" + profileURL + "</data>\n");
|
||||
|
@ -222,10 +224,10 @@ public class CoPIGraphMLWriter {
|
|||
|
||||
|
||||
graphMLContent.append("\t<data key=\"number_of_investigated_grants\">"
|
||||
+ node.getNumberOfInvestigatedGrants()
|
||||
+ node.getNumOfActivities()
|
||||
+ "</data>\n");
|
||||
|
||||
if (node.getEarliestGrantYearCount() != null) {
|
||||
if (node.getEarliestActivityYearCount() != null) {
|
||||
|
||||
/*
|
||||
* There is no clean way of getting the map contents in java even though
|
||||
|
@ -233,7 +235,7 @@ public class CoPIGraphMLWriter {
|
|||
* I am feeling dirty just about now.
|
||||
* */
|
||||
for (Map.Entry<String, Integer> publicationInfo
|
||||
: node.getEarliestGrantYearCount().entrySet()) {
|
||||
: node.getEarliestActivityYearCount().entrySet()) {
|
||||
|
||||
graphMLContent.append("\t<data key=\"earliest_grant\">"
|
||||
+ publicationInfo.getKey()
|
||||
|
@ -246,10 +248,10 @@ public class CoPIGraphMLWriter {
|
|||
|
||||
}
|
||||
|
||||
if (node.getLatestGrantYearCount() != null) {
|
||||
if (node.getLatestActivityYearCount() != null) {
|
||||
|
||||
for (Map.Entry<String, Integer> publicationInfo
|
||||
: node.getLatestGrantYearCount().entrySet()) {
|
||||
: node.getLatestActivityYearCount().entrySet()) {
|
||||
|
||||
graphMLContent.append("\t<data key=\"latest_grant\">"
|
||||
+ publicationInfo.getKey()
|
||||
|
@ -262,10 +264,10 @@ public class CoPIGraphMLWriter {
|
|||
|
||||
}
|
||||
|
||||
if (node.getUnknownGrantYearCount() != null) {
|
||||
if (node.getUnknownActivityYearCount() != null) {
|
||||
|
||||
graphMLContent.append("\t<data key=\"num_unknown_grant\">"
|
||||
+ node.getUnknownGrantYearCount()
|
||||
+ node.getUnknownActivityYearCount()
|
||||
+ "</data>\n");
|
||||
|
||||
}
|
||||
|
@ -273,7 +275,7 @@ public class CoPIGraphMLWriter {
|
|||
graphMLContent.append("</node>\n");
|
||||
}
|
||||
|
||||
private void generateKeyDefinitionContent(CoPIData coPIData,
|
||||
private void generateKeyDefinitionContent(CollaborationData coPIData,
|
||||
StringBuilder graphMLContent) {
|
||||
/*
|
||||
* Generate the key definition content for node.
|
||||
|
|
|
@ -1,20 +0,0 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coprincipalinvestigator;
|
||||
|
||||
import java.util.Comparator;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPINode;
|
||||
|
||||
/**
|
||||
* This Comparator is used to sort the CoPINodes based on their IDs in ascending order.
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
|
||||
public class CoPINodeComparator implements Comparator<CoPINode>{
|
||||
@Override
|
||||
public int compare(CoPINode arg0, CoPINode arg1) {
|
||||
return arg0.getNodeID() - arg1.getNodeID();
|
||||
}
|
||||
}
|
|
@ -17,7 +17,7 @@ import org.apache.commons.logging.Log;
|
|||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VisConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPINode;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.SparklineData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.YearToEntityCountDataElement;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
|
@ -41,7 +41,7 @@ public class CoPIVisCodeGenerator {
|
|||
* */
|
||||
private static final String DEFAULT_VISCONTAINER_DIV_ID = "unique_coinvestigators_vis_container";
|
||||
|
||||
private Map<String, Set<CoPINode>> yearToUniqueCoPIs;
|
||||
private Map<String, Set<Collaborator>> yearToUniqueCoPIs;
|
||||
|
||||
private Log log;
|
||||
|
||||
|
@ -52,7 +52,7 @@ public class CoPIVisCodeGenerator {
|
|||
public CoPIVisCodeGenerator(String individualURI,
|
||||
String visMode,
|
||||
String visContainer,
|
||||
Map<String, Set<CoPINode>> yearToUniqueCoPIs,
|
||||
Map<String, Set<Collaborator>> yearToUniqueCoPIs,
|
||||
Log log){
|
||||
|
||||
this.individualURI = individualURI;
|
||||
|
@ -133,13 +133,13 @@ public class CoPIVisCodeGenerator {
|
|||
sparklineData.setNumOfYearsToBeRendered(numOfYearsToBeRendered);
|
||||
|
||||
int uniqueCoPICounter = 0;
|
||||
Set<CoPINode> allCoPIsWithKnownGrantShipYears = new HashSet<CoPINode>();
|
||||
Set<Collaborator> allCoPIsWithKnownGrantShipYears = new HashSet<Collaborator>();
|
||||
List<YearToEntityCountDataElement> yearToUniqueInvestigatorsCountDataTable = new ArrayList<YearToEntityCountDataElement>();
|
||||
|
||||
for (int grantYear = minGrantYearConsidered; grantYear <= currentYear; grantYear++) {
|
||||
|
||||
String grantYearAsString = String.valueOf(grantYear);
|
||||
Set<CoPINode> currentCoPIs = yearToUniqueCoPIs
|
||||
Set<Collaborator> currentCoPIs = yearToUniqueCoPIs
|
||||
.get(grantYearAsString);
|
||||
|
||||
Integer currentUniqueCoPIs = null;
|
||||
|
@ -174,7 +174,7 @@ public class CoPIVisCodeGenerator {
|
|||
* with known & unknown year. We do not want to repeat the count for this collaborator when we present
|
||||
* it in the front-end.
|
||||
* */
|
||||
Set<CoPINode> totalUniqueCoInvestigators = new HashSet<CoPINode>(allCoPIsWithKnownGrantShipYears);
|
||||
Set<Collaborator> totalUniqueCoInvestigators = new HashSet<Collaborator>(allCoPIsWithKnownGrantShipYears);
|
||||
|
||||
/*
|
||||
* Total grants will also consider grants that have no year
|
||||
|
@ -238,7 +238,7 @@ public class CoPIVisCodeGenerator {
|
|||
VisualizationFrameworkConstants.COPIS_COUNT_PER_YEAR_VIS_MODE));
|
||||
|
||||
Map<String, Integer> yearToUniqueCoPIsCount = new HashMap<String, Integer>();
|
||||
for (Map.Entry<String, Set<CoPINode>> currentYearToUniqueCoPIsCount : yearToUniqueCoPIs.entrySet()) {
|
||||
for (Map.Entry<String, Set<Collaborator>> currentYearToUniqueCoPIsCount : yearToUniqueCoPIs.entrySet()) {
|
||||
yearToUniqueCoPIsCount.put(currentYearToUniqueCoPIsCount.getKey(),
|
||||
currentYearToUniqueCoPIsCount.getValue().size());
|
||||
}
|
||||
|
|
|
@ -26,7 +26,7 @@ import com.hp.hpl.jena.rdf.model.Model;
|
|||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.BiboDocument;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Entity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.SubEntity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
|
@ -82,7 +82,7 @@ public class EntityPublicationCountQueryRunner implements QueryRunner<Entity> {
|
|||
private Entity createJavaValueObjects(ResultSet resultSet) {
|
||||
|
||||
Entity entity = null;
|
||||
Map<String, BiboDocument> biboDocumentURLToVO = new HashMap<String, BiboDocument>();
|
||||
Map<String, Activity> biboDocumentURLToVO = new HashMap<String, Activity>();
|
||||
Map<String, SubEntity> subentityURLToVO = new HashMap<String, SubEntity>();
|
||||
Map<String, SubEntity> personURLToVO = new HashMap<String, SubEntity>();
|
||||
|
||||
|
@ -98,14 +98,14 @@ public class EntityPublicationCountQueryRunner implements QueryRunner<Entity> {
|
|||
}
|
||||
|
||||
RDFNode documentNode = solution.get(QueryFieldLabels.DOCUMENT_URL);
|
||||
BiboDocument biboDocument;
|
||||
Activity biboDocument;
|
||||
|
||||
if (biboDocumentURLToVO.containsKey(documentNode.toString())) {
|
||||
biboDocument = biboDocumentURLToVO.get(documentNode.toString());
|
||||
|
||||
} else {
|
||||
|
||||
biboDocument = new BiboDocument(documentNode.toString());
|
||||
biboDocument = new Activity(documentNode.toString());
|
||||
biboDocumentURLToVO.put(documentNode.toString(), biboDocument);
|
||||
|
||||
// RDFNode documentLabelNode = solution
|
||||
|
@ -116,15 +116,7 @@ public class EntityPublicationCountQueryRunner implements QueryRunner<Entity> {
|
|||
|
||||
RDFNode publicationDateNode = solution.get(QueryFieldLabels.DOCUMENT_PUBLICATION_DATE);
|
||||
if (publicationDateNode != null) {
|
||||
biboDocument.setPublicationDate(publicationDateNode.toString());
|
||||
}
|
||||
|
||||
/*
|
||||
* This is being used so that date in the data from pre-1.2 ontology can be captured.
|
||||
* */
|
||||
RDFNode publicationYearUsing_1_1_PropertyNode = solution.get(QueryFieldLabels.DOCUMENT_PUBLICATION_YEAR_USING_1_1_PROPERTY);
|
||||
if (publicationYearUsing_1_1_PropertyNode != null) {
|
||||
biboDocument.setPublicationYear(publicationYearUsing_1_1_PropertyNode.toString());
|
||||
biboDocument.setActivityDate(publicationDateNode.toString());
|
||||
}
|
||||
|
||||
}
|
||||
|
@ -149,7 +141,7 @@ public class EntityPublicationCountQueryRunner implements QueryRunner<Entity> {
|
|||
|
||||
entity.addSubEntity(subEntity);
|
||||
|
||||
subEntity.addPublication(biboDocument);
|
||||
subEntity.addActivity(biboDocument);
|
||||
}
|
||||
|
||||
RDFNode personURLNode = solution.get(QueryFieldLabels.PERSON_URL);
|
||||
|
@ -182,29 +174,19 @@ public class EntityPublicationCountQueryRunner implements QueryRunner<Entity> {
|
|||
|
||||
}
|
||||
|
||||
person.addPublication(biboDocument);
|
||||
person.addActivity(biboDocument);
|
||||
|
||||
}
|
||||
|
||||
entity.addPublication(biboDocument);
|
||||
entity.addActivity(biboDocument);
|
||||
}
|
||||
|
||||
/*
|
||||
if (subentityURLToVO.size() != 0) {
|
||||
|
||||
entity.addSubEntitities(subentityURLToVO.values());
|
||||
|
||||
} else if (subentityURLToVO.size() == 0 && personURLToVO.size() != 0) {
|
||||
|
||||
entity.addSubEntitities(personURLToVO.values());
|
||||
|
||||
} else*/ if (subentityURLToVO.size() == 0 && personURLToVO.size() == 0) {
|
||||
if (subentityURLToVO.size() == 0 && personURLToVO.size() == 0) {
|
||||
|
||||
entity = new Entity(this.entityURI, "no-label");
|
||||
|
||||
}
|
||||
|
||||
//TODO: return non-null value
|
||||
// log.debug("Returning entity that contains the following set of subentities: "+entity.getSubEntities().toString());
|
||||
after = System.currentTimeMillis();
|
||||
log.debug("Time taken to iterate through the ResultSet of SELECT queries is in milliseconds: " + (after - before) );
|
||||
|
|
|
@ -280,7 +280,7 @@ public class EntityPublicationCountRequestHandler implements
|
|||
List<List<Integer>> yearPubCount = new ArrayList<List<Integer>>();
|
||||
|
||||
for (Map.Entry<String, Integer> pubEntry : UtilityFunctions
|
||||
.getYearToPublicationCount(subentity.getDocuments())
|
||||
.getYearToActivityCount(subentity.getActivities())
|
||||
.entrySet()) {
|
||||
|
||||
List<Integer> currentPubYear = new ArrayList<Integer>();
|
||||
|
@ -326,7 +326,7 @@ public class EntityPublicationCountRequestHandler implements
|
|||
|
||||
csvFileContent.append(StringEscapeUtils.escapeCsv(subEntity.getIndividualLabel()));
|
||||
csvFileContent.append(", ");
|
||||
csvFileContent.append(subEntity.getDocuments().size());
|
||||
csvFileContent.append(subEntity.getActivities().size());
|
||||
csvFileContent.append(", ");
|
||||
|
||||
StringBuilder joinedTypes = new StringBuilder();
|
||||
|
|
|
@ -12,7 +12,6 @@ import org.apache.commons.logging.LogFactory;
|
|||
import com.hp.hpl.jena.iri.IRI;
|
||||
import com.hp.hpl.jena.iri.IRIFactory;
|
||||
import com.hp.hpl.jena.iri.Violation;
|
||||
import com.hp.hpl.jena.query.DataSource;
|
||||
import com.hp.hpl.jena.query.Query;
|
||||
import com.hp.hpl.jena.query.QueryExecution;
|
||||
import com.hp.hpl.jena.query.QueryExecutionFactory;
|
||||
|
@ -26,7 +25,7 @@ import com.hp.hpl.jena.rdf.model.RDFNode;
|
|||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Grant;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Entity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.SubEntity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
|
@ -107,7 +106,7 @@ public class EntityGrantCountQueryRunner implements QueryRunner<Entity> {
|
|||
private Entity createJavaValueObjects(ResultSet resultSet) {
|
||||
|
||||
Entity entity = null;
|
||||
Map<String, Grant> grantURIToVO = new HashMap<String, Grant>();
|
||||
Map<String, Activity> grantURIToVO = new HashMap<String, Activity>();
|
||||
Map<String, SubEntity> subentityURLToVO = new HashMap<String, SubEntity>();
|
||||
Map<String, SubEntity> personURLToVO = new HashMap<String, SubEntity>();
|
||||
|
||||
|
@ -123,34 +122,36 @@ public class EntityGrantCountQueryRunner implements QueryRunner<Entity> {
|
|||
}
|
||||
|
||||
RDFNode grantNode = solution.get(QueryFieldLabels.GRANT_URL);
|
||||
Grant grant;
|
||||
Activity grant;
|
||||
|
||||
if (grantURIToVO.containsKey(grantNode.toString())) {
|
||||
grant = grantURIToVO.get(grantNode.toString());
|
||||
|
||||
} else {
|
||||
|
||||
grant = new Grant(grantNode.toString());
|
||||
grant = new Activity(grantNode.toString());
|
||||
grantURIToVO.put(grantNode.toString(), grant);
|
||||
|
||||
RDFNode grantLabelNode = solution
|
||||
.get(QueryFieldLabels.GRANT_LABEL);
|
||||
if (grantLabelNode != null) {
|
||||
grant.setGrantLabel(grantLabelNode.toString());
|
||||
grant.setActivityLabel(grantLabelNode.toString());
|
||||
}
|
||||
|
||||
RDFNode grantStartDateNode = solution
|
||||
.get(QueryFieldLabels.ROLE_START_DATE);
|
||||
if (grantStartDateNode != null) {
|
||||
grant.setGrantStartDate(grantStartDateNode.toString());
|
||||
grant.setActivityDate(grantStartDateNode.toString());
|
||||
} else {
|
||||
grantStartDateNode = solution
|
||||
.get(QueryFieldLabels.GRANT_START_DATE);
|
||||
if (grantStartDateNode != null) {
|
||||
grant.setGrantStartDate(grantStartDateNode.toString());
|
||||
grant.setActivityDate(grantStartDateNode.toString());
|
||||
}
|
||||
}
|
||||
|
||||
//TODO: Verify grant end date not needed.
|
||||
/*
|
||||
RDFNode grantEndDateNode = solution
|
||||
.get(QueryFieldLabels.ROLE_END_DATE);
|
||||
if (grantEndDateNode != null) {
|
||||
|
@ -162,6 +163,7 @@ public class EntityGrantCountQueryRunner implements QueryRunner<Entity> {
|
|||
grant.setGrantEndDate(grantEndDateNode.toString());
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
}
|
||||
|
||||
|
@ -183,7 +185,7 @@ public class EntityGrantCountQueryRunner implements QueryRunner<Entity> {
|
|||
subEntity.setIndividualLabel(subEntityLabelNode.toString());
|
||||
}
|
||||
entity.addSubEntity(subEntity);
|
||||
subEntity.addGrant(grant);
|
||||
subEntity.addActivity(grant);
|
||||
}
|
||||
|
||||
RDFNode personURLNode = solution.get(QueryFieldLabels.PERSON_URL);
|
||||
|
@ -216,11 +218,11 @@ public class EntityGrantCountQueryRunner implements QueryRunner<Entity> {
|
|||
|
||||
}
|
||||
|
||||
person.addGrant(grant);
|
||||
person.addActivity(grant);
|
||||
|
||||
}
|
||||
|
||||
entity.addGrant(grant);
|
||||
entity.addActivity(grant);
|
||||
}
|
||||
|
||||
/*if (subentityURLToVO.size() == 0 && personURLToVO.size() != 0) {
|
||||
|
@ -297,7 +299,7 @@ public class EntityGrantCountQueryRunner implements QueryRunner<Entity> {
|
|||
+ SPARQL_QUERY_COMMON_OPTIONAL_BLOCK_FOR_GRANT_DATE_TIME + "}"
|
||||
+ " } ";
|
||||
|
||||
//System.out.println("\n\nEntity Grant Count query is: "+ sparqlQuery);
|
||||
//System.out.println("\n\nEntity Activity Count query is: "+ sparqlQuery);
|
||||
|
||||
// log.debug("\nThe sparql query is :\n" + sparqlQuery);
|
||||
|
||||
|
|
|
@ -278,7 +278,7 @@ public class EntityGrantCountRequestHandler implements
List<List<Integer>> yearGrantCount = new ArrayList<List<Integer>>();
for (Map.Entry<String, Integer> grantEntry : UtilityFunctions
.getYearToGrantCount(subentity.getGrants())
.getYearToActivityCount(subentity.getActivities())
.entrySet()) {
List<Integer> currentGrantYear = new ArrayList<Integer>();
@ -328,7 +328,7 @@ public class EntityGrantCountRequestHandler implements
csvFileContent.append(StringEscapeUtils.escapeCsv(subEntity.getIndividualLabel()));
csvFileContent.append(", ");
csvFileContent.append(subEntity.getGrants().size());
csvFileContent.append(subEntity.getActivities().size());
csvFileContent.append(", ");
StringBuilder joinedTypes = new StringBuilder();
@ -24,7 +24,7 @@ import com.hp.hpl.jena.rdf.model.RDFNode;
|
|||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Grant;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Individual;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
|
||||
|
@ -35,7 +35,7 @@ import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryR
|
|||
* Deepak Konidena
|
||||
*
|
||||
*/
|
||||
public class PersonGrantCountQueryRunner implements QueryRunner<Set<Grant>>{
|
||||
public class PersonGrantCountQueryRunner implements QueryRunner<Set<Activity>>{
|
||||
|
||||
protected static final Syntax SYNTAX = Syntax.syntaxARQ;
|
||||
|
||||
|
@ -90,13 +90,13 @@ public class PersonGrantCountQueryRunner implements QueryRunner<Set<Grant>>{
|
|||
this.log = log;
|
||||
}
|
||||
|
||||
private Set<Grant> createJavaValueObjects(ResultSet resultSet){
|
||||
Set<Grant> PIGrant = new HashSet<Grant>();
|
||||
private Set<Activity> createJavaValueObjects(ResultSet resultSet){
|
||||
Set<Activity> PIGrant = new HashSet<Activity>();
|
||||
|
||||
while(resultSet.hasNext()){
|
||||
QuerySolution solution = resultSet.nextSolution();
|
||||
|
||||
Grant grant = new Grant(solution.get(QueryFieldLabels.GRANT_URL).toString());
|
||||
Activity grant = new Activity(solution.get(QueryFieldLabels.GRANT_URL).toString());
|
||||
|
||||
RDFNode grantLabelNode = solution.get(QueryFieldLabels.GRANT_LABEL);
|
||||
if(grantLabelNode != null){
|
||||
|
@ -105,14 +105,16 @@ public class PersonGrantCountQueryRunner implements QueryRunner<Set<Grant>>{
|
|||
|
||||
RDFNode grantStartDateNode = solution.get(QueryFieldLabels.ROLE_START_DATE);
|
||||
if(grantStartDateNode != null){
|
||||
grant.setGrantStartDate(grantStartDateNode.toString());
|
||||
grant.setActivityDate(grantStartDateNode.toString());
|
||||
}else {
|
||||
grantStartDateNode = solution.get(QueryFieldLabels.GRANT_START_DATE);
|
||||
if(grantStartDateNode != null){
|
||||
grant.setGrantStartDate(grantStartDateNode.toString());
|
||||
grant.setActivityDate(grantStartDateNode.toString());
|
||||
}
|
||||
}
|
||||
|
||||
//TODO: verify grant end date is used or not.
|
||||
/*
|
||||
RDFNode grantEndDateNode = solution.get(QueryFieldLabels.ROLE_END_DATE);
|
||||
if(grantEndDateNode != null){
|
||||
grant.setGrantEndDate(grantEndDateNode.toString());
|
||||
|
@ -122,6 +124,7 @@ public class PersonGrantCountQueryRunner implements QueryRunner<Set<Grant>>{
|
|||
grant.setGrantEndDate(grantEndDateNode.toString());
|
||||
}
|
||||
}
|
||||
*/
|
||||
|
||||
/*
|
||||
* Since we are getting grant count for just one PI at a time we need
|
||||
|
@ -221,7 +224,7 @@ public class PersonGrantCountQueryRunner implements QueryRunner<Set<Grant>>{
|
|||
return sparqlQuery;
|
||||
}
|
||||
|
||||
public Set<Grant> getQueryResult() throws MalformedQueryParametersException{
|
||||
public Set<Activity> getQueryResult() throws MalformedQueryParametersException{
|
||||
|
||||
if(StringUtils.isNotBlank(this.personURI)){
|
||||
|
||||
|
|
|
@ -20,7 +20,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.Tem
|
|||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.DataVisualizationController;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Grant;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Individual;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.SparklineData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
|
@ -50,20 +50,19 @@ public class PersonGrantCountRequestHandler implements VisualizationRequestHandl
|
|||
VitroRequest vitroRequest, Log log, Dataset Dataset)
|
||||
throws MalformedQueryParametersException {
|
||||
|
||||
|
||||
String personURI = vitroRequest
|
||||
.getParameter(VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY);
|
||||
|
||||
QueryRunner<Set<Grant>> queryManager = new PersonGrantCountQueryRunner(personURI, Dataset, log );
|
||||
QueryRunner<Set<Activity>> queryManager = new PersonGrantCountQueryRunner(personURI, Dataset, log );
|
||||
|
||||
Set<Grant> piGrants = queryManager.getQueryResult();
|
||||
Set<Activity> piGrants = queryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of grants. Use the Grant's
|
||||
* parsedGrantYear to populate the data.
|
||||
* */
|
||||
Map<String, Integer> yearToGrantCount =
|
||||
UtilityFunctions.getYearToGrantCount(piGrants);
|
||||
UtilityFunctions.getYearToActivityCount(piGrants);
|
||||
|
||||
Individual investigator = ((PersonGrantCountQueryRunner) queryManager).getPrincipalInvestigator();
|
||||
|
||||
|
@ -88,16 +87,16 @@ public class PersonGrantCountRequestHandler implements VisualizationRequestHandl
|
|||
String visContainer = vitroRequest
|
||||
.getParameter(VisualizationFrameworkConstants.VIS_CONTAINER_KEY);
|
||||
|
||||
QueryRunner<Set<Grant>> queryManager = new PersonGrantCountQueryRunner(personURI, Dataset, log );
|
||||
QueryRunner<Set<Activity>> queryManager = new PersonGrantCountQueryRunner(personURI, Dataset, log );
|
||||
|
||||
Set<Grant> piGrants = queryManager.getQueryResult();
|
||||
Set<Activity> piGrants = queryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of grants. Use the Grant's
|
||||
* parsedGrantYear to populate the data.
|
||||
* */
|
||||
Map<String, Integer> yearToGrantCount =
|
||||
UtilityFunctions.getYearToGrantCount(piGrants);
|
||||
UtilityFunctions.getYearToActivityCount(piGrants);
|
||||
|
||||
|
||||
boolean shouldVIVOrenderVis =
|
||||
|
@ -138,16 +137,16 @@ public class PersonGrantCountRequestHandler implements VisualizationRequestHandl
|
|||
String visContainer = vitroRequest
|
||||
.getParameter(VisualizationFrameworkConstants.VIS_CONTAINER_KEY);
|
||||
|
||||
QueryRunner<Set<Grant>> queryManager = new PersonGrantCountQueryRunner(personURI, Dataset, log );
|
||||
QueryRunner<Set<Activity>> queryManager = new PersonGrantCountQueryRunner(personURI, Dataset, log );
|
||||
|
||||
Set<Grant> piGrants = queryManager.getQueryResult();
|
||||
Set<Activity> piGrants = queryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of grants. Use the Grant's
|
||||
* parsedGrantYear to populate the data.
|
||||
* */
|
||||
Map<String, Integer> yearToGrantCount =
|
||||
UtilityFunctions.getYearToGrantCount(piGrants);
|
||||
UtilityFunctions.getYearToActivityCount(piGrants);
|
||||
|
||||
/*
|
||||
* Computations required to generate HTML for the sparkline & related context.
|
||||
|
@ -192,7 +191,7 @@ public class PersonGrantCountRequestHandler implements VisualizationRequestHandl
|
|||
*/
|
||||
private Map<String, String> prepareDataResponse(
|
||||
Individual investigator,
|
||||
Set<Grant> piGrants,
|
||||
Set<Activity> piGrants,
|
||||
Map<String, Integer> yearToGrantCount) {
|
||||
|
||||
|
||||
|
|
|
@ -2,14 +2,10 @@
|
|||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.personlevel;
|
||||
|
||||
import java.net.MalformedURLException;
|
||||
import java.net.URL;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.servlet.http.HttpServletRequest;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
|
||||
import com.hp.hpl.jena.query.Dataset;
|
||||
|
@ -23,6 +19,7 @@ import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.Visu
|
|||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coauthorship.CoAuthorshipQueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coauthorship.CoAuthorshipVisCodeGenerator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coprincipalinvestigator.CoPIGrantCountConstructQueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coprincipalinvestigator.CoPIGrantCountQueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.coprincipalinvestigator.CoPIVisCodeGenerator;
|
||||
|
@ -30,10 +27,7 @@ import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.persongrantcoun
|
|||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.persongrantcount.PersonGrantCountVisCodeGenerator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.personpubcount.PersonPublicationCountQueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.personpubcount.PersonPublicationCountVisCodeGenerator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.BiboDocument;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoAuthorshipData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPIData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Grant;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.SparklineData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
|
@ -89,24 +83,24 @@ public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
|||
CoPIGrantCountConstructQueryRunner constructQueryRunner = new CoPIGrantCountConstructQueryRunner(egoURI, Dataset, log);
|
||||
Model constructedModel = constructQueryRunner.getConstructedModel();
|
||||
|
||||
QueryRunner<CoPIData> coPIQueryManager = new CoPIGrantCountQueryRunner(egoURI, constructedModel, log);
|
||||
QueryRunner<CollaborationData> coPIQueryManager = new CoPIGrantCountQueryRunner(egoURI, constructedModel, log);
|
||||
|
||||
QueryRunner<Set<Grant>> grantQueryManager = new PersonGrantCountQueryRunner(egoURI, Dataset, log);
|
||||
QueryRunner<Set<Activity>> grantQueryManager = new PersonGrantCountQueryRunner(egoURI, Dataset, log);
|
||||
|
||||
CoPIData coPIData = coPIQueryManager.getQueryResult();
|
||||
CollaborationData coPIData = coPIQueryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* grants over time sparkline
|
||||
*/
|
||||
|
||||
Set<Grant> piGrants = grantQueryManager.getQueryResult();
|
||||
Set<Activity> piGrants = grantQueryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of grants. Use the Grant's
|
||||
* parsedGrantYear to populate the data.
|
||||
* */
|
||||
Map<String, Integer> yearToGrantCount =
|
||||
UtilityFunctions.getYearToGrantCount(piGrants);
|
||||
UtilityFunctions.getYearToActivityCount(piGrants);
|
||||
|
||||
PersonGrantCountVisCodeGenerator personGrantCountVisCodeGenerator =
|
||||
new PersonGrantCountVisCodeGenerator(
|
||||
|
@ -128,7 +122,7 @@ public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
|||
egoURI,
|
||||
VisualizationFrameworkConstants.FULL_SPARKLINE_VIS_MODE,
|
||||
UNIQUE_COPIS_SPARKLINE_VIS_CONTAINER_ID,
|
||||
UtilityFunctions.getGrantYearToCoPI(coPIData),
|
||||
UtilityFunctions.getActivityYearToCollaborators(coPIData),
|
||||
log);
|
||||
|
||||
SparklineData uniqueCopisSparklineVO = uniqueCopisVisCodeGenerator
|
||||
|
@ -145,25 +139,25 @@ public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
|||
|
||||
} else {
|
||||
|
||||
QueryRunner<CoAuthorshipData> coAuthorshipQueryManager = new CoAuthorshipQueryRunner(egoURI, Dataset, log);
|
||||
QueryRunner<CollaborationData> coAuthorshipQueryManager = new CoAuthorshipQueryRunner(egoURI, Dataset, log);
|
||||
|
||||
QueryRunner<Set<BiboDocument>> publicationQueryManager = new PersonPublicationCountQueryRunner(egoURI, Dataset, log);
|
||||
QueryRunner<Set<Activity>> publicationQueryManager = new PersonPublicationCountQueryRunner(egoURI, Dataset, log);
|
||||
|
||||
CoAuthorshipData coAuthorshipData = coAuthorshipQueryManager.getQueryResult();
|
||||
CollaborationData coAuthorshipData = coAuthorshipQueryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* When the front-end for the person level vis has to be displayed we render couple of
|
||||
* sparklines. This will prepare all the data for the sparklines & other requested
|
||||
* files.
|
||||
* */
|
||||
Set<BiboDocument> authorDocuments = publicationQueryManager.getQueryResult();
|
||||
Set<Activity> authorDocuments = publicationQueryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of publications. Use the BiboDocument's
|
||||
* parsedPublicationYear to populate the data.
|
||||
* */
|
||||
Map<String, Integer> yearToPublicationCount =
|
||||
UtilityFunctions.getYearToPublicationCount(authorDocuments);
|
||||
UtilityFunctions.getYearToActivityCount(authorDocuments);
|
||||
|
||||
/*
|
||||
* Computations required to generate HTML for the sparklines & related context.
|
||||
|
@ -184,7 +178,7 @@ public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
|||
egoURI,
|
||||
VisualizationFrameworkConstants.FULL_SPARKLINE_VIS_MODE,
|
||||
UNIQUE_COAUTHORS_SPARKLINE_VIS_CONTAINER_ID,
|
||||
UtilityFunctions.getPublicationYearToCoAuthors(coAuthorshipData),
|
||||
UtilityFunctions.getActivityYearToCollaborators(coAuthorshipData),
|
||||
log);
|
||||
|
||||
SparklineData uniqueCoauthorsSparklineVO = uniqueCoauthorsVisCodeGenerator
|
||||
|
@ -205,7 +199,7 @@ public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
|||
String egoURI,
|
||||
SparklineData egoPubSparklineVO,
|
||||
SparklineData uniqueCoauthorsSparklineVO,
|
||||
CoAuthorshipData coAuthorshipVO,
|
||||
CollaborationData coAuthorshipVO,
|
||||
VitroRequest vitroRequest) {
|
||||
|
||||
Map<String, Object> body = new HashMap<String, Object>();
|
||||
|
@ -217,13 +211,13 @@ public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
|||
|
||||
String title = "";
|
||||
|
||||
if (coAuthorshipVO.getNodes() != null && coAuthorshipVO.getNodes().size() > 0) {
|
||||
body.put("numOfAuthors", coAuthorshipVO.getNodes().size());
|
||||
title = coAuthorshipVO.getEgoNode().getNodeName() + " - ";
|
||||
if (coAuthorshipVO.getCollaborators() != null && coAuthorshipVO.getCollaborators().size() > 0) {
|
||||
body.put("numOfAuthors", coAuthorshipVO.getCollaborators().size());
|
||||
title = coAuthorshipVO.getEgoCollaborator().getCollaboratorName() + " - ";
|
||||
}
|
||||
|
||||
if (coAuthorshipVO.getEdges() != null && coAuthorshipVO.getEdges().size() > 0) {
|
||||
body.put("numOfCoAuthorShips", coAuthorshipVO.getEdges().size());
|
||||
if (coAuthorshipVO.getCollaborations() != null && coAuthorshipVO.getCollaborations().size() > 0) {
|
||||
body.put("numOfCoAuthorShips", coAuthorshipVO.getCollaborations().size());
|
||||
}
|
||||
|
||||
body.put("egoPubSparklineVO", egoPubSparklineVO);
|
||||
|
@ -240,7 +234,7 @@ public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
|||
String egoURI,
|
||||
SparklineData egoGrantSparklineVO,
|
||||
SparklineData uniqueCopisSparklineVO,
|
||||
CoPIData coPIVO,
|
||||
CollaborationData coPIVO,
|
||||
VitroRequest vitroRequest) {
|
||||
|
||||
Map<String, Object> body = new HashMap<String, Object>();
|
||||
|
@ -251,13 +245,13 @@ public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
|||
|
||||
String title = "";
|
||||
|
||||
if (coPIVO.getNodes() != null && coPIVO.getNodes().size() > 0) {
|
||||
body.put("numOfInvestigators", coPIVO.getNodes().size());
|
||||
title = coPIVO.getEgoNode().getNodeName() + " - ";
|
||||
if (coPIVO.getCollaborators() != null && coPIVO.getCollaborators().size() > 0) {
|
||||
body.put("numOfInvestigators", coPIVO.getCollaborators().size());
|
||||
title = coPIVO.getEgoCollaborator().getCollaboratorName() + " - ";
|
||||
}
|
||||
|
||||
if (coPIVO.getEdges() != null && coPIVO.getEdges().size() > 0) {
|
||||
body.put("numOfCoInvestigations", coPIVO.getEdges().size());
|
||||
if (coPIVO.getCollaborations() != null && coPIVO.getCollaborations().size() > 0) {
|
||||
body.put("numOfCoInvestigations", coPIVO.getCollaborations().size());
|
||||
}
|
||||
|
||||
String standaloneTemplate = "coPIPersonLevel.ftl";
|
||||
|
@ -271,19 +265,5 @@ public class PersonLevelRequestHandler implements VisualizationRequestHandler {
|
|||
return new TemplateResponseValues(standaloneTemplate, body);
|
||||
|
||||
}
|
||||
|
||||
private String getCompleteURL(HttpServletRequest request) throws MalformedURLException {
|
||||
|
||||
String file = request.getRequestURI();
|
||||
// System.out.println("\ngetRequestURI() --> "+ file + "\ngetQueryString() --> "+request.getQueryString()+ "\ngetScheme() --> "+ request.getScheme());
|
||||
// System.out.println("\ngetServerName() --> "+ request.getServerName() + "\ngetServerPort() --> "+request.getServerPort());
|
||||
|
||||
URL reconstructedURL = new URL(request.getScheme(), request.getServerName(), request.getServerPort(), file);
|
||||
|
||||
// System.out.println("\nReconstructed URL is --> " + reconstructedURL);
|
||||
|
||||
return reconstructedURL.toString();
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
|
|
|
@ -24,7 +24,7 @@ import com.hp.hpl.jena.rdf.model.RDFNode;
|
|||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.QueryFieldLabels;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.BiboDocument;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Individual;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
|
||||
|
@ -36,7 +36,7 @@ import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryR
|
|||
*
|
||||
* @author cdtank
|
||||
*/
|
||||
public class PersonPublicationCountQueryRunner implements QueryRunner<Set<BiboDocument>> {
|
||||
public class PersonPublicationCountQueryRunner implements QueryRunner<Set<Activity>> {
|
||||
|
||||
protected static final Syntax SYNTAX = Syntax.syntaxARQ;
|
||||
|
||||
|
@ -70,19 +70,19 @@ public class PersonPublicationCountQueryRunner implements QueryRunner<Set<BiboDo
|
|||
|
||||
}
|
||||
|
||||
private Set<BiboDocument> createJavaValueObjects(ResultSet resultSet) {
|
||||
Set<BiboDocument> authorDocuments = new HashSet<BiboDocument>();
|
||||
private Set<Activity> createJavaValueObjects(ResultSet resultSet) {
|
||||
Set<Activity> authorDocuments = new HashSet<Activity>();
|
||||
|
||||
while (resultSet.hasNext()) {
|
||||
QuerySolution solution = resultSet.nextSolution();
|
||||
|
||||
BiboDocument biboDocument = new BiboDocument(
|
||||
Activity biboDocument = new Activity(
|
||||
solution.get(QueryFieldLabels.DOCUMENT_URL)
|
||||
.toString());
|
||||
|
||||
RDFNode publicationDateNode = solution.get(QueryFieldLabels.DOCUMENT_PUBLICATION_DATE);
|
||||
if (publicationDateNode != null) {
|
||||
biboDocument.setPublicationDate(publicationDateNode.toString());
|
||||
biboDocument.setActivityDate(publicationDateNode.toString());
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -132,7 +132,7 @@ public class PersonPublicationCountQueryRunner implements QueryRunner<Set<BiboDo
|
|||
return sparqlQuery;
|
||||
}
|
||||
|
||||
public Set<BiboDocument> getQueryResult()
|
||||
public Set<Activity> getQueryResult()
|
||||
throws MalformedQueryParametersException {
|
||||
|
||||
if (StringUtils.isNotBlank(this.personURI)) {
|
||||
|
|
|
@ -2,24 +2,16 @@
|
|||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.personpubcount;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Map.Entry;
|
||||
import java.util.Set;
|
||||
|
||||
import javax.servlet.ServletOutputStream;
|
||||
import javax.servlet.http.HttpServletResponse;
|
||||
|
||||
import org.apache.commons.lang.StringEscapeUtils;
|
||||
import org.apache.commons.lang.StringUtils;
|
||||
import org.apache.commons.logging.Log;
|
||||
|
||||
import com.hp.hpl.jena.query.Dataset;
|
||||
import com.itextpdf.text.Document;
|
||||
import com.itextpdf.text.DocumentException;
|
||||
import com.itextpdf.text.pdf.PdfWriter;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.beans.Portal;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.VitroRequest;
|
||||
|
@ -28,10 +20,9 @@ import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.Tem
|
|||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.DataVisualizationController;
|
||||
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.exceptions.MalformedQueryParametersException;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.BiboDocument;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Individual;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.SparklineData;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.PDFDocument;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.QueryRunner;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.VisualizationRequestHandler;
|
||||
|
@ -68,19 +59,19 @@ VisualizationRequestHandler {
|
|||
.getParameter(
|
||||
VisualizationFrameworkConstants.VIS_CONTAINER_KEY);
|
||||
|
||||
QueryRunner<Set<BiboDocument>> queryManager = new PersonPublicationCountQueryRunner(
|
||||
QueryRunner<Set<Activity>> queryManager = new PersonPublicationCountQueryRunner(
|
||||
personURI,
|
||||
Dataset,
|
||||
log);
|
||||
|
||||
Set<BiboDocument> authorDocuments = queryManager.getQueryResult();
|
||||
Set<Activity> authorDocuments = queryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of publications. Use the
|
||||
* BiboDocument's parsedPublicationYear to populate the data.
|
||||
*/
|
||||
Map<String, Integer> yearToPublicationCount =
|
||||
UtilityFunctions.getYearToPublicationCount(authorDocuments);
|
||||
UtilityFunctions.getYearToActivityCount(authorDocuments);
|
||||
|
||||
boolean shouldVIVOrenderVis =
|
||||
yearToPublicationCount.size() > 0 ? true : false;
|
||||
|
@ -111,19 +102,19 @@ VisualizationRequestHandler {
|
|||
String personURI = vitroRequest
|
||||
.getParameter(VisualizationFrameworkConstants.INDIVIDUAL_URI_KEY);
|
||||
|
||||
QueryRunner<Set<BiboDocument>> queryManager = new PersonPublicationCountQueryRunner(
|
||||
QueryRunner<Set<Activity>> queryManager = new PersonPublicationCountQueryRunner(
|
||||
personURI,
|
||||
Dataset,
|
||||
log);
|
||||
|
||||
Set<BiboDocument> authorDocuments = queryManager.getQueryResult();
|
||||
Set<Activity> authorDocuments = queryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of publications. Use the
|
||||
* BiboDocument's parsedPublicationYear to populate the data.
|
||||
*/
|
||||
Map<String, Integer> yearToPublicationCount =
|
||||
UtilityFunctions.getYearToPublicationCount(authorDocuments);
|
||||
UtilityFunctions.getYearToActivityCount(authorDocuments);
|
||||
|
||||
Individual author = ((PersonPublicationCountQueryRunner) queryManager).getAuthor();
|
||||
|
||||
|
@ -147,19 +138,19 @@ VisualizationRequestHandler {
|
|||
String visContainer = vitroRequest.getParameter(
|
||||
VisualizationFrameworkConstants.VIS_CONTAINER_KEY);
|
||||
|
||||
QueryRunner<Set<BiboDocument>> queryManager = new PersonPublicationCountQueryRunner(
|
||||
QueryRunner<Set<Activity>> queryManager = new PersonPublicationCountQueryRunner(
|
||||
personURI,
|
||||
Dataset,
|
||||
log);
|
||||
|
||||
Set<BiboDocument> authorDocuments = queryManager.getQueryResult();
|
||||
Set<Activity> authorDocuments = queryManager.getQueryResult();
|
||||
|
||||
/*
|
||||
* Create a map from the year to number of publications. Use the
|
||||
* BiboDocument's parsedPublicationYear to populate the data.
|
||||
*/
|
||||
Map<String, Integer> yearToPublicationCount =
|
||||
UtilityFunctions.getYearToPublicationCount(authorDocuments);
|
||||
UtilityFunctions.getYearToActivityCount(authorDocuments);
|
||||
|
||||
/*
|
||||
* Computations required to generate HTML for the sparkline & related
|
||||
|
@ -208,7 +199,7 @@ VisualizationRequestHandler {
|
|||
* @return
|
||||
*/
|
||||
private Map<String, String> prepareDataResponse(Individual author,
|
||||
Set<BiboDocument> authorDocuments,
|
||||
Set<Activity> authorDocuments,
|
||||
Map<String, Integer> yearToPublicationCount) {
|
||||
|
||||
String authorName = null;
|
||||
|
@ -281,7 +272,6 @@ VisualizationRequestHandler {
|
|||
|
||||
Portal portal = vreq.getPortal();
|
||||
|
||||
// String dynamicTemplate = "/visualization/publication/personPublicationCountDynamicActivator.ftl";
|
||||
String dynamicTemplate = "personPublicationCountDynamicActivator.ftl";
|
||||
|
||||
Map<String, Object> body = new HashMap<String, Object>();
|
||||
|
@ -292,64 +282,4 @@ VisualizationRequestHandler {
|
|||
return new TemplateResponseValues(dynamicTemplate, body);
|
||||
|
||||
}
|
||||
|
||||
private void preparePDFResponse(Individual author,
|
||||
Set<BiboDocument> authorDocuments,
|
||||
Map<String, Integer> yearToPublicationCount,
|
||||
HttpServletResponse response) {
|
||||
|
||||
String authorName = null;
|
||||
|
||||
// To protect against cases where there are no author documents
|
||||
// associated with the
|
||||
// / individual.
|
||||
if (authorDocuments.size() > 0) {
|
||||
authorName = author.getIndividualLabel();
|
||||
}
|
||||
|
||||
// To make sure that null/empty records for author names do not cause
|
||||
// any mischief.
|
||||
if (StringUtils.isBlank(authorName)) {
|
||||
authorName = "no-author";
|
||||
}
|
||||
|
||||
String outputFileName = UtilityFunctions.slugify(authorName)
|
||||
+ "_report" + ".pdf";
|
||||
|
||||
response.setContentType("application/pdf");
|
||||
response.setHeader("Content-Disposition", "attachment;filename="
|
||||
+ outputFileName);
|
||||
|
||||
ServletOutputStream responseOutputStream;
|
||||
try {
|
||||
responseOutputStream = response.getOutputStream();
|
||||
|
||||
Document document = new Document();
|
||||
ByteArrayOutputStream baos = new ByteArrayOutputStream();
|
||||
PdfWriter pdfWriter = PdfWriter.getInstance(document, baos);
|
||||
document.open();
|
||||
|
||||
PDFDocument pdfDocument = new PDFDocument(authorName,
|
||||
yearToPublicationCount, document, pdfWriter);
|
||||
|
||||
document.close();
|
||||
|
||||
// setting some response headers & content type
|
||||
response.setHeader("Expires", "0");
|
||||
response.setHeader("Cache-Control",
|
||||
"must-revalidate, post-check=0, pre-check=0");
|
||||
response.setHeader("Pragma", "public");
|
||||
response.setContentLength(baos.size());
|
||||
// write ByteArrayOutputStream to the ServletOutputStream
|
||||
baos.writeTo(responseOutputStream);
|
||||
responseOutputStream.flush();
|
||||
responseOutputStream.close();
|
||||
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
} catch (DocumentException e) {
|
||||
e.printStackTrace();
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -0,0 +1,59 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;

/**
 * This interface will make sure that VOs conveying any person's academic output like publications,
 * grants etc implement certain methods which will be used to generalize methods which are just
 * interested in certain common properties like what was the year in which the activity was
 * published (or started).
 * @author cdtank
 */
public class Activity extends Individual{

    private String activityDate;

    public Activity(String activityURI) {
        super(activityURI);
    }

    public String getActivityURI() {
        return this.getIndividualURI();
    }

    public String getActivityLabel() {
        return this.getIndividualLabel();
    }

    public void setActivityLabel(String activityLabel) {
        this.setIndividualLabel(activityLabel);
    }

    /**
     * This method will be called to get the final/inferred year for the publication.
     * The 2 choices, in order, are,
     * 1. parsed year from xs:DateTime object saved in core:dateTimeValue
     * 2. Default Publication Year
     * @return
     */
    public String getParsedActivityYear() {

        return UtilityFunctions.getValidYearFromCoreDateTimeString(activityDate,
                VOConstants.DEFAULT_ACTIVITY_YEAR);
    }

    /**
     * This method should be used to get the raw date & not the parsed publication year.
     * For the later use getParsedPublicationYear.
     * @return
     */
    public String getActivityDate() {
        return activityDate;
    }

    public void setActivityDate(String activityDate) {
        this.activityDate = activityDate;
    }
}
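The new Activity VO above is the single date-carrying type that this commit uses in place of the separate publication and grant VOs. A small usage sketch follows; the URI, label, and xsd:dateTime literal are invented sample values, and only methods defined in the class above are used.

// Sample values below are invented for illustration only.
Activity grant = new Activity("http://vivo.example.edu/individual/grant123");
grant.setActivityLabel("Sample Grant");
grant.setActivityDate("2008-01-01T00:00:00");

// Falls back to VOConstants.DEFAULT_ACTIVITY_YEAR ("Unknown") when no parseable date is present.
String parsedYear = grant.getParsedActivityYear();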
@ -1,134 +0,0 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;
|
||||
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
import org.joda.time.DateTime;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
|
||||
/**
|
||||
* @author cdtank
|
||||
*
|
||||
*/
|
||||
public class BiboDocument extends Individual {
|
||||
|
||||
private String publicationYear;
|
||||
private String publicationDate;
|
||||
private String parsedPublicationYear = VOConstants.DEFAULT_PUBLICATION_YEAR;
|
||||
|
||||
public BiboDocument(String documentURL) {
|
||||
super(documentURL);
|
||||
}
|
||||
|
||||
public String getDocumentURL() {
|
||||
return this.getIndividualURI();
|
||||
}
|
||||
|
||||
private String parsePublicationYear(String documentBlurb) {
|
||||
|
||||
/*
|
||||
* This pattern will match all group of numbers which have only 4 digits
|
||||
* delimited by the word boundary.
|
||||
* */
|
||||
String pattern = "(?<!-)\\b\\d{4}\\b(?=[^-])";
|
||||
|
||||
Pattern yearPattern = Pattern.compile(pattern);
|
||||
String publishedYear = VOConstants.DEFAULT_PUBLICATION_YEAR;
|
||||
|
||||
Matcher yearMatcher = yearPattern.matcher(documentBlurb);
|
||||
|
||||
while (yearMatcher.find()) {
|
||||
|
||||
String yearCandidate = yearMatcher.group();
|
||||
|
||||
Integer candidateYearInteger = Integer.valueOf(yearCandidate);
|
||||
|
||||
/*
|
||||
* Published year has to be equal or less than the current year
|
||||
* and more than a minimum default year.
|
||||
* */
|
||||
if (candidateYearInteger <= VOConstants.CURRENT_YEAR
|
||||
&& candidateYearInteger >= VOConstants.MINIMUM_PUBLICATION_YEAR) {
|
||||
publishedYear = candidateYearInteger.toString();
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return publishedYear;
|
||||
}
|
||||
|
||||
/**
|
||||
* This method will be called to get the final/inferred year for the publication.
|
||||
* The 3 choices, in order, are,
|
||||
* 1. parsed year from xs:DateTime object saved in core:dateTimeValue
|
||||
* 2. core:year which was property used in vivo 1.1 ontology
|
||||
* 3. Default Publication Year
|
||||
* @return
|
||||
*/
|
||||
public String getParsedPublicationYear() {
|
||||
|
||||
if (publicationDate != null) {
|
||||
|
||||
DateTime validParsedDateTimeObject = UtilityFunctions.getValidParsedDateTimeObject(publicationDate);
|
||||
|
||||
if (validParsedDateTimeObject != null) {
|
||||
return String.valueOf(validParsedDateTimeObject.getYear());
|
||||
} else {
|
||||
return publicationYear != null ? publicationYear : VOConstants.DEFAULT_PUBLICATION_YEAR;
|
||||
}
|
||||
|
||||
} else {
|
||||
|
||||
/*
|
||||
* If all else fails return default unknown year identifier if publicationYear is
|
||||
* not mentioned.
|
||||
* */
|
||||
return publicationYear != null ? publicationYear : VOConstants.DEFAULT_PUBLICATION_YEAR;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* This publicationYear value is directly from the data supported by the ontology.
|
||||
* If this is empty only then use the parsedPublicationYear.
|
||||
*
|
||||
* @Deprecated Use getParsedPublicationYear() instead.
|
||||
* */
|
||||
@Deprecated
|
||||
public String getPublicationYear() {
|
||||
if (publicationYear != null && isValidPublicationYear(publicationYear)) {
|
||||
return publicationYear;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public void setPublicationYear(String publicationYear) {
|
||||
this.publicationYear = publicationYear;
|
||||
}
|
||||
|
||||
public String getPublicationDate() {
|
||||
return publicationDate;
|
||||
}
|
||||
|
||||
public void setPublicationDate(String publicationDate) {
|
||||
this.publicationDate = publicationDate;
|
||||
}
|
||||
|
||||
private boolean isValidPublicationYear(String testPublicationYear) {
|
||||
|
||||
if (testPublicationYear.length() != 0
|
||||
&& testPublicationYear.trim().length() == VOConstants.NUM_CHARS_IN_YEAR_FORMAT
|
||||
&& testPublicationYear.matches("\\d+")
|
||||
&& Integer.parseInt(testPublicationYear) >= VOConstants.MINIMUM_PUBLICATION_YEAR) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
|
@ -1,42 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;

import java.util.Set;
import java.util.HashSet;
/**
 * @author bkoniden
 * Deepak Konidena
 *
 */
public class Child extends Individual {

    Set<BiboDocument> documents = new HashSet<BiboDocument>();

    public Child(String individualURI) {
        super(individualURI);
    }

    public Set<BiboDocument> getDocuments() {
        return documents;
    }

    public Child(String individualURI, String individualLabel) {
        super(individualURI, individualLabel);
    }

    @Override
    public boolean equals(Object other){
        boolean result = false;
        if (other instanceof Child){
            Child person = (Child) other;
            result = (this.getIndividualLabel().equals(person.getIndividualLabel())
                    && this.getIndividualURI().equals(person.getIndividualURI()));
        }
        return result;
    }

    @Override
    public int hashCode(){
        return(41*(getIndividualLabel().hashCode() + 41*(getIndividualURI().hashCode())));
    }
}
@ -1,144 +0,0 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.HashSet;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UniqueIDGenerator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
|
||||
/**
|
||||
* This stores edge information for Co-PI vis.
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
public class CoPIEdge {
|
||||
|
||||
private int edgeID;
|
||||
private Map<String, Integer> yearToGrantCount;
|
||||
private Set<Grant> collaboratorGrants = new HashSet<Grant>();
|
||||
private CoPINode sourceNode;
|
||||
private CoPINode targetNode;
|
||||
|
||||
public CoPIEdge(CoPINode sourceNode, CoPINode targetNode, Grant seedCoPIedGrant, UniqueIDGenerator uniqueIDGenerator){
|
||||
edgeID = uniqueIDGenerator.getNextNumericID();
|
||||
this.sourceNode = sourceNode;
|
||||
this.targetNode = targetNode;
|
||||
this.collaboratorGrants.add(seedCoPIedGrant);
|
||||
}
|
||||
|
||||
public int getEdgeID() {
|
||||
return edgeID;
|
||||
}
|
||||
public Set<Grant> getCollaboratorGrants() {
|
||||
return collaboratorGrants;
|
||||
}
|
||||
public CoPINode getSourceNode() {
|
||||
return sourceNode;
|
||||
}
|
||||
public CoPINode getTargetNode() {
|
||||
return targetNode;
|
||||
}
|
||||
|
||||
public int getNumberOfCoInvestigatedGrants(){
|
||||
return collaboratorGrants.size();
|
||||
}
|
||||
|
||||
public void addCollaboratorGrant(Grant grant){
|
||||
this.collaboratorGrants.add(grant);
|
||||
}
|
||||
|
||||
/*
|
||||
* getEarliest, Latest & Unknown Grant YearCount should only be used after
|
||||
* the parsing of the entire sparql is done. Else it will give results based on
|
||||
* incomplete dataset.
|
||||
* */
|
||||
@SuppressWarnings("serial")
|
||||
public Map<String, Integer> getEarliestCollaborationYearCount() {
|
||||
if (yearToGrantCount == null) {
|
||||
yearToGrantCount = UtilityFunctions.getYearToGrantCount(collaboratorGrants);
|
||||
}
|
||||
|
||||
/*
|
||||
* We do not want to consider the default grant year when we are checking
|
||||
* for the min or max grant year.
|
||||
* */
|
||||
Set<String> yearsToBeConsidered = new HashSet<String>(yearToGrantCount.keySet());
|
||||
yearsToBeConsidered.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);
|
||||
|
||||
/*
|
||||
* There can be a case when the only publication the author has no attached year to it
|
||||
* so essentially an "Unknown". In that case Collections.max or min will throw an
|
||||
* NoSuchElementException.
|
||||
*
|
||||
* If there is no maximum year available then we should imply so by returning a "null".
|
||||
* */
|
||||
if (yearsToBeConsidered.size() > 0) {
|
||||
final String earliestYear = Collections.min(yearsToBeConsidered);
|
||||
final Integer earliestYearGrantCount = yearToGrantCount.get(earliestYear);
|
||||
|
||||
return new HashMap<String, Integer>() { {
|
||||
put(earliestYear, earliestYearGrantCount);
|
||||
} };
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("serial")
|
||||
public Map<String, Integer> getLatestCollaborationYearCount() {
|
||||
if (yearToGrantCount == null) {
|
||||
yearToGrantCount = UtilityFunctions.getYearToGrantCount(collaboratorGrants);
|
||||
}
|
||||
|
||||
/*
|
||||
* We do not want to consider the default grant year when we are checking
|
||||
* for the min or max grant year.
|
||||
* */
|
||||
Set<String> yearsToBeConsidered = new HashSet<String>(yearToGrantCount.keySet());
|
||||
yearsToBeConsidered.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);
|
||||
|
||||
/*
|
||||
* There can be a case when the only grant the PI has no attached year to it
|
||||
* so essentially an "Unknown". In that case Collections.max or min will throw an
|
||||
* NoSuchElementException.
|
||||
*
|
||||
* If there is no maximum year available then we should imply so by returning a "null".
|
||||
* */
|
||||
if (yearsToBeConsidered.size() > 0) {
|
||||
final String latestYear = Collections.max(yearsToBeConsidered);
|
||||
final Integer latestYearGrantCount = yearToGrantCount.get(latestYear);
|
||||
|
||||
return new HashMap<String, Integer>() { {
|
||||
put(latestYear, latestYearGrantCount);
|
||||
} };
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public Integer getUnknownCollaborationYearCount() {
|
||||
if (yearToGrantCount == null) {
|
||||
yearToGrantCount = UtilityFunctions.getYearToGrantCount(collaboratorGrants);
|
||||
}
|
||||
|
||||
Integer unknownYearGrantCount = yearToGrantCount
|
||||
.get(VOConstants.DEFAULT_PUBLICATION_YEAR);
|
||||
|
||||
/*
|
||||
* If there is no unknown year available then we should imply so by returning a "null".
|
||||
* */
|
||||
if (unknownYearGrantCount != null) {
|
||||
return unknownYearGrantCount;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@ -1,154 +0,0 @@
|
|||
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
|
||||
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;
|
||||
|
||||
import java.util.Collections;
|
||||
import java.util.HashMap;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.HashSet;
|
||||
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UniqueIDGenerator;
|
||||
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;
|
||||
|
||||
/**
|
||||
* CoPINode is the node in a CoPI vis.
|
||||
* @author bkoniden
|
||||
* Deepak Konidena
|
||||
*/
|
||||
public class CoPINode extends Individual {
|
||||
|
||||
private int nodeID;
|
||||
private Map<String, Integer> yearToGrantCount;
|
||||
|
||||
private Set<Grant> pIGrants = new HashSet<Grant>();
|
||||
|
||||
public CoPINode(String nodeURI, UniqueIDGenerator uniqueIDGenerator){
|
||||
super(nodeURI);
|
||||
nodeID = uniqueIDGenerator.getNextNumericID();
|
||||
}
|
||||
|
||||
public int getNodeID(){
|
||||
return nodeID;
|
||||
}
|
||||
|
||||
public String getNodeURI(){
|
||||
return this.getIndividualURI();
|
||||
}
|
||||
|
||||
public String getNodeName(){
|
||||
return this.getIndividualLabel();
|
||||
}
|
||||
|
||||
public void setNodeName(String nodeName) {
|
||||
this.setIndividualLabel(nodeName);
|
||||
}
|
||||
|
||||
public Set<Grant> getInvestigatedGrants(){
|
||||
return pIGrants;
|
||||
}
|
||||
|
||||
public int getNumberOfInvestigatedGrants(){
|
||||
return pIGrants.size();
|
||||
}
|
||||
|
||||
public void addGrant(Grant grant){
|
||||
this.pIGrants.add(grant);
|
||||
}
|
||||
|
||||
public Map<String, Integer> getYearToGrantCount(){
|
||||
if(yearToGrantCount == null){
|
||||
yearToGrantCount = UtilityFunctions.getYearToGrantCount(pIGrants);
|
||||
}
|
||||
return yearToGrantCount;
|
||||
}
|
||||
|
||||
/*
|
||||
* getEarliest, Latest & Unknown Grant YearCount should only be used after
|
||||
* the parsing of the entire sparql is done. Else it will give results based on
|
||||
* incomplete dataset.
|
||||
* */
|
||||
@SuppressWarnings("serial")
|
||||
public Map<String, Integer> getEarliestGrantYearCount() {
|
||||
if (yearToGrantCount == null) {
|
||||
yearToGrantCount = UtilityFunctions.getYearToGrantCount(pIGrants);
|
||||
}
|
||||
|
||||
/*
|
||||
* We do not want to consider the default grant year when we are checking
|
||||
* for the min or max grant year.
|
||||
* */
|
||||
Set<String> yearsToBeConsidered = new HashSet<String>(yearToGrantCount.keySet());
|
||||
yearsToBeConsidered.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);
|
||||
|
||||
/*
|
||||
* There can be a case when the only publication the author has no attached year to it
|
||||
* so essentially an "Unknown". In that case Collections.max or min will throw an
|
||||
* NoSuchElementException.
|
||||
*
|
||||
* If there is no maximum year available then we should imply so by returning a "null".
|
||||
* */
|
||||
if (yearsToBeConsidered.size() > 0) {
|
||||
final String earliestYear = Collections.min(yearsToBeConsidered);
|
||||
final Integer earliestYearGrantCount = yearToGrantCount.get(earliestYear);
|
||||
|
||||
return new HashMap<String, Integer>() { {
|
||||
put(earliestYear, earliestYearGrantCount);
|
||||
} };
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@SuppressWarnings("serial")
|
||||
public Map<String, Integer> getLatestGrantYearCount() {
|
||||
if (yearToGrantCount == null) {
|
||||
yearToGrantCount = UtilityFunctions.getYearToGrantCount(pIGrants);
|
||||
}
|
||||
|
||||
/*
|
||||
* We do not want to consider the default grant year when we are checking
|
||||
* for the min or max grant year.
|
||||
* */
|
||||
Set<String> yearsToBeConsidered = new HashSet<String>(yearToGrantCount.keySet());
|
||||
yearsToBeConsidered.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);
|
||||
|
||||
/*
|
||||
* There can be a case when the only grant the PI has no attached year to it
|
||||
* so essentially an "Unknown". In that case Collections.max or min will throw an
|
||||
* NoSuchElementException.
|
||||
*
|
||||
* If there is no maximum year available then we should imply so by returning a "null".
|
||||
* */
|
||||
if (yearsToBeConsidered.size() > 0) {
|
||||
final String latestYear = Collections.max(yearsToBeConsidered);
|
||||
final Integer latestYearGrantCount = yearToGrantCount.get(latestYear);
|
||||
|
||||
return new HashMap<String, Integer>() { {
|
||||
put(latestYear, latestYearGrantCount);
|
||||
} };
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
public Integer getUnknownGrantYearCount() {
|
||||
if (yearToGrantCount == null) {
|
||||
yearToGrantCount = UtilityFunctions.getYearToGrantCount(pIGrants);
|
||||
}
|
||||
|
||||
Integer unknownYearGrantCount = yearToGrantCount
|
||||
.get(VOConstants.DEFAULT_PUBLICATION_YEAR);
|
||||
|
||||
/*
|
||||
* If there is no unknown year available then we should imply so by returning a "null".
|
||||
* */
|
||||
if (unknownYearGrantCount != null) {
|
||||
return unknownYearGrantCount;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
|
@ -0,0 +1,150 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UniqueIDGenerator;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;

/**
 *
 * This is stores collaboration information mainly for ego-centric visualizations.
 *
 * @author cdtank
 *
 */
public class Collaboration {

    private int collaborationID;
    private Map<String, Integer> yearToActivityCount;
    private Set<Activity> activities = new HashSet<Activity>();
    private Collaborator sourceCollaborator;
    private Collaborator targetCollaborator;

    public Collaboration(Collaborator sourceCollaborator,
            Collaborator targetCollaborator,
            Activity seedActivity,
            UniqueIDGenerator uniqueIDGenerator) {
        collaborationID = uniqueIDGenerator.getNextNumericID();
        this.sourceCollaborator = sourceCollaborator;
        this.targetCollaborator = targetCollaborator;
        this.activities.add(seedActivity);
    }

    public int getCollaborationID() {
        return collaborationID;
    }

    public Collaborator getSourceCollaborator() {
        return sourceCollaborator;
    }

    public Collaborator getTargetCollaborator() {
        return targetCollaborator;
    }

    public Set<Activity> getCollaborationActivities() {
        return activities;
    }

    public int getNumOfCollaborations() {
        return activities.size();
    }

    public void addActivity(Activity activity) {
        this.activities.add(activity);
    }

    public Map<String, Integer> getYearToActivityCount() {
        if (yearToActivityCount == null) {
            yearToActivityCount = UtilityFunctions.getYearToActivityCount(activities);
        }
        return yearToActivityCount;
    }

    /*
     * getEarliest, Latest & Unknown Activity YearCount should only be used after
     * the parsing of the entire sparql is done. Else it will give results based on
     * incomplete dataset.
     * */
    @SuppressWarnings("serial")
    public Map<String, Integer> getEarliestCollaborationYearCount() {

        /*
         * We do not want to consider the default Activity year when we are checking
         * for the min or max Activity year.
         * */
        Set<String> yearsToBeConsidered = new HashSet<String>(this.getYearToActivityCount().keySet());
        yearsToBeConsidered.remove(VOConstants.DEFAULT_ACTIVITY_YEAR);

        /*
         * There can be a case when the only Activity the author has no attached year to it
         * so essentially an "Unknown". In that case Collections.max or min will throw an
         * NoSuchElementException.
         *
         * If there is no maximum year available then we should imply so by returning a "null".
         * */
        if (yearsToBeConsidered.size() > 0) {
            final String earliestYear = Collections.min(yearsToBeConsidered);
            final Integer earliestYearActivityCount = this.getYearToActivityCount().get(earliestYear);

            return new HashMap<String, Integer>() { {
                put(earliestYear, earliestYearActivityCount);
            } };
        } else {
            return null;
        }
    }

    @SuppressWarnings("serial")
    public Map<String, Integer> getLatestCollaborationYearCount() {

        /*
         * We do not want to consider the default Activity year when we are checking
         * for the min or max Activity year.
         * */
        Set<String> yearsToBeConsidered = new HashSet<String>(this.getYearToActivityCount().keySet());
        yearsToBeConsidered.remove(VOConstants.DEFAULT_ACTIVITY_YEAR);

        /*
         * There can be a case when the only Activity the collaborator has no attached year to it
         * so essentially an "Unknown". In that case Collections.max or min will throw an
         * NoSuchElementException.
         *
         * If there is no maximum year available then we should imply so by returning a "null".
         * */
        if (yearsToBeConsidered.size() > 0) {
            final String latestYear = Collections.max(yearsToBeConsidered);
            final Integer latestYearActivityCount = this.getYearToActivityCount().get(latestYear);

            return new HashMap<String, Integer>() { {
                put(latestYear, latestYearActivityCount);
            } };
        } else {
            return null;
        }
    }

    public Integer getUnknownCollaborationYearCount() {

        Integer unknownYearActivityCount = this.getYearToActivityCount()
                .get(VOConstants.DEFAULT_ACTIVITY_YEAR);

        /*
         * If there is no unknown year available then we should imply so by returning a "null".
         * */
        if (unknownYearActivityCount != null) {
            return unknownYearActivityCount;
        } else {
            return null;
        }
    }

}
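Collaboration above, together with the Collaborator class added next, is the generic replacement for the edge/node VOs (such as CoPIEdge and CoPINode) that this commit deletes. The query-runner code that actually builds these objects is not part of the hunks shown here, so the following is only a sketch of how a collaboration edge is presumably assembled; the URIs are invented, and it assumes UniqueIDGenerator exposes a no-argument constructor.

// Sketch only: URIs are invented; construction of UniqueIDGenerator is assumed.
UniqueIDGenerator nodeIDGenerator = new UniqueIDGenerator();
UniqueIDGenerator edgeIDGenerator = new UniqueIDGenerator();

Collaborator ego = new Collaborator("http://vivo.example.edu/individual/n1", nodeIDGenerator);
Collaborator coInvestigator = new Collaborator("http://vivo.example.edu/individual/n2", nodeIDGenerator);

Activity seedGrant = new Activity("http://vivo.example.edu/individual/grant42");
Collaboration edge = new Collaboration(ego, coInvestigator, seedGrant, edgeIDGenerator);

// Further shared grants found while walking the result set attach to the same edge.
edge.addActivity(new Activity("http://vivo.example.edu/individual/grant43"));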
@@ -0,0 +1,147 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UniqueIDGenerator;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;

/**
 * This stores a collaborator's information involved in ego-centric networks & represents
 * a collaborator's activities.
 *
 * @author cdtank
 */
public class Collaborator extends Individual {

    private int collaboratorID;
    private Map<String, Integer> yearToActivityCount;

    private Set<Activity> activities = new HashSet<Activity>();

    public Collaborator(String collaboratorURI, UniqueIDGenerator uniqueIDGenerator) {
        super(collaboratorURI);
        collaboratorID = uniqueIDGenerator.getNextNumericID();
    }

    public int getCollaboratorID() {
        return collaboratorID;
    }

    public String getCollaboratorURI() {
        return this.getIndividualURI();
    }

    public String getCollaboratorName() {
        return this.getIndividualLabel();
    }

    public void setCollaboratorName(String collaboratorName) {
        this.setIndividualLabel(collaboratorName);
    }

    public Set<Activity> getCollaboratorActivities() {
        return activities;
    }

    public int getNumOfActivities() {
        return activities.size();
    }

    public void addActivity(Activity activity) {
        this.activities.add(activity);
    }

    public Map<String, Integer> getYearToActivityCount() {
        if (yearToActivityCount == null) {
            yearToActivityCount = UtilityFunctions.getYearToActivityCount(activities);
        }
        return yearToActivityCount;
    }

    /*
     * getEarliest, Latest & Unknown Collaborator YearCount should only be used after
     * the entire SPARQL result set has been parsed. Otherwise they will give results
     * based on an incomplete dataset.
     * */
    @SuppressWarnings("serial")
    public Map<String, Integer> getEarliestActivityYearCount() {

        /*
         * We do not want to consider the default activity year when we are checking
         * for the min or max activity year.
         * */
        Set<String> yearsToBeConsidered = new HashSet<String>(this.getYearToActivityCount().keySet());
        yearsToBeConsidered.remove(VOConstants.DEFAULT_ACTIVITY_YEAR);

        /*
         * There can be a case where the only activity the collaborator has carries no year,
         * so it is essentially "Unknown". In that case Collections.max or min will throw a
         * NoSuchElementException.
         *
         * If there is no earliest year available, we indicate that by returning null.
         * */
        if (yearsToBeConsidered.size() > 0) {
            final String earliestYear = Collections.min(yearsToBeConsidered);
            final Integer earliestYearActivityCount = this.getYearToActivityCount().get(earliestYear);

            return new HashMap<String, Integer>() { {
                put(earliestYear, earliestYearActivityCount);
            } };
        } else {
            return null;
        }
    }

    @SuppressWarnings("serial")
    public Map<String, Integer> getLatestActivityYearCount() {

        /*
         * We do not want to consider the default activity year when we are checking
         * for the min or max activity year.
         * */
        Set<String> yearsToBeConsidered = new HashSet<String>(this.getYearToActivityCount().keySet());
        yearsToBeConsidered.remove(VOConstants.DEFAULT_ACTIVITY_YEAR);

        /*
         * There can be a case where the only activity the collaborator has carries no year,
         * so it is essentially "Unknown". In that case Collections.max or min will throw a
         * NoSuchElementException.
         *
         * If there is no latest year available, we indicate that by returning null.
         * */
        if (yearsToBeConsidered.size() > 0) {
            final String latestYear = Collections.max(yearsToBeConsidered);
            final Integer latestYearActivityCount = this.getYearToActivityCount().get(latestYear);

            return new HashMap<String, Integer>() { {
                put(latestYear, latestYearActivityCount);
            } };
        } else {
            return null;
        }
    }

    public Integer getUnknownActivityYearCount() {

        Integer unknownYearActivityCount = this.getYearToActivityCount()
                .get(VOConstants.DEFAULT_ACTIVITY_YEAR);

        /*
         * If there is no unknown year available, we indicate that by returning null.
         * */
        if (unknownYearActivityCount != null) {
            return unknownYearActivityCount;
        } else {
            return null;
        }
    }
}
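A brief, hypothetical sketch of how the SPARQL result handler would assemble one of these Collaborator objects; the URI, the label, the activities collection, and the assumption that UniqueIDGenerator has a no-argument constructor are all placeholders, and only the Collaborator methods themselves come from the file above:

    // Hypothetical usage of the new Collaborator value object.
    UniqueIDGenerator idGenerator = new UniqueIDGenerator();   // assumed no-arg constructor

    Collaborator collaborator = new Collaborator(
            "http://vivo.example.edu/individual/n1234",         // placeholder URI
            idGenerator);                                       // supplies the numeric collaboratorID
    collaborator.setCollaboratorName("Example Collaborator");

    // Activity instances are assumed to be produced elsewhere by the SPARQL result handler.
    for (Activity activity : activitiesForThisCollaborator) {
        collaborator.addActivity(activity);
    }

    // Only meaningful once the entire result set has been parsed (see the class comment above).
    Map<String, Integer> latest = collaborator.getLatestActivityYearCount();   // e.g. {"2009": 4} or null
    int totalActivities = collaborator.getNumOfActivities();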
@@ -1,54 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;

import java.util.Set;

/**
 * @author bkoniden
 * Deepak Konidena
 *
 */
public class Department extends Individual {

    Set<BiboDocument> publication;
    Set<Person> person;

    public Department(String departmentURI, String departmentLabel) {
        super(departmentURI, departmentLabel);
    }

    public void setDepartmentLabel(String departmentURI) {
        this.setIndividualLabel(departmentURI);
    }

    public String getDepartmentURI() {
        return this.getIndividualURI();
    }

    public Set<BiboDocument> getPublication() {
        return publication;
    }

    public String getDepartmentLabel() {
        return this.getIndividualLabel();
    }

    public Set<Person> getPerson() {
        return person;
    }

    public void addPublication(BiboDocument biboDocument) {
        this.publication.add(biboDocument);
    }

    public void addPersons(Person person) {
        this.person.add(person);
    }

    public void addPerson(Person person) {
        this.person.add(person);
    }

}
@@ -1,153 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UniqueIDGenerator;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;

/**
 * This stores edge information, mainly for the co-author vis.
 *
 * @author cdtank
 *
 */
public class Edge {

    private int edgeID;
    private Map<String, Integer> yearToPublicationCount;
    private Set<BiboDocument> collaboratorDocuments = new HashSet<BiboDocument>();
    private Node sourceNode;
    private Node targetNode;

    public Edge(Node sourceNode, Node targetNode, BiboDocument seedCoAuthoredDocument,
            UniqueIDGenerator uniqueIDGenerator) {
        edgeID = uniqueIDGenerator.getNextNumericID();
        this.sourceNode = sourceNode;
        this.targetNode = targetNode;
        this.collaboratorDocuments.add(seedCoAuthoredDocument);
    }

    public int getEdgeID() {
        return edgeID;
    }

    public Node getSourceNode() {
        return sourceNode;
    }

    public Node getTargetNode() {
        return targetNode;
    }

    public Set<BiboDocument> getCollaboratorDocuments() {
        return collaboratorDocuments;
    }

    public int getNumOfCoAuthoredWorks() {
        return collaboratorDocuments.size();
    }

    public void addCollaboratorDocument(BiboDocument authorDocument) {
        this.collaboratorDocuments.add(authorDocument);
    }

    /*
     * getEarliest, Latest & Unknown Publication YearCount should only be used after
     * the parsing of the entire sparql is done. Else it will give results based on
     * incomplete dataset.
     * */
    @SuppressWarnings("serial")
    public Map<String, Integer> getEarliestCollaborationYearCount() {
        if (yearToPublicationCount == null) {
            yearToPublicationCount = UtilityFunctions
                    .getYearToPublicationCount(collaboratorDocuments);
        }

        /*
         * We do not want to consider the default publication year when we are checking
         * for the min or max publication year.
         * */
        Set<String> yearsToBeConsidered = new HashSet<String>(yearToPublicationCount.keySet());
        yearsToBeConsidered.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);

        /*
         * There can be a case when the only publication the author has no attached year to it
         * so essentially an "Unknown". In that case Collections.max or min will throw an
         * NoSuchElementException.
         *
         * If there is no maximum year available then we should imply so by returning a "null".
         * */
        if (yearsToBeConsidered.size() > 0) {
            final String earliestYear = Collections.min(yearsToBeConsidered);
            final Integer earliestYearPubCount = yearToPublicationCount.get(earliestYear);

            return new HashMap<String, Integer>() { {
                put(earliestYear, earliestYearPubCount);
            } };
        } else {
            return null;
        }
    }

    @SuppressWarnings("serial")
    public Map<String, Integer> getLatestCollaborationYearCount() {
        if (yearToPublicationCount == null) {
            yearToPublicationCount = UtilityFunctions
                    .getYearToPublicationCount(collaboratorDocuments);
        }

        /*
         * We do not want to consider the default publication year when we are checking
         * for the min or max publication year.
         * */
        Set<String> yearsToBeConsidered = new HashSet<String>(yearToPublicationCount.keySet());
        yearsToBeConsidered.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);

        /*
         * There can be a case when the only publication the author has no attached year to it
         * so essentially an "Unknown". In that case Collections.max or min will throw an
         * NoSuchElementException.
         *
         * If there is no maximum year available then we should imply so by returning a "null".
         * */
        if (yearsToBeConsidered.size() > 0) {
            final String latestYear = Collections.max(yearsToBeConsidered);
            final Integer latestYearPubCount = yearToPublicationCount.get(latestYear);

            return new HashMap<String, Integer>() { {
                put(latestYear, latestYearPubCount);
            } };
        } else {
            return null;
        }
    }

    public Integer getUnknownCollaborationYearCount() {
        if (yearToPublicationCount == null) {
            yearToPublicationCount = UtilityFunctions
                    .getYearToPublicationCount(collaboratorDocuments);
        }

        Integer unknownYearPubCount = yearToPublicationCount
                .get(VOConstants.DEFAULT_PUBLICATION_YEAR);

        /*
         * If there is no unknown year available then we should imply so by returning a "null".
         * */
        if (unknownYearPubCount != null) {
            return unknownYearPubCount;
        } else {
            return null;
        }
    }

}
@@ -13,9 +13,8 @@ import java.util.LinkedHashSet;
 */
public class Entity extends Individual {

    Set<BiboDocument> publications = new HashSet<BiboDocument>();
    Set<Activity> activities = new HashSet<Activity>();
    Set<SubEntity> children = new LinkedHashSet<SubEntity>();
    Set<Grant> grants = new HashSet<Grant>();

    public Entity(String departmentURI, String departmentLabel) {
        super(departmentURI, departmentLabel);

@@ -29,8 +28,8 @@ public class Entity extends Individual{
        return this.getIndividualURI();
    }

    public Set<BiboDocument> getPublications() {
        return publications;
    public Set<Activity> getActivities() {
        return activities;
    }

    public String getEntityLabel() {

@@ -41,8 +40,8 @@ public class Entity extends Individual{
        return children;
    }

    public void addPublication(BiboDocument biboDocument) {
        this.publications.add(biboDocument);
    public void addActivity(Activity activity) {
        this.activities.add(activity);
    }

    public void addSubEntity(SubEntity subEntity) {

@@ -55,8 +54,4 @@ public class Entity extends Individual{

    }

    public void addGrant(Grant grant) {
        this.grants.add(grant);
    }

}
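To make the rename in these hunks concrete, a short hypothetical before/after of an Entity call site; the entity, biboDocument, and activity variables are placeholders, and only the method names are taken from the diff above:

    // Old API (removed in this commit):
    // entity.addPublication(biboDocument);
    // Set<BiboDocument> works = entity.getPublications();

    // New API: publications (and grants) are handled uniformly as activities.
    entity.addActivity(activity);
    Set<Activity> works = entity.getActivities();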
@@ -1,150 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;

import org.joda.time.DateTime;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;

/**
 * @author bkoniden
 * Deepak Konidena
 *
 */

public class Grant extends Individual {

    private String grantStartDate;
    private String grantEndDate;

    public Grant(String grantURL, String grantLabel) {
        super(grantURL, grantLabel);
    }

    public Grant(String grantURL) {
        super(grantURL);
    }

    public String getGrantURL() {
        return this.getIndividualURI();
    }

    public String getGrantLabel() {
        return this.getIndividualLabel();
    }

    public void setGrantLabel(String grantLabel) {
        this.setIndividualLabel(grantLabel);
    }

    /**
     * This method will be called to get the inferred start year for the grant.
     * The 3 choices, in order, are,
     * 1. parsed year from xs:DateTime object saved in core:dateTimeValue
     * 2. core:year which was property used in vivo 1.1 ontology
     * 3. Default Grant Start Year
     * @return
     */
    public String getParsedGrantStartYear() {

        if (grantStartDate != null) {

            DateTime validParsedDateTimeObject = UtilityFunctions
                    .getValidParsedDateTimeObject(grantStartDate);

            if (validParsedDateTimeObject != null) {
                return String.valueOf(validParsedDateTimeObject.getYear());
            } else {
                return VOConstants.DEFAULT_GRANT_YEAR;
            }
        } else {
            return VOConstants.DEFAULT_GRANT_YEAR;
        }

    }

    @Override
    public boolean equals(Object other) {
        boolean result = false;
        if (other instanceof Grant) {
            Grant grant = (Grant) other;
            result = (this.getIndividualLabel().equals(grant.getIndividualLabel())
                    && this.getIndividualURI().equals(grant.getIndividualURI()));
        }
        return result;
    }

    @Override
    public int hashCode() {
        return (41 * (getIndividualLabel().hashCode() + 41 * (getIndividualURI().hashCode())));
    }

    public String getGrantStartDate() {
        return grantStartDate;
    }

    public void setGrantStartDate(String grantStartDate) {
        this.grantStartDate = grantStartDate;
    }

    public String getGrantEndDate() {
        return grantEndDate;
    }

    public void setGrantEndDate(String grantEndDate) {
        this.grantEndDate = grantEndDate;
    }

    private boolean isValidPublicationYear(String testGrantYear) {

        if (testGrantYear.length() != 0
                && testGrantYear.trim().length() == VOConstants.NUM_CHARS_IN_YEAR_FORMAT
                && testGrantYear.matches("\\d+")
                && Integer.parseInt(testGrantYear) >= VOConstants.MINIMUM_PUBLICATION_YEAR) {
            return true;
        }

        return false;
    }

//    /**
//     * This method will be called when there is no usable core:year value found
//     * for the core:Grant. It will first check & parse core:yearMonth failing
//     * which it will try core:date
//     * @return
//     */
//    public String getParsedGrantStartYear() {
//
//        /*
//         * We are assuming that core:yearMonth has "YYYY-MM-DD" format. This is based
//         * off of http://www.w3.org/TR/xmlschema-2/#gYearMonth , which is what
//         * core:yearMonth points to internally.
//         * */
//        if (grantStartYearMonth != null
//                && grantStartYearMonth.length() >= VOConstants.NUM_CHARS_IN_YEAR_FORMAT
//                && isValidPublicationYear(grantStartYearMonth.substring(
//                        0,
//                        VOConstants.NUM_CHARS_IN_YEAR_FORMAT))) {
//
//            return grantStartYearMonth.substring(0, VOConstants.NUM_CHARS_IN_YEAR_FORMAT);
//
//        }
//
//        if (grantStartDate != null
//                && grantStartDate.length() >= VOConstants.NUM_CHARS_IN_YEAR_FORMAT
//                && isValidPublicationYear(grantStartDate
//                        .substring(0,
//                                VOConstants.NUM_CHARS_IN_YEAR_FORMAT))) {
//
//            return grantStartDate.substring(0, VOConstants.NUM_CHARS_IN_YEAR_FORMAT);
//        }
//
//        /*
//         * If all else fails return default unknown year identifier
//         * */
//        return VOConstants.DEFAULT_GRANT_YEAR;
//    }

}
@@ -1,156 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */

package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;

import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Set;

import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UniqueIDGenerator;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.visutils.UtilityFunctions;

/**
 * This stores node information mainly for co-author vis.
 *
 * @author cdtank
 */
public class Node extends Individual {

    private int nodeID;
    private Map<String, Integer> yearToPublicationCount;

    private Set<BiboDocument> authorDocuments = new HashSet<BiboDocument>();

    public Node(String nodeURI, UniqueIDGenerator uniqueIDGenerator) {
        super(nodeURI);
        nodeID = uniqueIDGenerator.getNextNumericID();
    }

    public int getNodeID() {
        return nodeID;
    }

    public String getNodeURI() {
        return this.getIndividualURI();
    }

    public String getNodeName() {
        return this.getIndividualLabel();
    }

    public void setNodeName(String nodeName) {
        this.setIndividualLabel(nodeName);
    }

    public Set<BiboDocument> getAuthorDocuments() {
        return authorDocuments;
    }

    public int getNumOfAuthoredWorks() {
        return authorDocuments.size();
    }

    public void addAuthorDocument(BiboDocument authorDocument) {
        this.authorDocuments.add(authorDocument);
    }

    public Map<String, Integer> getYearToPublicationCount() {
        if (yearToPublicationCount == null) {
            yearToPublicationCount = UtilityFunctions.getYearToPublicationCount(authorDocuments);
        }
        return yearToPublicationCount;
    }

    /*
     * getEarliest, Latest & Unknown Publication YearCount should only be used after
     * the parsing of the entire sparql is done. Else it will give results based on
     * incomplete dataset.
     * */
    @SuppressWarnings("serial")
    public Map<String, Integer> getEarliestPublicationYearCount() {
        if (yearToPublicationCount == null) {
            yearToPublicationCount = UtilityFunctions.getYearToPublicationCount(authorDocuments);
        }

        /*
         * We do not want to consider the default publication year when we are checking
         * for the min or max publication year.
         * */
        Set<String> yearsToBeConsidered = new HashSet<String>(yearToPublicationCount.keySet());
        yearsToBeConsidered.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);

        /*
         * There can be a case when the only publication the author has no attached year to it
         * so essentially an "Unknown". In that case Collections.max or min will throw an
         * NoSuchElementException.
         *
         * If there is no maximum year available then we should imply so by returning a "null".
         * */
        if (yearsToBeConsidered.size() > 0) {
            final String earliestYear = Collections.min(yearsToBeConsidered);
            final Integer earliestYearPubCount = yearToPublicationCount.get(earliestYear);

            return new HashMap<String, Integer>() { {
                put(earliestYear, earliestYearPubCount);
            } };
        } else {
            return null;
        }
    }

    @SuppressWarnings("serial")
    public Map<String, Integer> getLatestPublicationYearCount() {
        if (yearToPublicationCount == null) {
            yearToPublicationCount = UtilityFunctions.getYearToPublicationCount(authorDocuments);
        }

        /*
         * We do not want to consider the default publication year when we are checking
         * for the min or max publication year.
         * */
        Set<String> yearsToBeConsidered = new HashSet<String>(yearToPublicationCount.keySet());
        yearsToBeConsidered.remove(VOConstants.DEFAULT_PUBLICATION_YEAR);

        /*
         * There can be a case when the only publication the author has no attached year to it
         * so essentially an "Unknown". In that case Collections.max or min will throw an
         * NoSuchElementException.
         *
         * If there is no maximum year available then we should imply so by returning a "null".
         * */
        if (yearsToBeConsidered.size() > 0) {
            final String latestYear = Collections.max(yearsToBeConsidered);
            final Integer latestYearPubCount = yearToPublicationCount.get(latestYear);

            return new HashMap<String, Integer>() { {
                put(latestYear, latestYearPubCount);
            } };
        } else {
            return null;
        }

    }

    public Integer getUnknownPublicationYearCount() {
        if (yearToPublicationCount == null) {
            yearToPublicationCount = UtilityFunctions.getYearToPublicationCount(authorDocuments);
        }

        Integer unknownYearPubCount = yearToPublicationCount
                .get(VOConstants.DEFAULT_PUBLICATION_YEAR);

        /*
         * If there is no unknown year available then we should imply so by returning a "null".
         * */
        if (unknownYearPubCount != null) {
            return unknownYearPubCount;
        } else {
            return null;
        }
    }
}
@@ -1,27 +0,0 @@
/* $This file is distributed under the terms of the license in /doc/license.txt$ */
package edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects;

import java.util.Set;
import java.util.HashSet;

/**
 * @author bkoniden
 * Deepak Konidena
 */
public class Person extends Individual {

    Set<BiboDocument> documents = new HashSet<BiboDocument>();

    public Person(String individualURI) {
        super(individualURI);
    }

    public Set<BiboDocument> getDocuments() {
        return documents;
    }

    public Person(String individualURI, String individualLabel) {
        super(individualURI, individualLabel);
    }

}
@@ -12,10 +12,8 @@ import java.util.HashSet;
 */
public class SubEntity extends Individual {

    Set<BiboDocument> publications = new HashSet<BiboDocument>();
    Set<Activity> activities = new HashSet<Activity>();
    Map<String, Map<String, String>> personToPositionAndStartYear = new HashMap<String, Map<String, String>>();
    Set<Grant> grants = new HashSet<Grant>();

    public SubEntity(String individualURI) {
        super(individualURI);

@@ -30,45 +28,21 @@ public class SubEntity extends Individual {
        this.personToPositionAndStartYear = personToPositionAndStartYear;
    }

    public Set<BiboDocument> getDocuments() {
        return publications;
    public Set<Activity> getActivities() {
        return activities;
    }

    public Set<Grant> getGrants() {
        return grants;
    }

    public SubEntity(String individualURI, String individualLabel) {
        super(individualURI, individualLabel);
    }

    @Override
    public boolean equals(Object other) {
        boolean result = false;
        if (other instanceof SubEntity) {
            SubEntity person = (SubEntity) other;
            result = (this.getIndividualLabel().equals(person.getIndividualLabel())
                    && this.getIndividualURI().equals(person.getIndividualURI()));
        }
        return result;
    }

    @Override
    public int hashCode() {
        return (41 * (getIndividualLabel().hashCode() + 41 * (getIndividualURI().hashCode())));
    }

    @Override
    public String toString() {
        return this.getIndividualLabel();
    }

    public void addPublication(BiboDocument biboDocument) {
        this.publications.add(biboDocument);
    public void addActivity(Activity activity) {
        this.activities.add(activity);
    }

    public void addGrant(Grant grant) {
        this.grants.add(grant);
    }
}
@@ -29,97 +29,86 @@ import edu.cornell.mannlib.vitro.webapp.controller.freemarker.responsevalues.Tem
import edu.cornell.mannlib.vitro.webapp.controller.visualization.freemarker.VisualizationFrameworkConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VOConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.constants.VisConstants;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.BiboDocument;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoAuthorshipData;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPIData;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.CoPINode;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Grant;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Node;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.collaborationutils.CollaborationData;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Activity;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.Collaborator;
import edu.cornell.mannlib.vitro.webapp.visualization.freemarker.valueobjects.SubEntity;

public class UtilityFunctions {

    public static Map<String, Integer> getYearToPublicationCount(
            Set<BiboDocument> authorDocuments) {
    public static Map<String, Integer> getYearToActivityCount(
            Set<Activity> activities) {

        /*
         * Create a map from the year to number of publications. Use the BiboDocument's
         * parsedPublicationYear to populate the data.
         * or Grant's parsedPublicationYear or parsedGrantYear to populate the data passed
         * via Activity's getParsedActivityYear.
         * */
        Map<String, Integer> yearToPublicationCount = new TreeMap<String, Integer>();
        Map<String, Integer> yearToActivityCount = new TreeMap<String, Integer>();

        for (BiboDocument curr : authorDocuments) {
        for (Activity currentActivity : activities) {

            /*
             * Increment the count because there is an entry already available for
             * that particular year.
             *
             * I am pushing the logic to check for validity of year in "getPublicationYear" itself
             * because,
             * 1. We will be using getPub... multiple times & this will save us duplication of code
             * 2. If we change the logic of validity of a pub year we would not have to make
             * changes all throughout the codebase.
             * 3. We are asking for a publicationDate which is captured using the vivo 1.2 ontology
             * & if not saved then we are being nice & checking if date is saved using core:year if so
             * we use that else we return UNKOWN_YEAR.
             * */
            String publicationYear = curr.getParsedPublicationYear();
            String activityYear = currentActivity.getParsedActivityYear();

            if (yearToPublicationCount.containsKey(publicationYear)) {
                yearToPublicationCount.put(publicationYear,
                        yearToPublicationCount
                                .get(publicationYear) + 1);
            if (yearToActivityCount.containsKey(activityYear)) {
                yearToActivityCount.put(activityYear,
                        yearToActivityCount
                                .get(activityYear) + 1);

            } else {
                yearToPublicationCount.put(publicationYear, 1);
                yearToActivityCount.put(activityYear, 1);
            }

        }

        return yearToPublicationCount;
        return yearToActivityCount;
    }

    /**
     * This method is used to return a mapping between publication year & all the co-authors
     * This method is used to return a mapping between activity year & all the collaborators
     * that published with ego in that year.
     * @param authorNodesAndEdges
     * @param collaborationData
     * @return
     */
    public static Map<String, Set<Node>> getPublicationYearToCoAuthors(
            CoAuthorshipData authorNodesAndEdges) {
    public static Map<String, Set<Collaborator>> getActivityYearToCollaborators(
            CollaborationData collaborationData) {

        Map<String, Set<Node>> yearToCoAuthors = new TreeMap<String, Set<Node>>();
        Map<String, Set<Collaborator>> yearToCollaborators = new TreeMap<String, Set<Collaborator>>();

        Node egoNode = authorNodesAndEdges.getEgoNode();
        Collaborator egoCollaborator = collaborationData.getEgoCollaborator();

        for (Node currNode : authorNodesAndEdges.getNodes()) {
        for (Collaborator currNode : collaborationData.getCollaborators()) {

            /*
             * We have already printed the Ego Node info.
             * */
            if (currNode != egoNode) {
            if (currNode != egoCollaborator) {

                for (String year : currNode.getYearToPublicationCount().keySet()) {
                for (String year : currNode.getYearToActivityCount().keySet()) {

                    Set<Node> coAuthorNodes;
                    Set<Collaborator> collaboratorNodes;

                    if (yearToCoAuthors.containsKey(year)) {
                    if (yearToCollaborators.containsKey(year)) {

                        coAuthorNodes = yearToCoAuthors.get(year);
                        coAuthorNodes.add(currNode);
                        collaboratorNodes = yearToCollaborators.get(year);
                        collaboratorNodes.add(currNode);

                    } else {

                        coAuthorNodes = new HashSet<Node>();
                        coAuthorNodes.add(currNode);
                        yearToCoAuthors.put(year, coAuthorNodes);
                        collaboratorNodes = new HashSet<Collaborator>();
                        collaboratorNodes.add(currNode);
                        yearToCollaborators.put(year, collaboratorNodes);
                    }

                }

            }
        }
        return yearToCoAuthors;
        return yearToCollaborators;
    }

    /**

@@ -178,83 +167,6 @@ public class UtilityFunctions {
        }
    }

    public static Map<String, Set<CoPINode>> getGrantYearToCoPI(
            CoPIData pINodesAndEdges) {

        Map<String, Set<CoPINode>> yearToCoPIs = new TreeMap<String, Set<CoPINode>>();

        CoPINode egoNode = pINodesAndEdges.getEgoNode();

        for (CoPINode currNode : pINodesAndEdges.getNodes()) {

            /*
             * We have already printed the Ego Node info.
             * */
            if (currNode != egoNode) {

                for (String year : currNode.getYearToGrantCount().keySet()) {

                    Set<CoPINode> coPINodes;

                    if (yearToCoPIs.containsKey(year)) {

                        coPINodes = yearToCoPIs.get(year);
                        coPINodes.add(currNode);

                    } else {

                        coPINodes = new HashSet<CoPINode>();
                        coPINodes.add(currNode);
                        yearToCoPIs.put(year, coPINodes);
                    }

                }

            }
        }
        return yearToCoPIs;

    }

    public static Map<String, Integer> getYearToGrantCount(Set<Grant> pIGrants) {

        /*
         * Create a map from the year to number of grants. Use the Grant's
         * parsedGrantStartYear to populate the data.
         * */
        Map<String, Integer> yearToGrantCount = new TreeMap<String, Integer>();

        for (Grant curr : pIGrants) {

            /*
             * Increment the count because there is an entry already available for
             * that particular year.
             *
             * I am pushing the logic to check for validity of year in "getGrantYear" itself
             * because,
             * 1. We will be using getGra... multiple times & this will save us duplication of code
             * 2. If we change the logic of validity of a grant year we would not have to make
             * changes all throughout the codebase.
             * 3. We are asking for a grant year & we should get a proper one or NOT at all.
             * */
            String grantYear = curr.getParsedGrantStartYear();

            if (yearToGrantCount.containsKey(grantYear)) {
                yearToGrantCount.put(grantYear,
                        yearToGrantCount
                                .get(grantYear) + 1);

            } else {
                yearToGrantCount.put(grantYear, 1);
            }

        }

        return yearToGrantCount;

    }

    public static DateTime getValidParsedDateTimeObject(String unparsedDateTime) {

        for (DateTimeFormatter currentFormatter : VOConstants.POSSIBLE_DATE_TIME_FORMATTERS) {

@@ -277,6 +189,33 @@ public class UtilityFunctions {
        return null;
    }

    /**
     * This method will be called to get the inferred end year for the entity.
     * The 2 choices, in order, are,
     * 1. parsed year from core:DateTime object saved in core:dateTimeValue
     * 2. Default Entity Year
     * @return
     */
    public static String getValidYearFromCoreDateTimeString(String inputDate,
            String defaultYearInCaseOfError) {
        /*
         * Always return default year identifier in case of an illegal parsed year.
         * */
        String parsedGrantYear = defaultYearInCaseOfError;

        if (inputDate != null) {

            DateTime validParsedDateTimeObject = UtilityFunctions
                    .getValidParsedDateTimeObject(inputDate);

            if (validParsedDateTimeObject != null) {
                return String.valueOf(validParsedDateTimeObject.getYear());
            }
        }

        return parsedGrantYear;
    }

    public static String getCSVDownloadURL(String individualURI, String visType, String visMode) {

        ParamMap CSVDownloadURLParams = null;
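A hedged example of the new year-parsing helper above; the exact set of accepted literals depends on VOConstants.POSSIBLE_DATE_TIME_FORMATTERS, so the ISO-style input shown here is an assumption rather than a guaranteed format:

    // Assuming the date-time formatters registered in VOConstants accept this xsd:dateTime-style literal.
    String startYear = UtilityFunctions.getValidYearFromCoreDateTimeString(
            "2008-02-01T00:00:00", VOConstants.DEFAULT_GRANT_YEAR);
    // startYear -> "2008"

    // A null or unparsable input falls back to the supplied default year identifier.
    String fallbackYear = UtilityFunctions.getValidYearFromCoreDateTimeString(
            "not-a-date", VOConstants.DEFAULT_GRANT_YEAR);
    // fallbackYear -> the default ("Unknown")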