NIHVIVO-2458 SolrPagedSearchController: remove code supporting the alpha parameter, since it is not used; re-enable classgroup and type refinement links against the Solr results.
parent 5467d62023
commit 57d9d061b5
2 changed files with 123 additions and 177 deletions
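The substantive change is in SolrPagedSearchController: the refinement-link code that was previously commented out is re-enabled, and it now reads classgroup and rdf:type URIs from the SolrDocumentList returned by the Solr query instead of walking Lucene TopDocs through an IndexSearcher; the unused alpha-parameter code (alphaSortIndividuals, getAlphas, and the alpha WildcardQuery in the old getQuery) is deleted. Below is a minimal sketch of the new lookup pattern, for orientation only: the helper name collectFieldValues and the wrapper class are invented here, while the SolrJ calls (getNumFound, get, getFieldValues) and the Entity2LuceneDoc.term field constants are the ones the new getClassGroups and getVClassUrisForHits use in the diff that follows.

    import java.util.Collection;
    import java.util.HashSet;
    import java.util.Set;

    import org.apache.solr.common.SolrDocument;
    import org.apache.solr.common.SolrDocumentList;

    class SolrFieldValueSketch {
        // Collect the distinct values of one field across the returned documents,
        // e.g. fieldName = Entity2LuceneDoc.term.CLASSGROUP_URI or Entity2LuceneDoc.term.RDFTYPE.
        static Set<String> collectFieldValues(SolrDocumentList docs, String fieldName) {
            Set<String> values = new HashSet<String>();
            long hitCount = docs.getNumFound();                        // total matches; may exceed the page returned
            for (int i = 0; i < hitCount && i < docs.size(); i++) {    // size() guard added in this sketch
                SolrDocument doc = docs.get(i);                        // replaces IndexSearcher.doc(scoreDoc.doc)
                Collection<Object> vals = doc.getFieldValues(fieldName); // replaces Document.getFields(...)
                if (vals != null) {
                    for (Object o : vals) {
                        values.add(o.toString());                      // replaces Field.stringValue()
                    }
                }
            }
            return values;
        }
    }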
ApplicationDaoJena.java
@@ -78,7 +78,7 @@ public class ApplicationDaoJena extends JenaBaseDao implements ApplicationDao {
         // namespace with a final slash, so this makes matching easier.
         // It also accords with the way the default namespace is defined.
         if (!namespace.endsWith("/")) {
-            namespace = namespace + "/";
+            namespace += "/";
         }
         externallyLinkedNamespaces.add(namespace);
     }
SolrPagedSearchController.java
@@ -4,6 +4,7 @@ package edu.cornell.mannlib.vitro.webapp.search.controller;
 
 import java.io.IOException;
 import java.util.ArrayList;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.Comparator;
 import java.util.HashMap;
@@ -160,9 +161,6 @@ public class SolrPagedSearchController extends FreemarkerHttpServlet {
         log.debug("IndividualDao is " + iDao.toString() + " Public classes in the classgroup are " + grpDao.getPublicGroupsWithVClasses().toString());
         log.debug("VClassDao is "+ vclassDao.toString() );
         
-        // RY Not sure where/how this is used ***
-        //String alphaFilter = vreq.getParameter("alpha");
-        
         int startIndex = 0;
         try{
             startIndex = Integer.parseInt(vreq.getParameter("startIndex"));
@@ -182,10 +180,7 @@ public class SolrPagedSearchController extends FreemarkerHttpServlet {
         int maxHitSize = DEFAULT_MAX_SEARCH_SIZE ;
         if( startIndex >= DEFAULT_MAX_SEARCH_SIZE - hitsPerPage )
             maxHitSize = startIndex + DEFAULT_MAX_SEARCH_SIZE ;
-        // if( alphaFilter != null ){
-        //     maxHitSize = maxHitSize * 2;
-        //     hitsPerPage = maxHitSize;
-        // }
         log.debug("maxHitSize is " + maxHitSize);
 
         String qtxt = vreq.getParameter(VitroQuery.QUERY_PARAMETER_NAME);
@@ -339,40 +334,36 @@ public class SolrPagedSearchController extends FreemarkerHttpServlet {
             if (type != null && type.getName() != null)
                 body.put("typeName", type.getName());
         }
-//        
-//        /* Add classgroup and type refinement links to body */
-//        if( wasHtmlRequested ){ 
-//            // Search request includes no classgroup and no type, so add classgroup search refinement links.
-//            if ( !classGroupFilterRequested && !typeFiltereRequested ) {
-//                List<VClassGroup> classgroups = getClassGroups(grpDao, topDocs, searcherForRequest);
-//                List<VClassGroupSearchLink> classGroupLinks = new ArrayList<VClassGroupSearchLink>(classgroups.size());
-//                for (VClassGroup vcg : classgroups) {
-//                    if (vcg.getPublicName() != null) {
-//                        classGroupLinks.add(new VClassGroupSearchLink(qtxt, vcg));
-//                    }
-//                }
-//                body.put("classGroupLinks", classGroupLinks);
-//                
-//            // Search request is for a classgroup, so add rdf:type search refinement links
-//            // but try to filter out classes that are subclasses
-//            } else if ( classGroupFilterRequested && !typeFiltereRequested ) {
-//                List<VClass> vClasses = getVClasses(vclassDao,topDocs,searcherForRequest);
-//                List<VClassSearchLink> vClassLinks = new ArrayList<VClassSearchLink>(vClasses.size());
-//                for (VClass vc : vClasses) {
-//                    vClassLinks.add(new VClassSearchLink(qtxt, vc));
-//                }
-//                body.put("classLinks", vClassLinks);
-//                pagingLinkParams.put("classgroup", classGroupParam);
-//                
-//            // This case is never displayed
-////            } else if (!StringUtils.isEmpty(alphaFilter)) {
-////                body.put("alphas", getAlphas(topDocs, searcherForRequest));
-////                alphaSortIndividuals(beans);
-//            } else {
-//                pagingLinkParams.put("type", typeParam);
-//            }
-//        }
-//        
+        
+        /* Add classgroup and type refinement links to body */
+        if( wasHtmlRequested ){ 
+            // Search request includes no classgroup and no type, so add classgroup search refinement links.
+            if ( !classGroupFilterRequested && !typeFiltereRequested ) {
+                List<VClassGroup> classgroups = getClassGroups(grpDao, docs);
+                List<VClassGroupSearchLink> classGroupLinks = new ArrayList<VClassGroupSearchLink>(classgroups.size());
+                for (VClassGroup vcg : classgroups) {
+                    if (vcg.getPublicName() != null) {
+                        classGroupLinks.add(new VClassGroupSearchLink(qtxt, vcg));
+                    }
+                }
+                body.put("classGroupLinks", classGroupLinks);
+                
+            // Search request is for a classgroup, so add rdf:type search refinement links
+            // but try to filter out classes that are subclasses
+            } else if ( classGroupFilterRequested && !typeFiltereRequested ) {
+                List<VClass> vClasses = getVClasses(vclassDao, docs);
+                List<VClassSearchLink> vClassLinks = new ArrayList<VClassSearchLink>(vClasses.size());
+                for (VClass vc : vClasses) {
+                    vClassLinks.add(new VClassSearchLink(qtxt, vc));
+                }
+                body.put("classLinks", vClassLinks);
+                pagingLinkParams.put("classgroup", classGroupParam);
+                
+            } else {
+                pagingLinkParams.put("type", typeParam);
+            }
+        }
+        
         // Convert search result individuals to template model objects
         // RY If this diverges significantly from what's used on the index page,
         // create a different template model.
@@ -407,53 +398,25 @@ public class SolrPagedSearchController extends FreemarkerHttpServlet {
         }
     }
 
-    private void alphaSortIndividuals(List<Individual> beans) {
-        Collections.sort(beans, new Comparator< Individual >(){
-            public int compare(Individual o1, Individual o2) {
-                if( o1 == null || o1.getName() == null )
-                    return 1;
-                else
-                    return o1.getName().compareTo(o2.getName());
-            }});
-    }
-    
-    private List<String> getAlphas(TopDocs topDocs, IndexSearcher searcher) {
-        Set<String> alphas = new HashSet<String>();
-        for(int i=0;i<topDocs.scoreDocs.length; i++){
-            Document doc;
-            try {
-                doc = searcher.doc(topDocs.scoreDocs[i].doc);
-                String name =doc.get(Entity2LuceneDoc.term.NAME);
-                if( name != null && name.length() > 0)
-                    alphas.add( name.substring(0, 1));
-            } catch (CorruptIndexException e) {
-                log.debug("Could not get alphas for document",e);
-            } catch (IOException e) {
-                log.debug("Could not get alphas for document",e);
-            }
-            
-        }
-        return new ArrayList<String>(alphas);
-    }
-    
     /**
      * Get the class groups represented for the individuals in the topDocs.
      */
-    private List<VClassGroup> getClassGroups(VClassGroupDao grpDao, TopDocs topDocs,
-            IndexSearcher searcherForRequest) {
+    private List<VClassGroup> getClassGroups(VClassGroupDao grpDao, SolrDocumentList docs) {
         LinkedHashMap<String,VClassGroup> grpMap = grpDao.getClassGroupMap();
         int n = grpMap.size();
         
         HashSet<String> classGroupsInHits = new HashSet<String>(n);
         int grpsFound = 0;
         
-        for(int i=0; i<topDocs.scoreDocs.length && n > grpsFound ;i++){
+        long hitCount = docs.getNumFound();
+        for(int i=0; i<hitCount && n > grpsFound ;i++){
             try{
-                Document doc = searcherForRequest.doc(topDocs.scoreDocs[i].doc);
-                Field[] grps = doc.getFields(Entity2LuceneDoc.term.CLASSGROUP_URI);
-                if(grps != null || grps.length > 0){
-                    for(int j=0;j<grps.length;j++){
-                        String groupUri = grps[j].stringValue();
+                SolrDocument doc = docs.get(i);
+                Collection<Object> grps = doc.getFieldValues(Entity2LuceneDoc.term.CLASSGROUP_URI);
+                if (grps != null) {
+                    for (Object o : grps) {
+                        String groupUri = o.toString();
                         if( groupUri != null && ! classGroupsInHits.contains(groupUri)){
                             classGroupsInHits.add(groupUri);
                             grpsFound++;
@@ -462,7 +425,7 @@ public class SolrPagedSearchController extends FreemarkerHttpServlet {
                         }
                     }
                 }
-            }catch(Exception e){
+            } catch(Exception e) {
                 log.error("problem getting VClassGroups from search hits "
                         + e.getMessage());
             }
@@ -485,6 +448,148 @@ public class SolrPagedSearchController extends FreemarkerHttpServlet {
         return classgroups;
     }
 
+    private List<VClass> getVClasses(VClassDao vclassDao, SolrDocumentList docs){
+        HashSet<String> typesInHits = getVClassUrisForHits(docs);
+        List<VClass> classes = new ArrayList<VClass>(typesInHits.size());
+        
+        Iterator<String> it = typesInHits.iterator();
+        while(it.hasNext()){
+            String typeUri = it.next();
+            try{
+                if( VitroVocabulary.OWL_THING.equals(typeUri))
+                    continue;
+                VClass type = vclassDao.getVClassByURI(typeUri);
+                if( ! type.isAnonymous() &&
+                    type.getName() != null && !"".equals(type.getName()) &&
+                    type.getGroupURI() != null ) //don't display classes that aren't in classgroups
+                        classes.add(type);
+            }catch(Exception ex){
+                if( log.isDebugEnabled() )
+                    log.debug("could not add type " + typeUri, ex);
+            }
+        }
+        Collections.sort(classes, new Comparator<VClass>(){
+            public int compare(VClass o1, VClass o2) {
+                return o1.compareTo(o2);
+            }});
+        return classes;
+    }
+    
+    private HashSet<String> getVClassUrisForHits(SolrDocumentList docs){
+        HashSet<String> typesInHits = new HashSet<String>();
+        for (SolrDocument doc : docs) {
+            try {
+                Collection<Object> types = doc.getFieldValues(Entity2LuceneDoc.term.RDFTYPE);
+                if (types != null) {
+                    for (Object o : types) {
+                        String typeUri = o.toString();
+                        typesInHits.add(typeUri);
+                    }
+                }
+            } catch (Exception e) {
+                log.error("problems getting rdf:type for search hits",e);
+            }
+        }
+        return typesInHits;
+    }
+    
+    private Analyzer getAnalyzer(ServletContext servletContext) throws SearchException {
+        Object obj = servletContext.getAttribute(LuceneSetup.ANALYZER);
+        if( obj == null || !(obj instanceof Analyzer) )
+            throw new SearchException("Could not get analyzer");
+        else
+            return (Analyzer)obj;
+    }
+    
+    private Query getQuery(VitroRequest request,
+                       Analyzer analyzer, String querystr ) throws SearchException, ParseException {
+        Query query = null;
+        try{
+            //String querystr = request.getParameter(VitroQuery.QUERY_PARAMETER_NAME);
+            if( querystr == null){
+                log.error("There was no Parameter '"+VitroQuery.QUERY_PARAMETER_NAME
+                    +"' in the request.");
+                return null;
+            }else if( querystr.length() > MAX_QUERY_LENGTH ){
+                log.debug("The search was too long. The maximum " +
+                        "query length is " + MAX_QUERY_LENGTH );
+                return null;
+            }
+            
+            log.debug("Parsing query using QueryParser ");
+            
+            QueryParser parser = getQueryParser(analyzer);
+            query = parser.parse(querystr);
+            
+            //check if this is classgroup filtered
+            Object param = request.getParameter("classgroup");
+            if( param != null && !"".equals(param)){
+                
+                log.debug("Firing classgroup query ");
+                log.debug("request.getParameter(classgroup) is "+ param.toString());
+                
+                BooleanQuery boolQuery = new BooleanQuery();
+                boolQuery.add( query, BooleanClause.Occur.MUST);
+                boolQuery.add(  new TermQuery(
+                                    new Term(Entity2LuceneDoc.term.CLASSGROUP_URI,
+                                            (String)param)),
+                                BooleanClause.Occur.MUST);
+                query = boolQuery;
+            }
+            
+            //check if this is rdf:type filtered
+            param = request.getParameter("type");
+            if(  param != null && !"".equals(param)){
+                log.debug("Firing type query ");
+                log.debug("request.getParameter(type) is "+ param.toString());
+                
+                BooleanQuery boolQuery = new BooleanQuery();
+                boolQuery.add( query, BooleanClause.Occur.MUST);
+                boolQuery.add(  new TermQuery(
+                                    new Term(Entity2LuceneDoc.term.RDFTYPE,
+                                            (String)param)),
+                                BooleanClause.Occur.MUST);
+                query = boolQuery;
+            }
+            
+            log.debug("Query: " + query);
+            
+        } catch (ParseException e) {
+            throw new ParseException(e.getMessage());
+        } catch (Exception ex){
+            throw new SearchException(ex.getMessage());
+        }
+        
+        return query;
+    }
+    
+    @SuppressWarnings("static-access")
+    private QueryParser getQueryParser(Analyzer analyzer){
+        //defaultSearchField indicates which field search against when there is no term
+        //indicated in the query string.
+        //The analyzer is needed so that we use the same analyzer on the search queries as
+        //was used on the text that was indexed.
+        //QueryParser qp = new QueryParser("NAME",analyzer);
+        //this sets the query parser to AND all of the query terms it finds.
+        //set up the map of stemmed field names -> unstemmed field names
+//        HashMap<String,String> map = new HashMap<String, String>();
+//        map.put(Entity2LuceneDoc.term.ALLTEXT,Entity2LuceneDoc.term.ALLTEXTUNSTEMMED);
+//        qp.setStemmedToUnstemmed(map);
+        
+        MultiFieldQueryParser qp = new MultiFieldQueryParser(Version.LUCENE_29, new String[]{
+                "name", "nameunstemmed", "type", "moniker", "ALLTEXT", "ALLTEXTUNSTEMMED", "nameraw" , "classLocalName", "classLocalNameLowerCase" }, analyzer);
+        
+        //  QueryParser qp = new QueryParser(Version.LUCENE_29, "name", analyzer);
+        
+        //AND_OPERATOR returns documents even if the terms in the query lie in different fields.
+        //The only requirement is that they exist in a single document.
+        //qp.setDefaultOperator(QueryParser.AND_OPERATOR);
+        
+        return qp;
+    }
+    
     private class VClassGroupSearchLink extends LinkTemplateModel {
 
         VClassGroupSearchLink(String querytext, VClassGroup classgroup) {
@@ -554,165 +659,6 @@ public class SolrPagedSearchController extends FreemarkerHttpServlet {
         }
     }
 
-    private List<VClass> getVClasses(VClassDao vclassDao, TopDocs topDocs,
-            IndexSearcher searherForRequest){
-        HashSet<String> typesInHits = getVClassUrisForHits(topDocs,searherForRequest);
-        List<VClass> classes = new ArrayList<VClass>(typesInHits.size());
-        
-        Iterator<String> it = typesInHits.iterator();
-        while(it.hasNext()){
-            String typeUri = it.next();
-            try{
-                if( VitroVocabulary.OWL_THING.equals(typeUri))
-                    continue;
-                VClass type = vclassDao.getVClassByURI(typeUri);
-                if( ! type.isAnonymous() &&
-                    type.getName() != null && !"".equals(type.getName()) &&
-                    type.getGroupURI() != null ) //don't display classes that aren't in classgroups
-                        classes.add(type);
-            }catch(Exception ex){
-                if( log.isDebugEnabled() )
-                    log.debug("could not add type " + typeUri, ex);
-            }
-        }
-        Collections.sort(classes, new Comparator<VClass>(){
-            public int compare(VClass o1, VClass o2) {
-                return o1.compareTo(o2);
-            }});
-        return classes;
-    }
-    
-    private HashSet<String> getVClassUrisForHits(TopDocs topDocs,
-            IndexSearcher searcherForRequest){
-        HashSet<String> typesInHits = new HashSet<String>();
-        for(int i=0; i<topDocs.scoreDocs.length; i++){
-            try{
-                Document doc=searcherForRequest.doc(topDocs.scoreDocs[i].doc);
-                Field[] types = doc.getFields(Entity2LuceneDoc.term.RDFTYPE);
-                if(types != null ){
-                    for(int j=0;j<types.length;j++){
-                        String typeUri = types[j].stringValue();
-                        typesInHits.add(typeUri);
-                    }
-                }
-            }catch(Exception e){
-                log.error("problems getting rdf:type for search hits",e);
-            }
-        }
-        return typesInHits;
-    }
-    
-    private Analyzer getAnalyzer(ServletContext servletContext) throws SearchException {
-        Object obj = servletContext.getAttribute(LuceneSetup.ANALYZER);
-        if( obj == null || !(obj instanceof Analyzer) )
-            throw new SearchException("Could not get analyzer");
-        else
-            return (Analyzer)obj;
-    }
-    
-    private Query getQuery(VitroRequest request,
-                       Analyzer analyzer, String querystr ) throws SearchException, ParseException {
-        Query query = null;
-        try{
-            //String querystr = request.getParameter(VitroQuery.QUERY_PARAMETER_NAME);
-            if( querystr == null){
-                log.error("There was no Parameter '"+VitroQuery.QUERY_PARAMETER_NAME
-                    +"' in the request.");
-                return null;
-            }else if( querystr.length() > MAX_QUERY_LENGTH ){
-                log.debug("The search was too long. The maximum " +
-                        "query length is " + MAX_QUERY_LENGTH );
-                return null;
-            }
-            
-            log.debug("Parsing query using QueryParser ");
-            
-            QueryParser parser = getQueryParser(analyzer);
-            query = parser.parse(querystr);
-            
-            String alpha = request.getParameter("alpha");
-            
-            if( alpha != null && !"".equals(alpha) && alpha.length() == 1){
-                
-                log.debug("Firing alpha query ");
-                log.debug("request.getParameter(alpha) is " + alpha);
-                
-                BooleanQuery boolQuery = new BooleanQuery();
-                boolQuery.add( query, BooleanClause.Occur.MUST );
-                boolQuery.add(
-                    new WildcardQuery(new Term(Entity2LuceneDoc.term.NAME, alpha+'*')),
-                    BooleanClause.Occur.MUST);
-                query = boolQuery;
-            }
-            
-            //check if this is classgroup filtered
-            Object param = request.getParameter("classgroup");
-            if( param != null && !"".equals(param)){
-                
-                log.debug("Firing classgroup query ");
-                log.debug("request.getParameter(classgroup) is "+ param.toString());
-                
-                BooleanQuery boolQuery = new BooleanQuery();
-                boolQuery.add( query, BooleanClause.Occur.MUST);
-                boolQuery.add(  new TermQuery(
-                                    new Term(Entity2LuceneDoc.term.CLASSGROUP_URI,
-                                            (String)param)),
-                                BooleanClause.Occur.MUST);
-                query = boolQuery;
-            }
-            
-            //check if this is rdf:type filtered
-            param = request.getParameter("type");
-            if(  param != null && !"".equals(param)){
-                log.debug("Firing type query ");
-                log.debug("request.getParameter(type) is "+ param.toString());
-                
-                BooleanQuery boolQuery = new BooleanQuery();
-                boolQuery.add( query, BooleanClause.Occur.MUST);
-                boolQuery.add(  new TermQuery(
-                                    new Term(Entity2LuceneDoc.term.RDFTYPE,
-                                            (String)param)),
-                                BooleanClause.Occur.MUST);
-                query = boolQuery;
-            }
-            
-            log.debug("Query: " + query);
-            
-        } catch (ParseException e) {
-            throw new ParseException(e.getMessage());
-        } catch (Exception ex){
-            throw new SearchException(ex.getMessage());
-        }
-        
-        return query;
-    }
-    
-    @SuppressWarnings("static-access")
-    private QueryParser getQueryParser(Analyzer analyzer){
-        //defaultSearchField indicates which field search against when there is no term
-        //indicated in the query string.
-        //The analyzer is needed so that we use the same analyzer on the search queries as
-        //was used on the text that was indexed.
-        //QueryParser qp = new QueryParser("NAME",analyzer);
-        //this sets the query parser to AND all of the query terms it finds.
-        //set up the map of stemmed field names -> unstemmed field names
-//        HashMap<String,String> map = new HashMap<String, String>();
-//        map.put(Entity2LuceneDoc.term.ALLTEXT,Entity2LuceneDoc.term.ALLTEXTUNSTEMMED);
-//        qp.setStemmedToUnstemmed(map);
-        
-        MultiFieldQueryParser qp = new MultiFieldQueryParser(Version.LUCENE_29, new String[]{
-                "name", "nameunstemmed", "type", "moniker", "ALLTEXT", "ALLTEXTUNSTEMMED", "nameraw" , "classLocalName", "classLocalNameLowerCase" }, analyzer);
-        
-        //  QueryParser qp = new QueryParser(Version.LUCENE_29, "name", analyzer);
-        
-        //AND_OPERATOR returns documents even if the terms in the query lie in different fields.
-        //The only requirement is that they exist in a single document.
-        //qp.setDefaultOperator(QueryParser.AND_OPERATOR);
-        
-        return qp;
-    }
-    
     private ExceptionResponseValues doSearchError(Throwable e, Format f) {
         Map<String, Object> body = new HashMap<String, Object>();
         body.put("message", "Search failed: " + e.getMessage());