- 浏览: 194818 次
文章分类
最新评论
-
code_xiaoke:
session可是有30分钟有效期的还有如果在分布式的环境下 ...
Java Web 用户登陆示例代码 -
xul0038:
http://www.baidu.com
Java Web 用户登陆示例代码 -
16866:
非常棒,配置信息呢
Nginx负载均衡 -
开发小菜:
什么意思,没明白?能不能写一个例子
JS 实现DIV随浏览器窗口大小变化
package com.zjr.service.impl;
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.wltea.analyzer.lucene.IKAnalyzer;
import org.wltea.analyzer.lucene.IKQueryParser;
import org.wltea.analyzer.lucene.IKSimilarity;
import com.zjr.model.User;
/**
 * Lucene (3.x) full-text index service for {@link User} beans, using the IK
 * Chinese analyzer for tokenizing and IKQueryParser for query building.
 *
 * Responsibilities: create/rebuild the index on disk, keyword search with
 * HTML highlighting, paged search (with and without sorting), and deletion
 * by userId.
 *
 * NOTE(review): this class keeps a shared mutable {@code indexSearcher}
 * field, so it is NOT thread-safe — confirm callers never share an instance
 * across threads.
 */
public class UserIndexService {

    private final Log logger = LogFactory.getLog(UserIndexService.class);

    // On-disk location of the Lucene index for User documents.
    private final String dirPath = "d:/temp/user";

    // IK Chinese analyzer, shared by indexing and by highlight re-tokenizing.
    Analyzer analyzer = new IKAnalyzer();
    Directory directory = null;
    IndexWriter writer = null;
    IndexSearcher indexSearcher = null;

    /**
     * Ensures the index directory exists and is writable.
     * BUG FIX: the error was previously logged only when debug logging was
     * enabled, hiding the failure in production; it is now always logged.
     */
    private void confirmDirs() {
        File indexFile = new File(dirPath);
        if (!indexFile.exists()) {
            indexFile.mkdirs();
        }
        if (!indexFile.exists() || !indexFile.canWrite()) {
            logger.error("索引文件目录创建失败或不可写入!");
        }
    }

    /**
     * Opens the FSDirectory used for writing. Must be called before
     * {@link #createIndex(List)}; {@link #createIndex(List)} calls it itself.
     */
    public void init() {
        confirmDirs();
        try {
            directory = FSDirectory.open(new File(dirPath));
        } catch (Exception e) {
            // BUG FIX: log unconditionally and keep the full stack trace
            // (was debug-guarded and only printed e.getCause()).
            logger.error("解除索引文件锁定失败!", e);
        }
    }

    /**
     * Rebuilds the index from scratch for the given users.
     * The third IndexWriter argument {@code true} means "create a brand new
     * index" (not incremental).
     *
     * @param userList users to index; each becomes one Lucene Document
     */
    public void createIndex(List<User> userList) {
        init();
        try {
            writer = new IndexWriter(directory, analyzer, true,
                    IndexWriter.MaxFieldLength.LIMITED);
            writer.setMergeFactor(500);
            writer.setMaxBufferedDocs(155);
            writer.setMaxFieldLength(Integer.MAX_VALUE);
            writeIndex(writer, userList);
            writer.optimize();
        } catch (IOException e) {
            logger.error("create index failed", e);
        } finally {
            // BUG FIX: release the writer (and its lock) even when indexing
            // fails; previously close() was skipped on the exception path.
            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException e) {
                    logger.error("close index writer failed", e);
                }
            }
        }
    }

    /**
     * Searches fields "userInfo" and "parameter1" for {@code keyword} and
     * returns the 20 most relevant hits with highlighted fragments.
     *
     * @return matching users, or null when the index does not exist or the
     *         search fails
     */
    public List<User> search(String keyword) {
        File indexFile = new File(dirPath);
        if (!indexFile.exists()) {
            return null;
        }
        try {
            // Multi-field, single-condition query. For combined conditions
            // use IKQueryParser.parseMultiField(fields, keyword, flags) with
            // BooleanClause.Occur.MUST (and) / SHOULD (or) / MUST_NOT (not).
            indexSearcher = new IndexSearcher(FSDirectory.open(indexFile));
            indexSearcher.setSimilarity(new IKSimilarity());
            String[] fields = new String[] { "userInfo", "parameter1" };
            Query query = IKQueryParser.parseMultiField(fields, keyword);
            // Top 20 hits by relevance.
            TopDocs topDocs = indexSearcher.search(query, 20);
            return hitsToQuery(topDocs.scoreDocs, query);
        } catch (IOException e) {
            logger.error("search failed", e);
        }
        return null;
    }

    /**
     * Maps score docs to User beans with highlighting and closes the
     * searcher.
     *
     * @return the mapped users, or null on failure
     */
    private List<User> hitsToQuery(ScoreDoc[] hits, Query query) {
        List<User> list = new ArrayList<User>();
        try {
            // One highlighter for all hits (the original rebuilt two
            // formatter/highlighter pairs per document).
            Highlighter highlighter = newHighlighter(query);
            for (int i = 0; i < hits.length; i++) {
                list.add(toUser(indexSearcher.doc(hits[i].doc), highlighter));
            }
            return list;
        } catch (CorruptIndexException e) {
            logger.error("index corrupted", e);
        } catch (IOException e) {
            logger.error("read hit failed", e);
        } catch (InvalidTokenOffsetsException e) {
            logger.error("highlight failed", e);
        } finally {
            // BUG FIX: previously the searcher was closed only on success.
            closeSearcher();
        }
        return null;
    }

    /** Adds one Document per user to the given writer. */
    public void writeIndex(IndexWriter writer, List<User> userList) {
        try {
            for (User u : userList) {
                writer.addDocument(getDoc(u));
            }
        } catch (IOException e) {
            logger.error("write index failed", e);
        }
    }

    /** Builds the Lucene Document for one user. */
    private Document getDoc(User user) {
        logger.info("用户ID 为" + user.getUserId() + " 索引被创建");
        Document doc = new Document();
        // Identifier-like fields: indexed but NOT tokenized.
        addField2Doc(doc, user, "userId", Store.YES, Index.NOT_ANALYZED);
        addField2Doc(doc, user, "userName", Store.YES, Index.NOT_ANALYZED);
        addField2Doc(doc, user, "userAge", Store.YES, Index.NOT_ANALYZED);
        // Free-text fields: tokenized and indexed (searchable/highlightable).
        addField2Doc(doc, user, "userInfo", Store.YES, Index.ANALYZED);
        addField2Doc(doc, user, "parameter1", Store.YES, Index.ANALYZED);
        addField2Doc(doc, user, "parameter2", Store.YES, Index.ANALYZED);
        addField2Doc(doc, user, "parameter3", Store.YES, Index.ANALYZED);
        addField2Doc(doc, user, "parameter4", Store.YES, Index.ANALYZED);
        return doc;
    }

    /**
     * Reads the named bean property and, when non-null, adds it to the
     * document with positions/offsets term vectors (needed for
     * fast highlighting).
     */
    private void addField2Doc(Document doc, Object bean, String name, Store s,
            Index i) {
        try {
            String value = BeanUtils.getProperty(bean, name);
            if (value != null) {
                doc.add(new Field(name, value, s, i,
                        Field.TermVector.WITH_POSITIONS_OFFSETS));
            }
        } catch (IllegalAccessException e) {
            logger.error("get bean property error", e);
        } catch (InvocationTargetException e) {
            logger.error("get bean property error", e);
        } catch (NoSuchMethodException e) {
            logger.error("get bean property error", e);
        }
    }

    /**
     * Paged search without sorting, with highlighting.
     *
     * @param pageNo   1-based page number
     * @param pageSize hits per page
     * @param keyword  search keyword
     * @return the requested page, or null when the index is missing or the
     *         search fails
     */
    public PageBean getPageQuery(int pageNo, int pageSize, String keyword) {
        File indexFile = new File(dirPath);
        if (!indexFile.exists()) {
            return null;
        }
        try {
            indexSearcher = new IndexSearcher(FSDirectory.open(indexFile));
            indexSearcher.setSimilarity(new IKSimilarity());
            String[] fields = new String[] { "userInfo", "parameter1" };
            BooleanClause.Occur[] flags = new BooleanClause.Occur[] {
                    BooleanClause.Occur.MUST, BooleanClause.Occur.SHOULD };
            Query query = IKQueryParser.parseMultiField(fields, keyword, flags);
            // Collect everything, then slice out the requested page.
            TopScoreDocCollector topCollector = TopScoreDocCollector.create(
                    indexSearcher.maxDoc(), true);
            indexSearcher.search(query, topCollector);
            ScoreDoc[] docs = topCollector.topDocs((pageNo - 1) * pageSize,
                    pageSize).scoreDocs;
            Highlighter highlighter = newHighlighter(query);
            List<User> result = new ArrayList<User>();
            for (ScoreDoc scdoc : docs) {
                result.add(toUser(indexSearcher.doc(scdoc.doc), highlighter));
            }
            PageBean pb = new PageBean();
            pb.setCurrentPage(pageNo);
            pb.setPageSize(pageSize);
            pb.setAllRow(topCollector.getTotalHits()); // total hit count
            pb.setList(result);
            return pb;
        } catch (IOException e) {
            logger.error("paged search failed", e);
        } catch (InvalidTokenOffsetsException e) {
            logger.error("highlight failed", e);
        } finally {
            // BUG FIX: the searcher was never closed here before.
            closeSearcher();
        }
        return null;
    }

    /**
     * Paged search with multi-field sorting (userId ascending first, then
     * userAge descending) and highlighting. At most the top 50 hits are
     * considered, matching the original behavior.
     *
     * @param pageNo   1-based page number
     * @param pageSize hits per page
     * @param keyword  search keyword
     * @return the requested page, or null when the index is missing, there
     *         are no hits, or the search fails
     */
    public PageBean getPageQuery2(int pageNo, int pageSize, String keyword) {
        File indexFile = new File(dirPath);
        if (!indexFile.exists()) {
            return null;
        }
        try {
            indexSearcher = new IndexSearcher(FSDirectory.open(indexFile));
            indexSearcher.setSimilarity(new IKSimilarity());
            String[] fields = new String[] { "userInfo", "parameter1" };
            BooleanClause.Occur[] flags = new BooleanClause.Occur[] {
                    BooleanClause.Occur.MUST, BooleanClause.Occur.SHOULD };
            Query query = IKQueryParser.parseMultiField(fields, keyword, flags);
            // Earlier sort fields take precedence; third arg: false =
            // ascending, true = descending.
            SortField[] sortFields = new SortField[] {
                    new SortField("userId", SortField.INT, false),
                    new SortField("userAge", SortField.INT, true) };
            TopDocs topDocs = indexSearcher.search(query, null, 50,
                    new Sort(sortFields));
            if (topDocs.totalHits != 0) {
                Highlighter highlighter = newHighlighter(query);
                int from = (pageNo - 1) * pageSize;
                // BUG FIX: the original looped to pageNo*pageSize without
                // checking the array length, throwing
                // ArrayIndexOutOfBoundsException on the last partial page.
                int to = Math.min(pageNo * pageSize, topDocs.scoreDocs.length);
                List<User> result = new ArrayList<User>();
                for (int i = from; i < to; i++) {
                    result.add(toUser(indexSearcher.doc(topDocs.scoreDocs[i].doc),
                            highlighter));
                }
                PageBean pb = new PageBean();
                pb.setCurrentPage(pageNo);
                pb.setPageSize(pageSize);
                pb.setAllRow(topDocs.totalHits); // total hit count
                pb.setList(result);
                return pb;
            }
        } catch (IOException e) {
            logger.error("sorted paged search failed", e);
        } catch (InvalidTokenOffsetsException e) {
            logger.error("highlight failed", e);
        } finally {
            // BUG FIX: the searcher was never closed here before.
            closeSearcher();
        }
        return null;
    }

    /**
     * Deletes all documents whose stored, non-analyzed "userId" field equals
     * the given id.
     *
     * @param userId the user id to remove from the index
     */
    public void deleIndex(String userId) {
        IndexReader reader = null;
        try {
            directory = FSDirectory.open(new File(dirPath));
            // readOnly=false so deletions are permitted.
            reader = IndexReader.open(directory, false);
            reader.deleteDocuments(new Term("userId", userId));
        } catch (IOException e) {
            logger.error("delete index failed", e);
        } finally {
            // BUG FIX: close (and thereby commit the deletes of) the reader
            // even when deletion throws.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    logger.error("close index reader failed", e);
                }
            }
        }
    }

    /** Builds the shared red-font HTML highlighter for a query. */
    private Highlighter newHighlighter(Query query) {
        return new Highlighter(
                new SimpleHTMLFormatter("<font color=\"red\">", "</font>"),
                new QueryScorer(query));
    }

    /**
     * Maps one stored Document to a User bean, highlighting "userInfo" and
     * "parameter1". Extracted from three near-identical copy-pasted loops.
     */
    private User toUser(Document doc, Highlighter highlighter)
            throws IOException, InvalidTokenOffsetsException {
        User u = new User();
        u.setUserId(Integer.parseInt(doc.get("userId")));
        u.setUserName(doc.get("userName"));
        u.setUserAge(Integer.parseInt(doc.get("userAge")));
        u.setUserInfo(highlight(highlighter, doc.get("userInfo")));
        u.setParameter1(highlight(highlighter, doc.get("parameter1")));
        u.setParameter2(doc.get("parameter2"));
        u.setParameter3(doc.get("parameter3"));
        u.setParameter4(doc.get("parameter4"));
        return u;
    }

    /**
     * Returns the best highlighted fragment of {@code text}, or the raw text
     * when no query term matches.
     * BUG FIX: tolerates a null stored field (the original passed null into
     * StringReader and threw NullPointerException).
     */
    private String highlight(Highlighter highlighter, String text)
            throws IOException, InvalidTokenOffsetsException {
        if (text == null) {
            return null;
        }
        TokenStream tokenStream = analyzer.tokenStream("text",
                new StringReader(text));
        String fragment = highlighter.getBestFragment(tokenStream, text);
        return fragment != null ? fragment : text;
    }

    /** Closes the shared searcher quietly; safe to call when it is null. */
    private void closeSearcher() {
        if (indexSearcher != null) {
            try {
                indexSearcher.close();
            } catch (IOException e) {
                logger.error("close searcher failed", e);
            }
        }
    }
}
import java.io.File;
import java.io.IOException;
import java.io.StringReader;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.beanutils.BeanUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.Field.Index;
import org.apache.lucene.document.Field.Store;
import org.apache.lucene.index.CorruptIndexException;
import org.apache.lucene.index.IndexReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.Term;
import org.apache.lucene.search.BooleanClause;
import org.apache.lucene.search.IndexSearcher;
import org.apache.lucene.search.Query;
import org.apache.lucene.search.ScoreDoc;
import org.apache.lucene.search.Sort;
import org.apache.lucene.search.SortField;
import org.apache.lucene.search.TopDocs;
import org.apache.lucene.search.TopScoreDocCollector;
import org.apache.lucene.search.highlight.Highlighter;
import org.apache.lucene.search.highlight.InvalidTokenOffsetsException;
import org.apache.lucene.search.highlight.QueryScorer;
import org.apache.lucene.search.highlight.SimpleHTMLFormatter;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FSDirectory;
import org.wltea.analyzer.lucene.IKAnalyzer;
import org.wltea.analyzer.lucene.IKQueryParser;
import org.wltea.analyzer.lucene.IKSimilarity;
import com.zjr.model.User;
/**
 * Lucene (3.x) full-text index service for {@link User} beans, using the IK
 * Chinese analyzer for tokenizing and IKQueryParser for query building.
 *
 * Responsibilities: create/rebuild the index on disk, keyword search with
 * HTML highlighting, paged search (with and without sorting), and deletion
 * by userId.
 *
 * NOTE(review): this class keeps a shared mutable {@code indexSearcher}
 * field, so it is NOT thread-safe — confirm callers never share an instance
 * across threads. NOTE(review): this is a byte-duplicate of the class pasted
 * earlier in this scraped page; a real source tree must keep only one copy.
 */
public class UserIndexService {

    private final Log logger = LogFactory.getLog(UserIndexService.class);

    // On-disk location of the Lucene index for User documents.
    private final String dirPath = "d:/temp/user";

    // IK Chinese analyzer, shared by indexing and by highlight re-tokenizing.
    Analyzer analyzer = new IKAnalyzer();
    Directory directory = null;
    IndexWriter writer = null;
    IndexSearcher indexSearcher = null;

    /**
     * Ensures the index directory exists and is writable.
     * BUG FIX: the error was previously logged only when debug logging was
     * enabled, hiding the failure in production; it is now always logged.
     */
    private void confirmDirs() {
        File indexFile = new File(dirPath);
        if (!indexFile.exists()) {
            indexFile.mkdirs();
        }
        if (!indexFile.exists() || !indexFile.canWrite()) {
            logger.error("索引文件目录创建失败或不可写入!");
        }
    }

    /**
     * Opens the FSDirectory used for writing. Must be called before
     * {@link #createIndex(List)}; {@link #createIndex(List)} calls it itself.
     */
    public void init() {
        confirmDirs();
        try {
            directory = FSDirectory.open(new File(dirPath));
        } catch (Exception e) {
            // BUG FIX: log unconditionally and keep the full stack trace
            // (was debug-guarded and only printed e.getCause()).
            logger.error("解除索引文件锁定失败!", e);
        }
    }

    /**
     * Rebuilds the index from scratch for the given users.
     * The third IndexWriter argument {@code true} means "create a brand new
     * index" (not incremental).
     *
     * @param userList users to index; each becomes one Lucene Document
     */
    public void createIndex(List<User> userList) {
        init();
        try {
            writer = new IndexWriter(directory, analyzer, true,
                    IndexWriter.MaxFieldLength.LIMITED);
            writer.setMergeFactor(500);
            writer.setMaxBufferedDocs(155);
            writer.setMaxFieldLength(Integer.MAX_VALUE);
            writeIndex(writer, userList);
            writer.optimize();
        } catch (IOException e) {
            logger.error("create index failed", e);
        } finally {
            // BUG FIX: release the writer (and its lock) even when indexing
            // fails; previously close() was skipped on the exception path.
            if (writer != null) {
                try {
                    writer.close();
                } catch (IOException e) {
                    logger.error("close index writer failed", e);
                }
            }
        }
    }

    /**
     * Searches fields "userInfo" and "parameter1" for {@code keyword} and
     * returns the 20 most relevant hits with highlighted fragments.
     *
     * @return matching users, or null when the index does not exist or the
     *         search fails
     */
    public List<User> search(String keyword) {
        File indexFile = new File(dirPath);
        if (!indexFile.exists()) {
            return null;
        }
        try {
            // Multi-field, single-condition query. For combined conditions
            // use IKQueryParser.parseMultiField(fields, keyword, flags) with
            // BooleanClause.Occur.MUST (and) / SHOULD (or) / MUST_NOT (not).
            indexSearcher = new IndexSearcher(FSDirectory.open(indexFile));
            indexSearcher.setSimilarity(new IKSimilarity());
            String[] fields = new String[] { "userInfo", "parameter1" };
            Query query = IKQueryParser.parseMultiField(fields, keyword);
            // Top 20 hits by relevance.
            TopDocs topDocs = indexSearcher.search(query, 20);
            return hitsToQuery(topDocs.scoreDocs, query);
        } catch (IOException e) {
            logger.error("search failed", e);
        }
        return null;
    }

    /**
     * Maps score docs to User beans with highlighting and closes the
     * searcher.
     *
     * @return the mapped users, or null on failure
     */
    private List<User> hitsToQuery(ScoreDoc[] hits, Query query) {
        List<User> list = new ArrayList<User>();
        try {
            // One highlighter for all hits (the original rebuilt two
            // formatter/highlighter pairs per document).
            Highlighter highlighter = newHighlighter(query);
            for (int i = 0; i < hits.length; i++) {
                list.add(toUser(indexSearcher.doc(hits[i].doc), highlighter));
            }
            return list;
        } catch (CorruptIndexException e) {
            logger.error("index corrupted", e);
        } catch (IOException e) {
            logger.error("read hit failed", e);
        } catch (InvalidTokenOffsetsException e) {
            logger.error("highlight failed", e);
        } finally {
            // BUG FIX: previously the searcher was closed only on success.
            closeSearcher();
        }
        return null;
    }

    /** Adds one Document per user to the given writer. */
    public void writeIndex(IndexWriter writer, List<User> userList) {
        try {
            for (User u : userList) {
                writer.addDocument(getDoc(u));
            }
        } catch (IOException e) {
            logger.error("write index failed", e);
        }
    }

    /** Builds the Lucene Document for one user. */
    private Document getDoc(User user) {
        logger.info("用户ID 为" + user.getUserId() + " 索引被创建");
        Document doc = new Document();
        // Identifier-like fields: indexed but NOT tokenized.
        addField2Doc(doc, user, "userId", Store.YES, Index.NOT_ANALYZED);
        addField2Doc(doc, user, "userName", Store.YES, Index.NOT_ANALYZED);
        addField2Doc(doc, user, "userAge", Store.YES, Index.NOT_ANALYZED);
        // Free-text fields: tokenized and indexed (searchable/highlightable).
        addField2Doc(doc, user, "userInfo", Store.YES, Index.ANALYZED);
        addField2Doc(doc, user, "parameter1", Store.YES, Index.ANALYZED);
        addField2Doc(doc, user, "parameter2", Store.YES, Index.ANALYZED);
        addField2Doc(doc, user, "parameter3", Store.YES, Index.ANALYZED);
        addField2Doc(doc, user, "parameter4", Store.YES, Index.ANALYZED);
        return doc;
    }

    /**
     * Reads the named bean property and, when non-null, adds it to the
     * document with positions/offsets term vectors (needed for
     * fast highlighting).
     */
    private void addField2Doc(Document doc, Object bean, String name, Store s,
            Index i) {
        try {
            String value = BeanUtils.getProperty(bean, name);
            if (value != null) {
                doc.add(new Field(name, value, s, i,
                        Field.TermVector.WITH_POSITIONS_OFFSETS));
            }
        } catch (IllegalAccessException e) {
            logger.error("get bean property error", e);
        } catch (InvocationTargetException e) {
            logger.error("get bean property error", e);
        } catch (NoSuchMethodException e) {
            logger.error("get bean property error", e);
        }
    }

    /**
     * Paged search without sorting, with highlighting.
     *
     * @param pageNo   1-based page number
     * @param pageSize hits per page
     * @param keyword  search keyword
     * @return the requested page, or null when the index is missing or the
     *         search fails
     */
    public PageBean getPageQuery(int pageNo, int pageSize, String keyword) {
        File indexFile = new File(dirPath);
        if (!indexFile.exists()) {
            return null;
        }
        try {
            indexSearcher = new IndexSearcher(FSDirectory.open(indexFile));
            indexSearcher.setSimilarity(new IKSimilarity());
            String[] fields = new String[] { "userInfo", "parameter1" };
            BooleanClause.Occur[] flags = new BooleanClause.Occur[] {
                    BooleanClause.Occur.MUST, BooleanClause.Occur.SHOULD };
            Query query = IKQueryParser.parseMultiField(fields, keyword, flags);
            // Collect everything, then slice out the requested page.
            TopScoreDocCollector topCollector = TopScoreDocCollector.create(
                    indexSearcher.maxDoc(), true);
            indexSearcher.search(query, topCollector);
            ScoreDoc[] docs = topCollector.topDocs((pageNo - 1) * pageSize,
                    pageSize).scoreDocs;
            Highlighter highlighter = newHighlighter(query);
            List<User> result = new ArrayList<User>();
            for (ScoreDoc scdoc : docs) {
                result.add(toUser(indexSearcher.doc(scdoc.doc), highlighter));
            }
            PageBean pb = new PageBean();
            pb.setCurrentPage(pageNo);
            pb.setPageSize(pageSize);
            pb.setAllRow(topCollector.getTotalHits()); // total hit count
            pb.setList(result);
            return pb;
        } catch (IOException e) {
            logger.error("paged search failed", e);
        } catch (InvalidTokenOffsetsException e) {
            logger.error("highlight failed", e);
        } finally {
            // BUG FIX: the searcher was never closed here before.
            closeSearcher();
        }
        return null;
    }

    /**
     * Paged search with multi-field sorting (userId ascending first, then
     * userAge descending) and highlighting. At most the top 50 hits are
     * considered, matching the original behavior.
     *
     * @param pageNo   1-based page number
     * @param pageSize hits per page
     * @param keyword  search keyword
     * @return the requested page, or null when the index is missing, there
     *         are no hits, or the search fails
     */
    public PageBean getPageQuery2(int pageNo, int pageSize, String keyword) {
        File indexFile = new File(dirPath);
        if (!indexFile.exists()) {
            return null;
        }
        try {
            indexSearcher = new IndexSearcher(FSDirectory.open(indexFile));
            indexSearcher.setSimilarity(new IKSimilarity());
            String[] fields = new String[] { "userInfo", "parameter1" };
            BooleanClause.Occur[] flags = new BooleanClause.Occur[] {
                    BooleanClause.Occur.MUST, BooleanClause.Occur.SHOULD };
            Query query = IKQueryParser.parseMultiField(fields, keyword, flags);
            // Earlier sort fields take precedence; third arg: false =
            // ascending, true = descending.
            SortField[] sortFields = new SortField[] {
                    new SortField("userId", SortField.INT, false),
                    new SortField("userAge", SortField.INT, true) };
            TopDocs topDocs = indexSearcher.search(query, null, 50,
                    new Sort(sortFields));
            if (topDocs.totalHits != 0) {
                Highlighter highlighter = newHighlighter(query);
                int from = (pageNo - 1) * pageSize;
                // BUG FIX: the original looped to pageNo*pageSize without
                // checking the array length, throwing
                // ArrayIndexOutOfBoundsException on the last partial page.
                int to = Math.min(pageNo * pageSize, topDocs.scoreDocs.length);
                List<User> result = new ArrayList<User>();
                for (int i = from; i < to; i++) {
                    result.add(toUser(indexSearcher.doc(topDocs.scoreDocs[i].doc),
                            highlighter));
                }
                PageBean pb = new PageBean();
                pb.setCurrentPage(pageNo);
                pb.setPageSize(pageSize);
                pb.setAllRow(topDocs.totalHits); // total hit count
                pb.setList(result);
                return pb;
            }
        } catch (IOException e) {
            logger.error("sorted paged search failed", e);
        } catch (InvalidTokenOffsetsException e) {
            logger.error("highlight failed", e);
        } finally {
            // BUG FIX: the searcher was never closed here before.
            closeSearcher();
        }
        return null;
    }

    /**
     * Deletes all documents whose stored, non-analyzed "userId" field equals
     * the given id.
     *
     * @param userId the user id to remove from the index
     */
    public void deleIndex(String userId) {
        IndexReader reader = null;
        try {
            directory = FSDirectory.open(new File(dirPath));
            // readOnly=false so deletions are permitted.
            reader = IndexReader.open(directory, false);
            reader.deleteDocuments(new Term("userId", userId));
        } catch (IOException e) {
            logger.error("delete index failed", e);
        } finally {
            // BUG FIX: close (and thereby commit the deletes of) the reader
            // even when deletion throws.
            if (reader != null) {
                try {
                    reader.close();
                } catch (IOException e) {
                    logger.error("close index reader failed", e);
                }
            }
        }
    }

    /** Builds the shared red-font HTML highlighter for a query. */
    private Highlighter newHighlighter(Query query) {
        return new Highlighter(
                new SimpleHTMLFormatter("<font color=\"red\">", "</font>"),
                new QueryScorer(query));
    }

    /**
     * Maps one stored Document to a User bean, highlighting "userInfo" and
     * "parameter1". Extracted from three near-identical copy-pasted loops.
     */
    private User toUser(Document doc, Highlighter highlighter)
            throws IOException, InvalidTokenOffsetsException {
        User u = new User();
        u.setUserId(Integer.parseInt(doc.get("userId")));
        u.setUserName(doc.get("userName"));
        u.setUserAge(Integer.parseInt(doc.get("userAge")));
        u.setUserInfo(highlight(highlighter, doc.get("userInfo")));
        u.setParameter1(highlight(highlighter, doc.get("parameter1")));
        u.setParameter2(doc.get("parameter2"));
        u.setParameter3(doc.get("parameter3"));
        u.setParameter4(doc.get("parameter4"));
        return u;
    }

    /**
     * Returns the best highlighted fragment of {@code text}, or the raw text
     * when no query term matches.
     * BUG FIX: tolerates a null stored field (the original passed null into
     * StringReader and threw NullPointerException).
     */
    private String highlight(Highlighter highlighter, String text)
            throws IOException, InvalidTokenOffsetsException {
        if (text == null) {
            return null;
        }
        TokenStream tokenStream = analyzer.tokenStream("text",
                new StringReader(text));
        String fragment = highlighter.getBestFragment(tokenStream, text);
        return fragment != null ? fragment : text;
    }

    /** Closes the shared searcher quietly; safe to call when it is null. */
    private void closeSearcher() {
        if (indexSearcher != null) {
            try {
                indexSearcher.close();
            } catch (IOException e) {
                logger.error("close searcher failed", e);
            }
        }
    }
}
发表评论
-
java实现动态切换上网IP (ADSL拨号上网) java开发
2013-04-24 10:06 1217动态切换IP的实现主是也由Windows的rasdial命令提 ... -
JAVA字符串处理函数
2013-04-12 09:21 1029Java中的字符串也是一连串的字符。但是与许多其他的计算机语 ... -
(转)Lucene打分规则与Similarity模块详解
2013-02-06 14:08 1126搜索排序结果的控制 Lu ... -
Compass将lucene、Spring、Hibernate三者结合
2013-02-01 11:02 1639版权声明:转载时请以超链接形式标明文章原始出处和作者信息及本声 ... -
Lucene3.0详解
2013-02-01 10:57 1338★第一部分:概述 1. 我 ... -
Java Web 用户登陆示例代码
2013-02-01 09:56 57996实现功能: 1、用户登陆、注销 2、利用session记 ... -
Java对数函数及Java对数运算
2013-02-01 09:47 6718Java对数函数的计算方法非常有问题,然而在API中却有惊人 ... -
Lucene为不同字段指定不同分词器(转)
2013-01-31 17:34 3370在lucene使用过程中,如 ... -
域名管理与解析原理 — 《Java邮件开发详解》读书笔记
2013-01-31 14:56 1652一 基本概念 1. 域名:域名是由圆点分开一串单词或缩写组 ... -
优秀的Java工程师需要掌握的10项技能
2013-01-31 14:04 1788编程专业相对于计算机领域其他专业来讲,是一门比较难以修炼的专业 ... -
Web开发入门不得不看
2013-01-28 17:31 994如今,各种互联网的Web ... -
MVC框架的映射和解耦
2013-01-25 21:37 785最近在写一个业务上用到的框架,回想起接触过的一些MVC框架, ... -
JAVA发送EMAIL的例子
2013-07-09 09:44 863import javax.mail.*; ... -
SSH + Lucene + 分页 + 排序 + 高亮 模拟简单新闻网站搜索引擎
2012-11-19 09:55 1336前两天看到了一个中国新闻网,这个网站的搜索form的actio ... -
Lucene多字段搜索
2012-11-19 09:53 927最近在学习Lucene的过程中遇到了需要多域搜索并排序的问题, ... -
lucene之sort
2012-11-16 15:06 1040package cn.zqh.lucene.sort; im ... -
Nginx负载均衡
2012-11-16 11:45 7533最近迷上了Nginx,真实麻雀虽小,五脏俱全..功能实在强大. ... -
Lucene相关度排序的调整
2012-11-16 11:38 1675Lucene的搜索结果默认按 ... -
HashSet重复元素判断
2012-10-15 16:37 9093HashSet不能添加重复的元素,当调用add(Object) ... -
JAVA提高教程(2)-认识Set集合之HashSet
2012-10-09 09:44 932集合在Java里面的作用非 ...
相关推荐
Lucene 3.6.1: 中文分词、创建索引库、排序、多字段分页查询以及高亮显示源 希望对大家有帮助, 我自己建立的mysql数据库 使用了IKAnalyzer分词器源代码,大家可以自己设置停词,也可以自己改写算法
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
GWT Advanced Table 是一个基于 GWT 框架的网页表格组件,可实现分页数据显示、数据排序和过滤等功能! Google Tag Library 该标记库和 Google 有关。使用该标记库,利用 Google 为你的网站提供网站查询,并且可以...
内容索引:Java源码,窗体界面,3DMenu Java 3DMenu 界面源码,有人说用到游戏中不错,其实平时我信编写Java应用程序时候也能用到吧,不一定非要局限于游戏吧,RES、SRC资源都有,都在压缩包内。 Java zip压缩包查看...
内容索引:Java源码,窗体界面,3DMenu Java 3DMenu 界面源码,有人说用到游戏中不错,其实平时我信编写Java应用程序时候也能用到吧,不一定非要局限于游戏吧,RES、SRC资源都有,都在压缩包内。 Java zip压缩包查看...