/**
 * Demonstrates Lucene 3.4 scoring: indexes five small documents (the last one
 * with a 2x document boost), runs the query "sex:1 AND income:10", and prints
 * each hit's score together with {@code IndexSearcher.explain()} so the
 * tf/idf/queryNorm/fieldNorm breakdown can be inspected.
 *
 * @param args unused
 * @throws Exception on any indexing, parsing, or search failure (demo code)
 */
public static void main(String[] args) throws Exception {
    // In-memory index: the demo leaves no state on disk.
    Directory dir = new RAMDirectory();
    IndexWriterConfig conf =
            new IndexWriterConfig(Version.LUCENE_34, new StandardAnalyzer(Version.LUCENE_34));
    IndexWriter writer = new IndexWriter(dir, conf);
    try {
        // Parallel arrays: sex[i] and income[i] belong to document i.
        String[] sexValues    = {"1", "0", "1", "1", "1"};
        String[] incomeValues = {"10", "10", "11", "10", "10"};
        for (int i = 0; i < sexValues.length; i++) {
            Document doc = new Document();
            doc.add(new Field("sex", sexValues[i], Field.Store.YES, Field.Index.ANALYZED));
            doc.add(new Field("income", incomeValues[i], Field.Store.YES, Field.Index.ANALYZED));
            if (i == sexValues.length - 1) {
                // Boost the last document so its score is doubled in the ranking
                // (visible as fieldNorm=2.0 in the explain() output).
                doc.setBoost(2f);
            }
            writer.addDocument(doc);
        }
    } finally {
        // Commit and release the writer even if indexing throws.
        writer.close();
    }

    IndexSearcher searcher = new IndexSearcher(dir);
    try {
        // "nickname" is only the default field; the query below names its
        // fields explicitly, so the default is never used.
        QueryParser qp =
                new QueryParser(Version.LUCENE_34, "nickname", new StandardAnalyzer(Version.LUCENE_34));
        Query q = qp.parse("sex:1 AND income:10");
        TopDocs hits = searcher.search(q, 100);
        for (ScoreDoc sdoc : hits.scoreDocs) {
            Document doc = searcher.doc(sdoc.doc);
            System.out.print(doc.get("sex") + "\t\t");
            System.out.println(sdoc.score);
            // Show how Lucene derived the score (tf, idf, queryNorm, fieldNorm).
            System.out.println(searcher.explain(q, sdoc.doc));
        }
    } finally {
        // Previously leaked: neither the searcher nor the directory was closed.
        searcher.close();
        dir.close();
    }
}
输出结果
1 2.828427
2.828427 = (MATCH) sum of:
1.4142135 = (MATCH) weight(sex:1 in 4), product of:
0.70710677 = queryWeight(sex:1), product of:
1.0 = idf(docFreq=4, maxDocs=5)
0.70710677 = queryNorm
2.0 = (MATCH) fieldWeight(sex:1 in 4), product of:
1.0 = tf(termFreq(sex:1)=1)
1.0 = idf(docFreq=4, maxDocs=5)
2.0 = fieldNorm(field=sex, doc=4)
1.4142135 = (MATCH) weight(income:10 in 4), product of:
0.70710677 = queryWeight(income:10), product of:
1.0 = idf(docFreq=4, maxDocs=5)
0.70710677 = queryNorm
2.0 = (MATCH) fieldWeight(income:10 in 4), product of:
1.0 = tf(termFreq(income:10)=1)
1.0 = idf(docFreq=4, maxDocs=5)
2.0 = fieldNorm(field=income, doc=4)
1 1.4142135
1.4142135 = (MATCH) sum of:
0.70710677 = (MATCH) weight(sex:1 in 0), product of:
0.70710677 = queryWeight(sex:1), product of:
1.0 = idf(docFreq=4, maxDocs=5)
0.70710677 = queryNorm
1.0 = (MATCH) fieldWeight(sex:1 in 0), product of:
1.0 = tf(termFreq(sex:1)=1)
1.0 = idf(docFreq=4, maxDocs=5)
1.0 = fieldNorm(field=sex, doc=0)
0.70710677 = (MATCH) weight(income:10 in 0), product of:
0.70710677 = queryWeight(income:10), product of:
1.0 = idf(docFreq=4, maxDocs=5)
0.70710677 = queryNorm
1.0 = (MATCH) fieldWeight(income:10 in 0), product of:
1.0 = tf(termFreq(income:10)=1)
1.0 = idf(docFreq=4, maxDocs=5)
1.0 = fieldNorm(field=income, doc=0)
1 1.4142135
1.4142135 = (MATCH) sum of:
0.70710677 = (MATCH) weight(sex:1 in 3), product of:
0.70710677 = queryWeight(sex:1), product of:
1.0 = idf(docFreq=4, maxDocs=5)
0.70710677 = queryNorm
1.0 = (MATCH) fieldWeight(sex:1 in 3), product of:
1.0 = tf(termFreq(sex:1)=1)
1.0 = idf(docFreq=4, maxDocs=5)
1.0 = fieldNorm(field=sex, doc=3)
0.70710677 = (MATCH) weight(income:10 in 3), product of:
0.70710677 = queryWeight(income:10), product of:
1.0 = idf(docFreq=4, maxDocs=5)
0.70710677 = queryNorm
1.0 = (MATCH) fieldWeight(income:10 in 3), product of:
1.0 = tf(termFreq(income:10)=1)
1.0 = idf(docFreq=4, maxDocs=5)
1.0 = fieldNorm(field=income, doc=3)
score 简单说是由每个匹配字段上的 tf * idf * boost * lengthNorm（再统一乘以 queryNorm）相加计算得出的。每个字段的得分又由 queryWeight 与 fieldWeight 两部分相乘而来，与上面 explain() 的输出一一对应。
lucene3.4文档得分
猜你喜欢
转载自zxh116116.iteye.com/blog/1175976
今日推荐
周排行