spring data mongo Aggregation 聚合查询 子文档过滤
原始文档
{
"_id" : ObjectId("5ad43fbdbe65d9168fe22a6d"),
"key" : "test",
"values" : [
{
"p" : 20.3199996948242,
"d" : -1,
"t" : ISODate("2018-01-02T01:35:00.000Z")
},
{
"p" : 20.7199993133545,
"d" : 1,
"t" : ISODate("2018-01-02T02:38:00.000Z")
},
{
"p" : 20.5900001525879,
"d" : -1,
"t" : ISODate("2018-01-02T02:56:00.000Z")
},
{
"p" : 20.75,
"d" : 1,
"t" : ISODate("2018-01-02T03:10:00.000Z")
},
{
"p" : 20.6599998474121,
"d" : -1,
"t" : ISODate("2018-01-02T03:27:00.000Z")
},
{
"p" : 20.9699993133545,
"d" : 1,
"t" : ISODate("2018-01-02T06:03:00.000Z")
},
{
"p" : 20.6800003051758,
"d" : -1,
"t" : ISODate("2018-01-02T06:37:00.000Z")
},
{
"p" : 20.8500003814697,
"d" : 1,
"t" : ISODate("2018-01-02T06:46:00.000Z")
},
{
"p" : 19.0,
"d" : -1,
"t" : ISODate("2018-01-03T01:33:00.000Z")
},
{
"p" : 20.3999996185303,
"d" : 1,
"t" : ISODate("2018-01-03T01:44:00.000Z")
},
{
"p" : 20.25,
"d" : -1,
"t" : ISODate("2018-01-03T01:52:00.000Z")
},
{
"p" : 20.5900001525879,
"d" : 1,
"t" : ISODate("2018-01-03T02:13:00.000Z")
},
{
"p" : 20.3799991607666,
"d" : -1,
"t" : ISODate("2018-01-03T02:19:00.000Z")
},
{
"p" : 20.6000003814697,
"d" : 1,
"t" : ISODate("2018-01-03T02:35:00.000Z")
},
{
"p" : 20.3600006103516,
"d" : -1,
"t" : ISODate("2018-01-03T02:58:00.000Z")
},
{
"p" : 20.8799991607666,
"d" : 1,
"t" : ISODate("2018-01-03T05:01:00.000Z")
},
{
"p" : 20.6000003814697,
"d" : -1,
"t" : ISODate("2018-01-03T05:16:00.000Z")
},
{
"p" : 20.8400001525879,
"d" : 1,
"t" : ISODate("2018-01-03T05:45:00.000Z")
},
{
"p" : 20.5,
"d" : -1,
"t" : ISODate("2018-01-03T06:13:00.000Z")
},
{
"p" : 20.7600002288818,
"d" : 1,
"t" : ISODate("2018-01-03T06:32:00.000Z")
},
{
"p" : 20.6200008392334,
"d" : -1,
"t" : ISODate("2018-01-03T06:39:00.000Z")
}
]
}
原生查询语句实现文档匹配和子文档过滤
// Native pipeline: match one document by _id, then use $filter inside a
// $project stage to keep only the sub-documents whose timestamp t falls
// within [startDate, endDate].
db.haiyoung_test.aggregate([
    {
        "$match":{
            // _id is stored as an ObjectId (see the sample document), so it
            // must be matched as one — a plain string never matches.
            "_id":ObjectId("5ad43fbdbe65d9168fe22a6d")
        }
    },
    {
        "$project":{
            "_id":1,
            "key":1,
            "values":{
                "$filter":{
                    "input":"$values",
                    "as":"src",
                    "cond":{
                        "$and":[
                            // ISODate() requires strict ISO-8601: date and
                            // time separated by 'T', not a space.
                            {"$gte":["$$src.t", ISODate("2018-01-02T00:00:00.000Z")]},
                            {"$lte":["$$src.t", ISODate("2018-01-03T00:00:00.000Z")]}
                        ]
                    }
                }
            }
        }
    }
]);
spring-data-mongo 实现文档匹配和子文档过滤
/**
 * Mapped document for the "haiyoung_test" collection: an id, a key, and an
 * embedded array of value sub-documents of shape {p, d, t} (see the sample
 * document above).
 */
@Document(collection = "haiyoung_test")
// NOTE(review): the index definition references "id_", the id field is
// annotated @Field("id_"), but the ID constant below is "_id" — these three
// disagree; confirm which field name is actually persisted before relying on
// the index or the constant.
@CompoundIndex(name="query_index",def="{id_:-1}")
public class TargetResult {
// model
// Field-name constants used when building aggregation pipelines against
// this document.
public static final String ID = "_id";
public static final String KEY = "key";
public static final String VALUE = "values";
// Document id. NOTE(review): @Field on an @Id property is suspicious —
// presumably Spring Data maps the id to "_id" regardless; TODO confirm.
@Id
@Field("id_")
private String targetId;
@Field("key")
private String key;
// Each entry is one sub-document with keys "p", "d", "t".
@Field("values")
private List<Map<String, Object>> values;
// getters/setters/etc. omitted in the original listing:
...
}
/**
 * Adapter that lets a raw {@link DBObject} operator document be used as an
 * {@link AggregationExpression} in aggregation pipeline stages such as
 * project and group.
 */
public class BaseAggregateExpression implements AggregationExpression {

    /** The raw operator document, e.g. a {@code $filter} expression. */
    private final DBObject operator;

    public BaseAggregateExpression(DBObject operator) {
        this.operator = operator;
    }

    @Override
    public Document toDocument(AggregationOperationContext context) {
        // Round-trip through JSON so the DBObject becomes a bson Document,
        // then let the context map any field names it knows about.
        String json = operator.toString();
        return context.getMappedObject(Document.parse(json));
    }
}
/**
 * Loads a single {@link TargetResult} by id and filters its embedded
 * {@code values} array down to the entries whose timestamp {@code t} lies
 * within {@code [startDate, endDate]} (inclusive on both ends).
 *
 * Pipeline: {@code [$match on _id, $project with a $filter expression]} —
 * the Java equivalent of the native query shown above.
 *
 * @param target    carries the target id and the collection name
 * @param insId     unused in this method — NOTE(review): confirm whether it
 *                  was meant to participate in the match
 * @param startDate inclusive lower bound compared against {@code values.t}
 * @param endDate   inclusive upper bound compared against {@code values.t}
 * @return the unique mapped result, or {@code null} when nothing matched
 * @throws HException when the target id is empty
 */
public TargetResult getTargetResult(Target target, String insId, Date startDate, Date endDate) {
    String targetId = target.getTargetId();
    // BUG FIX: the original checked isNotEmpty and therefore threw
    // "targetId为空" ("targetId is empty") for every VALID id while letting an
    // empty id through. Validate the id is present before doing any work.
    if (StringUtils.isEmpty(targetId)) {
        throw new HException(HErrorCode.ARGUMENT_ILLEGAL, "targetId为空");
    }

    // Build the $filter condition:
    //   { $and: [ { $gte: ["$$src.t", startDate] },
    //             { $lte: ["$$src.t", endDate] } ] }
    // Arrays map to JSON arrays, LinkedHashMap/BasicDBObject map to {}.
    Map<String, Object> gteCond = new LinkedHashMap<>();
    gteCond.put("$gte", new Object[] {"$$src.t", startDate});
    Map<String, Object> lteCond = new LinkedHashMap<>();
    lteCond.put("$lte", new Object[] {"$$src.t", endDate});
    Object[] andConds = new Object[] {gteCond, lteCond};

    // { $filter: { input: "$values", as: "src", cond: { $and: [...] } } }
    Map<String, Object> filterBody = new LinkedHashMap<>();
    filterBody.put("input", "$" + TargetResult.VALUE);
    filterBody.put("as", "src");
    filterBody.put("cond", new BasicDBObject("$and", andConds));
    DBObject filterOpr = new BasicDBObject("$filter", new BasicDBObject(filterBody));
    BaseAggregateExpression filter = new BaseAggregateExpression(filterOpr);

    // Pipeline: first match the document, then project with the sub-document
    // filter applied to the values array.
    MatchOperation match = Aggregation.match(Criteria.where(TargetResult.ID).is(targetId));
    ProjectionOperation project =
            Aggregation.project(TargetResult.ID, TargetResult.KEY).and(filter).as(TargetResult.VALUE);
    Aggregation aggregation = Aggregation.newAggregation(match, project);

    AggregationResults<TargetResult> results =
            mongoTemplate_rtqs.aggregate(aggregation, target.getCollection(), TargetResult.class);
    return results.getUniqueMappedResult();
}
过滤后的文档
{
"_id" : ObjectId("5ad43fbdbe65d9168fe22a6d"),
"key" : "test",
"values" : [
{
"p" : 20.3199996948242,
"d" : -1,
"t" : ISODate("2018-01-02T01:35:00.000Z")
},
{
"p" : 20.7199993133545,
"d" : 1,
"t" : ISODate("2018-01-02T02:38:00.000Z")
},
{
"p" : 20.5900001525879,
"d" : -1,
"t" : ISODate("2018-01-02T02:56:00.000Z")
},
{
"p" : 20.75,
"d" : 1,
"t" : ISODate("2018-01-02T03:10:00.000Z")
},
{
"p" : 20.6599998474121,
"d" : -1,
"t" : ISODate("2018-01-02T03:27:00.000Z")
},
{
"p" : 20.9699993133545,
"d" : 1,
"t" : ISODate("2018-01-02T06:03:00.000Z")
},
{
"p" : 20.6800003051758,
"d" : -1,
"t" : ISODate("2018-01-02T06:37:00.000Z")
},
{
"p" : 20.8500003814697,
"d" : 1,
"t" : ISODate("2018-01-02T06:46:00.000Z")
}
]
}