First, build the statistics visualization you need in Kibana.
Then click Inspect in the top-right corner and copy the request in JSON format.
Paste it into Dev Tools to confirm that the response returns the data we need as JSON.
Save the JSON request, then start writing the Python script.
"""Query Elasticsearch for the most frequent error messages of the last 7 days.

Replays the aggregation request captured from Kibana's Inspect panel:
a ``terms`` aggregation on ``message.keyword``, restricted to documents
with ``level:error`` inside a 7-day time window, printing the top 30 buckets.
"""
from elasticsearch import Elasticsearch
import datetime

# Connect to the Elasticsearch node.
es = Elasticsearch([{'host': '10.3.2.1', 'port': 9200}])

# Timezone handling.  The original code used naive ``datetime.utcnow()``
# and then formatted with '%z' -- but '%z' emits an EMPTY string for naive
# datetimes, so the timestamps reached Elasticsearch with no UTC offset and
# the manually-added +8h shift made the range end 8 hours in the future
# (ES interprets offset-less timestamps as UTC).  Timezone-aware datetimes
# plus ``isoformat()`` produce unambiguous RFC 3339 strings instead.
UTC = datetime.timezone.utc
LOCAL_TZ = datetime.timezone(datetime.timedelta(hours=8))  # UTC+8 local time

# Window start: 6 days 16 hours before now in UTC -- i.e. 7 days back as
# seen from UTC+8, matching the original script's intent.
last_7day = datetime.datetime.now(UTC) - datetime.timedelta(days=6, hours=16)
# Window end: the current moment, expressed in local (UTC+8) time.
now_time = datetime.datetime.now(UTC).astimezone(LOCAL_TZ)

# e.g. '2024-01-01T12:00:00.123456+08:00' -- explicit offset included.
last_7day = last_7day.isoformat()
now_time = now_time.isoformat()

# Request body (DSL) as captured from Kibana.
query_json = {
    "aggs": {
        "2": {
            "terms": {
                "field": "message.keyword",
                "size": 30,
                "order": {
                    "_count": "desc"
                }
            }
        }
    },
    "size": 0,
    "_source": {
        "excludes": []
    },
    "stored_fields": [
        "*"
    ],
    "script_fields": {},
    "docvalue_fields": [
        {
            "field": "@timestamp",
            "format": "date_time"
        },
        {
            "field": "timestamp",
            "format": "date_time"
        }
    ],
    "query": {
        "bool": {
            "must": [
                {
                    "query_string": {
                        "query": "level:error",
                        # JSON boolean ``true`` (the original sent the
                        # string "true"; ES coerces it, but the captured
                        # Kibana request uses a real boolean).
                        "analyze_wildcard": True,
                        "default_field": "*"
                    }
                },
                {
                    "range": {
                        "@timestamp": {
                            "gte": last_7day,
                            "lte": now_time,
                        }
                    }
                }
            ],
            "filter": [],
            "should": [],
            "must_not": []
        }
    }
}

# Run the search and print each aggregation bucket
# ({'key': <message>, 'doc_count': <count>}).
res = es.search(index='index-*', body=query_json)
for bucket in res['aggregations']['2']['buckets']:
    print(bucket)