RRD Analysis

Introduction: by analyzing the RRD data source, obtain the MAX value over the last day and the MAX value over the last 12 hours.

Graph rrdtool code

The RRD creation code configures 13 RRAs in total (one AVERAGE archive at the base step, plus AVERAGE/MAX/MIN archives at four coarser resolutions).

const (
	RRA1PointCnt   = 720 // one point per 1 minute, kept for 12 hours
	RRA5PointCnt   = 576 // one point per 5 minutes, kept for 2 days
	RRA20PointCnt  = 504 // one point per 20 minutes, kept for 7 days
	RRA180PointCnt = 766 // one point per 3 hours, kept for 3 months
	RRA720PointCnt = 730 // one point per 12 hours, kept for 1 year
)

func create(filename string, item *cmodel.GraphItem) error {
	now := time.Now()
	start := now.Add(time.Duration(-24) * time.Hour)
	step := uint(item.Step)

	c := rrdlite.NewCreator(filename, start, step)
	c.DS("metric", item.DsType, item.Heartbeat, item.Min, item.Max)

	// configure the archive (RRA) policies
	// one point per 1 minute, kept for 12 hours
	c.RRA("AVERAGE", 0.5, 1, RRA1PointCnt)

	// one point per 5 minutes, kept for 2 days
	c.RRA("AVERAGE", 0.5, 5, RRA5PointCnt)
	c.RRA("MAX", 0.5, 5, RRA5PointCnt)
	c.RRA("MIN", 0.5, 5, RRA5PointCnt)

	// one point per 20 minutes, kept for 7 days
	c.RRA("AVERAGE", 0.5, 20, RRA20PointCnt)
	c.RRA("MAX", 0.5, 20, RRA20PointCnt)
	c.RRA("MIN", 0.5, 20, RRA20PointCnt)

	// one point per 3 hours, kept for 3 months
	c.RRA("AVERAGE", 0.5, 180, RRA180PointCnt)
	c.RRA("MAX", 0.5, 180, RRA180PointCnt)
	c.RRA("MIN", 0.5, 180, RRA180PointCnt)

	// one point per 12 hours, kept for 1 year
	c.RRA("AVERAGE", 0.5, 720, RRA720PointCnt)
	c.RRA("MAX", 0.5, 720, RRA720PointCnt)
	c.RRA("MIN", 0.5, 720, RRA720PointCnt)

	return c.Create(true)
}
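
As a quick check of the retention comments above, each RRA holds step × pdp_per_row × rows worth of data. A minimal standalone Go sketch (assuming the 60-second step of the file inspected below):

package main

import (
	"fmt"
	"time"
)

func main() {
	const step = 60 * time.Second // base step of the RRD file

	rras := []struct{ pdpPerRow, rows int }{
		{1, 720},   // 1m  per point * 720 rows = 12h
		{5, 576},   // 5m  per point * 576 rows = 2d
		{20, 504},  // 20m per point * 504 rows = 7d
		{180, 766}, // 3h  per point * 766 rows ≈ 3 months
		{720, 730}, // 12h per point * 730 rows = 1 year
	}

	for _, r := range rras {
		retention := time.Duration(r.pdpPerRow*r.rows) * step
		fmt.Printf("pdp_per_row=%-3d rows=%-3d retention=%.1f days\n",
			r.pdpPerRow, r.rows, retention.Hours()/24)
	}
}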

RRD file INFO output


[root@host f7]# rrdtool info f7ff999328aa42a5685302091a212c47_GAUGE_60.rrd
filename = "f7ff999328aa42a5685302091a212c47_GAUGE_60.rrd"
rrd_version = "0003"
step = 60
last_update = 1522669860
header_size = 3080
ds[metric].index = 0
ds[metric].type = "GAUGE"
ds[metric].minimal_heartbeat = 120
ds[metric].min = NaN
ds[metric].max = NaN
ds[metric].last_ds = "72.5"
ds[metric].value = 0.0000000000e+00
ds[metric].unknown_sec = 0
rra[0].cf = "AVERAGE"
rra[0].rows = 720
rra[0].cur_row = 575
rra[0].pdp_per_row = 1
rra[0].xff = 5.0000000000e-01
rra[0].cdp_prep[0].value = NaN
rra[0].cdp_prep[0].unknown_datapoints = 0
rra[1].cf = "AVERAGE"
rra[1].rows = 576
rra[1].cur_row = 308
rra[1].pdp_per_row = 5
rra[1].xff = 5.0000000000e-01
rra[1].cdp_prep[0].value = 7.2500000000e+01
rra[1].cdp_prep[0].unknown_datapoints = 0
rra[2].cf = "MAX"
rra[2].rows = 576
rra[2].cur_row = 253
rra[2].pdp_per_row = 5
rra[2].xff = 5.0000000000e-01
rra[2].cdp_prep[0].value = 7.2500000000e+01
rra[2].cdp_prep[0].unknown_datapoints = 0
rra[3].cf = "MIN"
rra[3].rows = 576
rra[3].cur_row = 422
rra[3].pdp_per_row = 5
rra[3].xff = 5.0000000000e-01
rra[3].cdp_prep[0].value = 7.2500000000e+01
rra[3].cdp_prep[0].unknown_datapoints = 0
rra[4].cf = "AVERAGE"
rra[4].rows = 504
rra[4].cur_row = 139
rra[4].pdp_per_row = 20
rra[4].xff = 5.0000000000e-01
rra[4].cdp_prep[0].value = 8.3788844721e+02
rra[4].cdp_prep[0].unknown_datapoints = 0
rra[5].cf = "MAX"
rra[5].rows = 504
rra[5].cur_row = 301
rra[5].pdp_per_row = 20
rra[5].xff = 5.0000000000e-01
rra[5].cdp_prep[0].value = 8.7000000000e+01
rra[5].cdp_prep[0].unknown_datapoints = 0
rra[6].cf = "MIN"
rra[6].rows = 504
rra[6].cur_row = 94
rra[6].pdp_per_row = 20
rra[6].xff = 5.0000000000e-01
rra[6].cdp_prep[0].value = 5.2500000000e+01
rra[6].cdp_prep[0].unknown_datapoints = 0
rra[7].cf = "AVERAGE"
rra[7].rows = 766
rra[7].cur_row = 728
rra[7].pdp_per_row = 180
rra[7].xff = 5.0000000000e-01
rra[7].cdp_prep[0].value = 1.3485957786e+04
rra[7].cdp_prep[0].unknown_datapoints = 0
rra[8].cf = "MAX"
rra[8].rows = 766
rra[8].cur_row = 400
rra[8].pdp_per_row = 180
rra[8].xff = 5.0000000000e-01
rra[8].cdp_prep[0].value = 9.2964824121e+01
rra[8].cdp_prep[0].unknown_datapoints = 0
rra[9].cf = "MIN"
rra[9].rows = 766
rra[9].cur_row = 716
rra[9].pdp_per_row = 180
rra[9].xff = 5.0000000000e-01
rra[9].cdp_prep[0].value = 5.2500000000e+01
rra[9].cdp_prep[0].unknown_datapoints = 0
rra[10].cf = "AVERAGE"
rra[10].rows = 730
rra[10].cur_row = 501
rra[10].pdp_per_row = 720
rra[10].xff = 5.0000000000e-01
rra[10].cdp_prep[0].value = 6.2198524211e+04
rra[10].cdp_prep[0].unknown_datapoints = 0
rra[11].cf = "MAX"
rra[11].rows = 730
rra[11].cur_row = 84
rra[11].pdp_per_row = 720
rra[11].xff = 5.0000000000e-01
rra[11].cdp_prep[0].value = 9.9500000000e+01
rra[11].cdp_prep[0].unknown_datapoints = 0
rra[12].cf = "MIN"
rra[12].rows = 730
rra[12].cur_row = 574
rra[12].pdp_per_row = 720
rra[12].xff = 5.0000000000e-01
rra[12].cdp_prep[0].value = 5.2500000000e+01
rra[12].cdp_prep[0].unknown_datapoints = 0

Terminology

  • CF: the consolidation function; one of AVERAGE, MAX, MIN, or LAST, which keep the average, maximum, minimum, and last value respectively;

  • DS: Data Source. An RRD file can hold multiple data sources (at least one), each consolidated independently; a DS name must be shorter than 19 characters and may contain only [a-zA-Z0-9_];

  • DST: the data source type; one of GAUGE, COUNTER, DERIVE, ABSOLUTE, or COMPUTE. GAUGE stores the current value as-is; COUNTER stores the change relative to the previous value and requires a monotonically increasing counter; DERIVE also stores the change relative to the previous value but does not require it to increase; ABSOLUTE stores the value relative to a zero baseline, i.e. for counters that are reset after every read;

  • PDP: Primary Data Point, the raw data point produced at each step;

  • RRA: Round Robin Archive. Each RRA row holds a value computed by consolidating several PDPs, stored in its own time slot; these slots are not the same as the collection-time slots, because consolidation has been applied (see the short sketch after this list);
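
To make the PDP-to-RRA relationship concrete, here is a small illustrative Go sketch (not rrdtool's internal code): with step = 60s and pdp_per_row = 5, every five one-minute PDPs are consolidated into one value per CF before being written into the corresponding RRA row.

package main

import "fmt"

// consolidate illustrates how one RRA row is formed from pdp_per_row PDPs:
// one consolidated value per CF (AVERAGE, MAX, MIN).
func consolidate(pdps []float64) (avg, max, min float64) {
	sum := 0.0
	max, min = pdps[0], pdps[0]
	for _, v := range pdps {
		sum += v
		if v > max {
			max = v
		}
		if v < min {
			min = v
		}
	}
	return sum / float64(len(pdps)), max, min
}

func main() {
	// Five 1-minute PDPs -> one row each in the 5m AVERAGE/MAX/MIN RRAs.
	fmt.Println(consolidate([]float64{70, 72.5, 68, 75, 71})) // 71.3 75 68
}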

Commands

[root@host f7]# rrdtool -h

Usage: rrdtool [options] command command_options
Valid commands: create, update, updatev, graph, graphv,  dump, restore,
                last, lastupdate, first, info, fetch, tune,
                resize, xport, flushcached
  • Creating an RRD database (create - create a new RRD):

      rrdtool create filename 
              [--start|-b start time]
              [--step|-s step]
              [--no-overwrite|-O]
              [DS:ds-name:DST:dst arguments]
              [RRA:CF:cf arguments]
    

Example: rrdtool create --start `date +%s` --step 60 test.rrd DS:ds1:GAUGE:120:U:U RRA:AVERAGE:0.5:2:1440 RRA:MIN:0.5:2:1440

  • Fetching data: rrdtool fetch file_name CF [--resolution|-r resolution] [--start|-s time] [--end|-e time]

file_name: the RRD file to read;

CF: the consolidation function to use: AVERAGE, MAX, MIN, or LAST;

--resolution|-r: the desired resolution (seconds per data point); if no RRA with exactly that resolution exists, a coarser existing one is chosen automatically;

--start|-s: the start time;

--end|-e: the end time.

Example: rrdtool fetch *.rrd AVERAGE

Analysis

Because RRA slots are aligned to epoch (UTC) boundaries, in local time (UTC+8) the 12-hour slots cover 08:00-20:00 and 20:00-08:00, and the 3-hour slots cover windows such as 23:00-02:00 and 02:00-05:00.

  • Last 5 minutes: MIN

rrdtool fetch 00fff19f6ac36651be30bcf7717d758a_GAUGE_60.rrd AVERAGE -r 300

  • Last 20 minutes: MIN, AVERAGE, MAX

rrdtool fetch 00fff19f6ac36651be30bcf7717d758a_GAUGE_60.rrd AVERAGE -r 1200

  • Last 12 hours: MIN

rrdtool fetch e163d3dde301d599daf8be7f901059e2_GAUGE_60.rrd AVERAGE -r 43200 -s 1523122600 -e 1523122600

How the start time is computed: start time = current timestamp - (current timestamp % 43200)
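
For example, applying this to the timestamp used in the fetch command above, the aligned start time lands on a 12:00 UTC (20:00 UTC+8) boundary, matching the 12-hour slots mentioned earlier:

package main

import (
	"fmt"
	"time"
)

func main() {
	ts := int64(1523122600) // the timestamp used in the fetch example above
	slot := int64(43200)    // 12h per point = 43200 seconds

	start := ts - ts%slot // start time = current timestamp - (current timestamp % 43200)

	fmt.Println(start)                     // 1523102400
	fmt.Println(time.Unix(start, 0).UTC()) // 2018-04-07 12:00:00 +0000 UTC
}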

graph component


func (this *Graph) Query(param cmodel.GraphQueryParam, resp *cmodel.GraphQueryResponse) error {
	var (
		datas      []*cmodel.RRDData
		datas_size int
	)

	// statistics
	proc.GraphQueryCnt.Incr()

	cfg := g.Config()

	// form empty response
	resp.Values = []*cmodel.RRDData{}
	resp.Endpoint = param.Endpoint
	resp.Counter = param.Counter
	dsType, step, exists := index.GetTypeAndStep(param.Endpoint, param.Counter) // complete dsType and step
	if !exists {
		return nil
	}
	resp.DsType = dsType
	resp.Step = step

	start_ts := param.Start - param.Start%int64(step)
	end_ts := param.End - param.End%int64(step) + int64(step)
	if end_ts-start_ts-int64(step) < 1 {
		return nil
	}

	md5 := cutils.Md5(param.Endpoint + "/" + param.Counter)
	key := g.FormRrdCacheKey(md5, dsType, step)
	filename := g.RrdFileName(cfg.RRD.Storage, md5, dsType, step)

	// read cached items
	items, flag := store.GraphItems.FetchAll(key)
	items_size := len(items)

	if cfg.Migrate.Enabled && flag&g.GRAPH_F_MISS != 0 {
		node, _ := rrdtool.Consistent.Get(param.Endpoint + "/" + param.Counter)
		done := make(chan error, 1)
		res := &cmodel.GraphAccurateQueryResponse{}
		rrdtool.Net_task_ch[node] <- &rrdtool.Net_task_t{
			Method: rrdtool.NET_TASK_M_QUERY,
			Done:   done,
			Args:   param,
			Reply:  res,
		}
		<-done
		// fetch data from remote
		datas = res.Values
		datas_size = len(datas)
	} else {
		// read data from rrd file
		datas, _ = rrdtool.Fetch(filename, param.ConsolFun, start_ts, end_ts, step)
		datas_size = len(datas)
	}
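
The excerpt stops after the RRD/remote fetch; the rest of Query merges the cached items with the fetched data into resp.Values. The RRD file naming can be reconstructed from what is shown: Query hashes param.Endpoint + "/" + param.Counter, and the rrdtool info section above shows the file sitting in a directory named after the first two characters of that hash. A hedged sketch of the naming scheme, with a hypothetical endpoint/counter pair and an assumed storage root:

package main

import (
	"crypto/md5"
	"fmt"
)

func main() {
	// Hypothetical endpoint/counter pair and assumed storage root; the layout
	// <storage>/<md5[0:2]>/<md5>_<dsType>_<step>.rrd is inferred from the
	// "f7/f7ff..._GAUGE_60.rrd" path in the rrdtool info section above.
	endpoint, counter := "host01", "cpu.idle"
	dsType, step := "GAUGE", 60
	storage := "/data/rrd"

	sum := fmt.Sprintf("%x", md5.Sum([]byte(endpoint+"/"+counter)))
	filename := fmt.Sprintf("%s/%s/%s_%s_%d.rrd", storage, sum[0:2], sum, dsType, step)
	fmt.Println(filename)
}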

Questions

  1. How do we get the MAX, MIN, and AVG values at one point per 12 hours?
  2. How do we get the MAX, MIN, and AVG values at one point per 3 hours?

Suggestions

  • Add an archive with one point per 24 hours (one day); see the sketch below.
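
Following the pattern of the create function above, a one-point-per-24-hours archive could be added with one more constant and three more RRA calls. This is only a sketch; the row count (365, roughly one year of daily points) is an assumed choice:

// Hypothetical addition to create(): one point per 24h (1440 minutes at a
// 60-second step). The 365-row retention (~1 year) is an assumption.
const RRA1440PointCnt = 365

// one point per 24 hours, kept for ~1 year
c.RRA("AVERAGE", 0.5, 1440, RRA1440PointCnt)
c.RRA("MAX", 0.5, 1440, RRA1440PointCnt)
c.RRA("MIN", 0.5, 1440, RRA1440PointCnt)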


Reposted from my.oschina.net/guoenzhou/blog/1824438