Download Aliyun MySQL (RDS) backup files and binlogs to a local machine with Python.

# Without further ado, straight to the code. If you have good ideas, please leave a message — let's discuss and make progress together!

Purpose:

       Work together, progress together!

#!/usr/bin/python
# coding=utf-8

import datetime
import json
import os
import urllib
# urllib.request must be imported explicitly in Python 3; `import urllib`
# alone does not make urllib.request.urlopen/urlretrieve available.
import urllib.request

from aliyunsdkcore import client
from aliyunsdkrds.request.v20140815.DescribeBackupsRequest import DescribeBackupsRequest
from aliyunsdkrds.request.v20140815.DescribeBinlogFilesRequest import DescribeBinlogFilesRequest


class PullBackupfile():
    r"""Download Aliyun RDS (MySQL) data backups and binlog files to local disk.

    Workflow: query the RDS API for yesterday's backup / binlog file lists,
    then download each file under D:\databack\<date>\{data,binlog}.
    NOTE(review): paths and cleanup command are Windows-specific.
    """

    def __init__(self, accessid, key, region, instanceid):
        # Aliyun API credentials and the target RDS instance id.
        self.accessid = accessid
        self.key = key
        self.region = region
        self.instanceid = instanceid

    def mkdir(self):
        r"""Create (if needed) yesterday's backup directories.

        Returns:
            (data_path, bin_log_path) under D:\databack\<yesterday's date>.
        """
        # Backups describe the previous day, so the folder is dated yesterday.
        yesterday = datetime.date.today() - datetime.timedelta(days=1)
        path = 'D:\\databack\\%s' % yesterday
        data_path = os.path.join(path, 'data')
        bin_log_path = os.path.join(path, 'binlog')
        for p in (path, data_path, bin_log_path):
            # os.makedirs replaces the original os.popen('mkdir ...') shell-out;
            # exist_ok makes a separate isdir() pre-check unnecessary.
            os.makedirs(p, exist_ok=True)
        return data_path, bin_log_path

    def login(self):
        """Return an authenticated Aliyun API client."""
        return client.AcsClient(self.accessid, self.key, self.region)

    def backup_time(self, name):
        """Return (start, end) timestamps covering yesterday, formatted for the API.

        Aliyun returns/accepts UTC times, so midnight Beijing time (UTC+8) is
        converted to UTC by subtracting 8 extra hours; e.g. 2013-08-15T12:00:00Z
        is 20:00:00 on 2013-08-15 Beijing time.

        Args:
            name: 'datafile' (minute resolution) or 'binlog' (second
                resolution) -- the two request types expect different formats.

        Raises:
            ValueError: for any other name (the original code silently
                returned unbound variables in that case).
        """
        now = datetime.datetime.now()
        # Today 00:00 Beijing time, expressed in UTC.
        end_time = now - datetime.timedelta(hours=now.hour + 8, minutes=now.minute,
                                            seconds=now.second, microseconds=now.microsecond)
        start_time = end_time - datetime.timedelta(days=1)
        # The two APIs expect different timestamp formats.
        if name == 'datafile':
            fmt = '%Y-%m-%dT%H:%MZ'
        elif name == 'binlog':
            fmt = '%Y-%m-%dT%H:%M:%SZ'
        else:
            raise ValueError("name must be 'datafile' or 'binlog', got %r" % (name,))
        return start_time.strftime(fmt), end_time.strftime(fmt)

    def download_rds_backfile(self):
        """Download yesterday's full data-backup files into the data directory."""
        data_path, bin_log_path = self.mkdir()
        starttime, endtime = self.backup_time('datafile')
        try:
            req_bakup = DescribeBackupsRequest()
            req_bakup.set_DBInstanceId(self.instanceid)
            req_bakup.set_accept_format('json')
            req_bakup.set_StartTime(starttime)
            req_bakup.set_EndTime(endtime)
            clt = self.login()
            jsload = json.loads(clt.do_action_with_exception(req_bakup))
            num = jsload["PageRecordCount"]
            print("backfiles:" + str(num))
            # join with '' guarantees a trailing separator for concatenation below.
            back_path = os.path.join(data_path, '')
            for item in jsload["Items"]["Backup"][:num]:
                bak_url = item["BackupDownloadURL"]
                bak_host = item["HostInstanceID"]
                bak_id = item["BackupId"]
                print("BackupId:" + str(bak_id), "HostInstanceID:" + str(bak_host),
                      "downloadurl:" + bak_url)
                # File name: last path component of the URL, query string stripped.
                save_name = back_path + bak_url.split('?')[0].split('/')[-1]
                u = urllib.request.urlopen(bak_url)
                f_header = u.info()
                print(f_header)
                bak_size = int(f_header["Content-Length"])
                print("backup file size: %s M, file name: %s"
                      % (bak_size / 1024 / 1024, save_name))
                with open(save_name, "wb") as f:
                    file_size_dl = 0
                    # Stream in 8 KiB chunks so large backups never sit in memory.
                    while True:
                        buffer = u.read(8192)
                        if not buffer:
                            break
                        file_size_dl += len(buffer)
                        f.write(buffer)
                        print(r"%10d [%3.2f%%]"
                              % (file_size_dl, file_size_dl * 100. / bak_size))
            print("download complet!")
        except Exception as e:
            # Usually means no backup exists in the window; surface the actual
            # error instead of swallowing it silently like the bare except did.
            print("无备份", e)

    def download_rds_binlog(self):
        """Download yesterday's binlog files into the binlog directory."""
        data_path, bin_log_path = self.mkdir()
        print(bin_log_path)
        starttime, endtime = self.backup_time('binlog')
        try:
            request = DescribeBinlogFilesRequest()
            request.set_DBInstanceId(self.instanceid)
            request.set_accept_format('json')
            request.set_StartTime(starttime)
            request.set_EndTime(endtime)
            clt = self.login()
            jsload = json.loads(clt.do_action_with_exception(request))
            num = jsload["TotalRecordCount"]
            print("backfiles:" + str(num))
            back_path = os.path.join(bin_log_path, '')
            print(back_path)
            for item in jsload["Items"]["BinLogFile"][:num]:
                bak_url = item["DownloadLink"]
                bak_time = datetime.datetime.strptime(item['LogEndTime'],
                                                      '%Y-%m-%dT%H:%M:%SZ')
                # Suffix the log-end time (digits only) so names are unique/sortable.
                save_name = (back_path + item["LogFileName"] + '_' +
                             str(bak_time).replace(' ', '').replace(':', '').replace('-', ''))
                # urlretrieve creates the file itself; the original's extra
                # open(save_name, "wb") wrapper only truncated it redundantly.
                urllib.request.urlretrieve(bak_url, save_name)
            print("download complet!")
        except Exception as e:
            print('无备份', e)

    def remove_file(self):
        """Delete backup folders older than 7 days (Windows `forfiles` command)."""
        os.popen(r'forfiles /p "D:\databack" /d -7 /c "cmd /c echo deleting @path ... && rd @path /s /q" ')

if __name__ == '__main__':
    # Fill in real credentials / region / instance id before running.
    pull_file = PullBackupfile('accessid', 'key', 'region', 'instanceid')
    pull_file.download_rds_backfile()
    pull_file.download_rds_binlog()
    # Prune local backup folders older than 7 days.
    pull_file.remove_file()

You may also like

Origin: www.cnblogs.com/leeInvisible/p/11582608.html