python实现对文件的全量、增量备份

时间:2022-04-09 14:07:50
 
 
#!/usr/bin/env python
# @Time     :2018/6/6 10:10
# @Author   :PGIDYSQ
#@File      :FileBackup2.py
import os
import filecmp
import shutil
import sys
import time,sched
# Scheduler used to re-run the backup job periodically (incremental backup).
schedule = sched.scheduler(time.time, time.sleep)

def autoBackup(scrDir,dstDir):
    if((not os.path.isdir(scrDir))or(not os.path.isdir(dstDir))or
           (os.path.abspath(scrDir) != scrDir)or(os.path.abspath(dstDir) != dstDir)):
        usage()
    for item in os.listdir(scrDir):
        scrItem = os.path.join(scrDir,item)
        dstItem= scrItem.replace(scrDir,dstDir)
        if os.path.isdir(scrItem):
            #创建新增加的文件夹,保证目标文件夹结构与原始文件一致
            if not os.path.exists(dstItem):
                os.makedirs(dstItem)
                print('make directory'+dstItem)
            #递归调用自身函数
            autoBackup(scrItem,dstItem)
        elif os.path.isfile(scrItem):
            #只复制新增或修改的文件
            if((not os.path.exists(dstItem))or(not filecmp.cmp(scrItem,dstItem,shallow=False))):
                shutil.copyfile(scrItem,dstItem)
                print('file:'+scrItem+'==>'+dstItem,os.system('echo %time%'))
    schedule.enter(10, 0, autoBackup, (scrDir, dstDir))
def usage():
    """Print usage information to stderr and exit with a failure status.

    Called when the source/destination arguments are missing, are not
    existing directories, or are not absolute paths.
    """
    print('Error: source and destination must be existing absolute directory paths.',
          file=sys.stderr)
    print('For example: {0} <absolute-src-dir> <absolute-dst-dir>'.format(sys.argv[0]),
          file=sys.stderr)
    # Exit non-zero so shells and callers can detect the failure
    # (the original exited with status 0 even on error).
    sys.exit(1)
if __name__ == "__main__":
    # Command-line variant (currently disabled):
    # if len(sys.argv) != 3:
    #     usage()
    # scrDir, dstDir = sys.argv[1], sys.argv[2]
    scrDir = r'E:\PyCharm\WorkSpace\TestPkg\base\src'
    dstDir = r'E:\PyCharm\WorkSpace\TestPkg\base\dest'
    # Queue the first backup job; run() blocks and keeps going because
    # autoBackup re-enters itself into the scheduler on every run.
    schedule.enter(10, 0, autoBackup, (scrDir, dstDir))
    schedule.run()  # runs until the scheduled-event queue becomes empty
    # autoBackup(scrDir, dstDir)
 
 

 

#!/usr/bin/env python
# @Time   :2018/6/6 10:10
# @Author :PGIDYSQ
# @File   :FileBackup.py
"""Weekly full backup on Mondays, incremental backup on the other days."""
import hashlib
import os
import pickle as p
import shutil
import tarfile
import time


def md5check(fname):
    """Return the MD5 hex digest of file *fname*, read in 4 KiB chunks."""
    m = hashlib.md5()
    # Binary mode: the original opened in text mode and re-encoded each
    # chunk, which crashes on non-UTF-8 files and can alter the digest.
    with open(fname, 'rb') as fobj:
        while True:
            data = fobj.read(4096)
            if not data:
                break
            m.update(data)
    return m.hexdigest()


def full_backup(src_dir, dst_dir, md5file):
    """Copy the whole src_dir tree into dst_dir and snapshot every file's MD5.

    The snapshot written to *md5file* is a pickled {path: digest} dict that
    incr_backup later diffs against.
    """
    md5dict = {}
    # Start from a clean destination so files deleted from the source do
    # not linger in the backup.
    if os.path.exists(dst_dir):
        shutil.rmtree(dst_dir)
    shutil.copytree(src_dir, dst_dir)
    # NOTE(review): the original also sketched a tar.gz archive variant
    # (tarfile) but left it commented out; plain copytree is used instead.
    for path, folders, files in os.walk(src_dir):
        for fname in files:
            full_path = os.path.join(path, fname)
            md5dict[full_path] = md5check(full_path)
    # 'wb' creates the file when missing and truncates it otherwise, so the
    # original exists-check with a separate 'xb' branch was redundant.
    with open(md5file, 'wb') as fobj:
        p.dump(md5dict, fobj)


def incr_backup(src_dir, dst_dir, md5file):
    """Copy into dst_dir only files that are new or changed since the
    snapshot stored in *md5file*, then refresh the snapshot."""
    md5new = {}
    for path, folders, files in os.walk(src_dir):
        for fname in files:
            full_path = os.path.join(path, fname)
            md5new[full_path] = md5check(full_path)
    with open(md5file, 'rb') as fobj:
        md5old = p.load(fobj)
    with open(md5file, 'wb') as fobj:
        p.dump(md5new, fobj)
    for key in md5new:
        # .get() also treats files absent from the old snapshot as changed;
        # the original md5old[key] raised KeyError on any newly added file.
        if md5old.get(key) != md5new[key]:
            dst_path = os.path.join(dst_dir, os.path.relpath(key, src_dir))
            os.makedirs(os.path.dirname(dst_path), exist_ok=True)
            # copyfile requires a file path as destination; the original
            # passed the directory dst_dir, which raises an error.
            shutil.copyfile(key, dst_path)


if __name__ == '__main__':
    src_dir = 'base/src'
    dst_dir = 'base/dest/backupfile'
    md5file = 'base/md5.data'
    full_backup(src_dir, dst_dir, md5file)
    # if time.strftime('%a') == 'Mon':
    #     full_backup(src_dir, dst_dir, md5file)
    # else:
    #     incr_backup(src_dir, dst_dir, md5file)