import os
import subprocess
import multiprocessing


def run_command(run_cmd, shell=True, executable='/bin/bash', echo=True):
    """Run the given command through subprocess.check_call.

    Args:
        - run_cmd (string): command to be performed

    Returns:
        int, program exit code

    Raises:
        raise exception if failed to run the command
    """
    if echo:
        print("CMD: %s" % run_cmd)
    res = subprocess.check_call(run_cmd, shell=shell, executable=executable)
    return res


def get_command_output(run_cmd):
    """Run the given command through subprocess.check_output.

    Args:
        - run_cmd (string): command to be performed

    Returns:
        str, command output

    Raises:
        raise exception if failed to run the command
    """
    print("CMD: %s" % run_cmd)
    return_info = subprocess.check_output(run_cmd, shell=True)
    return bytes.decode(return_info).rstrip('\n')


def run_command_parallel(cmd_list, parallel_job=2):
    """Run the given commands in parallel through Pool & subprocess.check_call.

    Args:
        - cmd_list (list): commands to be performed

    Notes:
        If failed, call sys.exit with the exit code of the failing command.
    """
    pool = multiprocessing.Pool(parallel_job)
    pool.map(run_command, cmd_list)
    pool.close()
    pool.join()


def create_dir(folder, *subfolders):
    """Create `folder` and any given subfolders under it.

    Args:
        - folder (string): folder to be created
        - *subfolders (zero or more strings): subfolders under ``folder`` to be created

    Returns:
        - list of string
          Each element is the "folder/subfolder" that was created.
          Empty list if `subfolders` are not given.
    """
    if not os.path.exists(folder):
        os.makedirs(folder)
    ret = []
    for subfolder in subfolders:
        d = os.path.join(folder, subfolder)
        ret.append(d)
        if not os.path.exists(d):
            os.makedirs(d)
    return ret


class DockerCmd:
    def __init__(self, repository, tag):
        self.repository = repository
        self.tag = tag

    @staticmethod
    def docker_images():
        docker_images_cmd = "docker images"
        return docker_images_cmd

    def docker_save(self, save_tar_path):
        docker_save_cmd = f"docker save -o {save_tar_path} {self.repository}:{self.tag}"
        return docker_save_cmd

    def docker_load(self, in_tar_path):
        docker_load_cmd = f"docker load -i {in_tar_path}"
        return docker_load_cmd


class TarCMD:
    @staticmethod
    def compress(source_path, tar_path, file_str="*"):
        cmd = f"cd {source_path}; tar -cvf {tar_path} {file_str}"
        return cmd

    @staticmethod
    def uncompress(target_path, tar_path):
        create_dir(target_path)
        cmd = f"cd {target_path}; tar xvf {tar_path}"
        return cmd


def dir_diff(old_dir, new_dir):
    old_dir_list = os.listdir(old_dir)
    new_dir_list = os.listdir(new_dir)
    # Same file names on both sides, but they still need to be replaced
    diff_list = [f"{new_dir}/manifest.json", f"{new_dir}/repositories"]
    # Files that only exist in old are scheduled for removal
    # rm_list = [f"{old_dir}/{i}" for i in old_dir_list if i.endswith('.json')]
    rm_list = []
    for old_file in old_dir_list:
        if old_file not in new_dir_list:
            rm_list.append(f"{old_dir}/{old_file}")
    # Files that only exist in new are added to old
    for new_file in new_dir_list:
        if new_file not in old_dir_list:
            diff_list.append(f"{new_dir}/{new_file}")
    # print(diff_list, rm_list)
    return diff_list, rm_list


def update_shell_script():
    uncompress_cmd = TarCMD.uncompress(update_path, update_tar)


if __name__ == "__main__":
    old_rep = "software"
    old_tag = "v1.4.0"
    new_rep = "software"
    new_tag = "v1.4.0"
    target_path = "/home/software/dlgrab/tmp"
    # Put this script at the same directory level as update and cupcake
    sh_script = f"{target_path}/{old_tag}_{new_tag}_update.sh"
    f_shell = open(sh_script, "w")
    f_shell.write("#!/usr/bin/bash" + "\n")
    f_shell.write("set -ex" + "\n")
    f_shell.write("update_path=$1" + "\n")
    f_shell.write("target_path=$2" + "\n")
    update_tar = f"{target_path}/{old_tag}_{new_tag}.tar"
    update_path = f"{target_path}/{old_tag}_{new_tag}"
    new_tar = f"{target_path}/{old_rep}_{new_tag}.tar"
    old_tar = f"{target_path}/{new_rep}_{old_tag}.tar"
    new_path = f"{target_path}/{new_tag}"
    old_path = f"{target_path}/{old_tag}"
    create_dir(old_path)
    create_dir(new_path)
    create_dir(update_path)
    # # Export the image tar files from docker
    # new_docker = DockerCmd(new_rep, new_tag)
    # old_docker = DockerCmd(old_rep, old_tag)
    # old_save_cmd = old_docker.docker_save(old_tar)
    # new_save_cmd = new_docker.docker_save(new_tar)
    # run_command_parallel([old_save_cmd, new_save_cmd])
    # # Uncompress the tar files
    old_uncompress_cmd = TarCMD.uncompress(old_path, old_tar)
    new_uncompress_cmd = TarCMD.uncompress(new_path, new_tar)
    run_command(old_uncompress_cmd)
    run_command(new_uncompress_cmd)
    # Update the image: diff old against new
    diff_list, rm_list = dir_diff(old_path, new_path)
    # Drop the files that old no longer needs
    for rm_file in rm_list:
        # run_command(f"rm -rf {rm_file}")
        f_shell.write(f"rm -rf $target_path/{os.path.basename(rm_file)}" + "\n")
    # Collect the updated files so they can be packed into one tar file
    for diff_file in diff_list:
        # run_command(f"rsync -azvP {diff_file} {old_path}")
        run_command(f"rsync -azvP {diff_file} {update_path}")
        f_shell.write(f"rsync -azvP $update_path/{os.path.basename(diff_file)} $target_path" + "\n")
    # Pack into a single tar file
    new_compress_cmd = TarCMD.compress(new_path, new_tar)
    old_compress_cmd = TarCMD.compress(old_path, old_tar.replace('old', 'old_update'))
    update_compress_cmd = TarCMD.compress(update_path, update_tar)
    # run_command(new_uncompress_cmd)
    # run_command(old_compress_cmd)
    run_command(update_compress_cmd)
    f_shell.write("cd ${target_path}; tar -cvf ${target_path}_updated.tar *" + "\n")
    f_shell.write("# docker load -i ${target_path}_updated.tar" + "\n")
    f_shell.close()
    run_command(f"sh {sh_script} {update_path} {old_path}")
Usage:
tar -cvf ${old_id}.tar
tar -cvf ${old_id}_${new_id}.tar

# Upgrade the docker image
sh ${old_id}_${new_id}_update.sh ./${old_id}_${new_id} ./${old_id}
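For reference, the ${old_id}_${new_id}_update.sh invoked above is the script assembled by the f_shell.write calls in the Python code. With the defaults it looks roughly like the sketch below; the rm -rf and rsync lines depend on what dir_diff actually finds, so the layer directory names here are made-up placeholders for illustration only.

#!/usr/bin/bash
set -ex
update_path=$1
target_path=$2
# one rm -rf per entry in rm_list (layers present only in the old image); directory name is illustrative
rm -rf $target_path/0c3b1fcad123_example_old_layer
# one rsync per entry in diff_list (manifest.json, repositories, and layers present only in the new image)
rsync -azvP $update_path/manifest.json $target_path
rsync -azvP $update_path/repositories $target_path
rsync -azvP $update_path/9e7d2ab45678_example_new_layer $target_path
cd ${target_path}; tar -cvf ${target_path}_updated.tar *
# docker load -i ${target_path}_updated.tar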