def createZip(zipDir, dst, versionName):
    """Generate a hot-update project.manifest for *dst* and zip the result.

    :param zipDir: unused in this variant (kept for caller compatibility).
    :param dst: build output directory whose contents are fingerprinted.
    :param versionName: version string written into the manifest and used
        as the zip file's base name.
    """
    tempPath = utils.flat_path(os.path.join(dst, ""))
    publishPath = utils.flat_path(os.path.join(dst, "../publish"))
    print(tempPath)
    print(publishPath)

    # Manifest skeleton; the remote URLs are filled in by a later
    # deployment step, so they stay empty here.
    manifest = {
        "version": versionName,
        "packageUrl": '',
        "remoteManifestUrl": '',
        "remoteVersionUrl": '',
        "searchPaths": [],
        "assets": {},
    }
    # Collect per-file MD5 entries for everything under /res.
    get_file_path_md5(tempPath, '/res', manifest["assets"])

    # Create project.manifest.
    # BUG FIX: the original opened the file in binary mode ("wb") and wrote
    # with writelines(str) — a TypeError on Python 3 and a char-by-char
    # write on Python 2. Text mode + write() inside a context manager
    # guarantees correct output and that the handle is closed.
    project_manifest = utils.flat_path(
        os.path.join(tempPath, 'project.manifest'))
    with open(project_manifest, "w") as file_m:
        file_m.write(json.dumps(manifest))

    # Produce the zip package under the publish directory.
    out_dir = '%s/%s.zip' % (publishPath, versionName)
    utils.zip_folder(tempPath, out_dir)
def write_zip_file(self):
    """Zip the staged temp dir into output/<app>_<channel>_<version>_<ts>.zip.

    Reads self.app_id, self.channel_id, self.batch_info['version'],
    self.root_dir and self.temp_dir; writes the archive via utils.zip_folder.
    """
    Logging.debug_msg('正在生成压缩包..')  # "generating archive..."
    # Timestamp in the name keeps repeated builds from colliding.
    # SIMPLIFIED: datetime.now() is equivalent to
    # datetime.fromtimestamp(time.time()) but direct and clearer.
    cur_time_str = datetime.datetime.now().strftime('%Y%m%d_%H%M%S')
    zip_name = '%s_%s_%s_%s.zip' % (self.app_id, self.channel_id,
                                    self.batch_info['version'], cur_time_str)
    output_dir = os.path.join(self.root_dir, 'output', zip_name)
    utils.zip_folder(self.temp_dir, output_dir)
    Logging.debug_msg('压缩包生成完成 %s ' % output_dir)  # "archive done"
def createZip(zipDir, dst, versionName):
    """Stage the main package into ../temp, generate both manifests,
    optionally overlay the exclude/ directory, and zip everything into
    publish/<zipDir>/<versionName>.zip.

    :param zipDir: publish sub-directory; defaults to 'creator' when empty.
    :param dst: build output directory containing mainPackage/ (and
        optionally exclude/).
    :param versionName: version string for the manifests and zip name.
    """
    tempPath = utils.flat_path(os.path.join(dst, "../temp"))
    publishPath = utils.flat_path(os.path.join(dst, "../publish"))
    print(tempPath)
    print(publishPath)

    # Rebuild the temp staging dir from scratch. The short sleeps appear to
    # work around file-lock races around rmtree (presumably on Windows).
    if os.path.isdir(tempPath):
        time.sleep(0.01)
        shutil.rmtree(tempPath)
        time.sleep(0.01)

    # src directory — manifests are regenerated below, so exclude them.
    mainPackageCfg = {
        'from': 'mainPackage/src',
        'to': 'src',
        'exclude': [
            '**/.DS_Store',
            '**/manifest.*',
        ]
    }
    excopy.copy_files_with_config(mainPackageCfg, dst, tempPath)

    # res directory
    mainPackageCfg = {
        'from': 'mainPackage/res',
        'to': 'res',
        'exclude': [
            '**/.DS_Store',
        ]
    }
    excopy.copy_files_with_config(mainPackageCfg, dst, tempPath)

    # Manifest skeleton; remote URLs are filled in at deploy time.
    manifest = {
        "version": versionName,
        "packageUrl": '',
        "remoteManifestUrl": '',
        "remoteVersionUrl": '',
        "searchPaths": [],
        "assets": {},
    }
    assets = manifest["assets"]
    get_file_path_md5(tempPath, '/src', assets)
    get_file_path_md5(tempPath, '/res', assets)

    # Create project.manifest.
    # BUG FIX: previously opened with "wb" and written via writelines(str),
    # which raises TypeError on Python 3; use text mode, write(), and a
    # context manager so the handle always closes.
    project_manifest = utils.flat_path(
        os.path.join(tempPath, 'project.manifest'))
    with open(project_manifest, "w") as file_m:
        file_m.write(json.dumps(manifest))

    # Create version.manifest — same data minus the per-file asset table.
    manifest["assets"] = {}
    version_manifest = utils.flat_path(
        os.path.join(tempPath, 'version.manifest'))
    with open(version_manifest, "w") as file_m:
        file_m.write(json.dumps(manifest))

    # Overlay the exclude/ directory on top of the staged files.
    # NOTE(review): bExclude is a module-level flag defined elsewhere in
    # this file — confirm it is initialized before this call.
    if bExclude:
        excludePath = dst + '/exclude'
        for parent, dirs, files in os.walk(excludePath):
            for f in files:
                if f == '.DS_Store':
                    continue
                full_path = os.path.join(parent, f)
                rel_path = os.path.relpath(full_path, excludePath)
                rel_path = rel_path.replace('\\', '/')
                moveFileTo(full_path, tempPath + '/' + rel_path)

    if zipDir == '':
        zipDir = 'creator'

    # Produce the zip package.
    out_dir = '%s/%s/%s.zip' % (publishPath, zipDir, versionName)
    utils.zip_folder(tempPath, out_dir)
def create_bkp_files(self, databases, config):
    """Dump each database to a dated .sql file, zip the backup folder,
    and record progress in the backup log tables.

    :param databases: iterable of raw database-name strings; each is
        normalized via clear_name() and skipped when None or listed in
        config['DB_IGNORED'].
    :param config: backup settings dict (pg_user, host_machine, port,
        local_destiny_folder, DB_IGNORED).

    Side effects: writes files under self.bkp_folder_path, replaces that
    folder with a .zip, and updates self.zip_folder_path,
    self.steps_done, self.email_context_success / _error.
    """
    msg = "Pulling databases"
    # Open a log row marking this step as in-progress (status 1).
    self.pk_log_row = self.db.insert(
        self.config['db_name_log_record'], {
            'backup_id': self.pk_row,
            'log': msg,
            'status': 1,
            'log_datetime': 'now()'
        }
    )
    bkp_context_success = []
    bkp_context_error = []
    db_to_pass = '******'.join(databases).replace(' ', '').replace('\n', '')
    # SECURITY(review): this UPDATE is built via string formatting, not a
    # parameterized query — vulnerable to SQL injection if any value is
    # attacker-influenced. Flagged rather than changed because the
    # self.db.query interface is not visible here.
    query = u"UPDATE {0} SET databases_to_pass='******' WHERE id={2}".format(
        self.config['db_name_record'], db_to_pass, self.pk_row
    )
    self.db.query(query)
    for database in databases:
        db_name = clear_name(database)
        if db_name is not None and db_name not in config['DB_IGNORED']:
            self.create_folder(
                config['local_destiny_folder'])
            file_name = \
                db_name + "_bkp_" + time.strftime('%d_%m_%Y') + '.sql'
            path = os.path.join(self.bkp_folder_path, file_name)
            # Try the primary dump command first; on a non-zero exit fall
            # back to the host/port variant.
            bkp = subprocess.call(
                self.commands['bkp_error'].format(
                    config['pg_user'], db_name, path
                ),
                shell=True
            )
            if bkp != 0:
                bkp = subprocess.call(
                    self.commands['bkp'].format(
                        config['host_machine'], config['port'],
                        config['pg_user'], path, db_name
                    ),
                    shell=True
                )
                if bkp != 0:
                    bkp_context_error.append(db_name)
                else:
                    bkp_context_success.append(db_name)
            else:
                bkp_context_success.append(db_name)
    try:
        # Replace the folder of .sql dumps with a single zip archive.
        zip_folder(self.bkp_folder_path)
        delete_folder(self.bkp_folder_path)
    except Exception as err:
        self.treat_exception(err)
    self.zip_folder_path = self.bkp_folder_path + '.zip'
    msg = "Databases backup: {0}".format(','.join(bkp_context_success))
    query = u"UPDATE {0} SET databases_passed='{1}' WHERE id={2}".format(
        self.config['db_name_record'], ','.join(bkp_context_success),
        self.pk_row
    )
    self.db.query(query)
    self.steps_done.append(True)
    # Mark the log row as completed (status 2).
    self.db.update(
        self.config['db_name_log_record'], {
            'id': self.pk_log_row,
            'status': 2,
            'log': msg
        }
    )
    self.db.update(
        self.config['db_name_record'], {
            'id': self.pk_row,
            'status': 2,
            'percents_completed': self.count_percentage(),
            'finish_backup_datetime': 'NULL'
        }
    )
    self.email_context_success = self.email_context_success \
        + "- {0}\n".format(msg)
    # FIX: idiomatic truthiness check instead of "!= []".
    if bkp_context_error:
        msg = "No databases backup: {0}".format(','.join(bkp_context_error))
        # Status 3 records a partial failure for the email report.
        self.db.update(
            self.config['db_name_log_record'], {
                'id': self.pk_log_row,
                'status': 3,
                'log': msg
            }
        )
        self.email_context_error = "- {0}\n".format(msg)
def start(version, srcDir, dstDir, packArgs):
    """Stage a mini-game build: copy sources and assets into a project
    directory, optionally merge JSON configs, apply the channel template,
    zip a backup of the project, and produce the remote update package.

    :param version: version string used for the publish folder and zips.
    :param srcDir: build output directory to read from.
    :param dstDir: destination directory; removed again at the end.
    :param packArgs: packaging options dict (mergeJson, mod_file_path,
        cfgPackagePath, compressPng, ...).
    """
    # Walk the current directory.
    global ConfigMap
    print("srcDir:%s" % srcDir)
    print("dstDir:%s" % dstDir)
    # Rebuild dstDir from scratch; the short sleeps presumably work around
    # file-lock races with rmtree — TODO confirm (Windows?).
    if os.path.isdir(dstDir):
        time.sleep(0.01)
        shutil.rmtree(dstDir)
        time.sleep(0.01)
    dst_project_path = utils.flat_path(
        os.path.join(dstDir, "../project")) + '/'
    if os.path.isdir(dst_project_path):
        shutil.rmtree(dst_project_path)
    os.makedirs(dst_project_path)
    # Copy the src directory.
    copySrc(
        srcDir,
        dst_project_path,
        [".js"],  # build\wechatgame\src >>>> packConfig\minigame\wx\project
        ['manifest', 'CfgPackage', 'SubManifest'])
    path_src_internal = os.path.join(srcDir, "assets/internal")
    path_dst_internal = os.path.join(dst_project_path, "assets/internal")
    copySrc(path_src_internal, path_dst_internal)
    path_src_main = os.path.join(srcDir, "assets/main")
    path_dst_main = os.path.join(dst_project_path, "assets/main")
    copySrc(path_src_main, path_dst_main, [r"config\..*json"])
    # path = utils.flat_path(os.path.join(dstDir, "../project/src/project.js"))
    # shortProjectJS.start(path, 'minigame', packArgs['currentChannel'])
    flat_path = utils.flat_path(os.path.join(dstDir, "../project/internal/"))
    removeTextureJson(flat_path)
    # Copy res assets.
    assetSrc = os.path.normpath(os.path.join(srcDir, "assets"))
    assetDst = os.path.normpath(os.path.join(dstDir, "assets"))
    mergeJsonMd5 = ""
    if 'mergeJson' in packArgs and packArgs['mergeJson']:
        # Exclude js sources/maps and the hashed import jsons, which get
        # merged into a single MergeJson.json below.
        copySrc(
            assetSrc, assetDst, None,
            [r".*\.js$", r".*\.js\.map$", r".*/import/[0-9a-f]{2}/.*\.json$"])
        # copyResRawAssets(srcDir, dstDir)
        out_put = dstDir + '/assets/MergeJson.json'
        configMapPath = os.path.join(srcDir, "ConfigMap.json")
        f = open(configMapPath)
        ConfigMap = json.load(f)
        f.close()
        mergeJson(srcDir, dstDir, out_put)
        mergeJsonMd5 = get_file_md5(out_put)
    else:
        copySrc(assetSrc, assetDst, None, [r".*\.js$", r".*\.js\.map"])
    # Apply the mini-game channel template over the project dir.
    path_ = packArgs['mod_file_path']
    tmplt_name = "mini_tmplt"
    src_tmplt = os.path.normpath(os.path.join(path_, tmplt_name))
    copySrc(src_tmplt, dst_project_path)
    # Record the merged-json MD5 in hooks.js (empty when merge disabled).
    # NOTE(review): "udpate" typo matches the helper's definition elsewhere.
    udpateMergeJsonMD5(dst_project_path + '/hooks.js', mergeJsonMd5)
    # Modify the project configuration.
    cfg_file = os.path.join(dst_project_path, 'project.config.json')
    proj_name = 'fishClient-' + os.path.basename(os.path.dirname(dstDir))
    modify_proj_cfg(cfg_file, proj_name, packArgs)
    # Copy the channel-specific CfgPackage.js.
    if packArgs['cfgPackagePath']:
        # dst_project_path already ends with '/', so plain concatenation
        # yields project/src/assets/scripts/CfgPackage.js.
        moveFileTo(packArgs['cfgPackagePath'],
                   dst_project_path + 'src/assets/scripts/CfgPackage.js')
    # Compress the project folder as a timestamped backup.
    ts_str = datetime.datetime.fromtimestamp(
        time.time()).strftime('%Y%m%d_%H%M%S')
    dst_proj_zip_path = utils.flat_path(
        os.path.join(os.path.dirname(dstDir), 'publish',
                     '%s_%s' % (version, ts_str), 'project.zip'))
    utils.zip_folder(dst_project_path, dst_proj_zip_path)
    # Compress png assets.
    if packArgs['compressPng']:
        pngyu.start(os.path.join(dstDir, "assets"),
                    os.path.join(srcDir, "PngMap.json"))
    zipDir = ''
    # Produce the remote resource package and the update package.
    createZip(zipDir, dstDir, version, ts_str)
    shutil.rmtree(dstDir)  # remove the temporary directory
def create_bkp_files(self, databases, config):
    """Dump each database to a dated .sql file, zip the backup folder,
    and track progress/status in the backup log tables.

    :param databases: iterable of raw database-name strings; each is
        normalized with clear_name() and skipped when None or present in
        config['DB_IGNORED'].
    :param config: backup settings dict (pg_user, host_machine, port,
        local_destiny_folder, DB_IGNORED).
    """
    msg = "Pulling databases"
    # Log row marking this step as in-progress (status 1).
    self.pk_log_row = self.db.insert(
        self.config['db_name_log_record'], {
            'backup_id': self.pk_row,
            'log': msg,
            'status': 1,
            'log_datetime': 'now()'
        })
    bkp_context_success = []
    bkp_context_error = []
    db_to_pass = '******'.join(databases).replace(' ', '').replace('\n', '')
    # SECURITY(review): string-formatted SQL, not parameterized —
    # injection-prone if any value is untrusted. Flagged only, since the
    # self.db.query interface is not visible from here.
    query = u"UPDATE {0} SET databases_to_pass='******' WHERE id={2}".format(
        self.config['db_name_record'], db_to_pass, self.pk_row)
    self.db.query(query)
    for database in databases:
        db_name = clear_name(database)
        if db_name is not None and db_name not in config['DB_IGNORED']:
            self.create_folder(config['local_destiny_folder'])
            file_name = \
                db_name + "_bkp_" + time.strftime('%d_%m_%Y') + '.sql'
            path = os.path.join(self.bkp_folder_path, file_name)
            # Primary dump command; fall back to the host/port variant on
            # a non-zero exit code.
            bkp = subprocess.call(self.commands['bkp_error'].format(
                config['pg_user'], db_name, path), shell=True)
            if bkp != 0:
                bkp = subprocess.call(self.commands['bkp'].format(
                    config['host_machine'], config['port'],
                    config['pg_user'], path, db_name), shell=True)
                if bkp != 0:
                    bkp_context_error.append(db_name)
                else:
                    bkp_context_success.append(db_name)
            else:
                bkp_context_success.append(db_name)
    try:
        # Replace the folder of .sql dumps with one zip archive.
        zip_folder(self.bkp_folder_path)
        delete_folder(self.bkp_folder_path)
    except Exception as err:
        self.treat_exception(err)
    self.zip_folder_path = self.bkp_folder_path + '.zip'
    msg = "Databases backup: {0}".format(','.join(bkp_context_success))
    query = u"UPDATE {0} SET databases_passed='{1}' WHERE id={2}".format(
        self.config['db_name_record'], ','.join(bkp_context_success),
        self.pk_row)
    self.db.query(query)
    self.steps_done.append(True)
    # Mark the log row completed (status 2).
    self.db.update(self.config['db_name_log_record'], {
        'id': self.pk_log_row,
        'status': 2,
        'log': msg
    })
    self.db.update(
        self.config['db_name_record'], {
            'id': self.pk_row,
            'status': 2,
            'percents_completed': self.count_percentage(),
            'finish_backup_datetime': 'NULL'
        })
    self.email_context_success = self.email_context_success \
        + "- {0}\n".format(msg)
    # FIX: idiomatic truthiness check instead of "!= []".
    if bkp_context_error:
        msg = "No databases backup: {0}".format(
            ','.join(bkp_context_error))
        # Status 3 records a partial failure for the email report.
        self.db.update(self.config['db_name_log_record'], {
            'id': self.pk_log_row,
            'status': 3,
            'log': msg
        })
        self.email_context_error = "- {0}\n".format(msg)
from apigee_proxy import Proxies
from utils import zip_folder


def import_proxy_to_apigee(proxy_name, absolute_zip_file_pth):
    """Upload the zipped proxy bundle at *absolute_zip_file_pth* to Apigee
    under *proxy_name*."""
    # The download folder location comes from api_config.json.
    client = Proxies()
    client.import_proxy_into_apigee(proxy_name, absolute_zip_file_pth)


if __name__ == '__main__':
    # Give the absolute location of the folder to upload. If the proxy
    # name already exists its revision number is bumped; otherwise a new
    # proxy is created.
    proxy_name = 'Test-Proxy'
    folder_to_zip = r'C:\Location\to\Proxy_folder'
    zip_folder(folder_to_zip)
    absolute_zip_file_pth = '{}.zip'.format(folder_to_zip)
    import_proxy_to_apigee(proxy_name, absolute_zip_file_pth)