Example #1
def main():
    args = get_args()

    level = logging.DEBUG if args.verbose else logging.INFO
    logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',\
        datefmt='%m/%d/%Y %I:%M:%S %p',\
        level=level)

    svn_url = util.get_game_svn_url('.')
    def load_armory_meta(svn_url):
        tmpfile = tempfile.mktemp()
        if not util.export_svn_file_to_local(svn_url + '/' + ARMORY_META,tmpfile):
            return None
        armory_meta_obj = None
        f = None
        try:
            f = open(tmpfile,'rb')
            armory_meta_obj = json.load(f)
        except Exception as e:
            logging.debug(str(e))
        finally:
            if f is not None:f.close()
        return armory_meta_obj
    armory_meta_obj = load_armory_meta(svn_url)
    if armory_meta_obj is None:
        logging.error('could not load '+ ARMORY_META)
        return -1


    config_obj = armory_meta_obj['config']
    package_name = config_obj['package_name']
    main_versions = config_obj['versions']['main']

    armory_json = config_obj['appid'] + '/' + package_name + '.json'
    
    if args.installer:
        if config_obj['appid'] == 'pandora_manager':
            install_cfg_gen.pandora_maker()
        else:
            install_cfg_gen.main(True,args.volsize,args.verbose,armoryjson = armory_json)
    elif args.updater:
        if not config_obj['appid'] == 'pandora_manager':
            if len(main_versions) > 1:
                main_versions.sort()
                for each1 in main_versions[:-1]:
                    update_cfg_gen.main(True,args.volsize,armory_json,str(each1))
    else:  
        diffs.main(True,None,armory_json,args.verbose)

        if config_obj['appid'] == 'pandora_manager':
            install_cfg_gen.pandora_maker()
        else:
            install_cfg_gen.main(True,args.volsize,args.verbose,armoryjson = armory_json)
            
            if len(main_versions) > 1:
                main_versions.sort()
                for each1 in main_versions[:-1]:
                    update_cfg_gen.main(True,args.volsize,armory_json,str(each1))
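Example #1 drives the whole pipeline from flags returned by get_args() (verbose, installer, updater, volsize), which is not shown in the listing. A minimal sketch of such a parser, assuming argparse and exactly those four options, might look like this:

# Hypothetical sketch of get_args(); the real parser and option names may differ.
import argparse

def get_args():
    parser = argparse.ArgumentParser(description='generate armory packages')
    parser.add_argument('-v', '--verbose', action='store_true',
                        help='enable debug logging')
    parser.add_argument('--installer', action='store_true',
                        help='only regenerate the installer')
    parser.add_argument('--updater', action='store_true',
                        help='only regenerate update patches')
    parser.add_argument('--volsize', type=int, default=None,
                        help='archive volume size in bytes')
    return parser.parse_args()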
Example #2
def main():
    def load_armory_meta(svn_url):
        tmpfile = tempfile.mktemp()
        if not util.export_svn_file_to_local(svn_url + '/' + ARMORY_META,
                                             tmpfile):
            return None
        armory_meta_obj = None
        f = None
        try:
            f = open(tmpfile, 'rb')
            armory_meta_obj = json.load(f)
        except Exception as e:
            logging.debug(str(e))
        finally:
            if f is not None: f.close()
        return armory_meta_obj

    get_args()

    if gargs.dir is None:
        gargs.dir = '/var/wwwroot/armories/'

    if not gargs.dir.endswith('/'):
        gargs.dir = gargs.dir + '/'

    inputdir = '.' if gargs.inputdir is None else gargs.inputdir

    svn_url = util.get_game_svn_url(inputdir)
    armory_meta_obj = load_armory_meta(svn_url)
    if armory_meta_obj is None:
        logging.error('could not load ' + ARMORY_META)
        return -1
    config_obj = armory_meta_obj['config']
    appid = config_obj['appid']

    args_outputfolder = '../.portal'
    local_folder = os.path.join(args_outputfolder, appid) + '/'
    remote_folder = gargs.sshalias + ':' + gargs.dir
    remote_folder = os.path.join(remote_folder, appid + '/')

    local_folder = local_folder.replace('\\', '/')
    remote_folder = remote_folder.replace('\\', '/')

    local_base_folder = local_folder + 'base/'
    local_package_folder = local_folder + 'package/'
    local_patch_folder = local_folder + 'patch/'
    local_control_json = local_folder + 'control.json'

    remote_base_folder = remote_folder + 'base/'
    remote_package_folder = remote_folder + 'package/'
    remote_patch_folder = remote_folder + 'patch/'
    remote_control_json = remote_folder + 'control.json'

    my_mkdir(gargs.dir + appid)
    if util.simple_path_exists(local_base_folder):
        sync_folder(local_base_folder, remote_base_folder)
    if util.simple_path_exists(local_package_folder):
        sync_folder(local_package_folder, remote_package_folder)
    if util.simple_path_exists(local_patch_folder):
        sync_folder(local_patch_folder, remote_patch_folder)
    if util.simple_path_exists(local_control_json):
        sync_folder(local_control_json, remote_control_json)

    my_chown()
    my_chmod()
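Examples #2 and #3 mirror the locally generated base/, package/ and patch/ folders plus control.json to a remote host addressed by an SSH alias, then fix ownership and permissions. The sync_folder helper is not part of the listing; a plausible sketch, assuming it wraps rsync over SSH, is:

# Hypothetical sketch of sync_folder(); the real helper may differ.
import subprocess

def sync_folder(local_path, remote_path):
    # -a preserves permissions and timestamps, -z compresses in transit,
    # --delete makes the remote tree mirror the local one.
    cmd = ['rsync', '-az', '--delete', local_path, remote_path]
    return subprocess.call(cmd) == 0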
Example #3
def main():
    def load_armory_meta(svn_url):
        tmpfile = tempfile.mktemp()
        if not util.export_svn_file_to_local(svn_url + '/' + ARMORY_META,tmpfile):
            return None
        armory_meta_obj = None
        f = None
        try:
            f = open(tmpfile,'rb')
            armory_meta_obj = json.load(f)
        except Exception as e:
            logging.debug(str(e))
        finally:
            if f is not None:f.close()
        return armory_meta_obj

    get_args()

    if gargs.dir is None:
        gargs.dir = '/var/wwwroot/armories/' 

    if not gargs.dir.endswith('/'):
        gargs.dir = gargs.dir + '/'

    inputdir = '.' if gargs.inputdir is None else gargs.inputdir

    svn_url = util.get_game_svn_url(inputdir)
    armory_meta_obj = load_armory_meta(svn_url)
    if armory_meta_obj is None:
        logging.error('could not load '+ ARMORY_META)
        return -1
    config_obj = armory_meta_obj['config']
    appid = config_obj['appid']

    args_outputfolder = '../.portal'
    local_folder = os.path.join(args_outputfolder,appid) + '/'
    remote_folder = gargs.sshalias + ':' + gargs.dir
    remote_folder = os.path.join(remote_folder,appid + '/')
    
    local_folder = local_folder.replace('\\','/')
    remote_folder = remote_folder.replace('\\','/')

    local_base_folder = local_folder + 'base/'
    local_package_folder = local_folder + 'package/'
    local_patch_folder = local_folder + 'patch/'
    local_control_json = local_folder + 'control.json'

    remote_base_folder = remote_folder + 'base/'
    remote_package_folder = remote_folder + 'package/'
    remote_patch_folder = remote_folder + 'patch/'
    remote_control_json = remote_folder + 'control.json'

    my_mkdir(gargs.dir + appid)
    if util.simple_path_exists(local_base_folder):
        sync_folder(local_base_folder,remote_base_folder)
    if util.simple_path_exists(local_package_folder):
        sync_folder(local_package_folder,remote_package_folder)
    if util.simple_path_exists(local_patch_folder):
        sync_folder(local_patch_folder,remote_patch_folder)
    if util.simple_path_exists(local_control_json):
        sync_folder(local_control_json,remote_control_json)

    my_chown()
    my_chmod()
Example #4
def main(slave = False,volsize = None,verbose = False,
    outputfolder = '../.portal',armoryjson = None):

    def make_sisium_json():
        sisium_config = args_input + '/' + '.pandora/sisium.json'
        folder_name,base_name = os.path.split(sisium_config)
        util.simple_make_dirs(folder_name)
        sisium_obj = {}
        sisium_obj['version'] = int(version)
        f1 = open(sisium_config,'w+b')#new json file
        json.dump(sisium_obj,f1,indent=4)#dump json
        f1.close()
        
    def remove_install_json():
        try:
            tmp = os.path.join(args_input,'.install.json')
            util.simple_remove_file(tmp)
        except:
            pass

    def make_install_json():
        try:
            #we don't care .svn folder when making installer
            walk.walkutil(args_input,None,add_extract,'.svn')
            o1 = {}
            o1['to_extract'] = to_extract 
            o1['to_backup'] = [] 
            tmp = os.path.join(args_input,'.install.json')
            f = open(tmp,'w+b')
            #print json.dumps(o1)
            json.dump(o1,f)
            f.close()
        except Exception as e:
            logging.error('failed to generate a valid json file: ' + str(e))
            exit(-1)
        finally:
            pass

    def remove_sisium():
        try:
            tgt_folder = args_input + '/' + SISIUM_FOLDER
            logging.debug('target folder is ' + tgt_folder)
            util.simple_remove_dir(tgt_folder)
        except Exception as e:
            logging.debug('failed to remove sisium folder: ' + str(e))

    def copy_sisium():
        tgt_folder =  args_input + '/' + SISIUM_FOLDER + '/'
        logging.debug('target folder is ' + tgt_folder)
        try:
            util.simple_make_dirs(tgt_folder)
        except:pass
        try:
            util.simple_copy(os.path.join(parent_path,INSTALLER),\
                args_input + '/' + SISIUM_FOLDER)
            util.simple_copy(os.path.join(parent_path,STUB_LUA),\
                args_input + '/' + SISIUM_FOLDER)
        except Exception as e:
            logging.error('failed to copy files: ' + str(e))
            exit(-1)
        

    global args_input
    global args_outputfolder
    global newest_objs
    
    if slave:
        args_volsize = volsize
        args_verbose = verbose
        args_outputfolder = outputfolder
        args_armoryjson = armoryjson
    else:
        args = get_args()
        args_volsize = args.volsize
        args_verbose = args.verbose
        args_armoryjson = args.armoryjson
        args_outputfolder = args.outputfolder

    if args_verbose:
        level = logging.DEBUG
    else:
        level = logging.INFO
    logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',\
        datefmt='%m/%d/%Y %I:%M:%S %p',\
        level=level)

    changed = False
    old_revision = ''
    new_revision = ''
    package_exe_name = ''


    svn_url = util.get_game_svn_url('.')
    armory_meta_obj = load_armory_meta(svn_url)
    if armory_meta_obj is None:
        logging.error('could not load '+ ARMORY_META)
        return -1

    appid = armory_meta_obj['config']['appid']
    #args_input = appid + '/base/1'
    #args_input = os.path.join(args_outputfolder, args_input)
    args_input = './trunk'
    args_input = args_input.replace('\\','/')
    logging.debug('input folder is ' + args_input)
   
    #try:
    #    old_revision = util.get_revision_by_path(args_input)
    #except:
    #    exit('error get revision of first version')

    #if not old_revision:
    #    exit('error get revision of first version')

    armoryjson_path = os.path.join(args_outputfolder,args_armoryjson)
    f1 = open(armoryjson_path,'rb')
    o1 = json.load(f1)
    f1.close()
    newest_objs = o1[o1['latest']]

    try:
        game_name = armory_meta_obj['config']['game_name']
        versions = armory_meta_obj['config']['versions']['main']
        versions.sort()
        version = str(max(versions))
        new_revision = util.find_revision_from_armory_meta(armory_meta_obj,\
            version,'main')

        #if new_revision is None:
        #    exit('fail to find target revision')
        #if not util.update_path_to_revision(args_input,new_revision):
        #    exit('fail to update to target revision')
        changed = True
        #make archive

        remove_sisium()
        remove_install_json()

        make_sisium_json()
        make_install_json()
        logging.debug('copy sisium folder\'s files')
        copy_sisium()

        _package_name = armory_meta_obj['config']['package_name']
        _package_name = ''.join([_package_name,'-',version,'.ana'])
        output_folder = appid + '/package/' + version
        output_folder = \
            os.path.join(args_outputfolder,output_folder)
        package_name = os.path.join(output_folder,_package_name)
        package_name = package_name.replace('\\','/')
        util.rm_files(package_name[:-4] + '*')
        package_exe_name = package_name[:-4] + '.exe'
        vol_size = 90*1024*1024 if args_volsize is None else args_volsize
        util.make_archive(args_input,package_name,vol_size)
        append_stub(package_name,output_folder)
        util.simple_move(package_name,package_exe_name)

        remove_sisium()
        remove_install_json()

    except Exception as e:
        logging.error(e)
        exit('fail')
    finally:
        pass
        #if changed:
        #    try:
        #        if not util.update_path_to_revision(args_input,old_revision):
        #            logging.error('fail to restore revison')
        #    except:
        #        logging.error('fail to restore revison')

    folder_name,base_name = os.path.split(package_exe_name)

    #update control_json
    logging.info('update control_json')
    f1 = None
    try:
        control_json_path = \
            os.path.join(args_outputfolder,appid + '/' + control_json)
        f1 = open(control_json_path,'rb')
        control_obj = json.load(f1)
        f1.close()
    finally:
        if f1 is not None:
            f1.close()

    if not control_obj.has_key('package'):
        control_obj['package'] = {}
    control_obj['package'][version] = []
    package_unit = {}
    package_unit['url'] = '/package/' + version + '/' + base_name
    package_unit['size'] = util.simple_getsize(package_exe_name) 
    control_obj['package'][version].append(package_unit)
    i = 1
    while 1:
        tmp_name = ''.join([package_name[:-4],'.ana.',str(i)])
        if util.simple_path_exists(tmp_name):
            folder_name,base_name = os.path.split(tmp_name)
            package_unit = {}
            package_unit['url'] = '/package/' + version + '/' + base_name
            package_unit['size'] = util.simple_getsize(tmp_name) 
            control_obj['package'][version].append(package_unit)
        else:
            break
        i += 1

    f1 = None
    try:
        f1 = open(control_json_path,'w+b')#new json file
        json.dump(control_obj,f1,indent=4)#dump json
        f1.close()
    finally:
        if f1 is not None:
            f1.close()

    logging.info('installer: ' + package_exe_name + ' generation successful')
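Example #4 opens, loads or dumps, and closes JSON files several times, guarding each handle with try/finally. A sketch of two small helpers using with blocks, assuming the same 'rb'/'w+b' modes as the original Python 2 code, keeps that pattern in one place:

# Sketch of JSON read/write helpers; not part of the original module.
import json

def read_json(path):
    with open(path, 'rb') as f:
        return json.load(f)

def write_json(path, obj):
    # 'w+b' mirrors the mode used throughout the examples (Python 2).
    with open(path, 'w+b') as f:
        json.dump(obj, f, indent=4)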
Example #5
def pandora_maker():
    changed = False
    svn_url = util.get_game_svn_url('.')
    armory_meta_obj = load_armory_meta(svn_url)
    if armory_meta_obj is None:
        logging.error('could not load '+ ARMORY_META)
        return -1

    appid = armory_meta_obj['config']['appid']
    args_input = appid + '/base/1'
    args_input = os.path.join('../.portal', args_input)
    args_input = args_input.replace('\\','/')
    logging.debug('input folder is ' + args_input)

    try:
        old_revision = util.get_revision_by_path(args_input)
    except:
        exit('failed to get the revision of the first version')

    if not old_revision:
        exit('failed to get the revision of the first version')

    try:
        game_name = armory_meta_obj['config']['game_name']
        versions = armory_meta_obj['config']['versions']['main']
        versions.sort()
        version = str(max(versions))
        new_revision = util.find_revision_from_armory_meta(armory_meta_obj,\
            version,'main')

        if new_revision is None:
            exit('fail to find target revision')
        if not util.update_path_to_revision(args_input,new_revision):
            exit('fail to update to target revision')
        changed = True

        custom_version = armory_meta_obj['config']['custom_version']

        def pandora_zip():
            import zipfile
            try:
                outdir = '../.portal/pandora_manager/package/'
                util.simple_make_dirs(outdir)
            except Exception as e:
                logging.error(str(e))
            with zipfile.ZipFile(outdir + 'pandora-' + version + '.zip', 'w',\
                zipfile.ZIP_DEFLATED) as myzip:
                def foo(filename):
                    foo1,bar1 = os.path.split(filename)
                    if bar1 == 'pandora.exe':
                        zip_path = bar1
                    else:
                        zip_path = custom_version + '/' + filename
                    zip_path = zip_path.replace('\\','/')
                    _filename = os.path.join(args_input,filename).replace('\\','/')
                    myzip.write(_filename,zip_path)
                walk.walkutil(args_input,None,foo,'.svn')
                myzip.close()

        pandora_zip()


    except Exception as e:
        logging.error(e)
        exit('fail')
    finally:
        if changed:
            try:
                if not util.update_path_to_revision(args_input,old_revision):
                    logging.error('failed to restore revision')
            except:
                logging.error('failed to restore revision')
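Example #5 zips the checked-out tree with zipfile while skipping .svn folders via walk.walkutil. A standard-library-only sketch of the same idea using os.walk is:

# Sketch of zipping a tree while pruning .svn folders, roughly what
# pandora_zip() does through walk.walkutil(); paths here are illustrative.
import os
import zipfile

def zip_tree(src_dir, zip_path, skip='.svn'):
    with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zf:
        for root, dirs, files in os.walk(src_dir):
            dirs[:] = [d for d in dirs if d != skip]  # prune in place
            for name in files:
                full = os.path.join(root, name)
                arcname = os.path.relpath(full, src_dir).replace('\\', '/')
                zf.write(full, arcname)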
Example #6
def main(slave = False,volsize = None,armoryjson = None,version = None,
    outputfolder = '../.portal'):

    def copy_to_tmpdir(o1,delta_dir):
        latest_dir = o1['latest']
        latest_dir = os.path.join(args_outputfolder,latest_dir)
        if not latest_dir.endswith('/') and not latest_dir.endswith('\\'):
            latest_dir = latest_dir + '/'

        update_tmp_folder = tempfile.mkdtemp()
        tmp_dir = update_tmp_folder + '/'

        coding = util.simple_get_encoding()

        #copy lastest files to tmp dir
        for each in o1['template']['to_extract']:
            src = (latest_dir + each).encode(coding).replace('\\','/')
            dst = (tmp_dir + each).encode(coding).replace('\\','/')
            logging.debug('src is ' + src)
            logging.debug('dst is ' + dst)
            folder_name,base_name = os.path.split(dst)
            util.simple_make_dirs(folder_name)
            util.simple_copy(src,dst)
        #copy delta files to tmp dir
        for each in o1['template']['to_delta']:
            src = (delta_dir + '/' + each).encode(coding).replace('\\','/')
            dst = (tmp_dir + each).encode(coding).replace('\\','/')
            folder_name,base_name = os.path.split(dst)
            util.simple_make_dirs(folder_name)
            util.simple_copy(src,dst)
            
        sisium_config = tmp_dir + '.pandora/sisium.json'
        folder_name,base_name = os.path.split(sisium_config)
        util.simple_make_dirs(folder_name)
        sisium_obj = {}
        sisium_obj['version'] = int(o1['latest_num'])
        f1 = open(sisium_config,'w+b')#new json file
        json.dump(sisium_obj,f1,indent=4)#dump json
        f1.close()

        return update_tmp_folder

    if slave:
        args_volsize = volsize
        args_armoryjson = armoryjson
        args_version = version
        args_outputfolder = outputfolder
    else:
        args = get_args()
        args_volsize = args.volsize
        args_armoryjson = args.armoryjson
        args_version = args.version
        args_outputfolder = args.outputfolder

    f1 = None
    try:
        armoryjson_path = os.path.join(args_outputfolder,args_armoryjson)
        f1 = open(armoryjson_path,'rb')
        o1 = json.load(f1)
        f1.close()

        latest = o1['latest']
        target = o1[args_version]
        appid = o1['appid']

        svn_url = util.get_game_svn_url('.')

        def load_armory_meta(svn_url):
            tmpfile = tempfile.mktemp()
            if not util.export_svn_file_to_local(svn_url + '/' + ARMORY_META,tmpfile):
                return None
            armory_meta_obj = None
            f = None
            try:
                f = open(tmpfile,'rb')
                armory_meta_obj = json.load(f)
            except Exception as e:
                logging.debug(str(e))
            finally:
                if f is not None:f.close()
            return armory_meta_obj

        armory_meta_obj = load_armory_meta(svn_url)
        if armory_meta_obj is None:
            logging.error('could not load '+ ARMORY_META)
            return -1

        package_name = armory_meta_obj['config']['package_name']
        
        if 1 == len(armory_meta_obj['config']['versions']['main']):
            return

        to_modify = target['to_modify']
        to_add = target['to_add']
        to_delete = target['to_delete']
        
        assert(isinstance(to_modify,dict))
        assert(isinstance(to_add,list))
        assert(isinstance(to_delete,list))

        o2 = {}
        o2['to_delete'] = to_delete
        o2['to_extract'] = to_add
        o2['to_delta'] = []
        o2['to_backup'] = []

        for each in to_modify.keys():
            if to_modify[each] is None:
                o2['to_extract'].append(each)
            else:
                o2['to_delta'].append(to_modify[each]['name'])
                
        for each in o2['to_delta']:
            p1 = ur'(.+)-\w+-\w+\.delta'
            o2['to_backup'].append(r1(each,p1))

        out_obj = {}
        out_obj['template'] = o2

        out_obj['latest'] = appid + '/base/' + latest
        out_obj['latest_num'] = latest

        delta_folder = o1['delta_folder']
        delta_folder = os.path.join(args_outputfolder,delta_folder)
        update_tmp_folder = copy_to_tmpdir(out_obj,delta_folder)
        logging.debug(update_tmp_folder)
        f1 = open(update_tmp_folder + '/' + '.install.json','w+b')
        json.dump(out_obj['template'],f1,indent=4)
        f1.close()
        
        old_new = args_version + '_' + latest
        _patch_name = ''.join(['/patch/',package_name,'-','patch','-',\
           args_version,'-',latest,'.ana'])
        patch_name = ''.join([appid,_patch_name])
        patch_name = os.path.join(args_outputfolder,patch_name)
        patch_name = patch_name.replace('\\','/')
        #remove old patches
        util.rm_files(patch_name[:-4] + '*')
        patch_exe_name = patch_name[:-4] + '.exe'
        vol_size = 90*1024*1024 if args_volsize is None else args_volsize


        #make ana file
        logging.debug(patch_name)
        #folder_name,base_name = os.path.split(patch_name)
        #util.simple_make_dirs(folder_name)
        util.make_archive(update_tmp_folder,patch_name,vol_size)
        try:
            util.simple_remove_dir(update_tmp_folder)
        except: pass
        util.simple_move(patch_name,patch_exe_name)

        #update control.json
        control_json_path = \
            os.path.join(args_outputfolder,appid + '/' + control_json)
        f1 = open(control_json_path,'rb')
        control_obj = json.load(f1)
        f1.close()
        if not control_obj.has_key('patch'):
            control_obj['patch'] = {}
        control_obj['patch'][old_new] = []
        patch_unit = {}
        #patch_unit['url'] = ''.join([control_obj['root'],'/patch/',old_new,\
        #    '/','update.exe'])
        folder_name,base_name = os.path.split(patch_exe_name)
        patch_unit['url'] = '/patch/' + base_name
        patch_unit['size'] = util.simple_getsize(patch_exe_name)
        control_obj['patch'][old_new].append(patch_unit)

        #need test: enumerate any extra volumes of the patch archive
        i = 1
        while 1:
            tmp_name = ''.join([patch_name[:-4],'.ana.',str(i)])
            if util.simple_path_exists(tmp_name):
                folder_name,base_name = os.path.split(tmp_name)
                patch_unit = {}
                patch_unit['url'] = '/patch/' + base_name
                patch_unit['size'] = util.simple_getsize(tmp_name) 
                control_obj['patch'][old_new].append(patch_unit)
            else:
                break
            i += 1
            
        f1 = open(control_json_path,'w+b')#new json file
        json.dump(control_obj,f1,indent=4)#dump json
        f1.close()

    finally:
        if f1 is not None:
            f1.close()

    logging.info('updater: ' + patch_exe_name + ' generation successful')
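Judging only by the fields Example #6 writes, the 'patch' section of control.json ends up shaped roughly as below; the version pair, file names and sizes are placeholders:

# Illustrative shape of control.json's 'patch' section; all values are made up.
patch_section_example = {
    'patch': {
        '1_3': [                                        # '<old>_<latest>'
            {'url': '/patch/mygame-patch-1-3.exe', 'size': 94371840},
            {'url': '/patch/mygame-patch-1-3.ana.1', 'size': 52428800},
        ],
    },
}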
Example #7
def main(slave = False,logname = None,diffjson = '',verbose = False,\
    outputfolder = '../.portal'):

    if slave:
        args_logname = logname
        args_diffjson = diffjson
        args_verbose = verbose
        args_outputfolder = outputfolder
    else:
        args = get_args()
        args_logname = args.logname
        args_diffjson = args.diffjson
        args_verbose = args.verbose
        args_outputfolder = args.outputfolder

    svn_user = '******'
    svn_pwd = 'zkf123456'

    latest_diffs = {}

    if args_verbose:
        level = logging.DEBUG
    else:
        level = logging.INFO
    logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',\
        datefmt='%m/%d/%Y %I:%M:%S %p',\
        level=level,
        filename=args_logname)

    all_versions = {}

    svn_url = util.get_game_svn_url('.')

    def load_armory_meta(svn_url):
        tmpfile = tempfile.mktemp()
        if not util.export_svn_file_to_local(svn_url + '/' + ARMORY_META,tmpfile):
            return None
        armory_meta_obj = None
        f = None
        try:
            f = open(tmpfile,'rb')
            armory_meta_obj = json.load(f)
        except Exception as e:
            logging.debug(str(e))
        finally:
            if f is not None:f.close()
        return armory_meta_obj

    armory_meta_obj = load_armory_meta(svn_url)
    if armory_meta_obj is None:
        logging.error('could not load '+ ARMORY_META)
        return -1

    config_obj = armory_meta_obj['config']
    versions = config_obj['versions']
    version1 = 1
    no_delta = False
    if len(versions['main']) == 1:
        no_delta = True 
    if not version1 in versions['main']:
        versions['main'].append(version1)
    newest_num = str(max(versions['main']))
    game_name = unicode_to_utf8(config_obj['game_name'])
    root_url = unicode_to_utf8(config_obj['svn_url'])
    armories_url = unicode_to_utf8(config_obj['armories_url'])
    package_mirrors = unicode_to_utf8(config_obj['package_mirrors'])
    base_mirrors = unicode_to_utf8(config_obj['base_mirrors'])
    patch_mirrors = unicode_to_utf8(config_obj['patch_mirrors'])
    appid = config_obj['appid']

    delta_folder = ''.join([appid,'/','base','/',newest_num])
    first_folder = ''.join([appid,'/','base','/','1'])
    tmp_folder = ''.join([appid,'/','.',appid])

    delta_folder = os.path.join(args_outputfolder,delta_folder)
    first_folder = os.path.join(args_outputfolder,first_folder)
    tmp_folder = os.path.join(args_outputfolder,tmp_folder)
        
    if not versions.has_key('main'):
        logging.error('must have a main version')
        return -1
    newest_version = 'main',newest_num
    for each in versions.keys():
        if versions[each] is None:
            continue
        for num in versions[each]:
            str_num = str(num)
            each = str(each)
            if (each,str_num) in all_versions:
                continue
            #r_num = util.find_revision(log,game_name,each,str_num)
            r_num = util.find_revision_from_armory_meta(armory_meta_obj,\
                str_num,each)
            if r_num is None:
                logging.error('failed to find the revision of version %s',str_num)
                return -1
            all_versions[each,str_num] = r_num

    #main 1st version num
    #first_num = util.find_revision(log,game_name,'main','1')
    first_num = util.find_revision_from_armory_meta(armory_meta_obj,'1','main')
    if first_num is None:
        logging.error('failed to find the revision of version %s',1)
        return -1

    #try:
    #    util.simple_remove_dir(tmp_folder)
    #except:
    #    pass
    #util.simple_make_dirs(tmp_folder)

    nv_url = combine_url_at_rev(util.combine_trunk_url(svn_url,''),\
        all_versions[newest_version])
    input_folder = delta_folder

    #try:
    #    util.simple_remove_dir(input_folder)
    #except:
    #    pass

    #input_folder = tmp_folder + '/' + 'main' + all_versions[newest_version]
    #if not check_out_version(nv_url,input_folder):
    #    return -1
    first_url = combine_url_at_rev(util.combine_trunk_url(svn_url,''),first_num)

    #always check out 1st version
    if not util.simple_path_exists(first_folder):
        logging.info('copy first version from local')
        util.simple_copy_folder('./trunk',first_folder)
    if not check_out_version(first_url,first_folder):
        return -1

    d_to_xdelta = {}

    def download_lastest_file(path):
        rel_name = os.path.join(input_folder,path)
        if util.simple_path_exists(rel_name):
            return
        folder_name,base_name = os.path.split(rel_name)
        util.simple_make_dirs(folder_name)
        tmp_http_url = util.convert_svnurl_to_httpurl(nv_url,root_url)#svn_url is root path
        if not tmp_http_url.endswith('/'):
            tmp_http_url += '/'
        download_url = tmp_http_url + quote_path(path)
        logging.info('latest: ' + download_url)
        logging.debug(nv_url)
        nv_svn_url = combine_url_at_rev(util.combine_trunk_url(svn_url,'') + \
            quote_path(path),\
            all_versions[newest_version])
        #logging.info(nv_svn_url)
        #logging.info(rel_name)
        #logging.info(util.simple_path_exists(rel_name))
        #util.export_svn_file_to_local(nv_svn_url,rel_name)
        
        rv = util.download_svn_file(download_url,rel_name,svn_user,svn_pwd)#download old file to folder
        if not rv:#retry
            rv = util.download_svn_file(download_url,rel_name,svn_user,svn_pwd)#download old file to folder

    lastest_changed = []

    for each in all_versions.keys():
        if each == newest_version:
            continue
        tmp_revision = all_versions[each]
        if each[0] == 'main':
            base_url = util.combine_trunk_url(svn_url,'')
        else:
            base_url = util.combine_branch_url(svn_url,each[0],'')
        tmp_url = combine_url_at_rev(base_url,tmp_revision)
        s1 = diff_between_urls(tmp_url,nv_url)
        logging.debug(s1)
        if s1 is not None:
            diff_to_newest_key = each[1] if each[0] == 'main' else None
            if diff_to_newest_key is not None:
                latest_diffs[diff_to_newest_key] = {}
                latest_diffs[diff_to_newest_key]['to_modify'] = {}
                latest_diffs[diff_to_newest_key]['to_delete'] = []
                latest_diffs[diff_to_newest_key]['to_add'] = []
            logging.info(''.join([each[0],each[1],'->',\
                newest_version[0],newest_version[1]]))
            l1 = s1.split('\n')
            for diff in l1:
                m1 = get_modified_from_svn_diff_line(diff)
                logging.debug('m1 is ' + str(m1))
                if m1 is not None:
                    attr = util.get_svn_url_attr(combine_url_at_rev(m1,all_versions[newest_version]))
                    assert(attr is not None)
                    #print 'attr is ',attr
                    if util.FOLDER == attr:
                        continue #on windows,blind to directory,just pass
                    if not m1.startswith(base_url):
                        assert(0)
                    m1 = m1.replace(base_url,'')
                    if not d_to_xdelta.has_key(each):
                        d_to_xdelta[each] = []
                    tmp_http_url = util.convert_svnurl_to_httpurl(tmp_url,root_url)#svn_url is root path
                    if not tmp_http_url.endswith('/'):
                        tmp_http_url += '/'
                    #d_to_xdelta[each].append((m1,tmp_http_url + m1))
                    rel_name = os.path.join(tmp_folder + '/' + each[0] + each[1]+'/',m1)
                    folder_name,base_name = os.path.split(rel_name)
                    util.simple_make_dirs(folder_name)
                    m1 = m1.decode(util.simple_get_encoding()).encode('utf-8')
                    if diff_to_newest_key is not None:
                        latest_diffs[diff_to_newest_key]['to_modify'][m1] = None
                    download_url = tmp_http_url + quote_path(m1)
                    tmp_svn_url = combine_url_at_rev(base_url + quote_path(m1),\
                        tmp_revision)
                    #logging.info(tmp_svn_url)
                    #logging.info(rel_name)
                    #util.export_svn_file_to_local(tmp_svn_url,rel_name)

                    logging.info(download_url)
                    rv = util.download_svn_file(download_url,rel_name,svn_user,svn_pwd)#download old file to folder
                    if not rv:#retry
                        rv = util.download_svn_file(download_url,rel_name,svn_user,svn_pwd)#download old file to folder

                    #also download the related latest version file
                    download_lastest_file(m1)

                    m1 = m1.replace('\\','/')
                    if m1 not in lastest_changed:
                        lastest_changed.append(m1)

                m2 = get_added_from_svn_diff_line(diff)
                if m2 is not None:
                    attr = util.get_svn_url_attr(combine_url_at_rev(m2,all_versions[newest_version]))
                    assert(attr is not None)
                    if util.FOLDER == attr:
                        continue
                    if diff_to_newest_key is not None:
                        m2 = m2.replace(base_url,'')
                        m2 = m2.decode(util.simple_get_encoding())
                        latest_diffs[diff_to_newest_key]['to_add'].append(m2)
                    download_lastest_file(m2)

                    m2 = m2.replace('\\','/')
                    if m2 not in lastest_changed:
                        lastest_changed.append(m2)

                m3 = get_deleted_from_svn_diff_line(diff)
                if m3 is not None:
                    if diff_to_newest_key is not None:
                        m3 = m3.replace(base_url,'')
                        m3 = m3.decode(util.simple_get_encoding())
                        latest_diffs[diff_to_newest_key]['to_delete'].append(m3)

    src_folders = []
    for each in d_to_xdelta.keys():
        version_name = each[1] if each[0] == 'main' else each[0] + '_' + each[1]
        t1 = each[0] + each[1],version_name
        src_folders.append(t1)

    logging.info(d_to_xdelta.keys())
    latest_diffs[newest_version[1]] = []
    latest_diffs['latest'] = newest_version[1]


    def make_diffs(file_name):
        if not file_name.find('.svn/') == -1:#ignore svn folder
            return
        coding = util.simple_get_encoding()
        entry1 = {}
        file_name = file_name.replace('\\','/')
        if file_name not in lastest_changed:
            return
        abs_input_name = input_folder + '/' + file_name
        entry1['size'] = util.simple_getsize(abs_input_name)#zero size input file
        entry1['name'] = file_name.decode(coding)
        if entry1['size'] != 0:
            entry1['hash'],entry1['crc32']= calc_md5(abs_input_name)
            srcs = []
            for each,version_name in src_folders:#each source folder,try to get delta
                src_file1 = tmp_folder + '/' + each + '/' + file_name
                if util.simple_path_exists(src_file1):#src exists
                    if util.simple_getsize(src_file1) != 0:#zero size file not having md5,skip it
                        src_md5 = calc_md5(src_file1)[0]
                        #if md5 already exist 
                        #or the same with input file,continue
                        output_name = ''.join([file_name,'-',version_name,'-',\
                            newest_version[1],'.delta'])
                        if src_md5 not in srcs and entry1['hash'] != src_md5:
                            if no_delta:
                                continue
                            logging.info('encoding...')
                            logging.info('input:' + abs_input_name)
                            logging.info('src:' + src_file1)
                            logging.info('output:' + output_name)
                            xdelta_code(abs_input_name,\
                                delta_folder + '/' + output_name,\
                                src_file1,1)#encode,generate a xdelta file
                            xdelta_size = util.simple_getsize(delta_folder +\
                                '/' + output_name)
                            #print each,type(each)
                            xdelta_dict = {}
                            #name should be a unicode object
                            xdelta_dict['name'] = output_name.decode(coding)
                            xdelta_dict['size'] = xdelta_size
                            xdelta_dict['hash'] = src_md5
                            srcs.append(xdelta_dict)
                            if each.startswith('main'):
                                latest_diffs[each[4:]]['to_modify'][file_name.decode(coding).encode('utf-8')] = xdelta_dict
            if len(srcs):
                entry1['deltas'] = srcs
        latest_diffs[newest_version[1]].append(entry1)
    
    latest_diffs['delta_folder'] = delta_folder
    latest_diffs['appid'] = appid

    walk.walkutil(input_folder,None,make_diffs)

    def replace_with_doubledot(path):
        import re
        return re.sub(r'[^/]+','..',path)
    
    base_folder = ''.join([appid,'/','base'])
    base_folder = os.path.join(args_outputfolder,base_folder).replace('\\','/')
    nv_list = util.get_svn_url_list(nv_url,True)
    for sth in nv_list:
        input_path = os.path.join(input_folder,sth).replace('\\','/')
        if util.simple_path_exists(input_path) and \
            sth.replace('\\','/') in lastest_changed:
            continue
        else:
            first_path = os.path.join(first_folder,sth).replace('\\','/')
            #files not change,make a soft link to 1st version file
            if util.simple_path_exists(first_path):
                if os.path.isfile(first_path):
                    tmp_path = first_path.replace(base_folder + '/1/','',1)
                    folder_name,base_name = os.path.split(tmp_path)
                    parent_folder_name = os.path.join(replace_with_doubledot(folder_name),'../1')
                    folder_name = os.path.join(parent_folder_name,folder_name)
                    tmp_path = os.path.join(folder_name,base_name).replace('\\','/')
                    soft_link_file(tmp_path,input_path)

                    #update diffs.json
                    coding = util.simple_get_encoding()
                    entry1 = {}
                    entry1['size'] = util.simple_getsize(first_path)#zero size input file
                    entry1['name'] = sth.decode(coding)
                    if entry1['size'] != 0:
                        entry1['hash'],entry1['crc32'] = calc_md5(first_path)
                    latest_diffs[newest_version[1]].append(entry1)

                else:
                    util.simple_make_dirs(input_path)
            else:
                pass
                #assert(0)
                tmp = sth
                tmp = tmp.decode(util.simple_get_encoding())
                download_lastest_file(tmp)
    
    
    diffjson_path = os.path.join(args_outputfolder,args_diffjson)
    f1 = open(diffjson_path,'w+b')#new json file
    json.dump(latest_diffs,f1,indent=4)#dump json
    f1.close()

    control_obj = {}
    control_obj['root'] = util.simle_join_path(armories_url,appid)
    control_obj['package_mirrors'] = []
    control_obj['patch_mirrors'] = []
    control_obj['base_mirrors'] = []
    for each in package_mirrors:
        control_obj['package_mirrors'].append(util.simle_join_path(each,appid))
    for each in patch_mirrors:
        control_obj['patch_mirrors'].append(util.simle_join_path(each,appid))
    for each in base_mirrors:
        control_obj['base_mirrors'].append(util.simle_join_path(each,appid))
    control_obj['latest'] = latest_diffs['latest']
    control_obj['base'] = {}
    control_obj['base'][newest_version[1]] = latest_diffs[newest_version[1]]
    for each in control_obj['base'][newest_version[1]]:
        if each.has_key('crc32'):
            each.pop('crc32')
        

    control_json_path = \
        os.path.join(args_outputfolder,appid + '/' + control_json)
    f1 = open(control_json_path,'w+b')#new json file
    json.dump(control_obj,f1,indent=4)#dump json
    f1.close()

    logging.info('armory generation successful')
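Examples #7 and #8 assemble the diffs json that the installer and updater generators read back. Based on the keys written above, its overall shape is roughly the following; every concrete name, hash and size is a placeholder:

# Illustrative shape of the diffs json produced by diffs.main(); values are made up.
latest_diffs_example = {
    'latest': '3',                       # newest main version number
    'appid': 'mygame',
    'delta_folder': '../.portal/mygame/base/3',
    '1': {                               # diff from main version 1 to the latest
        'to_modify': {
            'data/map.bin': {'name': 'data/map.bin-1-3.delta',
                             'size': 2048,
                             'hash': 'md5-of-the-old-file'},
        },
        'to_add': ['data/new_level.bin'],
        'to_delete': ['data/old_level.bin'],
    },
    '3': [                               # per-file entries for the latest base
        {'name': 'game.exe', 'size': 4096,
         'hash': 'md5-of-the-file', 'crc32': 'crc32-of-the-file'},
    ],
}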
Example #8
def main(slave = False,logname = None,diffjson = '',verbose = False,\
    outputfolder = '../.portal'):

    if slave:
        args_logname = logname
        args_diffjson = diffjson
        args_verbose = verbose
        args_outputfolder = outputfolder
    else:
        args = get_args()
        args_logname = args.logname
        args_diffjson = args.diffjson
        args_verbose = args.verbose
        args_outputfolder = args.outputfolder

    svn_user = '******'
    svn_pwd = 'zkf123456'

    latest_diffs = {}

    if args_verbose:
        level = logging.DEBUG
    else:
        level = logging.INFO
    logging.basicConfig(format='%(asctime)s [%(levelname)s] %(message)s',\
        datefmt='%m/%d/%Y %I:%M:%S %p',\
        level=level,
        filename=args_logname)

    all_versions = {}

    svn_url = util.get_game_svn_url('.')

    def load_armory_meta(svn_url):
        tmpfile = tempfile.mktemp()
        if not util.export_svn_file_to_local(svn_url + '/' + ARMORY_META,
                                             tmpfile):
            return None
        armory_meta_obj = None
        f = None
        try:
            f = open(tmpfile, 'rb')
            armory_meta_obj = json.load(f)
        except Exception as e:
            logging.debug(str(e))
        finally:
            if f is not None: f.close()
        return armory_meta_obj

    armory_meta_obj = load_armory_meta(svn_url)
    if armory_meta_obj is None:
        logging.error('could not load ' + ARMORY_META)
        return -1

    config_obj = armory_meta_obj['config']
    versions = config_obj['versions']
    version1 = 1
    no_delta = False
    if len(versions['main']) == 1:
        no_delta = True
    if not version1 in versions['main']:
        versions['main'].append(version1)
    newest_num = str(max(versions['main']))
    game_name = unicode_to_utf8(config_obj['game_name'])
    root_url = unicode_to_utf8(config_obj['svn_url'])
    armories_url = unicode_to_utf8(config_obj['armories_url'])
    package_mirrors = unicode_to_utf8(config_obj['package_mirrors'])
    base_mirrors = unicode_to_utf8(config_obj['base_mirrors'])
    patch_mirrors = unicode_to_utf8(config_obj['patch_mirrors'])
    appid = config_obj['appid']

    delta_folder = ''.join([appid, '/', 'base', '/', newest_num])
    first_folder = ''.join([appid, '/', 'base', '/', '1'])
    tmp_folder = ''.join([appid, '/', '.', appid])

    delta_folder = os.path.join(args_outputfolder, delta_folder)
    first_folder = os.path.join(args_outputfolder, first_folder)
    tmp_folder = os.path.join(args_outputfolder, tmp_folder)

    if not versions.has_key('main'):
        logging.error('must have a main version')
        return -1
    newest_version = 'main', newest_num
    for each in versions.keys():
        if versions[each] is None:
            continue
        for num in versions[each]:
            str_num = str(num)
            each = str(each)
            if (each, str_num) in all_versions:
                continue
            #r_num = util.find_revision(log,game_name,each,str_num)
            r_num = util.find_revision_from_armory_meta(armory_meta_obj,\
                str_num,each)
            if r_num is None:
                logging.error('failed to find the revision of version %s', str_num)
                return -1
            all_versions[each, str_num] = r_num

    #main 1st version num
    #first_num = util.find_revision(log,game_name,'main','1')
    first_num = util.find_revision_from_armory_meta(armory_meta_obj, '1',
                                                    'main')
    if first_num is None:
        logging.error('failed to find the revision of version %s', 1)
        return -1

    #try:
    #    util.simple_remove_dir(tmp_folder)
    #except:
    #    pass
    #util.simple_make_dirs(tmp_folder)

    nv_url = combine_url_at_rev(util.combine_trunk_url(svn_url,''),\
        all_versions[newest_version])
    input_folder = delta_folder

    #try:
    #    util.simple_remove_dir(input_folder)
    #except:
    #    pass

    #input_folder = tmp_folder + '/' + 'main' + all_versions[newest_version]
    #if not check_out_version(nv_url,input_folder):
    #    return -1
    first_url = combine_url_at_rev(util.combine_trunk_url(svn_url, ''),
                                   first_num)

    #always check out 1st version
    if not util.simple_path_exists(first_folder):
        logging.info('copy first version from local')
        util.simple_copy_folder('./trunk', first_folder)
    if not check_out_version(first_url, first_folder):
        return -1

    d_to_xdelta = {}

    def download_lastest_file(path):
        rel_name = os.path.join(input_folder, path)
        if util.simple_path_exists(rel_name):
            return
        folder_name, base_name = os.path.split(rel_name)
        util.simple_make_dirs(folder_name)
        tmp_http_url = util.convert_svnurl_to_httpurl(
            nv_url, root_url)  #svn_url is root path
        if not tmp_http_url.endswith('/'):
            tmp_http_url += '/'
        download_url = tmp_http_url + quote_path(path)
        logging.info('latest: ' + download_url)
        logging.debug(nv_url)
        nv_svn_url = combine_url_at_rev(util.combine_trunk_url(svn_url,'') + \
            quote_path(path),\
            all_versions[newest_version])
        #logging.info(nv_svn_url)
        #logging.info(rel_name)
        #logging.info(util.simple_path_exists(rel_name))
        #util.export_svn_file_to_local(nv_svn_url,rel_name)

        rv = util.download_svn_file(download_url, rel_name, svn_user,
                                    svn_pwd)  #download old file to folder
        if not rv:  #retry
            rv = util.download_svn_file(download_url, rel_name, svn_user,
                                        svn_pwd)  #download old file to folder

    lastest_changed = []

    for each in all_versions.keys():
        if each == newest_version:
            continue
        tmp_revision = all_versions[each]
        if each[0] == 'main':
            base_url = util.combine_trunk_url(svn_url, '')
        else:
            base_url = util.combine_branch_url(svn_url, each[0], '')
        tmp_url = combine_url_at_rev(base_url, tmp_revision)
        s1 = diff_between_urls(tmp_url, nv_url)
        logging.debug(s1)
        if s1 is not None:
            diff_to_newest_key = each[1] if each[0] == 'main' else None
            if diff_to_newest_key is not None:
                latest_diffs[diff_to_newest_key] = {}
                latest_diffs[diff_to_newest_key]['to_modify'] = {}
                latest_diffs[diff_to_newest_key]['to_delete'] = []
                latest_diffs[diff_to_newest_key]['to_add'] = []
            logging.info(''.join([each[0],each[1],'->',\
                newest_version[0],newest_version[1]]))
            l1 = s1.split('\n')
            for diff in l1:
                m1 = get_modified_from_svn_diff_line(diff)
                logging.debug('m1 is ' + str(m1))
                if m1 is not None:
                    attr = util.get_svn_url_attr(
                        combine_url_at_rev(m1, all_versions[newest_version]))
                    assert (attr is not None)
                    #print 'attr is ',attr
                    if util.FOLDER == attr:
                        continue  #on windows,blind to directory,just pass
                    if not m1.startswith(base_url):
                        assert (0)
                    m1 = m1.replace(base_url, '')
                    if not d_to_xdelta.has_key(each):
                        d_to_xdelta[each] = []
                    tmp_http_url = util.convert_svnurl_to_httpurl(
                        tmp_url, root_url)  #svn_url is root path
                    if not tmp_http_url.endswith('/'):
                        tmp_http_url += '/'
                    #d_to_xdelta[each].append((m1,tmp_http_url + m1))
                    rel_name = os.path.join(
                        tmp_folder + '/' + each[0] + each[1] + '/', m1)
                    folder_name, base_name = os.path.split(rel_name)
                    util.simple_make_dirs(folder_name)
                    m1 = m1.decode(util.simple_get_encoding()).encode('utf-8')
                    if diff_to_newest_key is not None:
                        latest_diffs[diff_to_newest_key]['to_modify'][
                            m1] = None
                    download_url = tmp_http_url + quote_path(m1)
                    tmp_svn_url = combine_url_at_rev(base_url + quote_path(m1),\
                        tmp_revision)
                    #logging.info(tmp_svn_url)
                    #logging.info(rel_name)
                    #util.export_svn_file_to_local(tmp_svn_url,rel_name)

                    logging.info(download_url)
                    rv = util.download_svn_file(
                        download_url, rel_name, svn_user,
                        svn_pwd)  #download old file to folder
                    if not rv:  #retry
                        rv = util.download_svn_file(
                            download_url, rel_name, svn_user,
                            svn_pwd)  #download old file to folder

                    #also download the related latest version file
                    download_lastest_file(m1)

                    m1 = m1.replace('\\', '/')
                    if m1 not in lastest_changed:
                        lastest_changed.append(m1)

                m2 = get_added_from_svn_diff_line(diff)
                if m2 is not None:
                    attr = util.get_svn_url_attr(
                        combine_url_at_rev(m2, all_versions[newest_version]))
                    assert (attr is not None)
                    if util.FOLDER == attr:
                        continue
                    if diff_to_newest_key is not None:
                        m2 = m2.replace(base_url, '')
                        m2 = m2.decode(util.simple_get_encoding())
                        latest_diffs[diff_to_newest_key]['to_add'].append(m2)
                    download_lastest_file(m2)

                    m2 = m2.replace('\\', '/')
                    if m2 not in lastest_changed:
                        lastest_changed.append(m2)

                m3 = get_deleted_from_svn_diff_line(diff)
                if m3 is not None:
                    if diff_to_newest_key is not None:
                        m3 = m3.replace(base_url, '')
                        m3 = m3.decode(util.simple_get_encoding())
                        latest_diffs[diff_to_newest_key]['to_delete'].append(
                            m3)

    src_folders = []
    for each in d_to_xdelta.keys():
        version_name = each[1] if each[0] == 'main' else each[0] + '_' + each[1]
        t1 = each[0] + each[1], version_name
        src_folders.append(t1)

    logging.info(d_to_xdelta.keys())
    latest_diffs[newest_version[1]] = []
    latest_diffs['latest'] = newest_version[1]

    def make_diffs(file_name):
        if not file_name.find('.svn/') == -1:  #ignore svn folder
            return
        coding = util.simple_get_encoding()
        entry1 = {}
        file_name = file_name.replace('\\', '/')
        if file_name not in lastest_changed:
            return
        abs_input_name = input_folder + '/' + file_name
        entry1['size'] = util.simple_getsize(
            abs_input_name)  #zero size input file
        entry1['name'] = file_name.decode(coding)
        if entry1['size'] != 0:
            entry1['hash'], entry1['crc32'] = calc_md5(abs_input_name)
            srcs = []
            for each, version_name in src_folders:  #each source folder,try to get delta
                src_file1 = tmp_folder + '/' + each + '/' + file_name
                if util.simple_path_exists(src_file1):  #src exists
                    if util.simple_getsize(
                            src_file1
                    ) != 0:  #zero size file not having md5,skip it
                        src_md5 = calc_md5(src_file1)[0]
                        #if md5 already exist
                        #or the same with input file,continue
                        output_name = ''.join([file_name,'-',version_name,'-',\
                            newest_version[1],'.delta'])
                        if src_md5 not in srcs and entry1['hash'] != src_md5:
                            if no_delta:
                                continue
                            logging.info('encoding...')
                            logging.info('input:' + abs_input_name)
                            logging.info('src:' + src_file1)
                            logging.info('output:' + output_name)
                            xdelta_code(abs_input_name,\
                                delta_folder + '/' + output_name,\
                                src_file1,1)#encode,generate a xdelta file
                            xdelta_size = util.simple_getsize(delta_folder +\
                                '/' + output_name)
                            #print each,type(each)
                            xdelta_dict = {}
                            #name should be a unicode object
                            xdelta_dict['name'] = output_name.decode(coding)
                            xdelta_dict['size'] = xdelta_size
                            xdelta_dict['hash'] = src_md5
                            srcs.append(xdelta_dict)
                            if each.startswith('main'):
                                latest_diffs[each[4:]]['to_modify'][
                                    file_name.decode(coding).encode(
                                        'utf-8')] = xdelta_dict
            if len(srcs):
                entry1['deltas'] = srcs
        latest_diffs[newest_version[1]].append(entry1)

    latest_diffs['delta_folder'] = delta_folder
    latest_diffs['appid'] = appid

    walk.walkutil(input_folder, None, make_diffs)

    def replace_with_doubledot(path):
        import re
        return re.sub(r'[^/]+', '..', path)

    base_folder = ''.join([appid, '/', 'base'])
    base_folder = os.path.join(args_outputfolder,
                               base_folder).replace('\\', '/')
    nv_list = util.get_svn_url_list(nv_url, True)
    for sth in nv_list:
        input_path = os.path.join(input_folder, sth).replace('\\', '/')
        if util.simple_path_exists(input_path) and \
            sth.replace('\\','/') in lastest_changed:
            continue
        else:
            first_path = os.path.join(first_folder, sth).replace('\\', '/')
            #files not change,make a soft link to 1st version file
            if util.simple_path_exists(first_path):
                if os.path.isfile(first_path):
                    tmp_path = first_path.replace(base_folder + '/1/', '', 1)
                    folder_name, base_name = os.path.split(tmp_path)
                    parent_folder_name = os.path.join(
                        replace_with_doubledot(folder_name), '../1')
                    folder_name = os.path.join(parent_folder_name, folder_name)
                    tmp_path = os.path.join(folder_name,
                                            base_name).replace('\\', '/')
                    soft_link_file(tmp_path, input_path)

                    #update diffs.json
                    coding = util.simple_get_encoding()
                    entry1 = {}
                    entry1['size'] = util.simple_getsize(
                        first_path)  #zero size input file
                    entry1['name'] = sth.decode(coding)
                    if entry1['size'] != 0:
                        entry1['hash'], entry1['crc32'] = calc_md5(first_path)
                    latest_diffs[newest_version[1]].append(entry1)

                else:
                    util.simple_make_dirs(input_path)
            else:
                pass
                #assert(0)
                tmp = sth
                tmp = tmp.decode(util.simple_get_encoding())
                download_lastest_file(tmp)

    diffjson_path = os.path.join(args_outputfolder, args_diffjson)
    f1 = open(diffjson_path, 'w+b')  #new json file
    json.dump(latest_diffs, f1, indent=4)  #dump json
    f1.close()

    control_obj = {}
    control_obj['root'] = util.simle_join_path(armories_url, appid)
    control_obj['package_mirrors'] = []
    control_obj['patch_mirrors'] = []
    control_obj['base_mirrors'] = []
    for each in package_mirrors:
        control_obj['package_mirrors'].append(util.simle_join_path(
            each, appid))
    for each in patch_mirrors:
        control_obj['patch_mirrors'].append(util.simle_join_path(each, appid))
    for each in base_mirrors:
        control_obj['base_mirrors'].append(util.simle_join_path(each, appid))
    control_obj['latest'] = latest_diffs['latest']
    control_obj['base'] = {}
    control_obj['base'][newest_version[1]] = latest_diffs[newest_version[1]]
    for each in control_obj['base'][newest_version[1]]:
        if each.has_key('crc32'):
            each.pop('crc32')


    control_json_path = \
        os.path.join(args_outputfolder,appid + '/' + control_json)
    f1 = open(control_json_path, 'w+b')  #new json file
    json.dump(control_obj, f1, indent=4)  #dump json
    f1.close()

    logging.info('armory generation successful')