Example #1
def cleanup_one():
    """Remove a single (problematic) build."""
    builds = app.config["BUILDS"]
    secret_key = request.values.get('secret_key', '')
    hash = request.values.get('hash', '')

    if not hash:
        res = "<error>Don't know what to remove. Please enter hash number in query!</error>\n"
        return Response(res, mimetype='application/xml')

    if check_secret_key(secret_key):
        b = builds.get(hash, None)
        if b:
            log.info("Removing %s" % hash)
            del builds[hash]
            b.remove_files()
            res = ("<message>\n<removed hash='%s'/>\n</message>" % hash)
        else:
            log.error("Hash not found, trying to remove files.")
            res = "<error>Failed to remove build: hash not found, trying to remove files.</error>\n"
            if hash in get_build_directories(Config.builds_dir):
                rmdir(os.path.join(Config.builds_dir, hash))
                log.info("Files removed for hash %s" % hash)
            else:
                log.info("No files to be removed for hash %s" % hash)
    else:
        log.error("No builds will be removed.")
        res = "<error>Failed to remove all builds: secret key could not be confirmed.</error>\n"

    return Response(res, mimetype='application/xml')
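A note on check_secret_key, whose implementation does not appear in this listing: validating a shared secret is usually done with a constant-time comparison. A hypothetical sketch, assuming the key lives in the Flask config under a SECRET_KEY-style name:

import hmac

def check_secret_key(candidate):
    # Hypothetical helper; the app's real version is not shown on this page.
    # hmac.compare_digest avoids leaking the key through timing differences.
    expected = app.config.get("SECRET_KEY", "")
    return bool(expected) and hmac.compare_digest(candidate, expected)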
Example #2
    def run(self, *args):
        if self.before_run_hook:
            self.before_run_hook()

        prefix.check_for_pg_latest()  # make sure we're pointed at a real instance
        utils.add_github_to_known_hosts() # make sure ssh doesn't prompt

        repo = self.repo_path_for_url(self.repo_url)

        if len(args) == 0:
            git_ref = self.default_git_ref
        else:
            git_ref = args[0]

        utils.rmdir(repo, force=True) # force because git write-protects files
        run('git clone -q {} {}'.format(self.repo_url, repo))

        with cd(repo), path('{}/bin'.format(config.paths['pg-latest'])):
            run('git checkout {}'.format(git_ref))
            run('make install')

        if self.post_install_hook:
            self.post_install_hook()

        utils.psql('CREATE EXTENSION {} CASCADE;'.format(self.extension_name))
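The force=True comment above is the key detail: git write-protects files under .git, so a plain recursive delete can fail on them. utils.rmdir itself is not shown in this listing; a minimal sketch of a force-capable variant, under that assumption:

import os
import shutil
import stat

def rmdir(path, force=False):
    # Sketch only; the project's real utils.rmdir is not shown here.
    # With force=True, clear the read-only bit and retry the failed delete.
    def make_writable(func, target, exc_info):
        os.chmod(target, stat.S_IWRITE)
        func(target)
    if os.path.isdir(path):
        shutil.rmtree(path, onerror=make_writable if force else None)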
Example #3
def verify_injection(team, config_file):
    config = load_config(config_file)
    timeout = config["exploit_timeout"]["injection_phase"]
    repo_owner = config['repo_owner']
    repo_name = config['teams'][team]['repo_name']
    bug_branches = config['teams'][team]['bug_branches']
    clone(repo_owner, repo_name)
    branches = bug_branches if len(bug_branches) > 0 \
        else list_branches(repo_name)
    if "master" in branches:
        branches.remove("master")  # master branch is not verification target

    for branch in branches:
        checkout(repo_name, branch)
        exploit_dir = get_exploit_dir(repo_name, branch, config, team)
        bug_branch_result, _ = \
            verify_exploit(exploit_dir, repo_name, branch, timeout, config)

        checkout(repo_name, "master")
        master_result, _ = \
            verify_exploit(exploit_dir, repo_name, "master", timeout, config)

        rmdir(exploit_dir)

        if master_result == False and bug_branch_result == True:
            print('[*] Successfully verified branch "%s".' % branch)
        elif bug_branch_result == True:
            print('[*] Exploit for branch "%s" works, but it also works on '
                  'master branch, which indicates some error.' % branch)
            sys.exit()
        else:
            print('[*] Failed to verify exploit in branch "%s".' % branch)
            sys.exit()

    rmdir(repo_name)
Example #4
    def run(self, argv, dependencies):
        self.parse_args(argv)
        if self.is_clean_before_build:
            utils.rmdir(self.simulator_output_dir)

        # backup some files
        modify_files = self.get_depend_project_file_list()
        if cocos.os_is_mac() and self.build_mac:
            modify_files.append(os.path.join(self.simulator_abs_path, 'frameworks/runtime-src/proj.ios_mac/mac/Info.plist'))

        if cocos.os_is_win32() and self.build_win:
            modify_files.append(os.path.join(self.simulator_abs_path, 'frameworks/runtime-src/proj.win32/game.rc'))

        self.backup_files(modify_files)

        try:
            # modify bundle version
            self.update_bundle_version()

            # modify project config files
            self.change_cocos2d_debug_macro_to_1(modify_files)

            # compile simulator
            self.do_compile()
        finally:
            # roll back modified files
            self.rollback_files(modify_files)
            Logging.info("")
            Logging.info(self.build_log)
            Logging.info("")

        return 0
Example #5
 def remove_files(self):
     """
     Removes the files associated with this build.
     """
     self.change_status(Status.Deleted)
     log.info("Removing files")
     rmdir(self.directory)
Example #6
def start_get_hash(config, github, config_file):
    repo_owner = config['repo_owner']
    for team in config['teams']:
        repo_name = config['teams'][team]['repo_name']
        if repo_name == '-':
            continue

        print('[*] Get the commit hash of %s repo.' % repo_name)
        bug_branches = config['teams'][team]['bug_branches']
        clone(repo_owner, repo_name)
        branches = bug_branches if len(bug_branches) > 0 \
            else list_branches(repo_name)
        if "master" in branches:
            branches.remove("master") # Do not consider master branch
        for branch in branches:
            checkout(repo_name, branch)
            hash = get_latest_commit_hash(repo_name, int(time.time()), branch)
            config['teams'][team][branch] = hash
        rmdir(repo_name)

    with open(config_file, 'w') as outfile:
        json.dump(config, outfile, indent=4)

    print('[*] Successfully wrote to %s' % config_file)

    return
Example #7
def start(log_level = 0):
    global main_pid, log_dir

    # log level
    if ( log_level != 0 ): setLevel(log_level)

    # check for old logs
    if ( os.path.isdir(log_dir) ):
        files = os.listdir(log_dir)
        if ( len(files) > 0 ):

            # check if old logs already exists
            old_log_dir = os.path.join(log_dir, "old")
            if ( os.path.isdir(old_log_dir) ): utils.rmdir(old_log_dir)
            utils.mkdir(old_log_dir)
            
            for f in files:
                path = os.path.join(log_dir, f)            
                if ( os.path.isdir(path) ): continue            
                os.rename(path, os.path.join(old_log_dir, f))
    else:
        utils.mkdir(log_dir)

    # set the main process id so we know where it began.
    main_pid = os.getpid()
Example #8
def build_postgres():
    'Installs postgres'

    # Give the postgres source to the remote nodes
    sourceball_loc = utils.download_pg()
    if env.host_string != 'localhost':
        put(local_path=sourceball_loc, remote_path=sourceball_loc)

    with cd(config.paths['pg-source-balls']):
        final_dir = os.path.basename(sourceball_loc).split('.tar.bz2')[0]
        # rm makes this idempotent, if not a bit inefficient

        utils.rmdir(final_dir)
        with hide('stdout'):
            run('tar -xf {}.tar.bz2'.format(final_dir))

        with cd(final_dir):
            pg_latest = config.paths['pg-latest']
            flags = ' '.join(config.settings['pg-configure-flags'])
            with hide('stdout'):
                run('./configure --prefix={} {}'.format(pg_latest, flags))

            core_count = run('cat /proc/cpuinfo | grep "core id" | wc -l')

            with hide('stdout'):
                run('make -s -j{} install'.format(core_count))

            with cd('contrib'), hide('stdout'):
                run('make -s install')
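The core_count pipeline counts the "core id" lines in /proc/cpuinfo on the remote host, one per logical CPU. Running locally rather than through Fabric's run(), the standard library reports the same number:

import os

core_count = os.cpu_count()  # logical CPUs, equivalent to the grep | wc -l pipeline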
Example #9
    def compile_mac_ios(self):
        xcode_proj_info = self.cfg_info[LibsCompiler.KEY_XCODE_PROJS_INFO]
        if self.mode == 'debug':
            mode_str = 'Debug'
        else:
            mode_str = 'Release'

        XCODE_CMD_FMT = "xcodebuild -project \"%s\" -configuration %s -target \"%s\" %s CONFIGURATION_BUILD_DIR=%s"
        ios_out_dir = os.path.join(self.lib_dir, "ios")
        mac_out_dir = os.path.join(self.lib_dir, "mac")
        ios_sim_libs_dir = os.path.join(ios_out_dir, "simulator")
        ios_dev_libs_dir = os.path.join(ios_out_dir, "device")
        for key in xcode_proj_info.keys():
            proj_path = os.path.join(self.repo_x, key)
            target = xcode_proj_info[key][LibsCompiler.KEY_XCODE_TARGETS]

            if self.build_mac:
                # compile mac
                build_cmd = XCODE_CMD_FMT % (proj_path, mode_str, "%s Mac" %
                                             target, "", mac_out_dir)
                self._run_cmd(build_cmd)

            if self.build_ios:
                # compile ios simulator
                build_cmd = XCODE_CMD_FMT % (
                    proj_path, mode_str, "%s iOS" % target,
                    "-sdk iphonesimulator ARCHS=\"i386 x86_64\" VALID_ARCHS=\"i386 x86_64\"",
                    ios_sim_libs_dir)
                build_cmd += ' ONLY_ACTIVE_ARCH=NO'
                self._run_cmd(build_cmd)

                # compile ios device
                build_cmd = XCODE_CMD_FMT % (proj_path, mode_str,
                                             "%s iOS" % target,
                                             "-sdk iphoneos", ios_dev_libs_dir)
                self._run_cmd(build_cmd)

        if self.build_ios:
            # generate fat libs for iOS
            for lib in os.listdir(ios_sim_libs_dir):
                sim_lib = os.path.join(ios_sim_libs_dir, lib)
                dev_lib = os.path.join(ios_dev_libs_dir, lib)
                output_lib = os.path.join(ios_out_dir, lib)
                lipo_cmd = "lipo -create -output \"%s\" \"%s\" \"%s\"" % (
                    output_lib, sim_lib, dev_lib)

                self._run_cmd(lipo_cmd)

            # remove the simulator & device libs in iOS
            utils.rmdir(ios_sim_libs_dir)
            utils.rmdir(ios_dev_libs_dir)

        if not self.disable_strip:
            # strip the libs
            if self.build_ios:
                ios_strip_cmd = "xcrun -sdk iphoneos strip -S %s/*.a" % ios_out_dir
                self._run_cmd(ios_strip_cmd)
            if self.build_mac:
                mac_strip_cmd = "xcrun strip -S %s/*.a" % mac_out_dir
                self._run_cmd(mac_strip_cmd)
Example #10
def fetch(team, issue_no, config, token=None):
    repo_owner = config['repo_owner']
    repo_name = config['teams'][team]['repo_name']
    github = Github(config["player"], token)

    _, submitter, create_time, content = \
        get_github_issue(repo_owner, repo_name, issue_no, github)

    # Write the fetched issue content to temp file
    tmpfile = "/tmp/gitctf_%s.issue" % random_string(6)
    with open(tmpfile, "w") as f:
        f.write(content)

    # Decrypt the exploit
    out_dir = "exploit-%s-%s" % (submitter, create_time)
    prompt_rmdir_warning(out_dir)
    rmdir(out_dir)
    mkdir(out_dir)
    team = config["player_team"]
    out_dir = decrypt_exploit(tmpfile, config, team, out_dir, submitter)
    if out_dir is not None:
        print "Exploit fetched into %s" % out_dir

    # Clean up
    rmfile(tmpfile)
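random_string, used here and in later examples to build unique temp-file names, is another helper whose source is not listed; something along these lines would match its call sites:

import random
import string

def random_string(length):
    # Assumed shape of the helper; any filename-safe alphabet would do.
    alphabet = string.ascii_lowercase + string.digits
    return ''.join(random.choice(alphabet) for _ in range(length))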
Example #11
def cleanup_all():
    """Remove all the existing builds. Requires secret_key parameter in query."""
    builds = app.config["BUILDS"]
    to_remove = []
    secret_key = request.values.get('secret_key', '')

    if check_secret_key(secret_key):
        log.info("All builds will be removed.")
        build_dirs = get_build_directories(Config.builds_dir)
        for hashnumber in build_dirs:
            b = builds.get(hashnumber, None)
            log.info("Removing %s" % hashnumber)
            if b:
                to_remove.append(hashnumber)
                b.remove_files()
            else:
                rmdir(os.path.join(Config.builds_dir, hashnumber))
        res = []
        for h in to_remove:
            del builds[h]
            res.append("<removed hash='%s'/>" % h)
        if len(res) == 0:
            res = "<message>No hashes to be removed.</message>\n"
        else:
            res = "\n".join(res)
            res = "<message>\n%s</message>\n" % res
    else:
        log.error("No builds will be removed.")
        res = "<error>Failed to remove all builds: secret key could not be confirmed.</error>\n"

    return Response(res, mimetype='application/xml')
Example #12
def run():
    # Clean up previous extraction
    rmdir(prepared_omniglot_location)
    mkdir(prepared_omniglot_location)

    # Unzip dataset
    for root, _, files in os.walk(raw_omniglot_location):
        for f in files:
            if f in dataset_zip_files:
                print('Unzipping {}...'.format(f))
                with zipfile.ZipFile(os.path.join(root, f), 'r') as zip_ref:
                    zip_ref.extractall(prepared_omniglot_location)

    print('Processing background set...')
    for root, alphabets, _ in os.walk(
            prepared_omniglot_location + 'images_background/'):
        for alphabet in sorted(alphabets):
            handle_alphabet(root + alphabet)

    print('Processing evaluation set...')
    for root, alphabets, _ in os.walk(
            prepared_omniglot_location + 'images_evaluation/'):
        for alphabet in sorted(alphabets):
            handle_alphabet(root + alphabet)
Example #13
def clone(repo_owner, repo_name, prompt=False, target_dir=None):
    target = repo_name if target_dir is None else target_dir
    if prompt:
        prompt_rmdir_warning(target)
    rmdir(target)
    url = 'git@github.com:%s/%s' % (repo_owner, repo_name)
    _, err, r = run_command("git clone %s %s" % (url, target), os.getcwd())
    if r != 0:
        print '[*] Failed to clone: "%s"' % url
        print err
        sys.exit()
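Several snippets on this page unpack run_command as (stdout, stderr, return code). A plausible subprocess-based sketch matching that convention, assumed since the helper's source is not listed:

import subprocess

def run_command(cmd, cwd):
    # Assumed implementation matching the "_, err, r = run_command(cmd, cwd)" call sites.
    proc = subprocess.Popen(cmd, shell=True, cwd=cwd,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    return out, err, proc.returncode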
Example #14
def get_next_commit(last_commit, defender, branch, config):
    repo_name = config['teams'][defender]['repo_name']
    rmdir(repo_name)
    clone(config['repo_owner'], repo_name)
    next_commit_hash = get_next_commit_hash(repo_name, branch, last_commit)
    rmdir(repo_name)
    print next_commit_hash
    if next_commit_hash == '':
        return None
    else:
        return next_commit_hash
Example #15
def del_local_file_in_dir(params):
    log("del_local_file_in_dir: params: %s" % params)
    local_file = utils.unquote_plus(params.get("nzb"))
    local_path = os.path.dirname(local_file)
    if xbmcgui.Dialog().yesno("Pneumatic", "Delete:", "%s" % local_path):
        for file in utils.listdir_files(local_path):
            local_file_path = utils.join(local_path, file)
            log("del_local_file_in_dir: delete: %s" % local_file_path)
            utils.delete(local_file_path)
        log("del_local_file_in_dir: rmdir: %s" % local_path)
        utils.rmdir(local_path)
        xbmc.executebuiltin("Container.Refresh")
Example #17
    def compile_mac_ios(self):
        xcode_proj_info = self.cfg_info[LibsCompiler.KEY_XCODE_PROJS_INFO]
        if self.mode == 'debug':
            mode_str = 'Debug'
        else:
            mode_str = 'Release'

        XCODE_CMD_FMT = "xcodebuild -project \"%s\" -configuration %s -target \"%s\" %s CONFIGURATION_BUILD_DIR=%s"
        ios_out_dir = os.path.join(self.lib_dir, "ios")
        mac_out_dir = os.path.join(self.lib_dir, "mac")
        ios_sim_libs_dir = os.path.join(ios_out_dir, "simulator")
        ios_dev_libs_dir = os.path.join(ios_out_dir, "device")
        for key in xcode_proj_info.keys():
            proj_path = os.path.join(self.repo_x, key)
            target = xcode_proj_info[key][LibsCompiler.KEY_XCODE_TARGETS]

            if self.build_mac:
                # compile mac
                build_cmd = XCODE_CMD_FMT % (proj_path, mode_str, "%s Mac" % target, "", mac_out_dir)
                self._run_cmd(build_cmd)

            if self.build_ios:
                # compile ios simulator
                build_cmd = XCODE_CMD_FMT % (proj_path, mode_str, "%s iOS" % target, "-sdk iphonesimulator ARCHS=\"i386 x86_64\" VALID_ARCHS=\"i386 x86_64\"", ios_sim_libs_dir)
                self._run_cmd(build_cmd)

                # compile ios device
                build_cmd = XCODE_CMD_FMT % (proj_path, mode_str, "%s iOS" % target, "-sdk iphoneos", ios_dev_libs_dir)
                self._run_cmd(build_cmd)

        if self.build_ios:
            # generate fat libs for iOS
            for lib in os.listdir(ios_sim_libs_dir):
                sim_lib = os.path.join(ios_sim_libs_dir, lib)
                dev_lib = os.path.join(ios_dev_libs_dir, lib)
                output_lib = os.path.join(ios_out_dir, lib)
                lipo_cmd = "lipo -create -output \"%s\" \"%s\" \"%s\"" % (output_lib, sim_lib, dev_lib)

                self._run_cmd(lipo_cmd)

            # remove the simulator & device libs in iOS
            utils.rmdir(ios_sim_libs_dir)
            utils.rmdir(ios_dev_libs_dir)

        if not self.disable_strip:
            # strip the libs
            if self.build_ios:
                ios_strip_cmd = "xcrun -sdk iphoneos strip -S %s/*.a" % ios_out_dir
                self._run_cmd(ios_strip_cmd)
            if self.build_mac:
                mac_strip_cmd = "xcrun strip -S %s/*.a" % mac_out_dir
                self._run_cmd(mac_strip_cmd)
Example #18
def verify_service(team, branch, service_port, host_port, config_file):
    config = load_config(config_file)
    repo_owner = config['repo_owner']
    repo_name = config['teams'][team]['repo_name']
    container_name = "%s-%s" % (repo_name, branch)
    clone(repo_owner, repo_name)
    docker_cleanup(container_name)
    checkout(repo_name, branch)
    setup(repo_name, container_name, int(service_port), int(host_port))
    check_liveness(container_name, int(host_port))
    docker_cleanup(container_name)
    rmdir(repo_name)
    sys.exit()
Example #19
def build_citus():
    repo = config.paths['citus-repo']
    utils.rmdir(repo, force=True) # force because git write-protects files
    run('git clone -q https://github.com/citusdata/citus.git {}'.format(repo))
    with cd(repo):
        git_ref = config.settings.get('citus-git-ref', 'master')
        run('git checkout {}'.format(git_ref))

        pg_latest = config.paths['pg-latest']
        run('PG_CONFIG={}/bin/pg_config ./configure'.format(pg_latest))

        core_count = run('cat /proc/cpuinfo | grep "core id" | wc -l')
        run('make -j{} install'.format(core_count))
Example #20
def build_enterprise():
    utils.add_github_to_known_hosts() # make sure ssh doesn't prompt
    repo = config.paths['enterprise-repo']
    utils.rmdir(repo, force=True)
    run('git clone -q git@github.com:citusdata/citus-enterprise.git {}'.format(repo))
    with cd(repo):
        git_ref = config.settings.get('citus-git-ref', 'enterprise-master')
        run('git checkout {}'.format(git_ref))

        pg_latest = config.paths['pg-latest']
        run('PG_CONFIG={}/bin/pg_config ./configure'.format(pg_latest))

        core_count = run('cat /proc/cpuinfo | grep "core id" | wc -l')
        run('make -j{} install'.format(core_count))
Example #21
def _gen_diffs(ver1, ver2, mapping, allowOverwrite=False):
    n = f"{ver1}__{ver2}__{mapping}.diff"
    utils.journal_write('../../diffs/' + n)
    if os.path.isfile('../../diffs/' + n) and not allowOverwrite:
        print(utils.c.WARNING, n, "already was generated! Skipping.",
              utils.c.RESET)
        return

    # print('../../diffs/' + n)

    print(f"{mapping} diffing {ver1} and {ver2}... Deleting META-INF...")
    utils.rmdir(f"{ver1}_{mapping}/client/META-INF/")
    utils.rmdir(f"{ver2}_{mapping}/client/META-INF/")
    l = f"git diff --no-index {ver1}_{mapping} {ver2}_{mapping} > {n}"
    print(utils.c.WARNING, '>', l, utils.c.RESET)
    os.system(l)
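os.system with an embedded shell redirect works, but the same diff can be captured without a shell. An equivalent alternative (not the author's code), writing git's output straight to the file:

import subprocess

with open(n, 'w') as diff_out:
    # git diff --no-index exits 1 when the trees differ; run() without
    # check=True deliberately ignores that.
    subprocess.run(['git', 'diff', '--no-index',
                    f'{ver1}_{mapping}', f'{ver2}_{mapping}'], stdout=diff_out)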
Example #22
def handle_alphabet(folder):
    print('{}...'.format(folder.split('/')[-1]))
    for rotate in [0, 90, 180, 270]:
        # Create new folders for each augmented alphabet
        mkdir(f'{folder}.{rotate}')
        for root, character_folders, _ in os.walk(folder):
            for character_folder in character_folders:
                # For each character folder in an alphabet
                # rotate and resize all of the images and save
                # to the new folder
                handle_characters(folder,
                                  root + '/' + character_folder,
                                  rotate)
                # return

    # Delete original alphabet
    rmdir(folder)
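handle_characters is not shown in this listing. In Omniglot preprocessing this step typically rotates each character image and writes it under the freshly created '{folder}.{rotate}' directory. A hypothetical sketch, assuming PIL and a 28x28 target size (both assumptions):

import os
from PIL import Image

def handle_characters(alphabet_folder, character_folder, rotate):
    # Hypothetical sketch; the output layout and 28x28 size are assumed.
    out_dir = os.path.join(f'{alphabet_folder}.{rotate}',
                           os.path.basename(character_folder))
    os.makedirs(out_dir, exist_ok=True)
    for name in os.listdir(character_folder):
        img = Image.open(os.path.join(character_folder, name))
        img.rotate(rotate).resize((28, 28)).save(os.path.join(out_dir, name))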
Example #23
def decrypt_exploit(encrypted_exploit_path, config, team, out_dir=None, \
        expected_signer=None):
    if out_dir is None:
        out_dir = "exploit"

    rmdir(out_dir)

    tmpzip = "/tmp/gitctf_%s.zip" % random_string(6)
    tmpdir = "/tmp/gitctf_%s" % random_string(6)
    tmpgpg = "/tmp/gitctf_%s.gpg" % random_string(6)

    if expected_signer == None:
        decrypt_cmd = 'gpg -o %s %s' % (tmpzip, encrypted_exploit_path)
    else:
        instructor_id = config['teams']['instructor']['pub_key_id']
        team_id = config['teams'][team]['pub_key_id']
        expected_signer_id = config['individual'][expected_signer][
            'pub_key_id']

        # Make keyring
        run_command("gpg -o %s --export %s %s %s" % (tmpgpg, \
                expected_signer_id, instructor_id, team_id), os.getcwd())

        decrypt_cmd = "gpg --no-default-keyring --keyring %s -o %s %s" \
                % (tmpgpg, tmpzip, encrypted_exploit_path)

    _, err, r = run_command(decrypt_cmd, os.getcwd())
    if r != 0:
        print "[*] Failed to decrypt/verify %s" % encrypted_exploit_path
        print err
        return None

    run_command('unzip %s -d %s' % (tmpzip, tmpdir), os.getcwd())
    shutil.move(tmpdir, out_dir)

    rmfile(tmpzip)
    rmfile(tmpgpg)
    rmdir(tmpdir)

    return out_dir
Example #24
def prepare_mini_imagenet():
    print("preparing dataset")
    # Clean up folders
    rmdir(MINI_IMG_PATH + '/images_background')
    rmdir(MINI_IMG_PATH + '/images_evaluation')
    mkdir(MINI_IMG_PATH + '/images_background')
    mkdir(MINI_IMG_PATH + '/images_evaluation')

    # Find class identities
    classes = []
    for root, _, files in os.walk(MINI_IMG_PATH + '/images/'):
        for f in files:
            if f.endswith('.jpg'):
                classes.append(f[:-12])

    classes = list(set(classes))

    # Train/test split
    np.random.seed(0)
    np.random.shuffle(classes)
    background_classes, evaluation_classes = classes[:80], classes[80:]

    # Create class folders
    for c in background_classes:
        mkdir(MINI_IMG_PATH + f'/images_background/{c}/')

    for c in evaluation_classes:
        mkdir(MINI_IMG_PATH + f'/images_evaluation/{c}/')

    # Move images to correct location
    for root, _, files in os.walk(MINI_IMG_PATH + '/images'):
        for f in tqdm(files, total=600 * 100):
            if f.endswith('.jpg'):
                class_name = f[:-12]
                image_name = f[-12:]
                # Send to correct folder
                subset_folder = 'images_evaluation' if class_name in evaluation_classes else 'images_background'
                src = f'{root}/{f}'
                dst = MINI_IMG_PATH + f'/{subset_folder}/{class_name}/{image_name}'
                shutil.copy(src, dst)
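The f[:-12] / f[-12:] split relies on the Mini-ImageNet naming scheme: each file name is a WordNet class id followed by a 12-character tail (an 8-digit image number plus '.jpg'). For instance, with an illustrative file name:

f = 'n0153282900000005.jpg'  # hypothetical example file name
class_name, image_name = f[:-12], f[-12:]
assert class_name == 'n01532829'
assert image_name == '00000005.jpg'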
Example #25
 def clean_template(self):
     utils.rmdir(
         os.path.join(self.engine_template_dir, "cpp-template-binary"))
     utils.rmdir(
         os.path.join(self.engine_template_dir, "lua-template-binary"))
     utils.rmdir(
         os.path.join(self.engine_template_dir, "js-template-binary"))
Example #26
def make_lmdb(list_file, out_dir):
  '''
  Use the convert_annoset tool from the caffe SSD branch.
  The tool takes a list file as a parameter and writes
  all listed images and their labels into an LMDB database.
  '''
  utils.rmdir(out_dir)
    
  cmd = []
  cmd.append(os.path.join(CAFFE_BIN_DIR, 'convert_annoset'))
  cmd.append('--anno_type=detection')
  cmd.append('--label_type=txt')
  cmd.append('--label_map_file=' + LABEL_MAP_FILE)
  cmd.append('--resize_height=' + str(TARGET_IMG_H))
  cmd.append('--resize_width=' + str(TARGET_IMG_W))
  cmd.append('--backend=lmdb')
  cmd.append('--encoded')
  cmd.append('--encode_type=jpg')
  cmd.append('') # we can leave the root path empty as our file list contains absolute paths
  cmd.append(list_file)
  cmd.append(out_dir)
  utils.run_command(cmd)
Example #27
def make_lmdb():
  '''
  Use the convert_annoset tool.
  The tool takes a list file as a parameter and writes
  all listed images and their labels into an LMDB database.
  '''
  utils.rmdir(LMDB_TARGET_DIR)

  cmd = []
  cmd.append(os.path.join(CAFFE_BIN_DIR, 'convert_annoset'))
  cmd.append('--anno_type=detection')
  cmd.append('--label_type=txt')
  cmd.append('--label_map_file=' + LABEL_MAP_FILE)
  cmd.append('--resize_height=' + str(TARGET_IMG_H))
  cmd.append('--resize_width=' + str(TARGET_IMG_W))
  cmd.append('--backend=lmdb')
  cmd.append('--encoded')
  cmd.append('--encode_type=jpg')
  cmd.append('""') # we can leave root path empty as our file list contains absolute pathes
  cmd.append(IMAGE_LIST_FILE)
  cmd.append(LMDB_TARGET_DIR)
  utils.run_command(cmd, 'convert_annoset.log')
Example #28
def local_setup(repo_owner, scoreboard_name, problems):
    print '[*] Start local setup'
    # Create root directory for CTF env.
    prompt_rmdir_warning(repo_owner)
    rmdir(repo_owner)
    mkdir(repo_owner)

    # Setup local scoreboard repo
    scoreboard_dir_path = os.path.join(repo_owner, scoreboard_name)
    if create_local_repo(scoreboard_dir_path):
        open(os.path.join(scoreboard_dir_path, 'score.csv'), 'w').close()

    # Setup local problems repo
    for problem in problems:
        problem_info = problems[problem]
        repo_dir_path = os.path.join(repo_owner, \
                problem_info['repo_name'])
        if create_local_repo(repo_dir_path):
            print '[*] Copy binary'
            copy(problem_info['bin_src_path'], repo_dir_path)
            print '[*] Create flag file'
            create_flag(repo_dir_path)
            print '[*] Make Dockerfile'
            create_dockerfile(problem_info, repo_dir_path)
Example #29
def run(args):
    files = args.file.split(',')
    for file in files:
        flag = utils.rmdir(args.path + "/" + file)
        utils.edit_list(file, None)
        if flag:
            # notify the server
            try:
                result = notice_server(args.serverAddress, args.tagOwner, args.tagPassword, file)
                if not utils.parser_result(result):
                    utils.log('err_notice_server', file, 'error')
            except:
                utils.log('err_notice_connect', file, 'error')
            utils.log('info_clear', file)
        else:
            utils.log('err_clear', file, 'error')
Example #30
# -*- coding: utf-8 -*-

import utils

targetDir = 'EffekseerForDXLib_143_320a/'
dxlibDir = 'DXLib_VC/'
effekseerDir = '../Effekseer/'
effekseerVSDir = effekseerDir + 'EffekseerRuntime143/Compiled/'

utils.cdToScript()

utils.rmdir(targetDir)
utils.mkdir(targetDir)

utils.copytree('docs/', targetDir+'Help/')

utils.copytree(dxlibDir+'プロジェクトに追加すべきファイル_VC用/', targetDir+'プロジェクトに追加すべきファイル_VC用/')

utils.copy(effekseerVSDir+'include/Effekseer.h', targetDir+'プロジェクトに追加すべきファイル_VC用/')
utils.copy(effekseerVSDir+'include/EffekseerRendererDX9.h', targetDir+'プロジェクトに追加すべきファイル_VC用/')
utils.copy(effekseerVSDir+'include/EffekseerRendererDX11.h', targetDir+'プロジェクトに追加すべきファイル_VC用/')

utils.copy(effekseerVSDir+'lib/VS2015/Debug/Effekseer.lib', targetDir+'プロジェクトに追加すべきファイル_VC用/Effekseer_vs2015_x86_d.lib')
utils.copy(effekseerVSDir+'lib/VS2015/Debug/EffekseerRendererDX9.lib', targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX9_vs2015_x86_d.lib')
utils.copy(effekseerVSDir+'lib/VS2015/Debug/EffekseerRendererDX11.lib', targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX11_vs2015_x86_d.lib')

utils.copy(effekseerVSDir+'lib/VS2015/Release/Effekseer.lib', targetDir+'プロジェクトに追加すべきファイル_VC用/Effekseer_vs2015_x86.lib')
utils.copy(effekseerVSDir+'lib/VS2015/Release/EffekseerRendererDX9.lib', targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX9_vs2015_x86.lib')
utils.copy(effekseerVSDir+'lib/VS2015/Release/EffekseerRendererDX11.lib', targetDir+'プロジェクトに追加すべきファイル_VC用/EffekseerRendererDX11_vs2015_x86.lib')

utils.copy(effekseerVSDir+'lib/VS2015WIN64/Debug/Effekseer.lib', targetDir+'プロジェクトに追加すべきファイル_VC用/Effekseer_vs2015_x64_d.lib')
Example #31
birth_prob = 0.5
appear_interval = 5
scale_var = 0
ratio_var = 0
velocity = 5.3
task = 'mnist'
m = h // 3
eps = 1e-5

txt_name = 'gt.txt'
metric_dir = 'metric' if arg.metric == 1 else ''
data_dir = path.join('data', task)
input_dir = path.join(data_dir, 'processed')
output_dir = path.join(data_dir, 'pt', metric_dir)
output_input_dir = path.join(output_dir, 'input')
utils.rmdir(output_input_dir)
utils.mkdir(output_input_dir)
output_gt_dir = path.join(output_dir, 'gt')

# mnist data
# datasets.MNIST(root=data_dir, train=True, download=True)
train_data = torch.load(path.join(input_dir, 'training.pt'))  # 60000 * 28 * 28
test_data = torch.load(path.join(input_dir, 'test.pt'))  # 10000 * 28 * 28
data = torch.cat((train_data[0], test_data[0]),
                 0).unsqueeze(3)  # 70000 * h * w * D
data_num = data.size(0)

# generate data from trackers
train_frame_num = frame_num * train_ratio
test_frame_num = frame_num * (1 - train_ratio)
print('train frame number: ' + str(train_frame_num))
Example #32
 def clean_libs(self):
     utils.rmdir(self.lib_dir)
Example #33
import selenium
from selenium import webdriver
from selenium.webdriver.support.ui import Select
import time, os
import dates, utils
import traceback, logging

location = 'D:/Trading/mcxdata/delta'  # Laptop
# location = 'C:/Users/SVK/OneDrive/Python/mcxdata/data' # Desktop

url = 'https://www.mcxindia.com/market-data/bhavcopy'
chromedriver = 'C:/Program Files (x86)/chromedriver_win32/chromedriver.exe'
logfile = 'log.txt'

utils.rmdir(location)
utils.mkdir(location)
os.chdir(location)

log_lines = []

#date_range = dates.dates('2017-02-17', '2017-02-20')
date_range = dates.dates('2018-06-06')
#date_range = dates.adhoc_dates

options = webdriver.ChromeOptions()
options.add_argument("--start-maximized")
prefs = {"download.default_directory": location}
options.add_experimental_option("prefs", prefs)
#options.add_argument(location)
Example #34
 def clean_template(self):
     utils.rmdir(os.path.join(self.engine_template_dir, "cpp-template-binary"))
     utils.rmdir(os.path.join(self.engine_template_dir, "lua-template-binary"))
     utils.rmdir(os.path.join(self.engine_template_dir, "js-template-binary"))
Example #35
def main(config):
    cfg_trainer = config['trainer_colab'] if config['colab'] else config['trainer']
    run_id = config['resume'].split('/')[-2]
    file_name = config['resume'].split('/')[-1].split('.')[0]
    output_dir = os.path.join(cfg_trainer['output_dir'], run_id, file_name)
    # Ensure output_dir exists and is empty before logging into it.
    if os.path.exists(output_dir):
        rmdir(output_dir, remove_parent=False)
    else:
        os.makedirs(output_dir, exist_ok=True)
    setup_logging(output_dir)
    logger = logging.getLogger('test')

    use_gpu = cfg_trainer['n_gpu'] > 0 and torch.cuda.is_available()
    device = torch.device('cuda:0' if use_gpu else 'cpu')
    map_location = "cuda:0" if use_gpu else torch.device('cpu')

    datamanager, _ = build_datamanager(config['type'], config['data'])

    model, _ = build_model(config,
                           num_classes=len(
                               datamanager.datasource.get_attribute()))

    logger.info('Loading checkpoint: {} ...'.format(config['resume']))
    checkpoint = torch.load(config['resume'], map_location=map_location)

    model.load_state_dict(checkpoint['state_dict'])
    model.eval()
    model.to(device)

    preds = []
    labels = []

    with tqdm(total=len(datamanager.get_dataloader('test'))) as epoch_pbar:
        with torch.no_grad():
            for batch_idx, (data, _labels) in enumerate(
                    datamanager.get_dataloader('test')):
                data, _labels = data.to(device), _labels.to(device)

                out = model(data)

                _preds = torch.sigmoid(out)
                preds.append(_preds)
                labels.append(_labels)
                epoch_pbar.update(1)
    preds = torch.cat(preds, dim=0)
    labels = torch.cat(labels, dim=0)
    preds = preds.cpu().numpy()
    labels = labels.cpu().numpy()

    # # get best threshold
    # from sklearn.metrics import roc_curve, auc, precision_recall_curve

    # precision = dict()
    # recall = dict()
    # thresholds_pr = dict()
    # pr_auc = dict()
    # best_threshold = dict()

    # fpr = dict()
    # tpr = dict()
    # roc_auc = dict()
    # thresholds_roc = dict()

    # for i in range(len(datamanager.datasource.get_attribute())):
    #     precision[i], recall[i], thresholds_pr[i] = precision_recall_curve(labels[:, i], preds[:, i])
    #     pr_auc[i] = auc(recall[i], precision[i])
    #     best_threshold[i] = np.argmax((2 * precision[i] * recall[i]) / (precision[i] + recall[i]))

    #     fpr[i], tpr[i], thresholds_roc[i] = roc_curve(labels[:, i], preds[:, i])
    #     roc_auc[i] = auc(fpr[i], tpr[i])

    #     fig, (ax1, ax2) = plt.subplots(nrows=1, ncols=2)
    #     ax1.plot(recall[i], precision[i], label='Precision-Recall Curve, mAP: %f' % pr_auc[i])
    #     ax1.scatter(
    #         recall[i][best_threshold[i]],
    #         precision[i][best_threshold[i]],
    #         marker='o',
    #         color='black',
    #         label='Best threshold %f' % (thresholds_pr[i][best_threshold[i]]))

    #     ax1.set_xlabel('Recall')
    #     ax1.set_ylabel('Precision')
    #     ax1.set_title('Attribute: %s' % datamanager.datasource.get_attribute()[i])
    #     # ax1.legend(loc="lower right")

    #     fig, ax2 = plt.subplots(122)
    #     ax2.plot(fpr[i], tpr[i], label='ROC curve (area = %0.2f)' % (roc_auc[i]))
    #     ax2.plot([0, 1], [0, 1], 'k--')
    #     ax2.scatter(fpr[i][best_threshold[i]], tpr[i][best_threshold[i]], marker='o', color='black', label='Best threshold %f' % (thresholds[i][best_threshold[i]]))
    #     ax2.set_xlim([0.0, 1.0])
    #     ax2.set_ylim([0.0, 1.05])
    #     ax2.set_xlabel('False Positive Rate')
    #     ax2.set_ylabel('True Positive Rate')
    #     ax2.set_title('Attribute: %s' % datamanager.datasource.get_attribute()[i])
    #     # ax2.legend(loc="lower right")

    # plt.show()

    result_label, result_instance = recognition_metrics(labels, preds)
    log_test(logger.info, datamanager.datasource.get_attribute(),
             datamanager.datasource.get_weight('test'), result_label,
             result_instance)
Example #36
def verify_issue(defender,
                 repo_name,
                 issue_no,
                 config,
                 github,
                 target_commit=None):
    timeout = config["exploit_timeout"]["exercise_phase"]
    repo_owner = config['repo_owner']
    title, submitter, create_time, content = \
        get_github_issue(repo_owner, repo_name, issue_no, github)

    # Issue convention: "exploit-[branch_name]"
    target_branch = title[8:]

    clone(repo_owner, repo_name)

    # Write the fetched issue content to temp file
    tmpfile = "/tmp/gitctf_%s.issue" % random_string(6)
    tmpdir = "/tmp/gitctf_%s.dir" % random_string(6)

    with open(tmpfile, "w") as f:
        f.write(content)

    # Decrypt the exploit
    mkdir(tmpdir)

    team = defender
    decrypt_exploit(tmpfile, config, team, tmpdir, submitter)
    rmfile(tmpfile)

    # Now iterate through branches and verify exploit
    # zchn: not sure about this, was: branches = list_branches(repo_name)
    bug_branches = config['teams'][team]['bug_branches']
    branches = bug_branches + ['master'] if len(bug_branches) > 0 \
        else list_branches(repo_name)

    candidates = []
    if (target_branch in branches) and (target_commit is None):
        # Iterate through branches and collect candidates
        commit = get_latest_commit_hash(repo_name, create_time, target_branch)
        candidates.append((target_branch, commit))

    verified_branch = None
    verified_commit = None

    log = 'About %s (exploit-service branch)\n' % title

    for (branch, commit) in candidates:
        if branch in title:
            result, log = verify_exploit(tmpdir, repo_name, commit, timeout, \
                    config, log=log)
        else:
            result, _ = verify_exploit(tmpdir, repo_name, commit, timeout, \
                    config)

        if result:
            verified_branch = branch
            verified_commit = commit
            break

    rmdir(tmpdir)
    rmdir(repo_name)

    if verified_branch is None:
        print("[*] The exploit did not work against branch '%s'" % \
                target_branch)
    else:
        print("[*] The exploit has been verified against branch '%s'" %
              verified_branch)

    return (verified_branch, verified_commit, submitter, log)
Example #37
# parse arguments
parser = argparse.ArgumentParser()
parser.add_argument('--c', type=int, choices=range(1, 9))  # camera id
parser.add_argument('--v', type=int, default=0)  # visualization
arg = parser.parse_args()
cam_id = arg.c

# specify directories and parameters
task_dir = 'data/duke'
camera_dir = 'camera' + str(cam_id)
fg_mask_dir = path.join(task_dir, 'imbs', 'fg_mask', camera_dir)
gt_dir = path.join(task_dir, 'ground_truth')
bb_dir = path.join(gt_dir, 'bb', camera_dir)
bb_mask_dir = path.join(gt_dir, 'bb_mask', camera_dir)
if arg.v == 0:
    utils.rmdir(bb_dir)
    utils.mkdir(bb_dir)
    utils.rmdir(bb_mask_dir)
    utils.mkdir(bb_mask_dir)
#
H, W = 108, 192
scl = 1080 / H
zeta_s = 0.55
zeta_r = [1.25, 0.3]
h = 23
w = 9
O = 12  # maximum object number
step = 6
scl_v = 3
hv, wv = H * scl_v, W * scl_v  # for visualization
Example #38
 def tearDown(self):
     # Remove test directory and contents
     utils.rmdir(self.se_dir)