Example #1
File: ci.py Project: rapgro/enki
 def install_Windows(self):
     # ctags
     ctags_zip = os.path.join(DOWNLOADS, CTAGS_VER + '.zip')
     if not isfile(ctags_zip):
         wget('http://sourceforge.net/projects/ctags/files/ctags/5.8/{}.zip'.
              format(CTAGS_VER), ctags_zip)
     unzip(ctags_zip, CTAGS_VER + '/ctags.exe')
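The `unzip` helper called here is not shown in these listings. A minimal sketch of a member-extracting variant consistent with this call, assuming a plain zipfile-based implementation rather than the project's actual code:

import zipfile

def unzip(zip_path, member=None, dest='.'):
    # Hypothetical helper (assumed, not the project's code): extract one member
    # (or the whole archive) from zip_path into dest.
    with zipfile.ZipFile(zip_path) as zf:
        if member is None:
            zf.extractall(dest)
        else:
            zf.extract(member, dest)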
Example #2
    def update(self):
        # Step 1: Get newest nightly folder
        response = urllib2.urlopen(self.nightly_dir+"/?C=N;O=D")
        html = response.read()
        folder_id =  re.findall("[0-9]{5,}", html)[0]

        # Step 2: Find the correct file
        response = urllib2.urlopen(self.nightly_dir+"/"+folder_id)
        html = response.read()
        exec_file = re.findall("jsshell-win32.zip", html)[0]
        json_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.win32.json", html)[0]

        # Step 3: Get build information
        response = urllib2.urlopen(self.nightly_dir+"/"+folder_id+"/"+json_file)
        html = response.read()
        info = json.loads(html)

        # Step 4: Fetch archive
        print "Retrieving", self.nightly_dir+"/"+folder_id+"/"+exec_file
        urllib.urlretrieve(self.nightly_dir+"/"+folder_id+"/"+exec_file, self.tmp_dir + "shell.zip")

        # Step 5: Unzip
        utils.unzip(self.tmp_dir,"shell.zip")

        # Step 6: Save info
        self.updated = True
        self.cset = info["moz_source_stamp"]
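Example #3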
def install_ipfs(quiet=False):
    '''
    Installs the ipfs software for windows and linux systems.
    '''
    if ipfs_installed():
        return True
    os_name = os.name  # either of ('posix', 'nt', 'java')
    n_bits = 32 << bool(sys.maxsize >> 32)
    ipfs_url = IPFS_LINKS.get(n_bits, dict()).get(os_name, None)
    file_extension = ('zip', 'tar.gz')[os.name == 'posix']
    output_file_name = 'ipfs.' + file_extension
    ipfs_dir_name = 'go-ipfs'  # dir after unzipping.
    if ipfs_url is None:
        raise NotImplementedError('Program doesn\'t support your OS yet.')
    downloaded = utils.wget(ipfs_url,
                            output_document=output_file_name,
                            tries=math.inf,
                            timeout=100,
                            quiet=quiet)
    if downloaded:
        utils.unzip(output_file_name)
        os.chdir(ipfs_dir_name)
        if os_name == 'posix':
            os.system('sudo ./install.sh')
        else:
            os.system('ipfs')
        os.chdir('..')
        if not quiet:
            print('IPFS installed successfully.')
            print('Removing intermediate files...')
        os.remove(output_file_name)
        return True
    return False
Example #4
def get_sport(sport):

    global root

    utils.logmessage("Getting: " + sport, "getdata", 1)

    if sport == "baseball":
        url = "http://seanlahman.com/files/database/lahman-csv_2014-02-14.zip"
        path = root + "rawdata/lahman/"
        saveas = "lahman.zip"

    if sport == "basketball":
        url = "http://www.databasebasketball.com/databasebasketball_2009_v1.zip"
        path = root + "rawdata/basketball/"
        saveas = "basketball.zip"

    # download file
    utils.logmessage("Downloading zip file", "getdata", 1)
    utils.download_file(url, path+saveas)

    # unzip file
    utils.unzip(path+saveas, path)
    utils.logmessage("Unzipping file", "getdata", 1)

    pass
Example #5
def deploy(machine=None):
    print green("当前机器:" + env.host)
    #if a machine is specified, only deploy to that one
    if machine and not machine == env.host:
        return
    #take the SLB out of rotation
    #print env.host
    for slb in env.slbList:
        utils.setSlb(env.slbServer, slb, 0)
    utils.runCmd("sleep 5")
    #back up
    utils.zip(env.online)
    utils.download(env.online, env.localPath)
    #upload
    utils.upload(env.source, env.target)
    #kill the process
    utils.stopProcess(env.aport)
    utils.runCmd("sleep 10")
    #unzip
    utils.unzip(env.online)
    #start the process
    utils.runCmd(env.start_process)
    #sleep for the configured number of seconds
    utils.runCmd("sleep " + env.sleep_time)
    #monitor the process
    run(env.monitor_url)
    #bring the SLB back online
    for slb in env.slbList:
        utils.setSlb(env.slbServer, slb, 100)
Example #6
def unzip(online=None, machine=None):
    if not machine or not online:
        abort("机器、online路径不能为空")
    print green("当前机器:" + env.host)
    if machine and not machine == env.host:
        return
    utils.unzip(online)
Example #7
def download(target="all", verbose=False, **kwargs):
    if target == "all":
        download(target="code", verbose=verbose)
        download(target="data", verbose=verbose)
    elif target == "code":
        for algorithm in GIT_REPOS.keys():
            download(target=algorithm, verbose=verbose)
    elif target in GIT_REPOS.keys():
        download_path = os.path.join(SRC_DIR, target)
        print(download_path)
        if os.path.exists(download_path):
            print("Download path {path} exists. Skipping downloading {algo}".format(path=download_path, algo=target))
            return
        if verbose:
            print("Downloading {algo} code to {path}.".format(algo=target, path=download_path))
        Repo.clone_from(GIT_REPOS[target], download_path)

    elif target == "data":
        for graph in GRAPHS:
            download(graph, verbose=True)
    elif target in GRAPHS.keys():
        for url in GRAPHS[target].values():
            if not url.startswith("http"):
                continue
            download_path = os.path.join(GRAPH_DIR, target, os.path.basename(url))
            unzip_path = os.path.join(GRAPH_DIR, target, os.path.splitext(os.path.basename(url))[0])
            utils.download(url, download_path)
            utils.unzip(download_path, unzip_path)
    else:
        raise ValueError("Unknown target.")
Example #8
    def install_webdriver(self, dest=None, channel=None, browser_binary=None):
        if self.platform != "win" and self.platform != "macos":
            raise ValueError("Only Windows and Mac platforms are currently supported")

        if dest is None:
            dest = os.pwd

        if channel is None:
            version_url = "https://msedgedriver.azureedge.net/LATEST_DEV"
        else:
            version_url = "https://msedgedriver.azureedge.net/LATEST_%s" % channel.upper()
        version = get(version_url).text.strip()

        if self.platform == "macos":
            bits = "mac64"
            edgedriver_path = os.path.join(dest, self.edgedriver_name)
        else:
            bits = "win64" if uname[4] == "x86_64" else "win32"
            edgedriver_path = os.path.join(dest, "%s.exe" % self.edgedriver_name)
        url = "https://msedgedriver.azureedge.net/%s/edgedriver_%s.zip" % (version, bits)

        # cleanup existing Edge driver files to avoid access_denied errors when unzipping
        if os.path.isfile(edgedriver_path):
            # remove read-only attribute
            os.chmod(edgedriver_path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)  # 0777
            os.remove(edgedriver_path)
            driver_notes_path = os.path.join(dest, "Driver_notes")
            if os.path.isdir(driver_notes_path):
                shutil.rmtree(driver_notes_path, ignore_errors=False, onerror=handle_remove_readonly)

        self.logger.info("Downloading MSEdgeDriver from %s" % url)
        unzip(get(url).raw, dest)
        if os.path.isfile(edgedriver_path):
            self.logger.info("Successfully downloaded MSEdgeDriver to %s" % edgedriver_path)
        return find_executable(self.edgedriver_name, dest)
Example #9
def download(_):
    conf.log = setup_log(logging.DEBUG) if conf.args['debug'] else setup_log()
    tempdir = tempfile.mkdtemp()
    cdn_url = conf.checkpoints_cdn.format(conf.checkpoints_version)
    temp_zip = os.path.join(tempdir, "{}.zip".format(conf.checkpoints_version))

    try:
        conf.log.info("Downloading {}".format(cdn_url))
        dl_file(conf.checkpoints_cdn.format(conf.checkpoints_version),
                temp_zip)

        conf.log.info("Extracting {}".format(temp_zip))
        unzip(temp_zip, conf.args['checkpoints'])

        conf.log.info("Moving Checkpoints To Final Location")

        for c in ("cm.lib", "mm.lib", "mn.lib"):
            if os.path.isfile(os.path.join(conf.args['checkpoints'], c)):
                os.remove(os.path.join(conf.args['checkpoints'], c))
            shutil.move(
                os.path.join(conf.args['checkpoints'], 'checkpoints', c),
                conf.args['checkpoints'])
        shutil.rmtree(os.path.join(conf.args['checkpoints'], 'checkpoints'))

    except Exception as e:
        conf.log.error(e)
        conf.log.error(
            "Something Gone Bad Download Downloading The Checkpoints")
        shutil.rmtree(tempdir)
        sys.exit(1)
    shutil.rmtree(tempdir)
    conf.log.info("Checkpoints Downloaded Successfully")
Example #10
def mfold_result(task_id,
                 zipname="now.zip",
                 path="./results/mfold/",
                 verbose=True):
    """Gets mfold result via task_id

    Args:
        task_id: Id of task which was given by RESTful API
        zipname: Name of the zip file in which the client will save results.
            After saving, this file is removed
        path: Path where results should be stored
        verbose: Bool that tells whether the function should print what it actually does

    Returns:
        None
    """
    req = get_request("mfold_result", task_id)
    with open(zipname, "wb") as f:
        for chunk in req.iter_content():
            f.write(chunk)
    unzip(zipname, path)
    os.remove(zipname)

    if verbose:
        print("Result under: {}/\n".format(path + task_id))
Example #12
def parse_source(source, idx, header):
    """
    Import data from a single source based on the data type.
    """
    path = '{}/{}'.format(config.workspace_dir, idx)
    if not os.path.exists(path):
        os.makedirs(path)

    cache_url = source[header.index('cache')]
    cache_filename = re.search('/[^/]*$', cache_url).group()
    fetch(cache_url, path + cache_filename)

    files = rlistdir(path)
    for f in files:
        if re.match('.*\.(zip|obj|exe)$', f):  # some files had mislabelled ext
            unzip(f, path)

    shapes = []
    files = rlistdir(path)
    for f in files:
        if re.match('.*\.({})$'.format('|'.join(config.fiona_extensions)), f):
            objs = import_with_fiona(f, source[0])
            for obj in objs:
                shapes.append(obj)
        elif re.match('.*\.csv$', f):
            objs = import_csv(f, source[0])
            for obj in objs:
                shapes.append(obj)

    shutil.rmtree(path)

    if not shapes:
        _L.warning('failed to parse source. did not find shapes. files in archive: {}'.format(files))

    return shapes
Example #13
    def update(self):
        # Step 1: Get latest successful build revision
        response = urllib2.urlopen(self.nightly_dir + "LAST_CHANGE")
        chromium_rev = response.read()

        # Step 2: Get v8 revision
        response = urllib2.urlopen(self.nightly_dir + chromium_rev +
                                   "/REVISIONS")
        self.cset = re.findall('"v8_revision_git": "([a-z0-9]*)",',
                               response.read())[0]

        # Step 3: Test if there is a new revision
        utils.getOrDownload(
            self.tmp_dir, "chrome", self.cset,
            self.nightly_dir + chromium_rev + "/" + self.filename,
            self.tmp_dir + self.filename)
        # Step 4: Unzip
        utils.unzip(self.tmp_dir, self.filename)

        # Step 5: Install on device
        if self.slaveType == "android":
            print subprocess.check_output([
                "adb", "install", "-r",
                self.tmp_dir + "/chrome-android/apks/ChromeShell.apk"
            ])

        # Step 6: Save info
        self.updated = True
Example #14
 def install_Windows(self):
     # ctags
     ctags_zip = os.path.join(DOWNLOADS, CTAGS_VER + '.zip')
     if not isfile(ctags_zip):
         wget(
             'http://sourceforge.net/projects/ctags/files/ctags/5.8/{}.zip'.
             format(CTAGS_VER), ctags_zip)
     unzip(ctags_zip, CTAGS_VER + '/ctags.exe')
Example #15
def eval(ucloud=False, model_name='resnet50'):

    if model_name == 'resnet50':
        batch_size = 32
        image_size = 224
    if model_name == 'inception_resnet_v2':
        batch_size = 16
        image_size = 299

    if ucloud:
        image_dir_testA = '/data/data/ai_challenger_scene_test_a_20180103/scene_test_a_images_20180103/'
        image_dir_testB = '/data/data/ai_challenger_scene_test_b_20180103/scene_test_b_images_20180103/'
        weights_path = '/data/code/imagenet_models/scene-output-trained_weights_all_layers_1.h5'
        submit_file_testA = '/data/output/testA_submit.json'
        submit_file_testB = '/data/output/testB_submit.json'
        reference_file_testA = '/data/data/ai_challenger_scene_test_a_20180103/scene_test_a_annotations_20180103.json'
        reference_file_testB = '/data/data/ai_challenger_scene_test_b_20180103/scene_test_b_annotations_20180103.json'

        testA_zip = '/data/data/ai_challenger_scene_test_a_20180103.zip'
        testB_zip = '/data/data/ai_challenger_scene_test_b_20180103.zip'
        zip_output = '/data/data/'

        unzip(testA_zip, zip_output)
        unzip(testB_zip, zip_output)

        creat_submit(image_dir_testA, submit_file_testA, image_size,
                     batch_size, weights_path)
        creat_submit(image_dir_testB, submit_file_testB, image_size,
                     batch_size, weights_path)

        result_A = {'error': [], 'warning': [], 'top3': None, 'top1': None}
        submit_dict_A, ref_dict_A, result_A = __load_data(
            submit_file_testA, reference_file_testA, result_A)
        result_A = __eval_result(submit_dict_A, ref_dict_A, result_A)

        result_B = {'error': [], 'warning': [], 'top3': None, 'top1': None}
        submit_dict_B, ref_dict_B, result_B = __load_data(
            submit_file_testB, reference_file_testB, result_B)
        result_B = __eval_result(submit_dict_B, ref_dict_B, result_B)

        print 'testA result =', result_A
        print 'testB result =', result_B

    else:
        #image_dir = '/Users/xiang/Desktop/classification/test/'
        image_dir = '/Users/xiang/Desktop/classification/ai_challenger_scene_test_a_20180103/scene_test_a_images_20180103/'
        weights_path = 'imagenet_models/scene-output-res50ep008-loss0.981-val_loss0.779-val_acc0.767.h5'
        submit_file = 'submit/testA_res50.json'
        reference_file = '/Users/xiang/Desktop/classification/ai_challenger_scene_test_a_20180103/scene_test_a_annotations_20180103.json'

        creat_submit(image_dir, submit_file, image_size, batch_size,
                     weights_path)

        result = {'error': [], 'warning': [], 'top3': None, 'top1': None}
        submit_dict, ref_dict, result = __load_data(submit_file,
                                                    reference_file, result)
        result = __eval_result(submit_dict, ref_dict, result)
        print result
Example #16
 def __init__(self, root):
     self.root = root
     if not posixpath.exists(posixpath.join(self.root, self.ukbench_dir)):
         download(self.root, self.filename, self.url)
         unzip(self.root, self.filename, self.ukbench_dir)
     self.uris = sorted(
         list_files(root=posixpath.join(self.root, self.ukbench_dir,
                                        'full'),
                    suffix=('png', 'jpg', 'jpeg', 'gif')))
Example #17
    def __init__(self):
        super(Shumway, self).__init__('shumway', '0.1', 'shumway')

        # Only update harness once a day:
        from datetime import datetime
        date = datetime.now().strftime("%Y-%m-%d")
        utils.getOrDownload("/tmp/", "shumway", date,
                            "http://mozilla.github.io/shumway/shell/shumway-shell.zip",
                            "/tmp/shumway-shell.zip")
        utils.unzip("/tmp/", "shumway-shell.zip")
Example #18
def unzip_news2016zh():
    path = settings.NEWS2016ZH_PATH
    if os.path.exists(path):
        logger.info(f'news2016zh already decompressed.')
        return
    zip_path = settings.NEWS2016ZH_ZIP_PATH
    folder = settings.FOLDER
    logger.info('news2016zh decompressing ...')
    unzip(zip_path, folder)
    logger.info(f'news2016zh decompressed to {folder}')
Example #19
def unpack(meta):
    src_path = download_to_cache(meta)

    os.makedirs(WORK_DIR)
    if src_path.endswith(('.tar.gz', '.tar.bz2', '.tgz', '.tar.xz', '.tar')):
        tar_xf(src_path, WORK_DIR)
    elif src_path.endswith('.zip'):
        unzip(src_path, WORK_DIR)
    else:
        raise Exception("not a vaild source")
Example #20
 def install_webdriver(self, dest=None, channel=None, browser_binary=None):
     if dest is None:
         dest = os.pwd
     url = self._latest_chromedriver_url(browser_binary)
     self.logger.info("Downloading ChromeDriver from %s" % url)
     unzip(get(url).raw, dest)
     chromedriver_dir = os.path.join(dest, 'chromedriver_%s' % self.platform_string())
     if os.path.isfile(os.path.join(chromedriver_dir, "chromedriver")):
         shutil.move(os.path.join(chromedriver_dir, "chromedriver"), dest)
         shutil.rmtree(chromedriver_dir)
     return find_executable("chromedriver", dest)
Example #21
    def __init__(self):
        super(Shumway, self).__init__('shumway', '0.1', 'shumway')

        # Only update harness once a day:
        from datetime import datetime
        date = datetime.now().strftime("%Y-%m-%d")
        utils.getOrDownload(
            "/tmp/", "shumway", date,
            "http://mozilla.github.io/shumway/shell/shumway-shell.zip",
            "/tmp/shumway-shell.zip")
        utils.unzip("/tmp/", "shumway-shell.zip")
Example #23
    def install_prefs(self, binary, dest=None, channel=None):
        if binary:
            version, channel_ = self.get_version_and_channel(binary)
            if channel is not None and channel != channel_:
                # Beta doesn't always seem to have the b in the version string, so allow the
                # manually supplied value to override the one from the binary
                self.logger.warning(
                    "Supplied channel doesn't match binary, using supplied channel"
                )
            elif channel is None:
                channel = channel_
        else:
            version = None

        if dest is None:
            dest = os.curdir

        dest = os.path.join(dest, "profiles", channel)
        if version:
            dest = os.path.join(dest, version)
        have_cache = False
        if os.path.exists(dest) and len(os.listdir(dest)) > 0:
            if channel != "nightly":
                have_cache = True
            else:
                now = datetime.now()
                have_cache = (datetime.fromtimestamp(os.stat(dest).st_mtime) >
                              now - timedelta(days=1))

        # If we don't have a recent download, grab and extract the latest one
        if not have_cache:
            if os.path.exists(dest):
                shutil.rmtree(dest)
            os.makedirs(dest)

            url = self.get_profile_bundle_url(version, channel)

            self.logger.info("Installing test prefs from %s" % url)
            try:
                extract_dir = tempfile.mkdtemp()
                unzip(get(url).raw, dest=extract_dir)

                profiles = os.path.join(extract_dir,
                                        os.listdir(extract_dir)[0], 'testing',
                                        'profiles')
                for name in os.listdir(profiles):
                    path = os.path.join(profiles, name)
                    shutil.move(path, dest)
            finally:
                shutil.rmtree(extract_dir)
        else:
            self.logger.info("Using cached test prefs from %s" % dest)

        return dest
Example #24
    def install_webdriver(self, dest=None):
        if dest is None:
            dest = os.pwd
        latest = get("http://chromedriver.storage.googleapis.com/LATEST_RELEASE").text.strip()
        url = "http://chromedriver.storage.googleapis.com/%s/chromedriver_%s.zip" % (latest,
                                                                                     self.platform_string())
        unzip(get(url).raw, dest)

        path = find_executable("chromedriver", dest)
        st = os.stat(path)
        os.chmod(path, st.st_mode | stat.S_IEXEC)
        return path
Example #25
 def install_webdriver(self, dest=None):
     """Install latest Webdriver."""
     if dest is None:
         dest = os.pwd
     latest = get("http://chromedriver.storage.googleapis.com/LATEST_RELEASE").text.strip()
     url = "http://chromedriver.storage.googleapis.com/%s/chromedriver_%s.zip" % (latest,
                                                                                  self.platform_string())
     unzip(get(url).raw, dest)
     path = find_executable(dest, "chromedriver")
     st = os.stat(path)
     os.chmod(path, st.st_mode | stat.S_IEXEC)
     return path
Example #26
 def install_webdriver_by_version(self, version, dest=None):
     assert version, "Cannot install ChromeDriver without Chrome version"
     if dest is None:
         dest = os.pwd
     url = self._latest_chromedriver_url(version)
     self.logger.info("Downloading ChromeDriver from %s" % url)
     unzip(get(url).raw, dest)
     chromedriver_dir = os.path.join(
         dest, 'chromedriver_%s' % self.platform_string())
     if os.path.isfile(os.path.join(chromedriver_dir, "chromedriver")):
         shutil.move(os.path.join(chromedriver_dir, "chromedriver"), dest)
         shutil.rmtree(chromedriver_dir)
     return find_executable("chromedriver", dest)
Example #27
    def _update(self, folder_id):
        # Step 2: Find the correct file
        response = urllib2.urlopen(self.nightly_dir+"/"+folder_id)
        html = response.read()
        if self.slaveType == "android":
            exec_file = re.findall("fennec-[a-zA-Z0-9.]*.en-US.android-arm.apk", html)[0]
            json_file = re.findall("fennec-[a-zA-Z0-9.]*.en-US.android-arm.json", html)[0]
        elif self.slaveType == "mac-desktop":
            exec_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.mac.dmg", html)[0]
            json_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.mac.json", html)[0]
        elif self.slaveType == "linux-desktop":
            exec_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.linux-x86_64.tar.bz2", html)[0]
            json_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.linux-x86_64.json", html)[0]
        else:
            exec_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.win32.zip", html)[0]
            json_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.win32.json", html)[0]

        # Step 3: Get build information
        response = urllib2.urlopen(self.nightly_dir+"/"+folder_id+"/"+json_file)
        html = response.read()
        info = json.loads(html)

        # Step 4: Test if there is a new revision
        if self.slaveType == "android":
            output = self.tmp_dir + self.folder + "/fennec.apk"
        elif self.slaveType == "mac-desktop":
            output = self.tmp_dir + self.folder + "/firefox.dmg"
        elif self.slaveType == "linux-desktop":
            output = self.tmp_dir + self.folder + "/firefox.tar.bz2"
        else:
            output = self.tmp_dir + self.folder + "/firefox.zip"
        utils.getOrDownload(self.tmp_dir, "mozilla", info["moz_source_stamp"],
                            self.nightly_dir + "/" + folder_id + "/" + exec_file,
                            output)

        # Step 5: Prepare to run
        if self.slaveType == "android":
            print subprocess.check_output(["adb", "install", "-r", self.tmp_dir + self.folder + "/fennec.apk"])
        elif self.slaveType == "mac-desktop":
            if os.path.exists("/Volumes/Nightly"):
                print subprocess.check_output(["hdiutil", "detach", "/Volumes/Nightly"])
            print subprocess.check_output(["hdiutil", "attach", self.tmp_dir + self.folder + "/firefox.dmg"])
        elif self.slaveType == "linux-desktop":
            utils.unzip(self.tmp_dir + self.folder, "firefox.tar.bz2")
        else:
            utils.unzip(self.tmp_dir + self.folder, "firefox.zip")

        # Step 6: Save info
        self.updated = True
        self.cset = info["moz_source_stamp"]
Example #28
def load_MovieLens_1m_dataset(target="cf"):
    '''
    Load MovieLens 1m dataset

    :returns: features, target_values, users, movies, ratings, data
    '''
    
    dataset_name = "ml-1m"
    data_path = os.path.join(movielens_path, dataset_name)
    pkl_path = os.path.join(data_path, "%s.pkl" % dataset_name)
    dataset = None

    if not os.path.exists(pkl_path):
        url = "http://files.grouplens.org/datasets/movielens/%s.zip" % dataset_name

        print("Downloading MovieLens %s dataset..." % dataset_name)
        utils.download(url, data_path + ".zip")
        utils.unzip("%s.zip" % data_path, movielens_path)
        
        # Read data
        user_names = [user_name, "gender", "age", "occupationId", "zip-code"]
        movie_names =  [movie_name, "Title", "Genres"]
        rating_names = [user_name, movie_name, rating_name, "timestamp"]

        users = pd.read_csv(os.path.join(data_path, "users.dat"), sep="::", header=None, names=user_names, engine="python")
        movies = pd.read_csv(os.path.join(data_path, "movies.dat"), sep="::", header=None, names=movie_names, engine="python")
        ratings = pd.read_csv(os.path.join(data_path, "ratings.dat"), sep="::", header=None, names=rating_names, engine="python")
        
        # Preprocessing

        # Merge data
        data = pd.merge(pd.merge(ratings.filter(regex='userId|movieId|rating'), users), movies)

        # Separate features and rating values
        features = data.drop([rating_name], axis = 1).values # drop rating column
        target_values = data[rating_name].values

        dataset = (features, target_values, users, movies, ratings, data)

        # Save preprocessed data
        with open(pkl_path, "wb") as f:
            pickle.dump(dataset, f)   
    else:
        with open(pkl_path, mode="rb") as f:
            dataset = pickle.load(f)

    if target == "cf":
        return convert_to_cf_data(dataset[5])
    else:
        return dataset
Example #29
    def _update(self, folder_id):
        # Step 2: Find the correct file
        response = urllib2.urlopen(self.nightly_dir+"/"+folder_id)
        html = response.read()
        if self.slaveType == "android":
            exec_file = re.findall("fennec-[a-zA-Z0-9.]*.en-US.android-arm.apk", html)[0]
            json_file = re.findall("fennec-[a-zA-Z0-9.]*.en-US.android-arm.json", html)[0]
        elif self.slaveType == "mac-desktop":
            exec_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.mac.dmg", html)[0]
            json_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.mac.json", html)[0]
        elif self.slaveType == "linux-desktop":
            exec_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.linux-x86_64.tar.bz2", html)[0]
            json_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.linux-x86_64.json", html)[0]
        else:
            exec_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.win32.zip", html)[0]
            json_file = re.findall("firefox-[a-zA-Z0-9.]*.en-US.win32.json", html)[0]

        # Step 3: Get build information
        response = urllib2.urlopen(self.nightly_dir+"/"+folder_id+"/"+json_file)
        html = response.read()
        info = json.loads(html)

        # Step 4: Test if there is a new revision
        if self.slaveType == "android":
            output = self.tmp_dir + self.folder + "/fennec.apk"
        elif self.slaveType == "mac-desktop":
            output = self.tmp_dir + self.folder + "/firefox.dmg"
        elif self.slaveType == "linux-desktop":
            output = self.tmp_dir + self.folder + "/firefox.tar.bz2"
        else:
            output = self.tmp_dir + self.folder + "/firefox.zip"
        utils.getOrDownload(self.tmp_dir, "mozilla", info["moz_source_stamp"],
                            self.nightly_dir + "/" + folder_id + "/" + exec_file,
                            output)

        # Step 5: Prepare to run
        if self.slaveType == "android":
            print subprocess.check_output(["adb", "install", "-r", self.tmp_dir + self.folder + "/fennec.apk"])
        elif self.slaveType == "mac-desktop":
            if os.path.exists("/Volumes/Nightly"):
                print subprocess.check_output(["hdiutil", "detach", "-force", "/Volumes/Nightly"])
            print subprocess.check_output(["hdiutil", "attach", self.tmp_dir + self.folder + "/firefox.dmg"])
        elif self.slaveType == "linux-desktop":
            utils.unzip(self.tmp_dir + self.folder, "firefox.tar.bz2")
        else:
            utils.unzip(self.tmp_dir + self.folder, "firefox.zip")

        # Step 6: Save info
        self.updated = True
        self.cset = info["moz_source_stamp"]
Example #30
    def install_webdriver(self, dest=None):
        """Install latest Geckodriver."""
        if dest is None:
            dest = os.getcwd()

        version = self._latest_geckodriver_version()
        format = "zip" if uname[0] == "Windows" else "tar.gz"
        logger.debug("Latest geckodriver release %s" % version)
        url = ("https://github.com/mozilla/geckodriver/releases/download/%s/geckodriver-%s-%s.%s" %
               (version, version, self.platform_string_geckodriver(), format))
        if format == "zip":
            unzip(get(url).raw, dest=dest)
        else:
            untar(get(url).raw, dest=dest)
        return find_executable(os.path.join(dest, "geckodriver"))
Example #32
def download(_):
    """
    Start checkpoints download logic.

    :param _: None
    :return: None
    """
    Conf.log = setup_log(logging.DEBUG) if Conf.args['debug'] else setup_log()
    tempdir = tempfile.mkdtemp()
    cdn_url = Conf.checkpoints_cdn.format(Conf.checkpoints_version)
    temp_zip = os.path.join(tempdir, "{}.zip".format(Conf.checkpoints_version))

    try:
        Conf.log.info("Downloading {}".format(cdn_url))
        dl_file(Conf.checkpoints_cdn.format(Conf.checkpoints_version),
                temp_zip)

        if not os.path.exists(Conf.args['checkpoints']['checkpoints_path']):
            os.mkdir(Conf.args['checkpoints']['checkpoints_path'])

        Conf.log.info("Extracting {}".format(temp_zip))
        unzip(temp_zip, Conf.args['checkpoints']['checkpoints_path'])

        Conf.log.info("Moving Checkpoints To Final Location")

        for c in ("cm.lib", "mm.lib", "mn.lib"):
            if os.path.isfile(
                    os.path.join(Conf.args['checkpoints']['checkpoints_path'],
                                 c)):
                os.remove(
                    os.path.join(Conf.args['checkpoints']['checkpoints_path'],
                                 c))
            shutil.move(
                os.path.join(Conf.args['checkpoints']['checkpoints_path'],
                             'checkpoints', c),
                Conf.args['checkpoints']['checkpoints_path'])
        shutil.rmtree(
            os.path.join(Conf.args['checkpoints']['checkpoints_path'],
                         'checkpoints'))

    except Exception as e:
        Conf.log.error(e)
        Conf.log.error(
            "Something Gone Bad Download Downloading The Checkpoints")
        shutil.rmtree(tempdir)
        sys.exit(1)
    shutil.rmtree(tempdir)
    Conf.log.info("Checkpoints Downloaded Successfully")
Example #33
    def install_webdriver(self, dest=None, channel=None, browser_binary=None):
        if dest is None:
            dest = os.pwd
        latest = get("https://api.github.com/repos/operasoftware/operachromiumdriver/releases/latest").json()["tag_name"]
        url = "https://github.com/operasoftware/operachromiumdriver/releases/download/%s/operadriver_%s.zip" % (latest,
                                                                                                                self.platform_string())
        unzip(get(url).raw, dest)

        operadriver_dir = os.path.join(dest, "operadriver_%s" % self.platform_string())
        shutil.move(os.path.join(operadriver_dir, "operadriver"), dest)
        shutil.rmtree(operadriver_dir)

        path = find_executable("operadriver")
        st = os.stat(path)
        os.chmod(path, st.st_mode | stat.S_IEXEC)
        return path
Example #34
def fetch_sub(location, directory=None, token=None):
    """Filename is the full path of the movie file
    dirname is the full path of the movie dir, fetch_sub will search for video file
    diretory is the directory where sub should be stored, default to the movie dir
    """
    my_open_sub = MyOpenSub(OPENSUB_URL, token)
    if not token:
        my_open_sub.login(USERNAME, PASSWORD, LANGUAGE, USER_AGENT)
    filenames = find_video(location)
    if not filenames:
        print "No video file found"
        return
    for filename in find_video(location):
        print "Fetching subtitle for %s" % filename
        file_hash, file_bytes = hashfile(filename)
        dl_links = my_open_sub.search_sub_links(file_hash, file_bytes)
        if not dl_links:
            print "Sorry, no subtitle found"
            return
        if not directory:
            directory = os.path.dirname(filename)
        for dl_link in dl_links:
            try:
                filename = wget.download(dl_link, out=directory)
                print "Download finished: %s" % filename
                filename = unzip(filename, directory)
                if filename:
                    print "Unzipped to %s" % filename
            except IOError as io_error:
                print io_error
Example #35
def submit(request):
    team = get_team(request.user)
    params = dict()

    if request.method == 'POST':
        form = forms.UploadSubmissionForm(request.POST, request.FILES)
        params['form'] = form

        if form.is_valid():
            submission = Submission(
                team=team,
                package=request.FILES['file'],
                command=request.POST['command'],
            )
            submission.save()

            error = utils.unzip(submission.package.path)
            if error:
                submission.delete()
                params['error'] = error
                return render_submit(request, params)

            try:
                execute_tester(submission)
            except Exception as error:
                print u'ERROR: Blad wewnetrzny testerki:', error

            return my_results(request,
                              message=_(u'Rozwiązanie zostało wysłane.'))

    return render_submit(request, params)
Example #36
def save_progress(options, tparams, epoch, best_perf):
    sd = options.get('seed', -99)
    fld = options.get('fold', -99)
    with open(state_file_name(options) + '.tmp', 'a') as w:
        w.write('%d,%d,%d,%f,%f\n' %
                (epoch, sd, fld, best_perf[0], best_perf[1]))
    pickle.dump(unzip(tparams), open(state_file_name(options) + '.pkl', 'w'))
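In this example (and again in Examples #49 and #58), `unzip` is not an archive helper: in Theano-style training code it conventionally copies the shared-variable parameters into a plain dict of numpy arrays so they can be pickled or passed to numpy.savez. A sketch of that conventional helper, assuming this codebase follows the usual pattern:

from collections import OrderedDict

def unzip(zipped):
    # Assumed convention: copy Theano shared variables into an ordinary dict
    # of numpy arrays (not necessarily this project's verbatim code).
    new_params = OrderedDict()
    for kk, vv in zipped.items():
        new_params[kk] = vv.get_value()
    return new_params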
Example #37
 def ordenar_vecinos(self):
     if self.vecinos_ordenados:
         return
     for u in self.vertices():
         self.lista_vecinos[u], self.pesos[u] = unzip(
             sorted(list(zip(*self.vecinos(u)))))
     self.vecinos_ordenados = True
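Here (and in Examples #52 and #53) `unzip` is the inverse of zip(): it transposes a sequence of pairs into parallel sequences. A minimal sketch under that assumption:

def unzip(pairs):
    # Assumed meaning: transpose a sequence of tuples,
    # e.g. [(a1, b1), (a2, b2)] -> ((a1, a2), (b1, b2)).
    return tuple(zip(*pairs))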
Example #38
def unzip_data():

    if not os.path.exists(data_dir):
        os.makedirs(data_dir)

    fnames = [
        "asian.zip",
        "africanamerican.zip",
        "caucasian.zip",
        "hispanic.zip",
        "multiracial.zip",
    ]

    for fname in fnames:
        print(".. unzipping")
        unzip(os.path.join(data_dir, fname), data_dir)
Example #39
def submit(request):
    team = get_team(request.user)
    params = dict()

    if request.method == 'POST':
        form = forms.UploadSubmissionForm(request.POST, request.FILES)
        params['form'] = form

        if form.is_valid():
            submission = Submission(
                team=team,
                package=request.FILES['file'],
                command=request.POST['command'],
            )
            submission.save()

            error = utils.unzip(submission.package.path)
            if error:
                submission.delete()
                params['error'] = error
                return render_submit(request, params)

            try:
                execute_tester(submission)
            except Exception as error:
                print u'ERROR: Blad wewnetrzny testerki:', error
                

            return my_results(
                request, message=_(u'Rozwiązanie zostało wysłane.'))

    return render_submit(request, params)
Example #40
    def install_webdriver(self, dest=None):
        if dest is None:
            dest = os.pwd
        latest = get("https://api.github.com/repos/operasoftware/operachromiumdriver/releases/latest").json()["tag_name"]
        url = "https://github.com/operasoftware/operachromiumdriver/releases/download/%s/operadriver_%s.zip" % (latest,
                                                                                                                self.platform_string())
        unzip(get(url).raw, dest)

        operadriver_dir = os.path.join(dest, "operadriver_%s" % self.platform_string())
        shutil.move(os.path.join(operadriver_dir, "operadriver"), dest)
        shutil.rmtree(operadriver_dir)

        path = find_executable("operadriver")
        st = os.stat(path)
        os.chmod(path, st.st_mode | stat.S_IEXEC)
        return path
Example #41
    def install_agent(self, package_path):
        checksum = utils.get_checksum(package_path, self.get_checksum_dir())
        if not utils.check_file(package_path, checksum):
            LOG.debug("check downloaded package failed, removing %s", package_path)
            os.remove(package_path)
            utils.remove_checksum(package_path, self.get_checksum_dir())
            return None

        dirname = utils.get_file_name_without_extension(package_path)
        agent_dir = '%s\%s' % (self.get_install_dir(), dirname)
        tmp_dir = '%s\%s' % (self.get_tmp_dir(), dirname)

        try:
            if utils.unzip(package_path, tmp_dir):
                tmp_name = os.listdir(tmp_dir)[0]
                tmp_path = '%s\%s' % (tmp_dir, tmp_name)
                ren_path = '%s\%s' % (tmp_dir, dirname)

                if os.system('ren "%s" "%s"' % (tmp_path, dirname)) != 0:
                    return None

                if os.system('move "%s" "%s"' % (ren_path, self.get_install_dir())) != 0:
                    return None
            else:
                return None
        finally:
            if os.path.isdir(tmp_dir):
                os.system('rd /s /q "%s"' % tmp_dir)

        installer = Installer(agent_dir, self.recycle_queue)
        installer.install()
        return agent_dir
Example #42
def get_shape_from_coords(coords):
    """ Return shape of smallest well plate that can contain provided indices."""  
    well_plate_shapes = [(2,4),(8,12)] # 8 well slide, 96 well plate
    rows,cols = unzip(coords)
    for shape in well_plate_shapes:
        if rows |are_all_in| range(shape[0]) and cols |are_all_in| range(shape[1]):
            return shape
    raise Exception("Given well coordinates do not fit in plate shapes:{}".format(well_plate_shapes))
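Example #43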
def main(url=ZIP_CODE_URL):
    path = utils.DATASET_HOME + ADDRESS_ZIP
    utils.download(url, path)
    files = utils.unzip(path)
    if files and len(files) > 0:
        write_address(files[0])
    else:
        print("failed to download or unzip the file. please see at {0}.".format(utils.DATASET_HOME))
Example #44
    def install_prefs(self, binary, dest=None, channel=None):
        version, channel_ = self.get_version_and_channel(binary)
        if channel is not None and channel != channel_:
            # Beta doesn't always seem to have the b in the version string, so allow the
            # manually supplied value to override the one from the binary
            self.logger.warning("Supplied channel doesn't match binary, using supplied channel")
        elif channel is None:
            channel = channel_
        if dest is None:
            dest = os.pwd

        dest = os.path.join(dest, "profiles", channel)
        if version:
            dest = os.path.join(dest, version)
        have_cache = False
        if os.path.exists(dest):
            if channel != "nightly":
                have_cache = True
            else:
                now = datetime.now()
                have_cache = (datetime.fromtimestamp(os.stat(dest).st_mtime) >
                              now - timedelta(days=1))

        # If we don't have a recent download, grab and extract the latest one
        if not have_cache:
            if os.path.exists(dest):
                shutil.rmtree(dest)
            os.makedirs(dest)

            url = self.get_profile_bundle_url(version, channel)

            self.logger.info("Installing test prefs from %s" % url)
            try:
                extract_dir = tempfile.mkdtemp()
                unzip(get(url).raw, dest=extract_dir)

                profiles = os.path.join(extract_dir, os.listdir(extract_dir)[0], 'testing', 'profiles')
                for name in os.listdir(profiles):
                    path = os.path.join(profiles, name)
                    shutil.move(path, dest)
            finally:
                shutil.rmtree(extract_dir)
        else:
            self.logger.info("Using cached test prefs from %s" % dest)

        return dest
Example #45
    def imputResource():

        flag = utils.unzip(intputLabel['text'], outputLabel['text'])
        # print (intputLabel['text'] + ':' + outputLabel['text'] + ':' +xmlPathLabel['text'] )
        if flag:
            print("导入资源成功")
            global resFlag
            resFlag = True
Example #46
    def install_certutil(self, dest=None):
        # TODO: this doesn't really work because it just gets the binary, and is missing the
        # libnss3 library. Getting that means either downloading the corresponding Firefox
        # and extracting the library (which is hard on mac because of the DMG format), or maybe downloading from
        # nss's treeherder builds?
        if dest is None:
            dest = os.pwd

        # Don't create a path like bin/bin/certutil
        split = os.path.split(dest)
        if split[1] == "bin":
            dest = split[0]

        resp = self.get_from_nightly(
            "<a[^>]*>(firefox-\d+\.\d(?:\w\d)?.en-US.%s\.common\.tests.zip)</a>" % self.platform_string())
        bin_path = path("bin/certutil", exe=True)
        unzip(resp.raw, dest=dest, limit=[bin_path])

        return os.path.join(dest, bin_path)
Example #47
def mfold_result(task_id, zipname="now.zip", path="./results/mfold/", verbose=True):
    """Gets mfold result via task_id

    Args:
        task_id: Id of task which was given by RESTful API
        zipname: Name of the zip file in which the client will save results.
            After saving, this file is removed
        path: Path where results should be stored
        verbose: Bool that tells whether the function should print what it actually does

    Returns:
        None
    """
    req = get_request("mfold_result", task_id)
    with open(zipname, "wb") as f:
        for chunk in req.iter_content():
            f.write(chunk)
    unzip(zipname, path)
    os.remove(zipname)

    if verbose:
        print("Result in: {}/".format(path + task_id))
Example #48
    def update(self):
        # Step 1: Get latest successful build revision
        response = urllib2.urlopen(self.nightly_dir+"LAST_CHANGE")
        chromium_rev = response.read()

        # Step 2: Get v8 revision
        response = urllib2.urlopen(self.nightly_dir + chromium_rev + "/REVISIONS")
        self.cset = re.findall('"v8_revision_git": "([a-z0-9]*)",', response.read())[0]

        # Step 3: Test if there is a new revision
        utils.getOrDownload(self.tmp_dir, "chrome", self.cset,
                            self.nightly_dir + chromium_rev + "/" + self.filename,
                            self.tmp_dir + self.filename)
        # Step 4: Unzip
        utils.unzip(self.tmp_dir, self.filename)

        # Step 5: Install on device
        if self.slaveType == "android":
            print subprocess.check_output(["adb", "install", "-r", self.tmp_dir+"/chrome-android/apks/ChromeShell.apk"])

        # Step 6: Save info
        self.updated = True
Example #49
def validate_and_save_checkpoint(model_options, dp, params, tparams, f_pred, f_pred_prob, kf_valid, save_n):
  scores = prediction(f_pred, f_pred_prob, dp.prepare_valid_or_test_batch_image_data, 'val', kf_valid, dp.ix_to_word, dp.get_raw_sentences_from_imgid, model_options, save_n['prediction'])
  # saving a checkpoint
  save_path = os.path.join(model_options['checkpoint_save_dir'], "lstm_{0}_{1:.2f}.npz".format(save_n['checkpoint'], scores['Bleu_4'] * 100))
  params = utils.unzip(tparams)
  numpy.savez(save_path, checkpoint_save_n=save_n['checkpoint'], scores=scores, **params)
  pickle.dump(model_options, open('%s.pkl' % save_path, 'wb'), -1)
  print 'Saved checkpoint to', os.path.abspath(save_path)
  
  save_n['checkpoint'] = save_n['checkpoint'] + 1
  save_n['prediction'] = save_n['prediction'] + 1
  
  return scores
Example #50
    def install_webdriver(self, dest=None, channel=None):
        """Install latest Geckodriver."""
        if dest is None:
            dest = os.getcwd()

        if channel == "nightly":
            path = self.install_geckodriver_nightly(dest)
            if path is not None:
                return path
            else:
                self.logger.warning("Nightly webdriver not found; falling back to release")

        version = self._latest_geckodriver_version()
        format = "zip" if uname[0] == "Windows" else "tar.gz"
        self.logger.debug("Latest geckodriver release %s" % version)
        url = ("https://github.com/mozilla/geckodriver/releases/download/%s/geckodriver-%s-%s.%s" %
               (version, version, self.platform_string_geckodriver(), format))
        if format == "zip":
            unzip(get(url).raw, dest=dest)
        else:
            untar(get(url).raw, dest=dest)
        return find_executable(os.path.join(dest, "geckodriver"))
Example #51
    def install_prefs(self, binary, dest=None):
        version, channel = self.get_version_and_channel(binary)
        if dest is None:
            dest = os.pwd

        dest = os.path.join(dest, "profiles", channel, version)
        have_cache = False
        if os.path.exists(dest):
            if channel != "nightly":
                have_cache = True
            else:
                now = datetime.now()
                have_cache = (datetime.fromtimestamp(os.stat(dest).st_mtime) >
                              now - timedelta(days=1))

        # If we don't have a recent download, grab and extract the latest one
        if not have_cache:
            if os.path.exists(dest):
                shutil.rmtree(dest)
            os.makedirs(dest)

            url = self.get_profile_bundle_url(version, channel)

            print("Installing test prefs from %s" % url)
            try:
                extract_dir = tempfile.mkdtemp()
                unzip(get(url).raw, dest=extract_dir)

                profiles = os.path.join(extract_dir, os.listdir(extract_dir)[0], 'testing', 'profiles')
                for name in os.listdir(profiles):
                    path = os.path.join(profiles, name)
                    shutil.move(path, dest)
            finally:
                shutil.rmtree(extract_dir)
        else:
            print("Using cached test prefs from %s" % dest)

        return dest
Example #52
 def get_gobstones_tests(self):
     tests = []
     test_groups = self.get_test_groups()
     for tgroup, index in zip(test_groups, range(len(test_groups))):
         ops, pyfs = unzip(tgroup)
         tests.append(AutoGobstonesTest(
             "group %s, %s tests" % (index, len(ops)),
             self,
             ops,
             self.program_for(ops),
             pyfs,
             self.gobstones_parameters()
             ))
     return tests
Example #53
    def run(self):
        result_to_op = []
        for op in self.operations:
            result_to_op.extend([op for x in range(op.nretvals)])
            #print "\tRunning subtest %s" % (op,)
        run_params = [
            temp_test_file(self.gbscode),
            os.path.dirname(__file__) + "/boards/empty.gbb",
            self.gbsparams
            ]
        results = run_gobstones(*run_params)
        if results[0] == "OK":
            gbsres = results[1]
            pyres = []
            for f in self.pyfuncs:
                pyr = f()
                if isinstance(pyr, tuple):
                    pyres += list(pyr)
                else:
                    pyres.append(pyr)

            if len(pyres) == len(gbsres):
                for gbsval, pyval, index in zip(
                    unzip(gbsres)[1],
                    pyres,
                    range(len(pyres))
                    ):
                    self.assertEqual(
                        gbsval,
                        str(pyval),
                        ("Operation %s failed. The result %s do not match " +
                        "the expected value. " +
                        "Expected: %s. Actual: %s"
                        ) % (
                            result_to_op[index],
                            index,
                            pyval,
                            gbsval
                            )
                        )
            else:
                self.fail("The execution results count do " +
                          "not match the expected results count")
        else:
            self.fail(results[1])
Example #54
    def _fetch_pcsapi(self, path, params=None, data=None, headers={}):
        assert self._bduss is not None
        url = urljoin(self._pcsapi_baseUrl, path) + "?app_id=266719"
        if params:
            url += "&" + urlencode(params)
        headers["Cookie"] = "BDUSS=" + self._bduss

        try:
            r = fetch_url(url, data, headers)
        except urllib2.HTTPError as e:
            try:
                error_content = e.read()
                if e.headers.get("content-encoding") == "gzip":
                    error_content = unzip(error_content)
                eo = json.loads(error_content)

            except:
                raise e
            else:
                raise PCSApiError(eo.get("error_code"), eo.get("error_msg"))

        return json.loads(r)
Example #55
    def _fetch_pcsapi(self, path, params=None, data=None, headers={}):
        assert self._bduss is not None
        url = urljoin(self._pcsapi_baseUrl, path) + '?app_id=266719'
        if params:
            url += '&' + urlencode(params)
        headers['Cookie'] = 'BDUSS=' + self._bduss

        try:
            r = fetch_url(url, data, headers)
        except urllib2.HTTPError as e:
            try:
                error_content = e.read()
                if e.headers.get('content-encoding') == 'gzip':
                    error_content = unzip(error_content)
                eo = json.loads(error_content)

            except:
                raise e
            else:
                raise PCSApiError(eo.get('error_code'), eo.get('error_msg'))

        return json.loads(r)
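In Examples #54 and #55, `unzip` decompresses a gzip-encoded HTTP error body rather than a zip archive. A minimal sketch, assuming that meaning:

import gzip
import io

def unzip(data):
    # Assumed helper: decompress gzip-encoded bytes,
    # e.g. a Content-Encoding: gzip response body.
    return gzip.GzipFile(fileobj=io.BytesIO(data)).read()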
Example #56
def submit(request):
    team = request.user.team
    params = dict()

    if request.method == 'POST':
        form = UploadSubmissionForm(request.POST, request.FILES)
        params['form'] = form

        if form.is_valid():
            submission = Submission(
                team=team,
                package=request.FILES['file'],
                user=request.user,
            )
            submission.save()

            error = utils.unzip(submission.package.path)
            if error:
                submission.delete()
                params['error'] = error
                return render_submit(request, params)

            submissions = team.submission_set.all()
            if len(submissions) > 2:
                for sub in submissions[2:]:
                    sub.delete()

            execute_tester(submission)
            messages.add_message(request, messages.INFO, _(u'Rozwiązanie zostało wysłane'))
            return redirect('my_results')
            #return my_results(
            #    request, message=_(u'Rozwiązanie zostało wysłane.'))
        else:
            print form.errors

    return render_submit(request, params)
Example #57
 def setupTests(self):
     zippedTests = download_url(getTestUrl(),dest=str(os.path.join(self.shellCacheDir,"tests.zip")))
     unzip(self.testDir,zippedTests)
Example #58
File: train.py Project: orhanf/rmn
def train(dim_word=100,  # word vector dimensionality
          dim=1000,  # the number of GRU units
          encoder='gru',
          patience=10,  # early stopping patience
          max_epochs=5000,
          finish_after=10000000,  # finish after this many updates
          dispFreq=100,
          decay_c=0.,  # L2 weight decay penalty
          lrate=0.01,
          n_words=100000,  # vocabulary size
          vocab_dim=100000,  # Size of M, C
          memory_dim=1000,  # Dimension of memory
          memory_size=15,  # n_back to attend
          maxlen=100,  # maximum length of the description
          optimizer='rmsprop',
          batch_size=16,
          valid_batch_size=16,
          saveto='model.npz',
          validFreq=1000,
          saveFreq=1000,  # save the parameters after every saveFreq updates
          sampleFreq=100,  # generate some samples after every sampleFreq
          dataset='/data/lisatmp3/chokyun/wikipedia/extracted/wiki.tok.txt.gz',
          valid_dataset='../data/dev/newstest2011.en.tok',
          dictionary='/data/lisatmp3/chokyun/wikipedia/extracted/'
          'wiki.tok.txt.gz.pkl',
          use_dropout=False,
          reload_=False):

    # Model options
    model_options = locals().copy()

    # Theano random stream
    trng = RandomStreams(1234)

    # load dictionary
    with open(dictionary, 'rb') as f:
        worddicts = pkl.load(f)

    # invert dictionary
    worddicts_r = dict()
    for kk, vv in worddicts.iteritems():
        worddicts_r[vv] = kk

    # reload options
    if reload_ and os.path.exists(saveto):
        with open('%s.pkl' % saveto, 'rb') as f:
            model_options = pkl.load(f)

    print 'Loading data'
    train = TextIterator(dataset,
                         dictionary,
                         n_words_source=n_words,
                         batch_size=batch_size,
                         maxlen=maxlen)
    valid = TextIterator(valid_dataset,
                         dictionary,
                         n_words_source=n_words,
                         batch_size=valid_batch_size,
                         maxlen=maxlen)

    # initialize RMN
    rmn_ = RMN(model_options)

    print 'Building model'
    rmn_.init_params()

    # reload parameters
    if reload_ and os.path.exists(saveto):
        rmn_.load_params(saveto)

    # create shared variables for parameters
    tparams = rmn_.tparams

    # build the symbolic computational graph
    use_noise, x, x_mask, opt_ret, cost = rmn_.build_model()
    inps = [x, x_mask]

    print 'Buliding sampler'
    f_next = rmn_.build_sampler(trng)

    # before any regularizer
    print 'Building f_log_probs...',
    f_log_probs = theano.function(inps, cost, profile=profile)
    print 'Done'

    cost = cost.mean()

    # apply L2 regularization on weights
    if decay_c > 0.:
        decay_c = theano.shared(numpy.float32(decay_c), name='decay_c')
        weight_decay = 0.
        for kk, vv in tparams.iteritems():
            weight_decay += (vv ** 2).sum()
        weight_decay *= decay_c
        cost += weight_decay

    # after any regularizer - compile the computational graph for cost
    print 'Building f_cost...',
    f_cost = theano.function(inps, cost, profile=profile)
    print 'Done'

    print 'Computing gradient...',
    grads = tensor.grad(cost, wrt=itemlist(tparams))
    print 'Done'

    # compile the optimizer, the actual computational graph is compiled here
    lr = tensor.scalar(name='lr')
    print 'Building optimizers...',
    optimizer = getattr(importlib.import_module('optimizer'), optimizer)
    f_grad_shared, f_update = optimizer(lr, tparams, grads, inps, cost)
    print 'Done'

    print 'Optimization'

    history_errs = []
    uidx = 0
    estop = False
    bad_counter = 0

    # reload history
    if reload_ and os.path.exists(saveto):
        history_errs = list(numpy.load(saveto)['history_errs'])
        uidx = numpy.load(saveto)['uidx']
    best_p = None

    if validFreq == -1:
        validFreq = len(train[0])/batch_size
    if saveFreq == -1:
        saveFreq = len(train[0])/batch_size
    if sampleFreq == -1:
        sampleFreq = len(train[0])/batch_size

    # Training loop
    for eidx in xrange(max_epochs):
        n_samples = 0

        for x in train:
            n_samples += len(x)
            uidx += 1
            use_noise.set_value(1.)

            # pad batch and create mask
            x, x_mask = prepare_data(x, maxlen=maxlen, n_words=n_words)

            if x is None:
                print 'Minibatch with zero sample under length ', maxlen
                uidx -= 1
                continue

            ud_start = time.time()

            # compute cost, grads and copy grads to shared variables
            cost = f_grad_shared(x, x_mask)

            # do the update on parameters
            f_update(lrate)

            ud = time.time() - ud_start

            # check for bad numbers
            if numpy.isnan(cost) or numpy.isinf(cost):
                print 'NaN detected'
                return 1.

            # verbose
            if numpy.mod(uidx, dispFreq) == 0:
                print 'Epoch ', eidx, 'Update ', uidx, 'Cost ', cost, 'UD ', ud

            # save the best model so far
            if numpy.mod(uidx, saveFreq) == 0:
                print 'Saving...',

                if best_p is not None:
                    params = best_p
                else:
                    params = unzip(tparams)
                numpy.savez(saveto, history_errs=history_errs, uidx=uidx,
                            **params)
                pkl.dump(model_options, open('%s.pkl' % saveto, 'wb'))
                print 'Done'

            # generate some samples with the model and display them
            if numpy.mod(uidx, sampleFreq) == 0:
                # FIXME: random selection?
                for jj in xrange(5):
                    sample, score = rmn_.gen_sample(tparams, f_next,
                                                    trng=trng, maxlen=30,
                                                    argmax=False)
                    print 'Sample ', jj, ': ',
                    ss = sample
                    for vv in ss:
                        if vv == 0:
                            break
                        if vv in worddicts_r:
                            print worddicts_r[vv],
                        else:
                            print 'UNK',
                    print

            # validate model on validation set and early stop if necessary
            if numpy.mod(uidx, validFreq) == 0:
                use_noise.set_value(0.)
                valid_errs = rmn_.pred_probs(valid, f_log_probs, prepare_data)
                valid_err = valid_errs.mean()
                history_errs.append(valid_err)

                if uidx == 0 or valid_err <= numpy.array(history_errs).min():
                    best_p = unzip(tparams)
                    bad_counter = 0
                if len(history_errs) > patience and valid_err >= \
                        numpy.array(history_errs)[:-patience].min():
                    bad_counter += 1
                    if bad_counter > patience:
                        print 'Early Stop!'
                        estop = True
                        break

                if numpy.isnan(valid_err):
                    ipdb.set_trace()

                print 'Valid ', valid_err

            # finish after this many updates
            if uidx >= finish_after:
                print 'Finishing after %d iterations!' % uidx
                estop = True
                break

        print 'Seen %d samples' % n_samples

        if estop:
            break

    if best_p is not None:
        zipp(best_p, tparams)

    use_noise.set_value(0.)
    valid_err = rmn_.pred_probs(f_log_probs, prepare_data,
                                model_options, valid).mean()

    print 'Valid ', valid_err

    params = copy.copy(best_p)
    numpy.savez(saveto, zipped_params=best_p,
                history_errs=history_errs,
                uidx=uidx,
                **params)

    return valid_err
Example #59
 def ordenar_vecinos(self):
     if self.vecinos_ordenados:
         return
     for u in self.vertices():
         self.lista_vecinos[u], self.pesos[u] = unzip(sorted(list(zip(*self.vecinos(u)))))
     self.vecinos_ordenados = True