Example #1
1
    def included_files(self):
        """Return the resolved paths of all included files.

        Each filename in ``self.included_files_list`` is resolved
        relative to ``self.makefile_dirname`` and verified to exist, so
        callers never receive a path to a missing file.

        :returns: list of joined paths, one per listed included file
        :raises MissingIncludedFile: if any listed file does not exist
        """
        # Resolve each path once instead of joining twice (once for the
        # existence check and again for the returned list).
        paths = [join(self.makefile_dirname, filename)
                 for filename in self.included_files_list]
        for filepath in paths:
            if not isfile(filepath):
                raise MissingIncludedFile("A listed included file cannot be found")
        return paths
Example #2
1
def hash_cleanup_listdir(hsh_path, reclaim_age=ONE_WEEK):
    """
    List contents of a hash directory and clean up any old files.

    File names start with a timestamp (the ``.ts`` name minus its suffix
    is parsed with ``float``), so a reverse lexical sort orders the
    entries newest-first.

    :param hsh_path: object hash path
    :param reclaim_age: age in seconds at which to remove tombstones
    :returns: list of files remaining in the directory, reverse sorted
    """
    files = os.listdir(hsh_path)
    if len(files) == 1:
        if files[0].endswith(".ts"):
            # remove tombstones older than reclaim_age; the tombstone's
            # name (minus the ".ts" suffix) is its creation timestamp
            ts = files[0].rsplit(".", 1)[0]
            if (time.time() - float(ts)) > reclaim_age:
                os.unlink(join(hsh_path, files[0]))
                files.remove(files[0])
    elif files:
        # Newest first; track the newest .meta/.data/.ts seen so far and
        # delete anything that is superseded by them.
        files.sort(reverse=True)
        meta = data = tomb = None
        for filename in list(files):
            # Record the first (i.e. newest) file of each kind.
            if not meta and filename.endswith(".meta"):
                meta = filename
            if not data and filename.endswith(".data"):
                data = filename
            if not tomb and filename.endswith(".ts"):
                tomb = filename
            # NOTE(review): these comparisons rely on Python 2 ordering,
            # where any string compares greater than None; on Python 3
            # `filename < None` raises TypeError — confirm target version.
            if (
                filename < tomb  # any file older than the newest tombstone
                or filename < data  # any file older than the newest data file
                or (filename.endswith(".meta") and filename < meta)  # any meta older than the newest meta
            ):
                os.unlink(join(hsh_path, filename))
                files.remove(filename)
    return files
Example #3
1
def determine_gl_flags():
    """Build the compiler/linker flag dict for the OpenGL backend.

    Depending on the target platform the returned dict carries
    ``libraries`` and possibly ``include_dirs``, ``extra_link_args`` and
    ``extra_compile_args``; GLEW libraries are appended when enabled.
    """
    gl_flags = {"libraries": []}
    if platform == "win32":
        gl_flags["libraries"] = ["opengl32"]
    elif platform == "darwin":
        arch_args = ["-arch", "x86_64"]
        gl_flags["extra_link_args"] = ["-framework", "OpenGL"] + arch_args
        gl_flags["extra_compile_args"] = list(arch_args)
    elif platform.startswith("freebsd"):
        gl_flags.update(
            include_dirs=["/usr/local/include"],
            extra_link_args=["-L", "/usr/local/lib"],
            libraries=["GL"],
        )
    elif platform.startswith("openbsd"):
        gl_flags.update(
            include_dirs=["/usr/X11R6/include"],
            extra_link_args=["-L", "/usr/X11R6/lib"],
            libraries=["GL"],
        )
    elif platform == "android":
        ndk_usr = join(ndkplatform, "usr")
        gl_flags.update(
            include_dirs=[join(ndk_usr, "include")],
            extra_link_args=["-L", join(ndk_usr, "lib")],
            libraries=["GLESv2"],
        )
    else:
        gl_flags["libraries"] = ["GL"]
    if c_options["use_glew"]:
        # GLEW's library name differs only on Windows.
        gl_flags["libraries"] += ["glew32"] if platform == "win32" else ["GLEW"]
    return gl_flags
Example #4
1
    def __init__(self, verbose=False):
        """
        Load every settings file into this object.

        Default settings are read first, then user settings, then
        campaign settings; keys from later files replace those loaded
        earlier.

        Only the default settings file must exist. Any other file that
        is missing or unreadable is skipped without raising.

        Args:
            verbose (bool): Whether to report errors that are normally
                ignored, i.e. optional settings files that could not be
                loaded and keys that cannot be found. The file
                `support/settings.json` should never be found, but will
                still be reported.
        """
        self.verbose = verbose
        default_file = path.join(self.default_settings_path, "settings-default.json")
        self.data = util.load_json(default_file)

        # Template entries are stored as bare names; turn them into paths.
        self.data["templates"] = self._expand_filenames(
            base_path=self.install_base, data=self.data["templates"]
        )

        # Layer user and campaign settings on top of the defaults.
        for settings_dir in self.settings_paths:
            for settings_file in self.settings_files:
                try:
                    self.load_more(path.join(settings_dir, settings_file))
                except OSError as err:
                    # These files are optional, so errors are only
                    # surfaced when explicitly requested.
                    if self.verbose:
                        util.error(err.strerror, err.filename)
Example #5
1
 def _generate_xml(self):
     """Compute the device-description XML path, creating the tmp dir.

     The filename is derived from the friendly name (spaces stripped)
     and the full path is stored in ``self._xml_filepath``.
     """
     name = "%s-root-device.xml" % self.friendly_name
     self.xml_filename = name.replace(" ", "")
     tmp_dir = path.join(config.manager.brisa_home, "tmp_xml")
     if not path.exists(tmp_dir):
         mkdir(tmp_dir)
     self._xml_filepath = path.join(tmp_dir, self.xml_filename)
Example #6
1
def test_get_start_address():
    """Assemble a sample shellcode and verify get_start_address output."""
    stdout, stderr = "", ""

    tmp_dir = mkdtemp()
    asm_fp = join(tmp_dir, "shellcode.asm")
    exe_fp = join(tmp_dir, "shellcode.exe")

    secret_fp = "/tmp/secret"
    os.system('echo "%s" > %s' % (SECRET_STR, secret_fp))

    # Pick the sample matching the running kernel.
    kernel = ShellNoob.get_kernel()
    samples = {
        "Linux": "samples/x86-linux/open-read-write.asm",
        "FreeBSD": "samples/x86-freebsd/open-read-write.asm",
    }
    if kernel not in samples:
        raise Exception("testing on kernel %s not supported" % kernel)
    shutil.copyfile(join(dirname(__file__), samples[kernel]), asm_fp)

    # Convert the assembly into an executable.
    _out, _err, _val = run_with_args("%s --to-exe" % asm_fp)
    stdout += _out
    stderr += _err
    assert _val == 0

    snoob = ShellNoob()
    start_addr = snoob.get_start_address(exe_fp)
    assert re.match("0x[0-9a-f]+", start_addr)

    shutil.rmtree(tmp_dir)
    os.unlink(secret_fp)
    return stdout, stderr, 0
 def download_file(url, save_path, file_name=None, try_time=3, timeout=60):
     """Download ``url`` into ``save_path`` using the ``wget`` module.

     :param url: URL to fetch.
     :param save_path: directory in which to store the file.
     :param file_name: optional explicit file name; when omitted the name
         is derived from the last path component of ``url`` with any
         query string stripped.
     :param try_time: number of attempts before giving up on timeouts.
     :param timeout: socket timeout in seconds applied to the download.
     :return: None
     """
     if file_name is not None:
         save_file_full_name = path.join(save_path, file_name)
     else:
         # Strip any "?query" suffix so the basename is a clean file name.
         save_file_full_name = path.join(save_path, path.basename(url).split("?")[0])
     socket.setdefaulttimeout(timeout)
     # Retry on socket timeouts only; any other error propagates.
     for _ in range(try_time):
         try:
             wget.download(url, save_file_full_name)
             break
         except socket.timeout:
             continue
Example #8
1
    def build_status(self, args):
        """Print bootstraps and recipes that appear to be already built.

        Scans ``bootstrap_builds`` and ``other_builds`` under the
        context's build directory and pretty-prints their contents with
        ANSI colours.
        """
        bootstrap_dir = join(self.ctx.build_dir, "bootstrap_builds")
        print(
            "{Style.BRIGHT}Bootstraps whose core components are probably "
            "already built:{Style.RESET_ALL}".format(Style=Out_Style)
        )
        for entry in os.listdir(bootstrap_dir):
            print(
                "    {Fore.GREEN}{Style.BRIGHT}{filen}{Style.RESET_ALL}".format(
                    filen=entry, Fore=Out_Fore, Style=Out_Style
                )
            )

        print("{Style.BRIGHT}Recipes that are probably already built:{Style.RESET_ALL}".format(Style=Out_Style))
        other_builds = join(self.ctx.build_dir, "other_builds")
        if exists(other_builds):
            for entry in sorted(os.listdir(other_builds)):
                # Directory names look like "<name>-<dep1>-<dep2>...".
                parts = entry.split("-")
                name, dependencies = parts[0], parts[1:]
                recipe_str = "    {Style.BRIGHT}{Fore.GREEN}{name}{Style.RESET_ALL}".format(
                    Style=Out_Style, name=name, Fore=Out_Fore
                )
                if dependencies:
                    recipe_str += (" ({Fore.BLUE}with " + ", ".join(dependencies) + "{Fore.RESET})").format(
                        Fore=Out_Fore
                    )
                recipe_str += "{Style.RESET_ALL}".format(Style=Out_Style)
                print(recipe_str)
def single_run(X, y, estimator, train, test, estimator_idx, split_idx, output_dir=None):
    """Fit ``estimator`` on one train/test split and return the RMSE.

    When ``output_dir`` is given, a per-split debug folder is created,
    the estimator is configured to write into it, and the score plus a
    dump of the fitted estimator are stored there.
    """
    X_train, y_train = X[train], y[train]
    X_test, y_test = X[test], y[test]

    debug_folder = None
    if output_dir is not None:
        debug_folder = join(output_dir, "split_{}_est_{}".format(split_idx, estimator_idx))
        if not os.path.exists(debug_folder):
            os.makedirs(debug_folder)
        estimator.set_params(debug_folder=debug_folder)
        estimator.fit(X_train, y_train, probe_list=[(X_test, y_test)])
    else:
        estimator.fit(X_train, y_train)

    y_hat = estimator.predict(X_test)
    score = np.sqrt(mean_squared_error(y_hat, y_test))
    print("RMSE %s: %.3f" % (estimator, score))

    # Persist the score and the fitted estimator next to the debug output.
    if debug_folder is not None:
        with open(join(debug_folder, "score"), "w+") as f:
            f.write("score : %.4f" % score)
        dump(estimator, join(debug_folder, "estimator"), compress=9)

    return score
Example #10
0
def configuration(parent_package="", top_path=None):
    """Build the numpy.distutils configuration for the ``utils`` package.

    Declares the ``sparsetools`` subpackage and the C extensions, wiring
    in CBLAS flags discovered via ``get_info('blas_opt')`` with a
    fallback to the local ``cblas`` library when no optimised BLAS (or
    only a macro-less ATLAS) is available.
    """
    import numpy
    from numpy.distutils.misc_util import Configuration

    config = Configuration("utils", parent_package, top_path)

    config.add_subpackage("sparsetools")

    # cd fast needs CBLAS
    blas_info = get_info("blas_opt", 0)
    if (not blas_info) or (("NO_ATLAS_INFO", 1) in blas_info.get("define_macros", [])):
        # No usable optimised BLAS: fall back to the local cblas library
        # and drop whatever libraries the probe may have reported.
        cblas_libs = ["cblas"]
        blas_info.pop("libraries", None)
    else:
        cblas_libs = blas_info.pop("libraries", [])

    config.add_extension("arraybuilder", sources=["arraybuilder.c"])

    # NOTE(review): blas_info.pop("include_dirs", []) is appended as a
    # nested list inside include_dirs; numpy.distutils appears to
    # tolerate this, but confirm it is flattened as intended.
    config.add_extension(
        "arrayfuncs",
        sources=["arrayfuncs.c"],
        depends=[join("src", "cholesky_delete.c")],
        libraries=cblas_libs,
        include_dirs=[join("..", "src", "cblas"), numpy.get_include(), blas_info.pop("include_dirs", [])],
        extra_compile_args=blas_info.pop("extra_compile_args", []),
        **blas_info
    )

    config.add_extension("graph_shortest_path", sources=["graph_shortest_path.c"], include_dirs=[numpy.get_include()])

    return config
Example #11
0
    def ensure_ssh_config(self, user, hosts):
        """
        Ensures that the specified ssh host options are present on the remote server for the specific user - the global config file isn't affected.

        :param user: User to set SSH configuration options for.
        :type user: :class:`str`
        :param hosts: A list of host-pattern and options dictionaries.
        :type hosts: :class:`list`

        Example:
        ::

            from provy.core import Role
            from provy.more.debian import SSHRole

            class MySampleRole(Role):
                def provision(self):
                    with self.using(SSHRole) as role:
                        role.ensure_ssh_config(user='someuser', hosts=[{'pattern': 'example.com', 'options': ['StrictHostKeyChecking no']}])

        """
        home_dir = "/home/%s" % user
        config_path = join(home_dir, ".ssh", "config")
        # Render the per-user config from the template and push it to the
        # server, fixing ownership so the user's ssh client can read it.
        rendered = self.render("ssh.config.template", options=hosts)
        temp_file = self.write_to_temp_file(rendered)
        updated = self.update_file(temp_file, config_path, sudo=True, owner=user)
        # ssh refuses config files with loose permissions.
        self.execute("chmod 600 " + config_path, user=user)

        if updated:
            self.log("SSH config file generated on server")
Example #12
0
def main():
    """Benchmark token signing/verification and CMS token processing.

    Runs 5000 iterations of each operation against the bundled SSL test
    keys and prints the elapsed wall-clock time for both phases.
    (Python 2 code: uses print statements.)
    """
    from utils import repeat_sign_and_verify_token

    cur_dir = path.dirname(__file__)
    start_time = datetime.datetime.now()
    repeat_sign_and_verify_token(
        TOKEN_DATA,
        path.join(cur_dir, "ssl", "keystone_signing_key.pem"),
        path.join(cur_dir, "ssl", "keystone_signing_cert.pem"),
        5000,
    )
    end_time = datetime.datetime.now()
    print "Time used: {0}".format(end_time - start_time)

    logger = logging.getLogger()
    start_time = datetime.datetime.now()
    # NOTE(review): repeat_keystone_cms_token is not imported above —
    # presumably it comes from a module-level import elsewhere; confirm,
    # otherwise this raises NameError.
    repeat_keystone_cms_token(
        TOKEN_DATA,
        path.join(cur_dir, "ssl", "keystone_signing_key.pem"),
        path.join(cur_dir, "ssl", "keystone_signing_cert.pem"),
        path.join(cur_dir, "ssl", "ca.pem"),
        5000,
        logger,
    )
    end_time = datetime.datetime.now()
    print "Time used: {0}".format(end_time - start_time)
Example #13
0
def get_includes():
    """Return the directories to add to the include path when linking
    against pyzmq with cython."""
    from os.path import join, dirname, abspath, pardir

    here = dirname(__file__)
    parent = abspath(join(here, pardir))
    includes = [parent]
    for subdir in ("utils",):
        includes.append(join(parent, here, subdir))
    return includes
def processFiles(files_to_proc, in_dir, out_dir):
    """Generate augmented training images for every file in ``files_to_proc``.

    For each input image this writes the resized YUV image, its mirror,
    and five random variations (translation or scale) of both into
    ``out_dir`` (created if missing). (Python 2 code: print statement.)
    """
    if not exists(out_dir):
        makedirs(out_dir)

    for fname in files_to_proc:
        src_img = cv2.imread(join(in_dir, fname))
        yuv_img = cvtRGB2YUV(src_img)

        # Generate 126x78 images with context ratio 1.4
        resized_img = resizeImage(yuv_img)
        saveImage(resized_img, out_dir, fname)

        # Mirror along the horizontal axis
        flipped_img = flipImage(resized_img)
        saveImage(flipped_img, out_dir, generateFileName(fname, "f"))

        # 5 random variations: translations (in range [-2, 2]) and scale
        # (in range [0.95, 1.05]). Each iteration picks one transform kind
        # at random and applies it — with independently drawn parameters —
        # to both the resized image and its mirror.
        for i in range(0, 5):
            action = rnd.random()
            if action > 0.5:
                translated_img = translateImage(resized_img, rnd.uniform(-2, 2), rnd.uniform(-2, 2))
                translated_img_f = translateImage(flipped_img, rnd.uniform(-2, 2), rnd.uniform(-2, 2))
                saveImage(translated_img, out_dir, generateFileName(fname, "t" + str(i)))
                saveImage(translated_img_f, out_dir, generateFileName(fname, "tf" + str(i)))
            else:
                scaled_img = scaleImageInFizedSize(resized_img, rnd.uniform(0.95, 1.05), rnd.uniform(0.95, 1.05))
                scaled_img_f = scaleImageInFizedSize(flipped_img, rnd.uniform(0.95, 1.05), rnd.uniform(0.95, 1.05))
                saveImage(scaled_img, out_dir, generateFileName(fname, "s" + str(i)))
                saveImage(scaled_img_f, out_dir, generateFileName(fname, "sf" + str(i)))

        print "Processed image: " + join(in_dir, fname)
Example #15
0
def load_movielens(ratings=True, movie_genres=True, movie_actors=True):
    """Load the bundled MovieLens dataset from gzipped CSV files.

    :param ratings: load the user -> movie -> rating nested mapping.
    :param movie_genres: load movie id -> list of genre names.
    :param movie_actors: load movie id -> list of actor names.
    :returns: tuple ``(ratings_data, movie_genres_data,
        movie_actors_data)``; any section not requested is ``None``.
    """
    module_path = join(dirname(__file__), "data", "movielens")

    ratings_data = None
    if ratings:
        ratings_data = defaultdict(dict)
        with gzip.open(join(module_path, "ratings.csv.gz"), "rt", encoding="utf-8") as f:
            f.readline()  # skip header row
            # Use csv.reader for consistency with the other sections; the
            # previous manual split(",") would break on quoted fields.
            for line in csv.reader(f):
                ratings_data[int(line[0])][int(line[1])] = float(line[2])

    movie_genres_data = None
    if movie_genres:
        movie_genres_data = {}
        with gzip.open(join(module_path, "movies.csv.gz"), "rt", encoding="utf-8") as f:
            f.readline()  # skip header row
            for line in csv.reader(f):
                # Column 2 holds a "|"-separated genre list.
                movie_genres_data[int(line[0])] = line[2].split("|")

    movie_actors_data = None
    if movie_actors:
        movie_actors_data = {}
        with gzip.open(join(module_path, "actors.csv.gz"), "rt", encoding="utf-8") as f:
            f.readline()  # skip header row
            for line in csv.reader(f):
                # Column 2 holds a "|"-separated actor list.
                movie_actors_data[int(line[0])] = line[2].split("|")

    return ratings_data, movie_genres_data, movie_actors_data
    def tst_from_args(self):
        from os.path import join
        from glob import glob

        # Get all the filenames
        filenames = [
            join(self.path, "experiment_test_data", "experiment_1.json"),
            join(self.path, "experiment_test_data", "experiment_2.json"),
            join(self.path, "experiment_test_data", "experiment_3.json"),
            join(self.path, "experiment_test_data", "experiment_4.json"),
        ]

        # Get the experiments from a list of filenames
        experiments = ExperimentListFactory.from_args(filenames)

        # Have 4 experiment
        assert len(experiments) == 4
        for i in range(4):
            assert experiments[i].imageset is not None
            assert experiments[i].beam is not None
            assert experiments[i].detector is not None
            assert experiments[i].goniometer is not None
            assert experiments[i].scan is not None

        # Test passed
        print "OK"
Example #17
0
def upload_path(instance, filename):
    """Build the storage path for an attachment upload.

    Enforces that a revised attachment keeps its original file
    extension, substitutes the article id into the configured upload
    path, optionally obscures the directory with a random md5 segment,
    and appends ".upload" when configured.

    :raises IllegalFileExtension: when the new extension differs from
        the attachment's original filename extension.
    """
    from os import path

    extension = extension_allowed(filename)

    # Has to match original extension filename
    if instance.id and instance.attachment and instance.attachment.original_filename:
        original_extension = instance.attachment.original_filename.split(".")[-1]
        if not extension.lower() == original_extension:
            raise IllegalFileExtension(
                "File extension has to be '%s', not '%s'." % (original_extension, extension.lower())
            )
    elif instance.attachment:
        # First upload: remember the original name for future checks.
        instance.attachment.original_filename = filename
    upload_path = settings.UPLOAD_PATH
    upload_path = upload_path.replace("%aid", str(instance.attachment.article.id))
    if settings.UPLOAD_PATH_OBSCURIFY:
        import random, hashlib

        # NOTE(review): md5(str(...)) requires bytes on Python 3 — this
        # only works on Python 2 as written; confirm target version.
        # NOTE(review): `random` is not cryptographically secure; if the
        # obscured path is meant to be unguessable, consider a stronger
        # source of randomness.
        m = hashlib.md5(str(random.randint(0, 100000000000000)))
        upload_path = path.join(upload_path, m.hexdigest())

    if settings.APPEND_EXTENSION:
        filename += ".upload"
    return path.join(upload_path, filename)
Example #18
0
def make_queues(config):
    """Expand the queue configuration into concrete queue definitions.

    For every configured queue this fills in the incoming/outgoing
    directories, normalises the retry delays, and creates one derived
    ``<name>_retryN`` queue per retry stage whose incoming directory is
    the previous stage's failure directory.

    :param config: dict with "base_directory" and a "queues" mapping of
        queue name -> options (each must include "retries", "minprocs",
        "retry_minprocs" and "retry_maxprocs").
    :returns: dict of queue name -> fully populated queue configuration.
    """
    queues = dict(config["queues"])
    # Iterate over a snapshot: the loop inserts the retry queues into
    # `queues`, and mutating a dict during .items() iteration raises
    # RuntimeError on Python 3 (the old comment about iteritems() only
    # covered Python 2).
    for queue, qconf in list(queues.items()):
        queue_dir = path.join(config["base_directory"], queue)
        qconf["incoming"] = path.join(queue_dir, "in")
        qconf["outgoing"] = path.join(queue_dir, "out")
        # Make sure retries are sane, de-duplicated and in ascending order
        qconf["retries"] = sorted(set(qconf["retries"]))
        fail_dirs = [path.join(queue_dir, d) for d in ["retry%d" % t for t in qconf["retries"]] + ["failed"]]
        qconf["failure"] = fail_dirs[0]
        # Start with the minimum number of processes
        qconf["nprocs"] = qconf["minprocs"]
        # Initialize list for active processes
        qconf["procs"] = []

        # Set up the chain of retries: each stage reads from the previous
        # stage's failure directory and fails over into the next one.
        delay_prev = 0
        for delay, in_dir, fail_dir in zip(qconf["retries"], fail_dirs, fail_dirs[1:]):
            retry_queue = queue + "_retry%d" % delay
            queues[retry_queue] = qconf.copy()
            queues[retry_queue].update(
                {
                    "incoming": in_dir,
                    "failure": fail_dir,
                    "minprocs": qconf["retry_minprocs"],
                    "maxprocs": qconf["retry_maxprocs"],
                    "nprocs": qconf["retry_minprocs"],
                    "procs": [],
                    # The filter is the incremental delay between stages.
                    "filter": delay - delay_prev,
                }
            )
            delay_prev = delay
    return queues
Example #19
0
def get_random_resource(type, prefix=None):
    """Return a random resource of a given type.

    Hidden entries (dot-prefixed) are skipped; when ``prefix`` is given,
    only entries starting with it are considered.
    """
    resource_dir = join(resource_path, type)
    candidates = [
        name
        for name in listdir(resource_dir)
        if not name.startswith(".") and (prefix is None or name.startswith(prefix))
    ]
    return join(resource_dir, random.choice(candidates))
Example #20
0
def setup():
    """Create a master repo on a test branch, clone it, and produce
    divergent modifications in both the master and the clone."""
    master_path, master = init_master(test_name)

    # Put master on a dedicated branch for this test.
    master.git.checkout(b=test_name)

    # Clone the master into a working test repository.
    clone_path = join(basepath, test_name)
    master.clone(clone_path, b=test_name)
    repo = Repo(clone_path, odbt=GitCmdObjectDB)

    assert repo.working_dir == clone_path

    # First change on the master side.
    update_file(master, test_name)

    # Commit a conflicting change in the clone.
    contents = "completely changed!"
    repo_file = join(clone_path, testfile_name)
    write_file(repo_file, contents)
    repo.index.add([repo_file])
    repo.index.commit(test_name)

    # Second change on the master side.
    update_file(master, test_name)
Example #21
0
 def setUp(self):
     """Build the static external-test binary used by the test case.

     Skipped on OS X, where a static build is not possible.
     """
     super(ExternalTestCase, self).setUp()
     if uname()[0] == "Darwin":
         self.skipTest("Cannot build static test_external on OS X")
     else:
         home = self.settings["DISCO_HOME"]
         self.binary = path.join(home, "tests", "test_external")
         # Compile the C test harness statically against disco.c.
         gcc_cmd = [
             "gcc",
             "-g",
             "-O3",
             "-static",
             "-Wall",
             "-I",
             path.join(home, "ext"),
             "-o",
             self.binary,
             path.join(home, "ext", "disco.c"),
             path.join(home, "tests", "test_external.c"),
             "-l",
             "Judy",
         ]
         check_call(gcc_cmd, stderr=STDOUT)
Example #22
0
def iter_dir(loc):
    """Yield paths of entries under ``loc``, descending exactly one level
    into subdirectories.

    Files directly in ``loc`` are yielded as-is; for each directory the
    paths of its immediate children are yielded instead (the directory
    itself is not).
    """
    for entry in os.listdir(loc):
        entry_path = path.join(loc, entry)
        if not path.isdir(entry_path):
            yield entry_path
        else:
            for child in os.listdir(entry_path):
                yield path.join(entry_path, child)
Example #23
0
    def createCEDARjson_folder(self, work_dir, json_dir, inv_identifier):
        """Convert every ISA dataset folder under ``work_dir`` to CEDAR JSON.

        Each immediate subdirectory of ``work_dir`` is treated as one ISA
        dataset and passed to :meth:`createCEDARjson`.
        """
        print("Convert ISA datasets in folder ", work_dir)
        base_path = os.path.abspath(work_dir)
        for entry in listdir(base_path):
            if isdir(join(base_path, entry)):
                self.createCEDARjson(CEDAR_SCHEMA_PATH, join(base_path, entry), json_dir, inv_identifier)
Example #24
0
    def __init__(self):
        """Populate the installer's configuration constants.

        All values are stored on ``self.__c``: version strings, log and
        mount locations, installation paths and repository URIs used
        throughout the installation.
        """

        self.__c.pardus_version = "Pardus 1.0"

        self.__c.log_file = "/tmp/install.log"

        # directories
        self.__c.data_dir = "/usr/share/yali"

        self.__c.mnt_dir = "/mnt"
        # new system will be installed directly into this target directory
        self.__c.target_dir = join(self.__c.mnt_dir, "target")
        # packages (and maybe others) will be in this source (cdrom) directory
        self.__c.source_dir = join(self.__c.mnt_dir, "cdrom")

        # swap file path (created inside the target root)
        self.__c.swap_file_name = ".swap"
        self.__c.swap_file_path = join(self.__c.target_dir, self.__c.swap_file_name)

        # user faces
        self.__c.user_faces_dir = join(self.__c.data_dir, "user_faces")

        # pisi repository on the installation media
        self.__c.repo_name = "pardus-devel-cd"
        self.__c.repo_uri = join(self.__c.source_dir, "repo/pisi-index.xml")

        # before release
        # pardus-devel repository (remote, used before release)
        self.__c.devel_repo_name = "pardus-devel"
        self.__c.devel_repo_uri = "http://paketler.uludag.org.tr/pardus-devel/pisi-index.xml"

        # min root partition size — presumably in MB; confirm unit
        self.__c.min_root_size = 3500
Example #25
0
File: setup.py Project: Gagaro/kivy
def determine_base_flags():
    """Compute the base compiler/linker flags shared by all extensions.

    Adds the iOS sysroot when building for iOS, LOCALBASE paths on
    FreeBSD, and a frameworks sysroot on darwin (queried via
    xcode-select on newer kernels).

    :returns: dict with "libraries", "include_dirs", "extra_link_args"
        and "extra_compile_args" lists.
    :raises Exception: when building for iOS without IOSSDKROOT/SDKROOT.
    """
    flags = {"libraries": [], "include_dirs": [], "extra_link_args": [], "extra_compile_args": []}
    if c_options["use_ios"]:
        # IOSSDKROOT takes precedence over the generic SDKROOT.
        sysroot = environ.get("IOSSDKROOT", environ.get("SDKROOT"))
        if not sysroot:
            raise Exception("IOSSDKROOT is not set")
        flags["include_dirs"] += [sysroot]
        flags["extra_compile_args"] += ["-isysroot", sysroot]
        flags["extra_link_args"] += ["-isysroot", sysroot]
    elif platform.startswith("freebsd"):
        flags["include_dirs"] += [join(environ.get("LOCALBASE", "/usr/local"), "include")]
        flags["extra_link_args"] += ["-L", join(environ.get("LOCALBASE", "/usr/local"), "lib")]
    elif platform == "darwin":
        v = os.uname()
        # NOTE(review): this is a lexicographic string comparison of the
        # kernel release; e.g. "9.8.0" >= "13.0.0" is True, so older
        # kernels can take the new-Xcode branch — confirm intended.
        if v[2] >= "13.0.0":
            # use xcode-select to search on the right Xcode path
            # XXX use the best SDK available instead of a specific one
            import platform as _platform

            xcode_dev = getoutput("xcode-select -p").splitlines()[0]
            sdk_mac_ver = ".".join(_platform.mac_ver()[0].split(".")[:2])
            print("Xcode detected at {}, and using OS X{} sdk".format(xcode_dev, sdk_mac_ver))
            # NOTE(review): .decode("utf-8") assumes getoutput returned
            # bytes (Python 2); on Python 3 getoutput returns str and
            # this raises AttributeError — confirm target version.
            sysroot = join(
                xcode_dev.decode("utf-8"),
                "Platforms/MacOSX.platform/Developer/SDKs",
                "MacOSX{}.sdk".format(sdk_mac_ver),
                "System/Library/Frameworks",
            )
        else:
            sysroot = "/System/Library/Frameworks/" "ApplicationServices.framework/Frameworks"
        flags["extra_compile_args"] += ["-F%s" % sysroot]
        flags["extra_link_args"] += ["-F%s" % sysroot]
    return flags
Example #26
0
def cx1():
    """
    test case for comparison between pragma simd and pragma ivdep on cx1
    """
    domain_size = (1.0, 1.0, 1.0)
    grid_size = (200, 200, 200)
    dt = 0.001
    tmax = 5.0
    # Options shared by both generated variants.
    common = dict(output_vts=False, o_converge=False, omp=True)
    # ivdep-only variant
    eigenwave3d(
        domain_size,
        grid_size,
        dt,
        tmax,
        simd=False,
        ivdep=True,
        filename=path.join(_test_dir, "eigenwave3d_ivdep.cpp"),
        **common
    )
    # simd-only variant
    eigenwave3d(
        domain_size,
        grid_size,
        dt,
        tmax,
        simd=True,
        ivdep=False,
        filename=path.join(_test_dir, "eigenwave3d_simd.cpp"),
        **common
    )
    def __build_worknote_list(self, directory, worknote_list, worknotes):
        """Scan ``directory`` for worknote folders and build them.

        A subfolder counts as a worknote when it contains a
        ``notedata.worknote`` file. Each discovered worknote is loaded,
        appended to ``worknote_list`` as ``[dirname, title, date]``,
        stored in ``worknotes`` keyed by directory name, and rendered to
        both HTML and Beamer PDF. Both ``worknote_list`` and
        ``worknotes`` are mutated in place. (Python 2: print statements.)
        """
        from worknote import Worknote
        from os.path import isdir, join, exists
        from os import listdir

        print 'Processing directory "{:s}"...'.format(directory)
        # Only subdirectories containing a notedata.worknote marker file.
        for wn_workdir in [
            name
            for name in listdir(directory)
            if isdir(join(directory, name)) and exists(join(join(directory, name), "notedata.worknote"))
        ]:
            print "Worknote:", wn_workdir
            worknotes[wn_workdir] = Worknote(join(directory, wn_workdir))
            # NOTE(review): `title` and `path` are computed here but never
            # used afterwards — candidates for removal.
            title = worknotes[wn_workdir].metadata.metadata["title"]
            path = join(directory, wn_workdir)
            worknote_list.append(
                [
                    wn_workdir,
                    worknotes[wn_workdir].metadata.metadata["title"],
                    worknotes[wn_workdir].metadata.metadata["date"],
                ]
            )
            print "\tTitle:", worknotes[wn_workdir].metadata.metadata["title"]
            print "\tBuilding HTML..."
            worknotes[wn_workdir].build("HTML")
            print "\tBuilding Beamer PDF..."
            worknotes[wn_workdir].build("Beamer")
Example #28
0
    def apk(self, args):
        """Create an APK using the given distribution.

        Forwards ``args`` to the distribution's build.py (after
        absolutising any --dir/--private paths), runs ``ant debug`` in
        the dist directory, and copies the resulting debug APK into the
        current directory.

        :raises ValueError: when no built APK can be found afterwards.
        """

        # AND: Need to add a parser here for any extra options
        # parser = argparse.ArgumentParser(
        #     description='Build an APK')
        # args = parser.parse_args(args)

        # NOTE(review): `ctx` is assigned but not used in this method.
        ctx = self.ctx
        dist = self._dist

        # Manually fixing these arguments at the string stage is
        # unsatisfactory and should probably be changed somehow, but
        # we can't leave it until later as the build.py scripts assume
        # they are in the current directory.
        for i, arg in enumerate(args[:-1]):
            if arg in ("--dir", "--private"):
                args[i + 1] = realpath(expanduser(args[i + 1]))

        # Execute the distribution's build.py with the fixed-up args,
        # then run ant from inside the dist directory.
        build = imp.load_source("build", join(dist.dist_dir, "build.py"))
        with current_directory(dist.dist_dir):
            build.parse_args(args)
            shprint(sh.ant, "debug", _tail=20, _critical=True)

        # AND: This is very crude, needs improving. Also only works
        # for debug for now.
        info_main("# Copying APK to current directory")
        apks = glob.glob(join(dist.dist_dir, "bin", "*-*-debug.apk"))
        if len(apks) == 0:
            raise ValueError("Couldn't find the built APK")
        if len(apks) > 1:
            info("More than one built APK found...guessing you " "just built {}".format(apks[-1]))
        shprint(sh.cp, apks[-1], "./")
def plot_model(settings):
    """Render the model FITS image from a simulation directory and save
    it as ``model.png`` alongside the data.

    ``settings`` must provide "path" (the simulation directory) and a
    "plotting" dict whose "model" entry names the FITS file to plot.
    """
    sim_dir = settings["path"]
    plotting = settings["plotting"]
    fig = plt.figure(figsize=(6.5, 5.0))
    hdu, data = get_hdu(join(sim_dir, plotting["model"]))
    f = aplpy.FITSFigure(hdu, figure=fig)
    # Linear colour scale spanning the full data range.
    f.show_colorscale(vmin=data.min(), vmax=data.max(), stretch="linear", cmap="afmhot")
    f.add_colorbar()
    f.colorbar.set_width(0.1)
    f.colorbar.set_axis_label_text(r"mJy / beam")
    f.colorbar.set_axis_label_font(size="small")
    f.tick_labels.set_font(size="x-small")
    f.axis_labels.set_font(size="x-small")
    # Faint dashed white grid over the image.
    f.add_grid()
    f.grid.set_color("white")
    f.grid.set_linestyle("--")
    f.grid.set_alpha(0.3)
    f.set_title("model", fontsize="small", weight="bold")
    plt.savefig(join(sim_dir, "model.png"), dpi=300)
Example #30
0
def test_evoked_resample():
    """Test for resampling of evoked data
    """
    tempdir = _TempDir()
    out_fname = op.join(tempdir, "evoked-ave.fif")

    # Upsample by 2x, write to disk, and read back.
    ave = read_evokeds(fname, 0)
    sfreq_normal = ave.info["sfreq"]
    ave.resample(2 * sfreq_normal)
    write_evokeds(out_fname, ave)
    ave_up = read_evokeds(out_fname, 0)

    # Reload the untouched original for comparison.
    ave_normal = read_evokeds(fname, 0)

    # Downsample the upsampled copy back to the original rate.
    ave_new = read_evokeds(out_fname, 0)
    ave_new.resample(sfreq_normal)

    assert_array_almost_equal(ave_normal.data, ave_new.data, 2)
    assert_array_almost_equal(ave_normal.times, ave_new.times)
    for attr in ("nave", "_aspect_kind", "kind", "last", "first"):
        assert_equal(getattr(ave_normal, attr), getattr(ave_new, attr))

    # for the above to work, the upsampling just about had to, but
    # we'll add a couple extra checks anyway
    assert_true(len(ave_up.times) == 2 * len(ave_normal.times))
    assert_true(ave_up.data.shape[1] == 2 * ave_normal.data.shape[1])