Example #1
def _get_params_base_options(param_path):

    # Read parameter file into params object
    params = configparser.ConfigParser()
    try:
        params.read(param_path)
    except configparser.Error:
        raise ValueError("Parameter file is invalid")

    # Setup param_dir and results_dir, get run_names
    param_dir = os.path.abspath(os.path.dirname(param_path))
    results_dir = os.path.join(param_dir, "results")

    if os.path.isdir(results_dir):
        shutil.rmtree(results_dir)
    os.makedirs(results_dir)

    run_names = params.sections()

    # Check there's at least one run
    if not run_names:
        raise NameError("Parameters file must contain at least one run")

    # Create options dict
    base_options = {}
    base_options["param_dir"] = param_dir
    base_options["results_dir"] = results_dir
    base_options["run_names"] = run_names

    return params, base_options
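A minimal usage sketch for the helper above, assuming a hypothetical params.ini next to the caller; the file name and the [run1] section are illustrative only, not part of the original module.

# Hedged usage sketch: write a tiny parameter file, then call the helper.
import configparser
import os
import shutil

param_path = "params.ini"                     # hypothetical file name
with open(param_path, "w") as fh:
    fh.write("[run1]\nsteps = 10\n")          # at least one run section is required

params, base_options = _get_params_base_options(param_path)
print(base_options["run_names"])              # ['run1']
print(base_options["results_dir"])            # <param_dir>/results, freshly created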
Example #2
 def put(self, eggfile, project, version):
     eggpath = self._eggpath(project, version)
     eggdir = path.dirname(eggpath)
     if not path.exists(eggdir):
         makedirs(eggdir)
     with open(eggpath, "wb") as f:
         copyfileobj(eggfile, f)
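The exists()/makedirs() pair above can race if two callers create the same directory at once; on Python 3.2+ the check folds into one call. A sketch of that variant, not the project's actual code (_eggpath is assumed to exist on the class):

from os import path, makedirs
from shutil import copyfileobj

def put(self, eggfile, project, version):
    # Hedged variant: exist_ok=True (Python 3.2+) tolerates a directory that
    # already exists or that a concurrent call just created.
    eggpath = self._eggpath(project, version)
    makedirs(path.dirname(eggpath), exist_ok=True)
    with open(eggpath, "wb") as f:
        copyfileobj(eggfile, f)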
Example #3
def get_build_config(args):
    # Use the platform defaults to populate the values
    if args["toolchain"] is None:
        args["toolchain"] = br_platform.supported["toolchain"][0]

    if args["rtos"] is None:
        args["rtos"] = br_platform.supported["rtos"][0]

    if args["outputDir"] is None:
        pathStr = "%s/output/%s_%s_%s" % (os.getcwd(), args["boardName"], args["rtos"], args["toolchain"])
        args["outputDir"] = os.path.realpath(pathStr)
    else:
        args["outputDir"] = os.path.realpath(args["outputDir"])

    if args["logFile"] is None:
        logFile = "build_%s.log" % datetime.datetime.now().strftime("%b_%d_%Y_%H%M%S")
        args["logFile"] = os.path.join(args["outputDir"], logFile)
    else:
        args["logFile"] = os.path.realpath(args["logFile"])
        if not os.path.isdir(os.path.dirname(args["logFile"])):
            os.makedirs(os.path.dirname(args["logFile"]))

    if args["imageDest"] is None:
        args["imageDest"] = "%s/images" % args["outputDir"]
    else:
        args["imageDest"] = os.path.realpath(args["imageDest"])

    if args["dlDir"] is None:
        args["dlDir"] = os.path.realpath("%s/dl/%s" % (BR_ROOT, args["platformName"]))
    elif not os.path.isabs(args["dlDir"]):
        args["dlDir"] = os.path.realpath("%s/%s" % (os.getcwd(), args["dlDir"]))

    args["catalogFile"] = os.path.realpath(args["catalogFile"])
Example #4
    def handle(self, app_or_project, name, target=None, **options):
        self.app_or_project = app_or_project
        self.paths_to_remove = []
        self.verbosity = int(options.get("verbosity"))

        # If it's not a valid directory name.
        if not re.search(r"^[_a-zA-Z]\w*$", name):
            # Provide a smart error message, depending on the error.
            if not re.search(r"^[_a-zA-Z]", name):
                message = "make sure the name begins " "with a letter or underscore"
            else:
                message = "use only numbers, letters and underscores"
            raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, message))

        # if some directory is given, make sure it's nicely expanded
        if target is None:
            target = os.getcwd()
        else:
            target = path.expanduser(target)

        top_dir = path.join(target, name)
        try:
            os.makedirs(top_dir)
        except OSError as e:
            raise CommandError(e)
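On Python 3 roughly the same validity check can lean on str.isidentifier(); a loose sketch (it also accepts non-ASCII identifiers, which the regex above does not). The validate_name helper is hypothetical, and CommandError is assumed to be Django's:

# Hedged sketch: a Python 3 flavour of the name check above.
from django.core.management.base import CommandError

def validate_name(name, app_or_project):
    if not name.isidentifier():
        if not (name[:1].isalpha() or name.startswith("_")):
            hint = "make sure the name begins with a letter or underscore"
        else:
            hint = "use only numbers, letters and underscores"
        raise CommandError("%r is not a valid %s name. Please %s." % (name, app_or_project, hint))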
Example #5
 def compile_html(self, source, dest, is_two_file=True):
     try:
         os.makedirs(os.path.dirname(dest))
     except OSError:
         pass
     cmd = "asciidoc -f html -s -o {0} {1}".format(dest, source)
     os.system(cmd)
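os.system with str.format leaves dest and source unquoted, so paths with spaces or shell metacharacters break the command; a sketch of the same asciidoc call via subprocess with an argument list (no shell involved), keeping the example's flags:

import os
import subprocess

def compile_html(self, source, dest, is_two_file=True):
    # Hedged sketch: same flags as above, but no shell string to mis-quote.
    os.makedirs(os.path.dirname(dest) or ".", exist_ok=True)
    subprocess.check_call(["asciidoc", "-f", "html", "-s", "-o", dest, source])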
Example #6
def Create_addon_from_github(URL, local_repo_folder):
    archive_suffix = "/archive/master.zip"
    print(URL)
    addonname = URL.strip("/").split("/")[-1]
    if not os.path.exists(local_repo_folder + os.sep + addonname):
        print("Making folder for addon in repo: ", addonname)
        os.makedirs(local_repo_folder + os.sep + addonname)
    download_file(URL + archive_suffix, local_repo_folder + os.sep + addonname + os.sep + "master.zip")
    try:
        xml_frm_file, ziptype = zipfilehandler(local_repo_folder + os.sep + addonname + os.sep + "master.zip")
    except Exception as e:
        print("cannot create a zip from githuburl ", URL)
        return
    root = ET.fromstring(xml_frm_file)
    for element in root.iter("addon"):
        addon_name = element.attrib["id"]
        addon_version = element.attrib["version"]
    try:
        currntzip = zipfile.ZipFile(local_repo_folder + os.sep + addonname + os.sep + "master.zip")
        currntzip.extractall(local_repo_folder + os.sep + addonname + os.sep)
        currntzip.close()
        shutil.move(
            local_repo_folder + os.sep + addonname + os.sep + addon_name + "-master",
            local_repo_folder + os.sep + addonname + os.sep + addon_name,
        )
        os.remove(local_repo_folder + os.sep + addonname + os.sep + "master.zip")
        shutil.make_archive(
            local_repo_folder + os.sep + addon_name + os.sep + addon_name + "-" + addon_version,
            "zip",
            local_repo_folder + os.sep + addon_name,
            addon_name,
        )
        shutil.rmtree(local_repo_folder + os.sep + addonname + os.sep + addon_name)
    except Exception as e:
        print("could not save fil ", addonname)
Example #7
    def __enter__(self):
        """Enter context: Create temporary file for writing, copying stat() of original"""
        from gruntle.memebot.exceptions import TrapErrors, TrapError, trapped, reraise

        # make sure the directory exists
        dirname, basename = os.path.split(self.file)
        if not os.path.exists(dirname):
            os.makedirs(dirname)

        # construct temporary file in the same directory as the original
        name, ext = os.path.splitext(basename)
        self.fd, self.temp_file = tempfile.mkstemp(suffix=ext, prefix=".%s-" % name, dir=dirname)

        try:
            with TrapErrors():
                exists = os.path.exists(self.file)
                if self.perms is not None:
                    os.chmod(self.file if exists else self.temp_file, self.perms)
                if exists:
                    shutil.copystat(self.file, self.temp_file)
                    if self.backup:
                        backup_file = self.file + ".bak"
                        if os.path.exists(backup_file):
                            os.remove(backup_file)
                        shutil.copy2(self.file, backup_file)
                self.fp = os.fdopen(self.fd, "w")
        except TrapError as exc:
            with trapped:
                os.close(self.fd)
            if os.path.exists(self.temp_file):
                with trapped:
                    os.remove(self.temp_file)
            self.reset()
            reraise(*exc.args)
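Creating the temp file with mkstemp in the same directory matters here, presumably so the matching __exit__ can rename it over the original in one step. A stripped-down sketch of that pattern on its own; atomic_write is a hypothetical helper, not part of the memebot code:

import os
import tempfile

def atomic_write(path, data):
    # Hedged sketch: write a sibling temp file, then rename it over the target.
    dirname = os.path.dirname(path) or "."
    os.makedirs(dirname, exist_ok=True)
    fd, temp_path = tempfile.mkstemp(dir=dirname)
    try:
        with os.fdopen(fd, "w") as fp:
            fp.write(data)
        os.replace(temp_path, path)   # atomic rename, Python 3.3+
    except BaseException:
        os.remove(temp_path)
        raise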
Example #8
def build_site(project_dir):
    build_dir = os.path.join(project_dir, BUILD_DIR)
    if os.path.exists(build_dir):
        shutil.rmtree(build_dir)
    os.makedirs(build_dir)
    build_pages(project_dir, build_dir)
    build_media(project_dir, build_dir)
Example #9
def _file_lists(load, form):
    """
    Return a dict containing the file lists for files and dirs
    """
    if "env" in load:
        salt.utils.warn_until(
            "Boron",
            "Passing a salt environment should be done using 'saltenv' "
            "not 'env'. This functionality will be removed in Salt Boron.",
        )
        load["saltenv"] = load.pop("env")

    list_cachedir = os.path.join(__opts__["cachedir"], "file_lists/gitfs")
    if not os.path.isdir(list_cachedir):
        try:
            os.makedirs(list_cachedir)
        except os.error:
            log.critical("Unable to make cachedir {0}".format(list_cachedir))
            return []
    list_cache = os.path.join(list_cachedir, "{0}.p".format(load["saltenv"]))
    w_lock = os.path.join(list_cachedir, ".{0}.w".format(load["saltenv"]))
    cache_match, refresh_cache, save_cache = salt.fileserver.check_file_list_cache(__opts__, form, list_cache, w_lock)
    if cache_match is not None:
        return cache_match
    if refresh_cache:
        ret = {}
        ret["files"] = _get_file_list(load)
        ret["dirs"] = _get_dir_list(load)
        if save_cache:
            salt.fileserver.write_file_list_cache(__opts__, ret, list_cache, w_lock)
        return ret.get(form, [])
    # Shouldn't get here, but if we do, this prevents a TypeError
    return []
Example #10
def main():
    """
    need to work with rgenetics composite datatypes
    so in and out are html files with data in extrafiles path
    <command interpreter="python">pbed_to_lped_converter.py '$input1/$input1.metadata.base_name'
    '$output1' '$output1.extra_files_path' '${GALAXY_DATA_INDEX_DIR}/rg/bin/plink'
    </command>
    """
    nparm = 4
    if len(sys.argv) < nparm:
        sys.stderr.write("## %s called with %s - needs %d parameters \n" % (myname, sys.argv, nparm))
        sys.exit(1)
    inpedfilepath = sys.argv[1]
    outhtmlname = sys.argv[2]
    outfilepath = sys.argv[3]
    try:
        os.makedirs(outfilepath)
    except OSError:
        pass
    plink = sys.argv[4]
    rgConv(inpedfilepath, outhtmlname, outfilepath, plink)
    f = open(outhtmlname, "w")
    f.write(galhtmlprefix % prog)
    flist = os.listdir(outfilepath)
    s = "## Rgenetics: http://rgenetics.org Galaxy Tools %s %s" % (prog, timenow())  # becomes info
    print(s)
    f.write("<div>%s\n<ol>" % (s))
    for i, data in enumerate(flist):
        f.write('<li><a href="%s">%s</a></li>\n' % (os.path.split(data)[-1], os.path.split(data)[-1]))
    f.write("</div></body></html>")
    f.close()
Example #11
    def archive(self, bucket, path, compress=False):

        # make root path, if it does not exist
        if not os.path.exists(path):
            os.mkdir(path)

        bckt = self.conn.get_bucket(bucket)
        count = 0

        for item in bckt.list():

            # build local path
            local_path = os.path.join(path, item.key)

            # find local dir and create intermediate dirs
            # if they don't exist
            local_dir = os.path.dirname(local_path)
            if not os.path.exists(local_dir):
                os.makedirs(local_dir)

            if not os.path.isdir(local_path):
                with open(local_path, "wb") as local_file:
                    item.get_contents_to_file(local_file)
                    logging.info("copying %s:%s" % (bucket, item.key))
                    count += 1

        if compress:
            tarpath = "%s.tar.gz" % path
            with tarfile.open(tarpath, "w:gz") as tar:
                tar.add(path, arcname=path.split(os.sep)[-1], recursive=True)
            shutil.rmtree(path)
            logging.info("compressed archive and removed working directory")

        logging.info("archived %d files in %s" % (count, bucket))
Example #12
    def _init_index(self, reset=False):
        index_path = os.path.join(jupyter_data_dir(), "index")

        # clear out old index if requested
        if reset:
            shutil.rmtree(index_path, True)

        # make sure there's a path to store the index data
        if not os.path.exists(index_path):
            os.makedirs(index_path)

        if not exists_in(index_path):
            # create an index with the current schema
            analyzer = ChineseAnalyzer()
            schema = Schema(
                basename=TEXT(stored=True, field_boost=5.0, analyzer=analyzer),
                dirname=ID(stored=True, analyzer=analyzer),
                path=ID(stored=True, unique=True, analyzer=analyzer),
                content=TEXT(stored=False, analyzer=analyzer),
                time=STORED,
            )
            self.ix = create_in(index_path, schema)
        else:
            # open the existing index
            self.ix = open_dir(index_path)

        # build a query parser based on the current schema
        self.query_parser = MultifieldParser(["content", "basename", "dirname"], self.ix.schema)
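A rough usage sketch of the Whoosh index this sets up, as it might look inside another method of the same class (self.ix and self.query_parser as built above); the document values are made up:

# Hedged usage sketch: add one document, then search it back.
writer = self.ix.writer()
writer.add_document(basename="notes", dirname="/tmp", path="/tmp/notes.md",
                    content="makedirs creates intermediate directories", time=1700000000)
writer.commit()

with self.ix.searcher() as searcher:
    results = searcher.search(self.query_parser.parse("makedirs"))
    for hit in results:
        print(hit["path"], hit["time"])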
Example #13
def handle_uploaded_file(directory, file, filename):
    if not os.path.exists(directory):
        os.makedirs(directory)

    with open(os.path.join(directory, filename), "wb+") as destination:
        for chunk in file.chunks():
            destination.write(chunk)
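A hedged sketch of how a Django view might call this helper; the upload directory, the view name, and the "document" form field are placeholders, not taken from the original project:

from django.http import HttpResponse

def upload_view(request):
    # Hedged usage sketch: hand the uploaded file straight to the helper above.
    if request.method == "POST":
        uploaded = request.FILES["document"]
        handle_uploaded_file("/tmp/uploads", uploaded, uploaded.name)
        return HttpResponse("ok")
    return HttpResponse(status=405)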
Example #14
def single_run(X, y, estimator, train, test, estimator_idx, split_idx, output_dir=None):
    X_train = X[train]
    y_train = y[train]
    X_test = X[test]
    y_test = y[test]

    if output_dir is not None:
        debug_folder = join(output_dir, "split_{}_est_{}".format(split_idx, estimator_idx))
        if not os.path.exists(debug_folder):
            os.makedirs(debug_folder)
        estimator.set_params(debug_folder=debug_folder)
        estimator.fit(X_train, y_train, probe_list=[(X_test, y_test)])
        # estimator.fit(X_train, y_train)
    else:
        estimator.fit(X_train, y_train)
    y_hat = estimator.predict(X_test)
    score = np.sqrt(mean_squared_error(y_hat, y_test))
    print("RMSE %s: %.3f" % (estimator, score))

    if output_dir is not None:
        with open(join(debug_folder, "score"), "w+") as f:
            f.write("score : %.4f" % score)
        dump(estimator, join(debug_folder, "estimator"), compress=9)

    return score
Example #15
    def test_getSlaveInfo(self):
        infodir = os.path.join(self.basedir, "info")
        os.makedirs(infodir)
        open(os.path.join(infodir, "admin"), "w").write("testy!")
        open(os.path.join(infodir, "foo"), "w").write("bar")
        open(os.path.join(infodir, "environ"), "w").write("something else")

        d = self.bot.callRemote("getSlaveInfo")

        def check(info):
            self.assertEqual(
                info,
                dict(
                    admin="testy!",
                    foo="bar",
                    environ=os.environ,
                    system=os.name,
                    basedir=self.basedir,
                    slave_commands=self.real_bot.remote_getCommands(),
                    version=self.real_bot.remote_getVersion(),
                ),
            )

        d.addCallback(check)
        return d
Example #16
def generate_apidoc(apidoc_build_path):
    global options

    if options.skip_docs:
        info("Skipping documentation generation.")
        return False
    else:
        info("Module apidoc generation can be skipped using --skip-docs")
    apidoc_path = os.path.join(cwd, "apidoc")
    if not os.path.exists(apidoc_path):
        warn("Skipping apidoc generation. No apidoc folder found at: %s" % apidoc_path)
        return False

    if not os.path.exists(apidoc_build_path):
        os.makedirs(apidoc_build_path)
    ti_root = os.environ.get("TI_ROOT", "").strip()
    if not len(ti_root) > 0:
        warn("Not generating documentation from the apidoc folder. The titanium_mobile repo could not be found.")
        warn(
            "Set the TI_ROOT environment variable to the parent folder where the titanium_mobile repo resides (eg.'export TI_ROOT=/Path')."
        )
        return False
    docgen = os.path.join(ti_root, "titanium_mobile", "apidoc", "docgen.py")
    if not os.path.exists(docgen):
        warn("Not generating documentation from the apidoc folder. Couldn't find docgen.py at: %s" % docgen)
        return False

    info("Generating documentation from the apidoc folder.")
    rc = os.system(
        '"%s" --format=jsca,modulehtml --css=styles.css -o "%s" -e "%s"' % (docgen, apidoc_build_path, apidoc_path)
    )
    if rc != 0:
        die("docgen failed")
    return True
Example #17
    def __init__(self, anAppName, aRecursive=True):
        """ Throws SysSingletonCreateError if such app singleton already exists (app is running), other exceptions for the rest """
        self.__isCreated = False
        self.__mutexPath = "/var/run/%s.pid" % anAppName

        if not os.access(self.__mutexPath, os.F_OK):
            if not os.access("/var/run/", os.F_OK):
                os.makedirs("/var/run/")
            myMutexFile = open(self.__mutexPath, "w")
            myMutexFile.write("%d\n" % os.getpid())
            myMutexFile.close()
            self.__isCreated = True
            return

        myMutexFile = open(self.__mutexPath, "r")
        myPid = int(myMutexFile.readline())
        myMutexFile.close()
        if myPid == os.getpid():
            if aRecursive:
                return
            raise SysSingletonCreateError(anAppName, myPid)

        if isPidExist(myPid):
            raise SysSingletonCreateError(anAppName, myPid)

        myMutexFile = open(self.__mutexPath, "w")
        myMutexFile.write("%d\n" % os.getpid())
        myMutexFile.close()
        self.__isCreated = True
Example #18
    def enumerate_modules(self, show_all=False):
        """
        *Availability: 4.0+*

        Return a dict mapping the names of modules to the location of their
        file.  This searches the regular modules directory and all directories
        specified in the `core.extra` attribute of the `config` object. If two
        modules have the same name, the last one to be found will be returned
        and the rest will be ignored. Modules are found starting in the regular
        directory, followed by `~/.willie/modules`, and then through the extra
        directories in the order that they are specified.

        If `show_all` is given as `True`, the `enable` and `exclude`
        configuration options will be ignored, and all modules will be shown
        (though duplicates will still be ignored as above).
        """
        modules = {}

        # First, add modules from the regular modules directory
        this_dir = os.path.dirname(os.path.abspath(__file__))
        modules_dir = os.path.join(this_dir, "modules")
        for fn in os.listdir(modules_dir):
            if fn.endswith(".py") and not fn.startswith("_"):
                modules[fn[:-3]] = os.path.join(modules_dir, fn)
        # Next, look in ~/.willie/modules
        home_modules_dir = os.path.join(os.path.expanduser("~"), ".willie", "modules")
        if not os.path.isdir(home_modules_dir):
            os.makedirs(home_modules_dir)
        for fn in os.listdir(home_modules_dir):
            if fn.endswith(".py") and not fn.startswith("_"):
                modules[fn[:-3]] = os.path.join(home_modules_dir, fn)

        # Last, look at all the extra directories. (get_list returns [] if
        # there are none or the option isn't defined, so it'll just skip this
        # bit)
        for directory in self.core.get_list("extra"):
            for fn in os.listdir(directory):
                if fn.endswith(".py") and not fn.startswith("_"):
                    modules[fn[:-3]] = os.path.join(directory, fn)

        # If caller wants all of them, don't apply white and blacklists
        if show_all:
            return modules

        # Apply whitelist, if present
        enable = self.core.get_list("enable")
        if enable:
            enabled_modules = {}
            for module in enable:
                if module in modules:
                    enabled_modules[module] = modules[module]
            modules = enabled_modules

        # Apply blacklist, if present
        exclude = self.core.get_list("exclude")
        for module in exclude:
            if module in modules:
                del modules[module]

        return modules
Example #19
    def write_module(self, basedir, package, generated_modules):
        """create a module file to mark directory for python"""
        dir = self.outdir(basedir)
        if not os.path.exists(dir):
            os.makedirs(dir)
        elif not os.path.isdir(dir):
            raise self.exception("file preventing the creating of module directory: %s" % dir)
        p = os.path.join(dir, "__init__.py")
        if roslib.msgs.is_verbose():
            print("... creating module file", p)
        f = open(p, "w")
        try:
            # this causes more problems than anticipated -- for pure python
            # packages it works fine, but in C++ packages doxygen seems to prefer python first.
            # f.write('## \mainpage\n') #doxygen
            # f.write('# \htmlinclude manifest.html\n')
            for mod in generated_modules:
                f.write("from .%s import *\n" % mod)
        finally:
            f.close()

        parentInit = os.path.dirname(dir)
        p = os.path.join(parentInit, "__init__.py")
        if not os.path.exists(p):
            # touch __init__.py in the parent package
            print("... also creating module file %s" % p)
            f = open(p, "w")
            f.close()
Example #20
 def _generate_cert(self):
     if not os.path.exists(self.cert_dir):
         os.makedirs(self.cert_dir)
     t = tpl.render(common_name=self.cn, ca_cert_dir=CA_DIR)
     fh = tempfile.NamedTemporaryFile(delete=False)
     fh.write(t)
     fh.close()
     ssl_conf = fh.name
     key = os.path.join(self.cert_dir, "cert.key")
     csr = os.path.join(self.cert_dir, "cert.csr")
     crt = os.path.join(self.cert_dir, "cert.crt")
     success = True
     # Now fiddle with all the shitty options of openssl(1)
     try:
         lock.acquire()
         if not os.path.exists(key):
             cmd = "openssl req -config %s -days 365 -nodes -new -keyout %s -out %s" % (ssl_conf, key, csr)
             self.log.error(cmd)
             gencert = envoy.run(cmd)
             if gencert.status_code != 0:
                 raise OSError(gencert.status_code, "problem generating the certificate: %s" % gencert.std_err)
         if not os.path.exists(crt):
             cmd = "openssl ca -batch -notext -config %s -out  %s -infiles %s" % (ssl_conf, crt, csr)
             self.log.error(cmd)
             signcert = envoy.run(cmd)
             if signcert.status_code != 0:
                 raise OSError(signcert.status_code, "problem signing the certificate: %s" % signcert.std_err)
         if not os.path.exists(self.cert_path):
             destination = open(self.cert_path, "wb")
             shutil.copyfileobj(open(crt, "rb"), destination)
             shutil.copyfileobj(open(key, "rb"), destination)
             destination.close()
     except Exception as e:
         self.log.error(str(e))
         success = False
Example #21
def wget(url, target, reporthook=None, proxies=None):
    """Copy the contents of a file from a given URL
    to a local file.
    """

    def report(bcount, bsize, total):
        global last_time_display
        if total > 0 and bsize > 0:
            # print only every second or at end
            if (time.time() - last_time_display >= 0.1) or (bcount * bsize >= total):
                print "%i / %i (%.0f%%) (%.0f KB/s)\r" % (
                    bcount * bsize,
                    total,
                    100.0 * bcount * bsize / total,
                    bsize / (1024 * (time.time() - last_time_display)),
                ),
                last_time_display = time.time()

    if os.path.isdir(target):
        target = os.path.join(target, "")

    (dir, filename) = os.path.split(target)
    if not filename:
        filename = url.split("/")[-1]
    if not dir:
        dir = os.getcwd()

    if not os.path.isdir(dir):
        os.makedirs(dir)

    global last_progress_display
    last_progress_display = 0
    start_time = time.time()
    r = requests.get(url, stream=True, proxies=proxies)

    total_bytes = int(r.headers["content-length"])
    chunk_size = max(total_bytes // 100, 1000)
    print("Downloading %s (%.1f Mb)" % (url, total_bytes / 1024.0 / 1024.0))

    output_file = open(os.path.join(dir, filename), "wb")
    try:
        if not reporthook:
            reporthook = report
        reporthook(0, chunk_size, total_bytes)
        cnt = 0
        if r.ok:
            for chunk in r.iter_content(chunk_size=chunk_size):
                output_file.write(chunk)
                reporthook(cnt, len(chunk), total_bytes)
                cnt += 1
            reporthook(total_bytes // chunk_size, chunk_size, total_bytes)

        else:
            r.raise_for_status()
    finally:
        output_file.close()

    # (localpath,headers) = WaptURLopener(proxies=proxies).retrieve(url=url, filename=os.path.join(dir,filename),reporthook=reporthook or report,)
    print "  -> download finished (%.0f Kb/s)" % (total_bytes / (1024 * (time.time() - start_time)))
    return os.path.join(dir, filename)
Example #22
 def _ensure_dir(self, template_name):
     """Ensure the output directory for a template exists."""
     head = os.path.dirname(template_name)
     if head:
         file_dirpath = os.path.join(self.outpath, head)
         if not os.path.exists(file_dirpath):
             os.makedirs(file_dirpath)
Example #23
    def processJarSection(self, jarfile, lines, jardir):
        """Internal method called by makeJar to actually process a section
        of a jar.mn file.

        jarfile is the basename of the jarfile or the directory name for
        flat output, lines is a PushbackIter of the lines of jar.mn,
        the remaining options are carried over from makeJar.
        """

        # chromebasepath is used for chrome registration manifests
        # {0} is getting replaced with chrome/ for chrome.manifest, and with
        # an empty string for jarfile.manifest

        chromebasepath = "{0}" + os.path.basename(jarfile)
        if self.outputFormat == "jar":
            chromebasepath = "jar:" + chromebasepath + ".jar!"
        chromebasepath += "/"

        jarfile = os.path.join(jardir, jarfile)
        jf = None
        if self.outputFormat == "jar":
            # jar
            jarfilepath = jarfile + ".jar"
            try:
                os.makedirs(os.path.dirname(jarfilepath))
            except OSError as error:
                if error.errno != errno.EEXIST:
                    raise
            jf = ZipFile(jarfilepath, "a", lock=True)
            outHelper = self.OutputHelper_jar(jf)
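The errno.EEXIST check above is the pre-3.2 idiom for "create the parent unless it already exists"; for comparison, a sketch of both forms side by side (jarfilepath below is a placeholder standing in for the one in the example):

# Hedged sketch: equivalent "create the parent unless it already exists" guards.
import errno
import os

jarfilepath = "chrome/browser.jar"   # placeholder path

# Pre-3.2 idiom, as in the example:
try:
    os.makedirs(os.path.dirname(jarfilepath))
except OSError as error:
    if error.errno != errno.EEXIST:
        raise

# Python 3.2+ equivalent:
os.makedirs(os.path.dirname(jarfilepath), exist_ok=True)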
Example #24
def import_project_myos_config(project_id):
    """
    Import the project's myos config file.
    :param project_id:
    :return:
    """

    # Save the uploaded config file
    new_config_file = request.files["app_config"]
    current_app.logger.info(new_config_file)
    upload_path = os.path.join(current_app.config["TMS_TEMP_DIR"], current_user.username, "ApkVersion.ini")
    if not os.path.exists(os.path.dirname(upload_path)):
        os.makedirs(os.path.dirname(upload_path))
    new_config_file.save(upload_path)

    project_apk = []
    cf = ConfigParser()
    try:
        cf.read(upload_path)
        for section in cf.sections():
            if section == "ProjectInfo":
                continue
            project_apk.append(section)

    except NoSectionError as e:
        print(e)
        print("[ERROR] apkVersion file config error")
Example #25
    def UpLoad(self, _UploadedFile, FileName, Size):
        Log.Success("Done upload of " + FileName + " (" + Size + " octets)")
        Log.Info("Starting processing...", True)
        Path2Save = "models/" + FileName
        FileData = ""

        while True:
            datatmp = _UploadedFile.file.read(8192)
            FileData += datatmp
            if not datatmp:
                break

        if not os.path.exists(os.path.dirname(Path2Save)):
            os.makedirs(os.path.dirname(Path2Save))

        File2Write = open(Path2Save, "w")
        File2Write.write(FileData)
        File2Write.close()

        uid = os.environ.get("SUDO_UID")
        gid = os.environ.get("SUDO_GID")
        if uid is not None:
            os.chown(Path2Save, int(uid), int(gid))

        Log.Success("Done !")
Example #26
def export_project_myos_config(project_id):
    """
    Export the project's myos config from the database to an ApkVersion.ini file.
    :param project_id:
    :return:
    """
    project_obj = ProjectAndroid.query.filter(ProjectAndroid.id == project_id).first()
    current_config = ProjectMyOSConfig.query.filter(ProjectMyOSConfig.project_id == project_id).all()

    cf = ConfigParser()
    for app in current_config:
        cf.add_section(app.app_name)
        cf.set(app.app_name, "support", app.support)
        cf.set(app.app_name, "app_version", app.app_version)
        if app.overrides:
            cf.set(app.app_name, "overrides", app.overrides)

    upload_path = os.path.join(current_app.config["TMS_TEMP_DIR"], current_user.username, "ApkVersion.ini")
    if not os.path.exists(os.path.dirname(upload_path)):
        os.makedirs(os.path.dirname(upload_path))
    fp = open(upload_path, "w")
    fp.write("[ProjectInfo]\n")
    fp.write("#android version,optional value:L/M/N\n")
    fp.write("ANDROID_VERSION=%s\n\n" % project_obj.android_version)
    fp.write("#chip platform,optional value:MTK/QCOM/RDA\n")
    fp.write("PLATFORM=%s\n\n" % project_obj.platform_name)
    cf.write(fp)
    fp.close()

    return send_file(open(upload_path, "rb"), as_attachment=True, attachment_filename=os.path.basename(upload_path))
Example #27
def test_backup_images(server, tmpdir, loop):
    Config.instance().set("Server", "images_path", str(tmpdir))

    os.makedirs(str(tmpdir / "QEMU"))
    with open(str(tmpdir / "QEMU" / "a.img"), "w+") as f:
        f.write("hello")
    with open(str(tmpdir / "QEMU" / "b.img"), "w+") as f:
        f.write("world")

    response = server.get("/backup/images.tar", api_version=None, raw=True)
    assert response.status == 200
    assert response.headers["CONTENT-TYPE"] == "application/x-gtar"

    with open(str(tmpdir / "images.tar"), "wb+") as f:
        print(len(response.body))
        f.write(response.body)

    tar = tarfile.open(str(tmpdir / "images.tar"), "r")
    os.makedirs(str(tmpdir / "extract"))
    os.chdir(str(tmpdir / "extract"))
    # Extract to current working directory
    tar.extractall()
    tar.close()

    assert os.path.exists(os.path.join("QEMU", "a.img"))
    open(os.path.join("QEMU", "a.img")).read() == "hello"

    assert os.path.exists(os.path.join("QEMU", "b.img"))
    open(os.path.join("QEMU", "b.img")).read() == "world"
Example #28
def get_css(title):
    title = title.strip('"')

    try:
        sheet = Stylesheet.objects.get(title=title)
    except Stylesheet.DoesNotExist:
        raise template.TemplateSyntaxError("%s is an invalid stylesheet" % title)

    parse = False
    modified = None
    file = "%s.css" % sheet.filename
    output = os.path.join(settings.STATIC_ROOT, file)
    path = os.path.abspath(output)

    try:
        os.makedirs(path)
    except OSError:
        pass

    try:
        modified = datetime.fromtimestamp(os.path.getmtime(output))
    except OSError:
        pass

    if not modified or (modified and modified < sheet.date_updated):
        parse = True

    if parse:
        try:
            f = open(output, "w")
            f.write(sheet.css)
            f.close()
        except IOError:
            raise template.TemplateSyntaxError("Failed to write %s" % output)
    return "%s%s" % (settings.STATIC_URL, file)
Example #29
    def __init__(self, should_start=True, *args, **kwargs):
        self.habitat_name = self.__class__.__name__
        super(Habitat, self).__init__(name="%(habitat_name)s")
        self.executer = Executer(self)
        self._args = args
        self._port_map = {}

        # We absolutely need a metadata file.
        metadata_path = self["metadata_path"]
        if not os.path.exists(os.path.dirname(metadata_path)):
            os.makedirs(os.path.dirname(metadata_path))
        self.metadata = MetaDataFile(metadata_path)

        for name, component in self.get_all_components().items():
            component.habitat = self
            component.name = name
            if component.name not in self.metadata:
                self.metadata[component.name] = {}
            component.metadata = self.metadata[component.name]

        # If we should start the Habitat, run the first argument as a command.
        if should_start:
            if self._args:
                command = self._args[0]
            else:
                command = "run"
            self.command(command, *self._args[1:])
Example #30
def create_platform_zip(platform, dist_dir, osname, version):
    if not os.path.exists(dist_dir):
        os.makedirs(dist_dir)
    basepath = "%s/%s/%s" % (platform, osname, version)
    sdkzip = os.path.join(dist_dir, "%s-%s-%s.zip" % (platform, version, osname))
    zf = zipfile.ZipFile(sdkzip, "w", zipfile.ZIP_DEFLATED)
    return (zf, basepath)
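A short usage sketch for the function above; the platform, version, and entry names are placeholders, and the caller is responsible for closing the returned ZipFile:

# Hedged usage sketch: write one entry under basepath, then close the archive.
zf, basepath = create_platform_zip("android", "dist", "osx", "1.0.0")
zf.writestr("%s/version.txt" % basepath, "1.0.0")
zf.close()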