Пример #1
1
def removeAll():
    """Best-effort removal of every file/folder in the current directory.

    Retries up to 5 times with a 30-second pause between attempts, because
    entries may be transiently locked (common on Windows). Exits the process
    with status 1 if cleanup still fails on the final attempt.
    """
    attempts = 5
    while attempts > 0:
        attempts -= 1

        # Collect both regular and hidden (dot-prefixed) entries.
        filenames = glob.glob("[#_A-Za-z0-9]*") + glob.glob(".[A-Za-z]*")

        try:
            for filename in filenames:
                if os.path.isdir(filename):
                    shutil.rmtree(filename, onerror=handleRemoveReadonly)
                else:
                    os.remove(filename)
        # BUG FIX: the original caught WindowsError first, which is undefined
        # on non-Windows platforms (NameError when any exception occurred).
        # On Windows, WindowsError is an alias of OSError, so catching
        # OSError alone covers both cases.
        except OSError as err:
            time.sleep(30)
            if attempts == 0:
                print("Failed to cleanup files/folders")
                print(err)
                sys.exit(1)
            continue
        attempts = 0
Пример #2
1
    def createserverconfig(self, entry, _):
        """Build a monolithic Nagios server configuration file.

        Concatenates every ``*-host.cfg`` under ``self.data``, plus every
        ``*-group.cfg`` whose group name appears in some host config, stores
        the combined text on ``entry.text``, copies ``self.server_attrib``
        into ``entry.attrib`` and writes the text to ``<data>/nagiosgen.cfg``.
        """
        host_configs = glob.glob("%s/*-host.cfg" % self.data)
        group_configs = glob.glob("%s/*-group.cfg" % self.data)
        host_data = []
        for host in host_configs:
            # Close each file promptly instead of relying on GC.
            with open(host, "r") as hostfile:
                host_data.append(hostfile.read())

        # Hoisted out of the loop: the original re-joined host_data for
        # every group file.
        all_hosts = "\n".join(host_data)
        group_data = []
        for group in group_configs:
            # Strip the directory prefix and the "-group.cfg" suffix to get
            # the bare group name.
            group_name = re.sub("(-group.cfg|.*/(?=[^/]+))", "", group)
            # Only include groups actually referenced by some host config.
            if all_hosts.find(group_name) != -1:
                with open(group, "r") as groupfile:
                    group_data.append(groupfile.read())

        entry.text = "%s\n\n%s" % ("\n".join(group_data), all_hosts)
        # Plain loop instead of a side-effecting list comprehension.
        for key, value in self.server_attrib.items():
            entry.attrib[key] = value
        try:
            with open("%s/nagiosgen.cfg" % self.data, "w") as fileh:
                fileh.write(entry.text)
        except OSError:
            ioerr = sys.exc_info()[1]
            LOGGER.error("Failed to write %s/nagiosgen.cfg" % self.data)
            LOGGER.error(ioerr)
    def updatePthFile(self, oldName, newName):
        """Searches site-packages for .pth files and replaces any instance of
        `oldName` with `newName`, where the names likely have the form
        PsychoPy-1.60.04.

        Returns (nUpdates, info): the number of files rewritten plus an info
        string, or (-1, info) if some file could not be written back.
        """
        try:
            from distutils.sysconfig import get_python_lib

            siteDir = get_python_lib()
        except ImportError:
            # distutils was removed in Python 3.12; sysconfig reports the
            # same site-packages location.
            import sysconfig

            siteDir = sysconfig.get_paths()["purelib"]

        pthFiles = glob.glob(os.path.join(siteDir, "*.pth"))
        # sometimes the site-packages dir isn't where the pth files are kept?
        enclosingSiteDir = os.path.split(siteDir)[0]
        pthFiles.extend(glob.glob(os.path.join(enclosingSiteDir, "*.pth")))
        nUpdates = 0  # no paths updated
        info = ""
        for filename in pthFiles:
            with open(filename, "r") as f:
                lines = f.readlines()
            needSave = False
            for lineN, line in enumerate(lines):
                if oldName in line:
                    lines[lineN] = line.replace(oldName, newName)
                    needSave = True
            if needSave:
                # Catch only I/O errors; the original bare `except:` would
                # also swallow KeyboardInterrupt/SystemExit.
                try:
                    with open(filename, "w") as f:
                        f.writelines(lines)
                    nUpdates += 1
                    logging.info("Updated PsychoPy path in %s" % filename)
                except OSError:
                    # BUG FIX: the original added a tuple to a string here,
                    # which raised TypeError instead of recording the message.
                    info += "Failed to update PsychoPy path in %s" % filename
                    return -1, info
        return nUpdates, info
Пример #4
1
def main():
    """Walk subdirectories of a run directory; inside each OUTPUT folder,
    stash files sharing a trailing ID with gce-super* files into
    donotdelete/, then remove the remaining *.dat files via `sh`.
    """
    parser = argparse.ArgumentParser(description="quickie")
    parser.add_argument("parentdir", type=str, help="Parent directory of run containing subdirectories of data files")
    args = parser.parse_args()
    pdir = args.parentdir

    # Loop through all subdirectories under the parent directory
    for item in os.listdir(pdir):
        subdir = os.path.join(pdir, item)
        if not os.path.isdir(subdir):
            continue
        # if OUTPUT exists, cd into it and begin plotting
        if "OUTPUT" not in os.listdir(subdir):
            continue
        print("Found output")
        os.chdir(os.path.join(subdir, "OUTPUT"))
        try:
            os.mkdir("donotdelete")
        except OSError:
            pass  # the directory already exists
        for supfile in glob.glob("gce-super*"):
            # The last 13 characters identify the run this file belongs to
            # -- assumes fixed-width IDs; TODO confirm against the producer.
            ID = supfile[-13:]
            for newfile in glob.glob("*%s" % ID):
                sh("mv %s donotdelete" % newfile)
        sh("rm *.dat")
        os.chdir(pdir)
Пример #5
1
def remove_simulation_replicate_output_files(s_basename):
    """Delete replicate output files for a cancelled simulation.

    Called when a sim is cancelled inside a PGGuiSimuPop object, to remove
    any output files produced by do_pgopsimupop_replicate_from_files.
    """
    # list() coerces the values view under Python 3, though any iterable
    # would probably suffice:
    for s_ext in list(pgout.PGOutputSimuPop.DICT_OUTPUT_FILE_EXTENSIONS.values()):
        s_pattern = s_basename + "*" + SIMULATION_OUTPUT_FILE_REPLICATE_TAG + "*" + s_ext
        s_zipped_pattern = s_pattern + "." + pgout.PGOutputSimuPop.COMPRESSION_FILE_EXTENSION
        # Remove both the unzipped and the zipped variants of this extension.
        for s_file in glob.glob(s_pattern) + glob.glob(s_zipped_pattern):
            os.remove(s_file)
    return
Пример #6
1
    def getSerialPorts(self):
        """Lists serial ports
        From http://stackoverflow.com/questions/12090503/listing-available-com-ports-with-python

        :raises EnvironmentError:
            On unsupported or unknown platforms
        :returns:
            A list of available serial ports (possibly empty)
        """
        if sys.platform.startswith("win"):
            ports = ["COM" + str(i + 1) for i in range(256)]

        elif sys.platform.startswith("linux") or sys.platform.startswith("cygwin"):
            # this is to exclude your current terminal "/dev/tty"
            ports = glob.glob("/dev/tty[A-Za-z]*")

        elif sys.platform.startswith("darwin"):
            ports = glob.glob("/dev/tty.*")

        else:
            raise EnvironmentError("Unsupported platform")

        result = []
        for port in ports:
            # A port is considered available if it can be opened and closed
            # without error.
            try:
                s = serial.Serial(port)
                s.close()
                result.append(port)
            except (OSError, serial.SerialException):
                pass

        # BUG FIX: the original compared the list `result` to "" which can
        # never be true, so the "No ports found" fallback was dead code (and
        # would have changed the return type); always return the list.
        return result
Пример #7
1
def rename_files(rootfile, mode, output_path):
    '''
    Rename all the files that match the root of the filename input
    parameter, excluding the input filename itself.

    Files matching <base>_sci* and <base>_single* are copied to
    <base>_<mode><suffix> (into output_path if given, else next to
    rootfile) and the originals are removed.
    '''
    print('Renaming Files')

    # Build the file list.
    rootfile = os.path.abspath(rootfile)
    basename = os.path.splitext(rootfile)[0]
    file_list = glob.glob(basename + '_sci*') + glob.glob(basename + '_single*')

    # Loop over the files and rename.
    for filename in file_list:
        # str methods replace the Python-2-only `string` module functions
        # (string.split was removed in Python 3).
        dst = basename.split('/')[-1] + '_' + mode
        dst += filename.split(basename)[1]
        if output_path is None:
            dst = os.path.join(os.path.dirname(rootfile), dst)
        else:
            dst = os.path.join(output_path, dst)
        shutil.copyfile(filename, dst)
        os.remove(filename)
Пример #8
0
def update_from_app_install_data(db, cache, datadir=None):
    """ index the desktop files in $datadir/desktop/*.desktop """
    if not datadir:
        datadir = softwarecenter.paths.APP_INSTALL_DESKTOP_PATH
    context = GLib.main_context_default()
    candidates = glob(datadir + "/*.desktop") + glob(datadir + "/*.scope")
    for desktopf in candidates:
        LOG.debug("processing %r", desktopf)
        # process events
        while context.pending():
            context.iteration()
        try:
            # .scope files get a dedicated parser; everything else is
            # treated as a desktop file.
            parser = ScopeConfigParser() if desktopf.endswith(".scope") else DesktopConfigParser()
            parser.read(desktopf)
            parser.index_app_info(db, cache)
        except Exception as e:
            # Print a warning, no error (Debian Bug #568941)
            LOG.debug("error processing: %r %r", desktopf, e)
            warning_text = _(
                "The file: '%s' could not be read correctly. The application "
                "associated with this file will not be included in the "
                "software catalog. Please consider raising a bug report "
                "for this issue with the maintainer of that application"
            )
            LOG.warning(warning_text, desktopf)
    return True
Пример #9
0
def load_check_directory(agentConfig, hostname):
    """ Return the initialized checks from checks.d, and a mapping of checks that failed to
    initialize. Only checks that have a configuration
    file in conf.d will be returned.

    NOTE(review): this snippet appears truncated -- the accumulators below
    are populated and returned later in the original source.
    """
    from checks import AgentCheck, AGENT_METRICS_CHECK_NAME

    initialized_checks = {}
    init_failed_checks = {}
    deprecated_checks = {}
    agentConfig["checksd_hostname"] = hostname

    # Record a deprecation error for every old-style parameter still present
    # in the agent config.
    deprecated_configs_enabled = [
        v for k, v in OLD_STYLE_PARAMETERS if len([l for l in agentConfig if l.startswith(k)]) > 0
    ]
    for deprecated_config in deprecated_configs_enabled:
        msg = "Configuring %s in datadog.conf is not supported anymore. Please use conf.d" % deprecated_config
        deprecated_checks[deprecated_config] = {"error": msg, "traceback": None}
        log.error(msg)

    osname = get_os()
    checks_paths = [glob.glob(os.path.join(agentConfig["additional_checksd"], "*.py"))]

    try:
        checksd_path = get_checksd_path(osname)
        checks_paths.append(glob.glob(os.path.join(checksd_path, "*.py")))
    # FIX: Python 3 compatible `as` syntax (was the py2-only
    # `except PathNotFound, e:` form, a SyntaxError under Python 3).
    except PathNotFound as e:
        log.error(e.args[0])
        sys.exit(3)
def serial_ports():
    """Lists serial ports

    :raises EnvironmentError:
        On unsupported or unknown platforms
    :returns:
        A list of available serial ports
    """
    platform = sys.platform
    if platform.startswith("win"):
        candidates = ["COM" + str(n + 1) for n in range(256)]
    elif platform.startswith(("linux", "cygwin")):
        # this is to exclude your current terminal "/dev/tty"
        candidates = glob.glob("/dev/tty[A-Za-z]*")
    elif platform.startswith("darwin"):
        candidates = glob.glob("/dev/tty.*")
    else:
        raise EnvironmentError("Unsupported platform")

    available = []
    for candidate in candidates:
        # A port counts as available when it can be opened and then closed
        # without raising.
        try:
            serial.Serial(candidate).close()
            available.append(candidate)
        except (OSError, serial.SerialException):
            pass
    return available
Пример #11
0
def copy_mr_log_file(xoutputdir):
    """Copy today's MapReduce .jhist history files from HDFS into
    `xoutputdir` (emptied or created first) and return the local paths."""
    year, month, day = get_time()

    print("start sleep")
    # Wait 5 minutes so the history server has flushed the .jhist files.
    # (The original comment said "2 minutes" but the sleep is 300s.)
    time.sleep(300)
    print("stop sleep")

    hdfspath = "/tmp/hadoop-yarn/staging/history/done/%s/%s/%s/000000/*.jhist" % (year, month, day)

    # List the jhist files in the remote directory
    dirpath = xoutputdir + "/"

    # Empty the local directory if it exists, otherwise create it.
    if os.path.exists(dirpath):
        for f in glob.glob(dirpath + "*"):
            os.remove(f)
    else:
        os.makedirs(dirpath)

    subprocess.call(["hadoop", "dfs", "-copyToLocal", hdfspath, dirpath])
    localpath = glob.glob(dirpath + "*")
    print("found files: %s" % localpath)
    return localpath
Пример #12
0
def teardown():
    """Remove statepoint.10.*, source.10.* and results_test.dat from cwd."""
    targets = glob.glob(os.path.join(cwd, "statepoint.10.*"))
    targets += glob.glob(os.path.join(cwd, "source.10.*"))
    targets.append(os.path.join(cwd, "results_test.dat"))
    for path in targets:
        if os.path.exists(path):
            os.remove(path)
def apply_separate_test_fix():
    """Install the python-pgsql packages for the separate-test fix, then run
    the test reconfiguration script. Skipped when the fix package is absent
    or the module is already installed."""
    if not os.path.exists(FIX_DIR + "collapseTests/packages/python-pgsql_2.5.1-2+b2_i386.deb"):
        log("Note: Not applying separateTestFix.", PRINT_TO_CONSOLE)
        return

    # The only systems this is being applied to are those where the module has not
    # yet been installed so if it is installed it is assumed that the fix has been run already
    if glob.glob(os.path.join(APT_CACHE_DIR, "python-pgsql*")):
        log("Separate Test Fix already run", PRINT_TO_CONSOLE)
        return

    log("Apply Separate Test Fix", PRINT_TO_CONSOLE)

    for package in glob.glob(FIX_DIR + "collapseTests/packages/*"):
        shutil.copy(package, APT_CACHE_DIR)

    # NOTE(review): shell commands built by string concatenation -- fine for
    # the constants used here, but not safe for untrusted input.
    cmd = "dpkg -i " + FIX_DIR + "collapseTests/packages/*"
    os.system(cmd)

    cmd = "python " + FIX_DIR + "collapseTests/testReconfiguration_Linux.py -p " + CLINLIMS_PWD + " -d clinlims"
    os.system(cmd)

    log("Fix applied", PRINT_TO_CONSOLE)
def get_tomcat_directory():
    """Locate the Tomcat installation directory.

    Looks for TOMCAT_BASE suffixed with a two-part "major.minor" version
    first, then a bare major version. Returns the directory name when the
    version is 5.5 or later, otherwise None.
    """

    def _accept(name, major, minor=None):
        # Accept 5.5+ in the two-part case and strictly >5 otherwise.
        if major > 5 or (minor is not None and major == 5 and minor >= 5):
            log("Found " + name, PRINT_TO_CONSOLE)
            return name
        log("Tomcat must be version 5.5 or later\n", PRINT_TO_CONSOLE)
        return None

    names = glob.glob(TOMCAT_BASE + "[0-9].[0-9]")
    if names:
        # BUG FIX: the original used names[0].strip(TOMCAT_BASE), which
        # strips any *characters* contained in TOMCAT_BASE from both ends,
        # not the prefix -- mangling the version when the base path contains
        # digits or dots. Slice the prefix off instead.
        version = names[0][len(TOMCAT_BASE):]
        splitVersion = version.split(".")
        return _accept(names[0], int(splitVersion[0]), int(splitVersion[1]))

    names = glob.glob(TOMCAT_BASE + "[0-9]")
    if names:
        return _accept(names[0], int(names[0][len(TOMCAT_BASE):]))

    return None
    def find_file(self, directory, exten):
        """Find the first file in `directory` matching the glob `exten`,
        move it into `self.directory`, and return the new path (or None if
        nothing matched).

        For "*.part" files, a copy is first renamed to .xlsx (the original
        .part is moved to `self.mydump`); other matches are moved directly.
        """
        if exten == "*.part":
            direc_exet = "%s/%s" % (directory, exten)

            for fle in glob.glob(direc_exet):
                # FIX: 0o755 (rwxr-xr-x), not decimal 755, which is a
                # nonsensical permission mask (0o1363).
                os.chmod(fle, 0o755)

                start = fle.find(".")
                exten_csv = "%s.xlsx" % (fle[:start])

                shutil.copy(fle, exten_csv)
                # FIX: filter() returns a lazy iterator in Python 3 and
                # cannot be indexed; use a comprehension instead.
                filename = [part for part in exten_csv.strip().split("/") if part][-1]

                new_file = "%s/%s" % (self.directory, filename)
                shutil.move(exten_csv, new_file)

                shutil.move(fle, self.mydump)
                return new_file

        else:
            dir_file_csv = "%s/%s" % (directory, exten)

            for fle in glob.glob(dir_file_csv):
                os.chmod(fle, 0o755)

                filename = [part for part in fle.strip().split("/") if part][-1]
                new_file = "%s/%s" % (self.directory, filename)

                # FIX: move the full path `fle`, not the bare basename,
                # so this works regardless of the current directory.
                shutil.move(fle, new_file)
                return new_file
Пример #16
0
def test_created_statepoint():
    """Assert exactly two statepoint files and one source.7 file exist,
    each in binary or HDF5 format."""
    statepoints = glob.glob(pwd + "/statepoint.*")
    assert len(statepoints) == 2
    assert statepoints[0].endswith(("binary", "h5"))
    sourcepoints = glob.glob(pwd + "/source.7.*")
    assert len(sourcepoints) == 1
    assert sourcepoints[0].endswith(("binary", "h5"))
Пример #17
0
def main():
    """Generate static HTML bug-report pages from tag files under argv[1]
    into argv[2]: one page per facet value, a per-facet index, and a
    top-level index. argv[3] optionally names a file whose contents become
    the index-page message.
    """
    # BUG FIX: both argv[1] (src_dir) and argv[2] (dst_dir) are required, so
    # fewer than 3 argv entries must trigger the usage message (the original
    # `< 2` let a missing dst_dir raise an uncaught IndexError below).
    if len(sys.argv) < 3 or "-h" in sys.argv or "--help" in sys.argv:
        syntax()
    src_dir = sys.argv[1]
    dst_dir = sys.argv[2]
    try:
        if not os.path.isfile(sys.argv[3]):
            giveup('"%s" does not exist or is not a file' % sys.argv[3])
        index_page_msg = open(sys.argv[3]).read()
    except IndexError:
        # The index-page message file is optional.
        index_page_msg = ""
    if not os.path.isdir(src_dir):
        giveup('"%s" does not exist or is not a directory' % src_dir)
    facets = []
    timestamp = "%d-%02d-%02d %02d:%02d" % time.gmtime()[:5]
    for src_facet_dir in glob("%s/*" % src_dir):
        if not os.path.isdir(src_facet_dir):
            continue
        facet = src_facet_dir.split("/")[-1]
        facet_values = []
        facets.append(facet)
        for src_tag_file in glob("%s/*" % src_facet_dir):
            facet_value = os.path.basename(src_tag_file)
            dst_facet_dir = "%s/%s" % (dst_dir, facet)
            ensure_dir_exists(dst_facet_dir)
            bug_table_rows = []
            for line in open(src_tag_file).readlines():
                # the columns are: bug type, bug number, package, popcon, dust
                cols = line.rstrip().split(" ")
                # BUG FIX: validate the column count *before* unpacking; the
                # original unpacked cols[:4] first, so a short line raised
                # ValueError instead of being skipped.
                if len(cols) != 5:
                    sys.stderr.write('skipping invalid line "%s"' % line)
                    continue
                _, bug_no, package_name, popcon = cols[:4]
                # add link for bug number and package
                cols[1] = '<a href="http://bugs.debian.org/%s">%s</a>' % (bug_no, bug_no)
                cols[2] = '<a href="http://packages.qa.debian.org/%s/%s.html">%s</a>' % (
                    package_name[0],
                    package_name,
                    package_name,
                )
                cols[3] = '<a href="http://qa.debian.org/popcon.php?package=%s">%s</a>' % (package_name, popcon)
                cols = "</td><td>".join(cols)
                bug_table_rows.append("<tr><td>%s</td></tr>" % cols)
            nbugs = len(bug_table_rows)
            if nbugs > 0:
                row = '<tr><td><a href="./%s.html">%s</a></td><td>%d</td></tr>' % (facet_value, facet_value, nbugs)
            else:
                row = "<tr><td>%s</td><td>%d</td></tr>" % (facet_value, nbugs)
            facet_values.append(row)
            dst_tag_file = "%s/%s.html" % (dst_facet_dir, facet_value)
            # FIXME: use newstyle dicts instead
            tag = "%s::%s" % (facet, facet_value)
            html_doc = bugs_html_template % (tag, tag, "\n".join(bug_table_rows), facet, timestamp)
            create_file(dst_tag_file, html_doc)
        content = facet_values_html_template % (facet, facet, facet, "\n".join(sorted(facet_values)), timestamp)
        create_file("%s/index.html" % dst_facet_dir, content)
    formated_facets = ['<li><a href="./%s/index.html">%s</a></li>' % (f, f) for f in facets]
    create_file(
        "%s/index.html" % dst_dir, main_page_template % ("\n".join(sorted(formated_facets)), index_page_msg, timestamp)
    )
Пример #18
0
def list():
    """Return hardware-info dicts for fixed, unpartitioned block devices.

    Scans /sys/block, skipping drives that have partitions and removable
    media, and annotates each remaining drive with its device name and
    rotational flag.
    """
    drives = []
    for path in glob("/sys/block/*/device"):
        base = os.path.dirname(path)
        device = os.path.basename(base)

        # Skip partitioned drives
        partitions = glob(base + "/" + device + "*")
        if partitions:
            continue

        # Skip removable media. Use `with` so the sysfs file handles are
        # closed promptly instead of leaking until GC.
        with open(base + "/removable") as fh:
            removable = fh.read().rstrip("\n")
        if removable == "1":
            continue
        with open(base + "/queue/rotational") as fh:
            rotational = fh.read().rstrip("\n")

        hardware = _hwinfo(device)
        hardware["device"] = device
        hardware["rotational"] = rotational

        drives.append(hardware)
    return drives
Пример #19
0
def build_transforms(ext_modules, packages, numerix):
    """Append the _transforms extension module(s) for the requested numeric
    backend(s) ("numarray", "Numeric" or "both") to ext_modules."""

    def _make_module(name, source, macro, extra_includes):
        # Shared construction for both backends (the original duplicated
        # this block verbatim). CXX sources are re-globbed per call to
        # mirror the original behaviour.
        cxx = glob.glob("CXX/*.cxx")
        cxx.extend(glob.glob("CXX/*.c"))
        module = Extension(
            name,
            [source, "src/mplutils.cpp"] + cxx,
            libraries=["stdc++", "m"],
            include_dirs=["src", "."] + extra_includes,
        )
        module.extra_compile_args.append(macro)
        add_base_flags(module)
        ext_modules.append(module)

    if numerix in ["numarray", "both"]:  # Build for numarray
        temp_copy("src/_transforms.cpp", "src/_na_transforms.cpp")
        _make_module("matplotlib._na_transforms", "src/_na_transforms.cpp",
                     "-DNUMARRAY=1", numarray_inc_dirs)

    if numerix in ["Numeric", "both"]:  # Build for Numeric
        temp_copy("src/_transforms.cpp", "src/_nc_transforms.cpp")
        _make_module("matplotlib._nc_transforms", "src/_nc_transforms.cpp",
                     "-DNUMERIC=1", [])
Пример #20
0
    def link_lsb_libraries(self):
        """Symlink the system loader to each LSB loader name listed in the
        conf file. Idempotent: does nothing when already linked.

        :raises TypeError: if the lib dir or library list is missing from
            the conf file.
        """
        if not self.libraries_linked:
            import ast

            logging.info("Linking LSB libraries")
            libdir_key = "libdir-%s" % self.arch
            os_libdir = self.config.get("lib", libdir_key)
            if not os_libdir:
                raise TypeError("Could not find OS lib dir from conf file")
            lib_key = "lib-%s" % self.arch
            lib_list_raw = self.config.get("lib", lib_key)
            if not lib_list_raw:
                raise TypeError("Could not find library list from conf file")
            # SECURITY FIX: literal_eval parses the configured list without
            # executing arbitrary code the way eval() would.
            lib_list = ast.literal_eval(lib_list_raw)

            # Remove any previous ld-lsb*.so symbolic links
            for lib in glob.glob("%s/ld-lsb*.so*" % os_libdir):
                os.remove(lib)

            # Get the base library that we'll use to recreate the symbolic links
            system_lib = glob.glob("%s/ld-2*.so*" % os_libdir)[0]

            # Now just link the system lib that we just found to each one of the
            # needed LSB libraries that we provided on the conf file
            for lsb_lib in lib_list:
                # Get the library absolute path
                lsb_lib = os.path.join(os_libdir, lsb_lib)
                # Link the library system_lib -> lsb_lib
                os.symlink(system_lib, lsb_lib)

            self.libraries_linked = True
Пример #21
0
def _file_configs_paths(osname, agentConfig):
    """ Retrieve all the file configs and return their paths
    """
    try:
        confd_path = get_confd_path(osname)
        all_file_configs = glob.glob(os.path.join(confd_path, "*.yaml"))
        all_default_configs = glob.glob(os.path.join(confd_path, "*.yaml.default"))
    except PathNotFound as e:
        log.error(
            "No conf.d folder found at '%s' or in the directory where the Agent is currently deployed.\n" % e.args[0]
        )
        sys.exit(3)

    # A *.yaml.default config only applies when no *.yaml exists for the
    # same check name.
    if all_default_configs:
        current_configs = set(_conf_path_to_check_name(conf) for conf in all_file_configs)
        for default_config in all_default_configs:
            if _conf_path_to_check_name(default_config) not in current_configs:
                all_file_configs.append(default_config)

    # Compatibility code for the Nagios checks if it's still configured
    # in datadog.conf
    # FIXME: 6.x, should be removed
    # BUG FIX: the original iterated itertools.chain(*all_file_configs),
    # i.e. the individual *characters* of every path, so the "nagios"
    # substring test could never succeed; test each path string instead.
    if not any("nagios" in config for config in all_file_configs):
        # check if it's configured in datadog.conf the old way
        if any(nagios_key in agentConfig for nagios_key in NAGIOS_OLD_CONF_KEYS):
            all_file_configs.append("deprecated/nagios")

    return all_file_configs
Пример #22
0
def find_data_files():
    """
    Find IPython's data_files.

    Most of these are docs.
    """
    docdirbase = pjoin("share", "doc", "ipython")
    manpagebase = pjoin("share", "man", "man1")

    # Simple file lists can be made by hand
    manpages = [f for f in glob(pjoin("docs", "man", "*.1.gz")) if isfile(f)]
    # When running from a source tree, the manpages aren't gzipped
    manpages = manpages or [f for f in glob(pjoin("docs", "man", "*.1")) if isfile(f)]

    igridhelpfiles = [f for f in glob(pjoin("IPython", "extensions", "igrid_help.*")) if isfile(f)]

    # For nested structures, use the utility above
    example_files = make_dir_struct("data", pjoin("docs", "examples"), pjoin(docdirbase, "examples"))
    manual_files = make_dir_struct("data", pjoin("docs", "html"), pjoin(docdirbase, "manual"))

    # And assemble the entire output list
    return (
        [(manpagebase, manpages), (pjoin(docdirbase, "extensions"), igridhelpfiles)]
        + manual_files
        + example_files
    )
Пример #23
0
 def find_in_2011style_dir(version):
     """Return the top-level install dir of a 2011-style (compiler v12)
     Intel Composer layout matching `version`, or None.

     The 2011 (compiler v12) dirs are inconsistent, so just redo the search
     from get_all_compiler_versions and look for a match (search the newest
     form first).
     """

     def _has_icc(d):
         # A usable install has an icc binary for at least one arch.
         return (os.path.exists(os.path.join(d, "bin", "ia32", "icc"))
                 or os.path.exists(os.path.join(d, "bin", "intel64", "icc")))

     # Typical dir here is /opt/intel/composer_xe_2011_sp1.11.344
     # The _sp1 is useless, the installers are named 2011.9.x, 2011.10.x, 2011.11.x
     for d in glob.glob("/opt/intel/composer_xe_*"):
         m = re.search(r"([0-9]{0,4})(?:_sp\d*)?\.([0-9][0-9.]*)$", d)
         if m and "%s.%s" % (m.group(1), m.group(2)) == version and _has_icc(d):
             return d

     # Typical dir here is /opt/intel/composerxe-2011.4.184
     for d in glob.glob("/opt/intel/composerxe-*"):
         m = re.search(r"([0-9][0-9.]*)$", d)
         if m and m.group(1) == version and _has_icc(d):
             return d

     return None
Пример #24
0
	def extract_it(self):
		"""Extract files and unencrypt if need be.

		Decrypts CollectedData *.gpg files when vars.gpg_enc is "yes",
		unzips every CollectedData-* archive into vars.outputdir via 7za,
		optionally records the incident ticket number into tickets.csv and
		the shared tracker file, then calls self.finish_up().
		NOTE(review): shell commands are built by string concatenation from
		vars.* values -- safe only while those values are trusted.
		"""
		print("Extracting Files - Unzipping Errors can usually be ignored")
		if vars.gpg_enc == "yes":
			print("Decrypting CollectedData Files")
			# Decrypt each .gpg into its non-.gpg name outside the gpg dir.
			vars.gpgNames = glob.glob(vars.tmp_trgt_dir + "\\gpg\\*.gpg")
			for xyz in vars.gpgNames:
				call("files\\src\\gpg.exe --output " + xyz.replace(".gpg","").replace("\\gpg","") + " -d " + xyz, shell=True)
		
		"""Iterate thru all files named CollectedData- and unzip --- 002, 003, 004 and so on will error and be ignored"""
		self.fileNames = glob.glob(vars.tmp_trgt_dir + "\\CollectedData-*.*")
		for item in self.fileNames:
			print(vars.outputdir)
			print("Unzipping " + item)
			print("files\\7za.exe x \"" + str(item) + "\" -o" + vars.outputdir + "\\")
			call("files\\7za.exe x \"" + str(item) + "\" -o" + vars.outputdir + "\\", shell=True)
		
		if vars.ticket_rec == "yes":
			# raw_input("Press Enter to continue...")
			print("Recording Incident Ticket Number")
			print(vars.tmp_trgt_dir + "\\tickets.csv")
			# raw_input("Press Enter to continue...")
			# Write a fresh tickets.csv header, then copy over any prior
			# ticket rows for this computer name from the tracker file.
			t=open(vars.tmp_trgt_dir + "\\tickets.csv", 'w')
			t.write("ComputerName,Date,Ticket Number,Analyst\n")
			#raw_input("Press Enter to continue...")
			tick=open(vars.ir_trk, 'r')
			for tix in tick:
				if vars.cname in tix:
					t.writelines(str(tix))
			tick.close()
			t.close()
			# Append this incident's row to the shared tracker file.
			call("echo " + str(vars.cname) + "," + str(vars.justdate) + "," + str(vars.ticket_num) + "," + str(vars.anal_nam)  + " >> " + str(vars.ir_trk), shell=True)
			print("echo " + str(vars.cname) + "," + str(vars.justdate) + "," + str(vars.ticket_num) + "," + str(vars.anal_nam)  + " >> " + str(vars.ir_trk))
			# raw_input("Press Enter to continue...")
		self.finish_up()
Пример #25
0
def align_combine(fitsdir, myfilter, examine=True):
    """Align FITS images in `fitsdir` matching the glob `myfilter` against
    the first one (IRAF imalign) and median-combine the results to out.fits.

    When `examine` is True, the first image is opened in imexamine so the
    user can log reference coordinates to coords.dat.
    """
    from pyraf import iraf

    iraf.noao(_doprint=0)
    iraf.digiphot(_doprint=0)
    iraf.apphot(_doprint=0)

    os.chdir(fitsdir)
    listfiles = glob.glob(myfilter)
    listfiles.sort()

    if examine:
        # FIX: Python 3 print calls (the original used py2 print statements).
        print("Opening ", listfiles[0], " to examine.")
        iraf.imexamine(input=listfiles[0], logfile="coords.dat", keeplog="yes")

        with open("align.list", "w") as f:
            for i in listfiles:
                f.write(i + "\n")

    print("Aligning with reference:", listfiles[0])
    iraf.imalign(input="@align.list", referenc=listfiles[0], coords="coords.dat", output="a_@align.list")

    listfiles = glob.glob("a_" + myfilter)
    listfiles.sort()
    with open("comb.list", "w") as f:
        for i in listfiles:
            f.write(i + "\n")

    print("Combining")
    iraf.imcombine(input="@comb.list", output="out.fits", combine="median")
Пример #26
0
def populate_tree(tree, node):
    """Fill a Treeview directory node with its children, inserting a dummy
    child under subdirectories so they show an expander arrow."""
    if tree.set(node, "type") != "directory":
        return

    fullpath = tree.set(node, "fullpath")
    tree.delete(*tree.get_children(node))

    # Top-level nodes also list the "." and ".." entries.
    special_dirs = [] if tree.parent(node) else glob.glob(".") + glob.glob("..")

    for entry in special_dirs + os.listdir(fullpath):
        entry_path = os.path.join(fullpath, entry).replace("\\", "/")
        if os.path.isdir(entry_path):
            entry_type = "directory"
        elif os.path.isfile(entry_path):
            entry_type = "file"
        else:
            entry_type = None

        fname = os.path.split(entry_path)[1]
        child = tree.insert(node, "end", text=fname, values=[entry_path, entry_type])

        if entry_type == "directory":
            if fname not in (".", ".."):
                tree.insert(child, 0, text="dummy")
                tree.item(child, text=fname)
        elif entry_type == "file":
            size = os.stat(entry_path).st_size
            tree.set(child, "size", "%d bytes" % size)
Пример #27
0
def cleanupFiles():
    """Reset the ArcGIS test workspace: delete modified layers and temp
    files, pick a fresh workspace directory, and copy in pristine data."""
    # First get rid of modified files
    for layer in ["l1", "l2", "l3"]:
        arcpy.Delete_management(layer)

    for f in glob.glob("C:\\Arctmp\\*"):
        # FIX: py3 print and a narrowed except clause (was a bare `except:`
        # with a py2 print statement).
        try:
            shutil.rmtree(f)
        except OSError:
            print("UNABLE TO REMOVE:", f)
    # Now remove the old directory (find the first unused workspace index)
    for i in range(0, 1000000):
        new_workspace = "C:\\Arctmp\\workspace." + str(i)
        if not os.path.exists(new_workspace):
            break
    print("TESTING USING WORKSPACE", new_workspace)
    # Now move in fresh copies
    shutil.copytree("C:\\Arcbase", new_workspace)
    print("CONTENTS:")
    arcpy.env.workspace = new_workspace
    for pattern in ("\\*.shp", "\\*.lyr", "\\*.gdb"):
        for f in sorted(glob.glob(arcpy.env.workspace + pattern)):
            print(f)
Пример #28
0
    def __init__(self, files, threads=3, verbose=False):
        """Like `Speech2Text()`, but takes a list of sound files or a directory name to search
        for matching sound files, and returns a list of `(filename, response)` tuples.
        `response`'s are described in `Speech2Text.getResponse()`.

        Can use up to 5 concurrent threads. Intended for
        post-experiment processing of multiple files, in which waiting for a slow response
        is not a problem (better to get the data).

        If `files` is a string, it will be used as a directory name for glob
        (matching all `*.wav`, `*.flac`, and `*.spx` files).
        There's currently no re-try on http error."""
        list.__init__(self)  # [ (file1, resp1), (file2, resp2), ...]
        maxThreads = min(threads, 5)  # I get http errors with 6
        self.timeout = 30
        # FIX: isinstance() instead of `type(files) == str` (idiomatic, and
        # also accepts str subclasses).
        if isinstance(files, str) and os.path.isdir(files):
            f = glob.glob(os.path.join(files, "*.wav"))
            f += glob.glob(os.path.join(files, "*.flac"))
            f += glob.glob(os.path.join(files, "*.spx"))
            fileList = f
        else:
            fileList = list(files)
        web.requireInternetAccess()  # needed to access google's speech API
        for i, filename in enumerate(fileList):
            gs = Speech2Text(filename)
            self.append((filename, gs.getThread()))  # tuple
            if verbose:
                logging.info("%i %s" % (i, filename))
            while self._activeCount() >= maxThreads:
                core.wait(0.1, 0)  # idle at max count
Пример #29
0
def teardown():
    """Remove every statepoint/source output plus results_test.dat."""
    leftovers = glob.glob(pwd + "/statepoint.*") + glob.glob(pwd + "/source.*")
    leftovers.append(pwd + "/results_test.dat")
    for path in leftovers:
        if os.path.exists(path):
            os.remove(path)
Пример #30
0
def grab_data(path, y):
    """Read a sentiment corpus from `path`/pos and `path`/neg.

    Appends label 1 (pos) or 0 (neg) to `y` for each file read, and returns
    the list of stripped file contents (positive files first).
    """

    def _read_texts():
        # Collect *.txt then *.txt~ files in the current directory and
        # return their stripped contents (the original duplicated this
        # loop for pos and neg).
        grabbed = []
        for pattern in ("*.txt", "*.txt~"):
            grabbed.extend(glob.glob(pattern))
        texts = []
        for ff in grabbed:
            with open(ff, "r") as fh:
                texts.append(fh.read().strip())
        return texts

    sentences = []
    currdir = os.getcwd()
    try:
        os.chdir("%s/pos/" % path)
        for text in _read_texts():
            sentences.append(text)
            y.append(1)
        os.chdir("%s/neg/" % path)
        for text in _read_texts():
            sentences.append(text)
            y.append(0)
    finally:
        # Always restore the working directory, even if a read fails.
        os.chdir(currdir)
    return sentences