Example #1
import os


def list_directory_contents(directory, templatename=None, templatestyle=None):
    """List Folders in a Directory"""

    # List the contents of the directory
    contents = os.listdir(directory)
    # Decide which are directories and which to keep
    if templatename and templatestyle == 'End':
        subdirs = [
            dir for dir in contents if os.path.isdir(os.path.join(directory, dir))
            and dir.endswith(templatename)
        ]
    elif templatename and templatestyle == 'Start':
        subdirs = [
            dir for dir in contents if os.path.isdir(os.path.join(directory, dir))
            and dir.startswith(templatename)
        ]
    elif templatename and templatestyle == 'Contains':
        subdirs = [
            dir for dir in contents
            if os.path.isdir(os.path.join(directory, dir)) and templatename in dir
        ]
    else:
        subdirs = [
            dir for dir in contents if os.path.isdir(os.path.join(directory, dir))
        ]
    # Construct Full Paths
    subdirs = [os.path.join(directory, dir) for dir in subdirs]

    return subdirs
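
A minimal usage sketch of the helper above (the directory name and suffix below are hypothetical):

# List subdirectories of ./projects whose names end with "_data" (hypothetical layout)
data_dirs = list_directory_contents('./projects', templatename='_data', templatestyle='End')
for d in data_dirs:
    print(d)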
Example #2
 def Rename(self, request, context):
     session_id = request.SessionId
     user = server_init._user_session.get_user_by_session(session_id)
     path = request.OldPath
     newpath = request.Path
     user_root = config.efs_file_root + '/user_' + str(user['user_id'])
     pos = path.find('/', 1)
     if pos != -1:
         linkpath = user_root + path[:path.find('/', 1)] + '.li'
         if os.path.exists(linkpath):
             fp = open(linkpath, 'r')
             per = fp.readline()
             fp.close()
             if pos != -1:
                 truepath = per + path[path.find('/', 1):]
             else:
                 truepath = per
         else:
             truepath = user_root + path
     else:
         linkpath = user_root + path + '.li'
         if os.path.exists(linkpath):
             os.rename(linkpath, user_root + newpath + '.li')
             truepath = None
         else:
             truepath = user_root + path
     if truepath is not None:
         if os.path.isdir(truepath):
             truepath = truepath + '/.metadata'
         else:
             truepath = truepath + '.metadata'
         mnode = server_init._metadata_cache.get_usable_node(truepath)
         mnode.acquire_lock()
         if mnode.empty or not mnode.obj.path == truepath:
             mnode.load(truepath)
         mnode.obj.set_attribute('name', os.path.basename(newpath))
         mnode.obj.set_attribute('fullpath', os.path.basename(newpath))
         mnode.release_lock()
         if os.path.isdir(truepath):
             truepath = truepath[:-10]
             os.rename(truepath, os.path.dirname(truepath) + '/' + os.path.basename(newpath))
         else:
             os.rename(truepath, os.path.dirname(truepath) + '/' + os.path.basename(newpath) + '.metadata')
     truepath = utils.get_true_path(user, request.Path) 
     if os.path.isdir(truepath):
         truepath = truepath + '/.metadata'
     else:
         truepath = truepath + '.metadata'
     mnode = server_init._metadata_cache.get_usable_node(truepath)
     mnode.acquire_lock()
     if mnode.empty or not mnode.obj.path == truepath:
         mnode.load(truepath)
     jstr = json.dumps(mnode.obj.get_metadata())
     mnode.release_lock()
     return GRPCServer_pb2.StringResponse(PayLoad = jstr)
Example #3
 def _paths_exist(self):
     """
     Check that the expected paths exist. Returns (True, []) if all paths exist,
     otherwise returns (False, [list of missing paths]).
     """
     all_present = True
     missing = []
     if not os.path.isdir(os.path.join(self.root_dir, "graph")):
         all_present = False
         missing.append("graph")
     if not os.path.isdir(os.path.join(self.root_dir, "data")):
         all_present = False
         missing.append("data")
     return all_present, missing
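
For context, a minimal sketch of creating the directory layout this check expects (the root path below is hypothetical):

import os

root_dir = "/tmp/example_store"  # hypothetical root directory
os.makedirs(os.path.join(root_dir, "graph"), exist_ok=True)
os.makedirs(os.path.join(root_dir, "data"), exist_ok=True)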
Example #4
 def __init__(self, cfg=None, usersin=None, plugs=None, botname=None, nick=None, bottype=None, nocbs=None, *args, **kwargs):
     logging.debug("type is %s" % str(type(self)))
     if cfg: self.cfg = cfg ; botname = botname or self.cfg.name
     if not botname: botname = u"default-%s" % str(type(self)).split('.')[-1][:-2]
     if not botname: raise Exception("can't determine botname")
     self.fleetdir = u'fleet' + os.sep + stripname(botname)
     if not self.cfg: self.cfg = Config(self.fleetdir + os.sep + u'config')
     self.cfg.name = botname or self.cfg.name
     if not self.cfg.name: raise Exception("name is not set in %s config file" % self.fleetdir)
     logging.debug("name is %s" % self.cfg.name)
     LazyDict.__init__(self)
     logging.debug("created bot with config %s" % self.cfg.tojson(full=True))
     self.ecounter = 0
     self.ids = []
     self.aliases = getaliases()
     self.reconnectcount = 0
     self.plugs = coreplugs
     self.gatekeeper = GateKeeper(self.cfg.name)
     self.gatekeeper.allow(self.user or self.jid or self.cfg.server or self.cfg.name)
     self.starttime = time.time()
     self.type = bottype or "base"
     self.status = "init"
     self.networkname = self.cfg.networkname or self.cfg.name or ""
     from jsb.lib.datadir import getdatadir
     datadir = getdatadir()
     self.datadir = datadir + os.sep + self.fleetdir
     self.maincfg = getmainconfig()
     self.owner = self.cfg.owner
     if not self.owner:
         logging.debug(u"owner is not set in %s - using mainconfig" % self.cfg.cfile)
         self.owner = self.maincfg.owner
     self.users = usersin or getusers()
     logging.debug(u"owner is %s" % self.owner)
     self.users.make_owner(self.owner)
     self.outcache = outcache
     self.userhosts = LazyDict()
     self.nicks = LazyDict()
     self.connectok = threading.Event()
     self.reconnectcount = 0
     self.cfg.nick = nick or self.cfg.nick or u'jsb'
     try:
         if not os.path.isdir(self.datadir): os.mkdir(self.datadir)
     except: pass
     self.setstate()
     self.outputlock = thread.allocate_lock()
     try:
         self.outqueue = Queue.PriorityQueue()
         self.eventqueue = Queue.PriorityQueue()
     except AttributeError:
         self.outqueue = Queue.Queue()
         self.eventqueue = Queue.Queue()
     self.laterqueue = Queue.Queue()
     self.encoding = self.cfg.encoding or "utf-8"
     self.cmndperms = getcmndperms()
     self.outputmorphs = outputmorphs
     self.inputmorphs = inputmorphs
     try:
         if nocbs: self.nocbs = nocbs.split(",")
     except ValueError: logging.error("cannot determine %s nocbs argument" % self.nocbs)
     self.lastiter = 0
Example #5
 def print_dir_content(self, dirPath):
     for child in os.listdir(dirPath):
         dirChildPath = os.path.join(dirPath, child)
         if os.path.isdir(dirChildPath):
             self.print_dir_content(dirChildPath)
         else:
             print(dirChildPath)
Example #6
def obxod_file(path, level=1):
    print('Level =', level, 'Content:', os.listdir(path))
    for i in os.listdir(path):
        if os.path.isdir(path + '\\' + i):
            print('Descending into', path + '\\' + i)
            obxod_file(path + '\\' + i, level + 1)
            print('Returning to', path)
Example #7
def tsne_hyperparameter_search(
    mat,
    tsne_dir="../../outputs/tsne",
    plot_root_name="",
    perplexities=[10, 30, 90],
    num_iters=[100, 500, 100, 2000],
    learning_rates=[100.0, 200.0, 300.0],
    early_exaggerations=[6.0, 12.0, 18.0],
    pca_dims=[4, 8, 16, None],
):
    # create a 4-D grid using the cartesian product
    # you are encouraged to use a SMALL grid otherwise this might take a LONG time
    hyperparameters = [{
        "perplexity": perp,
        "n_iter": n_iters,
        "learning_rate": lr,
        "early_exaggeration": early_ex,
        "pca_dim": pca_dim,
    } for perp, n_iters, lr, early_ex, pca_dim in cprod(
        perplexities, num_iters, learning_rates, early_exaggerations, pca_dims)]

    if os.path.isdir(tsne_dir):
        shutil.rmtree(tsne_dir)
    os.mkdir(tsne_dir)

    idx2class = None
    for kwargs in hyperparameters:
        tsne_mat = tsne(mat, **kwargs)
        filename = plot_root_name + "_tsne_" + kwargs2names(kwargs) + ".jpeg"
        plot_mat_points(tsne_mat, filename,
                        idx2class=idx2class)  # TODO add class coloring
Example #8
import os
import sentencepiece as spm


def vocab_model(input_path, vocab_size, model_type='bpe'):
    templates= '--input={} \
    --pad_id={} \
    --bos_id={} \
    --eos_id={} \
    --unk_id={} \
    --model_prefix={} \
    --vocab_size={} \
    --character_coverage={} \
    --model_type={}'
    if not os.path.isdir('vocab'):
        os.mkdir('vocab')
    output_path = f"./vocab/{input_path.split('/')[-1].split('.')[0]}"
    pad_id = 0  # set the <pad> token id to 0
    bos_id = 1  # set the <start> token id to 1
    eos_id = 2  # set the <end> token id to 2
    unk_id = 3  # set the <unknown> token id to 3
    character_coverage = 1.0 # to reduce character set 
    
    cmd = templates.format(input_path,
                    pad_id,
                    bos_id,
                    eos_id,
                    unk_id,
                    output_path,
                    vocab_size,
                    character_coverage,
                    model_type)
    spm.SentencePieceTrainer.Train(cmd)
    return output_path
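
A minimal usage sketch (the corpus path below is hypothetical); the trained model can then be loaded with SentencePieceProcessor:

prefix = vocab_model('./data/corpus.txt', vocab_size=8000)  # hypothetical corpus file
sp = spm.SentencePieceProcessor()
sp.load(prefix + '.model')  # the trainer writes <prefix>.model and <prefix>.vocab
print(sp.encode_as_pieces('hello world'))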
Example #9
def setup_data(config):
    """Set up the target data for all the different utilities to use"""
    data_dir = os.path.dirname(config.atraining)
    new = os.path.join(config.dir,"orig_data")
    name = config.atraining.split('/')[-1]
    
    if config.dir != data_dir or not os.path.isdir(new):
        copytree(data_dir,new)
        config.atraining = os.path.join(new,name)
        config.rfile = os.path.join(config.dir,"orig_data/rank_list.txt")

    #if config.base_full_data:
    #basee = "%s_base.e" % config.atraining
    #basef = "%s_base.f" % config.atraining

    basee = "%s_pseudo.e" % config.atraining
    basef = "%s_pseudo.f" % config.atraining
    etrain = "%s.e" % config.atraining
    ftrain = "%s.f" % config.atraining
    copy(basee,etrain)
    copy(basef,ftrain)

    ## copy the rank file
    copied_rank = os.path.join(config.dir,"rank_list.txt")
    copy(config.rfile,copied_rank)
Example #10
 def _add_images(self, directory):
     log = logging.getLogger(__file__)
     if (not os.path.isdir(directory)):
         log.fatal(
             "performance monitoring image directory not a directory: %s" %
             (directory, ))
         return
     for filename in os.listdir(directory):
         # only care about .png files
         if (not filename.endswith(".png")):
             log.info("Skipping non-image file: %s" % filename)
             continue
         if (filename.startswith("cpu")):
             image = latex_classes.LatexImage(
                 "cpu utilization", filename,
                 os.path.join(directory, filename))
             self._outp["cpu_resource_graphs"].append(image.get_string())
         elif (filename.startswith("disk")):
             image = latex_classes.LatexImage(
                 "disk utilization", filename,
                 os.path.join(directory, filename))
             self._outp["disk_resource_graphs"].append(image.get_string())
         elif (filename.startswith("network")):
             image = latex_classes.LatexImage(
                 "network utilization", filename,
                 os.path.join(directory, filename))
             self._outp["network_resource_graphs"].append(
                 image.get_string())
         elif (filename.startswith("ram")):
             image = latex_classes.LatexImage(
                 "ram utilization", filename,
                 os.path.join(directory, filename))
             self._outp["ram_resource_graphs"].append(image.get_string())
         else:
             log.warn(".png with unknown prefix found: %s", filename)
Example #11
def del_em(file_dict, dest_dir):
    for d, files in file_dict.items():
        if os.path.isdir(d):
            glob = '*'
            dp_io.printf("arg %s is a dir, glob(%s)? ", d, glob)
            a = sys.stdin.readline()
            if a == "\n":
                a =  glob
            files = os.listdir(d)
        for f in files:
            # if dest_file exists and is the same, del in src.:
            dest_file = os.path.join(dest_dir, f)
            num = 0
            while os.path.exists(dest_file):
                if filecmp.cmp(f, dest_file):
                    os.unlink(f)
                dp_io.printf("dest_file(%s) exists copying with modified name\n",
                             dest_file)
                name, ext = os.path.splitext(dest_file)
                dest_file = name + "-" + str(num) + ext
                num += 1
            print "os.rename(%s, %s)" % (f, dest_dir)
        remains = os.listdir(d)
        if remains:
            ans = "n"
            dp_io.printf("files remain in src dir(%s); Remove them(y/N)? ", d)
            ans = sys.std
Example #12
 def save_additional_directories(self, dirs):
     for d in dirs:
         print(">>> Saving " + d + "...")
         if os.path.isfile(d) or os.path.isdir(d):
             os.chdir("/")
             archive_path = self.backup_dir + d.replace("/", "_") + ".tar.bz2"
             bz2( tar(d, "c"), "-9", _out=archive_path )
Example #13
    def execute(self):
        ## check that last arg is a dir
        mode = "file"
        if len(self.args) > 2 and not os.path.isdir(self.args[-1]):
            raise RuntimeError(
                "Last argument must be a directory for multiple files")
        else:
            mode = "directory"

        for inode_id in self.args[:-1]:
            fd = self.environment._FS.open(inode_id=inode_id)
            if mode == 'directory':
                output_filename = inode_id
                outfd = open("%s/%s" % (self.args[-1], output_filename), 'w')
            else:
                outfd = open(self.args[-1], 'w')

            while 1:
                data = fd.read(100000)
                if not data: break
                outfd.write(data)

            outfd.close()
            yield 'Copying of %s into %s successful' % (inode_id,
                                                        self.args[-1])
Example #14
    def save_summary_image(self, outPath_root = None):
        """
        Generates and exports a stamp summary image (chip stamps concatenated)

        Arguments:
            (str) outPath_root: path of user-defined export root directory

        Returns:
            None

        """

        outPath = self.chip.data_ref.parent
        if outPath_root:
            if not os.path.isdir(outPath_root):
                em = 'Export directory does not exist: {}'.format(outPath_root)
                raise ValueError(em)
            outPath = Path(outPath_root)

        target = os.path.join(outPath, 'SummaryImages') # Wrapping folder
        os.makedirs(target, exist_ok=True)
        
        c = self.chip
        image = c.summary_image('button')
        name = '{}_{}.tif'.format('Summary', c.data_ref.stem)
        outDir = os.path.join(target, name)
        external.tifffile.imsave(outDir, image)
        logging.debug('Saved ChipQuant Summary Image | ChipQuant: {}'.format(self.__str__()))
Example #15
    def __init__(self, args, train=True):
        self.args = args
        self.train = train
        self.split = 'train' if train else 'test'
        self.scale = args.scale
        self.idx_scale = 0

        self._set_filesystem(args.dir_data)

        def _load_bin():
            self.images_hr = np.load(self._name_hrbin())
            self.images_lr = [np.load(self._name_lrbin(s)) for s in self.scale]

        if args.ext == 'img':
            self.images_hr, self.images_lr = self._scan()
        elif args.ext.find('sep') >= 0:
            self.images_hr, self.images_lr = self._scan()
            if args.ext.find('reset') >= 0:
                print('Preparing separated binary files')
                for v in self.images_hr:
                    img_hr = misc.imread(v)
                    name_sep = v.replace(self.ext, '.npy')
                    np.save(name_sep, img_hr)
                for si, s in enumerate(self.scale):
                    for v in self.images_lr[si]:
                        img_lr = misc.imread(v)
                        name_sep = v.replace(self.ext, '.npy')
                        np.save(name_sep, img_lr)

            self.images_hr = [
                v.replace(self.ext, '.npy') for v in self.images_hr
            ]
            self.images_lr = [[
                v.replace(self.ext, '.npy') for v in self.images_lr[i]
            ] for i in range(len(self.scale))]

        elif args.ext.find('bin') >= 0:
            try:
                if args.ext.find('reset') >= 0:
                    raise IOError
                print('Loading a binary file')
                _load_bin()
            except:
                print('Preparing a binary file')
                bin_path = os.path.join(self.apath, 'bin')
                if not os.path.isdir(bin_path):
                    os.mkdir(bin_path)

                list_hr, list_lr = self._scan()
                hr = [misc.imread(f) for f in list_hr]
                np.save(self._name_hrbin(), hr)
                del hr
                for si, s in enumerate(self.scale):
                    lr_scale = [misc.imread(f) for f in list_lr[si]]
                    np.save(self._name_lrbin(s), lr_scale)
                    del lr_scale
                _load_bin()
        else:
            print('Please define data type')
Example #16
def rm_file_or_dir(path) :
    if not os.path.exists(path) :
        logging.info("file to be removed does not exist: %s" % (path))
        return
    if os.path.isdir(path) :
        shutil.rmtree(path)
    else :
        os.remove(path)
def countFiles(path, num=0):
    from os import listdir
    from os.path import isfile, isdir, join
    for content in listdir(path):
        if isfile(join(path, content)):
            num += 1
    for content in listdir(path):
        if isdir(join(path, content)):
            num += countFiles(join(path, content))
    return num
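
For reference, a compact alternative sketch that counts files with os.walk instead of explicit recursion:

import os

def count_files_walk(path):
    # Sum the number of regular files in every directory under path.
    return sum(len(files) for _, _, files in os.walk(path))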
Example #18
def rm_file_or_dir(path):
    if not os.path.exists(path):
        logging.info("file to be removed does not exist: %s" % (path))
        return
    if os.path.isdir(path):
        shutil.rmtree(path)
    else:
        os.remove(path)
 def __Validate(webapilogpfolder):
     """
     * Validate constructor arguments.
     """
     if not isinstance(webapilogpfolder, str):
         raise Exception('webapilogpfolder must be a string.')
      elif not os.path.isdir(webapilogpfolder):
         raise Exception('webapilogpfolder does not point to valid folder.')
Example #20
def main():
    parser = argparse.ArgumentParser(
        description='LJD-mod: LuaJit raw-bytecode Decompiler modified')
    parser.add_argument('-i',
                        help='Input file/folder.',
                        dest='input',
                        type=str,
                        required=True)
    parser.add_argument('-o', help='Output folder.', dest='output', type=str)

    args = parser.parse_args()

    assert os.path.exists(args.input), 'The input does not exist'

    # prepare variables to store files that throw exceptions during decompilation.
    errorred_files = []

    if os.path.isfile(args.input):
        ast = do_decompile(args.input)
        ljd.lua.writer.write(sys.stdout, ast)
    else:
        assert args.output is not None, \
            'Please specify an output directory when the input is a directory.'

        if not os.path.exists(args.output):
            os.mkdir(args.output)

        files = load_file_from_dir(args.input)

        for file in files:
            print('Decompiling ' + file)
            basename = os.path.basename(file)
            file_name, file_extension = os.path.splitext(basename)
            # ".bytes" is for files extracted from Unity game engine.
            # You will want to add more extensions since they usually vary from project to project.
            if file_extension not in ['.lua', '.luac', ".bytes"]:
                continue

            try:
                ast = do_decompile(file)
            except Exception as e:
                print('%s occurred when decompiling %s' %
                      (type(e).__name__, file))
                errorred_files.append([file, type(e).__name__])
                continue

            with codecs.open(args.output + '\\' + file_name, 'w',
                             'utf-8') as f:
                ljd.lua.writer.write(f, ast)

        print(
            '================================================================')
        print(
            'Decompilation Done! Error and skipped files are listed as below:')
        print(
            '================================================================')
        for file in errorred_files:
            print(file[0] + ', with Exception of %s' % file[1])
Example #21
def down(feed, z):
    feed = check(feed)  # validate the feed
    text = str(urlopen(feed).read(), encoding="utf-8")  # download the text
    tmpfolder = "/usr/lib/Advanced-Feed/tmp/"  # set the temp folder path
    if not os.path.isdir(tmpfolder):  # if the folder does not exist yet:
        os.mkdir(tmpfolder)           # create it
    os.chdir(tmpfolder)  # change into the folder
    file = open(tmpfolder + "/" + z, "w")
    file.write(text)
    file.close()
Example #22
def disk_usage(path):
    total=os.path.getsize(path)
    if os.path.isdir(path):
        for filename in os.listdir(path):
            childpath=os.path.join(path,filename)
            total+=disk_usage(childpath)

    print('{0:<7}'.format(total), path)
    return total
def dirSize(path,size=0):
    from os import listdir, stat
    from os.path import isfile, isdir, join
    for content in listdir(path):
        if isfile(join(path, content)):
            size += stat(join(path, content)).st_size
    for content in listdir(path):
        if isdir(join(path, content)):
            size += dirSize(join(path, content))
    return size
Example #24
        def read_messages(message, mlast=False):
            msg = service.users().messages().get(userId='me',
                                                 id=message['id'],
                                                 format='full').execute()
            payload = msg['payload']
            headers = payload.get("headers")
            parts = payload.get("parts")
            cur_folder = os.getcwd()
            folder_name = "null emails"
            has_subject = False
            main = {'id': message['id']}
            if headers:
                for header in headers:
                    name = header.get("name")
                    value = header.get("value")
                    if name.lower() == 'from':
                        main["from"] = value
                    if name.lower() == "to":
                        main["to"] = value
                    if name.lower() == "subject":
                        has_subject = True
                        folder_name = clean(value)
                        folder_counter = 0
                        while os.path.isdir(folder_name):
                            folder_counter += 1
                            if folder_name[-1].isdigit(
                            ) and folder_name[-2] == "_":
                                folder_name = f"{folder_name[:-2]}_{folder_counter}"
                            elif folder_name[-2:].isdigit(
                            ) and folder_name[-3] == "_":
                                folder_name = f"{folder_name[:-3]}_{folder_counter}"
                            else:
                                folder_name = f"{folder_name}_{folder_counter}"
                        if self.mkdir:
                            os.chdir(cur_folder)
                            os.mkdir(folder_name)
                        main["sub"] = value
                    if name.lower() == "date":
                        main["date"] = value

            if not has_subject and self.mkdir:
                if not os.path.isdir(folder_name):
                    os.chdir(cur_folder)
                    os.mkdir(folder_name)

            if call != True:
                if mlast == True:
                    parse_parts(mlast, msg, parts, folder_name, message, main)
                    print("=" * 75, "\n")
                    print("That's all I had for you.")
                    return
                else:
                    parse_parts(mlast, msg, parts, folder_name, message, main)
                    print("=" * 75, "\n")
            else:
                return (main['id'], main['sub'], main['from'], main['date'],
                        main['to'])  #,main['body'])
Example #25
 def get_ids_from_home(self, base='/home'):
     """Returns the owner's UID and GID of the /home/<username>
     if this exists or None.
     """
     path = os.path.join(base, self.name)
      if os.path.isdir(path):
         stat = os.stat(path)
         return [stat.st_uid, stat.st_gid]
     return None
 def __Validate(filewatcherlogfolder):
     """
     * Validate constructor parameters.
     """
     if not isinstance(filewatcherlogfolder, str):
         raise Exception('filewatcherlogfolder must be a string.')
      elif not os.path.isdir(filewatcherlogfolder):
         raise Exception(
             'filewatcherlogfolder does not point to valid folder.')
Example #27
def BunchCrypt(key, path):
    os.makedir("hidden")
    os.system('attrib' + path + "\\hidden" + '+H')
    files = os.walk(path)
    for i in files:
        if not os.isdir(i):
            os.system("move" + i + " " + os.path.dirname(i) + "\\hidden" +
                      os.path.basename(i))
            Main(i, key)
 def run(self):
     zone = dns.zone.from_xfr(dns.query.xfr(**self.config["query"]),
                              **self.config["zone"])
     if self.save:
          if not os.path.isdir("./dns-zones"):
             os.mkdir("./dns-zones/")
         zone.to_file(f"./dns-zones/{self.name}.txt", sorted=True)
     else:
         print(colored(zone.to_text().decode("ascii"), dark=True))
Example #29
def get_child_paths(path):
	# Just take a single directory's items
	children_paths = []
	if os.path.isdir(path):
		for child in os.listdir(path):
			children_paths.append(path+'/'+child)
		return children_paths
	else:
		return 'Error path object is not a directory.'
Example #30
def batch_annotate_images(path_to_parent_directory,
                          path_to_annotation_directory):
    #directories = listdir_fullpath(path_to_parent_directory)
    directories = os.listdir(path_to_parent_directory)
    for dir in directories:
        if os.path.isdir(os.path.join(path_to_parent_directory, dir)):
            path = path_to_parent_directory + '/' + dir
            convert_masks_to_annotation(path,
                                        path_to_annotation_directory,
                                        write=True)
Example #31
def main():
    if sys.argv[1] == "-c":
        print "List of missing files/dirs:"
        backup_list = open("/home/tj/.backup-list")
        for line in backup_list:
            path = os.path.join("/home/tj", line.strip())
            if os.path.isdir(path) or os.path.isfile(path):
                print line
        print "End of List"
        sys.exit(0)
Example #32
import errno
import os


def ensure_directory(path):
    """
    Ensure that a directory named "path" exists.
    """

    try:
        os.makedirs(path)
    except OSError as exc:
        if exc.errno != errno.EEXIST or not os.path.isdir(path):
            raise
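
For reference, on Python 3.2+ the same effect is available directly; a minimal equivalent sketch:

import os

def ensure_directory_py3(path):
    # Creates intermediate directories as needed and does not raise if path already exists.
    os.makedirs(path, exist_ok=True)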
Example #33
def cleanup_after_training(dataset_path, model_name):
    """Deleting all files except latest_net_G.pth file"""

    shutil.rmtree(dataset_path)
    model_path = os.path.join(PIX2PIX_PATH, 'checkpoints', model_name)
    for f in os.listdir(model_path):
        path = os.path.join(model_path, f)
        if os.path.isdir(path):
            shutil.rmtree(path)
        elif f != 'latest_net_G.pth':
            os.remove(path)
Example #34
def check_data_dir(path):
    """
    check data_dir
    """
    err = "Data path {} does not exist, please provide a valid path".format(path)
    try:
        assert os.path.isdir(path)
    except AssertionError:
        logger.error(err)
        sys.exit(1)
Example #35
def expandpath(path):
    """Expand (fully qualify) an arbitrary path to an existing file or directory.

    If path does not map to an existing file the pathname is returned
    unchanged.
    """
    if os.path.isdir(path):
        return expand_dir_path(path)
    elif os.path.isfile(path):
        return expand_file_path(path)
    else:
        return path
Example #37
    def __init__(self, io, cache_dir, whitelist='a-z0-9', filesystem=None):
        self.__io = io
        self.__root = cache_dir.rstrip('/')
        self.__whitelist = whitelist
        self.__filesystem = filesystem or Filesystem()
        self.__enabled = True

        if not (os.path.isdir(self.__root)
            or Silencer.call('mkdir', self.__root, 0o777, True))\
            and not os.access(self.__root, os.W_OK):
            self.io.write_error('<warning>Cannot create cache directory {}, or directory is not writable. Proceeding without cache</warning>'.format(self.__root))
            self.__enabled = False
Example #38
def movehelper(ext,src,dest):
	files=os.listdir(src)
	print(files)
	for f in files:
		if os.path.isdir(os.path.join(src,f)):
			new_src=os.path.join(src,f)
			movehelper(ext,new_src,dest)
		else:
			print(f.split(".")[-1])
			if f.split(".")[-1]==ext:
				filename=src.split("/")[-1].split(".")[0]
				shutil.copy2(src+'/'+f,dest+"/"+filename+"."+ext)
Example #39
def cleanReferenceObjects(user):
	"""
		Cleans references objects found in database
		that no longer correspond to files or folders
		on the system.

	"""
	for folder in Folder.objects.by_user(user):
		if not os.path.isdir(expand_path(user, folder.path)):
			folder.delete()

	for file in File.objects.by_user(user):
		file.read()
Example #40
    def start(self):
        if self.observer is not None:
            self.stop()
            self.join()
            del self.observer

        self.observer = Observer()
        for elem in os.listdir(self.maildirs_path):
            for subdir in ('cur', 'new'):
                mail_subdir = os.path.join(self.maildirs_path, elem, subdir)
                if os.path.isdir(mail_subdir):
                    handler = MaildirSubdirHandler(mail_subdir)
                    self.observer.schedule(handler, mail_subdir)
Example #41
def read_rfid_port(args):
    """this will interact with some kind of sensor that can get ID details;
       for now, it just looks to the audio directory, and returns the name
       of a randomly chosen subdirectory which contains a complete set of
       audio files"""
    # once we can do so, poll RFID port for presence of tag;
    # if it exists, return its ID, else return standard
    # for now, we will do this by simply randomly selecting a
    # directory of audio files to use
    basedir = '/home/pi/simon/audio'
    if args.soundset:
        if os.path.isdir(args.soundset):
            return args.soundset
        elif os.path.isdir('%s/%s' % (basedir, args.soundset)):
            return '%s/%s' % (basedir, args.soundset)
        else:
            print('could not find sound set at %s '
                  '(also checked %s)' % (args.soundset, basedir))
    dirs = subprocess.Popen(['ls', '-1', basedir],
                            stdout=subprocess.PIPE).stdout.read()
    options = dirs[:-1].split('\n')
    return random.choice(options)
Example #42
def match_subj_output_dir(OutputDir, subj):
    match_dir = []
    already_match = None
    for sub_dir in os.listdir(OutputDir):
        if os.path.isdir(os.path.join(OutputDir, sub_dir)):
           if sub_dir.find(subj) != -1:
              if not already_match:
                 already_match = 1
              else:
                 print 'Error: ', subj, ' multi-match ',  
                 print '\t', match_dir
                 print '\t', OutputDir + os.sep + sub_dir
                 return None
              match_dir = OutputDir + os.sep + sub_dir
    return match_dir
Example #43
 def update_media(self, base_path=getattr(settings, "MEDIA_ROOT", "")):
     """
      Back up everything under the path given by settings.MEDIA_ROOT to S3.
      Destination path:
      /{{project_name}}/media/{{dir_name}}/{{fn}}
     """
     for fn in os.listdir(base_path):
         path = os.path.join(base_path, fn)
          if ( os.path.isdir(path) ):
              self.update_media(path)
          if not ( os.path.isfile(path) ):
              # symbolic links, etc.
              continue
          # ToDo: save to S3
         return
Example #44
    def sendMessage(self, target='page', package=None, topic=None, msg=None):
        msgbag = Bag()
        if target == 'page':
            msgbag['page'] = msg
            fname = self.pageLocalDocument(os.path.join('_messages', '%s.xml' % getUuid()))
            msg.toXml(fname)
        elif target == 'connection':
            msgbag['connection'] = msg
            msgid = '%s.xml' % getUuid()
            for page in os.listdir(self.connectionFolder):
                pagedir = os.path.join(self.connectionFolder, page)
                if os.path.isdir(pagedir):
                    msg.toXml(os.path.join(pagedir, '_messages', msgid))

        elif target == 'all':
            pass
Example #45
File: lib.py Project: drekels/pykfs
def resolve_git_dir(start=None):
    value = None
    branch, leaf = os.path.split(start)
    if leaf == ".git"  or os.path.basename(branch) == ".git":
        value = start
    else:
        directory = start or os.getcwd()
        while directory and directory != '/':
            gitdir = os.path.join(directory, ".git")
            if os.path.isdir(gitdir):
                value = gitdir
                break
            directory = os.path.dirname(directory)
    if value:
        LOG.debug("Git directory for '{0}' resolved to '{1}'".format(start, value))
    else:
        LOG.warning("Unable to find git directory for '{0}'".format(start))
    return value
def unix_lib_is64bit(lib_file):
    """Check if a library on UNIX is 64 bit or not

    This function uses the `file` command to check if a library on
    UNIX-like platforms is 32 or 64 bit.

    Returns True if the library is 64bit.

    Raises ValueError when magic of header is invalid.
    Raises IOError when file could not be read.
    Raises OSError when execute on none-Windows platform.

    Returns True or False.
    """
    if os.name != 'posix':
        raise OSError("unix_lib_is64bit only useful on UNIX-like systems")

    if os.path.isdir(lib_file):
        mysqlclient_libs = []
        for root, _, files in os.walk(lib_file):
            for filename in files:
                filepath = os.path.join(root, filename)
                if filename.startswith('libmysqlclient') and \
                   not os.path.islink(filepath) and \
                   '_r' not in filename and \
                   '.a' not in filename:
                    mysqlclient_libs.append(filepath)
            if mysqlclient_libs:
                break
        # give priority to .so files instead of .a
        mysqlclient_libs.sort()
        lib_file = mysqlclient_libs[-1]

    log.debug("# Using file command to test lib_file {0}".format(lib_file))
    cmd_list = ['file', '-L', lib_file]
    prc = Popen(cmd_list, stdin=PIPE, stderr=STDOUT,
                stdout=PIPE)
    stdout = prc.communicate()[0]
    stdout = stdout.split(':')[1]
    log.debug("# lib_file {0} stdout: {1}".format(lib_file, stdout))
    if 'x86_64' in stdout or 'x86-64' in stdout or '32-bit' not in stdout:
        return True

    return False
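
A usage sketch (the library path below is hypothetical and will differ per distribution):

# Hypothetical location; pass either a directory to search or a concrete library file.
print(unix_lib_is64bit('/usr/lib/libmysqlclient.so'))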
Example #47
	def getFileList(self, dirname):
		alist = []
		result = []
		if (os.path.exists(dirname) and os.path.isdir(dirname)):
			alist = os.listdir(dirname)
			for item in alist:
				if item:
					filename = os.path.join(dirname, item)
					fileStruc = [] #list of: 0 = full_path, 1 = file_name, 2 = file_ext
					if (not os.path.isdir(filename)):
						fileStruc.append(filename)
						matchObj = self.fileDesc.search(filename)
						if matchObj:
							fileStruc.append(matchObj.group(1))
							fileStruc.append(matchObj.group(2))
							if fileStruc[2] == 'history':
								result.append(fileStruc)
		return result
Example #48
    def __init__(self, filename):
        if not os.path.exists(filename):
            try:
                os.mkdir(os.path.expanduser(filename))
            except:
                raise Exception("Could not create directory '%s'" % os.path.expanduser(filename))
            try:
                datafile = open('%s/data' % filename, 'w')
                indexfile = open('%s/index' % filename, 'w')
            except:
                raise Exception("Could not create database files")

        elif not os.path.isdir(filename):
            raise Exception('Specified filename is not a directory')
        try:
            self.datafile = open('%s/data' % filename, 'r+')
            self.index = open('%s/index' % filename, 'r+')
        except:
            raise Exception('Database files appear to be corrupted')
Example #49
 def list_input_directory(self, input_path, recursive=False):
     """
     Loop over directories on the filesystem.
     """
     
     RealTimeLogger.get().info("Enumerating {} from "
         "FileIOStore in {}".format(input_path, self.path_prefix))
     
     for item in os.listdir(os.path.join(self.path_prefix, input_path)):
          if(recursive and os.path.isdir(os.path.join(self.path_prefix, input_path, item))):
             # Recurse on this
             for subitem in self.list_input_directory(
                 os.path.join(input_path, item), recursive):
                 
                 # Make relative paths include this directory name and yield
                 # them
                 yield os.path.join(item, subitem)
         else:
             # This isn't a directory or we aren't being recursive
             yield item
Example #50
def create_plugin(argv):
    names, type_names = _get_names(argv)

    if os.path.isdir("plugin"):
        # We're in the root dir
        plugin_dir = os.path.join("plugin", names.base_name)
        os.mkdir(plugin_dir)
        os.chdir(plugin_dir)
    elif os.path.isdir("../plugin"):
        # We're in plugin root dir
        os.mkdir(names.base_name)
        os.chdir(names.base_name)
    else:
        # Don't know where the hell we are
        print("""add plugin was called from an odd location.
Please either call it from the plugin dir or from the root of the source
tree""")
        sys.exit(4)

    _create_project(names, type_names, project_type="drizzle_plugin")
    return names.base_name
Example #51
def clean_dir():
  """
  Cleans the current directory of .py and .pyo files iff a .pyc exists.
  Recursively calls itself on all sub-directories discovered in the process.
  """
  print os.getcwd()
  files = set(os.listdir('.'))
  for file in files:
    # Does it end in .pyc?
    if file.endswith('.pyc'):
      # If so, do we have a .py or .pyo?
      if os.path.exists(file[:-1]):
        os.remove(file[:-1])
        singletonfiles.add(file)
      if os.path.exists(file[:-1]+'o'):
        os.remove(file[:-1]+'o')
        singletonfiles.add(file)
    # Is it a directory?
    if os.path.isdir(file):
      os.chdir(file)
      clean_dir()
      os.chdir('..')
Example #52
def rebuild(wipe_sysgen=False):
    '''
    Designed to be run from shell. 
    Will wipe DB and load data from file system.
    '''
    url_list = []
    for root, dirs, files in os.walk(PAGE_PATH):
        head = root.replace(PAGE_PATH, '')
        path = head.split(os.sep)
        for file in files:
            url = '/'.join(path + [file])
            if url[-1:] == '_':
                url = url[:-1]
            else:
                url = url + '/'
            print url
            url_list.append(url)

    confirm = raw_input('About to create {} pages. Ready to wipe DB ([y]/n)? '.format(len(url_list)))
    if confirm and confirm.upper() != 'Y':
        print 'Aborting...'
        sys.exit()
        
    print 'Deleting all Page data'
    Page.objects.all().delete()

    if wipe_sysgen:
        print 'Wiping sysgen'
        for file in os.listdir(SYSGEN_PATH):
            file_path = os.path.join(SYSGEN_PATH, file)
            if os.path.isdir(file_path):
                shutil.rmtree(file_path)
            else:
                os.unlink(file_path)

    for url in sorted(url_list):
        print 'Creating: ', url
        page = Page(url=url)
        page.update()
Example #53
def rebuild(pull_docinfo=True, wipe_sysgen=False):
    '''
    Designed to be run from shell. 
    Will wipe DB and load data from file system.
    '''
    pg_list = []
    for root, dirs, files in os.walk(wiki_pages_path):
        head = root.replace(wiki_pages_path, '')
        path = head.split(os.sep)
        for file in files:
            pg = '/'.join(path + [file])
            if pg[-1:] == '_':
                pg = pg[:-1]
            else:
                pg = pg + '/'
            print pg
            pg_list.append(pg)

    confirm = raw_input('About to create {} pages. Ready to wipe DB ([y]/n)? '.format(len(pg_list)))
    if confirm and confirm.upper() != 'Y':
        print 'Aborting...'
        sys.exit()
        
    print 'Deleting all Page data'
    Page.objects.all().delete()

    if wipe_sysgen:
        print 'Wiping sysgen'
        for file in os.listdir(SYSGEN_FOLDER):
            file_path = os.path.join(SYSGEN_FOLDER, file)
            if os.path.isdir(file_path):
                shutil.rmtree(file_path)
            else:
                os.unlink(file_path)

    for pg in sorted(pg_list):
        print 'Creating: ', pg
        page = Page(pg=pg)
        page.update(pull_docinfo=pull_docinfo)
def unix_lib_is64bit(lib_file):
    """Check if a library on UNIX is 64 bit or not

    This function uses the `file` command to check if a library on
    UNIX-like platforms is 32 or 64 bit.

    Returns True if the library is 64bit.

    Raises ValueError when magic of header is invalid.
    Raises IOError when file could not be read.
    Raises OSError when execute on none-Windows platform.

    Returns True or False.
    """
    if os.name != 'posix':
        raise OSError("unix_lib_is64bit only useful on UNIX-like systems")

    if os.path.isdir(lib_file):
        mysqlclient_lib = None
        for root, dirs, files in os.walk(lib_file):
            for filename in files:
                filepath = os.path.join(root, filename)
                if filename.startswith('libmysqlclient') and \
                    not os.path.islink(filepath) and \
                    '_r' not in filename:
                    mysqlclient_lib = filepath
                    break
            if mysqlclient_lib:
                break
        lib_file = mysqlclient_lib

    prc = Popen(['file', lib_file], stdin=PIPE, stderr=STDOUT, stdout=PIPE)
    stdout = prc.communicate()[0]

    if 'x86_64' in stdout or 'x86-64' in stdout:
        return True

    return False
Example #55
 def execute(self):
     ## check that last arg is a dir
     mode = "file"
     if len(self.args)>2 and not os.path.isdir(self.args[-1]):
         raise RuntimeError("Last argument must be a directory for multiple files")
     else:
         mode = "directory"
     
     for inode_id in self.args[:-1]:
         fd=self.environment._FS.open(inode_id=inode_id)
         if mode =='directory':
             output_filename = inode_id
             outfd = open("%s/%s" % (self.args[-1], output_filename),'w')
         else:
             outfd = open(self.args[-1],'w')
             
         while 1:
             data=fd.read(10000)
             if not data: break
             outfd.write(data)
                 
         outfd.close()
         yield 'Copying of %s into %s successful' % (inode_id,self.args[-1])
Example #56
    def _CreateIntermediateDirectories(components):
        """Recursively create intermediate directories."""
        path = os.sep

        if aff4.WIN32:
            # On windows we do not want a leading \ (e.g. C:\windows not
            # \C:\Windows)
            path = ""

        for component in components:
            path = path + component + os.sep
            LOGGER.info("Creating intermediate directories %s", path)

            if os.path.isdir(path):
                continue

            # Directory does not exist - Try to make it.
            try:
                aff4_utils.MkDir(path)
                continue
            except IOError as e:
                LOGGER.error(
                    "Unable to create intermediate directory: %s", e)
                raise
Example #57
def import_nat_logs(path):
    '''Imports NAT logs from a tarfile or directory'''

    insure_nat_table()

    if os.path.isdir(path):
        for path, _, files in os.walk(path):
            for name in files:
                fullpath = os.path.join(path, name)
                if os.path.isfile(fullpath):
                    with open(fullpath, 'rb') as f:
                        import_nat_log(io.BufferedReader(f), name)

    else:
        try:
            tf = tarfile.open(path, mode='r')
        except tarfile.TarError as e:
            log.error(e)
            raise IOError('Error opening %s as tar file' % path) from e

        with tf:
            for member in tf:
                if member.isfile():
                    import_nat_log(tf.extractfile(member), member.name)
Example #58
def check_for_source_directory():
    """ensure the directory that holds the source exists"""
    if not os.path.isdir(SOURCE_DIRECTORY):
        run('mkdir -p {0}'.format(SOURCE_DIRECTORY))
    else:
        pass
Example #59
File: setup.py Project: cglewis/ns
			serverip = core.ipgrab()
			print("Website created at "+serverip+"/scoutwebsite.php \n")

			answer=raw_input("Do you wish to reboot your pi? [yes|no]    ")
			if answer.lower() == 'y' or answer.lower() == 'yes':
				subprocess.Popen("reboot", shell=True)
			else:
				pass

	elif option == 2:
		print("[*]********** Installing network-scout...")
		core.kill_artillery()
		if not os.path.isdir("/var/networkscout"):
			os.mkdir("/var/networkscout")
			subprocess.Popen("cp -r ./* /var/networkscout/", shell=True).wait()
		elif os.path.isdir("/var/networkscout"):
			errormes = raw_input("Network scout already exists. Do you want to continue?")
			if errormes.lower() == 'y' or errormes.lower() == 'yes':
				subprocess.Popen("rm -rf /var/networkscout/", shell=True).wait()
				subprocess.Popen("cp -r ./* /var/networkscout/", shell=True).wait()
			else:
				pass
		else:
			pass


		#modifying artillery
		print("[*]**********Modding Artillery for NS logging...")
		mod = open("stuff/artilleryfunction", "r")
		contents = mod.read()