Example #1
def mergeConfigurationFiles(base_config_file_path,
                            update_from_config_file_path, merged_save_to_path):
    """Merges two iohub configuration files into one and saves it to a file
    using the path/file name in merged_save_to_path."""
    base_config = yload(open(base_config_file_path, 'r'), Loader=yLoader)
    update_from_config = yload(open(update_from_config_file_path, 'r'),
                               Loader=yLoader)

    def merge(update, base):
        if isinstance(update, dict) and isinstance(base, dict):
            for k, v in base.items():
                if k not in update:
                    update[k] = v
                else:
                    if isinstance(update[k], list):
                        if isinstance(v, list):
                            v.extend(update[k])
                            update[k] = v
                        else:
                            update[k].insert(0, v)
                    else:
                        update[k] = merge(update[k], v)
        return update

    merged = merge(copy.deepcopy(update_from_config), base_config)
    ydump(merged, open(merged_save_to_path, 'w'), Dumper=yDumper)

    return merged
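Most of these snippets rely on PyYAML entry points bound to short aliases (yload, ydump, yLoader, yDumper); Example #5 and Example #19 below show the import pattern explicitly. A minimal sketch of the import block Example #1 appears to assume, plus a hypothetical call — both are assumptions, not code from the original project:

import copy
# Assumed aliases; SafeLoader/SafeDumper would be the more defensive choice
# if the configuration files are not fully trusted.
from yaml import load as yload, dump as ydump, Loader as yLoader, Dumper as yDumper

# Hypothetical file names, for illustration only:
# merged = mergeConfigurationFiles('iohub_base.yaml', 'iohub_local.yaml', 'iohub_merged.yaml')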
Example #2
    def dump(self, data, filename = None):

        # Dump to file
        if filename:
            filename = f'{filename}.yaml'
            self.open(filename, 'write')
            if not self.error:
                try:
                    ydump(
                        data,
                        default_flow_style = False,
                        sort_keys = False,
                        stream = self.fd
                    )
                except Exception as e:
                    YAML.error.add(error['yaml']['save'].format(
                        e = e,
                        filename = self.name
                    ))
            data = None

        # Return dump
        else:
            try:
                data = ydump(
                    data,
                    default_flow_style = False,
                    sort_keys = False
                )
                data = data[:-1]
            except Exception as e:
                YAML.error.add(error['yaml']['dump'].format(e = e))

        return data
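The else-branch above relies on yaml.dump returning the serialized text when no stream is given, and on that text always ending with a newline (hence the data[:-1] trim). A standalone sketch of that behaviour:

from yaml import dump as ydump

text = ydump({'name': 'demo', 'items': [1, 2]}, default_flow_style=False, sort_keys=False)
assert text.endswith('\n')   # dump() terminates the document with a newline
print(text[:-1])             # the same trailing-newline trim used above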
Example #3
 def save_state(self, filename, mode='yaml'):
     if mode == 'yaml':
         with open(filename + '-state.yaml', 'w') as yd:
             ydump(self.computed_desc_dict, yd, sort_keys=True, Dumper=Dumper)
     else:
         with open(filename + '-state.json', 'w') as jd:
             json.dump(self.computed_desc_dict, jd, sort_keys=True, cls=NpEncoder)
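NpEncoder is not defined in any of these snippets; it is presumably a json.JSONEncoder subclass that turns NumPy scalars and arrays into plain Python types. A common shape for such a helper (an assumption, not the project's actual class):

import json
import numpy as np

class NpEncoder(json.JSONEncoder):
    # Hypothetical NumPy-aware encoder so json.dump can serialize NumPy values.
    def default(self, obj):
        if isinstance(obj, np.integer):
            return int(obj)
        if isinstance(obj, np.floating):
            return float(obj)
        if isinstance(obj, np.ndarray):
            return obj.tolist()
        return super().default(obj)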
Example #4
 def save_state(self, filename, mode='json'):
      if mode == 'yaml':
          with open(filename+'-clustering-state.yaml', 'w') as yd:
              ydump(self.pack(), yd, sort_keys=True, Dumper=Dumper)
      else:
          with open(filename+'-clustering-state.json', 'w') as jd:
              json.dump(self.pack(), jd, sort_keys=True, cls=NpEncoder)
Example #5
    def test_down_the_rabbit_hole(self):
        """ simulate a changed object graph """
        try:

            from yaml import dump as ydump, safe_load as yload

            somethingtotest = SomethingToTest()
            somethingtotest.var3 = Subvar("3")

            yaml_ = ydump(somethingtotest)

            # probably not a good idea with untrusted data
            data = yload(yaml_)

            somevar = "somevalue"

            self.assert_exp(data, self.extension)
            somethingtotest.added_this = dict(somevar=somevar)
            somethingtotest.var3.value = "3++"

            yaml_ = ydump(somethingtotest)

            # probably not a good idea with untrusted data
            data = yload(yaml_)

            self.assert_exp(data, self.extension)

        except (Exception,) as e:
            if cpdb():
                pdb.set_trace()
            raise
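For context on the 'untrusted data' comments above: dumping an arbitrary Python object with PyYAML's default Dumper produces a !!python/object tag, and only the full (unsafe) loader reconstructs the instance, while safe_load rejects unknown tags. A minimal standalone sketch (the class is made up):

import yaml

class Point:
    def __init__(self, x=0, y=0):
        self.x, self.y = x, y

doc = yaml.dump(Point(1, 2))                   # emits a "!!python/object:...Point" tag
restored = yaml.load(doc, Loader=yaml.Loader)  # the full loader rebuilds the instance
# yaml.safe_load(doc) would raise a ConstructorError for the python/object tag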
Example #6
    def save_descriptor_acronym_state(self, filename, mode='yaml'):

        if mode == 'yaml':
            with open(filename + '-descriptor-acronyms.yaml', 'w') as yd:
                ydump(self.tag_to_acronym, yd, sort_keys=True)
        else:
            with open(filename + '-descriptor-acronyms.json', 'w') as jd:
                json.dump(self.tag_to_acronym, jd, sort_keys=True, cls=NpEncoder)
Example #7
def dump_yml(role_dir: str, f: str, y) -> None:
    log = logging.getLogger('ansible-galaxy-local-deps.dump.dump_yml')
    of = os.path.join(role_dir, f)
    log.info('writing out {}...'.format(of))
    with open(of, 'w') as s:
        ydump(
            y,
            stream=s,
            explicit_start=True
        )
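explicit_start=True prepends the YAML document-start marker ('---'), which some YAML linters expect at the top of a file. A quick sketch of its effect:

from yaml import dump as ydump

print(ydump({'dependencies': []}, explicit_start=True), end='')
# ---
# dependencies: []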
Example #8
 def save(self, filePath=None):
     self._filePath = expanduser(filePath or self._filePath)
     try:
         with open(self._filePath, 'wt') as f:
             ydump(self.copy(), f)
         self.changed = False
         return True
      except Exception:
         warning("File %s can't be written" % self._filePath)
         return False
Example #9
 def _getmaps(self, noask=None):
     if not self.maps:
         maplst = self.gameini
         if len(self.servers[self.srv]) > 1:
             maplst = self.servers[self.srv][1]
         if maplst.startswith('~'):
             maplst = expanduser(maplst)
         if not maplst.startswith('/'):
             print(
                 'error: cannot read maplist if no absolute path is provided'
             )
         with open(maplst, 'r') as mfh:
             lines = mfh.readlines()
         try:
             with open(expanduser(self.maptbl), 'r') as mfh:
                 self.mapnames = yload(mfh.read(), Loader=Loader)
         except FileNotFoundError:
             with open(expanduser(self.maptbl), 'w+') as mfh:
                 mfh.write(ydump({}, Dumper=Dumper))
             self.mapnames = {}
         for l in lines:
             if not l or not l.startswith('MapRotation'):
                 continue
             ugcid = l.split('MapId="')[1].split('", ')[0]
             gmmod = l.split('GameMode="')[1].split('")')[0]
             name = self._getmapname(ugcid)
             self.maps[name] = [ugcid, gmmod]
             self.mapnames[ugcid] = name
         with open(expanduser(self.maptbl), 'w+') as mfh:
             mfh.write(ydump(self.mapnames, Dumper=Dumper))
         if noask: return
     ask = [
         iList(
             'map',
             carousel=True,
             message='select map',
             choices=[m for m in self.maps.keys()] + ['<Return>'],
         ),
     ]
     mapp = list(prompt(ask).values())[0]
     if mapp == '<Return>':
         return
     mmod = self.maps[mapp][1]
     modes = [mmod] + [s for s in ['SND', 'TDM', 'DM', 'GUN'] if s != mmod]
     ask = [
         iList(
             'mod',
             carousel=True,
             message='select mode (irrelevant if set by map)',
             choices=[m for m in modes] + ['<Return>'],
         ),
     ]
     mode = list(prompt(ask).values())[0]
     if mode != '<Return>':
         return '%s %s' % (self.maps[mapp][0], mode)
Example #10
def yaml_write(path, input):
    """Write a dictionary to a file.

    Args:
        path (str): the output file name.
        input (dict): the data.

    Returns:
        nothing.
    """
    with open(path, "w") as f:
        ydump(input, f, default_flow_style=False)
    return
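default_flow_style=False is what switches the output from the inline (flow) notation to the block notation used in most hand-edited YAML files; a small comparison sketch:

from yaml import dump as ydump

data = {'nside': 64, 'filters': ['r', 'z']}
print(ydump(data, default_flow_style=True), end='')   # {filters: [r, z], nside: 64}
print(ydump(data, default_flow_style=False), end='')  # one key per line, list items as "- ..."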
Example #12
 def looper(self, dirname):
     mappingFile = os.path.join(dirname, 'mapping.yaml')
     if not os.path.isfile(mappingFile):
         return
     mapping = loadYamlFile(mappingFile)
     for key, val in mapping.items():
         if val.get('type', '') == 'Agent' and val.get('config', ''):
             tmpD = os.path.join(dirname, val.get('config'))
             self._addAgent(tmpD)
         elif val.get('type', '') == 'FE' and val.get('config', ''):
             tmpD = os.path.join(dirname, val.get('config'))
             self._addFE(tmpD)
     with open('prometheus.yml', 'w') as fd:
         ydump(self.default, fd)
     return
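The structure this loop expects from mapping.yaml can be inferred from the .get() calls: each top-level key maps to an entry with a type of 'Agent' or 'FE' and a config path relative to the directory. A hypothetical example of the parsed mapping (names invented for illustration):

# What loadYamlFile(mappingFile) would plausibly return for the loop above to do anything:
mapping = {
    'node-exporter': {'type': 'Agent', 'config': 'agents/node-exporter.yaml'},
    'front-end': {'type': 'FE', 'config': 'frontends/front-end.yaml'},
}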
Example #13
	def write(self):
		'''
		Write metadata once run is over
		'''
		self.yamlfile = 'merger_data' + self.subdir + '/merged.yaml'
		with open(self.yamlfile,'w') as f:
			ydump( self.equivalence , f)
		self.notmergedfile = 'merger_data' + self.subdir + '/notmerged.txt'
		with open(self.notmergedfile,'w') as g:
			for item in self.notmerged:
				g.write(item + '\n')
		self.mergedlist = 'merger_data' + self.subdir + '/mfiles.txt'
		with open(self.mergedlist,'w') as h:
			for item in self.mergedfiles:
				h.write(item + '\n')
Example #15
def pretty_print_list(file_name=None, data_format="JSON"):
    # print YAML or JSON representations of list data
    assert(file_name is not None), "Provide a file name"
    assert((data_format == "JSON" or data_format == "YAML")), ("Format must be 'JSON'"
                                                     " or 'YAML'")

    try:
        formatted_list = []
        with open(file_name, "r") as f:
            if data_format == "JSON":
                some_list = jload(f)
                formatted_list = jdumps(some_list)
            elif data_format == "YAML":
                some_list = yload(f)
                formatted_list = ydump(some_list,
                                       default_flow_style=False,
                                       explicit_start=True,
                                       width=1,
                                       indent=2)
    except IOError as e:
        print "Could not read file: %s" % e
    except Exception as e:
        print "Unexpected exception: %s" % e
 
    print "======================"
    print "list from file: %s in %s data_format:" % (file_name, data_format)
    print "======================"
    print formatted_list
    print "======================"
    print "list from file: %s in pretty_print native python" % file_name
    print "======================"
    pp(some_list, width=1)
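A side note on the yload(f) call: if yload is a bare yaml.load (it may equally be safe_load, as in the test example earlier), PyYAML 5.1 and later emit a YAMLLoadWarning when no Loader is given; passing an explicit loader, or using safe_load, avoids it. Sketch:

from yaml import safe_load

with open("var/yaml_file.yml") as f:   # file name borrowed from the write_list example below
    some_list = safe_load(f)           # no warning, and unknown tags are rejected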
Example #17
 def save_state(self, filename, mode='yaml'):
     if mode == 'yaml':
         with open(filename + '-fit-errors.yaml', 'w') as yd:
             ydump(self.fit_error_by_learner, yd, sort_keys=True)
         with open(filename + '-lc.yaml', 'w') as yd:
             ydump(self.lc_by_learner, yd, sort_keys=True)
     else:
         with open(filename + '-fit-errors.json', 'w') as jd:
             json.dump(self.fit_error_by_learner,
                       jd,
                       sort_keys=True,
                       cls=NpEncoder)
         with open(filename + '-lc.json', 'w') as jd:
             json.dump(self.lc_by_learner,
                       jd,
                       sort_keys=True,
                       cls=NpEncoder)
Example #18
def detailPipeline(version, name):
    repo = ModuleUltraRepo.loadRepo()
    pipe = repo.getPipelineInstance(name, version=version)

    out = {
        'origins': pipe.listOrigins(),
        'endpoints': pipe.listEndpoints(),
    }
    click.echo(ydump(out))
Example #19
def txt2yaml(f_input, output):
    try:
        from yaml import CLoader as Loader, CDumper as Dumper, dump as ydump
    except ImportError:
        # Fall back to the pure-Python classes; bail out only if PyYAML is missing entirely.
        try:
            from yaml import Loader, Dumper, dump as ydump
        except ImportError:
            print("Please install all requirements")
            exit(1)
    file_name = output if output.endswith('.yaml') or output.endswith(
        '.yml') else output + '.yaml'
    with open(file_name, mode='w') as yml_file:
        yml_file.write(ydump(load_txt(f_input), Dumper=Dumper))
Example #20
def main(argv=None):

    parser = ArgumentParser(description="Solve a vector packing problem.")
    parser.add_argument('-i', '--input', help='input file')
    parser.add_argument('-o', '--output', default='-', help='output file')
    parser.add_argument('-P', '--pack', default='pack_by_bins', 
                        help='packing algorithm')
    parser.add_argument('-I', '--itemsort', default='none', 
                        help='item sorting algorithm')
    parser.add_argument('-B', '--binsort', default='none', 
                        help='bin sorting algorithm')
    parser.add_argument('-S', '--select', default='none', 
                        help='pairwise selection algorithm')
    parser.add_argument('-s', '--split', default=1, type=int,
                        help='split the problem')

    args = parser.parse_args()

    args.problem = {}
    if isfile(args.input):
        args.problem = yload(open(args.input, 'r'), Loader=Loader)
    else:
        raise SystemExit("error: can't find file %s" % args.input)

    solution = pack_vectors(**args.__dict__)

    # FIXME: hacky
    mclient = None
    mcoll = None
    if args.output.startswith("mongodb://"):
        try:
            dbinfo = uri_parser.parse_uri(args.output)
            host, port = dbinfo['nodelist'][0]
            db, collection = dbinfo['database'].split('/')
            username = dbinfo['username']
            password = dbinfo['password']
            connect_url = host + ':' + str(port)
            if username is not None and password is not None:
                connect_url = username + ':' + password + '@' + connect_url
            connect_url = 'mongodb://' + connect_url
        except (AttributeError, ValueError):
            raise SystemExit('Required mongodb output url format is ' 
                '"mongodb://[user:pass@]host[:port]/database/collection"')
        mclient = MongoClient(connect_url)
        mcoll = mclient[db][collection]
        if mcoll.find_one(solution) is not None:
            raise SystemExit('Solution To This Problem Already Exists!')

    if mcoll is not None and mclient is not None:
        mcoll.insert(solution)
        mclient.close()
    else:
        print(ydump(solution, Dumper=Dumper))
Example #21
    def to_text(self, tmp, data):
        try:

            if isinstance(data, str):
                raise NotImplementedError("%s.to_text(data is a string)" %
                                          (self))

            return ydump(data, default_flow_style=False)

        except (Exception, ) as e:  # pragma: no cover pylint: disable=unused-variable, broad-except
            if cpdb():
                pdb.set_trace()
            raise
Example #22
 def write(self):
     """
     Writes the current config to the local Config file
     """
     LOGGER.debug('writing roster file to {}'.format(Roster.file_path))
     while not Roster.lock:
         sleep(0.1)
     Roster.lock = False
     LOGGER.debug('lock is free, writing now')
     try:
         if Roster.encode:
             LOGGER.debug('encoding roster')
             with open(Roster.file_path, mode='wb') as f:
                 f.write(encrypt(ydump(self.__dict__), ROSTER_KEY))
         else:
             LOGGER.debug('writing plain')
             with open(Roster.file_path, mode='w') as f:
                 f.write(ydump(self.__dict__))
     except Exception:
         LOGGER.exception('error while writing Roster to {}'.format(
             Roster.file_path))
     finally:
         LOGGER.debug('freeing lock')
         Roster.lock = True
Example #24
def write_list(some_list=[]):
    # write list to a file using YAML
    try:
        with open("var/yaml_file.yml", "w") as f:
            f.write(
                ydump(some_list,
                      default_flow_style=False,
                      explicit_start=True,
                      width=1,
                      indent=2))
    except IOError as e:
        print "Could not write to file: %s" % e

    # write list to a file using JSON
    try:
        with open("var/json_file.json", "w") as f:
            jdump(some_list, f)
    except IOError as e:
        print "Could not write to file: %s" % e
Example #25
def get_k8s_resources(report, cluster, project):
    """Collect Deployments, ReplicaSets, DaemonSets, StatefulSets and Services from
    the given GKE cluster and return them as a list of KComponent objects."""
    config_file = get_cluster_credentials(report=report,
                                          project=project,
                                          zone=cluster.zone,
                                          cluster_name=cluster.name)
    if not config_file:
        return None
    with open(g_credentials, 'r') as f:
        google = jload(f)
    with open(config_file, 'r') as f:
        kcfg = full_load(f)
    for user in kcfg.get('users'):
        config = user.get('user').get('auth-provider').get('config')
        config['expiry'] = google['credential']['token_expiry']
        config['access-token'] = google['credential']['access_token']
    with open(config_file, 'w') as f:
        ydump(kcfg, f)
    configuration = Configuration()
    k_config.load_kube_config(config_file=config_file,
                              client_configuration=configuration)
    api_client = k_client.ApiClient(configuration=configuration)
    k = k_client.CoreV1Api(api_client=api_client)
    apps = k_client.AppsV1Api(api_client=api_client)

    resources = []
    set_names = [
        "list_deployment_for_all_namespaces",
        "list_replica_set_for_all_namespaces",
        "list_daemon_set_for_all_namespaces",
        "list_stateful_set_for_all_namespaces",
        "list_service_for_all_namespaces"
    ]
    for set_name in set_names:
        if set_name.startswith("list_service"):
            k_set = getattr(k, set_name)()
        else:
            k_set = getattr(apps, set_name)()
        collection = {}
        for s in k_set.items:
            if s.metadata.namespace in excluded_namespaces:
                log.debug(
                    "Skipping resource since it is located in the excluded namespace '%s'",
                    s.metadata.namespace)
                continue
            if s.kind:
                kind = s.kind
            elif 'deployment' in str(type(s)).lower():
                kind = "Deployment"
            elif 'service' in str(type(s)).lower():
                kind = "Service"
            elif 'replicaset' in str(type(s)).lower():
                kind = "ReplicaSet"
            elif 'daemonset' in str(type(s)).lower():
                kind = "DaemonSet"
            elif 'statefulset' in str(type(s)).lower():
                kind = "StatefulSet"
            s_type = None
            ports = None
            selectors = None
            meta = None
            containers = None
            if kind == "Service":
                s_type = s.spec.type
                ports = [{
                    'protocol': port.protocol,
                    'target': port.target_port,
                    'port': port.port
                } for port in s.spec.ports]
                selectors = s.spec.selector
                if s_type == 'ExternalName':
                    meta = {"external_ip": s.spec.externalIPs}
            elif kind == "Deployment":
                containers = [x.image for x in s.spec.template.spec.containers]
            else:
                containers = [x.image for x in s.spec.template.spec.containers]
            if kind == "Deployment" or kind == "Service" or not s.metadata.labels:
                _id = s.metadata.self_link
            else:
                _id = s.metadata.self_link.replace(
                    '-' + s.metadata.labels.get('pod-template-hash', ''), '')
            version = int(s.metadata.resource_version)
            ref = collection.get(_id, {"version": -1, "resource": None})
            if ref['version'] < version:
                collection[_id] = {
                    "version":
                    version,
                    "resource":
                    KComponent(r_id=s.metadata.name,
                               name=s.metadata.name,
                               namespace=s.metadata.namespace,
                               kind=kind,
                               labels=s.metadata.labels,
                               containers=containers,
                               ports=ports,
                               selectors=selectors,
                               subtype=s_type,
                               meta=meta)
                }
        for _id in collection:
            resources.append(collection[_id]["resource"])
    return resources
Example #26
def saveConfig(config, dst_path):
    '''
    Saves a config dict to dst_path in YAML format.
    '''
    ydump(config, open(dst_path, 'w'), Dumper=yDumper)
    return os.path.exists(dst_path)
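saveConfig leaves the file handle for the garbage collector to close; a context-manager variant (a sketch, not the project's code) closes it deterministically and otherwise behaves the same:

import os
from yaml import dump as ydump, Dumper as yDumper

def save_config(config, dst_path):
    # Sketch of saveConfig with an explicit close via the with-statement.
    with open(dst_path, 'w') as f:
        ydump(config, f, Dumper=yDumper)
    return os.path.exists(dst_path)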
Example #27
def graph_write(path, grph):
    with open(path, 'w') as f:
        ydump(grph, f, default_flow_style=False)
    return
Example #29
def main(cfg):
    global SLURM_PARTITION
    time_per_job = cfg.get("time_per_job",3600.)
    if ":" in str(time_per_job):
        now = datetime(day=1,month=1,year=1900,hour=0,minute=0,second=0)
        dt  = datetime.strptime(str(time_per_job),"%H:%M:%S")
        time_per_job = str(dt-now)
    else:
        time_per_job = str(timedelta(seconds=time_per_job))
    environ["STIME"]=parseMultiDays(time_per_job)
    environ['SLURM_PARTITION'] = SLURM_PARTITION
    #print '** DEBUG ** SLURM_PARTITION: ',environ.get("SLURM_PARTITION")
    #raise Exception
    #print "* DEBUG * STIME: ",environ.get("STIME")
    #raise Exception
    environ["SMEM"] =cfg.get("mem_per_job","2G")
    environ["SWPATH"]=cfg.get("DMPSWSYS","/cvmfs/dampe.cern.ch/rhel6-64/opt/releases/trunk")
    g_maxfiles = int(cfg.get("files_per_job",10))

    ncycles = 1

    version=cfg.get("tag","trunk")

    slurm_exec_dir=dirname(abspath(__file__))
    environ["SLURM_EXEC_DIR"]=slurm_exec_dir
    environ["DLOG"]=cfg.get("log_level","INFO")
    wrapper=opjoin(slurm_exec_dir,"submit_slurm.sh")

    environ["SCRATCH"]=cfg.get("scratch_dir","${HOME}/scratch")

    ### LOOP OVER CYCLES ####
    for i in xrange(ncycles):
        print '++++ CYCLE %i ++++'%i
        txtfiles = []
        for _d in cfg['inputloc']:
            txtfiles+=glob(_d)

        files_to_process = []
        for t in txtfiles:
            print 'reading %s...'%t
            files_to_process+=[f.replace("\n","") for f in open(t,"r").readlines()]
            print 'size: ',len(files_to_process)
        wd=opjoin(cfg['workdir'],cfg['tag'])
        wd=opjoin(wd,"cycle_%i"%(i+1))
        environ["WORKDIR"]=abspath(wd)
        mkdir(wd)
        print '%i: found %i files to process this cycle.'%(i+1, len(files_to_process))
        print 'check if files exist already'

        reco_file = lambda f : mc2reco(f,version=version,newpath=cfg['outputdir'])
        #files_to_process = tqdm([f for f in files_to_process if not isfile(reco_file(f))])
        _files_to_process = []
        for f in tqdm(files_to_process):
            if not isfile(reco_file(f)): _files_to_process.append(f)
        files_to_process = _files_to_process
        print 'after check: found %i files to process this cycle.'%len(files_to_process)
        nfiles = len(files_to_process)
        chunks = [files_to_process[x:x+g_maxfiles] for x in xrange(0, len(files_to_process), g_maxfiles)]
        print 'created %i chunks this cycle'%len(chunks)
        for j,ch in enumerate(chunks):
            print '** working on chunk %i, size: %i **'%(j+1,len(ch))
            ofile = opjoin(wd,"chunk_%i.yaml"%(j+1))
            inf_c = ch
            out_c = [reco_file(f) for f in inf_c]
            ydump(dict(zip(inf_c,out_c)),open(ofile,'wb'))
            assert isfile(ofile), "yaml file missing!"
            print 'size of chunk: ',len(out_c)
        max_jobs = int(cfg.get("max_jobs",10))
        nch = len(chunks)
        sarr = "1-{nchunks}%{jobs}".format(nchunks=nch,jobs=max_jobs) if \
                nch > max_jobs else "1-{nchunks}".format(nchunks=nch)
        environ["SARR"]=sarr

        #print '*** ENV DUMP ***'
        #system("env | sort")
        new_wrapper = opjoin(wd,"submit_slurm.sh")
        make_wrapper(wrapper,new_wrapper)
        system("sbatch {wrapper}".format(wrapper=new_wrapper))
Example #30
    if target_char not in out_dict:
        # Initialize the inner dictionary
        out_dict[target_char] = dict()
        for p in pinyin_dict[char]:
            out_dict[target_char][p] = [[], [], []]

    # Add this word into the correct entry
    if target_index == 0:
        out_dict[target_char][pinyin][0].append(word)
    elif target_index == len(word) - 1:
        out_dict[target_char][pinyin][2].append(word)
    else:
        out_dict[target_char][pinyin][1].append(word)


# Make the dictionary
with open("../data/raw1.txt", 'r') as fp:
    out_dict = {}
    lines = fp.readlines()
    for line in lines:
        formated = list(filter(None, re.split(r':|,|\s', line)))
        for word in formated[1:]:
            add_wrod(formated[0], word, out_dict)

# Dump the dictionary into yaml and json
with open("../polyphone/polyphone.yaml", 'w') as fp:
    ydump(out_dict, fp, allow_unicode=True)

with open("../polyphone/polyphone.json", 'w') as fp:
    dump(out_dict, fp, indent=4, ensure_ascii=False, sort_keys=True)
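The two flags at the end mirror each other: without allow_unicode=True PyYAML escapes non-ASCII characters, just as json.dump does unless ensure_ascii=False. A small sketch:

import json
from yaml import dump as ydump

entry = {'乐': ['lè', 'yuè']}
print(ydump(entry, allow_unicode=True), end='')   # keeps the characters readable
print(json.dumps(entry, ensure_ascii=False))      # the JSON-side equivalent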
Example #31
def write_options(path, opts):
    with open(path, 'w') as f:
        ydump(opts, f, default_flow_style=False)
    return
Example #32
 def save(self):
     f = open(self.fname, "w")
     f.write(ydump(self._config, Dumper=YDumper))
     f.close()
Example #33
 def __str__(self):
     return ydump(self.config, Dumper=Dumper)
Example #34
    _params.update(params)
    r = apireq(path, _params)
    re = r['items']
    if 'nextPageToken' in r:
        _params['pageToken'] = r['nextPageToken']
        re += apireqlist(path, _params)
    return re



try:
    with open('credentials.json') as f:
        cred = load(f)
except FileNotFoundError as e:
    auth()



playlists = apireqlist('/playlists', {'mine': 'true'})
out = [{'id': x['id'], 'title': x['snippet']['title']} for x in playlists]
out = sorted(out, key=lambda x: x['title'])

for pl in out:
    print('Downloading [%s] ' % pl['title'], end='')
    items = apireqlist('/playlistItems', {'playlistId': pl['id']})
    pl['items'] = [{'id': x['snippet']['resourceId']['videoId'], 'title': x['snippet']['title']} for x in items]
    print(' Done')

with open('dump.yaml', 'w') as f:
    ydump(out, f, width=250)