Example #1
 def render_timeline_src(self):
     lang = self.lang
     tfilepath = "../timeline/"+self.auth+"_"+lang+".html"
     block = self.get_globals()
     tvars = {}
     defaults = {
         "theme_color": "#288EC3",
         "skin": "timeline.dark",
         "src": os.path.join(self.conf['front']['domain'], "timeline", self.auth+"_"+lang+".json")
     }
     varsf = self.langpath+"/timeline_src_params.json"
     if os.path.isfile(varsf):
         tvars = jc.load(open(varsf))
     elif lang != "he":
         try:
             tvars = jc.load(open(self.indexpath+"/"+self.siteconfig['primary_language']+"/timeline_src_params.json"))
             logger.info("timeline - "+lang+" using defaults found in the hebrew directory")
         except Exception:
             logger.info("no timeline configuration, using general defaults")
     tvars = jsonmerge.merge(defaults,tvars)
     try:
         block = jsonmerge.merge(block,tvars)
         tfile = open(tfilepath,"w")
         tfile.write(stache.render(stache.load_template("timeline_src.html"),block))
         tfile.close()
         logger.info("source written at "+tfilepath)
     except Exception as e:
         logger.error(e)
Example #2
def resolve_to_cmd_options(aws_ctx, template_file, user_config):
    def to_cmd_option(k, v):
        if "tags" == k:
            return "--%s %s" % (k, " ".join("'%s=%s'" % kv
                                            for kv in v.items()))
        elif isinstance(v, bool):
            return ("--%s" if v else "--no-%s") % k
        elif isinstance(v, (list, dict)):
            return "--%s '%s'" % (k, json.dumps(v))
        else:
            return "--%s %s" % (k, json.dumps(v))

    with open(template_file, 'r') as template_f:
        template = template_f.read()

    unresolved_config = yaml.safe_load(pystache.render(template, {}))
    unresolved_defaults = unresolved_config.get('defaults') or dict()
    defaults = yaml.safe_load(
        pystache.render(
            template,
            yamlize_dict(jsonmerge.merge(
                unresolved_defaults, user_config)))).get('defaults') or dict()
    config = jsonmerge.merge(defaults, user_config)
    aws_config = yaml.safe_load(pystache.render(template, config))
    aws_ctx.debug("AWS Config: %s" % aws_config)
    aws_options = aws_config['options']
    cmd_options = [to_cmd_option(*kv) for kv in aws_options.items()]
    if aws_ctx.verbose:
        aws_ctx.debug("AWS-Options:")
        for opt in cmd_options:
            aws_ctx.debug(opt)

    return cmd_options
Example #3
    def test_merge_default(self):
        schema = {}
        base = None
        base = jsonmerge.merge(base, {'a': "a"}, schema)
        base = jsonmerge.merge(base, {'b': "b"}, schema)

        self.assertEqual(base, {'a': "a", 'b': "b"})
Example #4
 def update(self, chart, values):
     self.check_permissions('helmsman.change_chart', chart)
     # 1. Retrieve chart's current user-defined values
     cur_vals = HelmClient().releases.get_values(chart.namespace,
                                                 chart.id,
                                                 get_all=False)
     # 2. Deep merge the latest differences on top
     if cur_vals:
         cur_vals = jsonmerge.merge(cur_vals, values)
     else:
         cur_vals = values
     # 3. Guess which repo the chart came from
     repo_name = self._find_repo_for_chart(chart)
     if not repo_name:
         raise ChartNotFoundException(
             "Could not find chart: %s, version: %s in any repository" %
             (chart.name, chart.chart_version))
     # 4. Apply the updated config to the chart
     HelmClient().releases.update(chart.namespace,
                                  chart.id,
                                  "%s/%s" % (repo_name, chart.name),
                                  values=cur_vals,
                                  value_handling=HelmValueHandling.REUSE)
     chart.values = jsonmerge.merge(chart.values, cur_vals)
     return chart
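
A minimal sketch (hypothetical chart values) of the deep merge in step 2: jsonmerge recurses into nested objects by default, so keys absent from the incoming values survive.

    import jsonmerge

    cur_vals = {"ingress": {"enabled": False, "path": "/"}}
    values = {"ingress": {"enabled": True}}
    # nested keys not mentioned in `values` are preserved
    assert jsonmerge.merge(cur_vals, values) == {"ingress": {"enabled": True, "path": "/"}}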
Example #5
    def _import_files(self, absolute_file_path, content):
        if 'import' not in content:
            return

        # Get directory where analyzed JSON file is located
        base_directory_path = os.path.dirname(absolute_file_path)

        # "import" shouldn't be visible to end user
        imported_files = content['import']
        del content['import']

        # Imported JSON files are merged in order, from bottom to top
        merged_imported_content = {}
        for imported_file in imported_files:
            if imported_file.startswith(JsonConfiguration._RESOURCE):
                absolute_imported_file_path = imported_file[len(JsonConfiguration._RESOURCE):]
                absolute_imported_file_path = self._compose_absolute_file_path(self._resources_base_path,
                                                                               absolute_imported_file_path)
            else:
                absolute_imported_file_path = self._compose_absolute_file_path(base_directory_path, imported_file)

            if absolute_imported_file_path in self._imported_files:
                raise JsonConfiguration.CyclicalImportError(
                    f'Cyclical import occurred, file {imported_file} was already imported')

            imported_content = JsonConfiguration._load_file(absolute_imported_file_path)
            self._imported_files.append(absolute_imported_file_path)
            self._import_files(absolute_imported_file_path, imported_content)

            merged_imported_content = jsonmerge.merge(merged_imported_content, imported_content)

        self._content = jsonmerge.merge(merged_imported_content, self._content)
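
A minimal sketch (hypothetical keys) of the precedence the two merges above rely on: with jsonmerge, keys in the second argument (the head) win, so a file's own content overrides anything it imports.

    import jsonmerge

    imported = {"timeout": 30, "retries": 3}   # merged_imported_content
    own = {"timeout": 60}                      # self._content
    assert jsonmerge.merge(imported, own) == {"timeout": 60, "retries": 3}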
Example #6
    def _read_env_configs(self, env_cfg, iface):  # pylint: disable=no-self-use
        """
        Read environment configuration json file.

        :return: Merged environment configuration dictionary.
        """
        data = None

        if env_cfg != '':
            env_cfg_filename = env_cfg
        else:
            env_cfg_filename = os.path.abspath(
                os.path.join(__file__, os.path.pardir, os.path.pardir,
                             os.path.pardir, "env_cfg_json"))
        if os.path.exists(env_cfg_filename):
            with open(env_cfg_filename) as data_file:
                try:
                    data = json.load(data_file,
                                     object_pairs_hook=find_duplicate_keys)
                except ValueError as error:
                    self._logger.error(error)
                    raise InconclusiveError(
                        "Environment file {} read failed: {}".format(
                            env_cfg_filename, error))
        elif env_cfg != '':
            raise InconclusiveError(
                'Environment file {} does not exist'.format(env_cfg))

        env = merge({}, data) if data else {}

        if iface:
            env = merge(env, {'sniffer': {'iface': iface}})
        else:
            env = merge(env, {'sniffer': {'iface': "Sniffer"}})
        return env
Example #7
    def _read_exec_configs(self, args):  # pylint: disable=too-many-branches
        """
        Read execution configuration file.

        :return: Nothing.
        :raises InconclusiveError if the configuration file does not exist,
        TestStepError if it cannot be read or merged into config, or if
        platform_name is not in allowed_platforms.
        """
        tc_cfg = None
        if args.tc_cfg:
            tc_cfg = args.tc_cfg
        # TODO: this bit is not compatible with IceteaManagement's --tc argument.
        elif isinstance(args.tc,
                        string_types) and os.path.exists(args.tc + '.json'):
            tc_cfg = args.tc + '.json'
        if tc_cfg:
            if not os.path.exists(tc_cfg):
                self._logger.error(
                    "Execution configuration file {} does not exist.".format(
                        tc_cfg))
                raise InconclusiveError(
                    "Execution configuration file {} does not exist.".format(
                        tc_cfg))
            with open(tc_cfg) as data_file:
                try:
                    data = json.load(data_file,
                                     object_pairs_hook=find_duplicate_keys)
                    self._config = merge(self._config, data)
                except Exception as error:
                    self._logger.error(
                        "Testcase configuration read from file (%s) failed!",
                        tc_cfg)
                    self._logger.error(error)
                    raise TestStepError("TC CFG read fail! {}".format(error))

        if args.type:
            self._config["requirements"]["duts"]["*"] = merge(
                self._config["requirements"]["duts"]["*"], {"type": args.type})

        if args.bin:
            self._config["requirements"]["duts"]["*"] = merge(
                self._config["requirements"]["duts"]["*"],
                {"application": {
                    'bin': args.bin
                }})

        if args.platform_name:
            allowed = self._config["requirements"]["duts"]["*"].get(
                "allowed_platforms")
            if allowed:
                if args.platform_name in allowed:
                    self._config["requirements"]["duts"]["*"][
                        "platform_name"] = args.platform_name
                else:
                    raise TestStepError(
                        "Required platform_name not in allowed_platforms.")
            else:
                self._config["requirements"]["duts"]["*"][
                    "platform_name"] = args.platform_name
Example #8
    def test_append(self):
        schema = {'mergeStrategy': 'append'}

        base = None
        base = jsonmerge.merge(base, ["a"], schema)
        base = jsonmerge.merge(base, ["b"], schema)

        self.assertEqual(base, ["a", "b"])
Example #9
    def test_merge_overwrite(self):

        schema = {'mergeStrategy': 'objectMerge'}

        base = None
        base = jsonmerge.merge(base, {'a': "a"}, schema)
        base = jsonmerge.merge(base, {'a': "b"}, schema)

        self.assertEqual(base, {'a': "b"})
Example #10
 def compute_merged_config(self):
     default_appwide_config = json.loads(
         self.application_version.application.default_launch_config or "{}")
     default_version_config = json.loads(
         self.application_version.default_launch_config or "{}")
     default_cloud_config = json.loads(self.default_launch_config or "{}")
     default_combined_config = jsonmerge.merge(default_appwide_config,
                                               default_version_config)
     return jsonmerge.merge(default_combined_config, default_cloud_config)
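
A minimal sketch (hypothetical launch-config values) of the layering above: each successive merge lets the more specific config override the broader one.

    import jsonmerge

    appwide = {"cpus": 1, "region": "us-east-1"}
    version = {"cpus": 2}
    cloud = {"region": "eu-west-1"}
    merged = jsonmerge.merge(jsonmerge.merge(appwide, version), cloud)
    assert merged == {"cpus": 2, "region": "eu-west-1"}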
Example #11
    def test_default(self):

        schema = {}

        base = None
        base = jsonmerge.merge(base, "a", schema)
        base = jsonmerge.merge(base, "b", schema)

        self.assertEqual(base, "b")
Example #12
 def test_base(self):
     test_dir = './test/input'
     merge(test_dir, "data", "output", 2000)
     with open("./test/input/output0.json", "r") as output, \
             open("./test/input/expected_output.json", "r") as expected_output:
         self.assertTrue(
             compareJSON(json.load(output), json.load(expected_output)))
Example #13
    def post(self):
        print('--------GENERATE RECOMMENDATION--------')
        args = self.parser.parse_args()
        restaurants = self.recommender_restaurant.recommend(args.get('lng'), args.get('lat'), args.get('review'),
                                                            args.get('tags'))
        activities = self.recommender_activities.recommend(args.get('lng'), args.get('lat'), args.get('review'))
        pois = self.recommender_pois.recommend(args.get('lng'), args.get('lat'))

        return merge(merge(restaurants, activities), pois)
Example #14
 def test_different_name(self):
     test_dir = './test/input2'
     merge(test_dir, "data", "output2", 2000)
     with open("./test/input2/output20.json", "r") as output2, \
             open("./test/input2/expected_output2.json", "r") as expected_output2:
         self.assertTrue(
             compareJSON(json.load(output2), json.load(expected_output2)))
Example #15
    def test_overwrite(self):

        schema = {'mergeStrategy': 'overwrite'}

        base = None
        base = jsonmerge.merge(base, "a", schema)
        base = jsonmerge.merge(base, "b", schema)

        self.assertEqual(base, "b")
Example #16
    def test_version(self):

        schema = {'mergeStrategy': 'version'}

        base = None
        base = jsonmerge.merge(base, "a", schema)
        base = jsonmerge.merge(base, "b", schema)

        self.assertEqual(base, [{'value': "a"}, {'value': "b"}])
Example #17
    def test_version_last(self):

        schema = {'mergeStrategy': 'version', 'mergeOptions': {'limit': 1}}

        base = None
        base = jsonmerge.merge(base, "a", schema)
        base = jsonmerge.merge(base, "b", schema)

        self.assertEqual(base, [{'value': "b"}])
Example #19
    def test_different_files(self):

        test_dir = './test/input3'
        merge(test_dir, "data", "output3", 2000)
        with open("./test/input3/output30.json", "r") as output3, \
                open("./test/input3/expected_output3.json", "r") as expected_output3:
            self.assertTrue(
                compareJSON(json.load(output3), json.load(expected_output3)))
Example #20
def get_conn(args):

    log = logging.getLogger(name='consul_utils')

    try:
        passwd = pwd.getpwnam(getpass.getuser())
    except KeyError:
        print('User %s does not exist locally' % getpass.getuser())
        exit(1)

    user_home = passwd[5]
    user_config = os.path.join(user_home, '.hecate/config.json')
    global_config = '/usr/local/hecate/etc/config.json'

    log.info('Loading global config.json from %s' % global_config)
    log.info('Loading user config.json from %s' % user_config)

    configuration = default_configuration

    if os.path.exists(global_config):
        configuration = jsonmerge.merge(configuration,
                                        json.load(open(global_config, 'r')))

        if log.isEnabledFor(logging.DEBUG):
            log.info('Merging global config')
            dump_dict(configuration)
    else:
        log.warning('No global config found at %s' % global_config)

    if os.path.exists(user_config):
        configuration = jsonmerge.merge(configuration,
                                        json.load(open(user_config, 'r')))

        if log.isEnabledFor(logging.DEBUG):
            log.info('Merging user config')
            dump_dict(configuration)
    else:
        log.warning('No user config found at %s' % user_config)

    configuration = jsonmerge.merge(configuration, clean_dict(vars(args)))

    if log.isEnabledFor(logging.DEBUG):
        log.info('Merging command line arguments')
        dump_dict(configuration)

    return consul.Consul(host=configuration['consul_host'],
                         port=configuration['consul_port'],
                         token=(configuration['consul_token']
                                if 'consul_token' in configuration else None),
                         scheme='http',
                         consistency='default',
                         dc=(configuration['consul_dc']
                             if 'consul_dc' in configuration else None),
                         verify=configuration['consul_verify_ssl'])
Example #21
    def test_version_does_not_duplicate(self):
        # Don't record change if it didn't change

        schema = {'mergeStrategy': 'version'}

        base = None
        base = jsonmerge.merge(base, "a", schema)
        base = jsonmerge.merge(base, "b", schema)
        base = jsonmerge.merge(base, "b", schema)

        self.assertEqual(base, [{'value': "a"}, {'value': "b"}])
Example #22
def getArtistInfo(songList):
    idList = getIDList(songList, 'artist_id')
    url = 'https://api.spotify.com/v1/artists?ids=' + idList
    artistInfo = requestToServer(url)['artists']
    for i in range(0, len(songList)):
        followerInfo = {
            'artist_followers': artistInfo[i]['followers']['total']
        }
        songList[i] = merge(songList[i], followerInfo)
        artPop = {'artist_popularity': artistInfo[i]['popularity']}
        songList[i] = merge(songList[i], artPop)
    return songList
Example #23
    def test_merge_append_pattern(self):

        schema = {'mergeStrategy': 'objectMerge',
                  'patternProperties': {
                      'a': {'mergeStrategy': 'append'}
                  }}

        base = None
        base = jsonmerge.merge(base, {'a': ["a"]}, schema)
        base = jsonmerge.merge(base, {'a': ["b"], 'b': 'c'}, schema)

        self.assertEqual(base, {'a': ["a", "b"], 'b': 'c'})
Example #24
    def importConfigFiles(self):
        """Merge in referenced config files if present.

        Entries in the current config overwrite any entries read from the file.
        This allows this function to be called recursively to build up a complete
        config that refers to default settings stored in default configs."""
        # If a config json OrderedDict is passed, merge it with the existing configuration
        # Try and parse a config-file if it is passed to us
        configFile = None
        try:
            if self["config-file"] is not None:
                configFile = self["config-file"]
                if self.verbose:
                    print("hjsonConfig.importConfigFiles: Import from {}".
                          format(configFile))

        except KeyError:
            if self.verbose:
                print(
                    "hjsonConfig.importConfigFiles: No config-files to import")
            configFile = None

        if configFile is not None:
            # Might be a list of fileNames or a single filename
            if isinstance(configFile, list):
                if self.verbose:
                    print(
                        "hjsonConfig.importConfigFiles: Importing config-files {}"
                        .format(configFile))
                fileConfig = hjsonConfig(verbose=self.verbose)
                for c in configFile:
                    f = self._readFile(c)
                    fileConfig._copyIn(jsonmerge.merge(fileConfig, f))
            else:
                if self.verbose:
                    print(
                        "hjsonConfig.importConfigFiles: Importing config-file {:s}"
                        .format(configFile))
                fileConfig = hjsonConfig(filename=configFile,
                                         verbose=self.verbose)
            if self.verbose:
                pprint(fileConfig)

            # We will move imported config-files to "imported-config-file"
            self["config-file"] = None
            try:
                self["imported-config-file"].append(configFile)
            except KeyError:
                self["imported-config-file"] = [configFile]

            # clear self and copy the merged ODict from jsonmerge in
            self._copyIn(jsonmerge.merge(fileConfig, self))
Example #25
    def _merge_params(self, key, partition):
        params = merge(self.partitions[partition], self.config["storage"]["defaults"])
        if "key" in params:
            params = merge(params, { "partition": partition })
        else:
            params = merge(params, { "partition": partition, "key": key })

        for index, value in params.items():
            if isinstance(value, str) and value.startswith('eval:'):
                params[index] = self._eval(key, partition, value[5:])
            elif isinstance(value, types.FunctionType):
                params[index] = value(partition=partition, key=key, config=self.config)

        return params
Example #26
    def test_merge_append_additional(self):

        schema = {'mergeStrategy': 'objectMerge',
                  'properties': {
                      'b': {'mergeStrategy': 'overwrite'}
                  },
                  'additionalProperties': {
                      'mergeStrategy': 'append'
                  }}

        base = None
        base = jsonmerge.merge(base, {'a': ["a"]}, schema)
        base = jsonmerge.merge(base, {'a': ["b"], 'b': 'c'}, schema)

        self.assertEqual(base, {'a': ["a", "b"], 'b': 'c'})
Example #27
    def _dump_tensors(self):
        if not self._has_recorded_tensor:
            return

        layout_categories = []

        for scope in self._scope_tensor:
            chart = []
            for name in self._scope_tensor[scope]:
                chart.append(
                    layout_pb2.Chart(
                        title=name,
                        multiline=layout_pb2.MultilineChartContent(
                            tag=[r'name(?!.*margin.*)'.replace('name', name)
                                 ])))
            category = layout_pb2.Category(title=scope, chart=chart)
            layout_categories.append(category)

        if layout_categories:
            layout_proto_to_write = layout_pb2.Layout(
                category=layout_categories)

            try:
                # Load former layout_proto from self._layout_writer_dir.
                multiplexer = event_multiplexer.EventMultiplexer()
                multiplexer.AddRunsFromDirectory(self._layout_writer_dir)
                multiplexer.Reload()
                tensor_events = multiplexer.Tensors(
                    '.', metadata.CONFIG_SUMMARY_TAG)
                shutil.rmtree(self._layout_writer_dir)

                # Parse layout proto from disk.
                string_array = tf.make_ndarray(tensor_events[0].tensor_proto)
                content = np.asscalar(string_array)
                layout_proto_from_disk = layout_pb2.Layout()
                layout_proto_from_disk.ParseFromString(
                    tf.compat.as_bytes(content))

                # Merge two layout proto.
                merged_layout_json = merge(
                    json_format.MessageToJson(layout_proto_from_disk),
                    json_format.MessageToJson(layout_proto_to_write))
                merged_layout_proto = layout_pb2.Layout()
                json_format.Parse(str(merged_layout_json), merged_layout_proto)

                self._layout_writer = tf.summary.FileWriter(
                    self._layout_writer_dir)
                layout_summary = summary_lib.custom_scalar_pb(
                    merged_layout_proto)
                self._layout_writer.add_summary(layout_summary)
                self._layout_writer.close()
            except KeyError:
                # Write the current layout proto into disk
                # when there is no layout.
                self._layout_writer = tf.summary.FileWriter(
                    self._layout_writer_dir)
                layout_summary = summary_lib.custom_scalar_pb(
                    layout_proto_to_write)
                self._layout_writer.add_summary(layout_summary)
                self._layout_writer.close()
Example #28
 def instantiate(self, selector):
     """ Use as-is"""
     structure_parameters = self._element.values.parameters.structure_parameters
     my_default = {}
     self._element.values.parameters.structure_private = merge(my_default, structure_parameters)
     structure_private = self._element.values.parameters.structure_private
     logger.info("private: %s", str(structure_private))
Example #29
def restoreConfig(ldifFolder, newLdif, ldifModFolder):
    ignoreList = ["objectClass", "ou"]
    current_config_dns = getDns(newLdif)
    oldDnMap = getOldEntryMap(ldifFolder)
    for dn in oldDnMap.keys():
        old_entry = getEntry("%s/%s" % (ldifFolder, oldDnMap[dn]), dn)
        if dn not in current_config_dns:
            addEntry(dn, old_entry, ldifModFolder)
            continue
        new_entry = getEntry(newLdif, dn)
        for attr in old_entry.keys():
            if attr in ignoreList:
                continue
            if attr not in new_entry:
                writeMod(dn, attr, old_entry[attr], "%s/%s.ldif" % (ldifModFolder, str(uuid.uuid4())), True)
                logIt("Adding attr %s to %s" % (attr, dn))
            else:
                mod_list = None
                if old_entry[attr] != new_entry[attr]:
                    if len(old_entry[attr]) == 1:
                        try:
                            logIt("Merging json value for %s " % attr)
                            old_json = json.loads(old_entry[attr][0])
                            new_json = json.loads(new_entry[attr][0])
                            new_json = merge(new_json, old_json)
                            mod_list = [json.dumps(new_json)]
                        except Exception:
                            mod_list = old_entry[attr]
                    else:
                        mod_list = old_entry[attr]
                        logIt("Keeping multiple old values for %s" % attr)
                else:
                    continue
                writeMod(dn, attr, mod_list, "%s/%s.ldif" % (ldifModFolder, str(uuid.uuid4())))
Example #30
def merge(base, head):
    """Merge two HjsonConfig objects together, using jsonmerge.merge. Keys in
    head overwrite duplicate keys in base.

    Args:
        base: an HjsonConfig or OrderedDict object that represents the
            base of the output.
        head: an HjsonConfig or OrderedDict object to be merged on to the base,
            with duplicated entries overwriting entries in base.

    Returns:
        An HjsonConfig object containing the merged key:value pairs
    """
    try:
        if base is not None:
            verbose = base.verbose or head.verbose
        else:
            verbose = head.verbose
    except AttributeError:
        verbose = False

    merged = jsonmerge.merge(base, head)

    # We copy merged into out, to ensure that the returned value is an
    # HjsonConfig object rather than an OrderedDict object.
    out = HjsonConfig(verbose=verbose)
    out._copy_in(merged)
    return out
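
A minimal usage sketch of the head-wins rule the docstring describes (plain dicts stand in for HjsonConfig contents; the keys are hypothetical):

    import jsonmerge

    base = {"name": "rx1", "gain": 10}
    head = {"gain": 20}
    # keys in head overwrite duplicate keys in base
    assert jsonmerge.merge(base, head) == {"name": "rx1", "gain": 20}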
Example #31
def mergeObjects():
    
    tmpTransaction = []
    # Get list of file
    onlyFiles = [ f for f in listdir(INPUT_PATH) if isfile(join(INPUT_PATH,f))]
    objNum = 0
    runner = 0
    # Loop through every file to create a databand
    for file in onlyFiles:
        with open(INPUT_PATH + file) as fileReader:
            data = json.loads(fileReader.read())
            print("dataModel: %s" % data['URDMEModel'])
            with open(INPUT_META_DATA) as metaReader:
                lines = metaReader.readlines()
                for line in lines:
                    lineData = json.loads(line)
                    runner += 1
                    # print("matching %s : %s \t%d" % (data['URDMEModel'], lineData['URDMEModel'], runner))
                    if lineData['URDMEModel'] == data['URDMEModel']:
                        objNum += 1
                        print("%d\t%d\tMerging %s" % (objNum, runner, data['URDMEModel']))
                        result = merge(data,lineData)
                        f = open(OUTPUT_PATH + data['URDMEModel'] + ".json" ,'w')
                        f.write(json.dumps(result))
                        f.close()
                        break
        runner = 0
Example #32
def json_data(tables=None, geoids=None, release='latest'):
    """Make a basic API request for data for a given table, geoid, and/or release.
    tables -- An ACS table ID as a string, or a list of such IDs. Default: 'B01001'
    geoids -- A Census geoID as a string, or a list of such IDs. Default: '040|01000US' ('all states in the US')
    release -- The ACS release from which to retrieve data. Should be one of:
        latest - (default) the ACS release which has data for all of the requested geographies
        acs2013_1yr - the 2013 1-year ACS data. Only includes geographies with population >65,000
        acs2013_3yr - the 2011-13 3-year ACS data. Only includes geographies with population >20,000
        acs2013_5yr - the 2009-13 5-year ACS data. Includes all geographies covered in the ACS.
    """
    geoids = _clean_list_arg(geoids, '040|01000US')
    tables = _clean_list_arg(tables, 'B01001')

    # If the URL is too big it will fail; estimate the size here and break the request up if needed
    # Each table uses 7 characters and each geoid uses 13 characters
    maxURLSize = 4020
    urlSize = (len(tables) * 7) + (len(geoids) * 13)

    if urlSize > maxURLSize:
        tableSize = len(tables) * 7
        maxGeos = int((maxURLSize - tableSize) / 13)

        resp = get_url_response(tables, geoids[:maxGeos], release)
        if "error" in resp:
            raise Exception(resp['error'])

        return merge(resp, json_data(tables, geoids[maxGeos:], release))

    return get_url_response(tables, geoids, release)
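
Hedged usage examples for json_data (the table and geoid IDs below are illustrative, not verified against the API):

    # default table for the default geography set
    data = json_data()

    # a specific table for specific geographies, pinned to a release
    data = json_data(tables='B01003', geoids=['04000US06', '04000US36'],
                     release='acs2013_5yr')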
Example #33
def post():
	parser = reqparse.RequestParser()
	parser.add_argument('msg', type=str)
	parser.add_argument('id', type=str)
	parser.add_argument('operation', type=str)
	parser.add_argument('filters', type=str)

	args = parser.parse_args()
	message = args['msg']
	user = args['id']
	operation = args['operation']
	previous_filters = json.loads(args['filters'])
	message = spell_correct(message)  # spell correct
	entities = get_entities_json_wit(message)  # json object of entities from wit
	dict_features = interpret_wit_output(entities)  # structure derived from wit reply
	user_ft = getFeatures(dict_features)  # extract user features to be used from wit reply
	updateUser(user, user_ft)  # update user features in elastic search
	filters = wit_extract_filters(dict_features)  # extract filters from wit reply
	filters_n1 = to_camelcase(filters)
	# -------------
	filters_n2 = merge(previous_filters, filters_n1)
	filters_n3 = to_underscore(filters_n2)
	project_list = getProjects(filters_n3, user)
	suggestions = get_suggestions(user, operation)
	return jsonify({"projects": project_list, "suggestions": suggestions, "filters": filters_n2})
Example #34
    def instantiate(self, selector):
        """ force set values:
        - set values to brick size
        - set values for gates"""

        structure_parameters = self._element.values.parameters.structure_parameters
        my_default = {
            "setup": {
                "radius": 20,
                "radius_sky": 200,
                "lats": 50,
                "longs": 50
            },
            "gates": {}
        }
        counter = 0
        #for gate in self._element.gates:
        #    v_angle = pi/4 * counter
        #    h_angle = pi/4 * ( counter % 8 )
        #    my_default["gates"][gate.get_id()]= { "v_angle":v_angle, "h_angle":h_angle}
        #    counter = counter + 1

        self._element.values.parameters.structure_private = merge(
            my_default, structure_parameters)
        logger.info(
            "setup: %s",
            str(self._element.values.parameters.structure_private["setup"]))
Example #35
    def _populate_endpoint_config(self):
        env_config = self.graph.environment.get("config", {})
        service_config = env_config.get("services", {}).get(self.name, {})
        env_config_data = service_config.get("config", [])

        # XXX: resolve references to overlay vars from service_config.config
        # into the endpoint data as a means of setting runtime values
        # TODO: we should be able to reference vault and/or other secret mgmt tools
        # here to reference actual credentials
        # lookup order is [interface, endpoint, component, service via graph [TBD], environment]
        # last write wins and is recorded in ep data
        for ep in self.endpoints.values():
            env = utils.pick(env_config_data, endpoint=ep.name,
                             default={}).get("data", {})
            component_data = utils.pick(self.entity.endpoints,
                                        name=ep.name,
                                        default={}).get("data", {})
            data = jsonmerge.merge(component_data, env)
            ep.data.update(data)

        env = None
        for cd in env_config_data:
            epname = cd.get("endpoint")
            if epname:
                continue
            data = cd.get("data", {})
            self.add_facet(data, self.graph.environment.src_ref[0])
        self._interpolate_entity()
Example #36
 def render_styles(self):
     stylertl = open(self.indexpath+"/css/style-rtl.css", 'w')
     styleltr = open(self.indexpath+"/css/style-ltr.css", 'w')
     rtlvars = jsonmerge.merge(self.siteconfig['stylevars'], {"dir": "rtl", "side": "right", "oposide": "left" })
     srtl = lesscpy.compile(six.StringIO(stache.render(stache.load_template('authorsite.less'),rtlvars).encode('utf-8')),minify=True)
     if srtl:
         stylertl.write(srtl) 
     stylertl.close()
     logger.info('rtl styles done')
     ltrvars = jsonmerge.merge(self.siteconfig['stylevars'],{ "dir": "ltr", "side": "left", "oposide": "right" }) 
     sltr = lesscpy.compile(six.StringIO(stache.render(stache.load_template('authorsite.less'),ltrvars).encode('utf-8')),minify=True)
     if sltr:
         styleltr.write(sltr)
     if not sltr or not srtl:
         logger.error("could not compile authorsite.less")
     styleltr.close()
     logger.info('ltr styles done')
Example #37
    def test_merge_empty_schema(self):

        schema = {}

        base = None
        base = jsonmerge.merge(base, {'a': {'b': 'c'}}, schema)

        self.assertEqual(base, {'a': {'b': 'c'}})
Example #38
 def __merge(self, json_blob):
     """
     JSON-merge a JSON blob into config_dict.

     :param json_blob: dict to merge on top of the current config
     """
     schema = {'mergeStrategy': 'objectMerge'}
     self.config_dict = jsonmerge.merge(self.config_dict, json_blob, schema)
Example #39
 def get_water_temp_data(self, sDate):
     eDate = datetime.datetime.today().isoformat().split('.')[0]
     stationNumbers = self.get_station_num('WT')
     temperature_data = None
     for x in stationNumbers:
         temperature_x = self.obj.GetTimeSeriesData(x, ['WT'], sDate,
                                                    eDate, self.DataFormat)
         temperature_data = merge(temperature_data, temperature_x)
     return temperature_data
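
A note on the accumulator above: jsonmerge treats a None base as empty, so the first pass through the loop simply adopts the head. A minimal sketch with hypothetical readings:

    import jsonmerge

    temperature_data = None
    for reading in ({"station1": [20.1]}, {"station2": [19.7]}):
        temperature_data = jsonmerge.merge(temperature_data, reading)
    assert temperature_data == {"station1": [20.1], "station2": [19.7]}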
Example #40
    def test_merge_null(self):

        schema = {'mergeStrategy': 'objectMerge'}

        base = {'a': 'a'}
        head = {'a': None}

        r = jsonmerge.merge(base, head, schema)

        self.assertEqual(head, r)
Example #41
def write_role(rolename, data):
    p = path.join(FACTS_DIR, rolename + '.fact')
    try:
        with open(p, 'r') as data_file:
            js = json.load(data_file)
    except IOError:
        js = {}

    js = merge(js, data)
    with open(p, 'w+') as data_file:
        json.dump(js, data_file, indent=4)
Example #42
def run(files):
    data = []
    flags = []
    result = {}
    flagsWithStr = {}
    openFiles(files, data, flags)
    result = merge(data[0], data[1])
    assignBranch(data, flags, result)
    assignVariant(data, flags, result)
    validateResult(result)
    createFlags(flags, result)
    dumpResult(result)
Example #43
def bufferslice(dataframeId, host="192.168.99.100", port="5000", newMajor=None, newMinor=None, pageStart=0, pageEnd=None, buffer=1000, looplimit=1000000):
  searchterm = {"dataframeId" : dataframeId, "newMajor" : newMajor, "newMinor" : newMinor, "pageStart" : pageStart, "pageEnd" : pageEnd}
  url = 'http://' + host + ':' + port + '/dataframe/slice'
  if searchterm['newMinor'] is None:
      del searchterm['newMinor']
  else:
      searchterm['newMinor'] = {"keys" : newMinor}
  if searchterm['newMajor'] is None:
      del searchterm['newMajor']
  else:
      searchterm['newMajor'] = {"keys" : newMajor}
  if searchterm['pageStart'] is None:
      searchterm['pageStart'] = 0
  n = pageEnd
  f = searchterm['pageStart']
  l = searchterm['pageStart'] + buffer
  if n is None:
      searchterm['pageEnd'] = l
  elif n > l:
      searchterm['pageEnd'] = l


  response = requests.post(url, json = searchterm)
  ans = json.loads(response.text)
  nans = ans
  c = 1
  if n is not None:
      if n < l:
          return ans

  while len(nans['contents']) > 0:
      f = f + buffer
      l = l + buffer
      c = c + 1
      nreq = searchterm
      nreq['pageStart'] = f
      nreq['pageEnd'] = l
      if n is not None:
          if n < l:
              nreq['pageEnd'] = n
      response = requests.post(url, json = nreq)
      nans = json.loads(response.text)
      if len(nans['contents']) < 1:
          return ans
      ans['contents'] = jsonmerge.merge(ans['contents'], nans['contents'])
      if n is not None:
          if n < l:
              return ans
      elif c >= looplimit:
          ans['loop'] = 'You were stuck in an infinite loop!'
          return ans
  return ans
Example #44
def transform_file(filename):
    # count the lines in the file
    num_lines = sum(1 for line in open(filename))

    jsons = []
    with open(filename, 'r') as f:
        for i in range(num_lines):
            jsons.append(next(f))

    # merge every line's JSON object into one result, left to right
    rv = json.loads(jsons[0])
    for raw in jsons[1:]:
        rv = merge(rv, json.loads(raw))

    with open('output.json', 'w') as f:
        json.dump(rv, f)
Example #45
    def config_file(self, old_cfg_file, new_cfg_file):
        with open(old_cfg_file, 'r') as ocf:
            data_old_cfg_file = json.load(ocf)
        with open(new_cfg_file, 'r') as ncf:
            data_new_cfg_file = json.load(ncf)
        cfg_dir = os.path.split(old_cfg_file)[0]
        file_name = os.path.split(old_cfg_file)[1]
        tmp_file = os.sep.join([cfg_dir, 'tmp_%s' % file_name])
        result = merge(data_old_cfg_file, data_new_cfg_file)

        with open(tmp_file, 'w') as tmp_f:
            tmp_f.write(json.dumps(result, indent=4))

        os.rename(tmp_file, old_cfg_file)
Example #46
    def execute(self, context):
        # logging.info("Options {0}: {1}".format(self.task_id, str(sys.argv)))
        # logging.info(
        #     '{self.task_id}: Looking for files in {self.outputs}'.format(**locals()))

        # all_done = True
        # while all_done:
        #     all_done = False
        #     for t in self.upstream_list:
        #         ti = TaskInstance(
        #             t, execution_date=context['ti'].execution_date)
        #         if ti.state != State.SUCCESS:
        #             all_done = True
        #             break
        #     sleep(1)

        upstream_task_ids = [t.task_id for t in self.upstream_list]
        upstream_data = self.xcom_pull(context=context, task_ids=upstream_task_ids)

        promises = {}
        for data in upstream_data:
            promises = merge(promises, data["promises"])
            if "outdir" in data:
                self.outdir = data["outdir"]

        if "working_folder" in promises:
            self.working_dir = promises["working_folder"]
        else:
            raise cwltool.errors.WorkflowException("working_folder is required")

        if not self.outdir:
            raise cwltool.errors.WorkflowException("Outdir is not provided, please use job dispatcher")

        logging.info(
            'Cleanup: {0}\n{1}\n{2}'.format(promises, self.outdir, self.outputs))

        for out in self.outputs:
            if out in promises and promises[out]["class"] == "File":
                dst_file = os.path.join(self.working_dir, os.path.basename(promises[out]["path"]))
                if os.path.exists(dst_file):
                    os.remove(dst_file)
                shutil.copy(promises[out]["path"], self.working_dir)

        for rmf in self.rm_files:
            if os.path.isfile(rmf):
                os.remove(rmf)

        shutil.rmtree(self.outdir, True)
Example #47
def save_json_data(filepath, data):
    """
    Save JSON data to a file, merging with any contents already on disk
    (existing values on disk take precedence)
    """
    if not os.path.exists(filepath):
        with open(filepath, 'w+') as outfile:
            json.dump(data, outfile, indent=4, sort_keys=True)
    else:
        loaded_data = {}
        with open(filepath) as data_file:
            loaded_data = json.load(data_file)
        compiled_data = merge(data, loaded_data)

        with open(filepath, 'w+') as outfile:
            json.dump(compiled_data, outfile, indent=4, sort_keys=True)
Example #48
    def _update(self, user_id, new_json, current_config=None):
        ''' handles the insertion of user info into the database '''

        if not current_config:
            current_config = self._get_config_blob(user_id)

        if current_config:
            if current_config['json'] != '0':
                merged_json = merge(
                    pickle.loads(current_config['json']), new_json)

                self.db.update('settings', {'json': pickle.dumps(
                    merged_json, -1)}, {'userid': user_id})
        else:
            self.db.insert(
                'settings', {'userid': user_id, 'json': pickle.dumps(new_json, -1)})
Example #49
def import_process(project_path, path, process_name):
    # folder:   [process name]
    os.mkdir(os.path.join(project_path, "processes", process_name))
    # template: [process name]_controllers.py
    template = env.get_template(path + '/' + process_name + '/' + process_name + '_controllers.tmp')
    template = template.render()
    save_filled_template(os.path.join(project_path, "processes", process_name, process_name + "_controllers.py"), template)
    # template: [process name]_models.py
    template = env.get_template(path + '/' + process_name + '/' + process_name + '_model.tmp')
    template = template.render()
    save_filled_template(os.path.join(project_path, "processes", process_name, process_name + "_models.py"), template)
    # template: [process name]_process.json
    template = env.get_template(path + '/' + process_name + '/' + process_name + '_process.tmp')
    template = template.render()
    save_filled_template(os.path.join(project_path, "processes", process_name, process_name + "_process.json"), template)
    # template: [process name]_views.py
    template = env.get_template(path + '/' + process_name + '/' + process_name + '_views.tmp')
    template = template.render()
    save_filled_template(os.path.join(project_path, "processes", process_name, process_name + "_views.py"), template)
    # template: __init__.py
    template = env.get_template('empty.tmp')
    template = template.render()
    save_filled_template(os.path.join(project_path, "processes", process_name, "__init__.py"), template)

    # folder structures
    os.mkdir(os.path.join(project_path, "processes", process_name, "structure"))
    # loop over files
    for subdir, dirs, files in os.walk(os.path.join("ablaufpad", "templates", "Input_handler", process_name, "structure")):
        for actual_file in files:
            template = env.get_template("Input_handler/" + process_name + "/structure/" + actual_file)
            template = template.render()
            filename = actual_file.split(".")
            save_filled_template(os.path.join(project_path, "processes", process_name, "structure", filename[0] + ".json"), template)

    # merge configuration
    for subdir, dirs, files in os.walk(os.path.join("ablaufpad", "templates", "Input_handler", process_name, "configuration")):
        for actual_file in files:
            _process_data = open(subdir + "/" + actual_file).read()
            process_data = json.loads(_process_data)
            _data = open(os.path.join(project_path, "configuration", actual_file)).read()
            data = json.loads(_data)
            result = jsonmerge.merge(data, process_data)

            with open(os.path.join(project_path, "configuration", actual_file), 'w') as outfile:
                json.dump(result, outfile)
Example #50
    def ReadSocietyData( self ):
        #demo_fn_array = self.demo_fn_str.split(";")
        overlayed_json = {}
        for demo_fn in self.demo_fn_array:
            filename = demo_fn
            if not os.path.isfile( filename ):
                filename = os.path.join( os.path.join( self.input_dir, self.geo_name ), filename )
            #print( "filename = " + filename + "\n" )
            demo_file = open( filename, "r" )
            demo_json = json.loads( demo_file.read() )
            overlayed_json = merge( overlayed_json, demo_json )
            demo_file.close()

        if "Defaults" in overlayed_json.keys():
            if "Society" in overlayed_json["Defaults"].keys():
                self.society_json = overlayed_json["Defaults"]["Society"] # !!! GOT DATA !!!
        else:
            raise Exception("Could not find Society element in demographics")
Example #51
def entry_create(pokemon):
    poke = Dict()
    poke.update(
        merge(
            pokemon_data_base, pokemon_data_full[pokemon]
        )
    )
    poke.hatch_time = random.randint(
        poke.hatch_time[0], poke.hatch_time[1]
    )
    poke.leveling_rate = exp_translation[poke.leveling_rate]
    poke.moves_starting = [move_translations[x] for x in poke.moves_starting]
    poke.moves_tms = [move_translations[x] for x in poke.moves_tms]
    poke.moves_evolved = {
        x: move_translations[poke.moves_evolved[x]]
        for x in poke.moves_evolved
    }
    return poke