Code Example #1
File: Utils.py  Project: cheunglei/myLENSR
def load_json(f):
    import json, jsoncomment

    if isinstance(f, str):
        f = open(f, 'r')
    jc = jsoncomment.JsonComment(json)
    r = jc.load(f)
    # ro = jc.load(f, object_hook=lambda d: namedtuple('X', d.keys())(*d.values()))
    f.close()
    return r
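
Note: the snippet below is a minimal standalone sketch, not taken from any of the projects above. It assumes only that the jsoncomment package is installed and that, as documented, it strips single-line comments and trailing commas before delegating to the standard json module.

import json
import jsoncomment

# Wrap the standard json module with a comment-aware parser.
parser = jsoncomment.JsonComment(json)
text = """
{
    // comments like this are removed before parsing
    "name": "example",
    "values": [1, 2, 3],
}
"""
data = parser.loads(text)
print(data["values"])  # [1, 2, 3]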
Code Example #2
File: config.py  Project: Teriks/TGMiner
    def load(self):
        parser = jsoncomment.JsonComment(json)
        with open(self.config_path) as file:
            parsed_object = parser.load(file)

        try:
            self._config = self._validator.validate(parsed_object, namespace=True, copy=False)
        except dschema.ValidationError as e:
            raise TGMinerConfigException("Config Error: " + str(e))

        self.__dict__.update(self._config.__dict__)
Code Example #3
    def __init_config_from_file(self, path):
        def add_dict(config, d):
            # Recursively merge dict d into config, descending into nested dicts.
            for k, v in d.items():
                kv = config.get(k, None)
                if isinstance(kv, dict) and isinstance(v, dict):
                    add_dict(config=kv, d=v)
                else:
                    config[k] = v

        with open(path) as f:
            add_dict(config=self.__config,
                     d=jsoncomment.JsonComment(json).loads(f.read()))
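
A hypothetical call illustrating the recursive merge performed by add_dict above (the dictionaries are invented for illustration; add_dict itself is only visible inside __init_config_from_file):

config = {"db": {"host": "localhost", "port": 5432}}
# add_dict(config=config, d={"db": {"port": 5433}, "debug": True})
# config would become {"db": {"host": "localhost", "port": 5433}, "debug": True}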
Code Example #4
    def from_json(filepath):
        # type: (Union[str, Path]) -> Database
        '''
        Constructs a Database instance from a given JSON filepath.

        Args:
            filepath(Path or str): Path to JSON config file.

        Returns:
            Database: Database instance.
        '''
        with open(filepath) as f:
            config = jsonc.JsonComment().load(f)
        return Database(config)
Code Example #5
def pytest_generate_tests(metafunc):
    if "test_shiftr" == metafunc.definition.name:
        jsc = jsoncomment.JsonComment()
        os_path = os.path.join(os.path.dirname(os.path.realpath(__file__)),
                               'shiftr_tests')
        lst = []
        ids = []
        for json_file in os.listdir(os_path):
            try:
                js = jsc.loads(read_file(os.path.join(os_path, json_file)))
                lst.append(
                    JoltTestObject(json_file, js['input'], js['spec'],
                                   js['expected']))
                ids.append(json_file)
            except JSONDecodeError:
                pass
        metafunc.parametrize("shiftr_test", lst, ids=ids)
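
A hypothetical shiftr test file for the loop above might look like the following. The "input", "spec", and "expected" keys come from the code; the values are invented for illustration, and the comment is allowed because the file is parsed with jsoncomment.

{
    // map the input key "rating" to the output key "stars"
    "input": {"rating": 3},
    "spec": {"rating": "stars"},
    "expected": {"stars": 3}
}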
Code Example #6
def add_lyrics_lumi(part_results, lyrics_file, year):
    parser = jsoncomment.JsonComment(json)
    with open(lyrics_file, encoding="utf-8") as f:
        lyrics = parser.loads(f.read())
    i = 0
    for l in lyrics:
        song = clean_song_name(l["song"])
        found = False
        for pr in part_results:
            if clean_song_name(pr['song_name']) == song:
                pr['lyrics'] = l['lyrics']
                i += 1
                found = True
                break
        if not found:
            print "ERROR: year ", year, " song ", song, " lyrics not matched"

    print "INFO: for year {} {}/{} songs have lyrics available".format(
        year, i, len(part_results))
    return part_results
Code Example #7
def do_parse(file_path):
    """ @todo """
    try:
        with io.open(file_path, 'rb') as fp:
            s = fp.read()
    except Exception as e:
        return (str(e), None)

    # Initialize 'h' so the error path below can report an encoding even if chardet fails.
    h = {'encoding': 'unknown'}
    try:
        h = chardet.detect(s)
        try:
            with io.open(file_path, 'r', encoding=h['encoding']) as fp:
                s = fp.read()
        except Exception as e:
            return (str(e), None)
    except Exception as e:
        try:
            with io.open(file_path, 'r') as fp:
                s = fp.read()
        except Exception as e:
            return (str(e), None)

    parser = jsoncomment.JsonComment(json)

    try:
        j = parser.loads(s)
        return ('', j)
    except Exception as e:
        # Strip out single line comments
        lines = s.splitlines()
        s = ''
        for line in lines:
            line = re.sub(r'^\s*//.*$', '', line)
            s += line + "\n"
        try:
            j = parser.loads(s)
        except Exception as e2:
            j = None

        rv = '%s (%s)' % (str(e), h['encoding'])
        return (rv, j)
Code Example #8
File: app_test.py  Project: theNewFlesh/shekels
def test_on_event_save_button_error(dash_duo):
    with TemporaryDirectory() as root:
        test_config_path = Path(RESOURCES_DIR, 'test_config.json')
        with open(test_config_path) as f:
            config = jsonc.JsonComment().load(f)

        config['foo'] = 'bar'

        config_path = Path(root, 'config.json')
        with open(config_path, 'w') as f:
            json.dump(config, f)

        app.run(app.APP, config_path, debug=True, test=True)
        test_app = app.APP
        dash_duo.start_server(test_app)

        config = test_app.api.config

        # click on config tab
        dash_duo.find_elements('#tabs .tab')[2].click()
        time.sleep(0.1)

        # click init
        dash_duo.find_elements('#init-button')[-1].click()
        time.sleep(0.12)

        # delete config
        os.remove(config_path)

        # save bad config
        dash_duo.find_elements('#save-button')[-1].click()
        time.sleep(0.1)

        dash_duo.wait_for_element('#error')
        assert not config_path.is_file()
        result = dash_duo.wait_for_element(
            '#error tr td:last-child > div').text
        assert result == 'DataError'
Code Example #9
File: server_tools.py  Project: theNewFlesh/shekels
def parse_json_file_content(raw_content):
    # type: (str) -> Dict
    '''
    Parses JSON file content as supplied by HTML request.

    Args:
        raw_content (str): Raw JSON file content.

    Raises:
        ValueError: If header is invalid.
        JSONDecodeError: If JSON is invalid.

    Returns:
        dict: JSON content or response dict with error.
    '''
    header, content = raw_content.split(',')
    temp = header.split('/')[-1].split(';')[0]  # type: str
    if temp != 'json':
        msg = f'File header is not JSON. Header: {header}.'
        raise ValueError(msg)

    output = base64.b64decode(content).decode('utf-8')
    return jsonc.JsonComment().loads(output)
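
A hypothetical round trip for parse_json_file_content above; the "data:application/json;base64,<payload>" header format is an assumption, chosen to satisfy the header/content split and the 'json' check in the function:

import base64
import json

payload = json.dumps({"columns": ["date"]}).encode("utf-8")
raw_content = "data:application/json;base64," + base64.b64encode(payload).decode("ascii")
# parse_json_file_content(raw_content) would return {"columns": ["date"]}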
Code Example #10
File: app_test.py  Project: theNewFlesh/shekels
def test_on_event_save_button(dash_duo):
    with TemporaryDirectory() as root:
        test_config_path = Path(RESOURCES_DIR, 'test_config.json')
        with open(test_config_path) as f:
            config = jsonc.JsonComment().load(f)

        config['columns'] = ['date']

        config_path = Path(root, 'config.json')
        with open(config_path, 'w') as f:
            json.dump(config, f)

        app.run(app.APP, config_path, debug=True, test=True)
        test_app = app.APP
        dash_duo.start_server(test_app)

        config = test_app.api.config

        # click on config tab
        dash_duo.find_elements('#tabs .tab')[2].click()
        time.sleep(0.1)

        # click init
        dash_duo.find_elements('#init-button')[-1].click()
        time.sleep(0.1)

        # delete config
        os.remove(config_path)

        # save config
        dash_duo.find_elements('#save-button')[-1].click()
        time.sleep(0.1)

        assert config_path.is_file()
        with open(config_path) as f:
            result = json.load(f)['columns']
        assert result == ['date']
Code Example #11
File: data.py  Project: awemany/buv
import logging
import json
import jsoncomment
import buv_types
from validate import incrementalValidateAndAdd

# Rebind 'json' to a comment-aware parser that wraps the standard json module.
json = jsoncomment.JsonComment(json)

# in memory 'DB' of all data, mapping SHA256 strings to
# objects.
all_data={}

# initial membership proposal
genesis_members_hash=None

# maps handles to addresses, any seen
addr_for_handle={}

def reset():
    global all_data
    all_data={}
    
from buv_types import Vote, ProposalMetadata, MemberDict, ProposalText, Election

def for_type(t):
    for v in all_data.values():
        if isinstance(v, t):
            yield v
            
def readFile(fn):
    logging.info("Reading file "+fn)
Code Example #12
    def run(self):
        """ @todo docstring me """

        if len(sys.argv) >= 3:
            filespec = sys.argv[2]
        else:
            filespec = '*.json'

        if len(sys.argv) >= 2:
            dir_name = sys.argv[1]
        else:
            dir_name = '.'

        self.dir = dir_name

        self.logger = logging.getLogger()
        self.logger.setLevel(logging.INFO)

        logger2 = logging.getLogger('urllib3')
        logger2.setLevel(logging.CRITICAL)

        parser = jsoncomment.JsonComment(json)

        if not os.path.isdir(TMP_DIR):
            os.makedirs(TMP_DIR)

        mask = dir_name + '/' + filespec
        logging.info("==> Processing dir %s", mask)
        for file in glob.glob(mask):
            self.file = os.path.basename(file)
            self.basename = os.path.splitext(self.file)[0]
            logging.info("--> Processing file %s", file)
            with io.open(file, 'r', encoding='utf-8') as f:
                self.data = f.read()
            orig_data = self.data
            j = parser.loads(self.data)
            _hash = ''
            if self.check_hash and 'hash' in j:
                _hash = j['hash']
            scheme = self.process(j, 'homepage')
            scheme = self.process(j, 'license')
            scheme = self.process(j, 'url', _hash)
            if self.schemes_changed(scheme):
                logging.info("run: url: scheme=%s", scheme)
                self.fix_schemes(j, 'autoupdate', scheme)
            scheme = self.process(j, 'checkver')
            if 'checkver' in j:
                if isinstance(j['checkver'], dict):
                    scheme = self.process(j['checkver'], 'github')
            if 'architecture' in j:
                scheme = self.process(j['architecture'], '32bit', '',
                                      'architecture')
                if self.schemes_changed(scheme):
                    logging.info("run: architecture.32bit: scheme=%s", scheme)
                    if 'autoupdate' in j:
                        if 'architecture' in j['autoupdate']:
                            self.fix_schemes(j['autoupdate']['architecture'],
                                             '32bit', scheme,
                                             'autoupdate.architecture')

                scheme = self.process(j['architecture'], '64bit', '',
                                      'architecture')
                if self.schemes_changed(scheme):
                    logging.info("run: architecture.64bit: scheme=%s", scheme)
                    if 'autoupdate' in j:
                        if 'architecture' in j['autoupdate']:
                            self.fix_schemes(j['autoupdate']['architecture'],
                                             '64bit', scheme,
                                             'autoupdate.architecture')
            if self.data != orig_data:
                logging.info("Updating %s", file)
                if os.path.isfile(file + '.bak'):
                    os.remove(file + '.bak')
                os.rename(file, file + '.bak')
                with io.open(file, 'w', encoding='utf-8') as f:
                    f.write(self.data)
Code Example #13
def do_parse(file_path):
    """@todo"""
    try:
        with io.open(file_path, "rb") as fp:  # read binary
            json_data = fp.read()
    except Exception as e:
        return (str(e), None)

    h = {"encoding": "unknown"}
    try:
        h = chardet.detect(json_data)
        try:
            with io.open(file_path, "r", encoding=h["encoding"]) as fp:
                json_data = fp.read()
        except Exception as e:
            return (str(e), None)
    except Exception:
        try:
            with io.open(file_path, "r") as fp:  # read non-binary
                json_data = fp.read()
        except Exception as e:
            return (str(e), None)

    parser = jsoncomment.JsonComment(json)

    try:
        j = parser.loads(json_data)
        rv = ""
        try:
            jsonschema.validate(j, scoop_schema_data)
            return ("", j)
        except Exception as e:
            err = str(e)
            err = parse_validation_error(err)
            # print(
            #    "\nError: Invalid json: %s:\n%s\n%s\n%s"
            #    % (os.path.basename(file_path), "=" * 80, err, "=" * 80)
            # )
            m = re.search(r"(Failed validating.*)", err)
            if m is not None:
                err = m.group(1)
            else:
                err = "Failed schema validation against %s" % scoop_schema_name
            if re.search(r"additionalProperties", err):
                return (rv, j)
            rv = err

        return (rv, j)
    except Exception as e:
        # Strip out single line comments
        lines = json_data.splitlines()
        s = ""
        for line in lines:
            line = re.sub(r"^\s*//.*$", "", line)
            s += line + "\n"
        try:
            j = parser.loads(s)
        except Exception:
            j = None

        rv = "%s (%s)" % (str(e), h["encoding"])
        return (rv, j)
Code Example #14
def main(args=None):
    if not args:
        args = sys.argv[1:]

    logging.basicConfig(level=logging.DEBUG)

    _logger.info(
        "initialization script of container argument {0}".format(args))

    parser = argparse.ArgumentParser(
        description="init container 0 - javaagent")
    parser.add_argument("--jacoco",
                        action="store_true",
                        default=False,
                        dest="jacoco",
                        help="activate usage of jacoco agent")
    parser.add_argument("--jacocoagent",
                        action="store",
                        default=None,
                        dest="jacocoagent",
                        help="set jacoco agent property")
    args, boot_args = parser.parse_known_args(args)
    javaagent = None
    javaagent_format = "-javaagent:{pathjar}=output={output},address={address},port={port},includes={includes}"
    if args is not None:
        if args.jacoco:
            javaagent = javaagent_format.format(
                pathjar="/opt/cohorte/extra/jacoco/jacocoagent.jar",
                output="tcpserver",
                address="*",
                port="6300",
                includes="com.cohorte.*:org.cohorte.*")
            _logger.info(
                "--jacoco is set, constructed javaagent={}".format(javaagent))
        elif args.jacocoagent is not None:
            # use the user-supplied agent options, filling in the agent jar path
            javaagent = ("-javaagent:{pathjar}" + args.jacocoagent).format(
                pathjar="/opt/cohorte/extra/jacoco/jacocoagent.jar")
            _logger.info(
                "--jacocoagent is set, constructed javaagent={}".format(
                    javaagent))

        if javaagent is not None:
            w_parser = jsoncomment.JsonComment(json)
            _logger.info(
                "read composition files in order to retrieve the isolate names and add vm_args if necessary"
            )
            for isolate_name in get_list_isolate():

                w_isolate_json = get_json_from_file(w_parser, isolate_name)
                _logger.info("update isolate configuration files {}".format(
                    isolate_name))

                if "vm_args" in w_isolate_json:
                    w_isolate_json["vm_args"].append(javaagent)
                else:
                    w_isolate_json["vm_args"] = [javaagent]
                # # add comment
                w_str = '/*'\
                 ' Modify by init_container_0.py, add jacoco agent in vm_args '\
                 '*/'
                w_str = w_str + json.dumps(w_isolate_json, indent=4)
                write_file(w_str, isolate_name, True)

        else:
            print("do nothin, not relevant argument")

            # find in conf all isolat

    for isolate_name in get_list_isolate():
        real_name = isolate_name.replace("isolate_", "")[:-3]
        # manager initialization of current.properties , current.properties.mdl
        filename_path_mdl = "{}/install.properties.mdl".format(real_name)
        filename_path = "{}/install.properties".format(real_name)
        try:
            with open(filename_path_mdl) as a_properties_file:
                lines = a_properties_file.readlines()

            new_content = "\n".join(replace_vars(os.environ, lines))
            new_content = "# generated by init_container_0_iotPack" + new_content

            write_file(new_content, filename_path, False)
        except Exception:
            _logger.info("no install.properties file")
    print("do nothin, no argument received")