Example #1
def transform_history_to_factor(r):
    name = 'TODO'
    if 'T_RandomAffine' in r:
        raff = r.T_RandomAffine
        if not isinstance(raff, float):
            par = json.loads(raff)
            name = 'Aff_S{:.1f}R{}'.format(par['scaling'][0], int(par['rotation'][0]))

    if 'T_RandomAffineFFT' in r:
        raff = r.T_RandomAffineFFT
        if not isinstance(raff, float):
            par = json.loads(raff)
            name = 'AffFFT_S{:.1f}R{}'.format(par['scaling'][0], int(par['rotation'][0]))

    if 'T_RandomElasticDeformation' in r:
        raff = r.T_RandomElasticDeformation
        if not isinstance(raff, float):
            name = 'Ela'

    if 'T_RandomBiasField' in r:
        raff = r.T_RandomBiasField
        if not isinstance(raff, float):
            name = 'Bias'

    return name
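
A minimal usage sketch, not from the original project: it assumes the record `r` behaves like a pandas Series (supporting both `'col' in r` and attribute access) and that the transform columns hold JSON strings.

import json
import pandas as pd

# Hypothetical history record; real records come from the augmentation log.
r = pd.Series({'T_RandomAffine': json.dumps({'scaling': [1.2], 'rotation': [10.0]})})
print(transform_history_to_factor(r))  # -> 'Aff_S1.2R10'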
Example #2
async def on_message(message):
    if message.author == client.user:
        return

    text = message.content
    valid = False

    try:

        data = commentjson.loads("{" + text + "}")
        valid = True
    except:
        pass

    try:
        data = commentjson.loads("[" + text + "]")
        valid = True
    except:
        pass

    try:
        data = commentjson.loads(text)
        valid = True
    except:
        pass

    try:
        if valid and len(text) > MIN_MESSAGE_LENGTH:
            data_string = commentjson.dumps(data, indent=2)
            if data_string.startswith("{") or data_string.startswith("["):
                channel = message.channel
                await message.delete()
                send = await message.channel.send(
                    "**Hey {}, I've formatted your json for you!**\n*Use `?format` for instructions on formatting your own json.*\n```json\n{}``` \n to delete this message react with a 🗑️"
                    .format(message.author.display_name, data_string))

                def check(reaction, user):
                    return user == message.author and str(
                        reaction.emoji) == '🗑️'

                try:
                    reaction, user = await client.wait_for('reaction_add',
                                                           timeout=60.0,
                                                           check=check)
                except asyncio.TimeoutError:
                    return
                else:
                    await send.delete()
    except Exception as exception:
        print(exception)
Example #3
async def on_message(message):
    if message.author == client.user:
        return

    text = message.content
    text = text.replace("```", "") ##This may cause issues if someone has a name like `````` but i doubt that would happen and it would break formatting anyways so ¯\_(ツ)_/¯
    valid = False

    try:

        data = commentjson.loads("{" + text + "}")
        valid = True
    except:
        pass

    try:
        data = commentjson.loads("[" + text + "]")
        valid = True
    except:
        pass

    try:
        data = commentjson.loads(text)
        valid = True
    except:
        pass

    try:
        if valid and len(text) > MIN_MESSAGE_LENGTH:
            data_string = commentjson.dumps(data, indent=2)
            if data_string.startswith("{") or data_string.startswith("["):
                channel = message.channel
                send = await message.channel.send(
                    "**{},**\n To delete this message react with a 🚫.\n```json\n{}``` \n ".format(
                        message.author.display_name, data_string))
                await send.add_reaction('🚫')
                await asyncio.sleep(0.2)  # avoid blocking the event loop inside the coroutine
                await message.delete()
                def check_reactions(reaction, user) -> bool:
                    return user.id==message.author.id and reaction.emoji=='🚫' and reaction.message.id==send.id
                try:
                    reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check_reactions)
                except asyncio.TimeoutError:
                    await send.clear_reactions()
                    return
                else:
                    await send.delete()
    except Exception as exception:
        print(exception)
Example #4
    def __init__(self, modelpath):
        """Import model data, initialise orbital pair index matrix.

        Arguments:
            modelpath: path to the model json file
        """

        # Catch invalid model path
        if os.path.exists(modelpath) == False:
            print("ERROR: Unable to open tight binding model file:", modelpath)
            sys.exit()

        # Import the tight binding model parameters
        with open(modelpath, 'r') as modelfile:
            modeldata = commentjson.loads(modelfile.read())

        # Store imported data as attributes
        self.atomic = modeldata['species']
        self.data = modeldata['hamiltonian']

        # The model includes l=2 orbitals, hence space for 3 integrals
        # (in order: dd_sigma, dd_pi and dd_delta) is made.
        self.v = np.zeros(3, dtype="double")

        # Generate pair of indices for each pair of shells, showing which values
        # of v to use. This follows from the Slater-Koster table.
        #
        # Example: two interacting shells with max l of 2.
        # l=2, l=2: dd_sigma, dd_pi and dd_delta, slice v from 0 to 3: v[0] to v[2]
        #
        # This needs to be expanded for d and f orbitals, and is model-independent.
        self.v_bgn = np.array([[0]])
        self.v_end = np.array([[3]])
Example #5
def main():
	############# Load config file ##################
	file_ = open("config.json", "r")
	config_str = file_.read()
	file_.close()
	config = commentjson.loads(config_str)
	################################################

	################ Set logger ######################
	logger = logging.getLogger("iFace Clinet")
	logger.setLevel(logging.DEBUG)
	# create file handler which logs even debug messages
	fh = logging.handlers.RotatingFileHandler('./logs/'+ config['NAME_backup_file'], maxBytes=config['MAX_size_log'], backupCount=config['COUNT_backup'])
	fh.setLevel(logging.DEBUG)

	# create console handler with a higher log level
	ch = logging.StreamHandler()
	ch.setLevel(logging.ERROR)
	# create formatter and add it to the handlers
	formatter = logging.Formatter("%(asctime)s  - %(levelname)s - %(message)s")
	ch.setFormatter(formatter)
	fh.setFormatter(formatter)

	# add the handlers to logger
	logger.addHandler(ch)
	logger.addHandler(fh)
	################################################
	logger.warn("Program is STARTING !")

	t = threading.Thread(target=runFaceDetect, args=(sys.argv[1],config, logger))
	t.start()
	runGui(config)
Example #6
    def handle_local_config(self):
        '''
        Use the local file if present; otherwise use factory values and write them to disk,
        unless self.create_missing_files is False, in which case raise IOError.
        '''

        # If override config file exists, use it
        if os.path.isfile(self.config_filename):
            with open(self.config_filename, 'r') as f:
                content = f.read()
                config = json.loads(content)

        # If we are instructed to create fresh override file, do it
        # unless the filename is something other than DEFAULT_CONFIG_FILENAME
        elif self.create_missing_files and self.config_filename == res.DEFAULT_CONFIG_FILENAME:
            # Merge in any defaults provided by the subclass
            default_config_copy = copy.deepcopy(res.FACTORY_DEFAULT_CONFIG)
            default_config_copy.update(self.get_factory_defaults_hook())

            # Don't want changes to config modifying the FACTORY_DEFAULT
            config = copy.deepcopy(default_config_copy)

            with open(self.config_filename, 'w') as f:
                f.write(json.dumps(default_config_copy, indent=4, separators=(',', ': ')))

        # Otherwise complain
        else:
            raise IOError(self.config_filename + ' could not be found')

        # Validate and save results
        self._validate_config(config)
        self.config = config
Example #7
    def _get_launch_json(self, name, template):
        replacements = {}
        replacements["%MODULE%"] = name
        replacements["%MODULE_FOLDER%"] = name

        launch_json_file = None
        is_function = False
        if template == "csharp":
            launch_json_file = "launch_csharp.json"
            replacements["%APP_FOLDER%"] = "/app"
        elif template == "nodejs":
            launch_json_file = "launch_node.json"
        elif template == "csharpfunction":
            launch_json_file = "launch_csharp.json"
            replacements["%APP_FOLDER%"] = "/app"
            is_function = True

        if launch_json_file is not None:
            launch_json_file = os.path.join(os.path.split(__file__)[0], "template", launch_json_file)
            launch_json_content = self.utility.get_file_contents(launch_json_file)
            for key, value in replacements.items():
                launch_json_content = launch_json_content.replace(key, value)
            launch_json = commentjson.loads(launch_json_content)
            if is_function and launch_json is not None and "configurations" in launch_json:
                # for Function modules, there shouldn't be launch config for local debug
                launch_json["configurations"] = list(filter(lambda x: x["request"] != "launch", launch_json["configurations"]))
            return launch_json
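
A small illustration of the filter used above for function modules; the configuration entries here are invented, only the "request" field matters.

# Hypothetical launch.json content after placeholder replacement.
launch_json = {"configurations": [
    {"name": "Attach to module (assumed)", "request": "attach"},
    {"name": "Launch module locally (assumed)", "request": "launch"},
]}
# Same filter as in _get_launch_json: drop local-debug ("launch") configurations.
launch_json["configurations"] = list(
    filter(lambda x: x["request"] != "launch", launch_json["configurations"]))
print(launch_json["configurations"])  # only the "attach" entry remains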
Example #8
    def __init__(self, modelpath):
        """Import model data, initialise orbital pair index matrix.

        Arguments:
            modelpath: path to the model json file
        """

        # Catch invalid model path
        if os.path.exists(modelpath) == False:
            print "ERROR: Unable to open tight binding model file:", modelpath
            sys.exit()

        # Import the tight binding model parameters
        with open(modelpath, 'r') as modelfile:
            modeldata = commentjson.loads(modelfile.read())

        # Store imported data as attributes
        self.atomic = modeldata['species']
        self.data = modeldata['hamiltonian']

        # The model includes l=2 orbitals, hence space for 3 integrals 
        # (in order: dd_sigma, dd_pi and dd_delta) is made.
        self.v = np.zeros(3, dtype="double")

        # Generate pair of indices for each pair of shells, showing which values
        # of v to use. This follows from the Slater-Koster table.
        # 
        # Example: two interacting shells with max l of 2.
        # l=2, l=2: dd_sigma, dd_pi and dd_delta, slice v from 0 to 3: v[0] to v[2]
        #
        # This needs to be expanded for d and f orbitals, and is model-independent.
        self.v_bgn = np.array([[0]])
        self.v_end = np.array([[3]])
Example #9
 def test_loads(self):
     for index, test_json_ in iteritems(self.test_json):
         commented = test_json_['commented']
         uncommented = test_json_['uncommented']
         self.assertEqual(commentjson.loads(commented),
                          json.loads(uncommented),
                          'Failed for test: %s' % test_json_['commented'])
Example #10
def jsonfile_to_dict(jsonfilename):
    jsonfile = open(jsonfilename, "r")
    json_string = jsonfile.read()
    #parser = JsonComment(json)
    jsonfile.close()
    #json_dict = parser.loads(json_info)
    json_dict = commentjson.loads(json_string)
    return json_dict
Example #11
 def load_bias_sheet(self,jsonfilename):    
     jsonfile = open(jsonfilename,"r")
     json_string = jsonfile.read()
     #parser = JsonComment(json)
     jsonfile.close()
     #json_dict = parser.loads(json_info)
     json_dict = commentjson.loads(json_string)
     return json_dict
Example #12
def jsonfile_to_dict(jsonfilename):
    jsonfile = open(jsonfilename,"r")
    json_string = jsonfile.read()
    #parser = JsonComment(json)
    jsonfile.close()
    #json_dict = parser.loads(json_info)
    json_dict = commentjson.loads(json_string)
    return json_dict
Example #13
 def __init__(self, confPath):
     # load and store the configuration and update the object's dictionary
     #  the file pointed to by confPath is stored as JSON
     # build conf from the confPath file
     conf = json.loads(open(confPath).read())
     # conf = {"image_dataset": "datasets/caltech101/101_ObjectCategories/car_side"}
     # store conf into __dict__
     self.__dict__.update(conf)
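
A usage sketch under assumptions: the class name `Conf` and the file name `conf.json` are placeholders (the snippet only shows `__init__`), and the file contains the JSON shown in the comment above.

# conf.json (assumed): {"image_dataset": "datasets/caltech101/101_ObjectCategories/car_side"}
conf = Conf("conf.json")
# Every top-level key of the JSON file becomes an attribute:
print(conf.image_dataset)  # -> datasets/caltech101/101_ObjectCategories/car_side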
Example #14
    def load_config(self, view=None, config=None):
        """
        Load config from self.config_filename, break if it doesn't exist
        Load any overrides from environment variables
        Validate all loaded values
        """
        # Allow overriding the view for testing purposes
        if not view:
            view = self.view

        # Allow overriding of the entire config object
        if self.config_file_override:
            config = self.config_file_override

        # Else read from file
        else:
            if not os.path.isfile(self.config_filename):
                raise Exception(
                    "%s does not exist. Try running the init command to generate it.\n"
                    % self.config_filename)

            with open(self.config_filename, 'r') as f:
                try:
                    if re.search('[.]ya?ml$', self.config_filename):
                        config = yaml.load(f)
                    else:
                        content = f.read()
                        config = json.loads(content)
                except ValueError:
                    print('%s could not be parsed' % self.config_filename)
                    raise

        # Load in cli config overrides
        view.update_config(config)

        # record value of the debug variable
        debug = config['global']['print_debug']

        # Check the environment variables for any overrides
        self._config_env_override(config, '', print_debug=debug)

        # Validate and save results
        self._validate_config(config)
        self.config = config

        # Save shortcut references to commonly referenced config sections
        self.globals = self.config.get('global', {})
        self.template_args = self.config.get('template', {})

        # Register all stack handlers
        self.stack_monitor = monitor.StackMonitor(
            self.globals['environment_name'])
        for stack_handler in self.env_config.stack_event_handlers:
            self._add_stack_event_handler(stack_handler)

        # Register all deploy handlers
        for deploy_handler in self.env_config.deploy_handlers:
            self._add_deploy_handler(deploy_handler)
Example #15
 def test_loads(self):
     try: #python2
         test_iter = self.test_json.iteritems()
     except AttributeError: #python3
         test_iter = self.test_json.items()
     for index, test_json_ in test_iter:
         commented = test_json_['commented']
         uncommented = test_json_['uncommented']
         assert commentjson.loads(commented) == json.loads(uncommented)
Example #16
def loadJSON(path):
	# output('Loading JSON object from: `{}` ...'.format(path))
	
	with open(path, encoding='utf-8') as data:
		r = data.read()
	
	r = json.loads(r) if r != 'null' else {}
	
	return r
Example #17
    def __init__(self, jobfile):
        """Initialise the job."""
        if not os.path.exists(jobfile):
            raise FileNotFoundError("Unable to find job file: ", jobfile)

        # Set up variables that define the job in a dictionary
        with open(jobfile, 'r') as inputfile:
            self.Def = commentjson.loads(inputfile.read())

        # Fetch the model name path from the Job file
        if self.Def['model'] == "TBcanonical":
            modelname = self.Def['model']+"_"+self.Def['Hamiltonian'][0]
        else:
            modelname = self.Def['model']
        modelpath = os.path.join("models", modelname + ".py")

        # set isNoncollinearHami flag
        self.isNoncollinearHami = False
        if self.Def['Hamiltonian'] in ('scase', 'pcase', 'dcase',
                                       'vector_stoner'):
            self.isNoncollinearHami = True

        # Catch invalid model path
        if os.path.exists(modelpath) is False:
            print("ERROR: Unable to open tight binding model at %s. ")
            print(modelpath)
            sys.exit()

        # Has a directory for results been specified?
        if "results_dir" in self.Def.keys():
            # check to make sure that it has the final "/"
            if self.Def['results_dir'][-1] == "/":
                self.results_dir = self.Def['results_dir']
            else:
                self.results_dir = self.Def['results_dir']+"/"
            # Make sure that the directory where results will be put exists
            if not os.path.exists(self.results_dir):
                os.makedirs(self.results_dir)
        # If it hasn't been specified then just set it to the current directory.
        else:
            self.results_dir = "./"

        # Import the module responsible for the tight binding model
        model_module = importlib.import_module("models." + modelname)

        # Initialise the model class
        self.Model = model_module.MatrixElements(os.path.join("models", modelname + ".json"))

        # Initialise the geometry
        self.init_geom(self.Def['gy_file'], self.Def['uc_file'])

        # Initialise the Hamiltonian class
        self.Hamilton = Hamiltonian(self)

        # Initialise the electron module
        self.Electron = Electronic(self)
Example #18
    def test_loads_with_kwargs(self):
        def test_hook(loaded_dict):
            return {}
        commented = self.test_json['sample']['commented']
        test_kwargs = dict(object_hook=test_hook)

        c_load = commentjson.loads(commented, **test_kwargs)

        # make sure that object_hook did its work
        assert c_load == {}
Example #19
    def load_config(self, view=None, config=None):
        """
        Load config from self.config_filename, break if it doesn't exist
        Load any overrides from environment variables
        Validate all loaded values
        """
        # Allow overriding the view for testing purposes
        if not view:
            view = self.view

        # Allow overriding of the entire config object
        if self.config_file_override:
            config = self.config_file_override

        # Else read from file
        else:
            if not os.path.isfile(self.config_filename):
                raise Exception("%s does not exist. Try running the init command to generate it.\n" % self.config_filename)

            with open(self.config_filename, 'r') as f:
                try:
                    if re.search('[.]ya?ml$', self.config_filename):
                        config = yaml.load(f)
                    else:
                        content = f.read()
                        config = json.loads(content)
                except ValueError:
                    print('%s could not be parsed' % self.config_filename)
                    raise

        # Load in cli config overrides
        view.update_config(config)

        # record value of the debug variable
        debug = config['global']['print_debug']

        # Check the environment variables for any overrides
        self._config_env_override(config, '', print_debug=debug)

        # Validate and save results
        self._validate_config(config)
        self.config = config

        # Save shortcut references to commonly referenced config sections
        self.globals = self.config.get('global', {})
        self.template_args = self.config.get('template', {})

        # Register all stack handlers
        self.stack_monitor = monitor.StackMonitor(self.globals['environment_name'])
        for stack_handler in self.env_config.stack_event_handlers:
            self._add_stack_event_handler(stack_handler)

        # Register all deploy handlers
        for deploy_handler in self.env_config.deploy_handlers:
            self._add_deploy_handler(deploy_handler)
Example #20
    def __init__(self, filename, stopfile):
        self.schema = commentjson.loads(open(filename, 'r').read())

        #just convenience
        self.ents = self.schema['ents']
        self.types = self.schema['types']
        self.ind = Index(self.types, stopfile)
        self.indexFile = 'index'
        if 'links' in self.schema:
            self.defaults = self.schema['defaults']
            self.links = self.schema['links']
Example #21
 def load_config(self):
     config_path = "config.json"
     argv = COMMON.get_command_arg("-c")
     if argv:
         config_path = argv
     try:
         with open(config_path, "r") as f:
             return commentjson.loads(f.read())
     except Exception as e:
         logging.exception(e)
     return False
Example #22
    def test_loads_with_kwargs(self):
        def test_hook(loaded_dict):
            return {}
        commented = self.test_json['sample']['commented']
        uncommented = self.test_json['sample']['uncommented']
        test_kwargs = dict(object_hook=test_hook)

        c_load = commentjson.loads(commented, **test_kwargs)

        # make sure that object_hook did its work
        assert c_load == {}
Example #23
def readConf(file):
    if path.exists(path.abspath(file)):
        log.debug('Found %s' % file)
        load_conf = file
    else:
        log.fatal('Please check documentation')
        raise FileNotFoundError(path.abspath(file))

    with open(load_conf, 'r') as confInfo:
        info = commentjson.loads(confInfo.read())

        return info
Example #24
    def write_template_to_file(self):
        """
        Serializes self.template to string and writes it to the file named in config['global']['output']
        """
        indent = 0 if not self.config['global']['print_debug'] else 4

        with open(self.config['global']['output'], 'w') as output_file:
            # Here to_json() loads child templates into S3
            raw_json = self.template.to_template_json()

            reloaded_template = json.loads(raw_json)
            json.dump(reloaded_template, output_file, indent=indent, separators=(',', ':'))
Example #25
def columns() -> List[Dict[str, str]]:
    analysis = AnalysisResult()
    cols: Dict[str, ColumnDict] = {}
    # build up dictionary of default configs
    for attr, _ in analysis.openapi_types.items():
        cols.update({attr: gen_default_column(attr)})
    # apply configuration file on top
    with open(PATH + "/column-config.jsonc") as js_file:
        config: List[ColumnDict] = commentjson.loads(js_file.read())
        for c in config:
            cols.update({c["field_name"]: {**cols[c["field_name"]], **c}})
        return list(cols.values())
Example #26
def printColours(winTerm):
    ''' Print the colours for a theme '''
    scheme = commentjson.loads(open(winTerm).read()[:-2])
    keys = [
        "background", "black", "brightBlack", "foreground", "white",
        "brightWhite", "red", "yellow", "brightYellow", "green", "cyan",
        "blue", "purple", "brightRed", "brightYellow", "brightGreen",
        "brightCyan", "brightBlue", "brightPurple"
    ]
    for key in keys:
        cPrint(scheme[key])
    print()
Example #27
 def _merge_launch_json(self, new_launch_json):
     vscode_dir = os.path.join(os.getcwd(), ".vscode")
     self.utility.ensure_dir(vscode_dir)
     launch_json_file = os.path.join(vscode_dir, "launch.json")
     if os.path.exists(launch_json_file):
         launch_json = commentjson.loads(self.utility.get_file_contents(launch_json_file))
         launch_json['configurations'].extend(new_launch_json['configurations'])
         with open(launch_json_file, "w") as f:
             commentjson.dump(launch_json, f, indent=2)
     else:
         with open(launch_json_file, "w") as f:
             commentjson.dump(new_launch_json, f, indent=2)
Example #28
def loadJSONS(s):
	if isinstance(s, bytes):
		return json_alt.loads(s)
		
		"""
		try:
			s = s.decode('utf-8')
		except:
			output('Unable to decode bytes to string: {}'.format(s), 'ERROR')
			return False
		"""
	
	return json.loads(s)
Example #29
    def _cmnd(self, cmnd):
        if self.url.startswith('http://'):
            return requests.get(posixpath.join(self.url, 'cm'), {'cmnd': cmnd}, timeout=1).json()

        if self.url.startswith('mqtt://'):
            device_name, host, port, username, password = parse_mqtt_url(self.url)

            try:
                cmnd, payload = cmnd.split(None, 1)
            except ValueError:
                payload = None

            return json.loads(mqtt_cmnd(f'cmnd/{device_name}/{cmnd}', payload, f'stat/{device_name}/RESULT', host, port, username, password))
Example #30
def winTerm2hex(filename):
    """Generate the shemes (without #)

	Args:
		filename (str): filename from args

	Returns:
		dict: a dictionary representing the source theme
	"""
    profiles = commentjson.loads(open(filename).read())
    for scheme in profiles["schemes"]:
        for colour in scheme:
            scheme[colour] = scheme[colour].replace("#", "")

    return profiles["schemes"]
Example #31
def _get_trustzone(config: elftosb_helper.MasterBootImageConfig) -> TrustZone:
    """Create appropriate TrustZone instance."""
    if not config.trustzone_preset_file:
        return TrustZone.disabled()
    try:
        tz_config_data = json.loads(load_file(config.trustzone_preset_file))
        tz_config = elftosb_helper.TrustZoneConfig(tz_config_data)
        return TrustZone.custom(
            family=tz_config.family, revision=tz_config.revision, customizations=tz_config.presets
        )
    except ValueError:
        tz_raw_data = load_binary(config.trustzone_preset_file)
        return TrustZone.from_binary(
            family=config.family, revision=config.revision, raw_data=tz_raw_data
        )
Example #32
def load(config_name, mode=None):
    # load from a config.json
    file_name = config_name + '.json'
    file_dir = os.path.dirname(os.path.realpath(__file__))
    file_path = os.path.join(file_dir, file_name)

    if not os.path.exists(file_path):
        raise ValueError('no such config file: %s' % file_path)

    with open(file_path) as f:
        json_str = clean_json(f.read())
        config = cjson.loads(json_str)

    define_link(config)

    return config
Example #33
def load_json_conf(conf_file: str, key: str = 'BACKEND') -> dict:
    """Read configuration from file (JSON or JSONC).

    Return contents of 'key' block.
    """
    f_path = Path(conf_file)
    if not f_path.is_file():
        logger.error("No such file: " + f_path.absolute().as_posix())
        return None
    with open(f_path, encoding='utf-8') as f:
        raw_data = f.read()
    data = cjson.loads(raw_data)
    key_value = data.get(key, None)
    if key_value:
        return key_value
    return data
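
A hedged usage sketch; the file name `backend.jsonc` and its contents are assumptions, the function itself is exactly the one above.

# backend.jsonc (comments are allowed because the file is parsed with commentjson):
# {
#     // connection settings
#     "BACKEND": {"host": "localhost", "port": 5432}
# }
conf = load_json_conf("backend.jsonc")            # -> {"host": "localhost", "port": 5432}
whole = load_json_conf("backend.jsonc", key="X")  # missing key: the whole document is returned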
Example #34
    def read(self, filename):
        """load json file as dict object

        Parameters
        ----------
        filename : str
            filename of json file
    
        Returns
        ----------
        conf : dict
            dictionary containing contents of json file
    
        Examples
        --------
        """
        return json.loads(open(filename).read())
Example #35
def loads(s):
    """Load json or fallback to commentjson.

    We try to load the json with built-in json, and
    if it fails with JSONDecodeError we fallback to
    the slower but more tolerant commentjson to
    accommodate devices that use trailing commas
    in their json since iOS allows it.

    This approach ensures only devices that produce
    the technically invalid json have to pay the
    price of the double decode attempt.
    """
    try:
        return json.loads(s)
    except json.JSONDecodeError:
        return commentjson.loads(s)
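
A small usage sketch of the fallback, assuming (as the docstring states) that commentjson tolerates the trailing comma:

strict = '{"temperature": 21.5}'
tolerant = '{"temperature": 21.5,}'   # trailing comma: rejected by the built-in json

assert loads(strict) == {"temperature": 21.5}    # fast path, plain json.loads
assert loads(tolerant) == {"temperature": 21.5}  # falls back to commentjson.loads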
Example #36
def complete():
    token_and_usernames = []
    try:
        with writelock.acquire(timeout=10):
            w_file = open(filetemp, "r")
            temp = w_file.read().rstrip(",")
            current_string = []
            in_data = False
            for the_char in temp:
                if not in_data and the_char == '[':
                    in_data = True
                    current_string = ["["]
                elif in_data and the_char == ']':
                    current_string.append("]")
                    #logging.error("current string: ".join(current_string))
                    token_and_usernames.append(
                        commentjson.loads("".join(current_string)))
                    current_string = []
                    in_data = False
                elif in_data:
                    current_string.append(the_char)

            #for line in temp.split(','):
            #  logging.error(line)
            #  token_and_usernames.append(commentjson.loads(line))
            #token_and_usernames = commentjson.loads(temp)
            w_file.close()
    except filelock.Timeout:
        logging.error("Timeout when re-read temp file")

    result = {}
    result["version"] = 1
    result["variables"] = []
    variable = {}
    result["variables"].append(variable)
    variable["names"] = ["token", "username"]
    variable["values"] = token_and_usernames

    with open(fileoutput, "w") as w_file:
        w_file.write(commentjson.dumps(result))

    logging.info("Finished writing token for " +
                 str(len(token_and_usernames)) + " users.")
Example #37
    def __init__(self, particle_definition):
        """
        Args:
            particle_definition(json): Particle definition in json format. Depends on each particle
            implementation, but always requires a pcf_name field.
        """
        # If particle definition is passed as a json file, load it as a dictionary and continue
        if isinstance(particle_definition, str):
            with open(particle_definition) as file:
                particle_definition = commentjson.loads(file.read())
        self.particle_definition = particle_definition
        self.name = self.particle_definition["pcf_name"]
        self.validate_unique_id()
        self.persist_on_termination = self.particle_definition.get(
            "persist_on_termination", False)
        self.persist_on_update = self.particle_definition.get(
            "persist_on_update", False)
        self.callbacks = self.particle_definition.get("callbacks", {})
        self.desired_state = STATE_STRING_TO_ENUM.get(
            self.particle_definition.get("desired_state"))
        self.current_state_definition = {}
        self.desired_state_definition = {}
        self.custom_config = {}
        self.pcf_id = self.get_pcf_id()
        self.state_transition_table = {
            (State.terminated, State.running): self.start,
            (State.stopped, State.running): self.start,
            (State.stopped, State.terminated): self.terminate,
            (State.running, State.stopped): self.stop,
            (State.running, State.terminated): self.terminate
        }
        self.parents = set()
        self.children = set()

        self.current_state_transiton = None
        self.current_state_transition_start_time = None

        self.state_last_refresh_time = None
        self.state_cache_ttl = 15
        self.state_dirty = False

        self.unique_keys = []
Example #38
	def __init__(self, modelpath):
		"""Import model data, initialise orbital pair index matrix.

		Arguments:
			modelpath: path to the model json file
		"""

		# Catch invalid model path
		if os.path.exists(modelpath) == False:
			print "ERROR: Unable to open tight binding model file:", modelpath
			sys.exit()

		# Import the tight binding model parameters
		with open(modelpath, 'r') as modelfile:
			modeldata = commentjson.loads(modelfile.read())

		# Store imported data as attributes
		self.atomic = modeldata['species']
		self.data = modeldata['hamiltonian']

		# The model includes up to l=1 orbitals, hence space for five integrals 
		# (in order: ss_sigma, sp_sigma, ps_sigma, pp_sigma, pp_pi) is made.
		self.v = np.zeros(5, dtype="double")

		# Generate pair of indices for each pair of shells, showing which values
		# of v to use. This follows from the Slater-Koster table.
		# 
		# Example: two interacting shells with max l of 1 (eg C-C).
		# l=0, l=0: ss_sigma, slice v from 0 to 1: v[0] (ss_sigma)
		# l=0, l=1: sp_sigma, slice v from 1 to 2: v[1] (sp_sigma)
		# l=1, l=0: ps_sigma, slice v from 2 to 3: v[2] (ps_sigma)
		# l=1, l=1: pp_sigma and pp_pi, slice v from 3 to 5: v[3] and v[4]
		# (pp_sigma and pp_pi)
		#
		# This needs to be expanded for d and f orbitals, and is model-independent.
		self.v_bgn = np.array([[0, 1], [2, 3]])
		self.v_end = np.array([[1, 2], [3, 5]])
Example #39
File: main.py Project: kidk/felt
def main(args):
    """Main function.

    The main function parses the command line arguments, reads the input file
    and inits the generator.
    """
    # Parse arguments
    parser = argparse.ArgumentParser(description='Start workload.')
    parser.add_argument('--debug', action='store_true',
                        help="enable debug information")
    parser.add_argument('--verbose', action='store_true',
                        help="makes generator more verbose")
    parser.add_argument('--threads', type=int,
                        default=Options.DEFAULT_THREADS,
                        help="number of threads to run simultaneously")
    parser.add_argument('--test', action='store_true',
                        help="run a scenario only once")
    parser.add_argument('--slimerjs', action='store_true',
                        help="use slimerjs instead of phantomjs")
    parser.add_argument('--screenshot', action='store_true',
                        help="provide screenshots after each step")
    parser.add_argument('--user-agent', type=str, dest='userAgent',
                        help="provide a custom User-Agent")
    parser.add_argument('--max-time', type=int,
                        default=Options.DEFAULT_MAXTIME, dest='maxTime',
                        help="provide a maximum runtime")
    parser.add_argument('scenario')
    args = parser.parse_args()

    # Check if scenario exists
    if not os.path.isfile(args.scenario):
        print "scenario '%s' not found" % args.scenario
        return

    # Load from file and parse
    with open(args.scenario, 'r') as content_file:
        content = content_file.read()
    scenario = commentjson.loads(content)

    # Load in scenario
    scenario = Scenario(scenario)

    # Parse options
    options = Options()

    # Which browser are we using
    if args.slimerjs:
        options.setBrowser('slimerjs')

    # Threads option
    options.setThreads(args.threads)

    # Test option
    options.setTest(args.test)

    # Output information
    options.setVerbose(args.verbose)

    # Debug mode
    options.setDebug(args.debug)

    # Screenshot mode
    options.setScreenshot(args.screenshot)

    # User agent
    options.setUserAgent(args.userAgent)

    # Create watchdog thread
    options.setMaximumExectionTime(args.maxTime)

    # Create new Felt class
    felt = Felt(options, scenario)

    # Start worker
    felt.run()
Example #40
def loadProductUoms():
    json_data=open(Config.PRODUCT_UOM_FILE).read()
    return commentjson.loads(json_data)
Example #41
 def test_loads(self):
     for index, test_json_ in self.test_json.iteritems():
         commented = test_json_['commented']
         uncommented = test_json_['uncommented']
         assert commentjson.loads(commented) == json.loads(uncommented)
Example #42
def loadGroups():
    json_data=open(Config.GROUPS_FILE).read()
    return commentjson.loads(json_data)
Example #43
def loadTaxCodes():
    json_data=open(Config.TAX_CODES_FILE).read()
    return commentjson.loads(json_data)
Example #44
def loadFiscalPositions():
    json_data=open(Config.FPOSITIONS_FILE).read()
    return commentjson.loads(json_data)
Example #45
def loadStockLocations():
    json_data=open(Config.LOCATIONS_FILE).read()
    return commentjson.loads(json_data)
Example #46
def loadPaymentModes():
    json_data=open(Config.PAYMENT_MODES_FILE).read()
    return commentjson.loads(json_data)
Example #47
 def __init__(self, filename):
     self.filename = filename
     with open(filename, 'r') as file_handle:
         data = commentjson.loads(file_handle.read())
     self.data = data
Example #48
	def __init__(self, modelpath):
		"""Import model data, initialise orbital pair index matrix.

		Arguments:
			modelpath: path to the model json file
		"""

		# Catch invalid model path
		if os.path.exists(modelpath) == False:
			print("ERROR: Unable to open tight binding model file:", modelpath)
			sys.exit()

		# Import the tight binding model parameters
		with open(modelpath, 'r') as modelfile:
			modeldata = commentjson.loads(modelfile.read())

		# Store imported data as attributes
		self.atomic = modeldata['species']
		self.data = modeldata['hamiltonian']
		self.pairpotentials = modeldata['pairpotentials']
		self.embedded = modeldata['embedding']
		self.tabularisation = modeldata['tabularisation']

		# The model includes up to l=1 orbitals, hence space for five integrals 
		# (in order: ss_sigma, sp_sigma, ps_sigma, pp_sigma, pp_pi) is made.
		self.v = np.zeros(5, dtype='double')

		# Generate pair of indices for each pair of shells, showing which values
		# of v to use. This follows from the Slater-Koster table.
		# 
		# Example: two interacting shells with max l of 1 (eg C-C).
		# l=0, l=0: ss_sigma, slice v from 0 to 1: v[0] (ss_sigma)
		# l=0, l=1: sp_sigma, slice v from 1 to 2: v[1] (sp_sigma)
		# l=1, l=0: ps_sigma, slice v from 2 to 3: v[2] (ps_sigma)
		# l=1, l=1: pp_sigma and pp_pi, slice v from 3 to 5: v[3] and v[4]
		# (pp_sigma and pp_pi)
		#
		# This needs to be expanded for d and f orbitals, and is model-independent.
		self.v_bgn = np.array([[0, 1], [2, 3]])
		self.v_end = np.array([[1, 2], [3, 5]])

		# Shell grid for the integral tables of s,p and d. This grid contains the
		# total number of Slater-Koster integrals required to compute the interaction.
		# The row index refers to l of the first atom, the column to l of the second.
		#			
		# Example: Hydrogen is max l=0, carbon has max l=1.
		# Hence shells[0][1] returns 2, as there are two integrals to compute
		# for H-C: ss_sigma, sp_sigma.
		shells = [[1,  2,  3], 
		          [2,  5,  7],
		          [3,  7, 14]]

		# Create a function grid that stores the radial functions required to 
		# compute all the interatomic matrix elements. The grid has the dimension
		# [number of atomic species in the model]^2
		function_grid = [[0 for species1 in self.atomic] for species2 in self.atomic]
		pairpotential_grid = [[0 for species1 in self.atomic] for species2 in self.atomic]

		# This matrix saves radial function indices for the interaction between species i,j.
		function_map = [[[] for species1 in self.atomic] for species2 in self.atomic]
				
		# Attention: this assumes that all species have an s shell. Might have 
		# to be changed in the future to account for species only having p and d.
		for i, species1 in enumerate(self.atomic):
			for j, species2 in enumerate(self.atomic):
				shell_list1 = species1['l']
				shell_list2 = species2['l']

				# Fetch the total number of radial functions for the species i,j
				num_radialfunctions = shells[shell_list1[-1]][shell_list2[-1]]
				# Make space for the radial functions in the function_grid
				function_grid[i][j] = [0] * num_radialfunctions

				# Create mapping function
				for l1 in shell_list1:
					for l2 in shell_list2: 
						function_map[i][j].extend(range(self.v_bgn[l1, l2], self.v_end[l1, l2]))



		# Loop over interactions, assigning radial functions to the grid
		for i, species1 in enumerate(function_grid):
			for j, species2 in enumerate(species1):
				for radial_index, radial_function in enumerate(species2):
					function_grid[i][j][radial_index] = GoodWin(**self.data[i][j][radial_index]).radial

		# Create a function grid that stores the pairpotential functions required to 
		# compute the interatomic pairpotential. The grid has the dimension
		# [number of atomic species in the model]^2


		# Loop over interactions, assigning pairpotential functions to the grid
		for i, species1 in enumerate(self.atomic):
			for j, species2 in enumerate(self.atomic):
				pairpotential_grid[i][j] = GoodWin(**self.pairpotentials[i][j]).radial

		# ==== THIS IS MODEL SPECIFIC ===== #
		# Create a function grid that stores the embedded pairpotential functions 
		embedded_pairpotential = [[0 for species1 in self.atomic] for species2 in self.atomic]

		# Pair potential embedded function
		embed = lambda x: x*(self.embedded['a1'] + x*(self.embedded['a2'] + x*(self.embedded['a3'] + x*self.embedded['a4'])))

		# The loop below does not work as intended:
		#for i, species1 in enumerate(self.atomic):
		#	for j, species2 in enumerate(self.atomic):
		#		embedded_pairpotential[i][j] = lambda x: embed(pairpotential_grid[i][j](x))
		#
		# Test via:
		# print embed(pairpotential_grid[0][0](0.25)), embedded_pairpotential[0][0](0.25)
		#
		# The inconsistent results come from Python's late binding of closures: each
		# lambda looks up i and j only when it is called, so after the loops finish all
		# entries use the final loop indices. Binding the indices as default arguments,
		# lambda x, i=i, j=j: embed(pairpotential_grid[i][j](x)), would fix the loop;
		# the explicit definitions below avoid the problem.
		embedded_pairpotential[0][0] = lambda x: embed(pairpotential_grid[0][0](x))
		embedded_pairpotential[0][1] = lambda x: embed(pairpotential_grid[0][1](x))
		embedded_pairpotential[1][0] = lambda x: embed(pairpotential_grid[1][0](x))
		embedded_pairpotential[1][1] = lambda x: embed(pairpotential_grid[1][1](x))

		# Optionally interpolate radial functions
		if self.tabularisation["enable"] == 1:
			# Range of radii for the interpolating function
			rvalues = np.arange(0.5, 2.6, self.tabularisation["resolution"], dtype="double")
			interp_settings = {"k": 3, "s": 0, "ext": "zeros"}

			# Loop over interactions, interpolating radial functions
			for i, species1 in enumerate(function_grid):
				for j, species2 in enumerate(species1):
					for radial_index, radial_function in enumerate(species2):
						yvalues = [function_grid[i][j][radial_index](r) for r in rvalues]
						function_grid[i][j][radial_index] = UnivariateSpline(rvalues, yvalues, **interp_settings)
		# ==== END MODEL SPECIFIC ========= #

		# Store radial functions into the class
		self.shells = shells
		self.function_grid = function_grid
		self.function_map = function_map

		self.pairpotential_grid = embedded_pairpotential
Example #49
def get_json_resource(resource_name, relative_to_module_name=__name__):
    """
    Get package resource as json
    """
    return json.loads(get_resource(resource_name, relative_to_module_name))
Example #50
def make_magmomcorr_graphs(numeperatom):
    Verbose = 1
    number_decimals = 6
    orb_type = "p"
    plotname = "../output_PyLATO/Mag_Corr_"+orb_type+"_"+str(numeperatom)
    op_sq_name="\\frac{1}{3} \langle :\hat{\mathbf{m}}_1.\hat{\mathbf{m}}_2:\\rangle"

    U_min = 0.005
    U_max = 10
    U_num_steps = 100

    J_min = 0.005
    J_max = 5
    J_num_steps = 100

    dJ_min = 0
    dJ_max = 1
    dJ_num_steps = 1

    U_array, U_step = np.linspace(U_min, U_max, num=U_num_steps, retstep=True)
    # test
    U_array = np.append(U_array,U_max+U_step)

    if orb_type == "s":
        J_array = [0.0]
        J_step = 0.0
        dJ_array = [0.0]
        dJ_step = 0.0
    elif orb_type == "p":
        J_array, J_step = np.linspace(J_min, J_max, num=J_num_steps, retstep=True)
        # test
        J_array = np.append(J_array,J_max+J_step)
        dJ_array = [0.0]
        dJ_step = 0.0
    elif orb_type == "d":
        J_array, J_step = np.linspace(J_min, J_max, num=J_num_steps, retstep=True)
        dJ_array, dJ_step = np.linspace(dJ_min, dJ_max, num=dJ_num_steps, retstep=True)
        # test
        J_array = np.append(J_array,J_max+J_step)
        dJ_array = np.append(dJ_array,dJ_max+dJ_step)
    else:
        print("ERROR: orb_type must be 's', 'p' or 'd'. Exiting. ")
        sys.exit()


    jobdef_file = "JobDef.json"
    jobdef_backup = "JobDef_backup.json"
    # Make a backup of the JobDef file
    shutil.copyfile(jobdef_file, jobdef_backup)
    # Read in the JobDef file
    with open(jobdef_file, 'r') as f:
        jobdef = commentjson.loads(f.read())


    # Read in the model file
    modelfile = "models/TBcanonical_"+orb_type+".json"
    model_temp = "TBcanonical_"+orb_type+"_temp"
    temp_modelfile = "models/"+model_temp+".json"
    with open(modelfile, 'r') as f:
        model = commentjson.loads(f.read())
    # Copy and paste the regular python model to one with the same temp name
    model_python = "models/TBcanonical_"+orb_type+".py"
    model_python_temp = "models/"+model_temp+".py"
    shutil.copyfile(model_python, model_python_temp)
    # make sure that the number of electrons is correct.
    model['species'][0]["NElectrons"] = numeperatom

    # change the model and Hamiltonian in jobdef
    jobdef["Hamiltonian"] = orb_type+"case"
    jobdef["model"] = model_temp
    # make sure that the scf is on
    jobdef["scf_on"] = 1
    # write jobdef back to file
    with open(jobdef_file, 'w') as f:
        commentjson.dump(jobdef, f, sort_keys=True, indent=4, separators=(',', ': '))

    #pdb.set_trace()

    magFlag, mag_corr = mag_corr_loop(U_array, J_array, dJ_array, jobdef, jobdef_file, model, temp_modelfile, orb_type, number_decimals)


    # clean up temp files
    os.remove(temp_modelfile)
    os.remove(model_python_temp)
    os.remove(model_python_temp+"c")
    # restore backup of JobDef.json
    shutil.copyfile(jobdef_backup, jobdef_file)
    os.remove(jobdef_backup)

    # Make the plot if the mag_corr_loop was successful
    if magFlag == True:
        Plot_OpSq_U_J(Verbose,mag_corr,orb_type,plotname,U_min,U_step,U_num_steps,J_min,J_step,J_num_steps,dJ_min,dJ_step,dJ_num_steps,op_sq_name, number_decimals)
    else:
        print("Simulation failed.")
Example #51
	def __init__(self, confPath):
		# load and store the configuration and update the object's dictionary
		conf = json.loads(open(confPath).read())
		self.__dict__.update(conf)
Example #52
def loadPaymentTerms():
    json_data=open(Config.PAYMENT_TERMS_FILE).read()
    return commentjson.loads(json_data)