Ejemplo n.º 1
0
    def get_advanced_config(self):
        """Load advanced settings from the plugin data path.

        Returns a Dicked built from ``advanced_settings.json`` when the
        file exists, otherwise an empty Dicked.
        """
        settings_file = os.path.join(config.data_path, "advanced_settings.json")
        if not os.path.isfile(settings_file):
            return Dicked()
        raw = FileIO.read(settings_file, "r")
        return Dicked(**jstyleson.loads(raw))
Ejemplo n.º 2
0
 def _extract_items(self, node):
     script = node.xpath('string()')
     try:
         # TODO: `strict=False` can be configurable if needed
         data = json.loads(script, strict=False)
     except ValueError:
         # sometimes JSON-decoding errors are due to leading HTML or JavaScript comments
         data = jstyleson.loads(HTML_OR_JS_COMMENTLINE.sub('', script),
                                strict=False)
     if isinstance(data, list):
         return data
     elif isinstance(data, dict):
         return [data]
Ejemplo n.º 3
0
 def load(self):
     """Load persisted session state from ``self.file_name``.

     Populates ``recent_projects``, ``open_notes`` and
     ``recently_viewed``, falling back to defaults when the state file
     is absent or lacks entries, then immediately re-saves the
     (possibly normalised) state.
     """
     if not os.path.exists(self.file_name):
         self.recent_projects = [self.get_default_project_folder()]
         self.open_notes = {}
         # Bug fix: this attribute was previously left unset on a fresh
         # start, so any later access to recently_viewed could raise
         # AttributeError.
         self.recently_viewed = {}
     else:
         with open(self.file_name,
                   mode='r', encoding='utf-8', errors='ignore') as f:
             txt = f.read()
             json_dict = json.loads(txt)
             self.recent_projects = json_dict.get('recent_projects')
             if not self.recent_projects:
                 self.recent_projects = [self.get_default_project_folder()]
             self.open_notes = json_dict.get('open_notes', {})
             # note: stored under the legacy key 'accessed'
             self.recently_viewed = json_dict.get('accessed', {})
     self.save()
Ejemplo n.º 4
0
    def get_advanced_config(self):
        """Locate and load advanced_settings.json, remembering its origin.

        Candidate locations are tried in order: the user-configured path
        itself, that path treated as a directory, then the plugin data
        path.  The first existing file wins and ``adv_cfg_path`` records
        where it came from; otherwise an empty Dicked is returned.
        """
        candidates = []
        if Prefs['path_to_advanced_settings']:
            base = Prefs['path_to_advanced_settings']
            candidates = [
                base,
                os.path.join(base, "advanced_settings.json"),
            ]

        candidates.append(os.path.join(config.data_path, "advanced_settings.json"))

        for candidate in candidates:
            if not os.path.isfile(candidate):
                continue
            raw = FileIO.read(candidate, "r")
            cfg = Dicked(**jstyleson.loads(raw))
            self.adv_cfg_path = candidate
            Log.Info(u"Using advanced settings from: %s", candidate)
            return cfg

        return Dicked()
Ejemplo n.º 5
0
 def test_loads(self):
     """jstyleson.loads should yield the expected dict for the fixture."""
     parsed = jstyleson.loads(json_test_case)
     self.assertDictEqual(json_expected, parsed)
Ejemplo n.º 6
0
def process_image(input_path, input_map_path, dest_path):
    """Build an autotile sheet from a source image and a JSON generator map.

    Args:
        input_path: path of the source tile image.
        input_map_path: path of a (comment-tolerant) JSON file providing
            ``input_size`` and ``input_map``.
        dest_path: where the generated autotile image is written.

    Errors are reported on stdout; the function returns None either way.
    """
    # Load input image
    try:
        source = Image.open(input_path)
    except (FileNotFoundError, IOError, UnicodeDecodeError,
            SystemError) as err:
        print("Unable to load image: " + str(input_path) + "\n" + str(err))
        return
    except Exception:  # was a bare except; keep SystemExit/Ctrl-C working
        print("Unable to load image: " + str(input_path))
        return
    if source is None:
        return
    # Load input map json
    input_map = None
    try:
        # `with` guarantees the handle is closed even when parsing raises
        # (the original leaked the descriptor on jstyleson errors).
        with open(input_map_path, 'r') as result:
            input_map = jstyleson.dispose(result)
        input_map = jstyleson.loads(input_map)
    except (FileNotFoundError, IOError, UnicodeDecodeError, SystemError,
            json.JSONDecodeError) as err:
        print("Unable to load input json map: " + str(input_map_path) + "\n" +
              str(err))
        return
    except Exception:  # was a bare except
        print("Unable to load input json map: " + str(input_map_path))
        return
    # Extract json values
    source_quantity = None
    generator_map = None
    if input_map is None:
        return
    if "input_size" in input_map:
        try:
            source_quantity = abs(int(input_map["input_size"]))
        except (TypeError, ValueError):
            # int() is the only thing that can fail here
            source_quantity = None
    if "input_map" in input_map:
        generator_map = validate_generator_map(input_map["input_map"])
    if source_quantity is None or generator_map is None:
        if source_quantity is None:
            print("input_size parameter invalid or not found in json file.")
        if generator_map is None:
            print("input_map parameter invalid or not found in json file.")
        return
    # Process files and save
    source = source.convert("RGBA")
    tile_size = int(source.width / source_quantity)
    tile_half_size = int(tile_size * 0.5)
    variation_quantity = math.floor(source.height / tile_size)
    max_cell_len = max((len(cells) for cells in generator_map), default=0)
    dest_tile_quantity = (max_cell_len, len(generator_map))
    dest_tile_size = (dest_tile_quantity[0] * tile_size,
                      dest_tile_quantity[1] * tile_size * variation_quantity)
    dest = Image.new("RGBA", dest_tile_size)
    # Image sections: 1 cell is composed of 4 quarters of each tile, where
    # 0 = top left, 1 = top right, 2 = bottom left and 3 = bottom right.
    # Those indices are the ones used by generator_map.
    # Pixel offset of each quarter inside a tile, indexed by quarter number.
    quarter_offsets = [(0, 0), (tile_half_size, 0),
                       (0, tile_half_size), (tile_half_size, tile_half_size)]
    # source_map[variation][tile][quarter] -> cropped quarter image
    source_map = []
    for v in range(0, variation_quantity):
        variation = []
        for x in range(0, source.width, tile_size):
            cell = []
            for dx, dy in quarter_offsets:
                left = x + dx
                top = v * tile_size + dy
                cell.append(source.crop((left, top,
                                         left + tile_half_size,
                                         top + tile_half_size)))
            variation.append(cell)
        source_map.append(variation)
    # Create autotile image: each generator_map entry selects, per quarter,
    # which source tile (1-based; 0 = leave empty) supplies that quarter.
    generator_map_size = len(generator_map)
    for v in range(0, variation_quantity):
        for x in range(0, generator_map_size):
            r_cells = generator_map[x]
            for y in range(0, len(r_cells)):
                r_cell = r_cells[y]
                row_top = (x + v * generator_map_size) * tile_size
                for q, (dx, dy) in enumerate(quarter_offsets):
                    cell = r_cell[q]
                    if cell > 0:
                        dest.paste(source_map[v][cell - 1][q],
                                   (y * tile_size + dx, row_top + dy))
    try:
        dest.save(dest_path)
    except (ValueError, IOError, SystemError) as err:
        print("Unable to save file: " + str(dest_path) +
              "\nCheck input_size in input map. Tile calculated size: " +
              str(tile_size) + "\n" + str(err))
Ejemplo n.º 7
0
    def __init__(self,
                 config_location,
                 is_dict_config=False,
                 dict_config=None):
        """Read in configuration file and parse into specified values

        Args:
            config_location (str): valid filepath for file
            is_dict_config (bool): are we passing in a dictionary configuration directly
            dict_config (dict): dictionary object, if is_dict_config

        Raises:
            Exception: when dict_config is missing or not a dict, or when
                no file exists at config_location.
            ValueError: when the config file has an unsupported extension.
            ConfigurationError: for unsupported top-level keys or a missing
                implementation_config section.
        """
        if is_dict_config:
            # ext stays None so the dict payload is parsed as JSON below.
            ext = None

            if dict_config is None:
                raise Exception("expected dict_config was None")

            if not isinstance(dict_config, dict):
                raise Exception("did not receive expected dict_config")

            # Serialise the dict so it goes through the same substitution
            # and parsing pipeline as a file-based config.
            dict_str = jstyleson.dumps(dict_config)

            config_str = Configuration.perform_any_config_fragment_substitution(
                dict_str)

        else:
            logging.info("Loading config file at {}".format(config_location))
            self.config_location = config_location

            if os.path.exists(config_location):
                # Extension decides which parser runs below (.json vs .yml).
                ext = os.path.splitext(config_location)[1].lower()
                if ext not in SUPPORTED_EXTS:
                    raise ValueError(
                        "config file at: {} has improper extension type - please use a .json or .yml file"
                        .format(config_location))

                with open(config_location, "r") as f:
                    config_str = f.read()

                config_str = Configuration.perform_any_config_fragment_substitution(
                    config_str)

            else:
                raise Exception(
                    "config file at: {} not found".format(config_location))

        if ext is None or ext == ".json":
            # object_pairs_hook rejects duplicate keys instead of silently
            # letting the last occurrence win.
            self.config = jstyleson.loads(
                config_str, object_pairs_hook=self.dict_raise_on_duplicates)
        elif ext in [".yaml", ".yml"]:
            self.config = yaml.load(config_str, Loader=yaml.FullLoader)

        assert isinstance(self.config, dict)

        # check top-level keys
        for k in self.config:
            if k not in ConfigurationSectionType.values():
                msg = "Unsupported top-level key: %s. " % k
                msg += "Supported keys are %s" % str(
                    ConfigurationSectionType.values())
                raise ConfigurationError(msg)

        # metadata section can be optional
        self.config_metadata = None
        if ConfigurationSectionType.METADATA.value in self.config:
            self.config_metadata = self.config[
                ConfigurationSectionType.METADATA.value]

        # implemetation_config section is required
        if not ConfigurationSectionType.IMPLEMENTATION_CONFIG.value in self.config:
            raise ConfigurationError(
                "Did not find required top-level key %s" %
                ConfigurationSectionType.IMPLEMENTATION_CONFIG.value)

        # keep a copy of the complete configuration
        self.complete_config = self.config.copy()

        # note: config is now just the implementation component of the dictionary
        self.config = self.config[
            ConfigurationSectionType.IMPLEMENTATION_CONFIG.value]

        # store the dag object
        self.dag = ConfigurationDag(self.config)

        # populate configuration file string and hash
        self.config_string, self.config_hash = self._get_configuration_hash()

        # get the formatted time this file was instantiated
        self.config_time = datetime.datetime.now().strftime("%Y%m%d_%H%M")

        # parse the file into an internal config object
        self._parse_config()

        self.check_config()
Ejemplo n.º 8
0
def read_json_file(file_path):
    """Read *file_path* and return its comment-tolerant JSON contents."""
    with open(file_path, 'r') as handle:
        raw = handle.read()
    parsed = jstyleson.loads(raw)
    return parsed
Ejemplo n.º 9
0
 def test_loads(self):
     """Parsing the shared fixture must reproduce the expected dict."""
     self.assertDictEqual(json_expected, jstyleson.loads(json_test_case))
Ejemplo n.º 10
0
def parse(opt_path, is_train=True):
    """Parse a '//'-commented JSON option file into an options dict.

    Args:
        opt_path (str): path to the option file.
        is_train (bool): training vs. test mode; controls which output
            directories are derived under ``path``.

    Returns:
        OrderedDict: the fully resolved options.

    Side effects: sets the ``CUDA_VISIBLE_DEVICES`` environment variable
    from ``gpu_ids`` and prints the export line.
    """
    # remove comments starting with '//'
    # NOTE(review): this also truncates any value containing '//'
    # (e.g. "http://..."); keep option values free of double slashes.
    json_str = ''
    with open(opt_path, 'r') as f:
        for line in f:
            line = line.split('//')[0] + '\n'
            json_str += line
    opt = json.loads(json_str, object_pairs_hook=OrderedDict)

    opt['is_train'] = is_train
    scale = opt['scale']

    # datasets: normalise phase names, propagate scale, expand paths and
    # detect lmdb-backed data roots.
    for phase, dataset in opt['datasets'].items():
        phase = phase.split('_')[0]
        dataset['phase'] = phase
        dataset['scale'] = scale
        is_lmdb = False
        if 'dataroot_GT' in dataset and dataset['dataroot_GT'] is not None:
            dataset['dataroot_GT'] = os.path.expanduser(dataset['dataroot_GT'])
            if dataset['dataroot_GT'].endswith('lmdb'):
                is_lmdb = True
        if 'dataroot_GT_bg' in dataset and dataset[
                'dataroot_GT_bg'] is not None:
            dataset['dataroot_GT_bg'] = os.path.expanduser(
                dataset['dataroot_GT_bg'])
        if 'dataroot_NOISY' in dataset and dataset[
                'dataroot_NOISY'] is not None:
            dataset['dataroot_NOISY'] = os.path.expanduser(
                dataset['dataroot_NOISY'])
            if dataset['dataroot_NOISY'].endswith('lmdb'):
                is_lmdb = True
        dataset['data_type'] = 'lmdb' if is_lmdb else 'img'

        if phase == 'train' and 'subset_file' in dataset and dataset[
                'subset_file'] is not None:
            dataset['subset_file'] = os.path.expanduser(dataset['subset_file'])

    # path: expand '~' in every non-empty entry
    # (the original also tested `key in opt['path']`, which is always
    # true while iterating opt['path'].items() — removed as redundant).
    for key, path in opt['path'].items():
        if path:
            opt['path'][key] = os.path.expanduser(path)
    if is_train:
        experiments_root = os.path.join(opt['path']['root'], 'experiments',
                                        opt['name'])
        opt['path']['experiments_root'] = experiments_root
        opt['path']['models'] = os.path.join(experiments_root, 'models')
        opt['path']['training_state'] = os.path.join(experiments_root,
                                                     'training_state')
        opt['path']['log'] = experiments_root
        opt['path']['val_images'] = os.path.join(opt['path']['val_root'],
                                                 'val_images')

        # change some options for debug mode
        if 'debug' in opt['name']:
            opt['train']['val_freq'] = 8
            opt['logger']['print_freq'] = 2
            opt['logger']['save_checkpoint_freq'] = 8
            opt['train']['lr_decay_iter'] = 10
    else:  # test
        results_root = os.path.join(opt['path']['root'], 'results',
                                    opt['name'])
        opt['path']['results_root'] = results_root
        opt['path']['log'] = results_root

    # network
    opt['network_G']['scale'] = scale

    # export CUDA_VISIBLE_DEVICES
    gpu_list = ','.join(str(x) for x in opt['gpu_ids'])
    os.environ['CUDA_VISIBLE_DEVICES'] = gpu_list
    print('export CUDA_VISIBLE_DEVICES=' + gpu_list)

    return opt
print ("")
print ("******************************************************************")
print ("                          Input parameters                        ")
print ("******************************************************************")
print ("")

# Parse JSON configuration file.
# Raise exception if wrong number of inputs are provided to script.
if len(sys.argv) != 2:
    raise Exception("Only provide a JSON config file as input!")

# The `with` block owns the file handle.  (A stray, never-closed
# `open(sys.argv[1])` used to leak a descriptor right before it.)
with open(sys.argv[1], 'r') as json_input_file:
    json_input_string = json_input_file.read()
config = jstyleson.loads(json_input_string)
jstyleson.dumps(config)  # round-trip sanity check; result intentionally unused
pprint(config)

print ("")
print ("******************************************************************")
print ("                            Operations                            ")
print ("******************************************************************")
print ("")

print ("Fetching data from database ...")
# Connect to SQLite database.
try:
    database = sqlite3.connect(config['database'])
Ejemplo n.º 12
0
# Total number of grid points; `width` and `height` are defined earlier
# in the script (outside this excerpt).
totalPointNumber = width * height 

OS = sys.platform

# Select the ARCSim binary matching the current platform.
# NOTE(review): no fallback branch — ARCSimPath stays undefined on
# other platforms (e.g. macOS); confirm that is intended.
if OS == 'win32':
    ARCSimPath = '../build/Release/Arcsim.exe'
elif OS == 'linux':
    ARCSimPath = '../build/Arcsim'

jsonConfigPath = '../conf/uniform_grid_drop.json'

jsonData = None

# Strip full-line '//' comments before JSON-decoding the config file.
# NOTE(review): only lines *starting* with '//' are dropped; indented or
# trailing comments would still break the parse.
with open(jsonConfigPath,'r') as f:
    jsonDataWithoutComment = ''.join(line for line in f if not line.startswith('//'))
    jsonData = js.loads(jsonDataWithoutComment)

# Generated batch configs live under ./batching_conf/<config-file-stem>
dirBatchingConfPathPrefix = os.getcwd() + '/batching_conf/'

# Derive the directory name from the config file name without extension.
dirPathName = jsonConfigPath.split('/')[-1]
dirPathName = dirPathName[:dirPathName.rfind('.')]
dirBatchingConfPathPrefix += dirPathName
# dirBatchingConfPathPrefix += '/'

if(not os.path.exists(dirBatchingConfPathPrefix)):
    print('Folder {} is not existed! Create it!'.format(dirBatchingConfPathPrefix))
    os.makedirs(dirBatchingConfPathPrefix)

for i in range(0,totalPointNumber):
    jsonData['handles'][0]['nodes'] = i
    configFileName = dirBatchingConfPathPrefix + '/' + dirPathName + '_{:0>8d}.json'.format(i)
Ejemplo n.º 13
0
# and is stoppable with ^C

import re
import os
import sys

print(sys.executable)

import shlex
import jstyleson
import subprocess as sp

# Pull the debug launch arguments out of the VS Code config
# (jstyleson tolerates the // comments VS Code allows in launch.json).
with open(".vscode/launch.json", "r", encoding="utf-8") as f:
    tj = f.read()

oj = jstyleson.loads(tj)
argv = oj["configurations"][0]["args"]

# Best-effort echo of the equivalent shell command; never fatal, but
# `except Exception` (instead of a bare except) keeps Ctrl-C working.
try:
    sargv = " ".join(shlex.quote(x) for x in argv)
    print(sys.executable + " -m copyparty " + sargv + "\n")
except Exception:
    pass

# Expand ~ in path-like arguments before handing them to copyparty.
argv = [os.path.expanduser(x) if x.startswith("~") else x for x in argv]

if re.search(" -j ?[0-9]", " ".join(argv)):
    argv = [sys.executable, "-m", "copyparty"] + argv
    sp.check_call(argv)
else:
    sys.path.insert(0, os.getcwd())