Code example #1
    def handle_local_config(self):
        '''
        Use the local config file if present; otherwise fall back to factory
        values and write them to disk, unless self.create_missing_files is
        False, in which case raise IOError.
        '''

        # If override config file exists, use it
        if os.path.isfile(self.config_filename):
            with open(self.config_filename, 'r') as f:
                content = f.read()
                config = json.loads(content)

        # If we are instructed to create a fresh override file, do so,
        # but only if the filename is the default DEFAULT_CONFIG_FILENAME
        elif self.create_missing_files and self.config_filename == res.DEFAULT_CONFIG_FILENAME:
            # Merge in any defaults provided by subclasses
            default_config_copy = copy.deepcopy(res.FACTORY_DEFAULT_CONFIG)
            default_config_copy.update(self.get_factory_defaults_hook())

            # Don't want changes to config modifying the FACTORY_DEFAULT
            config = copy.deepcopy(default_config_copy)

            with open(self.config_filename, 'w') as f:
                f.write(json.dumps(default_config_copy, indent=4, separators=(',', ': ')))

        # Otherwise complain
        else:
            raise IOError(self.config_filename + ' could not be found')

        # Validate and save results
        self._validate_config(config)
        self.config = config
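The get_factory_defaults_hook() call above is an extension point: a subclass returns a dict that is merged over the factory defaults with dict.update() before the config is written. A minimal sketch of such an override (the class name and keys here are hypothetical; only the dict-returning contract comes from the example):

    class MyServiceConfig(object):  # stands in for the class defining handle_local_config
        def get_factory_defaults_hook(self):
            # Hypothetical defaults; merged over FACTORY_DEFAULT_CONFIG via dict.update()
            return {'service_port': 8080, 'retries': 3}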
Code example #2
    def load_ami_cache(template, create_missing_files=True):
        """
        Method gets the ami cache from the file locally and adds a mapping for ami ids per region into the template
        This depends on populating ami_cache.json with the AMI ids that are output by the packer scripts per region
        @param template The template to attach the AMI mapping to
        @param create_missing_file File loading policy, if true
        """
        file_path = None

        # Users can provide an override ami_cache in their project root
        local_amicache = os.path.join(os.getcwd(), res.DEFAULT_AMI_CACHE_FILENAME)
        if os.path.isfile(local_amicache):
            file_path = local_amicache

        # Or sibling to the executing class
        elif os.path.isfile(res.DEFAULT_AMI_CACHE_FILENAME):
            file_path = res.DEFAULT_AMI_CACHE_FILENAME

        if file_path:
            with open(file_path, 'r') as json_file:
                json_data = json.load(json_file)
        elif create_missing_files:
            json_data = res.FACTORY_DEFAULT_AMI_CACHE
            with open(res.DEFAULT_AMI_CACHE_FILENAME, 'w') as f:
                f.write(json.dumps(res.FACTORY_DEFAULT_AMI_CACHE, indent=4, separators=(',', ': ')))
        else:
            raise IOError(res.DEFAULT_AMI_CACHE_FILENAME + ' could not be found')

        template.add_ami_mapping(json_data)
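The lookup order above, project root first and then a file sibling to the executing class, could be factored into a small helper. A sketch under the same names (resolve_ami_cache_path is a hypothetical addition, not part of the project):

    def resolve_ami_cache_path():
        # Prefer an override in the caller's project root, then the sibling file
        candidates = [os.path.join(os.getcwd(), res.DEFAULT_AMI_CACHE_FILENAME),
                      res.DEFAULT_AMI_CACHE_FILENAME]
        for candidate in candidates:
            if os.path.isfile(candidate):
                return candidate
        return None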
Code example #3
def main(args: Namespace) -> None:
    logging.basicConfig(level=args.log_level)
    hotkeys_config = HotkeysConfigSchema().loadf(args.hotkeys_config_file)
    with ScreenReader(int(get_tibia_wid(args.tibia_pid))) as screen_reader:
        with OcrNumberReader(screen_reader, PyTessBaseAPI()) as ocr_reader:
            finder = AppConfigMemoryAddressFinder(
                tibia_pid=args.tibia_pid,
                memory_address_finder=MemoryAddressFinder(
                    ocr_reader, args.tibia_pid),
                hotkeys_config=hotkeys_config,
                mana_rect=Rect(
                    args.mana_xy[0],
                    args.mana_xy[1],
                    args.mana_wh[0],
                    args.mana_wh[1],
                ),
                speed_rect=Rect(
                    args.speed_xy[0],
                    args.speed_xy[1],
                    args.speed_wh[0],
                    args.speed_wh[1],
                ),
            )
            app_config = finder.build_app_config_entry()
            print(
                json.dumps(AppConfigSchema().dump(app_config),
                           indent=4,
                           sort_keys=True))
Code example #4
def main(args: Namespace) -> None:
    logging.basicConfig(level=args.log_level)
    app_configs = read_app_configs(args.app_config)
    if args.command == "update_pids":
        tibia_pids = list(get_tibia_pids())
        if len(tibia_pids) == 0:
            print("There are no running Tibia process, nothing to do.")
            sys.exit(0)

        if not args.only_append:
            app_configs = remove_stale_entries(app_configs, tibia_pids)

        if not args.only_cleanup:
            app_configs = append_new_entries(app_configs, tibia_pids)

        print(f"Successfully updated {args.app_config}")

    if args.command == "find_addresses":
        app_configs = find_memory_addresses(args, app_configs)

    if not args.dry_run:
        write_app_config(app_configs, args.app_config)
        print(f"Successfully updated {args.app_config} with new memory addresses")
    else:
        print(json.dumps(to_dict(app_configs), indent=2))
Code example #5
    def generate_config(self):
        """
        Generate config dictionary from defaults
        Add defaults from all registered config handlers (added patterns, etc.)
        Write file to self.config_filename
        """

        if os.path.isfile(self.config_filename):
            overwrite = raw_input("%s already exists. Overwrite? (y/n) " %
                                  self.config_filename).lower()
            print
            if not overwrite == 'y':
                return

        config = copy.deepcopy(res.FACTORY_DEFAULT_CONFIG)

        # Merge in any defaults provided by registered config handlers
        for handler in self._config_handlers:
            config.update(handler.get_factory_defaults())

        with open(self.config_filename, 'w') as f:
            f.write(
                json.dumps(config,
                           indent=4,
                           sort_keys=True,
                           separators=(',', ': ')))
            print 'Generated config file at %s\n' % self.config_filename
Code example #6
def StrObj(obj, sort_keys=True):
    # ensure_ascii=False so that non-ASCII characters are emitted verbatim
    return json.dumps(obj,
                      indent=4,
                      ensure_ascii=False,
                      sort_keys=sort_keys,
                      default=_save_to_json_process_)
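The default= hook shown above is how json.dumps deals with objects the encoder cannot serialize natively: the callable receives each such object and must return a JSON-compatible value or raise TypeError. A minimal self-contained sketch of such a hook (the datetime/set handling is an assumption; the original _save_to_json_process_ is not shown):

    import datetime
    import json

    def _fallback(obj):
        # Called by json.dumps for any object the encoder cannot handle itself
        if isinstance(obj, datetime.datetime):
            return obj.isoformat()
        if isinstance(obj, set):
            return sorted(obj)
        raise TypeError('Object of type %s is not JSON serializable' % type(obj).__name__)

    print(json.dumps({'when': datetime.datetime(2020, 1, 1), 'tags': {'b', 'a'}},
                     indent=4, ensure_ascii=False, default=_fallback))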
Code example #7
    def test_dumps_with_kwargs(self):
        test_dict = dict(a=1, b=2)
        test_kwargs = dict(indent=4)

        c_dump = commentjson.dumps(test_dict, **test_kwargs)
        j_dump = json.dumps(test_dict, **test_kwargs)
        assert c_dump == j_dump
Code example #8
    def request(self, method, path, query={}, headers={}, body={}, base_url=None):
        '''
        Issues a request to Onshape

        Args:
            - method (str): HTTP method
            - path (str): Path, e.g. /api/documents/:id
            - query (dict, default={}): Query params in key-value pairs
            - headers (dict, default={}): Key-value pairs of headers
            - body (dict, default={}): Body for POST request
            - base_url (str, default=None): Host, including scheme and port (if different from creds file)

        Returns:
            - requests.Response: Object containing the response from Onshape
        '''

        req_headers = self._make_headers(method, path, query, headers)
        if base_url is None:
            base_url = self._url
        url = base_url + path + '?' + urllib.parse.urlencode(query)

        if self._logging:
            utils.log(body)
            utils.log(req_headers)
            utils.log('request url: ' + url)

        # only parse as json string if we have to
        body = json.dumps(body) if isinstance(body, dict) else body

        res = requests.request(method, url, headers=req_headers, data=body, allow_redirects=False, stream=True)

        if res.status_code == 307:
            location = urlparse(res.headers["Location"])
            querystring = parse_qs(location.query)

            if self._logging:
                utils.log('request redirected to: ' + location.geturl())

            new_query = {}
            new_base_url = location.scheme + '://' + location.netloc

            for key in querystring:
                new_query[key] = querystring[key][0]  # won't work for repeated query params

            return self.request(method, location.path, query=new_query, headers=headers, base_url=new_base_url)
        elif not 200 <= res.status_code <= 206:
            print('! ERROR (' + str(res.status_code) + ') while using OnShape API')
            if res.text:
                print('! ' + res.text)
            if self._logging:
                utils.log('request failed, details: ' + res.text, level=1)
            exit()
        else:
            if self._logging:
                utils.log('request succeeded, details: ' + res.text)

        return res
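The inline comment in the redirect branch notes that taking querystring[key][0] drops repeated query parameters. That is because urllib.parse.parse_qs always returns a list of values per key; a quick standalone demonstration:

    from urllib.parse import parse_qs

    qs = parse_qs('id=1&id=2&mode=fast')
    print(qs)    # {'id': ['1', '2'], 'mode': ['fast']}
    flat = {k: v[0] for k, v in qs.items()}
    print(flat)  # {'id': '1', 'mode': 'fast'} -- the second id is silently lost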
Code example #9
def init2():
	if settings.nohead:
		head = None
	else:
		head = [
			'// SDK v{}'.format(settings._version),
			'// ARGV {}'.format(' '.join(sys.argv)),
			'// SETTINGS {}'.format(json.dumps(settings.getData()))
		]
		
		# This may contain sensitive information, we only collect it on test mode
		try:
			if settings.test:
				head.append('// ENV {}'.format(json.dumps(dict(os.environ))))
		except:
			pass
	
	init_logging(head)
	init_recording(head)
Code example #10
    def generate_ami_cache(self):
        """
        Generate ami_cache.json file from defaults
        """
        if os.path.isfile(res.DEFAULT_AMI_CACHE_FILENAME):
            overwrite = raw_input("%s already exists. Overwrite? (y/n) " % res.DEFAULT_AMI_CACHE_FILENAME).lower()
            print
            if not overwrite == 'y':
                return

        with open(res.DEFAULT_AMI_CACHE_FILENAME, 'w') as f:
            f.write(json.dumps(res.FACTORY_DEFAULT_AMI_CACHE, indent=4, separators=(',', ': ')))
            print "Generated AMI cache file at %s\n" % res.DEFAULT_AMI_CACHE_FILENAME
Code example #11
async def on_message(message):
    if message.author == client.user:
        return

    text = message.content
    valid = False

    try:
        data = commentjson.loads("{" + text + "}")
        valid = True
    except:
        pass

    try:
        data = commentjson.loads("[" + text + "]")
        valid = True
    except:
        pass

    try:
        data = commentjson.loads(text)
        valid = True
    except:
        pass

    try:
        if valid and len(text) > MIN_MESSAGE_LENGTH:
            data_string = commentjson.dumps(data, indent=2)
            if data_string.startswith("{") or data_string.startswith("["):
                await message.delete()
                send = await message.channel.send(
                    "**Hey {}, I've formatted your json for you!**\n*Use `?format` for instructions on formatting your own json.*\n```json\n{}``` \n to delete this message react with a 🗑️"
                    .format(message.author.display_name, data_string))

                def check(reaction, user):
                    return user == message.author and str(
                        reaction.emoji) == '🗑️'

                try:
                    reaction, user = await client.wait_for('reaction_add',
                                                           timeout=60.0,
                                                           check=check)
                except asyncio.TimeoutError:
                    return
                else:
                    await send.delete()
    except Exception as exception:
        print(exception)
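The three try/except blocks above probe whether the message parses as a braced fragment, a bracketed fragment, or a complete document, with a later success overwriting an earlier one. The same cascade could be factored into one helper; a sketch (try_parse is a hypothetical name, not part of the bot):

    import commentjson

    def try_parse(text):
        # Mirror the cascade above: since a later successful parse overwrites an
        # earlier one, the raw text takes precedence, then [text], then {text}
        for candidate in (text, '[' + text + ']', '{' + text + '}'):
            try:
                return commentjson.loads(candidate), True
            except Exception:
                continue
        return None, False

    # data, valid = try_parse(message.content)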
Code example #12
File: json_monster.py Project: SirLich/json-monster
async def on_message(message):
    if message.author == client.user:
        return

    text = message.content
    text = text.replace("```", "")  # This may cause issues if someone has a name like `````` but I doubt that would happen, and it would break formatting anyway, so ¯\_(ツ)_/¯
    valid = False

    try:
        data = commentjson.loads("{" + text + "}")
        valid = True
    except:
        pass

    try:
        data = commentjson.loads("[" + text + "]")
        valid = True
    except:
        pass

    try:
        data = commentjson.loads(text)
        valid = True
    except:
        pass

    try:
        if valid and len(text) > MIN_MESSAGE_LENGTH:
            data_string = commentjson.dumps(data, indent=2)
            if data_string.startswith("{") or data_string.startswith("["):
                send = await message.channel.send(
                    "**{},**\n To delete this message react with a 🚫.\n```json\n{}``` \n ".format(
                        message.author.display_name, data_string))
                await send.add_reaction('🚫')
                await asyncio.sleep(0.2)
                await message.delete()
                def check_reactions(reaction, user) -> bool:
                    return user.id==message.author.id and reaction.emoji=='🚫' and reaction.message.id==send.id
                try:
                    reaction, user = await client.wait_for('reaction_add', timeout=60.0, check=check_reactions)
                except asyncio.TimeoutError:
                    await send.clear_reactions()
                    return
                else:
                    await send.delete()
    except Exception as exception:
        print(exception)
Code example #13
File: run.py Project: tmolitor-stud-tu/AnonPubSub
def send_command(ip, command, data=None, ignore_errors=False):
    global logger
    try:
        data = copy.deepcopy(data if isinstance(data, dict) else {})
        data["_command"] = command
        logger.debug("************ Sending command '%s' to '%s'..." %
                     (command, ip))
        return post_data("http://" + ip + ":9980/command",
                         bytes(json.dumps(data), "UTF-8"))
    except:
        if ignore_errors:
            return
        subprocess.call(
            ["./helpers.sh",
             "stop"])  # kill all running nodes before raising the exception
        raise
Code example #14
File: validate_setup.py Project: reyesdark/ragnarok
    def handle(self, *args, **options):

        try:
            with open(os.path.join(
                    BASE_DIR, 'alfheimproject/conf/config.json')) as config:
                config_json = commentjson.load(config)
        except FileNotFoundError:
            raise CommandError(
                'config.json not found. Did you forget to add it?')

        validator = ConfigValidator(config_json)

        if validator.is_valid():
            self.stdout.write(
                'Configuration file is valid, now you can enable your site.')
        else:
            self.stdout.write(
                'Configuration is not valid, please check main.log')
            self.stdout.write(
                'We created a config.test.json file with valid configuration for you.'
            )
            with open('config.test.json', 'w+') as file:
                contents = commentjson.dumps(validator.config)
                file.write(contents)
Code example #15
def run():
    common.output('Generating gallery from {}...'.format(settings.local_path))
    iext = ["jpg", "png", "tif", "tiff", "gif", "bmp"]
    rfn = '{}___gallery_{}.html'.format(settings.local_path, settings._start)
    match_ts = r'(_|\s|\-)(\d+\.\d+|\d+)'
    fh = open(rfn, 'wb+')

    common.output('Collecting files...')
    fns = common.getAllFiles(settings.local_path, recursive=False)

    iFeatures = []  # image feature list
    jFeatures = []  # json feature list
    pFeatures = []  # points feature list
    fc = {}  # categorized image filenames
    fm = {}  # image meta data
    jm = {}  # json/other meta data
    fmfn = {}  # meta filenames
    spoints = {}  # screenpoints meta data
    metadataCol = {}  # metadata collection bags
    fi = len(fns)

    if fi == 0:
        common.output('No files found in {}'.format(settings.local_path))
        return

    common.output('Processing {} files...'.format(fi))

    try:
        os.stat_float_times(True)
    except:
        pass

    for fn in tqdm(fns):
        lnm = os.path.basename(fn).lower()
        fty = None  # image feature type, eg: thermal, rgb, depth, etc

        ext = lnm.split(".")[-1:][0]
        isJSON = ext == 'json'
        isImage = ext in iext

        if not isJSON and not isImage:
            continue

        if settings.align == 'time':
            fts = os.path.getmtime(fn)
        elif settings.align == 'linear':
            fts = '.'.join(os.path.basename(fn).split('.')[:-1])
        elif settings.align == 'sequential':
            try:
                fts = re.findall(match_ts, lnm)[-1][-1:][0]
            except IndexError:
                common.output(
                    'Unable to find number on {}, skipping'.format(fn),
                    'WARNING')
                continue

        if settings.debug:
            common.output('Processing: {}'.format(lnm))

        fty = "default" if settings.flat_gallery == True else re.sub(
            match_ts, '', " ".join(lnm.split(".")[0:-1]))

        if isJSON:
            if 'lookup' in lnm:
                # skip lookups
                continue
            elif 'metadata' in lnm:
                # this is a collection of objects following a lookup key and misc data
                try:
                    obj = common.loadJSON(fn)
                except:
                    common.output('[0] Invalid JSON data in {}'.format(fn),
                                  'ERROR')
                    obj = {}

                metadataCol[fts] = copy.deepcopy(obj)
            elif "seg" in lnm or "bbox" in lnm:
                if settings.align == 'time':
                    # HACK
                    while has_attribute(fm, fts):
                        fts += .000001

                try:
                    jsonContents = common.loadJSON(fn)
                    fmfn[fts] = fn

                    if isinstance(jsonContents, list):
                        fm[fts] = jsonContents
                    else:
                        fm[fts] = jsonContents["boundingBoxes"]
                except KeyError:
                    fm[fts] = {}
                except:
                    common.output('[1] Invalid JSON data in {}'.format(fn),
                                  'ERROR')

            elif 'screenpoints' in lnm:
                try:
                    point = common.loadJSON(fn)
                except:
                    common.output('[2] Invalid JSON data in {}'.format(fn),
                                  'ERROR')
                    continue

                if fty not in pFeatures:
                    pFeatures.append(fty)
                    spoints[fty] = {}

                val = json.loads(point.get('value'))
                spoints[fty][fts] = copy.deepcopy(val)

            else:
                try:
                    obj = common.loadJSON(fn)
                except:
                    common.output('[3] Invalid JSON data in {}'.format(fn),
                                  'ERROR')
                    continue

                # HACK: Skip multi value objects, they cannot be graphed
                try:
                    val = obj.get('value')
                    if val is not None and isinstance(val, str) and val[0] in '[{':
                        continue
                        # test = common.loadJSONS(obj['value'])
                        # obj['value'] = test
                except:
                    pass

                if fty not in jFeatures:
                    jFeatures.append(fty)
                    jm[fty] = {}

                jm[fty][fts] = copy.deepcopy(obj)

                if settings.debug:
                    common.output(
                        'Found JSON object in {} that\'s not linked to cameras, skipping'
                        .format(fn), 'WARN')

            continue

        if fty not in iFeatures:
            iFeatures.append(fty)
        if not has_attribute(fc, fty):
            fc[fty] = {}
        if settings.align == 'time':
            while has_attribute(fc[fty], fts):
                fts += .000001

            fc[fty][os.path.getmtime(fn)] = os.path.basename(fn)
        else:
            fc[fty][fts] = os.path.basename(fn)

    if len(fm) > 0:
        iFeatures.append('bbox')

    total_images = 0

    for i in fc:
        if total_images > 0:
            total_images = min(total_images, len(fc[i]))
        else:
            total_images = len(fc[i])

    common.output('Generating html...')
    html = Template(common.readAll('{}index.tpl'.format(tpl_path)))

    js_static = ''
    for i in static_assets['js']:
        js_static += '// {}\n\n{}\n'.format(
            i, common.readAll('{}js/{}'.format(tpl_path, i)))

    css_static = ''
    for i in static_assets['css']:
        css_static += '// {}\n\n{}\n'.format(
            i, common.readAll('{}css/{}'.format(tpl_path, i)))

    fh.write(
        html.render(title='Gallery [{}]'.format(settings._start),
                    js_static=js_static,
                    css_static=css_static,
                    iFeatures=iFeatures,
                    jFeatures=jFeatures,
                    pFeatures=pFeatures,
                    metadataCol=metadataCol,
                    fc=json.dumps(fc, sort_keys=True),
                    fm=json.dumps(fm, sort_keys=True),
                    jm=json.dumps(jm),
                    fmfn=json.dumps(fmfn, sort_keys=True),
                    total_images=total_images,
                    invert_bboxx='false' if settings.no_invert_bboxx else 'true',
                    spoints=json.dumps(spoints)).encode('utf-8'))

    fh.close()

    common.output('Wrote {}'.format(rfn))
Code example #16
    def test_dumps(self):
        test_dict = dict(a=1, b=2)
        c_dump = commentjson.dumps(test_dict)
        j_dump = json.dumps(test_dict)
        assert c_dump == j_dump
Code example #17
File: rewriteccjson.py Project: lanza/nworkout
    for index, command in enumerate(commands):
        if (command == "-frontend" or command == "-enable-objc-interop"
                or command == "-serialize-debugging-options"
                or command == "-no-clang-module-breadcrumbs"):
            deletes.append(index)

        # -target-sdk-version takes a value: delete the flag and its argument
        if command == "-target-sdk-version":
            deletes.append(index)
            deletes.append(index + 1)

    for i in reversed(deletes):
        del element.arguments[i]

    file = element.file
    if not "|" in file:
        final.append(element)
        continue

    files = file.split("|")

    for file in files:
        new = CompileCommand(file, element.arguments.copy(), element.directory)
        final.append(new)

jsout = [x.to_dict() for x in final]
jsonified = commentjson.dumps(jsout)
with open("compile_commands.json", "w") as f:
    f.write(jsonified)
Code example #18
def addPart(occurrence, matrix):
    global config, occurrenceLinkNames
    part = occurrence['instance']

    if part['suppressed']:
        return

    # Importing STL file for this part
    justPart, prefix = extractPartName(part['name'], part['configuration'])

    extra = ''
    if occurrence['instance']['configuration'] != 'default':
        extra = Style.DIM + ' (configuration: ' + \
            occurrence['instance']['configuration']+')'
    symbol = '+'
    if partIsIgnore(justPart):
        symbol = '-'
        extra += Style.DIM + ' / ignoring visual and collision'

    print(Fore.GREEN + symbol + ' Adding part ' +
          occurrence['instance']['name'] + extra + Style.RESET_ALL)

    if partIsIgnore(justPart):
        stlFile = None
    else:
        stlFile = prefix.replace('/', '_') + '.stl'
        # Shorten the configuration to a maximum number of chars to prevent
        # errors; necessary for standard parts like screws
        if len(part['configuration']) > 40:
            shortened_configuration = hashlib.md5(
                part['configuration'].encode('utf-8')).hexdigest()
        else:
            shortened_configuration = part['configuration']
        stl = client.part_studio_stl_m(part['documentId'],
                                       part['documentMicroversion'],
                                       part['elementId'], part['partId'],
                                       shortened_configuration)
        f = open(config['outputDirectory'] + '/' + stlFile, 'wb')
        f.write(stl)
        f.close()

        stlMetadata = prefix.replace('/', '_') + '.part'
        f = open(config['outputDirectory'] + '/' + stlMetadata, 'wb')
        f.write(json.dumps(part).encode('UTF-8'))
        f.close()

        stlFile = config['outputDirectory'] + '/' + stlFile

    # Import the SCAD files pure shapes
    shapes = None
    if config['useScads']:
        scadFile = prefix + '.scad'
        if os.path.exists(config['outputDirectory'] + '/' + scadFile):
            shapes = csg.process(config['outputDirectory'] + '/' + scadFile,
                                 config['pureShapeDilatation'])

    # Obtain metadata about the part to retrieve its color
    if config['color'] is not None:
        color = config['color']
    else:
        metadata = client.part_get_metadata(part['documentId'],
                                            part['documentMicroversion'],
                                            part['elementId'], part['partId'],
                                            part['configuration'])
        if 'appearance' in metadata:
            colors = metadata['appearance']['color']
            color = np.array([colors['red'], colors['green'], colors['blue']
                              ]) / 255.0
        else:
            color = [0.5, 0.5, 0.5]

    # Obtain mass properties about that part
    if config['noDynamics']:
        mass = 0
        com = [0] * 3
        inertia = [0] * 12
    else:
        if prefix in config['dynamicsOverride']:
            entry = config['dynamicsOverride'][prefix]
            mass = entry['mass']
            com = entry['com']
            inertia = entry['inertia']
        else:
            massProperties = client.part_mass_properties(
                part['documentId'], part['documentMicroversion'],
                part['elementId'], part['partId'], part['configuration'])

            if part['partId'] not in massProperties['bodies']:
                print(Fore.YELLOW + 'WARNING: part ' + part['name'] +
                      ' has no dynamics (maybe it is a surface)' +
                      Style.RESET_ALL)
                return
            massProperties = massProperties['bodies'][part['partId']]
            mass = massProperties['mass'][0]
            com = massProperties['centroid']
            inertia = massProperties['inertia']

            if abs(mass) < 1e-9:
                print(Fore.YELLOW + 'WARNING: part ' + part['name'] +
                      ' has no mass, maybe you should assign a material to it?' +
                      Style.RESET_ALL)

    pose = occurrence['transform']
    if robot.relative:
        pose = np.linalg.inv(matrix) * pose

    robot.addPart(pose, stlFile, mass, com, inertia, color, shapes, prefix)
Code example #19
def main():
    launch_names = get_launch_names()
    with open("sample_names.json", "w") as out_f:
        print(commentjson.dumps({"names": launch_names}, indent=4), file=out_f)
Code example #20
File: import json.py Project: nicklovin/maya_tools
def formatJson(data):
    return commentjson.dumps(data,
                             sort_keys=True,
                             indent=4,
                             default=repr,
                             separators=(', ', ': ')).replace('    ', '\t')
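One caveat with the trailing .replace('    ', '\t'): it rewrites every run of four spaces in the rendered document, including runs inside string values, not just the indentation. A small demonstration with the stdlib json module:

    import json

    s = json.dumps({'text': 'a    b'}, indent=4)
    print(s.replace('    ', '\t'))  # the four spaces inside the value become a tab too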
Code example #21
    def test_dumping_parsing_simple_string(self):
        string = '//'
        self.assertEqual(commentjson.loads(commentjson.dumps(string)), string)

        string = '#'
        self.assertEqual(commentjson.loads(commentjson.dumps(string)), string)
Code example #22
File: run.py Project: tmolitor-stud-tu/AnonPubSub
def evaluate(task, settings, task_imports, args):
    global logger

    logger.info(
        "******** Creating graph with %d nodes (%d publishers and %d subscribers)..."
        % (task["nodes"], task["publishers"], task["subscribers"]))
    base_ip = str(task["base_ip"]).split(".")
    G = genGraph("random", task["nodes"], **task["graph_args"])
    pubs, subs = task["publishers"], task["subscribers"]
    for n in sorted(list(G.nodes())):
        roles = {}
        if subs:
            roles["subscriber"] = ["test"]
            subs -= 1
        elif pubs:
            roles["publisher"] = ["test"]
            pubs -= 1
        G.node[n] = {
            "ip": "%d.%d.%d.%d" % (int(base_ip[0]), int(base_ip[1]),
                                   int(base_ip[2]), int(base_ip[3]) + n),
            "roles": roles
        }
        nx.relabel_nodes(G, {n: "ID: %d" % n}, False)
    if subs or pubs:
        logger.warning(
            "******** Too few nodes (created only %d publishers and %d subscribers)..."
            % (task["publishers"] - pubs, task["subscribers"] - subs))

    # remove all old logs
    shutil.rmtree("logs", ignore_errors=True)
    os.mkdir("logs")

    # create json string from graph
    G.graph["settings"] = settings
    node_link_data = json_graph.node_link_data(G)
    graph_data = json.dumps(node_link_data, sort_keys=True, indent=4)
    with open("logs/graph.json", "w") as f:
        f.write(graph_data)

    # start nodes (cleanup on sigint (CTRL-C) while nodes are running)
    def sigint_handler(sig, frame):
        signal.signal(signal.SIGINT,
                      signal.SIG_IGN)  # ignore SIGINT while shutting down
        logger.warning("Got interrupted, killing nodes!")
        subprocess.call(["./helpers.sh", "stop"])
        sys.exit(0)

    signal.signal(signal.SIGINT, sigint_handler)
    ips = []
    for n in sorted(list(G.nodes())):
        ips.append(G.node[n]["ip"])
    subprocess.run(["./helpers.sh", "start"],
                   input=bytes("%s\n" % ("\n".join(ips)), "UTF-8"))

    logger.info(
        "******** Checking for availability of all nodes and opening their SSE streams..."
    )
    sse_streams = {}
    for n in sorted(list(G.nodes())):
        ip = G.node[n]["ip"]
        online = False
        for i in range(1, 30):
            try:
                logger.debug("************ Try %d for node '%s' (%s)..." %
                             (i, n, ip))
                stream = open_url("http://%s:9980/events" % ip)
                online = True
                break
            except:
                time.sleep(1)
                continue
        if not online:
            logger.info(
                "************ Node '%s' (%s) does not come online, aborting!" %
                (n, ip))
            subprocess.call(["./helpers.sh", "stop"])
            sys.exit(1)
        logger.debug("************ Node '%s' (%s) is online..." % (n, ip))
        sse_streams[stream] = ip
        send_command(ip, "stop")

    logger.info("******** Configuring node filters (%s)..." % args.filters)
    with open(args.filters, "r") as f:
        code = "\n".join(["task = %s" % str(task), f.read()])
        for n in sorted(list(G.nodes())):
            send_command(G.node[n]["ip"], "load_filters", {"code": code})

    logger.info("******** Starting routers...")
    for n in sorted(list(G.nodes())):
        send_command(G.node[n]["ip"], "start", {
            "router": task["router"],
            "settings": settings
        })
    time.sleep(1)

    logger.info("******** Configuring node connections...")
    for n in sorted(list(G.nodes())):
        for neighbor in G[n]:
            send_command(G.node[n]["ip"], "connect",
                         {"addr": G.node[neighbor]["ip"]})
    time.sleep(2)

    logger.info(
        "******** Configuring node roles (%d publishers, %d subscribers)..." %
        (task["publishers"], task["subscribers"]))
    role_to_command = {"subscriber": "subscribe", "publisher": "publish"}
    for n in sorted(list(G.nodes())):
        for roletype, channellist in G.node[n]["roles"].items():
            for channel in channellist:
                send_command(G.node[n]["ip"], role_to_command[roletype],
                             {"channel": channel})

    logger.info(
        "******** Waiting at most %.3f seconds for routers doing their work..."
        % task["runtime"])
    #TODO: implement reading of sse streams (read log entries and write them to files like the helpers.sh script does)
    #TODO: additionally implement a way for the filter scripts to signal that a node completed its measurement allowing for
    #TODO: an early evaluation stop instead of waiting the full task["runtime"] every time
    #timeout = time.time() + task["runtime"]
    #while time.time() < timeout:
    #(ready_to_read, _, exceptions_list) = select.select(list(sse_streams.keys()), [], list(sse_streams.keys()), 1.0)
    #for entry in exceptions_list:
    #ip = sse_streams[entry]
    #logger.warning("Node %s had an error, ignoring this node now..." % str(ip))
    #del sse_streams[entry]
    #for entry in ready_to_read:
    #ip = sse_streams[entry]
    #events = handle_sse(entry, ip)
    #for e in events:
    #logger.error(str(e))

    time.sleep(task["runtime"])

    logger.info("******** Stopping routers and killing nodes...")
    for n in sorted(list(G.nodes())):
        send_command(G.node[n]["ip"], "stop", ignore_errors=True)
    time.sleep(2)
    subprocess.call(["./helpers.sh", "stop"])
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    return extract_data(task, task_imports)