Example #1
0
    def _test_populate(self):
        """Fill the scene with two demo tracks, each carrying one frame."""
        from uuid import uuid1 as uuidgen
        now = datetime.utcnow()

        def _mins(n):
            # Shorthand for a timedelta of n minutes.
            return timedelta(minutes=n)

        # Visible span: starts 10 minutes in the past, 30 minutes wide.
        self._span = Span(now - _mins(10), _mins(30))
        track0 = QTrackItem(self, self.coords, 'IMAGE:test::timeline:GOES-21:QBI:mars', 1,
                            "G21 QBI B99 BT", "test track", tooltip="peremptorily cromulent")
        frame01 = QFrameItem(track0, self.coords, uuidgen(), now + _mins(5), _mins(5),
                             Flags([VisualState.BUSY]), "abi1", "fulldiskimus")
        track1 = QTrackItem(self, self.coords, 'IMAGE:test::timeline:Himawari-11:AHI:mars', 0,
                            "H11 AHI B99 Rad", "second test track", tooltip="nominally cromulent")
        frame11 = QFrameItem(track1, self.coords, uuidgen(), now + _mins(6), _mins(1),
                             Flags([VisualState.READY]), "ahi1", "JP04")
        self.propagate_max_z()
        for trk in (track0, track1):
            trk.update_pos_bounds()
            trk.update_frame_positions()
        # Keep references alive so the scene items are not garbage-collected.
        self.content = [track0, frame01, track1, frame11]
Example #2
0
    def test_lsblk_read(self):
        """Save a writer and a reader job_exec that share the same uuids."""

        session = self.SessionFactory()
        writer = job_exec.job_exec()
        # Shared identifiers handed to both save() calls below.
        new_uuid1 = str(uuidgen())
        new_uuid2 = str(uuidgen())
        new_uuid3 = str(uuidgen())
        writer.job_class = "lsblk_query"
        writer.cmdln = "lsblk  --output %s  --pairs" % ( ",".join(lsblk_wantedFields) )
        writer.save(
            uuid_tempate=new_uuid1,
            uuid_execution=new_uuid2,
            cmdln_template=new_uuid3,
            cmdln_paramters="",
            uuid_job_def="",
            session=session,
            )
        # NOTE(review): despite the test name, the reader uses job_class
        # "lsblk_query" here, whereas init_blockdevice_scan uses
        # "lsblk_read" for its reader -- confirm which is intended.
        reader = job_exec.job_exec()
        reader.job_class = "lsblk_query"
        reader.cmdln = ""
        reader.save(
            uuid_tempate=new_uuid1,
            uuid_execution=new_uuid2,
            cmdln_template=new_uuid3,
            cmdln_paramters="",
            uuid_job_def="",
            session=session,
            )
        new = job_exec.job_exec()  # NOTE(review): unused -- snippet appears truncated here
Example #3
0
    def test_session_set(self):
        """A session supplied at construction time is honoured by save()."""
        session = self.SessionFactory()
        record = job_exec.job_exec(session = session)
        record.session = session

        uid_template, uid_execution, uid_cmdln = (str(uuidgen()) for _ in range(3))
        record.job_class = "lsblk_query"
        self.assertTrue(record.job_class == "lsblk_query")
        record.save(
            session=session,
            uuid_tempate=uid_template,
            uuid_execution=uid_execution,
            cmdln_template=uid_cmdln,
            cmdln_paramters="",
            uuid_job_def="",
            )
Example #4
0
    def setUp(self):
        """Build a throw-away sqlite database and seed two job definitions."""
        self.log = logging.getLogger("TestJobManager")

        # Back the database with a named temp file so sqlite can reopen it.
        f = tempfile.NamedTemporaryFile(delete=False)
        databaseConnectionString = "sqlite:///%s" % (f.name)

        self.engine = create_engine(databaseConnectionString, echo=False)
        model.init(self.engine)
        self.SessionFactory = sessionmaker(bind=self.engine)
        devices.initial_data_add_enumerated(self.SessionFactory())
        session = self.SessionFactory()
        jobmanager = job_manage.job_manage()
        jobmanager.session = session
        jobmanager.create_job_class(name = "lsblk_query")
        jobmanager.create_job_class(name = "lsblk_read")
        self.jobmanager_uuid1 = str(uuidgen())
        self.jobmanager_uuid2 = str(uuidgen())
        self.jobmanager_uuid3 = str(uuidgen())

        # Return values were previously bound to unused rc1/rc2 locals.
        jobmanager.create_job_def(
                uuid=self.jobmanager_uuid1,
                job_class = "lsblk_read",
                cmdln_template = "ls",
                reocuring = 1,
            )
        jobmanager.create_job_def(
                uuid=self.jobmanager_uuid2,
                job_class = "lsblk_query",
                cmdln_template = "ls",
                reocuring = 1,
            )
        job_details1 = jobmanager.get_job_def(
            uuid = self.jobmanager_uuid1,
            session =session,
            )

        # BUG FIX: this previously looked up jobmanager_uuid1 a second time,
        # so the second job definition was never fetched and re-saved.
        job_details2 = jobmanager.get_job_def(
            uuid = self.jobmanager_uuid2,
            session =session,
            )

        job_details1.save()
        job_details2.save()
    def _add_token(self, token):
        """Store *token* in the cache under a fresh uuid1 hex key.

        The on-disk cache is re-read, updated, and written back while
        holding ``_cache_lock``; the caller must NOT already hold the lock.

        :param token: Dict of the token to be added to the cache
        :returns: the hex uuid string the token was stored under
        """
        token_id = uuidgen().hex
        self.debug('Adding token %s to cache', token_id)
        with self._cache_lock:
            self.__refresh_cache()
            self._cache[token_id] = token
            self.__write_cache()
        return token_id
Example #6
0
 def new(self, title, draft=False):
     """Create a new post titled *title*, persist it, and return its filename.

     :param title: post title; also used to derive the slug/filename.
     :param draft: when True the post file is created as a draft.
     """
     self.title = title
     self.filename = newfile(slugify(self.title), draft)
     self.guid = str(uuidgen())
     blog_cfg = self.blog.config['blog']
     self.author = blog_cfg['defaultauthor']
     self.categories = blog_cfg['categories']
     self.updated = self.date
     self.permalink = '{}/{}/{}'.format(blog_cfg['url'],
                                        self.date.strftime('%Y/%m/%d'),
                                        slugify(self.title))
     self.save()
     return self.filename
Example #7
0
 def new(self, title, draft=False):
     """Create and save a new post; returns the filename it was written to."""
     self.title = title
     slug = slugify(self.title)
     self.filename = newfile(slug, draft)
     self.guid = str(uuidgen())
     self.author = self.blog.config['blog']['defaultauthor']
     self.categories = self.blog.config['blog']['categories']
     self.updated = self.date
     # Permalink layout: <blog url>/<YYYY/MM/DD>/<slug>
     self.permalink = '{}/{}/{}'.format(self.blog.config['blog']['url'],
                                        self.date.strftime('%Y/%m/%d'),
                                        slug)
     self.save()
     return self.filename
Example #8
0
 def post(self, _):
     """Handle POST by delegating to ``_create`` with a freshly minted uuid."""
     new_id = str(uuidgen())
     return self._create(None, new_id)
Example #9
0
def _post_md5_task():
    """Return a JSON HTTP 200 response describing a new md5 task.

    The payload carries a fresh uuid1 id and 100 generated content items.
    """
    payload = {
        'id': str(uuidgen()),
        'b': [content_maker.next() for _ in range(100)],
    }
    return Response(json.dumps(payload), status=200, mimetype='application/json')
Example #10
0
 def post(self, _):
     """POST handler: create a resource keyed by a newly generated uuid."""
     generated = str(uuidgen())
     return self._create(None, generated)
Example #11
0
    def save(self, *args, **kwargs):
        """Persist this job definition and synchronise its trigger links.

        Keyword arguments override the corresponding instance attributes:
        ``session``, ``uuid``, ``cmdln_template``, ``job_class``.

        :raises InputError: if no session, uuid or cmdln_template is available.
        :returns: False when job_class is missing; otherwise None.
        """
        session = kwargs.get('session', None)
        if session is None:
            session = self.session
        if session is None:
            raise InputError("No session set")

        uuid_save = kwargs.get('uuid', None)
        if uuid_save is None:
            uuid_save = self.uuid
        if uuid_save is None:
            raise InputError("No uuid set")

        cmdln_template = kwargs.get('cmdln_template', None)
        if cmdln_template is None:
            cmdln_template = self.cmdln_template
        if cmdln_template is None:
            raise InputError("No cmdln_template set")

        # BUG FIX: a second, duplicated session check lived here, containing
        # a stray undefined name ('sdsdfsdfs') that would raise NameError if
        # ever reached.  The check above already raises on a missing session,
        # so the duplicate block was dead code and has been removed.

        job_class = kwargs.get('job_class', None)
        if job_class is None:
           job_class = self.job_class
        if job_class is None:
            self.log.error("No job_class set")
            return False

        # Finished input validation

        # Ensure the namespace row for this job_class exists.
        query_job_namespace = session.query(model.job_namespace).\
                filter(model.job_namespace.name == job_class)
        if query_job_namespace.count() == 0:
            job_namespace = model.job_namespace()
            job_namespace.name = job_class
            session.add(job_namespace)
            session.commit()
            query_job_namespace = session.query(model.job_namespace).\
                filter(model.job_namespace.name == job_class)

        job_namespace = query_job_namespace.one()

        # Insert the job_def row if it does not exist yet, then update it.
        query_job_def = session.query(model.job_def).\
                filter(model.job_def.uuid_job_def == uuid_save)

        if query_job_def.count() == 0:
            job_def = model.job_def()
            job_def.fk_type = job_namespace.id
            # BUG FIX: use the validated local (which honours the kwarg
            # override) instead of unconditionally self.cmdln_template.
            job_def.cmdln_template = cmdln_template
            job_def.cmdln_paramters = self.cmdln_paramters
            job_def.reocuring = self.reocuring
            job_def.uuid_job_def = uuid_save
            session.add(job_def)
            session.commit()
            query_job_def = session.query(model.job_def).\
                filter(model.job_def.uuid_job_def == uuid_save)

        job_def = query_job_def.one()
        job_def.cmdln_template = cmdln_template
        job_def.cmdln_paramters = self.cmdln_paramters
        job_def.reocuring = self.reocuring
        job_def.fk_type = job_namespace.id
        session.add(job_def)
        session.commit()

        # Collect the uuids currently linked: subscribers (we are the
        # trigger source) and publishers (we are the trigger dest).
        subscribers_found = set()
        publishers_found = set()

        source_job = aliased(model.job_def, name='source_job')
        dest_job = aliased(model.job_def, name='dest_job')

        query_subscribers = session.query(dest_job).\
                filter(source_job.uuid_job_def == uuid_save).\
                filter(model.job_triggers.dest == dest_job.id).\
                filter(model.job_triggers.source == source_job.id)

        for item in query_subscribers:
            subscribers_found.add(item.uuid_job_def)

        query_publishers = session.query(source_job.uuid_job_def).\
                filter(dest_job.uuid_job_def == uuid_save).\
                filter(model.job_triggers.dest == dest_job.id).\
                filter(model.job_triggers.source == source_job.id)

        for item in query_publishers:
            publishers_found.add(item.uuid_job_def)

        # BUG FIX: the publisher comparisons previously used
        # subscribers_found, so publisher links were diffed against the
        # wrong set.
        subscribers_missing = self.subscribe_list.difference(subscribers_found)
        publishers_missing = self.publish_list.difference(publishers_found)
        subscribers_extra = subscribers_found.difference(self.subscribe_list)
        publishers_extra = publishers_found.difference(self.publish_list)

        self.log.debug("subscribers_missing=%s" % (subscribers_missing))
        # NOTE(review): for missing *subscribers* the new trigger sets
        # source=<other job>, dest=<this job>, which is the opposite
        # direction to query_subscribers above -- confirm intended.
        for missing_uuid in subscribers_missing:
            query_new = session.query(model.job_def).\
                filter(model.job_def.uuid_job_def == missing_uuid)
            for row in query_new:
                newtrigger = model.job_triggers()
                newtrigger.source = row.id
                newtrigger.dest = job_def.id
                newtrigger.sk_uuid = str(uuidgen())
                session.add(newtrigger)
                session.commit()

        self.log.debug("publishers_missing=%s" % (publishers_missing))
        for missing_uuid in publishers_missing:
            query_new = session.query(model.job_def).\
                filter(model.job_def.uuid_job_def == missing_uuid)
            for row in query_new:
                newtrigger = model.job_triggers()
                newtrigger.source = job_def.id
                newtrigger.sk_uuid = str(uuidgen())
                newtrigger.dest = row.id
                session.add(newtrigger)
                session.commit()
        self.log.debug("subscribers_extra=%s" % (subscribers_extra))
        self.log.debug("publishers_extra=%s" % (publishers_extra))
Example #12
0
 def __init__(self, name):
     """Initialise an empty, not-yet-started party called *name*."""
     self.uuid = uuidgen()
     self.party_name = name
     self.players = set()
     self.currently_playing = ""
     self.game_over = False
                    "0.0.0.0"
                })

    # Check if network metadata is not skipped
    if len(network_data) > 0:
        with open(net_file_path, 'w') as net_file:
            json_dump(network_data, net_file)

    generate_iso(cfg_file_path, cfg_dir_path, args.quiet)
    if args.clean_up:
        xprint("Cleaning up working dir.")
        rmtree(cfg_dir_path)


if __name__ == '__main__':
    uuid = str(uuidgen())
    parser = argparse.ArgumentParser(
        description='Config drive generator for MCP instances.',
        prog=argv[0],
        usage='%(prog)s [options]')
    parser.add_argument(
        '--gpg-key',
        type=str,
        help=
        'Upload gpg key for salt master. Specify path to file in asc format.',
        required=False)
    parser.add_argument(
        '--name',
        type=str,
        default=uuid,
        help='Specify instance name. Hostname in short format, without domain.',
Example #14
0
def dosync(configs):
    """Run an rsync-based backup described by *configs*.

    :param configs: dict with keys host, src-dir, dst-dir, asroot, keep,
        proto, password, username, port, rsh-opts, inc-backup.
    :return: None.  Errors are reported on stdout and abort the sync.
    """

    bkupsrc = """%s:""" % configs['host'] if configs['host'] else ''
    # Escape spaces in the source path for the remote shell.
    # (was '\ ' -- an invalid escape sequence with the same value)
    bkupsrc += """%s""" % os.path.join(configs['src-dir'], '').replace(
        ' ', '\\ ')
    bkupdst = configs['dst-dir']

    today = datetime.datetime.now().strftime("%Y-%m-%d")
    uuid = uuidgen()

    # --- check commands -------------------------------------------------
    cmd_rsync_r = CMD_RSYNC if not configs[
        'asroot'] else CMD_SUDO + " " + CMD_RSYNC
    if not is_executable(CMD_RSYNC):
        print("command not found 'rsync', please install it.")
        print("sudo apt-get install -y rsync")

    if configs['asroot'] and not is_executable(CMD_SUDO):
        print("command not found 'sudo', please install it.")
        print("sudo apt-get install -y sudo")

    # --- check destination directory ------------------------------------
    if not os.path.isdir(configs['dst-dir']):
        # BUG FIX: the original test was inverted -- it aborted when the
        # path did not exist and tried to mkdir over an existing file.
        if os.path.isfile(configs['dst-dir']):
            print("Destination path is a file, not a directory. please check it")
            return

        print("Destination directory does not existed. create it")
        # BUG FIX: the python2/python3 branches here were identical; merged.
        makedirs(configs['dst-dir'])

    # --- change current working directory -------------------------------
    os.chdir(configs['dst-dir'])

    rsync_opts = """ -apvz """

    if not configs['keep']:
        rsync_opts += """ --delete """

    if configs['proto'] == 'ssh':
        if configs['password']:
            rshopts = " /usr/bin/sshpass -p %s ssh %s " % (configs['password'],
                                                           configs['rsh-opts'])
        else:
            rshopts = " ssh %s " % configs['rsh-opts']

        if configs['username']:
            rshopts += " -l %s " % configs['username']

        # BUG FIX: 'is not 22' compared object identity, not value.
        if configs['port'] != 22:
            rshopts += " -p %d " % configs['port']

        rsync_opts += """ -e "%s" """ % rshopts

    rsync_opts += """ --rsync-path="%s" """ % cmd_rsync_r
    rsync_opts += """ --numeric-ids """

    # Incremental backups go into a dated subdirectory named by uuid;
    # full backups log next to the destination.
    if configs['inc-backup']:
        logdir = "%s/%s" % (configs['dst-dir'], today)
        bkupname = str(uuid)
    else:
        logdir = configs['dst-dir'] + "/../Log"
        bkupname = os.path.basename(configs['dst-dir'])

    makedirs(logdir)
    logpath = os.path.join(logdir, bkupname + "-" + today + ".log")
    logfile = open(logpath, 'w')
    logfile.write(str(datetime.datetime.now()) + '\n')

    # --- sync the excludes marker file first ----------------------------
    rsync_excl_opts = """ --delete --include "%s" --exclude="*" """ % EXCLUDES

    try:
        rsync(bkupsrc, bkupdst, rsync_opts + rsync_excl_opts, logfile)
    except Exception:
        print(
            "Error during backup process. please see more details inside log file `%s`"
            % logpath)
        return

    # --- main sync ------------------------------------------------------
    if configs['inc-backup']:
        bkupdst = configs['dst-dir'] + "/" + today + "/" + str(uuid)
        makedirs(bkupdst)

    # If .backupman.excludes exists, append an --exclude-from option.
    excloc = os.path.join(configs['dst-dir'], EXCLUDES)
    if os.path.isfile(excloc):
        rsync_opts += """ --exclude-from "%s" """ % excloc

    try:
        rsync(bkupsrc, bkupdst, rsync_opts, logfile, verbose=True)
    except Exception:
        print(
            "Error during backup process. please see more details inside log file `%s`"
            % logpath)
        return

    if not configs['inc-backup']:
        return

    # --- maintain a 'lastest' symlink to the newest incremental backup --
    lastest = os.path.join(configs['dst-dir'], 'lastest')

    if os.path.exists(lastest):
        if not os.path.islink(lastest):
            print("`%s` is not link. can't create lastest link." % lastest)
            return
        os.remove(lastest)

    os.symlink(os.path.relpath(bkupdst, configs['dst-dir']), lastest)
Example #15
0
def test_satpy_importer_basic(tmpdir, monkeypatch, mocker):
    """Basic import test using Satpy."""
    from uwsift.workspace import Workspace
    from uwsift.model.layer import DocBasicLayer
    from uuid import uuid1 as uuidgen
    workspace = Workspace(str(tmpdir))
    c01_attrs = {
        Info.SHORT_NAME: 'C01',
        Info.DATASET_NAME: 'C01',
        Info.CENTRAL_WAVELENGTH: 2.0,
        Info.UUID: uuidgen(),
    }
    c03_attrs = {
        Info.SHORT_NAME: 'C03',
        Info.DATASET_NAME: 'C03',
        Info.CENTRAL_WAVELENGTH: 4.0,
        Info.UUID: uuidgen(),
    }
    doc = mocker.MagicMock()
    # Metadata shared by both channels.
    shared_time = datetime(2018, 9, 10, 17, 0, 31, 100000)
    for attrs in (c01_attrs, c03_attrs):
        attrs.update({
            Info.ORIGIN_X: -5434894.885056,
            Info.ORIGIN_Y: 5434894.885056,
            Info.CELL_HEIGHT: 1000.0,
            Info.CELL_WIDTH: 1000.0,
            Info.STANDARD_NAME: "toa_bidirectional_reflectance",
            Info.VALID_RANGE: (0, 120),
            Info.SCHED_TIME: shared_time,
            Info.OBS_TIME: shared_time,
            Info.SHAPE: (2, 2),
            Info.PROJ: "+proj=merc",
            Info.FAMILY: "family",
            Info.CATEGORY: "category",
            Info.SERIAL: "serial",
            Info.PLATFORM: Platform.GOES_16,
            Info.INSTRUMENT: Instrument.ABI,
        })

    c01 = DocBasicLayer(doc, c01_attrs)
    c03 = DocBasicLayer(doc, c03_attrs)
    ops = "z = x - y"
    ns = {'y': c03_attrs[Info.UUID], 'x': c01_attrs[Info.UUID]}

    def get_metadata(u):
        return c01 if u == c01_attrs[Info.UUID] else c03

    def get_content(u):
        # C01 is all ones, C03 all zeros, so x - y == 1 everywhere.
        if u == c01_attrs[Info.UUID]:
            return np.ones((2, 2))
        return np.zeros((2, 2))

    monkeypatch.setattr(workspace, 'get_metadata', get_metadata)
    monkeypatch.setattr(workspace, 'get_content', get_content)
    uuid, info, data = workspace.create_algebraic_composite(
        ops, ns, info={Info.SHORT_NAME: 'new', Info.DATASET_NAME: 'new'})

    np.testing.assert_equal(data, 1)
    assert info.get(Info.STANDARD_NAME) == "unknown"
Example #16
0
def init_blockdevice_scan(session):
    """Seed block-device state by chaining lsblk, udev and mount jobs.

    Runs at most once: returns immediately if BlockUpdateLsblk rows already
    exist.  Each *query* job's outputjson feeds the matching *read* job via
    inputjson.

    :param session: database session assigned to every job_exec created here.
    """
    log = logging.getLogger("init_blockdevice_scan")
    # Bail out if a previous scan already populated the table.
    BlockUpdateLsblk_count = session.query(model.BlockUpdateLsblk).count()
    if BlockUpdateLsblk_count > 0:
        return
    writer = job_exec.job_exec()
    writer.session = session
    # Shared identifiers reused by every save() call below.
    new_uuid1 = str(uuidgen())
    new_uuid2 = str(uuidgen())
    new_uuid3 = str(uuidgen())
    writer.job_class = "lsblk_query"
    writer.cmdln = "lsblk  --output %s  --pairs" % ( ",".join(lsblk_wantedFields) )
    writer.save(
        session=session,
        uuid_tempate=new_uuid1,
        uuid_execution=new_uuid2,
        cmdln_template=new_uuid3,
        cmdln_paramters="",
        uuid_job_def="",
        )
    writer.run()
    # Reader parses the raw lsblk output produced by the writer.
    reader = job_exec.job_exec()

    reader.session = session
    reader.job_class = "lsblk_read"
    reader.cmdln = ""
    reader.save(
        session=session,
        uuid_tempate=new_uuid1,
        uuid_execution=new_uuid2,
        cmdln_template=new_uuid3,
        cmdln_paramters=writer.outputjson,
        uuid_job_def="",
        )

    reader.inputjson = writer.outputjson
    reader.run()
    if reader.outputjson == None:
        return

    # One udev query/read pair per block device reported by lsblk.
    # (variable name 'json_copntent' is a typo preserved from the original)
    json_copntent = json.loads(reader.outputjson)
    for item in json_copntent:

        job_query_udev = job_exec.job_exec()
        job_query_udev.session = session
        job_query_udev.job_class = "udev_query"
        job_query_udev.cmdln = item["cmdln"]
        job_query_udev.save(
            session=session,
            uuid_tempate=new_uuid1,
            uuid_execution=new_uuid2,
            cmdln_template=new_uuid3,
            cmdln_paramters=writer.outputjson,
            uuid_job_def="",
            )
        job_query_udev.run()

        job_read_udev = job_exec.job_exec()
        job_read_udev.session = session
        job_read_udev.job_class = "udev_read"
        job_read_udev.save(
            session=session,
            uuid_tempate=new_uuid1,
            uuid_execution=new_uuid2,
            cmdln_template=new_uuid3,
            cmdln_paramters="",
            uuid_job_def="",
            )
        job_read_udev.inputjson = job_query_udev.outputjson
        job_read_udev.run()

    job_mount_query = job_exec.job_exec()
    job_mount_query.session = session
    job_mount_query.job_class = "mount_query"
    job_mount_query.save(
        session=session,
        uuid_tempate=new_uuid1,
        uuid_execution=new_uuid2,
        cmdln_template=new_uuid3,
        cmdln_paramters="",
        uuid_job_def="",
        )
    # NOTE(review): job_query_udev is the last loop variable; if lsblk
    # reported no devices this raises NameError, and feeding the *udev*
    # output into the mount query looks suspicious -- confirm intended.
    job_mount_query.inputjson = job_query_udev.outputjson
    job_mount_query.run()

    job_mount_read = job_exec.job_exec()
    job_mount_read.session = session
    job_mount_read.job_class = "mount_read"
    job_mount_read.save(
        session=session,
        uuid_tempate=new_uuid1,
        uuid_execution=new_uuid2,
        cmdln_template=new_uuid3,
        cmdln_paramters="",
        uuid_job_def="",
        )

    job_mount_read.inputjson = job_mount_query.outputjson
    job_mount_read.run()