Beispiel #1
0
    def add_lctr_ref_md5(self, opts=None, sa=None, *refs):
        """Add locator and ref_md5 attr for URLs.

        `refs` is either a list of URL strings or of Locator instances;
        only refs[0] is inspected to decide which (mixed lists are not
        supported).
        """
        if refs:
            if isinstance( refs[0], basestring ):
                # Delegate string refs to add_lctrs with the ref_md5
                # option enabled.
                opts.ref_md5 = True
                # NOTE(review): `lctrs` is assigned but unused before the
                # early return -- the comprehension appears to exist only to
                # consume the add_lctrs generator. Confirm this is intended.
                lctrs = [ ret['lctr'] for ret in self.add_lctrs(sa, opts, *refs) ]
                return
            else:
                assert isinstance( refs[0], Locator), refs
                lctrs = refs

            for lctr in lctrs:
                # Prefer the explicit ref; fall back to the global ID.
                ref = lctr.ref or lctr.global_id
                ref_md5 = hashlib.md5( ref ).hexdigest()
                # Reuse an existing MD5Digest row when one matches.
                md5 = MD5Digest.fetch(( MD5Digest.digest == ref_md5, ),
                        exists=False)
                if not md5:
                    md5 = MD5Digest( digest=ref_md5,
                            date_added=datetime.now() )
                    sa.add( md5 )
                    log.info("New %s", md5)
                lctr.ref_md5 = md5
                sa.add( lctr )
                if opts.rsr_auto_commit:
                    sa.commit()
                log.note("Updated ref_md5 for %s to %s", lctr, md5)
Beispiel #2
0
def rsr_update_metafiles(prog=None, volume=None, volumedb=None, opts=None):
    """Walk metafiles under the program cwd, refresh stale ones and report."""
    log.debug("{bblack}rsr{bwhite}:update-volume{default}")
    seen = 0
    for path in res.Metafile.walk(prog.pwd):
        print(path)
        seen += 1
        new = False
        updated = False
        metafile = res.Metafile(path)
        if metafile.needs_update():
            log.note("Updating metafile for %s", metafile.path)
            metafile.update()
            updated = True
        if not (new or updated):
            print('\tOK')
        else:
            # Persist the refreshed metafile and report its fields.
            metafile.write()
            for k in metafile.data:
                print('\t'+k+':', metafile.data[k])
            print('\tSize: ', lib.human_readable_bytesize(
                metafile.data['Content-Length'], suffix_as_separator=True))
    volume.store.sync()
Beispiel #3
0
 def name_and_categorize(self, opts=None, sa=None,
         name=None, mtype=None, mformat=None, genres=None, *paths):
     if len(paths) > 1:
         assert opts.interactive
         for path in paths:
             for subpath in res.fs.Dir.walk(paths, opts):#dict(recurse=True)):
                 print subpath
     elif not opts.interactive:
         path = paths[0]
         mm = Mediameta(name=name)
         if mtype:
             mt = [ ret['mt'] for ret in self.add_mtype( mtype, None, opts=opts, sa=sa )
                     if 'mt' in ret ].pop()
             mm.mediatype = mt
         if mformat:
             mf = [ ret['mf'] for ret in self.add_mformats( opts, sa, mformat )
                     if 'mf' in ret ].pop()
             mm.mediaformat = mf
         if genres:
             mm.genres = [ ret['genre']
                     for ret in self.add_genre( genre, None, opts=opts, sa=sa )
                     if 'genre' in ret ]
         sa.add(mm)
         sa.commit()
         log.note("Created media %s", mm)
Beispiel #4
0
 def signalGetType(xml_node):
     """Return the node's 'type' attribute, or a role-based default."""
     declared = xml_node.attrib.get('type', '')
     if declared:
         return declared
     # No explicit type given: vector tags and plain signals fall back to
     # different defaults.
     if xml_node.tag.endswith('_vec'):
         fallback = io_vector_default_type
     else:
         fallback = io_signal_default_type
     log.note("getType: no type defined, defaulting to: " + fallback)
     return fallback
Beispiel #5
0
	def __sendHiz(self):
		"""Build and transmit the current HIZ command frame."""
		pair = self.impl.hiz[self.cur_hiz]
		log.note("Sending hiz {0:d}->{1:d}".format(pair[0], pair[1]))

		# Frame layout: command byte, two HIZ values, trailing checksum.
		frame = [int(HwSimCommand.HIZ), pair[0], pair[1]]
		frame.append(self.__checkSum(frame, len(frame)))

		log.debug("HIZ frame = " + str(frame))
		self.comm.write(bytearray(frame))
Beispiel #6
0
def get_session(dbref, initialize=False, metadata=SqlBase.metadata):
    """Create an SQLAlchemy session for `dbref`, optionally issuing DDL."""
    if dbref.startswith('sqlite'):
        # SQLite needs its connection-event hook registered first.
        register_sqlite_connection_event()
    engine = create_engine(dbref)
    metadata.bind = engine
    if initialize:
        log.info("Applying SQL DDL to DB %s..", dbref)
        metadata.create_all()  # issue DDL create
        log.note('Updated schema for %s to %s', dbref, 'X')
    return sessionmaker(bind=engine)()
Beispiel #7
0
 def rsr_info(self, prog, context, opts, sa, nodes):
     """Log some session statistics and info"""
     log.note("SQLAlchemy session: %s", sa)
     models = (taxus.core.ID, Node, Name, Tag, taxus.GroupNode,
             taxus.INode, taxus.Locator)
     counts = {}
     for model in models:
         counts[model] = sa.query(model).count()
         log.note("Number of %s: %s", model.__name__, counts[model])
     if 'node' in self.globaldict and self.globaldict.node:
         log.info("Auto commit: %s", opts.rsr_auto_commit)
         log.info("%s", self.globaldict.node)
     sys.stderr.write('rsr-info: see notice log (-vvv)\n')
Beispiel #8
0
 def __buildTestSequence(self, xml_node):
     """Build a TestSequence from the <vec> children of `xml_node`."""
     sequence = CLS.TestSequence()
     for cmd in xml_node.findall('vec'):
         value = cmd.text
         # A leading '####' marks a step whose interval was not specified:
         # substitute the component-level default interval.
         if value[0:4] == '####':
             interval = UT.TimeUtils.timeToInterval(
                 self.meta.component.interval)
             value = value.replace('####', interval)
             log.note(
                 "buildTestSequence: no interval defined, defaulting to: \'"
                 + interval + "\'")
         sequence.list.append(value)
         log.info("buildTestSequence: built sequence step with values: \'" +
                  value + "\'")
     return sequence
Beispiel #9
0
	def __sendVector(self):
		"""Serialize the current test vector into a frame and send it."""
		vec = self.impl.vs[self.cur_vec]
		log.note("Sending vector {0:d}/{1:d}".format(self.cur_vec + 1, self.impl.md.vectors))

		frame = [int(HwSimCommand.CFG_VECTOR)]
		# 16-bit vector number, little endian.
		frame.append(vec.vector_num & 0xFF)
		frame.append((vec.vector_num >> 8) & 0xFF)
		frame.append(vec.testcase)
		# 32-bit interval, little endian.
		frame.extend(vec.interval.to_bytes(4, byteorder="little"))
		# One content byte per signal-map entry.
		for entry in self.impl.sm:
			frame.append(ord(vec.content[entry[2]]))
		frame.append(self.__checkSum(frame, len(frame)))

		log.debug("Vector frame = " + str(frame))
		self.comm.write(bytearray(frame))
Beispiel #10
0
 def _assert_node(self, Klass, name, sa, opts):
     """
     Helper for node creation: find a Klass node by name, creating and
     registering it in the session when missing. Yields dict(node=node).
     """
     assert name
     node = Klass.find(( Klass.name==name, ), sa=sa)
     if not node:
         node = Klass(name=name, date_added=datetime.now())
         sa.add(node)
         log.info("Added new node to session: %s", node)
         if opts.rsr_auto_commit:
             sa.commit()
     elif name != node.name:
         node.name = name
     yield dict( node = node )
     log.note('Asserted %s', node)
Beispiel #11
0
def cmd_read_issues(settings, opts, tasks_file, grep_file):
    """
        Read new issues from grep-list and write to tasks-doc.
    """
    issues = Issue.parse_doc(tasks_file, settings.project_slug)
    for comment in Comment.parse_tag_grep(grep_file, settings):
        # Comments without an issue ID cannot be matched; warn and move on.
        if not comment.issue_id:
            log.warn("No %r ID for %s:%s: %s", settings.project_slug,
                    comment.srcfile, comment.line_nr or '', comment.text )
            continue
        # TODO: scan for project slug, and match with taskdoc.
        if comment.issue_id in issues:
            log.info("Existing issue for comment: %s" % comment)
            # TODO: check, update from changed comment
        else:
            log.note("New issue from comment: %s" % comment)
            issues[comment.issue_id] = Issue.from_comment(comment, settings)
def setpass():
    """Routing rule to set the password"""
    if flask.request.method != "POST":
        return setpass_text
    form = flask.request.form
    # The current password must be confirmed before any change.
    if not password.check(form['password']):
        log.warn("Failed password check")
        return ("Incorrect current password. If you can't remember it, "
                "try running this: <code>python3 password.py</code> in "
                "the application directory.")
    if form['npass1'] != form['npass2']:
        return "Passwords don't match"
    log.note("Changing password")
    password.set(form['npass1'])
    return "ok"
Beispiel #13
0
 def stats(self, prog=None, opts=None, sa=None):
     """Report URL/bookmark counts and purge records missing key refs."""
     assert sa, (opts, sa)
     log.note("Number of URLs: %s", sa.query(Locator).count())
     log.note("Number of bookmarks: %s", sa.query(Bookmark).count())
     # Locators without a global ID are unusable; drop them.
     for lctr in sa.query(Locator).filter(Locator.global_id==None).all():
         lctr.delete()
         log.note("Deleted Locator without global_id %s", lctr)
     # Bookmarks that lost their locator reference are dropped as well.
     for bm in sa.query(Bookmark).filter(Bookmark.ref_id==None).all():
         bm.delete()
         log.note("Deleted bookmark without ref %s", bm)
Beispiel #14
0
 def mm_stats(self, sa=None):
     """Report counts of media format, type and meta records."""
     mediaformats = sa.query(Mediaformat).count()
     log.note("Number of mediaformat's: %s", mediaformats)
     mediatypes = sa.query(Mediatype).count()
     log.note("Number of mediatype's: %s", mediatypes)
     mediametas = sa.query(Mediameta).count()
     log.note("Number of mediameta's: %s", mediametas)
Beispiel #15
0
def txs_session(prog=None, sa=None, opts=None, settings=None):
    """
    Set up the default SQLAlchemy session and resolve the current Host,
    creating Name/Host records when opts.init is set. Yields a Keywords
    result carrying the session and a LocalPathResolver.
    """
    # default SA session
    dbref = opts.dbref
    if opts.init:
        log.debug("Initializing SQLAlchemy session for %s", dbref)
    # NOTE(review): the `sa` parameter is unconditionally replaced here.
    sa = SessionMixin.get_session('default', opts.dbref, opts.init)
    # Host
    hostnamestr = current_hostname(opts.init, opts.interactive)
    if opts.init:
        # Ensure a Name record exists for the local hostname.
        hostname = hostname_find([hostnamestr], sa)
        assert not hostname or not isinstance(hostname, (tuple, list)), hostname
        if not hostname:
            log.note("New Name: %s", hostnamestr)
            hostname = Name(
                    name=hostnamestr,
                    date_added=datetime.now())
            hostname.commit()
        else:
            log.warn("Name exists: %s", hostname)
        assert hostname
        # Ensure a Host record referencing that Name exists.
        host = host_find([hostname], sa)
        if not host:
            log.note("New Host: %s", hostnamestr)
            host = Host(
                    hostname=hostname,
                    date_added=datetime.now())
            host.commit()
        else:
            log.warn("Host exists: %s", host)
        assert host
    else:
        # Without --init the Host must already exist; .one() raises when
        # there is no (or more than one) matching row.
        host, name = sa.query(Host, Name)\
            .join('hostname')\
            .filter(Name.name == hostnamestr).one()
        if not host:
            log.crit("Could not get host")
    urlresolver = LocalPathResolver(host, sa)
    log.info("On %s", host)
    yield Keywords(sa=sa, ur=urlresolver)
Beispiel #16
0
def txs_run(sa=None, ur=None, opts=None, settings=None):
    # Walk the tree below the cwd interactively, either collecting paths
    # (results list) or yielding resolved path objects directly depending
    # on settings.taxus.walk.yield_directly.
    log.debug("{bblack}txs{bwhite}:run{default}")
    # XXX: Interactive part, see lind.
    """
    """
    results = []
    if settings.taxus.walk.yield_directly:
        results = None
    classes = {}
    tags = {}
    if '' not in tags:
        tags[''] = 'Root'
    FS_Path_split = re.compile('[\/\.\+,]+').split
    log.info("{bblack}Tagging paths in {green}%s{default}",
            os.path.realpath('.') + sep)
    cwd = os.getcwd()
    assert isinstance(cwd, basestring), cwd
    try:
        for pathstr in res.Dir.walk_tree_interactive(cwd, opts):
            path = ur.get(pathstr, opts)
            if isinstance(results, list):
                # XXX: path is not initialized yet
                results.append(pathstr)
            else:
                yield path
            continue
            # NOTE(review): everything below the `continue` above is
            # unreachable dead code (the old tagging prompt); kept as-is.
            parts = FS_Path_split(pathstr)
            for tagstr in parts:
                try:
                    tag = sa.query(Tag).filter(Tag.name == tagstr).one()
                    log.note(tag)
                except NoResultFound, e:
                    log.note(e)
                # Ask about each new tag, TODO: or rename, fuzzy match.
                if tagstr not in tags:
                    type = raw_input('%s%s%s:?' % (
                        log.palette['yellow'], tagstr,
                        log.palette['default']) )
                    if not type: type = 'Tag'
                    tags[tagstr] = type
            log.info(pathstr)
            #log.info(''.join( [ "{bwhite} %s:{green}%s{default}" % (tag, name)
            #    for tag in parts if tag in tags] ))
    except KeyboardInterrupt, e:
        log.note(e)
        pass
Beispiel #17
0
    def rsr_session(self, prog, volume, workspace, homedir, opts):
        """
        Determine context, and from there get the session/dbref to initialize an
        SQLAlchemy session.
        The context depends on the current working directory, and defaults to
        the nearest workspace; perhaps a volume or the homedir.
        """
        session = Session.init(prog.pwd, opts.session)
        log.note('Session: %s', session)

        # Pick the migrate-repo root from the session context when one is
        # configured; otherwise fall back to the local 'sa_migrate' dir.
        if session.context and confparse.haspath(session.context, 'data.repository.root_dir'):
            prog.session = session
            yield dict(context=session.context)
            log.note('Context: %s', session.context)
            repo_root = session.context.settings.data.repository.root_dir

        else:
            repo_root = 'sa_migrate'

        # SA session
        repo_path = os.path.join(repo_root, opts.repo)

        # Prefer the DB URL from the repo's migrate config; fall back to
        # the --dbref option when no repo config exists on disk.
        if os.path.exists(repo_path):
            log.info("Reading SA migrate config for repo %r" % repo_path)
            # hard coded module name, root dir for local repos
            from sa_migrate import custom
            config = custom.read(repo_path)
            log.info("Reading SA migrate config from %r" % config)
            repo_opts = custom.migrate_opts(repo_path, config)
            dbref = repo_opts['url']
        else:
            dbref = opts.dbref

        log.note('DBRef: %s', dbref)

        if opts.init_db:
            log.debug("Initializing SQLAlchemy session for %s", dbref)
        sa = SessionMixin.get_session(opts.session, dbref, opts.init_db,
                SqlBase=SqlBase)

        yield dict(sa=sa)
Beispiel #18
0
    def static_init(self):
        """
        Initializes the `prog` variable, determines its name and working
        directory and from there looks up all configuration files.

        Using the name it then sets up all command-line options.
        """
        inheritor = self.__class__

        # Static name context for the program.
        static = StaticContext( inheritor )# XXX IStaticContext()
        yield dict( prog=dict( name = static ) )
        log.note('prog.name: %s', static)

        # Specification of configuration file paths and types.
        configspec = ConfigSpec( static )# XXX ISimpleConfigSpec(config_file)
        yield dict( prog=dict( configspec = configspec ) )
        log.note('prog.configspec: %s', configspec)

        # Aggregate all options defined along the inheritance chain.
        optspec = SimpleCommand.get_optspec( inheritor )
        yield dict( prog=dict( optspec = optspec ) )
        log.note('prog.optspec: %s', optspec)
Beispiel #19
0
def main(argv, doc=__doc__, usage=__usage__):

    """
    Execute using docopt-mpe options.

        prog [opts] [CTX] ( FILE... | DIR... )

    Resolves FILE/DIR arguments against a set of named prefix paths, then
    records (or just prints) an INode entry per resolved file.
    """

    # Process environment
    db = os.getenv( 'FINFO_DB', __db__ )
    # NOTE(review): identity comparison (`is not`) on strings -- works here
    # only because getenv returns the same default object; `!=` would be
    # the safe spelling. Left unchanged.
    if db is not __db__:
        usage = usage.replace(__db__, db)

    ctx = confparse.Values(dict(
        opts = util.get_opts(doc + usage, version=get_version(), argv=argv[1:])
    ))
    ctx.opts.flags.dbref = taxus.ScriptMixin.assert_dbref(ctx.opts.flags.dbref)
    # Load configuration
    ctx.config_file = list(confparse.expand_config_path(ctx.opts.flags.config)).pop()
    ctx.settings = settings = confparse.load_path(ctx.config_file)
    # Load SA session
    ctx.sa = get_session(ctx.opts.flags.dbref)

    # DEBUG: pprint(ctx.settings.todict())

    # Process arguments
    dirs = []
    # Shift paths from ctx arg
    if ctx.opts.args.CTX and os.path.exists(ctx.opts.args.CTX):
        ctx.opts.args.FILE.append(ctx.opts.args.CTX)
        ctx.opts.args.CTX = None

    # Sort out dirs from files
    # NOTE(review): removes items from FILE while iterating it, which can
    # skip the element following each removed one. Left unchanged.
    for arg in ctx.opts.args.FILE:
        if os.path.isdir(arg):
            ctx.opts.args.FILE.remove(arg)
            dirs.append(arg)
        elif os.path.isfile(arg):
            pass
        else:
            log.note("Unhandled path %r" % arg)
    ctx.opts.args.DIR = dirs

    # Set default path context
    if ctx.opts.flags.name:
        assert not ctx.opts.args.CTX
        ctx.opts.args.CTX = ctx.opts.flags.name

    elif not ctx.opts.args.CTX:
        ctx.opts.args.CTX = 'current'


    # XXX: create prefixes object on context
    ctx.prefixes = confparse.Values(dict(
        map= settings.finfo['prefix-map'],
        env={},
        map_={}
    ))
    # Guarantee the three built-in prefixes exist.
    if 'homedir' not in ctx.prefixes.map:
        ctx.prefixes.map['homedir'] = 'HOME=%s' % os.path.expanduser('~')
    if 'current' not in ctx.prefixes.map:
        ctx.prefixes.map['current'] = '$PWD:$HOME'
    if 'pwd' not in ctx.prefixes.map:
        ctx.prefixes.map['pwd'] = 'PWD=%s' % os.path.abspath('.')

    # Record which env var names map onto which prefix names.
    for prefix, path in ctx.prefixes.map.items():
        if '=' in path:
            envvar, path = path.split('=')
            if envvar in ctx.prefixes.env:
                assert ctx.prefixes.env[envvar] == prefix, (
                        ctx.prefixes.env[envvar], prefix )
            ctx.prefixes.env[envvar] = prefix

    # Pre-pocess binds from env flags

    if not isinstance(ctx.opts.flags.env, list):
        ctx.opts.flags.env = [ ctx.opts.flags.env ]

    for env_map in ctx.opts.flags.env:
        envvar, prefix = env_map.split('=')
        if envvar in ctx.prefixes.env:
            assert prefix == ctx.prefixes.env[envvar]
        else:
            ctx.prefixes.env[envvar] = prefix

        envvalue = os.getenv(envvar, None)
        if envvalue:
            ctx.prefixes.map[prefix] = "%s=%s" % ( envvar, envvalue )
            #ctx.prefixes.map_[prefix] = envvalue.split(':')

    # Post-process prefixes after passed flags, and resolve all values
    for prefix, spec in ctx.prefixes.map.items():
        if '=' in spec:
            envvar, spec = spec.split('=')
            if envvar in ctx.prefixes.env:
                assert ctx.prefixes.env[envvar] == prefix, (
                        ctx.prefixes.env[envvar], prefix )
            ctx.prefixes.env[envvar] = prefix

        specs = spec.split(':')
        set_ = []

        # Resolve each colon-separated path; $VAR entries become
        # '#prefixes/<name>' references to other prefixes.
        for idx, path in enumerate(specs):
            path = os.path.expanduser(path)
            if varname.match(path):
                refpref = ctx.prefixes.env[path[1:]]
                #refpath = ctx.prefixes.map[]
                path = '#prefixes/'+refpref

            elif '$' in path:
                pass
            #else:
            #    path = '#prefixes/'+prefix+':'+str(idx)

            set_.append(path)

        ctx.prefixes.map_[prefix] = set_

    ctx.pathrefs = ctx.prefixes.map_[ctx.opts.args.CTX]

    #DEBUG:
    #print ctx.opts.todict()
    #print pformat(ctx.prefixes.todict())
    #print pformat(ctx.pathrefs)

    # Preprocess filters to regex
    if 'FILTER' not in ctx.opts.args:
        ctx.opts.args.FILTER = []

    if not ctx.opts.args.FILTER:
        ctx.opts.args.FILTER = default_filters
    if ctx.opts.flags.documents:
        ctx.opts.args.FILTER = doc_filters + ctx.opts.args.FILTER
    # NOTE(review): stdlib fnmatch has `translate`, not `translating`;
    # presumably this is a project wrapper returning a compiled pattern --
    # confirm, since `.match` is called on the result below.
    for idx, filter in enumerate(ctx.opts.args.FILTER):
        if isinstance(filter, str):
            ctx.opts.args.FILTER[idx] = fnmatch.translating(filter)

    # Resolve FILE/DIR arguments
    files, dirs = [], []
    for arg in ctx.opts.args.FILE + ctx.opts.args.DIR:
        r = find_local(arg, search_path(ctx.pathrefs, ctx))
        if not r: continue
        prefix, file, dir = r
        if not dir:
            raise Exception("No path for %s" % arg)
        elif file:
            files.append((prefix, file))
        else:
            dirs.append((prefix, dir))

    print("Resolved arguments to %s dirs, %s files" % ( len(dirs), len(files) ))

    # XXX: if not ctx.opts.flags.directories:

    if ctx.opts.flags.recurse:
        # Resolve all dirs to file lists
        for p, d in dirs:
            for top, path_dirs, path_files in os.walk(os.path.join(p, d)):
                # Prune directories that fail every filter (in-place so
                # os.walk skips them).
                for path_dir in list(path_dirs):
                    for filter in ctx.opts.args.FILTER:
                        if not filter.match(os.path.basename(path_dir)):
                            path_dirs.remove(path_dir)
                            break

                if top.startswith('./'):
                    top = top[2:]

                for path_file in list(path_files):
                    filter = None
                    for filter in ctx.opts.args.FILTER:
                        if filter.match(os.path.basename(path_file)):
                            break
                        else:
                            continue
                    if not filter.match(os.path.basename(path_file)):
                        path_files.remove(path_file)
                    if path_file not in path_files:
                        continue
                    files.append((p, os.path.join(top, path_file)))

    print("Continue with %s files" % len(files))

    # NOTE(review): the metafile store adapter is stubbed out to None here.
    mfadapter = None
    res.persistence.PersistedMetaObject.stores['metafile'] = mfadapter


    prefix = None
    for p, f in files:

        if ctx.opts.flags.auto_prefix:

            # Derive the prefix (and relative name) from the path itself.
            prefixes = find_prefixes(p, ctx)
            assert prefixes # FIXME: how come only use first??
            prefix = prefixes.next()
            assert len(ctx.prefixes.map_[prefix]) == 1, prefix
            name = f[len(ctx.prefixes.map_[prefix][0])+1:]

        else:
            prefix = ctx.opts.args.CTX
            name = f[len(p)+1:]

        ref = prefix+':'+name

        if ctx.opts.flags.names_only:
            print ref

        else:
            # TODO: get INode through context? Also add mediatype & parameters
            # resolver. But needs access to finfo ctx..
            record = taxus.INode.get_instance(name=ref, _sa=ctx.sa)

            # GNU/Linux: -bi = --brief --mime
            # Darwin/BSD: -bI = --brief --mime
            #mediatype = lib.cmd('file --brief --mime "%s"', path).strip()
            # XXX: see basename-reg?

            #if ctx.opts.flags.update == True:

            # TODO: repopulate metadata;

            mf = res.metafile.Metafile(f)

            assert mf.date_accessed
            record.date_accessed = mf.date_accessed
            assert mf.date_modified
            record.date_modified = mf.date_modified

            if not record.node_id:
                ctx.sa.add(record)

            print record, record.date_updated, record.date_modified
            #sys.exit()

            if ctx.opts.flags.update:
                ctx.sa.commit()
Beispiel #20
0
 def rsr_commit(self, sa):
     """Commit pending changes to SQL storage."""
     log.note("Committing SQL changes")
     sa.commit()
     log.debug("Commit finished")
Beispiel #21
0
    opts = util.get_opts(__usage__, version=get_version())

    # Override dbref setting from schema
    # Raise log verbosity when -v/--verbosity was passed.
    if opts.flags.v or opts.flags.verbosity:
        log.category = 6
        # FIXME: log.category log.test()
    #if opts.flags.v
    #    print opts.flags.v
    #if opts.flags.verbosity:
    #    print opts.flags.verbosity


    # schema corresponds to module name
    if opts.args.schema:
        log.note("Using schema %r", opts.args.schema)
        schema = __import__(os.path.splitext(opts.args.schema)[0])
    else:
        log.note("Using local schema %s", __name__)
        schema = sys.modules[__name__]

    metadata = schema.SqlBase.metadata

    # A schema module may carry its own default DB reference.
    if opts.flags.dbref == __db__:
        if hasattr(schema, '__db__'):
            opts.flags.dbref = schema.__db__

    if ':/' not in opts.flags.dbref: # FIXME: scan for uri properly (regex)
        opts.flags.dbref = taxus.ScriptMixin.assert_dbref(opts.flags.dbref)
        print opts.flags.dbref
Beispiel #22
0
	def __sendCmdReset(self):
		"""Send the two-byte reset command to the simulator."""
		log.note("Resetting simulator")
		self.comm.write(b'rr')
Beispiel #23
0
	def __sendReport(self):
		"""Request the current test report from the simulator."""
		log.note("Requesting current report")

		self.comm.write(b'ss')
Beispiel #24
0
 def run(self, execution_graph, context, reporter, args=[], kwds={}):
     """
     execution_graph:ExecGraph
         ties command targets together as nodes in a
         network, expressing dependencies and other relations as typed edges.
     context:ContextStack  is an stack for multiple properties, where each
         property is stacked by attribute assignment and popped using 'del'.
         Used only for generator right now.
     args and kwds are argument vectors shared with all commands, updated and
         overriden from each command if needed

     NOTE(review): the mutable defaults args=[]/kwds={} are shared across
     calls; here kwds is mutated via update(). Confirm this sharing is
     intended before relying on it.
     """
     log.debug('Target resolver starting with %s', execution_graph.execlist)
     target = execution_graph.nextTarget()
     while target:
         log.note('Run: %s', target.name)
         assert isinstance(kwds, dict)
         # Execute Target Command routine (returns generator)
         context.generator = target.handler.func(
                         **self.select_kwds(target.handler.func, kwds, args))
         if not context.generator:
             log.warn("target %s did not return generator", target)
             # isgeneratorfunction(context.generator):
         else:
             # Handle results from Command; each yielded value is
             # dispatched on its type/interface.
             for r in context.generator:
                 if isinstance(r, str):
                     pass # resolve something from string notation?
                 # post-exec subcommands..
                 if res.iface.ITarget.providedBy(r):
                     if r.required:
                         # Required targets are resolved immediately by
                         # recursing into the resolver.
                         execution_graph.require(target, r)
                         self.run(execution_graph, context, reporter, args=args, kwds=kwds)
                     else:
                         execution_graph.append(target, r)
                 # push post-commands
                 elif isinstance(r, Targets):
                     for t in r:
                         execution_graph.require(target, t)
                 # replace argument vector
                 elif isinstance(r, Arguments):
                     #if r:
                     #    log.warn("Ignored %s", r)
                     args = r#args.extend(r)
                 # update keywords
                 elif isinstance(r, Keywords):
                     kwds.update(r)
                 # stop & set process return status bit
                 elif isinstance(r, int):
                     if r == 0:
                         assert not execution_graph, '???'
                     reporter.flush()
                     sys.exit(r)
                 # aggregate var. result data for reporting (to screen, log, ...)
                 elif res.iface.IReport.providedBy(r):
                     reporter.append(r)
                 elif res.iface.IReportable.providedBy(r):
                     reporter.append(r)
                 else:
                     log.warn("Ignored yield from %s: %r", target.name, r)
         del context.generator
         target = execution_graph.nextTarget()
Beispiel #25
0
	def __sendExecute(self):
		"""Ask the simulator to execute the pending test vector."""
		log.note("Requesting execute vector {0:d}/{1:d}".format(self.cur_vec_execute, len(self.impl.vs)))

		self.comm.write(b'ee')
Beispiel #26
0
	def __sendDeviceInfo(self):
		"""Request device information from the simulator."""
		log.note("Requesting device information")

		self.comm.write(b'dd')
Beispiel #27
0
	def run(self):
		"""
		Top-level simulator flow: load signal map, metadata, default vector
		and test vectors, then discover/init the HW simulator, upload HIZ,
		vectors and flags, execute the tests, and finally reset the device.
		"""
		log.note("Loading signal map")
		self.sm, self.hiz = self.__loadSignalMap(self.map_file_path)
		log.debug("SignalMap = " + str(self.sm))

		log.note("Loading metadata")
		self.md = Metadata(self.metadata_file_path)

		log.note("Loading default vector")
		self.dv = self.__loadDefaultVector(self.def_vector_path)
		log.debug("DefVector = " + str(self.dv))

		log.note("Building vector file list")
		if self.tc:
			# Explicit testcase numbers given: build the file names directly.
			vec_list = []
			for i in self.tc:
				vec_list.append(self.target_sim_path + "/" + self.comp + "_{0:02d}.vec".format(int(i)))
		else:
			vec_list = glob.glob(self.target_sim_path + "/" + self.comp + "*.vec")
		# FIX: filter out default-vector files without mutating the list
		# while iterating it; the old remove-in-loop could skip the entry
		# following each removed one.
		vec_list = [vec for vec in vec_list if vec.find("df.vec") == -1]
		vec_list.sort()
		self.md.testcases = len(vec_list)
		log.debug("VectorFiles = " + str(vec_list))

		log.note("Building vectors list")
		self.vs, self.flags = self.__loadVectors(vec_list)
		log.debug("Vectors:")
		for v in self.vs:
			log.debug(str(v))
		self.md.vectors = len(self.vs)

		log.note("Metadata = " + str(self.md))

		if not self.communication.discoverHwsim():
			log.error("HW simulator discovery failed")
		log.info("HW simulator discovery " + log.TermColor.LGREEN + "OK" + log.TermColor.NC)

		# The device advertises how many vectors it can hold.
		if len(self.vs) > self.device_info.vectors_cnt_max:
			log.error("Vector count not supported by hwsim ({0:d}/{1:d})".format(len(self.vs), self.device_info.vectors_cnt_max))

		if not self.communication.initSim():
			log.error("HW simulator initialization failed")
		log.info("HW simulator initialization " + log.TermColor.LGREEN + "OK" + log.TermColor.NC)

		if not self.communication.sendHiz():
			log.error("Couldn't send HIZ information")
		log.info("HW simulator sending HIZ " + log.TermColor.LGREEN + "OK" + log.TermColor.NC)

		if not self.communication.sendVectors():
			log.error("Couldn't send vectors")
		log.info("HW simulator sending vectors " + log.TermColor.LGREEN + "OK" + log.TermColor.NC)

		if not self.communication.sendFlags():
			log.error("Couldn't send flags")
		log.info("HW simulator sending flags " + log.TermColor.LGREEN + "OK" + log.TermColor.NC)

		self.communication.executeTests()
		if self.failed_testcases > 0:
			log.warning((log.TermColor.RED + "FINISHED (failed {0:d} of {1:d} testcases)" + log.TermColor.NC).format(self.failed_testcases, self.md.testcases))
		else:
			log.info((log.TermColor.LGREEN + "FINISHED (failed {0:d} of {1:d} testcases)" + log.TermColor.NC).format(self.failed_testcases, self.md.testcases))

		if not self.communication.sendReset():
			log.error("HW simulator RESET failed")
		else:
			log.note("HW simulator RESET " + log.TermColor.LGREEN + "OK" + log.TermColor.NC)

		return
Beispiel #28
0
    def __sanityCheckValues(self, meta):
        """
        Validate the expect values of every testcase entry against the role
        of the signal they address; invalid values are reported via
        log.error. Always returns True.

        NOTE(review): several messages below say 'sanityCheckVallues'
        (double 'l') -- runtime strings, left unchanged here.
        """
        for testcase in meta.testcases.list:
            for test_entry in testcase.content:
                if type(test_entry) is CLS.TestStep:
                    # set is handled in schema, take care of expect only
                    expect_list = test_entry.expect_list
                    for expect_entry in expect_list:
                        # Plain output: only 'h', 'l' or don't-care allowed.
                        if meta.signals.getSignal(expect_entry).role == "out":
                            if expect_list[expect_entry] not in ("h", "l",
                                                                 "-"):
                                log.error(
                                    "sanityCheckValues: TestStep out={0:s} expect is not ('h', 'l', '-'), got={1:s}"
                                    .format(expect_entry,
                                            expect_list[expect_entry]))
                        # Output vector: check each position separately.
                        elif meta.signals.getSignal(
                                expect_entry).role == "out_vec":
                            for value_pos in range(
                                    0, len(expect_list[expect_entry])):
                                if expect_list[expect_entry][
                                        value_pos] not in ("h", "l", "-"):
                                    log.error(
                                        "sanityCheckVallues: TestStep out_vec[{0:d}]={1:s} expect is not ('h', 'l', '-'), got={2:s}"
                                        .format(
                                            value_pos, expect_entry,
                                            expect_list[expect_entry]
                                            [value_pos]))

                        # Bidirectional signal: wider set of legal values.
                        elif meta.signals.getSignal(
                                expect_entry).role == "inout":
                            if expect_list[expect_entry] not in ("Z", "H", "L",
                                                                 "-", "h", "l",
                                                                 "X"):
                                log.error(
                                    "sanityCheckValues: TestStep inout={0:s} expect is not ('Z', 'H', 'L', '-', 'h', 'l', 'X'), got={1:s}"
                                    .format(expect_entry,
                                            expect_list[expect_entry]))
                        # Bidirectional vector: per-position check.
                        elif meta.signals.getSignal(
                                expect_entry).role == "inout_vec":
                            for value_pos in range(
                                    0, len(expect_list[expect_entry])):
                                if expect_list[expect_entry][
                                        value_pos] not in ("Z", "H", "L", "-",
                                                           "h", "l", "X"):
                                    log.error(
                                        "sanityCheckVallues: TestStep inout_vec[{0:d}]={1:s} expect is not ('Z', H', 'L', '-', 'h', 'l', 'X'), got={2:s}"
                                        .format(
                                            value_pos, expect_entry,
                                            expect_list[expect_entry]
                                            [value_pos]))
                elif type(test_entry) is CLS.TestSequence:
                    # Sequence vectors: strip the '#### ' interval prefix and
                    # spaces, then check every column against the signal at
                    # that position.
                    vector_list = test_entry.list
                    for value in vector_list:
                        vector = value[len("#### "):].replace(" ", "")
                        for pos in range(0, len(vector)):
                            signal = meta.signals.getSignalAtPosition(pos)
                            if signal.role.startswith("inout"):
                                # no way to distinguish between set and exp
                                if vector[pos] not in ("H", "L", "-", "0", "1",
                                                       "Z", "l", "h", "X"):
                                    log.error(
                                        "sanityCheckValues: TestSequence {0:s}={1:s} expect is not ('H', 'L', '-', '0', '1', 'Z', 'l', 'h', 'X'), got={2:s}"
                                        .format(signal.role, signal.name,
                                                vector[pos]))
                elif type(test_entry) is CLS.TestScript:
                    log.note(
                        "sanityCheckValues: no validation for script test available"
                    )
                else:
                    # should be handeld by schema, but...
                    log.error("sanityCheckValues: invalid test type")
        return True
@app.route("/import", methods=["GET", "POST"])
def import_shapefile():
    """Routing rule for this "Import Shapefile" page.

    GET returns the upload form; POST checks the passcode, optionally
    clears the database, then imports the uploaded shapefile inside the
    database insert lock, committing on success and rolling back on error.
    """
    db = app.config["DATABASE"]

    # GET (or anything but POST): just serve the upload form.
    if flask.request.method != 'POST':
        return iform_text

    # Guard clauses: reject bad passcode / missing upload early.
    if not password.check(flask.request.form['secret']):
        return "Incorrect passcode"
    upload = flask.request.files['file']
    if not upload:
        return "No file submitted"

    with db.insert_lock:
        if 'cleardata' in flask.request.form:
            db.clear()
        try:
            import_tool.import_shape_file(upload, db)
        except import_tool.error as err:
            db.rollback()
            return err.args[0]
        db.commit()
        return "ok"

if __name__ == '__main__':
    # Entry point: log start-up, then run the Flask server with the
    # debug/host/port settings taken from the application's config mapping.
    log.note("Starting server")
    app.run(debug=app.config["DEBUG"],
            host=app.config["HOST"],
            port=app.config["PORT"]
            )
Beispiel #30
0
    def rdc_run_embedded_issue_scan(self, sa, issue_format=None, opts=None, *paths):

        """
        Main function: scan multiple sources and print/log embedded issues
        found.

        Parameters:
            sa: database session, passed through to the tag Parser.
            issue_format: key into EmbeddedIssue.formats selecting the output
                formatter; forced to 'id' when opts.quiet is set.
            opts: options object; .quiet is read to control verbosity.
            *paths: files/directories to scan; defaults to the current dir.

        Fixes vs. the previous revision:
        - The unreachable "old clean/rewrite" code that followed the final
          ``return`` contained a ``yield``, which made the whole function a
          generator -- calling it did nothing until iterated.  That dead tail
          has been removed so the scan actually runs when called.
        - Source files are read via a context manager instead of leaking an
          open handle per file.
        - ``print`` statements were converted to the function form used
          elsewhere in this file.
        """

        if not paths:
            paths = ['.']

        # TODO: make ascii peek optional, charset configurable
        # TODO: implement contexts, ref per source
        context = ''
        source_iter = self.walk_paths(paths)

        # Pre-compile tag patterns once for all sources.  XXX: per context
        # NOTE(review): `rc` is not defined in this scope; presumably a
        # module-level runtime-config global -- confirm.
        matchbox = compile_rdc_matchbox(rc)

        taskdocs = {}

        for source in source_iter:
            # Close the handle promptly rather than leaking one per file.
            with open(source) as fp:
                data = fp.read()

            lines = get_lines(data)

            srcdoc = SrcDoc(source, len(lines))
            taskdocs[source] = srcdoc

            parser = Parser(sa, matchbox, source, context, data, lines)

            for tag in parser.find_tags():

                try:
                    cmt = parser.for_tag(srcdoc, tag)
                except Exception:
                    # A tag whose surrounding comment span cannot be found is
                    # reported (unless quiet) and skipped.
                    if not opts.quiet:
                        log.err("Unable to find comment span for tag '%s' at %s:%s " % (
                            parser.data[tag.start:tag.end], srcdoc.source_name, tag.char_span))
                    continue

                if not cmt:
                    continue

                # XXX
                srcdoc.scei.append(cmt)

                if opts.quiet:
                    issue_format = 'id'

                print(EmbeddedIssue.formats[issue_format](cmt, data))
        return
Beispiel #31
0
import log

# Configure trace output of the project-local `log` module: presumably hide
# file name and line number, show the calling function -- TODO confirm
# against the log module's API.
log.trace(file=False, line=False, caller=True)


def test():
    """Emit one message at each of three severities to exercise the logger."""
    log.error("ERROR")
    log.warning("WARNING")
    log.success("SUCCESS")


test()
log.info("INFO")
log.debug("DEBUG")

# Suppress ERROR-level messages, then emit one of each to show the effect:
# the second log.error call is expected to be silenced, log.note still prints.
log.suppress("ERROR")
log.error("ERROR")
log.note("NOTE")