Exemplo n.º 1
0
    def calc_stats(self, just_save=False):
        """Compute patch statistics across worker threads, then persist them.

        Args:
            just_save: when True, skip computation and only write the
                current ``self.stats`` to disk.

        Returns:
            The (possibly freshly computed) ``self.stats`` object.
        """
        if just_save:
            logging.trace('just_save')
            # Note the '%' formats the joined path template with the postfix.
            save(
                os.path.join(self.save_dir, 'pat_stats%s.npy') %
                self.file_postfix, self.stats)
            return self.stats

        t0 = time.time()
        # Split the training iterations across the worker threads.
        divs = self.divide_parts(self.hps.train_its, self.n_threads)
        out_que = queue.Queue(self.n_threads)
        cnt_mb_que = queue.Queue()
        # initialize threads
        for t in range(self.n_threads):
            self.threads.append(
                Thread(target=self.calc_patch_stats,
                       args=(t, self.mb_queue, divs[t], out_que, cnt_mb_que)))
            # NOTE(review): indexing by t assumes self.threads was empty
            # before this loop -- confirm against the caller.
            self.threads[t].start()
        print('')
        for t in range(self.n_threads):
            self.threads[t].join()
        # Combine per-thread partial results, then derive scalar summaries.
        self.weighted_stats(out_que)
        self.calc_scalar_stats()
        save(
            os.path.join(self.save_dir, 'pat_stats%s.npy') % self.file_postfix,
            self.stats)
        logging.trace('calc. stats: time = %3.0f s ' % (time.time() - t0))
        return self.stats
Exemplo n.º 2
0
def process(source_assets,
            staged_assets,
            output_path,
            droid_key,
            py_exe=None,
            py_version=(2, 7)):
    """Compyle and package the client Python scripts.

    If prebuilt Python .pak assets are present in *source_assets*, they are
    staged as-is and no compyle step runs.

    Args:
        source_assets: mapping of client path -> source asset.
        staged_assets: mapping of client path -> staged asset; updated in
            place for prebuilt paks.
        output_path: destination for the generated package.
        droid_key: encryption key passed through to the packager.
        py_exe: optional path to the Python interpreter used for compyling.
        py_version: (major, minor) int tuple of the interpreter version.
    """
    logging.info("Processing client python...")

    def iter_python_paks():
        # Fresh generator per call, so it can be consumed twice below.
        for client_path, source_asset in source_assets.items():
            if "python" in source_asset.categories and client_path.suffix.lower(
            ) == ".pak":
                yield client_path, source_asset

    # Check for any Python paks -- if they exist, bail.
    if any(iter_python_paks()):
        logging.warning(
            "Using prebuilt Python packages -- this is not recommended!")
        for client_path, source_asset in iter_python_paks():
            logging.trace(f"Prebuilt Python: '{client_path.name}'")
            staged_assets[client_path].file_name = client_path
        return

    # The compyler was written assuming a minimum of Python 2.3
    if py_version[0] == 2 and py_version[1] < 3:
        # BUG FIX: str.join() requires strings; py_version holds ints, so
        # the original `'.'.join(py_version)` raised TypeError.
        version_str = '.'.join(map(str, py_version))
        logging.critical(
            f"Python {version_str} is not supported by the compyler.")
        logging.critical("No Python.pak will be generated!")
        return

    module_code = _compyle_all(source_assets, staged_assets, py_exe)
    if module_code:
        _package(source_assets, staged_assets, module_code, output_path,
                 droid_key)
Exemplo n.º 3
0
 def _onCookieAdded(self, cookie):
     """Capture the authn cookie when the embedded browser sets it.

     Persists the credential to the credential file, mirrors the cookie
     into the requests session and the cookie jar, and saves the jar to
     every configured path that exists.
     """
     cookie_str = str(cookie.toRawForm(QNetworkCookie.NameAndValueOnly), encoding='utf-8')
     cookie_name = str(cookie.name(), encoding='utf-8')
     cookie_val = str(cookie.value(), encoding='utf-8')
     # Only react to the configured authn cookie from the configured host.
     if (cookie_name == self.authn_cookie_name) and (cookie.domain() == self.config.get("host")):
         logging.trace("%s cookie added:\n\n%s\n\n" % (self.authn_cookie_name, cookie_str))
         self.credential["cookie"] = "%s=%s" % (self.authn_cookie_name, cookie_val)
         host = self.auth_url.host()
         cred_entry = dict()
         cred_entry[host] = self.credential
         if self.credential_file:
             # Merge this host's credential into the on-disk store.
             creds = read_credential(self.credential_file, create_default=True)
             creds.update(cred_entry)
             write_credential(self.credential_file, creds)
         self.token = cookie_val
         # Mirror the cookie into the requests session.
         self._session.cookies.set(self.authn_cookie_name, cookie_val, domain=host, path='/')
         if self.cookie_jar is not None:
             self.cookie_jar.set_cookie(
                 create_cookie(self.authn_cookie_name,
                               cookie_val,
                               domain=host,
                               path='/',
                               expires=0,
                               discard=False,
                               secure=True))
             # Persist the jar to each configured save location that exists.
             for path in self.config.get("cookie_jars", DEFAULT_CONFIG["cookie_jars"]):
                 path_dir = os.path.dirname(path)
                 if os.path.isdir(path_dir):
                     logging.debug("Saving cookie jar to: %s" % path)
                     self.cookie_jar.save(path, ignore_discard=True, ignore_expires=True)
                 else:
                     logging.debug("Cookie jar save path [%s] does not exist." % path_dir)
Exemplo n.º 4
0
def load_asset_database(mfs_path, list_path, db_type):
    """Load manifests and auth-lists into a merged asset database.

    Args:
        mfs_path: path to the manifest store.
        list_path: path to the legacy auth-list store.
        db_type: key identifying the ManifestDB backend to use.

    Returns:
        An AssetDb namedtuple (assets, manifests, lists); conflicting
        asset entries are mapped to None in ``assets``.

    Raises:
        AssetError: if *db_type* does not name a known backend.
    """
    logging.info("Reading asset database...")

    db_cls = manifest.ManifestDB.get(db_type)
    if db_cls is None:
        raise AssetError(f"Invalid asset db type '{db_type}'")
    manifests, lists = db_cls.load_db(mfs_path, list_path)

    # Merge assets into case insensitive dict and verify hashes. Use a custom type so we don't
    # compare the file flags, which can legally differ (eg sound decompression)
    asset = namedtuple(
        "AssetEntry",
        ("file_hash", "download_hash", "file_size", "download_size"))
    assets, conflicts = {}, 0
    for mfs_entries in manifests.values():
        for mfs_entry in mfs_entries:
            mfs_asset = asset(mfs_entry.file_hash, mfs_entry.download_hash,
                              mfs_entry.file_size, mfs_entry.download_size)
            if assets.setdefault(mfs_entry.file_name, mfs_asset) != mfs_asset:
                # logging.warn() is deprecated; warning() is the canonical name.
                logging.warning(f"CONFLICT: '{mfs_entry.file_name}'")
                # BUG FIX: was `conflicts += conflicts`, which doubles 0
                # forever and never counted a single conflict.
                conflicts += 1
                assets[mfs_entry.file_name] = None
    if conflicts:
        logging.warning(f"Discarded {conflicts} conflicting asset entries!")
    logging.trace(
        f"Loaded {len(assets)} asset entries from {len(manifests)} manifests, with {len(lists)} legacy auth-lists."
    )

    db = namedtuple("AssetDb", ("assets", "manifests", "lists"))
    return db(assets, manifests, lists)
Exemplo n.º 5
0
 def _onCookieRemoved(self, cookie):
     """Clear the authn cookie from the local jar when the browser drops it."""
     raw_form = str(cookie.toRawForm(QNetworkCookie.NameAndValueOnly), encoding='utf-8')
     name = str(cookie.name(), encoding='utf-8')
     # Ignore everything except our authn cookie for our own host.
     if name != self.authn_cookie_name or cookie.domain() != self.url().host():
         return
     logging.trace("%s cookie removed:\n\n%s\n\n" % (self.authn_cookie_name, raw_form))
     if self.cookie_jar:
         self.cookie_jar.clear(name, path=cookie.path(), domain=cookie.domain())
Exemplo n.º 6
0
def check_python_version(py_exe, py_version=(2, 7)):
    """Return True iff *py_exe* is a runnable interpreter of *py_version*."""
    logging.debug(f"Checking Python interpreter version: {py_exe}")
    # Guard clauses: reject missing or non-file inputs up front.
    if not py_exe or not py_exe.is_file():
        logging.debug("Non-file input")
        return False

    result = subprocess.run((str(py_exe), "-V"),
                            stdout=subprocess.PIPE,
                            stderr=subprocess.STDOUT,
                            encoding="ascii")
    if result.returncode != 0:
        logging.debug("Nonzero returncode")
        return False

    logging.trace(f"{py_exe}: {result.stdout}")

    expected_version = f"Python {py_version[0]}.{py_version[1]}"
    result_version = result.stdout.strip()
    if result_version.startswith(expected_version):
        return True
    logging.error(
        f"Python interpreter '{py_exe}' is wrong version--expected '{expected_version}' got '{result_version}'"
    )
    return False
Exemplo n.º 7
0
    def read_param_string(self, param_str):
        """
        Parse the dict with the parametrization strings and evaluate for
        the bin energies needed.

        param_str is nested: flavour -> interaction type -> axis ->
        {parameter: lambda-string}. Each parameter is mapped to an
        (n_e, n_cz) array of the function evaluated at the energy bin
        centers, tiled across all cos(zenith) bins.
        """

        evals = get_bin_centers(self.ebins)
        n_e = len(self.ebins) - 1
        n_cz = len(self.czbins) - 1

        parametrization = {}
        for flavour in param_str:
            parametrization[flavour] = {}
            for int_type in param_str[flavour]:  #['cc', 'nc']
                logging.debug('Parsing function strings for %s %s' %
                              (flavour, int_type))
                parametrization[flavour][int_type] = {}
                for axis in param_str[flavour][
                        int_type]:  #['energy', 'coszen']
                    parameters = {}
                    for par, funcstring in param_str[flavour][int_type][
                            axis].items():
                        # this should contain a lambda function:
                        # SECURITY NOTE: eval() runs arbitrary code -- the
                        # parametrization strings must come from trusted
                        # settings only.
                        function = eval(funcstring)
                        logging.trace('  function: %s' % funcstring)
                        # evaluate the function at the given energies
                        vals = function(evals)
                        # repeat for all cos(zen) bins
                        parameters[par] = np.repeat(vals, n_cz).reshape(
                            (n_e, n_cz))
                    parametrization[flavour][int_type][axis] = copy(parameters)
        return parametrization
Exemplo n.º 8
0
    def read_param_string(self, param_str):
        """
        Parse the dict with the parametrization strings and evaluate for
        the bin energies needed.
        """
        evals = get_bin_centers(self.ebins)
        n_e = len(self.ebins) - 1
        n_cz = len(self.czbins) - 1

        parametrization = {}
        for flavour, int_types in param_str.items():
            parametrization[flavour] = {}
            for int_type, axes in int_types.items():  # ['cc', 'nc']
                logging.debug('Parsing function strings for %s %s'
                              % (flavour, int_type))
                parametrization[flavour][int_type] = {}
                for axis, par_funcs in axes.items():  # ['energy', 'coszen']
                    parameters = {}
                    for par, funcstring in par_funcs.items():
                        # this should contain a lambda function:
                        function = eval(funcstring)
                        logging.trace('  function: %s' % funcstring)
                        # evaluate the function at the given energies
                        vals = function(evals)
                        # repeat for all cos(zen) bins
                        parameters[par] = np.repeat(vals, n_cz).reshape((n_e, n_cz))
                    parametrization[flavour][int_type][axis] = copy(parameters)
        return parametrization
Exemplo n.º 9
0
    def collect_root_files(self):
        """Gather .root files from self.root_files plus any scan directories.

        Scans ``self.scandirs`` in reverse order; when
        ``self.read_one_scandir`` is set, the first scandir that yields
        files wins and the others are discarded.

        Returns:
            List of .root file paths.

        Raises:
            RuntimeError: if no .root files are found anywhere.
        """
        root_files = copy.copy(self.root_files)

        if self.scandirs:
            for scandir in self.scandirs[::-1]:
                if not scandir.endswith('/'):
                    scandir += '/'
                root_files_this_scandir = glob.glob(scandir + self.globpat +
                                                    '.root')
                logging.debug('Found {0} root files in {1}'.format(
                    len(root_files_this_scandir), scandir))
                root_files.extend(root_files_this_scandir)
                if self.read_one_scandir and root_files_this_scandir:
                    logging.warning(
                        'Using only root files found in {0} (ignoring others for globpat {1})'
                        .format(scandir, self.globpat))
                    self.scandirs = [scandir]
                    break
            logging.debug('Found {0} root files in {1}'.format(
                len(root_files), ', '.join(self.scandirs)))

        if not root_files:
            # BUG FIX: corrected 'Attemped' typo in the error message.
            raise RuntimeError(
                'Attempted to retrieve scan for x:{0} y:{1}, '
                'but no .root files were found. Passed list of dirs to look in:\n'
                .format(self.x_variable, self.y_variable) +
                '\n'.join(self.scandirs))

        logging.trace('List of root files:\n' + '\n'.join(root_files))
        return root_files
Exemplo n.º 10
0
def _linux_pkg_checks() -> None:
    """Check that all the packages required by SIERRA are installed on whatever
    flavor of Linux SIERRA is running on.

    Raises:
        RuntimeError: if any required .deb package is missing.
    """
    import distro

    dist = distro.id()
    os_info = distro.os_release_info()

    # BUG FIX: ID_LIKE is optional in os-release (absent on Debian itself,
    # which previously raised KeyError) and may hold a space-separated list
    # such as "ubuntu debian", so check the distro id and each token.
    id_like = os_info.get('id_like', '')
    if dist in ('debian', 'ubuntu') or any(
            token in ('debian', 'ubuntu') for token in id_like.split()):
        import apt
        cache = apt.Cache()
        missing = []

        for pkg in kRequiredDebPackages:
            logging.trace("Checking for .deb package '%s'", pkg)
            if pkg not in cache or not cache[pkg].is_installed:
                missing.append(pkg)

        if missing:
            raise RuntimeError(
                (f"Required .deb packages {missing} missing on "
                 f"Linux distribution '{dist}'. Install all "
                 "required packages before running SIERRA! "
                 "(Did you read the \"Getting Started\" docs?)"))

    else:
        logging.warning(
            ("Unknown Linux distro '%s' detected: skipping package "
             "check"), dist)
        logging.warning(
            ("If SIERRA crashes it might be because you don't have "
             "all the required packages installed"))
Exemplo n.º 11
0
def drop_pdfindices(card_file, category_pats=None):
    """Remove pdfindex_* lines matching any category pattern from a datacard.

    Args:
        card_file: path to the datacard; rewritten in place (unless in
            test mode).
        category_pats: regex fragments matched after the 'pdfindex_' prefix;
            defaults to the high-pT reco bin.
    """
    # NOTE(review): test mode is probed via differentials.core here but via
    # bare core below -- confirm both names refer to the same module.
    if differentials.core.is_testmode():
        return
    with open(card_file, 'r') as card_fp:
        card = card_fp.read()

    if category_pats is None:
        category_pats = ['recoPt_600p0_10000p0']

    lines = []
    for line in card.split('\n'):
        for category_pat in category_pats:
            if re.match(r'pdfindex_.*{0}'.format(category_pat), line):
                logging.debug(
                    'Dropping following line from {0} (matched to {2}):\n{1}'.
                    format(card_file, line, category_pat))
                break
        else:
            # for/else: runs only when no pattern matched, so the line is kept.
            lines.append(line)

    new_card = '\n'.join(lines)
    logging.trace('Datacard after removing lines:\n{0}'.format(new_card))
    logging.info(
        'Writing new card after deleting lines to {0}'.format(card_file))
    if not core.is_testmode():
        with open(card_file, 'w') as card_fp:
            card_fp.write(new_card)
Exemplo n.º 12
0
def get_next_address(list_obj, item):
    """Return the element following *item* in *list_obj*.

    If *list_obj* is a dict, its sorted keys are used. If *item* is not
    present, the largest smaller element is used as the anchor instead.

    Returns:
        The successor element, or None when inputs are None, no anchor
        exists, or *item* is the last element.
    """
    # Idiom fixes: identity comparison for None; isinstance over type().
    if list_obj is None or item is None:
        return None

    if isinstance(list_obj, dict):
        list_obj = sorted(list_obj)

    if item not in list_obj:
        logging.trace('Item not in list. Estimating position.')
        # Walk backwards to find the largest element smaller than item.
        for x in range(len(list_obj) - 1, -1, -1):
            address = list_obj[x]
            if item > address:
                item = address
                break
    if item not in list_obj:
        logging.trace('Item not in list. Returning None.')
        return None

    # Find index of the address and get next one up.
    idx = list_obj.index(item)
    if idx < len(list_obj) - 1:
        return list_obj[idx + 1]
    return None
Exemplo n.º 13
0
    def __init__(self,
                 identifier,
                 execparams,
                 propertydefs=(),
                 loggerName=None):
        """propertydefs is a iterable of tuples that contain

        (propid, propname, type, mode, defaultvalue, units, action, kinds)

        defaultvalue should be None if there is no default, otherwise:
            simple - a single str, int, float, bool
            simple sequence - an iterable of str, int, float, or bool
            struct - a dictionary there the key is the "id" and the value is a tuple
                     of (propname, type, mode)
            struct sequence - a iterable of dictionaries following the struct format
        """
        # Guards concurrent access to the property set.
        self.propertySetAccess = threading.Lock()
        self._id = identifier
        self._started = False
        # Idiom fix: identity comparison for None. Fall back to the
        # component id when no logger name is supplied.
        if loggerName is None:
            self._log = logging.getLogger(self._id)
        else:
            self._log = logging.getLogger(loggerName)
        self._name = execparams.get("NAME_BINDING", "")
        # The base resource class manages properties ...
        self._props = PropertyStorage(self, propertydefs, execparams)
        self._props.initialize()
        # ... and also manages ports
        self.__loadPorts()

        logging.trace("Initial property storage %s", self._props)
Exemplo n.º 14
0
 def handle_dump(self, interfaces, neighbours, xroutes, routes):
     """Rebuild neighbour, lock, and interface state from a babel dump.

     NOTE(review): route.prefix is treated as a raw byte string (Python 2
     str); the "\\0"*10 + "\\xff\\xff" prefix test skips IPv4-mapped IPv6
     routes -- confirm against the babel wire format in use.
     """
     # neighbours = {neigh_prefix: (neighbour, {dst_prefix: route})}
     n = dict(((n.address, n.ifindex), (n, {})) for n in neighbours)
     unidentified = set(n)
     self.neighbours = neighbours = {}
     a = len(self.network)
     for route in routes:
         assert route.flags & 1, route  # installed
         if route.prefix.startswith("\0\0\0\0\0\0\0\0\0\0\xff\xff"):
             continue
         assert route.neigh_address == route.nexthop, route
         address = route.neigh_address, route.ifindex
         neigh_routes = n[address]
         ip = utils.binFromRawIp(route.prefix)
         if ip[:a] == self.network:
             prefix = ip[a : route.plen]
             # A neighbour is "identified" once it advertises an own
             # prefix (refmetric 0) inside our network.
             if prefix and not route.refmetric:
                 neighbours[prefix] = neigh_routes
                 unidentified.remove(address)
         else:
             prefix = None
         neigh_routes[1][prefix] = route
     self.locked.clear()
     if unidentified:
         routes = {}
         for address in unidentified:
             neigh, r = n[address]
             # Zero cost multiplier marks the neighbour as locked.
             if not neigh.cost_multiplier:
                 self.locked.add(address)
             routes.update(r)
         if routes:
             # All routes via unidentified neighbours are grouped under None.
             neighbours[None] = None, routes
             logging.trace("Routes via unidentified neighbours. %r", neighbours)
     self.interfaces = dict((i.index, name) for i, name in interfaces)
     self.handler.babel_dump()
Exemplo n.º 15
0
    def read_manifest(cls, path):
        """Yield manifest.ManifestEntry objects parsed from the CSV file at *path*.

        Blank lines are skipped; malformed lines are logged (with 1-based
        line numbers) and skipped.
        """
        logging.debug(f"Reading manifest: {path}")
        with path.open(mode="r") as f:
            # BUG FIX: start=1 so reported line numbers match what an
            # editor shows (previously 0-based).
            for i, line in enumerate(f, start=1):
                line = line.strip()
                if not line:
                    continue
                data = line.split(',')
                if len(data) != 7:
                    logging.error(f"Malformed manifest '{path}' line #{i}")
                    continue

                try:
                    entry = manifest.ManifestEntry()
                    entry.file_name = Path(PureWindowsPath(data[0]))
                    entry.download_name = Path(PureWindowsPath(data[1]))
                    entry.file_hash = data[2]
                    entry.download_hash = data[3]
                    entry.file_size = int(data[4])
                    entry.download_size = int(data[5])
                    entry.flags = int(data[6])
                except Exception:
                    # Unused `as e` binding removed; any parse failure
                    # skips just this line.
                    logging.error(f"Malformed manifest '{path}' line #{i}")
                else:
                    logging.trace(manifest.pformat(entry))
                    yield entry
Exemplo n.º 16
0
def _osx_pkg_checks() -> None:
    """Check that all the packages required by SIERRA are installed on whatever
    version of OSX SIERRA is running on.

    Raises:
        RuntimeError: if any required homebrew package (formula or cask)
        is missing.
    """
    missing = []

    for pkg in kRequiredOSXPackages:
        logging.trace("Checking for homebrew package '%s'", pkg)
        # BUG FIX: both command strings were missing the f-prefix, so the
        # literal text '{pkg}' was grepped instead of the package name.
        p1 = subprocess.Popen(f'brew list | grep {pkg}',
                              shell=True,
                              stderr=subprocess.DEVNULL,
                              stdout=subprocess.DEVNULL)
        p2 = subprocess.Popen(f'brew list --cask | grep {pkg}',
                              shell=True,
                              stderr=subprocess.DEVNULL,
                              stdout=subprocess.DEVNULL)
        p1.wait()
        p2.wait()

        # Missing only if it is neither a formula nor a cask.
        if p1.returncode != 0 and p2.returncode != 0:
            missing.append(pkg)

    if missing:
        # BUG FIX: added the missing space between "running" and "SIERRA!".
        raise RuntimeError((f"Required brew package {missing} missing on OSX. "
                            "Install all required packages before running "
                            "SIERRA! (Did you read the \"Getting Started\" "
                            "docs?)"))
def read_pht(matched_files_tuple, star_range_0, apertureidx: int, fake_references=False):
    """Read one .pht photometry file and collect (mag, err) per reference star.

    Args:
        matched_files_tuple: (file index, file path) pair.
        star_range_0: 0-based index/slice of stars to return.
        apertureidx: aperture index passed through to read_pht_file.
        fake_references: when True, stars without a reference id use their
            own position index instead of being skipped.

    Returns:
        (fileidx, jd, fwhm, nrstars, collected) where collected rows are
        [mag, err], np.inf where no measurement exists.
    """
    fileidx, file_entry = matched_files_tuple
    # open the file for reading; 'b' is important -> binary
    with open(file_entry, mode='rb') as file:
        fileContent = file.read()
        photheader, _, nrstars, stars, stardata = read_pht_file(file_entry, fileContent, only_apertureidx=int(apertureidx))
        # we don't use nrstars because this could be e.g. 1000, but with stars which have id's > 1000
        collect = np.full([len(init.star_list), 2], np.inf, dtype=float)

        fwhm = [photheader.fwhm_exp, photheader.fwhm_mean, photheader.fwhm_err]
        jd = photheader.jd

        # for every star
        for staridx, starentry in enumerate(stardata):
            ref_id_0 = stars[staridx].ref_id - 1 # if photometry then all is -1

            # BUG FIX: was `ref_id_0 is -2` -- identity comparison on ints
            # only works via CPython's small-int cache; use equality.
            if ref_id_0 == -2: # -1 - 1
                if fake_references:
                    ref_id_0 = staridx
                else:
                    continue
            try:
                if ref_id_0 > len(collect):
                    logging.trace(f"Star idx too big for collect shape: staridx: {staridx}, ref_id_0: {ref_id_0}, shape: {collect.shape}")
                collect[ref_id_0] = [starentry.mag, starentry.err]
            except Exception:
                # Narrowed from a bare except so KeyboardInterrupt/SystemExit
                # still propagate.
                logging.error(f"staridx: {staridx}, ref_id_0: {ref_id_0}, shape: {collect.shape}")
        collected = collect[star_range_0]
        return fileidx, jd, fwhm, nrstars, collected
Exemplo n.º 18
0
 def _onCookieAdded(self, cookie):
     """Capture the authn cookie when the embedded browser sets it.

     Persists the credential, mirrors the cookie into the requests
     session, then navigates to the authn session page.
     """
     cookie_str = str(cookie.toRawForm(QNetworkCookie.NameAndValueOnly),
                      encoding='utf-8')
     cookie_name = str(cookie.name(), encoding='utf-8')
     cookie_val = str(cookie.value(), encoding='utf-8')
     # Only react to the configured authn cookie from the configured host.
     if (cookie_name
             == self.authn_cookie_name) and (cookie.domain()
                                             == self.config.get("host")):
         logging.trace("%s cookie added:\n\n%s\n\n" %
                       (self.authn_cookie_name, cookie_str))
         self.credential["cookie"] = "%s=%s" % (self.authn_cookie_name,
                                                cookie_val)
         host = self.auth_url.host()
         cred_entry = dict()
         cred_entry[host] = self.credential
         if self.credential_file:
             # Merge this host's credential into the on-disk store.
             creds = read_credential(self.credential_file,
                                     create_default=True)
             creds.update(cred_entry)
             write_credential(self.credential_file, creds)
         self.token = cookie_val
         # Mirror the browser cookie into the requests session.
         self._session.cookies.set(self.authn_cookie_name,
                                   cookie_val,
                                   domain=host,
                                   path='/')
         self.authn_session_page.setUrl(
             QUrl(self.auth_url.toString() + "/authn/session"))
Exemplo n.º 19
0
    def find_sdls(sdl_mgrs,
                  descriptor_name,
                  embedded_sdr=False,
                  optional=False):
        """Resolve the SDL files and descriptor names needed by a descriptor.

        Recursively follows embedded state-descriptor variables.

        Args:
            sdl_mgrs: mapping of sdl file -> descriptor manager.
            descriptor_name: descriptor to resolve.
            embedded_sdr: True when resolving an embedded descriptor; a
                miss is then always fatal.
            optional: when True, a missing top-level descriptor is logged
                instead of raised.

        Returns:
            (dependencies, descriptors): sets of sdl file names and
            descriptor names.

        Raises:
            AssetError: when a required descriptor cannot be found.
        """
        dependencies = set()
        descriptors = set()

        for sdl_file, mgr in sdl_mgrs.items():
            # Be sure to loop over all files in case someone moves a record to a new file
            # from version to version. Please don't do that, though. It's mean :<
            for descriptor in mgr.find_descriptors(descriptor_name):
                dependencies.add(sdl_file)
                descriptors.add(descriptor.name)

                # We need to see if there are any embedded state descriptor variables...
                # (already-seen descriptors are filtered to avoid cycles)
                sdrs = (i for i in descriptor.variables
                        if i.descriptor is not None
                        and i.descriptor not in descriptors)
                for variable in sdrs:
                    more_dependencies, more_descriptors = find_sdls(
                        sdl_mgrs, variable.descriptor, True)
                    dependencies.update(more_dependencies)
                    descriptors.update(more_descriptors)

        if descriptor_name not in descriptors:
            if embedded_sdr:
                raise AssetError(
                    f"Embedded SDL Descriptor '{descriptor_name}' is missing.")
            elif not optional:
                raise AssetError(
                    f"Top-level SDL '{descriptor_name}' is missing.")
            else:
                logging.trace(
                    f"Optional SDL Descriptor '{descriptor_name}' not found.")
        return dependencies, descriptors
Exemplo n.º 20
0
def execute(cmd,
            capture_output=False,
            ignore_testmode=False,
            py_capture_output=False):
    """Run a shell command, optionally capturing its output.

    Args:
        cmd: the command, as a single string or a list of fragments that
            are joined with spaces (empty fragments are dropped).
        capture_output: capture stdout via a RedirectStdout context.
        ignore_testmode: NOTE(review): despite the name, setting this True
            currently *prevents* execution (the condition requires both
            flags false) -- confirm intended semantics with callers.
        py_capture_output: use subprocess.check_output instead of os.system.

    Returns:
        The captured output when requested, otherwise None.
    """
    # Allow both lists and strings to be passed as the cmd
    # NOTE(review): `basestring` implies Python 2 -- this name does not
    # exist under Python 3.
    if not isinstance(cmd, basestring):
        cmd = [l for l in cmd if not len(l.strip()) == 0]
        cmd_str = '\n    '.join(cmd)
        cmd_exec = ' '.join(cmd)
    else:
        cmd_str = cmd
        cmd_exec = cmd

    logging.info('Executing the following command:\n{0}'.format(cmd_str))
    logging.trace('Actual passed command: {0}'.format(cmd_exec))
    if not (is_testmode()) and not (ignore_testmode):
        # SECURITY NOTE: cmd_exec runs through the shell; callers must not
        # pass untrusted input.
        if py_capture_output:
            return subprocess.check_output(cmd_exec, shell=True)
        elif capture_output:
            with RedirectStdout() as redirected:
                os.system(cmd_exec)
                output = redirected.read()
            return output
        else:
            os.system(cmd_exec)
Exemplo n.º 21
0
    def __init__(self, identifier, execparams, propertydefs=(), loggerName=None):
        """propertydefs is a iterable of tuples that contain

        (propid, propname, type, mode, defaultvalue, units, action, kinds)

        defaultvalue should be None if there is no default, otherwise:
            simple - a single str, int, float, bool
            simple sequence - an iterable of str, int, float, or bool
            struct - a dictionary there the key is the "id" and the value is a tuple
                     of (propname, type, mode)
            struct sequence - a iterable of dictionaries following the struct format
        """
        # Guards concurrent access to the property set.
        self.propertySetAccess = threading.Lock()
        self._id = identifier
        self._started = False
        # Fall back to the component id when no logger name is supplied.
        if loggerName == None:
            self._log = logging.getLogger(self._id)
        else:
            self._log = logging.getLogger(loggerName)
        self._name = execparams.get("NAME_BINDING", "")
        # The base resource class manages properties ...
        self._props = PropertyStorage(self, propertydefs, execparams)
        self._props.initialize()
        # ... and also manages ports
        self.__loadPorts()

        logging.trace("Initial property storage %s", self._props)
Exemplo n.º 22
0
def raspberry_pi_infos():
    """
    Returns infos about current raspberry pi board

    Note:
        https://elinux.org/RPi_HardwareHistory#Board_Revision_History

    Returns:
        dict: raspberry pi board infos::

            {
                date (string): release date
                model (string): raspberry pi model
                pcbrevision (string): PCB revision
                ethernet (bool): True if ethernet is natively available on board
                wireless (bool): True if wifi is natively available on board,
                audio (bool): True if audio is natively available on board
                gpiopins (int): number of pins available on board
                memory (string): memory amount
                notes (string): notes on board
                revision (string): raspberry pi revision
            }

    Raises:
        Exception: when not running on an ARM platform.
    """
    # BUG FIX: 64-bit Raspberry Pi OS reports 'aarch64', which the old
    # startswith('arm') check rejected; 32-bit userlands report 'armv6l'/'armv7l'.
    if not platform.machine().startswith(('arm', 'aarch64')):
        raise Exception('Not arm platform')
    # Extract the board revision from /proc/cpuinfo (the "1000" prefix marks
    # an over-volted board and is stripped).
    cmd = u'/usr/bin/awk \'/^Revision/ {sub("^1000", "", $3); print $3}\' /proc/cpuinfo'
    p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE)
    revision = p.communicate()[0].decode('utf-8').replace(u'\n', u'')
    logging.trace('Raspberrypi revision=%s' % revision)
    infos = RASPBERRY_PI_REVISIONS[revision] if revision and revision in RASPBERRY_PI_REVISIONS else RASPBERRY_PI_REVISIONS[u'unknown']
    infos[u'revision'] = revision

    return infos
    def compare_memory_objects(self, test_set, pattern_memory_obj,
                               function_memory_obj):
        """Return True iff every pattern entry matches the function output.

        Keys starting with "group" are delegated to
        check_for_output_memory_block; all other keys are hex address
        strings whose values must equal function_memory_obj's entries.
        """
        # Idiom fix: identity comparison for None.
        if function_memory_obj is None:
            return False

        for pattern_key in pattern_memory_obj:
            if pattern_key.startswith("group"):
                is_group_present = self.check_for_output_memory_block(
                    test_set, function_memory_obj,
                    pattern_memory_obj[pattern_key])
                # Idiom fix: truthiness instead of `== False`.
                if not is_group_present:
                    return False
            else:
                memory_address = int(pattern_key, 16)
                if memory_address not in function_memory_obj:
                    logging.trace('Memory address ' + pattern_key +
                                  ' not present in function execution output.')
                    return False
                if (pattern_memory_obj[pattern_key] !=
                        function_memory_obj[memory_address]):
                    logging.trace(
                        'Value at memory address ' + pattern_key +
                        ' doesn\'t match in function execution output.')
                    return False
        return True
Exemplo n.º 24
0
    def __init__(self, identifier, execparams, propertydefs=(), loggerName=None):
        """propertydefs is a iterable of tuples that contain

        (propid, propname, type, mode, defaultvalue, units, action, kinds)

        defaultvalue should be None if there is no default, otherwise:
            simple - a single str, int, float, bool
            simple sequence - an iterable of str, int, float, or bool
            struct - a dictionary there the key is the "id" and the value is a tuple
                     of (propname, type, mode)
            struct sequence - a iterable of dictionaries following the struct format
        """
        #print "resource __init__:"
        #print "id:" + str(identifier)
        #print "loggerName:" + str(loggerName)
        #print "execparams:" + str(execparams)
        # Guards concurrent access to the property set.
        self.propertySetAccess = threading.Lock()
        self._id = identifier
        self._started = False
        self._domMgr = None
        self._ecm = None

        ##
        ## logging context for the resource
        ##
        self.logLevel = logging.INFO
        self._logLevel = CF.LogLevels.INFO
        self.logConfig = ""
        self.loggingMacros = ossie.logger.GetDefaultMacros()
        ossie.logger.ResolveHostInfo( self.loggingMacros )
        self.loggingCtx = None
        self.loggingURL=None

        # Derive the logger id: either the supplied name, or the name
        # binding with its trailing "_<n>" instance suffix stripped.
        if loggerName == None:
            self._logid = execparams.get("NAME_BINDING", self._id )
            self._logid = self._logid.rsplit("_", 1)[0]
        else:
            self._logid = loggerName
        # Colons are replaced since they are not usable in logger names.
        self._logid = self._logid.replace(":","_")
        self._log = logging.getLogger(self._logid)
        self._log.setLevel(self.logLevel)
        self.logListenerCallback=None
        self._name = execparams.get("NAME_BINDING", "")
        # The base resource class manages properties ...
        self._props = PropertyStorage(self, propertydefs, execparams)
        self._props.initialize()

        # property change listener registry and monitoring thread
        self._propChangeRegistry = {}
        self._propChangeThread = _PropertyChangeThread(self)
        self._propMonitors = {}

        # ... and also manages ports
        self.__loadPorts()

        logging.trace("Initial property storage %s", self._props)

        self.__initialized = False
        self.__propertiesInitialized = False
Exemplo n.º 25
0
 def log(self):
     """Dump the peer cache at trace level (5), if that level is enabled."""
     if not logging.getLogger().isEnabledFor(5):
         return
     logging.trace("Cache:")
     query = ("SELECT peer.*, try FROM peer, volatile.stat"
              " WHERE prefix=peer ORDER BY prefix")
     for prefix, address, _try in self._db.execute(query):
         suffix = ' (blacklisted)' if _try else ''
         logging.trace("- %s: %s%s", prefix, address, suffix)
Exemplo n.º 26
0
 def log(self):
     """Log the peer cache when trace level (5) is enabled."""
     if logging.getLogger().isEnabledFor(5):
         logging.trace("Cache:")
         rows = self._db.execute(
             "SELECT peer.*, try FROM peer, volatile.stat"
             " WHERE prefix=peer ORDER BY prefix")
         for prefix, address, _try in rows:
             blacklisted = ' (blacklisted)' if _try else ''
             logging.trace("- %s: %s%s", prefix, address, blacklisted)
Exemplo n.º 27
0
	def run(self) -> bool:
		'''
		Flatten the input file into a standalone document.
		:return: True if the execution is a success, otherwise False.
		'''
		self.__init()
		document = self._analyze_document()
		logging.trace(_T("File content: %s") % (document))
		return self._generate_flat_document(document)
Exemplo n.º 28
0
 def write_list(cls, path, name, entries):
     """Write a secure .list file: one "<windows_path>,<size>" line per entry."""
     out_path = path.joinpath(name).with_suffix(".list")
     logging.debug(f"Writing secure list: {out_path}")
     with out_path.open("w") as f:
         for entry in entries:
             line = f"{PureWindowsPath(entry.file_name)},{entry.file_size}"
             logging.trace(line)
             f.write(line + "\n")
Exemplo n.º 29
0
def storage_sanity_checks(module) -> None:
    """Verify that *module* implements the --storage-medium plugin interface.

    The plugin must expose ``df_read`` and ``df_write`` functions; an
    AssertionError naming the missing function is raised otherwise.
    """
    logging.trace("Verifying selected --storage-medium plugin interface")

    required = ['df_read', 'df_write']
    defined = inspect.getmembers(module, inspect.isfunction)

    for func in required:
        # Generator instead of the original list comprehension inside any()
        # (same result, no throwaway list).  Substring match (not equality)
        # mirrors the historical behavior.
        assert any(func in name for name, _ in defined), \
            f"Storage medium plugin does not define {func}"
Exemplo n.º 30
0
 def write(self, buffer):
     # Serialize this request into *buffer*: reserve space for the header,
     # encode the payload, then back-patch the id and payload size.
     logging.trace("send %s%r", self.__class__.__name__, (self.id,) + self.args)
     header_offset = len(buffer)
     buffer += "\0" * header.size
     request = self.request
     if isinstance(request, Struct):
         request.encode(buffer, self.args)
     else:
         request.encode(buffer, *self.args)
     buffer.pack_into(header, header_offset,
                      self.id, len(buffer) - header.size - header_offset)
Exemplo n.º 31
0
 def write_manifest(cls, path, name, entries):
     """Write *entries* as a ``.mfs`` manifest under *path*.

     Each line is ``file,download,file_hash,download_hash,file_size,
     download_size,flags`` with flags truncated to 16 bits.
     """
     out_path = path.joinpath(name).with_suffix(".mfs")
     logging.debug(f"Writing manifest: {out_path}")
     with out_path.open("w") as stream:
         for entry in entries:
             fn = PureWindowsPath(entry.file_name)
             dn = PureWindowsPath(entry.download_name)
             flags = int(entry.flags) & 0xFFFF
             line = (f"{fn},{dn},{entry.file_hash},{entry.download_hash},"
                     f"{entry.file_size},{entry.download_size},{flags}")
             logging.trace(line)
             stream.write(f"{line}\n")
Exemplo n.º 32
0
 def generate(self) -> Generator[APData, None, None]:
     """Yield one mapped item per RSS entry, honoring etag/last-modified."""
     logging.debug(
         f"Pulling the rss feed at {self.url}, last etag: {self.last_etag}, modif: {self.last_modified}"
     )
     feed = feedparser.parse(self.url,
                             etag=self.last_etag,
                             modified=self.last_modified)
     if feed.status == 304:
         # Feed unchanged since the last poll -- nothing to yield.
         return
     for entry in feed.entries:
         logging.trace(f"Rss entry: {json.dumps(entry, indent=4)}")
         yield self.mapper(entry)
Exemplo n.º 33
0
def exec_env_sanity_checks(module) -> None:
    """Verify that *module* implements the --exec-env plugin interface.

    The plugin must define the four classes below; an AssertionError naming
    the first missing class is raised otherwise.
    """
    logging.trace("Verifying selected --exec-env plugin interface")

    required = [
        'ParsedCmdlineConfigurer', 'ExpRunShellCmdsGenerator',
        'ExpShellCmdsGenerator', 'ExecEnvChecker'
    ]
    defined = inspect.getmembers(module, inspect.isclass)
    for cls_name in required:
        # Generator instead of the original list comprehension inside any()
        # (same result, no throwaway list).  Substring match (not equality)
        # mirrors the historical behavior.
        assert any(cls_name in name for name, _ in defined), \
            f"Execution environment plugin '{module.__name__}' does not define '{cls_name}'"
    def match_function_to_pattern(self, start_address):
        # Decide whether the function starting at *start_address* matches the
        # pattern: reject functions containing unsupported operations, then
        # run the full analysis over the function's address range.
        end_address = utils.id_function_block_end(start_address)

        # NOTE(review): `== True` kept verbatim -- if unsupported_operations()
        # can return a truthy non-bool, plain truthiness would change behavior.
        if self.unsupported_operations(start_address, end_address) == True:
            logging.trace('Unsupported function.')
            return False

        return self.analyse_function(start_address, end_address)
Exemplo n.º 35
0
    def run(self):
        """Main worker loop: claim sites and brozzle them until shutdown.

        Heartbeats the service registry, periodically tries to start browsing
        newly claimed sites, and on exit tears down the browser pool and joins
        all brozzling threads.
        """
        self.logger.notice(
                'brozzler %s - brozzler-worker starting', brozzler.__version__)
        last_nothing_to_claim = 0
        try:
            while not self._shutdown.is_set():
                self._service_heartbeat_if_due()
                # Back off for 20s after learning there was nothing to claim.
                if time.time() - last_nothing_to_claim > 20:
                    try:
                        self._start_browsing_some_sites()
                    except brozzler.browser.NoBrowsersAvailable:
                        logging.trace(
                                "all %s browsers are in use",
                                self._max_browsers)
                    except brozzler.NothingToClaim:
                        last_nothing_to_claim = time.time()
                        logging.trace(
                                "nothing to claim, all available active sites "
                                "are already claimed by a brozzler worker")
                time.sleep(0.5)

            self.logger.notice("shutdown requested")
        except r.ReqlError as e:
            self.logger.error(
                    "caught rethinkdb exception, will try to proceed",
                    exc_info=True)
        except brozzler.ShutdownRequested:
            self.logger.info("shutdown requested")
        except:
            self.logger.critical(
                    "thread exiting due to unexpected exception",
                    exc_info=True)
        finally:
            # Best-effort deregistration; never let it mask the real exit path.
            if self._service_registry and hasattr(self, "status_info"):
                try:
                    self._service_registry.unregister(self.status_info["id"])
                except:
                    self.logger.error(
                            "failed to unregister from service registry",
                            exc_info=True)

            self.logger.info(
                    'shutting down %s brozzling threads',
                    len(self._browsing_threads))
            # Ask each live brozzling thread to exit via an injected exception.
            with self._browsing_threads_lock:
                for th in self._browsing_threads:
                    if th.is_alive():
                        brozzler.thread_raise(th, brozzler.ShutdownRequested)
            self._browser_pool.shutdown_now()
            # copy to avoid "RuntimeError: Set changed size during iteration"
            thredz = set(self._browsing_threads)
            for th in thredz:
                th.join()
Exemplo n.º 36
0
    def run(self):
        """Worker main loop (duplicate of the snippet above in this file).

        Repeats: heartbeat, try to claim and start browsing sites, sleep.
        On any exit path, unregisters from the service registry, stops the
        browser pool, and joins every brozzling thread.
        """
        self.logger.notice(
                'brozzler %s - brozzler-worker starting', brozzler.__version__)
        last_nothing_to_claim = 0
        try:
            while not self._shutdown.is_set():
                self._service_heartbeat_if_due()
                # 20-second backoff after a NothingToClaim result.
                if time.time() - last_nothing_to_claim > 20:
                    try:
                        self._start_browsing_some_sites()
                    except brozzler.browser.NoBrowsersAvailable:
                        logging.trace(
                                "all %s browsers are in use",
                                self._max_browsers)
                    except brozzler.NothingToClaim:
                        last_nothing_to_claim = time.time()
                        logging.trace(
                                "nothing to claim, all available active sites "
                                "are already claimed by a brozzler worker")
                time.sleep(0.5)

            self.logger.notice("shutdown requested")
        except r.ReqlError as e:
            self.logger.error(
                    "caught rethinkdb exception, will try to proceed",
                    exc_info=True)
        except brozzler.ShutdownRequested:
            self.logger.info("shutdown requested")
        except:
            self.logger.critical(
                    "thread exiting due to unexpected exception",
                    exc_info=True)
        finally:
            # Deregistration is best-effort and must not raise out of finally.
            if self._service_registry and hasattr(self, "status_info"):
                try:
                    self._service_registry.unregister(self.status_info["id"])
                except:
                    self.logger.error(
                            "failed to unregister from service registry",
                            exc_info=True)

            self.logger.info(
                    'shutting down %s brozzling threads',
                    len(self._browsing_threads))
            with self._browsing_threads_lock:
                for th in self._browsing_threads:
                    if th.is_alive():
                        brozzler.thread_raise(th, brozzler.ShutdownRequested)
            self._browser_pool.shutdown_now()
            # copy to avoid "RuntimeError: Set changed size during iteration"
            thredz = set(self._browsing_threads)
            for th in thredz:
                th.join()
Exemplo n.º 37
0
 def _onPreAuthContent(self, content):
     """Handle webauthn pre-auth JSON: parse *content* and navigate the
     session page to its ``redirect_url``; show an error page on failure."""
     try:
         if not content:
             logging.debug("no preauth content")
             return
         preauth = json.loads(content)
         logging.trace("webauthn preauth:\n%s\n", json.dumps(preauth, indent=2))
         qApp.setOverrideCursor(Qt.WaitCursor)
         self.authn_session_page.setUrl(QUrl(preauth["redirect_url"]))
     except Exception as e:
         # Fixed: the original caught (ValueError, Exception) -- ValueError is
         # already an Exception subclass, so the tuple was redundant; the
         # catch set is unchanged.
         logging.error(format_exception(e))
         self.set_current_html(ERROR_HTML % content)
Exemplo n.º 38
0
	def refreshEvent(self,dontRebuild=False,forceIx=-1):
		"""Refresh every per-item editor widget from the edited sequence.

		:param dontRebuild: when True and the sequence length changed
			underneath us, silently skip (breaks recursion with rebuild()).
		:param forceIx: index of the item widget to focus, or -1 for none.
		"""
		currSeq=self.getter()
		# NOTE(review): form.count()/2 is a float under Python 3 (true
		# division); the != test still works since len() compares equal to the
		# exact float -- confirm which Python version this file targets.
		if len(currSeq)!=self.form.count()/2: #rowCount():
			if dontRebuild: return # length changed behind our back, just pretend nothing happened and update next time instead
			self.rebuild()
			currSeq=self.getter()
		for i in range(len(currSeq)):
			item=self.form.itemAt(i,QFormLayout.FieldRole)
			logging.trace('got item #%d %s'%(i,str(item.widget())))
			widget=item.widget()
			# Do not refresh a widget that is being actively edited ("hot").
			if not widget.hot:
				widget.refresh()
			if forceIx>=0 and forceIx==i: widget.setFocus()
Exemplo n.º 39
0
 def smooth(self, seq):
     """Iteratively smooth *seq* using the instance's regex lists
     (``beg_exprs`` / ``end_exprs`` / ``mid_exprs``): where two adjacent
     runs match, the shorter run is overwritten with the first character of
     the longer one.  Each phase runs at most ``self.max_iter`` times.
     Sequences shorter than ``self.window`` are returned unchanged.
     """
     if len(seq) < self.window:
         # Cannot smooth a sequence smaller than the smooth window
         if log.is_trace(): log.trace("Not smoothing sequence; too small")
         return seq
     
     def smooth_beg(seq):
         # Rewrite a (run-a)(run-b)(rest) match at the start of seq.
         for e in self.beg_exprs:
             m = e.match(seq)
             if m:
                 a, b, rest = m.group(1, 2, 3)
                 if len(a) > len(b):
                     seq = "{0}{1}{2}".format(a, a[0] * len(b), rest)
                 else:
                     seq = "{0}{1}{2}".format(b[0] * len(a), b, rest)
                 return seq
         return None
     
     def smooth_end(seq):
         # Rewrite a (rest)(run-a)(run-b) match at the end of seq.
         for e in self.end_exprs:
             m = e.match(seq)
             if m:
                 rest, a, b = m.group(1, 2, 3)
                 if len(a) > len(b):
                     seq = "{0}{1}{2}".format(rest, a, a[0] * len(b))
                 else:
                     seq = "{0}{1}{2}".format(rest, b[0] * len(a), b)
                 return seq
         return None
     
     def smooth_mid(seq):
         # Replace a middle run with the character of the following run.
         for e in self.mid_exprs:
             m = e.match(seq)
             if m:
                 beg, mid, end = m.group(1, 2, 3)
                 return "{0}{1}{2}".format(beg, end[0] * len(mid), end)
         return None
     
     for func in (smooth_beg, smooth_end, smooth_mid):
         # Fixed: range() replaces the Python-2-only xrange(); iteration
         # behavior is identical on both Python 2 and 3.
         for i in range(self.max_iter):
             newseq = func(seq)
             if newseq:
                 seq = newseq
             else:
                 break
     
     return seq
Exemplo n.º 40
0
    def current_rates(self, time_period_minutes):
        """Return (elapsed_seconds, urls_per_sec, warc_bytes_per_sec)
        measured over approximately the last *time_period_minutes* minutes."""
        assert time_period_minutes > 0
        # Take a consistent view of the running totals under the lock.
        with self._lock:
            now = time.time()
            total_urls = self.urls
            total_bytes = self.warc_bytes

        cutoff = now - time_period_minutes * 60
        # Short windows use the finer-grained ten-second snapshots.
        if time_period_minutes <= 2:
            snap = self._closest_ten_sec_snap(cutoff)
        else:
            snap = self._closest_minute_snap(cutoff)

        snap_time, snap_urls, snap_bytes = snap[0], snap[1], snap[2]
        elapsed = now - snap_time
        logging.trace(
                'elapsed=%0.1fs urls=%s warc_bytes=%s', elapsed,
                total_urls - snap_urls, total_bytes - snap_bytes)
        return elapsed, (total_urls - snap_urls) / elapsed, (total_bytes - snap_bytes) / elapsed
Exemplo n.º 41
0
def run( application, address, port ):
	"""Serve *application* over an asyncio TCP server on *address*:*port*.

	Blocks in the event loop until KeyboardInterrupt, then closes the
	server and the loop cleanly."""
	loop    = asyncio.get_event_loop()
	# Fixed: the request handler was originally also named `server` and was
	# then shadowed by the asyncio server object below; renamed for clarity.
	handler = Server(application, address, port)
	coro    = asyncio.start_server(handler.request, address, port, loop=loop)
	server  = loop.run_until_complete(coro)
	socket = server.sockets[0].getsockname()
	logging.info("Retro {font_server}asyncio{reset} server listening on {font_url}http://{host}:{port}{reset}".format(
		host=socket[0], port=socket[1],
		font_server=bold(255),
		font_url=normal(51),
		reset=RESET,
	))
	try:
		loop.run_forever()
	except KeyboardInterrupt:
		pass
	# Close the server
	server.close()
	loop.run_until_complete(server.wait_closed())
	loop.close()
	logging.trace("done")
Exemplo n.º 42
0
	def rebuild(self):
		"""Tear down and recreate one editor row per item of the sequence.

		Clears all existing form rows, inserts a fundamental-type editor per
		item (or an error/placeholder row), then runs a non-rebuilding
		refresh.
		"""
		currSeq=self.getter()
		# clear everything
		rows=int(self.form.count()/2)
		for row in range(rows):
			# Fixed: logging.trace() takes a %-style format string; the
			# original passed bare extra args ('counts',a,b), which breaks
			# message formatting in the logging handler.
			logging.trace('counts %s %s',self.form.rowCount(),self.form.count())
			for wi in self.form.itemAt(row,QFormLayout.FieldRole),self.form.itemAt(row,QFormLayout.LabelRole):
				self.form.removeItem(wi)
				logging.trace('deleting widget %s',wi.widget())
				widget=wi.widget(); widget.hide(); del widget # for some reason, deleting does not make the thing disappear visually; hiding does, however
			logging.trace('counts after %s %s',self.form.rowCount(),self.form.count())
		logging.debug('cleared')
		# add everything
		Klass=_fundamentalEditorMap.get(self.itemType,None)
		if not Klass:
			errMsg=QTextEdit(self)
			errMsg.setReadOnly(True); errMsg.setText("Sorry, editing sequences of %s's is not (yet?) implemented."%(self.itemType.__name__))
			self.form.insertRow(0,'<b>Error</b>',errMsg)
			return
		# Small callables binding a fixed index into the edited sequence.
		class ItemGetter(object):
			def __init__(self,getter,index): self.getter,self.index=getter,index
			def __call__(self): return self.getter()[self.index]
		class ItemSetter(object):
			def __init__(self,getter,setter,index): self.getter,self.setter,self.index=getter,setter,index
			def __call__(self,val): seq=self.getter(); seq[self.index]=val; self.setter(seq)
		for i,item in enumerate(currSeq):
			widget=Klass(self,ItemGetter(self.getter,i),ItemSetter(self.getter,self.setter,i)) #proxy,'value')
			self.form.insertRow(i,'%d. '%i,widget)
			logging.debug('added item %d %s'%(i,str(widget)))
		if len(currSeq)==0: self.form.insertRow(0,'<i>empty</i>',QLabel('<i>(right-click for menu)</i>'))
		logging.debug('rebuilt, will refresh now')
		self.refreshEvent(dontRebuild=True) # avoid infinite recursion it the length would change meanwhile
Exemplo n.º 43
0
 def _read(self):
     """Read from the socket and dispatch every complete packet.

     Alternates between two buffer states: waiting for a fixed-size header
     (which yields the next packet's type and size) and waiting for a packet
     body (which is decoded and dispatched to a ``handle_<Name>`` method on
     self, falling back to ``babel_<Name>`` on ``self.handler``).

     :raises ConnectionClosed: when the peer has closed the socket.
     """
     d = self.socket.recv(65536)
     if not d:
         raise ConnectionClosed(self.socket_path)
     b = self.read_buffer
     b += d
     while b.ready:
         if self._decode:
             # Body state: decode the pending packet, then re-arm the
             # buffer to expect the next header.
             packet = b.decode(self._decode)
             self._decode = None
             b.want(header.size)
             name = packet.__class__.__name__
             logging.trace("recv %r", packet)
             try:
                 h = getattr(self, "handle_" + name)
             except AttributeError:
                 h = getattr(self.handler, "babel_" + name)
             h(*packet)
         else:
             # Header state: learn the next packet's type and body size.
             packet_type, size = b.unpack_from(header)
             self._decode = Packet.response_dict[packet_type]
             b.want(size)
Exemplo n.º 44
0
	def _formatElement( self, element ):
		"""Formats the given element and its content, by using the formatting
		operations defined in this class.

		Content in an embedded-language mode (sugar, coffeescript, typescript,
		clevercss, pythoniccss, nobrackets, texto, hjson, raw) is first
		processed by the matching external processor; then the element is
		written out as an HTML tag using the current formatting flags."""
		attributes = element._attributesAsHTML()
		exceptions = HTML_EXCEPTIONS.get(element.name)
		content    = element.content
		# Only the part before the first "+" selects the language mode; the
		# rest are flags like "+source" or "+escape".
		mode       = element.mode.split("+")[0] if element.mode else None
		# FIXME: Flags are not properly supported
		if exceptions:
			not_empty = exceptions.get("NOT_EMPTY")
			if not_empty != None and not content:
				element.content.append(Text(not_empty))
		# Does this element has any content that needs to be pre-processed?
		if mode == "sugar":
			lines = element.contentAsLines()
			import paml.web
			source = u"".join(lines)
			t = time.time()
			res, _ = paml.web.processSugar(source, "", cache=self.useProcessCache, includeSource=element.mode.endswith("+source"))
			logging.trace("Parsed Sugar: {0} lines in {1:0.2f}s".format(len(lines), time.time() - t))
			element.content = [Text(res)]
		elif mode in ("coffeescript", "coffee"):
			lines = element.contentAsLines()
			import paml.web
			source = u"".join(lines)
			t = time.time()
			# NOTE(review): `useProcessCacheFalse` looks like a typo for
			# `useProcessCache` (cf. the sugar branch above) -- confirm
			# against the original project before relying on caching here.
			res, _ = paml.web.processCoffeeScript(source, "", cache=self.useProcessCacheFalse)
			logging.trace("Parsed CoffeeScript: {0} lines in {1:0.2f}s".format(len(lines), time.time() - t))
			element.content = [Text(res)]
		elif mode in ("typescript", "ts"):
			lines = element.contentAsLines()
			import paml.web
			source = u"".join(lines)
			t = time.time()
			# NOTE(review): same suspicious `useProcessCacheFalse` as above.
			res, _ = paml.web.processTypeScript(source, "", cache=self.useProcessCacheFalse)
			logging.trace("Parsed TypeScript: {0} lines in {1:0.2f}s".format(len(lines), time.time() - t))
			element.content = [Text(res)]
		elif mode  in ("clevercss", "ccss"):
			lines = element.contentAsLines()
			import paml.web
			source = u"".join(lines)
			t = time.time()
			res, _ = paml.web.processCleverCSS(source, ".")
			logging.trace("Parsed CleverCSS: {0} lines in {1:0.2f}s".format(len(lines), time.time() - t))
			element.content = [Text(res)]
		elif mode  in ("pythoniccss", "pcss"):
			lines = element.contentAsLines()
			import paml.web
			source = u"".join(lines)
			t = time.time()
			res, _ = paml.web.processPythonicCSS(source, ".")
			logging.trace("Parsed PythonicCSS: {0} lines in {1:0.2f}s".format(len(lines), time.time() - t))
			element.content = [Text(res)]
		elif element.mode and element.mode.endswith("nobrackets"):
			lines = element.contentAsLines()
			import paml.web
			source = u"".join(lines)
			t = time.time()
			# The nobrackets processor works from a temp file whose suffix
			# encodes the embedded language prefix (e.g. "js.nb").
			prefix = element.mode[0:0-(len("nobrackets"))]
			suffix = ".nb"
			if prefix: suffix = "." + prefix + suffix
			p = tempfile.mktemp(suffix=suffix)
			with open(p, "w") as f: f.write(source)
			res, _ = paml.web.processNobrackets(source, p)
			if os.path.exists(p): os.unlink(p)
			logging.trace("Parsed Nobrackets: {0} lines in {1:0.2f}s".format(len(lines), time.time() - t))
			element.content = [Text(res)]
		elif mode == "texto":
			lines = element.contentAsLines()
			import texto
			source = u"".join(lines)
			res    = ensure_unicode(texto.toHTML(source))
			element.content = [Text(res)]
		elif mode == "hjson":
			lines = element.contentAsLines()
			import hjson
			source = u"".join(lines)
			res    = ensure_unicode(hjson.dumpsJSON(hjson.loads(source)))
			element.content = [Text(res)]
		elif mode == "raw":
			element.setFormat(FORMAT_PRESERVE)
		# NOTE: This is a post-processor
		if element.mode and (element.mode.endswith ("+escape") or "+escape+" in element.mode):
			for text in element.content:
				if isinstance(text, Text):
					text.content = text.content.replace("<", "&lt;").replace(">", "&gt;")
		# If the element has any content, then we apply it
		if element.content:
			flags = element.getFormatFlags() + list(self.getDefaults(element.name))
			self.pushFlags(*flags)
			if element.isPI:
				assert not attributes, "Processing instruction cannot have attributes"
				start   = "<?%s " % (element.name)
				end     = " ?>"
			else:
				start   = "<%s%s>" % (element.name, attributes)
				end     = "</%s>" % (element.name)
			if self.hasFlag(FORMAT_INLINE):
				if self._inlineCanSpanOneLine(element):
					self.setFlag(FORMAT_SINGLE_LINE)
			# If the element is an inline, we enter the SINGLE_LINE formatting
			# mode, without adding an new line
			# FIXME: isInline is always false
			if element.isInline:
				self.pushFlags(FORMAT_SINGLE_LINE)
				self.writeTag(start)
				self._formatContent(element)
				self.writeTag(end)
				self.popFlags()
			# Or maybe the element has a SINGLE_LINE flag, in which case we add a
			# newline inbetween
			elif self.hasFlag(FORMAT_SINGLE_LINE) or element.isTextOnly():
				self.newLine()
				self.writeTag(start)
				self._formatContent(element)
				self.writeTag(end)
			# Otherwise it's a normal open/closed element
			else:
				self.newLine()
				self.writeTag(start)
				self.newLine()
				self.startIndent()
				self._formatContent(element)
				self.endIndent()
				self.ensureNewLine()
				self.writeTag(end)
			self.popFlags()
		# Otherwise it doesn't have any content
		else:
			if exceptions and exceptions.get("NO_CLOSING"):
				text =  "<%s%s>" % (element.name, attributes)
			else:
				text =  "<%s%s />" % (element.name, attributes)
			# And if it's an inline, we don't add a newline
			if not element.isInline: self.newLine()
			self.writeTag(text)
Exemplo n.º 45
0
    def get_data(self, pkt_dump_only=False, timeout=1):
        """ read data from the socket

        :param pkt_dump_only: when truthy, only read and store one packet
            dump and return True; otherwise return the next 1-line response
            as a string (storing any interleaved packet dumps on the way).
        :param timeout: select() timeout in seconds while waiting for data.

        NOTE(review): this indexes the recv() result with ``dat[i] == '\\n'``
        and accumulates it into a str, i.e. Python-2 byte-string semantics;
        under Python 3 recv() returns bytes and this comparison would never
        match -- confirm the target interpreter.
        """
        # This method behaves slightly differently depending on whether it is
        # called to read the response to a command (pkt_dump_only = 0) or if
        # it is called specifically to read a packet dump (pkt_dump_only = 1).
        #
        # Packet dumps look like:
        #   pktdump,<port_id>,<data_len>\n
        #   <packet contents as byte array>\n
        # This means the total packet dump message consists of 2 lines instead
        # of 1 line.
        #
        # - Response for a command (pkt_dump_only = 0):
        #   1) Read response from the socket until \n (end of message)
        #   2a) If the response is a packet dump header (starts with "pktdump,"):
        #     - Read the packet payload and store the packet dump for later
        #       retrieval.
        #     - Reset the state and restart from 1). Eventually state 2b) will
        #       be reached and the function will return.
        #   2b) If the response is not a packet dump:
        #     - Return the received message as a string
        #
        # - Explicit request to read a packet dump (pkt_dump_only = 1):
        #   - Read the dump header and payload
        #   - Store the packet dump for later retrieval
        #   - Return True to signify a packet dump was successfully read
        ret_str = None
        dat = ""
        done = 0
        while done == 0:
            # recv() is blocking, so avoid calling it when no data is waiting.
            ready = select.select([self._sock], [], [], timeout)
            if ready[0]:
                logging.debug("Reading from socket")
                dat = self._sock.recv(256)
                ret_str = ""
            else:
                logging.debug("No data waiting on socket")
                done = 1
            logging.trace("Iterating over input buffer (%d octets)", len(dat))

            i = 0
            while i < len(dat) and (done == 0):
                if dat[i] == '\n':
                    # Terminating \n for a string reply encountered.
                    if ret_str.startswith('pktdump,'):
                        logging.trace("Packet dump header read: [%s]", ret_str)
                        # The line is a packet dump header. Parse it, read the
                        # packet payload, store the dump for later retrieval.
                        # Skip over the packet dump and continue processing: a
                        # 1-line response may follow the packet dump.
                        _, port_id, data_len = ret_str.split(',', 2)
                        port_id, data_len = int(port_id), int(data_len)

                        data_start = i + 1      # + 1 to skip over \n
                        data_end = data_start + data_len
                        pkt_payload = array.array('B', map(ord, dat[data_start:data_end]))
                        pkt_dump = PacketDump(port_id, data_len, pkt_payload)
                        self._pkt_dumps.append(pkt_dump)

                        # Reset state. Increment i with payload length and add
                        # 1 for the trailing \n.
                        ret_str = ""
                        i += data_len + 1

                        if pkt_dump_only:
                            # Return boolean instead of string to signal
                            # successful reception of the packet dump.
                            logging.trace("Packet dump stored, returning")
                            ret_str = True
                            done = 1
                    else:
                        # Regular 1-line message. Stop reading from the socket.
                        logging.trace("Regular response read")
                        done = 1
                else:
                    # Still inside the current line: accumulate the character.
                    ret_str += dat[i]

                i = i + 1

        logging.debug("Received data from socket: [%s]", ret_str)
        return ret_str
Exemplo n.º 46
0
def simple_table(array, has_hdr = True):
    """Generate a simple table with the contents of array.

    Args:
        array ([[hdr1,hdr2,...], [...],...]): A list of lists containing the
            values to put in the table.
        has_hdr (bool): True if the first list contains the header names of
            the columns. False if the first list contains data.

    Returns:
        str. A simple table in RST format containing the array data.
    """
    result = ''

    logging.trace('Array passed in: %s', array)
    # Calculate max. lengths of each column. RST tables require column
    # alignment.  Fixed: size the width list after the longest row with
    # max(key=len) -- the original sorted(array, cmp=...) is Python-2-only
    # (`cmp` was removed in Python 3) and did a full sort just to find it.
    col_widths = [0] * len(max(array, key=len))
    logging.trace('Initialized col_widths with 0: %s', col_widths)
    for row in array:
        logging.trace('- Adjusting column widths for row %s', row)
        for index in range(len(row)):
            col_widths[index] = max(col_widths[index], len(str(row[index])))
        logging.trace('  New col_widths: %s', col_widths)

    # Draw top border
    for col_width in col_widths:
        result += '=' * col_width
        result += '  '
    result += '\n'

    # Draw table cells, pad every cell with appropriate amount of spaces for
    # correct alignment.
    hdr_border_drawn = False
    for row in array:
        if len(row) > 1:
            for index in range(len(row)):
                result += str(row[index]).ljust(col_widths[index])
                result += '  '
            result += '\n'

            # Draw table header border if needed
            if has_hdr and not hdr_border_drawn:
                for col_width in col_widths:
                    result += '=' * col_width
                    result += '  '
                result += '\n'

            hdr_border_drawn = True
        else:
            # Single-cell row: emit it as a spanning section row with a
            # full-width border underneath.
            result += row[0] + "\n"

            for index in range(len(col_widths)):
                result += '=' * col_widths[index]
                result += '==' if index > 0 else ""
            result += '\n'
            hdr_border_drawn = True

    # Draw bottom border
    for col_width in col_widths:
        result += '=' * col_width
        result += '  '
    result += '\n\n'

    return result
Exemplo n.º 47
0
def start_component(componentclass, interactive_callback=None, thread_policy=None, loggerName=None):   
    execparams, interactive = parseCommandLineArgs(componentclass)
    setupSignalHandlers()
    orb = None

    try:
        try:
            orb = createOrb()
            globals()['__orb__'] = orb
            name_binding=""
            component_identifier=""
            
            # set up backwards-compatable logging
            #configureLogging(execparams, loggerName, orb)

            componentPOA = getPOA(orb, thread_policy, "componentPOA")
          
            if not execparams.has_key("COMPONENT_IDENTIFIER"):
                if not interactive:
                    logging.warning("No 'COMPONENT_IDENTIFIER' argument provided")
                execparams["COMPONENT_IDENTIFIER"] = ""
            
            if not execparams.has_key("NAME_BINDING"):
                if not interactive:
                    logging.warning("No 'NAME_BINDING' argument provided")
                execparams["NAME_BINDING"] = ""
            
            if not execparams.has_key("PROFILE_NAME"):
                if not interactive:
                    logging.warning("No 'PROFILE_NAME' argument provided")
                execparams["PROFILE_NAME"] = ""

            # Configure logging (defaulting to INFO level).
            log_config_uri = execparams.get("LOGGING_CONFIG_URI", None)
            debug_level = execparams.get("DEBUG_LEVEL", None)
            if debug_level != None: debug_level = int(debug_level)
            dpath=execparams.get("DOM_PATH", "")
            component_identifier=execparams.get("COMPONENT_IDENTIFIER", "")
            name_binding=execparams.get("NAME_BINDING", "")

            ## sets up logging during component startup
            ctx = ossie.logger.ComponentCtx(
                name = name_binding,
                id = component_identifier,
                dpath = dpath )
            ossie.logger.Configure(
                logcfgUri = log_config_uri,
                logLevel = debug_level,
                ctx = ctx)

            # Create the component
            component_Obj = componentclass(execparams["COMPONENT_IDENTIFIER"], execparams)
            component_Obj.setAdditionalParameters(execparams["PROFILE_NAME"])
            componentPOA.activate_object(component_Obj)
            component_Var = component_Obj._this()

            ## sets up logging context for resource to support CF::Logging
            component_Obj.saveLoggingContext( log_config_uri, debug_level, ctx )

            # get the naming context and bind to it
            if execparams.has_key("NAMING_CONTEXT_IOR"):
                rootContext = orb.string_to_object(execparams['NAMING_CONTEXT_IOR'])
                if rootContext == None:
                    logging.error("Failed to lookup naming context")
                    sys.exit(-1)

                rootContext = rootContext._narrow(CosNaming.NamingContext)
                name = URI.stringToName(execparams['NAME_BINDING'])
                rootContext.rebind(name, component_Var)
            else:
                if not interactive:
                    logging.warning("Skipping name-binding because required execparams 'NAMING_CONTEXT_IOR' is missing")

            if not interactive:
                logging.trace("Starting ORB event loop")
                orb.run()
            else:
                logging.trace("Entering interactive mode")
                if callable(interactive_callback):
                    # Pass only the Var to prevent anybody from calling non-CORBA functions
                    interactive_callback(component_Obj)
                else:
                    print orb.object_to_string(component_Obj._this())
                    orb.run()

            try:
               orb.shutdown(true)
            except:
                pass
            signal.signal(signal.SIGINT, signal.SIG_IGN)
        except SystemExit:
            pass
        except KeyboardInterrupt:
            pass
        except:
            logging.exception("Unexpected Error")
    finally:
        if orb:
            orb.destroy()
Exemplo n.º 48
0
def start_component(componentclass, interactive_callback=None, thread_policy=None, loggerName=None):   
    execparams, interactive = parseCommandLineArgs(componentclass)
    setupSignalHandlers()

    try:
        try:
            orb = createOrb()
            globals()['__orb__'] = orb

            configureLogging(execparams, loggerName, orb)

            componentPOA = getPOA(orb, thread_policy, "componentPOA")
          
            if not execparams.has_key("COMPONENT_IDENTIFIER"):
                if not interactive:
                    logging.warning("No 'COMPONENT_IDENTIFIER' argument provided")
                execparams["COMPONENT_IDENTIFIER"] = ""
            
            if not execparams.has_key("NAME_BINDING"):
                if not interactive:
                    logging.warning("No 'NAME_BINDING' argument provided")
                execparams["NAME_BINDING"] = ""

            # Create the component
            component_Obj = componentclass(execparams["COMPONENT_IDENTIFIER"], execparams)
            componentPOA.activate_object(component_Obj)
            component_Var = component_Obj._this()

            # get the naming context and bind to it
            if execparams.has_key("NAMING_CONTEXT_IOR"):
                rootContext = orb.string_to_object(execparams['NAMING_CONTEXT_IOR'])
                if rootContext == None:
                    logging.error("Failed to lookup naming context")
                    sys.exit(-1)

                rootContext = rootContext._narrow(CosNaming.NamingContext)
                name = URI.stringToName(execparams['NAME_BINDING'])
                rootContext.rebind(name, component_Var)
            else:
                if not interactive:
                    logging.warning("Skipping name-binding because required execparams 'NAMING_CONTEXT_IOR' is missing")

            if not interactive:
                logging.trace("Starting ORB event loop")
                orb.run()
            else:
                logging.trace("Entering interactive mode")
                if callable(interactive_callback):
                    # Pass only the Var to prevent anybody from calling non-CORBA functions
                    interactive_callback(component_Obj)
                else:
                    print orb.object_to_string(component_Obj._this())
                    orb.run()
        except SystemExit:
            pass
        except KeyboardInterrupt:
            pass
        except:
            logging.exception("Unexpected Error")
    finally:
        if orb:
            orb.destroy()
Exemplo n.º 49
0
  # Fixed (x, y) grid placements of the four objects used during learning.
  objectPlacements = {
    "Object 1": (2, 3),
    "Object 2": (6, 2),
    "Object 3": (3, 7),
    "Object 4": (7, 6)
  }

  # Learn objects in egocentric space.
  exp.learnObjects(objectPlacements)

  # Infer the objects without any location input.
  filename = "logs/infer-no-location.csv"
  with open(filename, "w") as fileOut:
    print "Logging to", filename
    # Record every inference step to the CSV file while inferring.
    with trace(exp, csv.writer(fileOut)):
      exp.inferObjectsWithRandomMovements(objectPlacements)

  # Shuffle the objects. Infer them without any location input.
  filename = "logs/infer-shuffled-location.csv"
  with open(filename, "w") as fileOut:
    print "Logging to", filename
    with trace(exp, csv.writer(fileOut)):
      # Same four objects at swapped locations: inference should not
      # depend on where the objects were placed during learning.
      exp.inferObjectsWithRandomMovements({
        "Object 1": (7, 6),
        "Object 2": (2, 7),
        "Object 3": (7, 2),
        "Object 4": (3, 3)
      })

  print "Visualize these CSV files at:"
Exemplo n.º 50
0
  def _forwardPacket(self, packet, next_hop):
    """Forward *packet* from this LSR toward *next_hop*.

    Not yet implemented: currently only traces the attempt and returns
    None.
    """
    #FIXME: implement
    trace_args = (packet, self, next_hop)
    logging.trace("Forwarding packet %s at LSR %s to %s", *trace_args)
    return None
Exemplo n.º 51
0
    def make_test(f):
        """Mark *f* as a test and attach the enclosing setup/teardown hooks."""
        logging.trace("Wrapping test %s with setup and teardown", f.__name__)

        # Plain attribute assignment on a function object stores into its
        # __dict__, so this is equivalent to f.__dict__.update(...).
        f.is_test = True
        f.setup = setup
        f.teardown = teardown
        return f
Exemplo n.º 52
0
def main():
    """re6stnet daemon startup (visible portion): load configuration and
    certificates, set up logging, signals and the registry cache, then
    prepare the OpenVPN server endpoints -- via UPnP port forwarding,
    explicit --ip addresses, or none at all.
    """
    # Get arguments
    config = getConfig()
    cert = x509.Cert(config.ca, config.key, config.cert)
    config.openvpn_args += cert.openvpn_args

    if config.test:
        # NOTE(review): eval of a command-line-supplied expression; only
        # safe because --test is an operator-provided debugging option.
        sys.exit(eval(config.test, None, config.__dict__))

    # Set logging
    utils.setupLog(config.verbose, os.path.join(config.log, "re6stnet.log"))

    logging.trace("Environment: %r", os.environ)
    logging.trace("Configuration: %r", config)
    utils.makedirs(config.state)
    db_path = os.path.join(config.state, "cache.db")
    if config.ovpnlog:
        plib.ovpn_log = config.log

    # Project helper `exit.signal`: register exit codes for these signals
    # (0 = clean exit on INT/TERM, -1 on HUP/USR2) -- semantics defined
    # elsewhere in the project.
    exit.signal(0, signal.SIGINT, signal.SIGTERM)
    exit.signal(-1, signal.SIGHUP, signal.SIGUSR2)

    cache = Cache(db_path, config.registry, cert)
    network = cert.network

    # Command-line values take precedence; fall back to cached registry
    # values when unset.
    if config.client_count is None:
        config.client_count = cache.client_count
    if config.max_clients is None:
        config.max_clients = cache.max_clients

    if config.table is not None:
        logging.warning("--table option is deprecated: use --default instead")
        config.default = True
    if config.default and config.gateway:
        sys.exit("error: conflicting options --default and --gateway")

    if "none" in config.disable_proto:
        config.disable_proto = ()
    if config.default:
        # Make sure we won't tunnel over re6st.
        config.disable_proto = tuple(set(("tcp6", "udp6")).union(config.disable_proto))
    address = ()
    server_tunnels = {}
    forwarder = None
    if config.client:
        config.babel_args.append("re6stnet")
    elif config.max_clients:
        # Determine the (port, proto) pairs the OpenVPN server will listen
        # on: either from --pp, or the default 1194 udp/tcp pair minus any
        # disabled protocols.
        if config.pp:
            pp = [(int(port), proto) for port, proto in config.pp]
            for port, proto in pp:
                if proto in config.disable_proto:
                    sys.exit("error: conflicting options --disable-proto %s" " and --pp %u %s" % (proto, port, proto))
        else:
            pp = [x for x in ((1194, "udp"), (1194, "tcp")) if x[1] not in config.disable_proto]

        def ip_changed(ip):
            # Classify `ip` as IPv4/IPv6 via inet_pton and return the
            # address family plus the (ip, port, proto) tuples usable with
            # that family (all of them if the family is unknown).
            for family, proto_list in ((socket.AF_INET, ("tcp", "udp")), (socket.AF_INET6, ("tcp6", "udp6"))):
                try:
                    socket.inet_pton(family, ip)
                    break
                except socket.error:
                    pass
            else:
                family = None
            return family, [(ip, str(port), proto) for port, proto in pp if not family or proto in proto_list]

        if config.gw_list:
            gw_list = deque(config.gw_list)

            # Round-robin over the configured gateways (dest is ignored).
            def remote_gateway(dest):
                gw_list.rotate()
                return gw_list[0]

        else:
            remote_gateway = None
        if len(config.ip) > 1:
            if "upnp" in config.ip or "any" in config.ip:
                sys.exit("error: argument --ip can be given only once with" " 'any' or 'upnp' value")
            logging.info(
                "Multiple --ip passed: note that re6st does nothing to"
                " make sure that incoming paquets are replied via the correct"
                " gateway. So without manual network configuration, this can"
                " not be used to accept server connections from multiple"
                " gateways."
            )
        if "upnp" in config.ip or not config.ip:
            # UPnP path: ask the IGD to forward our ports; a failure is
            # fatal only if UPnP was explicitly requested via --ip upnp.
            logging.info("Attempting automatic configuration via UPnP...")
            try:
                from re6st.upnpigd import Forwarder

                forwarder = Forwarder("re6stnet openvpn server")
            except Exception, e:
                if config.ip:
                    raise
                logging.info("%s: assume we are not NATed", e)
            else:
                atexit.register(forwarder.clear)
                for port, proto in pp:
                    forwarder.addRule(port, proto)
                # Rebind ip_changed: the forwarder now owns external-IP
                # detection.
                ip_changed = forwarder.checkExternalIp
                address = (ip_changed(),)
        elif "any" not in config.ip:
            address = map(ip_changed, config.ip)
            ip_changed = None
        for x in pp:
            server_tunnels.setdefault("re6stnet-" + x[1], x)
Exemplo n.º 53
0
 def _process_batch(self, batch):
     """Tally one batch into stat buckets and persist them to the database."""
     buckets = self._tally_batch(batch)
     self._update_db(buckets)
     logging.trace('updated stats from batch of %s', len(batch))
Exemplo n.º 54
0
def classify_windows(snps, genos, exclude, window_size, slide, min_hom, smoothing_size):
    """Slide a window of `window_size` SNPs (advancing by `slide` SNPs) over
    `snps`, and for each sample in `genos` record its homozygosity in each
    window plus a boolean call (homozygosity >= `min_hom`).  Windows are
    grouped into Region objects; a Region is closed whenever the window
    overlaps the next interval from the `exclude` iterator, and a new
    Region starts after that excluded interval.  Returns the list of
    Region objects.

    NOTE(review): `smoothing_size` is not used in this function body --
    presumably consumed by a caller or later smoothing step; verify.
    """
    nsnp = len(snps)
    if log.is_debug(): log.debug("{0} snps".format(nsnp))

    nsam = len(genos)
    # start/end are inclusive SNP indices of the current window.
    start = 0
    end = start + window_size - 1
    excl = exclude.next()
    
    regions = []
    region = None
    start_pos = snps[0].position

    while end < nsnp:
        # Genomic coordinate range covered by the current window.
        pos = Range(snps[start].position, snps[end].position, True)
        if log.is_trace(): log.trace("Current region: {pos.start}-{pos.end}".format(pos=pos))
        
        if excl and excl.intersects(pos):
            if log.is_trace(): log.trace("Current region overlaps exclude region: {0}".format(excl))
            
            # Close the open region just before the excluded interval.
            if region:
                region.end_pos = excl.start - 1
                if log.is_trace(): log.trace(
                    "Ending region {0} with {1} windows".format(region, len(region.windows)))
                region = None

            # Skip SNPs that fall inside the excluded interval.
            # NOTE(review): `excl >= snps[start].position` compares a Range
            # to a scalar position -- relies on Range's ordering semantics;
            # and the guard uses `end <= nsnp` while advancing `start`, so
            # `snps[start]` could be probed out of bounds -- verify.
            while end <= nsnp and excl >= snps[start].position:
                start += 1
            
            # Restart windowing just past the excluded interval.
            start_pos = excl.end + 1
            end = start + window_size - 1
            excl = exclude.next()
        
        else:
            if not region:
                region = Region(nsam, start_pos)
                regions.append(region)
            
            window = region.add_window(pos)
            if log.is_trace(): log.trace("Adding window for range {0}".format(pos))
            
            # Per-sample homozygosity and call for this window.
            for i,g in enumerate(genos):
                hom = g.homozygosity(start, end)
                call = hom >= min_hom
                if log.is_trace(): log.trace(
                    "{0} markers are homozygous; call = {1}".format(hom, call))
                
                window.append(hom)
                region.append(i, call)
            
            start += slide
            end += slide

    # Close the final region at the last SNP's position.
    if region:
        region.end_pos = snps[-1].position
        if log.is_trace(): log.trace(
            "Ending region {0} with {1} windows".format(region, len(region.windows)))
    
    if log.is_debug(): log.debug("Regions: {0}".format(regions))

    return regions
Exemplo n.º 55
0
def main():
    print "Dataplane Automated Testing System, version " + __version__
    print "Copyright (c) 2015-2016, Intel Corporation. All rights reserved."
    print "Copyright (c) 2016, Viosoft Corporation. All rights reserved."
    print

    args = parse_cmdline()
    config.set_cmdline_args(args)

    read_configfile(args)
    setup_logging()

    logging.debug("Command line arguments: %s", args)

    all_tests = dats.test.get_tests(args.tests_dir)

    if args.list:
        print "Tests in directory " + args.tests_dir + ": " + ' '.join(sorted(all_tests.keys()))
        sys.exit(0)


    # SUT information
    sut_information_hw = [["Hardware"]]
    sut_inf_commands_hw = [
            ["sudo dmidecode --type system | grep 'Product Name' | cut -d: -f2 2> /dev/null", "Platform"],
            ["grep 'model name' /proc/cpuinfo | uniq | cut -d : -f 2 | cut -c 2- 2> /dev/null", "Processor"],
            ["cat /proc/cpuinfo | grep processor | wc -l", "# of cores"],
            ["printf '%d MB' \\$(free -m | grep Mem | tr -s ' ' | cut -d' ' -f2)", "RAM"],
            [config.getOption("sutDpdkDir") + "/tools/dpdk_nic_bind.py --status | grep  drv=igb_uio | cut -d\\' -f 2", "DPDK ports"],
    ]
    sut_information_sw = [["Software"]]
    sut_inf_commands_sw = [
            ["sudo dmidecode --type bios | grep 'Version' | cut -d: -f2 2> /dev/null", "BIOS version"],
            ["sudo dmidecode --type bios | grep 'Release Date' | cut -d: -f2 2> /dev/null", "BIOS release date"],
            ["sed '1!d' /etc/*-release", "OS"],
            ["uname -rm", "Kernel"],
            ["printf 'v%d.%d' "
                + "\\$(grep '#define VERSION_MAJOR' "
                + config.getOption('sutProxDir') + "/version.h "
                + "| sed 's/[^0-9]//g') "
                + "\\$(grep '#define VERSION_MINOR' "
                + config.getOption('sutProxDir') + "/version.h "
                + "| sed 's/[^0-9]//g')",
              "PROX version"],
            ["printf 'v%d.%d.%d' "
                + "\\$(grep '#define RTE_VER_MAJOR' "
                + config.getOption('sutDpdkDir') + "/lib/librte_eal/common/include/rte_version.h "
                + "| sed 's/[^0-9]//g') "
                + "\\$(grep '#define RTE_VER_MINOR' "
                + config.getOption('sutDpdkDir') + "/lib/librte_eal/common/include/rte_version.h "
                + "| sed 's/[^0-9]//g') "
                + "\\$(grep '#define RTE_VER_PATCH_LEVEL' "
                + config.getOption('sutDpdkDir') + "/lib/librte_eal/common/include/rte_version.h "
                + "| sed 's/[^0-9]//g') ",
              "DPDK version"],
            ["cat /sys/devices/system/node/node0/hugepages/hugepages-2048kB/nr_hugepages 2>/dev/null",
                "Hugepages - 2048kB"],
            ["cat /sys/devices/system/node/node0/hugepages/hugepages-1048576kB/nr_hugepages 2>/dev/null",
                "Hugepages - 1GB"],
            #["uname -a"],
            #["cat /proc/cmdline"],
            #["lspci | grep 82599 | cut -d ' ' -f1", "Niantic ports"],
            #["lspci | grep X710 | cut -d ' ' -f1", "Fortville ports"],
    ]

    logging.info("Retrieving SUT hardware description")
    execute_inf_commands(sut_inf_commands_hw, sut_information_hw)
    logging.info("Retrieving SUT software description")
    execute_inf_commands(sut_inf_commands_sw, sut_information_sw)


    ### Main program
    if not os.path.exists(args.report_dir):
        os.makedirs(args.report_dir)

    # update the parameters.lua file to use the correct CPU socket
    os.system("sed -i 's/tester_socket_id=.*/tester_socket_id=\"" + str(config.getOption('testerSocketId')) + "\"/' " \
            + args.tests_dir + "/prox-configs/parameters.lua")
    os.system("sed -i 's/sut_socket_id=.*/sut_socket_id=\"" + str(config.getOption('sutSocketId')) + "\"/' " \
            + args.tests_dir + "/prox-configs/parameters.lua")

    # Determine which tests to run. These locations are checked in order, the
    # first non-empty result is used:
    # - command line: test names specified as parameters
    # - config file: test names specified in the [general] section, key 'tests'
    # - all tests in the directory specified on the command line with -d
    # - all tests in directory tests/
    tests_to_run = args.test

    if tests_to_run is not None and len(tests_to_run) == 0:
        logging.debug("No test specified on the command line. Checking config file")
        tests_to_run = config.getOption('tests')
        if tests_to_run is not None:
            tests_to_run = tests_to_run.split(',')

    if tests_to_run is None:
        logging.debug("No test specified in the config file. Running all tests from " + args.tests_dir)
        tests_to_run = sorted(all_tests.keys())

    logging.debug("Tests to run: '%s'", "', '".join(tests_to_run))

    test_summaries = []
    for test in tests_to_run:
        if test not in all_tests.keys():
            logging.error("Test '%s' not found. Use the '-l' command line parameter, possibly with '-d' to see the list of available tests.", test)
            continue

        logging.info("Loading test suite %s", test)

        # Load test script directly from disk
        test_module = imp.load_source(test, all_tests[test])

        # Get all classes defined in test_module. Filter out imported classes and
        # classes that are not derived from DATSTest.
        test_classes = [c[1] for c in inspect.getmembers(test_module, inspect.isclass)]
        test_classes = [c for c in test_classes if c.__module__ == test_module.__name__]
        test_classes = [c for c in test_classes if issubclass(c, TestBase)]

        for test_class in test_classes:
            test = test_class()
            logging.info("Running test %s - %s",
                    test.__class__.__name__, test.short_descr())

            test_results = None
            try:
                test.setup_class()
                test_results = test.run_all_tests()
                test.teardown_class()
                logging.trace('Test results: %s', test_results)
                test_summaries.append(dict(test=test, results=test_results))
            except KeyboardInterrupt:
                logging.error("Test run interrupted by keyboard. Generating partial report.")
                test_summaries.append(dict(test=test, results=Exception('Test run interrupted by user')))
                break
            except IOError, ex:
                logging.error("I/O error ({0}): {1}: {2}".format(ex.errno, ex.filename, ex.strerror))
                test_results = ex
            except Exception, ex:
                logging.error(ex)
                logging.debug("Exception: %s", traceback.format_exc())