Example 1
import codecs

import numpy as np


# get_random_bytes is a helper defined alongside this function (not shown).
def get_random_ascii(n, seed=42):
    """
    Get a random ASCII-only unicode string of size *n*.
    """
    arr = np.frombuffer(get_random_bytes(n, seed=seed), dtype=np.int8) & 0x7f
    result, _ = codecs.ascii_decode(arr)
    assert isinstance(result, str)
    assert len(result) == n
    return result
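A quick aside on the call above: codecs.ascii_decode returns a (text, bytes_consumed) pair, and the & 0x7f mask is what guarantees every byte is in the ASCII range 0-127. A minimal, self-contained sketch of that contract:

import codecs

text, consumed = codecs.ascii_decode(b"hello")
assert text == "hello" and consumed == 5
# Any byte >= 0x80 would raise UnicodeDecodeError under the default
# 'strict' error handler, which is why the snippet masks with 0x7f.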
Example 2
def get_random_ascii(n, *, seed=42):
    """
    Get a random ASCII-only unicode string of size *n*.
    """
    arr = np.frombuffer(get_random_bytes(n, seed=seed), dtype=np.int8) & 0x7f
    result, _ = codecs.ascii_decode(arr)
    assert isinstance(result, str)
    assert len(result) == n
    return result
Example 3
def sort_by_maintainer(db):
    maints = {}
    maint_ids = {}
    for pkg in db.keys():
        maint = None
        pkginfo = db[pkg]
        # start with the blends info
        if 'blends' in pkginfo and 'responsible' in pkginfo['blends']:
            maint = pkginfo['blends']['responsible']
        if 'main' not in db[pkg] and maint is None:
            # no info
            continue
        info = db[pkg]['main']
        if 'maintainer' not in info and maint is None:
            # no maintainer info
            continue
        if info.get('original_maintainer') is not None:
            maint = info['original_maintainer']
        elif info.get('maintainer') is not None:
            maint = info['maintainer']
        if maint is None:
            # no sane maintainer info
            continue
        # safeguard: <> confuses sphinx and we don't care about different emails
        maint = maint[:maint.find('<')].strip()
        # kick out non-ascii ones (should not be, but too tired to find the bug)
        try:
            # Python 2: decoding a unicode string implicitly encodes it to
            # ASCII first, hence UnicodeEncodeError rather than a decode error.
            codecs.ascii_decode(maint)
        except UnicodeEncodeError:
            continue
        if maint.lower() not in maints:
            maints[maint.lower()] = []
            maint_ids[maint.lower()] = [maint]
        else:
            maint_ids[maint.lower()].append(maint)
        maints[maint.lower()].append(pkg)
    # remove duplicates
    out = {}
    for m in maints:
        out[maint_ids[m][0]] = np.unique(maints[m])
    return out
Example 5
def hash_from_fields(test_case):
    """Compute a string hash from any acyclic, JSON-ic :class:`dict`
    
    :param dict test_case: test case data to be hashed
    :returns: a repeatable hash of *test_case*
    :rtype: str
    
    The hash is computed by encoding *test_case* in ASN1 DER (see
    :const:`.json_asn1.types.ASN1_SOURCE` for the ASN1 syntax of the data
    format), then hashing with SHA-256, and finally Base64 encoding to get the
    result.
    
    Note that this function hashes **all** key/value pairs of *test_case*.
    """
    key = test_case if isinstance(test_case, dict) else dict(test_case)
    key = asn1_der(key)
    key = hashlib.sha256(key).digest()
    key = ascii_decode(b64encode(key))[0]
    return key
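The serialize/SHA-256/Base64 pipeline described in the docstring is easy to illustrate. The sketch below is a stand-in under stated assumptions: it substitutes canonical JSON for the project's asn1_der encoder (not shown here), so the hashes will differ from the real function's, but the repeatability argument is the same:

import hashlib
import json
from base64 import b64encode
from codecs import ascii_decode

def hash_from_fields_sketch(test_case):
    # json.dumps with sort_keys=True is a deterministic stand-in for
    # the ASN1 DER encoding used by the real function (assumption).
    blob = json.dumps(test_case, sort_keys=True).encode("utf-8")
    digest = hashlib.sha256(blob).digest()
    # Base64 output is pure ASCII, so ascii_decode cannot fail here.
    return ascii_decode(b64encode(digest))[0]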
Example 6
    def test_codecs_builtins(self):
        s = "abc"

        encoded = codecs.utf_8_encode(s)
        self.assertEqual(s, codecs.utf_8_decode(encoded[0])[0])

        encoded = codecs.utf_7_encode(s)
        self.assertEqual(s, codecs.utf_7_decode(encoded[0])[0])

        encoded = codecs.utf_16_encode(s)
        self.assertEqual(s, codecs.utf_16_decode(encoded[0])[0])

        encoded = codecs.utf_16_le_encode(s)
        self.assertEqual(s, codecs.utf_16_le_decode(encoded[0])[0])

        encoded = codecs.utf_16_be_encode(s)
        self.assertEqual(s, codecs.utf_16_be_decode(encoded[0])[0])

        encoded = codecs.utf_32_encode(s)
        self.assertEqual(s, codecs.utf_32_decode(encoded[0])[0])

        encoded = codecs.utf_32_le_encode(s)
        self.assertEqual(s, codecs.utf_32_le_decode(encoded[0])[0])

        encoded = codecs.utf_32_be_encode(s)
        self.assertEqual(s, codecs.utf_32_be_decode(encoded[0])[0])

        encoded = codecs.raw_unicode_escape_encode(s)
        self.assertEqual(s, codecs.raw_unicode_escape_decode(encoded[0])[0])

        encoded = codecs.unicode_escape_encode(s)
        self.assertEqual(s, codecs.unicode_escape_decode(encoded[0])[0])

        encoded = codecs.latin_1_encode(s)
        self.assertEqual(s, codecs.latin_1_decode(encoded[0])[0])

        encoded = codecs.ascii_encode(s)
        self.assertEqual(s, codecs.ascii_decode(encoded[0])[0])
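Every codecs.<codec>_encode/_decode helper exercised above follows the same contract: a (result, length) pair, where length counts the items consumed from the input. A small sketch, assuming nothing beyond the standard library:

import codecs

encoded, nchars = codecs.utf_16_encode("abc")    # nchars == 3 characters consumed
decoded, nbytes = codecs.utf_16_decode(encoded)  # nbytes == 8 bytes (BOM + 3 code units)
assert decoded == "abc"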
Example 7
 def decode(self, input, final=False):
     return codecs.ascii_decode(input, self.errors)[0]
Example 8
def doit(arglist):
    ud.init('/var/log/univention/directory-manager-cmd.log', 1, 1)
    out = []
    configRegistry = univention.config_registry.ConfigRegistry()
    configRegistry.load()
    op = 'add'
    scope = 'user'
    cmd = os.path.basename(arglist[0])
    if cmd == 'univention-addgroup':
        scope = 'group'
        op = 'add'
    elif cmd == 'univention-deluser':
        scope = 'user'
        op = 'del'
    elif cmd == 'univention-delgroup':
        scope = 'group'
        op = 'del'
    elif cmd == 'univention-addmachine':
        scope = 'machine'
        op = 'add'
    elif cmd == 'univention-delmachine':
        scope = 'machine'
        op = 'del'
    elif cmd == 'univention-setprimarygroup':
        scope = 'user'
        op = 'primarygroup'

    opts, args = getopt.getopt(arglist[1:], '', ['status-fd=', 'status-fifo='])

    co = None
    try:
        lo, position = univention.admin.uldap.getAdminConnection()
    except Exception as e:
        ud.debug(ud.ADMIN, ud.WARN, 'authentication error: %s' % str(e))
        try:
            lo, position = univention.admin.uldap.getMachineConnection()
        except Exception as e2:
            ud.debug(ud.ADMIN, ud.WARN, 'authentication error: %s' % str(e2))
            out.append('authentication error: %s' % str(e))
            out.append('authentication error: %s' % str(e2))
            return out

    for i in range(len(args)):
        try:
            args[i] = codecs.utf_8_decode(args[i])[0]
        except UnicodeDecodeError:
            args[i] = codecs.latin_1_decode(args[i])[0]

    univention.admin.modules.update()

    if len(args) == 1:
        if scope == 'machine':
            machine = args[0]
            if machine[-1] == '$':
                machine = machine[0:-1]
            if configRegistry.get('samba/defaultcontainer/computer'):
                position.setDn(
                    configRegistry['samba/defaultcontainer/computer'])
            else:
                position.setDn(
                    univention.admin.config.getDefaultContainer(
                        lo, 'computers/windows'))
        elif scope == 'group':
            group = args[0]
            if configRegistry.get('samba/defaultcontainer/group'):
                position.setDn(configRegistry['samba/defaultcontainer/group'])
            else:
                position.setDn(
                    univention.admin.config.getDefaultContainer(
                        lo, 'groups/group'))
        else:
            user = args[0]
            if configRegistry.get('samba/defaultcontainer/user'):
                position.setDn(configRegistry['samba/defaultcontainer/user'])
            else:
                position.setDn(
                    univention.admin.config.getDefaultContainer(
                        lo, 'users/user'))
        action = op + scope

    elif len(args) == 2:
        user, group = args
        if op == 'del':
            action = 'deluserfromgroup'
        elif op == 'primarygroup':
            action = 'setprimarygroup'
        else:
            action = 'addusertogroup'
    else:
        return out

    if action == 'adduser':
        out.append(status('Adding user %s' % codecs.utf_8_encode(user)[0]))
        object = univention.admin.handlers.users.user.object(co,
                                                             lo,
                                                             position=position)
        object.open()
        object['username'] = user
        try:
            object['lastname'] = codecs.ascii_decode(user)[0]
        except UnicodeEncodeError:
            object['lastname'] = 'unknown'
        a, b = os.popen2('/usr/bin/makepasswd --minchars=8')
        line = b.readline()
        if line[-1] == '\n':
            line = line[0:-1]
        object['password'] = line
        object['primaryGroup'] = univention.admin.config.getDefaultValue(
            lo, 'group')
        object.create()
        nscd_invalidate('passwd')

    elif action == 'deluser':
        out.append(status('Removing user %s' % codecs.utf_8_encode(user)[0]))
        object = univention.admin.modules.lookup(
            univention.admin.handlers.users.user,
            co,
            lo,
            scope='domain',
            base=position.getDomain(),
            filter='(username=%s)' % user,
            required=True,
            unique=True)[0]
        object.remove()
        nscd_invalidate('passwd')

    elif action == 'addgroup':
        out.append(status('Adding group %s' % codecs.utf_8_encode(group)[0]))
        object = univention.admin.handlers.groups.group.object(
            co, lo, position=position)
        object.options = ['posix']
        object['name'] = group
        object.create()
        nscd_invalidate('group')

    elif action == 'delgroup':
        out.append(status('Removing group %s' % codecs.utf_8_encode(group)[0]))
        object = univention.admin.modules.lookup(
            univention.admin.handlers.groups.group,
            co,
            lo,
            scope='domain',
            base=position.getDomain(),
            filter='(name=%s)' % group,
            required=True,
            unique=True)[0]
        object.remove()
        nscd_invalidate('group')

    elif action == 'addusertogroup':
        ucr_key_samba_bdc_udm_cli_addusertogroup_filter_group = 'samba/addusertogroup/filter/group'
        if configRegistry.get(
                ucr_key_samba_bdc_udm_cli_addusertogroup_filter_group):
            if group in configRegistry[
                    ucr_key_samba_bdc_udm_cli_addusertogroup_filter_group].split(
                        ','):
                out.append(
                    status('addusertogroup: filter protects group "%s"' %
                           (codecs.utf_8_encode(group)[0])))
                return out
        out.append(
            status(
                'Adding user %s to group %s' %
                (codecs.utf_8_encode(user)[0], codecs.utf_8_encode(group)[0])))
        groupobject = univention.admin.modules.lookup(
            univention.admin.handlers.groups.group,
            co,
            lo,
            scope='domain',
            base=position.getDn(),
            filter='(name=%s)' % group,
            required=True,
            unique=True)[0]
        userobject = get_user_object(user, position, lo, co)
        if isinstance(userobject, types.StringType):
            out.append(userobject)
            return out

        if userobject.dn not in groupobject['users']:
            if groupobject['users'] == [''] or groupobject['users'] == []:
                groupobject['users'] = [userobject.dn]
            else:
                groupobject['users'].append(userobject.dn)
            groupobject.modify()
            nscd_invalidate('group')

    elif action == 'deluserfromgroup':
        out.append(
            status(
                'Removing user %s from group %s' %
                (codecs.utf_8_encode(user)[0], codecs.utf_8_encode(group)[0])))
        groupobject = univention.admin.modules.lookup(
            univention.admin.handlers.groups.group,
            co,
            lo,
            scope='domain',
            base=position.getDn(),
            filter='(name=%s)' % group,
            required=True,
            unique=True)[0]

        userobject = get_user_object(user, position, lo, co)
        if isinstance(userobject, types.StringType):
            out.append(userobject)
            return out

        userobject.open()
        if userobject.dn in groupobject[
                'users'] and not userobject['primaryGroup'] == groupobject.dn:
            groupobject['users'].remove(userobject.dn)
            groupobject.modify()
            nscd_invalidate('group')

    elif action == 'addmachine':
        out.append(
            status('Adding machine %s' % codecs.utf_8_encode(machine)[0]))
        object = univention.admin.handlers.computers.windows.object(
            co, lo, position=position)
        univention.admin.objects.open(object)
        object.options = ['posix']
        object['name'] = machine
        object['primaryGroup'] = univention.admin.config.getDefaultValue(
            lo, 'computerGroup')
        object.create()
        nscd_invalidate('hosts')
        nscd_invalidate('passwd')

    elif action == 'delmachine':
        out.append(
            status('Removing machine %s' % codecs.utf_8_encode(machine)[0]))
        object = univention.admin.modules.lookup(
            univention.admin.handlers.computers.windows,
            co,
            lo,
            scope='domain',
            base=position.getDomain(),
            filter='(name=%s)' % machine,
            required=True,
            unique=True)[0]
        object.remove()
        nscd_invalidate('hosts')

    elif action == 'setprimarygroup':
        out.append(
            status(
                'Set primary group %s for user %s' %
                (codecs.utf_8_encode(group)[0], codecs.utf_8_encode(user)[0])))
        try:
            groupobject = univention.admin.modules.lookup(
                univention.admin.handlers.groups.group,
                co,
                lo,
                scope='domain',
                base=position.getDn(),
                filter='(name=%s)' % group,
                required=True,
                unique=True)[0]
        except Exception:
            out.append('ERROR: group not found, nothing modified')
            return out

        userobject = get_user_object(user, position, lo, co)
        if isinstance(userobject, types.StringType):
            out.append(userobject)
            return out

        if hasattr(userobject, 'options'):
            if 'samba' in userobject.options:
                userobject.options.remove('samba')
        userobject.open()

        if userobject.has_property('primaryGroup'):
            userobject['primaryGroup'] = groupobject.dn
        elif userobject.has_property('machineAccountGroup'):
            userobject['machineAccountGroup'] = groupobject.dn
        else:
            out.append('ERROR: unknown group attribute, nothing modified')
            return out

        userobject.modify()

        if userobject.dn not in groupobject['users']:
            groupobject['users'].append(userobject.dn)
            groupobject.modify()

        nscd_invalidate('group')
        nscd_invalidate('passwd')
    return out
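One reusable idea buried in doit is its argument-decoding loop: try UTF-8 first and fall back to Latin-1, which accepts any byte sequence. A standalone sketch of that pattern (the helper name is mine):

import codecs

def decode_arg(raw):
    # UTF-8 first; Latin-1 maps every byte to a code point, so the
    # fallback never raises.
    try:
        return codecs.utf_8_decode(raw)[0]
    except UnicodeDecodeError:
        return codecs.latin_1_decode(raw)[0]

assert decode_arg(b"caf\xc3\xa9") == "café"   # valid UTF-8
assert decode_arg(b"caf\xe9") == "café"       # Latin-1 fallback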
Example 10
 def test_ascii_decode(self):
     # sanity
     new_str, num_processed = codecs.ascii_decode(b"abc")
     self.assertEqual(new_str, 'abc')
     self.assertEqual(num_processed, 3)
Example 11
def decode(input, errors='strict'):
#    warnings.warn("Implicit conversion of str to unicode", UnicodeWarning, 2)
    return codecs.ascii_decode(input, errors)
Example 12
 def decode(self, input, final=False):
     if _allowed_coercion(input):
         return codecs.ascii_decode(input, self.errors)[0]
     raise UnicodeError("encoding coercion blocked")
Example 13
import base64
import codecs
import datetime
import random
import struct


# mint_hash_for_cash is defined elsewhere in the same module (not shown).
def hashcash_mint(resource, nbits=None, stime=None):
    """
    Mints and returns a hashcash stamp with parameters specified.

    Args:
        resource: Resource string (e.g. IP address, email address).
        nbits:    Number of leading zero bits the stamp must have,
                  default: 20 bits.
        stime:    Override stamp time, system time by default in
                  YYMMDDhhmmss format, supported formats: YYMMDD,
                  YYMMDDhhmm, YYMMDDhhmmss

    Returns:
        Hashcash stamp string.

    Raises:
        ValueError: Raises a value error for invalid parameters.
    """

    # Python's global interpreter lock (GIL) allows only one
    # thread to execute at any given time. Since minting
    # is CPU intensive, it does not make sense to make this
    # a parallel function. The 'multiprocessing' route
    # isn't really feasible either due to high sync costs.

    if nbits is None:
        # The default is 20 bits.
        nbits = 20

    if stime is None:
        date_time = datetime.datetime.today().strftime('%y%m%d%H%M%S')
    else:
        # Valid date formats are: YYMMDD, YYMMDDhhmm, YYMMDDhhmmss
        try:
            datetime.datetime.strptime(stime, '%y%m%d%H%M%S')
        except ValueError:
            try:
                datetime.datetime.strptime(stime, '%y%m%d%H%M')
            except ValueError:
                datetime.datetime.strptime(stime, '%y%m%d')
        date_time = stime

    # Stamp format is ver:bits:date:resource:[ext]:rand:counter
    hashcash_header = ('1:' + str(nbits) + ':' +
                       date_time + ':' + resource + '::')

    # Base64 encoding has 4:3 overhead, so a 96-bit random
    # value is required to get 16 base64 characters.
    random_bits = random.getrandbits(96)
    random_bytes = struct.pack('>QL',
                               random_bits & 0xFFFFFFFFFFFFFFFF,
                               random_bits >> 64)

    # Allowed characters are from alphabet a-zA-Z0-9+/=
    # Base64 encode and strip padding to reduce noise.
    # Padding can be restored later with something like:
    # b64padded = b64 + '=' * (-len(b64) % 4)
    random_string_b64 = base64.b64encode(random_bytes)
    random_string = codecs.ascii_decode(random_string_b64)

    hashcash_header = (hashcash_header +
                       random_string[0].rstrip('=') + ':')

    return mint_hash_for_cash(nbits, hashcash_header)
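For the reverse direction, hashcash stamps are conventionally checked by hashing the whole stamp with SHA-1 and counting leading zero bits. mint_hash_for_cash is not shown above, so the verifier below is a sketch under that assumption rather than the module's actual counterpart:

import hashlib

def check_stamp(stamp, nbits=20):
    # SHA-1 the full stamp and test that the top nbits of the
    # 160-bit digest are zero (the usual hashcash convention).
    digest = hashlib.sha1(stamp.encode("ascii")).digest()
    return int.from_bytes(digest, "big") >> (160 - nbits) == 0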
Example 14
 def process_bind_param(self, value, dialect):
     if isinstance(value, (unicode, str)):
         return codecs.ascii_decode(value.encode('utf8'), 'ignore')[0]
         #return b(value).decode(ESCAPE, 'replace')
     return value
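For context, process_bind_param is the SQLAlchemy TypeDecorator hook that rewrites values on their way into the database. A hedged sketch of the kind of class this method probably lives in (the class name and impl choice are my assumptions, and the body is modernized to Python 3):

import codecs

from sqlalchemy.types import String, TypeDecorator

class AsciiOnlyString(TypeDecorator):
    # Hypothetical wrapper: squash bound values down to ASCII before
    # they reach the DB-API driver.
    impl = String

    def process_bind_param(self, value, dialect):
        if isinstance(value, str):
            return codecs.ascii_decode(value.encode('utf8'), 'ignore')[0]
        return value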
Example 15
def transient_analysis_subsystem(inputfiles,
                                 reductiontype,
                                 filter_,
                                 offsetsfile,
                                 expect_missing_catalog=False,
                                 install_catalog_as_ref=False,
                                 dimmconfig=None,
                                 is_kevin=False,
                                 mask_suffix=None):
    """
    Take in a list of input files from one subsystem of a single observation
    and the reduction type (e.g. 'R1', 'R2' etc).

    Returns a list of output files.

    If an offsetsfile is not given then an initial reduction will be done
    to determine the offsets.  In this case the newly created offsetsfile
    will be the first file returned.
    """

    # Get source, utdate, obsnum and filter.
    logger.debug('Reading header from file "%s"', inputfiles[0])
    header = fits.Header.fromstring(''.join(
        ascii_decode(x)[0] for x in Ndf(inputfiles[0]).head['FITS']))
    raw_source = header['OBJECT']
    source = safe_object_name(raw_source)
    date = header['UTDATE']
    obsnum = header['OBSNUM']
    if filter_ != header['FILTER']:
        raise Exception('Unexpected value of FILTER header')

    is_gbs = False
    survey_code = None
    project = header['PROJECT']
    if ((project == 'M16AL001') or (project == 'M20AL007')
            or re.match(r'M\d\d[AB]EC30', project)):
        field_name = source
    elif project.startswith('MJLSG'):
        is_gbs = True
        field_name = gbs_field_name(raw_source)
        survey_code = 'G'
    elif project in ('M17BP054', 'M18AP017'):
        field_name = source
        survey_code = 'H'
    else:
        raise Exception('Unexpected project value "{}"'.format(project))

    logger.info(
        'Performing %sum %s reduction for %s on %s (observation %i, %s)',
        filter_, reductiontype, source, date, obsnum,
        ('GBS' if is_gbs else 'transient'))

    # Set wavelength-dependent parameters up.
    prepare_kwargs = get_prepare_parameters(filter_)
    match_kwargs = get_match_parameters(filter_)

    # Get dimmconfig, reference and masks.
    logger.debug('Identifying dimmconfig, mask and reference files')
    if dimmconfig is None:
        dimmconfig = dimmconfigdict[reductiontype]
    dimmconfig = os.path.expandvars(dimmconfig)
    if not os.path.exists(dimmconfig):
        raise Exception('Dimmconfig file "{}" not found'.format(dimmconfig))

    mask_reductiontype = maskdict.get(reductiontype)
    if mask_reductiontype is not None:
        # Use the appropriate mask as the reference image.
        reference = get_filename_mask(source,
                                      filter_,
                                      mask_reductiontype,
                                      is_gbs,
                                      suffix=mask_suffix)
        if not os.path.exists(reference):
            raise Exception('Mask file "{}" not found'.format(reference))
    else:
        # Use the general reference image as we don't need a mask.
        reference = get_filename_reference(field_name, filter_)
        if not os.path.exists(reference):
            raise Exception('Reference file "{}" not found'.format(reference))
    reference = shutil.copy(reference, '.')

    logger.debug('Checking configuration file "%s" exists', param_file)
    if not os.path.exists(param_file):
        raise Exception('Configuration file "{}" not found'.format(param_file))
    param_file_copy = shutil.copy(param_file, '.')

    # Create list of input files
    filelist = tempfile.NamedTemporaryFile(mode='w',
                                           prefix='tmpList',
                                           delete=False)
    filelist.file.writelines([i + '\n' for i in inputfiles])
    filelist.file.close()

    # Prepare environment.
    os.environ['SMURF_THREADS'] = '16'
    os.environ['ADAM_EXIT'] = '1'

    output_files = []

    if offsetsfile is None:
        # Identify reference catalog.
        refcat = get_filename_ref_cat(field_name, filter_, reductiontype)
        if os.path.exists(refcat):
            # If it already exists, don't try to install a new one.
            install_catalog_as_ref = False
            refcat = shutil.copy(refcat, '.')
        elif not install_catalog_as_ref:
            # Raise an exception only if we're not making a new ref catalog.
            raise Exception('Reference catalog "{}" not found'.format(refcat))

        # Create output file name.
        out = get_filename_output(source, date, obsnum, filter_, reductiontype,
                                  False, survey_code, is_kevin)

        # run makemap
        logger.debug('Running MAKEMAP, output: "%s"', out)
        sys.stderr.flush()
        subprocess.check_call([
            os.path.expandvars('$SMURF_DIR/makemap'),
            'in=^{}'.format(filelist.name),
            'config=^{}'.format(dimmconfig),
            'out={}'.format(out),
            'ref={}'.format(reference),
            'msg_filter=normal',
        ],
                              shell=False,
                              stdout=sys.stderr)

        if not os.path.exists(out):
            raise Exception('MAKEMAP did not generate output "{}"'.format(out))

        # Prepare the image (smoothing etc) by running J. Lane's
        # prepare image routine.
        logger.debug('Preparing image')
        prepare_image(out, **prepare_kwargs)
        prepared_file = out[:-4] + '_crop_smooth_jypbm.sdf'

        # Identify the sources by running J. Lane's run_gaussclumps routine.
        logger.debug('Running CUPID')
        run_gaussclumps(prepared_file, param_file_copy)
        sourcecatalog = prepared_file[:-4] + '_log.FIT'

        if not os.path.exists(sourcecatalog):
            raise Exception(
                'CUPID did not generate catalog "{}"'.format(sourcecatalog))

        if install_catalog_as_ref:
            # Install source catalog as reference.
            shutil.copyfile(sourcecatalog, refcat)

            # Use as the reference in this run.
            refcat = sourcecatalog

        # Calculate offsets with J. Lane's source_match
        logger.debug('Performing source match')
        results = source_match(sourcecatalog, refcat, **match_kwargs)
        xoffset = results[0][0]
        yoffset = results[0][1]

        if (xoffset is None) or (yoffset is None):
            raise Exception('Pointing offsets not found')

        # Create the pointing offset file.
        offsetsfile = out[:-4] + '_offset.txt'
        create_pointing_offsets(offsetsfile,
                                xoffset,
                                yoffset,
                                system='TRACKING')

        # Apply FCF calibration.
        out_cal = out[:-4] + '_cal.sdf'
        logger.debug('Calibrating file "%s" (making "%s")', out, out_cal)
        sys.stderr.flush()
        subprocess.check_call([
            os.path.expandvars('$KAPPA_DIR/cmult'),
            'in={}'.format(out),
            'out={}'.format(out_cal),
            'scalar={}'.format(get_fcf_arcsec(filter_) * 1000.0),
        ],
                              shell=False,
                              stdout=sys.stderr)
        subprocess.check_call([
            os.path.expandvars('$KAPPA_DIR/setunits'),
            'ndf={}'.format(out_cal),
            'units=mJy/arcsec**2',
        ],
                              shell=False,
                              stdout=sys.stderr)

        output_files.extend([offsetsfile, out_cal, sourcecatalog])

        output_files.extend(create_png_previews(out))

    elif not offsetsfile.endswith('_offset.txt'):
        raise Exception(
            'File "{}" does not look like an offsets file'.format(offsetsfile))

    # Re-reduce the map with the pointing offset.
    out = get_filename_output(source, date, obsnum, filter_, reductiontype,
                              True, survey_code, is_kevin)

    logger.debug('Running MAKEMAP, output: "%s"', out)
    sys.stderr.flush()
    subprocess.check_call([
        os.path.expandvars('$SMURF_DIR/makemap'),
        'in=^{}'.format(filelist.name),
        'config=^{}'.format(dimmconfig),
        'out={}'.format(out),
        'ref={}'.format(reference),
        'pointing={}'.format(offsetsfile),
        'msg_filter=normal',
    ],
                          shell=False,
                          stdout=sys.stderr)

    if not os.path.exists(out):
        raise Exception('MAKEMAP did not generate output "{}"'.format(out))

    # Re-run J. Lane's smoothing and gaussclumps routine.
    logger.debug('Preparing image')
    prepare_image(out, **prepare_kwargs)
    prepared_file = out[:-4] + '_crop_smooth_jypbm.sdf'

    # Identify the sources by running J. Lane's run_gaussclumps routine.
    logger.debug('Running CUPID')
    run_gaussclumps(prepared_file, param_file_copy)
    sourcecatalog = prepared_file[:-4] + '_log.FIT'

    if os.path.exists(sourcecatalog):
        if expect_missing_catalog:
            raise Exception('CUPID unexpectedly generated catalog "{}"'.format(
                sourcecatalog))
        output_files.append(sourcecatalog)

    elif not expect_missing_catalog:
        raise Exception(
            'CUPID did not generate catalog "{}"'.format(sourcecatalog))

    # Apply FCF calibration.
    out_cal = out[:-4] + '_cal.sdf'
    logger.debug('Calibrating file "%s" (making "%s")', out, out_cal)
    sys.stderr.flush()
    subprocess.check_call([
        os.path.expandvars('$KAPPA_DIR/cmult'),
        'in={}'.format(out),
        'out={}'.format(out_cal),
        'scalar={}'.format(get_fcf_arcsec(filter_) * 1000.0),
    ],
                          shell=False,
                          stdout=sys.stderr)
    subprocess.check_call([
        os.path.expandvars('$KAPPA_DIR/setunits'),
        'ndf={}'.format(out_cal),
        'units=mJy/arcsec**2',
    ],
                          shell=False,
                          stdout=sys.stderr)

    output_files.append(out_cal)

    output_files.extend(create_png_previews(out))

    return output_files
Example 16
			action='deluserfromgroup'
		elif op == 'primarygroup':
			action='setprimarygroup'
		else:
			action='addusertogroup'
	else:
		return out	
	
	if action == 'adduser':
		out.append(status('Adding user %s' % codecs.utf_8_encode(user)[0]))
		object=univention.admin.handlers.users.user.object(co, lo, position=position)
		object.options=['posix', 'person', 'mail']
		object.open()
		object['username']=user
		try:
			object['lastname']=codecs.ascii_decode(user)[0]
		except UnicodeEncodeError:
			object['lastname']='unknown'
		a,b=os.popen2('/usr/bin/makepasswd --minchars=8')
		line=b.readline()
		if line[-1] == '\n':
			line=line[0:-1]
		object['password']=line
		object['primaryGroup']=univention.admin.config.getDefaultValue(lo, 'group')
		object.create()
		nscd_invalidate('passwd')
		
	elif action == 'deluser':
		out.append(status('Removing user %s' % codecs.utf_8_encode(user)[0]))
		object=univention.admin.modules.lookup(univention.admin.handlers.users.user, co, lo, scope='domain', base=position.getDomain(), filter='(username=%s)' % user, required=1, unique=1)[0]
		object.remove()
Example 17
def warning_decode(input, errors='strict'):
    warnings.warn(get_warning(sys._getframe(1), target=unicode),
                  stacklevel=2)
    return codecs.ascii_decode(input, errors)
Example 18
def decrypt(text):
    # Note: ascii_decode returns a (text, length) tuple, not just the string.
    return codecs.ascii_decode(text)
Example 19
 def to_py(c_val):
     s, _ = ascii_decode(c_val)
     return s
Example 20
	def decode(input, errors='strict'):
		if _deferred_codec.other or b'\x1B' in input:
			_deferred_codec.other = _deferred_codec.other or codecs.lookup(other)
			return _deferred_codec.other.decode(input, errors)
		else:
			return codecs.ascii_decode(input, errors)
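To actually use a decode function like this one, it has to be wrapped in a codecs.CodecInfo and registered via a search function. A minimal sketch, with a made-up codec name and assuming a module-level decode callable like the one above is in scope:

import codecs

def _search(name):
    # Hypothetical registration hook for the deferred codec above.
    if name == 'ascii-or-other':
        return codecs.CodecInfo(name='ascii-or-other',
                                encode=codecs.ascii_encode,
                                decode=decode)
    return None

codecs.register(_search)
# After registration: b'abc'.decode('ascii-or-other')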
Example 21
 def test_ascii_decode(self):
     # sanity
     new_str, size = codecs.ascii_decode("abc")
     self.assertEqual(new_str, u'abc')
     self.assertEqual(size, 3)
Example 22
def get_random_ascii(n):
    arr = np.frombuffer(get_random_bytes(n), dtype=np.int8) & 0x7f
    result, _ = codecs.ascii_decode(arr)
    assert isinstance(result, str)
    assert len(result) == n
    return result
Example 23
 def decode(self, input, errors='strict'):
     if _allowed_coercion(input):
         return codecs.ascii_decode(input, errors)
     raise UnicodeError("encoding coercion blocked")
Example 25
 def update_event(self, inp=-1):
     self.set_output_val(0, codecs.ascii_decode(self.input(0),
                                                self.input(1)))
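A closing note on the errors argument threaded through most of these examples: it selects one of the standard error handlers, and with codecs.ascii_decode the effect is easy to see directly:

import codecs

raw = b'caf\xe9'
codecs.ascii_decode(raw, 'ignore')    # ('caf', 4) - offending byte dropped
codecs.ascii_decode(raw, 'replace')   # ('caf\ufffd', 4) - U+FFFD substituted
# The default 'strict' handler raises UnicodeDecodeError instead.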