Example No. 1
    def create_main_tub(self):
        certfile = os.path.join(self.basedir, "private", self.CERTFILE)
        self.tub = self._create_tub(certFile=certfile)

        self.nodeid = b32decode(self.tub.tubID.upper()) # binary format
        self.write_config("my_nodeid", b32encode(self.nodeid).lower() + "\n")
        self.short_nodeid = b32encode(self.nodeid).lower()[:8] # for printing
        cfg_tubport = self.get_config("node", "tub.port", None)
        cfg_location = self.get_config("node", "tub.location", None)
        portlocation = self.get_tub_portlocation(cfg_tubport, cfg_location)
        if portlocation:
            tubport, location = portlocation
            for port in tubport.split(","):
                if port in ("0", "tcp:0"):
                    raise ValueError("tub.port cannot be 0: you must choose")
                self.tub.listenOn(port)
            self.tub.setLocation(location)
            self._tub_is_listening = True
            self.log("Tub location set to %s" % (location,))
            # the Tub is now ready for tub.registerReference()
        else:
            self._tub_is_listening = False
            self.log("Tub is not listening")

        self.tub.setServiceParent(self)
Example No. 2
def b32():
	if sys.argv[2] == 'e':
		print base64.b32encode(sys.argv[3])
	elif sys.argv[2] == 'd':
		print base64.b32decode(sys.argv[3])
	else:
			usage()
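
This helper targets Python 2 (print statements, str arguments). A minimal Python 3 sketch of the same encode/decode switch, assuming the same sys.argv layout and a usage() helper as in the original, has to convert between str and bytes explicitly:

import base64
import sys

def b32():
    if sys.argv[2] == 'e':
        # b32encode operates on bytes, so encode the argument first
        print(base64.b32encode(sys.argv[3].encode('utf-8')).decode('ascii'))
    elif sys.argv[2] == 'd':
        print(base64.b32decode(sys.argv[3]).decode('utf-8'))
    else:
        usage()  # assumed to exist, as in the original snippet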
Example No. 3
def get_service_id(private_key_file=None, cert=None):
    '''
    service_id is the first half of the sha1 of the rsa public key encoded in base32
    '''
    if private_key_file:
        with open(private_key_file, 'rb') as fd:
            private_key = fd.read()
        public_key = RSA.importKey(private_key).publickey().exportKey('DER')[22:]
        # compute sha1 of public key and encode first half in base32
        service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode()
        '''
        # compute public key from private key and export in DER format
        # ignoring the SPKI header(22 bytes)
        key = load_privatekey(FILETYPE_PEM, private_key)
        cert = X509()
        cert.set_pubkey(key)
        public_key = dump_privatekey(FILETYPE_ASN1, cert.get_pubkey())[22:]
        # compute sha1 of public key and encode first half in base32
        service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode()
        '''
    elif cert:
        # compute sha1 of public key and encode first half in base32
        key = load_certificate(FILETYPE_ASN1, cert).get_pubkey()
        pub_der = DerSequence()
        pub_der.decode(dump_privatekey(FILETYPE_ASN1, key))
        public_key = RSA.construct((pub_der._seq[1], pub_der._seq[2])).exportKey('DER')[22:]
        service_id = base64.b32encode(hashlib.sha1(public_key).digest()[:10]).lower().decode()
    return service_id
Example No. 4
def createImage (request):

    (_, ids) = json.loads (base64.b32decode (request.POST['nodeId']))

    type = LEAF_TYPE.objects.get (_code='image')
    node = NODE.objects.get (pk=ids[0])
    text = request.POST['data']

    with open (get_path (request.session.session_key), 'w') as uuid_file:
        uuid_file.write (text)

        leaf = LEAF.objects.create (
            type = type,
            node = node,
            name = request.POST['name'],
            file = uuid_file.name,
            rank = get_next_rank (node))

    if 'leafId' in request.POST:
        js_string = json.dumps ([{
            'success' : True,
            'uuid' : request.POST['leafId'],
            'id' : base64.b32encode (
                json.dumps (('leaf', [leaf.node.pk, leaf.pk])))}])

    else:
        js_string = json.dumps ([{
            'success' : True,
            'id' : base64.b32encode (
                json.dumps (('leaf', [leaf.node.pk, leaf.pk])))}])

    return HttpResponse (js_string, mimetype='application/json')
Example No. 5
    def create_tub(self):
        certfile = os.path.join(self.basedir, "private", self.CERTFILE)
        self.tub = Tub(certFile=certfile)
        self.tub.setOption("logLocalFailures", True)
        self.tub.setOption("logRemoteFailures", True)
        self.tub.setOption("expose-remote-exception-types", False)

        # see #521 for a discussion of how to pick these timeout values.
        keepalive_timeout_s = self.get_config("node", "timeout.keepalive", "")
        if keepalive_timeout_s:
            self.tub.setOption("keepaliveTimeout", int(keepalive_timeout_s))
        disconnect_timeout_s = self.get_config("node", "timeout.disconnect", "")
        if disconnect_timeout_s:
            # N.B.: this is in seconds, so use "1800" to get 30min
            self.tub.setOption("disconnectTimeout", int(disconnect_timeout_s))

        self.nodeid = b32decode(self.tub.tubID.upper()) # binary format
        self.write_config("my_nodeid", b32encode(self.nodeid).lower() + "\n")
        self.short_nodeid = b32encode(self.nodeid).lower()[:8] # ready for printing

        tubport = self.get_config("node", "tub.port", "tcp:0")
        self.tub.listenOn(tubport)
        # we must wait until our service has started before we can find out
        # our IP address and thus do tub.setLocation, and we can't register
        # any services with the Tub until after that point
        self.tub.setServiceParent(self)
Example No. 6
    def get_new_style_checksum(self, record):
        """Return a base32-encoded sha1
        For revisit records, return the original sha1
        """

        if 'revisit' == record.type:
            digest = record.get_header('WARC-Payload-Digest')
            if digest is None:
                return '-'
            else:
                return digest.replace('sha1:', '')
        elif 'response' == record.type and self.is_response(record.content_type):
            digest = record.get_header('WARC-Payload-Digest')
            #Our patched warc-tools fabricates this header if it is not present in the record
            return digest.replace('sha1:', '')
        elif 'response' == record.type and self.content is not None:
            # This is an arc record. Our patched warctools fabricates the WARC-Payload-Digest
            # header even for arc files so that we don't need to load large payloads in memory
            digest = record.get_header('WARC-Payload-Digest')
            if digest is not None:
                return digest.replace('sha1:', '')
            else:
                h = hashlib.sha1(self.content)
                return base64.b32encode(h.digest())
        else:
            h = hashlib.sha1(record.content[1])
            return base64.b32encode(h.digest())
Example No. 7
    def create_tub(self):
        certfile = os.path.join(self.basedir, "private", self.CERTFILE)
        self.tub = Tub(certFile=certfile)
        self.tub.setOption("logLocalFailures", True)
        self.tub.setOption("logRemoteFailures", True)
        self.tub.setOption("expose-remote-exception-types", False)

        # see #521 for a discussion of how to pick these timeout values.
        keepalive_timeout_s = self.get_config("node", "timeout.keepalive", "")
        if keepalive_timeout_s:
            self.tub.setOption("keepaliveTimeout", int(keepalive_timeout_s))
        disconnect_timeout_s = self.get_config("node", "timeout.disconnect", "")
        if disconnect_timeout_s:
            # N.B.: this is in seconds, so use "1800" to get 30min
            self.tub.setOption("disconnectTimeout", int(disconnect_timeout_s))

        self.nodeid = b32decode(self.tub.tubID.upper()) # binary format
        self.write_config("my_nodeid", b32encode(self.nodeid).lower() + "\n")
        self.short_nodeid = b32encode(self.nodeid).lower()[:8] # ready for printing
        tubport = self.get_tub_port()
        if tubport in ("0", "tcp:0"):
            raise ValueError("tub.port cannot be 0: you must choose")
        self.tub.listenOn(tubport)

        location = self.get_tub_location(tubport)
        self.tub.setLocation(location)
        self.log("Tub location set to %s" % (location,))

        # the Tub is now ready for tub.registerReference()
        self.tub.setServiceParent(self)
Example No. 8
def resetpw():
	try:
		if request.method == 'POST':
			token = base64.b32encode(os.urandom(10)).decode()
			reset_user_password(request.form['email'], token)
			send_email(
				recipient = request.form['email'],
				subject   = "Password reset link",
				content   = url_for('resetpw', email=request.form['email'], token=token, _external=True)
			)
			flash("<b>A password reset link was sent to %s</b>" % request.form['email'], "success")
		elif "token" in request.args:
			password = base64.b32encode(os.urandom(10)).decode()
			reset_user_password(request.args['email'], request.args['token'], password)
			send_email(
				recipient = request.args['email'],
				subject   = "Password",
				content   = "Your Password: %s" % password
			)
			flash("<b>Password reset successful!</b> - Your password was sent to %s" % request.args['email'], "success")
	except AccountNotExisting:
		flash("<b>No Account found with this E-Mail address!</b>", "danger")
	except InvalidToken:
		flash("<b>Invalid password token!</b>", "danger")
	return render_template("resetpw.html")
Example No. 9
    def _assemble_entry(self, recorded_url, records):
        if recorded_url.payload_digest:
            if recorded_url.payload_digest.name == "sha1":
                sha1base32 = base64.b32encode(
                        recorded_url.payload_digest.digest()
                        ).decode("utf-8")
            else:
                self.logger.warn(
                        "digest type is %r but big captures table is indexed "
                        "by sha1",
                        recorded_url.payload_digest.name)
        else:
            digest = hashlib.new("sha1", records[0].content[1])
            sha1base32 = base64.b32encode(digest.digest()).decode("utf-8")

        if (recorded_url.warcprox_meta
                and "captures-bucket" in recorded_url.warcprox_meta):
            bucket = recorded_url.warcprox_meta["captures-bucket"]
        else:
            bucket = "__unspecified__"

        canon_surt = urlcanon.semantic(recorded_url.url).surt().decode('ascii')

        entry = {
            # id only specified for rethinkdb partitioning
            "id": "{} {}".format(
                canon_surt[:20], records[0].id.decode("utf-8")[10:-1]),
            "abbr_canon_surt": canon_surt[:150],
            "canon_surt": canon_surt,
            "timestamp": recorded_url.timestamp.replace(
                tzinfo=doublethink.UTC),
            "url": recorded_url.url.decode("utf-8"),
            "offset": records[0].offset,
            "filename": os.path.basename(records[0].warc_filename),
            "warc_type": records[0].type.decode("utf-8"),
            "warc_id": records[0].id.decode("utf-8"),
            "sha1base32": sha1base32,
            "content_type": recorded_url.mimetype,
            "response_code": recorded_url.status,
            "http_method": recorded_url.method,
            "bucket": bucket,
            "record_length": records[0].length, # compressed (or not) length of
                                                # warc record including record
                                                # headers
            "wire_bytes": recorded_url.size, # count of bytes transferred over
                                             # the wire, including http headers
                                             # if any
        }

        if recorded_url.warcprox_meta:
            if "dedup-ok" in recorded_url.warcprox_meta:
                entry["dedup_ok"] = recorded_url.warcprox_meta["dedup-ok"]
            if "captures-table-extra-fields" in recorded_url.warcprox_meta:
                extras = recorded_url.warcprox_meta[
                        "captures-table-extra-fields"]
                for extra_field in extras:
                    entry[extra_field] = extras[extra_field]

        return entry
Example No. 10
 def create_key(self, password, password_confirm):
     if password == password_confirm:
         key = self._new_key()
         key = base64.b32encode(key.sk_s)
         key = scrypt.encrypt(key, password, maxtime=1)
         self._encrypted_key = base64.b32encode(key)
         return True
     else:
         return False
Example No. 11
def create_channel_id(session):
    s = StringIO()
    s.write('b-')
    s.write(base64.b32encode(session.key().name().decode('hex')).replace("=", ""))
    s.write('|')
    s.write(base64.b32encode(uuid.uuid4().bytes).replace("=", ""))
    channel_id = s.getvalue()
    azzert(len(channel_id) <= 64, "Channel ids should not be longer than 64 bytes")
    return channel_id
Example No. 12
def ab(x): # debuggery
    if len(x) >= 3:
        return "%s:%s" % (len(x), b32encode(x[-3:]),)
    elif len(x) == 2:
        return "%s:%s" % (len(x), b32encode(x[-2:]),)
    elif len(x) == 1:
        return "%s:%s" % (len(x), b32encode(x[-1:]),)
    elif len(x) == 0:
        return "%s:%s" % (len(x), "--empty--",)
Example No. 13
 def derive(self,parentstoragekey,key,attenuated):
   #Derive an unattenuated child cap using attenuated parent cap
   intermediatekey =  _macfun(self.secret,parentstoragekey)
   childkey = _macfun(key.encode(),parentstoragekey)
   if attenuated == False:
     return "rw-" + base64.b32encode(childkey)[:-4].decode("utf-8")
   else:
     #Attenuate the result if requested.
     key2=_macfun(b"read-only::nosalt",childkey)
     return "ro-" + base64.b32encode(key2)[:-4].decode("utf-8")
Example No. 14
def guid128(salt=None):
    """Generates an 26 character ID which should be globally unique.

    >>> guid128()
    'MTB2ONDSL3YWJN3CA6XIG7O4HM'
    """
    if salt:
        data = "%s%s%s" % (salt, uuid.uuid1(), salt)
        return str(base64.b32encode(hashlib.md5(data).digest()).rstrip('='))
    return str(base64.b32encode(uuid.uuid1().bytes).rstrip('='))
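
guid128() above is Python 2 code: it hashes a str directly and calls rstrip('=') on the bytes returned by b32encode. A sketch of the same idea under Python 3, with the encode/decode steps made explicit (the helper name is illustrative):

import base64
import hashlib
import uuid

def guid128_py3(salt=None):
    # 16 bytes of input always yield 26 base32 characters once padding is stripped
    if salt:
        data = "%s%s%s" % (salt, uuid.uuid1(), salt)
        raw = hashlib.md5(data.encode("utf-8")).digest()
    else:
        raw = uuid.uuid1().bytes
    return base64.b32encode(raw).decode("ascii").rstrip("=")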
Example No. 15
	def set_key(self,id,key,enc_key=None):
		""" A convenience method to set a key.
		If enc_key is provided, the key are crypted with AES. Else,
		the key are just Base32 encoded. If enc_key is None, use
		the key provided during the initialization of this class """
		if not key: return
		if not enc_key: enc_key=self.key
		if enc_key:
			self.set('Keys:'+id,base64.b32encode(AES.new(key).encrypt(key)))
		else:
			self.set('Keys:'+id,base64.b32encode(key))
Example No. 16
def hotp_gen(password, interval):
	key = base64.b32decode(password, casefold=True)
	big = struct.pack('>Q', interval) # set unsigned long long bigendian
	dig = hmac.new(key, big, hashlib.sha1).digest()
	offset = ord(dig[19])&15
	dt = dig[offset:offset + 4]
	print base64.b32encode(dt)
	uint = struct.unpack('>I', dt) # reset to unsigned int bigendian
	print uint
	otp = (uint[0] & 0x7fffffff) % 1000000
	return otp
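
hotp_gen() is also Python 2 only: the print statements and ord(dig[19]) fail under Python 3, where indexing bytes already yields an int. A Python 3 sketch of the same HOTP dynamic truncation, without the debug prints (the helper name is illustrative):

import base64
import hashlib
import hmac
import struct

def hotp_gen_py3(password, interval, digits=6):
    key = base64.b32decode(password, casefold=True)
    counter = struct.pack('>Q', interval)  # 8-byte big-endian counter
    dig = hmac.new(key, counter, hashlib.sha1).digest()
    offset = dig[19] & 15                  # dynamic truncation offset
    uint = struct.unpack('>I', dig[offset:offset + 4])[0]
    return (uint & 0x7fffffff) % (10 ** digits)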
Example No. 17
def _stamp_resource(dist, resource_path, encodings=None):
    _stamp_dist = get_distribution('van.static')
    r_path = resource_path
    if _PY3:
        r_path32 = base64.b32encode(r_path.encode('utf-8')).decode('ascii')
    else:
        r_path32 = base64.b32encode(r_path)
    if not encodings:
        return _stamp_dist, '%s-%s-%s.stamp' % (dist.project_name, dist.version, r_path32)
    encodings = '-'.join(sorted(encodings))
    return _stamp_dist, '%s-%s-%s-%s.stamp' % (dist.project_name, dist.version, encodings, r_path32)
Example No. 18
def hash_file(filepath):
    f = open(filepath, "rb")
    sha1 = hashlib.sha1()
    sha512 = hashlib.sha512()

    for piece in read_in_chunks(f):
        sha1.update(piece)
        sha512.update(piece)
    return {
        "sha1": str(base64.b32encode(sha1.digest()), "ascii"),
        "sha512": str(base64.b32encode(sha512.digest()), "ascii"),
    }
Example No. 19
    def _get_table(self, value=None):
        try:
            f = default_storage.fs.listdir(automount(self.folder))
        except:
            return []

        elements = []
        files = []
        if value:
            cmp = re.compile(value, re.IGNORECASE)
        else:
            cmp = None

        if self.folder!='/':
            f = ['..',] + f
        for p in f:
            pos = fs.path.join(self.folder, p)
            if default_storage.fs.isdir(pos) or p.lower().endswith('.zip'):
                if cmp and cmp.match(p) or not cmp:
                    id = b32encode(pos.encode('utf-8')).decode('utf-8')
                    info = default_storage.fs.getdetails(pos)
                    #if not 'created_time' in info:
                    #    info['created_time'] = ''
                    elements.append([
                        id,
                        (p, ',#fdd'),
                        '',
                        #(info['created_time'], ',,#f00,s'),
                        (info.modified.replace(tzinfo=None), ',,#f00,s'),
                        info.raw,
                        {'edit': ('tableurl', '../../%s/_/' % id, _('Change folder'))},
                    ])
            else:
                files.append((p, pos))
        for pp in files:
            p=pp[0]
            pos=pp[1]
            if cmp and cmp.match(p) or not cmp:
                id = b32encode(pos.encode('utf-8')).decode('utf-8')
                info = default_storage.fs.getdetails(pos)
                #size = info['size']
                #ctime = info['created_time']
                size = info.size
                ctime = info.modified.replace(tzinfo=None)
                elements.append([
                    id,
                    p,
                    (size, '>,' + self._size_to_color(size)),
                    (ctime, ',' + self._time_to_color(ctime)),
                    info.raw,
                    {'edit': ('command', '../../%s/_/' % id, _('Open file'))},
                    ])
        return elements
Example No. 20
    def compute_client_filename(self, id, ms, login):
        """Compute a client database filename"""

        # Compute login hash
        ho = hashlib.sha256()
        ho.update(hmac_sha512(ms, login))
        hlogin = ho.digest()

        # Filename construction
        filename = (base64.b32encode(hlogin))[:52] + (base64.b32encode(id))[:52]

        return filename.decode()  # Return client database filename (a string)
Example No. 21
def api_set(request, key):
    access_token = request.GET.get('access_token')
    key = base64.b32encode(key)
    value = base64.b32encode(request.body)

    github_payload = {'title': key, 'body': value}
    headers = {'Authorization': 'token {}'.format(access_token)}
    response = requests.post(
            'https://api.github.com/repos/mrooney/metakv/issues',
            data=cjson.encode(github_payload),
            headers=headers
    )
    return HttpResponse(unicode(response))
Example No. 22
  def __init__(self, seed=None, pub=None, sig=None):
    self.seed=seed
    self.pub=pub
    self.sig=sig

    if self.pub:
      h=hashlib.sha1()
      h.update(self.pub.save_pkcs1('DER'))
      self.owner=base64.b32encode(h.digest())

      h=hashlib.sha1()
      h.update(self.serialize())
      self.id=base64.b32encode(h.digest())
Example No. 23
  def deserialize(self, l):
    seed, pub, sig=l
    self.seed=decode(seed)
    self.pub=fixPub(rsa.key.PublicKey.load_pkcs1(decode(pub), 'DER'))
    self.sig=decode(sig)

    h=hashlib.sha1()
    h.update(self.pub.save_pkcs1('DER'))
    self.owner=base64.b32encode(h.digest())

    h=hashlib.sha1()
    h.update(self.serialize())
    self.id=base64.b32encode(h.digest())
Example No. 24
 def nodecaps(self,firstcap):
   if firstcap[1] == 'o': #Check for read-only or attenuated cap prefix.
     cap1=None            #There is no unattenuated/rw cap for this one
     cap2=firstcap        #We use the function argument as ro/attenuated cap
     key2=base64.b32decode(firstcap[3:]+"====") #Calculate the FEK by decoding the non-prefix part of the ro/attenuated cap.
   else:
     cap1=firstcap        #Use the function parameter as rw/unattenuated cap
     key1=base64.b32decode(firstcap[3:]+"====") #Decode the non-prefix part of the unattenuated cap.
     key2=_macfun(b"read-only::nosalt",key1) #Derive the FEK by hashing a fixed string with cap1 as key.
     cap2="ro-" + base64.b32encode(key2)[:-4].decode("utf-8") #Also encode the FEK into a cap for ro/attenuated access.
   key3=_macfun(self.cloudsecret.encode(),cap2.encode()) #Derive a third key from the attenuated/ro cap
   str3= base64.b32encode(key3)[:-4].decode("utf-8") #Now start off with encoding in base32.
   location = str3[0:3] + "/" + str3[3:6] + "/" + str3[6:] #Create a path for a balanced directory tree in which to serialize our nodes.
   return (cap1,cap2,location,key2) 
Example No. 25
def generate_totp_secret(short = False):
   """
   Generates a new base32 encoded, random secret.

   :param short: If `True`, generate 5 bytes entropy, else 10 bytes.
   :type short: bool

   :returns: bytes -- The generated secret.
   """
   assert(type(short) == bool)
   if short:
      return base64.b32encode(os.urandom(5))
   else:
      return base64.b32encode(os.urandom(10))
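
A secret like this is usually handed to an authenticator app through an otpauth:// provisioning URI. A hedged usage sketch (the helper name and the issuer/account values are illustrative, not part of this module):

import urllib.parse

def provisioning_uri(secret, issuer, account):
    # secret is the base32 bytes returned by generate_totp_secret()
    label = urllib.parse.quote('%s:%s' % (issuer, account))
    params = urllib.parse.urlencode({
        'secret': secret.decode('ascii').rstrip('='),
        'issuer': issuer,
    })
    return 'otpauth://totp/%s?%s' % (label, params)

# e.g. provisioning_uri(generate_totp_secret(), 'ExampleCorp', 'alice')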
Example No. 26
def gen_session_id():
    """ Generates a session ID """
    # Be kind to future support people and developers by using a base32
    # encoded session id. Why is this cool? Read RFC3548 §5 and rejoice
    # at the lack of ambiguity regarding "one", "ell", "zero" and
    # "ohh". You can thank me later.
    return base64.b32encode(M2Crypto.m2.rand_bytes(current_app.config['SESSION_BYTES']))
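
On Python 3 the same idea works without M2Crypto; a stdlib-only sketch using the secrets module (the byte count is a plain parameter here rather than app config, purely for illustration):

import base64
import secrets

def gen_session_id_stdlib(n_bytes=32):
    # base32 keeps the id free of the ambiguous 0/O and 1/l characters noted above
    return base64.b32encode(secrets.token_bytes(n_bytes)).decode('ascii')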
Example No. 27
 def get_playlist_id(self, playlist_uri):
     # Only A-Za-z0-9_ is allowed, which is 63 chars, so we can't use
     # base64. Luckily, D-Bus does not limit the length of object paths.
     # Since base32 pads trailing bytes with "=" chars, we need to replace
     # them with an allowed character such as "_".
     encoded_uri = base64.b32encode(playlist_uri).replace('=', '_')
     return '/com/mopidy/playlist/%s' % encoded_uri
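
Recovering the playlist URI from such an object path is the mirror image: restore the '=' padding that was swapped for '_' (safe, since '_' never occurs in the base32 alphabet) and decode. A hypothetical sketch, returning the URI as bytes:

import base64

def get_playlist_uri(object_path):
    # take the last path segment and undo the '=' -> '_' substitution
    encoded_uri = object_path.rsplit('/', 1)[-1].replace('_', '=')
    return base64.b32decode(encoded_uri)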
Example No. 28
    def create_renew_sesh_id(self, sesh_id, force=False):
        #if sesh_id in self.cache and not force:
        if sesh_id and ((sesh_id + ':c') in self.cache) and not force:
            return sesh_id

        sesh_id = base64.b32encode(os.urandom(5)).lower()
        return sesh_id
Example No. 29
    def get(self, user_id):
        """
        get user 2fa secret
        ---
        tags:
          - users 2fa
        responses:
          201:
            description: user 2fa secret
            schema:
              id: 2fa_out
              properties:
                username:
                  type: string
                  description: username for user
                twoFASecret:
                  type: integer
                  description: secret key for user
          404:
            description: user not found
            schema:
              id: error
        """

        if user_id is None:
            return jsonify(error="invalid user id"), 404
        user = User.query.filter_by(id=user_id).first()
        if user is None:
            return jsonify(error="invalid user"), 404
        user2fa_secret = base64.b32encode(hashlib.sha256(str(random.getrandbits(256))).digest())[:32]
        return jsonify(username=user.username, twoFASecret=user2fa_secret)
Example No. 30
    def generate_barcodes(self, format=''):
        filename_format = format or self.config['barcode'] + '-%02d.png'
        
        self.Barcoder = Barcoder(barcode=self.config['barcode'])

        for index,segment in enumerate(self.generate_segments()):
            segment_hash = hashlib.sha1()
            segment_hash.update(segment)

            barcode_path = os.path.join(
                self.config['barcode_dir'],
                filename_format % (index)
            )

            self.Barcoder.encode(
                base64.b32encode(
                    segment
                ),
                barcode_path
            )
            
            yield {
                'barcode_filename': barcode_path,
                'hexdigest': segment_hash.hexdigest(),
            }
Example No. 31
# Add Source argument
parser.add_argument('-source', help='Text to encode', nargs='+')

# Set args
args = parser.parse_args()

# If codetype is Base64
if args.type == "64":
    print(
        base64.encodestring(
            bytes(" ".join(str(x) for x in args.source), "utf-8")).decode())
# If codetype is Base32
elif args.type == "32":
    print(
        base64.b32encode(bytes(" ".join(str(x) for x in args.source),
                               "utf-8")).decode())
# If codetype is Base16
elif args.type == "16":
    print(
        base64.b16encode(bytes(" ".join(str(x) for x in args.source),
                               "utf-8")).decode())
# If codetype is URL safe Base64
elif args.type == "u64":
    print(
        base64.urlsafe_b64encode(
            bytes(" ".join(str(x) for x in args.source), "utf-8")).decode())
# If codetype is anything else
else:
    print("Type has to be 64/32/16/u64.")  # Error
Example No. 32
def on_go_process_srcs(unit):
    """
        _GO_PROCESS_SRCS() macro processes only 'CGO' files. All remaining *.go files
        and other input files are currently processed by a link command of the
        GO module (GO_LIBRARY, GO_PROGRAM)
    """

    srcs_files = get_appended_values(unit, 'GO_SRCS_VALUE')

    asm_files = []
    c_files = []
    cxx_files = []
    go_files = []
    in_files = []
    proto_files = []
    s_files = []
    syso_files = []

    classifed_files = {
        '.c': c_files,
        '.cc': cxx_files,
        '.cpp': cxx_files,
        '.cxx': cxx_files,
        '.go': go_files,
        '.in': in_files,
        '.proto': proto_files,
        '.s': asm_files,
        '.syso': syso_files,
        '.C': cxx_files,
        '.S': s_files,
    }

    # Classify files specified in _GO_SRCS() macro by extension and process CGO_EXPORT keyword
    # which can precede C/C++ files only
    is_cgo_export = False
    for f in srcs_files:
        _, ext = os.path.splitext(f)
        ext_files = classifed_files.get(ext)
        if ext_files is not None:
            if is_cgo_export:
                is_cgo_export = False
                if ext in ('.c', '.cc', '.cpp', '.cxx', '.C'):
                    unit.oncopy_file([f, f, 'OUTPUT_INCLUDES', '${BINDIR}/_cgo_export.h'])
                    f = '${BINDIR}/' + f
                else:
                    ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS()/_GO_SRCS() macro')
            ext_files.append(f)
        elif f == 'CGO_EXPORT':
            is_cgo_export = True
        else:
            # FIXME(snermolaev): We can report unsupported files for _GO_SRCS here
            pass
    if is_cgo_export:
        ymake.report_configure_error('Unmatched CGO_EXPORT keyword in SRCS()/_GO_SRCS() macro')

    for f in go_files:
        if f.endswith('_test.go'):
            ymake.report_configure_error('file {} must be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))
    go_test_files = get_appended_values(unit, 'GO_TEST_SRCS_VALUE')
    go_xtest_files = get_appended_values(unit, 'GO_XTEST_SRCS_VALUE')
    for f in go_test_files + go_xtest_files:
        if not f.endswith('_test.go'):
            ymake.report_configure_error('file {} should not be listed in GO_TEST_SRCS() or GO_XTEST_SRCS() macros'.format(f))

    # Add gofmt style checks
    resolved_go_files = []
    for path in itertools.chain(go_files, go_test_files, go_xtest_files):
        if path.endswith('.go'):
            resolved = unit.resolve_arc_path([path])
            if resolved != path and need_lint(resolved):
                resolved_go_files.append(resolved)
    if resolved_go_files:
        basedirs = {}
        for f in resolved_go_files:
            basedir = os.path.dirname(f)
            if basedir not in basedirs:
                basedirs[basedir] = []
            basedirs[basedir].append(f)
        for basedir in basedirs:
            unit.onadd_check(['gofmt'] + basedirs[basedir])

    is_test_module = unit.enabled('GO_TEST_MODULE')

    # Go coverage instrumentation (NOTE! go_files list is modified here)
    if is_test_module and unit.enabled('GO_TEST_COVER'):
        cover_info = []

        for f in go_files:
            if f.endswith('_test.go'):
                continue
            cover_var = 'GoCover_' + base64.b32encode(f).rstrip('=')
            cover_file = unit.resolve_arc_path(f)
            unit.on_go_gen_cover_go([cover_file, cover_var])
            if cover_file.startswith('$S/'):
                cover_file = arc_project_prefix + cover_file[3:]
            cover_info.append('{}:{}'.format(cover_var, cover_file))

        # go_files should be empty now since the initial list shouldn't contain
        # any non-go or go test file. The value of go_files list will be used later
        # to update the value of GO_SRCS_VALUE
        go_files = []
        unit.set(['GO_COVER_INFO_VALUE', ' '.join(cover_info)])

    # We have cleaned up the list of files from GO_SRCS_VALUE var and we have to update
    # the value since it is used in module command line
    unit.set(['GO_SRCS_VALUE', ' '.join(itertools.chain(go_files, asm_files, syso_files))])

    unit_path = unit.path()

    # Add go vet check
    if unit.enabled('_GO_VET') and not unit.enabled('NO_GO_VET') and need_lint(unit_path):
        unit.onadd_check(["govet", '$(BUILD_ROOT)/' + tobuilddir(os.path.join(unit_path, unit.filename() + '.vet.txt'))[3:]])

    # Process .proto files
    for f in proto_files:
        unit.on_go_proto_cmd(f)

    # Process .in files
    for f in in_files:
        unit.onsrc(f)

    # Generate .symabis for .s files (starting from 1.12 version)
    if compare_versions('1.12', unit.get('GOSTD_VERSION')) >= 0 and len(asm_files) > 0:
        unit.on_go_compile_symabis(asm_files)

    # Process cgo files
    cgo_files = get_appended_values(unit, 'CGO_SRCS_VALUE')

    cgo_cflags = []
    if len(c_files) + len(cxx_files) + len(s_files) + len(cgo_files) > 0:
        if is_test_module:
            go_test_for_dir = unit.get('GO_TEST_FOR_DIR')
            if go_test_for_dir and go_test_for_dir.startswith('$S/'):
                cgo_cflags.append(os.path.join('-I${ARCADIA_ROOT}', go_test_for_dir[3:]))
        cgo_cflags.append('-I$CURDIR')
        unit.oncgo_cflags(cgo_cflags)
        cgo_cflags = get_appended_values(unit, 'CGO_CFLAGS_VALUE')

    for f in itertools.chain(c_files, cxx_files, s_files):
        unit.onsrc([f] + cgo_cflags)

    if len(cgo_files) > 0:
        if not unit.enabled('CGO_ENABLED'):
            ymake.report_configure_error('trying to build with CGO (CGO_SRCS is non-empty) when CGO is disabled')
        import_path = rootrel_arc_src(unit_path, unit)
        go_std_root = unit.get('GOSTD') + os.path.sep
        if import_path.startswith(go_std_root):
            import_path = import_path[len(go_std_root):]
        if import_path != runtime_cgo_path:
            unit.onpeerdir(os.path.join(go_std_root, runtime_cgo_path))
        race_mode = 'race' if unit.enabled('RACE') else 'norace'
        import_runtime_cgo = 'false' if import_path in import_runtime_cgo_false[race_mode] else 'true'
        import_syscall = 'false' if import_path in import_syscall_false[race_mode] else 'true'
        args = [import_path] + cgo_files + ['FLAGS', '-import_runtime_cgo=' + import_runtime_cgo, '-import_syscall=' + import_syscall]
        unit.on_go_compile_cgo1(args)
        cgo2_cflags = get_appended_values(unit, 'CGO2_CFLAGS_VALUE')
        for f in cgo_files:
            if f.endswith('.go'):
                unit.onsrc([f[:-2] + 'cgo2.c'] + cgo_cflags + cgo2_cflags)
            else:
                ymake.report_configure_error('file {} should not be listed in CGO_SRCS() macros'.format(f))
        args = [go_package_name(unit)] + cgo_files
        if len(c_files) > 0:
            args += ['C_FILES'] + c_files
        if len(s_files) > 0:
            args += ['S_FILES'] + s_files
        if len(syso_files) > 0:
            args += ['OBJ_FILES'] + syso_files
        unit.on_go_compile_cgo2(args)
Example No. 33
 def __init__(self):
     self.tubID = base64.b32encode("foo")
     self.listening_ports = []
Example No. 34
 def __str__(self) -> str:
     return base64.b32encode(self).decode()
Example No. 35
def getNextGameId():
    global nextGameCounter
    nextGameCounter = nextGameCounter - random.randint(1, 10)
    return base64.b32encode(nextGameCounter.to_bytes(
        5, byteorder='little'))[:4].decode("utf-8")
Example No. 36
def normalize_secret(secret):
    if set(secret.lower()) <= set('0123456789abcdef'):
        return base64.b32encode(bytes.fromhex(secret)).decode('ascii').rstrip('=')
    else:
        return secret.upper().rstrip('=')
Example No. 37
def getName(target):
    return "vpngate-" + \
        base64.b32encode(\
            hashlib.sha512(target.encode('utf-8')).digest()[:12]
        ).decode('ascii').rstrip("=")
Example No. 38
def encryptDes3(destype, importx, impfilepath, export, filepath, outputformat, ivtype, iv, passwd, raw, keyimport):


    if keyimport == 'base64':
    
        key = base64.b64decode(passwd)
    
    elif keyimport == 'base32':
    
        key = base64.b32decode(passwd)
    
    elif keyimport == 'base16':
    
        key = base64.b16decode(passwd)
    
    elif keyimport == 'base58':
    
        key = base58.b58decode(passwd)
    
    elif keyimport == 'hex':
    
        key = passwd.decode('hex')
    
    elif keyimport == 'dec':
    
        key = long_to_bytes(passwd)
    
    elif keyimport == 'binary':
    
        key = text_from_bits(passwd)
        
    elif keyimport == 'raw':
    
        key = passwd 
               
    else:
    
        print('\033[1;31m[-]\033[0m Unknown error.')
        return False
        

    if importx == 'file':
    
        f = open(impfilepath, 'r')
        raw = f.read()
        f.close()
        
    elif importx == 'print':
    
        raw = raw
        
    else:
    
        print('\033[1;31m[-]\033[0m Unknown error.')
        return False
        
    raw = pad(raw)
    
    if len(key) == 16 or len(key) == 24:
    
        key = passwd
            
    else:
        
        print('\033[1;31m[-]\033[0m DES3 Key must be 16 or 24 bytes long')

    if ivtype == 'randomstart':
    
        iv = Random.new().read(DES3.block_size)
        sadd = iv
        eadd = ''
    
    elif ivtype == 'randomend':
    
        iv = Random.new().read(DES3.block_size)
        sadd = ''
        eadd = iv
    
    elif ivtype == 'custom':
    
        iv = iv
        sadd = iv
        eadd = ''
    
    elif ivtype == 'noiv':
    
        sadd = ''
        eadd = ''
    
    else:
    
        print('\033[1;31m[-]\033[0m Unknown error.')
        return False

    if destype == 'ecb':
        
        cipher = DES3.new(key, DES3.MODE_ECB)
    
    
    elif destype == 'cbc':
    
        cipher = DES3.new(key, DES3.MODE_CBC, iv)
        
    elif destype == 'ofb':
    
        cipher = DES3.new(key, DES3.MODE_OFB, iv)
    
    elif destype == 'ocb':
    
        cipher = DES3.new(key, DES3.MODE_OCB, iv)
    
    elif destype == 'ctr':
    
        cipher = DES3.new(key, DES3.MODE_CTR)
    
    elif destype == 'cfb':
    
        cipher = DES3.new(key, DES3.MODE_CFB, iv)
    
    else:
    
        print('\033[1;31m[-]\033[0m Unknown error.')
        return False
        
    out = cipher.encrypt(raw)
    
    out = sadd + out + eadd
        
    if outputformat == 'base64':
    
        output = base64.b64encode(out)
        
    elif outputformat == 'raw':
    
        output = out 
    
    elif outputformat == 'base32':
    
        output = base64.b32encode(out)
    
    elif outputformat == 'base16':
    
        output = base64.b16encode(out)
    
    elif outputformat == 'base58':
    
        output = base58.b58encode(out)
    
    elif outputformat == 'hex':
    
        output = out.encode('hex')
    
    elif outputformat == 'dec':
    
        output = bytes_to_long(out)
    
    elif outputformat == 'binary':
    
        output = text_to_bits(out)
        
    else:
    
        print('\033[1;31m[-]\033[0m Unknown error.')
        return False
        
    if export == 'file':
    
        filename = open(filepath, 'w')
        filename.write(output)
        filename.close()
        
        return True
        
    elif export == 'print':
    
        return output
        
    else:
    
        print('\033[1;31m[-]\033[0m Unknown error.')
        return False
Example No. 39
 def _encode_username(self, username):
     return base64.b32encode(hashlib.sha1(username).digest()).lower()
Example No. 40
def encode3(ans):
    return base64.b32encode(ans)
Example No. 41
def blake2s(outputformat, importx, inputformat, raw, infilepath, outfilepath):

    if importx == 'file':

        f = open(infilepath, 'r')
        raw = f.read()
        f.close()

    elif importx == 'print':

        raw = raw

    else:

        print('\033[1;31m[-]\033[0m Unknown error.')
        return False

    inp = raw

    if inputformat == 'base64':

        iput = base64.b64decode(inp)

    elif inputformat == 'raw':

        iput = inp

    elif inputformat == 'base32':

        iput = base64.b32decode(inp)

    elif inputformat == 'base16':

        iput = base64.b16decode(inp)

    elif inputformat == 'base58':

        iput = base58.b58decode(inp)

    elif inputformat == 'base85':

        print('\033[1;31m[-]\033[0m Option not available yet')

    elif inputformat == 'hex':

        iput = inp.decode('hex')

    elif inputformat == 'dec':

        print('\033[1;31m[-]\033[0m Option not available yet')

    elif inputformat == 'octal':

        print('\033[1;31m[-]\033[0m Option not available yet')

    elif inputformat == 'binary':

        iput = text_from_bits(inp)

    else:

        print('\033[1;31m[-]\033[0m Unknown error.')
        return False

    m = pyblake2.blake2s()
    m.update(iput)
    out = m.digest()

    if outputformat == 'base64':

        output = base64.b64encode(out)

    elif outputformat == 'raw':

        output = out

    elif outputformat == 'base32':

        output = base64.b32encode(out)

    elif outputformat == 'base16':

        output = base64.b16encode(out)

    elif outputformat == 'base58':

        output = base58.b58encode(out)

    elif outputformat == 'base85':

        print('\033[1;31m[-]\033[0m Option not available yet')

    elif outputformat == 'hex':

        output = out.encode('hex')

    elif outputformat == 'dec':

        print('\033[1;31m[-]\033[0m Option not available yet')

    elif outputformat == 'octal':

        print('\033[1;31m[-]\033[0m Option not available yet')

    elif outputformat == 'binary':

        output = text_to_bits(out)

    else:

        print('\033[1;31m[-]\033[0m Unknown error.')
        return False

    return output
Example No. 42
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
mail_pat = re.compile(
    r"([0-9a-zA-Z\_*\.*\-*]+)@([a-zA-Z0-9\-*\_*\.*]+)\.([a-zA-Z]+$)")
user_pat = re.compile(r'[a-zA-Z\_][0-9a-zA-Z\_]')
comma_pat = re.compile(r"\s*,\s*")
logger = DO(sys=Logger("sys").getLogger,
            api=Logger("api").getLogger,
            err=Logger("error").getLogger,
            access=Logger("access").getLogger,
            plugin=Logger("plugin").getLogger)
md5 = lambda pwd: hashlib.md5(pwd).hexdigest()
hmac_sha256 = lambda message: hmac.new(key=SYSTEM["HMAC_SHA256_KEY"],
                                       msg=message,
                                       digestmod=hashlib.sha256).hexdigest()
gen_token = lambda n=32: b32encode(uuid4().hex)[:n]
gen_requestId = lambda: str(uuid4())
get_today = lambda format="%Y-%m-%d %H:%M": datetime.datetime.now().strftime(
    format)
gen_rnd_filename = lambda: "%s%s" % (datetime.datetime.now().strftime(
    '%Y%m%d%H%M%S'), str(random.randrange(1000, 10000)))
ListEqualSplit = lambda l, n=5: [l[i:i + n] for i in range(0, len(l), n)]


def ip_check(ip):
    if isinstance(ip, (str, unicode)):
        return ip_pat.match(ip)


def ParseMySQL(mysql, callback="dict"):
    """解析MYSQL配置段"""
Example No. 43
    def pre_callback(self, ldap, dn, entry_attrs, attrs_list, *keys,
                     **options):
        # Fill in a default UUID when not specified.
        if entry_attrs.get('ipatokenuniqueid', None) is None:
            entry_attrs['ipatokenuniqueid'] = str(uuid.uuid4())
            dn = DN("ipatokenuniqueid=%s" % entry_attrs['ipatokenuniqueid'],
                    dn)

        if not _check_interval(options.get('ipatokennotbefore', None),
                               options.get('ipatokennotafter', None)):
            raise ValidationError(name='not_after',
                                  error='is before the validity start')

        # Set the object class and defaults for specific token types
        options['type'] = options['type'].lower()
        entry_attrs['objectclass'] = otptoken.object_class + [
            'ipatoken' + options['type']
        ]
        for ttype, tattrs in TOKEN_TYPES.items():
            if ttype != options['type']:
                for tattr in tattrs:
                    if tattr in entry_attrs:
                        del entry_attrs[tattr]

        # If owner was not specified, default to the person adding this token.
        # If managedby was not specified, attempt a sensible default.
        if 'ipatokenowner' not in entry_attrs or 'managedby' not in entry_attrs:
            cur_dn = DN(self.api.Backend.ldap2.conn.whoami_s()[4:])
            if cur_dn:
                cur_uid = cur_dn[0].value
                prev_uid = entry_attrs.setdefault('ipatokenowner', cur_uid)
                if cur_uid == prev_uid:
                    entry_attrs.setdefault('managedby', cur_dn.ldap_text())

        # Resolve the owner's dn
        _normalize_owner(self.api.Object.user, entry_attrs)

        # Get the issuer for the URI
        owner = entry_attrs.get('ipatokenowner', None)
        issuer = api.env.realm
        if owner is not None:
            try:
                issuer = ldap.get_entry(
                    owner, ['krbprincipalname'])['krbprincipalname'][0]
            except (NotFound, IndexError):
                pass

        # Check if key is not empty
        if entry_attrs['ipatokenotpkey'] is None:
            raise ValidationError(name='key', error=_(u'cannot be empty'))

        # Build the URI parameters
        args = {}
        args['issuer'] = issuer
        args['secret'] = base64.b32encode(entry_attrs['ipatokenotpkey'])
        args['digits'] = entry_attrs['ipatokenotpdigits']
        args['algorithm'] = entry_attrs['ipatokenotpalgorithm'].upper()
        if options['type'] == 'totp':
            args['period'] = entry_attrs['ipatokentotptimestep']
        elif options['type'] == 'hotp':
            args['counter'] = entry_attrs['ipatokenhotpcounter']

        # Build the URI
        label = urllib.parse.quote(entry_attrs['ipatokenuniqueid'])
        parameters = urllib.parse.urlencode(args)
        uri = u'otpauth://%s/%s:%s?%s' % (options['type'], issuer, label,
                                          parameters)
        setattr(context, 'uri', uri)

        attrs_list.append("objectclass")
        return dn
Example No. 44
 def totp_secret_b32(self):
     return b32encode(unhexlify(self.totp_secret))
Example No. 45
def unpackOnionAddress(packed):
    return base64.b32encode(packed[0:-2]).lower() + ".onion", struct.unpack(
        "H", packed[-2:])[0]
Example No. 46
     file = open(arg.file, 'r')
     text = file.read()
     file.close()
 elif arg.string != None:
     text = arg.string
 else:
     text = input()
 if arg.encode != None:
     if arg.encode == "64":
         if verbose:
             print("[+] Encoding to base64")
         print(base64.b64encode(text.encode('ascii')).decode('utf-8'))
     elif arg.encode == "32":
         if verbose:
             print("[+] Encoding to base32")
         print(base64.b32encode(text.encode('ascii')).decode('utf-8'))
     elif arg.encode == "85":
         if verbose:
             print("[+] Encoding to base85")
         print(base64.a85encode(text.encode('ascii')).decode('utf-8'))
     elif arg.encode == "58":
         if verbose:
             print("[+] Encoding to base58")
         print(base58.b58encode(text.encode('ascii')).decode('utf-8'))
     elif arg.encode == "62":
         if verbose:
             print("[+] Encoding to base62")
         print(base62.decode(text))
     elif arg.encode == "16":
         if verbose:
             print("[+] Encoding to base16")
Example No. 47
def api_vod_season(series, id):
    if not api_get_session():
        return None

    season = []
    episodes = []

    program_url = '{api_url}/CONTENT/DETAIL/BUNDLE/{id}'.format(
        api_url=CONST_DEFAULT_API, id=id)

    type = "vod_season_" + str(id)
    encodedBytes = base64.b32encode(type.encode("utf-8"))
    type = str(encodedBytes, "utf-8")

    file = "cache" + os.sep + type + ".json"

    if not is_file_older_than_x_days(file=ADDON_PROFILE + file, days=0.5):
        data = load_file(file=file, isJSON=True)
    else:
        download = api_download(url=program_url,
                                type='get',
                                headers=None,
                                data=None,
                                json_data=True,
                                return_json=True)
        data = download['data']
        code = download['code']

        if code and code == 200 and data and check_key(
                data,
                'resultCode') and data['resultCode'] == 'OK' and check_key(
                    data, 'resultObj') and check_key(data['resultObj'],
                                                     'containers'):
            write_file(file=file, data=data, isJSON=True)

    if not data or not check_key(data['resultObj'], 'containers'):
        return None

    for row in data['resultObj']['containers']:
        for currow in row['containers']:
            if check_key(currow, 'metadata') and check_key(
                    currow['metadata'], 'season') and str(
                        currow['metadata']
                        ['contentSubtype']) == 'EPISODE' and not str(
                            currow['metadata']['episodeNumber']) in episodes:
                asset_id = ''

                for asset in currow['assets']:
                    if check_key(
                            asset, 'videoType'
                    ) and asset['videoType'] == 'SD_DASH_PR' and check_key(
                            asset,
                            'assetType') and asset['assetType'] == 'MASTER':
                        asset_id = str(asset['assetId'])
                        break

                episodes.append(str(currow['metadata']['episodeNumber']))

                label = '{season}.{episode} - {title}'.format(
                    season=str(currow['metadata']['season']),
                    episode=str(currow['metadata']['episodeNumber']),
                    title=str(currow['metadata']['episodeTitle']))

                season.append({
                    'label':
                    label,
                    'id':
                    str(currow['metadata']['contentId']),
                    'assetid':
                    asset_id,
                    'duration':
                    currow['metadata']['duration'],
                    'title':
                    str(currow['metadata']['episodeTitle']),
                    'episodeNumber':
                    '{season}.{episode}'.format(
                        season=str(currow['metadata']['season']),
                        episode=str(currow['metadata']['episodeNumber'])),
                    'description':
                    str(currow['metadata']['shortDescription']),
                    'image':
                    "{image_url}/vod/{image}/1920x1080.jpg?blurred=false".
                    format(image_url=CONST_IMAGE_URL,
                           image=str(currow['metadata']['pictureUrl']))
                })

    return season
Example No. 48
def generate_id():
    t = time.time() * 1000000
    b = base64.b32encode(struct.pack(">Q", int(t)).lstrip(b'\x00')).strip(b'=').lower()
    return b.decode('utf8')
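
Reversing such an id requires undoing both stripping steps: put the '=' padding back, then left-pad the decoded value to 8 bytes before unpacking. A sketch (hypothetical helper, not part of the original module):

import base64
import struct

def decode_generated_id(s):
    s = s.upper()
    raw = base64.b32decode(s + '=' * (-len(s) % 8))
    # generate_id() stripped leading zero bytes, so pad back to 8 before unpacking
    t, = struct.unpack('>Q', raw.rjust(8, b'\x00'))
    return t / 1000000  # seconds since the epoch, as a float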
Example No. 49
def microhash(s, length=8):
    return base64.b32encode(hashlib.sha1(
        str(s).encode("utf-8")).digest()).decode()[:length]
Example No. 50
 def _get_random_queue_name():
     rb = os.urandom(16)
     return "amq_%s" % b32encode(rb).decode().replace("=", "").lower()
Example No. 51
 def __str__(self):
     return self.type_ + ':' + to_native_str(
         base64.b32encode(self.digester.digest()))
Example No. 52
def decode_secret(secret: bytes) -> str:
    return str(b32encode(secret), 'utf-8').replace('=', '')
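
Despite its name, decode_secret() base32-encodes the raw secret bytes and strips the '=' padding. A hypothetical inverse that recovers the raw bytes has to restore the padding first, since b32decode expects input whose length is a multiple of 8:

import base64

def recover_secret(text: str) -> bytes:
    # restore the stripped '=' padding before decoding
    padded = text + '=' * (-len(text) % 8)
    return base64.b32decode(padded)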
Example No. 53
def generate_mailbox_id():
    return base64.b32encode(os.urandom(8)).lower().strip(b"=").decode("ascii")
Example No. 54
    params = json.loads(i.text)
    params.update(i.attrib)

    # Convert the list of signed integers into a hex string.
    s_hex = b''
    for j in params['secret']:
        # Prepend a 0 if hex() would only return a single character.
        prefix = b''
        if j >= 0 and j < 16:
            prefix = b'0'
        b =  prefix + bytes(hex(j & 255)[2:], 'ascii')
        s_hex += b

    # Convert the hex string to base32 encoding.
    s_bytes = base64.b16decode(s_hex, True)
    s_b32 = base64.b32encode(s_bytes)
    str_s_b32 = s_b32.decode('ascii')

    if args.token is None:
        if args.url:
            print(f"otpauth://totp/{params['name']}?secret={str_s_b32}")
        else:
            print(params['name'])
    elif args.token == name:
        if args.url:
            print(f"otpauth://totp/{params['name']}?secret={str_s_b32}")
        else:
            totp = pyotp.TOTP(s=s_b32, interval=params['period'], digits=params['digits'], digest=digests[params['algo']])
            print(totp.now())
        break
Example No. 55
def convert_folder(base_source_dir,
                   base_target_dir,
                   tmp_dir,
                   tika=False,
                   ocr=False,
                   merge=False,
                   tsv_source_path=None,
                   tsv_target_path=None,
                   make_unique=True,
                   sample=False,
                   zip=False):
    # WAIT: Add a GUI option to choose whether files should be OCR-processed
    txt_target_path = base_target_dir + '_result.txt'
    json_tmp_dir = base_target_dir + '_tmp'
    converted_now = False
    errors = False
    originals = False

    if merge is False:  # TODO: Are both arguments needed?
        make_unique = False

    if tsv_source_path is None:
        tsv_source_path = base_target_dir + '.tsv'
    else:
        txt_target_path = os.path.splitext(
            tsv_source_path)[1][1:] + '_result.txt'

    if tsv_target_path is None:
        tsv_target_path = base_target_dir + '_processed.tsv'

    if os.path.exists(tsv_target_path):
        os.remove(tsv_target_path)

    Path(base_target_dir).mkdir(parents=True, exist_ok=True)

    # TODO: Does the mime type show directly whether it is pdf/a, or must the extra fields in the two below be checked? Pre-check with Tika and siegfried?

    # TODO: Is this tsv check needed here? A check is done before this function is called, so it may be unnecessary.
    if not os.path.isfile(tsv_source_path):
        if tika:
            run_tika(tsv_source_path, base_source_dir, json_tmp_dir, zip)
        else:
            run_siegfried(base_source_dir, tmp_dir, tsv_source_path, zip)

    # TODO: Add a test that the tsv file is not empty
    replace_text_in_file(tsv_source_path, '\0', '')

    table = etl.fromtsv(tsv_source_path)
    table = etl.rename(table, {
        'filename': 'source_file_path',
        'tika_batch_fs_relative_path': 'source_file_path',
        'filesize': 'file_size',
        'mime': 'mime_type',
        'Content_Type': 'mime_type',
        'Version': 'version'
    },
                       strict=False)

    thumbs_table = etl.select(
        table, lambda rec: Path(rec.source_file_path).name == 'Thumbs.db')
    if etl.nrows(thumbs_table) > 0:
        thumbs_paths = etl.values(thumbs_table, 'source_file_path')
        for path in thumbs_paths:
            if '/' not in path:
                path = os.path.join(base_source_dir, path)
            if os.path.isfile(path):
                os.remove(path)

        table = etl.select(
            table, lambda rec: Path(rec.source_file_path).name != 'Thumbs.db')

    table = etl.select(table, lambda rec: rec.source_file_path != '')
    table = etl.select(table, lambda rec: '#' not in rec.source_file_path)
    # WAIT: The line above is not a complete check for embedded documents, since # can actually occur in file names
    row_count = etl.nrows(table)

    file_count = sum([len(files) for r, d, files in os.walk(base_source_dir)])

    if row_count == 0:
        print('No files to convert. Exiting.')
        return 'Error', file_count
    elif file_count != row_count:
        print('Row count: ' + str(row_count))
        print('File count: ' + str(file_count))
        print("Files listed in '" + tsv_source_path +
              "' doesn't match files on disk. Exiting.")
        return 'Error', file_count
    elif not zip:
        print('Converting files..')

    # WAIT: Add a check of file size before and after conversion

    append_fields = ('version', 'norm_file_path', 'result',
                     'original_file_copy', 'id')
    table = add_fields(append_fields, table)

    cut_fields = ('0', '1', 'X_TIKA_EXCEPTION_runtime',
                  'X_TIKA_EXCEPTION_warn')
    table = remove_fields(cut_fields, table)

    header = etl.header(table)
    append_tsv_row(tsv_target_path, header)

    # Treat csv (detected from extension only) as plain text:
    table = etl.convert(table,
                        'mime_type',
                        lambda v, row: 'text/plain'
                        if row.id == 'x-fmt/18' else v,
                        pass_row=True)

    # Update for missing mime types where id is known:
    table = etl.convert(table,
                        'mime_type',
                        lambda v, row: 'application/xml'
                        if row.id == 'fmt/979' else v,
                        pass_row=True)

    if os.path.isfile(txt_target_path):
        os.remove(txt_target_path)

    data = etl.dicts(table)
    count = 0
    for row in data:
        count += 1
        count_str = ('(' + str(count) + '/' + str(file_count) + '): ')
        source_file_path = row['source_file_path']
        if '/' not in source_file_path:
            source_file_path = os.path.join(base_source_dir, source_file_path)

        mime_type = row['mime_type']
        # TODO: Does not work when Tika is used -> find out why
        if ';' in mime_type:
            mime_type = mime_type.split(';')[0]

        version = row['version']
        result = None
        old_result = row['result']

        if not mime_type:
            if os.path.islink(source_file_path):
                mime_type = 'n/a'

            # kind = filetype.guess(source_file_path)
            extension = os.path.splitext(source_file_path)[1][1:].lower()
            if extension == 'xml':
                mime_type = 'application/xml'

        if not zip:
            print_path = os.path.relpath(source_file_path,
                                         Path(base_source_dir).parents[1])
            print(count_str + '.../' + print_path + ' (' + mime_type + ')')

        if mime_type not in mime_to_norm.keys():
            # print("|" + mime_type + "|")

            errors = True
            converted_now = True
            result = 'Conversion not supported'
            append_txt_file(
                txt_target_path,
                result + ': ' + source_file_path + ' (' + mime_type + ')')
            row['norm_file_path'] = ''
            row['original_file_copy'] = ''
        else:
            keep_original = mime_to_norm[mime_type][0]

            if keep_original:
                originals = True

            if zip:
                keep_original = False

            function = mime_to_norm[mime_type][1]

            # Ensure unique file names in dir hierarchy:
            norm_ext = mime_to_norm[mime_type][2]
            if not norm_ext:
                norm_ext = 'none'

            if make_unique:
                norm_ext = (base64.b32encode(
                    bytes(
                        str(count), encoding='ascii'))).decode('utf8').replace(
                            '=', '').lower() + '.' + norm_ext
            target_dir = os.path.dirname(
                source_file_path.replace(base_source_dir, base_target_dir))
            normalized = file_convert(source_file_path,
                                      mime_type,
                                      function,
                                      target_dir,
                                      tmp_dir,
                                      None,
                                      norm_ext,
                                      version,
                                      ocr,
                                      keep_original,
                                      zip=zip)

            if normalized['result'] == 0:
                errors = True
                result = 'Conversion failed'
                append_txt_file(
                    txt_target_path,
                    result + ': ' + source_file_path + ' (' + mime_type + ')')
            elif normalized['result'] == 1:
                result = 'Converted successfully'
                converted_now = True
            elif normalized['result'] == 2:
                errors = True
                result = 'Conversion not supported'
                append_txt_file(
                    txt_target_path,
                    result + ': ' + source_file_path + ' (' + mime_type + ')')
            elif normalized['result'] == 3:
                if old_result not in ('Converted successfully',
                                      'Manually converted'):
                    result = 'Manually converted'
                    converted_now = True
                else:
                    result = old_result
            elif normalized['result'] == 4:
                converted_now = True
                errors = True
                result = normalized['error']
                append_txt_file(
                    txt_target_path,
                    result + ': ' + source_file_path + ' (' + mime_type + ')')
            elif normalized['result'] == 5:
                result = 'Not a document'

            if normalized['norm_file_path']:
                row['norm_file_path'] = relpath(normalized['norm_file_path'],
                                                base_target_dir)

            file_copy_path = normalized['original_file_copy']
            if file_copy_path:
                file_copy_path = relpath(file_copy_path, base_target_dir)
            row['original_file_copy'] = file_copy_path

        row['result'] = result
        row_values = list(row.values())

        # TODO: Fixed by adding escapechar='\\' in append_tsv_row -> will that cause problems later?
        # row_values = [r.replace('\n', ' ') for r in row_values if r is not None]
        append_tsv_row(tsv_target_path, row_values)

        if sample and count > 9:
            break

    if not sample:
        shutil.move(tsv_target_path, tsv_source_path)
    # TODO: Add an option so that if merge = true all files are copied to the top-level folder and empty subfolders are then deleted

    msg = None
    if sample:
        msg = 'Sample files converted.'
        if errors:
            msg = "Not all sample files were converted. See '" + txt_target_path + "' for details."
    else:
        if converted_now:
            msg = 'All files converted successfully.'
            if errors:
                msg = "Not all files were converted. See '" + txt_target_path + "' for details."
        else:
            msg = 'All files converted previously.'

    return msg, file_count, errors, originals  # TODO: Fix so this is used for the final summary instead when several folders have been converted
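A minimal sketch of the unique-extension logic used above when make_unique is set (standard library only; unique_norm_ext is a hypothetical helper name): the running file count is base32-encoded, '=' padding is stripped, the result is lowercased and prefixed to the normalized extension so sibling files cannot collide.

import base64

def unique_norm_ext(count, norm_ext):
    # Base32-encode the counter; strip '=' padding and lowercase it.
    suffix = base64.b32encode(bytes(str(count), encoding='ascii'))
    suffix = suffix.decode('utf8').replace('=', '').lower()
    return suffix + '.' + (norm_ext or 'none')

# unique_norm_ext(12, 'pdf') -> 'geza.pdf'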
Exemplo n.º 56
0
def data_to_blob(data):
    as_json = json.dumps(attr.asdict(data)).encode('utf-8')
    compressed = zlib.compress(as_json)
    as_base64 = base64.b32encode(compressed)
    return as_base64.decode('ascii')
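The decoding half is not shown in the source; a hedged sketch of the inverse (returning a plain dict, since the original attrs class is not known here):

import base64
import json
import zlib

def blob_to_data(blob):
    # Reverse of data_to_blob: base32-decode, decompress, parse JSON.
    compressed = base64.b32decode(blob.encode('ascii'))
    as_json = zlib.decompress(compressed)
    return json.loads(as_json.decode('utf-8'))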
Exemplo n.º 57
0
def setup_totp(request):
    if twofa.models.TOTPDevice.objects.active_for_user(request.user).exists():
        messages.error(
            request,
            _('You may not have multiple Google Authenticators attached to your account.'
              ))
        return redirect('twofa:list')

    setup_signer = TimestampSigner('twofa.views.setup_totp:{}'.format(
        request.user.pk))

    if request.method == 'POST' and 'secret' in request.POST:
        try:
            b32_secret = setup_signer.unsign(request.POST['secret'],
                                             max_age=600)
        except SignatureExpired:
            messages.error(
                request,
                _('That took too long and your challenge expired. Here\'s a new one.'
                  ))
            return redirect('twofa:setup-totp')
        except BadSignature:
            messages.error(
                request, _('Whoops - something went wrong. Please try again.'))
            return redirect('twofa:setup-totp')
    else:
        b32_secret = base64.b32encode(secrets.token_bytes(10)).decode('utf8')
    signed_secret = setup_signer.sign(b32_secret)

    url = 'otpauth://totp/Sponge:{}?{}'.format(
        urlquote(request.user.username),
        urlencode({
            'secret': b32_secret,
            'issuer': 'Sponge'
        }))
    img = qrcode.make(url, image_factory=qrcode.image.svg.SvgPathFillImage)
    img_buf = io.BytesIO()
    img.save(img_buf)

    device = twofa.models.TOTPDevice(base32_secret=b32_secret,
                                     owner=request.user)
    device.activated_at = timezone.now(
    )  # this won't be saved unless the form is valid
    form = device.verify_form(secret=signed_secret)
    if request.method == 'POST':
        form = device.verify_form(request.POST, secret=signed_secret)

        if form.is_valid():
            # relying on verify_form to save the new device
            request.user.twofa_enabled = True
            request.user.save()

            messages.success(
                request,
                _('Your authenticator has been added to your account.'))
            return _generate_paper_codes_if_needed(request.user,
                                                   reverse('twofa:list'))

    return render(request, 'twofa/setup/totp.html', {
        'form': form,
        'qr_code_svg': img_buf.getvalue(),
        'b32_secret': b32_secret
    })
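For reference, a minimal sketch (standard library only, assuming the usual authenticator defaults of HMAC-SHA1, a 30-second step and 6 digits per RFC 6238) of how a code can be recomputed from the same b32_secret generated above:

import base64
import hashlib
import hmac
import struct
import time

def totp_now(b32_secret, step=30, digits=6):
    # RFC 6238: HMAC-SHA1 over the big-endian time-step counter,
    # dynamic truncation, then reduce to the requested number of digits.
    key = base64.b32decode(b32_secret, casefold=True)
    counter = struct.pack('>Q', int(time.time()) // step)
    digest = hmac.new(key, counter, hashlib.sha1).digest()
    offset = digest[-1] & 0x0F
    code = struct.unpack('>I', digest[offset:offset + 4])[0] & 0x7FFFFFFF
    return str(code % 10 ** digits).zfill(digits)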
Exemplo n.º 58
0
def block_digest(self, content_buffer):
    hash = base64.b32encode(hashlib.sha1(content_buffer).digest())
    return "sha1:%s" % hash
Exemplo n.º 59
0
def test_long_random():
    v1 = base64.b32encode(os.urandom(1000))
    v2 = b'x' + v1 + b'x'
    delta = xdelta3.encode(v1, v2)
    v22 = xdelta3.decode(v1, delta)
    assert v2 == v22
Exemplo n.º 60
0
        def thg_encode(self, args):
            """modulo referente a encode de estrings"""
            arg_mensage = args.split(" ")
            if arg_mensage[0] == "":
                print("""suporte encode:

    Este módulo fornece funções para codificar dados binários em caracteres ASCII 
    imprimíveis e decodificar essas codificações de volta para dados binários.
    Ele fornece funções de codificação e decodificação para as codificações 
    especificadas em RFC 3548 ,que define os algoritmos Base16, Base32 e Base64,
    e para as codificações Ascii85 e Base85 padrão de fato.

    a2b_uu
    b2a_uu
    a2b_base64
    b2a_base64
    a2b_qp
    b2a_qp
    a2b_hqx
    rledecode_hqx
    rlecode_hqx
    b2a_hqx
    crc_hqx
    crc32
    b2a_hex
    a2b_hex
    hexlify
    unhexlify
    Charcode
    binary
    base62
    basen
    bcd
    ur
    unicode_normalize
    qp_encoding
            encode type[2,16,32,64]  str

            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))

            elif arg_mensage[0] == "64":
                arg_mensage[1] = arg_mensage[1].encode("ascii")
                base64_bytes = base64.b64encode(arg_mensage[1])
                by_to_st(base64_bytes)
            elif arg_mensage[0] == "32":
                arg_mensage[1] = arg_mensage[1].encode("ascii")
                b32encode_bytes = base64.b32encode(arg_mensage[1])
                by_to_st(b32encode_bytes)
            elif arg_mensage[0] == "16":
                arg_mensage[1] = arg_mensage[1].encode("ascii")
                b16encode_bytes = base64.b16encode(arg_mensage[1])
                by_to_st(b16encode_bytes)
            elif arg_mensage[0] == "a85encode":
                arg_mensage[1] = arg_mensage[1].encode("ascii")
                a85encode_bytes = base64.a85encode(arg_mensage[1])
                by_to_st(a85encode_bytes)
            elif arg_mensage[0] == "b85encode":
                arg_mensage[1] = arg_mensage[1].encode("ascii")
                b85encode_bytes = base64.b85encode(arg_mensage[1])
                by_to_st(b85encode_bytes)
            elif arg_mensage[0] == "a2b_uu":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Converta uma única linha de dados uuencodificados de volta em binários e retorne os dados binários. As linhas normalmente contêm 45 bytes (binários), exceto a última linha. Os dados da linha podem ser seguidos de espaços em branco."""
                        .format(YELLOW=Fore.YELLOW,
                                BLUE=Fore.BLUE,
                                RED=Fore.RED))
                else:
                    by_to_st((binascii.a2b_uu(arg_mensage[1])))
            elif arg_mensage[0] == "a2b_base64":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED}Converta dados binários em uma linha de caracteres ASCII na codificação base64. O valor de retorno é a linha convertida, incluindo um caractere de nova linha. O comprimento dos dados deve ser de no máximo 57 para aderir ao padrão base64."""
                        .format(YELLOW=Fore.YELLOW,
                                BLUE=Fore.BLUE,
                                RED=Fore.RED))
                else:
                    by_to_st(binascii.a2b_base64(arg_mensage[1]))
            elif arg_mensage[0] == "b2a_base64":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Converta dados binários em uma linha de caracteres ASCII na codificação base64. O valor de retorno é a linha convertida, incluindo um caractere de nova linha. O comprimento dos dados deve ser de no máximo 57 para aderir ao padrão base64."""
                        .format(YELLOW=Fore.YELLOW,
                                BLUE=Fore.BLUE,
                                RED=Fore.RED))
                else:
                    by_to_st(binascii.b2a_base64(b"arg_mensage[1]"))
            elif arg_mensage[0] == "a2b_qp":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED}Converta um bloco de dados imprimíveis entre aspas de volta em binários e retorne os dados binários. Mais de uma linha pode ser passada por vez. Se o cabeçalho do argumento opcional estiver presente e verdadeiro, os sublinhados serão decodificados como espaços."""
                        .format(YELLOW=Fore.YELLOW,
                                BLUE=Fore.BLUE,
                                RED=Fore.RED))
                else:
                    by_to_st(binascii.a2b_qp(arg_mensage[1]))
            elif arg_mensage[0] == "b2a_qp":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED}Converta dados binários em uma (s) linha (s) de caracteres ASCII em codificação imprimível entre aspas. O valor de retorno é a (s) linha (s) convertida (s). Se o argumento opcional quotetabs estiver presente e verdadeiro, todas as tabulações e espaços serão codificados. Se o argumento opcional istext estiver presente e verdadeiro, as novas linhas não serão codificadas, mas os espaços em branco finais serão codificados. Se o cabeçalho do argumento opcional estiver presente e verdadeiro, os espaços serão codificados como sublinhados de acordo com RFC1522. Se o cabeçalho do argumento opcional estiver presente e for falso, os caracteres de nova linha também serão codificados; caso contrário, a conversão de alimentação de linha pode corromper o fluxo de dados binários."""
                        .format(YELLOW=Fore.YELLOW,
                                BLUE=Fore.BLUE,
                                RED=Fore.RED))
                else:
                    by_to_st(binascii.b2a_qp(arg_mensage[1].encode()))
            elif arg_mensage[0] == "a2b_hqx":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED}Converta dados ASCII formatados de binhex4 em binários, sem fazer a descompressão RLE. A string deve conter um número completo de bytes binários ou (no caso da última parte dos dados binhex4) ter os bits restantes zero.
    """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    by_to_st(binascii.a2b_hqx(arg_mensage[1]))
            elif arg_mensage[0] == "rledecode_hqx":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Execute a descompressão RLE nos dados, de acordo com o padrão binhex4. O algoritmo usa 0x90 após um byte como um indicador de repetição, seguido por uma contagem. Uma contagem de 0 especifica um valor de byte de 0x90 . A rotina retorna os dados descompactados, a menos que os dados de entrada de dados terminem em um indicador de repetição órfão, caso em que a exceção Incompleta é levantada."""
                        .format(YELLOW=Fore.YELLOW,
                                BLUE=Fore.BLUE,
                                RED=Fore.RED))
                else:
                    by_to_st((binascii.rledecode_hqx(arg_mensage[1].encode())))
            elif arg_mensage[0] == "rlecode_hqx":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Execute a compactação RLE no estilo binhex4 nos dados e retorne o resultado."""
                        .format(YELLOW=Fore.YELLOW,
                                BLUE=Fore.BLUE,
                                RED=Fore.RED))
                else:
                    by_to_st((binascii.rlecode_hqx(arg_mensage[1].encode())))
            elif arg_mensage[0] == "b2a_hqx":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Execute a conversão hexbin4 binário para ASCII e retorne a string resultante. O argumento já deve ser codificado por RLE e ter um comprimento divisível por 3 (exceto possivelmente o último fragmento).
    """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    by_to_st((binascii.b2a_hqx(arg_mensage[1].encode())))
            elif arg_mensage[0] == "crc_hqx":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Calcule o valor binhex4 crc dos dados , começando com um crc inicial e retornando o resultado.
    """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    by_to_st((binascii.crc_hqx(arg_mensage[1].encode(),
                                               int(arg_mensage[2]))))
            elif arg_mensage[0] == "crc32":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Calcule CRC-32, a soma de verificação de dados de 
                    32 bits, começando com um crc inicial. Isso é consistente com a soma de verificação do arquivo ZIP. 
                    Uma vez que o algoritmo é projetado para uso como um algoritmo de soma de verificação, não é adequado 
                    para uso como um algoritmo de hash geral. 

    {YELLOW}Nota{YELLOW}{RED} Para gerar o mesmo valor numérico em todas as versões e plataformas Python, {RED}{BLUE}use crc32 (dados) & 0xffffffff{BLUE}{RED}. Se você estiver usando apenas a soma de verificação no formato binário compactado, isso não é necessário, pois o valor de retorno é a representação binária correta de 32 bits, independentemente do sinal.
            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    by_to_st((binascii.crc32(arg_mensage[1].encode())))
            elif arg_mensage[0] == "hexlify":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Retorna a representação hexadecimal dos dados 
                    binários . Cada byte de dados é convertido na representação hexadecimal de 2 dígitos correspondente. 
                    A string resultante é, portanto, o dobro do comprimento dos dados . 

            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    by_to_st((binascii.hexlify(arg_mensage[1].encode(),
                                               arg_mensage[2].encode())))
            elif arg_mensage[0] == "b2a_hex":
                if arg_mensage[1] == "help":
                    print("""{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} hex
            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    by_to_st((binascii.b2a_hex(arg_mensage[1].encode(),
                                               int(arg_mensage[2]))))
            elif arg_mensage[0] == "unhexlify":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Retorna os dados binários representados pela string hexadecimal hexstr . Esta função é o inverso de b2a_hex () . hexstr deve conter um número par de dígitos hexadecimais (que podem ser maiúsculas ou minúsculas), caso contrário, um TypeError é gerado.

            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    by_to_st((binascii.unhexlify(arg_mensage[1].encode())))
            elif arg_mensage[0] == "b2a_uu":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}a2b_uu{YELLOW}{BLUE} =>{BLUE}{RED} Converta dados binários em uma linha de caracteres ASCII, o valor de retorno é a linha convertida, incluindo um caractere de nova linha. O comprimento dos dados deve ser de no máximo 45.

            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    by_to_st((binascii.b2a_uu(arg_mensage[1].encode(),
                                              int(arg_mensage[2]))))
            elif arg_mensage[0] == "charcode":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}charcode{YELLOW}{BLUE} =>{BLUE}{RED}converte string em charcode
            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    print(ord(arg_mensage[1].encode()))
            elif arg_mensage[0] == "binary":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}binary{YELLOW}{BLUE} =>{BLUE}{RED}converte string em binary
            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    print(" ".join(
                        format(ord(x), "b") for x in arg_mensage[1]))
            elif arg_mensage[0] == "base62":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}base62{YELLOW}{BLUE} =>{BLUE}{RED}converte string em base62
            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    print(decode62(arg_mensage[1]))
            elif arg_mensage[0] == "basen":
                if arg_mensage[1] == "help":
                    print(
                        """{YELLOW}basen{YELLOW}{BLUE} =>{BLUE}{RED}converte decimal em basen
            """.format(YELLOW=Fore.YELLOW, BLUE=Fore.BLUE, RED=Fore.RED))
                else:
                    print(
                        numpy.base_repr(int(arg_mensage[1]),
                                        base=int(arg_mensage[2])))
            elif arg_mensage[0] == "url":
                try:
                    if arg_mensage[1] == "help":
                        print(
                            """{YELLOW}url_encode{YELLOW}{BLUE} =>{BLUE}{RED}encode personalidado para url\nencode url_encode safa[] encoding"""
                            .format(YELLOW=Fore.YELLOW,
                                    BLUE=Fore.BLUE,
                                    RED=Fore.RED))
                    else:
                        print(
                            quote(arg_mensage[1],
                                  safe=arg_mensage[2],
                                  encoding=arg_mensage[3]))
                except IndexError:
                    print(
                        "digite a sintaxe correta\nncode url_encode safa[] encoding\n ou use o comando help"
                    )
            elif arg_mensage[0] == "unicode_normalize":
                try:
                    if arg_mensage[1] == "help":
                        print(
                            """{YELLOW}unicode_normalize{YELLOW}{BLUE} =>{BLUE}{RED}Transforme caracteres Unicode em uma das formas de normalização['NFC', 'NFKC', 'NFD','NFKD']\n                   
    {YELLOW}NFD{YELLOW}{BLUE} =>{BLUE}{RED}Normalisation Form Canonical Decomposition
    {YELLOW}NFC{YELLOW}{BLUE} =>{BLUE}{RED}Normalisation Form Canonical Composition
    {YELLOW}NFKD{YELLOW}{BLUE} =>{BLUE}{RED}Normalisation Form Compatibility Decomposition
    {YELLOW}NFKC{YELLOW}{BLUE} =>{BLUE}{RED}Normalisation Form Compatibility Composition    
    encode unicode_normalize str encoding['NFC', 'NFKC', 'NFD','NFKD']\n""".
                            format(YELLOW=Fore.YELLOW,
                                   BLUE=Fore.BLUE,
                                   RED=Fore.RED))
                    else:
                        print(
                            unicodedata.normalize(arg_mensage[1],
                                                  arg_mensage[2]))
                except IndexError:
                    print(
                        "digite a sintaxe correta\nncode url_encode safa[] encoding\n ou use o comando help"
                    )
            elif arg_mensage[0] == "qp_encoding":
                try:
                    if arg_mensage[1] == "help":
                        print(
                            """{YELLOW}qp_encoding{YELLOW}{BLUE} =>{BLUE}{RED}
                        Quoted-Printable, or QP encoding,
                        is an encoding that uses printable ASCII characters (alphanumerics and the equals sign '=')
                        to transmit 8-bit data over a 7-bit data path or, generally, over a medium that is not 8-bit clean.
                        It is defined as a MIME content transfer encoding for use in e-mail.
                        QP works by using the equals sign '=' as an escape character. It also limits line length to 76, since some software has limits on line length\nencode qp_encoding TXT encode"""
                            .format(YELLOW=Fore.YELLOW,
                                    BLUE=Fore.BLUE,
                                    RED=Fore.RED))
                    else:
                        encoded = quopri.encodestring(arg_mensage[1].encode(
                            arg_mensage[2]))
                        print(encoded.decode())
                except IndexError:
                    print(
                        "digite a sintaxe correta\nencode qp_encoding é utf-16\n ou use o comando help"
                    )
            elif arg_mensage[0] == "idna":
                try:
                    if arg_mensage[1] == "help":
                        print(
                            """{YELLOW}idna{YELLOW}{BLUE} =>{BLUE}{RED}encode personalidado para url\nencode url_encode safa[] encoding"""
                            .format(YELLOW=Fore.YELLOW,
                                    BLUE=Fore.BLUE,
                                    RED=Fore.RED))
                    else:
                        print(
                            idna.encode(arg_mensage[1]).decode(arg_mensage[2]))
                except IndexError:
                    print(
                        "digite a sintaxe correta\nncode idna string encoding\n ou use o comando help"
                    )

            else:
                pass
            try:
                pass

            except IndexError:
                print("verificar a saida")