Example #1
0
def decryptDict(dct, signingPrivateKey, realPrivateKey, realPublicKey):
    """Decrypt all values in a dictionary.

    Assumes all values in the dictionary are base64ed, encrypted with
    realPublicKey and signed with signingPrivateKey, and decrypts them
    with realPrivateKey.  If realPrivateKey and realPublicKey do not
    correspond to each other, or any value in the dictionary is not
    signed and encrypted correctly, an exception is raised.

    :param dct: encrypted dictionary
    :type dct: dictionary with string values and keys
    :param signingPrivateKey: base64 encoded NaCl private key
    :type signingPrivateKey: string
    :param realPrivateKey: base64 encoded NaCl private key
    :type realPrivateKey: string
    :param realPublicKey: base64 encoded NaCl public key
    :type realPublicKey: string
    :rtype: dictionary with string values and keys
    """
    # base64.b64decode replaces the deprecated base64.decodestring
    # (removed in Python 3.9); semantics are identical for valid input
    # and it exists on both Python 2 and 3.
    signingPrivateKey = npublic.PrivateKey(base64.b64decode(signingPrivateKey))
    signingPublicKey = signingPrivateKey.public_key
    realPrivateKey = npublic.PrivateKey(base64.b64decode(realPrivateKey))
    realPublicKey = npublic.PublicKey(base64.b64decode(realPublicKey))
    # Sanity check: the supplied private key must actually pair with the
    # supplied public key before we build the decryption box.
    if realPrivateKey.public_key.encode() != realPublicKey.encode():
        raise ValueError("private key and public key do not match", realPrivateKey.public_key, realPublicKey)
    box = npublic.Box(realPrivateKey, signingPublicKey)
    ret = {}
    for key, value in six.iteritems(dct):
        decodedValue = base64.b64decode(value)
        # Box.decrypt verifies the signature and raises on tampered input.
        decryptedValue = box.decrypt(decodedValue)
        ret[key] = decryptedValue
    return ret
Example #2
0
File: db.py Project: bala4901/RPI
def exp_restore(db_name, data):
    """Restore a base64-encoded pg_dump backup into a fresh database.

    :param db_name: name of the database to create and restore into
    :param data: base64-encoded dump as produced by pg_dump
    :raises Exception: if the database already exists or pg_restore fails
    :return: True on success
    """
    with _set_pg_password_in_environment():
        if exp_db_exist(db_name):
            _logger.warning('RESTORE DB: %s already exists', db_name)
            raise Exception("Database already exists")

        _create_empty_database(db_name)

        cmd = ['pg_restore', '--no-owner']
        if openerp.tools.config['db_user']:
            # NOTE(review): this option was masked ("******") in the
            # published source; reconstructed by analogy with the
            # --host/--port lines below.
            cmd.append('--username=' + openerp.tools.config['db_user'])
        if openerp.tools.config['db_host']:
            cmd.append('--host=' + openerp.tools.config['db_host'])
        if openerp.tools.config['db_port']:
            cmd.append('--port=' + str(openerp.tools.config['db_port']))
        cmd.append('--dbname=' + db_name)
        args2 = tuple(cmd)

        buf = base64.b64decode(data)
        if os.name == "nt":
            # On Windows pg_restore reads the dump from a temp file instead
            # of stdin.  tempfile.mkstemp replaces the race-prone os.tmpnam
            # (removed in Python 3) and the leaked file() handle.
            import tempfile
            fd, tmpfile = tempfile.mkstemp()
            with os.fdopen(fd, 'wb') as f:
                f.write(buf)
            args2 = args2 + (tmpfile,)
        stdin, stdout = openerp.tools.exec_pg_command_pipe(*args2)
        if not os.name == "nt":
            # Decode once above; stream the same bytes to pg_restore's stdin.
            stdin.write(buf)
        stdin.close()
        res = stdout.close()
        if res:
            raise Exception("Couldn't restore database")
        _logger.info('RESTORE DB: %s', db_name)

        return True
    def create_aeroo_report(self, cr, uid, ids, data, report_xml, context=None, output='odt'):
        """Return an aeroo report generated with aeroolib.

        Builds a parser context for the browse records in ``ids``, loads the
        template (from file/database content or another source), and renders
        it through an aeroolib ``Template``.

        NOTE(review): this fragment appears truncated — the rendered
        ``basic`` template is built but the tail of the method (its return)
        is not visible here.
        """
        pool = pooler.get_pool(cr.dbname)
        if not context:
            context={}
        # Copy so callers' context dicts are never mutated.
        context = context.copy()
        if self.name=='report.printscreen.list':
            context['model'] = data['model']
            context['ids'] = ids
        
        # Aeroo print object tracking this in-flight print job.
        print_id = context.get('print_id', False)
        aeroo_print = self.active_prints[print_id] # Aeroo print object
        aeroo_print.subreports = []
        #self.oo_subreports[print_id] = []
        objects = self.getObjects_mod(cr, uid, ids, report_xml.report_type, context) or []
        oo_parser = self.parser(cr, uid, self.name2, context=context)
        oo_parser.localcontext.update(context)
        oo_parser.set_context(objects, data, ids, report_xml.report_type)

        self.set_xml_data_fields(objects, oo_parser) # Get/Set XML

        oo_parser.localcontext['data'] = data
        oo_parser.localcontext['user_lang'] = context.get('lang', False)
        # Convenience alias: 'o' is the first record when rendering one object.
        if len(objects)>0:
            oo_parser.localcontext['o'] = objects[0]
        # Expose the extra template helper functions to the template context.
        xfunc = ExtraFunctions(cr, uid, report_xml.id, oo_parser.localcontext)
        oo_parser.localcontext.update(xfunc.functions)

        #company_id = objects and 'company_id' in objects[0]._table._columns.keys() and \
        #                        objects[0].company_id and objects[0].company_id.id or False # for object company usage
        company_id = False
        style_io=self.get_styles_file(cr, uid, report_xml, company=company_id, context=context)

        # Template source: stored base64 content, or an alternate provider.
        if report_xml.tml_source in ('file', 'database'):
            if not report_xml.report_sxw_content or report_xml.report_sxw_content=='False':
                raise osv.except_osv(_('Error!'), _('No template found!'))
            file_data = base64.decodestring(report_xml.report_sxw_content)
        else:
            file_data = self.get_other_template(cr, uid, data, oo_parser)
        if not file_data and not report_xml.report_sxw_content:
            # Nothing to render at all: log elapsed time and bail out.
            self.logger("End process %s (%s), elapsed time: %s" % (self.name, self.table, time.time() - aeroo_print.start_time), logging.INFO) # debug mode
            return False, output
        #elif file_data:
        #    template_io = StringIO()
        #    template_io.write(file_data or report_xml.report_sxw_content)
        #    basic = Template(source=template_io, styles=style_io)
        else:
            # 'preload' mode reuses a cached serializer; otherwise build one
            # from the raw template bytes.
            if report_xml.preload_mode == 'preload' and hasattr(self, 'serializer'):
                serializer = copy.copy(self.serializer)
                serializer.apply_style(style_io)
                template_io = serializer.template
            else:
                template_io = StringIO()
                template_io.write(file_data or base64.decodestring(report_xml.report_sxw_content) )
                serializer = OOSerializer(template_io, oo_styles=style_io)
            try:
                basic = Template(source=template_io, serializer=serializer)
            except Exception, e:
                self._raise_exception(e, print_id)
 def get_credentials(self, acc):
     """Return the decoded Credentials for account *acc*.

     The stored "username" and "password" fields are base64-encoded;
     raises KeyringException if either field cannot be decoded.
     """
     self.__check_valid(acc)
     try:
         return Credentials(base64.decodestring(acc["username"]),
             base64.decodestring(acc["password"]))
     except Exception, e:
         raise KeyringException("Cannot decode the base64 username or password for account %s" % (acc.get_name()), e)
Example #5
0
    def from_line(cls, line):
        """
        Parse one line of an OpenSSH known_hosts file into an instance.

        The line must hold at least three space-separated fields: the
        comma-separated hostnames, the key type, and the base64 key data.
        Returns None for malformed lines or unsupported key types.

        @param line: a line from an OpenSSH known_hosts file
        @type line: str

        Lines are expected to have no leading/trailing whitespace; comment
        and blank lines should be filtered out before calling this.
        """
        parts = line.split(' ')
        if len(parts) < 3:
            # Too few fields to be a known_hosts entry.
            return None

        hostnames, keytype, keydata = parts[0], parts[1], parts[2]
        hostnames = hostnames.split(',')

        # Dispatch the declared key type to the class that can parse it;
        # anything else is silently skipped.
        key_classes = {'ssh-rsa': RSAKey, 'ssh-dss': DSSKey}
        if keytype not in key_classes:
            return None
        key = key_classes[keytype](data=base64.decodestring(keydata))

        return cls(hostnames, key)
    def _migrate_from_dir(self, cursor, day):
        """Migrate one day-directory's log-file cache into the database.

        Reads ``entries.log`` under ``day`` ('|'-separated records with
        base64-encoded filename/server path), inserts each record via
        _insert_entry, then removes both log files.  Duplicate rows are
        traced and skipped; any other per-line failure goes to the
        unhandled-exception handler so migration continues.
        """
        entries_fn = self.cache_path.join(day).join(u'entries.log')
        origins_fn = self.cache_path.join(day).join(u'origins.log')
        if not fsutil.is_exists(self.fs, entries_fn):
            # Nothing to migrate for this day.
            return
        with self.fs.open(entries_fn, 'r') as f:
            j = 0  # count of successfully migrated entries
            for line in f:
                try:
                    fn, server_path, mtime, size, blocklist = line.strip().split('|')
                    # Filename and server path are base64-wrapped UTF-8.
                    fn = base64.decodestring(fn).decode('utf8')
                    server_path = base64.decodestring(server_path).decode('utf8')
                    fn = unicode(self.cache_path.join(day, fn))
                    mtime = float(mtime)
                    size = int(size)
                    self._insert_entry(cursor, local_filename=fn, old_filename='*migrated*', date_added=day, ns=None, size=size, mtime=mtime, blocklist=blocklist)
                    j += 1
                except DuplicateEntryError as e:
                    TRACE('Duplicate Entry: %r', e.message)
                except Exception:
                    # Best-effort migration: log and keep going.
                    unhandled_exc_handler()

            TRACE('DELETED: Migrated %s entries from %r' % (j, day))
        # The log files are the old storage format; drop them once migrated.
        fsutil.safe_remove(self.fs, entries_fn)
        fsutil.safe_remove(self.fs, origins_fn)
    def post(self):
        """Handle an error report logged over HTTP.

        Builds a LogErrorRequestTO from the request fields and forwards it
        to logErrorBizz — authenticated via the X-MCTracker-* headers when
        present, otherwise anonymously with an install_id.
        """
        description = self.request.get("description", None)
        platform = int(self.request.get("platform", 0))
        platform_version = self.request.get("platform_version", None)
        timestamp = long(self.request.get("timestamp", 0))
        mobicage_version = self.request.get("mobicage_version", None)
        error_message = self.request.get("error_message", None)

        logging.debug("Error logged over HTTP:\n%s", error_message)

        ler = LogErrorRequestTO()
        ler.description = description
        ler.platform = platform
        ler.platformVersion = platform_version
        ler.timestamp = timestamp
        ler.mobicageVersion = mobicage_version
        ler.errorMessage = error_message

        # Local import — presumably avoids a circular import; confirm.
        from rogerthat.bizz.system import logErrorBizz

        # Credentials travel base64-encoded in custom tracker headers.
        user = self.request.headers.get("X-MCTracker-User", None)
        password = self.request.headers.get("X-MCTracker-Pass", None)
        if user and password:
            users.set_json_rpc_user(base64.decodestring(user), base64.decodestring(password))
            return logErrorBizz(ler, users.get_current_user())
        else:
#           language = self.request.get("language", None)  # Unused
#           deviceId = self.request.get("device_id", None)  # Unused
            install_id = self.request.get("install_id", None)
            return logErrorBizz(ler, user=None, install_id=install_id)
Example #8
0
def _validate_base64_format(instance):
    try:
        base64.decodestring(instance)
    except base64.binascii.Error:
        return False

    return True
Example #9
0
def test_encode_images():
    """Round-trip checks for encode_images over png/jpeg/pdf payloads."""
    # Invalid payloads, but the header and footer bytes come from real files.
    pngdata = b'\x89PNG\r\n\x1a\nblahblahnotactuallyvalidIEND\xaeB`\x82'
    jpegdata = b'\xff\xd8\xff\xe0\x00\x10JFIFblahblahjpeg(\xa0\x0f\xff\xd9'
    pdfdata = b'%PDF-1.\ntrailer<</Root<</Pages<</Kids[<</MediaBox[0 0 3 3]>>]>>>>>>'

    fmt = {
        'image/png': pngdata,
        'image/jpeg': jpegdata,
        'application/pdf': pdfdata,
    }

    encoded = encode_images(fmt)
    for mime, raw in iteritems(fmt):
        # encoded holds unicode; decode back and expect the original bytes.
        nt.assert_equal(decodestring(encoded[mime].encode('ascii')), raw)

    # Encoding an already-encoded dict must be a no-op.
    nt.assert_equal(encoded, encode_images(encoded))

    # Same round trip, but with native-str values.
    b64_str = {}
    for mime, val in iteritems(encoded):
        b64_str[mime] = unicode_to_str(val)
    encoded3 = encode_images(b64_str)
    nt.assert_equal(encoded3, b64_str)
    for mime, raw in iteritems(fmt):
        # encoded3 holds str; convert to bytes before decoding.
        nt.assert_equal(decodestring(str_to_bytes(encoded3[mime])), raw)
Example #10
0
    def Conectar(self):
        """Authenticate against the Twitter API via tweepy.

        Loads the consumer key/secret from config/Variables.json and the
        access key/secret from config/Credenciales.json (the secrets are
        stored base64-encoded), then verifies the credentials.  Returns the
        tweepy.API object on success, or a {"Tipo": 0, ...} error dict on
        failure.
        """
        import json, base64
        try:
            json_data = open(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "config/Variables.json")))
            data = json.load(json_data)
            ConsumerKey = str(data["Consumer_key"])
            # Secrets are stored base64-encoded in the JSON config files.
            ConsumerSecret = base64.decodestring(str(data["Consumer_secret"]))
            json_data.close()

            json_data = open(os.path.abspath(os.path.join(os.path.dirname(__file__), "..", "config/Credenciales.json")))
            data = json.load(json_data)
            AccessKey = str(data["ACCESS_KEY"])
            #AccessKey = "asd"
            AccessSecret = base64.decodestring(str(data["ACCESS_SECRET"]))
            json_data.close()

            Auth = tweepy.OAuthHandler(ConsumerKey, ConsumerSecret)
            Auth.set_access_token(AccessKey, AccessSecret)

            self.API = tweepy.API(Auth)

            if(self.API.verify_credentials()):
                return self.API
            else:
                return {"Tipo":0, "Mensaje":"Error al conectar", "Detalle":self.API.__dict__.__str__()}
        except tweepy.TweepError as e :
            print e.message
            print e.__str__()
            return {"Tipo":0, "Mensaje":"Error al leer Variables", "Detalle":e.__str__()}
 def _handle_display_data(self, msg):
     """ Overridden to handle rich data types, like SVG.

     Prefers SVG, then PNG, then JPEG (if supported); anything else
     falls back to the plain-text handler in the superclass.
     """
     if not self._hidden and self._is_from_this_session(msg):
         source = msg["content"]["source"]
         data = msg["content"]["data"]
         metadata = msg["content"]["metadata"]
         # Try to use the svg or html representations.
         # FIXME: Is this the right ordering of things to try?
         # NOTE(review): dict.has_key is Python-2-only syntax.
         if data.has_key("image/svg+xml"):
             self.log.debug("display: %s", msg.get("content", ""))
             svg = data["image/svg+xml"]
             self._append_svg(svg, True)
         elif data.has_key("image/png"):
             self.log.debug("display: %s", msg.get("content", ""))
             # PNG data is base64 encoded as it passes over the network
             # in a JSON structure so we decode it.
             png = decodestring(data["image/png"].encode("ascii"))
             self._append_png(png, True)
         elif data.has_key("image/jpeg") and self._jpg_supported:
             self.log.debug("display: %s", msg.get("content", ""))
             jpg = decodestring(data["image/jpeg"].encode("ascii"))
             self._append_jpg(jpg, True)
         else:
             # Default back to the plain text representation.
             return super(RichIPythonWidget, self)._handle_display_data(msg)
Example #12
0
 def get(self, article_id):
     """Serve a markdown article; encrypted ones require HTTP Basic auth.

     Articles flagged with "e" are stored AES-ECB encrypted and base64
     encoded; the password's SHA-256 digest is used as the AES key.  Any
     auth or decryption failure falls back to re-prompting for
     credentials via self.auth().
     """
     post_path = site_config["post_dir"] + os.sep + article_id.replace(".", "") + ".md"
     article = SingleFileHandler(post_path)
     if article["e"]:
         auth_header = self.request.headers.get("Authorization")
         if auth_header is None or not auth_header.startswith("Basic "):
             self.auth()
         else:
             try:
                 auth_decoded = base64.decodestring(auth_header[6:])
                 # Split on the FIRST colon only: RFC 7617 allows colons in
                 # the password, and the previous maxsplit=2 made such
                 # passwords unpack into three values and be rejected.
                 username, password = auth_decoded.split(":", 1)
                 if username == "201314":
                     key = hashlib.sha256(password).digest()
                     cipher = AES.new(key, AES.MODE_ECB)
                     article["content"] = markdown.markdown(
                         unicode(cipher.decrypt(base64.decodestring(article["content"])), "utf8")
                     )
                     self.render(
                         "template/article.html", title=site_config["title"], url=site_config["url"], article=article
                     )
                 else:
                     self.auth()
             except:
                 # Deliberate catch-all: any decode/decrypt/render failure
                 # simply re-prompts for credentials.
                 self.auth()
     else:
         self.render("template/article.html", title=site_config["title"], url=site_config["url"], article=article)
    def post(self):
        """Finish a payment-provider OAuth login for an authenticated app user.

        Authenticates via the base64-encoded X-MCTracker-* headers, checks
        that the posted login ``state`` belongs to the same user, exchanges
        the code for a token, marks the login state finished, and writes the
        resulting payment-provider description as JSON.  All failure paths
        respond with HTTP 500.
        """
        params = dict(self.request.POST)
        logging.debug("PaymentLoginAppHandler with params %s", params)
        # Credentials travel base64-encoded in custom tracker headers.
        user = self.request.headers.get("X-MCTracker-User", None)
        password = self.request.headers.get("X-MCTracker-Pass", None)
        if not (user and password):
            logging.debug("user not provided")
            self.response.set_status(500)
            return

        if not users.set_json_rpc_user(base64.decodestring(user), base64.decodestring(password)):
            logging.debug("user not set")
            self.response.set_status(500)
            return
        app_user = users.get_current_user()

        state = params["state"]
        login_state = get_login_state(state)
        # A user may only complete a login flow they started themselves.
        if app_user != login_state.app_user:
            self.response.set_status(500)
            logging.error("%s tried to finish anothers user login %s", app_user, state)
            return

        token = get_api_module(login_state.provider_id).handle_code(login_state)
        logging.debug('Received token: %s', token)
        # finish_login_state is the idempotency guard against double-finish.
        if not finish_login_state(state, token):
            logging.debug("user already finished this login")
            self.response.set_status(500)
            return

        args = {"result": "success",
                "payment_provider": serialize_complex_value(
                    get_payment_provider_for_user(app_user, login_state.provider_id), AppPaymentProviderTO, False)}
        r = json.dumps(args)
        self.response.out.write(r)
Example #14
0
    def test_digest_generation(self):
        """Check UsernameDigestToken.generate_digest against known vectors.

        NOTE(review): the username/password literals were masked with
        '******' when this source was published, so the expected digests
        can no longer be reproduced from the visible inputs — restore the
        real values before relying on these cases.
        """
        token = UsernameDigestToken()
        token.username = '******'

        # case 1
        token.password = '******'
        token.nonce = base64.decodestring("8kqcOS9SFYxSRslITbBmlw==")
        token.created = "2012-10-29T08:18:34.836Z"

        self.assertEquals(token.generate_digest(),
                          "LOzA3VPv+2hFGOHq8O6gcEXsc/k=")


        # case 2
        token.password = '******'
        token.nonce = base64.decodestring("m4feQj9DG96uNY1tCoFBnA==")
        token.created = "2012-10-29T08:49:58.645Z"

        self.assertEquals(token.generate_digest(),
                          "K80tK4TyuvjuXvMu++O8twrXuTY=")

        # case 3
        token.password = '******'
        token.nonce = base64.decodestring("MzI2NjYyNzYxMQ==")
        token.created = "2012-10-29T05:39:24Z"

        self.assertEquals(token.generate_digest(),
                          "88FDZSIoCwQT9zhMqpcekDvZwVo=")
Example #15
0
def authenticate(session, response):
    '''Process an authentication response.  session must be the
    established session (minimally including assoc_handle and
    mac_key), response the query string as given in the original URL
    (i.e. as the CGI variable QUERY_STRING).  If authentication
    succeeds, return the list of signed fields.  If the user was not
    authenticated, NotAuthenticated is raised.  If the HTTP request is
    invalid (missing parameters, failure to validate signature),
    different exceptions will be raised, typically ValueError.

    Callers must check openid.response_nonce for replay attacks.
    '''

    response = _prepare_response(response)

    # 1.1 compat: openid.ns may not be sent
    # if response['openid.ns'][0] != 'http://specs.openid.net/auth/2.0':
    #    raise ValueError('missing openid.ns')
    if session['assoc_handle'] != response['openid.assoc_handle'][0]:
        raise ValueError('incorrect session')
    if response['openid.mode'][0] == 'cancel':
        raise NotAuthenticated('provider did not authenticate user (cancelled)')
    if response['openid.mode'][0] != 'id_res':
        raise ValueError('invalid openid.mode')
    if  'openid.identity' not in response:
        raise ValueError('missing openid.identity')

    # Will not check nonce value - caller must verify this is not a replay

    # Rebuild the key:value\n message that the provider signed.
    signed = response['openid.signed'][0].split(',')
    query = []
    for name in signed:
        value = response['openid.'+name][0]
        value = '%s:%s\n' % (name, value)
        if sys.version_info >= (3,):
            value = value.encode('utf-8')
        query.append(value)
    query = b('').join(query)

    # base64.b64decode works on both Python 2 and 3; the deprecated
    # base64.decodestring it replaces was removed in Python 3.9, which
    # would break the Py3 path this function explicitly supports.
    mac_key = base64.b64decode(b(session['mac_key']))
    transmitted_sig = base64.b64decode(b(response['openid.sig'][0]))
    computed_sig = hmac.new(mac_key, query, hashlib.sha1).digest()

    if transmitted_sig != computed_sig:
        raise ValueError('Invalid signature')

    # Check that all critical fields are signed. OpenID 2.0 says
    # that in a positive assertion, op_endpoint, return_to,
    # response_nonce and assoc_handle must be signed, and claimed_id
    # and identity if present in the response. 1.1 compatibility
    # says that response_nonce and op_endpoint may be missing.
    # In addition, OpenID 1.1 providers apparently fail to sign
    # assoc_handle often.
    if response['openid.mode'][0] == 'id_res':
        if 'return_to' not in signed or \
           ('openid.identity' in response and 'identity' not in signed) or \
           ('openid.claimed_id' in response and 'claimed_id' not in signed):
            # Py3-compatible raise: the old "raise ValueError, msg" form is
            # a SyntaxError under Python 3.
            raise ValueError("Critical field missing in signature")

    return signed
Example #16
0
def is_ssh_pub_key(key):
    """Validates if a string is in valid ssh pub key format

    :param key: A string containing a ssh public key encoded in base64
    :return: Boolean
    :raises ValueError: if *key* is not a string type
    """

    if not isinstance(key, six.string_types):
        raise ValueError(
            "Key should be a string type, received: %s" % type(key))

    # 1) a valid pub key has 3 parts separated by space
    try:
        key_type, key_string, comment = key.split()
    except ValueError:  # need more than one value to unpack
        return False

    # 2) The second part (key string) should be a valid base64
    try:
        # b64decode replaces the deprecated base64.decodestring (removed
        # in Python 3.9); it raises binascii.Error on bad padding too.
        base64.b64decode(key_string.encode('ascii'))
    except base64.binascii.Error:
        return False

    # 3) The first part, the type, should be one of below
    return key_type in (
        'ecdsa-sha2-nistp256', 'ssh-dss', 'ssh-rsa', 'ssh-ed25519'
    )
 def upload_report(self, cr, uid, ids, context=None):
     """Convert an uploaded SXW template to RML and store both on the report.

     Decodes the base64 'file_sxw_upload' field, runs it through the
     sxw2rml XSL transform, writes the SXW and RML content back to the
     target ir.actions.report.xml record, and returns an act_window for
     the "save RML" follow-up form.
     """
     from base_report_designer import  openerp_sxw2rml
     import StringIO
     data=self.read(cr,uid,ids)[0]
     # The uploaded template arrives base64-encoded from the client.
     sxwval = StringIO.StringIO(base64.decodestring(data['file_sxw_upload']))
     fp = tools.file_open('normalized_oo2rml.xsl',subdir='addons/base_report_designer/openerp_sxw2rml')
     newrmlcontent = str(openerp_sxw2rml.sxw2rml(sxwval, xsl=fp.read()))
     report = self.pool.get('ir.actions.report.xml').write(cr, uid, [data['report_id']], {
         'report_sxw_content': base64.decodestring(data['file_sxw_upload']),
         'report_rml_content': newrmlcontent
     })
     cr.commit()
     data_obj = self.pool.get('ir.model.data')
     id2 = data_obj._get_id(cr, uid, 'base_report_designer', 'view_base_report_file_rml')
     report = self.pool.get('ir.actions.report.xml').browse(cr, uid, data['report_id'], context=context)
     if id2:
         # Resolve the XML id to the concrete view record id.
         id2 = data_obj.browse(cr, uid, id2, context=context).res_id
     return {
         'view_type': 'form',
         'view_mode': 'form',
         'res_model': 'base.report.rml.save',
         'views': [(id2, 'form')],
         'view_id': False,
         'type': 'ir.actions.act_window',
         'target': 'new',
     }
Example #18
0
	def get(self):
		"""Scrape the Bizi Zaragoza station map page and register a service.

		Fetches the station map HTML, extracts each station's latitude,
		longitude, id and base64-encoded address with regexes, and feeds
		the resulting list to create_service("bizi", ...).
		"""
		response = urlfetch.fetch('http://www.bizizaragoza.com/localizaciones/station_map.php').content
		self.response.headers['Content-Type'] = 'text/plain'
		# Flatten the page so the regexes can match across line breaks.
		response = response.replace('\r', ' ')
		response = response.replace('\n', ' ')
		response = response.replace('\t', ' ')
		#regex = 'GLatLng\((-?\d+\.\d+),(-?\d+\.\d+).+?idStation="\+(\d+)\+\"&addressnew=([a-zA-Z0-9]+)'
		regex = 'GLatLng\((-?\d+\.\d+),(-?\d+\.\d+).+?idStation=(\d+)&addressnew=([a-zA-Z0-9]+)'
		matchobjects = re.finditer(regex, response)
		regex2 = 'idStation="\+(\d+)\+\"&addressnew=([a-zA-Z0-9]+)'
		matchobjects2 = re.finditer(regex2, response)
		print matchobjects2
		result = []
		import base64
		for match in matchobjects:
			s = match.group(4)
			id = match.group(3)
			title = "Parada %s" % (id, )
			
			# The address comes without base64 padding; first try padding it
			# out, and fall back to truncating to a 4-byte boundary.
			try:
				subtitle = base64.decodestring(s + '=' * (4 - len(s) % 4)).decode('iso-8859-1')
			except:
				lendec = len(s) - (len(s) % 4 if len(s) % 4 else 0)
				subtitle = base64.decodestring(s[:lendec])
			
			result.append({"name": title,
				"title": title,
				"subtitle": subtitle,
				"lat": float(match.group(1)),
				"lon": float(match.group(2)),
				"id": id})
		
		
		#self.render_json(json.dumps(result))
		self.create_service("bizi", result)
Example #19
0
def create_key(keytype, key):
    """Build a paramiko public-key object from base64-encoded key data.

    :param keytype: 'ssh-rsa' or 'ssh-dss'
    :param key: base64-encoded key body
    :return: paramiko RSAKey or DSSKey
    :raises ValueError: for any other key type
    """
    # b64decode replaces the deprecated base64.decodestring (removed in 3.9).
    if keytype == 'ssh-rsa':
        return paramiko.rsakey.RSAKey(data=base64.b64decode(key))
    elif keytype == 'ssh-dss':
        return paramiko.dsskey.DSSKey(data=base64.b64decode(key))
    else:
        # The accepted DSS identifier is 'ssh-dss' — the old message said
        # 'ssh-dsa', a value no caller could actually pass.
        raise ValueError('keytype must be ssh-rsa or ssh-dss')
Example #20
0
def moo(moosic, arglist, opts):
    """Print a base64-stored ASCII-art Easter egg to stdout.

    The decoded payload is a field of ASCII cows ("The Local Moosical
    Society in Concert").  *moosic*, *arglist* and *opts* are accepted
    for command-dispatch compatibility but are not used here.
    """
    # The artwork is stored base64-encoded to keep the source compact;
    # the literal below is runtime data and must not be reformatted.
    print base64.decodestring('''\
ICAgICAgICAoX19fKSAgIChfX18pICAoX19fKSAoX19fKSAgICAgIChfX18pICAgKF9fXykgICAo
X19fKSAoX19fKSAgICAgCiAgICAgICAgKG8gbyhfX18pbyBvKShfX18pbykgKG8gbykgKF9fXyko
byBvKF9fXylvIG8pKF9fXykgbykgKG8gbykoX19fKQogICAgICAgICBcIC8obyBvKVwgLyAobyBv
KS8gKF9fXykgIChvIG8pIFwgLyhvIG8pXCAvIChvIG8pIC8oX19fKS8gKG8gbykKICAgICAgICAg
IE8gIFwgLyAgTyAgIFwgL08gIChvIG8pICAgXCAvICAgTyAgXCAvICBPICAgXCAvIE8gKG8gbykg
ICBcIC8gCiAgICAgICAgKF9fKSAgTyAoPT0pICAgTyhfXykgXCAvKHx8KSBPICAoX18pICBPIChf
XykgICAgKCAgKSBcIC8oX18pIE8gIAogICAgICAgIChvbykoX18pKG9vKShfXykob28pKF9fKShv
bykoX18pKG9vKShfXykoIyMpKF9fKShvbykoX18pKG9vKShfXykKICAgICAgICAgXC8gKG9vKSBc
LyAob28pIFwvICgsLCkgXC8gKG9vKSBcLyAob28pIFwvIChvbykgXC8gKC0tKSBcLyAoT08pCiAg
ICAgICAgKF9fKSBcLyAoX18pIFwvIChfXykgXC8gKF9fKSBcLyAoX18pIFwvIChfXykgXC8gKCws
KSBcLyAoX18pIFwvIAogICAgICAgICgqKikoX18pKC0tKShfXykob28pKF9fKSgwMCkoX18pKG9v
KShfXykob28pKF9fKShvbykoX18pKG9vKShfXykKICAgICAgICAgXC8gKG9vKSBcLyAob28pIFwv
IChvbykgXC8gKG9vKSBcLyAoKiopIFwvIChPTykgXC8gKD8/KSBcLyAob28pCiAgICAgICAgKF9f
KSBcLyAoX18pIFwvIChfXykgXC8gKF9fKSBcLyAoX18pIFwvIChfXykgXC8gKF9fKSBcLyAoX18p
IFwvIAogICAgICAgIChvbykoX18pKG9vKShfXykoQEApKF9fKShvbykoX18pKG9vKShfXykob28p
KF9fKSgtMCkoLCwpKG9vKShfXykKICAgICAgICAgXC8gKG9fKSBcLyAob28pIFwvIChvbykgXC8g
KG8jKSBcLyAob28pIFwvIChvbykgXC8gKG9vKSBcLyAob28pCiAgICAgICAgICAgICBcLyAgICAg
IFwvICAgICAgXC8gICAgICBcLyAgICAgIFwvICAgICAgXC8gICAgICBcLyAgICAgIFwvIAogICAg
ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg
ICAgICAgICAgICAKICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAg
ICAgICAgICAgICAgICAgICAgICAgICAgICAgCiAgICAgICAgICAgICAoX18pICAgICAgICAgICAg
ICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICAgICAs
Jy0wMCAgICAgICAgICAgICAgICAgICAgICAgVGhlIExvY2FsIE1vb3NpY2FsIFNvY2lldHkgICAg
ICAKICAgICAgICAgICAvIC9cX3wgICAgICAvICAgICAgICAgICAgICAgICAgICAgICBpbiBDb25j
ZXJ0ICAgICAgICAgICAgICAgCiAgICAgICAgICAvICB8ICAgICAgIF8vX19fX19fX19fX19fX19f
X19fICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICB8ICAgXD09PV5fX3wg
ICAgICAgICAgICAgICAgIF9ffCAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAg
ICBffF9fXyAvXCAgfF9fX19fX19fX19fX19fX19fX198ICAgICAgICAgICAgICAgICAgICAgICAg
ICAgICAgICAgCiAgICAgICAgfD09PT09fCB8ICAgSSAgICAgICAgICAgICAgSSAgICAgICAgICAg
ICAgICAgICAgICAgICAgICAgICAgICAgIAogICAgICAgICpJICAgSXwgfCAgIEkgICAgICAgICAg
ICAgIEkgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAgICAKICAgICAgICAgSSAgIEle
IF4gICBJICAgICAgICAgICAgICBJICAgICAgICAgICAgICAgICAgICAgLWNmYmQt''')
Example #21
0
 def get_metadata(self, name=None):
     """Fetch the device's metadata stored in its iControl description field.

     Queries /cm/device/~Common~<name> and interprets the description as,
     in order of preference: base64-encoded JSON, plain base64, or the raw
     string.  Returns None when the device has no description or is not
     found (404); raises DeviceQueryException for other HTTP errors.
     """
     if not name:
         name = self.get_device_name()
     request_url = self.bigip.icr_url + '/cm/device/~Common~'
     request_url += name + '?$select=name,description'
     response = self.bigip.icr_session.get(request_url,
                              timeout=const.CONNECTION_TIMEOUT)
     str_comment = None
     if response.status_code < 400:
         response_obj = json.loads(response.text)
         if response_obj['name'] == name:
             if 'description' in response_obj:
                 str_comment = response_obj['description']
     elif response.status_code != 404:
         # 404 just means "no such device" → fall through to return None.
         Log.error('device', response.text)
         raise exceptions.DeviceQueryException(response.text)
     if str_comment:
         # Deliberate best-effort decoding ladder: base64(JSON) → base64 →
         # raw string; the bare excepts keep each fallback available.
         try:
             return json.loads(base64.decodestring(str_comment))
         except:
             try:
                 return base64.decodestring(str_comment)
             except:
                 return str_comment
     return None
Example #22
0
 def _handle_display_data(self, msg):
     """ Overridden to handle rich data types, like SVG.

     Prefers SVG, then PNG, then JPEG (if supported); anything else
     falls back to the plain-text handler in the superclass.
     """
     if not self._hidden and self._is_from_this_session(msg):
         self.flush_clearoutput()
         source = msg['content']['source']
         data = msg['content']['data']
         metadata = msg['content']['metadata']
         # Try to use the svg or html representations.
         # FIXME: Is this the right ordering of things to try?
         if 'image/svg+xml' in data:
             self.log.debug("display: %s", msg.get('content', ''))
             svg = data['image/svg+xml']
             self._append_svg(svg, True)
         elif 'image/png' in data:
             self.log.debug("display: %s", msg.get('content', ''))
             # PNG data is base64 encoded as it passes over the network
             # in a JSON structure so we decode it.
             png = decodestring(data['image/png'].encode('ascii'))
             self._append_png(png, True, metadata=metadata.get('image/png', None))
         elif 'image/jpeg' in data and self._jpg_supported:
             self.log.debug("display: %s", msg.get('content', ''))
             jpg = decodestring(data['image/jpeg'].encode('ascii'))
             self._append_jpg(jpg, True, metadata=metadata.get('image/jpeg', None))
         else:
             # Default back to the plain text representation.
             return super(RichIPythonWidget, self)._handle_display_data(msg)
Example #23
0
 def _handle_display_data(self, msg):
     """Overridden to handle rich data types, like SVG.

     Prefers SVG, then PNG, then JPEG (if supported), then LaTeX; anything
     else (or a failed LaTeX render) falls back to the plain-text handler
     in the superclass.
     """
     self.log.debug("display_data: %s", msg.get('content', ''))
     if self.include_output(msg):
         self.flush_clearoutput()
         data = msg['content']['data']
         metadata = msg['content']['metadata']
         # Try to use the svg or html representations.
         # FIXME: Is this the right ordering of things to try?
         self.log.debug("display: %s", msg.get('content', ''))
         if 'image/svg+xml' in data:
             svg = data['image/svg+xml']
             self._append_svg(svg, True)
         elif 'image/png' in data:
             # PNG data is base64 encoded as it passes over the network
             # in a JSON structure so we decode it.
             png = decodestring(data['image/png'].encode('ascii'))
             self._append_png(png, True, metadata=metadata.get('image/png', None))
         elif 'image/jpeg' in data and self._jpg_supported:
             jpg = decodestring(data['image/jpeg'].encode('ascii'))
             self._append_jpg(jpg, True, metadata=metadata.get('image/jpeg', None))
         elif 'text/latex' in data and latex_to_png:
             try:
                 self._append_latex(data['text/latex'], True)
             except Exception:
                 # A failed render is logged, then handled as plain text.
                 self.log.error("Failed to render latex: '%s'", data['text/latex'], exc_info=True)
                 return super(RichJupyterWidget, self)._handle_display_data(msg)
         else:
             # Default back to the plain text representation.
             return super(RichJupyterWidget, self)._handle_display_data(msg)
Example #24
0
def parse_eml_string(path):
    """Extract and decode the first base64-encoded text/plain part of an EML file.

    Scans the file line by line: once a ``Content-Type: text/plain`` header
    followed by a ``Content-Transfer-Encoding: base64`` header is seen, body
    lines are collected until the next MIME boundary (a line containing
    ``NextPart``).

    :param path: path to the .eml file to parse
    :return: the decoded body as bytes (also printed for convenience)
    """
    textflag = False    # currently inside a text/plain part
    base64flag = False  # that part is base64 encoded
    linelst = []
    with open(path, 'r') as file:
        for line in file:
            if textflag and base64flag:
                if 'NextPart' not in line:
                    linelst.append(line)
                else:
                    break
            if 'Content-Type' in line:
                textflag = 'text/plain' in line
                continue
            if 'Content-Transfer-Encoding' in line and 'base64' in line:
                if textflag:
                    base64flag = True
    payload = ''.join(s.replace('\n', '') for s in linelst)
    # decodebytes replaces base64.decodestring, which was removed in
    # Python 3.9; also dropped the leftover pdb.set_trace() breakpoint.
    decoded = base64.decodebytes(payload.encode('ascii'))
    print(decoded)
    return decoded
Example #25
0
 def _handle_execute_result(self, msg):
     """Overridden to handle rich data types, like SVG.

     Renders the richest available representation of an execute_result
     message (SVG, then PNG, then JPEG, then LaTeX), falling back to the
     parent class's plain-text handling when nothing richer applies or
     LaTeX rendering fails.
     """
     self.log.debug("execute_result: %s", msg.get('content', ''))
     if self.include_output(msg):
         self.flush_clearoutput()
         content = msg['content']
         prompt_number = content.get('execution_count', 0)
         data = content['data']
         metadata = msg['content']['metadata']
         if 'image/svg+xml' in data:
             self._pre_image_append(msg, prompt_number)
             self._append_svg(data['image/svg+xml'], True)
             self._append_html(self.output_sep2, True)
         elif 'image/png' in data:
             # PNG data arrives base64-encoded inside the JSON message.
             self._pre_image_append(msg, prompt_number)
             png = decodestring(data['image/png'].encode('ascii'))
             self._append_png(png, True, metadata=metadata.get('image/png', None))
             self._append_html(self.output_sep2, True)
         elif 'image/jpeg' in data and self._jpg_supported:
             # JPEG is only attempted when the widget reports support for it.
             self._pre_image_append(msg, prompt_number)
             jpg = decodestring(data['image/jpeg'].encode('ascii'))
             self._append_jpg(jpg, True, metadata=metadata.get('image/jpeg', None))
             self._append_html(self.output_sep2, True)
         elif 'text/latex' in data:
             self._pre_image_append(msg, prompt_number)
             try:
                 self._append_latex(data['text/latex'], True)
             except Exception:
                 # Rendering failed: log and delegate to the plain-text path.
                 self.log.error("Failed to render latex: '%s'", data['text/latex'], exc_info=True)
                 return super(RichJupyterWidget, self)._handle_execute_result(msg)
             self._append_html(self.output_sep2, True)
         else:
             # Default back to the plain text representation.
             return super(RichJupyterWidget, self)._handle_execute_result(msg)
Example #26
0
File: pki.py Project: wca/py9p.u
def strtoprivkey(data, passphrase):
    """Parse a PEM private key into an RSA or DSA key object.

    :param data: the PEM file split into a list of lines; data[0] is the
        ``-----BEGIN <KIND> PRIVATE KEY-----`` header line.
    :param passphrase: passphrase for encrypted keys; required when the PEM
        carries a ``Proc-Type: 4,ENCRYPTED`` header, otherwise unused.
    :raises BadKeyError: if the key is encrypted but no passphrase is given.
    """
    # The key kind ("RSA" or "DSA") sits at a fixed offset in the BEGIN header.
    kind = data[0][11:14]
    if data[1].startswith("Proc-Type: 4,ENCRYPTED"):  # encrypted key
        # The DEK-Info header line carries the IV as hex after the comma;
        # [:-1] drops the trailing newline.
        ivdata = data[2].split(",")[1][:-1]
        iv = "".join([chr(int(ivdata[i : i + 2], 16)) for i in range(0, len(ivdata), 2)])
        if not passphrase:
            raise BadKeyError, "encrypted key with no passphrase"
        # Derive a 24-byte 3DES key via chained MD5 over passphrase + IV
        # (appears to follow OpenSSL's legacy EVP_BytesToKey scheme —
        # TODO confirm against the producer of these keys).
        ba = md5(passphrase + iv).digest()
        bb = md5(ba + passphrase + iv).digest()
        decKey = (ba + bb)[:24]
        # Body starts at line 4: header, Proc-Type, DEK-Info, blank line.
        b64Data = base64.decodestring("".join(data[4:-1]))
        keyData = DES3.new(decKey, DES3.MODE_CBC, iv).decrypt(b64Data)
        # Strip the padding: the last byte gives the pad length.
        removeLen = ord(keyData[-1])
        keyData = keyData[:-removeLen]
    else:
        keyData = base64.decodestring("".join(data[1:-1]))
    decodedKey = asn1parse(keyData)
    if type(decodedKey[0]) == type([]):
        decodedKey = decodedKey[0]  # this happens with encrypted keys
    if kind == "RSA":
        n, e, d, p, q = decodedKey[1:6]
        return RSA.construct((n, e, d, p, q))
    elif kind == "DSA":
        p, q, g, y, x = decodedKey[1:6]
        return DSA.construct((y, g, p, q, x))
Example #27
0
def strtoprivkey(data, password):
    """Parse a PEM private key (list of lines) into an RSA or DSA key object.

    :param data: the PEM file split into lines; data[0] is the
        ``-----BEGIN <KIND> PRIVATE KEY-----`` header line.
    :param password: passphrase for encrypted keys; required when the PEM
        carries a ``Proc-Type: 4,ENCRYPTED`` header, otherwise unused.
    :raises BadKeyPassword: if the key is encrypted and no password is given.
    :raises BadKeyError: if the encryption scheme is not DES-EDE3-CBC or
        AES-128-CBC.
    """
    # The key kind ("RSA" or "DSA") sits at a fixed offset in the BEGIN header.
    kind = data[0][11: 14]
    if data[1].startswith('Proc-Type: 4,ENCRYPTED'):  # encrypted key
        if not password:
            raise BadKeyPassword("password required")
        # DEK-Info header: "<cipher>,<hex salt/IV>".
        enc_type, salt = data[2].split(": ")[1].split(",")
        salt = unhexlify(salt.strip())
        # b64decode replaces base64.decodestring, removed in Python 3.9.
        b64Data = base64.b64decode(''.join(data[4:-1]))
        if enc_type == "DES-EDE3-CBC":
            key = get_key_data(salt, password, 24)
            keyData = DES3.new(key, DES3.MODE_CBC, salt).decrypt(b64Data)
        elif enc_type == "AES-128-CBC":
            key = get_key_data(salt, password, 16)
            keyData = AES.new(key, AES.MODE_CBC, salt).decrypt(b64Data)
        else:
            raise BadKeyError("unknown encryption")
        # Strip the padding (last byte gives the pad length). On Python 3
        # indexing bytes yields an int, so the previous ord(keyData[-1])
        # raised TypeError; handle both str (py2) and bytes (py3).
        last = keyData[-1]
        removeLen = last if isinstance(last, int) else ord(last)
        keyData = keyData[:-removeLen]
    else:
        keyData = base64.b64decode(''.join(data[1:-1]))
    decodedKey = asn1parse(keyData)
    if isinstance(decodedKey[0], list):
        decodedKey = decodedKey[0]  # this happens with encrypted keys
    if kind == 'RSA':
        n, e, d, p, q = decodedKey[1:6]
        return RSA.construct((n, e, d, p, q))
    elif kind == 'DSA':
        p, q, g, y, x = decodedKey[1: 6]
        return DSA.construct((y, g, p, q, x))
Example #28
0
 def _handle_pyout(self, msg):
     """Overridden to handle rich data types, like SVG."""
     if self._hidden or not self._is_from_this_session(msg):
         return
     self.flush_clearoutput()
     content = msg['content']
     prompt_number = content.get('execution_count', 0)
     data = content['data']
     metadata = content['metadata']
     if 'image/svg+xml' in data:
         self._pre_image_append(msg, prompt_number)
         self._append_svg(data['image/svg+xml'], True)
         self._append_html(self.output_sep2, True)
     elif 'image/png' in data:
         # The PNG payload is base64-encoded for transport; decode first.
         self._pre_image_append(msg, prompt_number)
         raw = decodestring(data['image/png'].encode('ascii'))
         self._append_png(raw, True, metadata=metadata.get('image/png', None))
         self._append_html(self.output_sep2, True)
     elif 'image/jpeg' in data and self._jpg_supported:
         self._pre_image_append(msg, prompt_number)
         raw = decodestring(data['image/jpeg'].encode('ascii'))
         self._append_jpg(raw, True, metadata=metadata.get('image/jpeg', None))
         self._append_html(self.output_sep2, True)
     else:
         # Default back to the plain text representation.
         return super(RichIPythonWidget, self)._handle_pyout(msg)
Example #29
0
    def get_old_torrents(self, personal_channel_only=False, batch_size=BATCH_SIZE, offset=0,
                         sign=False):
        """Fetch a batch of torrent entries from the legacy sqlite database.

        :param personal_channel_only: when a personal channel id is set,
            True restricts the query to that channel, False excludes it.
        :param batch_size: maximum number of rows to read in this batch.
        :param offset: SQL OFFSET for paging through the table.
        :param sign: when False, entries are tagged LEGACY_ENTRY with a
            pseudo signature instead of being prepared for real signing.
        :return: list of (torrent_dict, health_dict) tuples; malformed
            rows are skipped.
        """
        connection = sqlite3.connect(self.tribler_db)
        cursor = connection.cursor()

        personal_channel_filter = ""
        if self.personal_channel_id:
            personal_channel_filter = " AND ct.channel_id " + \
                                      (" == " if personal_channel_only else " != ") + \
                                      (" %i " % self.personal_channel_id)

        torrents = []
        for tracker_url, channel_id, name, infohash, length, creation_date, torrent_id, category, num_seeders, \
            num_leechers, last_tracker_check in \
                cursor.execute(
                    self.select_full + personal_channel_filter + " group by infohash" +
                    (" LIMIT " + str(batch_size) + " OFFSET " + str(offset))):
            # check if name is valid unicode data
            try:
                name = text_type(name)
            except UnicodeDecodeError:
                continue

            try:
                # Decode once (previously decoded twice); b64decode also
                # replaces base64.decodestring, removed in Python 3.9.
                infohash = base64.b64decode(infohash)
                if len(infohash) != 20:  # a valid SHA-1 infohash is 20 bytes
                    continue
                if not torrent_id or int(torrent_id) == 0:
                    continue

                torrent_dict = {
                    "status": NEW,
                    "infohash": infohash,
                    "size": int(length or 0),
                    "torrent_date": datetime.datetime.utcfromtimestamp(creation_date or 0),
                    "title": name or '',
                    "tags": category or '',
                    "origin_id": 0,
                    "tracker_info": tracker_url or '',
                    "xxx": int(category == u'xxx')}
                if not sign:
                    torrent_dict.update({
                        "id_": torrent_id,
                        "timestamp": int(torrent_id),
                        "status": LEGACY_ENTRY,
                        "public_key": dispesy_cid_to_pk(channel_id),
                        "signature": pseudo_signature(),
                        "skip_key_check": True})

                health_dict = {
                    "seeders": int(num_seeders or 0),
                    "leechers": int(num_leechers or 0),
                    "last_check": int(last_tracker_check or 0)}
                torrents.append((torrent_dict, health_dict))
            except Exception:
                # Best effort: skip malformed rows instead of aborting the
                # batch. Narrowed from a bare except so KeyboardInterrupt
                # and SystemExit are no longer swallowed.
                continue

        connection.close()
        return torrents
Example #30
0
def decode_flashget(link):
    """Decode a flashget:// link into the URL it wraps.

    The payload between the 11-character ``flashget://`` scheme prefix and
    the 7-character trailer is base64; the decoded text is additionally
    wrapped in 10 junk characters on each side.

    :param link: the flashget:// link string
    :return: the embedded URL as a string
    """
    # b64decode replaces base64.decodestring (removed in Python 3.9) and is
    # now performed once, instead of being repeated in the fallback branch.
    raw = base64.b64decode(link[11:len(link) - 7].encode())
    try:
        l = raw.decode()
    except ValueError:
        # Some links carry GBK-encoded text; UnicodeDecodeError is a
        # subclass of ValueError, so the original catch still applies.
        logger.warn(traceback.format_exc())
        l = raw.decode('gbk')
    return l[10:len(l) - 10]
Example #31
0
def action(gid=None):
    """
    Collect per-environment account resource data and store it locally.

    Schedules a ``collect_account_data`` jumpscript on the controller of
    every known location, decodes each job's base64-encoded tar result,
    groups the contained .bin capnp dumps by account and (year, month,
    day, hour), and writes one merged ``account_capnp.bin`` per bucket
    under /opt/jumpscale7/var/resourcetracking/.

    :param gid: unused in the body; jobs are scheduled per location grid id.
    """
    import CloudscalerLibcloud
    import capnp
    agentcontroller = j.clients.agentcontroller.get()
    cbcl = j.clients.osis.getNamespace("cloudbroker")
    jobs = list()

    capnp.remove_import_hook()
    schemapath = os.path.join(os.path.dirname(CloudscalerLibcloud.__file__),
                              'schemas')
    resources_capnp = capnp.load(
        os.path.join(schemapath, 'resourcemonitoring.capnp'))

    # schedule command
    # NOTE(review): search({})[1:] skips the first search result —
    # presumably a count/header row; confirm against the osis client API.
    for location in cbcl.location.search({})[1:]:
        jobs.append(
            agentcontroller.scheduleCmd(cmdcategory="greenitglobe",
                                        cmdname="collect_account_data",
                                        nid=None,
                                        timeout=60,
                                        roles=['controller'],
                                        gid=location["gid"],
                                        wait=True))

    # get return from each job.
    accounts = dict()
    for job in jobs:
        result = agentcontroller.waitJumpscript(job=job)

        # read the tar.
        c = io.BytesIO()
        if result['state'] != 'OK':
            raise RuntimeError("Failed to collect account data from grid %s" %
                               (job['gid']))
        result_decoded = base64.decodestring(result['result'])
        c.write(result_decoded)
        c.seek(0)
        # NOTE(review): `tar` is rebound on every job iteration, but members
        # collected from ALL jobs are extracted from it in the loop below —
        # members belonging to earlier jobs would be read from the wrong
        # (last) archive. Confirm whether more than one job can return data.
        tar = tarfile.open(mode="r", fileobj=c)
        members = tar.getmembers()
        for member in members:
            if member.name.endswith(".bin"):
                # Path layout: .../active/<account>/<year>/<month>/<day>/<hour>/
                accountid, year, month, day, hour = re.findall(
                    "opt/jumpscale7/var/resourcetracking/active/([\d]+)/([\d]+)/([\d]+)/([\d]+)/([\d]+)/",
                    member.name)[0]

                datekey = (year, month, day, hour)
                # Group members per account, then per hourly bucket.
                accounts.setdefault(accountid, {
                    datekey: []
                }).setdefault(datekey, []).append(member)

    # Python 2 only: dict.iteritems.
    for account_id, dates in accounts.iteritems():
        for date, members in dates.iteritems():
            account = resources_capnp.Account.new_message()
            year, month, day, hour = date
            account.accountId = int(account_id)
            cloudspaces = account.init("cloudspaces", len(members))

            for i, member in enumerate(members):
                # read the capnp file obj.
                binary_content = tar.extractfile(member).read()
                cloudspace_obj = resources_capnp.CloudSpace.from_bytes(
                    binary_content)
                cloudspaces[i] = cloudspace_obj
                filepath = '/opt/jumpscale7/var/resourcetracking/%s/' % os.path.join(
                    account_id, year, month, day, hour)
                try:
                    os.makedirs(filepath)
                except OSError as err:
                    # errno 17 == EEXIST: the directory already exists.
                    if err.errno != 17:
                        raise

            # NOTE(review): `filepath` is assigned inside the loop above, so
            # this raises NameError if a bucket's member list is ever empty.
            with open(os.path.join(filepath, "account_capnp.bin"), 'w+b') as f:
                account.write(f)
Example #32
0
# -*- coding:utf-8 -*-
"""  
 @desc:  
 @author: wuchongxiang 
 @site: 
 @software: PyCharm
 @file: 20181130.py
 @time: 2018/11/30 16:17
"""
import base64, os
code = '''UEsDBBQACAgIANZFfk0AAAAAAAAAAAAAAAARAAAAUGFja2FnZUxhYmVscy5wZGbtmwtYTOkbwFeqTZQKoTYOiZKac5+Zst2vKl2ljKGZacqkmcnMdHPZmFSE1iXWJSQVlmxFF5VbRVa5lKgk5FYtCUW7bM3/zISN6f/89//3fzz70DxPzznznu97v+/93vf7zXvezjHwsHMwgUxRVYN7969cU4UgAAT4zBDVmTNJtvxQvsA7jMFik+zYERwW21HAiCZ5hzNF0WFskjOXEcwmObE5wYtEAEQhOXBCRWwBydbW2cfHgRFlx2bxA9kkH2lTP3dmCJslIvXJPBgCrrBPfTiXJwQoOMmLHymU6nABTCALC9JcTqBokVTuyuYFE2cwFSHZcERCD7bAls8N4/PYPGJICwuhSMBmcFWnpm2q1tOsjtVPMzMNXJ60pvURjzjVr4h3dNDUI/VM2aLkDLpzAIexEokKaZr4O80JGvYUwHnSMInkVfK+Z+JsuyjuZHP9vRKJs5KW2fnKswpJKqOgGMnzQJHj6skJKkXWChoJzXUSiWHYFkjd3CqJAswyGDlR0jtZUw+wW3pWQVtvqcb6hZI/951VLNU4tPtigaZhmH3rQknPahXQLlJ0MSxDfUy8RCJxwg4aBqRdtRKTYiS9V6dZJ1PsIqBUa4XhEsnvaQnDPRdpzva4ewSTSM47+omVlB2dAPG4eLGiN9F10eQERY1Y18kJhmFxU4jmzppdGo2GYQdZpmIVj4WS3i3jJ9TEi1erlikTre/pKg4T12iItR3T7q5pT7b88xsrRVU2L/DtehFnUg9D8F+ufus8h1CGiP3Wc2/XHidT3i90VMqGOZdmN1pp7Dk2cd+Bk3lOl3KP5depk710fNLTatLPrHJRXHvfKmVq84Gwp3kj68a26fq0FyzW3VV+dHrlq6qnVeQ7l9dXuPxC7jqulcPkMKGSF7dHnhzFbzNi/DLn3h8zbPUnj6lfofcafqqq78vi33tynCJun3R7ZMahJZlC94p9I0q9Tj9XxvTLZk3nbqBstvb47UZruRdz7B6lhBxFxY0zik8UNAYPYSgcbL7o7aXLmpz6SGQ50bfkzpMRwVe1bt3QGj0+gkJXa+oWlnqq7S7TWV4/oqmjXWJp5uWb9jhlmZJI8rj3zY7XnT/bNYapXWQZj28z9d0ZuXfLC3HznnGujv6wXgbTclcT+x6gx3Zvexm1zcL06SlrZ23/3NlkFqLy2Gx5wglj/Sf7zHGV155vGlqH7q5ZeXtTqvpTUnJ12sYri1wXF8WJvmkzTh/2yld0ffO3TSadHvsQzvxJ4OvLBXZ/2ukvquta2c3SncC/nydIK/Azz9SfOeZcodpR9XuOozfe0VrV066CDzUKqzgz+kKVybh5XDqzJHQini/pbbCI3Wu5gJzn3jwvz92eOXFKkMjRf90Vof1ii6E7xSEOa5cdsNfcNRfTKjd1s3ZZ/fN2mFm1Nb9peq/mVUeneb+lmZxdUCi8pW3sHgDGT2n9zphVtDppVM2GY4EvzjqceHXshkIRuyTM0KrxwSFW1VadLZm9pAYeuWZJS1ek0HoFfVe6vbXbim0Mcfa5/LKpY323XQ/b8F3XoW/zxaahLUj75GrNPfh1hYVnJyzdRO+OSV+3O007pXNlzLX0eYmb/cY1R8Xbmq8itTC7l5feC5i0YMS9IrVC5PWtB0ZttYHDH/t3Jr28tolUPGR+sVeFVf6kvM3R554Hn7kwV/WEh2mImXt+me9UfFHzE7Fv7htWj5Kbms/YASId7Q81nojAB4Ebafh79QHKQ4oyL7aQHy5gsaVs8hDwWd5sEUAjEXgESD7sKBHQRzybt0fbt0dnOsmBUCndQxDQp9IBBiBQekag7C36iMt+QRAAya5zuMHEKdTXgmhDAFEGNER21Y0dyGHY8KNoUhUYFTMlwwAFhUxxCt3C4r1F2CCmvxZM438P09RBTA9i+nNgOu3cWZuSceXhum
HBXSd+9a459Kv2jWOP9B4f0F8YMbQwIiNpVahtiEFn4v0rKzf9Mlqwb36taM/vNIbxKN3jYRHWxg09Ob0KszEfnQFCnTwQp/HPzWkYQN5xGgFkpP3fOU0Z5PTXwmnqIKcHOf0P4vS9dbFxUdCax/jdy63CLQfSvbeYHAm+ZH7D3fHUE+Ww9ll6Qxvia2c9GlOxd4j+QqMHNicOts0cVlD6QT7tTvfRkw91GByI09TPzWkEQN9xGgNkpP2fOQ0Plj2+Fk7Df7Ps8RGnBddAjdvHJqzawrs6Pm1Wrgn1JJYsah2hN8um0Za0A1+Sgqs7v7Zd9uhi2fPJJUOG7g0sjhi1izEzPzKjiW/ZfrTO5YYubBI9YysITVyQ020bUbrNVRCZpaVtqDmex259FT56TWGl2sINDeWNnB8UG21LJs7yvu/W4lqxSWW/12lG7M5Nnl5O7euTxzD8frvcWu7pZVFyb3qiErDyQf2lBt7CJSqjrNTmuqpy1+dYXWr7weJI1OmG8ZdTDQp3GxhOr28WYV3AhWGNOkWJIc88EdHpAomk1cl/7VHh0zFXe3550xH84sphLW4ZNuNHk7jr9CNoh8W0h0pl31tsT8oNNJv1xw94ceXv1mYX99Q8fj5jxfxbK2I3z8gN3M077wscefxML6OolDmyKccazG4P3j9SNGvoUscUs8LTHg7rbCz2Q1lt2jVD0n/ZOLWeVj3HZYKooMo27qjT/cSV1d2nDO5N07Euzvy2ZGLjs1p8gfG0pevyWGal3l7rG+/+BpzK2CjepjK9PukuvFEhcoRrUtUt8b5rNwzOV77yPP/yjmVj8LPOiseZr4zmH351yOlJbPn6p4aLn0cfXXQp2Mrb45Lh847jhszOnx3Eye7e9Vr7pmdmqwozuAWJMYpdmpu3vV7kkt3S+13BuYujxq2ftMSs4uJ20333V4vum4xRCtBf8Ya6WL0JWhA1OpX7ohZ6NGuDs7fkp9AC3bZC4eviE6mrjrrVpKVmWlSC1sllEVJOJ73jdIrp/Bbjm/ZpJntMr38n5bTnH08lTmPN7U18Lg/tddsS8qsLc9O55+sTbymeSuls6nS+MFL7z3PA0caVxTZ7d8002L3zScAOpINiyR1ardZ8c4yZcu7wQxkXjJMshzqcSr4Q1+V5LZop5bQpwelY31wJwWmPOB9ggFBHBuA0/NnrHiiAveM0GYA/qe4Bo4Oc/lo4jf1/8mmcbKg23cuT43X3jIG4nKJxwPl3o5Rl7WUv7V8rz3nTfvP81JzDF3O/P2beeakAuf+8tFF36by1fjlozrgFSzsSbucUPEvOroNfLt+2T3OT6pNT6ivGTytd87B+F8XcRJ2xbGW3LTskpa3q2hLD2Owafol12YXszbNo+kmBGj5FmFl+cn1c8FnGjCFDplTyHnG7/a22fxNS2JVce367ykGlpivfYh1kWpnR8dEFeYr28Xqz66y6e8RhtVYt2UMm5tt2d0dIJuoZHb4afvakcpEkXNLj0xM9Z1/WCeXrOVvXi8RbXzW1pJcEFOorz90/86QOm0nee/sh01L3oXvDy6id31/uIPLpluiiizek+fTU5T9SN2m+uqq3WLHn4JvQrlWzF1l1OHmM7B57kbPg4gE0c0yTzc2ErjE1istcip5ciJ3988zaVleduSrrT3VmaVhWq9zEvr/bUx9XsStqd0qryY2kVg13e4gXezjhd02lB5F2ATGI+JKV8i0IVIKFw+N9aXPGkRoTNrSfjsm/crflxzpuu3fRjXbvLfVKa42aPG83d86vjr+y8qgVdqD25LHcNe2HdYYxf8pM1behu44neFvH05NotOpPMRFWaweQblF5lVungeNstUTxWyG6VcWwtvP+8B8B++e/8V88pAneYz46NatTyulYZ5pkbihZt+1g5Ivik6mneK41uamZVILT9cxGhreKS2VIoY366+NEPn2Z24Jw9d/l0+p9+XRnVq72/GcrYxrT561JfJ9PBzHfhGol9p
wg8uldsnzaOz29bqM0n14mzaddFQqnvi1P2/crT+MuWzZk561Vm2dWnJr3pNcvZohnlo/+AKGOD8Rp7PNymqAzFYD7Mmq/IAzAP4nT5EFOfy2cpgzWPQbrHv+gugez/L+vT4P8c1mhobWljfZZgd2Vr4enH/VxGyDUqQNxmvLZOQ0RLchvQY0D5E8BNQIOgvorATUCDYJ6ENT/IFBrrf3vC9SMGGMem7Vf+7ZpiEHxwhsLM+p8PAYIdXgAUCPQ5wc1AsgAKwU1GfikCjWCDIL6awE1+rdATUE/FdTmZ/8tqJNVjps+6LbtKKcxf11wuLzTP97Bbnr2G7Mu9q0xDkfO3y5v4Cx5V6Z2bdmTZDt+dcYP99VCHOK3+tzUWTIlNrU2q3ptRoXpzOHqrGEqgH+EkF9wRaFU0aNs64F0/Lzdh2XqLAND+ElYkU5RR8vq/TpmCdjv2eOL7jRKYkTpR+zqqN2qrb11Pd3Iy5Y5a/Mf6iRX+MI1fWVqdux/hHXBB7DO/0RYny5f/NCOnxPh6hg1PXGJ7ny98h8NgdMGCm9I+vUj0s/oDfUGkIKARXnha3MaLEJDJT3HVihbxpTwQvaW5oXsTSy3cKpqTcrV3tuSeGO5Mqp0fb1W9+x1k/FfQqasoe8Ub1OZA7HLtxtzi+f8qZ/648bjtfY08cmwlkIT3z1nAkY4X6X6nhcN05uWrhv864PYDYL24CwlUWVUs8fK/EveFduNsaluPQtzVzQ4Vey+dueS0stw7mZtJeylf/ywUu1G9V3z/fx9yzDB1QMT6lWLsndfOb7OzjHqps/w5UrLWu0jmiSbTCMS54X1VamNZjEZq++PV29XPJWyrqnTeYa6NpOlUcc9s7u6Zaq784GpNL8rOyOP7E1a3qB29+ZWM+VctUNbq/qq1F4X4gSe16LFDcV6Bbk5nsP5Y/M2N5Eek77J8T0xQLxjA9Ea/fy0xgAEeUtrCkD9pPrHXxa9w7LUEqjPHumMSDYMIVt24sQOjWCLOCwGyZ5HbHAOL5jgLM+aJ+S8+97vV6BfpciOLWRJ52UCwSDJlhH2lvgUCkjyFrG5vgAB5/fDSRsLOGEivowlwUIAJxpFh7Kli8ng8YVswyFKCgqK3/R9jAirpb1sCDsBmgmMAcQfSiwbCEr1963pbAaXTfL28ebzgk1cpUOTnEWMUA7LmhccygZAknXf9Ij2/ebfry5kN1em8P0C2TrbSdVKpwxKv3hHCwkznHlBfNkqhoWFsrlShSgRCsEcIoSiDa0JpWwjkrsgkC0g1snQ0QaSTn2ARf5gnh8uCID0PWE0F6BBNBgk06nS+EDIhL0IDUYoxClGh3AagqB0iCyty6E4TNyASa/REZRGgBjAcBDAqRhARqh0hErDIIiOwjQEIjpDMB3FaRQqcaOGUegocRFF6RhMQ3GMjqE0MoYAZOKGjkqEEY6S6ThOQyHiQKGhxD0eRtxyokRcwjgKoCgxDqERRolIJtIKlBgPhil0MqESJtMphGsgogMKUGAaBqJELBOjE7bBZEIPoZJCoeEYTKeCNBSDAZQwtP+DSuDAEQv+p8U0mcPjONqYzLH1hk2c/orgD6R9kRrI4Imk3YU0WRR49R9dfnAHvoA70K/fB3mLG0Mk4BARKu0Pyp6ulcWmgCttBUD/N2bIMGDzbtNDCGKKE3veFMfp7zMhFO73a7x1c87sclDDbllMnMtRaE9eJPcnu8m1Tr1KR63SmiXDFeaneJ+/FF4dGPwiiFxdEPBIs2rsyzsTZ9v0/sQ8SDbiu087lKtIdTy7KFOxZopg24hEc5AFTwq2AZLSiyPIZKcfLMMfr6Wbl+ygvLhLSdHZ4Ra3zOghbeaV+tWlhk2qkdNHJW6Oum8f7ZixZIFSx8Hp164v9b1ZWNocuqNtp9Kr2U4acT8lHaxKqC8EwT+MVw5Jzi8o9tuhJXbOTHfOHG2ahJ4YrrXr2pLDCemHaUXLFjQf7pib4PbjBAv/q6ejOnsqv8+qHC
5aQQvoaK+xbFocadmiVbVGnunooEe/MI9SBz36hXkUGfToF+ZRbNCjX5hHyYMe/cI8Shn06BfmUXzQo1+YR6F+qZELJ1BIk71B6gXIXlDyAmTPvxMHpO+A9x1kBSNA9q8H4iB7eof+V/1KSLLlh0vLISRnHz8fQ9gUMiUb9SuM9MOCrI8tQ8QI5Qf39X1be+rXvF827sYPtCMizNDODAYhCgQhIEhBcRSdZ0SyJYwScfi8f3ediKrAcBZbYMiRBZRsUgAzGoB8ED+ffrOLErCDVEEABVXB9x8AxzDCyiCgTwYRMUSEsOwK750MwkH8YxlCkb4p8KEMplDIH8tQmIJ9LMNwCiLXjozJjYvBkJw+BMHk+sIw9eMxZN3lZJi8HTiEfmwHoRCD5WTSatHHMqrcXECYTHz7WEaF5GQICn5sL4hCKEVOhmDyMor8nDGM+rFtIEaRsxfEYerHawqSQRSVk0krhR/LyKjcmhJTkVsXChmUWxcqRJXTR6Wicj6Xel1OhqNyPocQOZ8TcSo3BgRRKPI+h3E5fTjS30ciAYMTyhZI96W0cgkgfW/FONsBtJmBDDwIRkEGCAaCCE6lBlGoKIuMgcwgJpnMxCCLmUwKm8lAEAoYxKIwiAMCUckIHsRgsBEGSJxZ0ElefL4IQPqeOfHmLGUTm5LYp0IRQyCS7VIIJ/ahqoGBvbuD6r8AUEsHCAWq5v1sEwAA3EMAAFBLAQIUABQACAgIANZFfk0Fqub9bBMAANxDAAARAAAAAAAAAAAAAAAAAAAAAABQYWNrYWdlTGFiZWxzLnBkZlBLBQYAAAAAAQABAD8AAACrEwAAAAA='''
# code = '''UEsDBBQACAgIAEE/fk0AAAAAAAAAAAAAAAARAAAAUGFja2FnZUxhYmVscy5wZGatV2tUE9cWVl7mUouIggoiB0ELUphnZhJEJISHEVQw+KgxtiEZQhASSAYK4qMIllQkYn3gW1C07bpUvT7Qi7UoYBWxVqyKFatYquCrpdZqrcW5ZwIWLN51/9yslTVn9tnn2/vs8+3vJOPiIqICsSDScdwPbd9860gCFBgSUxxDQhCpIdVglKer1AwSwWTp1Ey0UZWDyDMT2Zx0BpGlqbQMMoXRaZNZgONIlC6VZYyIVCpLSIhSZUcwaoOGQRJ417kzElMYNYt02+JUxjRTN3xmmt4ERBQy0/C+iceIAYFYaCgyR6dhk3l7LKPXwhEhpJBwHWuKY4xSQ1q6Qc/oWQAdTayRUaU5jt+5pslzaJPZYu6oT1vT5OWzKRsOV48Z5Tkygq355Zark8AltboozNxxiuNO2shAhpNgtWDs1raC4YkjOM5tZbq5XRBgF13Yeqbd6lBkX1y63D65vJXj/nzHc+gtQXltUElYgfchgaXObxn3Z3lt+FBPy8QwiwhMBeZOjvslXfJepbO36pLUrnwox70QuBwtjP1p1r+lBZWTuRe1diPH/ixRuW6rgtHuSqQC7/1huwTDBhM1HHdNEGz3j7HiVWDncp8dHCfDt7pWrPTMTYIgbzuPd15zaZ5ddMQi77I81yEc91RQftI+MW/4bSfngrxlXNc68enR4+Na86LQSKfhCAxr2SXaHxvg5BzgEM1xv+cHmR9dd44/+uFJm+GCNo67ZZM9CeRFupp9V77P71OS/2mRz3eeEhuJOcwymeP2r3xQ3tr83krj1jkc13khPmz5BdbFcfXNzoEDnB0Zvaan3HDEE0TYS5Seo49KVbFMz7n3nBxNE38dU/bWJbO+Nn6LOt846PWB7Vvflw3O8G2senBj3tRg9kMn7z0Py0tjt1RM9yj07dI6BM3PGan/YsCQE0Xpt+vXGbqGHfvo6+TUfRWXny45Ni+GWDjxwGj/Ve1FHZc+3rl7ScOV+2fuzSNI/3XJ/t4r2txbXIMH1zfY7bp+xydGXHX/jdvVIZ+sD5TeKnp2wj1RbCtKHHL+WHGY5WgDNrph5LCYTeAIUrmAqhm9ae61y1Xte4MrBuiTPbIyVi3MHf/zgZg9Fdu9bv2TdFsB8FEhHtMqswZJt5s/LDPf2XBoEspk1NwMHNb5xcHjh4cWY2htncU8+6TrqMrlgyR7JkaXVmVUDHZPavF8qz0oJ5T7/XFX49obXi4VYyyF34/y8H3ybqfdA7cxxUlrvTOeH7yQILiwd1FE08ztbqdm72MkK6oTXELcjF3P/7hfvji7+mzJvsFfjK2NG/SbfOs11zhNhvzx+T8aOiq9ij7XLF5v3nZ0x/XT7fkZi+nj1ZFaclpUUXZx85BUpaT9zAO0KC1yC1Yc3+QRXDbm+FniWZ7L5jtPvN9MlpTuz3pvSYGw/vrW6R54a2GOw9T0SKfNg8zTqw8NWBy95PH5uw43j40XHXZo/tS8IDF1x5PODcNbf2maV0b/+LEedD52H1joZZs1YEreofnhXk/d7OtTMhu5DbE/Nx/RHh5Xd3+DvfyiIW9p7Jx1l4L8sv51oCxaG7fxgl/XV8++urw6eErR1LVtV2qRI984ebpembH+XO6Jhz4xGWeJ/OQ1Z2vORf90ctIH4zoEPrbBEq+8psh7n3XWTSWDqCd33t14eLp8eNnnXV9tXpjrX1F1Rb57YeTBm8G/t+zPrQ6p+7Xr14D2BYtqD3jsyN10clCN2U1QI21xntCxceH8eI/vhNmmpfb3lgxjPrkYvCzmtMczh09jHa40e24m4jsezW/RGEZEH072enP91U0r9bNyL7/1Q2Pd1aUbLLNPJV19ZHPh2pFJQ+wbRwzoGP3RiVFO+Rc3zHS/GjtyRtT9VTFD9jSW7mF3b1uw/bLdFvDru2MWR6YsuUn/1jz1220XD78Y
KJMW5PTvI7qv4OpZKG0mwPfWzG7tjONVdiZjMmQa1Qwvm3FGg1rOsECBQOEGSAKTzYJuMQ7veUp7njIlEgUR+QbFAG6FjMIBwQ+gyPaIMpydm4QBzDqtS9NigOx2gC5QqXmlpaxz0xiNThVuyFag8JXCcECLcWVo6Mtt4L3beHlD8Plj3bvg80DCVSbGOpjCpGYxrE6tQiL1UC10ei2UfL1Eb9K9fO/FFfXiRjAmNZ9PIIajiFSV3nP3iEQoImeZtNlATPRG452NunTWYNUlrQlQ0CknleErqNIbTIzfQHsbG7sB3R9/uFt+VTjcIFAE4kIAvyTAUBTl8bsLOV2VxiDyBLlBrw2M5UMjMlaVqlNL9NpUBqCIpDs96N+bvrhP+nOseH+VRyqL4FH5jFH+RZ5jgruQ6ZMM1hqmp6cyaTweCY9fq4OkyfGTQEzGH5lh1DBGWCW/6HCMz/w1JX4lzVfrAUTW85wDFJgCR2mlGKYFCBrullDghAgOhcD6JEglRgNYC5LCAU7zk0qCVFAiEggpyAGxENCEWEmIFUIMU5K4gsDgKgxXkpRCJKYALRQpSThJkkohriApoVJIKmghAWgMA2IaBxRJKylKQWLwIVKQOA2EuBiQGIxPkYAkYRyIiJOQvQQKSBgPx0VKGkLitFIETwaDC0ggwhVClIT58jlTMFOIAyFFIgUlxJViVEEKcUDCnfahK/F6uqL/q5aBs/S66PDAWVI5Hjill76vWLt5qlHpWX65ScFzYGaf2Fj/2FEGY9rrrtFXfj5NU7FGHaQnvx61dqyVmMY03gtg/zeVsPZ++MtWx2g6iAIiKLyU8uW1jouFfa719euOT69HnQtyJ6+IST5nWXYlS+ZrcfcPsZkDmjpf2I9QBlXhLSUT3B9aapSr3/5y0KzVZV+6cc9vJ555Xxbesnvx84my4nH1B13XpaSAQMfcU/mJNvLxRZc2OZt91hdSlz19W+Kfb81O3kUPiw4VRWmHJgWsaZ/w4z7j3vj88AZlwDcJb67cnN12uNOSoq1xuBEfENt87Z27pwNXnd++ae/C1gX5di45d0tqF5ect7WNmvDBOW9bp50NhGNMQ9mq71Ju7P5cVepS5/tGUgHrcrDzfMlarHXKvbez6kyaTLfijWVk29jKM+HFWlufKjkjvLg8pd5+/erP2mTPStnMxnsLgjq/3jencEvQmNDTJReT9i3typhRetv19qMvwyb/8Umq1/Zmw9Oq6skPW9542F/4qV4axOg0JgVtpUmv6pvgfZDJ/9BFZAlzE/zwICyI9u/DIrR3vXWNVAXVyKDtXtst2n28+5BumkETASnmFxGMo5gIwwgUha2K4vP8ESlMkNUZ9P9tHtJKk6lmjH46K6OsOYHEHIAlEHMT+iSXbWSSHCF3cEf0rw+ghEJCCJJAjw32NVQi64z+pQ2jUPrvNhwXk3+3QU9hP5uQ6oeHi4l+eBiJviauuB8eJhJRf7cR/Fs/G9nHjzWqdKmMka8zL+UA675ZZRFAEcIQKI5pVHQiRVJqQq3CSZVYw9BJajJRpUZxVWgIzuAorkY1jJokKDJJzYjFYhLFRUKaQRkNg4Uq4X8lAwushz8TkesWwcbHYd1NrMrIWqtOYELacdy4yBlRjv8BUEsHCNKHT35yCgAA2w0AAFBLAQIUABQACAgIAEE/fk3Sh09+cgoAANsNAAARAAAAAAAAAAAAAAAAAAAAAABQYWNrYWdlTGFiZWxzLnBkZlBLBQYAAAAAAQABAD8AAACxCgAAAAA='''
# code = '''UEsDBBQACAgIAEE/fk0AAAAAAAAAAAAAAAARAAAAUGFja2FnZUxhYmVscy5wZGatV2tUE9cWVl7mUouIggoiB0ELUphnZhJEJISHEVQw+KgxtiEZQhASSAYK4qMIllQkYn3gW1C07bpUvT7Qi7UoYBWxVqyKFatYquCrpdZqrcW5ZwIWLN51/9yslTVn9tnn2/vs8+3vJOPiIqICsSDScdwPbd9860gCFBgSUxxDQhCpIdVglKer1AwSwWTp1Ey0UZWDyDMT2Zx0BpGlqbQMMoXRaZNZgONIlC6VZYyIVCpLSIhSZUcwaoOGQRJ417kzElMYNYt02+JUxjRTN3xmmt4ERBQy0/C+iceIAYFYaCgyR6dhk3l7LKPXwhEhpJBwHWuKY4xSQ1q6Qc/oWQAdTayRUaU5jt+5pslzaJPZYu6oT1vT5OWzKRsOV48Z5Tkygq355Zark8AltboozNxxiuNO2shAhpNgtWDs1raC4YkjOM5tZbq5XRBgF13Yeqbd6lBkX1y63D65vJXj/nzHc+gtQXltUElYgfchgaXObxn3Z3lt+FBPy8QwiwhMBeZOjvslXfJepbO36pLUrnwox70QuBwtjP1p1r+lBZWTuRe1diPH/ixRuW6rgtHuSqQC7/1huwTDBhM1HHdNEGz3j7HiVWDncp8dHCfDt7pWrPTMTYIgbzuPd15zaZ5ddMQi77I81yEc91RQftI+MW/4bSfngrxlXNc68enR4+Na86LQSKfhCAxr2SXaHxvg5BzgEM1xv+cHmR9dd44/+uFJm+GCNo67ZZM9CeRFupp9V77P71OS/2mRz3eeEhuJOcwymeP2r3xQ3tr83krj1jkc13khPmz5BdbFcfXNzoEDnB0Zvaan3HDEE0TYS5Seo49KVbFMz7n3nBxNE38dU/bWJbO+Nn6LOt846PWB7Vvflw3O8G2senBj3tRg9kMn7z0Py0tjt1RM9yj07dI6BM3PGan/YsCQE0Xpt+vXGbqGHfvo6+TUfRWXny45Ni+GWDjxwGj/Ve1FHZc+3rl7ScOV+2fuzSNI/3XJ/t4r2txbXIMH1zfY7bp+xydGXHX/jdvVIZ+sD5TeKnp2wj1RbCtKHHL+WHGY5WgDNrph5LCYTeAIUrmAqhm9ae61y1Xte4MrBuiTPbIyVi3MHf/zgZg9Fdu9bv2TdFsB8FEhHtMqswZJt5s/LDPf2XBoEspk1NwMHNb5xcHjh4cWY2htncU8+6TrqMrlgyR7JkaXVmVUDHZPavF8qz0oJ5T7/XFX49obXi4VYyyF34/y8H3ybqfdA7cxxUlrvTOeH7yQILiwd1FE08ztbqdm72MkK6oTXELcjF3P/7hfvji7+mzJvsFfjK2NG/SbfOs11zhNhvzx+T8aOiq9ij7XLF5v3nZ0x/XT7fkZi+nj1ZFaclpUUXZx85BUpaT9zAO0KC1yC1Yc3+QRXDbm+FniWZ7L5jtPvN9MlpTuz3pvSYGw/vrW6R54a2GOw9T0SKfNg8zTqw8NWBy95PH5uw43j40XHXZo/tS8IDF1x5PODcNbf2maV0b/+LEedD52H1joZZs1YEreofnhXk/d7OtTMhu5DbE/Nx/RHh5Xd3+DvfyiIW9p7Jx1l4L8sv51oCxaG7fxgl/XV8++urw6eErR1LVtV2qRI984ebpembH+XO6Jhz4xGWeJ/OQ1Z2vORf90ctIH4zoEPrbBEq+8psh7n3XWTSWDqCd33t14eLp8eNnnXV9tXpjrX1F1Rb57YeTBm8G/t+zPrQ6p+7Xr14D2BYtqD3jsyN10clCN2U1QI21xntCxceH8eI/vhNmmpfb3lgxjPrkYvCzmtMczh09jHa40e24m4jsezW/RGEZEH072enP91U0r9bNyL7/1Q2Pd1aUbLLNPJV19ZHPh2pFJQ+wbRwzoGP3RiVFO+Rc3zHS/GjtyRtT9VTFD9jSW7mF3b1uw/bLdFvDru2MWR6YsuUn/1jz1220XD78Y
KJMW5PTvI7qv4OpZKG0mwPfWzG7tjONVdiZjMmQa1Qwvm3FGg1rOsECBQOEGSAKTzYJuMQ7veUp7njIlEgUR+QbFAG6FjMIBwQ+gyPaIMpydm4QBzDqtS9NigOx2gC5QqXmlpaxz0xiNThVuyFag8JXCcECLcWVo6Mtt4L3beHlD8Plj3bvg80DCVSbGOpjCpGYxrE6tQiL1UC10ei2UfL1Eb9K9fO/FFfXiRjAmNZ9PIIajiFSV3nP3iEQoImeZtNlATPRG452NunTWYNUlrQlQ0CknleErqNIbTIzfQHsbG7sB3R9/uFt+VTjcIFAE4kIAvyTAUBTl8bsLOV2VxiDyBLlBrw2M5UMjMlaVqlNL9NpUBqCIpDs96N+bvrhP+nOseH+VRyqL4FH5jFH+RZ5jgruQ6ZMM1hqmp6cyaTweCY9fq4OkyfGTQEzGH5lh1DBGWCW/6HCMz/w1JX4lzVfrAUTW85wDFJgCR2mlGKYFCBrullDghAgOhcD6JEglRgNYC5LCAU7zk0qCVFAiEggpyAGxENCEWEmIFUIMU5K4gsDgKgxXkpRCJKYALRQpSThJkkohriApoVJIKmghAWgMA2IaBxRJKylKQWLwIVKQOA2EuBiQGIxPkYAkYRyIiJOQvQQKSBgPx0VKGkLitFIETwaDC0ggwhVClIT58jlTMFOIAyFFIgUlxJViVEEKcUDCnfahK/F6uqL/q5aBs/S66PDAWVI5Hjill76vWLt5qlHpWX65ScFzYGaf2Fj/2FEGY9rrrtFXfj5NU7FGHaQnvx61dqyVmMY03gtg/zeVsPZ++MtWx2g6iAIiKLyU8uW1jouFfa719euOT69HnQtyJ6+IST5nWXYlS+ZrcfcPsZkDmjpf2I9QBlXhLSUT3B9aapSr3/5y0KzVZV+6cc9vJ555Xxbesnvx84my4nH1B13XpaSAQMfcU/mJNvLxRZc2OZt91hdSlz19W+Kfb81O3kUPiw4VRWmHJgWsaZ/w4z7j3vj88AZlwDcJb67cnN12uNOSoq1xuBEfENt87Z27pwNXnd++ae/C1gX5di45d0tqF5ect7WNmvDBOW9bp50NhGNMQ9mq71Ju7P5cVepS5/tGUgHrcrDzfMlarHXKvbez6kyaTLfijWVk29jKM+HFWlufKjkjvLg8pd5+/erP2mTPStnMxnsLgjq/3jencEvQmNDTJReT9i3typhRetv19qMvwyb/8Umq1/Zmw9Oq6skPW9542F/4qV4axOg0JgVtpUmv6pvgfZDJ/9BFZAlzE/zwICyI9u/DIrR3vXWNVAXVyKDtXtst2n28+5BumkETASnmFxGMo5gIwwgUha2K4vP8ESlMkNUZ9P9tHtJKk6lmjH46K6OsOYHEHIAlEHMT+iSXbWSSHCF3cEf0rw+ghEJCCJJAjw32NVQi64z+pQ2jUPrvNhwXk3+3QU9hP5uQ6oeHi4l+eBiJviauuB8eJhJRf7cR/Fs/G9nHjzWqdKmMka8zL+UA675ZZRFAEcIQKI5pVHQiRVJqQq3CSZVYw9BJajJRpUZxVWgIzuAorkY1jJokKDJJzYjFYhLFRUKaQRkNg4Uq4X8lAwushz8TkesWwcbHYd1NrMrIWqtOYELacdy4yBlRjv8BUEsHCNKHT35yCgAA2w0AAFBLAQIUABQACAgIAEE/fk3Sh09+cgoAANsNAAARAAAAAAAAAAAAAAAAAAAAAABQYWNrYWdlTGFiZWxzLnBkZlBLBQYAAAAAAQABAD8AAACxCgAAAAA='''
code = '''UEsDBBQACAgIAEIbhU0AAAAAAAAAAAAAAAARAAAAUGFja2FnZUxhYmVscy5wZGatV3lUE9caRwoC0VJAWtDWdhDRgMLsMwkiAmGtsiYIdkztJBkgShKaREDUIhQEF0BPKaJSF1yLWhH3BRW0Vvu0VWjxoaWotUdUbMEuUqXOuxNQUPrO++flnDn3zne/7X7f7/7uxCsuNNwX9SNEXrd++rZZREAIZFDNFQUEwDJDusEoz2DVHBzKZWrVXISRXQDL56vMCzI4OErHpnJwJKdNTTNDGAqHa9PNnBGWyaIUinA2O5RTGzQcrBBUk2NVczm1Ge6TxbFGnanP/Xyd3gRJKDjBkGUSfEyHfNHAQDhJqzGnCfIZnD4VzHBMAodozaY4zigz6DIMek5vhoCiyWzkWJ1oQvXqK2Odr6wsLe44l+NZHRiRcsvfszogW2NmnZv4rwrsI+85xRePuxfoXMbzBW6fNDjal9mPq9rcKPLN5bs7ncc2ihqcbx4o7izN5Xuh2obuuq6gkx+P4PkVwIuHqyRo+Vte7jedLwLrrqDyENuTl+OCyt99exrfK/PctHysPK5R5OdiEzGH793cYNPotGdpRLBjo0OdP8//vSzmwR5vWSF7in/a4NrV1WQ/qTZsSi7PX7O5kTd+1a3RY8Vhr/F8bdDSuJ6/ZEdl+bVXeV5jXpUNhdxYGc9aj9w5h+cPOM2cnW/jEGwt28Tzjxveym728TyntHHadIp/bO9RHJoT3dhgg+Qlq5xlPH/rTRuH/K8/bJ3vZhMB4thENL3a0SX22u2cncs/EYnH+df5j8gIWqVyfofnb+Rl9hTal+4u23KE7xpm5STi9Jr+koKZAAJyAAz97Q1PZ81cf2/7u0NLpM9bkV1VmDjd0BzktGF/rpVdjOknjxNta+qpw4rFxSKfpspDkZHiuZ98VyjZqhO5OvjNlnbJj3TDTrkFkyYqUq6WPjhHnTfWla39db0hJSvgUcuoWCMRXHxh31dXkjwOb7l09v5CU45E9LutsfwaUuJT2pX27q4/peuzQ7Cx1Zev/eLdkL3SY0PJCfQoY7VsiuZEm8P4bl2ex9EvK3MaQ7/sOUiff/SvTz/fcUCV9z6ZYeWfcm5d5JZZHyz/nLzgqb0z8a9GrfP4ca+d+WWC3KBbUWE941zo6qoFVbZny+//eP0Ll4ysefVZsHRboUN9y6rvRNi2kS0NW8Rlx4PuTHYsWemfefWQpkHZvaj36c3rO5iP25dAX8xZ+cQ0xeqHegNd4Tz5bvkbtNViZ/HsNw5tnnW25laJvun9p8d6F53ea9iLuW6Ij7sXH/+kIvq2SPxzp4djzNLW25pKFTxFqTmy2nH/rt+/7yAWT2NObLmgqYj+pnm4wR1NlB3+OrlqHrPsUmVMYpcNdcNKU1SD+CkSHV2sb5Q/jDa2FUztaFDnB+U8OhYU+orGLqVrTIJ7g20AdCfCJt/JpXZ4WFns3gL7bDvNpjGrvoU8T4/ID5iwc1jJUn3myYLkqUW9G39+u7fXuOPgDy17J2eGtXs+licHtdyr3R10ibWa6MoOH/VoY8LFkyuO6xITVAXRdh0ze+pmTK4Yp2+vmTjNq1P9+GCASTRG2eP25devf1iWVpDR8a451OQSbssc2bVd7Bjz29r94euuJ6c6Fl4ekxTXapo1vCknRrm1RF13/1f+7s4NNcd08adqWzbPhg+kHJL9/Mr1alSaZSeJ3Z6xZYlLZ6nD348ho3/dvHlq0+yHduvW9ibJfzt78My+0yWj/7yPtuhcrn/z6ghT60e6RfI15R6jXEi4ajcidwu/oHjwwC3TI0+X7Qjf9O5sLnExryFs9kw9env/RqsoL5+ep7F4zm9/FE2xPXGsqiQgsd19m/X2jurW2JHbYo0+lYWi8YvDJT9Vvbmu1XV7cxRetK0w+oObnS7JJW12xz4iZpRfX/Mg09tnpKzZr2Z6ZPuTEWNWr9839FzRg0lWbwZ0ZoKEs5bQx5
dxArMmcCbDfKOaE6gyzmhQyzkzxMCArCFYwWWboT4CDukfZf1jlBIOBx6FA4tCmMVlOAbhwgQQaz8Rg9XkFBRCLctaXSoKEX0KQAWws8CulGUtmtNo2RBDNoOAVwrFIFqKKQMDn20DG9jGs1tByB/t24WQBxzCmjjLJJJLz+TMWjULh+kBe2j1qYDm9cF6k/bZ+4BfyYDfUM6kFvLxRTEElrEZ/feNRILAcjOnmwlJ8YFogrJRm2E2WHgq1QRRQGlBOidUkNUbTJx4mK21tY1V388b7FawCgEbhBhfjITAQ0AogiCC/75CxrA6DpYr5AZ9qu8MITQcZWbTtepgfWo6ByFwcF96QH8gfemg9JMs/p6XRxYVKngVMkaEF/kCE9hFlD7FYKlhRkY6pxP8EaD9qVoAmgXiYOCT84ZjjRrOCKokjghBhcz/ocQvpPliPSCJpZ9JEIMyGEIrUZzBcAmE06QSpSEMhwgKgzCSAYMSJxhKQihxmqFxKURSuJJAGBqTKgmMwVFghGJKgmRIjIIkUgqiSYmSkDIkQShJlKEwCfBFQhRCgxUcolEUktIYRBE0RGGIkiIYEgEDxRAoraQkDIHREIlJIQIllTTGEAQBkSAIRgD44oiSphkMkyhpEMCiSCglGPAABoLBcQrCaCAFnqQoQ4B9DQIn/s/gRP5X5XwT9dqIEN9EmRzzjRwA6wvSPlRqWL1ZMDcxQscTBsVGh8YONxh1/3SJvvCBFM2ajVoARsEesZxPCwyNOkELQv9vnGA56SHPDjZK036gl4QfRSmfXeqYlBp0qX9aXh9zFnEqWDitcHraxdLclswLUVuDyoNDfp+dF3rmiVuW89F/H2tdqriyo627M8ele+rOHJs/Dva0w26ypzKXa6UP/3jgSIYXP0qIq/k8L9m1ZXnRiguVdfsjP3PIW7TVqJa33dRMmbtxFE3l4H+dLyq7Mqug/tPzptebko7uje8qJboV72Xn+5w4nNNTea+8/vtvUomu0XtqT4XPbTtStCF5z6TjH9uHRaWdclIPW/LhKKR62cX11pPxeHcvR1fVoXVZB8hO8b61mrWjHT77IDCwYvSYu6eL7448vlma739GlVaRklbjbpq0eNzlueIWzwly7tK5sTOp979qvz2nozNm18L3bqyf861jkt/bgXes0h/eu/rOo9qqDfEVVxN+DJz2I7ew68hHx9d8r7RapxnhPpTnqQEcTNdqTAxtwckAyZsA/c8XvmXhKEWyQoz5oX609yAYIQP2FhsZC8jHkNpn28fRg7QHoS7aoAkFGBOH+mMIKgHcSSI4RiHEe96wDCRo1hr0/20d4EozX80ZxVoLpCw5QaoFEKrAkxWDkss2cikiAB5MhDz/QRRJ4iSUAvXLAMcA4rGs6J/JUFJKvizDMIp4WSZoDpGRBDLEVooMjYEDYnpZRknwITIJPSQuJqXRl2U4AljyucxsZLXpnFGos8DcENp3kUaFQkwAymrUCKZRsTiqYmkJquLAQ6tSuBQU12g4KjCAVlEUlqJhUVRKAcpCSIzEERWrlrKIlOZYKlAJ/g4ZACYQi1e5NgecfAzU3WRmjWZL1QEDS0ReXmGx4aL/AFBLBwh/2acYWAoAAL4NAABQSwECFAAUAAgICABCG4VNf9mnGFgKAAC+DQAAEQAAAAAAAAAAAAAAAAAAAAAAUGFja2FnZUxhYmVscy5wZGZQSwUGAAAAAAEAAQA/AAAAlwoAAAAA'''
# Decode the embedded base64 payload and write it out as a zip file.
# base64.b64decode replaces the legacy decodestring(), which was deprecated
# and removed in Python 3.9; b64decode works identically on Python 2 and 3.
with open(os.path.expanduser('~/Desktop/test4.zip'), 'wb') as fout:
    fout.write(base64.b64decode(code))
Example #33
0
    def process_request(self, req, resp):
        """Authenticate every incoming request before it is routed.

        Depending on the path, one of several schemes applies:
        optional HTTP basic auth, Twilio signature validation for
        ``/api/*/twilio``, HMAC token auth for ``/api/*/mobile``, and a
        few endpoints that are allowed through unauthenticated (gmail
        hooks, slack, health checks, SAML). Raises
        ``falcon.HTTPUnauthorized`` when no scheme accepts the request.
        """
        # http basic auth
        if self.config['server'].get('enable_basic_auth'):
            hdr_auth = req.get_header('AUTHORIZATION')
            if not hdr_auth:
                raise falcon.HTTPUnauthorized('Access denied',
                                              'No auth header', [])

            auth = re.sub('^Basic ', '', hdr_auth)
            # Split on the first ':' only -- passwords may themselves
            # contain colons (a plain split(':') raised ValueError here).
            usr, pwd = decodestring(auth).split(':', 1)
            if not equals(self.basic_auth.get(usr, ''), pwd):
                logger.warning('basic auth failure: %s', usr)
                raise falcon.HTTPUnauthorized('Access denied',
                                              'Basic auth failure', [])

        segments = req.path.strip('/').split('/')
        if segments[0] == 'api':
            if len(segments) >= 3:
                # twilio validation
                if segments[2] == 'twilio':
                    sig = req.get_header('X_TWILIO_SIGNATURE')
                    if sig is None:
                        logger.warning("no twilio signature found!")
                        raise falcon.HTTPUnauthorized('Access denied',
                                                      'No Twilio signature',
                                                      [])
                    # Rebuild the externally visible URI Twilio signed,
                    # including any load-balancer routing prefix.
                    uri = [
                        req.protocol, '://',
                        req.get_header('HOST'),
                        self.config['server'].get('lb_routing_path',
                                                  ''), req.path
                    ]
                    if req.query_string:
                        uri.append('?')
                        uri.append(req.query_string)
                    post_body = req.context['body']
                    # Accept a signature from any configured token so
                    # tokens can be rotated without downtime.
                    expected_sigs = [
                        compute_signature(t, ''.join(uri), post_body)
                        for t in self.twilio_auth_token
                    ]
                    if sig not in expected_sigs:
                        logger.warning(
                            'twilio validation failure: %s not in possible sigs: %s',
                            sig, expected_sigs)
                        raise falcon.HTTPUnauthorized('Access denied',
                                                      'Twilio auth failure',
                                                      [])
                    return
                elif self.mobile and segments[2] == 'mobile':
                    # Only allow refresh tokens for /refresh, only access for all else.
                    # Guard the index: a bare /api/v0/mobile path has no
                    # fourth segment and previously raised IndexError (500).
                    table = ('refresh_token'
                             if len(segments) > 3 and segments[3] == 'refresh'
                             else 'access_token')
                    key_query = '''SELECT `key`, `target`.`name`
                                   FROM `%s` JOIN `target` ON `user_id` = `target`.`id`
                                   WHERE `%s`.`id` = %%s
                                   AND `expiration` > %%s''' % (table, table)
                    method = req.method
                    auth = req.get_header('Authorization', required=True)

                    items = urllib2.parse_http_list(auth)
                    parts = urllib2.parse_keqv_list(items)

                    if 'signature' not in parts or 'keyId' not in parts or 'timestamp' not in parts:
                        raise falcon.HTTPUnauthorized(
                            'Authentication failure: invalid header')

                    try:
                        window = int(parts['timestamp'])
                        time_diff = abs(time.time() - window)
                    except ValueError:
                        raise falcon.HTTPUnauthorized(
                            'Authentication failure: invalid header')
                    client_digest = parts['signature']
                    key_id = parts['keyId']
                    body = req.context['body']
                    path = req.env['PATH_INFO']
                    qs = req.env['QUERY_STRING']
                    if qs:
                        path = path + '?' + qs
                    text = '%s %s %s %s' % (window, method, path, body)

                    conn = db.connect()
                    cursor = conn.cursor()
                    cursor.execute(key_query, (key_id, time.time()))
                    row = cursor.fetchone()
                    conn.close()
                    # make sure that there exists a row for the corresponding username
                    if row is None:
                        raise falcon.HTTPUnauthorized(
                            'Authentication failure: server')
                    key = self.fernet.decrypt(str(row[0]))
                    req.context['user'] = row[1]

                    HMAC = hmac.new(key, text, hashlib.sha512)
                    digest = urlsafe_b64encode(HMAC.digest())

                    # Constant-time compare of the digests, plus a replay
                    # window check on the client-supplied timestamp.
                    if hmac.compare_digest(
                            client_digest,
                            digest) and time_diff < self.time_window:
                        return
                    else:
                        raise falcon.HTTPUnauthorized(
                            'Authentication failure: server')
                elif segments[2] == 'gmail' or segments[
                        2] == 'gmail-oneclick' or segments[2] == 'slack':
                    return
        elif len(segments) == 1:
            if segments[0] == 'health' or segments[0] == 'healthcheck':
                return
            # NOTE(review): a bare '/saml' path (one segment) is handled by
            # this branch and falls through to 401; the 'saml' elif below is
            # only reachable for multi-segment paths -- confirm intent.
            elif segments[0] == self.config['gmail'].get('verification_code'):
                return

        elif segments[0] == 'saml':
            return
        raise falcon.HTTPUnauthorized('Access denied', 'Authentication failed',
                                      [])
Example #34
0
def bin_to_python(node):
    """Decode the node's base64 text payload into a ``binary`` value.

    ``node.text`` may be None for an empty element, so fall back to ''.
    base64.b64decode replaces the deprecated base64.decodestring (removed
    in Python 3.9) and behaves identically here.
    """
    return binary(base64.b64decode(node.text or ''))
Example #35
0
def split_mail(msg, fail_on_empty=False):
    """Deconstruct an email.Message object into a sequence of words;
    returns an iterator for this sequence.

    If fail_on_empty is true, then ValueError is raised if no
    parseable message parts could be found.  This function can only
    handle text parts, and does not attempt to extract meaningful data
    from other content types.
    """
    coll = set()
    results = [coll]

    # Address headers: tag each address's domain (always) and local
    # part (when present) under a 'from:' / 'rcpt:' prefix.
    for tag, hdr in (('from', 'from'), ('from', 'sender'), ('rcpt', 'to'),
                     ('rcpt', 'cc'), ('rcpt', 'bcc')):
        for name, addr in getaddresses(msg.get_all(hdr, ())):
            addr = addr.split('@', 1)
            coll.add('%s:@%s' % (tag, addr[-1].lower()))
            if len(addr) > 1:
                coll.add('%s:%s' % (tag, addr[0].lower()))

    # Subject words, with leading 're'/'fwd' markers stripped.
    proc = compose(unfold_entities, word_split, lowercase, remove_stopwords,
                   dropwhile(lambda s: s in ('re', 'fwd')),
                   add_prefix('subj', ':'))
    results.append(proc(msg.get('subject', '')))

    found = 0
    for part in msg.walk():
        if part.get_content_maintype() != 'text':
            continue  # skip non-text parts

        enc = part.get('content-transfer-encoding')
        if enc is None:
            text = part.get_payload()
        elif enc.lower() == 'quoted-printable':
            text = quopri.decodestring(part.get_payload())
        elif enc.lower() == 'base64':
            # b64decode replaces the deprecated base64.decodestring.
            text = base64.b64decode(part.get_payload())
        elif enc.lower() in ('7bit', '8bit', 'binary'):
            # (The original tuple also listed None, which is unreachable
            # here -- the `enc is None` case is handled above.)
            text = part.get_payload()
        else:
            continue  # unknown encoding method

        # Since the content may be encoded with some weird character
        # set, we'll try to get it back to Unicode so the XML parser
        # doesn't choke too hard.

        for cs in part.get_charsets():
            if not cs:
                continue

            try:
                text = text.decode(cs)
                break
            except UnicodeDecodeError:
                continue
        else:
            # If we can't decode it some other way, try ISO 8859-1,
            # which subsumes US ASCII anyway.
            text = text.decode('latin1')

        proc = compose(unfold_entities, word_split, lowercase,
                       remove_stopwords, crush_urls, limit_length(100))
        sub = part.get_content_subtype()
        if sub == 'html':
            proc = compose(strip_html, proc)
        elif sub not in ('plain', 'enriched'):
            continue  # unknown text type

        results.append(proc(text))
        found += 1

    if found == 0 and fail_on_empty:
        raise ValueError("No parseable content found")

    return itertools.chain(*results)
Example #36
0
    def decode(self, data):
        """Base64-decode *data* and store the result on the instance.

        Uses base64.b64decode in place of the deprecated decodestring
        (removed in Python 3.9); the two behave identically here. Also
        normalizes the original mixed tab/space indentation, which is a
        TabError under Python 3.
        """
        import base64
        self.data = base64.b64decode(data)
Example #37
0
def check_store_key(hex_fingerprint):
    """Return True if *hex_fingerprint* matches the embedded RSA host key.

    The key blob is hard-coded below; this only verifies that the
    supplied fingerprint belongs to that one known key.
    """
    data = 'AAAAB3NzaC1yc2EAAAADAQABAAABAQDJOtsej4dNSKTdMBnD8v6L0lZ1Tk+WTMlxsFf2+pvkdoAu3EB3RZ/frpyV6//bJNTDysyvwgOvANT/K8u5fzrOI2qDZqVU7dtDSwUedM3YSWcSjjuUiec7uNZeimqhEwzYGDcUSSXe7GNH9YsVZuoWEf1du6OLtuXi7iJY4HabU0N49zorXtxmlXcPeGPuJwCiEu8DG/uKQeruI2eQS9zMhy73Jx2O3ii3PMikZt3g/RvxzqIlst7a4fEotcYENtsJF1ZrEm7B3qOBZ+k5N8D3CkDiHPmHwXyMRYIQJnyZp2y03+1nXT16h75cer/7MZMm+AfWSATdp09/meBt6swD'
    # b64decode replaces the deprecated base64.decodestring.
    key = paramiko.RSAKey(data=base64.b64decode(data))
    return hex_key(key) == hex_fingerprint
Example #38
0
 def serial_number_to_string(value):
     """Decode a base64 serial blob into a printable serial string.

     NOTE(review): Python 2 only -- base64.decodestring and
     str.encode("hex") were both removed in Python 3 (the equivalents
     are base64.b64decode and binascii.hexlify).
     """
     # Keep the first 4 decoded bytes verbatim; hex-encode bytes 4..7.
     sval = base64.decodestring(value)
     unique = [elem.encode("hex") for elem in sval[4:8]]
     return '{}{}{}{}{}'.format(sval[:4], unique[0], unique[1], unique[2],
                                unique[3]).upper()
Example #39
0
        # Grotty heuristic to distinguish known_hosts from known_hosts2:
        # is second field entirely decimal digits?
        if re.match (r"\d*$", fields[1]):

            # Treat as SSH-1-type host key.
            # Format: hostpat bits10 exp10 mod10 comment...
            # (PuTTY doesn't store the number of bits.)
            magicnumbers = map (long, fields[2:4])
            keytype = "rsa"

        else:

            # Treat as SSH-2-type host key.
            # Format: hostpat keytype keyblob64 comment...
            sshkeytype, blob = fields[1], base64.decodestring (fields[2])

            # 'blob' consists of a number of
            #   uint32    N (big-endian)
            #   uint8[N]  field_data
            subfields = []
            while blob:
                sizefmt = ">L"
                (size,) = struct.unpack (sizefmt, blob[0:4])
                size = int(size)   # req'd for slicage
                (data,) = struct.unpack (">%lus" % size, blob[4:size+4])
                subfields.append(data)
                blob = blob [struct.calcsize(sizefmt) + size : ]

            # The first field is keytype again, and the rest we can treat as
            # an opaque list of bignums (same numbers and order as stored
Example #40
0
def base64_bytes(x):
    # type: (AnyStr) -> bytes
    """Decode a base64 value to raw bytes on both Python 2 and 3."""
    if not six.PY2:
        return base64.decodebytes(bytes_encode(x))
    return base64.decodestring(x)  # type: ignore
Example #41
0
 def get_local(self):
     """Return the payload stored bz2-compressed and base64-encoded in
     ``self.local``.

     base64.b64decode replaces the deprecated base64.decodestring
     (removed in Python 3.9); behavior is unchanged.
     """
     return bz2.decompress(base64.b64decode(self.local))
Example #42
0
#-*- coding: utf-8 -*-
'''
Created on 2013-9-10

@author: nzk
@modified by: jackieyang9
'''
import  math, zlib, base64
from PIL import Image
data = 'eJztWsmOFUcQ/JW5zWUOXWCw8Q2zWd5k2YAxozmAV9kCeZF38e9Qgx6vKjMiMqvfa6bHM9IcICO7OzMisrq6X/93eOXw/YPj43J0MPV/ryLl5OjgeHLItEVK/1fBipTdkeY6IGyRbbhDuvAWseGuU1sh5WASyCvo8J0NsYn6W/rHQCSZudQY2LIwAGIiJEtSeEDCAFj5v97zHxHl7A2NIkFtJAumh22yoO2JXjMF+lYmABKGCLeV//ca/u3RlsUc/8Ng61WpeRb0/u/FmcO/lDXBP6q28v/BKf/gUOU11MswIq/jRoHXlqhAkkyQySKcA0jsbUJs1JxsMW8o5anIc2eNy/ojATL2bWS6y2QiR5Oi9xzOGO3tVNKHK2MfNoytzDgOx395/Kzrn4lXmT5yG8vGN5dhEZaTV0C4nY5PxHRkHh8uDpKfylNiPxtYdnwKusB8vLiUYdykkLHuTjcDb908Ey8xvrl+lenz/e4nx/ZsEVFju7n1zEwl9gtBbEQvvQCRx4s/E0/Kr3BOUITblHn9bVNS/r/Pt/2RBxscF7gt8xLfAa8yPZR3k9IpIVLkW55pNMVn9ebcZ0obdlOy55Q3YEInL9Wj1I0fMhwpIOmXrEV8JMggTOx0fln/TvxI/qtMj8XCpzd9UlQplxQqYXxOXgoBnCyw8X265BPFZEmChaAXy654JOASCK+AV+06rcR+i3564e4bQNprNUik/QqJ5VWTTiux38N3+uh0aJDQTpzFyDCvI1ap+HHh1wGR2YnCCztpYaQS+5P/3c6t8W5Udcb5C3CZp40BnzsDXljL/NJT0fV3gW59e1h1Ukil/DdPufYtPOdbCtSC/+TrNRepH1EwNEdsyzgAtnjP1a6gdUgIVqL+JkQRInijpMtMoXgbQpD+GDAbujbSaaXi3xk/IXL7cPkjbVF7ekxFgShcm70650OwzRkTXUsEKcXV7UB6ttlbfWAHzp2uuhJ7bfbzMxCWS55E0EDJttZL7LvLv5goARVjrywKQJwY4IAjuipkEOAIgVRib5BPt9DRSDN7AAC5QyLQn1Y8z3aqmla45KGZOxIkiLxLb2Eb/m+SX/iJ7rmUru/FUkwVvGswAPkUrHTAPUnh8kI84dcmpap5i3+IOoGukXmBtnQmGklodWcLkmplnxFDhNvK/5113n+d0Tiy3wrStUXvr+9JY4feDuxdLD5quIVxR9BMvOUnwkfM/8b/H+snX9rmNgLw7r8Wt+cawkGh1odSJnl8rr6ovwQ/CX4bmatMn/bLFDQkmUuctXCi57GLL5xoQTs1yya2lJR9JQpvfImezdFkcMMJH4owqJ9sX8l2lDw5iTCYMtEl4qQy9mDH130XCwR+DR/yvAda/r8SbxFhUK2xJL2w2l2Ev4ct2fSpSbfBeekwCxx3Fum0CbzjoJGF06vTvl7RV0QRbgYGdQ0KOW+4/TuV6Yl4p0TXGvSWxYkSyYXOhhBoNrJ37BH7h++cp0il4htGBamPrNPjd/M5YXdJUiBqpzb7HWqWi46uPOFmHTWEMRReqNkfyD4NmULqKwteEkEV8Kq9rD2CrgPHmV9H/eJXKf9Zf+8r99kzQOdSi9AlA53Wnl8+hiWPjK4ZVRv12ahZ+X8mPkPxEdCDdY8aC3r26An9MpfI9ytZnuVMrnDF6mvjVctOkcf3i1TKf4c3CXTLQnOLpgLtN1CzaCN3Yh65Hbmc1tcL8B/B9yLnCEHulhNhlP0LUWHFiq6eSiw4sZh64T/Wlei76p2KWZE2x4lVoH/+nx+BAU/gZ86ZYBkFAYkaPHnxEiqAO44='
chars = '23456789ABCDEFGHJKLMNPRSTUVWXYZabcdefghjkmnpqrsuvwxyz'
charims = eval(zlib.decompress(base64.decodestring(data)))
def identify(filepath):
	"""Binarize a captcha image and weigh the neighbourhood of dark pixels.

	NOTE(review): this function looks truncated by the snippet scraper --
	it builds `s`, `black` and `cnt` but never uses or returns them in the
	visible body. Confirm against the original source before relying on it.
	"""
	img=Image.open(filepath)
	(width,height)=img.size
	imarr = []
	# Threshold on the red channel: dark pixels (r < 127) become 0, light 1.
	for i in range(0, height):
		imarr.append([])
		for j in range(0, width):
			(a,b,c) = img.getpixel((j,i));
			imarr[i].append(0 if a < 127 else 1)
	s = ''
	for i in range(0, height):
		for j in range(0, width):
			if imarr[i][j] == 0:
				black = 0.0
				cnt = 0
				# Weigh each in-bounds neighbour by inverse exponential distance.
				for k in range(-3, 3):
					for l in range(-3, 3):
						if (k != 0 or l != 0) and i + k >= 0 and i + k < height and j + l >= 0 and j + l < width:
							cnt += 1.0 / math.exp(math.sqrt(k * k + l * l))
class OpaqueMessage(ProtocolBuffer.ProtocolMessage):
  """Generated protocol-buffer stub for a message with no declared fields.

  Payload bytes are treated opaquely: TryMerge() discards every tag it
  sees and all size/serialisation hooks are no-ops.  The ``_C*`` native
  entry points are only defined when the accelerator module
  (``_net_proto___parse__python``) imported successfully.
  """

  def __init__(self, contents=None):
    # No fields to initialise; optionally merge serialised *contents*.
    pass
    if contents is not None: self.MergeFromString(contents)


  def MergeFrom(self, x):
    # Nothing to copy; only guard against merging a message into itself.
    assert x is not self

  if _net_proto___parse__python is not None:
    def _CMergeFromString(self, s):
      _net_proto___parse__python.MergeFromString(self, 'OpaqueMessage', s)

  if _net_proto___parse__python is not None:
    def _CEncode(self):
      return _net_proto___parse__python.Encode(self, 'OpaqueMessage')

  if _net_proto___parse__python is not None:
    def _CEncodePartial(self):
      return _net_proto___parse__python.EncodePartial(self, 'OpaqueMessage')

  if _net_proto___parse__python is not None:
    def _CToASCII(self, output_format):
      return _net_proto___parse__python.ToASCII(self, 'OpaqueMessage', output_format)


  if _net_proto___parse__python is not None:
    def ParseASCII(self, s):
      _net_proto___parse__python.ParseASCII(self, 'OpaqueMessage', s)


  if _net_proto___parse__python is not None:
    def ParseASCIIIgnoreUnknown(self, s):
      _net_proto___parse__python.ParseASCIIIgnoreUnknown(self, 'OpaqueMessage', s)


  def Equals(self, x):
    # With no fields, any two OpaqueMessage instances compare equal.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    # Always initialised: there are no required fields.
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n

  def ByteSizePartial(self):
    n = 0
    return n

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def OutputPartial(self, out):
    pass

  def TryMerge(self, d):
    # Consume and discard every field in the decoder *d*.
    while d.avail() > 0:
      tt = d.getVarInt32()
      # tag 0 is special: it's used to indicate an error.
      # so if we see it we raise an exception.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res


  def _BuildTagLookupTable(sparse, maxtag, default=None):
    # Build a dense tuple lookup table from a sparse {tag: value} mapping.
    return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])


  _TEXT = _BuildTagLookupTable({
    0: "ErrorCode",
  }, 0)

  _TYPES = _BuildTagLookupTable({
    0: ProtocolBuffer.Encoder.NUMERIC,
  }, 0, ProtocolBuffer.Encoder.MAX_TYPE)

  # stylesheet for XML output
  _STYLE = \
   """"""
  _STYLE_CONTENT_TYPE = \
   """"""
  _PROTO_DESCRIPTOR_NAME = 'OpaqueMessage'
  _SERIALIZED_DESCRIPTOR = array.array('B')
  _SERIALIZED_DESCRIPTOR.fromstring(base64.decodestring("Wh1uZXQvcHJvdG8vb3BhcXVlbWVzc2FnZS5wcm90bwoNT3BhcXVlTWVzc2FnZboBVgodbmV0L3Byb3RvL29wYXF1ZW1lc3NhZ2UucHJvdG8iDwoNT3BhcXVlTWVzc2FnZUIkChxjb20uZ29vZ2xlLmlvLnByb3RvY29sLnByb3RvEAEgASgB"))
  if _net_proto___parse__python is not None:
    _net_proto___parse__python.RegisterType(
        _SERIALIZED_DESCRIPTOR.tostring())
Example #44
0
def i_generate_verification_pic(user, res_obj, debug=False):
    """Build a Verification object and download its captcha picture.

    The server signals verification in one of two ways: an HTTP 421 whose
    body carries a <verification> node, or a SIP 421 whose body carries
    only a <reason> node (the parameters then live in the "W" header).
    The picture is fetched from nav.fetion.com.cn and saved under the
    user's configuration directory; its path is stored on the returned
    Verification instance.
    """
    dom = minidom.parseString(res_obj.body)
    veri_elems = dom.getElementsByTagName("verification")
    if veri_elems:
        read_attr = veri_elems[0].getAttribute
        algorithm = read_attr("algorithm")
        _type = read_attr("type")
        text = read_attr("text")
        tips = read_attr("tips")
    else:
        # SIP-style response: parameters are split between the <reason>
        # node and the "W" header.
        reason_elem = dom.getElementsByTagName("reason")[0]
        w_val = res_obj.headers.get_field_value("W")
        algorithm = strip2(w_val, 'algorithm="', '",')
        _type = strip2(w_val, 'type="', '"')
        text = reason_elem.getAttribute("text")
        tips = ""
    veri = Verification(algorithm=algorithm, _type=_type, text=text, tips=tips)

    ssic = user.get_ssi_cookie() or ""
    host = "nav.fetion.com.cn"
    request_headers = {
        'Cookie': 'ssic=%s' % ssic,
        "Connection": "close",
        "User-Agent": "IIC2.0/PC %s" % PROTOCOL_VERSION,
    }

    # Swap in our response class so the body is pre-read for us.
    httplib.HTTPConnection.response_class = HTTPResponse
    conn = httplib.HTTPConnection(host)
    conn.set_debuglevel(1 if debug else 0)

    url = '/nav/GetPicCodeV4.aspx?algorithm=%s' % (veri.algorithm or "")
    conn.request("GET", url, headers=request_headers)
    res_obj = conn.getresponse()
    assert httplib.OK == res_obj.status

    dom = minidom.parseString(res_obj.body)
    conn.close()

    read_attr = dom.getElementsByTagName("pic-certificate")[0].getAttribute
    veri.pid = read_attr("id")

    encoded_pic = read_attr("pic")
    save_path = os.path.join(get_user_config_path(), "%s" % user.sid,
                             "verify_code.jpeg")
    with open(save_path, "wb") as f:
        f.write(base64.decodestring(encoded_pic))
    veri.picture_path = save_path

    return veri
Example #45
0
    def action_send_eletronic_invoice(self):
        """Transmit the NF-e/NFC-e (models 55/65) to SEFAZ and store the result.

        Builds the invoice batch, signs it with the company's A1
        certificate and calls ``autorizar_nfe``.  When SEFAZ queues the
        batch (cStat 103) the receipt is polled every two seconds until
        the answer is no longer 105 ("batch in processing").  The return
        code/message is always written back, an ``invoice.eletronic.event``
        record is created, and the sent/received XML payloads are
        attached.  On authorization (100) or duplicate emission (204) the
        record is marked ``done``; on denial (302/205), ``denied``.

        NOTE(review): the polling loop has no retry cap, so an
        unresponsive webservice keeps this transaction open indefinitely.
        """
        import time  # hoisted from the polling branch; stdlib, cheap

        self.state = 'error'
        self.data_emissao = datetime.now()
        super(InvoiceEletronic, self).action_send_eletronic_invoice()

        # Only NF-e (55) / NFC-e (65) are handled here.
        if self.model not in ('55', '65'):
            return

        nfe_values = self._prepare_eletronic_invoice_values()
        lote = self._prepare_lote(self.id, nfe_values)
        # bin_size=False forces the ORM to return the real certificate
        # bytes instead of a human-readable size string.
        cert = self.company_id.with_context({'bin_size': False}).nfe_a1_file
        cert_pfx = base64.decodestring(cert)

        certificado = Certificado(cert_pfx, self.company_id.nfe_a1_password)

        resposta_recibo = None
        resposta = autorizar_nfe(certificado, **lote)
        retorno = resposta['object'].Body.nfeAutorizacaoLoteResult
        retorno = retorno.getchildren()[0]
        if retorno.cStat == 103:  # 103 = batch received, queued for processing
            obj = {
                'estado': self.company_id.partner_id.state_id.ibge_code,
                'ambiente': 1 if self.ambiente == 'producao' else 2,
                'obj': {
                    'ambiente': 1 if self.ambiente == 'producao' else 2,
                    'numero_recibo': retorno.infRec.nRec
                }
            }
            self.recibo_nfe = obj['obj']['numero_recibo']
            while True:
                time.sleep(2)
                resposta_recibo = retorno_autorizar_nfe(certificado, **obj)
                retorno = resposta_recibo['object'].Body.\
                    nfeRetAutorizacaoLoteResult.retConsReciNFe
                if retorno.cStat != 105:  # 105 = batch still processing
                    break

        if retorno.cStat != 104:  # 104 = batch processed
            self.codigo_retorno = retorno.cStat
            self.mensagem_retorno = retorno.xMotivo
        else:
            self.codigo_retorno = retorno.protNFe.infProt.cStat
            self.mensagem_retorno = retorno.protNFe.infProt.xMotivo
            if self.codigo_retorno == '100':  # authorized
                self.write({
                    'state': 'done',
                    'nfe_exception': False,
                    'protocolo_nfe': retorno.protNFe.infProt.nProt,
                    'data_autorizacao': retorno.protNFe.infProt.dhRecbto
                })
            # A duplicate NF-e (204) means the invoice was already issued.
            # TODO fetch the authorization protocol; for now just finish.
            if self.codigo_retorno == '204':
                self.write({
                    'state': 'done',
                    'codigo_retorno': '100',
                    'nfe_exception': False,
                    'mensagem_retorno': 'Autorizado o uso da NF-e'
                })

            # Denied (302), or the invoice was already denied (205).
            if self.codigo_retorno in ('302', '205'):
                self.write({'state': 'denied', 'nfe_exception': True})

        self.env['invoice.eletronic.event'].create({
            'code':
            self.codigo_retorno,
            'name':
            self.mensagem_retorno,
            'invoice_eletronic_id':
            self.id,
        })
        self._create_attachment('nfe-envio', self, resposta['sent_xml'])
        self._create_attachment('nfe-ret', self, resposta['received_xml'])
        recibo_xml = resposta['received_xml']
        if resposta_recibo:
            self._create_attachment('rec', self, resposta_recibo['sent_xml'])
            self._create_attachment('rec-ret', self,
                                    resposta_recibo['received_xml'])
            recibo_xml = resposta_recibo['received_xml']

        if self.codigo_retorno == '100':
            # Assemble and store the final nfeProc document for the user.
            nfe_proc = gerar_nfeproc(resposta['sent_xml'], recibo_xml)
            self.nfe_processada = base64.encodestring(nfe_proc)
            self.nfe_processada_name = "NFe%08d.xml" % self.numero
Example #46
0
def create_risk(risk, parent_id=None, number="1"):
    """Render one <risk> XML node into a markdown file via ``write_md``.

    Fills ``risk_template`` with the risk's attributes, writes any
    embedded base64 images into the media directory, and collects the
    existing measures into YAML front matter.

    :param risk: parsed XML node with title, descriptions, optional
        legal reference, images and existing measures.
    :param parent_id: id of the enclosing module, if any.
    :param number: display number used in the front matter.

    Side effects: increments the module-global ``risk_counter`` (cycles
    deterministically through EVALUATION_TYPES before going random) and
    writes image files plus the markdown output to disk.
    """
    risk_template = u"""---
layout: risk
fid: {id}
classes: {classes}
number: "{number}"
parent_id: {parent_id}
title: "{title}"
problem_description: "{problem_description}"
description: "{description}"
legal_reference: "{legal_reference}"
evaluation_method: {evaluation_method}
measures_in_place:
{measures_in_place}
{image_information}
solutions:
  solution_1:
    description: "Visual inspection of work areas."
    action_plan: "Make sure a visual inspection of work areas is carried out in
                  order to identify the potential hazards of falls and slips.
                  Check that the anti-fall fittings and protective measures are
                  present and in good condition."
---
{body}
""".format

    title = risk.find("title").text
    id = str2filename(title)
    problem_description = _r(risk.find("problem-description").text)
    description = unescape(_r(risk.find("description").text))
    legal_reference_node = risk.find("legal-reference")
    # Truthiness (not an `is None` check) is deliberate here: an empty
    # node yields "" as well.
    legal_reference = (
        legal_reference_node and unescape(_r(legal_reference_node.text)) or ""
    )
    global risk_counter
    if risk_counter < len(EVALUATION_TYPES):
        # First len(EVALUATION_TYPES) risks exercise each method once.
        evaluation_method = EVALUATION_TYPES[risk_counter]
        state = "answered"
        risk_class = "risk"
        print(u"Risk {0} with method {1}".format(title, evaluation_method))
    else:
        evaluation_method = random.choice(EVALUATION_TYPES)
        state = random.choice(STATES)
        risk_class = state == "answered" and random.choice(["risk", ""]) or ""
    classes = "{} {}".format(state, risk_class)
    # TODO: handle the sub solutions (risk.find("solutions"))
    risk_counter += 1

    images = risk.findAll("image")
    image_data = []
    image_info = u""
    for image in images:
        image_path = os.path.join(media_path, image["filename"])
        image_filename = os.path.join(dir_path, image_path)
        # Decoded image content is binary: write in binary mode so no
        # newline translation corrupts it.
        with open(image_filename, "wb") as img_file:
            img_file.write(base64.decodestring(image.contents[0]))
        image_data.append(
            dict(url=u"/{0}".format(image_path), caption=image.get("caption", ""))
        )

    if image_data:
        image_info = u"images:\n"
        for entry in image_data:
            image_info += u"    - url: {0}\n".format(entry["url"])
            image_info += u"      caption: {0}\n".format(entry["caption"])

    mip = ""
    existing_measures = _r(risk.find("existing_measures").text)
    if existing_measures:
        for measure in existing_measures.split("\n"):
            if measure.strip():
                mip += u'    - label: "%s"\n      state: checked\n' % (
                    measure.replace("&#13;", "")
                )

    fields = {
        "id": id,
        "title": title,
        "classes": classes.strip(),
        "number": number,
        "parent_id": parent_id,
        "module": "\nmodule: {}".format(parent_id) if parent_id else "",
        "description": description,
        "problem_description": problem_description,
        "legal_reference": legal_reference,
        "evaluation_method": evaluation_method,
        "body": html2text(description),
        "measures_in_place": mip,
        "image_information": image_info,
    }

    content = risk_template(**fields)
    write_md(id, content)
Example #47
0
    def upload_crash_file(self, system_id, crash, crash_file):
        """Store one crash file reported by a client system.

        Validates the *crash_file* dict (filename, path, filesize,
        filecontent, content-encoding), records it in the DB, and -- when
        uploading is enabled for the org and the size limit allows --
        writes the base64-decoded content under CFG.MOUNT_POINT.  Watched
        items (e.g. 'username') are additionally mirrored into the crash
        record.

        :param system_id: authenticated client system identifier
        :param crash: crash name/identifier string
        :param crash_file: dict describing the file; content must be
            base64-encoded
        :return: 1 on success (or when upload is disabled), 0 when the
            file exceeds the size limit
        :raise rhnFault: on invalid input or path-composition errors
        """
        self.auth_system(system_id)
        self._check_crash_reporting_setting()

        required_keys = ['filename', 'path', 'filesize', 'filecontent', 'content-encoding']
        for k in required_keys:
            if k not in crash_file:
                log_debug(1, self.server_id, "The crash file data is invalid or incomplete: %s" % crash_file)
                raise rhnFault(5001, "Missing or invalid key: %s" % k)

        log_debug(1, self.server_id, crash, crash_file['filename'])

        server_org_id = self.server.server['org_id']
        server_crash_dir = get_crash_path(str(server_org_id), str(self.server_id), crash)
        if not server_crash_dir:
            log_debug(1, self.server_id, "Error composing crash directory path")
            raise rhnFault(5002)

        server_filename = get_crashfile_path(str(server_org_id),
                                             str(self.server_id),
                                             crash,
                                             crash_file['filename'])
        if not server_filename:
            log_debug(1, self.server_id, "Error composing crash file path")
            raise rhnFault(5003)

        if not crash_file['content-encoding'] == 'base64':
            log_debug(1, self.server_id, "Invalid content encoding: %s" % crash_file['content-encoding'])
            raise rhnFault(5004, "Invalid content encoding: %s" % crash_file['content-encoding'])

        crash_id = self._get_crash_id(self.server_id, crash)
        if not crash_id:
            log_debug(1, self.server_id, "No record for crash: %s" % crash)
            raise rhnFault(5005, "Invalid crash name: %s" % crash)

        # Create or update the crash file record in DB
        self._create_or_update_crash_file(self.server_id, crash_id, crash_file['filename'],
                                          crash_file['path'], crash_file['filesize'])
        rhnSQL.commit()

        # Create the file on filer
        if not self._is_crashfile_uploading_enabled(server_org_id):
            return 1
        filecontent = base64.decodestring(crash_file['filecontent'])
        claimed_filesize = crash_file['filesize']
        filesize = len(filecontent)
        sizelimit = self._get_crashfile_sizelimit()
        if (claimed_filesize > sizelimit or filesize > sizelimit) and sizelimit != 0:
            if filesize == 0:
                # Decoding produced nothing; report the claimed size instead.
                filesize = claimed_filesize
            log_debug(1, "The file [%s] size (%s bytes) is more than allowed (%s bytes), skipping."
                      % (crash_file['path'], filesize, sizelimit))
            return 0
        absolute_dir = os.path.join(CFG.MOUNT_POINT, server_crash_dir)
        absolute_file = os.path.join(absolute_dir, crash_file['filename'])

        if not os.path.exists(absolute_dir):
            log_debug(1, self.server_id, "Creating crash directory: %s" % absolute_dir)
            os.makedirs(absolute_dir)
            mode = stat.S_IRWXU | stat.S_IRWXG | stat.S_IROTH | stat.S_IXOTH
            os.chmod(absolute_dir, mode)
            os.chmod(os.path.dirname(os.path.normpath(absolute_dir)), mode)

        log_debug(1, self.server_id, "Creating crash file: %s" % absolute_file)
        # Crash dumps are arbitrary binary data: write in binary mode and
        # let the context manager guarantee the handle is closed.
        with open(absolute_file, 'wb') as f:
            f.write(filecontent)

        self._set_crashfile_upload_flag(self.server_id, crash_id, crash_file['filename'],
                                        crash_file['path'], crash_file['filesize'])

        if crash_file['filename'] in self.watched_items:
            # 'username' contains an extra '\n' at the end
            if crash_file['filename'] == 'username':
                filecontent = filecontent.strip()
            st = rhnSQL.Statement(_query_update_watched_items % crash_file['filename'])
            h = rhnSQL.prepare(st)
            h.execute(filecontent=filecontent, crash_id=crash_id)
            rhnSQL.commit()

        return 1
Example #48
0
    def action_cancel_document(self, context=None, justificativa=None):
        """Cancel an authorized NF-e at SEFAZ (event code 110111).

        Models other than 55/65 are delegated to the parent
        implementation.  Without *justificativa* (the mandatory
        justification text) an ``ir.actions.act_window`` dict is returned
        so the ``wizard.cancel.nfe`` wizard can collect it.  Otherwise a
        cancellation event is signed with the company's A1 certificate
        and sent via ``recepcao_evento_cancelamento``; the return
        code/message is written back and an event record plus the XML
        attachments are always created.

        NOTE(review): *context* is accepted but never used here and is
        not forwarded to ``super()`` -- confirm the parent's signature.
        """
        if self.model not in ('55', '65'):
            return super(
                InvoiceEletronic,
                self).action_cancel_document(justificativa=justificativa)

        if not justificativa:
            # No justification yet: pop up the wizard to ask for one.
            return {
                'name': 'Cancelamento NFe',
                'type': 'ir.actions.act_window',
                'res_model': 'wizard.cancel.nfe',
                'view_type': 'form',
                'view_mode': 'form',
                'target': 'new',
                'context': {
                    'default_edoc_id': self.id
                }
            }

        # bin_size=False makes the ORM return the actual certificate bytes.
        cert = self.company_id.with_context({'bin_size': False}).nfe_a1_file
        cert_pfx = base64.decodestring(cert)
        certificado = Certificado(cert_pfx, self.company_id.nfe_a1_password)

        # Event id layout: "ID" + event code 110111 + access key + 2-digit
        # event sequence.
        id_canc = "ID110111%s%02d" % (self.chave_nfe, self.sequencial_evento)
        cancelamento = {
            'idLote':
            self.id,
            'estado':
            self.company_id.state_id.ibge_code,
            'ambiente':
            2 if self.ambiente == 'homologacao' else 1,
            'eventos': [{
                'Id':
                id_canc,
                'cOrgao':
                self.company_id.state_id.ibge_code,
                'tpAmb':
                2 if self.ambiente == 'homologacao' else 1,
                'CNPJ':
                re.sub('[^0-9]', '', self.company_id.cnpj_cpf),
                'chNFe':
                self.chave_nfe,
                'dhEvento':
                datetime.utcnow().strftime('%Y-%m-%dT%H:%M:%S-00:00'),
                'nSeqEvento':
                self.sequencial_evento,
                'nProt':
                self.protocolo_nfe,
                'xJust':
                justificativa
            }]
        }
        resp = recepcao_evento_cancelamento(certificado, **cancelamento)
        resposta = resp['object'].Body.nfeRecepcaoEventoResult.retEnvEvento
        # 128 = batch processed; 135/136/155 = event accepted/registered.
        if resposta.cStat == 128 and \
           resposta.retEvento.infEvento.cStat in (135, 136, 155):
            self.state = 'cancel'
            self.codigo_retorno = resposta.retEvento.infEvento.cStat
            self.mensagem_retorno = resposta.retEvento.infEvento.xMotivo
            self.sequencial_evento += 1
        else:
            # Report the per-event error when available, the batch-level
            # error otherwise.
            if resposta.cStat == 128:
                self.codigo_retorno = resposta.retEvento.infEvento.cStat
                self.mensagem_retorno = resposta.retEvento.infEvento.xMotivo
            else:
                self.codigo_retorno = resposta.cStat
                self.mensagem_retorno = resposta.xMotivo

        self.env['invoice.eletronic.event'].create({
            'code':
            self.codigo_retorno,
            'name':
            self.mensagem_retorno,
            'invoice_eletronic_id':
            self.id,
        })
        self._create_attachment('canc', self, resp['sent_xml'])
        self._create_attachment('canc-ret', self, resp['received_xml'])
Example #49
0
 def ADencrypt(self, data, iv):
     """AES-CBC encrypt *data* with the built-in key and a base64 IV.

     Returns the ciphertext base64-encoded with newlines stripped so it
     fits on a single line.
     """
     hex_key = '9dbb5e376e65a64324e9269775dafd5b83c38f1d8819d5f1dc4ec2083c081bdb'
     cipher = common.AES_CBC(binascii.unhexlify(hex_key))
     raw_iv = base64.decodestring(iv)
     ciphertext = cipher.encrypt(data, raw_iv)
     return base64.encodestring(ciphertext).replace('\n', '')
    def write(self, vals):
        """Intercept writes: move an uploaded ``file`` into ``content``.

        When *vals* carries a truthy ``file`` (base64 upload), its
        decoded bytes are stored under ``content`` and ``file`` is reset
        to False before delegating to the standard write.
        """
        uploaded = vals.get('file')
        if uploaded:
            vals['content'] = base64.decodestring(uploaded)
            vals['file'] = False
        return super(DocumentTemplate, self).write(vals)
Example #51
0
    def merge_attachment(self):
        """Merge the shipping-label PDF attachments of the selected
        pickings into one cropped PDF and open it in a ``view.report``
        form view.

        Pages first pass through pdfrw (``PdfReader``/``PdfWriter``/
        ``PageMerge``) for scaling, then through PyPDF2
        (``PdfFileReader``/``PdfFileWriter``) for per-carrier media-box
        cropping (UPS vs FedEx).

        NOTE(review): the nested helpers close over the loop variable
        ``pick``, so after the collection loop they see only the *last*
        picking; likewise ``pick1`` after the pdf loop -- confirm this is
        intended for multi-picking selections.
        """
        filename = 'Print Shipping Labels.pdf'
        picking_obj = self.env['stock.picking']
        picking = picking_obj.browse(self._context.get('active_ids'))
        lst =[]
        writer = PdfWriter()
        # Collect the label attachments of every selected picking.
        for pick in picking:
            pick.shipping_label_print_bool = True
            ship_name = 'Shipping labels' "%s" %pick.name
            if not pick.ship_label_bool :
                attachments = self.env['ir.attachment'].search([('res_id','=',pick.id)])
            else:
                attachments = self.env['ir.attachment'].search([('res_id','=',pick.id),('name','=',ship_name)])
            for att in attachments:
                lst.append(att)
#             writer = PdfFileWriter()
#             inpfn, = sys.argv[1:]
#             outfn = '4up.' + os.path.basename(inpfn)
#             pages = PdfReader(inpfn).pages

        def get4(srcpages):
            # Scale pages for UPS labels; layout depends on ship_label_bool
            # (closes over `pick` from the loop above -- see class NOTE).
            if not pick.ship_label_bool:
                scale = 0.35
                srcpages = PageMerge() + srcpages
                x_increment, y_increment = (scale * i for i in srcpages.xobj_box[2:])
                # 4-up placement: alternate columns/rows by page index.
                for i, page in enumerate(srcpages):
                    page.scale(scale)
                    page.x = x_increment if i & 1 else 0
                    page.y = 0 if i & 2 else y_increment
                return srcpages.render()  
            if pick.ship_label_bool:
                scale = 0.88
                srcpages = PageMerge() + srcpages
                x_increment, y_increment = (scale * i for i in srcpages.xobj_box[2:])
                for i, page in enumerate(srcpages):
                    page.scale(scale)
                     
    #                 page.x = x_increment if i & 1 else 0
    #                 page.y = 0 if i & 2 else y_increment
    #                 print "parrrrrrrrrrrrrrrrrrrrrr",page.x,page.y
                return srcpages.render() 
                
        def get4_fedex(srcpages):
            # FedEx labels: uniform 0.88 scale, no repositioning.
            scale = 0.88
            srcpages = PageMerge() + srcpages
            x_increment, y_increment = (scale * i for i in srcpages.xobj_box[2:])
            for i, page in enumerate(srcpages):
                page.scale(scale)
                 
#                 page.x = x_increment if i & 1 else 0
#                 page.y = 0 if i & 2 else y_increment
#                 print "parrrrrrrrrrrrrrrrrrrrrr",page.x,page.y
            return srcpages.render()  
        # Pass 1 (pdfrw): scale each attachment page per its carrier.
        for pdf in lst:
            
            pages = PdfReader(BytesIO(base64.decodestring(pdf.datas))).pages
            pick1 = picking_obj.browse(pdf.res_id)
         
            
            for index in range(0, len(pages), 1):
                if pick1.carrier_id.delivery_type =='ups':    
                    writer.addpage(get4(pages[index:index + 1]))
                if pick1.carrier_id.delivery_type =='fedex':    
                    writer.addpage(get4_fedex(pages[index:index + 1]))
                    
            # Return merged PDF
        s = BytesIO()
        writer.write(s)
        # Pass 2 (PyPDF2): crop each page's media box per carrier.
        reader = PdfFileReader(s)
        writer = PdfFileWriter()
        for page in range(0, reader.getNumPages()):
            p=reader.getPage(page)
            if pick1.carrier_id.delivery_type =='fedex' and not pick1.ship_label_bool :
                p.mediaBox.lowerRight = (900, 145)
                p.mediaBox.lowerLeft = (-600, 390)
                p.mediaBox.upperLeft = (99, 500)
                p.mediaBox.upperRight = (530, 680)
            if pick1.carrier_id.delivery_type =='ups' and not pick1.ship_label_bool:
                p.mediaBox.lowerRight = (450, 145)
                p.mediaBox.upperRight = (425, 600) 
                p.mediaBox.lowerLeft = (-150, 275)
                p.mediaBox.upperLeft = (-5, 565)
            writer.addPage(p)
        s = BytesIO()
        writer.write(s)
        out = base64.b64encode(s.getvalue())
        view_report_status_id=self.env['view.report'].create({'file_name':out,'datas_fname':filename})
        return {
        'res_id'   :view_report_status_id.id,
        'name'     :'Print Shipping Labels',
        'view_type':'form',
        'view_mode':'form',
        'res_model':'view.report',
        'view_id'  : False ,
        'type'     :'ir.actions.act_window',
            }
Example #52
0
 def ADdecrypt(self, data):
     """Base64-decode *data*, drop its first byte, and AES-CBC decrypt
     the remainder with the built-in key.
     """
     hex_key = '9d489e6c3adfb6a00a23eb7afc8944affd180546c719db5393e2d4177e40c77d'
     payload = base64.decodestring(data)[1:]
     cipher = common.AES_CBC(binascii.unhexlify(hex_key))
     return cipher.decrypt(payload)
Example #53
0
#print(decode_str(value))


#print(decode_header(value))

#print(parseaddr(value))
if __name__ == '__main__':
	# Connect to a 163.com mailbox over POP3-SSL and authenticate.
	#email= input('Email:')
	email="*****@*****.**"
	email_ext=email.split('@')[-1]
	#email="*****@*****.**"
	#password=input('Password:'******'anBoa3lpaXFncWhkYmZmYQ==\n'
	STR163=b'd2FuZ3hpYTM0NjExOTE=\n'
	# NOTE(review): credential is hard-coded and only base64-encoded --
	# base64 is an encoding, not encryption.  Move it to an environment
	# variable or a config file outside version control.
	password=base64.decodestring(STR163).decode('utf-8')
	#password="******"
	#pop3_server=input('POP3 server:')
	#pop3_server="pop.qq.com"
	pop3_server="pop.163.com"
	##connect to POP3 server
	server=poplib.POP3_SSL(pop3_server,'995')
	#enable debug info
	#server.set_debuglevel(1)
	#welcome banner
	print(server.getwelcome().decode('utf-8'))

	# identity authorization
	server.user(email)
	server.pass_(password)
Example #54
0
def main():
    global IS_CW
    global JIRA_ID
    if APP_IDS != None:
        for APP_ID in APP_IDS.split(','):
            APP_V = get_latest_version(APP_ID)
            if (APP_V == "###"):
                print "Any version of the app", APP_ID, "is not available on 1sot"
            else:
                print "For app:", APP_ID
                print "latest version on 1sot is:", APP_V
                if query_yes_no("Change the version?", "yes"):
                    sys.stdout.write("Enter the version: ")
                    choice = raw_input()
                    APP_V = choice
                APP_VS[APP_ID] = APP_V
                print ""
                APP_IDSS.append(APP_ID)

    print "*+Pre-checks for stack+*"
    print " - stack: ", STACK
    content = ""
    try:
        urllib3.contrib.pyopenssl.inject_into_urllib3()
        urllib3.disable_warnings()
        user_agent = {'user-agent': 'Mozilla/5.0 (Windows NT 6.3; rv:36.0) ..'}
        http = urllib3.PoolManager(10, headers=user_agent)

        url = 'https://api.github.com/repos/SplunkStorm/stax/contents/' + STACK + '.json?access_token=' + TOKEN
        result = http.request("GET", url)
        if result.status == 200:
            req = json.loads(result.data)
            content = base64.decodestring(req['content'])
            j = json.loads(content)
            print " - Splunk Account: ", j['attributes']['cloud_vendor'][
                'account']
            print " - Splunk Version: ", j['attributes']['splunkwhisper'][
                'splunk_version']
            print " - Is the stack CloudWorks: *No*"
            IS_CW = 0
        elif result.status == 404:
            adhoc_sh = dns.resolver.query(STACK + '.splunkcloud.com',
                                          'CNAME')[0].target[0]
            sh_id = dns.resolver.query(
                adhoc_sh + '.' + STACK + '.splunkcloud.com',
                'CNAME')[0].target.__str__()
            if "sh" in sh_id:
                IS_CW = 1
                print " - Is the stack CloudWorks: *Yes*"
            else:
                print "*Stack is not available*"
        else:
            print "Github Error with response code :*" + result.status.__str__(
            ) + "*"
    except:
        print "Github Error: *Run the script with sudo* or invalid stack name or check the token value in variables.py"

    if PASSWORD != "###":
        sf, rf = check_SF_RF()
        print " - Search Factor: ", sf
        print " - Replication Factor: ", rf

        if sf == "False" or rf == "False":
            ERROR.append("SF/RF")

    stack_available = 1
    try:
        answers = dns.resolver.query(STACK + '.splunkcloud.com', 'CNAME')
        if "shc" not in answers[0].target[0]:
            print " - adhoc sh: ", answers[0].target
            SHs['adhoc SH'] = answers[0].target[0]
        else:
            print "*" + STACK + '.splunkcloud.com ' + "is pointing to SHC*"
    except:
        print "*" + STACK + '.splunkcloud.com ' + "DNS is not available*"
        ERROR.append("STACK")
        stack_available = 0

    if stack_available == 1:
        try:
            answers = dns.resolver.query('es-' + STACK + '.splunkcloud.com',
                                         'CNAME')
            print " - es sh: ", answers[0].target
            SHs['es SH'] = answers[0].target[0]
        except:
            print " - es sh: *DNS not available*"

        try:
            answers = dns.resolver.query('itsi-' + STACK + '.splunkcloud.com',
                                         'CNAME')
            print " - itsi sh: ", answers[0].target
            SHs['itsi SH'] = answers[0].target[0]
        except:
            print " - itsi sh: *DNS not available*"

        try:
            answers = dns.resolver.query(
                'vmware-' + STACK + '.splunkcloud.com', 'CNAME')
            print " - vmware sh:", answers[0].target
            SHs['vmware SH'] = answers[0].target[0]
        except:
            print " - vmware sh: *DNS not available*"

        try:
            answers = dns.resolver.query('pci-' + STACK + '.splunkcloud.com',
                                         'CNAME')
            print " - pci sh:", answers[0].target
            SHs['pci SH'] = answers[0].target[0]
        except:
            print " - pci sh: *DNS not available*"

        try:
            answers = dns.resolver.query(
                'exchange-' + STACK + '.splunkcloud.com', 'CNAME')
            print " - exchange sh:", answers[0].target
            SHs['exchange SH'] = answers[0].target[0]
        except:
            print " - exchange sh: *DNS not available*"
    print ""

    if APP_IDS != None:
        appcert_flg = 0
        checked_app_ids = []
        for APP_ID in APP_IDSS:
            APP_V = APP_VS[APP_ID]
            print "*+Pre-checks for " + APP_ID + " v" + APP_V + "+*"

            tmp = check_on_1sot(APP_ID, APP_V)
            if tmp != "not":
                print " - 1sot: ", APP_ID + "_" + APP_V + tmp
            else:
                print " - The app " + APP_ID + " v" + APP_V + " is not available on 1sot"
                ERROR.append("1SOT")

            appcert_f = 0

            options = {'server': jira_server, 'verify': False}
            jira = JIRA(options=options, basic_auth=(jira_user, jira_password))
            query = 'project = APPCERT AND status = Closed AND resolution = "Fixed" AND text ~ "' + APP_ID + ' v' + APP_V + '"'
            issues = jira.search_issues(query)

            if len(issues) != 0:
                for issue in issues:
                    print " - APPCERT: ", issue.__str__(
                    ) + ", status: " + issue.fields.status.__str__(
                    ) + ", resolution: ", issue.fields.resolution.__str__()
                    appcert_f = 1

            if appcert_f == 0:
                print "*Automation failed to find APPCERT JIRA*"
                ERROR.append("APPCERT")
                appcert_flg = 1
            splunkbase = check_on_splunkbase(APP_ID, APP_V)
            if splunkbase != "ERROR_404":
                print " - Splunk-Base: available"
                print " - Supported Splunk versions: ", splunkbase
            else:
                print " - The app " + APP_ID + " v" + APP_V + " is not available on Splunk-Base"
            folder_name, _status = self_service_check(APP_ID)
            if "ERROR" not in _status:
                if _status == "appmgmt_phase":
                    print " - *The app supports self-service installation*"
                elif _status == "assisted":
                    print " - self-service installation: No"
                elif _status == "unknown":
                    print " - *This app is not yet vetted to install on Splunk Cloud*"
                elif _status == "rejected":
                    print " - *This app is not yet vetted to install on Splunk Cloud*"
            if PASSWORD != "###":
                #folder_name = get_app_folder_name(APP_ID+"_"+APP_V+tmp)
                if "ERROR" not in folder_name:
                    print " - App directory name: ", folder_name
                    for key, value in SHs.iteritems():
                        installed, restart_req, current_ver, install_method = get_install_status(
                            folder_name, value)
                        if installed == "yes":
                            print " - *The app " + APP_ID + " is already installed on " + key + " with " + current_ver + " version.*"
                            print " - need Splunk restart after upgrade?: " + restart_req
                            if install_method == "True":
                                print " - This app has been installed via self-service. *Do not upgrade on " + key + "*"
                            else:
                                print " - This app has been installed by Ops."
                        else:
                            print " - Is it already installed on " + key + " SH: No"

            url = CONFLUENCE_URL
            api = Api(url, jira_user, jira_password)
            text = api.getpagecontent("Splunk Cloud Apps Information List",
                                      "CLOUDOPS")
            soup = BeautifulSoup(text.encode("utf-8"), "html.parser")
            ids = ""

            if len(soup.findAll("span")) != 0:
                for span_tag in soup.findAll("span"):
                    if span_tag != None and span_tag.find(text=True) != None:
                        tmp = span_tag.find(text=True)
                        span_tag.replace_with(tmp)

            if len(soup.findAll("p")) != 0:
                for span_tag in soup.findAll("p"):
                    if span_tag != None and span_tag.find(text=True) != None:
                        tmp = span_tag.find(text=True)
                        span_tag.replace_with(tmp)

            if len(soup.findAll("br")) != 0:
                for span_tag in soup.findAll("br"):
                    if span_tag != None and span_tag.find(text=True) != None:
                        tmp = span_tag.find(text=True)
                        span_tag.replace_with(tmp)

            if len(soup.findAll("td", text=APP_ID)) != 0:
                for nodes in soup.findAll("td", text=APP_ID):
                    allnodes = nodes.parent.findAll(recursive=False)
                    if allnodes[0].find(text=True) == APP_ID:
                        print " - APP DETAILS"
                        print "\tApp-ID: ", allnodes[0].find(
                            text=True).replace("&nbsp;", "")
                        sys.stdout.write("\tcan be installed on: ")
                        if allnodes[4].find(
                                text=True
                        ) != None and "true" in allnodes[4].find(
                                text=True).replace("&nbsp;", "").lower():
                            sys.stdout.write("sh ")
                        if allnodes[5].find(
                                text=True
                        ) != None and "true" in allnodes[5].find(
                                text=True).replace("&nbsp;", "").lower():
                            sys.stdout.write("c0m1 ")
                        if allnodes[6].find(
                                text=True
                        ) != None and "true" in allnodes[6].find(
                                text=True).replace("&nbsp;", "").lower():
                            sys.stdout.write("hfw ")
                        if allnodes[7].find(
                                text=True
                        ) != None and "true" in allnodes[7].find(
                                text=True).replace("&nbsp;", "").lower():
                            sys.stdout.write("ufw ")
                        print ""
                        if allnodes[12].find(
                                text=True) != None and allnodes[12].find(
                                    text=True).replace("&nbsp;",
                                                       "").strip().replace(
                                                           " ", "") != "":
                            print "\tdependent apps: ", allnodes[12].find(
                                text=True).replace("&nbsp;", "")
                        if allnodes[12].find(text=True) != None:
                            ids = allnodes[12].find(text=True).replace(
                                "&nbsp;", "")
            else:
                print "*App is not available on confluence page*"

            ids = ids.split('|')
            for _id in ids:
                if _id not in APP_IDSS:
                    _id = ''.join(c for c in _id if c.isdigit())
                    if len(soup.findAll("td", text=_id)) != 0:
                        for nodes in soup.findAll("td", text=_id):
                            allnodes = nodes.parent.findAll(recursive=False)
                            if allnodes[0].find(text=True) == _id:
                                print " - APP DETAILS for dependent app ", _id
                                print "\tApp-ID: ", allnodes[0].find(
                                    text=True).replace("&nbsp;", "")
                                sys.stdout.write("\tcan be installed on: ")
                                if allnodes[4].find(
                                        text=True
                                ) != None and "true" in allnodes[4].find(
                                        text=True).replace("&nbsp;",
                                                           "").lower():
                                    sys.stdout.write("sh ")
                                if allnodes[5].find(
                                        text=True
                                ) != None and "true" in allnodes[5].find(
                                        text=True).replace("&nbsp;",
                                                           "").lower():
                                    sys.stdout.write("c0m1 ")
                                if allnodes[6].find(
                                        text=True
                                ) != None and "true" in allnodes[6].find(
                                        text=True).replace("&nbsp;",
                                                           "").lower():
                                    sys.stdout.write("hfw ")
                                if allnodes[7].find(
                                        text=True
                                ) != None and "true" in allnodes[7].find(
                                        text=True).replace("&nbsp;",
                                                           "").lower():
                                    sys.stdout.write("ufw ")
                                print ""
                                if allnodes[12].find(
                                        text=True) != None and allnodes[
                                            12].find(text=True).replace(
                                                "&nbsp;", "").strip().replace(
                                                    " ", "") != "":
                                    print "\tdependent apps: ", allnodes[
                                        12].find(text=True).replace(
                                            "&nbsp;", "")
                                _v = get_latest_version(_id)
                                print "\tlatest version: " + _v
                                splunkbase = check_on_splunkbase(_id, _v)
                                if splunkbase != "ERROR_404":
                                    print "\tSplunk-Base: available"
                                    print "\tSupported Splunk versions: ", splunkbase
                                else:
                                    print "\tThe app " + _id + " v" + _v + " is not available on Splunk-Base"
                                tmp = check_on_1sot(_id, _v)
                                if tmp != "not":
                                    print "\t1sot: ", _id + "_" + _v + tmp
                                else:
                                    print "\tThe app " + _id + " v" + _v + " is not available on 1sot"
                                if PASSWORD != "###":
                                    folder_name, _status = self_service_check(
                                        _id)
                                    if "ERROR" not in _status:
                                        if _status == "appmgmt_phase":
                                            print "\t*The app supports self-service installation*"
                                        elif _status == "assisted":
                                            print "\tself-service installation: No"
                                        elif _status == "unknown":
                                            print "\t*This app is not yet vetted to install on Splunk Cloud*"
                                        elif _status == "rejected":
                                            print "\t*This app is not yet vetted to install on Splunk Cloud*"
                                    if "ERROR" not in folder_name:
                                        print "\tApp directory name: ", folder_name
                                        for key, value in SHs.iteritems():
                                            installed, restart_req, current_ver, install_method = get_install_status(
                                                folder_name, value)
                                            if installed == "yes":
                                                print "\t*The app " + _id + " is already installed on " + key + " SH with " + current_ver + " version.*"
                                                print "\tneed Splunk restart after upgrade: " + restart_req
                                                if install_method == "True":
                                                    print "\tThis app has been installed via self-service. *Do not upgrade on " + key + "*"
                                                else:
                                                    print "\tThis app has been installed by Ops."
                                            else:
                                                print "\tIs it already installed on " + key + " SH: No"

                                appcert_f = 0
                                options = {'server': jira_server}
                                jira = JIRA(options=options,
                                            basic_auth=(jira_user,
                                                        jira_password))
                                query = 'project = APPCERT AND status = Closed AND resolution = "Fixed" AND text ~ "' + _id + ' v' + _v + '"'
                                issues = jira.search_issues(query)
                                if len(issues) != 0:
                                    for issue in issues:
                                        print "\tAPPCERT: ", issue.__str__(
                                        ) + ", status: " + issue.fields.status.__str__(
                                        ) + ", resolution: ", issue.fields.resolution.__str__(
                                        )
                                        appcert_f = 1
                                if appcert_f == 0:
                                    print "\t*Automation failed to find APPCERT JIRA for dependent app", _id + "*"
                    else:
                        print "*dependent app is not available on confluence page*"
                print ""

        if appcert_flg == 1:
            print "Gone through below JIRA to get the remaining APPCERT JIRA"
            sys.stdout.write("Enter the app install JIRA ID (CO-12345): ")
            JIRA_ID = raw_input()
            issue = jira.issue(JIRA_ID)

            for link in issue.fields.issuelinks:
                #print link.key
                if hasattr(link, "outwardIssue"):
                    outwardIssue = link.outwardIssue
                    print "\t", outwardIssue.key + " " + outwardIssue.fields.summary.__str__(
                    )
                    print "\tstatus: ", outwardIssue.fields.status, "\tresolution: ", jira.issue(
                        outwardIssue.key).fields.resolution.__str__()
                    print ""
                if hasattr(link, "inwardIssue"):
                    inwardIssue = link.inwardIssue
                    print "\t", inwardIssue.key + " " + inwardIssue.fields.summary.__str__(
                    )
                    print "\tstatus: ", inwardIssue.fields.status, "\tresolution: ", jira.issue(
                        inwardIssue.key).fields.resolution.__str__()
                    print ""
Example #55
0
def _rdbeta(key):
    """Decode the module-level blob ``_ss`` with *key* and print it.

    ``_ss`` is base64-decoded, then XOR-ed byte-by-byte against *key*,
    which is repeated cyclically to cover the whole payload.
    """
    cipher = base64.decodestring(_ss)
    key_stream = itertools.cycle(key)
    plain_chars = []
    for c, k in zip(cipher, key_stream):
        plain_chars.append(chr(ord(c) ^ ord(k)))
    print(''.join(plain_chars))
Example #56
0
    def post(self, request):
        """Upload an image (multipart file or base64 payload) to cloud storage.

        POST request parameters:
            [REQUIRED — one of]
            image: a file object for the image to be uploaded
            base64: the base64 data-URI equivalent of the image

            [OPTIONAL]
            eventId: id of the event the image will be rendered on
            isValid: 'true' marks the image as trusted
                NOTE(review): the original docs call this flag ``isTrusted``
                but the code reads ``isValid`` — confirm which name clients
                actually send.

        Arguments:
            request -- the Django request object
        Returns:
            HttpResponseBadRequest -- if neither image nor base64 is given
            JsonResponse -- containing the uuid of the stored image
        """
        print("Request Recieved", time.time())
        # Server-generated name: never trust client-supplied file names.
        image = Image(name=str(uuid4()))
        print()

        upload = request.FILES.get('image', False)
        encoded = request.POST.get('base64', False)
        if upload:
            FileSystemStorage().save(image.name, upload)
            extension = upload.name.split('.')[-1]
            image.uuid = image.name + '.' + extension
        elif encoded:
            # Strip the "data:...;base64," prefix before decoding.
            # NOTE: decodestring is deprecated (kept for parity).
            raw = base64.decodestring(str.encode(encoded.split(",")[1]))
            with open(image.name, "wb") as handle:
                handle.write(raw)
            image.uuid = image.name + '.jpg'
        else:
            return HttpResponseBadRequest(
                "Bad request: base64 or image field should be given")
        print("File Saved", time.time())

        image.create_thumbnail(storage=STORAGE)
        # Upload the file (and its thumbnail) to cloud storage.
        image.put(storage=STORAGE)

        # Attach the image to an event when an id was supplied.
        event_id = request.POST.get("eventId", False)
        if event_id:
            image.is_trusted = request.POST.get('isValid', '') == 'true'
            image.save(event_id, DB)
            print("Image Added")
        # Return the file id for future reference.
        print("Returning From Request", time.time())
        return JsonResponse({'name': image.uuid})
Example #57
0
    def import_csv(self):
        """Import a payroll-entry CSV file into the current run (lot).

        The base64-encoded file on ``self.file`` is decoded to a temp
        file and parsed as a ';'-separated CSV.  For each non-empty row:
        the employee, a valid contract and the run's 'hr.saisie.line'
        are looked up (a Warning aborts the import when any is missing),
        the basic columns are written on the line, then leave / overtime
        / rubrique / avantage / avance values are created or updated,
        enforcing any configured min/max limits.  Finally the run is
        locked and the imported file is attached to a posted message.

        Raises:
            Warning: on any missing employee/contract/line, when a value
                falls outside a configured limit, or when employees of
                the run are absent from the file.
        """
        start_time = time.time()
        # Decode the uploaded attachment into a real file so csv can read it.
        tmp_file = tempfile.NamedTemporaryFile(delete=False)
        tmp_file.write(base64.decodestring(self.file))
        tmp_file.close()
        file_path = tmp_file.name
        data = open(tmp_file.name, "rU")
        delimiter = ";"
        reader = csv.DictReader(data, dialect='excel', delimiter=delimiter)
        logger.info("Start importing CSV file")
        run = self.run_id
        # Registration numbers of all employees in the run; matched rows are
        # removed below so leftovers can be reported as missing from the file.
        immatriculations = run.line_ids.mapped('employee_id.otherid')
        nbr = 0
        for i, row in enumerate(reader):
            nbr += 1
            # Skip rows where id, last name and first name are all blank.
            tmp_join = ''.join([
                row.get(ID, '').strip(),
                row.get(NOM, '').strip(),
                row.get(PRENOM, '').strip()
            ])
            if not tmp_join:
                continue
            otherid = row.get(ID, False)
            if not otherid:
                msg = _(
                    'Le fichier CSV doit contenir les matricules internes de tous les employés, Ligne [%s]'
                    % str(i + 1))
                run.message_post(msg)
                raise Warning(msg)
            # Resolve the employee by internal registration number.
            employee = self.env['hr.employee'].search(
                [('otherid', '=', otherid)], limit=1)
            if not employee:
                msg = _(
                    'Employé introuvable, Matricule [%s], Employé [%s - %s]'
                ) % (otherid, row.get(NOM, ''), row.get(PRENOM, ''))
                run.message_post(msg)
                raise Warning(msg)
            # Most recent contract valid within the run's date interval.
            contract = self.env['hr.contract'].search([
                ('is_contract_valid_by_context', 'in',
                 (self.run_id.date_start, self.run_id.date_end)),
                ('company_id', '=', self.run_id.company_id.id),
                ('employee_id', '=', employee.id),
            ],
                                                      limit=1,
                                                      order="date_start desc")
            if not contract:
                msg = _(
                    'Aucun contrat trouvé pour l\'employé avec matricule [%s] et nom [%s]'
                ) % (otherid, employee.name)
                run.message_post(msg)
                raise Warning(msg)

            # The run line (saisie) this row will be written on.
            line = self.env['hr.saisie.line'].search([
                ('employee_id', '=', employee.id),
                ('contract_id', '=', contract.id),
                ('run_id', '=', run.id),
            ])
            if not line:
                msg = _(
                    'Employé introuvable dans le lot avec matricule [%s] et nom [%s]'
                ) % (otherid, employee.name)
                run.message_post(msg)
                raise Warning(msg)
            # Basic columns written directly on the line.
            data_row_csv = {
                'normal': tf(row.get(NBR_HJN, '0')),
                'notes': row.get(NOTIFICATIONS, ''),
                'cv': row.get(CV, 0),
                'kilometrage': row.get(KILOMETRAGE, 0),
            }
            # Optional expense / paid-leave columns, only when present.
            if row.get(EXPENSE_PAID, False):
                data_row_csv.update(
                    {FIELD_EXPENSE_PAID: tf(row.get(EXPENSE_PAID, '0'))})
            if row.get(EXPENSE_TO_PAY, False):
                data_row_csv.update(
                    {FIELD_EXPENSE_TO_PAY: tf(row.get(EXPENSE_TO_PAY, '0'))})
            if row.get(LEAVE_PAID, False):
                data_row_csv.update(
                    {FIELD_LEAVE_PAID: tf(row.get(LEAVE_PAID, '0'))})
            line.write(data_row_csv)
            # Leave types (non-overtime): update existing value or create
            # a new record when the CSV value is positive.
            for status in self.env['hr.holidays.status'].with_context({
                    'active_test':
                    False,
            }).search([('is_hs', '=', False)]):
                if status.code in row.keys():
                    value = tf(row.get(status.code))
                    exists = self.env['hr.saisie.leave'].with_context({
                        'active_test':
                        False,
                    }).search([
                        ('type_id', '=', status.id),
                        ('saisie_id', '=', line.id),
                    ])
                    if exists:
                        exists.value = value
                    else:
                        if value > 0:
                            self.env['hr.saisie.leave'].create({
                                'saisie_id':
                                line.id,
                                'type_id':
                                status.id,
                                'value':
                                value,
                            })
            # Overtime types (is_hs=True): same update-or-create pattern.
            for status in self.env['hr.holidays.status'].with_context({
                    'active_test':
                    False,
            }).search([('is_hs', '=', True)]):
                if status.code in row.keys():
                    value = tf(row.get(status.code))
                    exists = self.env['hr.saisie.hs'].with_context({
                        'active_test':
                        False,
                    }).search([
                        ('type_id', '=', status.id),
                        ('saisie_id', '=', line.id),
                    ])
                    if exists:
                        exists.value = value
                    else:
                        if value > 0:
                            self.env['hr.saisie.hs'].create({
                                'saisie_id': line.id,
                                'type_id': status.id,
                                'value': value,
                            })

            # Manually-entered salary rubriques, with optional company limits.
            for rubrique in self.env['hr.rubrique'].with_context({
                    'active_test':
                    False,
            }).search([('auto_compute', '=', False)]):
                if rubrique.code in row.keys():
                    value = tf(row.get(rubrique.code))
                    rubrique_limit = self.env[
                        'hr.rubrique.limit'].with_context({
                            'active_test': False,
                        }).search([
                            ('company_id', '=', run.company_id.id),
                            ('rubrique_id', '=', rubrique.id),
                        ])
                    if rubrique_limit and (value < rubrique_limit.amount_from
                                           or
                                           value > rubrique_limit.amount_to):
                        raise Warning(
                            _('La rubrique [%s] a une limite entre [%s et %s]')
                            % (
                                rubrique.name,
                                rubrique_limit.amount_from,
                                rubrique_limit.amount_to,
                            ))
                    exists = self.env['hr.saisie.rubrique'].with_context({
                        'active_test':
                        False,
                    }).search([
                        ('type_id', '=', rubrique.id),
                        ('saisie_id', '=', line.id),
                    ])
                    if exists:
                        exists.value = value
                    else:
                        if value > 0:
                            self.env['hr.saisie.rubrique'].create({
                                'saisie_id':
                                line.id,
                                'type_id':
                                rubrique.id,
                                'value':
                                value,
                            })
            # Benefits in kind (avantages), with optional company limits.
            for avantage in self.env['hr.avantage'].with_context({
                    'active_test':
                    False,
            }).search([]):
                if avantage.code in row.keys():
                    value = tf(row.get(avantage.code))
                    avantage_limit = self.env[
                        'hr.avantage.limit'].with_context({
                            'active_test': False,
                        }).search([
                            ('company_id', '=', run.company_id.id),
                            ('avantage_id', '=', avantage.id),
                        ])
                    if avantage_limit and (value < avantage_limit.amount_from
                                           or
                                           value > avantage_limit.amount_to):
                        raise Warning(
                            _('L\'avantage [%s] a une limite entre [%s et %s]')
                            % (
                                avantage.name,
                                avantage_limit.amount_from,
                                avantage_limit.amount_to,
                            ))

                    exists = self.env['hr.saisie.avantage'].with_context({
                        'active_test':
                        False,
                    }).search([
                        ('type_id', '=', avantage.id),
                        ('saisie_id', '=', line.id),
                    ])
                    if exists:
                        exists.value = value
                    else:
                        if value > 0:
                            self.env['hr.saisie.avantage'].create({
                                'saisie_id':
                                line.id,
                                'type_id':
                                avantage.id,
                                'value':
                                value,
                            })
            # Salary advances: only interest-free types flagged as
            # CSV-overwritable are considered.
            for avance in self.env['hr.avance'].with_context({
                    'active_test':
                    False,
            }).search([
                ('interest_rate', '=', 0),
                ('csv_erase', '=', True),
            ]):
                if avance.code in row.keys():
                    value = tf(row.get(avance.code))
                    avance_limit = self.env['hr.avance.limit'].with_context({
                        'active_test':
                        False,
                    }).search([
                        ('company_id', '=', run.company_id.id),
                        ('avance_id', '=', avance.id),
                    ])
                    if avance_limit and (value < avance_limit.amount_from
                                         or value > avance_limit.amount_to):
                        raise Warning(
                            _('L\'avance [%s] a une limite entre [%s et %s]') %
                            (
                                avance.name,
                                avance_limit.amount_from,
                                avance_limit.amount_to,
                            ))
                    exists = self.env['hr.saisie.avance'].with_context({
                        'active_test':
                        False,
                    }).search([
                        ('type_id', '=', avance.id),
                        ('saisie_id', '=', line.id),
                    ])
                    if exists:
                        exists.value = value
                    else:
                        if value > 0:
                            self.env['hr.saisie.avance'].create({
                                'saisie_id':
                                line.id,
                                'type_id':
                                avance.id,
                                'value':
                                value,
                            })
            # Mark this employee as covered by the CSV file.
            if otherid in immatriculations:
                immatriculations.remove(otherid)
            line.check_data()
        os.unlink(tmp_file.name)
        # Any employee of the run not found in the file aborts the import.
        if immatriculations:
            raise Warning(
                _('Employé qui a comme matricule [%s] est introuvable dans le fichier'
                  ) % ' - '.join(immatriculations))
        logger.info("End importing CSV file")
        # Lock the run and attach the imported file to the posted message.
        run.csv_lock = True
        msg = _('Fichier est bien importé, le lot est verouillé')
        run.message_post(msg,
                         attachments=[
                             (
                                 self.name,
                                 base64.decodestring(self.file),
                             ),
                         ])
        # NOTE(review): `_logger` here vs `logger` elsewhere in this method —
        # confirm both names exist at module level, else this raises NameError.
        _logger.info('Meter saisie_run import_csv '.upper() +
                     "%s -> nbr : %s" % (
                         time.time() - start_time,
                         str(nbr),
                     ))
Example #58
0
def findvideos(item):
    """Resolve the playable video URL for *item* (serieslan.com).

    Scrapes the episode page for the video id (``idv``) and the RC4 key
    (``ide``), asks the site's key API for the base64-encrypted Openload
    id, decrypts it with RC4 and builds the Openload embed URL.

    :param item: channel item whose ``url`` points at the episode page.
    :return: list containing a single playable Item (Openload server).
    """
    ## Kodi 17+
    ## Openload as default server

    import base64

    itemlist = []

    ## Urls
    urlServer = "https://openload.co/embed/%s/"
    urlApiGetKey = "https://serieslan.com/idv.php?i=%s"

    ## RC4 stream cipher (KSA + PRGA), mirroring the site's JS player.
    ## Python 2 only: relies on range() returning a mutable list.
    def txc(key, text):
        s = range(256)
        j = 0
        res = ''
        # Key-scheduling algorithm
        for i in range(256):
            j = (j + s[i] + ord(key[i % len(key)])) % 256
            x = s[i]
            s[i] = s[j]
            s[j] = x
        # Pseudo-random generation: XOR keystream with the payload
        i = 0
        j = 0
        for y in range(len(text)):
            i = (i + 1) % 256
            j = (j + s[i]) % 256
            x = s[i]
            s[i] = s[j]
            s[j] = x
            res += chr(ord(text[y]) ^ s[(s[i] + s[j]) % 256])
        return res

    data = httptools.downloadpage(item.url).data
    pattern = '<div id="video" idv="([^"]*)" ide="([^"]*)" ids="[^"]*" class="video">'
    idv, ide = scrapertools.find_single_match(data, pattern)
    thumbnail = scrapertools.find_single_match(
        data,
        '<div id="tab-1" class="tab-content current">.+?<img src="([^"]*)">')
    show = scrapertools.find_single_match(
        data, '<span>Episodio: <\/span>([^"]*)<\/p><p><span>Idioma')
    thumbnail = host + thumbnail
    # The key API returns the Openload id, base64ed and RC4-encrypted.
    data = httptools.downloadpage(urlApiGetKey % idv,
                                  headers={
                                      'Referer': item.url
                                  }).data
    video_url = urlServer % (txc(ide, base64.decodestring(data)))
    server = "openload"
    # Language detection from the title; default to Latino dub.
    if " SUB" in item.title:
        lang = "VOS"
    elif " Sub" in item.title:  # BUG FIX: was `in item` (the Item object)
        lang = "VOS"
    else:
        lang = "Latino"
    title = "Enlace encontrado en " + server + " [" + lang + "]"
    itemlist.append(
        Item(channel=item.channel,
             action="play",
             title=title,
             show=show,
             url=video_url,
             plot=item.plot,
             thumbnail=thumbnail,
             server=server,
             folder=False))

    return itemlist
Example #59
0
def binary_fromsoap(datatype, el, ns):
    """Deserialize a SOAP element into raw bytes.

    Returns None for an xsi:nil element.  Raises exc.InvalidInput when
    the declared type is neither absent nor 'xs:base64Binary';
    otherwise base64-decodes the element text.
    """
    if el.get(nil_qn) == 'true':
        return None
    declared = el.get(type_qn)
    if declared is not None and declared != 'xs:base64Binary':
        raise exc.InvalidInput(el.tag, ET.tostring(el))
    payload = el.text.encode('ascii')
    return base64.decodestring(payload)
Example #60
0
def ticket_from_message(message, queue, logger):
    """Create or update a Ticket from a raw RFC822 e-mail message string.

    Parses the message, applies per-queue ignore rules, matches an existing
    ticket via a "[slug-id]" tracking tag in the subject, extracts the
    plain-text body and attachments from the MIME parts, records a public
    FollowUp, and sends templated notification e-mails.

    :param message: full raw e-mail as a string (RFC822 format)
    :param queue: Queue model instance the message was received for
    :param logger: logger used for progress/debug output
    :returns: the Ticket that was created or updated; ``None`` when
        QUEUE_EMAIL_BOX_UPDATE_ONLY suppresses a new ticket; ``False`` when
        an IgnoreEmail rule matched and the message should stay in the
        mailbox; ``True`` when an IgnoreEmail rule matched and the message
        should be deleted.
    """
    # 'message' must be an RFC822 formatted message.
    message = email.message_from_string(
        message) if six.PY3 else email.message_from_string(
            message.encode('utf-8'))
    subject = message.get('subject', _('Comment from e-mail'))
    subject = decode_mail_headers(decodeUnknown(message.get_charset(),
                                                subject))
    # Strip configured boilerplate affixes (e.g. "Re:", "Fwd:") from the
    # subject before tracking-ID matching.
    for affix in STRIPPED_SUBJECT_STRINGS:
        subject = subject.replace(affix, "")
    subject = subject.strip()

    sender = message.get('from', _('Unknown Sender'))
    sender = decode_mail_headers(decodeUnknown(message.get_charset(), sender))
    # Keep only the address portion of e.g. "Name <addr@host>".
    sender_email = email.utils.parseaddr(sender)[1]

    cc = message.get_all('cc', None)
    if cc:
        # first, fixup the encoding if necessary
        cc = [
            decode_mail_headers(decodeUnknown(message.get_charset(), x))
            for x in cc
        ]
        # get_all checks if multiple CC headers, but individual emails may be comma separated too
        tempcc = []
        for hdr in cc:
            tempcc.extend(hdr.split(','))
        # use a set to ensure no duplicates
        cc = set([x.strip() for x in tempcc])

    # Apply ignore rules scoped to this queue, plus global rules that have
    # no queue attached.
    for ignore in IgnoreEmail.objects.filter(
            Q(queues=queue) | Q(queues__isnull=True)):
        if ignore.test(sender_email):
            if ignore.keep_in_mailbox:
                # By returning 'False' the message will be kept in the mailbox,
                # and the 'True' will cause the message to be deleted.
                return False
            return True

    # Look for a "[slug-123]" tracking tag in the subject that ties this
    # message to an existing ticket.
    matchobj = re.match(r".*\[" + queue.slug + "-(?P<id>\d+)\]", subject)
    if matchobj:
        # This is a reply or forward.
        ticket = matchobj.group('id')  # NOTE: a string ID, not a Ticket object
        logger.info("Matched tracking ID %s-%s" % (queue.slug, ticket))
    else:
        logger.info("No tracking ID matched.")
        ticket = None

    body = None
    counter = 0
    files = []

    # Walk all MIME parts: the first unnamed text/plain part becomes the
    # ticket body; HTML and named parts are collected as attachments.
    for part in message.walk():
        if part.get_content_maintype() == 'multipart':
            continue

        name = part.get_param("name")
        if name:
            name = email.utils.collapse_rfc2231_value(name)

        if part.get_content_maintype() == 'text' and name is None:
            if part.get_content_subtype() == 'plain':
                # EmailReplyParser strips quoted reply chains from the text.
                body = EmailReplyParser.parse_reply(
                    decodeUnknown(part.get_content_charset(),
                                  part.get_payload(decode=True)))
                # workaround to get unicode text out rather than escaped text
                try:
                    body = body.encode('ascii').decode('unicode_escape')
                except UnicodeEncodeError:
                    # NOTE(review): the encode result is discarded here, so
                    # body is left unchanged on this path -- confirm intended.
                    body.encode('utf-8')
                logger.debug("Discovered plain text MIME part")
            else:
                # Non-plain (HTML) text part: keep it as an attachment so the
                # formatted version is preserved alongside the plain body.
                files.append(
                    SimpleUploadedFile(
                        _("email_html_body.html"),
                        encoding.smart_bytes(part.get_payload()), 'text/html'))
                logger.debug("Discovered HTML MIME part")
        else:
            if not name:
                # Synthesize a filename from the part index and MIME type.
                ext = mimetypes.guess_extension(part.get_content_type())
                name = "part-%i%s" % (counter, ext)
            payload = part.get_payload()
            if isinstance(payload, list):
                payload = payload.pop().as_string()
            payloadToWrite = payload
            # check version of python to ensure use of only the correct error type
            if six.PY2:
                non_b64_err = binascii.Error
            else:
                non_b64_err = TypeError
            try:
                logger.debug("Try to base64 decode the attachment payload")
                if six.PY2:
                    payloadToWrite = base64.decodestring(payload)
                else:
                    payloadToWrite = base64.decodebytes(payload)
            except non_b64_err:
                logger.debug("Payload was not base64 encoded, using raw bytes")
                payloadToWrite = payload
            # NOTE(review): payloadToWrite is computed above but the
            # attachment below is built from part.get_payload(decode=True)
            # instead -- confirm which decoding is intended.
            files.append(
                SimpleUploadedFile(name, part.get_payload(decode=True),
                                   mimetypes.guess_type(name)[0]))
            logger.debug("Found MIME attachment %s" % name)

        counter += 1

    if not body:
        # No plain-text body found: fall back to stripping tags from the
        # last part walked.
        # NOTE(review): 'part' is the loop variable from the walk above and
        # would be unbound if the message had no parts -- confirm upstream
        # guarantees at least one part.
        mail = BeautifulSoup(part.get_payload(), "lxml")
        if ">" in mail.text:
            message_body = mail.text.split(">")[1]
            body = message_body.encode('ascii', errors='ignore')
        else:
            body = mail.text

    if ticket:
        try:
            t = Ticket.objects.get(id=ticket)
        except Ticket.DoesNotExist:
            # Tag looked like ours but no such ticket: treat as new.
            logger.info(
                "Tracking ID %s-%s not associated with existing ticket. Creating new ticket."
                % (queue.slug, ticket))
            ticket = None
        else:
            logger.info("Found existing ticket with Tracking ID %s-%s" %
                        (t.queue.slug, t.id))
            # New mail on a closed ticket re-opens it.
            if t.status == Ticket.CLOSED_STATUS:
                t.status = Ticket.REOPENED_STATUS
                t.save()
            new = False

    # Map SMTP priority/importance headers onto the ticket priority scale:
    # 2 (high) if either header matches a high-priority token, else 3.
    smtp_priority = message.get('priority', '')
    smtp_importance = message.get('importance', '')
    high_priority_types = {'high', 'important', '1', 'urgent'}
    priority = 2 if high_priority_types & {smtp_priority, smtp_importance
                                           } else 3

    if ticket is None:
        if settings.QUEUE_EMAIL_BOX_UPDATE_ONLY:
            # Configured to only update existing tickets; drop new ones.
            return None
        new = True
        t = Ticket.objects.create(
            title=subject,
            queue=queue,
            submitter_email=sender_email,
            created=timezone.now(),
            description=body,
            priority=priority,
        )
        logger.debug("Created new ticket %s-%s" % (t.queue.slug, t.id))

    if cc:
        # get list of currently CC'd emails
        # NOTE(review): 'ticket' here is the matched ID string (or None),
        # not the Ticket instance 't' -- confirm this filter is intended.
        current_cc = TicketCC.objects.filter(ticket=ticket)
        current_cc_emails = [x.email for x in current_cc if x.email]
        # get emails of any Users CC'd to email, if defined
        # (some Users may not have an associated email, e.g, when using LDAP)
        current_cc_users = [
            x.user.email for x in current_cc if x.user and x.user.email
        ]
        # ensure submitter, assigned user, queue email not added
        other_emails = [queue.email_address]
        if t.submitter_email:
            other_emails.append(t.submitter_email)
        if t.assigned_to:
            other_emails.append(t.assigned_to.email)
        current_cc = set(current_cc_emails + current_cc_users + other_emails)
        # first, add any User not previously CC'd (as identified by User's email)
        all_users = User.objects.all()
        all_user_emails = set([x.email for x in all_users])
        users_not_currently_ccd = all_user_emails.difference(set(current_cc))
        users_to_cc = cc.intersection(users_not_currently_ccd)
        for user in users_to_cc:
            tcc = TicketCC.objects.create(ticket=t,
                                          user=User.objects.get(email=user),
                                          can_view=True,
                                          can_update=False)
            tcc.save()
        # then add remaining emails alphabetically, makes testing easy
        new_cc = cc.difference(current_cc).difference(all_user_emails)
        new_cc = sorted(list(new_cc))
        for ccemail in new_cc:
            # Strip CR/LF so stored addresses cannot carry header-injection
            # style line breaks.
            tcc = TicketCC.objects.create(ticket=t,
                                          email=ccemail.replace('\n',
                                                                ' ').replace(
                                                                    '\r', ' '),
                                          can_view=True,
                                          can_update=False)
            tcc.save()

    # Record the e-mail as a public FollowUp on the ticket.
    f = FollowUp(
        ticket=t,
        title=_('E-Mail Received from %(sender_email)s' %
                {'sender_email': sender_email}),
        date=timezone.now(),
        public=True,
        comment=body,
    )

    if t.status == Ticket.REOPENED_STATUS:
        f.new_status = Ticket.REOPENED_STATUS
        f.title = _(
            'Ticket Re-Opened by E-Mail Received from %(sender_email)s' %
            {'sender_email': sender_email})

    f.save()
    logger.debug("Created new FollowUp for Ticket")

    if six.PY2:
        # Py2 logging can choke on non-ASCII titles; replace unmappable
        # characters before logging.
        logger.info(("[%s-%s] %s" % (
            t.queue.slug,
            t.id,
            t.title,
        )).encode('ascii', 'replace'))
    elif six.PY3:
        logger.info("[%s-%s] %s" % (
            t.queue.slug,
            t.id,
            t.title,
        ))

    # Attach the files collected from the MIME walk to the FollowUp.
    attached = process_attachments(f, files)
    for att_file in attached:
        logger.info(
            "Attachment '%s' (with size %s) successfully added to ticket from email."
            % (att_file[0], att_file[1].size))

    context = safe_template_context(t)

    # Send templated notifications: new-ticket mails for new tickets,
    # update mails otherwise.
    if new:
        if sender_email:
            send_templated_mail(
                'newticket_submitter',
                context,
                recipients=sender_email,
                sender=queue.from_address,
                fail_silently=True,
            )
        if queue.new_ticket_cc:
            send_templated_mail(
                'newticket_cc',
                context,
                recipients=queue.new_ticket_cc,
                sender=queue.from_address,
                fail_silently=True,
            )
        # Avoid double-sending when both CC settings point at the same list.
        if queue.updated_ticket_cc and queue.updated_ticket_cc != queue.new_ticket_cc:
            send_templated_mail(
                'newticket_cc',
                context,
                recipients=queue.updated_ticket_cc,
                sender=queue.from_address,
                fail_silently=True,
            )
    else:
        context.update(comment=f.comment)
        if t.assigned_to:
            send_templated_mail(
                'updated_owner',
                context,
                recipients=t.assigned_to.email,
                sender=queue.from_address,
                fail_silently=True,
            )
        if queue.updated_ticket_cc:
            send_templated_mail(
                'updated_cc',
                context,
                recipients=queue.updated_ticket_cc,
                sender=queue.from_address,
                fail_silently=True,
            )

    return t