def test_error_make_ref_failure(self):
     """ Test that github api make ref fails does not break API
     """
     self.github_api.sha_origin = "789456"
     self.github_api.exist_file["path/to/some/file.xml"] = False
     # We need to make checking the ref fail first (because the branch should not exist to trigger creation)
     self.github_api.route_fail[
         "http://localhost/repos/ponteineptique/dummy/git/refs/heads/uuid-1234"
     ] = True
     # And then make creating fail
     self.github_api.route_fail[
         "http://localhost/repos/ponteineptique/dummy/git/refs"
     ] = True
     result = self.makeRequest(
         base64.encodebytes(b'Some content'),
         make_secret(base64.encodebytes(b'Some content').decode("utf-8"), self.secret),
         {
             "author_name": "ponteineptique",
             "date": "19/06/2016",
             "logs": "Hard work of transcribing file",
             "auithor_email": "*****@*****.**",
             "branch": "uuid-1234"
         }
     )
     data, http = response_read(result)
     self.assertEqual(
         data, {'message': 'Not Found', 'status': 'error', 'step': 'make_ref'},
         "Error message should be carried by ProxyError in Ref Creation Failure"
     )
     self.assertEqual(http, 404, "Status code should be carried by ProxyError")
Example n. 2
	def load(self, f, pretty = False, no_payload = False):
		entries = []

		gf = pb.ghost_file_entry()
		buf = f.read(4)
		size, = struct.unpack('i', buf)
		gf.ParseFromString(f.read(size))
		g_entry = pb2dict.pb2dict(gf, pretty)

		if gf.chunks:
			entries.append(g_entry)
			while True:
				gc = pb.ghost_chunk_entry()
				buf = f.read(4)
				if not buf:  # f is binary, so comparing against '' would never match
					break
				size, = struct.unpack('i', buf)
				gc.ParseFromString(f.read(size))
				entry = pb2dict.pb2dict(gc, pretty)
				if no_payload:
					f.seek(gc.len, os.SEEK_CUR)
				else:
					entry['extra'] = base64.encodebytes(f.read(gc.len))
				entries.append(entry)
		else:
			if no_payload:
				f.seek(0, os.SEEK_END)
			else:
				g_entry['extra'] = base64.encodebytes(f.read())
			entries.append(g_entry)

		return entries
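The loader above walks a stream of length-prefixed protobuf records and base64-encodes each chunk's payload. A minimal, self-contained sketch of the same framing pattern, with plain bytes standing in for the CRIU pb/pb2dict messages (an assumption, since those modules are not shown here):

import base64
import io
import struct

def write_records(payloads):
    buf = io.BytesIO()
    for payload in payloads:
        buf.write(struct.pack('i', len(payload)))   # 4-byte native size prefix
        buf.write(payload)
    buf.seek(0)
    return buf

def read_records(f):
    entries = []
    while True:
        size_bytes = f.read(4)
        if not size_bytes:                          # end of stream
            break
        size, = struct.unpack('i', size_bytes)
        payload = f.read(size)
        entries.append({'extra': base64.encodebytes(payload)})
    return entries

print(read_records(write_records([b'first', b'second'])))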
Example n. 3
 def saveWindowState(self):
     """ Save:
         * which tools are loaded 
         * geometry of the top level windows
         * layout of dockwidgets and toolbars
     """
     
     # Save tool list
     tools = pyzo.toolManager.getLoadedTools()
     pyzo.config.state.loadedTools = tools
     
     # Store window geometry
     geometry = self.saveGeometry()
     try:
         geometry = bytes(geometry) # PyQt4
     except:
         geometry = bytes().join(geometry) # PySide
     geometry = base64.encodebytes(geometry).decode('ascii')
     pyzo.config.state.windowGeometry = geometry
     
     # Store window state
     state = self.saveState()
     try:
         state = bytes(state) # PyQt4
     except:
         state = bytes().join(state) # PySide
     state = base64.encodebytes(state).decode('ascii')
     pyzo.config.state.windowState = state
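The geometry and state byte strings are base64-encoded so they can live in a text-based config file; restoring them is just the reverse. A self-contained round-trip sketch, with a fake payload standing in for the real saveGeometry()/saveState() bytes:

import base64

# Round-trip demo: arbitrary state bytes -> ASCII text (safe to store in a config
# file) -> original bytes.
raw = bytes(range(16))
as_text = base64.encodebytes(raw).decode('ascii')        # what gets written to the config
restored = base64.decodebytes(as_text.encode('ascii'))   # what restoreGeometry() would receive
assert restored == raw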
Example n. 4
    def home(self, request):
        app = self.app.materialize()
        if app.main_window.widget_id == self.widget_id:
            window = app.main_window
        else:
            try:
                window = app.windows[self.widget_id]
            except KeyError:
                raise Exception("Unknown window")

        sourcefile = os.path.join(os.path.dirname(__file__), 'libs.py')
        py_compile.compile(sourcefile)
        with open(os.path.join(
                    os.path.dirname(sourcefile),
                    '__pycache__/%s.cpython-34.pyc' % os.path.splitext(os.path.basename(sourcefile))[0]
                ), 'rb') as compiled:
            toga = base64.encodebytes(compiled.read())

        return render(request, 'toga/window.html', {
            'toga': toga,
            'bootstrap': base64.encodebytes(b'\xee\x0c\r\n00000000' + marshal.dumps(bootstrap.__code__)).strip(),
            'app': app,
            'window': window,
            'callbacks': {
                # 'sample': base64.encodebytes(b'\x08\x1c\xe8VU\x00\x00\x00' + marshal.dumps(sample.__code__)).strip()
                '%s-%s' % (widget, message): base64.encodebytes(b'\xee\x0c\r\n00000000' + marshal.dumps(callback.__code__)).strip()
                for (widget, message), callback in self.callbacks.items()
            }
        })
Example n. 5
    def hide_email(self):
        """ base64 encodes all email addresses for use with wptool.js
            reveal functions.
            for spam protection.
            (as long as the email-harvest-bot doesn't decode Base64)
        """

        if not self.lines:
            return False

        final_output = []
        for sline in self.lines:
            mailtos = self.find_mailtos()
            for mailto in mailtos:
                b64_mailto = base64.encodebytes(mailto.encode('utf-8'))
                sline = sline.replace(
                    mailto,
                    b64_mailto.decode('utf-8').replace('\n', ''))

            emails = self.find_email_addresses()
            for email in emails:
                b64_addr = base64.encodebytes(email.encode('utf-8'))
                sline = sline.replace(
                    email,
                    b64_addr.decode('utf-8').replace('\n', ''))

            # add line (encoded or not)
            final_output.append(sline)
        self.lines = [l for l in final_output]
        return True
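Each mailto/address is replaced with its newline-free base64 form; the reveal script then has to decode it on the client. A small round-trip sketch of that transformation (the address is a placeholder):

import base64

# What hide_email() does to a single address, and the matching decode step.
email = 'someone@example.com'
encoded = base64.encodebytes(email.encode('utf-8')).decode('utf-8').replace('\n', '')
assert base64.b64decode(encoded).decode('utf-8') == email
print(encoded)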
 def test_fail_get_upstream_ref(self):
     """ Test when getting the file fails
     """
     self.proxy.__default_branch__ = GithubProxy.DEFAULT_BRANCH.AUTO_SHA
     self.github_api.sha_origin = "789456"
     self.github_api.exist_file["path/to/some/file.xml"] = False
     self.github_api.route_fail[
         "http://localhost/repos/ponteineptique/dummy/git/refs/heads/uuid-1234"
     ] = "master_ref"
     self.github_api.route_fail[
         "http://localhost/repos/ponteineptique/dummy/git/refs/heads/master"
     ] = "master_ref"
     result = self.makeRequest(
         base64.encodebytes(b'Some content'),
         make_secret(base64.encodebytes(b'Some content').decode("utf-8"), self.secret),
         {
             "author_name": "ponteineptique",
             "date": "19/06/2016",
             "logs": "Hard work of transcribing file",
             "author_email": "*****@*****.**",
             "branch": "uuid-1234"
         }
     )
     data, http = response_read(result)
     self.assertEqual(
         data, {'message': 'The default branch from which to checkout is either not available or does not exist',
                'status': 'error', "step": "make_ref"},
         "Error message should be carried by ProxyError in Check Reference Failure"
     )
     self.assertEqual(http, 404, "Status code should be produced by ProxyError")
Example n. 7
    def _write_filedata(self, pathfrom, datatype='text', enc64=True):
        '''Take the given file and create a series of SQL statements that create a table
        and write the file data to it. If enc64 is True, use base64 encoding, otherwise
        use hex encoding. datatype is the column datatype used for the table.
        Returns the (random) table name and the series of SQL statements.'''
        table = ''.join(random.choice(string.ascii_uppercase) for _ in range(16))
        statements = []
        statements.append('CREATE TABLE {} (data {})'.format(table, datatype))
        default_chunk_len = 1024

        with open(pathfrom, 'rb') as fd:
            chunk_len = default_chunk_len if not self._se.has_message_size() else self._se.message_size()
            log.debug('Reading {} bytes from {}.'.format(chunk_len, pathfrom))
            chunk = fd.read(chunk_len)
            if enc64:
                data = base64.encodebytes(chunk).decode()     # the decode() decodes the python byte string.
            else:
                data = binascii.b2a_hex(chunk).decode()

            statements.append('INSERT INTO {} (data) VALUES(\'{}\')'.format(table, data))
            while chunk:
                chunk_len = default_chunk_len if not self._se.has_message_size() else self._se.message_size()
                log.debug('Reading {} bytes from {}.'.format(chunk_len, pathfrom))
                chunk = fd.read(chunk_len)
                if chunk:
                    if enc64:
                        data = base64.encodebytes(chunk).decode()
                    else:
                        data = binascii.b2a_hex(chunk).decode()

                    statements.append('UPDATE {} SET data = concat(data,\'{}\')'.format(table, data))

        return table, statements
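A hedged sketch of how the per-statement values round-trip: the writer above base64-encodes each chunk separately, so the reader can decode chunk by chunk and join the results (splitting the concatenated column back into chunks on the target side is outside this snippet):

import base64

original = bytes(range(256)) * 10   # stand-in for the file contents
chunk_len = 1024

encoded_chunks = []
for i in range(0, len(original), chunk_len):
    # one base64 string per INSERT/UPDATE statement, as _write_filedata builds them
    encoded_chunks.append(base64.encodebytes(original[i:i + chunk_len]).decode())

# Each chunk carries its own padding, so decode chunk by chunk and join the results.
restored = b''.join(base64.decodebytes(c.encode()) for c in encoded_chunks)
assert restored == original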
 def test_fail_update_file(self, logger):
     """ Test when update the file fails
     """
     self.github_api.sha_origin = "789456"
     self.github_api.exist_file["path/to/some/file.xml"] = True
     self.github_api.route_fail[
         "http://localhost/repos/ponteineptique/dummy/contents/path/to/some/file.xml"
     ] = 500
     result = self.makeRequest(
         base64.encodebytes(b'Some content'),
         make_secret(base64.encodebytes(b'Some content').decode("utf-8"), self.secret),
         {
             "author_name": "ponteineptique",
             "date": "19/06/2016",
             "logs": "Hard work of transcribing file",
             "author_email": "*****@*****.**",
             "branch": "uuid-1234"
         }
     )
     data, http = response_read(result)
     self.assertEqual(
         data, {'message': 'Not Found', 'status': 'error', 'step': 'update'},
         "Error message should be carried by ProxyError in Update File Failure"
     )
     self.assertEqual(http, 404, "Status code should be carried by ProxyError")
     logger.assert_called_with("Request::PUT::/repos/ponteineptique/dummy/contents/path/to/some/file.xml", extra={
         'response': mock.ANY,
         'request': mock.ANY
     })
 def test_fail_get_file(self):
     """ Test when getting the file fails
     """
     self.github_api.sha_origin = "789456"
     self.github_api.exist_file["path/to/some/file.xml"] = False
     self.github_api.route_fail[
         "http://localhost/repos/ponteineptique/dummy/contents/path/to/some/file.xml"
     ] = 501
     result = self.makeRequest(
         base64.encodebytes(b'Some content'),
         make_secret(base64.encodebytes(b'Some content').decode("utf-8"), self.secret),
         {
             "author_name": "ponteineptique",
             "date": "19/06/2016",
             "logs": "Hard work of transcribing file",
             "author_email": "*****@*****.**",
             "branch": "uuid-1234"
         }
     )
     data, http = response_read(result)
     self.assertEqual(
         data, {'message': 'Error checking a file', 'status': 'error', "step": "get"},
         "Error message should be carried by ProxyError in Check Reference Failure"
     )
     self.assertEqual(http, 501, "Status code should be carried by ProxyError")
 def test_fail_pull_request(self, logger):
     """ Test when pull request the file fails
     """
     self.github_api.sha_origin = "789456"
     self.github_api.route_fail[
         "http://localhost/repos/perseusDL/dummy/pulls"
     ] = 500
     result = self.makeRequest(
         base64.encodebytes(b'Some content'),
         make_secret(base64.encodebytes(b'Some content').decode("utf-8"), self.secret),
         {
             "author_name": "ponteineptique",
             "date": "19/06/2016",
             "logs": "Hard work of transcribing file",
             "author_email": "*****@*****.**",
             "branch": "uuid-1234"
         }
     )
     data, http = response_read(result)
     self.assertEqual(
         data, {'message': 'Not Found', 'status': 'error', 'step': 'pull_request'},
         "Error message should be carried by ProxyError in Pull Request Failure"
     )
     self.assertEqual(http, 404, "Status code should be carried by ProxyError")
     logger.assert_called_with("Not Found", extra={
         'step': 'pull_request',
         'context': {
             'params': {
                 'base': 'master', 'body': '',
                 'head': 'ponteineptique:uuid-1234', 'title': '[Proxy] Hard work of transcribing file'
             },
             'uri': '/repos/perseusDL/dummy/pulls'
         }
     })
Example n. 11
    def home(self, request):
        # app = self.app.materialize()
        # if app.main_window.id == self.id:
        #     window = app.main_window
        # else:
        #     try:
        #         window = app.windows[self.id]
        #     except KeyError:
        #         raise Exception("Unknown window")

        sourcefile = os.path.join(os.path.dirname(__file__), 'render', '__init__.py')

        fd, tempname = tempfile.mkstemp()
        os.close(fd)  # only the path is needed; close the descriptor from mkstemp
        py_compile.compile(sourcefile, cfile=tempname, doraise=True)
        # tempname is an absolute path, so it can be opened directly
        with open(tempname, 'rb') as compiled:
            toga = base64.encodebytes(compiled.read())

        return render(request, 'toga/app.html', {
            'toga': toga,
            'bootstrap': base64.encodebytes(b'\xee\x0c\r\n00000000' + marshal.dumps(bootstrap.__code__)).strip(),
            'window': self._impl,
            'callbacks': {
                # 'sample': base64.encodebytes(b'\x08\x1c\xe8VU\x00\x00\x00' + marshal.dumps(sample.__code__)).strip()
                '%s-%s' % (widget, message): base64.encodebytes(b'\xee\x0c\r\n00000000' + marshal.dumps(callback.__code__)).strip()
                for (widget, message), callback in self.callbacks.items()
            }
        })
Example n. 12
def access_keys(url, identity, key_file, csr_file, crt_file, ca_crt_file):
    csr = CryptoFunctions.load_csr(csr_file)

    # Note the signature is only based on the CSR data - not the identity...
    # We send the identity only to enable the server to identify who we are.
    signature = CryptoFunctions.sign(CryptoFunctions.get_csr_bytes(csr), key_file)

    # We need to also send a suitable header to indicate that we're sending JSON...
    headers = {'Content-Type': 'application/json'}

    # Note that the signature must be base64-encoded, since it is an arbitrary byte stream;
    # when we receive data back from the server we must reverse this process...
    data = {'csr': base64.encodebytes(CryptoFunctions.get_csr_bytes(csr)).decode(), 'identity': identity,
            'signature': base64.encodebytes(signature).decode()}

    r = requests.post(url, data=json.dumps(data), headers=headers)
    if r.status_code == 200:
        # On HTTP 200, write the returned certificates out to files...
        with open(crt_file, 'wb') as f:
            f.write(base64.decodebytes(r.json()['certificate'].encode()))

        with open(ca_crt_file, 'wb') as f:
            f.write(base64.decodebytes(r.json()['CA_certificate'].encode()))
        return True
    else:
        return False
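A hedged sketch of the receiving side's first step, given the JSON body built above: undo the base64 encoding before the CSR and signature can be verified (the verification itself is out of scope here, and the helper name is hypothetical):

import base64
import json

def unpack_request(body):
    # Reverse of the payload built in access_keys(): decode the base64 fields.
    payload = json.loads(body)
    identity = payload['identity']
    csr_bytes = base64.decodebytes(payload['csr'].encode())
    signature = base64.decodebytes(payload['signature'].encode())
    return identity, csr_bytes, signature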
Example n. 13
def encode_images(format_dict):
    """b64-encodes images in a displaypub format dict

    Perhaps this should be handled in json_clean itself?

    Parameters
    ----------

    format_dict : dict
        A dictionary of display data keyed by mime-type

    Returns
    -------

    format_dict : dict
        A copy of the same dictionary,
        but binary image data ('image/png' or 'image/jpeg')
        is base64-encoded.

    """
    encoded = format_dict.copy()
    pngdata = format_dict.get('image/png')
    if isinstance(pngdata, bytes) and pngdata[:8] == PNG:
        encoded['image/png'] = encodebytes(pngdata).decode('ascii')
    jpegdata = format_dict.get('image/jpeg')
    if isinstance(jpegdata, bytes) and jpegdata[:2] == JPEG:
        encoded['image/jpeg'] = encodebytes(jpegdata).decode('ascii')
    return encoded
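A usage sketch for the function above. The module-level PNG constant is not shown in the snippet; it is assumed to be the standard 8-byte PNG file signature, so a matching fake payload is used:

from base64 import encodebytes

fake_png = b'\x89PNG\r\n\x1a\n' + b'\x00' * 16   # fake payload, for illustration only
format_dict = {'text/plain': 'a tiny plot', 'image/png': fake_png}

encoded = encode_images(format_dict)
print(encoded['image/png'][:16])   # now an ASCII base64 string (under the assumption above)
print(encoded['text/plain'])       # untouched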
 def test_error_get_ref_failure(self):
     """ Test that github api check ref fails does not break API
     """
     self.github_api.sha_origin = "789456"
     self.github_api.exist_file["path/to/some/file.xml"] = False
     self.github_api.route_fail[
         "http://localhost/repos/ponteineptique/dummy/git/refs/heads/uuid-1234"
     ] = 500
     result = self.makeRequest(
         base64.encodebytes(b'Some content'),
         make_secret(base64.encodebytes(b'Some content').decode("utf-8"), self.secret),
         {
             "author_name": "ponteineptique",
             "date": "19/06/2016",
             "logs": "Hard work of transcribing file",
             "auithor_email": "*****@*****.**",
             "branch": "uuid-1234"
         }
     )
     data, http = response_read(result)
     self.assertEqual(
         data, {'message': 'Bad credentials', 'status': 'error', 'step': 'get_ref'},
         "Error message should be carried by ProxyError in Ref Failure"
     )
     self.assertEqual(http, 401, "Status code should be carried by ProxyError")
 def __init__(self, **kwargs):  # Basic Config Items
     __app = kwargs.get('appli')
     __id = datetime.datetime.now().strftime('%Y%m%d%H%M%S')
     __output = kwargs.get('output', '/tmp/otecconfgen/' + __app + __id + '.conf')
     __comments = kwargs.get('comments', '1')
     __virt = kwargs.get('virt', 'vmware')
     __format = kwargs.get('outputformat', 'txt')
     __web = kwargs.get('web', False)
     # Build Common Objects
     self.netConf = NETCONFIG(**(self.__filterdict__(self.__buildfilter__(self.__net_fileopt), **kwargs)))
     self.sshConf = SSHCONFIG(**(self.__filterdict__(self.__buildfilter__(self.__ssh_fileopt), **kwargs)))
     self.localeConf = LOCALECONFIG(**(self.__filterdict__(self.__buildfilter__(self.__locale_fileopt), **kwargs)))
     # Add common objets to string used to fill config file
     s = self.__headerwrite()
     s += self.__sshconfwrite(__comments)
     s += self.__netconfwrite(__virt, __comments)
     s += self.__localeconfwrite(__comments)
     if __app == 'oxe':  # OXE
         # Build OXE specific Objects
         self.oxeConf = OXECONFIG(**(self.__filterdict__(self.__buildfilter__(self.__oxe_fileopt), **kwargs)))
         # Add OXE specific objets to string used to fill config file
         s += self.__oxeconfwrite(__comments)
     elif (__app == 'oxems') or (__app == 'otes') or (__app == 'licsrv'):  # OMS/OTES/LICSRV
         # Build OXEMS/OTES/LICSRV specific Objects
         self.adminConf = ADMINCONFIG(**(self.__filterdict__(self.__buildfilter__(self.__admin_fileopt), **kwargs)))
         self.sysAdmConf = SYSADMCONFIG(**(self.__filterdict__(self.__buildfilter__(self.__sysadm_fileopt), **kwargs)))
         # Add OXEMS/OTES/LICSRV specific objets to string used to fill config file
         s += self.__adminconfwrite(__comments)
         s += self.__sysadmconfwrite(__comments)
     elif __app == 'ot':  # OTMS
         # Build OT specific Objects
         self.adminConf = ADMINCONFIG(**(self.__filterdict__(self.__buildfilter__(self.__admin_fileopt), **kwargs)))
         self.otmsConf = OTMSCONFIG(**(self.__filterdict__(self.__buildfilter__(self.__otms_fileopt), **kwargs)))
         # Add OT specific objets to string used to fill config file
         s += self.__adminconfwrite(__comments)
         s += self.__otmsconfwrite(__comments)
     s += self.__footerwrite()
     # Writing Config File
     pprint.pprint(s)
     try:
         # os.makedirs(os.path.split(__output)[0]) # To modify, problems if directory already exists
         if __format == 'txt' or __virt == 'vmware':
             if __output != 'stdout':
                 with open(__output, 'w') as fh:  # Unix/Linux path //test default path as /tmp/otecconfgen/
                     fh.write(s)
                 print('Config File generated: {}'.format(__output))
             else:
                 print(s)
         elif __virt == 'amazon' and __format == 'b64':
             if __output != 'stdout':
                 with open(__output, 'wb') as fh:  # Unix/Linux path
                     fh.write(base64.encodebytes(bytes(s, encoding="UTF-8")))
                 print('Config File generated: {}'.format(__output))
             else:
                 sys.stdout.buffer.write(base64.encodebytes(bytes(s, encoding="UTF-8")))
     except:
         print('failed to write config file '+ __output)
     if __web == True:
         webserver(os.path.split(__output)[0])
Example n. 16
	def load(self, f, pbuff):
		d = {}

		inq	= f.read(pbuff.inq_len)
		outq	= f.read(pbuff.outq_len)

		d['inq']	= base64.encodebytes(inq)
		d['outq']	= base64.encodebytes(outq)

		return d
Example n. 17
    def on_selection_modified(self,view):
        self.output("on_selection_modified")

        global last_select_time
        if view.name() == "Lua Context":
            if not protocol or not protocol.server:
                print("not server")
                return

            if time.time()-(last_select_time or 0) <= 0.1:
                print("time too close")
                return

            last_select_time = time.time()

            sel = view.sel()[0]
            line_no = view.rowcol(sel.a)[0]
            if line_no < 0:
                return
            fullname = mng_context.getfullnamebyline(line_no)
            print(fullname)
            if not fullname or re.match(r"\[+",fullname):
                return
            
            mdict = mng_context.fullnamedict[fullname]

            if mdict["value"].find("table") != -1 or mdict["value"].find("userdata") != -1:
                protocol.property_get_context(base64.encodebytes(fullname.encode()).decode())
            
        elif view.name() == "Lua expression":
            if not protocol or not protocol.server:
                return

            if time.time()-(last_select_time or 0) <= 0.1:
                return

            last_select_time = time.time()

            sel = view.sel()[0]
            line_no = view.rowcol(sel.a)[0]
            if line_no < 0:
                return
            fullname = mng_exp.getfullnamebyline(line_no)
            print(fullname)
            if not fullname:
                return
            
            mdict = mng_exp.fullnamedict[fullname]
            if mdict["lv"] > 0:
                if mdict["value"].find("table") != -1 or mdict["value"].find("userdata") != -1:
                    protocol.property_get_eval(base64.encodebytes(fullname.encode()).decode())
Example n. 18
def encode_images(format_dict):
    """b64-encodes images in a displaypub format dict

    Perhaps this should be handled in json_clean itself?

    Parameters
    ----------

    format_dict : dict
        A dictionary of display data keyed by mime-type

    Returns
    -------

    format_dict : dict
        A copy of the same dictionary,
        but binary image data ('image/png', 'image/jpeg' or 'application/pdf')
        is base64-encoded.

    """
    encoded = format_dict.copy()

    pngdata = format_dict.get('image/png')
    if isinstance(pngdata, bytes):
        # make sure we don't double-encode
        if not pngdata.startswith(PNG64):
            pngdata = encodebytes(pngdata)
        encoded['image/png'] = pngdata.decode('ascii')

    jpegdata = format_dict.get('image/jpeg')
    if isinstance(jpegdata, bytes):
        # make sure we don't double-encode
        if not jpegdata.startswith(JPEG64):
            jpegdata = encodebytes(jpegdata)
        encoded['image/jpeg'] = jpegdata.decode('ascii')
        
    gifdata = format_dict.get('image/gif')
    if isinstance(gifdata, bytes):
        # make sure we don't double-encode
        if not gifdata.startswith((GIF_64, GIF89_64)):
            gifdata = encodebytes(gifdata)
        encoded['image/gif'] = gifdata.decode('ascii')

    pdfdata = format_dict.get('application/pdf')
    if isinstance(pdfdata, bytes):
        # make sure we don't double-encode
        if not pdfdata.startswith(PDF64):
            pdfdata = encodebytes(pdfdata)
        encoded['application/pdf'] = pdfdata.decode('ascii')

    return encoded
Example n. 19
def pem_objects(draw):
    """
    Strategy for generating ``pem`` objects.
    """
    key = RSAPrivateKey((
        b'-----BEGIN RSA PRIVATE KEY-----\n' +
        encodebytes(draw(s.binary(min_size=1))) +
        b'-----END RSA PRIVATE KEY-----\n'))
    return [key] + [
        Certificate((
            b'-----BEGIN CERTIFICATE-----\n' +
            encodebytes(cert) +
            b'-----END CERTIFICATE-----\n'))
        for cert in draw(s.lists(s.binary(min_size=1), min_size=1))]
Example n. 20
 def __authorization_header(self):
     if self.config.has_client_credentials():
         return b"Basic " + encodebytes(
                     self.config.client_id.encode('ascii') +
                     b":" +
                     self.config.client_secret.encode('ascii')
                 ).replace(b"\n", b"").strip()
     elif self.config.has_access_token():
         return b"Bearer " + self.config.access_token.encode('ascii')
     else:
         return b"Basic " + encodebytes(
                     self.config.public_key.encode('ascii') +
                     b":" +
                     self.config.private_key.encode('ascii')
                 ).replace(b"\n", b"").strip()
Example n. 21
 def _do_numeric(self, value):
     idx = self._register(value)
     if PY_VER > 2:
         data = base64.encodebytes(gzip_string(numpy.ndarray.dumps(value)))
     else:
         data = base64.encodestring(gzip_string(numpy.ndarray.dumps(value)))
     return dict(type='numeric', id=idx, data=data)
Example n. 22
def send_mail(to, subject, text, files=[]):
    assert type(files)==list

    msg = MIMEMultipart()
    msg['From'] = gmail_user
    msg['To'] = to
    msg['Subject'] = subject

    msg.attach(MIMEText(text.encode('utf-8'), 'plain', 'UTF-8'))

    for f in files:
        fp = open(os.path.join(config.application_path,f),"rb")
        part = MIMEBase('application', "octet-stream")
        part.set_payload(encodebytes(fp.read()).decode())
        fp.close()
        part.add_header('Content-Transfer-Encoding', 'base64')
        part.add_header('Content-Disposition', 'attachment; filename="%s"' % f)
        msg.attach(part)   # msg is an instance of MIMEMultipart()

    mailServer = smtplib.SMTP("smtp.gmail.com", 587)
    mailServer.ehlo()
    mailServer.starttls()
    mailServer.ehlo()
    mailServer.login(gmail_user, gmail_pwd)
    mailServer.sendmail(gmail_user, to, msg.as_string())
    # Should be mailServer.quit(), but that crashes...
    mailServer.close()
Example n. 23
 def generate_authorization(self, username, password):
     password_manager = urllib.request.HTTPPasswordMgrWithDefaultRealm()
     password_manager.add_password(None, self.url, username, password)
     urllib.request.install_opener(
         urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(password_manager)))
     base64string = base64.encodebytes(('%s:%s' % (username, password)).encode()).decode()[:-1]
     return "Basic %s" % base64string
Example n. 24
def seguid(seq):
    """Returns the SEGUID (string) for a sequence (string or Seq object).

    Given a nucleotide or amino-acid sequence (or any string),
    returns the SEGUID string (A SEquence Globally Unique IDentifier).
    seq type = str.

    For more information about SEGUID, see:
    http://bioinformatics.anl.gov/seguid/
    DOI: 10.1002/pmic.200600032
    """
    import hashlib
    import base64
    m = hashlib.sha1()
    try:
        # Assume it's a Seq object
        seq = str(seq)
    except AttributeError:
        # Assume it's a string
        pass
    m.update(_as_bytes(seq.upper()))
    try:
        # For Python 3+
        return base64.encodebytes(m.digest()).decode().replace("\n", "").rstrip("=")
    except AttributeError:
        pass
    # For all other Pythons
    return base64.b64encode(m.digest()).rstrip("=")
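On Python 3 the same checksum can be written without the compatibility shims; a standalone sketch:

import base64
import hashlib

def seguid_py3(seq):
    # SEGUID: base64 of the SHA-1 digest of the upper-cased sequence, padding and newlines removed.
    digest = hashlib.sha1(str(seq).upper().encode('ascii')).digest()
    return base64.encodebytes(digest).decode().replace('\n', '').rstrip('=')

assert seguid_py3('acgt') == seguid_py3('ACGT')   # case-insensitive by construction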
Example n. 25
    def make_cache(self):
        from gramps.webapp.libdjango import DjangoInterface
        if self.dji is None:
            self.dji = DjangoInterface()

        if isinstance(self, Person):
            raw = self.dji.get_person(self)
        elif isinstance(self, Family):
            raw = self.dji.get_family(self)
        elif isinstance(self, Place):
            raw = self.dji.get_place(self)
        elif isinstance(self, Media):
            raw = self.dji.get_media(self)
        elif isinstance(self, Source):
            raw = self.dji.get_source(self)
        elif isinstance(self, Citation):
            raw = self.dji.get_citation(self)
        elif isinstance(self, Repository):
            raw = self.dji.get_repository(self)
        elif isinstance(self, Note):
            raw = self.dji.get_note(self)
        elif isinstance(self, Event):
            raw = self.dji.get_event(self)
        elif isinstance(self, Tag):
            raw = self.dji.get_tag(self)
        else:
            raise Exception("Don't know how to get raw '%s'" % type(item))
        return str(base64.encodebytes(pickle.dumps(raw)), "utf-8")
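The cache string is just a pickled raw tuple wrapped in base64. A hedged sketch of consuming it again (helper name is hypothetical); pickle.loads() must only ever be fed trusted data, here data this application serialized itself:

import base64
import pickle

def load_cache(cache_string):
    # Reverse of make_cache(): base64-decode, then unpickle.
    return pickle.loads(base64.decodebytes(cache_string.encode('utf-8')))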
Example n. 26
def latex_to_png(s, encode=False, backend=None, wrap=False):
    """Render a LaTeX string to PNG.

    Parameters
    ----------
    s : text
        The raw string containing valid inline LaTeX.
    encode : bool, optional
        Should the PNG data be base64-encoded to make it JSON'able.
    backend : {matplotlib, dvipng}
        Backend for producing PNG data.
    wrap : bool
        If True, automatically wrap `s` as a LaTeX equation.

    None is returned when the backend cannot be used.

    """
    s = cast_unicode(s)
    allowed_backends = LaTeXTool.instance().backends
    if backend is None:
        backend = allowed_backends[0]
    if backend not in allowed_backends:
        return None
    if backend == 'matplotlib':
        f = latex_to_png_mpl
    elif backend == 'dvipng':
        f = latex_to_png_dvipng
    else:
        raise ValueError('No such backend {0}'.format(backend))
    bin_data = f(s, wrap)
    if encode and bin_data:
        bin_data = encodebytes(bin_data)
    return bin_data
Example n. 27
    def export_config(self, request):
        """
        Exports the script file from a VPCS instance.

        Mandatory request parameters:
        - id (vm identifier)

        Response parameters:
        - script_file_base64 (script file base64 encoded)
        - False if no configuration can be exported
        """

        # validate the request
        if not self.validate_request(request, VPCS_EXPORT_CONFIG_SCHEMA):
            return

        # get the instance
        vpcs_instance = self.get_vpcs_instance(request["id"])
        if not vpcs_instance:
            return

        response = {}
        script_file_path = os.path.join(vpcs_instance.working_dir, vpcs_instance.script_file)
        try:
            with open(script_file_path, "rb") as f:
                config = f.read()
                response["script_file_base64"] = base64.encodebytes(config).decode("utf-8")
        except OSError as e:
            self.send_custom_error("unable to export the script file: {}".format(e))
            return

        if not response:
            self.send_response(False)
        else:
            self.send_response(response)
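A hedged client-side sketch of using the response described in the docstring: decode script_file_base64 back into a script file. The response layout follows the docstring; the helper name and output path are hypothetical:

import base64

def save_exported_config(response, path='startup.vpc'):
    config = base64.decodebytes(response['script_file_base64'].encode('utf-8'))
    with open(path, 'wb') as f:
        f.write(config)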
Example n. 28
    def execute(self, method, params=None, progress=True):
        """
        :param str method: remote method name
        :param params: method kwargs
        :param bool progress: use native progress bar support
        :return: task id
        :rtype: str
        """

        assert method and isinstance(method, str)
        task_id = self._get_task_id()
        data = {
            'id': task_id,
            'params': None,
            'progress': progress,
        }
        if params:
            data.update(
                params=base64.encodebytes(pickle.dumps(params)))

        cmd = self._new_cmd(
            target=CommandTargetType.call_method, data=data, method=method)
        self._send(cmd)

        for cmd in self._read():
            if cmd.target == CommandTargetType.problem:
                raise ExecuteError(cmd.data)
            elif cmd.target == CommandTargetType.ok:
                return task_id
Example n. 29
 def represent_binary(self, data):
     if hasattr(base64, 'encodebytes'):
         data = base64.encodebytes(data).decode('ascii')
     else:
         data = base64.encodestring(data).decode('ascii')
     return self.represent_scalar(u'tag:yaml.org,2002:binary', data,
                                  style='|')
Example n. 30
    def __init__(self, loop):
        self._loop = loop

        handlers = [
            # Static file handlers
            (r'/(favicon.ico)', StaticFileHandler, {"path": ""}),

            # File upload handler
            (r'/upload', upload.StreamHandler, {"loop": self._loop}),

            (r'/quote', QuoteHandler, {"loop": self._loop}),
            (r'/provider_upload', UploadToProvider, {"loop": self._loop}),

            # Web sockets
            (r'/ws/upload_progress', UploadProgressWS),
            (r'/ws/provider_upload_progress', ProviderUploadProgressWS),
            (r'/ws/provider_request', ProvidersService),

            # Page handlers
            (r"/", MainHandler),
            (r"/auth/create", AuthCreateHandler, {"loop": self._loop}),
            (r"/auth/login", AuthLoginHandler, {"loop": self._loop}),
            (r"/auth/logout", AuthLogoutHandler, {"loop": self._loop}),
        ]
        settings = dict(
            template_path=os.path.join(os.path.dirname(__file__), "templates"),
            static_path=os.path.join(os.path.dirname(__file__), "static"),
            cookie_secret=base64.encodebytes(uuid.uuid4().bytes + uuid.uuid4().bytes),
            login_url="/auth/login",
            debug=True,
        )
        self._sessions = {}
        self._web_socks = {}
        super().__init__(handlers, **settings)
Example n. 31
 def get_url_file(url):
     return 'tmp/' + base64.encodebytes(str.encode(url)).decode('utf-8').strip()
Example n. 32
from base64 import encodebytes

# The original snippet omits these imports; judging by the names used below they
# most likely come from sympy and pyasn1 (an assumption, not shown in the source).
from sympy import isprime, mod_inverse
from sympy.ntheory import discrete_log
from pyasn1.type.univ import Integer, Sequence
from pyasn1.codec.der.encoder import encode

c = int.from_bytes(b"Hello! Can you give me the flag, please? I would really appreciate it!", "big")
m = int.from_bytes(b"Quack! Quack!", "big")

p = 11 # just pick some arbitrary prime here and change it until something works

for i in range(1, 10000):
    q = i * 2**559 + 1
    if not isprime(q):
        continue
    try:
        n = p*q
        d = discrete_log(n, c, m)
        e = mod_inverse(d,(p-1)*(q-1))
        if pow(m,d,n)!=c: raise
        if pow(c,e,n)!=m: raise
    except:
        print(f'i={i} failed')
        continue

    # Success! Let's construct the PEM file
    print(f'i={i} succeeded')
    print(f'd={d}')
    seq = Sequence()
    for i,x in enumerate([0, n, e, d, p, q, d%(p-1), d%(q-1), mod_inverse(q,p)]):
        seq.setComponentByPosition(i, Integer(x))
    b64 = encodebytes(encode(seq)).decode('ascii')
    print(f'-----BEGIN RSA PRIVATE KEY-----\n{b64}-----END RSA PRIVATE KEY-----')
    break
Example n. 33
    def mimecast_post(  # noqa: MC0001
        self,
        url: str,
        uri: str,
        access_key: str,
        secret_key: str,
        app_id: str,
        app_key: str,
        data: dict,
        meta: dict = None,
    ) -> dict:
        """
        This method will send a properly formatted post request to the Mimecast server
        :param url: The server URL
        :param uri: The URI for the api call
        :param access_key: The access key for the session
        :param secret_key: The secret key for the session
        :param app_id: The application ID for the app that will be logging in
        :param app_key: The key associated with the app_id
        :param data: The payload for the api call
        :param meta: The meta information for request
        :return:
        """
        # Set full URL
        url = url + uri

        # Generate request header values
        request_id = str(uuid.uuid4())
        hdr_date = datetime.datetime.utcnow().strftime(
            "%a, %d %b %Y %H:%M:%S") + " UTC"

        # Decode secret key
        encoded_secret_key = secret_key.encode()
        bytes_secret_key = base64.b64decode(encoded_secret_key)

        # Create hmac message
        msg = ":".join([hdr_date, request_id, uri, app_key])

        # Create the HMAC SHA1 of the Base64 decoded secret key for the Authorization header
        hmac_sha1 = hmac.new(bytes_secret_key,
                             msg.encode(),
                             digestmod=hashlib.sha1).digest()

        # Base64-encode the HMAC SHA1 of hdr_date + ":" + request_id + ":" + uri + ":" + app_key for the Authorization header
        sig = base64.encodebytes(hmac_sha1).rstrip()
        sig = sig.decode("UTF-8")

        # Create request headers
        headers = {
            "Authorization": "MC " + access_key + ":" + sig,
            "x-mc-app-id": app_id,
            "x-mc-date": hdr_date,
            "x-mc-req-id": request_id,
            "Content-Type": "application/json",
        }

        # build payload data
        if data is not None:
            payload = {"data": [data]}
        else:
            payload = {"data": []}

        if meta is not None:
            payload["meta"] = meta

        try:
            request = requests.post(url=url,
                                    headers=headers,
                                    data=str(payload))
        except requests.exceptions.RequestException as e:
            raise PluginException(data=e)

        try:
            response = request.json()
        except json.decoder.JSONDecodeError:
            self.logger.error(request.text)
            raise PluginException(
                cause="Unknown error.",
                assistance=
                "The Mimecast server did not respond correctly. Response not in JSON format. Response in logs.",
            )

        try:
            # Check for expired key
            if response["fail"]:
                for errors in response["fail"]:
                    for codes in errors["errors"]:
                        if codes["code"] == "err_xdk_binding_expired":
                            raise PluginException(
                                cause="AccessKey has expired.",
                                assistance="Please provide a valid AccessKey.",
                            )
        except KeyError:
            self.logger.error(response)
            raise PluginException(
                cause="Unknown error.",
                assistance=
                "The Mimecast server did not respond correctly. Response in logs.",
            )

        try:
            if response["meta"]["status"] != 200:
                self.logger.error(response)
                raise PluginException(
                    cause="Server request failed.",
                    assistance="Status code is {}, see log for details.".
                    format(response["meta"]["status"]),
                    data=response["fail"],
                )
        except KeyError:
            self.logger.error(response)
            raise PluginException(
                cause="Unknown error.",
                assistance=
                "The Mimecast server did not respond correctly. Response in logs.",
            )

        return response
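A standalone sketch of just the signing recipe used in mimecast_post (all values below are fake placeholders; only the sequence of steps matches the code above):

import base64
import datetime
import hashlib
import hmac
import uuid

secret_key = base64.b64encode(b'fake-secret').decode()
app_key = 'fake-app-key'
uri = '/api/example/endpoint'
request_id = str(uuid.uuid4())
hdr_date = datetime.datetime.utcnow().strftime('%a, %d %b %Y %H:%M:%S') + ' UTC'

# HMAC-SHA1 over date:request_id:uri:app_key, keyed with the base64-decoded secret.
msg = ':'.join([hdr_date, request_id, uri, app_key]).encode()
digest = hmac.new(base64.b64decode(secret_key), msg, digestmod=hashlib.sha1).digest()
signature = base64.encodebytes(digest).rstrip().decode('UTF-8')
print('Authorization: MC <accesskey>:' + signature)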
Example n. 34
import pymongo
import json
import base64

#connect to mongodb
myclient = pymongo.MongoClient("mongodb://*****:*****@140.131.149.39:465")
mydb = myclient["menu"]
mycol = mydb["menu"]

#data
name = "炒面"
price = 70
date = 20200614
with open("upload.jpg", "rb") as image:
    img_data = image.read()
    img_string = base64.encodebytes(img_data)

#dict
mydict = {"name": name, "price": price, "date": date, "image": img_string}

#update if name = "炒面" & date = 20200614
x = mycol.update_one({
    "name": name,
    "date": date
}, {'$set': mydict},
                     upsert=True)
#x = mycol.insert_one(mydict)
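A hedged sketch of the read path, reusing the collection and keys from the script above: fetch the document back and decode the stored base64 image into a file (the output filename is hypothetical).

#read back and restore the image
doc = mycol.find_one({"name": name, "date": date})
if doc is not None:
    with open("download.jpg", "wb") as out:
        out.write(base64.decodebytes(doc["image"]))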
Example n. 35
def collect_events(helper, ew):

    # Get variables

    apikey = helper.get_arg('apikey') + ":"
    interval = helper.get_arg('interval')
    helper.get_input_type()
    loglevel = helper.get_log_level()
    proxy_settings = helper.get_proxy()
    account = helper.get_arg('global_account')
    username = account['username']
    password = account['password']

    helper.log_debug("Parameters correctly obtained")

    # for each entry in KVstore not searched
    service = client.connect(username=username, password=password)
    helper.log_debug("Connection to splunk API OK")
    kwargs_oneshot = {
        "earliest_time": "2000-01-01T00:00:00.000-00:00",
        "latest_time": "now",
        "count": 40
    }

    searchquery_oneshot = "|inputlookup botdc_distinct_threat_indicators | stats values(enriched) as enriched, values(feed_type) as feed_type, min(event_time) as first_time_seen, sum(count) as count by threat_indicator | where NOT (isnull(enriched) OR match(enriched,\"1\"))| sort - count"

    oneshotsearch_results = service.jobs.oneshot(searchquery_oneshot,
                                                 **kwargs_oneshot)

    helper.log_debug("Splunk search executed properly")

    # Get the results and display them using the ResultsReader
    reader = splunk_results.ResultsReader(oneshotsearch_results)
    for item in reader:
        threat_indicator = item.get('threat_indicator')
        if item.get('feed_type') == "IP Address":
            threat_indicator_type = "ip"
        elif item.get('feed_type') == "FQDN":
            threat_indicator_type = "host"

        # Do a Dossier search
        helper.log_info("Start perform an Infoblox Dossier Search for " +
                        threat_indicator)

        url = "https://platform.activetrust.net:8000/api/services/intel/lookup/indicator/"
        url = url + threat_indicator_type
        url = url + "?value=" + threat_indicator + "&wait=true"
        method = "GET"
        auth = base64.encodebytes(
            ('%s:' % (apikey)).encode('utf8')).decode('utf8').replace(
                '\n', '')

        headers = {
            'Authorization': 'Basic %s' % auth,
            'Content-Type': 'application/x-www-form-urlencoded',
            'User-Agent':
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2490.80 Safari/537.36',
            'Cache-Control': 'no-cache'
        }

        if not proxy_settings:
            response = requests.get(url,
                                    headers=headers,
                                    cookies=None,
                                    verify=True,
                                    timeout=(600, 600),
                                    stream=True)
        else:
            response = requests.get(url,
                                    headers=headers,
                                    cookies=None,
                                    verify=True,
                                    timeout=(600, 600),
                                    proxies=proxy_settings,
                                    stream=True)

        if response.encoding is None:
            response.encoding = 'utf-8'
        if response.text:
            try:
                r_json = json.loads(response.text)
            except:
                raise Exception("Unable to load into a json format")

            data = json.dumps(r_json)
            data = data.replace("\"host\":", "\"hostname\":")
            data = data.replace("\"source\":", "\"src\":")

            helper.log_debug("data: " + data)

            data = json.loads(data)

            if "results" in data.keys():
                for result in data["results"]:
                    if "params" in result.keys():
                        if "src" in result["params"].keys():
                            if "data" in result.keys():

                                if result["params"]["src"] == "atp":
                                    for threat in result["data"]["threat"]:
                                        threat[
                                            "threat_indicator"] = threat_indicator
                                        threat[
                                            "threat_indicator_type"] = threat_indicator_type
                                        result_data = json.dumps(threat)
                                        event = helper.new_event(
                                            source=result["params"]["src"],
                                            index=helper.get_output_index(),
                                            sourcetype=helper.get_sourcetype(),
                                            data=result_data)
                                        ew.write_event(event)

                                elif result["params"][
                                        "src"] == "malware_analysis":
                                    if "details" in result["data"].keys():
                                        if "detected_communicating_samples" in result[
                                                "data"]["details"].keys():
                                            for malware_analysis in result[
                                                    "data"]["details"][
                                                        "detected_communicating_samples"]:
                                                malware_analysis[
                                                    "threat_indicator"] = threat_indicator
                                                malware_analysis[
                                                    "threat_indicator_type"] = threat_indicator_type
                                                malware_analysis[
                                                    "threat_indicator_type"] = "detected_communicating_samples"
                                                result_data = json.dumps(
                                                    malware_analysis)
                                                event = helper.new_event(
                                                    source=result["params"]
                                                    ["src"],
                                                    index=helper.
                                                    get_output_index(),
                                                    sourcetype=helper.
                                                    get_sourcetype(),
                                                    data=result_data)
                                                ew.write_event(event)

                                        if "detected_downloaded_samples" in result[
                                                "data"]["details"].keys():
                                            for malware_analysis in result[
                                                    "data"]["details"][
                                                        "detected_downloaded_samples"]:
                                                malware_analysis[
                                                    "threat_indicator"] = threat_indicator
                                                malware_analysis[
                                                    "threat_indicator_type"] = threat_indicator_type
                                                malware_analysis[
                                                    "threat_indicator_type"] = "detected_downloaded_samples"
                                                result_data = json.dumps(
                                                    malware_analysis)
                                                event = helper.new_event(
                                                    source=result["params"]
                                                    ["src"],
                                                    index=helper.
                                                    get_output_index(),
                                                    sourcetype=helper.
                                                    get_sourcetype(),
                                                    data=result_data)
                                                ew.write_event(event)

                                        if "detected_urls" in result["data"][
                                                "details"].keys():
                                            for malware_analysis in result[
                                                    "data"]["details"][
                                                        "detected_urls"]:
                                                malware_analysis[
                                                    "threat_indicator"] = threat_indicator
                                                malware_analysis[
                                                    "threat_indicator_type"] = threat_indicator_type
                                                malware_analysis[
                                                    "threat_indicator_type"] = "detected_urls"
                                                result_data = json.dumps(
                                                    malware_analysis)
                                                event = helper.new_event(
                                                    source=result["params"]
                                                    ["src"],
                                                    index=helper.
                                                    get_output_index(),
                                                    sourcetype=helper.
                                                    get_sourcetype(),
                                                    data=result_data)
                                                ew.write_event(event)

                                elif result["params"]["src"] == "pdns":
                                    for pdns in result["data"]["items"]:
                                        pdns[
                                            "threat_indicator"] = threat_indicator
                                        pdns[
                                            "threat_indicator_type"] = threat_indicator_type
                                        result_data = json.dumps(pdns)
                                        event = helper.new_event(
                                            source=result["params"]["src"],
                                            index=helper.get_output_index(),
                                            sourcetype=helper.get_sourcetype(),
                                            data=result_data)
                                        ew.write_event(event)

                                else:
                                    result["data"][
                                        "threat_indicator"] = threat_indicator
                                    result["data"][
                                        "threat_indicator_type"] = threat_indicator_type
                                    result_data = json.dumps(result["data"])
                                    event = helper.new_event(
                                        source=result["params"]["src"],
                                        index=helper.get_output_index(),
                                        sourcetype=helper.get_sourcetype(),
                                        data=result_data)
                                    ew.write_event(event)

                                searchquery_oneshot_update = "|makeresults | eval threat_indicator=\"" + threat_indicator + "\" | eval enriched=1 | outputlookup botdc_distinct_threat_indicators append=true createinapp=true"
                                oneshotsearch_update_results = service.jobs.oneshot(
                                    searchquery_oneshot_update,
                                    **kwargs_oneshot)

        helper.log_info("Completed an Infoblox Dossier Search for " +
                        threat_indicator)
Example n. 36
 def get_kube_config_base64(self):
     file_name = self.fetch_config()
     with open(file_name) as f:
         text = f.read()
         return base64.encodebytes(bytes(text, 'utf-8')).decode().replace(
             '\n', '')
Example n. 37
def _create_sec_websocket_key():
    uid = uuid.uuid4()
    return base64.encodebytes(uid.bytes).strip().decode("utf-8")
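A companion sketch showing how a server would derive Sec-WebSocket-Accept from such a key (per RFC 6455: append the fixed GUID, SHA-1 hash, base64-encode the digest); it assumes it lives in the same module as the generator above:

import base64
import hashlib

_WS_GUID = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11'

def _sec_websocket_accept(key):
    digest = hashlib.sha1((key + _WS_GUID).encode('utf-8')).digest()
    return base64.encodebytes(digest).strip().decode('utf-8')

print(_sec_websocket_accept(_create_sec_websocket_key()))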
Example n. 38
def do_check(request):

    # Check if defined any Host HTTP header.
    if menu.options.host and settings.HOST_INJECTION == None:
        request.add_header(settings.HOST, menu.options.host)

    # Check if defined any User-Agent HTTP header.
    if menu.options.agent:
        request.add_header(settings.USER_AGENT, menu.options.agent)

    # Check if defined any Referer HTTP header.
    if menu.options.referer and settings.REFERER_INJECTION == None:
        request.add_header(settings.REFERER, menu.options.referer)

    # Check if defined any Cookie HTTP header.
    if menu.options.cookie and settings.COOKIE_INJECTION == False:
        request.add_header(settings.COOKIE, menu.options.cookie)

    if not checks.get_header(request.headers, settings.HTTP_ACCEPT_HEADER):
        request.add_header(settings.HTTP_ACCEPT_HEADER,
                           settings.HTTP_ACCEPT_HEADER_VALUE)

    # The MIME media type for JSON.
    if menu.options.data:
        if re.search(settings.JSON_RECOGNITION_REGEX, menu.options.data) or \
           re.search(settings.JSON_LIKE_RECOGNITION_REGEX, menu.options.data):
            request.add_header("Content-Type", "application/json")

    # Appends a fake HTTP header 'X-Forwarded-For'
    if settings.TAMPER_SCRIPTS["xforwardedfor"]:
        from src.core.tamper import xforwardedfor
        xforwardedfor.tamper(request)

    # Default value for "Accept-Encoding" HTTP header
    request.add_header('Accept-Encoding',
                       settings.HTTP_ACCEPT_ENCODING_HEADER_VALUE)

    # Check if defined any HTTP Authentication credentials.
    # HTTP Authentication: Basic / Digest Access Authentication.
    if menu.options.auth_cred and menu.options.auth_type:
        try:
            settings.SUPPORTED_HTTP_AUTH_TYPES.index(menu.options.auth_type)
            if menu.options.auth_type == "basic":
                b64_string = encodebytes(
                    menu.options.auth_cred.encode(
                        settings.UNICODE_ENCODING)).decode().replace('\n', '')
                request.add_header("Authorization", "Basic " + b64_string + "")
            elif menu.options.auth_type == "digest":
                try:
                    url = menu.options.url
                    try:
                        response = _urllib.request.urlopen(
                            url, timeout=settings.TIMEOUT)
                    except _urllib.error.HTTPError as e:
                        try:
                            authline = e.headers.get('www-authenticate', '')
                            authobj = re.match(r'''(\w*)\s+realm=(.*),''',
                                               authline).groups()
                            realm = authobj[1].split(',')[0].replace("\"", "")
                            user_pass_pair = menu.options.auth_cred.split(":")
                            username = user_pass_pair[0]
                            password = user_pass_pair[1]
                            authhandler = _urllib.request.HTTPDigestAuthHandler(
                            )
                            authhandler.add_password(realm, url, username,
                                                     password)
                            opener = _urllib.request.build_opener(authhandler)
                            _urllib.request.install_opener(opener)
                            result = _urllib.request.urlopen(
                                url, timeout=settings.TIMEOUT)
                        except AttributeError:
                            pass
                except _urllib.error.HTTPError as e:
                    pass
        except ValueError:
            err_msg = "Unsupported / Invalid HTTP authentication type '" + menu.options.auth_type + "'."
            err_msg += " Try basic or digest HTTP authentication type."
            print(settings.print_critical_msg(err_msg))
            raise SystemExit()
    else:
        pass

    # Check if defined any extra HTTP headers.
    if menu.options.headers or menu.options.header or len(
            settings.RAW_HTTP_HEADERS) >= 1:
        if len(settings.RAW_HTTP_HEADERS) >= 1:
            menu.options.headers = settings.RAW_HTTP_HEADERS
        # Do replacement with the 'INJECT_HERE' tag, if the wildcard char is provided.
        if menu.options.headers:
            menu.options.headers = checks.wildcard_character(
                menu.options.headers)
            extra_headers = menu.options.headers
        else:
            menu.options.header = checks.wildcard_character(
                menu.options.header)
            extra_headers = menu.options.header

        extra_headers = extra_headers.replace(":", ": ")
        if ": //" in extra_headers:
            extra_headers = extra_headers.replace(": //", "://")

        if "\\n" in extra_headers:
            extra_headers = extra_headers.split("\\n")
            # Remove empty strings and "Content-Length"
            extra_headers = [
                x for x in extra_headers if "Content-Length" not in x
            ]
        else:
            tmp_extra_header = []
            tmp_extra_header.append(extra_headers)
            extra_headers = tmp_extra_header

        # Remove empty strings
        extra_headers = [x for x in extra_headers if x]

        for extra_header in extra_headers:
            try:
                # Extra HTTP Header name
                http_header_name = extra_header.split(':', 1)[0]
                http_header_name = ''.join(http_header_name).strip()
                # Extra HTTP Header value
                http_header_value = extra_header.split(':', 1)[1]
                http_header_value = ''.join(http_header_value).strip().replace(
                    ": ", ":")
                # Check if it is a custom header injection.
                if settings.CUSTOM_HEADER_INJECTION == False and \
                   settings.INJECT_TAG in http_header_value:
                    settings.CUSTOM_HEADER_INJECTION = True
                    settings.CUSTOM_HEADER_NAME = http_header_name
                # Add HTTP Header name / value to the HTTP request
                if http_header_name not in [
                        settings.HOST, settings.USER_AGENT, settings.REFERER,
                        settings.COOKIE
                ]:
                    request.add_header(
                        http_header_name.encode(settings.UNICODE_ENCODING),
                        http_header_value.encode(settings.UNICODE_ENCODING))
            except:
                pass


# eof
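
# Usage sketch (not from the commix source above; credentials are placeholders):
# the Basic branch boils down to base64-encoding "user:pass" and stripping the
# newlines that base64.encodebytes() appends before the value is placed in the
# Authorization header.
import base64

def basic_auth_header(credentials="user:pass"):
    b64_string = base64.encodebytes(credentials.encode("utf-8")).decode().replace('\n', '')
    return "Basic " + b64_string

# basic_auth_header("admin:secret") == "Basic YWRtaW46c2VjcmV0"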
Esempio n. 39
0
    def on_task_output(self, task, config):
        """Add torrents to deluge at exit."""
        config = self.prepare_config(config)
        client = self.setup_client(config)
        # don't add when learning
        if task.options.learn:
            return
        if not config['enabled'] or not (task.accepted or task.options.test):
            return

        try:
            client.connect()
        except ConnectionError as exc:
            raise plugin.PluginError(
                f'Error connecting to deluge daemon: {exc}', logger=logger
            ) from exc

        if task.options.test:
            logger.debug('Test connection to deluge daemon successful.')
            client.disconnect()
            return

        # loop through entries to get a list of labels to add
        labels = set()
        for entry in task.accepted:
            label = entry.get('label') or config.get('label')
            if label and label.lower() != 'no label':
                try:
                    label = self._format_label(entry.render(label))
                    logger.debug('Rendered label: {}', label)
                except RenderError as e:
                    logger.error('Error rendering label `{}`: {}', label, e)
                    continue
                labels.add(label)
        if labels:
            # Make sure the label plugin is available and enabled, then add appropriate labels

            enabled_plugins = client.call('core.get_enabled_plugins')
            label_enabled = 'Label' in enabled_plugins
            if not label_enabled:
                available_plugins = client.call('core.get_available_plugins')
                if 'Label' in available_plugins:
                    logger.debug('Enabling label plugin in deluge')
                    label_enabled = client.call('core.enable_plugin', 'Label')
                else:
                    logger.error('Label plugin is not installed in deluge')

            if label_enabled:
                d_labels = client.call('label.get_labels')
                for label in labels:
                    if label not in d_labels:
                        logger.debug('Adding the label `{}` to deluge', label)
                        client.call('label.add', label)

        # add the torrents
        torrent_ids = client.call('core.get_session_state')
        for entry in task.accepted:
            # Generate deluge options dict for torrent add
            add_opts = {}
            try:
                path = entry.render(entry.get('path') or config['path'])
                if path:
                    add_opts['download_location'] = pathscrub(os.path.expanduser(path))
            except RenderError as e:
                logger.error('Could not set path for {}: {}', entry['title'], e)
            for fopt, dopt in self.options.items():
                value = entry.get(fopt, config.get(fopt))
                if value is not None:
                    add_opts[dopt] = value
                    if fopt == 'ratio':
                        add_opts['stop_at_ratio'] = True
            # Make another set of options, that get set after the torrent has been added
            modify_opts = {
                'queue_to_top': entry.get('queue_to_top', config.get('queue_to_top')),
                'main_file_only': entry.get('main_file_only', config.get('main_file_only', False)),
                'main_file_ratio': entry.get('main_file_ratio', config.get('main_file_ratio')),
                'hide_sparse_files': entry.get(
                    'hide_sparse_files', config.get('hide_sparse_files', True)
                ),
                'keep_subs': entry.get('keep_subs', config.get('keep_subs', True)),
                'container_directory': config.get('container_directory', ''),
                'force_recheck': entry.get('force_recheck', config.get('force_recheck')),
            }
            try:
                label = entry.render(entry.get('label') or config['label'])
                modify_opts['label'] = self._format_label(label)
            except RenderError as e:
                logger.error('Error setting label for `{}`: {}', entry['title'], e)
            try:
                move_completed_path = entry.render(
                    entry.get('move_completed_path') or config['move_completed_path']
                )
                modify_opts['move_completed_path'] = pathscrub(
                    os.path.expanduser(move_completed_path)
                )
            except RenderError as e:
                logger.error('Error setting move_completed_path for {}: {}', entry['title'], e)
            try:
                content_filename = entry.get('content_filename') or config.get(
                    'content_filename', ''
                )
                modify_opts['content_filename'] = pathscrub(entry.render(content_filename))
            except RenderError as e:
                logger.error('Error setting content_filename for {}: {}', entry['title'], e)

            torrent_id = entry.get('deluge_id') or entry.get('torrent_info_hash')
            torrent_id = torrent_id and torrent_id.lower()
            if torrent_id in torrent_ids:
                logger.info('{} is already loaded in deluge, setting options', entry['title'])
                # Entry has a deluge id, verify the torrent is still in the deluge session and apply options
                # Since this is already loaded in deluge, we may also need to change the path
                modify_opts['path'] = add_opts.pop('download_location', None)
                client.call('core.set_torrent_options', [torrent_id], add_opts)
                self._set_torrent_options(client, torrent_id, entry, modify_opts)
            elif config['action'] != 'add':
                logger.warning(
                    'Cannot {} {}, because it is not loaded in deluge.',
                    config['action'],
                    entry['title'],
                )
                continue
            else:
                magnet, filedump = None, None
                if entry.get('url', '').startswith('magnet:'):
                    magnet = entry['url']
                else:
                    if not os.path.exists(entry['file']):
                        entry.fail('Downloaded temp file \'%s\' doesn\'t exist!' % entry['file'])
                        del entry['file']
                        return
                    with open(entry['file'], 'rb') as f:
                        filedump = base64.encodebytes(f.read())

                logger.verbose('Adding {} to deluge.', entry['title'])
                added_torrent = None
                if magnet:
                    try:
                        added_torrent = client.call('core.add_torrent_magnet', magnet, add_opts)
                    except Exception as exc:
                        logger.error('{} was not added to deluge! {}', entry['title'], exc)
                        logger.opt(exception=True).debug('Error adding magnet:')
                        entry.fail('Could not be added to deluge')
                    else:
                        if config.get('magnetization_timeout'):
                            timeout = config['magnetization_timeout']
                            logger.verbose(
                                'Waiting {} seconds for "{}" to magnetize', timeout, entry['title']
                            )
                            for _ in range(timeout):
                                time.sleep(1)
                                try:
                                    status = client.call(
                                        'core.get_torrent_status', added_torrent, ['files']
                                    )
                                except Exception as err:
                                    logger.error('wait_for_metadata Error: {}', err)
                                    break
                                if status.get('files'):
                                    logger.info('"{}" magnetization successful', entry['title'])
                                    break
                            else:
                                logger.warning(
                                    '"{}" did not magnetize before the timeout elapsed, '
                                    'file list unavailable for processing.',
                                    entry['title'],
                                )
                else:
                    try:
                        added_torrent = client.call(
                            'core.add_torrent_file', entry['title'], filedump, add_opts
                        )
                    except Exception as e:
                        logger.error('{} was not added to deluge! {}', entry['title'], e)
                        entry.fail('Could not be added to deluge')
                if not added_torrent:
                    logger.error('There was an error adding {} to deluge.', entry['title'])
                else:
                    logger.info('{} successfully added to deluge.', entry['title'])
                    self._set_torrent_options(client, added_torrent, entry, modify_opts)
            if config['action'] in ('remove', 'purge'):
                client.call('core.remove_torrent', torrent_id, config['action'] == 'purge')
                logger.info('{} removed from deluge.', entry['title'])
            elif config['action'] == 'pause':
                client.call('core.pause_torrent', [torrent_id])
                logger.info('{} has been paused in deluge.', entry['title'])
            elif config['action'] == 'resume':
                client.call('core.resume_torrent', [torrent_id])
                logger.info('{} has been resumed in deluge.', entry['title'])

        client.disconnect()
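
# Usage sketch (the path is illustrative): deluge's core.add_torrent_file call
# above receives the .torrent contents base64-encoded, so preparing a filedump
# by hand looks like this.
import base64

def read_torrent_filedump(path='example.torrent'):
    # Mirror the plugin: read the raw .torrent bytes and base64-encode them.
    with open(path, 'rb') as f:
        return base64.encodebytes(f.read())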
Esempio n. 40
0
sorted_contests = []

for c in contests:
    sorted_contests.append(c[0])

contest_id = max(sorted_contests)

decfilename = "/var/rc24/File-Maker/Channels/Check_Mii_Out_Channel/decfiles/contests/{}/con_detail1.dec".format(
    str(contest_id))

if path.exists(decfilename):
    boundary2 = "----=_CMOC_Contest_Icon"

    with open(decfilename, "rb") as f:
        contest_icon = str(
            encodebytes(f.read()).replace(b"\n", b"\r\n").decode("utf-8"))

    message += (
        boundary + "\r\n" + "Content-Type: text/plain\r\n\r\n" +
        "Date: {}\r\n".format(date) + "From: [email protected]\r\n" +
        "To: [email protected]\r\n" +
        "Message-ID: <[email protected]@rc24.xyz>\r\n"
        + "Subject: \r\n" + "MIME-Version: 1.0\r\n" +
        'Content-Type: multipart/mixed; boundary="{}"\r\n'.format(boundary2) +
        "Content-Transfer-Encoding: base64\r\n" +
        "X-Wii-AppID: 3-48415041-3031\r\n" + "X-Wii-Tag: 00000001\r\n" +
        "X-Wii-Cmd: 00080001\r\n\r\n" + "--" + boundary2 + "\r\n" +
        "Content-Type: application/octet-stream;\r\n" +
        " name=storage.bin\r\n" + "Content-Transfer-Encoding: base64\r\n" +
        "Content-Disposition: attachment;\r\n" +
        " filename=storage.bin\r\n\r\n" + contest_icon + "\r\n\r\n" + "--" +
Esempio n. 41
0
def enc(text, description):
    return str(
        encodebytes(text.format(description).encode("utf-16be")).replace(
            b"\n", b"\r\n").decode("utf-8"))
Esempio n. 42
0
def get_image(image_path):
    img = Image.open(image_path, mode='r')
    img_byte_arr = BytesIO()
    img.save(img_byte_arr, format='PNG')
    encoded_img = base64.encodebytes(img_byte_arr.getvalue()).decode('ascii')
    return encoded_img
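
# Usage sketch (assumes Pillow is installed and the path exists): the ASCII
# string returned by get_image() can be embedded in JSON or, with the newlines
# stripped, in a data URI.
encoded = get_image('example.png')
data_uri = 'data:image/png;base64,' + encoded.replace('\n', '')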
Esempio n. 43
0
if (os.path.isfile(pass_file)):
    wordlist = open(pass_file, encoding='latin-1')
else:
    print("Error:\n\t Wordlist not found!\n")
    exit(1)

# Login Page URL
ip_address = "http://172.26.0.2:80"
port = 80
words = wordlist.readlines()

for user in ["admin"]:
    for passwd in words:
        user_pass = "******" % (user, passwd.strip())

        base64_value = base64.encodebytes(user_pass.encode()).split()[0]
        base64_value = base64_value.decode()
        hdr = {'Authorization': "Basic %s" % base64_value}
        print(hdr)
        try:
            res = requests.get(ip_address, headers=hdr)

        except:
            print("No such URL")
            exit(1)
        if res.status_code == 200:
            print("%s CRACKED: " % res.status_code + user + ":" + passwd)
            exit(0)
        elif res.status_code == 401:
            print("FAILED %s: %s:%s" % (res.status_code, user, passwd))
        else:
Esempio n. 44
0
 def encode_audio(self, file_path):
     # Read the audio file in binary mode and return its base64-encoded bytes.
     with open(file_path, 'rb') as f:
         return base64.encodebytes(f.read())
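
# Usage sketch (class and file names are illustrative): the returned bytes can
# be decoded to ASCII before being placed in a JSON payload, e.g.
# payload = {'audio': client.encode_audio('clip.wav').decode('ascii')}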
Esempio n. 45
0
def write_secret(filename, secret):
    encoded_secret = base64.encodebytes(bytes(secret, encoding='utf-8'))
    with open(filename, 'wb') as new_secret:
        new_secret.write(encoded_secret)
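
# Companion sketch (not part of the original source): reading the secret back
# simply reverses the encoding with base64.decodebytes.
import base64

def read_secret(filename):
    with open(filename, 'rb') as stored_secret:
        return base64.decodebytes(stored_secret.read()).decode('utf-8')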
Esempio n. 46
0
 def dump_bytes(self, value, write):
     write("<value><base64>\n")
     encoded = base64.encodebytes(value)
     write(encoded.decode('ascii'))
     write("</base64></value>\n")
Esempio n. 47
0
def csp(request):
    return {'csp_nonce': base64.encodebytes(os.urandom(32)).decode().rstrip()}
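
# Usage sketch (module path is hypothetical): csp() has the shape of a Django
# context processor, so it would be registered under the template engine's
# context_processors and used as <script nonce="{{ csp_nonce }}"> in templates.
# TEMPLATES[0]['OPTIONS']['context_processors'].append('myproject.context_processors.csp')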
Esempio n. 48
0
 def genePwd(pwd, salt):
     m = hashlib.md5()
     raw = "%s-%s" % (base64.encodebytes(pwd.encode("utf-8")), salt)
     m.update(raw.encode("utf-8"))
     return m.hexdigest()
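
# Note (observation, not from the original code): encodebytes() returns bytes,
# so the "%s" interpolation embeds the bytes repr (including the b'...' prefix
# and a trailing "\n") in the hashed string, e.g.
# genePwd("password", "salt") hashes the string "b'cGFzc3dvcmQ=\n'-salt".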
Esempio n. 49
0
 def encode(self, input, final=False):
     assert self.errors == 'strict'
     return base64.encodebytes(input)
Esempio n. 50
0
def btoa(str):
    # timestamp = str(int(time.time()))
    # a = '8808'
    b = base64.encodebytes(str.encode("utf8"))
    tokens = b.decode("utf8").strip()
    return tokens
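
# Usage sketch: for ASCII input this roughly mirrors JavaScript's btoa.
# btoa("hello") == 'aGVsbG8='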
Esempio n. 51
0
 def encode(self, out):
     out.write("<value><base64>\n")
     encoded = base64.encodebytes(self.data)
     out.write(encoded.decode('ascii'))
     out.write("</base64></value>\n")
Esempio n. 52
0
print('NEW_keydatabytes: ' + privatekeypaket)

newCheckSum = printDSAKeyPaket(keydatabytes)

keydatabytes[len(keydatabytes) - 1] = newCheckSum.to_bytes(2,
                                                           byteorder='big')[1]
keydatabytes[len(keydatabytes) - 2] = newCheckSum.to_bytes(2,
                                                           byteorder='big')[0]

printDSAKeyPaket(keydatabytes)

print('---')
print(binascii.hexlify(keydatabytes))
print('---')
print(binascii.hexlify(keypackets[0].data))
print('---')
print(binascii.hexlify(keydata.data))
keydata.data = keydata.data.replace(keypackets[0].data, keydatabytes)
print('---')
print(binascii.hexlify(keydata.data))

print(b64encode(keydata.data))
print(standard_b64encode(keydata.data))
print(encodebytes(keydata.data))

# writes manipulated pakets to file
writePrivateKeyASC("bob-dsa-private-keyasc-AUTOgenerated.sec", keydata.data)

print("done")
Esempio n. 53
0
    def __init__(self,w):

#Create the frames to pack individual frames inside the GUI for better organization
        self.myTOPFrame = Frame(w)
        self.myTOPFrame.pack(side=TOP)
        self.myLeftFrame = Frame(w)
        self.myLeftFrame.pack(side=LEFT)

#Create frames within the main frames above for organization
        self.left0 = Frame(self.myLeftFrame)
        self.left0.pack()
        self.left1 = Frame(self.myLeftFrame)
        self.left1.pack()
        self.left2 = Frame(self.myLeftFrame)
        self.left2.pack()
        self.left3 = Frame(self.myLeftFrame)
        self.left3.pack()

#Get GT logo, no real value, just thought it looked nice
        url = "http://w4aql.gtorg.gatech.edu/images/buzzzap.gif"
        response = urllib.request.urlopen(url)
        myPicture = response.read()
        import base64
        b64_data = base64.encodebytes(myPicture)
        self.mainImage = PhotoImage(data=b64_data)
#These values can be used throughout the code because they are global
        self.totalOrders = 0
        self.kValue = .999
        self.switch = True
        self.iteration = 0
#Image
        self.photoLabel = Label(self.myTOPFrame, image=self.mainImage)
        self.photoLabel.grid(row=0, column=0)
#Load Buttons
        self.loadFirstInputCSVFile = Button(self.left1, width=78, text="Load Weekly Demand", command=self.loadFirstCSVclicked).grid(row=0, column=0, sticky=E+W)
        self.loadSecondInputCSVFile = Button(self.left1, width=78, text="Load Master Data", command=self.loadSecondCSVclicked).grid(row=1, column=0, sticky=E+W)

#Week Number Variable and Week Number Entry
        self.entryWeekNumber = StringVar()
        self.weekNumber = Label(self.left2, text="Week Number:").grid(row=0, column=0, sticky=E)
        self.weekNumber = Entry(self.left2, width=60, state=NORMAL, text=self.entryWeekNumber).grid(row=0, column=1, sticky=W)

#Allocation Variable and Allocation Entry
        self.entryAllocationAmount = StringVar()
        self.allocationAmount = Label(self.left2, text="Allocation Amount:").grid(row=1, column=0, sticky=E)
        self.allocationAmount = Entry(self.left2, width=60, state=NORMAL, text=self.entryAllocationAmount).grid(row=1, column=1, sticky=W)

#I thought it would be beneficial to show the file location
        self.inputFirstCSVFile = Label(self.left2, text="Weekly Demand File Path:").grid(row=2, column=0, sticky=E)
        self.inputFirstCSVFileEntry = Entry(self.left2, width=60, state="readonly")
        self.inputFirstCSVFileEntry.grid(row=2, column=1)

#File location
        self.inputSecondCSVFile = Label(self.left2, text="Master Data File Path:").grid(row=3, column=0, sticky=E)
        self.inputSecondCSVFileEntry = Entry(self.left2, width=60, state="readonly")
        self.inputSecondCSVFileEntry.grid(row=3, column=1)

#Output file location, needs to be saved as file.csv
        self.outputCSVFile = Label(self.left2, text="DV Model File Path:").grid(row=5, column=0, sticky=E)
        self.outputCSVFileEntry = Entry(self.left2, width=60, state="readonly")
        self.outputCSVFileEntry.grid(row=5, column=1)


        self.outputCSVFile1 = Label(self.left2, text="DOS Model File Path:").grid(row=6, column=0, sticky=E)
        self.outputCSVFileEntry1 = Entry(self.left2, width=60, state="readonly")
        self.outputCSVFileEntry1.grid(row=6, column=1)
#After all the data has been entered, click process button
        self.process_Data = Button(self.left3, text="Process Data", width=78, state="disabled", command=self.processDataButtonClicked)
        self.process_Data.grid(row=0, column=0, sticky=E+W)


        self.convertData = 0
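
# Minimal sketch of the image-loading pattern used above (the URL is
# illustrative): tkinter's PhotoImage accepts base64-encoded GIF data via its
# data argument.
import base64
import urllib.request
from tkinter import Tk, Label, PhotoImage

root = Tk()
raw = urllib.request.urlopen("http://example.com/logo.gif").read()
logo = PhotoImage(data=base64.encodebytes(raw))
Label(root, image=logo).pack()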
Esempio n. 54
0
def base64_encode(input, errors='strict'):
    assert errors == 'strict'
    return (base64.encodebytes(input), len(input))
Esempio n. 55
0
def get_url(address, user, password):
    request = Request(address)
    base64string = base64.encodebytes(ensure_binary('%s:%s' % (user, password))).replace(b'\n', b'')
    request.add_header("Authorization", "Basic %s" % ensure_str(base64string))
    return urlopen(request)
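
# Usage sketch (host and credentials are placeholders): get_url() returns the
# opened response, so the body can be read directly.
# body = get_url('http://192.168.0.1/status', 'admin', 'secret').read()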
Esempio n. 56
0
def format_basicAuth(client_id, client_secret):
    return (base64.encodebytes(('%s:%s' % (client_id,client_secret)).encode('utf-8')).decode().strip()).replace('\n', '')
Esempio n. 57
0
 def _binary_value_prep(value: bytes) -> str:
     return base64.encodebytes(value).decode()
Esempio n. 58
0
 def get_cmd(self):
     cmd = LineBreakpoint.get_cmd(self)
     cmd += " -- " + base64.encodebytes(
         self.condition.encode("UTF-8")).decode("UTF-8")
     return cmd
Esempio n. 59
0
def predict_card():
    print("predict_card")
    if request.method == 'POST':
        print("predict_card")
        try:
            print("predict_card")
            try:
                file = request.files['file']

                image_file = file.read()
                image = cv2.imdecode(np.frombuffer(image_file, dtype=np.uint8),
                                     -1)
            except Exception as e:
                print(e)
                logger.error(str(e))
                logger.error(str(traceback.print_exc()))
                return_result = {'code': '1001', 'status': rcode.code_1001}

            now = datetime.now()
            date_time = now.strftime("%m_%d_%Y_%H_%M_%S")
            image_path = os.path.join(UPLOAD_FOLDER, date_time + '.jpg')
            print("image_path: ", image_path)
            cv2.imwrite(image_path, image)

            url = 'http://service.aiclub.cs.uit.edu.vn/gpu150/paddle_ocr/predict'

            is_success, buffer = cv2.imencode('.png', image)
            f = io.BytesIO(buffer)
            image_encoded = base64.encodebytes(f.getvalue()).decode('utf-8')
            ####################################
            start_time = time.time()
            data = {"images": [image_encoded]}
            headers = {'Content-type': 'application/json'}
            data_json = json.dumps(data)
            response = requests.post(url, data=data_json, headers=headers)
            response = response.json()

            data = response['data']
            predict = data['predict'][0]

            # image = cv2.imread(image_path)

            # image = cv2.resize(image, (500, 500))
            image, list_info_bankcard, list_flag_bbox = draw_bankcard(
                image, predict)
            bank, name, type_card, valid_from, good_thru, number = get_info_card(
                image, list_info_bankcard, list_flag_bbox)

            image_path_recog = os.path.join(RECOG_FOLDER, date_time + '.jpg')
            cv2.imwrite(image_path_recog, image)

            print("bank: ", bank)
            print("name: ", name)
            print("number: ", number)
            print("type_card: ", type_card)
            print("valid_from: ", valid_from)
            print("good_thru: ", good_thru)

            return_result = {
                'recog_path': image_path_recog,
                'path': image_path,
                'bank': bank,
                'name': name,
                'type_card': type_card,
                'valid_from': valid_from,
                'good_thru': good_thru,
                'number': number
            }

            with open(os.path.join(RESULT_FOLDER, date_time + '.json'),
                      'w') as f:
                json.dump(return_result, f)

        except Exception as e:
            print("ERROR: ", e)
            logger.error(str(e))
            logger.error(str(traceback.print_exc()))
            return_result = {'code': '1009', 'status': rcode.code_1001}

        finally:
            return jsonify(return_result)
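
# Minimal sketch of the encode-and-POST step used above (the image path is
# illustrative; the OCR endpoint is the one the route already calls):
import base64
import io
import json

import cv2
import requests

img = cv2.imread('card.jpg')
ok, buf = cv2.imencode('.png', img)  # imencode returns (success_flag, byte_buffer)
image_encoded = base64.encodebytes(io.BytesIO(buf).getvalue()).decode('utf-8')
resp = requests.post('http://service.aiclub.cs.uit.edu.vn/gpu150/paddle_ocr/predict',
                     data=json.dumps({"images": [image_encoded]}),
                     headers={'Content-type': 'application/json'})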
Esempio n. 60
0
    def _l10n_it_post_invoices_step_1(self, invoices):
        ''' Send the invoices to the proxy.
        '''
        to_return = {}

        to_send = {}
        for invoice in invoices:
            xml = b"<?xml version='1.0' encoding='UTF-8'?>" + invoice._export_as_xml(
            )
            filename = self._l10n_it_edi_generate_electronic_invoice_filename(
                invoice)
            attachment = self.env['ir.attachment'].create({
                'name':
                filename,
                'res_id':
                invoice.id,
                'res_model':
                invoice._name,
                'datas':
                base64.encodebytes(xml),
                'description':
                _('Italian invoice: %s', invoice.move_type),
                'type':
                'binary',
            })
            invoice.l10n_it_edi_attachment_id = attachment

            if invoice._is_commercial_partner_pa():
                invoice.message_post(body=(_(
                    "Invoices for PA are not managed by Odoo, you can download the document and send it on your own."
                )))
                to_return[invoice] = {'attachment': attachment}
            else:
                to_send[filename] = {
                    'invoice': invoice,
                    'data': {
                        'filename': filename,
                        'xml': base64.b64encode(xml).decode()
                    }
                }

        company = invoices.company_id
        proxy_user = self._get_proxy_user(company)
        if not proxy_user:  # proxy user should exist, because there is a check in _check_move_configuration
            return {
                invoice: {
                    'error': _("You must accept the terms and conditions in the settings to use FatturaPA."),
                    'blocking_level': 'error',
                }
                for invoice in invoices
            }

        if proxy_user._get_demo_state() == 'demo':
            # Demo mode performs no real upload; fake one transaction id per file.
            responses = {filename: {'id_transaction': 'demo'} for filename in to_send}
        else:
            try:
                responses = self._l10n_it_edi_upload(
                    [i['data'] for i in to_send.values()], proxy_user)
            except AccountEdiProxyError as e:
                return {
                    invoice: {
                        'error': e.message,
                        'blocking_level': 'error'
                    }
                    for invoice in invoices
                }

        for filename, response in responses.items():
            invoice = to_send[filename]['invoice']
            to_return[invoice] = response
            if 'id_transaction' in response:
                invoice.l10n_it_edi_transaction = response['id_transaction']
                to_return[invoice].update({
                    'error': _('The invoice was successfully transmitted to the Public Administration and we are waiting for confirmation.'),
                    'blocking_level': 'info',
                })
        return to_return
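
# Note (sketch, not Odoo source): ir.attachment stores its payload base64-encoded
# in 'datas', hence encodebytes() before create(), while the proxy payload uses
# the single-line b64encode() form.
import base64

xml = b"<?xml version='1.0' encoding='UTF-8'?><FatturaElettronica/>"  # placeholder payload
datas_value = base64.encodebytes(xml)         # multi-line base64, as stored above
proxy_value = base64.b64encode(xml).decode()  # single-line base64 sent to the proxy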