Example #1
0
 def on_Btn_cups_clicked(self, widget, ip, usuario, password, puerto, notebook, spinner, estado):
     """Open the remote host's CUPS admin UI in a new notebook tab.

     Disables CUPS authentication on the remote host, forwards a random
     local port to the remote CUPS port (631) and embeds the web UI in a
     WebKit view inside *notebook*.  Shows a warning dialog on any failure.
     """
     ssh_path = os.environ['HOME'] + '/.ssh/id_rsa'
     spinner.start()
     estado.set_text("Creando tunel...")
     puerto = int(puerto)
     try:
         # Strip the auth directives from cupsd.conf so the embedded browser
         # can reach the admin pages without credentials, then reload CUPS.
         with settings(host_string=ip, port=puerto, password=password, user=usuario):
             sudo('sed -i."old" "/Require user @SYSTEM/d" /etc/cups/cupsd.conf;sed -i."old2" "/AuthType Default/d" /etc/cups/cupsd.conf;/etc/init.d/cups reload')
         server = SSHTunnelForwarder((ip, puerto), ssh_username=usuario, ssh_private_key=ssh_path, remote_bind_address=('127.0.0.1', 631))
         server.start()
         puerto_local = str(server.local_bind_port)
         scrolledwindow = Gtk.ScrolledWindow()
         scrolledwindow.set_hexpand(True)
         scrolledwindow.set_vexpand(True)
         page = WebKit.WebView()
         page.set_border_width(10)
         page.open("http://127.0.0.1:" + puerto_local)
         scrolledwindow.add(page)
         tab_label = tablabel.TabLabel("CUPS " + ip, Gtk.Image.new_from_file("/usr/share/grx/icons/cups32.png"))
         tab_label.connect("close-clicked", tablabel.on_close_clicked, notebook, page)
         notebook.append_page(scrolledwindow, tab_label)
         self.show_all()
     except Exception:
         # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
         self.mensaje("No se ha podido ejecutar cups", "Atencion", atencion)
     finally:
         # Always reset the busy indicators, success or not.
         spinner.stop()
         estado.set_text("")
Example #2
0
class SSHTunnel(object):
    """Context-manager wrapper around ``sshtunnel.SSHTunnelForwarder``.

    The forwarder is constructed in ``__init__`` but only opened by
    ``connect()`` (or by entering the context); callers must call
    ``close()`` / exit the context when finished.
    """

    class TunnelException(Exception):
        """Raised when the underlying forwarder cannot be constructed."""
        pass

    def __init__(self, host, username, key_file, remote_port, host_port=nat_ssh_port_forwarding):
        """
        Build (but do not start) a forwarder from a local port to
        ``remote_port`` on the SSH host's loopback interface.

        :raises TunnelException: if the forwarder cannot be constructed.
        """
        # Quieten sshtunnel's own logging; only errors are interesting here.
        logger = logging.getLogger('sshtunnel')
        logger.setLevel(logging.ERROR)

        try:
            self._server = SSHTunnelForwarder((host, host_port),
                    ssh_username=username, ssh_private_key=key_file,
                    remote_bind_address=('127.0.0.1', remote_port), logger=logger)
        except sshtunnel.BaseSSHTunnelForwarderError as e:
            # Wrap the library error in this class's exception type.
            raise self.TunnelException(e)

    def connect(self):
        """Open the tunnel and record the chosen local port."""
        self._server.start()
        self.local_port = self._server.local_bind_port

    def close(self):
        """Tear the tunnel down."""
        self._server.stop()

    def __enter__(self):
        self.connect()
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Parameter renamed from ``type`` to avoid shadowing the builtin.
        self.close()
Example #3
0
    def create_ssh_tunnel(self, tunnel_password):
        """
        Create an SSH tunnel to the database host and record the local bind
        port returned by SSHTunnelForwarder so callers can connect locally.
        :param tunnel_password: encrypted tunnel password (may be None or '')
        :return: (True, None) on success, otherwise (False, error message)
        """
        # Fetch Logged in User Details.
        user = User.query.filter_by(id=current_user.id).first()
        if user is None:
            return False, gettext("Unauthorized request.")

        if tunnel_password is not None and tunnel_password != '':
            try:
                # The stored password is encrypted with the user's own
                # password as the key.
                tunnel_password = decrypt(tunnel_password, user.password)
                # Handling of non ascii password (Python2)
                if hasattr(str, 'decode'):
                    tunnel_password = \
                        tunnel_password.decode('utf-8').encode('utf-8')
                # password is in bytes, for python3 we need it in string
                elif isinstance(tunnel_password, bytes):
                    tunnel_password = tunnel_password.decode()

            except Exception as e:
                current_app.logger.exception(e)
                return False, "Failed to decrypt the SSH tunnel " \
                              "password.\nError: {0}".format(str(e))

        try:
            # If authentication method is 1 then it uses identity file
            # and password
            if self.tunnel_authentication == 1:
                self.tunnel_object = SSHTunnelForwarder(
                    (self.tunnel_host, int(self.tunnel_port)),
                    ssh_username=self.tunnel_username,
                    ssh_pkey=get_complete_file_path(self.tunnel_identity_file),
                    ssh_private_key_password=tunnel_password,
                    remote_bind_address=(self.host, self.port)
                )
            else:
                # Otherwise authenticate with the password alone.
                self.tunnel_object = SSHTunnelForwarder(
                    (self.tunnel_host, int(self.tunnel_port)),
                    ssh_username=self.tunnel_username,
                    ssh_password=tunnel_password,
                    remote_bind_address=(self.host, self.port)
                )

            self.tunnel_object.start()
            self.tunnel_created = True
        except BaseSSHTunnelForwarderError as e:
            current_app.logger.exception(e)
            return False, "Failed to create the SSH tunnel." \
                          "\nError: {0}".format(str(e))

        # Update the port to communicate locally
        self.local_bind_port = self.tunnel_object.local_bind_port

        return True, None
Example #4
0
def sshtunnel():
    """Open an SSH tunnel using the Flask app's SSH_* settings and return
    the local port it is bound to.

    NOTE(review): the forwarder object is not kept anywhere, so the tunnel
    can never be stopped explicitly — confirm this is intended.
    """
    cfg = current_app.config
    forwarder = SSHTunnelForwarder(
        (cfg['SSH_HOST'], cfg['SSH_PORT']),
        ssh_username=cfg['SSH_USER'],
        ssh_password=cfg['SSH_PASSWORD'],
        remote_bind_address=(cfg['SSH_REMOTE_HOST'], cfg['SSH_REMOTE_PORT']),
    )
    forwarder.start()
    return forwarder.local_bind_port
class TunelSSH():
    """Thin wrapper around ``SSHTunnelForwarder`` with start/stop helpers."""

    def __init__(self, ssh_address, ssh_port, ssh_username, ssh_password, remote_bind_address, remote_bind_port):
        # Build the forwarder up front; it is not started until Iniciar().
        self.server = SSHTunnelForwarder(
            ssh_address=(ssh_address, ssh_port),
            ssh_username=ssh_username,
            ssh_password=ssh_password,
            remote_bind_address=(remote_bind_address, remote_bind_port),
        )

    def Iniciar(self):
        """Start the tunnel and return the local bind port."""
        self.server.start()
        return self.server.local_bind_port

    def Cerrar(self):
        """Stop the tunnel."""
        self.server.stop()
Example #6
0
    def on_Btn_vncviewer_clicked(self, widget, ip, usuario, puerto, password, clave_remoto, spinner, estado):
        """Launch a VNC viewer against the remote host through an SSH tunnel.

        Forwards a random local port to the remote VNC port (5900) and spawns
        ``vncviewer`` in the background, feeding it the remote VNC password.
        Shows a warning dialog on any failure.
        """
        spinner.start()
        estado.set_text("Conectando con el equipo")
        ssh_path = os.environ['HOME'] + '/.ssh/id_rsa'

        try:
            puerto = int(puerto)
            server = SSHTunnelForwarder((ip, puerto), ssh_username=usuario, ssh_private_key=ssh_path, remote_bind_address=(ip, 5900))
            server.start()
            puerto_local = str(server.local_bind_port)
            # -autopass reads the password piped in on stdin; the trailing
            # '&' leaves the viewer running in the background.
            msg = local('echo "' + clave_remoto + '" | vncviewer -autopass -compresslevel 9 -bgr233 127.0.0.1:' + puerto_local + ' &')
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit propagate.
            self.mensaje("No se ha podido ejecutar 'vncviewer' en el equipo remoto", "Atencion", atencion)
        finally:
            # Always reset the busy indicators, success or not.
            spinner.stop()
            estado.set_text("")
Example #7
0
def dal_connect():
    """Open an SSH tunnel to the MySQL host and return a DAL connection
    routed through it.  The tunnel stays up for the life of the connection."""
    tunnel = SSHTunnelForwarder(
        (settings.ssh_host, settings.ssh_port),
        ssh_username=settings.ssh_username,
        ssh_password=settings.ssh_password,
        remote_bind_address=('127.0.0.1', 3306),
    )
    tunnel.start()

    # Point the connection string at the tunnel's local end.
    uri = 'mysql://{username}:{password}@127.0.0.1:{port}/{db}'.format(
        username=settings.mysql_username,
        password=settings.mysql_password,
        db=settings.mysql_dbname,
        port=tunnel.local_bind_port,
    )

    return DAL(uri, migrate=False)
Example #8
0
 def create_tunnel_to_cdh_manager(self, local_bind_address='localhost', local_bind_port=7180, remote_bind_port=7180):
     """Prepare (without starting) an SSH tunnel to the CDH manager host."""
     self._local_bind_address = local_bind_address
     self._local_bind_port = local_bind_port
     manager_host = self.extract_cdh_manager_host()
     self.cdh_manager_tunnel = SSHTunnelForwarder(
         (self._hostname, self._hostport),
         ssh_username=self._username,
         ssh_private_key=self._key,
         ssh_private_key_password=self._key_password,
         local_bind_address=(local_bind_address, local_bind_port),
         remote_bind_address=(manager_host, remote_bind_port),
     )
Example #9
0
 def port_forward(self, port=PORT):
     """Forward local PORT to the same port on the EC2 instance over SSH.

     NOTE(review): the ``port`` parameter is unused; both ends bind the
     module-level PORT constant — confirm this is intended.
     """
     private_key = paramiko.RSAKey.from_private_key_file(
         config.get('EC2', 'PrivateKeyFile'))
     self._forward = SSHTunnelForwarder(
         ssh_address=(self.ip, 22),
         ssh_username=config.get('EC2', 'User'),
         ssh_private_key=private_key,
         local_bind_address=('127.0.0.1', PORT),
         remote_bind_address=('127.0.0.1', PORT),
     )
     self._forward.start()
Example #10
0
 def _portforward_frida_start(self):
     """Setup local port forward to enable communication with the Frida server running on the device."""
     self.printer.debug('{} Setting up port forwarding on port {}'.format("[FRIDA]", Constants.FRIDA_PORT))
     loopback = '127.0.0.1'
     # Same port on both ends of the tunnel, bound to loopback only.
     self._frida_server = SSHTunnelForwarder(
         (self._ip, int(self._port)),
         ssh_username=self._username,
         ssh_password=self._password,
         remote_bind_address=(loopback, Constants.FRIDA_PORT),
         local_bind_address=(loopback, Constants.FRIDA_PORT),
     )
     self._frida_server.start()
Example #11
0
 def _portforward_agent_start(self):
     """Setup local port forward to enable communication with the Needle server running on the device."""
     self.printer.debug('{} Setting up port forwarding on port {}'.format(Constants.AGENT_TAG, self._agent_port))
     loopback = '127.0.0.1'
     # Same agent port on both ends of the tunnel, bound to loopback only.
     self._port_forward_agent = SSHTunnelForwarder(
         (self._ip, int(self._port)),
         ssh_username=self._username,
         ssh_password=self._password,
         remote_bind_address=(loopback, self._agent_port),
         local_bind_address=(loopback, self._agent_port),
     )
     self._port_forward_agent.start()
Example #12
0
 def get_connection_params(self):
     """Return DB connection kwargs rewritten to go through an SSH tunnel.

     Starts a daemonized SSHTunnelForwarder built from TUNNEL_CONFIG and
     points host/port at the tunnel's local end.
     """
     kwargs = super(DatabaseWrapper, self).get_connection_params()
     host = kwargs['host']
     port = kwargs['port']
     # Copy so the shared settings dict is not mutated on every call.
     config = dict(self.settings_dict["TUNNEL_CONFIG"])
     config['remote_bind_address'] = (host, port)
     self.tunnel = SSHTunnelForwarder(**config)
     # Daemon threads so a hung tunnel cannot block interpreter shutdown.
     self.tunnel.daemon_forward_servers = True
     self.tunnel.daemon_transport = True
     self.tunnel.start()
     kwargs["host"] = '127.0.0.1'
     kwargs['port'] = self.tunnel.local_bind_port
     return kwargs
Example #13
0
    def __init__(self, host, username, key_file, remote_port, host_port=nat_ssh_port_forwarding):
        """
        Build (but do not start) an SSHTunnelForwarder from a local port to
        ``remote_port`` on the SSH host's loopback interface.
        Caller must call stop() on the object when finished.
        :raises TunnelException: if the forwarder cannot be constructed.
        """
        # Quieten sshtunnel's own logging; only errors are interesting here.
        logger = logging.getLogger('sshtunnel')
        logger.setLevel(logging.ERROR)

        try:
            self._server = SSHTunnelForwarder((host, host_port),
                    ssh_username=username, ssh_private_key=key_file,
                    remote_bind_address=('127.0.0.1', remote_port), logger=logger)
        except sshtunnel.BaseSSHTunnelForwarderError as e:
            # Wrap the library error in this class's exception type.
            raise self.TunnelException(e)
Example #14
0
class DatabaseWrapper(MysqlDatabaseWrapper):
    """MySQL backend that transparently routes connections through an SSH
    tunnel described by ``settings_dict["TUNNEL_CONFIG"]``."""

    def __init__(self, *args, **kwargs):
        super(DatabaseWrapper, self).__init__(*args, **kwargs)
        # Created lazily in get_connection_params().
        self.tunnel = None

    def get_connection_params(self):
        """Return connection kwargs pointing at the local end of a freshly
        started SSH tunnel to the real database host."""
        kwargs = super(DatabaseWrapper, self).get_connection_params()
        host = kwargs['host']
        port = kwargs['port']
        # Copy so the shared settings dict is not mutated on every call.
        config = dict(self.settings_dict["TUNNEL_CONFIG"])
        config['remote_bind_address'] = (host, port)
        self.tunnel = SSHTunnelForwarder(**config)
        # Daemon threads so a hung tunnel cannot block interpreter shutdown.
        self.tunnel.daemon_forward_servers = True
        self.tunnel.daemon_transport = True
        self.tunnel.start()
        kwargs["host"] = '127.0.0.1'
        kwargs['port'] = self.tunnel.local_bind_port
        return kwargs

    def _close(self):
        """Close the DB connection, then tear down the tunnel if one exists."""
        super(DatabaseWrapper, self)._close()
        if self.tunnel is not None:
            self.tunnel.stop()
            # Clear the reference so a second _close() is a no-op.
            self.tunnel = None
Example #15
0
 def __init__(self):
     """Open an SSH tunnel to the VicNode DB host, connect psycopg2 through
     it and run a connection self-test."""
     ssh_tunnel = Credentials.ssh_tunnel
     db_config = Credentials.vicnode_db
     self._server = SSHTunnelForwarder(
         ((ssh_tunnel['host']), (int(ssh_tunnel['port']))),
         ssh_password=ssh_tunnel['ssh_password'],
         ssh_username=(ssh_tunnel['username']),
         ssh_pkey=(ssh_tunnel['private_key_file']),
         remote_bind_address=(db_config['host'], 5432),
         allow_agent=False
     )
     self._server.start()
     # we are about to bind to a 'local' server by means of an ssh tunnel
     # ssh tunnel: which will be seen as a local server...
     # so replace the loaded config host
     # NOTE(review): this mutates the shared Credentials.vicnode_db dict in
     # place — confirm no other consumer relies on the original host/port.
     db_config['host'] = 'localhost'
     db_config['port'] = self._server.local_bind_port
     self._db_connection = psycopg2.connect(**db_config)
     # RealDictCursor: rows come back as dicts keyed by column name.
     self._db_cur = self._db_connection.cursor(
         cursor_factory=psycopg2.extras.RealDictCursor)
     self.test_connection()
Example #16
0
def db_show():
    """Open an SSH tunnel, run a query against the remote MySQL DB and
    return the formatted results.

    NOTE(review): ``sql`` is an empty string, so cursor.execute(sql) will
    fail until a real query is filled in — looks like placeholder code;
    confirm before use.
    """
    with SSHTunnelForwarder(
        (dbConfigs["HOST_IP"], dbConfigs["HOST_PORT"]),
            ssh_host_key=None,
            ssh_pkey=None,
            ssh_username=dbConfigs["HOST_USER"],
            ssh_password=dbConfigs["HOST_PASS"],
            remote_bind_address=("localhost", dbConfigs["DB_PORT"]),
            allow_agent=False) as ssh:
        # NOTE(review): the context manager already starts the tunnel; this
        # extra start() looks redundant — confirm against sshtunnel docs.
        ssh.start()
        conn = pymysql.connect(host='localhost',
                               port=ssh.local_bind_port,
                               user=dbConfigs["DB_USER"],
                               password=dbConfigs["DB_PASS"],
                               db=dbConfigs["DB_NAME"],
                               charset='utf8',
                               cursorclass=pymysql.cursors.DictCursor)
        cursor = conn.cursor()
        sql = ""
        cursor.execute(sql)
        results = cursor.fetchall()
        conn.close()
    return "param1:{}".format(results)
Example #17
0
def flushTables():
    """Interactively dump a few tables over an SSH tunnel, pausing for the
    user between queries."""
    with SSHTunnelForwarder(
            (hostname, 22),
            ssh_username=username,
            ssh_password=password,
            remote_bind_address=(sql_hostname, sql_port)) as tunnel:
        conn = pymysql.connect(host='127.0.0.1',
                               port=tunnel.local_bind_port,
                               user=sql_username,
                               passwd=sql_password,
                               db=sql_main_database)
        statements = [
            '''SHOW TABLES;''', '''select * from customer;''',
            '''select * from payment;'''
        ]

        for statement in statements:
            print("=" * 50, statement, "=" * 50)
            frame = pd.read_sql_query(statement, conn)
            print(frame)
            input("Press Enter to continue...")

        conn.close()
def loadDocData(authorList, doc_id, chunk_size = 1000):
    """Fetch the word-document rows for *doc_id* over an SSH tunnel and
    return (texts, labels), where each label is an index into *authorList*.

    :param authorList: ordered list of author ids; label = index in this list
    :param doc_id: document identifier passed to the query helper
    :param chunk_size: rows fetched per chunk by the query helper
    :return: tuple (texts, labels)
    """
    import DatabaseQuery
    from sshtunnel import SSHTunnelForwarder
    # The tunnel's local end is pinned to port 5400, which is the port
    # DatabaseQuery is told to connect to below.
    with SSHTunnelForwarder((databaseConnectionServer, 22),
                            ssh_username='******',
                            ssh_password='******',
                            remote_bind_address=('localhost', 5432),
                            local_bind_address=('localhost', 5400)):
        textToUse = DatabaseQuery.getWordDocData(5400, doc_id, documentTable = documentTable,
                                                 chunk_size = chunk_size)
    labels = []
    texts = []
    for index, row in textToUse.iterrows():
        labels.append(authorList.index(row.author_id))
        texts.append(row.doc_content)

    # Release the (potentially large) frame before returning.
    del textToUse

    print('Found %s texts.' % len(texts))
    return (texts, labels)
Example #19
0
    def wrapper(conn_info, *args):
        # When SSH tunnelling is enabled, wrap the call in a fresh tunnel and
        # temporarily point conn_info at the tunnel's local end.
        if conn_info.use_ssh is True:
            if conn_info.ssh_local_bind_port > 0:
                # A tunnel was used before: restore the real DB endpoint
                # before building a new tunnel to it.
                conn_info.db_host = conn_info.original_db_host
                conn_info.db_port = conn_info.original_db_port
            else:
                # First use: remember the real DB endpoint for later calls.
                conn_info.original_db_host = conn_info.db_host
                conn_info.original_db_port = conn_info.db_port

            with SSHTunnelForwarder(
                    ssh_address_or_host=(conn_info.ssh_host,
                                         conn_info.ssh_port),
                    ssh_username=conn_info.ssh_user,
                    ssh_pkey=conn_info.ssh_key,
                    remote_bind_address=(conn_info.db_host,
                                         int(conn_info.db_port))) as tunnel:
                # Redirect conn_info at the tunnel's local end for the call.
                conn_info.db_host = '127.0.0.1'
                conn_info.db_port = tunnel.local_bind_port
                conn_info.ssh_local_bind_port = tunnel.local_bind_port
                return_obj = func(conn_info, *args)
                return return_obj
        else:
            return func(conn_info, *args)
Example #20
0
    def create_tunnel(self, remote_host, remote_port, local_port=0):
        """
        Creates a tunnel to the remote host and blocks until interrupted.

        :param remote_host: Remote host to tunnel to
        :type remote_host: String
        :param remote_port: Remote port to tunnel to
        :type remote_port: Number
        :param local_port: Local port. If set to 0, random local port is selected
        :type local_port: Number
        """
        # ``is 0`` compared object identity, not value (a SyntaxWarning on
        # modern Pythons); equality is what is meant here.
        if local_port == 0:
            local_port = self.get_available_local_port()

        with SSHTunnelForwarder((self.host, self.port),
                                ssh_username=self.username,
                                remote_bind_address=(remote_host, remote_port),
                                local_bind_address=('0.0.0.0', local_port)):
            # Keep the tunnel open until the user hits Ctrl-C.
            try:
                while True:
                    sleep(1)
            except KeyboardInterrupt:
                pass
    def query_db(self, query, params=None):
        """
            query the database

            :param dict params: the params to be add to the query in the form of {param_name: value}
            :param str query: The sql query in a string format
            :return: Data-frame with the result of the query
            """
        SSHTunnelForwarder.daemon_forward_servers = True  # fix problems with python >= 3.7
        with SSHTunnelForwarder(
                (self._server_host, self._ssh_port),
                ssh_username=self._ssh_username,
                ssh_password=self._ssh_password,
                remote_bind_address=(self._sqlhost, self._sqlport)
        ) as server:
            # NOTE(review): the context manager already starts the tunnel;
            # this start() looks redundant — confirm against sshtunnel docs.
            server.start()
            # NOTE(review): connecting to host=self._sqlhost with the
            # tunnel's local port only works if _sqlhost resolves locally
            # (e.g. 'localhost'); '127.0.0.1' is the usual target — confirm.
            _con = psycopg2.connect(dbname=self._db_name,
                                    user=self._db_username,
                                    password=self._db_password,
                                    host=self._sqlhost,
                                    port=server.local_bind_port)
            tr = pd.read_sql(query, _con, params=params)
        return tr
Example #22
0
def dbSetting(hostAddr, sqlStr):
    """Run a sequence of SQL statements on the remote MySQL server through an
    SSH tunnel, committing on success and rolling back on any error."""
    ssh_host = hostAddr #'35.201.246.119'            # SSH server address
    ssh_port = 22                  # SSH port
    keyfile = '../assistence/lisakey'            # SSH private-key path
    ssh_user = '******'              # SSH user name
    db_host = '127.0.0.1'          # database address (via tunnel)
    db_name = 'live_casting'       # database name
    db_port = 3306                 # database port
    db_user = '******'               # database user name
    db_passwd = 'mysql'            # database password
    with SSHTunnelForwarder(
            (ssh_host, ssh_port),
            ssh_pkey=keyfile,
            ssh_username=ssh_user,
            remote_bind_address=(db_host, db_port)) as server:
        db = pymysql.connect(
            host=db_host,
            port=server.local_bind_port,
            user=db_user,
            passwd=db_passwd,
            db=db_name,
            charset="utf8")
        cursor = db.cursor()
        try:
            for i in sqlStr:
                cursor.execute(i)
            db.commit()
        except Exception as err:
            print("Error %s from exceute sql: %s" % (err, i))
            db.rollback()
        finally:
            cursor.close()
            db.close()
        return
Example #23
0
def query(q, fl=1):
    """Execute *q* over the SSH tunnel and shape the result according to
    *fl*: 1 -> list of row lists, 2 -> dict keyed by the first column,
    anything else -> the last row's first column (or '' when empty)."""
    with SSHTunnelForwarder(
        (ssh_host, ssh_port),
            ssh_username=ssh_user,
            ssh_pkey=mypkey,
            remote_bind_address=(sql_hostname, sql_port)) as tunnel:
        conn = pymysql.connect(host='127.0.0.1',
                               port=tunnel.local_bind_port,
                               user=sql_username,
                               passwd=sql_password,
                               db=sql_main_database)
        cur = conn.cursor()
        cur.execute(q)
        if fl == 1:
            rows = [list(row) for row in cur.fetchall()]
            conn.close()
            return rows
        elif fl == 2:
            dic = {}
            # Number the rows 1..N and tag each with a synthetic image name.
            for val, row in enumerate(cur.fetchall(), start=1):
                entry = list(row[1:7])
                entry.insert(0, str(val) + ".jpeg")
                dic[row[0]] = entry
            return dic
        else:
            result = ""
            for row in cur.fetchall():
                result = row[0]
            return result
Example #24
0
    def test_database(self):
        """Fetch enabled users from the remote Mantis DB through an SSH
        tunnel and cache the sorted rows on the class for later tests."""
        try:
            # create ssh-tunnel for connecting
            with SSHTunnelForwarder(
                (SSH['hostname'], SSH['port']),
                    ssh_password=SSH['password'],
                    ssh_username=SSH['username'],
                    remote_bind_address=(DATABASE['host'], DATABASE['port']),
                    local_bind_address=('localhost', 6543)) as tunnel:

                self.assertEqual(tunnel.is_active, True)

                # create database connection and fetch data
                with psycopg2.connect(database=DATABASE['db'],
                                      user=DATABASE['user'],
                                      password=DATABASE['password'],
                                      host=tunnel.local_bind_host,
                                      port=tunnel.local_bind_port) as conn:

                    curs = conn.cursor()
                    curs.execute('''SELECT username, email
                                    FROM mantis_user_table
                                    WHERE enabled IS TRUE;''')
                    rows = curs.fetchall()

                    self.assertIsNotNone(rows)

                    # store answer for separated test
                    # (sorted by username for deterministic comparison)
                    self.__class__.data_database = sorted(rows,
                                                          key=lambda x: x[0])

        except (BaseSSHTunnelForwarderError, ):
            assert False, 'Invalid ssh credentials'

        except (psycopg2.OperationalError):
            assert False, 'Invalid database credentials'
Example #25
0
def dbQuery(hostAddr, sqlStr):
    """Run a single query on the remote MySQL server through an SSH tunnel
    and return the fetched rows as a list (empty on error)."""
    sshtunnel.SSH_TIMEOUT = 15
    ssh_host = hostAddr  #'35.201.246.119'            # SSH server address
    ssh_port = 22  # SSH port
    keyfile = './lisakey'  # SSH private-key path
    ssh_user = '******'  # SSH user name
    db_host = '127.0.0.1'  # database address (via tunnel)
    db_name = 'live_casting'  # database name
    db_port = 3306  # database port
    db_user = '******'  # database user name
    db_passwd = 'mysql'  # database password

    with SSHTunnelForwarder((ssh_host, ssh_port),
                            ssh_pkey=keyfile,
                            ssh_username=ssh_user,
                            remote_bind_address=(db_host, db_port)) as server:
        db = pymysql.connect(host=db_host,
                             port=server.local_bind_port,
                             user=db_user,
                             passwd=db_passwd,
                             db=db_name,
                             charset="utf8")
        cursor = db.cursor()
        collect = []
        try:
            cursor.execute(sqlStr)
            for result in cursor.fetchall():
                collect.append(result)
        except Exception as err:
            print("Error %s from exceute sql: %s" % (err, sqlStr))
        finally:
            cursor.close()
            db.close()
        return collect
Example #26
0
    def start_tunnels(self, ssh_port: int = 22):
        """Starts all tunnels, chaining each hop through the previous bastion.
        Arguments:
            ssh_port {int} -- SSH Port (default: 22)
        Returns:
            success {bool} -- Returns true if all tunnels started successfully
        """
        self.tunnels = []
        for idx, tunnel_info in enumerate(self.tunnel_info):

            # If we're not the first element, set the bastion to the local port of the previous tunnel
            if idx > 0:
                tunnel_info['ssh_address_or_host'] = (
                    'localhost', self.tunnels[-1].local_bind_port)

            # If we are the last element, the target is the real target
            if idx == len(self.tunnel_info) - 1:
                target = (self.target_ip, self.target_port)
            # Otherwise the target is the next bastion
            else:
                target = (self.tunnel_info[idx + 1]['ssh_address_or_host'],
                          ssh_port)

            logger.debug(
                "Attempting to start tunnel to target '%s' with info '%s'",
                target, tunnel_info)
            try:
                self.tunnels.append(
                    SSHTunnelForwarder(remote_bind_address=target,
                                       **tunnel_info))
                self.tunnels[idx].start()
            except Exception as ex:
                logger.error("Unable to start tunnel to '%s', exception: %s",
                             target, ex)
                # Tear down any tunnels already opened so a failed chain
                # does not leak live forwarders.
                for opened in self.tunnels:
                    try:
                        opened.stop()
                    except Exception:
                        pass  # best-effort cleanup
                return False

        return True
def remote_query(sql_query, typeOp="select"):
    """Run a SELECT against the remote MySQL database through an SSH tunnel.

    :param sql_query: SQL text to execute
    :param typeOp: only "select" is supported; anything else returns ""
    :return: fetched rows (dict cursor) or "" when nothing was executed
    """
    rows = ""
    config_dict = _read_config()

    # Guard clause: non-select operations are not supported.
    if typeOp != "select":
        return rows

    with SSHTunnelForwarder(
        (config_dict['ssh_host'], int(config_dict['ssh_port'])),
            ssh_username=config_dict['ssh_user'],
            ssh_password=config_dict['mypkey'],
            remote_bind_address=(config_dict['sql_hostname'],
                                 int(config_dict['sql_port']))) as tunnel:

        connection_object = pymysql.connect(
            host='127.0.0.1',
            user=config_dict['sql_username'],
            passwd=config_dict['sql_password'],
            db=config_dict['sql_main_database'],
            port=tunnel.local_bind_port,
            cursorclass=pymysql.cursors.DictCursor)

        cursor_object = None
        try:
            cursor_object = connection_object.cursor()
            cursor_object.execute(sql_query)

            # connection_object.commit()
            rows = cursor_object.fetchall()
        except Exception as e:
            print(f"Exception occured: {e}")
        finally:
            # Guard: cursor creation itself may have failed, in which case
            # cursor_object would otherwise be an unbound name here.
            if cursor_object is not None:
                cursor_object.close()
            connection_object.close()

    return rows
Example #28
0
def return_tweets(tar=False, low=False):
    """Load all tweets through an SSH tunnel and split them by category.

    :param tar: when True return (cleaned_tweets, binary_targets); otherwise
                return (recruitment_tweets, non_recruitment_tweets)
    :param low: passed straight through to clean() — presumably controls
                lower-casing; TODO confirm against clean()'s definition
    """
    tweetvec = []
    tweets = []
    targets = []
    rec = []
    nonrec = []
    with SSHTunnelForwarder('ucla.seanbeaton.com',
                            ssh_port=7822,
                            ssh_username="******",
                            local_bind_address=settings.local_address,
                            remote_bind_address=('127.0.0.1', 3306)) as server:

        cnx = pymysql.connect(cursorclass=pymysql.cursors.DictCursor,
                              **settings.db_config)
        with cnx.cursor() as cur:

            # Randomized order so downstream train/test splits are shuffled.
            sql = "SELECT * FROM tweets ORDER BY RAND()"
            cur.execute(sql)
            tweets = cur.fetchall()

            tweetids = []

            for tweet in tweets:
                tweetids.append(int(tweet['tweetid']))
                tweetvec.append(clean(tweet['tweettext'], low))
                # Categories 'r' and 's' both count as recruitment tweets.
                if tweet['category'] == 'r' or tweet['category'] == 's':
                    targets.append(1)
                    rec.append(clean(tweet['tweettext'], low))
                else:
                    targets.append(0)
                    nonrec.append(clean(tweet['tweettext'], low))

        cnx.close()
    if tar:
        return (tweetvec, targets)
    else:
        return (rec, nonrec)
Example #29
0
 def login_server(self, address, address_port, username, remote_address, remote_address_port, **kw):
     """Open an SSH tunnel from a jump host to a remote server's MySQL port.

     :param address:       jump host IP address
     :param address_port:  jump host SSH port
     :param username:      jump host SSH login account
     :param remote_address:      remote server IP address
     :param remote_address_port: remote server port
     :param kw: ssh_pkey (client private-key path), key_password (private-key
                passphrase, optional) or password (SSH password).
                NOTE(review): the original docstring mentioned ``ssh_password``
                but the code checks the ``password`` key — confirm intent.
     :return:  the started SSHTunnelForwarder (server handle)
     """
     try:
         # Key-based login: load the client's private key file.
         if "ssh_pkey" in kw:
             server = SSHTunnelForwarder(
                 # Jump host IP and SSH port
                 ssh_address_or_host=(address, address_port),
                 # Jump host login account
                 ssh_username=username,
                 # Client private key
                 ssh_pkey=paramiko.RSAKey.from_private_key_file(kw["ssh_pkey"]),
                 # .get() so a missing passphrase no longer raises KeyError
                 ssh_private_key_password=kw.get("key_password"),
                 # Remote MySQL server's bound IP and port
                 remote_bind_address=(remote_address, remote_address_port)
             )
         # Password-based login.
         elif "password" in kw:
             server = SSHTunnelForwarder(
                 ssh_address_or_host=(address, address_port),
                 ssh_username=username,
                 ssh_password=kw["password"],
                 remote_bind_address=(remote_address, remote_address_port),
                 # Bind the local end explicitly (127.0.0.1:22)
                 local_bind_address=('127.0.0.1', 22),
             )
         else:
             print("登录信息与方法错误,抛出异常 - ValueError")
             raise ValueError
         server.start()
         print("账号【%s】登录【%s】服务器启动服务器【%s】的MySQL服务" % (username, address, remote_address))
         return server
     except Exception as e:
         print("发生未知错误: %s" % e)
         raise
Example #30
0
def connection_from_settings(
        ssh_username,
        ssh_pkey,
        local_port,
        remote_host,
        private_dburl,
        private_port=5432,
        remote_port=22):
    """Yield a DB URL that points at a local SSH tunnel to the private DB.

    The key material is parsed from the in-memory PEM text, the tunnel
    binds 0.0.0.0:local_port, and the tunnel is torn down when the caller
    resumes past the yield.
    """
    key = paramiko.RSAKey.from_private_key(
        file_obj=io.StringIO(six.text_type(ssh_pkey)))

    target_host = host_from_dburl(private_dburl)
    url = local_dburl(private_dburl, local_port)

    with SSHTunnelForwarder(
            (remote_host, remote_port),
            ssh_username=ssh_username,
            ssh_pkey=key,
            remote_bind_address=(target_host, private_port),
            local_bind_address=('0.0.0.0', local_port)):
        yield url
Example #31
0
def ssh_Mysql(FSRAR_ID):
    """Export excise-mark rows for the given FSRAR id into a CSV on the
    remote MySQL host (via SELECT ... INTO OUTFILE) over an SSH tunnel.

    NOTE(review): the except branch prints a completion message and closes
    the connection — apparently read_sql_query is expected to fail because
    INTO OUTFILE returns no result set; confirm this control flow is
    intended before changing it.
    """
    sql_hostname = 'cashsrv'
    sql_username = '******'
    sql_password = '******'
    sql_main_database = 'documentsAll'
    sql_port = 3306
    sql_ip = 'cashsrv'
    sql_host='127.0.0.1'

    ssh_host = 'cashsrv'
    ssh_user = '******'
    ssh_port = 22
    ssh_password1='Ma4u-Pik4u'

    with SSHTunnelForwarder(
            (ssh_host, ssh_port),
            ssh_username = sql_username,
            ssh_password = ssh_password1,

            remote_bind_address = (sql_host, sql_port)) as tunnel:
        conn = pymysql.connect(host=sql_host, user=sql_username,
                passwd=sql_password, db=sql_main_database,
                port=tunnel.local_bind_port)

##        query1 = '''SET @row_number = 0;SELECT (@row_number:=@row_number + 1) AS num, cashcode, excisemark, alcocode from goodsitem WHERE cashcode LIKE "''' + FSRAR_ID + '''1''' + '''" INTO OUTFILE "/var/lib/mysql-files/''' + FSRAR_ID + '''.csv" FIELDS ENCLOSED BY '"' TERMINATED BY ';' ESCAPED BY '"' LINES TERMINATED BY "\\r\\n";'''
        try:
            query1 = '''SELECT @i:=@i+1 num, cashcode, excisemark, alcocode from goodsitem, (SELECT @i:=0) X WHERE cashcode LIKE "''' + FSRAR_ID + '''1''' + '''" INTO OUTFILE "/var/lib/mysql-files/''' + FSRAR_ID + '''.csv" FIELDS ENCLOSED BY '"' TERMINATED BY ';' ESCAPED BY '"' LINES TERMINATED BY "\\r\\n";'''
            print ("Формирую список марок")
            data1 = pd.read_sql_query(query1, conn)

        except:
            print ("Список сформирован!")
            conn.close()
Example #32
0
def connect_and_excute(host, port, ssh_user, ssh_password, db_user,
                       db_password, query):
    """Run *query* against the ``bookroll`` MySQL database through an SSH tunnel.

    :param host: SSH host (SSH itself always connects on port 22).
    :param port: remote MySQL port that the tunnel forwards to.
    :param query: SQL text to execute (passed through verbatim).
    :return: list of all fetched rows (empty when nothing is returned).
    """
    data = []
    with SSHTunnelForwarder(ssh_address_or_host=(host, 22),
                            ssh_username=ssh_user,
                            ssh_password=ssh_password,
                            remote_bind_address=('127.0.0.1',
                                                 int(port))) as server:
        connection = pymysql.connect(user=db_user,
                                     passwd=db_password,
                                     host="127.0.0.1",
                                     db='bookroll',
                                     port=server.local_bind_port)
        try:
            cursor = connection.cursor()
            cursor.execute(query)
            data = list(cursor.fetchall())
            cursor.close()
        finally:
            # Original never closed the connection; release it even on error.
            connection.close()

        # return all fetched data
        return data
Example #33
0
    def syncProduct(self, mage2Setting, attributeSet, products):
        """Push *products* into Magento 2 through an SSH tunnel.

        Each entry of *products* is mutated in place: its ``data`` dict is
        re-mapped through the module-level ``txmap``, then ``product_id``,
        ``sync_status`` ('S' on success, 'F' on failure) and ``log`` (the
        traceback, failures only) are recorded, and the entry is logged as
        JSON.

        :param mage2Setting: dict of SSH/bind settings (SSHSERVER,
            SSHSERVERPORT, SSHUSERNAME, SSHPKEY/SSHPASSWORD,
            REMOTEBINDSERVER(PORT), LOCALBINDSERVER(PORT)).
        :param attributeSet: forwarded to ``self.mage2Connector.syncProduct``.
        :param products: list of dicts, each with at least ``sku`` and
            ``data`` keys.
        """
        with SSHTunnelForwarder(
            (mage2Setting['SSHSERVER'], mage2Setting['SSHSERVERPORT']),
                ssh_username=mage2Setting['SSHUSERNAME'],
                ssh_pkey=mage2Setting.get('SSHPKEY'),
                ssh_password=mage2Setting.get('SSHPASSWORD'),
                remote_bind_address=(mage2Setting['REMOTEBINDSERVER'],
                                     mage2Setting['REMOTEBINDSERVERPORT']),
                local_bind_address=(
                    mage2Setting['LOCALBINDSERVER'],
                    mage2Setting['LOCALBINDSERVERPORT'])) as server:
            sleep(2)  # give the tunnel a moment to come up before first use
            for product in products:
                sku = product["sku"]
                typeId = product["data"].pop("type_id", "simple")
                storeId = product["data"].pop("store_id", "0")
                # Re-map product["data"] through txmap: for each target key k,
                # take data[txmap[k]['key']] when present, else
                # txmap[k]['default']; entries that resolve to None are dropped.
                product["data"] = dict((k, v) for k, v in dict(
                    (k, product["data"].get(v.get("key")) if product["data"].
                     get(v.get("key")) is not None else v.get("default"))
                    for k, v in txmap.items()).items() if v is not None)
                try:
                    product['product_id'] = self.mage2Connector.syncProduct(
                        sku, attributeSet, product["data"], typeId, storeId)
                    product['sync_status'] = 'S'
                except Exception:
                    product['sync_status'] = 'F'
                    product['log'] = traceback.format_exc()

                logger.info(
                    json.dumps(product,
                               indent=4,
                               cls=JSONEncoder,
                               ensure_ascii=False))
            del self.mage2Connector
            # NOTE(review): stop()/close() are redundant inside the `with`
            # block - the context manager stops the tunnel again on exit.
            server.stop()
            server.close()
Example #34
0
def connect_db():
    """Open an SSH tunnel to the remote MySQL server, run two probe queries
    and print how many rows the ``User`` table returns (capped at 101).

    Hosts, key file and credentials are hard-coded placeholders.
    """
    with SSHTunnelForwarder(
        ('mm-ts-next.chinaeast.cloudapp.chinacloudapi.cn',
         1021),  # remote SSH server ip and SSH port
            logger=create_logger(loglevel=1),  # sshtunnel debug log output
            ssh_username="******",
            ssh_pkey="loadtesters_rsa-2.pem",
            remote_bind_address=('127.0.0.1', 3306),  # remote DB ip and port
    ) as server:

        print(server.local_bind_address)
        print(server.local_bind_port)
        db = pymysql.connect(
            host='127.0.0.1',  # must be 127.0.0.1: we go through the local tunnel end
            port=server.local_bind_port,
            user='******',
            password='******',
            database='mm')
        try:
            cursor = db.cursor()
            cursor.execute("show tables")
            sql = 'select * from `User` limit 101'
            cursor.execute(sql)
            print(cursor.rowcount)
            cursor.close()
        finally:
            # Original leaked the connection; always close it.
            db.close()
def download_table_as_excel(table: str, filter_time: str = None):
    """
    Downloads the entire data from the database as an excel file
    [Note]: If timestamp is not included it fetches all the records from the table

    NOTE(review): *table* and *filter_time* are spliced directly into the SQL
    text, so this is SQL-injectable; only call it with trusted input.
    """
    if filter_time is None:  # was `== None`; identity test is correct for None
        q = "select * from " + table + " "
    else:
        q = "select * from " + table + " where Timestamp = '" + filter_time + "'"

    with SSHTunnelForwarder((host, 24226),
                            ssh_username=ssh_username,
                            ssh_pkey=ssh_private_key,
                            remote_bind_address=(localhost, 1521),
                            local_bind_address=(localhost, 1563)) as server:
        dsn_tns = cx_Oracle.makedsn(localhost, 1563, service_name=database)
        conn = cx_Oracle.connect(user=user, password=password, dsn=dsn_tns)

        print(q)
        try:
            try:
                data = pd.read_sql(q, conn)
            except Exception:
                # The original blindly retried the identical statements once
                # after any failure; that single retry is preserved.
                data = pd.read_sql(q, conn)
            print(data.head())
            data.to_excel(table + '.xlsx')
        finally:
            # Close the connection on every path (original leaked it when
            # both attempts failed).
            conn.close()
        return FileResponse(EXCEL_PATH + table + '.xlsx',
                            media_type='application/octet-stream')
Example #36
0
def get_data_db():
    """
    Connect to the staging DB through an SSH tunnel and build the initial
    data set from the queries in ``config.sql_dict``.

    :return: DataFrame built from the rows of the last successfully executed
        query (an empty DataFrame when every query failed).
    """
    global results
    with SSHTunnelForwarder(
            ('54.223.128.196', 11690),  # address of the SSH jump host
            ssh_username="******",  # jump-host user
            ssh_pkey="/Users/letote/.ssh/id_rsa",
            ssh_private_key_password="******",
            remote_bind_address=(
                    'staging-mysql.czqjl6kiyvxa.rds.cn-north-1.amazonaws.com.cn', 3306)) as server:

        db = pymysql.connect(host='127.0.0.1',  # must be 127.0.0.1 (local tunnel end)
                             port=server.local_bind_port,
                             user='******',
                             passwd='Lt123456',
                             db='letote_staging')
        # Original raised NameError when every query failed (data_df never
        # bound); start from an empty frame so a DataFrame is always returned.
        data_df = pd.DataFrame()

        with db:
            cursor = db.cursor()
            for sql_name, sql in config.sql_dict.items():
                try:
                    # Execute the SQL query and fetch every row.
                    cursor.execute(sql)
                    results = cursor.fetchall()
                    data_df = pd.DataFrame(list(results),
                                           columns=['id', 'iid', 'image', 'description', 'title', 'FFCGO', 'name',
                                                    'url'])
                except Exception as e:
                    print(e)
        return data_df
Example #37
0
def create_aurora_db_connection(dsn):
    """Open a MySQL connection to an Aurora database through an SSH tunnel.

    :param dsn: mapping with ``dsn['url']['name'|'host'|'port']``; the DB
        credentials are resolved via ``get_dsn_credential``.
    :return: open ``pymysql`` connection; the tunnel object is attached as
        ``conn.ssh_tunnel`` so the caller can stop it when finished.

    NOTE(review): the original built the connection inside a ``with``
    SSHTunnelForwarder block and returned it *after* the block, i.e. after
    the tunnel had already been torn down - the returned connection was
    dead.  The tunnel is now started explicitly and left running.
    """
    import io

    # Private key pulled from odin.
    material = ""
    pkey = helpers.get_odin_material(material, 'PrivateKey')
    # base64.encodestring was removed in Python 3.9; encodebytes is the
    # direct replacement.  Decode so the PEM template yields clean text.
    pkey = base64.encodebytes(pkey).decode('ascii').strip()
    pkey_tmpl = '-----BEGIN PRIVATE KEY-----\n%s\n-----END PRIVATE KEY-----'
    pkey = pkey_tmpl % (pkey, )
    # Original called from_private_key(mypkey) with `mypkey` undefined
    # (NameError); parse the PEM text that was just assembled instead.
    mypkey = paramiko.RSAKey.from_private_key(io.StringIO(pkey))

    # db properties
    dbname = dsn['url']['name']
    db_host = dsn['url']['host']
    db_port = dsn['url']['port']

    # ssh properties
    ssh_user = '******'
    ssh_port = 22
    ssh_host = ''

    # db user name and password
    db_user, db_password = get_dsn_credential(dsn)

    tunnel = SSHTunnelForwarder((ssh_host, ssh_port),
                                ssh_username=ssh_user,
                                ssh_pkey=mypkey,
                                remote_bind_address=(db_host, db_port))
    tunnel.start()
    conn = pymysql.connect(host='127.0.0.1',
                           user=db_user,
                           passwd=db_password,
                           db=dbname,
                           port=tunnel.local_bind_port)
    # Keep a reference so the caller can stop the tunnel when done.
    conn.ssh_tunnel = tunnel
    return conn
Example #38
0
    def insert_to_tables(self, table_name: str, data: dict):
        u'''Insert processed rows into *table_name* over an SSH tunnel.

        :param table_name: table name in the database (spliced into the SQL
            text verbatim - trusted input only).
        :param data: dict with a ``'header'`` list of column names and a
            ``'data'`` list of row groups; each row is indexable by column
            position.
        :return: nothing; the connection is always closed in ``finally``.
        '''
        with SSHTunnelForwarder(
            ("192.168.29.37", 22),
                ssh_username='******',
                ssh_password='******',
                remote_bind_address=('0.0.0.0', 3306)) as server:
            # Pre-bind so except/finally never hit an unbound name when
            # mc.connect itself fails (original raised NameError there).
            connect = None
            try:
                connect = mc.connect(host="127.0.0.1",
                                     port=server.local_bind_port,
                                     user=self.user,
                                     passwd=self.passwd,
                                     database=self.database)
                cursor = connect.cursor()
                table_header = ",".join(data['header'])
                # The statement text does not depend on the row, so build it
                # once (original rebuilt it inside the innermost loop).
                placeholders = str("%s," * len(data['header'])).strip(",")
                sql = ("INSERT INTO " + table_name + "(" + table_header +
                       ") VALUES (" + placeholders + ")")
                for contents in data['data']:
                    for content in contents:
                        cursor.execute(
                            sql,
                            tuple(content[i]
                                  for i in range(0, len(data['header']))))
                        connect.commit()
                print("The database was commited.")
            except Exception:
                # Narrowed from a bare except; roll back only if the
                # connection was actually opened.
                if connect is not None:
                    connect.rollback()
            finally:
                if connect is not None:
                    connect.close()
Example #39
0
def mysql(direction):
    """Fetch all rows from ``traffic`` whose ``<direction>_port`` is '443'.

    :param direction: column-name prefix; it is interpolated into the SQL
        text, so it is validated as a bare identifier first to block SQL
        injection (legitimate values still pass unchanged).
    :return: numpy array of the fetched rows.
    """
    import pymysql
    from sshtunnel import SSHTunnelForwarder

    # *direction* becomes part of a column name and cannot be bound as a
    # query parameter; refuse anything that is not a plain identifier.
    if not direction.isidentifier():
        raise ValueError("invalid direction: %r" % (direction,))

    sql_hostname = 'localhost'
    sql_username = '******'
    sql_password = ''
    sql_main_database = 'perl'
    sql_port = 3306
    ssh_host = '10.36.65.1'
    ssh_user = '******'
    ssh_password = '******'
    ssh_port = 22

    with SSHTunnelForwarder(
        (ssh_host, ssh_port),
            ssh_username=ssh_user,
            ssh_password=ssh_password,
            remote_bind_address=(sql_hostname, sql_port)) as tunnel:
        conn = pymysql.connect(host='127.0.0.1',
                               user=sql_username,
                               passwd=sql_password,
                               db=sql_main_database,
                               port=tunnel.local_bind_port)
        try:
            cursor = conn.cursor()
            cursor.execute("SELECT * FROM traffic WHERE " + direction +
                           "_port = '443';")
            arr = np.array(cursor.fetchall())
            cursor.close()
        finally:
            # Close on every path; the dead `SELECT VERSION()` read that the
            # original ran after fetching has been removed.
            conn.close()
    return arr
Example #40
0
def dbconn_database(sql_code, **kw):
    '''
    Connect to MySQL through an SSH tunnel and run *sql_code* formatted
    with **kw.

    :param sql_code: SQL template; ``sql_code.format(**kw)`` is executed.
        NOTE(review): this is plain string substitution, not SQL parameter
        binding - pass trusted values only.
    :return: all fetched rows, or None when anything raised (the exception
        is printed, matching the original behaviour).
    '''
    try:
        with SSHTunnelForwarder(
            ('ssh_host', 22312),  # SSH (jump) host configuration
                ssh_password='******',
                ssh_username='******',
                remote_bind_address=('remote_bind_address',
                                     3306)) as server:  # target DB host
            database = pymysql.connect(
                host='127.0.0.1',  # must be 127.0.0.1 (local tunnel end)
                port=server.local_bind_port,
                user='******',
                passwd='passwd',
                db='db',
                charset='utf8')
            try:
                cursor = database.cursor()
                cursor.execute(sql_code.format(**kw))
                data = cursor.fetchall()
            finally:
                # Original never closed the connection.
                database.close()
        return data
    except Exception as e:
        print(e)
Example #41
0
def query(use_ssh,
          q,
          db_host,
          db_user,
          db_password,
          db_port,
          db,
          ssh_username,
          ssh_password,
          charset='utf8mb4'):
    """Run SQL query *q* and return the result as a DataFrame.

    :param use_ssh: when truthy, tunnel to ``db_host:22`` over SSH and reach
        MySQL through the forwarded local port; otherwise connect directly.
    :return: ``pandas.DataFrame`` produced by ``pd.read_sql_query``.
    """
    def _fetch(host, port):
        # One shared connect/read/close path (the original duplicated it and
        # leaked the connection if read_sql_query raised).
        conn = sql.connect(host=host,
                           port=port,
                           user=db_user,
                           passwd=db_password,
                           db=db,
                           charset=charset)
        try:
            return pd.read_sql_query(q, conn)
        finally:
            conn.close()

    if use_ssh:
        with SSHTunnelForwarder(ssh_address_or_host=(db_host, 22),
                                ssh_password=ssh_password,
                                ssh_username=ssh_username,
                                remote_bind_address=('127.0.0.1',
                                                     db_port)) as server:
            return _fetch('127.0.0.1', server.local_bind_port)
    return _fetch(db_host, db_port)
def mongoConnection():
    """Open an SSH tunnel to the Mongo host and print its collection names.

    Hosts and credentials are hard-coded placeholders.  The tunnel and the
    Mongo client are always released, even when listing the collections
    raises (the original leaked both on error).
    """
    MONGO_HOST = "13.58.236.43"
    MONGO_DB = "Quant_database"
    MONGO_USER = "******"
    MONGO_PASS = "******"
    server = SSHTunnelForwarder(
        MONGO_HOST,
        ssh_username='******',
        ssh_password='******',
        remote_bind_address=('127.0.0.1', 27017)
    )

    server.start()
    try:
        # server.local_bind_port is the locally assigned forwarded port.
        client = pymongo.MongoClient('127.0.0.1', server.local_bind_port)
        try:
            db = client[MONGO_DB]
            pprint.pprint(db.collection_names())
        finally:
            # Original never closed the client.
            client.close()
    finally:
        server.stop()
Example #43
0
def get_unidade(pk):
    """Fetch one document from ``cnjinova.unidades`` by its ``id``.

    In dev mode an SSH tunnel to the Mongo host is opened first and is
    always stopped again, even when the lookup raises (the original leaked
    the tunnel and client on error).

    :param pk: unit id; matched as ``str(pk)``.
    :return: the matching document, or None when not found.
    """
    server = None
    if dev:
        server = SSHTunnelForwarder('18.229.203.58',
                                    ssh_username='******',
                                    ssh_pkey='./cnjinova.pem',
                                    remote_bind_address=('10.0.22.39', 27017),
                                    local_bind_address=('0.0.0.0', 27018))
        server.start()

    try:
        uri = "mongodb://%s:%s@%s" % (quote_plus(user), quote_plus(password), host)
        client = MongoClient(uri)
        try:
            db = client.cnjinova
            unidade = db.unidades.find_one({'id': str(pk)})
        finally:
            client.close()
    finally:
        if server is not None:
            server.stop()
    return unidade
Example #44
0
def remote_kernel():
    """Attach to a Jupyter kernel running in Colab over SSH.

    Prompts for the colab-generated ``host:port`` URL and secret key, opens
    an SSH tunnel forwarding local port 8888 to the remote kernel, then
    starts Jupyter on the remote side.  The tunnel is always stopped on the
    way out (the original leaked it on errors and on Ctrl+C).
    """
    # push code to colab and run the colab start and stop
    try:
        url = input("Enter the url generated in colab: ")
        hostname, port = url.split(":")
        port = int(port)
        key = input("Enter secret key: ")
        # Derive the 10-char password the colab side expects from the key.
        passwd = hashlib.sha1(key.encode("utf-8")).hexdigest()[:10]
    except Exception as e:
        print("Error: " + str(e))
        exit(1)
    # Create the spinner before the try so the handler below can never hit
    # an unbound `spinner` (original raised NameError when Halo() failed).
    server = None
    spinner = Halo(text="Connecting", spinner="dots")
    spinner.start()
    try:
        with Connection(host=hostname,
                        port=port,
                        user="******",
                        connect_kwargs={"password": passwd}) as c:
            server = SSHTunnelForwarder((hostname, port),
                                        ssh_username="******",
                                        ssh_password=passwd,
                                        remote_bind_address=('0.0.0.0', 9000),
                                        local_bind_address=('0.0.0.0', 8888))
            server.start()
            spinner.succeed("Connected")
            sudopass = Responder(
                pattern=r"\[sudo\] password for root:",
                response=f"{passwd}\n",
            )
            print(
                f"Open {bcolors.WARNING}http://localhost:8888/?token={passwd}{bcolors.ENDC} to connect to Jupyter. Press CTRL+C twice to kill."
            )
            c.run(
                f"LC_ALL=en_US.utf8 python3 -m jupyter notebook --NotebookApp.token='{passwd}' --ip=0.0.0.0 --port=8888 --no-browser",
                pty=True,
                watchers=[sudopass],
            )
    except Exception as e:
        spinner.fail("Something went wrong when running.")
        print(str(e))
        exit(1)
    finally:
        # exit(1) raises SystemExit, so this also runs on the error path.
        if server is not None:
            server.stop()
 def __enter__(self):
     """Context-manager entry: connect to MySQL through an SSH tunnel if
     possible, falling back to a direct connection on any failure.

     On the tunnel path the forwarded local port is also written into
     ``yearning.core_databaselist.sshport`` for the matching ip.

     NOTE(review): the bare ``except:`` plus ``finally: return self``
     swallow *every* exception raised in this method, and if
     ``SSHTunnelForwarder(...)`` itself raises, ``sqlserver.stop()`` in the
     handler hits an unbound name (which is then also swallowed) - review.
     """
     try:
         sqlserver = SSHTunnelForwarder(ssh_address_or_host=(self.ip,
                                                             self.port),
                                        ssh_username="******",
                                        ssh_pkey=mypkey,
                                        remote_bind_address=('127.0.0.1',
                                                             3306))
         sqlserver.start()
         # Connect through the tunnel's locally bound port.
         self.con = pymysql.connect(host="127.0.0.1",
                                    user=self.user,
                                    passwd=self.password,
                                    db=self.db,
                                    charset='utf8mb4',
                                    port=sqlserver.local_bind_port)
         '''
         把通过ssh连接数据库的端口给inception,连接本地数据库密码写死的
         ALTER TABLE `core_databaselist` ADD COLUMN `sshport` int(11) NULL AFTER `after`;
         '''
         # Record the forwarded port in the local yearning DB (hard-coded
         # local credentials, 1s connect timeout).
         insert_sshport = pymysql.connect(host='127.0.0.1',
                                          user='******',
                                          passwd='bbotte',
                                          db='yearning',
                                          charset='utf8mb4',
                                          port=3306,
                                          connect_timeout=1)
         sshportcursor = insert_sshport.cursor()
         # NOTE(review): built with str.format, not parameter binding.
         sshportsql = "update yearning.core_databaselist set sshport={} WHERE ip='{}'".format(
             sqlserver.local_bind_port, self.ip)
         sshportcursor.execute(sshportsql)
         insert_sshport.commit()
         insert_sshport.close()
     except:
         # Any failure above: stop the tunnel (if created) and connect
         # directly to the database host instead.
         sqlserver.stop()
         self.con = pymysql.connect(host=self.ip,
                                    user=self.user,
                                    passwd=self.password,
                                    db=self.db,
                                    charset='utf8mb4',
                                    port=self.port)
     finally:
         # `return` in finally suppresses any in-flight exception.
         return self
Example #46
0
# Configure the gate/door sensor inputs with internal pull-downs.
# (Portuguese labels: ABERTO = open, FECHADO = closed, INFRA = infrared,
# LACO = induction loop, RESERVA = spare.)
GPIO.setup(S1_ABE,GPIO.IN,pull_up_down=GPIO.PUD_DOWN)  #S_ABERTO (open)
GPIO.setup(S1_INF,GPIO.IN,pull_up_down=GPIO.PUD_DOWN)  #S_INFRA (infrared)
GPIO.setup(S1_LAC1,GPIO.IN,pull_up_down=GPIO.PUD_DOWN)  #S_LACO1 (loop 1)
GPIO.setup(S1_LAC2,GPIO.IN,pull_up_down=GPIO.PUD_DOWN)  #S_LACO2 (loop 2)
GPIO.setup(S2_FEC,GPIO.IN, pull_up_down=GPIO.PUD_DOWN)   #S_FECHADO (closed)
GPIO.setup(S2_ABE,GPIO.IN,pull_up_down=GPIO.PUD_DOWN)  #S_ABERTO (open)
GPIO.setup(S2_INF,GPIO.IN,pull_up_down=GPIO.PUD_DOWN)  #S_INFRA (infrared)
GPIO.setup(S2_LAC1,GPIO.IN,pull_up_down=GPIO.PUD_DOWN)  #S_LACO1 (loop 1)
GPIO.setup(2,GPIO.IN)   #RESERVA (spare)
#GPIO.setup(11,GPIO.IN)  #RESERVA (spare)
#GPIO.setup(7,GPIO.IN)   #RESERVA (spare)
#**********************************

# Open an SSH tunnel to the MySQL host; the tunnel runs for the lifetime of
# this script (it is never stopped - see the commented stop below).
server =  SSHTunnelForwarder(
    ("192.168.15.6",22),
    ssh_username="******",
    ssh_password="******",
    remote_bind_address=("localhost",3306))
server.start()
print server.local_bind_port  # Python 2 print statement
#server.stop()
# NOTE(review): connects to 192.168.15.7 while the tunnel listens on
# 127.0.0.1:<local_bind_port> - confirm which endpoint is intended.
db=MySQLdb.connect(host="192.168.15.7",
                       port=server.local_bind_port,
                       user="******",
                       passwd="toor",
                       db="wordpress")
cur=db.cursor()
# test the database connection
def dba():
    try:
        cur.execute("SELECT VERSION()")
Example #47
0
	def getRemoteDB(self):
		"""Pull encounter/observation/demographic/location rows from the
		remote MySQL database over an SSH tunnel, then rebuild the local
		``remotedata`` table from scratch with the result and refresh the
		postcodes afterwards.
		"""
		# TODO: This needs to add new rows since the last update, rather than replace everything.
		'''
		# Get the local database last record timestamp
		conn = pymysql.connect(host='127.0.0.1', port=3306, user=self.config['localusername'], passwd=self.config['localpassword'], db=self.config['localdbname'])
		cur = conn.cursor()
		cur.execute('SELECT Time_uploaded_to_server FROM remotedata ORDER BY Time_uploaded_to_server ASC;')
		lts = cur.fetchone()
		lastTimestamp = lts[0]

		cur.close()
		conn.close()
		'''

		# The database query
		sql = """select enc.dateTime, enc.pushedToServerDateTime, enc.howFeeling, enc.takenMedsToday+0,
				 MAX(IF(Observation.question_id = "20140544-1bee-4d02-b764-d80102437adc", Observation.valueNumeric, NULL)) AS Nose,
				 MAX(IF(Observation.question_id = "22d7940f-eb30-42cd-b511-d0d65b89eec6", Observation.valueNumeric, NULL)) AS Eyes,
				 MAX(IF(Observation.question_id = "2cd9490f-d8ca-4f14-948a-1adcdf105fd0", Observation.valueNumeric, NULL)) AS Breathing,
				 demo.birthYear, demo.gender, demo.allergiesDivulged+0, demo.hayFever+0, demo.asthma+0, demo.otherAllergy+0,
				 demo.unknownAllergy+0, loc.latitude, loc.longitude, loc.accuracy, loc.whenObtained
				 from Encounter as enc inner join Observation on Observation.encounter_id = enc.id
				 inner join Question on Observation.question_id = Question.id join Demographics as demo on demo.id = enc.demographics_id
				 join LocationInfo as loc on loc.id = enc.locationInfo_id
				 group by enc.id;"""

		# Open SSH tunnel
		server = SSHTunnelForwarder(
			(self.config['remotehostname'], 1522),
			ssh_username=self.config['remoteusername'],
			ssh_password=self.config['remotepassword'],
			remote_bind_address=('127.0.0.1', 3306)
		)

		# Select data from remote database
		server.start()
		#print(server.local_bind_port)
		#print(server.tunnel_is_up)
		#print(server.local_is_up(('127.0.0.1', 3306)))

		if server.is_alive:
			print('Connection up...executing sql...')
			#print(os.system('mysql -h 127.0.0.1 --port='+str(server.local_bind_port)+' -u'+self.config['remoteusername']+' -p'))

			try:
				conn = pymysql.connect(host='127.0.0.1', port=server.local_bind_port, user=self.config['remoteusername'], passwd=self.config['remotedbpassword'], db=self.config['remotedbname'])
				cur = conn.cursor()
				cur.execute(sql)
				cur.close()
				conn.close()

			except pymysql.err.OperationalError:
				print('MySQL failed...exiting.')
				server.stop()
				sys.exit(0)

		# Close the ssh tunnel
		server.stop()

		# Update local database
		lconn = pymysql.connect(host='127.0.0.1', port=3306, user=self.config['localusername'], passwd=self.config['localpassword'], db=self.config['localdbname'], autocommit=True)
		lcur = lconn.cursor()
		lcur.execute('TRUNCATE TABLE remotedata;')

		rowcount = 0
		# NOTE(review): `cur` and `conn` were closed above - iterating the
		# cursor here relies on pymysql having buffered the result set
		# before close; confirm this actually yields rows.
		for row in cur:
			#print(row[0])
			# NOTE(review): values are spliced in with str.format rather
			# than parameter binding - quoting/injection risk.
			sql = """INSERT INTO remotedata (id, Time_answered_on_phone, Time_uploaded_to_server, How_feeling, Taken_meds_today, Nose, Eyes, Breathing, Year_of_Birth, Gender, Optional_data_shared, hay_fever, asthma, other_allergy, unknown, latitude, longitude, accuracy, time_of_location_fix) VALUES ('', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}');
			""".format(row[0], row[1], row[2], row[3], row[4], row[5], row[6], row[7], row[8], row[9], row[10], row[11], row[12], row[13], row[14], row[15], row[16], row[17])

			# Update the remotedata table
			lcur.execute(sql)

			# Update the row count
			rowcount = rowcount+1

		print('Rows processed: ', rowcount)

		# Done
		lcur.close()
		lconn.close()

		# DB sync complete, update the postcodes
		self.addPostcodesToDB()
Example #48
0
    def DatabaseReachPasswordTimeout(self,p_database_index):
        """Check whether the password prompt timeout for one database has
        expired, (re)creating its SSH tunnel first when tunneling is enabled.

        :param p_database_index: index into ``self.v_databases``.
        :return: dict ``{'timeout': bool, 'message': str}`` - ``timeout`` is
            True when the caller must prompt for a password (or when tunnel
            creation / the passwordless test failed, with the error text in
            ``message``).
        """
        if not self.v_databases[p_database_index]['prompt_password']:
            return { 'timeout': False, 'message': ''}
        else:
            #Create tunnel if enabled
            if self.v_databases[p_database_index]['tunnel']['enabled']:
                v_create_tunnel = False
                if self.v_databases[p_database_index]['tunnel_object'] != None:
                    # A tunnel was registered before: recreate it when it is
                    # no longer active or its registry entry is gone.
                    try:
                        result = 0
                        v_tunnel_object = tunnels[self.v_databases[p_database_index]['database'].v_conn_id]
                        if not v_tunnel_object.is_active:
                            v_tunnel_object.stop()
                            v_create_tunnel = True
                    except Exception as exc:
                        v_create_tunnel = True
                        None

                if self.v_databases[p_database_index]['tunnel_object'] == None or v_create_tunnel:
                    try:
                        if self.v_databases[p_database_index]['tunnel']['key'].strip() != '':
                            # Key-based auth: the key text is written to a
                            # uniquely named temp file for sshtunnel.
                            # NOTE(review): this temp key file is never
                            # deleted afterwards - review.
                            v_file_name = '{0}'.format(str(time.time())).replace('.','_')
                            v_full_file_name = os.path.join(settings.TEMP_DIR, v_file_name)
                            with open(v_full_file_name,'w') as f:
                                f.write(self.v_databases[p_database_index]['tunnel']['key'])
                            server = SSHTunnelForwarder(
                                (self.v_databases[p_database_index]['tunnel']['server'], int(self.v_databases[p_database_index]['tunnel']['port'])),
                                ssh_username=self.v_databases[p_database_index]['tunnel']['user'],
                                ssh_private_key_password=self.v_databases[p_database_index]['tunnel']['password'],
                                ssh_pkey = v_full_file_name,
                                remote_bind_address=(self.v_databases[p_database_index]['database'].v_active_server, int(self.v_databases[p_database_index]['database'].v_active_port)),
                                logger=logger
                            )
                        else:
                            # Password-based auth.
                            server = SSHTunnelForwarder(
                                (self.v_databases[p_database_index]['tunnel']['server'], int(self.v_databases[p_database_index]['tunnel']['port'])),
                                ssh_username=self.v_databases[p_database_index]['tunnel']['user'],
                                ssh_password=self.v_databases[p_database_index]['tunnel']['password'],
                                remote_bind_address=(self.v_databases[p_database_index]['database'].v_active_server, int(self.v_databases[p_database_index]['database'].v_active_port)),
                                logger=logger
                            )
                        # NOTE(review): assigns the attribute rather than
                        # calling a method - confirm this is how sshtunnel
                        # expects keepalive to be configured.
                        server.set_keepalive = 120
                        server.start()

                        s = SessionStore(session_key=self.v_user_key)
                        # Register the live tunnel and point this database's
                        # connection at the locally forwarded port.
                        tunnels[self.v_databases[p_database_index]['database'].v_conn_id] = server

                        self.v_databases[p_database_index]['tunnel_object'] = str(server.local_bind_port)
                        self.v_databases[p_database_index]['database'].v_connection.v_host = '127.0.0.1'
                        self.v_databases[p_database_index]['database'].v_connection.v_port = server.local_bind_port

                        #GO OVER ALL TABS CONNECTION OBJECTS AND UPDATE HOST AND PORT FOR THIS CONN_ID
                        try:
                            for k in list(self.v_tab_connections.keys()):
                                if self.v_tab_connections[k].v_conn_id == p_database_index:
                                    self.v_tab_connections[k].v_connection.v_host = '127.0.0.1'
                                    self.v_tab_connections[k].v_connection.v_port = server.local_bind_port
                        except Exception:
                            None
                        s['omnidb_session'] = self
                        s.save()

                    except Exception as exc:
                        return { 'timeout': True, 'message': str(exc)}
            #Reached timeout, must request password
            if not self.v_databases[p_database_index]['prompt_timeout'] or datetime.now() > self.v_databases[p_database_index]['prompt_timeout'] + timedelta(0,custom_settings.PWD_TIMEOUT_TOTAL):
                #Try passwordless connection
                self.v_databases[p_database_index]['database'].v_connection.v_password = ''
                v_test = self.v_databases[p_database_index]['database'].TestConnection()

                if v_test=='Connection successful.':
                    # Passwordless works: persist the empty password and the
                    # refreshed timeout in the session, no prompt needed.
                    s = SessionStore(session_key=self.v_user_key)
                    s['omnidb_session'].v_databases[p_database_index]['prompt_timeout'] = datetime.now()
                    s['omnidb_session'].v_databases[p_database_index]['database'].v_connection.v_password = ''
                    s.save()
                    return { 'timeout': False, 'message': ''}
                else:
                    return { 'timeout': True, 'message': v_test}
            #Reached half way to timeout, update prompt_timeout
            if datetime.now() > self.v_databases[p_database_index]['prompt_timeout'] + timedelta(0,settings.PWD_TIMEOUT_REFRESH):
                s = SessionStore(session_key=self.v_user_key)
                s['omnidb_session'].v_databases[p_database_index]['prompt_timeout'] = datetime.now()
                s.save()
            return { 'timeout': False, 'message': ''}
Example #49
0
class Instance(object):
    """ Encapsulates the AWS EC2 instance to add additional functionality
    for running the MuMax3 simulations.
    """

    def __init__(self, aws_instance):
        self._instance = aws_instance
        # Active SSHTunnelForwarder while port forwarding, else None
        self._forward = None

    def start(self):
        """ Start the underlying EC2 instance and tag it as usable. """
        # boto builds request parameters by iterating instance_ids, so a
        # bare string would be split into single characters -- pass a list,
        # consistent with stop() and terminate()
        aws.start_instances(instance_ids=[self.id])
        self.add_ready_tags()

    def add_ready_tags(self):
        """ Tag the instance with the mucloud version so it can be found
        later by has_mumax().
        """
        self._instance.add_tag('mucloud', __version__)

    def stop(self):
        """ Stop (without terminating) the EC2 instance. """
        aws.stop_instances(instance_ids=[self.id])

    def terminate(self):
        """ Terminate the EC2 instance, deleting its volumes with it. """
        # Toggle on delete on termination (only the device names are needed)
        devices = ["%s=1" % dev for dev in
                   self._instance.block_device_mapping]
        self._instance.modify_attribute('BlockDeviceMapping', devices)
        aws.terminate_instances(instance_ids=[self.id])

    def is_up(self):
        """ Return True if the EC2 instance state is 'running'. """
        return self._instance.state == u'running'

    def is_ready(self):
        """ Return True if the instance is running and not simulating. """
        return self.state == u'ready'

    def is_simulating(self):
        """ Return True if the instance is currently running a simulation. """
        return self.state == u'simulating'

    def wait_for_boot(self, delay=10):
        """ Waits for an instance to boot up """
        log.info("Waiting for instance to boot...")
        while not self.is_up():
            sleep(delay)
            self._instance.update()
        # extra grace period after 'running' -- presumably so sshd has time
        # to come up; TODO confirm
        sleep(delay)

    @property
    def directory(self):
        """ Home directory of the configured user on the instance. """
        return "/home/%s" % config.get('EC2', 'User')

    def paths(self, local_input_file):
        """ Return a dict of all local and remote paths derived from the
        local .mx3 input file.
        """
        basename = os.path.basename(local_input_file)
        directory = self.directory  # reuse the property instead of rebuilding

        return {
            'local_input_file': local_input_file,
            'local_output_dir': local_input_file.replace(".mx3", ".out"),
            'input_file': "%s/simulations/%s" % (directory, basename),
            'output_dir': "%s/simulations/%s" % (
                directory,
                basename.replace(".mx3", ".out")
            ),
            'basename': basename,
            'log': "%s/log.txt" % directory,
            'finished': "%s/finished" % directory,
        }

    def connect(self):
        """ Connects to the instance through SSH and SFTP

        Returns an (ssh, sftp) pair; the caller is responsible for closing.
        """
        log.info("Making secure connection to instance %s..." % self.id)
        ssh = paramiko.SSHClient()
        ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        ssh.connect(
            self.ip,
            username=config.get('EC2', 'User'),
            key_filename=config.get('EC2', 'PrivateKeyFile')
        )
        sftp = ssh.open_sftp()
        return ssh, sftp

    def run(self, local_input_file, port=PORT, detach=False):
        """ Run the mumax input file on a ready instance """

        if not self.is_ready():
            raise Exception("The instance %s is not ready to be run" % repr(
                            self))

        try:
            ssh, sftp = self.connect()
        except Exception:  # narrow from bare except: let SystemExit etc. pass
            log.error("Could not connect to remote server")
            return

        try:

            # Determine file paths
            paths = self.paths(local_input_file)

            # Tags let reattach() recover the running simulation later
            self._instance.add_tags({
                'local_input_file': local_input_file,
                'port': port,
            })

            log.info("Transferring input file to instance:"
                     " %s" % paths['basename'])
            sftp.put(local_input_file, paths['input_file'])

            log.info("Starting port forwarding: http://127.0.0.1:%d" % port)
            self.port_forward(port)

            # Starting screen
            ssh.exec_command("screen -dmS %s" % SCREEN)
            sleep(0.5)

            cmd = "source ./run_mumax3 %s %s" % (port, paths['input_file'])
            log.info("Running %s on MuMax3" % paths['basename'])
            ssh.exec_command("screen -S %s -X stuff $'%s'\r" % (SCREEN, cmd))

        except KeyboardInterrupt:
            log.info("\n\nCanceling simulation on keyboard interrupt")
            self.clean(ssh, sftp)
            return

        if detach:
            log.info("Stopping port forwarding")
            self.stop_port_forward()
            log.info("Detaching from instance with simulation running")
            log.info("Reattach with: python mucloud.py "
                     "reattach %s" % self.id)
            return

        detach = self.wait_for_simulation(ssh, sftp)

        log.info(MUMAX_OUTPUT)
        log.info("Stopping port forwarding")
        self.stop_port_forward()

        if detach:
            return

        # Exit screen
        ssh.exec_command("screen -S %s -X stuff $'exit\r'" % SCREEN)

        self.clean(ssh, sftp)

        self.stop_or_terminate()

    def wait_for_simulation(self, ssh, sftp):
        """ Tail the remote log until the 'finished' marker file appears.

        Returns True when the user chooses to detach, False after an abort,
        otherwise falls through (None) when the simulation completes.
        """
        local_input_file = self.tags['local_input_file']
        paths = self.paths(local_input_file)

        try:
            log.info(MUMAX_OUTPUT)

            while not rexists(sftp, paths['log']):
                sleep(0.1)  # Wait for log

            f = sftp.open(paths['log'], 'r')
            while not rexists(sftp, paths['finished']):
                data = f.read()
                # SFTPFile.read() returns bytes; truthiness works on both
                # Python 2 str and Python 3 bytes (b"" != "" is always True
                # on Python 3, so the old comparison never skipped)
                if data:
                    # TODO: Incorporate with logging module
                    print(data.decode('utf8'), end='')  # end argument to prevent newline
                else:
                    sleep(0.1)  # avoid busy-waiting while the log is idle
            print(f.read().decode('utf8'), end='')

        except KeyboardInterrupt:
            log.info("\n\nCaught keyboard interrupt during simulation")
            answer = input("Detach, abort, or continue the "
                               "simulation? [Dac]: ")
            if len(answer) == 0 or answer.startswith(("D", "d")):
                log.info("Detaching from instance with simulation running")
                log.info("Reattach with: python mucloud.py"
                         " reattach %s" % self.id)
                return True
            elif answer.startswith(("A", "a")):
                self.halt(ssh, sftp)
                return False
            else:
                log.info("Continuing the simulation")
                return self.wait_for_simulation(ssh, sftp)

    def halt(self, ssh, sftp):
        """ Abort the running simulation by interrupting the screen. """
        log.info("Aborting the simulation")
        # Keyboard interrupt the screen
        ssh.exec_command("screen -S %s -X stuff $'\\003\r'" % SCREEN)

    def clean(self, ssh, sftp):
        """ Clean the instance when the simulation has been stopped
        """
        local_input_file = self.tags['local_input_file']
        paths = self.paths(local_input_file)

        if rexists(sftp, paths['output_dir']):
            log.info("Receiving output files from instance")
            if not os.path.isdir(paths['local_output_dir']):
                os.mkdir(paths['local_output_dir'])
            os.chdir(paths['local_output_dir'])
            sftp.chdir(paths['output_dir'])
            files = sftp.listdir()
            for f in tqdm(files):
                sftp.get(f, f)

            log.info("Removing simulation output from instance")
            ssh.exec_command("rm -r %s" % paths['output_dir'])

        if rexists(sftp, paths['input_file']):
            log.info("Removing input file from instance")
            sftp.remove(paths['input_file'])

        if rexists(sftp, paths['log']):
            log.info("Removing logs from instance")
            sftp.remove(paths['log'])

        if rexists(sftp, paths['finished']):
            sftp.remove(paths['finished'])

        ssh.close()

        # Remove tags so the instance reads as 'ready' again
        self._instance.remove_tags({
            'local_input_file': None,
            'port': None,
        })

    def stop_or_terminate(self):
        """ Interactively terminate, stop, or leave the instance running. """
        answer = input("Terminate the instance? [Yn]: ")
        if len(answer) == 0 or answer.startswith(("Y", "y")):
            log.info("Terminating instance")
            self.terminate()
        else:
            answer = input("Stop the instance? [Yn]: ")
            if len(answer) == 0 or answer.startswith(("Y", "y")):
                log.info("Stopping instance")
                self.stop()
            else:
                log.info("The instance has been left running")

    def reattach(self):
        """ Reconnect to a simulation previously detached from. """
        if 'local_input_file' in self.tags:
            local_input_file = self.tags['local_input_file']
            port = int(self.tags['port'])
            paths = self.paths(local_input_file)

            log.info("Reconnecting to running instance")

            try:
                ssh, sftp = self.connect()
            except Exception:  # narrow from bare except
                log.error("Could not connect to remote server")
                return

            if not rexists(sftp, paths['input_file']):
                log.info("The input file has not been uploaded correctly")
                return

            log.info("Starting port forwarding: http://127.0.0.1:%d" % port)
            self.port_forward(port)

            disconnect = self.wait_for_simulation(ssh, sftp)

            log.info(MUMAX_OUTPUT)
            log.info("Stopping port forwarding")
            self.stop_port_forward()

            if disconnect:
                return

            # Exit screen
            ssh.exec_command("screen -S %s -X stuff $'exit\r'" % SCREEN)

            self.clean(ssh, sftp)

            self.stop_or_terminate()
        else:
            log.info("Instance %s is not running a simulation" % self.id)

    def port_forward(self, port=PORT):
        """ Forward the given local port to the same port on the instance. """
        key = paramiko.RSAKey.from_private_key_file(
            config.get('EC2', 'PrivateKeyFile')
        )
        self._forward = SSHTunnelForwarder(
            ssh_address=(self.ip, 22),
            ssh_username=config.get('EC2', 'User'),
            ssh_private_key=key,
            # bind the requested port, not the module default: previously
            # the 'port' parameter was silently ignored
            remote_bind_address=('127.0.0.1', port),
            local_bind_address=('127.0.0.1', port)
        )
        self._forward.start()

    def stop_port_forward(self):
        """ Stop the ssh tunnel if one is active. """
        if self._forward is not None:
            self._forward.stop()
            self._forward = None

    @property
    def ip(self):
        """ Public IP address of the EC2 instance. """
        return self._instance.ip_address

    @property
    def id(self):
        """ EC2 instance id. """
        return self._instance.id

    @property
    def tags(self):
        """ EC2 tag dictionary for the instance. """
        return self._instance.tags

    @property
    def state(self):
        """ EC2 state, with 'running' refined to 'ready' or 'simulating'
        based on the presence of the local_input_file tag.
        """
        if self._instance.state == u'running':
            # Determine if its ready or simulating
            if 'local_input_file' in self.tags:
                return u'simulating'
            else:
                return u'ready'
        else:
            return self._instance.state

    @staticmethod
    def has_mumax(aws_instance):
        """ Return True if aws_instance is a live instance tagged by this
        version of mucloud.
        """
        return (
            'mucloud' in aws_instance.tags and
            aws_instance.tags['mucloud'] == str(__version__) and
            aws_instance.state != u'terminated'
        )

    @staticmethod
    def launch():
        """ Launch a new AWS instance """
        reservation = aws.run_instances(
            config.get('EC2', 'Image'),
            key_name=config.get('EC2', 'PrivateKeyName'),
            instance_type=config.get('EC2', 'InstanceType'),
            security_groups=config.get('EC2', 'SecurityGroups').split(',')
        )
        instance = Instance(reservation.instances[0])
        log.info("Creating a new instance %s from image %s" % (
            instance.id, config.get('EC2', 'Image')))
        sleep(1)
        instance.add_ready_tags()
        return instance

    def __repr__(self):
        return "<MuCloud Instance(id='%s')>" % self.id
Example #50
0
class DB(object):
    """
    Reporting queries against the VicNode database, reached through an ssh
    tunnel set up on construction.

    Read: https://colinnewell.wordpress.com/2016/01/21/hand-coding-sql-with-psycopg2-for-odoo/
    """
    _db_connection = None
    _db_cur = None
    _server = None

    def __init__(self, ):
        db_config = Configuration.get_vicnode_db()
        ssh_intermediate = Configuration.get_ssh_tunnel_info()

        self._server = SSHTunnelForwarder(
            ((ssh_intermediate['host']), (int(ssh_intermediate['port']))),
            ssh_password=ssh_intermediate['ssh_password'],
            ssh_username=(ssh_intermediate['username']),
            ssh_pkey=(ssh_intermediate['private_key_file']),
            remote_bind_address=(db_config['host'], 5432),
            allow_agent=False
        )
        self._server.start()
        try:
            # we are about to bind to a 'local' server by means of an ssh tunnel
            # ssh tunnel: which will be seen as a local server...
            # so replace the loaded config host
            db_config['host'] = 'localhost'
            db_config['port'] = self._server.local_bind_port
            self._db_connection = psycopg2.connect(**db_config)
            self._db_cur = self._db_connection.cursor(
                cursor_factory=psycopg2.extras.RealDictCursor)
            self.test_connection()
        except Exception:
            # don't leak the already-started tunnel if the database
            # connection (or its smoke test) fails
            self.close_connection()
            raise

    def __del__(self):
        self.close_connection()

    def close_connection(self):
        """Close the database connection and stop the ssh tunnel (no-op if
        already closed)."""
        if self._server:
            logging.info("Closing the VicNode DB connection")
            # if the connection was not established, _db_connection is still
            # None and must not be dereferenced
            if self._db_connection is not None:
                self._db_connection.close()
                self._db_connection = None
            logging.info("Stopping the ssh tunnel")
            self._server.stop()
            logging.info("The VicNode DB connection is closed")
            self._server = None

    @staticmethod
    def get_product_code(product):
        """Map a product name to its storage-product code tuple; unknown
        names fall through to all storage products."""
        products = settings.STORAGE_PRODUCT_CODES
        if product == COMPUTATIONAL:
            products = settings.COMPUTE_PRODUCT_CODES
        elif product == MARKET:
            products = settings.MARKET_PRODUCT_CODES
        elif product == VAULT:
            # NOTE(review): vault uses VAULT_MARKET_CODES while market uses
            # MARKET_PRODUCT_CODES -- confirm the setting name is intended
            products = settings.VAULT_MARKET_CODES
        return products

    @connection_required
    def test_connection(self):
        """Smoke-test the connection by running a trivial query."""
        self._db_cur.execute("SELECT * FROM applications_suborganization;")
        self._db_cur.fetchall()

    @connection_required
    def get_allocated(self, day_date):
        """
        :param self:
        :param day_date: inclusive upper bound for allocation creation date
        :return: total allocated size per product up to and including day_date
        """
        q_allocated = """
            SELECT
              sum(size),
              CASE
              WHEN storage_product_id IN %(compute)s
                THEN 'computational'
              WHEN storage_product_id IN %(market)s
                THEN 'market'
              ELSE 'vault' END AS product
            FROM applications_allocation
            WHERE storage_product_id IN %(all_types)s
                AND COALESCE(applications_allocation.creation_date, '2014-11-14' :: DATE) <
                      (%(day_date)s :: DATE + '1 day' :: INTERVAL)
            GROUP BY storage_product_id;
        """
        self._db_cur.execute(q_allocated, {
            'compute': settings.COMPUTE_PRODUCT_CODES,
            'market': settings.MARKET_PRODUCT_CODES,
            'all_types': settings.STORAGE_PRODUCT_CODES,
            'day_date': day_date
        })
        return self._db_cur.fetchall()

    @connection_required
    def get_allocated_by_faculty(self, day_date, product='all'):
        """
        :param product: product name filter (see get_product_code)
        :param self:
        :param day_date: inclusive upper bound for allocation creation date
        :return: allocated size per faculty up to and including day_date
        """
        products = self.get_product_code(product)
        q_allocated = """
            SELECT
              sum(size)          AS used,
              CASE
              WHEN institution_id != 2
                THEN 'external'
              WHEN applications_suborganization.id = 1
                THEN 'ABP'
              WHEN applications_suborganization.id = 2
                THEN 'FBE'
              WHEN applications_suborganization.id = 3
                THEN 'FoA'
              WHEN applications_suborganization.id = 4
                THEN 'MGSE'
              WHEN applications_suborganization.id = 5
                THEN 'MSE'
              WHEN applications_suborganization.id = 6
                THEN 'MLS'
              WHEN applications_suborganization.id = 7
                THEN 'MDHS'
              WHEN applications_suborganization.id = 8
                THEN 'FoS'
              WHEN applications_suborganization.id = 9
                THEN 'VAS'
              WHEN applications_suborganization.id = 10
                THEN 'VCAMCM'
              WHEN applications_suborganization.id = 11
                THEN 'services'
              ELSE 'unknown' END AS faculty
            FROM applications_allocation
              LEFT JOIN applications_request
                ON applications_allocation.application_id =
                   applications_request.id
              LEFT JOIN applications_suborganization
                ON applications_request.institution_faculty_id =
                   applications_suborganization.id
            WHERE storage_product_id IN %(products)s
                  AND COALESCE(applications_allocation.creation_date, '2014-11-14' :: DATE) <
                      (%(day_date)s :: DATE + '1 day' :: INTERVAL)
            GROUP BY faculty;
        """
        self._db_cur.execute(q_allocated, {
            'products': products,
            'day_date': day_date
        })
        return self._db_cur.fetchall()

    @connection_required
    def get_storage_used(self, day_date):
        """Return used capacity per product as of day_date (latest ingest
        record per collection/storage product)."""
        q_used = """
            SELECT
              sum(used_capacity),
              CASE
              WHEN storage_product_id IN %(compute)s
                THEN 'computational'
              WHEN storage_product_id IN %(market)s
                THEN 'market'
              ELSE 'vault' END AS product
            FROM applications_ingest t1
            WHERE storage_product_id IN %(all_types)s
                  -- and this is the last record
                  AND extraction_date =
                      (SELECT MAX(extraction_date)
                       FROM applications_ingest t2
                       WHERE t2.collection_id = t1.collection_id
                             AND t2.storage_product_id = t1.storage_product_id
                             AND t2.extraction_date <
                                (%(day_date)s :: DATE + '1 day' :: INTERVAL)
                      )
            GROUP BY product;
        """
        self._db_cur.execute(q_used, {
            'compute': settings.COMPUTE_PRODUCT_CODES,
            'market': settings.MARKET_PRODUCT_CODES,
            'all_types': settings.STORAGE_PRODUCT_CODES,
            'day_date': day_date
        })
        return self._db_cur.fetchall()

    @connection_required
    def get_used_by_faculty(self, day_date, product='all'):
        """Return used capacity per faculty as of day_date for the given
        product selection."""
        products = self.get_product_code(product)
        q_used = """
            SELECT
              sum(used_capacity),
              CASE
                WHEN suborg_id IS NULL
                  THEN 'external'
                WHEN suborg_id = 1
                  THEN 'ABP'
                WHEN suborg_id = 2
                  THEN 'FBE'
                WHEN suborg_id = 3
                  THEN 'FoA'
                WHEN suborg_id = 4
                  THEN 'MGSE'
                WHEN suborg_id = 5
                  THEN 'MSE'
                WHEN suborg_id = 6
                  THEN 'MLS'
                WHEN suborg_id = 7
                  THEN 'MDHS'
                WHEN suborg_id = 8
                  THEN 'FoS'
                WHEN suborg_id = 9
                  THEN 'VAS'
                WHEN suborg_id = 10
                  THEN 'VCAMCM'
                WHEN suborg_id = 11
                  THEN 'services'
                ELSE 'unknown' END AS faculty
            FROM applications_ingest ingest
              LEFT JOIN (
                      SELECT
                        request.id,
                        coalesce(suborganization.id, -1) AS suborg_id
                      FROM applications_request request
                        LEFT JOIN applications_suborganization suborganization
                          ON institution_faculty_id = suborganization.id
                      WHERE
                        request.institution_id = '2'
                      ORDER BY id
                    ) AS names ON names.id = ingest.collection_id
            WHERE storage_product_id IN %(products)s
              -- and this is the last record
              AND extraction_date =
                  (SELECT MAX(extraction_date)
                   FROM applications_ingest t2
                   WHERE t2.collection_id = ingest.collection_id
                         AND t2.storage_product_id = ingest.storage_product_id
                         AND
                         t2.extraction_date <
                            (%(day_date)s :: DATE + '1 day' :: INTERVAL)
                  )
            GROUP BY faculty;
        """
        self._db_cur.execute(q_used, {
            'products': products,
            'day_date': day_date
        })
        return self._db_cur.fetchall()

    @connection_required
    def get_headroom_unused(self, day_date):
        """Return unused headroom (allocated - used) per product as of
        day_date."""
        q_used = """
            SELECT
              sum(allocated_capacity - used_capacity) AS headroom,
              CASE
              WHEN storage_product_id IN %(compute)s
                THEN 'computational'
              WHEN storage_product_id IN %(market)s
                THEN 'market'
              ELSE 'vault' END AS product
            FROM applications_ingest AS t1
            WHERE storage_product_id IN %(all_types)s
                  -- and this is the last record
                  AND extraction_date =
                      (SELECT MAX(extraction_date)
                       FROM applications_ingest t2
                       WHERE t2.collection_id = t1.collection_id
                             AND t2.storage_product_id = t1.storage_product_id
                             AND t2.extraction_date <
                                (%(day_date)s :: DATE + '1 day' :: INTERVAL)
                      )
            GROUP BY storage_product_id;
        """
        self._db_cur.execute(q_used, {
            'compute': settings.COMPUTE_PRODUCT_CODES,
            'market': settings.MARKET_PRODUCT_CODES,
            'all_types': settings.STORAGE_PRODUCT_CODES,
            'day_date': day_date
        })
        return self._db_cur.fetchall()

    @connection_required
    def get_headroom_unused_by_faculty(self, day_date, product='all'):
        """Return unused headroom per faculty as of day_date for the given
        product selection."""
        products = self.get_product_code(product)
        q_used = """
            SELECT  sum(allocated_capacity - used_capacity) AS headroom,
                  CASE
                    WHEN suborg_id IS NULL
                      THEN 'external'
                    WHEN suborg_id = 1
                      THEN 'ABP'
                    WHEN suborg_id = 2
                      THEN 'FBE'
                    WHEN suborg_id = 3
                      THEN 'FoA'
                    WHEN suborg_id = 4
                      THEN 'MGSE'
                    WHEN suborg_id = 5
                      THEN 'MSE'
                    WHEN suborg_id = 6
                      THEN 'MLS'
                    WHEN suborg_id = 7
                      THEN 'MDHS'
                    WHEN suborg_id = 8
                      THEN 'FoS'
                    WHEN suborg_id = 9
                      THEN 'VAS'
                    WHEN suborg_id = 10
                      THEN 'VCAMCM'
                    WHEN suborg_id = 11
                      THEN 'services'
                    ELSE 'unknown' END AS faculty
            FROM applications_ingest ingest
            LEFT JOIN (
                      SELECT
                        request.id,
                        coalesce(suborganization.id, -1) AS suborg_id
                      FROM applications_request request
                        LEFT JOIN applications_suborganization suborganization
                          ON institution_faculty_id = suborganization.id
                      WHERE
                        request.institution_id = '2'
                      ORDER BY id
                    ) AS names ON names.id = ingest.collection_id
            WHERE storage_product_id IN %(products)s
                  -- and this is the last record
                  AND extraction_date =
                      (SELECT MAX(extraction_date)
                       FROM applications_ingest t2
                       WHERE t2.collection_id = ingest.collection_id
                             AND t2.storage_product_id = ingest.storage_product_id
                             AND t2.extraction_date <
                                        (%(day_date)s :: DATE + '1 day' :: INTERVAL)
                      )
            GROUP BY faculty;
            """
        self._db_cur.execute(q_used, {
            'products': products,
            'day_date': day_date
        })
        return self._db_cur.fetchall()

    @connection_required
    def get_storage_capacity(self):
        """Return the configured capacity (in the DB's units x 1000) per
        product."""
        query = """
            SELECT
              capacity * 1000   AS capacity,
              CASE
              WHEN id IN %(compute)s
                THEN 'computational'
              WHEN id IN %(market)s
                THEN 'market'
              ELSE 'vault' END AS product
            FROM applications_storageproduct
            WHERE id IN %(all_types)s;
        """
        self._db_cur.execute(query, {
            'compute': settings.COMPUTE_PRODUCT_CODES,
            'market': settings.MARKET_PRODUCT_CODES,
            'all_types': settings.STORAGE_PRODUCT_CODES
        })
        return self._db_cur.fetchall()

    @connection_required
    def get_storage_types(self):
        """
        :return:
            The set of storage types in the vicnode database
        """
        query = """
            SELECT
              value
            FROM applications_storageproduct
              LEFT JOIN labels_label
                ON labels_label.id =
                  applications_storageproduct.product_name_id;
        """
        self._db_cur.execute(query)
        # fold the single-column result rows into a set of distinct values
        return {row['value'] for row in self._db_cur.fetchall()}
Example #51
0
class CdhConfExtractor(object):
    """Collects CDH cluster configuration via the cdh-launcher machine.

    Talks to the CDH-Manager REST API (optionally through an SSH tunnel
    built with SSHTunnelForwarder) and over SSH (paramiko) to the launcher
    host in order to extract deployment settings, keytabs and client
    configurations.  Use as a context manager so the tunnel is opened and
    closed automatically.
    """

    def __init__(self, config):
        """Read connection parameters from the fetcher config dictionary."""
        self._logger = logging.getLogger(__name__)
        self._hostname = config['machines']['cdh-launcher']['hostname']
        self._hostport = config['machines']['cdh-launcher']['hostport']
        self._username = config['machines']['cdh-launcher']['username']
        key_path = config['machines']['cdh-launcher']['key_filename']
        self._key = os.path.expanduser(key_path)
        self._key_password = config['machines']['cdh-launcher']['key_password']
        self._is_openstack = config['openstack_env']
        self._is_kerberos = config['kerberos_used']
        self._cdh_manager_ip = config['machines']['cdh-manager']['ip']
        self._cdh_manager_user = config['machines']['cdh-manager']['user']
        self._cdh_manager_sshtunnel_required = config['machines']['cdh-manager']['sshtunnel_required']
        self._cdh_manager_password = config['machines']['cdh-manager']['password']

    def __enter__(self):
        """Open the connection to CDH-Manager (tunnelled if configured)."""
        extractor = self
        try:
            if self._cdh_manager_sshtunnel_required:
                self._logger.info('Creating tunnel to CDH-Manager.')
                extractor.create_tunnel_to_cdh_manager()
                extractor.start_cdh_manager_tunneling()
                self._logger.info('Tunnel to CDH-Manager has been created.')
            else:
                self._logger.info('Connection to CDH-Manager host without ssh tunnel.')
                self._local_bind_address = self.extract_cdh_manager_host()
                self._local_bind_port = 7180
            return extractor
        except Exception:
            # Grammar fixed from 'Cannot creating tunnel ...'; bare `raise`
            # preserves the original traceback (unlike `raise exc`).
            self._logger.error('Cannot create tunnel to CDH-Manager machine.')
            raise

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Tear the tunnel down when leaving the context."""
        try:
            if self._cdh_manager_sshtunnel_required:
                self.stop_cdh_manager_tunneling()
                self._logger.info('Tunelling to CDH-Manager stopped.')
        except Exception:
            self._logger.error('Cannot close tunnel to CDH-Manager machine.')
            raise

    # Cdh launcher methods
    def create_ssh_connection(self, hostname, username, key_filename, key_password):
        """Open a paramiko SSH connection to *hostname* and store it on self."""
        try:
            self._logger.info('Creating connection to remote host {0}.'.format(hostname))
            self.ssh_connection = paramiko.SSHClient()
            # The launcher is typically not in known_hosts; accept its key.
            self.ssh_connection.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            self.ssh_connection.connect(hostname, username=username, key_filename=key_filename, password=key_password)
            self._logger.info('Connection to host {0} established.'.format(hostname))
        except Exception:
            # Grammar fixed from 'Cannot creating connection ...'.
            self._logger.error('Cannot create connection to host {0} machine. Check your settings '
                               'in fetcher_config.yml file.'.format(hostname))
            raise

    def close_ssh_connection(self):
        """Close the SSH connection opened by create_ssh_connection()."""
        try:
            self.ssh_connection.close()
            self._logger.info('Connection to remote host closed.')
        except Exception:
            self._logger.error('Cannot close connection to the remote host.')
            raise

    def ssh_call_command(self, command, subcommands=None):
        """Execute *command* remotely, feeding optional *subcommands* to stdin.

        Returns the remote stdout contents (falling back to stderr when
        stdout is unavailable).
        """
        self._logger.info('Calling remote command: "{0}" with subcommands "{1}"'.format(command, subcommands))
        ssh_in, ssh_out, ssh_err = self.ssh_connection.exec_command(command, get_pty=True)
        # Identity comparison instead of `!= None` (PEP 8).
        if subcommands is not None:
            for subcommand in subcommands:
                ssh_in.write(subcommand + '\n')
                ssh_in.flush()
        return ssh_out.read() if ssh_out is not None else ssh_err.read()

    def extract_cdh_manager_host(self):
        """Return the CDH-Manager address, reading the ansible inventory if unknown."""
        self._logger.info('Extracting CDH-Manager address.')
        if self._cdh_manager_ip is None:
            self.create_ssh_connection(self._hostname, self._username, self._key, self._key_password)
            if self._is_openstack:
                ansible_ini = self.ssh_call_command('cat ansible-cdh/platform-ansible/inventory/cdh')
            else:
                ansible_ini = self.ssh_call_command('cat ansible-cdh/inventory/cdh')
            self._cdh_manager_ip = self._get_host_ip('cdh-manager', ansible_ini)
            self.close_ssh_connection()
        self._logger.info('CDH-Manager adress extracted: {}'.format(self._cdh_manager_ip))
        return self._cdh_manager_ip

    # Cdh manager methods
    def create_tunnel_to_cdh_manager(self, local_bind_address='localhost', local_bind_port=7180, remote_bind_port=7180):
        """Prepare (but do not start) the SSH tunnel to the CDH-Manager."""
        self._local_bind_address = local_bind_address
        self._local_bind_port = local_bind_port
        self.cdh_manager_tunnel = SSHTunnelForwarder(
            (self._hostname, self._hostport),
            ssh_username=self._username,
            local_bind_address=(local_bind_address, local_bind_port),
            remote_bind_address=(self.extract_cdh_manager_host(), remote_bind_port),
            ssh_private_key_password=self._key_password,
            ssh_private_key=self._key
        )

    def start_cdh_manager_tunneling(self):
        """Start the prepared tunnel; failures are logged, not raised."""
        try:
            self.cdh_manager_tunnel.start()
        except Exception as e:
            # str(e) instead of e.message: the latter is Python 2 only.
            self._logger.error('Cannot start tunnel: ' + str(e))

    def stop_cdh_manager_tunneling(self):
        """Stop the tunnel; failures are logged, not raised."""
        try:
            self.cdh_manager_tunnel.stop()
        except Exception as e:
            self._logger.error('Cannot stop tunnel: ' + str(e))

    def extract_cdh_manager_details(self, settings):
        """Return the host entry whose hostname contains 'cdh-manager'."""
        for host in settings['hosts']:
            if 'cdh-manager' in host['hostname']:
                return host

    def extract_nodes_info(self, name, settings):
        """Return every host entry whose hostname contains *name*."""
        nodes = []
        for host in settings['hosts']:
            if name in host['hostname']:
                nodes.append(host)
        return nodes

    def extract_service_namenode(self, service_name, role_name, settings):
        """Resolve the hostname running *role_name* of *service_name*."""
        hdfs_service = self._find_item_by_attr_value(service_name, 'name', settings['clusters'][0]['services'])
        hdfs_namenode = self._find_item_by_attr_value(role_name, 'name', hdfs_service['roles'])
        host_id = hdfs_namenode['hostRef']['hostId']
        return self._find_item_by_attr_value(host_id, 'hostId', settings['hosts'])['hostname']

    def get_client_config_for_service(self, service_name):
        """Download a service's client config zip and return it base64-encoded."""
        result = requests.get('http://{0}:{1}/api/v10/clusters/CDH-cluster/services/{2}/clientConfig'.format(self._local_bind_address, self._local_bind_port, service_name))
        return base64.standard_b64encode(result.content)

    def generate_keytab(self, principal_name):
        """Generate a keytab for *principal_name* on the CDH-Manager host.

        Uploads the generation script via the launcher, runs it remotely and
        returns the keytab hash extracted from the command output.
        """
        def transfer_keytab_script(target):
            # Write the script to a temp file locally, then sftp it across.
            sftp = self.ssh_connection.open_sftp()
            with tempfile.NamedTemporaryFile('w') as f:
                f.file.write(GENERATE_KEYTAB_SCRIPT)
                f.file.close()
                sftp.put(f.name, target)
        self._logger.info('Generating keytab for {} principal.'.format(principal_name))
        self.create_ssh_connection(self._hostname, self._username, self._key, self._key_password)

        transfer_keytab_script('/tmp/generate_keytab_script.sh')

        self.ssh_call_command('scp -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no /tmp/generate_keytab_script.sh {0}:/tmp/'.format(self._cdh_manager_ip))
        self.ssh_call_command('ssh -t {0} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "chmod 700 /tmp/generate_keytab_script.sh"'.format(self._cdh_manager_ip))
        keytab_hash = self.ssh_call_command('ssh -t {0} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "/tmp/generate_keytab_script.sh {1}"'
                                            .format(self._cdh_manager_ip, principal_name))
        self.close_ssh_connection()
        lines = keytab_hash.splitlines()
        self._logger.info('Keytab for {} principal has been generated.'.format(principal_name))
        # Strip the pty banner/prompt lines surrounding the payload.
        return '{}'.format(''.join(lines[2:-2]))

    def generate_base64_for_file(self, file_path, hostname):
        """Return the base64 encoding of a remote file on *hostname*."""
        self._logger.info('Generating base64 for {} file.'.format(file_path))
        self.create_ssh_connection(self._hostname, self._username, self._key, self._key_password)
        base64_file_hash = self.ssh_call_command('ssh -t {0} -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no "base64 {1}"'.format(hostname, file_path))
        self.close_ssh_connection()
        lines = base64_file_hash.splitlines()
        self._logger.info('Base64 hash for {0} file on {1} machine has been generated.'.format(file_path, hostname))
        # Strip the pty banner/prompt lines surrounding the payload.
        return '{}'.format(''.join(lines[2:-2]))

    def get_all_deployments_conf(self):
        """Assemble the full deployment configuration dictionary.

        Queries the CDH-Manager deployment API, then fills in service
        addresses, ports and (when kerberos is enabled) keytabs.
        """
        result = {}
        deployments_settings = json.loads(requests.get('http://' + self._local_bind_address + ':'
                                                       + str(self._local_bind_port) + '/api/v10/cm/deployment',
                                                    auth=(self._cdh_manager_user, self._cdh_manager_password)).content)
        result['cloudera_manager_internal_host'] = self.extract_cdh_manager_details(deployments_settings)['hostname']

        helper = CdhApiHelper(ApiResource(self._local_bind_address, username=self._cdh_manager_user, password=self._cdh_manager_password, version=9))
        if self._is_kerberos:
            result['kerberos_host'] = result['cloudera_manager_internal_host']
            result['hdfs_keytab_value'] = self.generate_keytab('hdfs')
            result['auth_gateway_keytab_value'] = self.generate_keytab('authgateway/sys')
            result['hgm_keytab_value'] = self.generate_keytab('hgm/sys')
            result['vcap_keytab_value'] = self.generate_keytab('vcap')
            result['krb5_base64'] = self.generate_base64_for_file('/etc/krb5.conf', self._cdh_manager_ip)
            result['kerberos_cacert'] = self.generate_base64_for_file('/var/krb5kdc/cacert.pem', self._cdh_manager_ip)

            sentry_service = helper.get_service_from_cdh('SENTRY')
            result['sentry_port'] = helper.get_entry(sentry_service, 'sentry_service_server_rpc_port')
            result['sentry_address'] = helper.get_host(sentry_service)
            result['sentry_keytab_value'] = self.generate_keytab('hive/sys')
            result['auth_gateway_profile'] = 'cloud,sentry-auth-gateway,zookeeper-auth-gateway,hdfs-auth-gateway,kerberos-hgm-auth-gateway,yarn-auth-gateway,hbase-auth-gateway'
            hgm_service = helper.get_service_from_cdh('HADOOPGROUPSMAPPING')
            result['hgm_adress'] = 'http://' + helper.get_host(hgm_service, 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER') + ':' \
                                   + helper.get_entry_from_group(hgm_service, 'rest_port', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
            result['hgm_password'] = helper.get_entry_from_group(hgm_service, 'basic_auth_pass', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
            result['hgm_username'] = helper.get_entry_from_group(hgm_service, 'basic_auth_user', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
        else:
            # Without kerberos the keytab-related fields are blanked out.
            result['sentry_port'] = ''
            result['sentry_address'] = ''
            result['sentry_keytab_value'] = ''
            result['hdfs_keytab_value'] = ''
            result['auth_gateway_keytab_value'] = ''
            result['vcap_keytab_value'] = ''
            result['hgm_keytab_value'] = ''
            result['krb5_base64'] = ''
            result['kerberos_cacert'] = ''
            result['auth_gateway_profile'] = 'cloud,zookeeper-auth-gateway,hdfs-auth-gateway,https-hgm-auth-gateway,yarn-auth-gateway,hbase-auth-gateway'
            hgm_service = helper.get_service_from_cdh('HADOOPGROUPSMAPPING')
            result['hgm_adress'] = 'https://' + helper.get_host(hgm_service, 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER') + ':'\
                                   + helper.get_entry_from_group(hgm_service, 'rest_port', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
            result['hgm_password'] = helper.get_entry_from_group(hgm_service, 'basic_auth_pass', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')
            result['hgm_username'] = helper.get_entry_from_group(hgm_service, 'basic_auth_user', 'HADOOPGROUPSMAPPING-HADOOPGROUPSMAPPING_RESTSERVER-BASE')

        result['cloudera_address'] = result['cloudera_manager_internal_host']
        result['cloudera_port'] = 7180
        result['cloudera_user'] = self._cdh_manager_user
        result['cloudera_password'] = self._cdh_manager_password

        oozie_server = helper.get_service_from_cdh('OOZIE')
        result['oozie_server'] = 'http://' + helper.get_host(oozie_server) + ':' + helper.get_entry(oozie_server, 'oozie_http_port')

        yarn = helper.get_service_from_cdh('YARN')
        result['job_tracker'] = helper.get_host(yarn) + ':' + helper.get_entry(yarn, 'yarn_resourcemanager_address')

        sqoop_client = helper.get_service_from_cdh('SQOOP_CLIENT')
        result['metastore'] = self._get_property_value(helper.get_entry(sqoop_client, 'sqoop-conf/sqoop-site.xml_client_config_safety_valve'), 'sqoop.metastore.client.autoconnect.url')

        master_nodes = self.extract_nodes_info('cdh-master', deployments_settings)
        for i, node in enumerate(master_nodes):
            result['master_node_host_' + str(i+1)] = node['hostname']
        result['namenode_internal_host'] = self.extract_service_namenode('HDFS', 'HDFS-NAMENODE', deployments_settings)
        result['hue_node'] = self.extract_service_namenode('HUE', 'HUE-HUE_SERVER', deployments_settings)
        result['h2o_node'] = self.extract_nodes_info('cdh-worker-0', deployments_settings)[0]['hostname']
        result['arcadia_node'] = self.extract_nodes_info('cdh-worker-0', deployments_settings)[0]['hostname']
        result['import_hadoop_conf_hdfs'] = self.get_client_config_for_service('HDFS')
        result['import_hadoop_conf_hbase'] = self.get_client_config_for_service('HBASE')
        result['import_hadoop_conf_yarn'] = self.get_client_config_for_service('YARN')
        result['import_hadoop_conf_hive'] = self.get_client_config_for_service('HIVE')

        return result

    # helpful methods

    def _get_property_value(self, config, key):
        """Look up *key* in a Hadoop-style <property> XML fragment.

        Returns the matching <value> text, or None when the key is absent.
        """
        properties = ET.fromstring('<properties>' + config + '</properties>')
        # `prop` instead of `property` to avoid shadowing the builtin.
        for prop in properties:
            if prop.find('name').text == key:
                return prop.find('value').text

    def _find_item_by_attr_value(self, attr_value, attr_name, array_with_dicts):
        """Return the first dict whose *attr_name* equals *attr_value*.

        Raises StopIteration when no item matches.
        """
        return next(item for item in array_with_dicts if item[attr_name] == attr_value)

    def _get_host_ip(self, host, ansible_ini):
        """Parse *host*'s ip from an ansible inventory: the line following
        the '[host]' section header, taking the value of its second
        key=value token."""
        host_info = []
        for line in ansible_ini.split('\n'):
            if host in line:
                host_info.append(line.strip())
        return host_info[host_info.index('[' + host + ']') + 1].split(' ')[1].split('=')[1]

    def _load_config_yaml(self, filename):
        """Parse *filename* as YAML.

        safe_load avoids arbitrary Python object construction that plain
        yaml.load permits.
        """
        with open(filename, 'r') as stream:
            return yaml.safe_load(stream)
Example #52
0
class Device(object):
    """Represents a (jailbroken) iOS device reached over SSH and the Needle agent.

    Manages the SSH channel (paramiko), the agent channel, and the port
    forwarding required when the device is connected over USB (TCPRelay)
    or for the Frida server.

    NOTE(review): this code is Python 2 specific in places (`e.message`,
    `dict.iteritems`, eager `map`) -- see inline notes before porting.
    """
    # ==================================================================================================================
    # FRAMEWORK ATTRIBUTES
    # ==================================================================================================================
    # Connection Parameters
    _ip, _port, _agent_port, _username, _password = '', '', '', '', ''
    _tools_local = None
    # Port Forwarding
    _frida_server = None
    _port_forward_ssh, _port_forward_agent = None, None
    # App specific
    _applist, _ios_version = None, None
    # Reference to External Objects
    ssh, agent = None, None
    app, installer = None, None
    local_op, remote_op = None, None
    printer = None
    # On-Device Paths
    TEMP_FOLDER = Constants.DEVICE_PATH_TEMP_FOLDER
    DEVICE_TOOLS = Constants.DEVICE_TOOLS

    # ==================================================================================================================
    # INIT
    # ==================================================================================================================
    def __init__(self, ip, port, agent_port, username, password, pub_key_auth, tools):
        """Store connection parameters and build the helper objects.

        :param ip: device address ('127.0.0.1'/'localhost' means SSH over USB)
        :param port: SSH port (must differ from 22 when using USB forwarding)
        :param agent_port: port the Needle agent listens on
        :param pub_key_auth: truthy to allow key/agent based SSH auth
        :param tools: mapping of local tool names to paths (e.g. 'TCPRELAY')
        """
        # Setup params
        self._ip = ip
        self._port = port
        self._agent_port = agent_port
        self._username = username
        self._password = password
        self._pub_key_auth = bool(pub_key_auth)
        self._tools_local = tools
        # Init related objects
        self.app = App(self)
        self.local_op = LocalOperations()
        self.remote_op = RemoteOperations(self)
        self.printer = Printer()
        self.agent = NeedleAgent(self)

    # ==================================================================================================================
    # UTILS - USB
    # ==================================================================================================================
    def _portforward_usb_start(self):
        """Setup USB port forwarding with TCPRelay."""
        # Check if the user chose a valid port
        if str(self._port) == '22':
            raise Exception('Chosen port must be different from 22 in order to use USB over SSH')
        # Setup the forwarding
        self.printer.debug('Setting up USB port forwarding on port %s' % self._port)
        cmd = '{app} -t 22:{port}'.format(app=self._tools_local['TCPRELAY'], port=self._port)
        self._port_forward_ssh = self.local_op.command_subproc_start(cmd)

    def _portforward_usb_stop(self):
        """Stop USB port forwarding."""
        self.printer.debug('Stopping USB port forwarding')
        self.local_op.command_subproc_stop(self._port_forward_ssh)

    # ==================================================================================================================
    # UTILS - SSH
    # ==================================================================================================================
    def _connect_ssh(self):
        """Open a new SSH connection using Paramiko."""
        try:
            self.printer.verbose("[SSH] Connecting ({}:{})...".format(self._ip, self._port))
            ssh = paramiko.SSHClient()
            ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
            ssh.connect(self._ip, port=self._port, username=self._username, password=self._password,
                        allow_agent=self._pub_key_auth, look_for_keys=self._pub_key_auth)
            self.printer.notify("[SSH] Connected ({}:{})".format(self._ip, self._port))
            return ssh
        # NOTE(review): e.message is Python 2 only -- under Python 3 these
        # handlers would raise AttributeError; use str(e) instead.
        except paramiko.AuthenticationException as e:
            raise Exception('Authentication failed when connecting to %s. %s: %s' % (self._ip, type(e).__name__, e.message))
        except paramiko.SSHException as e:
            raise Exception('Connection dropped. Please check your connection with the device, '
                            'and reload the module. %s: %s' % (type(e).__name__, e.message))
        except Exception as e:
            raise Exception('Could not open a connection to %s. %s - %s' % (self._ip, type(e).__name__, e.message))

    def _disconnect_ssh(self):
        """Close the SSH connection, if available."""
        self.printer.verbose("[SSH] Disconnecting...")
        if self.ssh:
            self.ssh.close()

    @Retry()
    def _exec_command_ssh(self, cmd, internal):
        """Execute a shell command on the device, then parse/print output.

        :param cmd: shell command to run remotely
        :param internal: when truthy, suppress output and raise on stderr
        :return: (stdout_lines, stderr_lines)
        """
        def hotfix_67():
            # TODO: replace with a more long-term fix
            # Wait (max 30s) for remote EOF so readlines() below can't hang.
            import time
            timeout = 30
            endtime = time.time() + timeout
            while not stdout.channel.eof_received:
                time.sleep(1)
                if time.time() > endtime:
                    stdout.channel.close()
                    break

        # Paramiko Exec Command
        stdin, stdout, stderr = self.ssh.exec_command(cmd)
        hotfix_67()
        # Parse STDOUT/ERR
        out = stdout.readlines()
        err = stderr.readlines()
        if internal:
            # For processing, don't display output
            if err:
                # Show error and abort run
                err_str = ''.join(err)
                raise Exception(err_str)
        else:
            # Display output
            # NOTE(review): relies on Python 2's eager map() for the side
            # effect; on Python 3 map() is lazy and nothing would print.
            if out: map(lambda x: print('\t%s' % x, end=''), out)
            if err: map(lambda x: print('\t%s%s%s' % (Colors.R, x, Colors.N), end=''), err)
        return out, err

    # ==================================================================================================================
    # UTILS - AGENT
    # ==================================================================================================================
    def _portforward_agent_start(self):
        """Setup local port forward to enable communication with the Needle server running on the device."""
        self.printer.debug('{} Setting up port forwarding on port {}'.format(Constants.AGENT_TAG, self._agent_port))
        localhost = '127.0.0.1'
        self._port_forward_agent = SSHTunnelForwarder(
            (self._ip, int(self._port)),
            ssh_username=self._username,
            ssh_password=self._password,
            local_bind_address=(localhost, self._agent_port),
            remote_bind_address=(localhost, self._agent_port),
        )
        self._port_forward_agent.start()

    def _portforward_agent_stop(self):
        """Stop local port forwarding for Needle server."""
        self.printer.debug('{} Stopping port forwarding'.format(Constants.AGENT_TAG))
        if self._port_forward_agent:
            self._port_forward_agent.stop()

    def _connect_agent(self):
        """Connect to the Needle agent and verify the channel works."""
        self.agent.connect()
        # Ensure the tunnel has been established (especially after auto-reconnecting)
        self.agent.exec_command_agent(Constants.AGENT_CMD_OS_VERSION)

    def _disconnect_agent(self):
        """Disconnect from the Needle agent."""
        self.agent.disconnect()

    # ==================================================================================================================
    # FRIDA PORT FORWARDING
    # ==================================================================================================================
    def _portforward_frida_start(self):
        """Setup local port forward to enable communication with the Frida server running on the device."""
        self.printer.debug('{} Setting up port forwarding on port {}'.format("[FRIDA]", Constants.FRIDA_PORT))
        localhost = '127.0.0.1'
        self._frida_server = SSHTunnelForwarder(
            (self._ip, int(self._port)),
            ssh_username=self._username,
            ssh_password=self._password,
            local_bind_address=(localhost, Constants.FRIDA_PORT),
            remote_bind_address=(localhost, Constants.FRIDA_PORT),
        )
        self._frida_server.start()

    def _portforward_frida_stop(self):
        """Stop local port forwarding for Frida server."""
        self.printer.debug('{} Stopping port forwarding'.format("FRIDA"))
        if self._frida_server:
            self._frida_server.stop()

    # ==================================================================================================================
    # UTILS - OS
    # ==================================================================================================================
    def _list_apps(self, hide_system_apps=False):
        """Retrieve all the 3rd party apps installed on the device."""
        agent_list = self.agent.exec_command_agent(Constants.AGENT_CMD_LIST_APPS)
        self._applist = Utils.string_to_json(agent_list)
        if hide_system_apps:
            # NOTE(review): iteritems is Python 2 only.
            self._applist = {k: v for k, v in self._applist.iteritems() if v["BundleType"] == "User"}

    def select_target_app(self):
        """List all apps installed and let the user choose which one to target."""
        # Show menu to user
        self.printer.notify('Apps found:')
        app_name = choose_from_list(self._applist.keys())
        return app_name

    # ==================================================================================================================
    # EXPOSED COMMANDS
    # ==================================================================================================================
    def is_usb(self):
        """Returns true if using SSH over USB."""
        return self._ip == '127.0.0.1' or self._ip == 'localhost'

    def connect(self):
        """Connect to the device (both SSH and AGENT)."""
        # Using USB, setup port forwarding first
        if self.is_usb():
            self._portforward_usb_start()
            self._portforward_agent_start()
        # Setup channels
        self._connect_agent()
        self.ssh = self._connect_ssh()

    def disconnect(self):
        """Disconnect from the device (both SSH and AGENT)."""
        # Close channels
        self._disconnect_ssh()
        self._disconnect_agent()
        # Using USB, stop port forwarding first
        if self._port_forward_ssh:
            self._portforward_usb_stop()
            self._portforward_agent_stop()

    def setup(self):
        """Create temp folder, and check if all tools are available"""
        # Setup temp folder
        self.printer.debug("Creating temp folder: %s" % self.TEMP_FOLDER)
        self.remote_op.dir_create(self.TEMP_FOLDER)
        # Detect OS version
        if not self._ios_version:
            self._ios_version = self.agent.exec_command_agent(Constants.AGENT_CMD_OS_VERSION).strip()

    def cleanup(self):
        """Remove temp folder from device."""
        self.printer.debug("Cleaning up remote temp folder: %s" % self.TEMP_FOLDER)
        self.remote_op.dir_delete(self.TEMP_FOLDER)

    def shell(self):
        """Spawn a system shell on the device."""
        cmd = 'sshpass -p "{password}" ssh {hostverification} -p {port} {username}@{ip}'.format(password=self._password,
                                                                                                hostverification=Constants.DISABLE_HOST_VERIFICATION,
                                                                                                port=self._port,
                                                                                                username=self._username,
                                                                                                ip=self._ip)
        self.local_op.command_interactive(cmd)

    def pull(self, src, dst):
        """Pull a file from the device."""
        self.printer.info("Pulling: %s -> %s" % (src, dst))
        self.remote_op.download(src, dst)

    def push(self, src, dst):
        """Push a file on the device."""
        self.printer.info("Pushing: %s -> %s" % (src, dst))
        self.remote_op.upload(src, dst)
Example #53
0
class DB:
    """vicnode reporting database reached through an SSH tunnel.

    On construction the tunnel is opened, a psycopg2 connection is bound to
    its local end, and a RealDictCursor is created for the query helpers.
    """
    # Class-level defaults so close_connection() is safe even when
    # __init__ failed part way through.
    _db_connection = None
    _db_cur = None

    def __init__(self):
        ssh_tunnel = Credentials.ssh_tunnel
        # Work on a copy: host/port are rewritten below and the shared
        # Credentials.vicnode_db module state must not be mutated.
        db_config = dict(Credentials.vicnode_db)
        self._server = SSHTunnelForwarder(
            ((ssh_tunnel['host']), (int(ssh_tunnel['port']))),
            ssh_password=ssh_tunnel['ssh_password'],
            ssh_username=(ssh_tunnel['username']),
            ssh_pkey=(ssh_tunnel['private_key_file']),
            remote_bind_address=(db_config['host'], 5432),
            allow_agent=False
        )
        self._server.start()
        # we are about to bind to a 'local' server by means of an ssh tunnel
        # ssh tunnel: which will be seen as a local server...
        # so replace the loaded config host
        db_config['host'] = 'localhost'
        db_config['port'] = self._server.local_bind_port
        self._db_connection = psycopg2.connect(**db_config)
        self._db_cur = self._db_connection.cursor(
            cursor_factory=psycopg2.extras.RealDictCursor)
        self.test_connection()

    def __del__(self):
        # Best effort cleanup; close_connection() is idempotent.
        self.close_connection()

    def close_connection(self):
        """Close the DB connection and stop the tunnel (idempotent)."""
        if self._server:
            # Guard against a partially built instance where the tunnel
            # started but psycopg2.connect() failed.
            if self._db_connection is not None:
                self._db_connection.close()
                self._db_connection = None
            self._server.stop()
            self._server = None

    def test_connection(self):
        """Run a trivial query so a broken connection fails fast."""
        self._db_cur.execute("SELECT * FROM applications_suborganization;")
        self._db_cur.fetchall()  # result discarded: only reachability matters

    def get_contacts(self):
        """
        Returns: every project name and it's chief investigator (can be more
        than one?).
        """
        query = """
            SELECT
              collection.id,
              contacts_contact.email_address,
              contacts_contact.business_email_address
            FROM applications_project AS collection
              LEFT JOIN applications_custodian
                ON collection_id = collection.id
              LEFT JOIN contacts_contact
                ON applications_custodian.person_id = contacts_contact.id
            WHERE applications_custodian.role_id = 293
            ORDER BY id;
        """
        self._db_cur.execute(query)
        return self._db_cur.fetchall()

    def get_sub_organizations(self):
        """
        Returns: all the projects
        """
        query = """
            SELECT
              applications_request.project_id,
              contacts_organisation.short_name,
            CASE
              WHEN applications_suborganization.id = 1
                THEN 'ABP'
              WHEN applications_suborganization.id = 2
                THEN 'FBE'
              WHEN applications_suborganization.id = 3
                THEN 'FoA'
              WHEN applications_suborganization.id = 4
                THEN 'MGSE'
              WHEN applications_suborganization.id = 5
                THEN 'MSE'
              WHEN applications_suborganization.id = 6
                THEN 'MLS'
              WHEN applications_suborganization.id = 7
                THEN 'MDHS'
              WHEN applications_suborganization.id = 8
                THEN 'FoS'
              WHEN applications_suborganization.id = 9
                THEN 'VAS'
              WHEN applications_suborganization.id = 10
                THEN 'VCAMCM'
              WHEN applications_suborganization.id = 11
                THEN 'External'
              ELSE 'Unknown' END AS faculty
            FROM applications_request
              LEFT JOIN applications_suborganization
                ON applications_request.institution_faculty_id =
                   applications_suborganization.id
              LEFT JOIN contacts_organisation
                ON applications_request.institution_id = contacts_organisation.id
            WHERE -- institution_id = 2
              -- don't worry about this, as only UoM projects are assigned
              project_id NOTNULL
              AND name NOTNULL
            GROUP BY short_name, project_id, faculty
            ORDER BY project_id;
        """
        self._db_cur.execute(query)
        return self._db_cur.fetchall()

    def get_used(self, end_date):
        """
        Returns: the used capacity summed per collection, taken from the
        latest ingest record before *end_date*, for non-zero totals only.

        :param end_date: a date/datetime; only ingests strictly before this
            day are considered.
        """
        # Storage product ids (1, 4, 10, 23, 24) are hard-coded here --
        # presumably the tracked products; confirm against settings codes.
        query = """
            SELECT
              collection_id,
              sum
            FROM (
                   SELECT
                     collection_id,
                     SUM(used_capacity) AS sum
                   FROM applications_ingest
                   WHERE storage_product_id IN (1, 4, 10, 23, 24)
                         -- and this is the last record
                         AND extraction_date =
                             (SELECT MAX(extraction_date)
                              FROM applications_ingest t2
                              WHERE t2.collection_id = applications_ingest.collection_id
                                    AND t2.storage_product_id =
                                        applications_ingest.storage_product_id
                                    AND t2.extraction_date < (DATE %(end)s))
                   GROUP BY collection_id
                   ORDER BY collection_id) totals
            WHERE sum NOTNULL AND sum > 0;
        """
        self._db_cur.execute(query, {
            'end': end_date.strftime("%Y-%m-%d")
        })
        return self._db_cur.fetchall()
Example #54
0
class ServerManager(object):
    """
    class ServerManager

    This class contains the information about the given server.
    And, acts as connection manager for that particular session.
    """

    def __init__(self, server):
        # Map of connection key ('CONN:<id>' or 'DB:<name>') -> Connection.
        self.connections = dict()
        # SSH tunnel state; local_bind_port is filled in by
        # create_ssh_tunnel() once the tunnel is up.
        self.local_bind_host = '127.0.0.1'
        self.local_bind_port = None
        self.tunnel_object = None
        self.tunnel_created = False

        self.update(server)

    def update(self, server):
        """Refresh cached attributes from the Server model and release any
        existing connections (they are stale once the definition changes)."""
        assert (server is not None)
        assert (isinstance(server, Server))

        # Version/type info is re-discovered on the next connect.
        self.ver = None
        self.sversion = None
        self.server_type = None
        self.server_cls = None
        self.password = None
        self.tunnel_password = None

        self.sid = server.id
        self.host = server.host
        self.hostaddr = server.hostaddr
        self.port = server.port
        self.db = server.maintenance_db
        self.did = None
        self.user = server.username
        self.password = server.password
        self.role = server.role
        self.ssl_mode = server.ssl_mode
        self.pinged = datetime.datetime.now()
        self.db_info = dict()
        self.server_types = None
        self.db_res = server.db_res
        self.passfile = server.passfile
        self.sslcert = server.sslcert
        self.sslkey = server.sslkey
        self.sslrootcert = server.sslrootcert
        self.sslcrl = server.sslcrl
        self.sslcompression = True if server.sslcompression else False
        self.service = server.service
        self.connect_timeout = \
            server.connect_timeout if server.connect_timeout else 0
        if config.SUPPORT_SSH_TUNNEL:
            self.use_ssh_tunnel = server.use_ssh_tunnel
            self.tunnel_host = server.tunnel_host
            self.tunnel_port = server.tunnel_port
            self.tunnel_username = server.tunnel_username
            self.tunnel_authentication = server.tunnel_authentication
            self.tunnel_identity_file = server.tunnel_identity_file
            self.tunnel_password = server.tunnel_password
        else:
            # Tunnelling disabled at build/config time: force it off.
            self.use_ssh_tunnel = 0
            self.tunnel_host = None
            self.tunnel_port = 22
            self.tunnel_username = None
            self.tunnel_authentication = None
            self.tunnel_identity_file = None
            self.tunnel_password = None

        # Drop every live connection before forgetting about them.
        for con in self.connections:
            self.connections[con]._release()

        self.update_session()

        self.connections = dict()

    def as_dict(self):
        """
        Returns a dictionary object representing the server manager.
        """
        # Nothing worth persisting until we have connected at least once.
        if self.ver is None or len(self.connections) == 0:
            return None

        res = dict()
        res['sid'] = self.sid
        res['ver'] = self.ver
        res['sversion'] = self.sversion
        if hasattr(self, 'password') and self.password:
            # If running under PY2
            if hasattr(self.password, 'decode'):
                res['password'] = self.password.decode('utf-8')
            else:
                res['password'] = str(self.password)
        else:
            res['password'] = self.password

        if self.use_ssh_tunnel:
            if hasattr(self, 'tunnel_password') and self.tunnel_password:
                # If running under PY2
                if hasattr(self.tunnel_password, 'decode'):
                    res['tunnel_password'] = \
                        self.tunnel_password.decode('utf-8')
                else:
                    res['tunnel_password'] = str(self.tunnel_password)
            else:
                res['tunnel_password'] = self.tunnel_password

        connections = res['connections'] = dict()

        # Serialise each connection; ones that decline (return None) are
        # simply omitted.
        for conn_id in self.connections:
            conn = self.connections[conn_id].as_dict()

            if conn is not None:
                connections[conn_id] = conn

        return res

    def ServerVersion(self):
        """Return the server version string discovered on connect."""
        return self.ver

    @property
    def version(self):
        """Numeric server version (e.g. 90605), or None if not connected."""
        return self.sversion

    def MajorVersion(self):
        """Return the major component of the numeric server version."""
        if self.sversion is not None:
            return int(self.sversion / 10000)
        raise Exception("Information is not available.")

    def MinorVersion(self):
        """Return the minor component of the numeric server version."""
        if self.sversion:
            return int(int(self.sversion / 100) % 100)
        raise Exception("Information is not available.")

    def PatchVersion(self):
        """Return the patch component of the numeric server version."""
        if self.sversion:
            return int(int(self.sversion / 100) / 100)
        raise Exception("Information is not available.")

    def connection(
            self, database=None, conn_id=None, auto_reconnect=True, did=None,
            async_=None, use_binary_placeholder=False, array_to_string=False
    ):
        """Return (creating if needed) the Connection identified either by
        an explicit conn_id, a database name, or a database OID (did).

        Raises ConnectionLost / SSHTunnelConnectionLost when no database
        can be resolved and the transport is down.
        """
        if database is not None:
            # PY2: ensure the database name is unicode.
            if hasattr(str, 'decode') and \
                    not isinstance(database, unicode):
                database = database.decode('utf-8')
            if did is not None:
                if did in self.db_info:
                    self.db_info[did]['datname'] = database
        else:
            if did is None:
                database = self.db
            elif did in self.db_info:
                database = self.db_info[did]['datname']
            else:
                # Unknown OID: look it up via the maintenance-db connection,
                # if we have a live one.
                maintenance_db_id = u'DB:{0}'.format(self.db)
                if maintenance_db_id in self.connections:
                    conn = self.connections[maintenance_db_id]
                    if conn.connected():
                        status, res = conn.execute_dict(u"""
SELECT
    db.oid as did, db.datname, db.datallowconn,
    pg_encoding_to_char(db.encoding) AS serverencoding,
    has_database_privilege(db.oid, 'CREATE') as cancreate, datlastsysoid
FROM
    pg_database db
WHERE db.oid = {0}""".format(did))

                        if status and len(res['rows']) > 0:
                            for row in res['rows']:
                                self.db_info[did] = row
                                database = self.db_info[did]['datname']

                        if did not in self.db_info:
                            raise Exception(gettext(
                                "Could not find the specified database."
                            ))

        if database is None:
            # Check SSH Tunnel is alive or not.
            if self.use_ssh_tunnel == 1:
                self.check_ssh_tunnel_alive()
            else:
                raise ConnectionLost(self.sid, None, None)

        my_id = (u'CONN:{0}'.format(conn_id)) if conn_id is not None else \
            (u'DB:{0}'.format(database))

        self.pinged = datetime.datetime.now()

        if my_id in self.connections:
            return self.connections[my_id]
        else:
            # Explicit conn_ids default to asynchronous connections.
            if async_ is None:
                async_ = 1 if conn_id is not None else 0
            else:
                async_ = 1 if async_ is True else 0
            self.connections[my_id] = Connection(
                self, my_id, database, auto_reconnect, async_,
                use_binary_placeholder=use_binary_placeholder,
                array_to_string=array_to_string
            )

            return self.connections[my_id]

    def _restore(self, data):
        """
        Helps restoring to reconnect the auto-connect connections smoothly on
        reload/restart of the app server..
        """
        # restore server version from flask session if flask server was
        # restarted. As we need server version to resolve sql template paths.
        from pgadmin.browser.server_groups.servers.types import ServerType

        self.ver = data.get('ver', None)
        self.sversion = data.get('sversion', None)

        if self.ver and not self.server_type:
            for st in ServerType.types():
                if st.instanceOf(self.ver):
                    self.server_type = st.stype
                    self.server_cls = st
                    break

        # Hmm.. we will not honour this request, when I already have
        # connections
        if len(self.connections) != 0:
            return

        # We need to know about the existing server variant supports during
        # first connection for identifications.
        self.pinged = datetime.datetime.now()
        try:
            # Passwords were stored decoded in the session; re-encode for
            # the decrypt/connect path below (PY2/PY3 tolerant).
            if 'password' in data and data['password']:
                data['password'] = data['password'].encode('utf-8')
            if 'tunnel_password' in data and data['tunnel_password']:
                data['tunnel_password'] = \
                    data['tunnel_password'].encode('utf-8')
        except Exception as e:
            current_app.logger.exception(e)

        connections = data['connections']
        for conn_id in connections:
            conn_info = connections[conn_id]
            conn = self.connections[conn_info['conn_id']] = Connection(
                self, conn_info['conn_id'], conn_info['database'],
                conn_info['auto_reconnect'], conn_info['async_'],
                use_binary_placeholder=conn_info['use_binary_placeholder'],
                array_to_string=conn_info['array_to_string']
            )

            # only try to reconnect if connection was connected previously and
            # auto_reconnect is true.
            if conn_info['wasConnected'] and conn_info['auto_reconnect']:
                try:
                    # Check SSH Tunnel needs to be created
                    if self.use_ssh_tunnel == 1 and not self.tunnel_created:
                        status, error = self.create_ssh_tunnel(
                            data['tunnel_password'])

                        # Check SSH Tunnel is alive or not.
                        self.check_ssh_tunnel_alive()

                    conn.connect(
                        password=data['password'],
                        server_types=ServerType.types()
                    )
                    # This will also update wasConnected flag in connection so
                    # no need to update the flag manually.
                except Exception as e:
                    # A failed reconnect drops the connection from the map.
                    current_app.logger.exception(e)
                    self.connections.pop(conn_info['conn_id'])

    def release(self, database=None, conn_id=None, did=None):
        """Release one connection (selected by conn_id, database or did),
        or every connection — and the SSH tunnel — when called without
        arguments. Returns True when something was released."""
        # Stop the SSH tunnel if release() function calls without
        # any parameter.
        if database is None and conn_id is None and did is None:
            self.stop_ssh_tunnel()

        if did is not None:
            if did in self.db_info and 'datname' in self.db_info[did]:
                database = self.db_info[did]['datname']
                # PY2: ensure the database name is unicode.
                if hasattr(str, 'decode') and \
                        not isinstance(database, unicode):
                    database = database.decode('utf-8')
                if database is None:
                    return False
            else:
                return False

        my_id = (u'CONN:{0}'.format(conn_id)) if conn_id is not None else \
            (u'DB:{0}'.format(database)) if database is not None else None

        if my_id is not None:
            if my_id in self.connections:
                self.connections[my_id]._release()
                del self.connections[my_id]
                if did is not None:
                    del self.db_info[did]

                # Last connection gone: forget the discovered server info.
                if len(self.connections) == 0:
                    self.ver = None
                    self.sversion = None
                    self.server_type = None
                    self.server_cls = None
                    self.password = None

                self.update_session()

                return True
            else:
                return False

        # No selector given: release everything.
        for con in self.connections:
            self.connections[con]._release()

        self.connections = dict()
        self.ver = None
        self.sversion = None
        self.server_type = None
        self.server_cls = None
        self.password = None

        self.update_session()

        return True

    def _update_password(self, passwd):
        """Propagate a changed password to this manager and every
        connection that is (or was) connected."""
        self.password = passwd
        for conn_id in self.connections:
            conn = self.connections[conn_id]
            if conn.conn is not None or conn.wasConnected is True:
                conn.password = passwd

    def update_session(self):
        """Persist (or remove) this manager's serialised state in the
        flask session so it survives an app-server restart."""
        managers = session['__pgsql_server_managers'] \
            if '__pgsql_server_managers' in session else dict()
        updated_mgr = self.as_dict()

        if not updated_mgr:
            # Nothing to persist: drop any stale entry.
            if self.sid in managers:
                managers.pop(self.sid)
        else:
            managers[self.sid] = updated_mgr
        session['__pgsql_server_managers'] = managers
        session.force_write = True

    def utility(self, operation):
        """
        utility(operation)

        Returns: name of the utility which used for the operation
        """
        if self.server_cls is not None:
            return self.server_cls.utility(operation, self.sversion)

        return None

    def export_password_env(self, env):
        """Decrypt the stored password and export it through the named
        environment variable (e.g. PGPASSWORD for external utilities)."""
        if self.password:
            password = decrypt(
                self.password, current_user.password
            ).decode()
            os.environ[str(env)] = password

    def create_ssh_tunnel(self, tunnel_password):
        """
        This method is used to create ssh tunnel and update the IP Address and
        IP Address and port to localhost and the local bind port return by the
        SSHTunnelForwarder class.
        :return: True if tunnel is successfully created else error message.
        """
        # Fetch Logged in User Details.
        user = User.query.filter_by(id=current_user.id).first()
        if user is None:
            return False, gettext("Unauthorized request.")

        if tunnel_password is not None and tunnel_password != '':
            try:
                tunnel_password = decrypt(tunnel_password, user.password)
                # Handling of non ascii password (Python2)
                if hasattr(str, 'decode'):
                    tunnel_password = \
                        tunnel_password.decode('utf-8').encode('utf-8')
                # password is in bytes, for python3 we need it in string
                elif isinstance(tunnel_password, bytes):
                    tunnel_password = tunnel_password.decode()

            except Exception as e:
                current_app.logger.exception(e)
                return False, "Failed to decrypt the SSH tunnel " \
                              "password.\nError: {0}".format(str(e))

        try:
            # If authentication method is 1 then it uses identity file
            # and password
            if self.tunnel_authentication == 1:
                self.tunnel_object = SSHTunnelForwarder(
                    (self.tunnel_host, int(self.tunnel_port)),
                    ssh_username=self.tunnel_username,
                    ssh_pkey=get_complete_file_path(self.tunnel_identity_file),
                    ssh_private_key_password=tunnel_password,
                    remote_bind_address=(self.host, self.port)
                )
            else:
                self.tunnel_object = SSHTunnelForwarder(
                    (self.tunnel_host, int(self.tunnel_port)),
                    ssh_username=self.tunnel_username,
                    ssh_password=tunnel_password,
                    remote_bind_address=(self.host, self.port)
                )

            self.tunnel_object.start()
            self.tunnel_created = True
        except BaseSSHTunnelForwarderError as e:
            current_app.logger.exception(e)
            return False, "Failed to create the SSH tunnel." \
                          "\nError: {0}".format(str(e))

        # Update the port to communicate locally
        self.local_bind_port = self.tunnel_object.local_bind_port

        return True, None

    def check_ssh_tunnel_alive(self):
        # Check SSH Tunnel is alive or not. if it is not then
        # raise the ConnectionLost exception.
        if self.tunnel_object is None or not self.tunnel_object.is_active:
            self.tunnel_created = False
            raise SSHTunnelConnectionLost(self.tunnel_host)

    def stop_ssh_tunnel(self):
        # Stop the SSH tunnel if created.
        if self.tunnel_object and self.tunnel_object.is_active:
            self.tunnel_object.stop()
            self.local_bind_port = None
            self.tunnel_object = None
            self.tunnel_created = False
Example #55
0
 def __init__(self, ssh_address, ssh_port, ssh_username, ssh_password, remote_bind_address, remote_bind_port):
     """Prepare (but do not start) an SSH tunnel to the remote endpoint."""
     gateway = (ssh_address, ssh_port)
     destination = (remote_bind_address, remote_bind_port)
     self.server = SSHTunnelForwarder(
         ssh_address=gateway,
         ssh_username=ssh_username,
         ssh_password=ssh_password,
         remote_bind_address=destination,
     )
class StorageDB:
    """Reporting queries against the VicNode database over an SSH tunnel.

    Construction opens the tunnel and a psycopg2 connection through it
    (cursors return dict rows). Use as a context manager, or call
    ``close_connection()``, to release both.
    """
    _db_connection = None
    _db_cur = None

    def __init__(self):
        ssh_tunnel = Credentials.ssh_tunnel
        # Work on a copy: the host/port rewrite below must not mutate the
        # shared Credentials.vicnode_db dictionary for other users of it.
        db_config = dict(Credentials.vicnode_db)
        self._server = SSHTunnelForwarder(
            ((ssh_tunnel['host']), (int(ssh_tunnel['port']))),
            ssh_password=ssh_tunnel['ssh_password'],
            ssh_username=(ssh_tunnel['username']),
            ssh_pkey=(ssh_tunnel['private_key_file']),
            remote_bind_address=(db_config['host'], 5432),
            allow_agent=False
        )
        self._server.start()
        # we are about to bind to a 'local' server by means of an ssh tunnel
        # ssh tunnel: which will be seen as a local server...
        # so replace the loaded config host
        db_config['host'] = 'localhost'
        db_config['port'] = self._server.local_bind_port
        self._db_connection = psycopg2.connect(**db_config)
        self._db_cur = self._db_connection.cursor(
            cursor_factory=psycopg2.extras.RealDictCursor)
        self.test_connection()

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Renamed parameters so the builtin `type` is not shadowed;
        # dunder parameters are positional, so callers are unaffected.
        self.close_connection()

    def __del__(self):
        self.close_connection()

    def close_connection(self):
        """Close the database connection (if any) and stop the SSH tunnel.

        Safe to call repeatedly, and safe when ``__init__`` failed
        part-way (tunnel started but the database connection was never
        established) — previously that case raised AttributeError from
        ``__del__``.
        """
        if self._server:
            if self._db_connection is not None:
                self._db_connection.close()
                self._db_connection = None
            self._server.stop()
            self._server = None

    def test_connection(self):
        """Run a trivial query so a broken connection fails fast."""
        self._db_cur.execute("SELECT * FROM applications_suborganization;")
        self._db_cur.fetchall()

    def get_contacts(self):
        """
        Returns: every project name and its chief investigator (can be more
        than one).
        """
        query = """
            SELECT
              collection.id,
              contacts_contact.first_name,
              contacts_contact.last_name,
              contacts_contact.email_address,
              contacts_contact.business_email_address
            FROM applications_project AS collection
              LEFT JOIN applications_custodian
                ON collection_id = collection.id
              LEFT JOIN contacts_contact
                ON applications_custodian.person_id = contacts_contact.id
            WHERE applications_custodian.role_id = 293
            ORDER BY id;
        """
        self._db_cur.execute(query)
        return self._db_cur.fetchall()

    def get_sub_organizations(self):
        """
        Returns: all the projects that have a suborganization
        """
        query = """
            SELECT
              applications_allocation.collection_id AS project_id,
              CASE
              WHEN applications_suborganization.id = 1
                THEN 'ABP'
              WHEN applications_suborganization.id = 2
                THEN 'FBE'
              WHEN applications_suborganization.id = 3
                THEN 'FoA'
              WHEN applications_suborganization.id = 4
                THEN 'MGSE'
              WHEN applications_suborganization.id = 5
                THEN 'MSE'
              WHEN applications_suborganization.id = 6
                THEN 'MLS'
              WHEN applications_suborganization.id = 7
                THEN 'MDHS'
              WHEN applications_suborganization.id = 8
                THEN 'FoS'
              WHEN applications_suborganization.id = 9
                THEN 'VAS'
              WHEN applications_suborganization.id = 10
                THEN 'VCAMCM'
              WHEN applications_suborganization.id = 11
                THEN 'Services'
              ELSE 'Unknown' END AS faculty
            FROM applications_allocation
              LEFT JOIN applications_request
                ON applications_allocation.application_id = applications_request.id
              LEFT JOIN applications_suborganization
                ON applications_request.institution_faculty_id =
                   applications_suborganization.id
            WHERE institution_id = 2
            GROUP BY collection_id, faculty
            ORDER BY collection_id;
        """
        self._db_cur.execute(query)
        return self._db_cur.fetchall()

    def get_allocated(self, year_wanted):
        """
        Returns: (collection_id, name, allocated) totals for UoM
        (institution_id = 2) allocations created on or before the end of
        *year_wanted*, over the selected storage products.
        """
        query = """
            SELECT
              collection_id,
              name,
              sum(size) AS allocated
            FROM applications_allocation
              LEFT JOIN applications_request
                ON applications_allocation.application_id = applications_request.id
              LEFT JOIN applications_project
                ON applications_allocation.collection_id = applications_project.id
            WHERE storage_product_id IN (1, 4, 10, 23, 24)
                  AND COALESCE(applications_allocation.creation_date, '2014-11-14' :: DATE) < %(day_date)s :: DATE
                  AND applications_request.institution_id = 2
            GROUP BY collection_id, name
            ORDER BY collection_id;
        """
        self._db_cur.execute(query, {
            'day_date': '%s-12-31' % year_wanted
        })
        return self._db_cur.fetchall()

    def get_used(self, year_wanted, collection_id):
        """
        Returns: the single row of summed used capacity for
        *collection_id* as at the end of *year_wanted*, taking the latest
        ingest record per storage product before that date.
        """
        query = """
            SELECT
            --   lt.max_date,
            --   lt.storage_product_id,
              SUM(rt.used_capacity) AS used
            FROM
              (
                -- get the max date for each product type
                SELECT
                  MAX(extraction_date) AS max_date,
                  storage_product_id
                FROM applications_ingest
                WHERE collection_id =%(collection_id)s
                      AND extraction_date < %(day_date)s :: DATE
                GROUP BY storage_product_id
              ) lt
              INNER JOIN
              (
                -- get the max date for each change in the used capacity of each
                -- product type
                SELECT
                  MAX(extraction_date) AS max_date,
                  used_capacity
                FROM applications_ingest
                WHERE collection_id = %(collection_id)s
                      AND extraction_date < %(day_date)s :: DATE
                GROUP BY used_capacity
              ) rt
                ON lt.max_date = rt.max_date;
        """
        self._db_cur.execute(query, {
            'day_date': '%s-12-31' % year_wanted,
            'collection_id': collection_id
        })
        return self._db_cur.fetchone()
from math import log
from sshtunnel import SSHTunnelForwarder # for SSH connection
import pymysql.cursors # MySQL handling API
import sys
sys.path.append("./configs/")
import server_config # (1) info2_server (2) exploration_db


if __name__ == "__main__":

    # READ DATA FROM SERVER
    #read_Data_from_Server()
    # Server connection
    server = SSHTunnelForwarder(
        (server_config.info2_server['host'], 22),
        ssh_username=server_config.info2_server['user'],
        ssh_password=server_config.info2_server['password'],
        remote_bind_address=('127.0.0.1', 3306))

    server.start()

    connection = pymysql.connect(host='127.0.0.1',
                                 port=server.local_bind_port,
                                 user=server_config.exploration_db['user'],
                                 password=server_config.exploration_db['password'],
                                 db=server_config.exploration_db['database'])
    connection.autocommit(True)
    cursor = connection.cursor()
    print("MySQL connection established.")

    # Get the participants list from the table of 'final_participants'
Example #58
0
    config.set('SSH', 'Address', '127.0.0.1')
    config.set('SSH', 'Enable', 'true')

    with open('config.cfg', 'w') as configfile:
        config.write(configfile)

    print('Please fill out config.cfg, then restart the program.')
    exit(1)

# Open the serial port named in config.cfg (blocks until the device
# is available).
ser = serial.Serial(config.get('Serial', 'Port'),
                    config.getint('Serial', 'Baud'))

# Build the SSH tunnel object up-front; it is only started below when
# [SSH] Enable is true. ClientPort is the fixed local bind port,
# ServerPort the remote service behind the SSH host; keepalive pings
# every 5 seconds keep the tunnel from idling out.
forwarder = SSHTunnelForwarder(
        config.get('SSH', 'Address'),
        ssh_username=config.get('SSH', 'Username'),
        ssh_password=config.get('SSH', 'Password'),
        remote_bind_address=('127.0.0.1', config.getint('SSH', 'ServerPort')),
        local_bind_address=('127.0.0.1', config.getint('SSH', 'ClientPort')),
        set_keepalive=5
    )

if config.getboolean('SSH', 'Enable'):
    forwarder.start()
    print('[SSH] Opened SSH tunnel with %s:%d' % (forwarder.ssh_host, forwarder.ssh_port))

# SQLAlchemy engine for the configured database; pool_recycle refreshes
# pooled connections older than an hour to avoid stale-connection errors.
engine = create_engine(
    config.get('Database', 'ConnectionString'),
    pool_recycle=3600,
    pool_size=5
)
Base = declarative_base()