Example #1
0
    def __init__(self, setup_file_name):
        """Load MySQL connection settings from a setup file and connect.

        Reads key/value pairs via ``setup_file_reader``, validates that the
        required keys are present and non-empty, then opens a database
        connection and cursor.

        :param setup_file_name: path to the setup file to read
        :raises ValueError: if any required setup key is missing or empty
        """
        setup_file_values = setup_file_reader.read_in_setup_file(setup_file_name)

        # Validate explicitly rather than with `assert`, which is stripped
        # under `python -O` and would silently accept a broken setup file.
        # `column_datatypes` is included because it is read below.
        required = ('host', 'username', 'passwd', 'db_name',
                    'table_columns', 'column_datatypes')
        missing = [key for key in required if not setup_file_values.get(key)]
        if missing:
            raise ValueError(
                "setup file {0!r} is missing or has empty values for: {1}".format(
                    setup_file_name, ", ".join(missing)))

        self.host = setup_file_values['host']
        self.username = setup_file_values['username']
        self.passwd = setup_file_values['passwd']
        self.db_name = setup_file_values['db_name']
        # Comma-separated lists in the setup file become Python lists.
        self.table_columns = setup_file_values['table_columns'].split(',')
        self.column_datatypes = setup_file_values['column_datatypes'].split(',')
        # Open the connection and cursor eagerly; callers use self.cursor.
        self.db_connection = mysql.connector.connect(
            host=self.host, user=self.username,
            passwd=self.passwd, db=self.db_name)
        self.cursor = self.db_connection.cursor()
Example #2
0
import setup_file_reader as setup

"""
    Server/Client code for the fishpaste crawler.
    This handles passing out new sites for each crawler to visit.

"""

#client/server needs to run with sudo.

BUFFER_SIZE = 4096  # bytes per socket recv
TCP_PORT = 666  # main server port
TCP_CALLBACK_PORT = 667  # port for the callback channel


# Read server/client addresses at import time from the local setup file.
# NOTE(review): read_in_setup_file is project-local; it appears to return a
# dict-like mapping of setup keys to string values — confirm against its source.
setup_info = setup.read_in_setup_file("./fishpaste_server.setup")
SERVER_IP = setup_info['host']
CLIENT_IP = setup_info['client']


def encode_for_transport(string):
    """Return *string* encoded as UTF-8 bytes, ready to send over a socket."""
    return string.encode("utf-8")

def client_get_more_links( exclusion_url ):
    if not exclusion_url:
        exclusion_url = "None"
    TRIES = 10
    
    string_data = None
    s = socket.socket( socket.AF_INET,  socket.SOCK_STREAM )