Example #1
    def select_task_impala_load_not_ready(self):
        try:
            con = MySQLdb.connect(host=self.mysqlinfo["host"],
                                  port=string.atoi(self.mysqlinfo["port"]),
                                  user=self.mysqlinfo["user"],
                                  passwd=self.mysqlinfo["passwd"],
                                  db=self.mysqlinfo["dbname"])
            cursor = con.cursor()
            task_str = self.select_task_impala_taskname_list()
            sql = "select task_name,node_num,batch_id,h_db_name,h_table_name,h_table_part,i_db_name,i_table_name,i_table_part,insert_type,count(1) as num \
                    from task_impala_load where status!='SUCCESS' \
                    and task_name in {task_str}\
                    group by task_name,node_num,batch_id,h_db_name,h_table_name,h_table_part,i_db_name,i_table_name,i_table_part,insert_type having num=node_num;"

            sql = sql.replace("{task_str}", task_str)
            print(
                "select_task_impala_load_not_ready functions run! select sql => "
                + sql)
            cursor.execute(sql)
            results = cursor.fetchall()
            if (len(results) != 0):
                print(
                    "select_task_impala_load_not_ready  sql result : \n r(0): "
                    + str(results[0]))
            else:
                print("result is null! ")
            return results
        except Error, e:
            Logger.error("mysql error select_task_impala_load_ready" +
                         e.args[0].__str__() + ":" + e.args[1].__str__())
            print "mysql error select_task_impala_load_ready%d: %s" % (
                e.args[0], e.args[1])
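The query above is assembled by substituting {task_str} into the SQL text with str.replace. A minimal sketch of the same lookup that lets the driver perform the parameter substitution for the IN clause (assuming MySQLdb is available, the task names arrive as a non-empty list, and with the column list abbreviated here):

import MySQLdb

def select_not_ready(mysqlinfo, task_names):
    # task_names: non-empty list of task-name strings; one %s placeholder per name
    con = MySQLdb.connect(host=mysqlinfo["host"], port=int(mysqlinfo["port"]),
                          user=mysqlinfo["user"], passwd=mysqlinfo["passwd"],
                          db=mysqlinfo["dbname"])
    try:
        cursor = con.cursor()
        placeholders = ", ".join(["%s"] * len(task_names))
        sql = ("select task_name, node_num, batch_id, count(1) as num "
               "from task_impala_load where status != 'SUCCESS' "
               "and task_name in (" + placeholders + ") "
               "group by task_name, node_num, batch_id having num = node_num")
        cursor.execute(sql, task_names)  # the driver quotes and escapes each value
        return cursor.fetchall()
    finally:
        con.close()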
Example #2
 def select_task_impala_taskname_list(self):
     try:
         con = MySQLdb.connect(host=self.mysqlinfo["host"],
                               port=string.atoi(self.mysqlinfo["port"]),
                               user=self.mysqlinfo["user"],
                               passwd=self.mysqlinfo["passwd"],
                               db=self.mysqlinfo["dbname"])
         cursor = con.cursor()
         sql = "select task_name from task_impala_sql"
         cursor.execute(sql)
         taskname_list = cursor.fetchall()
         task_str = "("
         for i in range(0, len(taskname_list)):
             for j in range(0, len(taskname_list[i])):
                 if (i == (len(taskname_list) - 1)):
                     task_str = task_str + "'" + str(
                         taskname_list[i][j]) + "'"
                 else:
                     task_str = str(task_str + "'" +
                                    taskname_list[i][j]) + "'" + ","
         task_str = task_str + ")"
         return task_str
     except Error, e:
         Logger.error("select_task_impala_taskname_list" +
                      e.args[0].__str__() + ":" + e.args[1].__str__())
         print "mysql error select_task_impala_taskname_list%d: %s" % (
             e.args[0], e.args[1])
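Assuming each row returned by fetchall() carries a single task_name column, the nested loop above that builds the quoted list by hand can be collapsed into a join. This sketch covers only the string-building step, not the database call:

def quote_task_names(rows):
    # rows: sequence of 1-tuples as returned by cursor.fetchall()
    return "(" + ",".join("'" + str(row[0]) + "'" for row in rows) + ")"

# quote_task_names([("a",), ("b",)]) -> "('a','b')"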
Example #3
def getConfigFileName(taskName):
    configRoot = getConfigRoot()
    if (configRoot == errorCode):
        Logger.error("failed to get the root dir of config file " + taskName)
        return errorCode
    configFileName = configRoot + str(taskName) + ".cfg"
    return configFileName
Example #4
  def save(relation):
    if ArtistRelationFactory.__use_db:
      artistA = relation.artistA.key()
      artistB = relation.artistB.key()
      if artistA > artistB:
        artistC = artistB
        artistB = artistA
        artistA = artistC
      ArtistRelationFactory.__db.execute(
          u"REPLACE INTO artist_relation "\
          "(artistA, artistB, relation) VALUES (?, ?, ?)", (
          artistA, artistB, Binary(pickle.dumps(relation, -1))))
      ArtistRelationFactory.__db.commit()

    elif TrackRelationFactory.__use_fs:
      pathA = join(ArtistRelationFactory.__path, relation.artistA.key())
      fileA = u"%s.pkl" % join(pathA, relation.artistB.key())
      pathB = join(ArtistRelationFactory.__path, relation.artistB.key())
      fileB = u"%s.pkl" % join(pathB, relation.artistA.key())
      if not isdir(pathA): makedirs(pathA)
      if not isdir(pathB): makedirs(pathB)
      if fileA > fileB:
        fileC = fileB
        fileB = fileA
        fileA = fileC
      ArtistRelationFactory._save(relation, fileA)
      if fileA != fileB:
        if exists(fileB): remove(fileB)
        try:
          link(fileA, fileB)
        except OSError:
          Logger.error(u"Failed to link %s to %s" % (fileA, fileB))
      relation.lastmodified = getmtime(fileA)
Example #5
def getVoucherUrl(driver, waitJobid, ticketPlatform, ticketNum):
    # Load logger
    voucherlogger = Logger(ticketPlatform,
                           waitJobid,
                           botid='1',
                           method='wait_voucher',
                           logger='waitVoucher').getlog()
    voucherlogger.info("wait_vocher jobid=" + waitJobid)
    voucherlogger.info("order ticketplatform=" + ticketPlatform)
    voucherlogger.info("wait_vocher ticketNumber=" + ticketNum)

    singleorderurl = 'http://www.travelmart.com.cn/singleOrder.asp?id='
    try:
        driver.get(singleorderurl + ticketNum)
    except:
        voucherlogger.error("Don't open singleorderurl")
        return False

    try:
        voucherurl = driver.find_element_by_link_text('查看打印凭证').get_attribute(
            'href')
    except:
        voucherlogger.error("[Voucherurl] don't find element")
        return False

    return voucherurl
Example #6
def getSqoopJobDict():
    jobDict = {}
    Logger = log.Logger
    conf = ConfigParser.RawConfigParser()
    # get the name of the sqoop configuration file
    sqoopConfFileName = getConfigFileName("sqoop")
    try:
        conf.read(sqoopConfFileName)
    except:
        Logger.error("read sqoop configuration file error " +
                     sqoopConfFileName)
        return errorCode
    jobNames_str = conf.get("jobsName", "jobs")
    if (jobNames_str is None):
        Logger.info("there is no job in the configuration!")
        return errorCode
    jobNames = jobNames_str.split(",")
    for jobName in jobNames:
        sqoopcmd = getEnv("sqoop") + " "
        for (key, value) in conf.items(jobName):
        # if the data comes from a SQL query result, the SQL statement is stored in a sub-SQL to avoid "=" signs in the SQL
            #             if(str(key) == "-e" or str(key) == "--query"):
            #                 value = getSqoopSubSql(value)
            sqoopcmd = sqoopcmd + " " + key + " " + value
        jobDict[jobName] = sqoopcmd
    return jobDict
Example #7
class Audit(object):
    """
    A class for handling audit information
    """
    def __init__(self, db, enabled=True):
        self.db = db
        self.enabled = enabled

        self.log = Logger(system=self)

        self.action_ids = {
            "GET_PROFILE": 0,
            "GET_CONTRACT": 1,
            "GET_LISTINGS": 2,  # Click Store tab
            "GET_FOLLOWING": 3,
            "GET_FOLLOWERS": 4,
            "GET_RATINGS": 5
        }

    def record(self, guid, action_id, contract_hash=None):
        if self.enabled is not True:
            return
        self.log.info("Recording Audit Event [%s]" % action_id)

        if action_id in self.action_ids:
            self.db.audit_shopping.set(guid, self.action_ids[action_id],
                                       contract_hash)
        else:
            self.log.error("Could not identify this action id")
Example #8
class BaiduMap:
    def __init__(self):
        self._logger = Logger(__file__)
        self.status ={0:"正常",
                      1:"服务器内部错误",
                      2:"请求参数非法",
                      3:"权限校验失败",
                      4:"配额校验失败",
                      5:"ak不存在或者非法",
                      101:"服务禁用",
                      102:"不通过白名单或者安全码不对",
                      200:"无权限",
                      300:"配额错误"}
        self.geoprefix = "http://api.map.baidu.com/geocoder/v2/?address="
        self.revprefix = "http://api.map.baidu.com/geocoder/v2/?location="
        self.suffix = "&output=json&ak="
        self.headers = {}
        self.headers["User-Agent"]="Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0"

    def access(self,url):
        """get Json object from specified url
        """
        try:
            req = urllib2.Request(url,headers=self.headers)
            response = urllib2.urlopen(req)
            return json.loads(response.read())
        except Exception, e:
            self._logger.error("error occurred when getting geo data: %s" % e)
            return None
Example #9
class Crawler(threading.Thread):
    def __init__(self, cookie, manager):
        threading.Thread.__init__(self)
        cookie_handler = urllib2.HTTPCookieProcessor(cookie)
        self._opener = urllib2.build_opener(cookie_handler)
        self._manager = manager
        self._logger = Logger(u"crawler.log")

    def run(self):
        while True:
            try:
                # fetch url
                url = self._manager.get_url()
                self._logger.debug("Fetch a url " + url)
                # fetch page
                request = urllib2.Request(url)
                page = self._opener.open(request)
                self._logger.debug("Fetch the page for url " + url)
                # insert page
                while True:
                    try:
                        self._manager.insert_page(url, page)
                        self._logger.debug("Insert the page")
                        break
                    except PageQueueFullForNowError:
                        time.sleep(0.1)
            except urllib2.URLError:
                self._manager.insert_page(url, None)
                self._logger.error("URLError for " + url)
            except UrlQueueEmptyForNowError:
                time.sleep(0.1)
            except NoUrlToCrawlError:
                break
Example #10
class Audit(object):
    """
    A class for handling audit information
    """

    def __init__(self, db, enabled=True):
        self.db = db
        self.enabled = enabled

        self.log = Logger(system=self)

        self.action_ids = {
            "GET_PROFILE": 0,
            "GET_CONTRACT": 1,
            "GET_LISTINGS": 2,  # Click Store tab
            "GET_FOLLOWING": 3,
            "GET_FOLLOWERS": 4,
            "GET_RATINGS": 5
        }

    def record(self, guid, action_id, contract_hash=None):
        if self.enabled is not True:
            return
        self.log.info("Recording Audit Event [%s]" % action_id)

        if action_id in self.action_ids:
            self.db.audit_shopping.set(guid, self.action_ids[action_id], contract_hash)
        else:
            self.log.error("Could not identify this action id")
Example #11
def getSqoopPoolNumber():
    poolNumber = getValueByKey("sqoop", "sqoopTask", "threadPoolNumber")
    if (poolNumber != errorCode):
        return poolNumber
    else:
        Logger.error("failed to get sqoop pool number!")
        return errorCode
Example #12
 def load(input):
   for i in range(len(Factories.__lib)):
     Logger.debug(u"Factories.load(): loading %s" % i)
     k = pickle.load(input)
     if k != i:
       # FIXME: we should die here
       Logger.error(u"Factories.load(): key mismatch %d vs %d" % (i, k))
     Factories.__lib[k].setstate(pickle.load(input))
Example #13
class Server(object):
    def __init__(self, kserver, signing_key, database):
        """
        A high level class for sending direct, market messages to other nodes.
        A node will need one of these to participate in buying and selling.
        Should be initialized after the Kademlia server.
        """
        self.kserver = kserver
        self.signing_key = signing_key
        self.router = kserver.protocol.router
        self.db = database
        self.log = Logger(system=self)
        self.protocol = MarketProtocol(kserver.node, self.router, signing_key,
                                       database)

        # TODO: we need a loop here that republishes keywords when they are about to expire

        # TODO: we also need a loop here to delete expiring contract (if they are set to expire)

    def querySeed(self, list_seed_pubkey):
        """
        Query an HTTP seed for known vendors and save the vendors to the db.

        Args:
            Receives a list of one or more tuples, e.g. [(seed, pubkey)]
            seed: A `string` consisting of "ip:port" or "hostname:port"
            pubkey: The hex encoded public key to verify the signature on the response
        """

        for sp in list_seed_pubkey:
            seed, pubkey = sp
            try:
                self.log.debug("querying %s for vendors" % seed)
                c = httplib.HTTPConnection(seed)
                c.request("GET", "/?type=vendors")
                response = c.getresponse()
                self.log.debug("Http response from %s: %s, %s" %
                               (seed, response.status, response.reason))
                data = response.read()
                reread_data = data.decode("zlib")
                proto = peers.PeerSeeds()
                proto.ParseFromString(reread_data)
                verify_key = nacl.signing.VerifyKey(
                    pubkey, encoder=nacl.encoding.HexEncoder)
                verify_key.verify("".join(proto.serializedNode),
                                  proto.signature)
                v = self.db.VendorStore()
                for peer in proto.serializedNode:
                    try:
                        n = objects.Node()
                        n.ParseFromString(peer)
                        v.save_vendor(n.guid.encode("hex"), peer)
                    except Exception:
                        pass
            except Exception, e:
                self.log.error("failed to query seed: %s" % str(e))
Example #14
class SMTPNotification(object):
    """
    A class for sending SMTP notifications
    """

    def __init__(self, db):
        self.db = db
        self.server = 'localhost:25'
        self.sender = 'OpenBazaar'
        self.recipient = ''
        self.username = None
        self.password = None

        self.log = Logger(system=self)

        self.get_smtp_settings()

    def get_smtp_settings(self):
        settings = self.db.settings.get()
        self.server = settings[15]
        self.sender = settings[16]
        self.recipient = settings[17]
        self.username = settings[18]
        self.password = settings[19]

    def send(self, subject, body):

        settings = self.db.settings.get()
        is_enabled = True if settings[14] == 1 else False

        if is_enabled:
            # Construct MIME message
            msg = MIMEMultipart('alternative')
            msg['Subject'] = subject
            msg['From'] = self.sender
            msg['To'] = self.recipient

            html_body = MIMEText(body, 'html')
            msg.attach(html_body)

            try:
                server = smtplib.SMTP(self.server)
                server.starttls()

                # Authenticate if username/password set
                if self.username and self.password:
                    server.login(self.username, self.password)

                server.sendmail(self.sender, self.recipient, msg.as_string())
            except SMTPAuthenticationError as e:
                self.log.error(e)
                print e

            server.quit()
Example #15
class SMTPNotification(object):
    """
    A class for sending SMTP notifications
    """
    def __init__(self, db):
        self.db = db
        self.server = 'localhost:25'
        self.sender = 'OpenBazaar'
        self.recipient = ''
        self.username = None
        self.password = None

        self.log = Logger(system=self)

        self.get_smtp_settings()

    def get_smtp_settings(self):
        settings = self.db.settings.get()
        self.server = settings[15]
        self.sender = settings[16]
        self.recipient = settings[17]
        self.username = settings[18]
        self.password = settings[19]

    def send(self, subject, body):

        settings = self.db.settings.get()
        is_enabled = True if settings[14] == 1 else False

        if is_enabled:
            # Construct MIME message
            msg = MIMEMultipart('alternative')
            msg['Subject'] = subject
            msg['From'] = self.sender
            msg['To'] = self.recipient

            html_body = MIMEText(body, 'html')
            msg.attach(html_body)

            try:
                server = smtplib.SMTP(self.server)
                server.starttls()

                # Authenticate if username/password set
                if self.username and self.password:
                    server.login(self.username, self.password)

                server.sendmail(self.sender, self.recipient, msg.as_string())
            except SMTPAuthenticationError as e:
                self.log.error(e)
                print e

            server.quit()
Example #16
class Server(object):
    def __init__(self, kserver, signing_key, database):
        """
        A high level class for sending direct, market messages to other nodes.
        A node will need one of these to participate in buying and selling.
        Should be initialized after the Kademlia server.
        """
        self.kserver = kserver
        self.signing_key = signing_key
        self.router = kserver.protocol.router
        self.db = database
        self.log = Logger(system=self)
        self.protocol = MarketProtocol(kserver.node, self.router, signing_key, database)

        # TODO: we need a loop here that republishes keywords when they are about to expire

        # TODO: we also need a loop here to delete expiring contract (if they are set to expire)

    def querySeed(self, list_seed_pubkey):
        """
        Query an HTTP seed for known vendors and save the vendors to the db.

        Args:
            Receives a list of one or more tuples, e.g. [(seed, pubkey)]
            seed: A `string` consisting of "ip:port" or "hostname:port"
            pubkey: The hex encoded public key to verify the signature on the response
        """

        for sp in list_seed_pubkey:
            seed, pubkey = sp
            try:
                self.log.debug("querying %s for vendors" % seed)
                c = httplib.HTTPConnection(seed)
                c.request("GET", "/?type=vendors")
                response = c.getresponse()
                self.log.debug("Http response from %s: %s, %s" % (seed, response.status, response.reason))
                data = response.read()
                reread_data = data.decode("zlib")
                proto = peers.PeerSeeds()
                proto.ParseFromString(reread_data)
                verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
                verify_key.verify("".join(proto.serializedNode), proto.signature)
                v = self.db.VendorStore()
                for peer in proto.serializedNode:
                    try:
                        n = objects.Node()
                        n.ParseFromString(peer)
                        v.save_vendor(n.guid.encode("hex"), peer)
                    except Exception:
                        pass
            except Exception, e:
                self.log.error("failed to query seed: %s" % str(e))
Example #17
def getEnv(task):
    conf = ConfigParser.RawConfigParser()
    # get the configuration file that corresponds to the task type
    ConfFileName = getConfigFileName(task)
    try:
        conf.read(ConfFileName)
    except:
        #email()
        Logger.error("failed to read sqoop config file!" + ConfFileName)
        return errorCode
    # get the command path from the environment settings of the machine this command runs on
    cmd = conf.get(task + "Env", "cmdPath")
    return cmd
Example #18
def getSqoopJobListByName(jobName):
    conf = ConfigParser.RawConfigParser()
    sqoopConfFileName = getConfigFileName("sqoop")
    sqoopCmd = getEnv("sqoop")
    try:
        conf.read(sqoopConfFileName)
    except:
        #email()
        Logger.error("failed to read config file " + sqoopConfFileName)
        return errorCode
    for (key, value) in conf.items(jobName):
        sqoopCmd = sqoopCmd + " " + key + " " + value
    return sqoopCmd
Example #19
def send_message(contact, message):
    try:
        contact.get_chat().send_message(message)
    except Exception as e:
        if (is_time_out_error(e)):
            print(f"Error trying to send message because time out - {e}")
            internet.wait_until_connection_becames_available()
            Logger.error(
                f"Error trying to send message because time out - {e}")
        else:
            print(f"unknown error {e}")
            Logger.error(f"Error trying to send message - {e}")
            pass
        time.sleep(2)
Example #20
def getValueByKey(taskName, section, key):
    conf = ConfigParser.RawConfigParser()
    configFile = getConfigFileName(taskName)
    if (configFile == errorCode):
        Logger.error(
            "Failed to get configFile from function getConfigFileName(" +
            taskName + ")")
        return errorCode
    try:
        conf.read(configFile)
    except:
        Logger.error("read config file failed ! logfile dir is " + configFile)
        return errorCode
    value = conf.get(section, key)
    return value
Example #21
 def insert_task_log(self, values):
     try:
         con = MySQLdb.connect(host=self.mysqlinfo["host"],
                               port=string.atoi(self.mysqlinfo["port"]),
                               user=self.mysqlinfo["user"],
                               passwd=self.mysqlinfo["passwd"],
                               db=self.mysqlinfo["dbname"])
         cursor = con.cursor()
         sql = "insert into task_log(task_id,task_type,start_time,end_time,cost,ifnormal,run_date) values(%s,%s,%s,%s,%s,%s,%s)"
         cursor.execute(sql, values)
         con.commit()
     except Error, e:
         Logger.error("mysql error insert_task_log" + e.args[0].__str__() +
                      ":" + e.args[1].__str__())
         print "mysql error insert_task_log %d: %s" % (e.args[0], e.args[1])
Example #22
def getKeyValueBySection(taskName, section):
    conf = ConfigParser.RawConfigParser()
    configFile = getConfigFileName(taskName)
    if (configFile == errorCode):
        Logger.error(
            "Failed to get configFile from function getConfigFileName(" +
            taskName + ")")
        return errorCode
    try:
        conf.read(configFile)
    except:
        Logger.error("read config file failed ! logfile dir is " + configFile)
        return errorCode
    dict_kv = conf.items(section)
    return dict_kv
Example #23
class DictionaryReader():
    word_list = None
    logger = None
    filename = None

    def __init__(self):  # TODO: implement config
        self.logger = Logger()  # TODO: refactor
        try:
            filepath = os.path.expanduser(config.get('dictionary location'))
            self.read_file(filepath)
        except FileNotFoundError as err:
            self.logger.error(err)  # TODO: refactor

    def reload_dictionary(self):
        self.logger.info("Reloading dictionary")
        self.read_file(self.filename)

    def read_file(self, filename):
        self.filename = filename
        try:
            with open(filename, 'r') as f:
                content = f.read()
                splitted = content.split('\n')
                only_unique = (list(set(splitted)))
                sorted_data = sorted(only_unique)
                self.word_list = sorted_data
        except FileNotFoundError as err:
            self.logger.error(err)  # TODO: refactor
            try:
                with open(filename, 'w') as f:
                    f.write('Sample\ndata\nin the\ndictionary')
            except IOError as err:
                self.logger.error(err)
        except PermissionError as err:
            self.logger.error(err)  # TODO: refactor
        except OSError as err:
            self.logger.error(err)  # TODO: refactor

    def get_random_word(self):
        if len(self.word_list) > 0:
            random_entry = random.randrange(0, len(self.word_list), 1)
            word = self.word_list[random_entry]
            if config.get('remove words from pool'):
                self.word_list.remove(word)
            return word
        else:
            self.logger.info('Dictionary emptied')
            return 'Greetings! You emptied the pool!'
Example #24
def send_media(driver, path, chat_id, caption, contact):
    try:
        driver.send_media(path, chat_id, caption)
    except Exception as e:
        print(f"Error trying to send media -")
        if (is_time_out_error(e)):
            print(f"Error trying to send media because time out - {e}")
            Logger.error(f"Error trying to send media because time out - {e}")
            internet.wait_until_connection_becames_available()
        else:
            print(f"unknown error {e}")
            Logger.error(f"Error trying to send media - {e}")
            pass

        send_message(
            contact,
            f"Não foi possível enviar a media do {contact.get_safe_name()}")
Example #25
 def select_task_sceduling(self, run_date):
     try:
         con = MySQLdb.connect(host=self.mysqlinfo["host"],
                               port=string.atoi(self.mysqlinfo["port"]),
                               user=self.mysqlinfo["user"],
                               passwd=self.mysqlinfo["passwd"],
                               db=self.mysqlinfo["dbname"])
         cursor = con.cursor()
         sql = "select task_id,runstatus,ifnormal from task_sceduling where date(run_date)='" + run_date + "'"
         cursor.execute(sql)
         results = cursor.fetchall()
         return results
     except Error, e:
         Logger.error("mysql error select_task_sceduling" +
                      e.args[0].__str__() + ":" + e.args[1].__str__())
         print "mysql error select_task_sceduling%d: %s" % (e.args[0],
                                                            e.args[1])
Example #26
class IWatchdog(threading.Thread):
    """
    interface class needed to instantiate a watchdog class
    """
    def __init__(self):
        """
        constructor
        """
        threading.Thread.__init__(self)
        self.logger = Logger("IWatchdog")

    def run(self):
        """
        redefinition of run method of threading.Thread class
        :return: None
        """
        self.logger.error("undefined watchdog launcher")
Example #27
class MySQL:
    # the mysql error code
    error_code = ''
    # the instance of this class
    _instance = None
    # the database connection object
    _conn = None
    # the cursor
    _cur = None
    # the default timeout
    _TIMEOUT = 30
    _timecount = 0

    def __init__(self, dbconfig):
        """create database connection with specified arguments
        # Parameters:
        dbconfig: the configuration argument:
            host:
            user:
            passwd:
            port:
            charset:
        # Return:
        """
        self._logger = Logger(__file__)
        try:
            self._conn = MySQLdb.connect(host=dbconfig['host'],
                                         port=dbconfig['port'],
                                         user=dbconfig['user'],
                                         passwd=dbconfig['passwd'],
                                         db=dbconfig['db'],
                                         charset=dbconfig['charset'])
        except MySQLdb.Error, e:
            self.error_code = str(e.args[0])
            error_msg = 'MySQL error! %s %s' % (e.args[0], e.args[1])
            print error_msg
            # if not exceed the default timeout, try again
            if self._timecount < self._TIMEOUT:
                interval = 5
                self._timecount += interval
                time.sleep(interval)
                self.__init__(dbconfig)
            else:
                self._logger.error(str(e.args[0]) + e.args[1])
                raise Exception(error_msg)
        self._cur = self._conn.cursor()
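The constructor above retries a failed connection by calling __init__ recursively and tracking elapsed time in _timecount. A sketch of the same retry budget written as a plain loop, assuming the same MySQLdb driver and dbconfig keys as the example:

import time
import MySQLdb

def connect_with_retry(dbconfig, timeout=30, interval=5):
    waited = 0
    while True:
        try:
            return MySQLdb.connect(host=dbconfig['host'], port=dbconfig['port'],
                                   user=dbconfig['user'], passwd=dbconfig['passwd'],
                                   db=dbconfig['db'], charset=dbconfig['charset'])
        except MySQLdb.Error:
            if waited >= timeout:
                raise  # give up once the timeout budget is spent
            waited += interval
            time.sleep(interval)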
Example #28
class BaiduMap:
    def __init__(self):
        self._logger = Logger(__file__)
        self.headers = {}
        self.headers[
            "User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0"

    def access(self, url):
        """get Json object from specified url
        """
        try:
            req = urllib2.Request(url, headers=self.headers)
            response = urllib2.urlopen(req)
            return json.loads(response.read())
        except Exception, e:
            self._logger.error("error occured when get geo data")
            return None
Example #29
class MessageListenerImpl(object):
    implements(MessageListener)

    def __init__(self, web_socket_factory, database):
        self.ws = web_socket_factory
        self.db = database
        self.log = Logger(system=self)

    def notify(self, plaintext, signature):
        try:
            success = self.db.messages.save_message(
                plaintext.sender_guid.encode("hex"),
                plaintext.handle,
                plaintext.pubkey,
                plaintext.subject,
                PlaintextMessage.Type.Name(plaintext.type),
                plaintext.message,
                plaintext.timestamp,
                plaintext.avatar_hash,
                signature,
                False,
            )

            if plaintext.subject != "":
                self.db.purchases.update_unread(plaintext.subject)
                self.db.sales.update_unread(plaintext.subject)
                self.db.cases.update_unread(plaintext.subject)

            if success:
                message_json = {
                    "message": {
                        "sender": plaintext.sender_guid.encode("hex"),
                        "subject": plaintext.subject,
                        "message_type": PlaintextMessage.Type.Name(plaintext.type),
                        "message": plaintext.message,
                        "timestamp": plaintext.timestamp,
                        "avatar_hash": plaintext.avatar_hash.encode("hex"),
                        "public_key": plaintext.pubkey.encode("hex"),
                    }
                }
                if plaintext.handle:
                    message_json["message"]["handle"] = plaintext.handle
                self.ws.push(json.dumps(sanitize_html(message_json), indent=4))
        except Exception as e:
            self.log.error("Market.Listener.notify Exception: %s" % e)
Example #30
def getWorkflow():
    conf = ConfigParser.RawConfigParser()
    workflowConfigFile = getConfigFileName("workflow")
    if (workflowConfigFile == errorCode):
        Logger.error("failed to get workflow config file!")
        return errorCode
    try:
        conf.read(workflowConfigFile)
    except:
        send_mail("failed to read workflow config file!",
                  "time: " + str(time.time()))
        Logger.error("failed to read workflow config file!")
        return errorCode
    workDict = conf.items("workflow")
    if (workDict is not None):
        return workDict
    else:
        return errorCode
Example #31
    def update_task_impala_load_satus(self, vals):
        try:
            con = MySQLdb.connect(host=self.mysqlinfo["host"],
                                  port=string.atoi(self.mysqlinfo["port"]),
                                  user=self.mysqlinfo["user"],
                                  passwd=self.mysqlinfo["passwd"],
                                  db=self.mysqlinfo["dbname"])
            cursor = con.cursor()
            sql = "update task_impala_load set status=%s where task_name=%s and batch_id=%s and h_db_name=%s and h_table_name=%s \
            and h_table_part=%s and i_db_name=%s and i_table_name=%s and i_table_part=%s and insert_type=%s"

            cursor.execute(sql, vals)
            con.commit()
        except Error, e:
            Logger.error("mysql error update_task_impala_load_satus" +
                         e.args[0].__str__() + ":" + e.args[1].__str__())
            print "mysql error update_task_impala_load_satus%d: %s" % (
                e.args[0], e.args[1])
Example #32
 def select_task_impala_sql(self, task_name):
     try:
         con = MySQLdb.connect(host=self.mysqlinfo["host"],
                               port=string.atoi(self.mysqlinfo["port"]),
                               user=self.mysqlinfo["user"],
                               passwd=self.mysqlinfo["passwd"],
                               db=self.mysqlinfo["dbname"])
         cursor = con.cursor()
         sql = "select sub_sql from task_impala_sql where task_name='" + task_name + "'"
         print("impala_sql" + sql)
         cursor.execute(sql)
         result = cursor.fetchall()
         return result
     except Error, e:
         Logger.error("mysql error select_task_impala_sql" +
                      e.args[0].__str__() + ":" + e.args[1].__str__())
         print "mysql error select_task_impala_sql%d: %s" % (e.args[0],
                                                             e.args[1])
Example #33
def execmd(cmd):
    result = {}
    result["type"] = cmd
    # all data is sent uniformly as a JSON string
    try:
        if cmd == "cpu":
            result["data"] = get_cpu_rate()
        elif cmd == "memory":
            result["data"] = get_mem_rate()
        elif cmd == "diskio":
            result["data"] = get_diskio_rate()
        elif cmd == "network":
            result["data"] = get_net_rate()
        elif cmd == "hostinfo":
            result["data"] = get_host_info()
        result["hostname"] = socket.gethostname()
        Logger.info("Collected %s Messages" % cmd)
    except Exception, e:
        Logger.error(e)
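execmd above dispatches on the command name through an if/elif chain. A sketch of the same collection step expressed as a lookup table, with the collector functions and the logger passed in rather than assumed to be global (the helper name and signature here are hypothetical):

import socket

def collect(cmd, collectors, logger):
    # collectors: mapping of command name -> zero-argument callable,
    # e.g. {"cpu": get_cpu_rate, "memory": get_mem_rate}  (names from the example)
    result = {"type": cmd, "hostname": socket.gethostname()}
    try:
        if cmd in collectors:
            result["data"] = collectors[cmd]()
        logger.info("Collected %s Messages" % cmd)
    except Exception as e:
        logger.error(e)
    return result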
Example #34
class ShellLib(object):

    log = None
    command = None

    def __init__(self):
        self.logger = Logger()

    def newCommand(self):
        self.command = Command()

    def setWorkingDirectory(self, path):
        self.command.setWorkingDirectory(path)

    def addParameter(self, value):
        self.command.addParameter(value)
        self.logger.info(str(self.command))

    def enableShellMode(self):
        self.command.setShellMode(True)

    def disableShellMode(self):
        self.command.setShellMode(False)

    def execute(self):

        self.logger.info("Executing.. [%s] wd=%s" %
                         (self.command, self.command.getWorkingDirectory()))

        p = subprocess.Popen(self.command.getParameters(),
                             shell=self.command.getShellMode(),
                             cwd=self.command.getWorkingDirectory(),
                             stdout=subprocess.PIPE,
                             stderr=subprocess.PIPE)
        out, err = p.communicate()

        if out: self.logger.info(out)
        if err: self.logger.error(err)

        if p.returncode != 0:
            raise Exception(
                "Error occurred while trying to execute console command RC=%d"
                % p.returncode)
Example #35
def execmd(cmd):
    result = {}
    result['type'] = cmd
    # all data is sent uniformly as a JSON string
    try:
        if cmd == "cpu":
            result['data'] = get_cpu_rate()
        elif cmd == "memory":
            result['data'] = get_mem_rate()
        elif cmd == "diskio":
            result['data'] = get_diskio_rate()
        elif cmd == "network":
            result['data'] = get_net_rate()
        elif cmd == 'hostinfo':
            result['data'] = get_host_info()
        result['hostname'] = socket.gethostname()
        Logger.info('Collected %s Messages' % cmd)
    except Exception, e:
        Logger.error(e)
Example #36
 def update_task_runstatus(self, task_id, run_date, runstatus):
     try:
         con = MySQLdb.connect(host=self.mysqlinfo["host"],
                               port=string.atoi(self.mysqlinfo["port"]),
                               user=self.mysqlinfo["user"],
                               passwd=self.mysqlinfo["passwd"],
                               db=self.mysqlinfo["dbname"])
         cursor = con.cursor()
         sql = "update task_sceduling set runstatus=%s where task_id=%s and run_date=%s"
         if runstatus:
             cursor.execute(sql, ["1", task_id, run_date])
         else:
             cursor.execute(sql, ["0", task_id, run_date])
         con.commit()
     except Error, e:
         Logger.error("mysql error update_task_sceduling" +
                      e.args[0].__str__() + ":" + e.args[1].__str__())
         print "mysql error update_task_sceduling%d: %s" % (e.args[0],
                                                            e.args[1])
Example #37
    def get_event_message(script_path=None):
        logger = Logger().get_logger()
        if script_path is None:
            logger.warning("script path is empty!")

        script = LoadFromYaml().save_load(script_path=script_path)
        if not script:
            logger.error("script file:{0} load failed".format(script_path))
            return ""

        try:
            script_conf = script["script config"]
            # logger.info(script_conf)
            return script_conf
        except KeyError:
            event_cls_list = LaunchExtractor().do(script_path=script_path)

        message = ""
        user_input_event_list = []
        for event in event_cls_list:
            # logger.info(event.event_type)
            if isinstance(event, UserInputEvent):
                user_input_event_list.append(event)
                user_message = "input@{0} >> {1}".format(
                    event.window["class name"], event.message)
                # logger.info(user_message)
                message += user_message
                message += '\n'
        message_body = '*'*20 + 'Script' + '*'*20 + '\n'
        message_body += "<script>\n"
        message_body += "delay=1\n"
        message_body += "script@{0}\n".format(script_path)
        message_body += message
        message_body += "</script>\n"
        logger.error(message_body)
        message_head = '*'*20 + 'Head' + '*'*20 + '\n'
        message_head += "<head>\n"
        message_head += "script_run_count = 1\n"
        # message_head += "script_index = script_list[1]\n"
        message_head += "script_delay = 1\n"
        message_head += "</head>\n"
        return message_head + message_body
Example #38
def write(driverNumber, queue, group_id):
    #time.sleep(40)
    while True:
        #print("write thread live")
        if (config.reset == True):
            statusThread["write" + driverNumber] = False
            while (config.reset == True):
                time.sleep(1)
            statusThread["write" + driverNumber] = True
            print(f"Write {driverNumber} voltando a funcionar")
        if not queue.empty():
            msg_type, path, caption = queue.get()
            print(f"Removed from queue: {msg_type}-{path}-{caption}")
            try:
                contact = config.driver[driverNumber].get_contact_from_id(
                    group_id)
            except Exception as identifier:
                #config.reset=True
                print(f"Error in function get_contact_from_id")
                Logger.error(f"[writer-{driverNumber}] {identifier}")
            if msg_type == "chat":
                send_message(contact, caption)
            elif msg_type in ['document', 'image', 'video', 'ptt', 'audio']:
                chat_id = contact.get_chat().id
                print(f"write in path: {path}")
                if msg_type in ['document', 'ptt', 'audio']:
                    send_media(config.driver[driverNumber], path, chat_id, "",
                               contact)
                    time.sleep(1)
                    contact.get_chat().send_message(caption)
                else:
                    send_media(config.driver[driverNumber], path, chat_id,
                               caption, contact)
                os.remove(path)
                print(f"Deleted: {path}")
            elif msg_type == "sticker":
                pass
            write_on_backup_file(queue_dict[queue], "write", msg_type, path,
                                 caption)
            print(f"Writed: {msg_type}-{path}-{caption}")
Example #39
class ShellLib(object):
    
    log = None
    command = None
    
    def __init__(self):        
        self.logger = Logger()
        
    def newCommand(self):        
        self.command = Command()
        
    def setWorkingDirectory(self, path):
        self.command.setWorkingDirectory(path)
        
    def addParameter(self, value):        
        self.command.addParameter(value)
        self.logger.info(str(self.command))
        
    def enableShellMode(self):
        self.command.setShellMode(True)

    def disableShellMode(self):
        self.command.setShellMode(False)
           
    def execute(self):
        
        self.logger.info("Executing.. [%s] wd=%s" % (self.command, self.command.getWorkingDirectory()))
        
        p = subprocess.Popen(self.command.getParameters(), shell=self.command.getShellMode(), cwd=self.command.getWorkingDirectory(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate()
        
        if out: self.logger.info(out)
        if err: self.logger.error(err)
                
        if p.returncode != 0:
            raise Exception("Error occurred while trying to execute console command RC=%d" % p.returncode)
Example #40
    def handle(self):
        while True:
            self.data = self.request.recv(1024).strip()
            # cur_thread = threading.currentThread()
            info = "RECV from ", self.client_address[0], self.client_address[1]
            Logger.info(info)
            if self.data == None or len(self.data) == 0:
                Logger.error("empty command")
                break
            Logger.debug(self.data)
            # business logic here
            try:
                cmd = command.decode(self.data)
            except:
                Logger.error("format error")
                break

            #            try:
            response = application.dispatch(cmd)
            Logger.debug(response)
            self.send(response)
            if response["code"] == -1:
                # server.shutdown()
                break
Example #41
  def save(relation):
    if TrackRelationFactory.__use_db:
      trackA = relation.trackA.key()
      trackB = relation.trackB.key()
      if trackA > trackB:
        trackC = trackB
        trackB = trackA
        trackA = trackC
      TrackRelationFactory.__db.execute(
          u"REPLACE INTO track_relation "\
          "(trackA, trackB, relation) values (?, ?, ?)", (
          trackA, trackB, Binary(pickle.dumps(relation, -1))))
      TrackRelationFactory.__db.commit()

    elif TrackRelationFactory.__use_fs:
      pathA = join(TrackRelationFactory.__path, relation.trackA.artist.key(),
          relation.trackA.key())
      fileA = u"%s.pkl" % join(pathA, relation.key())

      pathB = join(TrackRelationFactory.__path, relation.trackB.artist.key(),
          relation.trackB.key())
      fileB = u"%s.pkl" % join(pathB, relation.key())
      if not isdir(pathA): makedirs(pathA)
      if not isdir(pathB): makedirs(pathB)
      if fileA > fileB:
        fileC = fileB
        fileB = fileA
        fileA = fileC
      TrackRelationFactory._save(relation, fileA)
      if fileA != fileB:
        if exists(fileB): remove(fileB)
        try:
          link(fileA, fileB)
        except OSError:
          Logger.error(u"Failed to link %s to %s" % (fileA, fileB))
      relation.lastmodified = getmtime(fileA)
Example #42
 def load_file(filename):
   try:
     input = open(filename, 'rb')
     relation = pickle.load(input)
     input.close()
     if ArtistRelationFactory.register(relation):
       relation.lastmodified = getmtime(filename)
       return True
     # we forget inactive / unplayed files, so invalid relations are probably
     # OK
     Logger.error(u"ArtistRelationFactory.load_file(%s): invalid relation" % filename)
   except IOError:
     Logger.error('IOError')
     Logger.error(format_exc())
   except EOFError:
     Logger.error('EOFError')
     Logger.error(format_exc())
   except AttributeError:
     Logger.error('AttributeError: Format changed?')
     Logger.error(format_exc())
   return False
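load_file above opens the pickle by hand and only closes it on the success path. A sketch of the same flow using a with block so the file is always closed; the factory and logger arguments stand in for ArtistRelationFactory and Logger from the example:

import pickle
from os.path import getmtime
from traceback import format_exc

def load_relation_file(filename, factory, logger):
    try:
        with open(filename, 'rb') as handle:
            relation = pickle.load(handle)
        if factory.register(relation):
            relation.lastmodified = getmtime(filename)
            return True
        # inactive / unplayed files are forgotten, so an invalid relation is tolerable
        logger.error(u"load_relation_file(%s): invalid relation" % filename)
    except (IOError, EOFError, AttributeError):
        logger.error(format_exc())
    return False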
Example #43
class RPCProtocol:
    """
    This is an abstract class for processing and sending rpc messages.
    A class that implements the `MessageProcessor` interface probably should
    extend this as it does most of the work of keeping track of messages.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, proto, router, waitTimeout=5):
        """
        Args:
            proto: A protobuf `Node` object containing info about this node.
            router: A `RoutingTable` object from dht.routing. Implies a `network.Server` object
                    must be started first.
            waitTimeout: Consider it a connection failure if no response
                    within this time window.
            noisy: Whether or not to log the output for this class.
            testnet: The network parameters to use.

        """
        self.proto = proto
        self.router = router
        self._waitTimeout = waitTimeout
        self._outstanding = {}
        self.log = Logger(system=self)

    def receive_message(self, datagram, connection):
        m = Message()
        try:
            m.ParseFromString(datagram)
            sender = node.Node(m.sender.guid, m.sender.ip, m.sender.port, m.sender.signedPublicKey, m.sender.vendor)
        except Exception:
            # If message isn't formatted properly then ignore
            self.log.warning("received unknown message from %s, ignoring" % str(connection.dest_addr))
            return False

        if m.testnet != self.multiplexer.testnet:
            self.log.warning("received message from %s with incorrect network parameters." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        if m.protoVer < PROTOCOL_VERSION:
            self.log.warning("received message from %s with incompatible protocol version." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        # Check that the GUID is valid. If not, ignore
        if self.router.isNewNode(sender):
            try:
                pubkey = m.sender.signedPublicKey[len(m.sender.signedPublicKey) - 32:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(m.sender.signedPublicKey)
                h = nacl.hash.sha512(m.sender.signedPublicKey)
                pow_hash = h[64:128]
                if int(pow_hash[:6], 16) >= 50 or hexlify(m.sender.guid) != h[:40]:
                    raise Exception('Invalid GUID')

            except Exception:
                self.log.warning("received message from sender with invalid GUID, ignoring")
                connection.shutdown()
                return False

        if m.sender.vendor:
            self.db.VendorStore().save_vendor(m.sender.guid, m.sender.ip, m.sender.port, m.sender.signedPublicKey)

        msgID = m.messageID
        if m.command == NOT_FOUND:
            data = None
        else:
            data = tuple(m.arguments)
        if msgID in self._outstanding:
            self._acceptResponse(msgID, data, sender)
        elif m.command != NOT_FOUND:
            self._acceptRequest(msgID, str(Command.Name(m.command)).lower(), data, sender, connection)

    def _acceptResponse(self, msgID, data, sender):
        if data is not None:
            msgargs = (b64encode(msgID), sender)
            self.log.debug("received response for message id %s from %s" % msgargs)
        else:
            self.log.warning("received 404 error response from %s" % sender)
        d, timeout = self._outstanding[msgID]
        timeout.cancel()
        d.callback((True, data))
        del self._outstanding[msgID]

    def _acceptRequest(self, msgID, funcname, args, sender, connection):
        self.log.debug("received request from %s, command %s" % (sender, funcname.upper()))
        f = getattr(self, "rpc_%s" % funcname, None)
        if f is None or not callable(f):
            msgargs = (self.__class__.__name__, funcname)
            self.log.error("%s has no callable method rpc_%s; ignoring request" % msgargs)
            return False
        if funcname == "hole_punch":
            f(sender, *args)
        else:
            d = defer.maybeDeferred(f, sender, *args)
            d.addCallback(self._sendResponse, funcname, msgID, sender, connection)

    def _sendResponse(self, response, funcname, msgID, sender, connection):
        self.log.debug("sending response for msg id %s to %s" % (b64encode(msgID), sender))
        m = Message()
        m.messageID = msgID
        m.sender.MergeFrom(self.proto)
        m.protoVer = PROTOCOL_VERSION
        m.testnet = self.multiplexer.testnet
        if response is None:
            m.command = NOT_FOUND
        else:
            m.command = Command.Value(funcname.upper())
            for arg in response:
                m.arguments.append(str(arg))
        data = m.SerializeToString()
        connection.send_message(data)

    def _timeout(self, msgID, address):
        """
        If a message times out we are first going to try hole punching because
        the node may be behind a restricted NAT. If it is successful, the original
        should get through. This timeout will only fire if the hole punching
        fails.
        """
        # pylint: disable=pointless-string-statement
        """
        Hole punching disabled for now

        seed = SEED_NODE_TESTNET if self.multiplexer.testnet else SEED_NODE
        if not hp and self.multiplexer.ip_address[0] != seed[0]:
            args = (address[0], address[1], b64encode(msgID))
            self.log.debug("did not receive reply from %s:%s for msgID %s, trying hole punching..." % args)
            self.hole_punch(seed, address[0], address[1], "True")
            timeout = reactor.callLater(self._waitTimeout, self._timeout, msgID, address, True)
            self._outstanding[msgID][1] = timeout
        else:
        """
        args = (b64encode(msgID), self._waitTimeout)
        self.log.warning("did not receive reply for msg id %s within %i seconds" % args)
        self._outstanding[msgID][0].callback((False, None))
        del self._outstanding[msgID]
        self.multiplexer[address].shutdown()

    def rpc_hole_punch(self, sender, ip, port, relay="False"):
        """
        A method for handling an incoming HOLE_PUNCH message. Relay the message
        to the correct node if it's not for us. Otherwise sent a datagram to allow
        the other node to punch through our NAT.
        """
        if relay == "True":
            self.hole_punch((ip, int(port)), sender.ip, sender.port)
        else:
            self.log.debug("punching through NAT for %s:%s" % (ip, port))
            self.multiplexer.send_datagram(" ", (ip, int(port)))

    def _get_waitTimeout(self, command):
        if command == GET_IMAGE or command == GET_CONTRACT:
            return 100
        else:
            return self._waitTimeout

    def __getattr__(self, name):
        if name.startswith("_") or name.startswith("rpc_"):
            return object.__getattr__(self, name)

        try:
            return object.__getattr__(self, name)
        except AttributeError:
            pass

        def func(address, *args):
            msgID = sha1(str(random.getrandbits(255))).digest()
            m = Message()
            m.messageID = msgID
            m.sender.MergeFrom(self.proto)
            m.command = Command.Value(name.upper())
            m.protoVer = PROTOCOL_VERSION
            for arg in args:
                m.arguments.append(str(arg))
            m.testnet = self.multiplexer.testnet
            data = m.SerializeToString()
            d = defer.Deferred()
            timeout = reactor.callLater(self._get_waitTimeout(m.command), self._timeout, msgID, address)
            self._outstanding[msgID] = [d, timeout]
            self.multiplexer.send_message(data, address)
            self.log.debug("calling remote function %s on %s (msgid %s)" % (name, address, b64encode(msgID)))
            return d

        return func
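RPCProtocol keeps every unanswered request in self._outstanding, keyed by message ID, and either resolves it when a response arrives or fails it when the timeout fires. A stripped-down sketch of that bookkeeping pattern, using threading.Timer in place of the Twisted reactor and deferreds used above (an intentional simplification, not the original implementation):

import threading

class PendingRequests(object):
    """Track outstanding request callbacks and time them out."""

    def __init__(self, wait_timeout=5):
        self._wait_timeout = wait_timeout
        self._outstanding = {}  # msg_id -> (callback, timer)
        self._lock = threading.Lock()

    def register(self, msg_id, callback):
        timer = threading.Timer(self._wait_timeout, self._timeout, args=(msg_id,))
        with self._lock:
            self._outstanding[msg_id] = (callback, timer)
        timer.start()

    def resolve(self, msg_id, data):
        with self._lock:
            entry = self._outstanding.pop(msg_id, None)
        if entry is not None:
            callback, timer = entry
            timer.cancel()
            callback((True, data))

    def _timeout(self, msg_id):
        with self._lock:
            entry = self._outstanding.pop(msg_id, None)
        if entry is not None:
            callback, _ = entry
            callback((False, None))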
Example #44
 def by_key(key):
   if TrackRelationFactory.__lib.has_key(key):
     return TrackRelationFactory.__lib[key]
   Logger.error(u"TrackRelationFactory.by_key(%s): key not found" % key)
   return None
Example #45
    cursor.close()
    conn.commit()


def callback(ch, method, properties, body):
    print " [x] %r" % (body,)
    data = json.loads(body)
    print data
    write_sql(data)


try:
    connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
    channel = connection.channel()

    channel.exchange_declare(exchange="logs", type="fanout")
except Exception, e:
    Logger.error(e)


result = channel.queue_declare(exclusive=True)
queue_name = result.method.queue

channel.queue_bind(exchange="logs", queue=queue_name)

print " [*] Waiting for logs. To exit press CTRL+C"

channel.basic_consume(callback, queue=queue_name, no_ack=True)

channel.start_consuming()
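The consumer above binds an exclusive queue to a fanout exchange named "logs" and hands each body to callback. For context, a minimal matching publisher using the same classic blocking pika API and the older type= keyword seen above (the payload shown is only a placeholder):

import json
import pika

connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
channel = connection.channel()
channel.exchange_declare(exchange="logs", type="fanout")

body = json.dumps({"example": "payload"})  # placeholder message body
channel.basic_publish(exchange="logs", routing_key="", body=body)
connection.close()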
Example #46
class Global:
    """
       Class represents parser for global.conf
    """
    # options common for all connectors
    conf_ams = {'AMS': ['Host', 'Token', 'Project', 'Topic', 'Bulk', 'PackSingleMsg']}
    conf_general = {'General': ['PublishAms', 'WriteAvro']}
    conf_auth = {'Authentication': ['HostKey', 'HostCert', 'CAPath', 'CAFile',
                                    'VerifyServerCert', 'UsePlainHttpAuth',
                                    'HttpUser', 'HttpPass']}
    conf_conn = {'Connection': ['Timeout', 'Retry', 'SleepRetry']}
    conf_state = {'InputState': ['SaveDir', 'Days']}

    # options specific for every connector
    conf_topo_schemas = {'AvroSchemas': ['TopologyGroupOfEndpoints',
                                         'TopologyGroupOfGroups']}
    conf_topo_output = {'Output': ['TopologyGroupOfEndpoints',
                                   'TopologyGroupOfGroups']}
    conf_downtimes_schemas = {'AvroSchemas': ['Downtimes']}
    conf_downtimes_output = {'Output': ['Downtimes']}
    conf_weights_schemas = {'AvroSchemas': ['Weights']}
    conf_weights_output = {'Output': ['Weights']}
    conf_poem_output = {'Output': ['Poem']}
    conf_poem_schemas = {'AvroSchemas': ['Poem']}

    def __init__(self, caller, confpath=None, **kwargs):
        self.optional = dict()

        self.logger = Logger(str(self.__class__))
        self._filename = '/etc/argo-egi-connectors/global.conf' if not confpath else confpath
        self._checkpath = kwargs['checkpath'] if 'checkpath' in kwargs.keys() else False

        self.optional.update(self._lowercase_dict(self.conf_ams))
        self.optional.update(self._lowercase_dict(self.conf_auth))

        self.shared_secopts = self._merge_dict(self.conf_ams,
                                               self.conf_general,
                                               self.conf_auth, self.conf_conn,
                                               self.conf_state)
        self.secopts = {'topology-gocdb-connector.py':
                        self._merge_dict(self.shared_secopts,
                                         self.conf_topo_schemas,
                                         self.conf_topo_output),
                        'downtimes-gocdb-connector.py':
                        self._merge_dict(self.shared_secopts,
                                         self.conf_downtimes_schemas,
                                         self.conf_downtimes_output),
                        'weights-vapor-connector.py':
                        self._merge_dict(self.shared_secopts,
                                         self.conf_weights_schemas,
                                         self.conf_weights_output),
                        'poem-connector.py':
                        self._merge_dict(self.shared_secopts,
                                         self.conf_poem_schemas,
                                         self.conf_poem_output)
                        }

        if caller:
            self.caller_secopts = self.secopts[os.path.basename(caller)]
        else:
            self.caller_secopts = self.shared_secopts

    def _merge_dict(self, *args):
        newd = dict()
        for d in args:
            newd.update(d)
        return newd

    def _lowercase_dict(self, d):
        newd = dict()
        for k in d.iterkeys():
            opts = [o.lower() for o in d[k]]
            newd[k.lower()] = opts
        return newd

    def merge_opts(self, custopt, section):
        newd = custopt.copy()
        opts = [o for o in self.options.keys() if o.startswith(section)]
        for o in opts:
            if o in newd:
                continue
            newd.update({o: self.options[o]})

        return newd

    def is_complete(self, opts, section):
        all = set([section+o for o in self.optional[section]])
        diff = all.symmetric_difference(opts.keys())
        if diff:
            return (False, diff)
        return (True, None)

    def _concat_sectopt(self, d):
        opts = list()

        for k in d.iterkeys():
            for v in d[k]:
                opts.append(k+v)

        return opts

    def _one_active(self, options):
        loweropts = self._lowercase_dict(options)

        lval = [eval(self.options[k]) for k in self._concat_sectopt(loweropts)]

        if any(lval):
            return True
        else:
            return False

    def parse(self):
        config = ConfigParser.ConfigParser()

        if not os.path.exists(self._filename):
            self.logger.error('Could not find %s' % self._filename)
            raise SystemExit(1)

        config.read(self._filename)
        options = {}

        lower_section = [sec.lower() for sec in config.sections()]

        try:
            for sect, opts in self.caller_secopts.items():
                if (sect.lower() not in lower_section and
                    sect.lower() not in self.optional.keys()):
                    raise ConfigParser.NoSectionError(sect.lower())

                for opt in opts:
                    for section in config.sections():
                        if section.lower().startswith(sect.lower()):
                            try:
                                optget = config.get(section, opt)
                                if self._checkpath and os.path.isfile(optget) is False:
                                    raise OSError(errno.ENOENT, optget)

                                if ('output' in section.lower() and
                                    'DATE' not in optget):
                                    self.logger.error('No DATE placeholder in %s' % opt)
                                    raise SystemExit(1)

                                options.update({(sect+opt).lower(): optget})

                            except ConfigParser.NoOptionError as e:
                                s = e.section.lower()
                                if (s in self.optional.keys() and
                                    e.option in self.optional[s]):
                                    pass
                                else:
                                    raise e

            self.options = options

            if not self._one_active(self.conf_general):
                self.logger.error('At least one of %s needs to be True' % (', '.join(self._concat_sectopt(self.conf_general))))
                raise SystemExit(1)

        except ConfigParser.NoOptionError as e:
            self.logger.error(e.message)
            raise SystemExit(1)
        except ConfigParser.NoSectionError as e:
            self.logger.error("%s defined" % (e.args[0]))
            raise SystemExit(1)
        except OSError as e:
            self.logger.error('%s %s' % (os.strerror(e.args[0]), e.args[1]))
            raise SystemExit(1)

        return options
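
# A minimal, hedged usage sketch for the Global parser above. It assumes the
# snippet's own imports (os, errno, ConfigParser, Logger) are available and that
# the config file exists with the required sections. parse() returns a flat dict
# keyed by lowercased '<Section><Option>' strings, e.g. 'amshost' for [AMS] Host.
if __name__ == '__main__':
    global_conf = Global('topology-gocdb-connector.py',
                         confpath='/etc/argo-egi-connectors/global.conf')
    opts = global_conf.parse()
    print opts.get('amshost'), opts.get('generalpublishams')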
Example #47
0
class CustomerConf:
    """
       Class with parser for customer.conf and additional helper methods
    """
    _custattrs = None
    _cust = {}
    _defjobattrs = {'topology-gocdb-connector.py' : ['TopoFetchType',
                                                     'TopoSelectGroupOfGroups',
                                                     'TopoSelectGroupOfEndpoints',
                                                     'TopoFeed',
                                                     'TopoFeedPaging'],
                    'poem-connector.py': ['PoemServerHost',
                                          'PoemServerVO',
                                          'PoemNamespace'],
                    'downtimes-gocdb-connector.py': ['DowntimesFeed'],
                    'weights-vapor-connector.py': ['WeightsFeed']
                    }
    _jobs, _jobattrs = {}, None
    _cust_optional = ['AmsHost', 'AmsProject', 'AmsToken', 'AmsTopic',
                      'AmsPackSingleMsg', 'AuthenticationUsePlainHttpAuth',
                      'AuthenticationHttpUser', 'AuthenticationHttpPass']
    tenantdir = ''
    deftopofeed = 'https://goc.egi.eu/gocdbpi/'

    def __init__(self, caller, confpath, **kwargs):
        self.logger = Logger(str(self.__class__))
        self._filename = '/etc/argo-egi-connectors/customer.conf' if not confpath else confpath
        if not kwargs:
            self._jobattrs = self._defjobattrs[os.path.basename(caller)]
        else:
            if 'jobattrs' in kwargs.keys():
                self._jobattrs = kwargs['jobattrs']
            if 'custattrs' in kwargs.keys():
                self._custattrs = kwargs['custattrs']

    def parse(self):
        config = ConfigParser.ConfigParser()
        if not os.path.exists(self._filename):
            self.logger.error('Could not find %s' % self._filename)
            raise SystemExit(1)
        config.read(self._filename)

        lower_custopt = [oo.lower() for oo in self._cust_optional]

        for section in config.sections():
            if section.lower().startswith('CUSTOMER_'.lower()):
                optopts = dict()

                try:
                    custjobs = config.get(section, 'Jobs').split(',')
                    custjobs = [job.strip() for job in custjobs]
                    custdir = config.get(section, 'OutputDir')
                    custname = config.get(section, 'Name')

                    for o in lower_custopt:
                        try:
                            code = "optopts.update(%s = config.get(section, '%s'))" % (o, o)
                            exec code
                        except ConfigParser.NoOptionError as e:
                            if e.option in lower_custopt:
                                pass
                            else:
                                raise e

                except ConfigParser.NoOptionError as e:
                    self.logger.error(e.message)
                    raise SystemExit(1)

                self._cust.update({section: {'Jobs': custjobs, 'OutputDir': custdir, 'Name': custname}})
                if optopts:
                    ams, auth = {}, {}
                    for k, v in optopts.iteritems():
                        if k.startswith('ams'):
                            ams.update({k: v})
                        if k.startswith('authentication'):
                            auth.update({k: v})
                    self._cust[section].update(AmsOpts=ams)
                    self._cust[section].update(AuthOpts=auth)

                if self._custattrs:
                    for attr in self._custattrs:
                        if config.has_option(section, attr):
                            self._cust[section].update({attr: config.get(section, attr)})


        for cust in self._cust:
            for job in self._cust[cust]['Jobs']:
                if config.has_section(job):
                    try:
                        profiles = config.get(job, 'Profiles')
                        dirname = config.get(job, 'Dirname')
                    except ConfigParser.NoOptionError as e:
                        self.logger.error(e.message)
                        raise SystemExit(1)

                    self._jobs.update({job: {'Profiles': profiles, 'Dirname': dirname}})
                    if self._jobattrs:
                        for attr in self._jobattrs:
                            if config.has_option(job, attr):
                                self._jobs[job].update({attr: config.get(job, attr)})
                else:
                    self.logger.error("Could not find Jobs: %s for customer: %s" % (job, cust))
                    raise SystemExit(1)

    def _sect_to_dir(self, sect):
        try:
            match = re.match('(?:^\w+?_)(\w+)', sect)
            assert match is not None
            dirname = match.group(1)
        except (AssertionError, KeyError):
            self.logger.error("Could not get Dirname from section %s" % sect)
            raise SystemExit(1)
        return dirname

    def _dir_from_sect(self, sect, d):
        dirname = ''

        for k, v in d.items():
            if k == sect:
                if 'Dirname' in v.keys():
                    dirname = v['Dirname']
                elif 'OutputDir' in v.keys():
                    dirname = v['OutputDir']
                else:
                    dirname = self._sect_to_dir(sect)

        return dirname

    def get_jobdir(self, job):
        return self._dir_from_sect(job, self._jobs)

    def get_amsopts(self, cust):
        if 'AmsOpts' in self._cust[cust]:
            return self._cust[cust]['AmsOpts']
        else:
            return dict()

    def get_authopts(self, feed, jobcust):
        for job, cust in jobcust:
            if 'AuthOpts' in self._cust[cust]:
                return self._cust[cust]['AuthOpts']
            else:
                return dict()

    def get_fulldir(self, cust, job):
        return self.get_custdir(cust) + '/' + self.get_jobdir(job) + '/'

    def get_fullstatedir(self, root, cust, job):
        return root + '/' + self.get_custname(cust) + '/' + self.get_jobdir(job)

    def get_custdir(self, cust):
        return self._dir_from_sect(cust, self._cust)

    def get_custname(self, cust):
        return self._cust[cust]['Name']

    def make_dirstruct(self, root=None):
        dirs = []
        for cust in self._cust.keys():
            for job in self.get_jobs(cust):
                if root:
                    dirs.append(root + '/' + self.get_custname(cust) + '/' + self.get_jobdir(job))
                else:
                    dirs.append(self.get_custdir(cust) + '/' + self.get_jobdir(job))
            for d in dirs:
                try:
                    os.makedirs(d)
                except OSError as e:
                    if e.args[0] != errno.EEXIST:
                        self.logger.error('%s %s %s' % (os.strerror(e.args[0]), e.args[1], d))
                        raise SystemExit(1)

    def get_jobs(self, cust):
        jobs = []
        try:
            jobs = self._cust[cust]['Jobs']
        except KeyError:
            self.logger.error("Could not get Jobs for %s" % cust)
            raise SystemExit(1)
        return jobs

    def get_customers(self):
        return self._cust.keys()

    def get_profiles(self, job):
        profiles = self._jobs[job]['Profiles'].split(',')
        for i, p in enumerate(profiles):
            profiles[i] = p.strip()
        return profiles

    def get_gocdb_fetchtype(self, job):
        return self._jobs[job]['TopoFetchType']

    def _get_tags(self, job, option):
        tags = {}
        if option in self._jobs[job].keys():
            tagstr = self._jobs[job][option]
            match = re.findall("(\w+)\s*:\s*(\(.*?\))", tagstr)
            if match is not None:
                for m in match:
                    tags.update({m[0]: [e.strip('() ') for e in m[1].split(',')]})
            match = re.findall('([\w]+)\s*:\s*([\w\.\-\_]+)', tagstr)
            if match is not None:
                for m in match:
                    tags.update({m[0]: m[1]})
            else:
                self.logger.error("Could not parse option %s: %s" % (option, tagstr))
                return dict()
        return tags

    def get_gocdb_ggtags(self, job):
        return self._get_tags(job, 'TopoSelectGroupOfGroups')

    def get_gocdb_getags(self, job):
        return self._get_tags(job, 'TopoSelectGroupOfEndpoints')

    def get_vo_ggtags(self, job):
        return self._get_tags(job, 'TopoSelectGroupOfGroups')

    def _get_feed(self, job, key):
        try:
            feed = self._jobs[job][key]
        except KeyError:
            feed = ''
        return feed

    def _is_paginated(self, job):
        paging = False

        try:
            paging = self._jobs[job]['TopoFeedPaging']
        except KeyError:
            pass

        return paging

    def _update_feeds(self, feeds, feedurl, job, cust):
        if feedurl in feeds.keys():
            feeds[feedurl].append((job, cust))
        elif feedurl:
            feeds[feedurl] = []
            feeds[feedurl].append((job, cust))

    def get_feedscopes(self, feed, jobcust):
        ggtags, getags = [], []
        distinct_scopes = set()
        for job, cust in jobcust:
            gg = self._get_tags(job, 'TopoSelectGroupOfGroups')
            ge = self._get_tags(job, 'TopoSelectGroupOfEndpoints')
            for g in gg.items() + ge.items():
                if 'Scope'.lower() == g[0].lower():
                    if isinstance(g[1], list):
                        distinct_scopes.update(g[1])
                    else:
                        distinct_scopes.update([g[1]])

        return distinct_scopes

    def is_paginated(self, feed, jobcust):
        paginated = False

        for job, cust in jobcust:
            paginated = self._is_paginated(job)
            if paginated:
                break

        return eval(str(paginated))

    def get_mapfeedjobs(self, caller, name=None, deffeed=None):
        feeds = {}
        for c in self.get_customers():
            for job in self.get_jobs(c):
                if 'topology' in caller:
                    feedurl = self._get_feed(job, 'TopoFeed')
                    if feedurl:
                        self._update_feeds(feeds, feedurl, job, c)
                    else:
                        feedurl = deffeed
                        self._update_feeds(feeds, feedurl, job, c)
                elif 'downtimes' in caller:
                    feedurl = self._get_feed(job, 'DowntimesFeed')
                    if feedurl:
                        self._update_feeds(feeds, feedurl, job, c)
                    else:
                        feedurl = deffeed
                        self._update_feeds(feeds, feedurl, job, c)
                elif 'weights' in caller:
                    feedurl = self._get_feed(job, 'WeightsFeed')
                    if feedurl:
                        self._update_feeds(feeds, feedurl, job, c)
                    else:
                        feedurl = deffeed
                        self._update_feeds(feeds, feedurl, job, c)

        return feeds

    def get_poemserver_host(self, job):
        return self._jobs[job]['PoemServerHost']

    def get_poemserver_vo(self, job):
        vo = self._jobs[job]['PoemServerVO'].split(',')
        for i, p in enumerate(vo):
            vo[i] = p.strip()
        return vo

    def get_namespace(self, job):
        return self._jobs[job]['PoemNamespace']
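
# Hedged usage sketch for CustomerConf above; the connector name is one of the
# keys in _defjobattrs and the path is the documented default. parse() must run
# before any of the getter methods are used.
if __name__ == '__main__':
    customer_conf = CustomerConf('poem-connector.py',
                                 '/etc/argo-egi-connectors/customer.conf')
    customer_conf.parse()
    for cust in customer_conf.get_customers():
        print customer_conf.get_custname(cust), customer_conf.get_jobs(cust)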
Example #48
0
 def by_key(key):
   if ArtistRelationFactory.__lib.has_key(key):
     return ArtistRelationFactory.__lib[key]
   Logger.error(u"ArtistRelationFactory.by_key(%s): key not found" % key)
   return None
Example #49
0
class MarketProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, node, router, signing_key, database):
        self.router = router
        self.node = node
        RPCProtocol.__init__(self, node, router)
        self.log = Logger(system=self)
        self.multiplexer = None
        self.db = database
        self.signing_key = signing_key
        self.listeners = []
        self.handled_commands = [GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, GET_USER_METADATA,
                                 GET_CONTRACT_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING,
                                 BROADCAST, MESSAGE, ORDER, ORDER_CONFIRMATION, COMPLETE_ORDER, DISPUTE_OPEN,
                                 DISPUTE_CLOSE, GET_RATINGS, REFUND]

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def add_listener(self, listener):
        self.listeners.append(listener)

    def rpc_get_contract(self, sender, contract_hash):
        self.log.info("serving contract %s to %s" % (contract_hash.encode('hex'), sender))
        self.router.addContact(sender)
        try:
            with open(self.db.filemap.get_file(contract_hash.encode("hex")), "r") as filename:
                contract = filename.read()
            return [contract]
        except Exception:
            self.log.warning("could not find contract %s" % contract_hash.encode('hex'))
            return None

    def rpc_get_image(self, sender, image_hash):
        self.router.addContact(sender)
        try:
            if len(image_hash) != 20:
                self.log.warning("Image hash is not 20 characters %s" % image_hash)
                raise Exception("Invalid image hash")
            self.log.info("serving image %s to %s" % (image_hash.encode('hex'), sender))
            with open(self.db.filemap.get_file(image_hash.encode("hex")), "rb") as filename:
                image = filename.read()
            return [image]
        except Exception:
            self.log.warning("could not find image %s" % image_hash[:20].encode('hex'))
            return None

    def rpc_get_profile(self, sender):
        self.log.info("serving profile to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(True)
            return [proto, self.signing_key.sign(proto)[:64]]
        except Exception:
            self.log.error("unable to load the profile")
            return None

    def rpc_get_user_metadata(self, sender):
        self.log.info("serving user metadata to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.short_description = proto.short_description
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return [m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.error("unable to load profile metadata")
            return None

    def rpc_get_listings(self, sender):
        self.log.info("serving store listings to %s" % sender)
        self.router.addContact(sender)
        try:
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(self.db.listings.get_proto())
            l.handle = p.handle
            l.avatar_hash = p.avatar_hash
            return [l.SerializeToString(), self.signing_key.sign(l.SerializeToString())[:64]]
        except Exception:
            self.log.warning("could not find any listings in the database")
            return None

    def rpc_get_contract_metadata(self, sender, contract_hash):
        self.log.info("serving metadata for contract %s to %s" % (contract_hash.encode("hex"), sender))
        self.router.addContact(sender)
        try:
            proto = self.db.listings.get_proto()
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(proto)
            for listing in l.listing:
                if listing.contract_hash == contract_hash:
                    listing.avatar_hash = p.avatar_hash
                    listing.handle = p.handle
                    ser = listing.SerializeToString()
            return [ser, self.signing_key.sign(ser)[:64]]
        except Exception:
            self.log.warning("could not find metadata for contract %s" % contract_hash.encode("hex"))
            return None

    def rpc_follow(self, sender, proto, signature):
        self.log.info("received follow request from %s" % sender)
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.pubkey)
            verify_key.verify(proto, signature)
            f = Followers.Follower()
            f.ParseFromString(proto)
            if f.guid != sender.id:
                raise Exception('GUID does not match sending node')
            if f.following != self.node.id:
                raise Exception('Following wrong node')
            f.signature = signature
            self.db.follow.set_follower(f)
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.short_description = proto.short_description
            m.nsfw = proto.nsfw
            for listener in self.listeners:
                try:
                    verifyObject(NotificationListener, listener)
                    listener.notify(sender.id, f.metadata.handle, "follow", "", "", f.metadata.avatar_hash)
                except DoesNotImplement:
                    pass
            return ["True", m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.warning("failed to validate follower")
            return ["False"]

    def rpc_unfollow(self, sender, signature):
        self.log.info("received unfollow request from %s" % sender)
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.pubkey)
            verify_key.verify("unfollow:" + self.node.id, signature)
            f = self.db.follow
            f.delete_follower(sender.id)
            return ["True"]
        except Exception:
            self.log.warning("failed to validate signature on unfollow request")
            return ["False"]

    def rpc_get_followers(self, sender):
        self.log.info("serving followers list to %s" % sender)
        self.router.addContact(sender)
        ser = self.db.follow.get_followers()
        if ser is None:
            return None
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_get_following(self, sender):
        self.log.info("serving following list to %s" % sender)
        self.router.addContact(sender)
        ser = self.db.follow.get_following()
        if ser is None:
            return None
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_broadcast(self, sender, message, signature):
        if len(message) <= 140 and self.db.follow.is_following(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.pubkey)
                verify_key.verify(message, signature)
            except Exception:
                self.log.warning("received invalid broadcast from %s" % sender)
                return ["False"]
            self.log.info("received a broadcast from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(BroadcastListener, listener)
                    listener.notify(sender.id, message)
                except DoesNotImplement:
                    pass
            return ["True"]
        else:
            return ["False"]

    def rpc_message(self, sender, pubkey, encrypted):
        try:
            box = Box(self.signing_key.to_curve25519_private_key(), PublicKey(pubkey))
            plaintext = box.decrypt(encrypted)
            p = PlaintextMessage()
            p.ParseFromString(plaintext)
            signature = p.signature
            p.ClearField("signature")
            verify_key = nacl.signing.VerifyKey(p.pubkey)
            verify_key.verify(p.SerializeToString(), signature)
            h = nacl.hash.sha512(p.pubkey)
            pow_hash = h[40:]
            if int(pow_hash[:6], 16) >= 50 or p.sender_guid.encode("hex") != h[:40] or p.sender_guid != sender.id:
                raise Exception('Invalid guid')
            self.log.info("received a message from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(MessageListener, listener)
                    listener.notify(p, signature)
                except DoesNotImplement:
                    pass
            return ["True"]
        except Exception:
            self.log.warning("received invalid message from %s" % sender)
            return ["False"]

    def rpc_order(self, sender, pubkey, encrypted):
        try:
            box = Box(self.signing_key.to_curve25519_private_key(), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            v = c.verify(sender.pubkey)
            if v is True:
                self.router.addContact(sender)
                self.log.info("received an order from %s, waiting for payment..." % sender)
                payment_address = c.contract["buyer_order"]["order"]["payment"]["address"]
                chaincode = c.contract["buyer_order"]["order"]["payment"]["chaincode"]
                masterkey_b = c.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
                buyer_key = derive_childkey(masterkey_b, chaincode)
                amount = c.contract["buyer_order"]["order"]["payment"]["amount"]
                listing_hash = c.contract["vendor_offer"]["listing"]["contract_id"]
                signature = self.signing_key.sign(
                    str(payment_address) + str(amount) + str(listing_hash) + str(buyer_key))[:64]
                c.await_funding(self.get_notification_listener(), self.multiplexer.blockchain, signature, False)
                return [signature]
            else:
                self.log.warning("received invalid order from %s reason %s" % (sender, v))
                return ["False"]
        except Exception, e:
            self.log.error("Exception (%s) occurred processing order from %s" % (e.message, sender))
            return ["False"]
Example #50
0
def execmd(cmd):
    result = {}
    result["type"] = cmd
    # all payloads are sent uniformly as JSON strings
    try:
        if cmd == "cpu":
            result["data"] = get_cpu_rate()
        elif cmd == "memory":
            result["data"] = get_mem_rate()
        elif cmd == "diskio":
            result["data"] = get_diskio_rate()
        elif cmd == "network":
            result["data"] = get_net_rate()
        elif cmd == "hostinfo":
            result["data"] = get_host_info()
        result["hostname"] = socket.gethostname()
        Logger.info("Collected %s Messages" % cmd)
    except Exception, e:
        Logger.error(e)

    return result


if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] in ["cpu", "memory", "diskio", "network", "hostinfo"]:
        result = execmd(sys.argv[1])
        data_tran(json.dumps(result))
    else:
        Logger.error("wrong parameter")
Example #51
0
class MarketProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, node_proto, router, signing_key):
        self.router = router
        RPCProtocol.__init__(self, node_proto, router)
        self.log = Logger(system=self)
        self.multiplexer = None
        self.hashmap = HashMap()
        self.signing_key = signing_key
        self.listeners = []
        self.handled_commands = [GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, GET_USER_METADATA,
                                 GET_CONTRACT_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING,
                                 NOTIFY, MESSAGE]

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def add_listener(self, listener):
        self.listeners.append(listener)

    def rpc_get_contract(self, sender, contract_hash):
        self.log.info("Looking up contract ID %s" % contract_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            with open(self.hashmap.get_file(contract_hash), "r") as filename:
                contract = filename.read()
            return [contract]
        except Exception:
            self.log.warning("Could not find contract %s" % contract_hash.encode('hex'))
            return ["None"]

    def rpc_get_image(self, sender, image_hash):
        self.log.info("Looking up image with hash %s" % image_hash.encode('hex'))
        self.router.addContact(sender)
        try:
            with open(self.hashmap.get_file(image_hash), "r") as filename:
                image = filename.read()
            return [image]
        except Exception:
            self.log.warning("Could not find image %s" % image_hash.encode('hex'))
            return ["None"]

    def rpc_get_profile(self, sender):
        self.log.info("Fetching profile")
        self.router.addContact(sender)
        try:
            proto = Profile().get(True)
            return [proto, self.signing_key.sign(proto)[:64]]
        except Exception:
            self.log.error("Unable to load the profile")
            return ["None"]

    def rpc_get_user_metadata(self, sender):
        self.log.info("Fetching metadata")
        self.router.addContact(sender)
        try:
            proto = Profile().get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.short_description = proto.short_description
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return [m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.error("Unable to get the profile metadata")
            return ["None"]

    def rpc_get_listings(self, sender):
        self.log.info("Fetching listings")
        self.router.addContact(sender)
        try:
            p = Profile().get()
            l = Listings()
            l.ParseFromString(ListingsStore().get_proto())
            l.handle = p.handle
            l.avatar_hash = p.avatar_hash
            return [l.SerializeToString(), self.signing_key.sign(l.SerializeToString())[:64]]
        except Exception:
            self.log.warning("Could not find any listings in the database")
            return ["None"]

    def rpc_get_contract_metadata(self, sender, contract_hash):
        self.log.info("Fetching metadata for contract %s" % hexlify(contract_hash))
        self.router.addContact(sender)
        try:
            proto = ListingsStore().get_proto()
            l = Listings()
            l.ParseFromString(proto)
            for listing in l.listing:
                if listing.contract_hash == contract_hash:
                    ser = listing.SerializeToString()
            return [ser, self.signing_key.sign(ser)[:64]]
        except Exception:
            self.log.warning("Could not find metadata for contract %s" % hexlify(contract_hash))
            return ["None"]

    def rpc_follow(self, sender, proto, signature):
        self.log.info("Follow request from %s" % sender.id.encode("hex"))
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify(proto, signature)
            f = Followers.Follower()
            f.ParseFromString(proto)
            if f.guid != sender.id:
                raise Exception('GUID does not match sending node')
            if f.following != self.proto.guid:
                raise Exception('Following wrong node')
            f.signature = signature
            FollowData().set_follower(f)
            proto = Profile().get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return ["True", m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.warning("Failed to validate follower")
            return ["False"]

    def rpc_unfollow(self, sender, signature):
        self.log.info("Unfollow request from %s" % sender.id.encode("hex"))
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify("unfollow:" + self.proto.guid, signature)
            f = FollowData()
            f.delete_follower(sender.id)
            return ["True"]
        except Exception:
            self.log.warning("Failed to validate follower signature")
            return ["False"]

    def rpc_get_followers(self, sender):
        self.log.info("Fetching followers list from db")
        self.router.addContact(sender)
        ser = FollowData().get_followers()
        if ser is None:
            return ["None"]
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_get_following(self, sender):
        self.log.info("Fetching following list from db")
        self.router.addContact(sender)
        ser = FollowData().get_following()
        if ser is None:
            return ["None"]
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_notify(self, sender, message, signature):
        if len(message) <= 140 and FollowData().is_following(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                verify_key.verify(message, signature)
            except Exception:
                return ["False"]
            self.log.info("Received a notification from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(NotificationListener, listener)
                    listener.notify(sender.id, message)
                except DoesNotImplement:
                    pass
            return ["True"]
        else:
            return ["False"]

    def rpc_message(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            plaintext = box.decrypt(encrypted)
            p = Plaintext_Message()
            p.ParseFromString(plaintext)
            signature = p.signature
            p.ClearField("signature")
            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
            verify_key.verify(p.SerializeToString(), signature)
            h = nacl.hash.sha512(p.signed_pubkey)
            pow_hash = h[64:128]
            if int(pow_hash[:6], 16) >= 50 or hexlify(p.sender_guid) != h[:40] or p.sender_guid != sender.id:
                raise Exception('Invalid guid')
            self.log.info("Received a message from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(MessageListener, listener)
                    listener.notify(p, signature)
                except DoesNotImplement:
                    pass
            return ["True"]
        except Exception:
            self.log.error("Received invalid message from %s" % sender)
            return ["False"]

    def callGetContract(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetImage(self, nodeToAsk, image_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_image(address, image_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetProfile(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_profile(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetUserMetadata(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_user_metadata(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetListings(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_listings(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetContractMetadata(self, nodeToAsk, contract_hash):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_contract_metadata(address, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callFollow(self, nodeToAsk, proto, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.follow(address, proto, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callUnfollow(self, nodeToAsk, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.unfollow(address, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowers(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_followers(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowing(self, nodeToAsk):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.get_following(address)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callNotify(self, nodeToAsk, message, signature):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.notify(address, message, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callMessage(self, nodeToAsk, ephemeral_pubkey, ciphertext):
        address = (nodeToAsk.ip, nodeToAsk.port)
        d = self.message(address, ephemeral_pubkey, ciphertext)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            self.log.info("got response from %s, adding to router" % node)
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def __iter__(self):
        return iter(self.handled_commands)
Example #52
0
class Server(object):
    """
    High level view of a node instance.  This is the object that should be created
    to start listening as an active node on the network.
    """

    def __init__(self, node, db, ksize=20, alpha=3, storage=None):
        """
        Create a server instance.  This will start listening on the given port.

        Args:
            node: The node instance for this peer. It must contain (at minimum) an ID,
                public key, ip address, and port.
            ksize (int): The k parameter from the paper
            alpha (int): The alpha parameter from the paper
            storage: An instance that implements :interface:`~dht.storage.IStorage`
        """
        self.ksize = ksize
        self.alpha = alpha
        self.log = Logger(system=self)
        self.storage = storage or ForgetfulStorage()
        self.node = node
        self.protocol = KademliaProtocol(self.node, self.storage, ksize, db)
        self.refreshLoop = LoopingCall(self.refreshTable).start(3600)

    def listen(self, port):
        """
        Start listening on the given port.

        This is the same as calling::

            reactor.listenUDP(port, server.protocol)
        """
        return reactor.listenUDP(port, self.protocol)

    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for rid in self.protocol.getRefreshIDs():
            node = Node(rid)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
            ds.append(spider.find())

        def republishKeys(_):
            ds = []
            # Republish keys older than one hour
            for keyword in self.storage.iterkeys():
                for k, v in self.storage.iteritems(keyword):
                    if self.storage.get_ttl(keyword, k) < 601200:
                        ds.append(self.set(keyword, k, v))

        return defer.gatherResults(ds).addCallback(republishKeys)

    def querySeed(self, seed, pubkey):
        """
        Query an HTTP seed and return a `list` of (ip, port) `tuple` pairs.

        Args:
           seed: A `string` consisting of "ip:port" or "hostname:port"
           pubkey: The hex encoded public key to verify the signature on the response
        """
        try:
            nodes = []
            c = httplib.HTTPConnection(seed)
            c.request("GET", "/")
            response = c.getresponse()
            self.log.info("Https response from %s: %s, %s" % (seed, response.status, response.reason))
            data = response.read()
            reread_data = data.decode("zlib")
            proto = peers.PeerSeeds()
            proto.ParseFromString(reread_data)
            for peer in proto.peer_data:
                p = peers.PeerData()
                p.ParseFromString(peer)
                tup = (str(p.ip_address), p.port)
                nodes.append(tup)
            verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
            verify_key.verify("".join(proto.peer_data), proto.signature)
            return nodes
        except Exception, e:
            self.log.error("Failed to query seed: %s" % str(e))
Example #53
0
class RPCProtocol():
    """
    This is an abstract class for processing and sending rpc messages.
    A class that implements the `MessageProcessor` interface probably should
    extend this as it does most of the work of keeping track of messages.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, proto, router, waitTimeout=5, noisy=True):
        """
        Args:
            proto: A protobuf `Node` object containing info about this node.
            router: A `RoutingTable` object from dht.routing. Implies a `network.Server` object
                    must be started first.
            waitTimeout: Consider it a connection failure if no response
                    within this time window.
            noisy: Whether or not to log the output for this class.

        """
        self.proto = proto
        self.router = router
        self._waitTimeout = waitTimeout
        self._outstanding = {}
        self.noisy = noisy
        self.log = Logger(system=self)

    def receive_message(self, datagram, connection):
        m = Message()
        try:
            m.ParseFromString(datagram)
            sender = node.Node(m.sender.guid, connection.dest_addr[0], connection.dest_addr[1],
                               m.sender.signedPublicKey, m.sender.vendor)
        except:
            # If the message isn't formatted properly then ignore it
            self.log.msg("Received unknown message from %s, ignoring" % str(connection.dest_addr))
            return False

        # Check that the GUID is valid. If not, ignore
        if self.router.isNewNode(sender):
            try:
                pubkey = m.sender.signedPublicKey[len(m.sender.signedPublicKey) - 32:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(m.sender.signedPublicKey)
                h = nacl.hash.sha512(m.sender.signedPublicKey)
                pow = h[64:128]
                if int(pow[:6], 16) >= 50 or hexlify(m.sender.guid) != h[:40]:
                    raise Exception('Invalid GUID')

            except:
                self.log.msg("Received message from sender with invalid GUID, ignoring")
                return False

        msgID = m.messageID
        data = tuple(m.arguments)
        if msgID in self._outstanding:
            self._acceptResponse(msgID, data, sender)
        else:
            self._acceptRequest(msgID, str(Command.Name(m.command)).lower(), data, sender, connection)

    def _acceptResponse(self, msgID, data, sender):
        msgargs = (b64encode(msgID), sender)
        if self.noisy:
            self.log.msg("Received response for message id %s from %s" % msgargs)
        d, timeout = self._outstanding[msgID]
        timeout.cancel()
        d.callback((True, data))
        del self._outstanding[msgID]

    def _acceptRequest(self, msgID, funcname, args, sender, connection):
        if self.noisy:
            self.log.msg("received request from %s, command %s" % (sender, funcname.upper()))
        f = getattr(self, "rpc_%s" % funcname, None)
        if f is None or not callable(f):
            msgargs = (self.__class__.__name__, funcname)
            self.log.error("%s has no callable method rpc_%s; ignoring request" % msgargs)
            return False
        d = defer.maybeDeferred(f, sender, *args)
        d.addCallback(self._sendResponse, funcname, msgID, sender, connection)

    def _sendResponse(self, response, funcname, msgID, sender, connection):
        if self.noisy:
            self.log.msg("sending response for msg id %s to %s" % (b64encode(msgID), sender))
        m = Message()
        m.messageID = msgID
        m.sender.MergeFrom(self.proto)
        m.command = Command.Value(funcname.upper())
        for arg in response:
            m.arguments.append(arg)
        data = m.SerializeToString()
        connection.send_message(data)

    def _timeout(self, msgID):
        args = (b64encode(msgID), self._waitTimeout)
        self.log.error("Did not received reply for msg id %s within %i seconds" % args)
        self._outstanding[msgID][0].callback((False, None))
        del self._outstanding[msgID]

    def __getattr__(self, name):
        if name.startswith("_") or name.startswith("rpc_"):
            return object.__getattr__(self, name)

        try:
            return object.__getattr__(self, name)
        except AttributeError:
            pass

        def func(address, *args):
            msgID = sha1(str(random.getrandbits(255))).digest()
            m = Message()
            m.messageID = msgID
            m.sender.MergeFrom(self.proto)
            m.command = Command.Value(name.upper())
            for arg in args:
                m.arguments.append(arg)
            data = m.SerializeToString()
            if self.noisy:
                self.log.msg("calling remote function %s on %s (msgid %s)" % (name, address, b64encode(msgID)))
            self.multiplexer.send_message(data, address)
            d = defer.Deferred()
            timeout = reactor.callLater(self._waitTimeout, self._timeout, msgID)
            self._outstanding[msgID] = (d, timeout)
            return d

        return func
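
# A tiny, standalone illustration of the dynamic-dispatch pattern used by
# RPCProtocol.__getattr__ above: unknown attribute lookups become callables that
# package the command name and arguments (the real class serializes them into a
# protobuf Message, sends it through the multiplexer and returns a Deferred).
class DispatchSketch(object):
    def __getattr__(self, name):
        if name.startswith("_"):
            raise AttributeError(name)

        def func(*args):
            return {"command": name.upper(), "arguments": [str(a) for a in args]}
        return func

# DispatchSketch().get_profile("10.0.0.1") ->
#     {'command': 'GET_PROFILE', 'arguments': ['10.0.0.1']}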
Example #54
0
class Lint:
    def __init__(self, options):
        self.log = Logger()

        if not options.workdir:
            raise RuntimeError, "Lint: No working directory specified."
        
        lintResult = self.__getLintResult(options.workdir)
        
        ignoreMessages = []
        if hasattr(options, "ignoremessages"):
            optionType = type(options.ignoremessages).__name__
            if optionType == "list":
                ignoreMessages = options.ignoremessages
            elif optionType == "str":
                ignoreMessages = options.ignoremessages.split(",")
        
        ignoreClasses = []
        if hasattr(options, "ignoreclasses"):
            optionType = type(options.ignoreclasses).__name__
            if optionType == "list":
                ignoreClasses = options.ignoreclasses
            elif optionType == "str":
                ignoreClasses = options.ignoreclasses.split(",")
        
        self.lintData = self.__parseLintResult(lintResult, ignoreMessages, ignoreClasses)
        
        
    def __getLintResult(self, workdir):
        self.log.info("Running lint in directory %s" %workdir)
        startdir = os.getcwd()
        os.chdir(workdir)
        ret,out,err = util.invokePiped("python generate.py lint")
        
        if (ret > 0):
            raise RuntimeError, "Lint run failed. " + err
        
        os.chdir(startdir)
        return out

    
    def __parseLintResult(self, text, ignoreMessages=[], ignoreClasses=[]):
        self.log.info("Lint parsing lint output")
        self.log.debug("Lint ignoring messages: %s" %repr(ignoreMessages))
        self.log.debug("Lint ignoring classes: %s" %repr(ignoreClasses))
        log = ""
        if (isinstance(text, str)):
            log = text.split("\n")
        else:
            log = text
          
        data = {}
        for line in log:
            msgre = re.compile('.*\): (.*)$')
            msgma = msgre.match(line)
            msg = None
            if not msgma:
                continue
            msg = msgma.group(1)
            genericmsg = None
            member = None
            hint = None
            (genericmsg, member, hint) = self.__getMessage(msg)      
      
            if not genericmsg:
                self.log.error("Lint.parseLintResult couldn't extract generic message from line:\n" + line)
                continue
            if (genericmsg[len(genericmsg)-3:] == " in"):
                genericmsg = genericmsg[0:len(genericmsg)-3]
            
            if genericmsg in ignoreMessages:
                continue
            
            msgid = genericmsg
            if (not msgid in data):
                data[msgid] = []
  
            if (hint[0:2] == "! "):
                hint = hint[2:]
  
            info = {}
            info['member'] = member
            if (hint != ""):
                info['hint'] = hint
            info['path'] = ''
            info['line'] = ''
  
            pathre = re.compile('^.*([\\\/]source[\\\/].*) \(')
            pathma = pathre.match(line)
            if (pathma):
                info['path'] = pathma.group(1)
  
            linecolre = re.compile('.*(\(.*\)).*')
            linecolma = linecolre.match(line)
            if (linecolma):
                info['line'] = linecolma.group(1)
  
            ignoreClass = False
            for cls in ignoreClasses:
                classPath = cls.replace(".","/")
                clsre = re.compile("^.*" + classPath + ".*$")
                clsma = clsre.match(info['path'])
                if (clsma):
                    ignoreClass = True

            if not ignoreClass:
                data[msgid].append(info)  
          
        del_keys = []
        for key, value in data.iteritems():
            if (len(value) == 0):
                del_keys.append(key)
        for k in del_keys:
            del data[k]
      
        return data


    def __getMessage(self,fullmsg):
        genericmsg = None
        member = None
        hint = None
      
        msgre = re.compile("^([\w\- ]+)'([^\s]*)'([\w ]*)[\. ]*(.*)$")
        msgrma = msgre.match(fullmsg)
        if msgrma:        
            genericmsg = msgrma.group(1) + msgrma.group(3)
            if (genericmsg[len(genericmsg)-1] == " "):
                genericmsg = genericmsg[:-1]
            member = msgrma.group(2)
            hint = msgrma.group(4)
    
        return (genericmsg, member, hint)

    
    def getResult(self):
        return self.lintData  
    
    
    def getFlatResult(self):
        flatData = []
        data = self.getResult()
        for message in data:
            for messageDetails in data[message]:
                flatMessage = {
                  "message" : message,
                  "member" : messageDetails["member"],
                  "path" : messageDetails["path"],
                  "line" : messageDetails["line"]
                }
                flatData.append(flatMessage)
        return flatData

    
    def getResultJson(self):
        return json.dumps(self.lintData, sort_keys=True, indent=2)
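
# Quick, self-contained check of the message-splitting regex used in
# __getMessage above; the sample lint message is made up for illustration.
import re

sample = "Unused variable 'foo' in function. ! Consider removing it"
msgre = re.compile("^([\w\- ]+)'([^\s]*)'([\w ]*)[\. ]*(.*)$")
msgrma = msgre.match(sample)
if msgrma:
    print msgrma.group(1) + msgrma.group(3)   # generic message text
    print msgrma.group(2)                     # offending member ('foo')
    print msgrma.group(4)                     # optional hint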
Example #55
0
class RPCProtocol:
    """
    This is an abstract class for processing and sending rpc messages.
    A class that implements the `MessageProcessor` interface probably should
    extend this as it does most of the work of keeping track of messages.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, sourceNode, router, waitTimeout=15):
        """
        Args:
            sourceNode: A protobuf `Node` object containing info about this node.
            router: A `RoutingTable` object from dht.routing. Implies a `network.Server` object
                    must be started first.
            waitTimeout: Timeout for whole messages. Note the txrudp layer has a per-packet
                    timeout but invalid responses won't trigger it. The waitTimeout on this
                    layer needs to be long enough to allow whole messages (ex. images) to
                    transmit.

        """
        self.sourceNode = sourceNode
        self.router = router
        self._waitTimeout = waitTimeout
        self._outstanding = {}
        self.log = Logger(system=self)

    def receive_message(self, message, sender, connection, ban_score):
        if message.testnet != self.multiplexer.testnet:
            self.log.warning("received message from %s with incorrect network parameters." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        if message.protoVer < PROTOCOL_VERSION:
            self.log.warning("received message from %s with incompatible protocol version." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        self.multiplexer.vendors[sender.id] = sender

        msgID = message.messageID
        if message.command == NOT_FOUND:
            data = None
        else:
            data = tuple(message.arguments)
        if msgID in self._outstanding:
            self._acceptResponse(msgID, data, sender)
        elif message.command != NOT_FOUND:
            # ban_score.process_message(message)
            self._acceptRequest(msgID, str(Command.Name(message.command)).lower(), data, sender, connection)

    def _acceptResponse(self, msgID, data, sender):
        if data is not None:
            msgargs = (b64encode(msgID), sender)
            self.log.debug("received response for message id %s from %s" % msgargs)
        else:
            self.log.warning("received 404 error response from %s" % sender)
        d = self._outstanding[msgID][0]
        if self._outstanding[msgID][2].active():
            self._outstanding[msgID][2].cancel()
        d.callback((True, data))
        del self._outstanding[msgID]

    def _acceptRequest(self, msgID, funcname, args, sender, connection):
        self.log.debug("received request from %s, command %s" % (sender, funcname.upper()))
        f = getattr(self, "rpc_%s" % funcname, None)
        if f is None or not callable(f):
            msgargs = (self.__class__.__name__, funcname)
            self.log.error("%s has no callable method rpc_%s; ignoring request" % msgargs)
            return False
        if funcname == "hole_punch":
            f(sender, *args)
        else:
            d = defer.maybeDeferred(f, sender, *args)
            d.addCallback(self._sendResponse, funcname, msgID, sender, connection)
            d.addErrback(self._sendResponse, "bad_request", msgID, sender, connection)

    def _sendResponse(self, response, funcname, msgID, sender, connection):
        self.log.debug("sending response for msg id %s to %s" % (b64encode(msgID), sender))
        m = Message()
        m.messageID = msgID
        m.sender.MergeFrom(self.sourceNode.getProto())
        m.protoVer = PROTOCOL_VERSION
        m.testnet = self.multiplexer.testnet
        if response is None:
            m.command = NOT_FOUND
        else:
            m.command = Command.Value(funcname.upper())
            if not isinstance(response, list):
                response = [response]
            for arg in response:
                m.arguments.append(str(arg))
        m.signature = self.signing_key.sign(m.SerializeToString())[:64]
        connection.send_message(m.SerializeToString())

    def timeout(self, node):
        """
        This timeout is called by the txrudp connection handler. We will run through the
        outstanding messages and callback false on any waiting on this IP address.
        """
        address = (node.ip, node.port)
        for msgID, val in self._outstanding.items():
            if address == val[1]:
                val[0].callback((False, None))
                if self._outstanding[msgID][2].active():
                    self._outstanding[msgID][2].cancel()
                del self._outstanding[msgID]

        self.router.removeContact(node)
        try:
            self.multiplexer[address].shutdown()
        except Exception:
            pass

    def rpc_hole_punch(self, sender, ip, port, relay="False"):
        """
        A method for handling an incoming HOLE_PUNCH message. Relay the message
        to the correct node if it's not for us. Otherwise send a datagram to allow
        the other node to punch through our NAT.
        """
        if relay == "True":
            self.log.debug("relaying hole punch packet to %s:%s for %s:%s" %
                           (ip, port, sender.ip, str(sender.port)))
            self.hole_punch(Node(digest("null"), ip, int(port), nat_type=FULL_CONE), sender.ip, sender.port)
        else:
            self.log.debug("punching through NAT for %s:%s" % (ip, port))
            # pylint: disable=W0612
            for i in range(20):
                self.multiplexer.send_datagram("", (ip, int(port)))

    def __getattr__(self, name):
        if name.startswith("_") or name.startswith("rpc_"):
            return object.__getattr__(self, name)

        try:
            return object.__getattr__(self, name)
        except AttributeError:
            pass

        def func(node, *args):
            msgID = sha1(str(random.getrandbits(255))).digest()

            m = Message()
            m.messageID = msgID
            m.sender.MergeFrom(self.sourceNode.getProto())
            m.command = Command.Value(name.upper())
            m.protoVer = PROTOCOL_VERSION
            for arg in args:
                m.arguments.append(str(arg))
            m.testnet = self.multiplexer.testnet
            m.signature = self.signing_key.sign(m.SerializeToString())[:64]
            data = m.SerializeToString()

            address = (node.ip, node.port)
            relay_addr = None
            if node.nat_type == SYMMETRIC or \
                    (node.nat_type == RESTRICTED and self.sourceNode.nat_type == SYMMETRIC):
                relay_addr = node.relay_node

            d = defer.Deferred()
            if m.command != HOLE_PUNCH:
                timeout = reactor.callLater(self._waitTimeout, self.timeout, node)
                self._outstanding[msgID] = [d, address, timeout]
                self.log.debug("calling remote function %s on %s (msgid %s)" % (name, address, b64encode(msgID)))

            self.multiplexer.send_message(data, address, relay_addr)

            if self.multiplexer[address].state != State.CONNECTED and \
                            node.nat_type == RESTRICTED and \
                            self.sourceNode.nat_type != SYMMETRIC:
                self.hole_punch(Node(digest("null"), node.relay_node[0], node.relay_node[1], nat_type=FULL_CONE),
                                address[0], address[1], "True")
                self.log.debug("sending hole punch message to %s" % address[0] + ":" + str(address[1]))

            return d

        return func
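
A note on the dynamic dispatch above: `__getattr__` turns any unknown attribute access into an RPC stub, so callers invoke remote procedures as plain method calls and get back a Twisted `Deferred` that fires with a (success, data) tuple. The sketch below is illustrative only; `protocol` and `node` are assumed to be a connected instance of the class above and a reachable peer.

def ping_peer(protocol, node):
    # `ping` is not defined on the protocol class; __getattr__ builds and
    # sends a PING message and returns the deferred tracked in _outstanding
    def handle(result):
        success, data = result      # success is False when the call timed out
        if success:
            print("PING answered with %s" % str(data))
        else:
            print("PING timed out")
    return protocol.ping(node).addCallback(handle)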
Example #56
0
class RPCProtocol:
    """
    This is an abstract class for processing and sending rpc messages.
    A class that implements the `MessageProcessor` interface probably should
    extend this as it does most of the work of keeping track of messages.
    """
    __metaclass__ = abc.ABCMeta

    def __init__(self, sourceNode, router, waitTimeout=2.5):
        """
        Args:
            proto: A protobuf `Node` object containing info about this node.
            router: A `RoutingTable` object from dht.routing. Implies a `network.Server` object
                    must be started first.
            waitTimeout: Consider it a connetion failure if no response
                    within this time window.
            noisy: Whether or not to log the output for this class.
            testnet: The network parameters to use.

        """
        self.sourceNode = sourceNode
        self.router = router
        self._waitTimeout = waitTimeout
        self._outstanding = {}
        self.log = Logger(system=self)

    def receive_message(self, datagram, connection, ban_score):
        m = Message()
        try:
            m.ParseFromString(datagram)
            sender = node.Node(m.sender.guid, m.sender.ip, m.sender.port, m.sender.signedPublicKey, m.sender.vendor)
        except Exception:
            # If the message isn't formatted properly then ignore it
            self.log.warning("received unknown message from %s, ignoring" % str(connection.dest_addr))
            return False

        if m.testnet != self.multiplexer.testnet:
            self.log.warning("received message from %s with incorrect network parameters." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        if m.protoVer < PROTOCOL_VERSION:
            self.log.warning("received message from %s with incompatible protocol version." %
                             str(connection.dest_addr))
            connection.shutdown()
            return False

        # Check that the GUID is valid. If not, ignore
        if self.router.isNewNode(sender):
            try:
                pubkey = m.sender.signedPublicKey[len(m.sender.signedPublicKey) - 32:]
                verify_key = nacl.signing.VerifyKey(pubkey)
                verify_key.verify(m.sender.signedPublicKey)
                h = nacl.hash.sha512(m.sender.signedPublicKey)
                pow_hash = h[64:128]
                if int(pow_hash[:6], 16) >= 50 or hexlify(m.sender.guid) != h[:40]:
                    raise Exception('Invalid GUID')

            except Exception:
                self.log.warning("received message from sender with invalid GUID, ignoring")
                connection.shutdown()
                return False

        if m.sender.vendor:
            self.db.VendorStore().save_vendor(m.sender.guid.encode("hex"), m.sender.ip,
                                              m.sender.port, m.sender.signedPublicKey)

        msgID = m.messageID
        if m.command == NOT_FOUND:
            data = None
        else:
            data = tuple(m.arguments)
        if msgID in self._outstanding:
            self._acceptResponse(msgID, data, sender)
        elif m.command != NOT_FOUND:
            #ban_score.process_message(m)
            self._acceptRequest(msgID, str(Command.Name(m.command)).lower(), data, sender, connection)

    def _acceptResponse(self, msgID, data, sender):
        if data is not None:
            msgargs = (b64encode(msgID), sender)
            self.log.debug("received response for message id %s from %s" % msgargs)
        else:
            self.log.warning("received 404 error response from %s" % sender)
        d = self._outstanding[msgID][0]
        if self._outstanding[msgID][2].active():
            self._outstanding[msgID][2].cancel()
        d.callback((True, data))
        del self._outstanding[msgID]

    def _acceptRequest(self, msgID, funcname, args, sender, connection):
        self.log.debug("received request from %s, command %s" % (sender, funcname.upper()))
        f = getattr(self, "rpc_%s" % funcname, None)
        if f is None or not callable(f):
            msgargs = (self.__class__.__name__, funcname)
            self.log.error("%s has no callable method rpc_%s; ignoring request" % msgargs)
            return False
        if funcname == "hole_punch":
            f(sender, *args)
        else:
            d = defer.maybeDeferred(f, sender, *args)
            d.addCallback(self._sendResponse, funcname, msgID, sender, connection)
            d.addErrback(self._sendResponse, "bad_request", msgID, sender, connection)

    def _sendResponse(self, response, funcname, msgID, sender, connection):
        self.log.debug("sending response for msg id %s to %s" % (b64encode(msgID), sender))
        m = Message()
        m.messageID = msgID
        m.sender.MergeFrom(self.sourceNode.getProto())
        m.protoVer = PROTOCOL_VERSION
        m.testnet = self.multiplexer.testnet
        if response is None:
            m.command = NOT_FOUND
        else:
            m.command = Command.Value(funcname.upper())
            if not isinstance(response, list):
                response = [response]
            for arg in response:
                m.arguments.append(str(arg))
        connection.send_message(m.SerializeToString())

    def timeout(self, address):
        """
        This timeout is called by the txrudp connection handler. We will run through the
        outstanding messages and callback false on any waiting on this IP address.
        """
        for msgID, val in self._outstanding.items():
            if address == val[1]:
                val[0].callback((False, None))
                del self._outstanding[msgID]
        try:
            node_to_remove = self.multiplexer[address].handler.node
            if node_to_remove is not None:
                self.router.removeContact(node_to_remove)
            self.multiplexer[address].shutdown()
        except Exception:
            pass

    def rpc_hole_punch(self, sender, ip, port, relay="False"):
        """
        A method for handling an incoming HOLE_PUNCH message. Relay the message
        to the correct node if it's not for us. Otherwise send a datagram to allow
        the other node to punch through our NAT.
        """
        if relay == "True":
            self.hole_punch((ip, int(port)), sender.ip, sender.port)
        else:
            self.log.debug("punching through NAT for %s:%s" % (ip, port))
            # pylint: disable=W0612
            for i in range(20):
                self.multiplexer.send_datagram("", (ip, int(port)))

    def __getattr__(self, name):
        if name.startswith("_") or name.startswith("rpc_"):
            return object.__getattr__(self, name)

        try:
            return object.__getattr__(self, name)
        except AttributeError:
            pass

        def func(address, *args):
            msgID = sha1(str(random.getrandbits(255))).digest()
            d = defer.Deferred()
            if name != "hole_punch":
                seed = SEED_NODE_TESTNET if self.multiplexer.testnet else SEED_NODE
                if address in self.multiplexer and self.multiplexer[address].state == State.CONNECTED:
                    timeout = reactor.callLater(self._waitTimeout, self.timeout, address)
                else:
                    timeout = reactor.callLater(self._waitTimeout, self.hole_punch, seed,
                                                address[0], address[1], "True", msgID)
                self._outstanding[msgID] = [d, address, timeout]
                self.log.debug("calling remote function %s on %s (msgid %s)" % (name, address, b64encode(msgID)))
            elif args[3] in self._outstanding:
                prev_msgID = args[3]
                args = args[:3]
                deferred, addr, hp = self._outstanding[prev_msgID]  # pylint: disable=W0612
                timeout = reactor.callLater(3, self.timeout, addr)
                self._outstanding[prev_msgID] = [deferred, addr, timeout]
                self.log.debug("sending hole punch message to %s" % args[0] + ":" + str(args[1]))

            m = Message()
            m.messageID = msgID
            m.sender.MergeFrom(self.sourceNode.getProto())
            m.command = Command.Value(name.upper())
            m.protoVer = PROTOCOL_VERSION
            for arg in args:
                m.arguments.append(str(arg))
            m.testnet = self.multiplexer.testnet
            data = m.SerializeToString()

            self.multiplexer.send_message(data, address)
            return d

        return func
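
Both protocol variants track in-flight requests the same way: `_outstanding` maps a message ID to `[Deferred, address, DelayedCall]`, and whichever of the response or the timeout arrives first resolves the deferred. The stripped-down sketch below shows just that pattern; it keys the timeout by message ID instead of peer address and leaves out the relay/NAT details, so treat it as an outline rather than the code above.

from twisted.internet import defer, reactor

_outstanding = {}
WAIT_TIMEOUT = 2.5

def send_request(msg_id, address):
    d = defer.Deferred()
    timeout = reactor.callLater(WAIT_TIMEOUT, _on_timeout, msg_id)
    _outstanding[msg_id] = [d, address, timeout]
    # ... serialize the message and hand it to the transport here ...
    return d

def on_response(msg_id, data):
    if msg_id not in _outstanding:
        return                          # late or unknown reply; drop it
    d, _, timeout = _outstanding.pop(msg_id)
    if timeout.active():
        timeout.cancel()
    d.callback((True, data))

def _on_timeout(msg_id):
    if msg_id in _outstanding:
        d, _, _ = _outstanding.pop(msg_id)
        d.callback((False, None))       # same (success, data) shape as a real reply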
Example #57
0
class Server(object):
    """
    High level view of a node instance.  This is the object that should be created
    to start listening as an active node on the network.
    """

    def __init__(self, node, ksize=20, alpha=3, storage=None):
        """
        Create a server instance.  This will start listening on the given port.

        Args:
            node: The node instance for this peer. It must contain (at minimum) an ID,
                public key, ip address, and port.
            ksize (int): The k parameter from the paper
            alpha (int): The alpha parameter from the paper
            storage: An instance that implements :interface:`~dht.storage.IStorage`
        """
        self.ksize = ksize
        self.alpha = alpha
        self.log = Logger(system=self)
        self.storage = storage or ForgetfulStorage()
        self.node = node
        self.protocol = KademliaProtocol(self.node, self.storage, ksize)
        self.refreshLoop = LoopingCall(self.refreshTable).start(3600)

    def listen(self, port):
        """
        Start listening on the given port.

        This is the same as calling::

            reactor.listenUDP(port, server.protocol)
        """
        return reactor.listenUDP(port, self.protocol)

    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        for rid in self.protocol.getRefreshIDs():
            node = Node(rid)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest)
            ds.append(spider.find())

        def republishKeys(_):
            ds = []
            # Republish keys older than one hour
            for keyword in self.storage.iterkeys():
                for k, v in self.storage.iteritems(keyword):
                    if self.storage.get_ttl(keyword, k) < 601200:
                        ds.append(self.set(keyword, k, v))

        return defer.gatherResults(ds).addCallback(republishKeys)

    def querySeed(self, seed, pubkey):
        """
        Query an HTTP seed and return a `list` of (ip, port) `tuple` pairs.

        Args:
           seed: A `string` consisting of "ip:port" or "hostname:port"
           pubkey: The hex encoded public key to verify the signature on the response
        """
        nodes = []
        c = httplib.HTTPConnection(seed)
        c.request("GET", "/")
        response = c.getresponse()
        self.log.info("Https response from %s: %s, %s" % (seed, response.status, response.reason))
        data = response.read()
        reread_data = data.decode("zlib")
        seeds = peers.PeerSeeds()
        try:
            seeds.ParseFromString(reread_data)
            for peer in seeds.peer_data:
                p = peers.PeerData()
                p.ParseFromString(peer)
                tup = (str(p.ip_address), p.port)
                nodes.append(tup)
            verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
            verify_key.verify(seeds.signature + "".join(seeds.peer_data))
        except Exception:
            self.log.error("Error parsing seed response.")
        return nodes

    def bootstrappableNeighbors(self):
        """
        Get a :class:`list` of (ip, port) :class:`tuple` pairs suitable for use as an argument
        to the bootstrap method.

        The server should have been bootstrapped
        already - this is just a utility for getting some neighbors and then
        storing them if this server is going down for a while.  When it comes
        back up, the list of nodes can be used to bootstrap.
        """
        neighbors = self.protocol.router.findNeighbors(self.node)
        return [tuple(n)[-2:] for n in neighbors]

    def bootstrap(self, addrs):
        """
        Bootstrap the server by connecting to other known nodes in the network.

        Args:
            addrs: A `list` of (ip, port) `tuple` pairs.  Note that only IP addresses
                   are acceptable - hostnames will cause an error.
        """

        # if the transport hasn't been initialized yet, wait a second
        if self.protocol.multiplexer.transport is None:
            return task.deferLater(reactor, 1, self.bootstrap, addrs)

        def initTable(results):
            nodes = []
            for addr, result in results.items():
                if result[0]:
                    n = objects.Node()
                    try:
                        n.ParseFromString(result[1][0])
                        pubkey = n.signedPublicKey[len(n.signedPublicKey) - 32:]
                        verify_key = nacl.signing.VerifyKey(pubkey)
                        verify_key.verify(n.signedPublicKey)
                        h = nacl.hash.sha512(n.signedPublicKey)
                        pow_hash = h[64:128]
                        if int(pow_hash[:6], 16) >= 50 or hexlify(n.guid) != h[:40]:
                            raise Exception('Invalid GUID')
                        nodes.append(Node(n.guid, addr[0], addr[1], n.signedPublicKey))
                    except Exception:
                        self.log.msg("Bootstrap node returned invalid GUID")
            spider = NodeSpiderCrawl(self.protocol, self.node, nodes, self.ksize, self.alpha)
            return spider.find()

        ds = {}
        for addr in addrs:
            ds[addr] = self.protocol.ping((addr[0], addr[1]))
        return deferredDict(ds).addCallback(initTable)

    def inetVisibleIP(self):
        """
        Get the internet visible IPs of this node as other nodes see them.

        Returns:
            A `list` of IPs.  If no one can be contacted, then the `list` will be empty.
        """

        def handle(results):
            ips = []
            for result in results:
                if result[0]:
                    ips.append((result[1][0], int(result[1][1])))
            self.log.debug("other nodes think our ip is %s" % str(ips))
            return ips

        ds = []
        for neighbor in self.bootstrappableNeighbors():
            ds.append(self.protocol.stun(neighbor))
        return defer.gatherResults(ds).addCallback(handle)

    def get(self, keyword):
        """
        Get a key if the network has it.

        Returns:
            :class:`None` if not found, the value otherwise.
        """
        dkey = digest(keyword)
        if self.storage.get(dkey) is not None:
            return defer.succeed(self.storage.get(dkey))
        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to get key %s" % keyword)
            return None
        spider = ValueSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find()

    def set(self, keyword, key, value):
        """
        Set the given key/value tuple at the hash of the given keyword.
        All values stored in the DHT are stored as dictionaries of key/value
        pairs. If a value already exists for a given keyword, the new key/value
        pair will be appended to the dictionary.

        Args:
            keyword: a `string` keyword. The SHA1 hash of which will be used as
                the key when inserting in the DHT.
            key: the 20 byte hash of the data.
            value: a serialized `protos.objects.Node` object which serves as a
                pointer to the node storing the data.

        Return: True if at least one peer responded. False if the store rpc
            completely failed.
        """
        self.log.debug("setting '%s' = '%s':'%s' on network" % (keyword, hexlify(key), hexlify(value)))
        dkey = digest(keyword)

        def store(nodes):
            self.log.info("setting '%s' on %s" % (keyword, map(str, nodes)))
            ds = [self.protocol.callStore(node, dkey, key, value) for node in nodes]

            keynode = Node(dkey)
            if self.node.distanceTo(keynode) < max([n.distanceTo(keynode) for n in nodes]):
                self.storage[dkey] = (key, value)
                self.log.debug("got a store request from %s, storing value" % str(self.node))

            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to set key %s" % key)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(store)

    def delete(self, keyword, key, signature):
        """
        Delete the given key/value pair from the keyword dictionary on the network.
        To delete you must provide a signature covering the key that you wish to
        delete. It will be verified against the public key stored in the value. We
        use our ksize as alpha to make sure we reach as many nodes storing our value
        as possible.

        Args:
            keyword: the `string` keyword where the data being deleted is stored.
            key: the 20 byte hash of the data.
            signature: a signature covering the key.

        """
        self.log.debug("deleting '%s':'%s' from the network" % (keyword, hexlify(key)))
        dkey = digest(keyword)

        def delete(nodes):
            self.log.info("deleting '%s' on %s" % (key, map(str, nodes)))
            ds = [self.protocol.callDelete(node, dkey, key, signature) for node in nodes]

            if self.storage.getSpecific(keyword, key) is not None:
                self.storage.delete(keyword, key)

            return defer.DeferredList(ds).addCallback(self._anyRespondSuccess)

        node = Node(dkey)
        nearest = self.protocol.router.findNeighbors(node)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to delete key %s" % key)
            return defer.succeed(False)
        spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.ksize)
        return spider.find().addCallback(delete)

    def get_node(self, guid):
        """
        Given a guid return a `Node` object containing its ip and port, or None if it's
        not found.

        Args:
            guid: the 20 raw bytes representing the guid.
        """
        node_to_find = Node(guid)

        def check_for_node(nodes):
            for node in nodes:
                if node.id == node_to_find.id:
                    return node
            return None
        index = self.protocol.router.getBucketFor(node_to_find)
        nodes = self.protocol.router.buckets[index].getNodes()
        for node in nodes:
            if node.id == node_to_find.id:
                return defer.succeed(node)
        nearest = self.protocol.router.findNeighbors(node_to_find)
        if len(nearest) == 0:
            self.log.warning("There are no known neighbors to find node %s" % node_to_find.id.encode("hex"))
            return defer.succeed(None)
        spider = NodeSpiderCrawl(self.protocol, node_to_find, nearest, self.ksize, self.alpha)
        return spider.find().addCallback(check_for_node)

    def _anyRespondSuccess(self, responses):
        """
        Given the result of a DeferredList of calls to peers, ensure that at least
        one of them was contacted and responded with a Truthy result.
        """
        for deferSuccess, result in responses:
            peerReached, peerResponse = result
            if deferSuccess and peerReached and peerResponse:
                return True
        return False

    def saveState(self, fname):
        """
        Save the state of this node (the alpha/ksize/id/immediate neighbors)
        to a cache file with the given fname.
        """
        data = {'ksize': self.ksize,
                'alpha': self.alpha,
                'id': self.node.id,
                'signed_pubkey': self.node.signed_pubkey,
                'neighbors': self.bootstrappableNeighbors()}
        if len(data['neighbors']) == 0:
            self.log.warning("No known neighbors, so not writing to cache.")
            return
        with open(fname, 'w') as f:
            pickle.dump(data, f)

    @classmethod
    def loadState(cls, fname, ip_address, port, multiplexer, storage=None):
        """
        Load the state of this node (the alpha/ksize/id/immediate neighbors)
        from a cache file with the given fname.
        """
        with open(fname, 'r') as f:
            data = pickle.load(f)
        n = Node(data['id'], ip_address, port, data['signed_pubkey'])
        s = Server(n, data['ksize'], data['alpha'], storage=storage)
        s.protocol.connect_multiplexer(multiplexer)
        if len(data['neighbors']) > 0:
            s.bootstrap(data['neighbors'])
        return s

    def saveStateRegularly(self, fname, frequency=600):
        """
        Save the state of node with a given regularity to the given
        filename.

        Args:
            fname: File name to save regularly to
            frequency: Frequency in seconds that the state should be saved.
                        By default, 10 minutes.
        """
        loop = LoopingCall(self.saveState, fname)
        loop.start(frequency)
        return loop
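
The `set`/`get` docstrings above describe the DHT as a keyword-indexed dictionary of key/value pairs, where the stored value is a serialized node acting as a pointer to whoever holds the actual data. The sketch below is a hedged usage example: `server` and `pointer` are assumed inputs (a bootstrapped `Server` and a serialized `protos.objects.Node`), and the local `digest` helper merely stands in for the project's own digest utility.

from hashlib import sha1

def digest(s):
    return sha1(str(s)).digest()        # stand-in for the project's digest helper

def publish_and_lookup(server, pointer, keyword="bicycles"):
    data_key = digest(pointer)          # 20 byte hash identifying the data

    def on_value(value):
        # the value stored under the keyword, or None if nothing was found
        print("lookup returned: %s" % str(value))

    def on_stored(ok):
        print("store %s" % ("succeeded" if ok else "failed"))
        return server.get(keyword).addCallback(on_value)

    return server.set(keyword, data_key, pointer).addCallback(on_stored)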
Example #58
0
class Server(object):
    """
    High level view of a node instance.  This is the object that should be created
    to start listening as an active node on the network.
    """

    def __init__(self, node, db, signing_key, ksize=20, alpha=3, storage=None):
        """
        Create a server instance. Call `listen` to start accepting traffic on the desired port.

        Args:
            node: The node instance for this peer. It must contain (at minimum) an ID,
                public key, ip address, and port.
            db: The database object, passed through to the protocol.
            signing_key: The key used to sign outgoing messages, also passed to the protocol.
            ksize (int): The k parameter from the paper
            alpha (int): The alpha parameter from the paper
            storage: An instance that implements :interface:`~dht.storage.IStorage`
        """
        self.ksize = ksize
        self.alpha = alpha
        self.log = Logger(system=self)
        self.storage = storage or ForgetfulStorage()
        self.node = node
        self.protocol = KademliaProtocol(self.node, self.storage, ksize, db, signing_key)
        self.refreshLoop = LoopingCall(self.refreshTable)
        reactor.callLater(1800, self.refreshLoop.start, 3600)

    def listen(self, port):
        """
        Start listening on the given port.

        This is the same as calling::

            reactor.listenUDP(port, server.protocol)
        """
        return reactor.listenUDP(port, self.protocol)

    def refreshTable(self):
        """
        Refresh buckets that haven't had any lookups in the last hour
        (per section 2.3 of the paper).
        """
        ds = []
        refresh_ids = self.protocol.getRefreshIDs()
        refresh_ids.append(digest(random.getrandbits(255)))  # random node so we get more diversity
        for rid in refresh_ids:
            node = Node(rid)
            nearest = self.protocol.router.findNeighbors(node, self.alpha)
            spider = NodeSpiderCrawl(self.protocol, node, nearest, self.ksize, self.alpha)
            ds.append(spider.find())

        def republishKeys(_):
            self.log.debug("Republishing key/values...")
            neighbors = self.protocol.router.findNeighbors(self.node, exclude=self.node)
            for node in neighbors:
                self.protocol.transferKeyValues(node)

        return defer.gatherResults(ds).addCallback(republishKeys)

    def querySeed(self, list_seed_pubkey):
        """
        Query one or more HTTP seeds and return a `list` of (ip, port) `tuple` pairs.

        Args:
            list_seed_pubkey: a `list` of one or more (seed, pubkey) tuples, where:
                seed: A `string` consisting of "ip:port" or "hostname:port"
                pubkey: The hex encoded public key to verify the signature on the response
        """

        nodes = []
        if not list_seed_pubkey:
            self.log.error('failed to query seed {0} from ob.cfg'.format(list_seed_pubkey))
            return nodes
        else:
            for sp in list_seed_pubkey:
                seed, pubkey = sp
                try:
                    self.log.info("querying %s for peers" % seed)
                    c = httplib.HTTPConnection(seed)
                    c.request("GET", "/")
                    response = c.getresponse()
                    self.log.debug("Http response from %s: %s, %s" % (seed, response.status, response.reason))
                    data = response.read()
                    reread_data = data.decode("zlib")
                    proto = peers.PeerSeeds()
                    proto.ParseFromString(reread_data)
                    for peer in proto.serializedNode:
                        n = objects.Node()
                        n.ParseFromString(peer)
                        tup = (str(n.nodeAddress.ip), n.nodeAddress.port)
                        nodes.append(tup)
                    verify_key = nacl.signing.VerifyKey(pubkey, encoder=nacl.encoding.HexEncoder)
                    verify_key.verify("".join(proto.serializedNode), proto.signature)
                    self.log.info("%s returned %s addresses" % (seed, len(nodes)))
                except Exception, e:
                    self.log.error("failed to query seed: %s" % str(e))
            return nodes
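
`querySeed` above decompresses the seed's response, parses the protobuf, and then checks an ed25519 signature over the concatenated serialized nodes using the hex-encoded key from the config. The self-contained sketch below reproduces that PyNaCl round trip with throwaway keys and placeholder payloads, purely to show the API shapes involved.

import nacl.signing
import nacl.encoding

signing_key = nacl.signing.SigningKey.generate()
payload = "".join(["serialized-node-one", "serialized-node-two"])
signature = signing_key.sign(payload).signature

# the seed operator publishes this hex key; clients rebuild the VerifyKey from it
hex_pubkey = signing_key.verify_key.encode(encoder=nacl.encoding.HexEncoder)
verify_key = nacl.signing.VerifyKey(hex_pubkey, encoder=nacl.encoding.HexEncoder)

# raises nacl.exceptions.BadSignatureError if the payload was tampered with
verify_key.verify(payload, signature)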
Example #59
0
class MarketProtocol(RPCProtocol):
    implements(MessageProcessor)

    def __init__(self, node, router, signing_key, database):
        self.router = router
        self.node = node
        RPCProtocol.__init__(self, node, router)
        self.log = Logger(system=self)
        self.multiplexer = None
        self.db = database
        self.signing_key = signing_key
        self.listeners = []
        self.handled_commands = [GET_CONTRACT, GET_IMAGE, GET_PROFILE, GET_LISTINGS, GET_USER_METADATA,
                                 GET_CONTRACT_METADATA, FOLLOW, UNFOLLOW, GET_FOLLOWERS, GET_FOLLOWING,
                                 BROADCAST, MESSAGE, ORDER, ORDER_CONFIRMATION, COMPLETE_ORDER, DISPUTE_OPEN,
                                 DISPUTE_CLOSE]

    def connect_multiplexer(self, multiplexer):
        self.multiplexer = multiplexer

    def add_listener(self, listener):
        self.listeners.append(listener)

    def rpc_get_contract(self, sender, contract_hash):
        self.log.info("serving contract %s to %s" % (contract_hash.encode('hex'), sender))
        self.router.addContact(sender)
        try:
            with open(self.db.HashMap().get_file(contract_hash.encode("hex")), "r") as f:
                contract = f.read()
            return [contract]
        except Exception:
            self.log.warning("could not find contract %s" % contract_hash.encode('hex'))
            return None

    def rpc_get_image(self, sender, image_hash):
        self.router.addContact(sender)
        try:
            if len(image_hash) != 20:
                raise Exception("Invalid image hash")
            self.log.info("serving image %s to %s" % (image_hash.encode('hex'), sender))
            with open(self.db.HashMap().get_file(image_hash.encode("hex")), "rb") as f:
                image = f.read()
            return [image]
        except Exception:
            self.log.warning("could not find image %s" % image_hash[:20].encode('hex'))
            return None

    def rpc_get_profile(self, sender):
        self.log.info("serving profile to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(True)
            return [proto, self.signing_key.sign(proto)[:64]]
        except Exception:
            self.log.error("unable to load the profile")
            return None

    def rpc_get_user_metadata(self, sender):
        self.log.info("serving user metadata to %s" % sender)
        self.router.addContact(sender)
        try:
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.short_description = proto.short_description
            m.avatar_hash = proto.avatar_hash
            m.nsfw = proto.nsfw
            return [m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.error("unable to load profile metadata")
            return None

    def rpc_get_listings(self, sender):
        self.log.info("serving store listings to %s" % sender)
        self.router.addContact(sender)
        try:
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(self.db.ListingsStore().get_proto())
            l.handle = p.handle
            l.avatar_hash = p.avatar_hash
            return [l.SerializeToString(), self.signing_key.sign(l.SerializeToString())[:64]]
        except Exception:
            self.log.warning("could not find any listings in the database")
            return None

    def rpc_get_contract_metadata(self, sender, contract_hash):
        self.log.info("serving metadata for contract %s to %s" % (contract_hash.encode("hex"), sender))
        self.router.addContact(sender)
        try:
            proto = self.db.ListingsStore().get_proto()
            p = Profile(self.db).get()
            l = Listings()
            l.ParseFromString(proto)
            for listing in l.listing:
                if listing.contract_hash == contract_hash:
                    listing.avatar_hash = p.avatar_hash
                    listing.handle = p.handle
                    ser = listing.SerializeToString()
            return [ser, self.signing_key.sign(ser)[:64]]
        except Exception:
            self.log.warning("could not find metadata for contract %s" % contract_hash.encode("hex"))
            return None

    def rpc_follow(self, sender, proto, signature):
        self.log.info("received follow request from %s" % sender)
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify(proto, signature)
            f = Followers.Follower()
            f.ParseFromString(proto)
            if f.guid != sender.id:
                raise Exception('GUID does not match sending node')
            if f.following != self.node.id:
                raise Exception('Following wrong node')
            f.signature = signature
            self.db.FollowData().set_follower(f)
            proto = Profile(self.db).get(False)
            m = Metadata()
            m.name = proto.name
            m.handle = proto.handle
            m.avatar_hash = proto.avatar_hash
            m.short_description = proto.short_description
            m.nsfw = proto.nsfw
            for listener in self.listeners:
                try:
                    verifyObject(NotificationListener, listener)
                    listener.notify(sender.id, f.metadata.handle, "follow", "", "", f.metadata.avatar_hash)
                except DoesNotImplement:
                    pass
            return ["True", m.SerializeToString(), self.signing_key.sign(m.SerializeToString())[:64]]
        except Exception:
            self.log.warning("failed to validate follower")
            return ["False"]

    def rpc_unfollow(self, sender, signature):
        self.log.info("received unfollow request from %s" % sender)
        self.router.addContact(sender)
        try:
            verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
            verify_key.verify("unfollow:" + self.node.id, signature)
            f = self.db.FollowData()
            f.delete_follower(sender.id)
            return ["True"]
        except Exception:
            self.log.warning("failed to validate signature on unfollow request")
            return ["False"]

    def rpc_get_followers(self, sender):
        self.log.info("serving followers list to %s" % sender)
        self.router.addContact(sender)
        ser = self.db.FollowData().get_followers()
        if ser is None:
            return None
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_get_following(self, sender):
        self.log.info("serving following list to %s" % sender)
        self.router.addContact(sender)
        ser = self.db.FollowData().get_following()
        if ser is None:
            return None
        else:
            return [ser, self.signing_key.sign(ser)[:64]]

    def rpc_broadcast(self, sender, message, signature):
        if len(message) <= 140 and self.db.FollowData().is_following(sender.id):
            try:
                verify_key = nacl.signing.VerifyKey(sender.signed_pubkey[64:])
                verify_key.verify(message, signature)
            except Exception:
                self.log.warning("received invalid broadcast from %s" % sender)
                return ["False"]
            self.log.info("received a broadcast from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(BroadcastListener, listener)
                    listener.notify(sender.id, message)
                except DoesNotImplement:
                    pass
            return ["True"]
        else:
            return ["False"]

    def rpc_message(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            plaintext = box.decrypt(encrypted)
            p = PlaintextMessage()
            p.ParseFromString(plaintext)
            signature = p.signature
            p.ClearField("signature")
            verify_key = nacl.signing.VerifyKey(p.signed_pubkey[64:])
            verify_key.verify(p.SerializeToString(), signature)
            h = nacl.hash.sha512(p.signed_pubkey)
            pow_hash = h[64:128]
            if int(pow_hash[:6], 16) >= 50 or p.sender_guid.encode("hex") != h[:40] or p.sender_guid != sender.id:
                raise Exception('Invalid guid')
            self.log.info("received a message from %s" % sender)
            self.router.addContact(sender)
            for listener in self.listeners:
                try:
                    verifyObject(MessageListener, listener)
                    listener.notify(p, signature)
                except DoesNotImplement:
                    pass
            return ["True"]
        except Exception:
            self.log.warning("received invalid message from %s" % sender)
            return ["False"]

    def rpc_order(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            if c.verify(sender.signed_pubkey[64:]):
                self.router.addContact(sender)
                self.log.info("received an order from %s, waiting for payment..." % sender)
                payment_address = c.contract["buyer_order"]["order"]["payment"]["address"]
                chaincode = c.contract["buyer_order"]["order"]["payment"]["chaincode"]
                masterkey_b = c.contract["buyer_order"]["order"]["id"]["pubkeys"]["bitcoin"]
                buyer_key = derive_childkey(masterkey_b, chaincode)
                amount = c.contract["buyer_order"]["order"]["payment"]["amount"]
                listing_hash = c.contract["buyer_order"]["order"]["ref_hash"]
                signature = self.signing_key.sign(
                    str(payment_address) + str(amount) + str(listing_hash) + str(buyer_key))[:64]
                c.await_funding(self.get_notification_listener(), self.multiplexer.blockchain, signature, False)
                return [signature]
            else:
                self.log.warning("received invalid order from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("unable to decrypt order from %s" % sender)
            return ["False"]

    def rpc_order_confirmation(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)
            contract_id = c.accept_order_confirmation(self.get_notification_listener())
            if contract_id:
                self.router.addContact(sender)
                self.log.info("received confirmation for order %s" % contract_id)
                return ["True"]
            else:
                self.log.warning("received invalid order confirmation from %s" % sender)
                return ["False"]
        except Exception:
            self.log.error("unable to decrypt order confirmation from %s" % sender)
            return ["False"]

    def rpc_complete_order(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            c = Contract(self.db, contract=json.loads(order, object_pairs_hook=OrderedDict),
                         testnet=self.multiplexer.testnet)

            contract_id = c.accept_receipt(self.get_notification_listener(), self.multiplexer.blockchain)
            self.router.addContact(sender)
            self.log.info("received receipt for order %s" % contract_id)
            return ["True"]
        except Exception:
            self.log.error("unable to parse receipt from %s" % sender)
            return ["False"]

    def rpc_dispute_open(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            contract = json.loads(order, object_pairs_hook=OrderedDict)
            process_dispute(contract, self.db, self.get_message_listener(),
                            self.get_notification_listener(), self.multiplexer.testnet)
            self.router.addContact(sender)
            self.log.info("Contract dispute opened by %s" % sender)
            return ["True"]
        except Exception:
            self.log.error("unable to parse disputed contract from %s" % sender)
            return ["False"]

    def rpc_dispute_close(self, sender, pubkey, encrypted):
        try:
            box = Box(PrivateKey(self.signing_key.encode(nacl.encoding.RawEncoder)), PublicKey(pubkey))
            order = box.decrypt(encrypted)
            contract = json.loads(order, object_pairs_hook=OrderedDict)
            close_dispute(contract, self.db, self.get_message_listener(),
                          self.get_notification_listener(), self.multiplexer.testnet)
            self.router.addContact(sender)
            self.log.info("Contract dispute closed by %s" % sender)
            return ["True"]
        except Exception:
            self.log.error("unable to parse disputed close message from %s" % sender)
            return ["False"]

    def callGetContract(self, nodeToAsk, contract_hash):
        d = self.get_contract(nodeToAsk, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetImage(self, nodeToAsk, image_hash):
        d = self.get_image(nodeToAsk, image_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetProfile(self, nodeToAsk):
        d = self.get_profile(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetUserMetadata(self, nodeToAsk):
        d = self.get_user_metadata(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetListings(self, nodeToAsk):
        d = self.get_listings(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetContractMetadata(self, nodeToAsk, contract_hash):
        d = self.get_contract_metadata(nodeToAsk, contract_hash)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callFollow(self, nodeToAsk, proto, signature):
        d = self.follow(nodeToAsk, proto, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callUnfollow(self, nodeToAsk, signature):
        d = self.unfollow(nodeToAsk, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowers(self, nodeToAsk):
        d = self.get_followers(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callGetFollowing(self, nodeToAsk):
        d = self.get_following(nodeToAsk)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callBroadcast(self, nodeToAsk, message, signature):
        d = self.broadcast(nodeToAsk, message, signature)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callMessage(self, nodeToAsk, ephemeral_pubkey, ciphertext):
        d = self.message(nodeToAsk, ephemeral_pubkey, ciphertext)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callOrder(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.order(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callOrderConfirmation(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.order_confirmation(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callCompleteOrder(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.complete_order(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callDisputeOpen(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.dispute_open(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def callDisputeClose(self, nodeToAsk, ephem_pubkey, encrypted_contract):
        d = self.dispute_close(nodeToAsk, ephem_pubkey, encrypted_contract)
        return d.addCallback(self.handleCallResponse, nodeToAsk)

    def handleCallResponse(self, result, node):
        """
        If we get a response, add the node to the routing table.  If
        we get no response, make sure it's removed from the routing table.
        """
        if result[0]:
            self.router.addContact(node)
        else:
            self.log.debug("no response from %s, removing from router" % node)
            self.router.removeContact(node)
        return result

    def get_notification_listener(self):
        for listener in self.listeners:
            try:
                verifyObject(NotificationListener, listener)
                return listener
            except DoesNotImplement:
                pass

    def get_message_listener(self):
        for listener in self.listeners:
            try:
                verifyObject(MessageListener, listener)
                return listener
            except DoesNotImplement:
                pass

    def __iter__(self):
        return iter(self.handled_commands)
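
The order and message handlers above all use the same envelope: the sender encrypts with an ephemeral Curve25519 key to the receiver's public key, and the receiver rebuilds the `Box` from its own private key plus the ephemeral public key shipped alongside the ciphertext. The minimal round trip below uses keys generated on the spot (rather than deriving them from the node's signing key as the handlers do), so it only illustrates the Box API.

from nacl.public import PrivateKey, Box

receiver_priv = PrivateKey.generate()       # stands in for the node's key
ephemeral_priv = PrivateKey.generate()      # generated per message by the sender

# sender side: encrypt the serialized order to the receiver's public key
ciphertext = Box(ephemeral_priv, receiver_priv.public_key).encrypt("serialized order")

# receiver side: rebuild the box from its private key and the ephemeral public
# key received with the message, then decrypt
plaintext = Box(receiver_priv, ephemeral_priv.public_key).decrypt(ciphertext)
assert plaintext == "serialized order"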
Example #60
0
        if status == 'success':
            logger.info("type:%s, target:%s, size:%d, cost:%f, speed:%f" %('upload', target, size, costTime, size/costTime/1000))
    speed = 0
    if totalTime > 0:
        speed = totalSize/totalTime/1000
    logger.info("type:%s, target:%s, succ:%d, fail:%d, size:%d, cost:%f, speed:%f" %('upload', target, totalSucc, totalFail, totalSize, totalTime, speed))

def cleanUploadedPics(picDir):
    for file in os.listdir(picDir):
        os.remove(os.path.join(picDir, file))
        logger.debug('Removed pic: %s' % (file))
    logger.info('Removed all the pics under dir: %s' % (picDir))

files = getPicFiles(pic_dir)
if len(files) <= 0:
    logger.error("There's no files under dir[%s]" %(pic_dir))
else:
    try:
        markImage.markAll(files)
    except:
        logger.error("Mark image error: %s", sys.exc_info()[0])

    try:
        uploadToWeibo(files)
    except:
        logger.error("Upload to Weibo error: %s", sys.exc_info()[0])

    try:
        uploadToQQ(files)
    except:
        logger.error("Upload to Tencent error: %s", sys.exc_info()[0])