Example #1
 def generate_reply(self, returned=None, optional=None):
   """This function is a bit crazy as it is entered from several points as both a callback or called directly :(
   returns either the successful msg, containing the symmertric key, balance, bank ip address, and login token
   OR
   the timeout until the client can log in again
   @param optional: text to return to the client with a code
   @type optional: [int, str]
   @return: None"""
   PROTOCOL = 1
   if self.successful:
     log_msg('Login was successful.', 3)
     EventLogging.save_event(BankLogin(username=self.username))
     #address of the bank server
     address = addressBook[0]
     curExp, nextExp = BankUtil.get_interval_time_deltas()
     replyFormat = '!BBIIII4sI'
     reply = struct.pack(replyFormat, PROTOCOL, 1, self.balance,
                         Globals.CURRENT_ACOIN_INTERVAL[0],
                         curExp, nextExp,
                         inet_aton(address.host), address.port)
     reply += self.authBlob
   else:
     reply = struct.pack('!BBI', PROTOCOL, 0, self.timeout)
   #the code determines whether the text replaces the client-side failure message or is appended to it
   #NOTE: hardcoded placeholder for now; this overrides any value passed in for optional
   optional = [0, 'hello']
   if optional:
     code = struct.pack('!B', optional[0])
     reply += code + optional[1]
   #shove off the reply  
   self.reply(reply)
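For context, the success reply packs its fixed-size fields with the '!BBIIII4sI' format before appending the auth blob. A client-side unpack might look like the sketch below; the helper name and the surrounding client code are assumptions, not part of the original module.

import struct
from socket import inet_ntoa

REPLY_FORMAT = '!BBIIII4sI'
REPLY_SIZE = struct.calcsize(REPLY_FORMAT)

def unpack_login_reply(data):
  """Sketch: split a *successful* login reply back into its fields."""
  (protocol, success, balance, interval,
   curExp, nextExp, packedIp, port) = struct.unpack(REPLY_FORMAT, data[:REPLY_SIZE])
  #everything after the fixed header is the auth blob (plus any optional code and text)
  authBlob = data[REPLY_SIZE:]
  return protocol, success, balance, interval, curExp, nextExp, inet_ntoa(packedIp), port, authBlob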
Example #2
 def _on_finished(self, results):
     """Everything worked (well enough).
 Log the event and return the SUCCESS code reply"""
     EventLogging.save_event(
         AccountCreated(hexkey=self.hexKey,
                        username=self.username,
                        mailinglist=self.shouldAddToMailingList))
     self._reply(RESPONSE_CODES["SUCCESS"])
Example #3
 def __init__(self, eventTypes, logName):
     #: mapping from events to their event classes:
     self._eventTypes = eventTypes
     #: used to track events, so they can be logged hourly, for better anonymity and performance
     self._reset_events()
     #open the logs:
     EventLogging.open_logs(logName)
     #schedule a flush of the logs for the end of the hour:
     self._schedule_next_flush()
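The _schedule_next_flush and _reset_events helpers are not shown in these examples. Below is a minimal sketch of how the end-of-hour flush could be scheduled, assuming a threading.Timer and a _recentEvents attribute; those names are assumptions, not taken from the source.

import time
import threading

def _schedule_next_flush(self):
  #fire at the top of the next hour, when the accumulated events get logged
  secondsUntilNextHour = 3600 - (time.time() % 3600)
  timer = threading.Timer(secondsUntilNextHour, self._flush)
  timer.daemon = True
  timer.start()

def _flush(self):
  #hand the current hour's events to the logging pass, then start a fresh hour
  recentEvents = self._recentEvents  #hypothetical attribute populated by _reset_events()
  self._reset_events()
  self._flush_thread(recentEvents)
  self._schedule_next_flush()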
Example #4
 def __init__(self, eventTypes, logName):
   #: mapping from events to their event classes:
   self._eventTypes = eventTypes
   #: used to track events, so they can be logged hourly, for better anonymity and performance
   self._reset_events()
   #open the logs:
   EventLogging.open_logs(logName)
   #schedule a flush of the logs for the end of the hour:
   self._schedule_next_flush()
Example #5
  def _update_events(self):
    earliestTime = get_current_gmtime()
    curTime = time.time()
    
    for logTypeName, remoteFolder in self.serverList.iteritems():
      try:
        log_msg("Updating logs from %s..." % (logTypeName))
        dataDir = os.path.join(self.baseDataDir, logTypeName)
        if not os.path.exists(dataDir):
          os.makedirs(dataDir)

        #get the changes from the remote server:
        os.system('rsync --append -rtz -e "ssh -i %s" %s/ %s' % (IDENTITY, remoteFolder, dataDir))
        
        #for each file in the folder
        fileNames = glob.glob(dataDir + "/*")
        for fileName in fileNames:
          baseFileName = os.path.split(fileName)[1]
          
          #ignore really old files:
          if self._file_is_old(fileName, curTime):
            self._remove_file(fileName, baseFileName, logTypeName)
            continue
          
          #look up the database row
          results = self._get_file_row(baseFileName, logTypeName)
          
          #if the row existed, figure out if it is old enough to be deleted
          if len(results) > 0:
            assert len(results) == 1, "Why are there two rows for %s and %s?" % (baseFileName, logTypeName)
            numEvents, lastMTimeString = results[0]
            rowExisted = True
            if not self._file_was_modified(fileName, lastMTimeString):
              #don't bother continuing to parse the file if it hasn't been modified
              continue
          #otherwise, just note that we've obviously never parsed any events from this file
          else:
            numEvents = 0
            rowExisted = False
            
          #load all lines
          cur = self.conn.cursor()
          try:
            startTime, newNumEvents = EventLogging.parse_events(cur, fileName, numEvents)
            if startTime < earliestTime:
              earliestTime = startTime
            log_msg("Parsed %s events" % (newNumEvents-numEvents))
          #if any line fails, abort everything and log the failure
          except Exception, error:
            self._log_failure(error, "Failure (%s) while processing line from %s" % (error, fileName))
          #otherwise update the file row in the database to note that we've successfully parsed newNumEvents events
          else:
            newMTimeString = str(os.path.getmtime(fileName))
            if rowExisted:
              self._update_file_row(cur, baseFileName, logTypeName, newNumEvents, newMTimeString)
            else:
              self._insert_file_row(cur, baseFileName, logTypeName, newNumEvents, newMTimeString)
          finally:
            cur.close()
            self.conn.commit()
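The _get_file_row, _update_file_row, and _insert_file_row helpers are not shown. One plausible shape for the bookkeeping table and the lookup, assuming a DB-API driver that uses '%s' placeholders; the table and column names here are illustrative, not from the source.

CREATE_PARSED_FILES_SQL = """
CREATE TABLE IF NOT EXISTS parsed_files (
  file_name  TEXT    NOT NULL,
  log_type   TEXT    NOT NULL,
  num_events INTEGER NOT NULL,
  last_mtime TEXT    NOT NULL,
  PRIMARY KEY (file_name, log_type)
)"""

def _get_file_row(self, baseFileName, logTypeName):
  """Return [(numEvents, lastMTimeString)] for a file we have parsed before, else []."""
  cur = self.conn.cursor()
  try:
    cur.execute("SELECT num_events, last_mtime FROM parsed_files WHERE file_name = %s AND log_type = %s",
                (baseFileName, logTypeName))
    return cur.fetchall()
  finally:
    cur.close()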
Example #6
#!/usr/bin/python
import os
import random
from serverCommon import EventLogging
from serverCommon import Events

os.system("rm ./testevents.ou*") == 0
EventLogging.open_logs("testevents.out")

def random_hex(size):
  size /= 2
  data = "".join(chr(random.randrange(0, 256)) for i in xrange(size))
  return data.encode("hex")

def reduce_size(data, amount):
  keys = data.keys()
  count = 0
  for key in keys:
    del data[key]
    count += 1
    if count >= amount:
      return

emails = {}
for i in range(0,40):
  emails["*****@*****.**"%(random_hex(10))] = random_hex(20)

for email, ref in emails.iteritems():
  EventLogging.save_event(Events.EmailSent(address=email, hexkey=ref))
reduce_size(emails, 5)
Example #7
 def _on_finished(self, results):
   """Everything worked (well enough).
   Log the event and return the SUCCESS code reply"""
   EventLogging.save_event(AccountCreated(hexkey=self.hexKey, username=self.username, mailinglist=self.shouldAddToMailingList))
   self._reply(RESPONSE_CODES["SUCCESS"])
Example #8
parser.add_option("--live", action="store_true", dest="is_live", default=False)
parser.add_option("--dev", action="store_true", dest="is_dev", default=False)
(options, args) = parser.parse_args()
if options.is_live:
  from common.conf import Live as Conf
else:
  if not options.is_dev:
    print("You must use either the --live or --dev switches")
    sys.exit(1)
  from common.conf import Dev as Conf

#open the event logs
LOG_DIR = "/mnt/logs/consensus/"
if not os.path.exists(LOG_DIR):
  os.makedirs(LOG_DIR)
EventLogging.open_logs(os.path.join(LOG_DIR, "consensus_events.out"))

#get the document
conn = httplib.HTTPConnection("%s:%s" % (Conf.AUTH_SERVERS[0]["address"], Conf.AUTH_SERVERS[0]["dirport"]))
conn.request("GET", "/tor/status-vote/current/consensus")
response = conn.getresponse()
responseData = response.read()

#parse all of the bandwidths out of it
bandwidthRe = re.compile('w Bandwidth=(\d{1,10})')

#get bandwidth
bandwidths = [int(r) for r in bandwidthRe.findall(responseData)]
totalBandwidth = sum(bandwidths)
numRelays = len(bandwidths)
bandwidths.sort()
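The snippet ends after sorting the bandwidth list. One way it could finish is to summarize the consensus and record the totals as an event; the ConsensusParsed event class and its fields are hypothetical, shown for illustration only.

#a possible continuation (ConsensusParsed is a hypothetical event class):
from serverCommon import Events

medianBandwidth = bandwidths[numRelays // 2] if numRelays else 0
EventLogging.save_event(Events.ConsensusParsed(numRelays=numRelays,
                                               totalBandwidth=totalBandwidth,
                                               medianBandwidth=medianBandwidth))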
Example #9
#!/usr/bin/python
from serverCommon import cyborg_db as db
from serverCommon import EventLogging

conn = db.Pool.get_conn()
cur = conn.cursor()
f = open("testevents.out", "rb")
for line in f.readlines():
  event = EventLogging.load_event(line)
  event.insert(cur)
cur.close()
conn.commit()
f.close()
Example #10
parser.add_option("--live", action="store_true", dest="is_live", default=False)
parser.add_option("--dev", action="store_true", dest="is_dev", default=False)
(options, args) = parser.parse_args()
if options.is_live:
    from common.conf import Live as Conf
else:
    if not options.is_dev:
        print("You must use either the --live or --dev switches")
        sys.exit(1)
    from common.conf import Dev as Conf

#open the event logs
LOG_DIR = "/mnt/logs/consensus/"
if not os.path.exists(LOG_DIR):
    os.makedirs(LOG_DIR)
EventLogging.open_logs(os.path.join(LOG_DIR, "consensus_events.out"))

#get the document
conn = httplib.HTTPConnection(
    "%s:%s" %
    (Conf.AUTH_SERVERS[0]["address"], Conf.AUTH_SERVERS[0]["dirport"]))
conn.request("GET", "/tor/status-vote/current/consensus")
response = conn.getresponse()
responseData = response.read()

#parse all of the bandwidths out of it
bandwidthRe = re.compile('w Bandwidth=(\d{1,10})')

#get bandwidth
bandwidths = [int(r) for r in bandwidthRe.findall(responseData)]
totalBandwidth = sum(bandwidths)
Example #11
#!/usr/bin/python
import os
import random
from serverCommon import EventLogging
from serverCommon import Events

os.system("rm ./testevents.ou*") == 0
EventLogging.open_logs("testevents.out")


def random_hex(size):
    size /= 2
    data = "".join(chr(random.randrange(0, 256)) for i in xrange(size))
    return data.encode("hex")


def reduce_size(data, amount):
    keys = data.keys()
    count = 0
    for key in keys:
        del data[key]
        count += 1
        if count >= amount:
            return


emails = {}
for i in range(0, 40):
    emails["*****@*****.**" % (random_hex(10))] = random_hex(20)

for email, ref in emails.iteritems():
    EventLogging.save_event(Events.EmailSent(address=email, hexkey=ref))
reduce_size(emails, 5)
Example #12
#!/usr/bin/python
from serverCommon import cyborg_db as db
from serverCommon import EventLogging

conn = db.Pool.get_conn()
cur = conn.cursor()
f = open("testevents.out", "rb")
for line in f.readlines():
    event = EventLogging.load_event(line)
    event.insert(cur)
cur.close()
conn.commit()
f.close()
Example #13
import os

from twisted.internet import address

from common.utils.Basic import log_msg, log_ex, _ # pylint: disable-msg=W0611
from common import Globals
from common.system import Files
from common.utils import Basic
from common.utils import TorUtils
from common.classes import Logger
from common.classes import SymmetricKey
from common.classes import PublicKey
from serverCommon.DBUtil import format_auth
from serverCommon import db
from serverCommon.Events import BankLogin
from serverCommon import EventLogging
import BankUtil
import AccountCreation
EventLogging.open_logs("/mnt/logs/bank/login_events.out")

cert = '/etc/apache2/ssl/server.crt'
privateKey = '/etc/apache2/ssl/server.key'

if os.path.exists("THIS_IS_KANS_MACHINE"):
  from common.conf import Dev as Conf
  bank1Address = address.IPv4Address('TCP', '24.131.16.34', 10001) #
  Conf.LOGIN_PORT = 1092
elif os.path.exists("THIS_IS_DEBUG"):
  from common.conf import Dev as Conf
  bank1Address = address.IPv4Address('TCP', Conf.DEV_SERVER, Conf.SERVER_PORT) #innomi.net
  cert = '/home/certificate_authority/server.crt'
else:
  from common.conf import Live as Conf
  bank1Address = address.IPv4Address('TCP', '174.129.199.15', Conf.SERVER_PORT) #login.bitblinder.com
Example #14
 def _flush_thread(self, recentEvents):
     """Log an event for each message type and user, essentially"""
     for collectionName, events in recentEvents.iteritems():
         eventType = self._eventTypes.get(collectionName)
         for key, value in events.iteritems():
             EventLogging.save_event(eventType(source=key, amount=value))
Example #15
 def _flush_thread(self, recentEvents):
   """Log an event for each message type and user, essentially"""
   for collectionName, events in recentEvents.iteritems():
     eventType = self._eventTypes.get(collectionName)
     for key, value in events.iteritems():
       EventLogging.save_event(eventType(source=key, amount=value))
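For illustration only, the recentEvents argument is expected to be a dict of per-collection dicts mapping a source to an accumulated count; the keys and values below are made up.

#hypothetical input for _flush_thread(); collection and source names are illustrative
recentEvents = {
  "msgSent": {"user_a": 12, "user_b": 3},
  "msgReceived": {"user_a": 7},
}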