def on_message(self, message):
    """Handle one incoming websocket message and broadcast any response.

    `message` is expected to be the repr of a dict carrying at least a
    'type' key and, depending on the type, 'name'/'key'/'station'/'zone'/
    'value' entries.
    """
    # literal_eval never executes code, but it still raises on malformed
    # payloads -- NOTE(review): no guard for a bad message here.
    request = ast.literal_eval(message)
    response = ""
    if 'name' in request:
        # Show a "Processing" notice for this request on the LCD worker.
        LCDQueue.put([request['name'], "Processing"])
        LCDEvent.set()
    if request['type'] == "keypress":
        requestprocessor().keypress(request['key'])
    elif request['type'] == "radio":
        requestprocessor().change_radio_station(request['station'])
        response = [request['type'] + ',' + request['station']]
    elif request['type'] == "command":
        response = requestprocessor().handle_command(request['name'])
    elif request['type'] == "volume":
        # Map the zone name to its physical location before adjusting.
        zone, chassis = configuration().zone_location(request['zone'])
        requestprocessor().change_volume(request['value'], zone, chassis)
        response = ['volume,' + request['zone'] + "," + request['value']]
    elif request['type'] == "treble":
        zone, chassis = configuration().zone_location(request['zone'])
        requestprocessor().change_treble(int(request['value']), zone, chassis)
        response = ['treble,' + request['zone'] + "," + request['value']]
    elif request['type'] == "aton":
        response = configuration().get_system_status()
        print(response)
    elif request['type'] == "base":
        zone, chassis = configuration().zone_location(request['zone'])
        requestprocessor().change_base(int(request['value']), zone, chassis)
        response = ['base,' + request['zone'] + "," + request['value']]
    elif request['type'] == "pandora":
        # Hand the command to the Pandora worker thread...
        PandoraRequestQueue.put(request['name'])
        PandoraRequestReadyEvent.set()
        # ...and only block for a reply on commands known to produce one
        # (single-character names appear to be station selections --
        # NOTE(review): confirm against the Pandora worker).
        if request['name'] in [
                'pandoraon', 'nextsong', 'pandoraoff', 'thumbsdown',
                'CurrentSong'
        ] or len(request['name']) == 1:
            #print('If statement hit.')
            PandoraDataReadyEvent.wait()
            response = []
            # Drain everything the worker queued before clearing the event.
            while not PandoraDataQueue.empty():
                response.append(PandoraDataQueue.get())
                PandoraDataQueue.task_done()
            PandoraDataReadyEvent.clear()
    if response != "":
        # Broadcast every response item to every connected client.
        #print(response)
        for item in response:
            #print(item)
            for client in self.clients:
                client.write_message(item)
def __init__(self, configFile, mktName, processingDate, debugFlag):
    """
    Purpose: Constructor

    :param self: class object itself
    :param configFile: Configuration file to use
    :param mktName: market name for this run
    :param processingDate: processing date, also stamped into the log
    :param debugFlag: debug switch carried on the instance
    """
    # Plain attribute copies first.
    self.processingDate = processingDate
    self.debugFlag = debugFlag
    self.configFile = configFile
    self.mktName = mktName
    # Logger with a file handler and the generic header information.
    self.m_logger = Logger(logging.INFO, configFile, processingDate)
    self.m_logger.addFileHandler(logging.DEBUG)
    self.m_logger.addGenericInfo(__file__)
    try:
        # Configuration dictionary, then the Oracle connection built on it.
        self.m_configDict = configuration(self.configFile, True).m_dictionary
        self.m_oracle_db = Oracle(self.m_configDict, self.m_logger)
    except Exception as err:
        self.m_logger.error("Unable to initialize the configuration " + str(err))
        print("ERROR: Unable to initialize the configuration for logger " + str(err))
        sys.exit(1)
def main(configFile, logLevel, tradeDate):
    """
    Purpose - Main function that reads the arguments and calls the
    Internal_recon processing for the given trade date.

    :param configFile: configuration file to drive the run
    :param logLevel: logging level for the Logger
    :param tradeDate: trade date in the format accepted by validateDate
    """
    log = Logger(logLevel, configFile, tradeDate)
    log.addFileHandler(logging.DEBUG)
    log.addGenericInfo(__file__)
    try:
        # Validate config file path before doing any work.
        if not os.path.isfile(configFile):
            print("Invalid config file ", configFile)
            sys.exit(103)
        if not validateDate(tradeDate):
            print("Invalid trade date format ", tradeDate)
            sys.exit(102)
        m_configDict = configuration(configFile, True).m_dictionary
        # Get instance of the class Internal_recon and run the recon.
        process = Internal_recon(m_configDict, tradeDate, log)
        process.intern_recon(m_configDict, log)
    except Exception as e:
        # Fixed: was Python-2-only `print "...", str(e)` and the bare
        # builtin `exit(100)`; the rest of this function already uses
        # print() and sys.exit(), so stay consistent.
        print("Failed on main", str(e))
        log.error(str(e))
        sys.exit(100)
def main(configfile, log_level, dataset=None, tdate="20160101"):
    """
    Purpose: Run a tab-delimited Netezza extract for one dataset/date.

    :param configfile: configuration file for the extract
    :param log_level: log level for the logger
    :param dataset: dataset name handed to NZExtract
    :param tdate: trade date in YYYYMMDD form
    """
    log = Logger(log_level, configfile)
    conf = configuration(configfile)
    # test#1: File logging
    log.addFileHandler(logging.DEBUG)
    # NOTE(review): formatter1 is built but never attached to a handler.
    formatter1 = logging.Formatter('%(asctime)s - %(name)s %(levelname)s - %(message)s',
                                   datefmt='%Y-%m-%d %H:%M:%S')
    # Tab-delimited extract for the requested dataset/date.
    myExtract = NZExtract(datetime, conf, log, dataset, delim='\t')
    #myExtract = NZExtract(datetime,conf,log,dataset)
    myExtract.execute(conf, dataset, log, tdate)
    myExtract.extractdata()
    #myExtract.extractgz(filename="/tmp/testdata")
    #myExtract.extractbz2(filename="/tmp/testdata")
    #myExtract.extracttxt(filename="/tmp/testdata")
    log.info("SQL Result read completed")
    # close cursor and connection
    myExtract.cleanup(log)
    # cleanup the logger
    log.cleanup()
    sys.exit(0)
def configure_kernel(cml_file):
    """Ensure the build directory exists and run the CML2 kernel configure.

    :param cml_file: CML2 configuration file handed to cml2_configure
    """
    # The configuration object itself was never used (unused local removed);
    # the constructor call is kept in case it has side effects --
    # NOTE(review): confirm configuration() is side-effect free, then drop.
    configuration()
    if not os.path.exists(BUILDDIR):
        os.mkdir(BUILDDIR)
    cml2_configure(cml_file)
def main():
    """Run the daily AWS Hive SQL batch described by the <argv[1]>.ini config.

    Exits 1 when no config argument is given or when any Hive SQL fails.
    """
    try:
        ## Read the CFG file for the process and set params.
        cfg_file = os.path.join("/home/hadoop/", sys.argv[1] + ".ini")
    except IndexError:
        ## Catch Exception and print on screen.
        ## Fixed: Python-2-only print statement.
        print("Pass valid cfg file parameter")
        sys.exit(1)
    ## Initialize the Logger object.
    log = Logger(logging.INFO, cfg_file)
    log.addFileHandler(logging.DEBUG)
    log.addMailHandler(log.m_config["cluster_aws_mailprfx"] + " ERROR: AWS process failed for the day", logging.ERROR)
    try:
        ## Set env variables for the process.
        conf = configuration(cfg_file, True)
        log.info(os.environ["job_prefix"] + " process started for the day..")
        ## Create Hive class object and create queries for execution
        objhive = Hive(log, os.environ['hive_path'])
        hive_options = '-f'
        ## Get the list of SQL and SQL files for execution
        log.info("Getting Hive Sql list...\n")
        sql_name = os.environ["sql_names"].split(",")
        sql_filename = os.environ["sql_files"].split(",")
        ## Loop through all the sql for the process (enumerate replaces the
        ## manual while/counter; IndexError behaviour on a short sql_name
        ## list is unchanged).
        for loop_count, sql_file in enumerate(sql_filename):
            log.info('Hive ' + sql_name[loop_count] + ' SQL Execution started.')
            objhive.buildHql(hive_options, os.environ["sql_dir"] + '/' + sql_file)
            log.info("SQL File:- " + objhive.hive_sql)
            objhive.execute()
            log.info('SQL Execution completed successfully...\n')
        log.info(os.environ["job_prefix"] + " process completed successfully for the day..")
        ## Cleanup and send the success mail for the process.
        log.cleanup()
        log.addMailHandler(log.m_config["cluster_aws_mailprfx"] + " SUCCESS: AWS process completed successfully for the day", logging.ERROR)
        log.sendlog()
    except Exception as e:
        ## Fixed: Python-2-only `except Exception, e` / `print e`.
        print(e)
        log.info(os.environ["job_prefix"] + " process thrown exception " + str(e) + "\n")
        log.info("##======= Please Find Proces Temp Log Below =======##\n")
        log.cleanup()
        log.addMailHandler(log.m_config["cluster_aws_mailprfx"] + " EXCEPTION: AWS process on cluster thrown exception " + str(e) + "..", logging.ERROR)
        log.sendlog("error", 50)
        sys.exit(1)
def __init__(self, level, config, format="", setenv=False):
    """
    Purpose: Constructor

    :param self: class object itself
    :param level: logging level for the logger
    :param config: configuration file to read
    :param format: formatter to be used for logging
    :param setenv: Whether to export the key/value pairs in the environment
    """
    # initialize logger
    self.m_log = logging.getLogger(__name__)
    # set the level for the logger
    self.m_log.setLevel(level)
    if format:
        # If formatter is specified, use it rather than default formatter
        self.m_formatter = format
    try:
        # read the config and initialize m_config
        self.m_config = configuration(config, setenv).m_dictionary
        # set logfile for the process logging (dir/jobname_YYMMDDHHMMSS.log)
        self.m_logfile = self.m_config["log_dir"] + "/" + self.m_config[
            self.JOB_LOG] + '_' + getTime('YYMMDD') + getTime(
                'HHMMSS') + '.log'
    except Exception as exp:
        # Fixed: Python-2-only `except Exception, exp` / print statement.
        print("ERROR: Unable to initialize the configuration for logger " + str(exp))
        sys.exit(1)
def __init__(self, level, config, format="", setenv=False):
    """
    Purpose: Constructor

    :param self: class object itself
    :param level: logging level for the logger
    :param config: configuration file to read
    :param format: formatter to be used for logging
    :param setenv: Whether to export the key/value pairs in the environment
    """
    # initialize logger
    self.m_log = logging.getLogger(__name__)
    # set the level for the logger
    self.m_log.setLevel(level)
    if format:
        # If formatter is specified, use it rather than default formatter
        self.m_formatter = format
    try:
        # read the config and initialize m_config
        self.m_config = configuration(config, setenv).m_dictionary
        # set logfile for the process logging (dir/jobname_YYMMDDHHMMSS.log)
        self.m_logfile = self.m_config["log_dir"] + "/" + self.m_config[self.JOB_LOG] + '_' + getTime('YYMMDD') + getTime('HHMMSS') + '.log'
    except Exception as exp:
        # Fixed: Python-2-only `except Exception, exp` / print statement.
        print("ERROR: Unable to initialize the configuration for logger " + str(exp))
        sys.exit(1)
def save_configuration(self):
    """Serialize the current UI paths into configuration.json.

    Builds a configuration object from the four path widgets, prints it,
    and writes its attribute dict as JSON.
    """
    config = configuration(self.trainingImagePath.text(),
                           self.trainingLabelsPath.text(),
                           self.validationImagePath.text(),
                           self.validationLabelPath.text())
    config.print()
    json_data = json.dumps(config.__dict__)
    # Fixed: use a context manager so the handle is closed even if the
    # write raises (was manual open/write/close).
    with open("configuration.json", "w") as file:
        file.write(json_data)
def open_configuration(self):
    """Load a configuration from a user-selected .json file.

    On success the parsed configuration is applied via load_configuration;
    otherwise a (Polish) "invalid file" message is written to the UI.
    """
    filename = QFileDialog.getOpenFileName()
    # endswith is the idiomatic (and equivalent) form of the old
    # `filename[0][-5:] == '.json'` slice comparison.
    if filename[0] != "" and filename[0].endswith('.json'):
        # Fixed: context manager closes the handle even if read() raises
        # (was manual open/read/close).
        with open(filename[0], "r") as file:
            json_data = file.read()
        new_config = configuration(**json.loads(json_data))
        self.load_configuration(new_config)
    else:
        self.write_line("Nieprawidłowy plik")
def readMktConfigFile(self, mktConfigFile):
    """
    Purpose - Load mktConfigFile into the instance dictionary
    m_mktConfigDict for later reference.

    :param mktConfigFile: market configuration file to parse
    :return: None (exits the process on failure)
    """
    try:
        self.m_mktConfigDict = configuration(mktConfigFile, True).m_dictionary
    except Exception as err:
        # Build the message once and use it for both the log and stdout.
        msg = "Unable to initialize the configuration for logger " + str(err)
        self.m_logger.error(msg)
        print("ERROR: " + msg)
        sys.exit(1)
def get(self):
    """Render Web/index.html with current system state and Pandora stations."""
    # Get system information
    mceConfig = configuration().get_mce_configuration()
    systemStatus = configuration().get_system_status_template()
    systemConfiguration = configuration().get_system_configuration()
    # Request the station list from the Pandora worker thread.
    PandoraRequestQueue.put('StationList')
    PandoraRequestReadyEvent.set()
    # Wait to get the station list back, take one item, then reset the event.
    PandoraDataReadyEvent.wait()
    pandorastations = PandoraDataQueue.get()
    PandoraDataQueue.task_done()
    PandoraDataReadyEvent.clear()
    index = Template(filename='Web/index.html').render(
        mceConfig=mceConfig,
        systemStatus=systemStatus,
        systemConfiguration=systemConfiguration,
        pandorastations=pandorastations)
    self.write(index)
def main(configfile, log_level):
    """
    Purpose: Debug driver exercising the Netezza helper end to end.

    :param configfile: configuration file for the Netezza connection
    :param log_level: log level for the logger
    """
    log = Logger(log_level, configfile)
    conf = configuration(configfile);
    # test#1: File logging
    log.addFileHandler(logging.DEBUG)
    # NOTE(review): formatter1 is created but never attached to a handler.
    formatter1 = logging.Formatter('%(asctime)s - %(name)s %(levelname)s - %(message)s',
                                   datefmt='%Y-%m-%d %H:%M:%S')
    # log to file and send out email
    log.info("message")
    myutil = Netezza(datetime)
    myutil.configread(log, conf)
    print('***** Debugging - Functions *****')
    myutil.connect(log)
    # The three adjacent literals are concatenated into one long SELECT.
    myutil.execute(log, "select RECTYPE , RECNO , TDATE , SYMBOL , TIM , TIMTICKS , REPLACEIND , SIDE , TA , QTY , TYPOFINT , INSTR , OTYPE , LMTPRICE "
                   ", TIF , DNIR , NXOTHER , ORIG_TA , POSDUP , PUBQTY , FIRM , BRANCH , SEQNO , MISC , GIVEUP , SOURCE , EFFECSEQ , BROKER , PEG_IND , PEG_PRICE , PEG_MIN , MAX_DISC_VOL , DISC_PRICE_RANGE , OS_MIN_SIZE , EXEC_DISC_VOL , SOCISO , ITSALLIND , ORDR_IND , NODISPLAY_IND , DARK_IND , MIN_DISC_TRADE_SIZE , ACCTYP , ODATE , PRICE_OFFSET , CCGTIM , CCGTIMTICKS , CLIENT_ORDERID , OPP_SIDE_PEG_IND , PARTIAL_FILL_IND , CONNECTION_ID , SENDER_COMP_ID , GATEWAY_NODE , STP_IND , MPID , SENDER_SUB_ID , MAX_CONTRA_QTY "
                   "from ordr limit 2")
    # NOTE(review): this assumes Netezza exposes an attribute literally
    # named __rows__; if it is assigned as self.__rows__ inside the class
    # it would be name-mangled and this access would fail -- verify.
    for row in myutil.__rows__:
        print(row[1:])
        print(row[0], row[1], row[2], row[3])
    myutil.execute(log, "select * from ordr limit 2")
    print(type(myutil.__rows__))
    for row in myutil.__rows__:
        for col in row[1:]:
            print(col, type(col))
    print(len(myutil.__rows__))
    log.info("Netezza connected using ODBC")
    print("Netezza connected using ODBC")
    # Second execution path: run without fetching, then read in batches.
    myutil.executeWoResult(log, "select * from ordr limit 2")
    myutil.readResult(arraysize=10)
    # close cursor and connection
    myutil.cleanup(log)
    # cleanup the logger
    log.cleanup()
    sys.exit(0)
def __init__(self, configFile):
    """
    Purpose: Constructor

    :param self: class object itself
    :param configFile: Configuration file to use
    """
    # Initialize global logger object
    self.m_logger = Logger(logging.INFO, configFile)
    self.m_logger.addFileHandler(logging.DEBUG)
    # Fixed: pre-declare resources so the except-branch cleanup cannot
    # raise AttributeError when initialization fails before they exist.
    self.m_adamapg = None
    self.m_picardpg = None
    self.m_lock = None
    try:
        # Add generic information
        fname = inspect.getfile(inspect.currentframe())
        fpath = os.path.dirname(os.path.abspath(fname))
        self.m_logger.addGenericInfo(fpath + "/" + fname)
        # export all the values from config into environment
        configObject = configuration(configFile, True)
        # Create Adama replica PG db object
        self.m_adamapg = Postgre(os.environ['adama_pg'], self.m_logger)
        # Create Picard Postgres Datamart object
        self.m_picardpg = Postgre(os.environ['picard_pg'], self.m_logger)
        # Create lock for the process
        self.m_lock = Lock(os.environ['LOCK_FILE'], self.m_logger)
        # loader type
        self.m_loaderType = self.getloaderType()
        # process name
        self.process_name = os.environ['process_name']
        self.m_logger.info("Initializing the process, %s" % self.process_name)
    except Exception as e:
        # Fixed: Python-2-only `except Exception, e`.
        self.m_logger.error(
            "ERROR: Unable to initialize the process due to: %s" % str(e))
        self.updateProcessStatus("F")
        if self.m_adamapg:
            self.m_adamapg.closeConnection()
        if self.m_picardpg:
            self.m_picardpg.closeConnection()
        if self.m_lock:
            self.m_lock.remove()
        sys.exit("ERROR: Unable to initialize the process due to: %s" % str(e))
def test_confKeyNotFound(self):
    """configuration: Test that accessing a key that is not specified in config file fails"""
    # create aws.ini -- fixed: context manager guarantees the handle is
    # closed even if a write raises (was manual open/close).
    with open("aws.ini", "w") as file:
        file.write("[MAIL]\n")
        file.write("[email protected]\n")
    # intialize configuration with setenv=True
    conf = configuration("aws.ini", True)
    # check the key "mailto" throws KeyError
    self.assertRaises(KeyError, lambda: os.environ['mailto'])
    # cleanup conf file
    os.remove("aws.ini")
def __init__(self, configFile):
    """
    Purpose: Constructor

    :param self: class object itself
    :param configFile: Configuration file to use
    """
    # Initialize global logger object
    self.m_logger = Logger(logging.INFO, configFile)
    self.m_logger.addFileHandler(logging.DEBUG)
    # Fixed: pre-declare resources so the except-branch cleanup cannot
    # raise AttributeError when initialization fails before they exist.
    self.m_adamapg = None
    self.m_picardpg = None
    self.m_lock = None
    try:
        # Add generic information
        fname = inspect.getfile(inspect.currentframe())
        fpath = os.path.dirname(os.path.abspath(fname))
        self.m_logger.addGenericInfo(fpath + "/" + fname)
        # export all the values from config into environment
        configObject = configuration(configFile, True)
        # Create Adama replica PG db object
        self.m_adamapg = Postgre(os.environ['adama_pg'], self.m_logger)
        # Create Picard Postgres Datamart object
        self.m_picardpg = Postgre(os.environ['picard_pg'], self.m_logger)
        # Create lock for the process
        self.m_lock = Lock(os.environ['LOCK_FILE'], self.m_logger)
        # loader type
        self.m_loaderType = self.getloaderType()
        # process name
        self.process_name = os.environ['process_name']
        self.m_logger.info("Initializing the process, %s" % self.process_name)
    except Exception as e:
        # Fixed: Python-2-only `except Exception, e`.
        self.m_logger.error("ERROR: Unable to initialize the process due to: %s" % str(e))
        self.updateProcessStatus("F")
        if self.m_adamapg:
            self.m_adamapg.closeConnection()
        if self.m_picardpg:
            self.m_picardpg.closeConnection()
        if self.m_lock:
            self.m_lock.remove()
        sys.exit("ERROR: Unable to initialize the process due to: %s" % str(e))
def test_confSetenv(self):
    """configuration: Test that config file contents have been exported into environment"""
    # test 2
    # create aws.ini -- fixed: context manager closes the handle even if a
    # write raises (was manual open/close).
    with open("aws.ini", "w") as file:
        file.write("[MAIL]\n")
        file.write("[email protected]\n")
    # intialize configuration with setenv=True
    conf = configuration("aws.ini", True)
    # check the key has been exported
    self.assertEqual("*****@*****.**", os.environ['mailfrom'],
                     "Configuration variable is not found in the environment")
    # cleanup conf file
    os.remove("aws.ini")
def __init__(self, awsConfigFile, logger):
    """
    Purpose: Constructor

    :param self: class object itself
    :param awsConfigFile: Configuration file to use for this database
    :param logger: shared logger used for error reporting
    """
    # Keep the caller-supplied logger.
    self.m_logger = logger
    try:
        # Parse the config (exporting values to the environment) and keep
        # the resulting dictionary.
        self.m_configFile = configuration(awsConfigFile, True).m_dictionary
    except Exception as err:
        self.m_logger.error("ERROR: Unable to initialize the configuration for logger " + str(err))
        sys.exit(1)
def __init__(self, confFile, logger):
    """
    Purpose: Constructor

    :param self: class object itself
    :param confFile: Configuration file to use for this database
    :param logger: logger used for error reporting
    """
    # initialize logger object
    self.m_logger = logger
    if confFile:
        # configuration file has been provided
        try:
            # read the config and initialize m_config
            self.m_configfile = configuration(confFile, True).m_dictionary
        except Exception as exp:
            # Fixed: Python-2-only `except Exception, exp`.
            self.m_logger.error("ERROR: Unable to initialize the configuration for logger " + str(exp))
            sys.exit(1)
def test_confSetenv(self):
    """configuration: Test that config file contents have been exported into environment"""
    # test 2
    # create aws.ini -- fixed: context manager closes the handle even if a
    # write raises (was manual open/close).
    with open("aws.ini", "w") as file:
        file.write("[MAIL]\n")
        file.write("[email protected]\n")
    # intialize configuration with setenv=True
    conf = configuration("aws.ini", True)
    # check the key has been exported
    self.assertEqual(
        "*****@*****.**", os.environ['mailfrom'],
        "Configuration variable is not found in the environment")
    # cleanup conf file
    os.remove("aws.ini")
def __init__(self, awsConfigFile, logger):
    """
    Purpose: Constructor

    :param self: class object itself
    :param awsConfigFile: Configuration file to use for this database
    :param logger: logger used for error reporting
    """
    # Initialize the logger member
    self.m_logger = logger
    try:
        # read the config and initialize m_config
        self.m_configfile = configuration(awsConfigFile, True).m_dictionary
    except Exception as exp:
        # Fixed: Python-2-only `except Exception, exp`.
        self.m_logger.error(
            "ERROR: Unable to initialize the configuration for logger " + str(exp))
        sys.exit(1)
def test_configuration(self):
    """configuration: Test that config file is converted into a dictionary"""
    # create aws.ini -- fixed: context manager closes the handle even if a
    # write raises (was manual open/close).
    with open("aws.ini", "w") as file:
        file.write("[MAIL]\n")
        file.write("[email protected]\n")
    # create configuration object
    conf = configuration("aws.ini")
    # check the contents of the class dictionary
    expDict = dict()
    expDict['mailfrom'] = "*****@*****.**"
    #print expDict
    #print conf.m_dictionary
    self.assertEqual(expDict, conf.m_dictionary)
    # Clean up conf file
    os.remove("aws.ini")
def main():
    """Read host/port from configuration, start the UDP server and serve forever.

    Exits 1 when the configuration cannot be read.
    """
    try:
        config = configuration()
        host_ip = config.ip
        port = config.port
        #print 'Server Initialized...'
    except Exception:
        # Fixed: was a bare `except:` (which also swallowed SystemExit and
        # KeyboardInterrupt) plus a Python-2-only print statement.
        print("Configuration error")
        sys.exit(1)
    host_addr = host_ip, port
    server_setup(host_addr)
    # Receive-and-dispatch loop; relies on module-level server_sock/buff.
    while True:
        client_data = server_sock.recvfrom(buff)
        client_details(client_data)
def main(log_level, configfile=None, manifest=None, dataset=None, tdate=None, runid=None):
    """
    Purpose: create manifest/done file for FINRA AWS
    :param configfile: configuration file for the run
    :param manifest: input manifest file with file contents
    :param log_level: log level for the logger
    :param dataset: dataset to be read (unused in this driver)
    :param tdate: trade date (unused in this driver)
    :param runid: run id stamped into the done file
    """
    log = Logger(log_level, configfile)
    conf = configuration(configfile)
    # test#1: File logging
    log.addFileHandler(logging.DEBUG)
    # NOTE(review): formatter1 is built but never attached to a handler.
    formatter1 = logging.Formatter('%(asctime)s - %(name)s %(levelname)s - %(message)s',
                                   datefmt='%Y-%m-%d %H:%M:%S')
    # Build the AWS manifest, read it (plus defaults) back, then write the
    # done file for this run.
    myObj = GenManifest(datetime, conf, log, manifest=manifest)
    myObj.createawsManifest()
    myObj.readManifest()
    myObj.readDefaults()
    myObj.createDoneFile(runid)
    # commented-out extract experiments retained from development:
    #myExtract = NZExtract(datetime,conf,log,dataset)
    #myExtract.execute(conf,dataset,log,tdate)
    #myExtract.extractdata()
    #myExtract.extractgz(filename="/tmp/testdata")
    #myExtract.extractbz2(filename="/tmp/testdata")
    #myExtract.extracttxt(filename="/tmp/testdata")
    log.info("Processing complete")
    # cleanup the logger
    log.cleanup()
    sys.exit(0)
def __init__(self, level, config, sub_dir="", format="", setenv=False):
    """
    Purpose: Constructor

    :param self: class object itself
    :param level: logging level for the logger
    :param config: configuration file to read
    :param sub_dir: sub directory to be created based on the param passed to main
    :param format: formatter to be used for logging
    :param setenv: Whether to export the key/value pairs in the environment
    """
    # initialize logger
    self.m_log = logging.getLogger(__name__)
    # set the level for the logger
    self.m_log.setLevel(level)
    if format:
        # If formatter is specified, use it rather than default formatter
        self.m_formatter = format
    try:
        # read the config and initialize m_config
        self.m_config = configuration(config, setenv).m_dictionary
        # Fixed: the jobname_YYMMDDHHMMSS.log suffix was duplicated in
        # both branches; build the directory first, then the name once.
        log_dir = self.m_config["LOG"]["log_dir"]
        if sub_dir:
            log_dir = log_dir + "/" + sub_dir
            if not os.path.exists(log_dir):
                os.makedirs(log_dir)
        self.m_logfile = log_dir + "/" + self.m_config["LOG"]["job_name"] + '_' + getTime('YYMMDD') + getTime('HHMMSS') + '.log'
    except Exception as exp:
        # An exception occurred
        print("ERROR: Unable to initialize the configuration for logger ", str(exp))
        sys.exit(1)
def main(log_level, configfile=None, manifest=None, dataset=None, tdate=None, runid=None):
    """
    Purpose: create manifest/done file for FINRA AWS
    :param configfile: configuration file for the run
    :param manifest: input manifest file with file contents
    :param log_level: log level for the logger
    :param dataset: dataset to be read (derived from manifest name if None)
    :param tdate: trade date (derived from manifest name if None)
    :param runid: run identifier (unused in this driver)
    """
    # Manifest names appear to look like "<x>.<dataset>.<tdate>...." --
    # derive missing arguments from the name. TODO confirm the format.
    manifestList = manifest.split(".", 4)
    if dataset is None:
        dataset = manifestList[1]
    if tdate is None:
        tdate = manifestList[2]
    log = Logger(log_level, configfile, tdate)
    conf = configuration(configfile)
    log.addFileHandler(logging.DEBUG)
    # NOTE(review): formatter1 is built but never attached to a handler.
    formatter1 = logging.Formatter("%(asctime)s - %(name)s %(levelname)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
    # NOTE(review): `args` is not a parameter -- this relies on a
    # module-level argparse result being in scope; confirm at call time.
    print("Executing genManifest.py with params : ", args.dc)
    log.info("Parameters : {}".format(args.dc))
    log.addGenericInfo(__file__)
    # log.info("Logfile: ",log)
    myObj = genManifest(datetime, conf, log, manifest=manifest)
    myObj.createawsManifest()
    log.info("Processing complete")
    # cleanup the logger
    log.cleanup()
    sys.exit(0)
def __init__(self, confFile):
    """
    Purpose: Constructor

    :param self: class object itself
    :param confFile: Configuration file to use for the process
    """
    # Initialize the logger member
    self.m_logger = Logger(logging.INFO, confFile)
    # Add file handler for File logging
    self.m_logger.addFileHandler(logging.DEBUG)
    # Check if configuration file is provided and throw an error.
    if confFile:
        # configuration file has been provided
        try:
            # read the config and initialize m_config
            self.m_config = configuration(confFile, True).m_dictionary
        except Exception as exp:
            # Fixed: Python-2-only `except Exception, exp`.
            self.m_logger.error("ERROR: Unable to initialize the configuration for s3utils.. " + str(exp))
            sys.exit(1)
import platform
def main(configFile, logLevel, tDate):
    """Ad-hoc driver exercising PKG_RFCM_DDY stored functions via Oracle.

    NOTE(review): this block was recovered from collapsed source; the large
    commented-out regions were earlier experiments and have been condensed
    into summary comments.  Only the internal-recon path at the bottom is
    live, and it uses a hard-coded trade date rather than the tDate
    parameter -- confirm intent.
    """
    log = Logger(logLevel, configFile, tDate)
    log.addFileHandler(logging.DEBUG)
    log.addGenericInfo(__file__)
    try:
        m_configDict = configuration(configFile, True).m_dictionary
        #print "m_configDict = ", m_configDict
        myOracle = Oracle(m_configDict, log)
        # Bind-variable form of the F_DDY_INSERT_PROCESS_STATUS call
        # (currently unused by the live path below).
        mySql = "select RETURN_CODE||'|'||RETURN_MSG from table(PKG_RFCM_DDY.F_DDY_INSERT_PROCESS_STATUS(:datasetName, :runID, :fileID, :fileName, :tDate, :processID, :hostName, :lstatus , :lcomment))"
        pDatasetName = "DLE_INFO"
        pRunID = 20151216144156584
        pTDate = 20151210
        pStatus = 'P'
        pFileID = 1
        pFileName = 'test.dat.gz'
        pComment = 'Process Started'
        pProcessID = 1234
        pHostName = 'Test_Host'
        myParams = {"datasetName": pDatasetName, "runID": pRunID, "fileID": pFileID, "fileName": pFileName, "tDate": pTDate, "processID": pProcessID, "hostName": pHostName, "lstatus": pStatus, "lcomment": pComment}
        # (condensed commented-out experiments: f_ddy_insert_manifest_trans,
        #  f_ddy_get_process_status, f_ddy_get_dataset_status,
        #  f_ddy_get_active_loads, f_ddy_get_market_info and
        #  f_ddy_get_dataset_race, each with its own mySql/params setup)
        myParams = {"datasetName": pDatasetName}
        """
        (condensed commented-out experiment: single-row call to
        pkg_rfcm_ddy.f_ddy_get_makt_info_dflt_fname via
        runSqlWthParamsGetOneRow with re.sub placeholder substitution)
        """
        """
        (condensed commented-out experiment: F_DDY_INSERT_DATASET_TRANS
        with regex substitution of placeholder names from an rdict)
        """
        """
        (condensed commented-out experiment: F_DDY_INSERT_PROCESS_STATUS
        regex-substitution variant building new_mySql from an rdict)
        """
        # Live path: internal recon for a hard-coded trade date; the
        # 'tradeDate' placeholder in the SQL text is patched via re.sub.
        mySql = "select * from table(PKG_RFCM_DDY.f_ddy_internal_recon('tradeDate')) order by 4"
        tDate = '20160212'
        new_mySql = re.sub('tradeDate', tDate, mySql.rstrip())
        print "mySql = ", mySql, "new_mySql = ", new_mySql
        returnStrs = myOracle.runSqlWthParamsGetMultipleRows(new_mySql)
        print "Return Strs = ", returnStrs
        print "returnStrs[0][0] = ", returnStrs[0][0]
        print "returnStrs[0][1] = ", returnStrs[0][1]
        print "returnStrs[0][2] = ", returnStrs[0][2]
        ########### Multiprocessing test code (disabled)
        """
        fileID = 1
        fileIDQueue = Queue()
        for x in range(5):
            processHandle = Process(target=myOracle.worker, args=(mySql, myParams, fileID, fileIDQueue))
            processHandle.start()
            fileID += 1
        processHandle.join()
        # Without sleep the queue is unreliable and does not return the expected values
        time.sleep(2)
        failureFlag = 0
        while not fileIDQueue.empty():
            qFileID, qResult = fileIDQueue.get()
            print("qFileID = ", qFileID, "qResult = ", qResult)
            if qResult:
                failureFlag = 1
        print "FailureFlag = ", failureFlag
        """
        ########## End
    except Exception as e:
        print "Failed on main", str(e)
        exit(1)
def test_confFileNotExists(self):
    """configuration: Test that config file was not provided and throws ValueError"""
    # Context-manager form of assertRaises instead of the lambda wrapper.
    with self.assertRaises(ValueError):
        configuration("dsds")
def configure_system(options, args): # # Configure only if we are told to do so. # if not options.config: return if not os.path.exists(BUILDDIR): os.mkdir(BUILDDIR) # # If we have an existing config file or one supplied in options # and we're not forced to autogenerate, we use the config file. # # Otherwise we autogenerate a ruleset and compile it, and create # a configuration file from it from scratch. # if (options.cml_file or os.path.exists(CML2_CONFIG_FILE)) \ and not options.reset_config: if options.cml_file: cml2_config_file = options.cml_file else: cml2_config_file = CML2_CONFIG_FILE # # If we have a valid config file but not a rules file, # we still need to autogenerate the rules file. # if not os.path.exists(CML2_COMPILED_RULES): rules_file = autogen_rules_file(options, args) # Compile rules file. os.system(CML2TOOLSDIR + '/cmlcompile.py -o ' + \ CML2_COMPILED_RULES + ' ' + rules_file) if options.batch: # Create configuration from existing file os.system(CML2TOOLSDIR + '/cmlconfigure.py -b -o ' + \ CML2_CONFIG_FILE + ' -i ' + cml2_config_file + \ ' ' + CML2_COMPILED_RULES) else: # Create configuration from existing file os.system(CML2TOOLSDIR + '/cmlconfigure.py -c -o ' + \ CML2_CONFIG_FILE + ' -i ' + cml2_config_file + \ ' ' + CML2_COMPILED_RULES) else: rules_file = autogen_rules_file(options, args) # Compile rules file. os.system(CML2TOOLSDIR + '/cmlcompile.py -o ' + \ CML2_COMPILED_RULES + ' ' + rules_file) # Create configuration from scratch os.system(CML2TOOLSDIR + '/cmlconfigure.py -c -o ' + \ CML2_CONFIG_FILE + ' ' + CML2_COMPILED_RULES) # After configure, if user might have chosen to quit without saving if not os.path.exists(CML2_CONFIG_FILE): print "Exiting without saving configuration." 
sys.exit() # Create header file os.system(TOOLSDIR + '/cml2header.py -o ' + \ CML2_CONFIG_H + ' -i ' + CML2_CONFIG_FILE) # The rest: if not os.path.exists(os.path.dirname(CONFIG_H)): os.mkdir(os.path.dirname(CONFIG_H)) shutil.copy(CML2_CONFIG_H, CONFIG_H) config = configuration() cml2_header_to_symbols(CML2_CONFIG_H, config) cml2_add_default_caps(config) cml2_update_config_h(CONFIG_H, config) configuration_save(config) # Initialise config dependent projpaths define_config_dependent_projpaths(config) # Generate baremetal container files if new ones defined baremetal_cont_gen = BaremetalContGenerator() baremetal_cont_gen.baremetal_container_generate(config) return config
# NOTE(review): Python 2 ad-hoc test driver for the project's Oracle wrapper.
# It instantiates Logger/configuration/Oracle, runs two queries built by
# substituting literal placeholders ('datasetName', 'tradeDate') into SQL
# text with re.sub, and prints the rows returned.  The bulk of the body is
# commented-out experiments against other PKG_RFCM_DDY functions, plus a
# triple-quoted (inert) multiprocessing snippet.  The source below is
# collapsed onto long lines and the live/commented boundary is ambiguous in
# places (e.g. the myParams reassignment mid-way), so the code is left
# byte-identical and only these review comments are added.  Any exception
# is caught at the bottom, reported, and the process exits with status 1.
def main(configFile, logLevel, tDate): log = Logger(logLevel,configFile, tDate) log.addFileHandler(logging.DEBUG) log.addGenericInfo(__file__) try: m_configDict = configuration(configFile, True).m_dictionary #print "m_configDict = ", m_configDict myOracle = Oracle(m_configDict, log) #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.F_DDY_INSERT_DATASET_TRANS(:datasetID, :runID, :tDate, :status))" #pDatasetID = 2 #pRunID = 234234234 # 20151216144156584829 #pTDate = 20151215 #pStatus = 'P' #myParams = {"datasetID": pDatasetID, "runID": pRunID, "tDate": pTDate, "status": pStatus} #select RETURN_CODE||'|'||RETURN_MSG from table(PKG_RFCM_DDY.F_DDY_INSERT_PROCESS_STATUS('DLE_INFO', 20151216144156584829, 1, 'opb1.dat.bz2', 20151215, 111, 'test_hostname', 'P', 'Process Started')) #select RETURN_CODE||'|'||RETURN_MSG from table(PKG_RFCM_DDY.F_DDY_INSERT_PROCESS_STATUS('DLE_INFO', 20151216144156584829, 1, 'opb1.dat.bz2', 20151215, 111, 'test_hostname', 'P', 'Process Started')); mySql = "select RETURN_CODE||'|'||RETURN_MSG from table(PKG_RFCM_DDY.F_DDY_INSERT_PROCESS_STATUS(:datasetName, :runID, :fileID, :fileName, :tDate, :processID, :hostName, :lstatus , :lcomment))" pDatasetName = "DLE_INFO" pRunID = 20151216144156584 pTDate = 20151210 pStatus = 'P' pFileID = 1 pFileName = 'test.dat.gz' pComment = 'Process Started' pProcessID = 1234 pHostName = 'Test_Host' myParams = {"datasetName": pDatasetName, "runID": pRunID, "fileID": pFileID, "fileName": pFileName, "tDate": pTDate, "processID": pProcessID, "hostName": pHostName, "lstatus": pStatus, "lcomment": pComment} #returnStr = self.__cursor.execute("select * from table(PKG_RFCM_DDY.f_ddy_insert_manifest_trans(1, 20151215, 'opb.test1.bz2', 'manifest.opb', 23423, 2342334))") #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_insert_manifest_trans(:datasetID, :tDate, :dataFileName, :manifestFileName, :noOfRecords, :fileSize))" #pDatasetID = 3 #pTDate = 20151210 #pDataFileName = 
'opb.test1.bz2' #pManifestFileName = 'manifest.opb' #pNoOfRecords = 23423 #pFileSize = 2342334 #myParams = {"datasetID": pDatasetID, "tDate": pTDate, "dataFileName": pDataFileName, "manifestFileName":pManifestFileName, "noOfRecords":pNoOfRecords, "fileSize":pFileSize} #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_process_status(20151216144156584829,1));" #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_process_status(:runID,:fileID))" #pRunID = 20151216144156584829 #pFileID = 1 #myParams = {"runID":pRunID, "fileID":pFileID} #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_dataset_status(234234234));" #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_dataset_status(:runID))" #pRunID = 234234234 #myParams = {"runID":pRunID} #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_active_loads());" #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_active_loads())" #myParams = {} #mySql = select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_market_info(p_dataset_name => 'ADW_EVENT_LSH_RAW')); #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_market_info(:datasetName))" #mySql = "select RETURN_CODE, RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_market_info(:datasetName))" #pDatasetName = 'ADW_EVENT_LSH_RAW' #pDatasetName = 'DLE_INFO' myParams = {"datasetName":pDatasetName} #mySql = select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_dataset_race('ADW_AMEX_OPT_RAW')); #mySql = "select RETURN_CODE||'|'|| RETURN_MSG from table(PKG_RFCM_DDY.f_ddy_get_dataset_race(:datasetName))" #pDatasetName = "ADW_AMEX_OPT_RAW" #myParams = {"datasetName":pDatasetName} #mySql = "select RETURN_CODE, RETURN_MSG, P_MARKET_IND, P_DEFAULTS_FILENAME from table(pkg_rfcm_ddy.f_ddy_get_makt_info_dflt_fname(:datasetName))" mySql = "select RETURN_CODE, 
RETURN_MSG, P_MARKET_IND, P_DEFAULTS_FILENAME from table(pkg_rfcm_ddy.f_ddy_get_makt_info_dflt_fname('datasetName'))" pDatasetName = 'DLE_INFO' new_mySql = re.sub('datasetName', pDatasetName, mySql.rstrip()) returnStr = myOracle.runSqlWthParamsGetOneRow(new_mySql) print "Return Value = ", returnStr[0], " Return Code = ", returnStr[1], " Mkt = ", returnStr[2], "Defaults = ", returnStr[3] mySql = "select * from table(PKG_RFCM_DDY.f_ddy_internal_recon('tradeDate')) order by 4" tDate = '20160212' new_mySql = re.sub('tradeDate', tDate, mySql.rstrip()) print "mySql = ", mySql, "new_mySql = ", new_mySql returnStrs = myOracle.runSqlWthParamsGetMultipleRows(new_mySql) print "Return Strs = ", returnStrs #for returnStr in returnStrs: #print "Return Value = ", returnStr[0], " Return Code = ", returnStr[1] #print "mySql = ", mySql, "myParams = ", myParams ########### Multiprocessing test code """ fileID=1 fileIDQueue = Queue() for x in range(5): processHandle = Process(target=myOracle.worker, args=( mySql, myParams, fileID, fileIDQueue)) processHandle.start() fileID += 1 processHandle.join() #Without sleep the queue is unreliable and do not return the expected values time.sleep(2) failureFlag=0 while not fileIDQueue.empty(): qFileID, qResult = fileIDQueue.get() print("qFileID = ", qFileID, "qResult = ", qResult) if qResult: failureFlag=1 print "FailureFlag = ", failureFlag """ ########## End except Exception as e: print "Failed on main", str(e) exit(1)
def get(self):
    """Serve the index page, populated with the current system configuration."""
    # Render the mako template with live configuration data on every request.
    rendered = Template(filename='Web/index.html').render(
        systemConfiguration=configuration().systemConfiguration())
    self.write(rendered)
# NOTE(review): Python 2 chat-client sign-in / message-receive loop.
# Protocol framing is the literal token ':,:,:'.  Flow: send SIGN-IN with
# the RSA-encrypted username, receive and decrypt a password-derived
# challenge, answer with response+salted-password-hash+client-pubkey+nonce,
# complete a Diffie-Hellman exchange (nonce-checked both ways), then spawn
# the send thread and loop forever decrypting incoming peer messages.
# WARNING(review): the credential-prompt section below is visibly
# redacted/garbled ('"******"') and cannot be reconstructed from this view;
# the presumable intent is reading username/password and binding
# server_address/my_username -- confirm against the original repository.
# Code is left byte-identical; only these review comments are added.
def main(): try: config = configuration() username_client = raw_input("Please enter your username: "******"Please enter your password: "******"0"] = server_address my_username["0"] = username_client username_encrypted = crypt.rsa_encryption(server_pub_key, username_client) username_client_encoded = base64.encodestring(username_encrypted) message = username_client_encoded + ':,:,:' + 'SIGN-IN' #sending SIGN-IN message to register user on the server #username to be sent to server for authentication, encrypted with public key of server client_socket.sendto(message, ((server_address))) try: client_socket.settimeout(4) server_data = client_socket.recvfrom(client_buff) client_socket.settimeout(None) except timeout: print "Looks like the server is unreachable or offline,please try again" os._exit(1) serv_string = str(server_data[0]) if serv_string.find('no_user') != -1: sig = serv_string.split(":,:,:")[1] try: crypt.verify(server_pub_key, sig, "no_user") print "incorrect username or password, please log back in with the correct credentials" time.sleep(1) os._exit(0) except: return challenge_from_server = server_data[0] challenge = challenge_from_server.split(':,:,:')[0] tag = challenge_from_server.split(':,:,:')[1] iv = challenge_from_server.split(':,:,:')[2] salt = challenge_from_server.split(':,:,:')[3] challenge_sig = challenge_from_server.split(':,:,:')[4] try: crypt.verify( server_pub_key, challenge_sig, challenge) #verifying that the challenge came from the server except: print "signature forged, shutting down" os._exit(0) passwd = crypt.key_derivation( passwd, salt) #deriving key from the user supplied password try: response = crypt.symmetric_decryption(passwd, iv, challenge, tag) except: print "incorrect username or password, please log back in with the correct credentials" response = os.urandom(32) salt_for_passwd = os.urandom(2) passwd = salt_for_passwd + passwd digest = crypt.hash(passwd) client_pub_bytes = crypt.key_conversion_bytes(client_pub) Nonce = 
uuid.uuid4().hex complete_response = response + ":,:,:" + salt_for_passwd + ":,:,:" + str( digest) + ":,:,:" + client_pub_bytes + ":,:,:" + Nonce complete_response_encrypted = crypt.rsa_encryption( server_pub_key, complete_response) client_socket.sendto( complete_response_encrypted, (server_address )) # sending wrong response to server if password is incorrect time.sleep(1) os._exit(0) #construct response for the server salt_for_passwd = os.urandom(2) passwd = salt_for_passwd + passwd digest = crypt.hash(passwd) #client_pub_load=crypt.public_key_load(client_pub) # for serializing public key of client client_pub_bytes = crypt.key_conversion_bytes(client_pub) Nonce = uuid.uuid4().hex complete_response = response + ":,:,:" + salt_for_passwd + ":,:,:" + str( digest) + ":,:,:" + client_pub_bytes + ":,:,:" + Nonce complete_response_encrypted = crypt.rsa_encryption(server_pub_key, complete_response) client_socket.sendto(complete_response_encrypted, (server_address)) dh_key = pyDH.DiffieHellman() dh_pubkey = dh_key.gen_public_key() try: client_socket.settimeout(2) dh_peer_pub_key_encrypted = client_socket.recvfrom(client_buff)[0] client_socket.settimeout(None) except: print "incorrect username or password, please log back in with the correct credentials" if (dh_peer_pub_key_encrypted != "failure"): dh_encrypted_key = dh_peer_pub_key_encrypted.split(':,:,:')[0] key_encrypted = dh_peer_pub_key_encrypted.split(':,:,:')[1] iv = dh_peer_pub_key_encrypted.split(':,:,:')[2] tag = dh_peer_pub_key_encrypted.split(':,:,:')[3] get_symmtric_key = crypt.rsa_decryption(client_priv, key_encrypted) get_shared_key_n1_n2 = crypt.symmetric_decryption( get_symmtric_key, iv, dh_encrypted_key, tag) get_shared_key = long(get_shared_key_n1_n2.split(":,:,:")[0]) n1 = get_shared_key_n1_n2.split(":,:,:")[1] n2 = get_shared_key_n1_n2.split(":,:,:")[2] if crypt.nonce_check(n1) == 0 or crypt.nonce_check(n2) == 0: print "Hack detected, shutting down" os._exit(0) else: dh_pub_nonce = str(dh_pubkey) + 
":,:,:" + n2 dh_pubkey_encrypt = crypt.rsa_encryption(server_pub_key, dh_pub_nonce) client_socket.sendto(dh_pubkey_encrypt, (server_address)) dh_sharedkey = dh_key.gen_shared_key(long(get_shared_key)) user_session[username_client] = dh_sharedkey else: print "Hack detected, shutting down, Please login again" os._exit(0) client_send_thread = threading.Thread( target=client_send_message_thread) #spawning a thread to send command client_send_thread.start() while 1: #for messages to peer try: peer_message = client_socket.recvfrom(client_buff)[0] encrypted_msg = peer_message.split(":,:,:")[0] encrypted_key = peer_message.split(":,:,:")[3] iv = peer_message.split(":,:,:")[1] tag = peer_message.split(":,:,:")[2] shared_peer_key = crypt.rsa_decryption(client_priv, encrypted_key) decrypted_message = crypt.symmetric_decryption( shared_peer_key, iv, encrypted_msg, tag) n3 = decrypted_message.split(":,:,:")[2] if crypt.nonce_check(n3) == 1: message = decrypted_message.split(":,:,:")[0] user_who_sent_it = decrypted_message.split(":,:,:")[1] else: print "hack detected, Shutting down" sys.exit(1) print '\n<-' + ' <From ' + user_who_sent_it + '>: ' + message #final message to print sys.stdout.write('+>') sys.stdout.flush() except: continue # so that it goes on an endless listening loop
# ---- Imports (order preserved: the star imports may shadow each other) ----
import Queue
from time import *
from requestprocessor import *
from configuration import *
from controllers import *
from mako.template import Template
import tornado.httpserver
import tornado.ioloop
import tornado.options
import tornado.web
import tornado.websocket
import re
from tornado.options import define, options, parse_command_line
import pdb

# ---- Feature detection: optional hardware sections in the config file ----
LCDExists = configuration().config_exists('LCD')
GPIOExists = configuration().config_exists('IO')
PandoraExists = True

# ---- Inter-thread plumbing shared by the web handlers and workers --------
# NOTE(review): neither `threading` nor the `time` module object is imported
# here directly; presumably one of the star imports above provides them --
# confirm against requestprocessor/configuration/controllers.
LCDQueue = Queue.Queue()
LCDEvent = threading.Event()
PandoraRequestQueue = Queue.Queue()
PandoraDataQueue = Queue.Queue()
PandoraRequestReadyEvent = threading.Event()
PandoraDataReadyEvent = threading.Event()

#Note the starting time.
print(time.strftime("[%Y/%m/%d %H:%M:%S] ", time.localtime()) + "started")

#Unsolicited feedback is annoying...and unsolicited. Turn it off.
controllers().write_to_serial_port("&AH66,CH,UFB,OFF")
# NOTE(review): Python 2 interactive prompt thread for the chat client.
# Commands: 'list' (fetch signed-in users from the server, decrypted with
# the first half of the session key), 'send <user> <msg>' (look the peer up
# via the server, then AES-encrypt the message plus a fresh nonce and send
# it RSA-wrapped straight to the peer), and 'logout'/'exit'/'quit'
# (nonce-verified logout handshake, then process exit).  Anything else
# prints the command help.  Framing token is ':,:,:' throughout.
# NOTE(review): one py2 print statement is split across the chunk boundary
# below ("...message to. / Use" \ ...) by the line collapse, and the
# `if n1 != n1` comparison is always False (presumably meant to compare the
# received nonce against the sent one -- confirm upstream).  Code is left
# byte-identical; only these review comments are added.
def client_send_message_thread(): #Thread for message prompt client_socket = socket.socket( socket.AF_INET, socket.SOCK_DGRAM) #registering client socket as datagram username_client = my_username["0"] config = configuration() server_ip = config.ip server_port = config.port server_address = server_ip, server_port while 1: prompt = raw_input('+>') if prompt.find('list') != -1 and prompt.find( 'list') == 0: # if list command is encountered nonce = crypt.create_nonce() username_client_send = crypt.rsa_encryption( server_pub_key, username_client) username_client_encoded = base64.encodestring(username_client_send) message = username_client_encoded + ":,:,:" + str( nonce) + ":,:,:" + "list" client_socket.sendto(message, (server_address)) server_data1 = client_socket.recvfrom(client_buff)[0] encrypted_data = server_data1.split(":,:,:")[0] iv = server_data1.split(":,:,:")[1] tag = server_data1.split(":,:,:")[2] key = str(user_session[username_client] )[:len(str(user_session[username_client])) / 2] server_data1 = crypt.symmetric_decryption(key, iv, encrypted_data, tag) print '<- Signed In Users: ' + server_data1 elif prompt.find('send') != -1 and prompt.find( 'send') == 0: #if send command is encountered try: get_client_username = prompt.split(' ')[1] get_client_details = prompt.split(' ')[1:] except: print "No username entered, please use the format: " \ "send <username> <message> to send messages" continue if get_client_username: encrypted_username_client = crypt.rsa_encryption( server_pub_key, username_client) get_client_message = prompt.split(' ')[2:] #Grab the symmetric key client_server and IV #pass encrypted initial message to server key = str(user_session[username_client] )[:len(str(user_session[username_client])) / 2] iv = os.urandom(32) tag, encrypted_client_details = crypt.symmetric_encryption( key, iv, str(get_client_username)) n1 = uuid.uuid4().hex get_peer_information=encrypted_client_details+":,:,:"+iv+":,:,:"+n1+":,:,:"+tag+":,:,:"\ 
+encrypted_username_client+":,:,:"+"send" client_socket.sendto(get_peer_information, (server_address)) try: client_socket.settimeout(3) get_client_details_encoded = client_socket.recvfrom( client_buff) #reusing variable for receiver details client_socket.settimeout(None) except: print "Either the user is not logged-in or not registered" continue get_client_details_decoded = get_client_details_encoded[0] encrypted_message = get_client_details_decoded.split( ":,:,:")[0] tag = get_client_details_decoded.split(":,:,:")[1] iv = get_client_details_decoded.split(":,:,:")[2] get_peer_info = crypt.symmetric_decryption( key, iv, encrypted_message, tag) n1_received = get_peer_info.split(":,:,:")[0] n2 = get_peer_info.split(":,:,:")[1] peer_pub = get_peer_info.split(":,:,:")[2] peer_info_to_send = get_peer_info.split(":,:,:")[3] message = get_peer_info.split(":,:,:")[4] iv_peer = get_peer_info.split(":,:,:")[5] tag_peer = get_peer_info.split(":,:,:")[6] shared_peer_key = get_peer_info.split(":,:,:")[7] get_client_message = " ".join( str(x) for x in get_client_message) n3 = crypt.create_nonce() get_client_message = get_client_message + ":,:,:" + str( username_client) + ":,:,:" + n3 iv = os.urandom(32) tag, my_message_encrypted = crypt.symmetric_encryption( shared_peer_key, iv, get_client_message) if n1 != n1 and crypt.nonce_check(n2) == 0: print "Hack Detected, shutting down" os._exit(0) else: peer_ip = peer_info_to_send.split(':')[0] peer_port = peer_info_to_send.split(':')[1] peer_pub_key = serialization.load_pem_public_key( peer_pub, backend=default_backend()) encrypt_key_for_peer = crypt.rsa_encryption( peer_pub_key, shared_peer_key) finale = my_message_encrypted + ":,:,:" + iv + ":,:,:" + tag + ":,:,:" + encrypt_key_for_peer client_socket.sendto( finale, (peer_ip, int(peer_port))) #sending message to peer client client_socket.settimeout(None) else: print "Please enter the username you want to send message to. 
Use" \ "the format Send <username> <message> to send the message" #except: # print 'Wrong Username. Please check the username via list command and send the message in the format:'\ # 'send <username> <message>' elif (prompt.find('logout')!=-1 and prompt.find('logout')==0) or (prompt.find('exit')!=-1 and prompt.find('exit')==0) \ or (prompt.find('quit')!=-1 and prompt.find('quit')==0): username = my_username["0"] nonce = crypt.create_nonce() message = username + ":,:,:" + nonce encrypted = crypt.rsa_encryption(server_pub_key, message) final_message = encrypted + ":,:,:" + "logout" client_socket.sendto(final_message, (server_address)) received = client_socket.recvfrom(client_buff)[0] nonce_encrypted = received.split(":,:,:")[0] iv = received.split(":,:,:")[1] tag = received.split(":,:,:")[2] shared_key = str( user_session[username])[:len(str(user_session[username])) / 2] nonce = crypt.symmetric_decryption(shared_key, iv, nonce_encrypted, tag) if crypt.nonce_check(nonce) == 1: print "<Logged Out, the Client module will shutdown now>" time.sleep(1) os._exit(1) else: print 'Commands Supported:\nlist - to get the complete list of users connected \n '\ 'send <username> <message> - to send peers your message\n' \ 'logout, exit or quit to safely logout'
def __init__(self, level, config, sub_dir="", datasetName="", format="", setenv=False): """ Purpose: Constructor :param self: class object itself :param level: logging level for the logger :param config: configuration file to read :param sub_dir: sub directory to be created based on the param passed to main :param format: formatter to be used for logging :param setenv: Whether to export the key/value pairs in the environment """ # initialize logger self.m_log = logging.getLogger(__name__) # set the level for the logger self.m_log.setLevel(level) if format: # If formatter is specified, use it rather than default formatter self.m_formatter = format try: # read the config and initialize m_config self.m_config = configuration(config, setenv).m_dictionary #print "datasetName = ", datasetName #print "sub_dir = ", sub_dir # set logfiles for the process logging. if sub_dir: #print "Inside Logger sub_dir", sub_dir full_dir_path = self.m_config["LOG"]["log_dir"] + "/" + sub_dir if not os.path.exists(full_dir_path): try: print "Inside not os.path.exists full_dir_path", full_dir_path os.makedirs(full_dir_path) except OSError as exc: if exc.errno != errno.EEXIST: print( "ERROR: Unable to create directory due the error " , str(exc)) sys.exit(1) currentDateTime = datetime.utcnow().strftime('%Y%m%d%H%M%S%f') if not datasetName == "": #print "Inside if" self.m_logfile = full_dir_path + "/" + self.m_config["LOG"]["job_name"] + '_' + datasetName + '_' + currentDateTime + '.log' else: self.m_logfile = full_dir_path + "/" + self.m_config["LOG"]["job_name"] + '_' + currentDateTime + '.log' else: if not datasetName == "": #print "Inside if" self.m_logfile = full_dir_path + "/" + self.m_config["LOG"]["job_name"] + '_' + datasetName + '_' + currentDateTime + '.log' else: self.m_logfile = self.m_config["LOG"]["log_dir"] + "/" + self.m_config["LOG"]["job_name"] + '_' + currentDateTime + '.log' # if not datasetName == "": # #print "Inside if" # self.m_logfile = full_dir_path + "/" + 
self.m_config["LOG"]["job_name"] + '_' + datasetName + '_' + getTime('YYMMDD') + getTime('HHMMSS') + '.log' # else: # self.m_logfile = full_dir_path + "/" + self.m_config["LOG"]["job_name"] + '_' + getTime('YYMMDD') + getTime('HHMMSS') + '.log' # else: # if not datasetName == "": # #print "Inside if" # self.m_logfile = full_dir_path + "/" + self.m_config["LOG"]["job_name"] + '_' + datasetName + '_' + getTime('YYMMDD') + getTime('HHMMSS') + '.log' # else: # self.m_logfile = self.m_config["LOG"]["log_dir"] + "/" + self.m_config["LOG"]["job_name"] + '_' + getTime('YYMMDD') + getTime('HHMMSS') + '.log' except Exception as exp: # An exception occurred print( "ERROR: Unable to initialize the configuration for logger " , str(exp)) sys.exit(1)