def post_message(self, msg, done, status):
    if done:
        body = {
            'task_id': self.task_id,
            'result': msg,
            'exit_code': status,
            'is_timeout': False,
            'is_aborted': False
        }
        type = 102
    else:
        body = {'task_id': self.task_id, 'log': msg}
        type = 103
    payload = {
        'messages': [{
            'id': uuid.uuid4().hex,
            'type': type,
            'body': body
        }]
    }
    logger.info(payload)
    try:
        r = requests.post(self.message_url, json=payload, verify=False)
        if r.status_code != 200:
            logger.error('Post task log:{} failed, code {} !!!'.format(
                payload, r.status_code))
    except Exception as e:
        logger.error('Post task log:{} failed, exception: {}!!!'.format(
            payload, e))
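# A small standalone sketch of the two payload shapes post_message() builds:
# type 102 when the task is done, type 103 for an incremental log line. The
# task_id and message text below are placeholders, not values from the code.
import json
import uuid

done_body = {'task_id': 'task-123', 'result': 'ok', 'exit_code': 0,
             'is_timeout': False, 'is_aborted': False}
log_body = {'task_id': 'task-123', 'log': 'step 1 finished'}

for msg_type, body in ((102, done_body), (103, log_body)):
    payload = {'messages': [{'id': uuid.uuid4().hex, 'type': msg_type, 'body': body}]}
    print(json.dumps(payload, indent=2))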
def start_session():
    logger.info('Starting database session......')
    engine = create_engine('postgres://localhost:5432/Financial', echo=True)
    Session = sessionmaker(bind=engine)
    session = Session()
    return session, engine
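# A minimal, self-contained sketch of the same create_engine/sessionmaker
# pattern; it assumes nothing about the real Financial database and uses an
# in-memory SQLite URL purely for illustration.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

def example_session():
    engine = create_engine('sqlite:///:memory:', echo=False)  # placeholder URL
    Session = sessionmaker(bind=engine)
    return Session(), engine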
def processLine(self, time, price, volume, corr, cond):
    """Process the raw fields parsed from the CSV; only these fields are
    needed (G127 and EX are unused). Assumes time has already been converted
    to datetime.time, price is a float and size an int."""
    if self.filterTime(time):
        return
    if int(corr) > 2:
        return
    if cond in kContinousCondition:
        self.processContinuousTrade(time, volume, price)
        return
    if cond in kOpenCondition:
        self.processOpeningTrade(volume, price)
        return
    if cond in kCloseCondition:
        self.processClosingTrade(volume, price)
        return
    if cond in kInValidCondition:
        return
    if cond in kSkipDayCondition:
        logger.info("%s, %s - Invalid Day" % (self.symbol, self.day))
        self.InvalidDay = True
        return
    else:
        logger.info("Unknown condition, %s" % cond)
        raise UnknownConditionCode
def __init__(self, tags, ipaddr, filename, timer, timeout):
    """
    :tags: the ENIP tags to query
    :ipaddr: the IP address of the PLC to query
    :filename: the name of the .png file
    :timer: period at which the HMI has to query the tags (s)
    :timeout: period of activity (s)
    """
    HMI.id += 1
    self.__id = HMI.id

    self.__tags = tags
    self.__ipaddr = ipaddr
    self.__filename = filename
    self.__timer = timer
    self.__timeout = timeout
    self.__start_time = 0.0
    self.__process = None  # save the HMI PID to kill it later

    # dict of lists
    self.__values = {}
    # ... one list for each tag
    for tag in tags:
        self.__values[tag] = []
    # ... plus a list to save timestamps
    self.__values['time'] = []

    self.__http = None  # save the HTTP server PID to kill it later

    logger.info('HMI%d - monitors: %s' % (
        self.__id, ', '.join(map(str, self.__tags))))
def get_historical_data(start=INITIAL_DAY, ticker=None):
    if ticker is None:
        tickers = Tickers.get_all_tickers()
    else:
        tickers = [ticker.upper()]
    for ticker in tickers:
        logger.info("Gathering data for: {}".format(ticker))
        DataMapper.map_historical_data(
            ticker,
            DataFetcher.get_historical_stock_data(ticker, start,
                                                  TradingDay.get_today()))
def get_records(session, model):
    logger.info('Fetching records from database: {}'.format(model.__tablename__))
    df_query = session.query(model)
    df = pd.read_sql(df_query.statement, df_query.session.bind)
    if model.__tablename__ == 'income':
        df.rename(columns={'net_income': 'amount'}, inplace=True)
    return df[['txn_date', 'bank_acct', 'amount', 'currency']]
def write_records(session, records, model):
    logger.info('Deleting outdated entries')
    session.query(model).filter(
        model.txn_date >= datetime.date.today().replace(day=1)).delete()
    logger.info('Writing to database: {}'.format(model.__tablename__))
    session.bulk_insert_mappings(model, records.to_dict('records'))
    session.commit()
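# A quick illustration of what records.to_dict('records') hands to
# bulk_insert_mappings: a list with one dict per row, keyed by column name.
# The toy DataFrame below is illustrative only.
import pandas as pd

toy = pd.DataFrame({
    'txn_date': ['2020-01-02', '2020-01-03'],
    'bank_acct': ['A-1', 'A-2'],
    'amount': [100.0, -25.5],
    'currency': ['USD', 'USD'],
})
print(toy.to_dict('records'))
# [{'txn_date': '2020-01-02', 'bank_acct': 'A-1', 'amount': 100.0, 'currency': 'USD'}, ...]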
def get_daily_data(ticker=None):
    if ticker is None:
        tickers = Tickers.get_all_tickers()
    else:
        tickers = [ticker]
    for ticker in tickers:
        logger.info("Gathering data for: {}".format(ticker))
        DataMapper.map_data_from_day(ticker, datetime.date.today(),
                                     DataFetcher.get_today_stock_data(ticker))
def write_balance(session, balance_new, currency):
    if currency == 'USD':
        model = BalanceUSD
    elif currency == 'CNY':
        model = BalanceCNY
    else:
        logger.error('Wrong currency!')
        return  # no matching model, nothing to write
    logger.info('Writing to database: {}'.format(model.__tablename__))
    session.bulk_insert_mappings(model, balance_new.to_dict('records'))
def calculate_change(balance, records, currency):
    logger.info('Calculating changes: {}'.format(records['currency'].unique()[0]))
    filtered = records.loc[records['currency'] == currency]
    merged = filtered.merge(
        balance, left_on='bank_acct', right_on='account', how='left')
    # group by & sum
    df = merged.loc[merged['txn_date'] > merged['observation_date']]
    changes = df[['bank_acct', 'amount']].groupby('bank_acct').sum()
    return changes
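# A toy, self-contained walk-through of the same merge / date-filter /
# group-by logic; the account names, dates and amounts are made up, and only
# the column names mirror the function above.
import pandas as pd

records = pd.DataFrame({
    'txn_date': pd.to_datetime(['2020-03-05', '2020-03-10', '2020-03-12']),
    'bank_acct': ['A-1', 'A-1', 'B-2'],
    'amount': [50.0, -20.0, 30.0],
    'currency': ['USD', 'USD', 'USD'],
})
balance = pd.DataFrame({
    'account': ['A-1', 'B-2'],
    'observation_date': pd.to_datetime(['2020-03-01', '2020-03-11']),
})

merged = records.merge(balance, left_on='bank_acct', right_on='account', how='left')
newer = merged.loc[merged['txn_date'] > merged['observation_date']]
print(newer[['bank_acct', 'amount']].groupby('bank_acct').sum())
# A-1 sums 50.0 - 20.0 = 30.0; for B-2 only the 2020-03-12 row is newer, so 30.0.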
def dump_results(results: Dict[str, str]) -> None:
    """
    Dump results to the file.

    Args:
        results: results as a dictionary
    """
    logger.info('Dumping results to "{}"'.format(RESULTS))
    with open(RESULTS, 'w') as fp:
        json.dump(results, fp, indent=2, sort_keys=True)
def load_database() -> Dict[str, Any]:
    """
    Load database from a file.

    Returns:
        database as a dictionary
    """
    logger.info('Loading database from "{}"'.format(DATABASE))
    with open(DATABASE) as fp:
        return json.load(fp)
def translate(query, is_futhark=False):
    """Translate a string between Futhark and English."""
    log_message = 'Translating to {}...'
    if is_futhark:
        log_message = log_message.format('English')
        translation = query.translate(english_table)
    else:
        log_message = log_message.format('Futhark')
        query = query.lower()
        translation = query.translate(futhark_table)
    logger.info(log_message)
    return translation
def detect_translate(query):
    """Detect the language and translate accordingly."""
    log_message = 'Detected {} text.'
    average_codepoint = sum(ord(char) for char in query) / len(query)
    if average_codepoint < 1000:
        log_message = log_message.format('English')
        logger.info(log_message)
        translation = translate(query)
    else:
        log_message = log_message.format('Futhark')
        logger.info(log_message)
        translation = translate(query, is_futhark=True)
    return translation
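# A minimal sketch of how translation tables like futhark_table/english_table
# could be built with str.maketrans; the two-rune mapping here is purely
# illustrative, not the project's actual table. Elder Futhark runes sit
# around U+16A0, so their average codepoint easily clears the 1000 threshold
# used by detect_translate above.
futhark_table_example = str.maketrans({'f': '\u16a0', 'u': '\u16a2'})  # f -> ᚠ, u -> ᚢ
english_table_example = str.maketrans({'\u16a0': 'f', '\u16a2': 'u'})

print('fu'.translate(futhark_table_example))            # 'ᚠᚢ'
print('\u16a0\u16a2'.translate(english_table_example))  # 'fu'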
def key_event(event):
    """Called with the key event argument on each event."""
    key_value = ''
    if hasattr(event, 'keyboard_key'):
        key_value = event.keyboard_key.value
    if 'CTRL' in key_value:
        key_status['CTRL'] = event.event.value
    elif (key_value in copy_keys and event.event.value == 'DOWN'
          and key_status['CTRL'] == 'DOWN'):
        logger.info('\nReceived Copy shortcut: ' + key_value)
        time.sleep(0.1)
        copied = pyperclip.paste()
        translated = detect_translate(copied)
        pyperclip.copy(translated)
        logger.info('Replaced clipboard contents.')
def start_http_server(self, port=80):
    """Starts a simple HTTP server on a chosen port.

    :port: integer, defaults to 80
    """
    if self.__http is None:
        cmd = "python -m SimpleHTTPServer %d" % port
        try:
            self.__http = Popen(cmd, shell=True, preexec_fn=setsid)
            logger.info('HMI%d - HTTP server started on port %d' % (
                self.__id, port))
        except OSError as e:
            emsg = repr(e)
            logger.warning('HMI%d - HTTP server cannot start: %s' % (
                self.__id, emsg))
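# Note: "python -m SimpleHTTPServer" is the Python 2 module; on Python 3 the
# equivalent one-liner is "python -m http.server". A hedged sketch of the same
# spawn, assuming a Python 3 interpreter named python3 is on PATH:
from os import setsid
from subprocess import Popen

def start_http_server_py3(port=8000):
    cmd = "python3 -m http.server %d" % port
    return Popen(cmd, shell=True, preexec_fn=setsid)  # same process-group trick as above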
def dump_database_verbose(database: Dict[str, List[Connection]]) -> None:
    """
    Dump verbose database to the file.

    Args:
        database: database as a dictionary
    """
    logger.info('Dumping database verbose to "{}"'.format(DATABASE_VERBOSE))
    output = {}
    for app_name in database.keys():
        output[app_name] = [
            connection.to_output_verbose() for connection in database[app_name]
        ]
    with open(DATABASE_VERBOSE, 'w') as fp:
        json.dump(output, fp, indent=2, sort_keys=True)
def save(self):
    # close last continuous trading interval
    if self.activeTradingIntervalIndex != len(self.prices) - 2:  # it should be like this
        raise WrongClosingTrades
    totalVol, avgPrice = self.activeTradingInterval.closeInterval()
    self.volumes[self.activeTradingIntervalIndex] = totalVol
    self.prices[self.activeTradingIntervalIndex] = avgPrice

    # save file
    filename = kSaveDir + "%s_%s_profile" % (self.symbol, self.day)
    logger.info("saving %s" % filename)
    if self.InvalidDay:
        filename += '_INVALID'
    filename += '.csv'
    with open(filename, 'w') as f:
        f.write("TIME,SIZE,PRICE\n")
        for i, time in enumerate(self.times_list):
            f.write("%s,%d,%.3f\n" % (time, self.volumes[i], self.prices[i]))
def dump_database(database: Dict[str, List[Connection]]) -> None:
    """
    Dump database to the file.

    Args:
        database: database as a dictionary
    """
    logger.info('Dumping database to "{}"'.format(DATABASE))
    output = {}
    for app_name in database.keys():
        conns = {conn.to_tuple() for conn in database[app_name]}
        output[app_name] = [{
            'client_ja3': conn[0],
            'server_ja3': conn[1],
            'sni': conn[2]
        } for conn in conns]
    with open(DATABASE, 'w') as fp:
        json.dump(output, fp, indent=2, sort_keys=True)
def process_training_dir(app_name: str, dir_path: str) -> List[Connection]:
    """
    Process all pcap files in the training directory.

    Args:
        app_name: training app
        dir_path: path to the training directory

    Returns:
        list of connections (TLS client hello + TLS server hello)
    """
    logger.info('Processing training data for app "{}"'.format(app_name))
    connections = []
    # Process all the files in the directory
    for file_name in listdir(dir_path):
        file_path = '{}{}{}'.format(dir_path, sep, file_name)
        logger.info('Processing file "{}"'.format(file_path))
        connections += process_training_file(app_name, file_path)
    return connections
def process_testing_file(database: Dict[str, Any], file_path: str) -> List[str]:
    """
    Process testing pcap file and recognize the communicating app.

    Args:
        database: database of known apps and their ja3 hashes
        file_path: path to the testing pcap file

    Returns:
        list of recognized apps
    """
    recognize_apps = []
    for connection in get_connections(file_path):
        for app, fingerprints in database.items():
            for fingerprint in fingerprints:
                if connection.compare(fingerprint):
                    recognize_apps.append(app)
    logger.info('recognize_apps = {}'.format(recognize_apps))
    return most_frequent(recognize_apps)
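# The helper most_frequent() is only called above, not shown; a plausible
# sketch (an assumption, not the project's actual implementation) keeps the
# apps that occur most often in the recognized list, e.g. via collections.Counter:
from collections import Counter
from typing import List

def most_frequent_example(apps: List[str]) -> List[str]:
    if not apps:
        return []
    counts = Counter(apps)
    top = max(counts.values())
    return [app for app, n in counts.items() if n == top]

print(most_frequent_example(['chrome', 'chrome', 'firefox']))  # ['chrome']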
def run_hooks(self):
    hooks_dir = nfs.join(self.new_agent_dir, 'hooks')
    if not nfs.exists(hooks_dir):
        return True
    for dir_name in nfs.listdir(hooks_dir):
        hooks_file = nfs.join(hooks_dir, dir_name)
        if nfs.isfile(hooks_file) and hooks_file.endswith('.py'):
            p = Popen([sys.executable, hooks_file], stdout=PIPE,
                      stderr=STDOUT, cwd=ROOT_DIR, shell=True)
            while p.poll() is None:
                time.sleep(1)
            if p.stdout:
                logger.info(p.stdout.read())
            if p.poll() != 0:
                self.http_handler.log_ok('Run hooks {} failed!'
                                         ''.format(hooks_file))
                return False
    return True
p101_str = read_single_statedb('1', 'DO_P_101_START')[3]
if p101_str == '1':
    write_cpppo(L1_PLCS_IP['plc1'], 'HMI_P101-Status', '2')
else:
    write_cpppo(L1_PLCS_IP['plc1'], 'HMI_P101-Status', '1')

mv101_str = read_single_statedb('1', 'DO_MV_101_OPEN')[3]
if mv101_str == '1':
    write_cpppo(L1_PLCS_IP['plc1'], 'HMI_MV101-Status', '2')
else:
    write_cpppo(L1_PLCS_IP['plc1'], 'HMI_MV101-Status', '1')

# wait for the other plcs
time.sleep(3)

logger.info("PLC1 - enters main loop")
start_time = time.time()
while time.time() - start_time < TIMEOUT:

    # Read and update HMI_tag
    lit101_str = read_single_statedb('1', 'AI_LIT_101_LEVEL')[3]
    write_cpppo(L1_PLCS_IP['plc1'], 'HMI_LIT101-Pv', lit101_str)

    val = read_cpppo(L1_PLCS_IP['plc1'], 'HMI_LIT101-Pv', PLC1_CPPPO_CACHE)
    logger.debug("PLC1 - read_cpppo HMI_LIT101-Pv: %s" % val)

    lit101 = float(lit101_str)  # lit101
        if symbol == activeSymbol and day == activeDay:
            # try:
            activeMarketTradingDay.processLine(
                datetime.time(*[int(i) for i in time.split(':')]),
                float(price), int(size), corr, cond)
            # except Exception as e:
            #     print "We couldn't process line %s" % line
            #     raise e
        else:
            if activeMarketTradingDay is not None:
                activeMarketTradingDay.save()
            activeSymbol = symbol
            activeDay = day
            activeMarketTradingDay = MarketTradingDay(symbol, day)

        if index == nrows:
            break

    activeMarketTradingDay.save()


if __name__ == '__main__':
    from constants import *
    import gzip
    import os

    raw_data_files = [fname for fname in os.listdir(RAWDATAFOLDER)
                      if fname[-2:] == 'gz']
    for raw_data_file in raw_data_files:
        logger.info('\n\n Processing %s' % raw_data_file)
        with gzip.open(RAWDATAFOLDER + raw_data_file) as f:
            f.readline()  # skip first line
            readFile(f)
""" # remove_db(STATE_DB_PATH) db_is_new = not os.path.exists(STATE_DB_PATH) if db_is_new: create_db(STATE_DB_PATH, SCHEMA) init_db(STATE_DB_PATH, DATATYPES) # SPHINX_SWAT_TUTORIAL SET LIT101DB # update_statedb('1198', 'AI_LIT_101_LEVEL') update_statedb('798', 'AI_LIT_101_LEVEL') # update_statedb('498', 'AI_LIT_101_LEVEL') # update_statedb('248', 'AI_LIT_101_LEVEL') # SPHINX_SWAT_TUTORIAL END SET LIT101DB update_statedb('710', 'AI_LIT_301_LEVEL') update_statedb('1', 'DO_P_101_START') update_statedb('1', 'DO_MV_101_OPEN') update_statedb('0', 'DO_MV_101_CLOSE') update_statedb('2.55', 'AI_FIT_101_FLOW') update_statedb('2.45', 'AI_FIT_201_FLOW') update_statedb('1', 'DO_MV_201_OPEN') update_statedb('0', 'DO_MV_201_CLOSE') logger.info('DB - Initial values set')
def close_session(session):
    logger.info('Committing changes and closing database session......')
    session.commit()
    session.close()
val = read_cpppo(L1_PLCS_IP['plc1'], 'HMI_LIT101-Pv', PLC1_CPPPO_CACHE)
logger.debug("PLC1 - read_cpppo HMI_LIT101-Pv: %s" % val)

lit101 = float(lit101_str)  # lit101

if lit101 >= LIT_101['HH']:
    logger.warning("PLC1 - lit101 over HH: %.2f >= %.2f" % (
        lit101, LIT_101['HH']))
elif lit101 <= LIT_101['LL']:
    logger.warning("PLC1 - lit101 under LL: %.2f <= %.2f" % (
        lit101, LIT_101['LL']))
    val = read_cpppo(L1_PLCS_IP['plc1'], 'HMI_P101-Status',
                     PLC1_CPPPO_CACHE)
    logger.info("PLC1 - p101 read HMI_P101-Status: %s" % val)
elif lit101 <= LIT_101['L']:
    logger.warning("PLC1 - lit101 under L: %.2f <= %.2f" % (
        lit101, LIT_101['L']))
    val = read_cpppo(L1_PLCS_IP['plc1'], 'HMI_MV101-Status',
                     PLC1_CPPPO_CACHE)
    logger.info("PLC1 - mv101 read HMI_MV101-Status: %s" % val)
elif lit101 >= LIT_101['H']:
    logger.warning("PLC1 - lit101 over H: %.2f >= %.2f" % (
        lit101, LIT_101['H']))
    val = read_cpppo(L1_PLCS_IP['plc1'], 'HMI_MV101-Status',
                     PLC1_CPPPO_CACHE)
    logger.info("PLC1 - mv101 read HMI_MV101-Status: %s" % val)
def get_company_information():
    tickers = Tickers.get_all_tickers()
    for ticker in tickers:
        logger.info("Gathering info for: {}".format(ticker))
        DataMapper.map_company_info(ticker, DataFetcher.get_company_info(ticker))
def create_table(engine, model):
    if model.__table__.exists(engine):
        model.__table__.drop(engine)
    model.__table__.create(engine)
    logger.info('Model created: {}'.format(model.__tablename__))
def stop_http_server(self):
    """Kills the HTTP server."""
    if self.__http is not None:
        killpg(getpgid(self.__http.pid), SIGTERM)
        logger.info('HMI%d - HTTP server stopped' % self.__id)
        self.__http = None
tags = []
tags.extend(P1_PLC3_TAGS)
# tags.extend(P2_PLC3_TAGS)
time.sleep(2)
init_cpppo_server(tags)

# wait for the other plcs
time.sleep(1)

# write_cpppo(L1_PLCS_IP['plc3'], 'AI_LIT_301_LEVEL', '3')
# val = read_cpppo(
#     L1_PLCS_IP['plc3'], 'AI_LIT_301_LEVEL',
#     'examples/swat/plc3_cpppo.cache')
# logger.debug("read_cpppo: %s" % val)

logger.info("PLC3 - enters main loop")
start_time = time.time()
while time.time() - start_time < TIMEOUT:
    # cmd = read_single_statedb('AI_FIT_101_FLOW', '1')
    lit301pv = read_single_statedb('3', 'AI_LIT_301_LEVEL')[3]
    write_cpppo(L1_PLCS_IP['plc3'], 'HMI_LIT301-Pv', lit301pv)

    val = read_cpppo(L1_PLCS_IP['plc3'], 'HMI_LIT301-Pv', PLC3_CPPPO_CACHE)
    logger.debug("PLC3 - read_cpppo HMI_LIT301-Pv: %s" % val)

    time.sleep(T_PLC_R)

logger.info("PLC3 - exits main loop")
def log_finished_algorithm_test(self):
    logger.info("Ran algorithm {} from {} to {}".format(
        self.algorithm.name, self.start_date, self.end_date))
    logger.info("Total Return: {}".format(
        self.algorithm.get_overall_return()))