def add_links_to_queue(links, distance):
    """Queue every not-yet-seen URL in *links* for crawling.

    URLs already recorded as visited or unvisited are skipped so the
    same page is never queued twice.

    :param links: iterable of URL strings found on a page
    :param distance: crawl depth to store with each queued URL
    """
    for url in links:
        # The check_if_in_* helpers are used as predicates; test their
        # truthiness directly instead of the unidiomatic `is True`.
        if Database.check_if_in_visited(url):
            continue
        if Database.check_if_in_unvisited(url):
            continue
        Database.add_url_to_unvisited(url, distance)
def search(self, start=None, end=None, diagnosis=None):
    """Validates the parameters, search the database, and returns the
    results or an error message page.

    :param start: inclusive lower bound on the record test date
    :param end: inclusive upper bound on the record test date
    :param diagnosis: whitespace-separated keywords; each keyword must
        match the record diagnosis case-insensitively
    :return: rendered results page, or an error page when parameters
        are missing or nothing matched
    """
    template = self.lookup.get_template('report/results.mako')
    (u, c) = getUserInfo()
    # Guard clause: all three parameters are required.
    if not (start and end and diagnosis):
        return template.render(username=u, classtype=c, action="noparams")

    conn = Database()
    session = conn.get()
    query = session.query(RadiologyRecord).filter(
        RadiologyRecord.test_date >= start,
        RadiologyRecord.test_date <= end)
    # AND together one case-insensitive substring filter per keyword.
    for word in diagnosis.split():
        query = query.filter(
            RadiologyRecord.diagnosis.ilike("%" + word + "%"))
    results = [
        [entry.patient.last_name,
         entry.patient.first_name,
         entry.patient.address,
         entry.patient.phone,
         entry.test_date,
         entry.diagnosis]
        for entry in query.all()]
    conn.close()
    if not results:
        template = self.lookup.get_template('report/report.mako')
        return template.render(username=u, classtype=c, action="fail")
    return template.render(username=u, classtype=c, results=results)
def __init__(self, database_file_path: str):
    """
    Build the full SQLite table set, hand it to the base Database
    class, and remember the file-level metadata of this database.

    :param database_file_path: Database file path
    """
    table_set = Tables()
    table_set.user = UserTableSqlite()
    table_set.user_authentication = UserAuthenticationTableSqlite()
    table_set.user_authentication_parameter = UserAuthenticationParameterTableSqlite()
    table_set.session_token = SessionTokenTableSqlite()
    table_set.revision = RevisionTableSqlite()
    table_set.project = ProjectTableSqlite()
    table_set.project_information = ProjectInformationTableSqlite()
    table_set.tracker = TrackerTableSqlite()
    table_set.tracker_information = TrackerInformationTableSqlite()
    table_set.tracker_field = TrackerFieldTableSqlite()
    table_set.tracker_field_information = TrackerFieldInformationTableSqlite()
    Database.__init__(self, table_set)

    self.__database_file_path = database_file_path
    self.__application_id = 0x53414c4d  # HEX for "SALM"
    self.__encoding = "\"UTF-8\""
    self.__user_version = 1  # Version of the database file
def index(self): """Returns the main page of the module which allows a user to generate a report for analysis, based on specified criteria. """ # Database connection conn = Database() session = conn.get() # Get a list of all patients patients = [] testTypes = [] for entry in session.query(User).filter(User.class_type == 'p').all(): if (entry.person.__dict__ not in patients): patients.append(entry.person.__dict__) for entry in session.query(RadiologyRecord).distinct().all(): if (entry.test_type not in testTypes): testTypes.append(entry.test_type) template = self.lookup.get_template('analysis/analysis.mako') (u, c) = getUserInfo() conn.close() return template.render( username=u, classtype=c, patients=patients, testTypes=testTypes)
def postImage(self, id=None, radiologyimage=None):
    """Uploads an image to the system and returns a page with the
    result.

    :param id: record id the image belongs to
    :param radiologyimage: uploaded file object (must expose ``.file``)
    """
    template = self.lookup.get_template('upload/upload.mako')
    (u, c) = getUserInfo()
    if (id and radiologyimage.file):
        '''
        Creates the necessary sizes of image in JPG format and converts
        to a byte stream for storage in the database
        '''
        # Paste onto a fresh white RGB canvas so palette/alpha images
        # can be encoded as JPEG.
        orig_image = Image.open(BytesIO(radiologyimage.file.read()))
        image = Image.new("RGB", orig_image.size, (255, 255, 255))
        image.paste(orig_image)
        fullstream = BytesIO()
        image.save(fullstream, "JPEG")
        # NOTE(review): Image.ANTIALIAS was removed in Pillow 10
        # (replaced by Image.LANCZOS) — confirm the pinned Pillow
        # version before upgrading.
        normalstream = BytesIO()
        normalimage = image.copy()
        normalimage.thumbnail((600, 600), Image.ANTIALIAS)
        normalimage.save(normalstream, "JPEG")
        thumbstream = BytesIO()
        thumbimage = image.copy()
        thumbimage.thumbnail((200, 200), Image.ANTIALIAS)
        thumbimage.save(thumbstream, "JPEG")
        conn = Database()
        session = conn.get()
        # Persist all three renditions (thumbnail/regular/full) on the
        # record.
        pacsimage = PacsImage(
            record_id=id,
            thumbnail=thumbstream.getvalue(),
            regular_size=normalstream.getvalue(),
            full_size=fullstream.getvalue())
        session.add(pacsimage)
        # presumably CAC = commit-and-close; verify against Database
        conn.CAC()
        return template.render(username=u, classtype=c, action="added")
    else:
        return template.render(username=u, classtype=c, action="error")
def addRecord(self):
    """Returns a page that allows for the input of a radiology record
    into the system.

    Renders an error page instead when there are no patients or no
    doctors to attach a record to.
    """
    template = self.lookup.get_template('upload/addrecord.mako')
    (u, c) = getUserInfo()
    conn = Database()
    session = conn.get()
    patients = []
    doctors = []
    # Get a list of all patients
    for entry in session.query(User).filter(User.class_type == 'p').all():
        if entry.person.__dict__ not in patients:
            patients.append(entry.person.__dict__)
    # Get a list of all doctors
    for entry in session.query(User).filter(User.class_type == 'd').all():
        if entry.person.__dict__ not in doctors:
            doctors.append(entry.person.__dict__)
    if not patients:
        # BUGFIX: the connection was leaked on this early return.
        conn.close()
        template = self.lookup.get_template('upload/upload.mako')
        return template.render(
            username=u, classtype=c, action="noPatient")
    if not doctors:
        # BUGFIX: the connection was leaked on this early return.
        conn.close()
        template = self.lookup.get_template('upload/upload.mako')
        return template.render(
            username=u, classtype=c, action="noDoctor")
    p = sorted(patients, key=itemgetter('last_name'))
    d = sorted(doctors, key=itemgetter('last_name'))
    conn.close()
    return template.render(
        username=u, classtype=c, patients=p, doctors=d)
def scrape_and_save():
    """Scrape the remote data and persist every scraped item through
    the Database class.
    """
    for scraped_item in scrape_data():
        Database.save_data_to_db(scraped_item)
def create_jobs():
    """Repeatedly feed unvisited URLs to the worker queue.

    BUGFIX: the original called itself recursively after every pass,
    which grows the call stack without bound and eventually raises
    RecursionError on long crawls; an explicit loop performs the same
    repeated scan safely.
    """
    while True:
        for item in Connection.collection_unvisited.find():
            url = item['url']
            distance = item['distance']
            # Only schedule URLs not yet visited, restricted to the
            # crawl domain unless broad_search is enabled.
            if not Database.check_if_in_visited(url) and (
                    domain == get_domain_name(url) or broad_search):
                thread_queue.put(item)
                Database.add_url_to_visited(url, distance)
        # Wait for the workers to drain the queue before rescanning.
        thread_queue.join()
class DatabaseTests:
    """Smoke-tests user and source insertion on a fresh database file."""

    def __init__(self):
        db = "test.db"
        # Start from a clean slate; tolerate a missing file from a
        # previous run instead of crashing on os.remove.
        if os.path.exists(db):
            os.remove(db)
        self.__db = Database(db)
        self.__db.load()

        failures = 0
        try:
            self.__db.add_user("jrenggli", "*****@*****.**", 42)
            self.__db.add_user("yschaerli", "*****@*****.**", 8383)
            # Re-adding "jrenggli" in different case must be rejected by
            # the wrapper's own error, not leak a raw IntegrityError.
            self.__db.add_user("jreNggli", "*****@*****.**", 28482)
        except sqlite3.IntegrityError:
            assert False
        except Exception:
            failures += 1
        finally:
            assert failures == 1

        src = None
        try:
            src = self.__db.add_source(
                "RCSB",
                "text/pdb",
                "http://www.rcsb.org/pdb/download/downloadFile.do?fileFormat=pdb&compression=NO&structureId={0}",
                "RCSB Protein Data Bank")
        # BUGFIX: `except E, e:` is Python-2-only syntax and a
        # SyntaxError on Python 3; use `except E:` (the bound exception
        # was unused, so the binding is dropped).
        except sqlite3.IntegrityError:
            assert False
        except Exception:
            failures += 1
            assert False
def user(self, firstname=None, lastname=None, address=None, email=None,
         phone=None, password=None, password2=None):
    """Returns a page to edit a user's own information. If new
    information was passed on to it, it verifies it and edits
    accordingly.

    :param password: new password; only applied when it equals
        ``password2``, otherwise the whole edit is rejected
    """
    template = self.lookup.get_template('user.mako')
    (u, c) = getUserInfo()
    conn = Database()
    session = conn.get()
    user = session.query(User).filter(User.user_name == u).one()
    fail = False
    # make sure password was entered correctly
    if password or password2:
        if password == password2:
            # NOTE(review): password is stored as submitted — no hashing
            # visible in this block; confirm hashing happens elsewhere.
            user.password = password
        else:
            fail = True
    # Apply each provided field only while the edit is still valid.
    if firstname and not fail:
        user.person.first_name = firstname
    if lastname and not fail:
        user.person.last_name = lastname
    if address and not fail:
        user.person.address = address
    if email and not fail:
        user.person.email = email
    if phone and not fail:
        user.person.phone = phone
    if firstname or lastname or address or email or phone or password:
        if not fail:
            conn.commit()
    # Re-read the row so the page shows the values now in the database.
    user = session.query(User).filter(User.user_name == u).one()
    oldinfo = []
    oldinfo.append(user.person.first_name)
    oldinfo.append(user.person.last_name)
    oldinfo.append(user.person.address)
    oldinfo.append(user.person.email)
    oldinfo.append(user.person.phone)
    if firstname or lastname or address or email or phone or password:
        if fail:
            conn.close()
            return template.render(
                username=u, classtype=c, oldinfo=oldinfo, action="nomatch")
        else:
            conn.close()
            return template.render(
                username=u, classtype=c, oldinfo=oldinfo, action="success")
    else:
        # No fields submitted: just display the current information.
        conn.close()
        return template.render(username=u, classtype=c, oldinfo=oldinfo)
class DataManager(Thread):
    """
    This is the DataManager class, it creates the database, data queue
    and the condition variable for synchronization between it, the
    framework and the plugins
    """

    def __init__(self):
        super().__init__()
        self.db = Database()
        self.db.create_default_database()
        self.q = DataQueue()
        # Guards the queue; producers notify, the run() loop waits.
        self.condition = Condition()
        # Set to True by shutdown() to terminate the run() loop.
        self.kill = False
        self.logger = Logger().get('database.datamanager.DataManager')

    def run(self):
        """
        This will insert all data in the queue and then once finished
        give up control of the condition variable
        """
        while not self.kill:
            self.condition.acquire()
            # Sleep until insert_data()/shutdown() notifies us.
            if self.q.check_empty():
                self.condition.wait()
            # Drain the queue, handing each item to Table_Insert.
            while not self.q.check_empty():
                value = self.q.get_next_item()
                Table_Insert.prepare_data_for_insertion(
                    self.q.dv.table_schema, value)
            self.condition.notify()
            self.condition.release()

    def insert_data(self, data):
        """
        Synchronously inserts data into the database.

        :param data: A dictionary with a table name as its key and a
        dictionary of column names and corresponding values as its
        value.
        """
        self.condition.acquire()
        # Only wake the consumer when the item was actually queued.
        if self.q.insert_into_data_queue(data):
            self.condition.notify()
        self.condition.release()

    def shutdown(self):
        # Flag the loop to stop, wake it so it can observe self.kill,
        # then wait for the worker thread to exit.
        self.kill = True
        self.condition.acquire()
        self.condition.notify()
        self.condition.release()
        self.join()
        self.logger.debug('Data manager has shut down.')
def __init__(self):
    """Configure file logging, open the server database and create
    the responder — all paths come from the English dictionary.
    """
    lang = Dictionary.get_language(Dictionary.ENGLISH)
    log_path = os.path.join(__here__, lang.get("_server_log_"))
    logging.basicConfig(filename=log_path, level=logging.DEBUG)
    db_path = os.path.join(__here__, lang.get("_server_db_"))
    self.__database = Database(db_path)
    self.__server = responder.Responder()
def __init__(self):
    """Set up the database, the work queue, the condition variable
    used for synchronisation, and the logger.
    """
    super().__init__()
    self.kill = False
    self.db = Database()
    self.db.create_default_database()
    self.q = DataQueue()
    self.condition = Condition()
    self.logger = Logger().get('database.datamanager.DataManager')
def load_data(self):
    """Refresh the list widget with every record stored in the
    database, one "date : title - image" entry per record.
    """
    self.list_widget.clear()
    for record in Database.load_data_from_db():
        label = record.date + " : " + record.title + " - " + record.image
        self.list_widget.addItem(label)
def generate(self, start=None, end=None, patient=None, testType=None):
    """Returns a generated report for the analysis module

    :param start: inclusive lower bound on the test date ("" = no bound)
    :param end: inclusive upper bound on the test date ("" = no bound)
    :param patient: patient id, or "_ALLPATIENTS_" for no filter
    :param testType: test type, or "_ALLTESTTYPES_" for no filter
    """
    template = self.lookup.get_template('analysis/generate.mako')
    # Database connection
    conn = Database()
    session = conn.get()
    #Basic query
    #query = session.query(RadiologyRecord).join(PacsImage, RadiologyRecord.record_id == PacsImage.record_id).join(Person, RadiologyRecord.patient_id == Person.person_id)
    # Count the images per test type, ordered by patient.
    query = session.query(
        RadiologyRecord, RadiologyRecord.test_type,
        func.count(PacsImage.record_id).label('total')).join(
        PacsImage).group_by(RadiologyRecord.test_type).order_by(
        RadiologyRecord.patient_id)
    # All edge cases are inclusive
    if (start != "") and (end != ""):
        query = query.filter(
            RadiologyRecord.test_date <= end).filter(
            RadiologyRecord.test_date >= start)
    testTypes = []
    if testType != "_ALLTESTTYPES_":
        query = query.filter(RadiologyRecord.test_type == testType)
        testTypes.append(testType)
    else:
        # No specific type requested: report every distinct test type.
        for entry in session.query(RadiologyRecord).distinct().all():
            if (entry.test_type not in testTypes):
                testTypes.append(entry.test_type)
    if patient != "_ALLPATIENTS_":
        query = query.filter(RadiologyRecord.patient_id == patient)
    results = query.all()
    #results = []
    #for entry in query.all():
    #    if entry.__dict__ not in results:
    #        results.append(entry.__dict__)
    (u, c) = getUserInfo()
    conn.close()
    return template.render(
        username=u, classtype=c, results=results, testTypes=testTypes)
def main(): """Main Irads function. Sets everything up and launches our application """ # Set up CherryPy config current_dir = os.path.dirname(os.path.abspath(__file__)) # Static directories for serving up various files conf = {'/': {'tools.staticdir.root': current_dir, 'tools.sessions.on': True, 'tools.sessions.storage_type': "ram", 'tools.sessions.timeout': 3600 }, '/css': {'tools.staticdir.on': True, 'tools.staticdir.dir': 'css'}, '/docs': {'tools.staticdir.on': True, 'tools.staticdir.dir': 'docs/html'}, '/js': {'tools.staticdir.on': True, 'tools.staticdir.dir': 'js'}} # Set up IP address and port cherrypy.config.update({'server.socket_host': config.IP, 'server.socket_port': config.PORT}) # Connect to the database database = Database(connect=True) database.connect( config.DATABASE_USERNAME, config.DATABASE_PASSWORD, config.DATABASE_HOSTNAME, config.DATABASE) # Set up CherryPy mapping Mapping = Irads() Mapping.analysis = IradsAnalysis() Mapping.manager = IradsManager() Mapping.report = IradsReport() Mapping.search = IradsSearch() Mapping.upload = IradsUpload() # Start cherrypy.quickstart(Mapping, '/', config=conf)
def index(self, username=None, password=None):
    """Returns a login page, and responsible for verifying the
    provided login information and accepting or rejecting it
    accordingly.

    :raises cherrypy.HTTPRedirect: to /home on successful login
    """
    template = self.lookup.get_template('login.mako')
    if not (username and password):
        return template.render(loginStatus=0)

    username = escape(username, True)
    password = escape(password, True)
    # SECURITY NOTE: the password is compared as plain text against the
    # stored value; it should be hashed (e.g. bcrypt). Flagged only —
    # changing it here would break existing stored credentials.
    conn = Database()
    session = conn.get()
    try:
        user = session.query(User).filter(
            User.user_name == username).filter(
            User.password == password).one()
        cherrypy.session['username'] = user.user_name
        cherrypy.session['classtype'] = user.class_type
        # BUGFIX: close the connection before the redirect is raised —
        # it was previously leaked on every successful login.
        conn.close()
        raise cherrypy.HTTPRedirect("/home")
    except (NoResultFound, MultipleResultsFound):
        # "No such user" and "ambiguous user" are both login failures;
        # the two previously duplicated handlers are merged.
        conn.close()
        return template.render(loginStatus=1)
def selectRecord(self):
    """Render the record-selection page listing only the radiology
    records that belong to the currently logged-in radiologist, so one
    of them can be chosen for image upload.
    """
    template = self.lookup.get_template('upload/selectrecord.mako')
    (u, c) = getUserInfo()
    conn = Database()
    session = conn.get()
    current_user = session.query(User).filter(User.user_name == u).one()
    person = session.query(Person).filter(
        Person.person_id == current_user.person_id).one()
    # Flatten each record into the column list the template expects.
    record = [
        [r.record_id, r.prescribing_date, r.test_date,
         r.diagnosis, r.description]
        for r in person.radiologyrecords_radiologist]
    conn.close()
    return template.render(username=u, classtype=c, records=record)
def postRecord(self, patient=None, doctor=None, test_type=None,
               test_date=None, prescribing_date=None, diagnosis=None,
               description=None):
    """Adds a record to the system and returns a page with the result."""
    template = self.lookup.get_template('upload/upload.mako')
    (u, c) = getUserInfo()
    # Every field is mandatory; bail out early otherwise.
    required = (patient, doctor, test_type, test_date,
                prescribing_date, diagnosis, description)
    if not all(required):
        return template.render(username=u, classtype=c, action="error")

    conn = Database()
    session = conn.get()
    # The logged-in user is recorded as the radiologist of the record.
    radiologist = session.query(User).filter(User.user_name == u).one()
    new_record = RadiologyRecord(
        patient_id=patient,
        doctor_id=doctor,
        radiologist_id=radiologist.person_id,
        test_type=test_type,
        test_date=test_date,
        prescribing_date=prescribing_date,
        diagnosis=diagnosis,
        description=description)
    session.add(new_record)
    conn.CAC()
    return template.render(username=u, classtype=c, action="success")
def test_connect(self):
    """DatabaseHandler.connect must return None for any path that is
    not the configured honeypot database, and a usable connection for
    the real one.
    """
    # Negative testing
    self.assertIsNone(DatabaseHandler().connect("database"))
    self.assertIsNone(DatabaseHandler().connect("database.db"))
    self.assertIsNone(DatabaseHandler().connect("asdl;kfjeiei"))
    self.assertIsNone(DatabaseHandler().connect("./honeyDB/honeyDB.sqllite"))
    self.assertIsNone(DatabaseHandler().connect("./honeyDB/honeyDB.db"))
    self.assertIsNone(DatabaseHandler().connect(" "))
    self.assertIsNone(DatabaseHandler().connect(""))
    # Testing for correct DB
    plugin_cfg_path = "tests/reportserver/testconfig/plugins.cfg"
    global_cfg_path = "tests/reportserver/testconfig/global.cfg"
    global_config = GlobalConfig(plugin_cfg_path, global_cfg_path, True)
    global_config.read_global_config()
    global_config.read_plugin_config()
    # Create the database on disk so connecting to its path succeeds.
    db = Database()
    db.create_db_dir()
    db.create_db()
    db_path = global_config["Database"]["path"]
    self.assertTrue(sqlite3.connect(db_path))
    self.assertTrue(DatabaseHandler().connect(db_path))
    # NOTE(review): connect(None) is expected to be truthy here —
    # presumably it falls back to the configured default path; confirm.
    self.assertTrue(DatabaseHandler().connect(None))
def get_by_email(cls, email):
    """Look up a user by e-mail address.

    :param email: address to search the "users" collection for
    :return: an instance built from the stored document, or None when
        no user matches
    """
    data = Database.find_one("users", {'email': email})
    return cls(**data) if data is not None else None
def application(argv):
    """SIIS entry point.

    Parses the command line, then either runs one of the tool modes
    (binarize / fetch / optimize / sync / rebuild) and exits, or starts
    the full live/backtest trading application: all services, the
    terminal command loop, and an orderly shutdown.

    :param argv: process argument vector (sys.argv style)
    """
    fix_thread_set_name()

    # init terminal displayer
    Terminal()

    options = {
        'working-path': os.getcwd(),
        'identity': 'real',
        'config-path': './user/config',
        'log-path': './user/log',
        'reports-path': './user/reports',
        'markets-path': './user/markets',
        'log-name': 'siis.log'
    }

    # create initial siis data structure if necessary
    install(options)

    siis_log = SiisLog(options, Terminal().inst().style())
    siis_logger = logging.getLogger('siis')

    # parse process command line
    if len(argv) > 1:
        options['livemode'] = True

        # utc or local datetime ?
        for arg in argv:
            if arg.startswith('--'):
                if arg == '--paper-mode':
                    # livemode but in paper-mode
                    options['paper-mode'] = True
                elif arg == '--fetch':
                    # use the fetcher
                    options['fetch'] = True
                elif arg == '--binarize':
                    # use the binarizer
                    options['binarize'] = True
                elif arg == '--optimize':
                    # use the optimizer
                    options['optimize'] = True
                elif arg == '--sync':
                    # use the syncer
                    options['sync'] = True
                elif arg == '--rebuild':
                    # use the rebuilder
                    options['rebuild'] = True
                elif arg == '--backtest':
                    # backtest mean always paper-mode
                    options['paper-mode'] = True
                    options['backtesting'] = True
                elif arg.startswith('--timestep='):
                    # backesting timestep, default is 60 second
                    options['timestep'] = float(arg.split('=')[1])
                elif arg.startswith('--time-factor='):
                    # backtesting time-factor
                    options['time-factor'] = float(arg.split('=')[1])
                elif arg.startswith('--from='):
                    # if backtest from date (if ommited use whoole data),
                    # fetch, binarize, optimize from date
                    options['from'] = datetime.strptime(
                        arg.split('=')[1],
                        '%Y-%m-%dT%H:%M:%S').replace(tzinfo=UTC())
                elif arg.startswith('--to='):
                    # if backtest to date (can be ommited), fetch,
                    # binarize, optimize to date
                    options['to'] = datetime.strptime(
                        arg.split('=')[1],
                        '%Y-%m-%dT%H:%M:%S').replace(tzinfo=UTC())
                elif arg.startswith('--last='):
                    # fetch the last n data history
                    options['last'] = int(arg.split('=')[1])
                elif arg.startswith('--market='):
                    # fetch, binarize, optimize the data history for this market
                    options['market'] = arg.split('=')[1]
                elif arg.startswith('--spec='):
                    # fetcher data history option
                    options['option'] = arg.split('=')[1]
                elif arg.startswith('--broker='):
                    # broker name for fetcher, watcher, optimize, binarize
                    options['broker'] = arg.split('=')[1]
                elif arg.startswith('--timeframe='):
                    # fetch, binarize, optimize base timeframe
                    options['timeframe'] = arg.split('=')[1]
                elif arg.startswith('--cascaded='):
                    # fetch cascaded ohlc generation
                    options['cascaded'] = arg.split('=')[1]
                elif arg == '--watcher-only':
                    # feed only with live data (not compatible with --read-only)
                    options['watcher-only'] = True
                elif arg == '--read-only':
                    # does not write to the database (not compatible with --watcher-only)
                    options['read-only'] = True
                elif arg == '--check-data':
                    # check DB ohlc data (@todo)
                    options['check-data'] = True
                elif arg.startswith('--profile='):
                    # appliances profile name
                    options['profile'] = arg.split('=')[1]
                elif arg == '--version':
                    Terminal.inst().info('%s %s' % (APP_SHORT_NAME, '.'.join(
                        [str(x) for x in APP_VERSION])))
                    sys.exit(0)
                elif arg == '--help' or arg == '-h':
                    # BUGFIX: was `arg == '--help' or '-h'`, which is
                    # always truthy ('-h' is a non-empty string), so ANY
                    # unrecognized --option printed the help and exited.
                    display_cli_help()
                    sys.exit(0)
            else:
                options['identity'] = argv[1]

        # watcher-only read-only mutual exclusion
        if options.get('watcher-only') and options.get('read-only'):
            Terminal.inst().error(
                "Options --watcher-only and --read-only are mutually exclusive !")
            sys.exit(-1)

        # backtesting
        if options.get('backtesting', False):
            if options.get('from') is None or options.get('to') is None:
                del options['backtesting']
                Terminal.inst().error(
                    "Backtesting need from= and to= date time")
                sys.exit(-1)

    if options['identity'].startswith('-'):
        Terminal.inst().error("First option must be the identity name")

    #
    # binarizer mode
    #

    if options.get('binarize'):
        if options.get('market') and options.get('from') and options.get(
                'to') and options.get('broker'):
            from tools.binarizer import do_binarizer
            do_binarizer(options, siis_logger)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # fetcher mode
    #

    if options.get('fetch'):
        if options.get('market') and options.get('broker') and options.get(
                'timeframe'):
            from tools.fetcher import do_fetcher
            do_fetcher(options, siis_logger)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # optimizer mode
    #

    if options.get('optimize'):
        if options.get('market') and options.get('from') and options.get(
                'to') and options.get('broker') and options.get('timeframe'):
            from tools.optimizer import do_optimizer
            do_optimizer(options, siis_logger)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # sync mode
    #

    if options.get('sync'):
        if options.get('broker'):
            from tools.syncer import do_syncer
            do_syncer(options, siis_logger)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # rebuilder mode
    #

    if options.get('rebuild'):
        if options.get('market') and options.get('from') and options.get(
                'to') and options.get('broker') and options.get('timeframe'):
            from tools.rebuilder import do_rebuilder
            do_rebuilder(options, siis_logger)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # normal mode
    #

    Terminal.inst().info("Starting SIIS using %s identity..."
                         % options['identity'])
    Terminal.inst().action("- (Press 'q' twice to terminate)")
    Terminal.inst().action("- (Press 'h' for help)")
    Terminal.inst().flush()

    if options.get('backtesting'):
        Terminal.inst().notice("Process a backtesting.")

    if options.get('paper-mode'):
        Terminal.inst().notice("- Using paper-mode trader.")
    else:
        Terminal.inst().notice("- Using live-mode trader.")

    signal.signal(signal.SIGINT, signal_handler)

    # monitoring service
    Terminal.inst().info("Starting monitor service...")
    monitor_service = MonitorService(options)

    # desktop notifier
    desktop_service = DesktopNotifier()
    # discord_service = DiscordNotifier()
    view_service = ViewService()

    # database manager
    Database.create(options)
    Database.inst().setup(options)

    # watcher service
    Terminal.inst().info("Starting watcher's service...")
    watcher_service = WatcherService(options)
    watcher_service.start(options)

    # trader service
    Terminal.inst().info("Starting trader's service...")
    trader_service = TraderService(watcher_service, monitor_service, options)
    trader_service.start(options)

    # want to display desktop notification and update views
    watcher_service.add_listener(desktop_service)
    watcher_service.add_listener(view_service)

    # want to display desktop notification and update views
    trader_service.add_listener(desktop_service)
    trader_service.add_listener(view_service)

    # trader service listen to watcher service and update views
    watcher_service.add_listener(trader_service)

    # strategy service
    Terminal.inst().info("Starting strategy's service...")
    strategy_service = StrategyService(watcher_service, trader_service,
                                       monitor_service, options)
    strategy_service.start(options)

    # strategy service listen to watcher service
    watcher_service.add_listener(strategy_service)

    # strategy service listen to trader service
    trader_service.add_listener(strategy_service)

    # want to display desktop notification, update view and notify on discord
    # strategy_service.add_listener(notifier_service)
    # @todo add notifier service and replace desktop service as desktop
    # notifier into this service same for discord...
    strategy_service.add_listener(desktop_service)
    strategy_service.add_listener(view_service)

    # for display stats (@todo move to views)
    desktop_service.strategy_service = strategy_service
    desktop_service.trader_service = trader_service

    # register terminal commands
    commands_handler = CommandsHandler()
    commands_handler.init(options)

    # cli commands registration
    register_general_commands(commands_handler)
    register_trading_commands(commands_handler, trader_service,
                              strategy_service, monitor_service)
    register_region_commands(commands_handler, strategy_service)

    setup_views(siis_logger, view_service, watcher_service, trader_service,
                strategy_service)

    # setup and start the monitor service
    monitor_service.setup(watcher_service, trader_service, strategy_service)
    monitor_service.start()

    Terminal.inst().message("Running main loop...")

    Terminal.inst().upgrade()
    Terminal.inst().message("Steady...", view='notice')

    display_welcome()

    LOOP_SLEEP = 0.016  # in second
    MAX_CMD_ALIVE = 15  # in second

    running = True

    value = None
    value_changed = False
    command_timeout = 0
    prev_timestamp = 0

    try:
        while running:
            # keyboard input commands
            try:
                c = Terminal.inst().read()
                key = Terminal.inst().key()

                if c:
                    # split the commande line
                    args = [
                        arg for arg in (value[1:].split(' ')
                                        if value and value.startswith(':')
                                        else []) if arg
                    ]
                    if value and value[-1] == ' ':
                        args.append('')

                    # update the current type command
                    commands_handler.process_char(c, args)

                if key:
                    if key == 'KEY_ESCAPE':
                        # cancel command
                        value = None
                        value_changed = True
                        command_timeout = 0

                        # use command mode
                        Terminal.inst().set_mode(Terminal.MODE_DEFAULT)

                    # split the commande line
                    args = [
                        arg for arg in (value[1:].split(' ')
                                        if value and value.startswith(':')
                                        else []) if arg
                    ]
                    if value and value[-1] == ' ':
                        args.append('')

                    # process on the arguments
                    args = commands_handler.process_key(
                        key, args,
                        Terminal.inst().mode == Terminal.MODE_COMMAND)

                    if args:
                        # regen the updated commande ligne
                        value = ":" + ' '.join(args)
                        value_changed = True
                        command_timeout = 0

                    desktop_service.on_key_pressed(key)

                # @todo move the rest to command_handler
                if c:
                    if value and value[0] == ':':
                        if c == '\b':
                            # backspace, erase last command char
                            value = value[:-1] if value else None
                            value_changed = True
                            command_timeout = time.time()
                        elif c != '\n':
                            # append to the advanced command value
                            value += c
                            value_changed = True
                            command_timeout = time.time()
                        elif c == '\n':
                            result = commands_handler.process_cli(value)
                            command_timeout = 0

                            if not result:
                                # maybe an application level command
                                if value == ':q' or value == ':quit':
                                    running = False
                                elif value.startswith(':x '):
                                    # manually exit position at market
                                    # @todo move as command
                                    target = value[2:]

                                    if target == "all" or target == "ALL":
                                        Terminal.inst().action(
                                            "Send close to market command for any positions",
                                            view='status')
                                        trader_service.command(
                                            Trader.COMMAND_CLOSE_ALL_MARKET,
                                            {})
                                    else:
                                        Terminal.inst().action(
                                            "Send close to market command for position %s" % (
                                                target, ), view='status')
                                        trader_service.command(
                                            Trader.COMMAND_CLOSE_MARKET,
                                            {'key': target})
                                elif value.startswith(':d '):
                                    # @deprecated manually duplicate a
                                    # position entry or exit must be
                                    # associated to social strategy
                                    # @todo move as command
                                    target = value[2:]

                                    Terminal.inst().action(
                                        "Send replicate to market command for position %s" % (
                                            target, ), view='status')
                                    trader_service.command(
                                        Trader.COMMAND_TRIGGER,
                                        {'key': target})

                            # clear command value
                            value_changed = True
                            value = None

                            # use default mode
                            Terminal.inst().set_mode(Terminal.MODE_DEFAULT)
                    elif c != '\n':
                        # initial command value
                        value = "" + c
                        value_changed = True
                        command_timeout = time.time()

                    if value and value[0] == ':':
                        # use command mode
                        Terminal.inst().set_mode(Terminal.MODE_COMMAND)

                if value and value[0] != ':':
                    # direct key

                    # use default mode
                    Terminal.inst().set_mode(Terminal.MODE_DEFAULT)

                    try:
                        result = commands_handler.process_accelerator(key)

                        # @todo convert to Command object accelerator
                        if not result:
                            result = True

                            # @todo might be replaced by views
                            if value == 'p':
                                trader_service.command(
                                    Trader.COMMAND_LIST_POSITIONS, {})
                            elif value == 'o':
                                trader_service.command(
                                    Trader.COMMAND_LIST_ORDERS, {})
                            elif value == 'g':
                                trader_service.command(
                                    Trader.COMMAND_SHOW_PERFORMANCE, {})

                            # display views
                            elif value == 'C':
                                Terminal.inst().clear_content()
                            elif value == 'D':
                                Terminal.inst().switch_view('debug')
                            elif value == 'I':
                                Terminal.inst().switch_view('content')
                            elif value == 'F':
                                Terminal.inst().switch_view('strategy')
                            elif value == 'S':
                                Terminal.inst().switch_view('stats')
                            elif value == 'P':
                                Terminal.inst().switch_view('perf')
                            elif value == 'T':
                                Terminal.inst().switch_view('ticker')
                            elif value == 'A':
                                Terminal.inst().switch_view('account')
                            elif value == 'M':
                                Terminal.inst().switch_view('market')
                            elif value == 'Q':
                                Terminal.inst().switch_view('asset')
                            elif value == 'N':
                                Terminal.inst().switch_view('signal')
                            elif value == '?':
                                # ping services and workers
                                watcher_service.ping()
                                trader_service.ping()
                                strategy_service.ping()
                                monitor_service.ping()
                            elif value == ' ':
                                # a simple mark on the terminal
                                Terminal.inst().notice(
                                    "Trading time %s" % (
                                        datetime.fromtimestamp(
                                            strategy_service.timestamp
                                        ).strftime('%Y-%m-%d %H:%M:%S')),
                                    view='status')
                            elif value == 'a':
                                desktop_service.audible = not desktop_service.audible
                                Terminal.inst().action(
                                    "Audible notification are now %s" % (
                                        "actives" if desktop_service.audible
                                        else "disabled", ), view='status')
                            elif value == 'n':
                                desktop_service.popups = not desktop_service.popups
                                Terminal.inst().action(
                                    "Desktop notification are now %s" % (
                                        "actives" if desktop_service.popups
                                        else "disabled", ), view='status')
                            elif value == 'e':
                                desktop_service.discord = not desktop_service.discord
                                Terminal.inst().action(
                                    "Discord notification are now %s" % (
                                        "actives" if desktop_service.discord
                                        else "disabled", ), view='status')
                            else:
                                result = False

                        if result:
                            value = None
                            value_changed = True
                            command_timeout = 0
                    except Exception as e:
                        has_exception(siis_logger, e)
            except IOError:
                pass
            except Exception as e:
                has_exception(siis_logger, e)

            # display advanced command only
            if value_changed:
                if value and value.startswith(':'):
                    Terminal.inst().action("Command: %s" % value[1:],
                                           view='command')
                else:
                    Terminal.inst().message("", view='command')

            # clear input if no char hit during the last MAX_CMD_ALIVE
            if value and not value.startswith(':'):
                if (command_timeout > 0) and (
                        time.time() - command_timeout >= MAX_CMD_ALIVE):
                    value = None
                    value_changed = True
                    Terminal.inst().info("Current typing canceled",
                                         view='status')

            try:
                # display strategy tarding time (update max once per second)
                if strategy_service.timestamp - prev_timestamp >= 1.0:
                    mode = "live"
                    if trader_service.backtesting:
                        mode = "backtesting"
                    elif trader_service.paper_mode:
                        mode = "paper-mode"

                    Terminal.inst().message(
                        "%s - %s" % (mode, datetime.fromtimestamp(
                            strategy_service.timestamp).strftime(
                            '%Y-%m-%d %H:%M:%S')), view='notice')
                    prev_timestamp = strategy_service.timestamp

                # synchronous operations here
                watcher_service.sync()
                trader_service.sync()
                strategy_service.sync()
                monitor_service.sync()
                desktop_service.sync()
                view_service.sync()

                Terminal.inst().update()
            except BaseException as e:
                siis_logger.error(traceback.format_exc())
                Terminal.inst().error(repr(e))

            # don't waste CPU time on main thread
            time.sleep(LOOP_SLEEP)
    finally:
        Terminal.inst().restore_term()

        Terminal.inst().info("Terminate...")
        Terminal.inst().flush()

        commands_handler.terminate(options)
        commands_handler = None

        # service terminate
        monitor_service.terminate()
        strategy_service.terminate()
        trader_service.terminate()
        watcher_service.terminate()
        desktop_service.terminate()
        # discord_service.terminate()
        view_service.terminate()

        Terminal.inst().info("Saving database...")
        Terminal.inst().flush()

        Database.terminate()

        Terminal.inst().info("Bye!")
        Terminal.inst().flush()

        Terminal.terminate()
from database.database import Database
from commands.print.printPlayers import printPlayers
from commands.print.printTeams import printTeams
from utils.promptNumber import promptNumber

# Module-level shared connection to the MLB SQLite database.
database = Database.connect('mlb.db')


def editTeam():
    """Interactively pick a team by its printed position and prompt for
    its new name, championships and world-series counts.
    """
    teams = database.team.getTeams()
    # print all players
    printTeams()
    positions = promptNumber("Introduzca la posición del equipo a editar: ")
    # NOTE(review): no bounds check here — an out-of-range position
    # raises IndexError; confirm promptNumber validates the input.
    team = teams[positions]
    ask = input("{} Éste es el equipo que desea editar ? ".format(team.name))
    if ask == "si":
        name = input("Edita el nombre {} ".format(team.name))
        championships = input("Edita la cantidad de coronas de {} ".format(
            team.name))
        world_series = input(
            "Edita la cantidad de series mudiales de {} ".format(team.name))
        # NOTE(review): the collected values are never persisted in the
        # visible span — the function may be truncated here; confirm
        # against the full file.
def setUp(self):
    """Start each test from an initialised but empty database."""
    initialize_database()
    db = Database()
    db.delete_all()
    self.database = db
def fetch_and_generate(self, market_id, timeframe, from_date=None, to_date=None, n_last=1000, fetch_option="", cascaded=None):
    """
    Fetch trades (timeframe == 0) or OHLC candles (timeframe > 0) for a
    market, store them, and optionally cascade-generate higher timeframes.

    :param market_id: Market identifier to fetch.
    :param timeframe: 0 for raw trades/ticks, else a timeframe in seconds
        that must be one of self.GENERATED_TF.
    :param from_date: Start date; when None it is derived from n_last.
    :param to_date: End date; when None, now plus one timeframe period.
    :param n_last: Number of periods used to derive from_date when absent.
    :param fetch_option: Extra fetch option (unused in this body).
    :param cascaded: Max timeframe up to which higher candles are generated.
    """
    if timeframe > 0 and timeframe not in self.GENERATED_TF:
        logger.error("Timeframe %i is not allowed !" % (timeframe, ))
        return

    generators = []
    from_tf = timeframe

    self._last_ticks = []
    self._last_ohlcs = {}

    if not from_date and n_last:
        # compute a from date covering n_last periods of the timeframe,
        # truncated to the start of the period
        # NOTE(review): stdlib datetime.timedelta has no 'months' argument --
        # confirm a custom timedelta/UTC implementation is imported here.
        today = datetime.now().astimezone(UTC())

        if timeframe >= Instrument.TF_MONTH:
            from_date = (
                today - timedelta(months=int(timeframe / Instrument.TF_MONTH) * n_last)).replace(day=1).replace(hour=0).replace(
                    minute=0).replace(second=0)
        elif timeframe >= Instrument.TF_1D:
            from_date = (today - timedelta(
                days=int(timeframe / Instrument.TF_1D) * n_last)).replace(
                    hour=0).replace(minute=0).replace(second=0)
        elif timeframe >= Instrument.TF_1H:
            from_date = (today - timedelta(
                hours=int(timeframe / Instrument.TF_1H) * n_last)).replace(
                    minute=0).replace(second=0)
        elif timeframe >= Instrument.TF_1M:
            from_date = (
                today - timedelta(minutes=int(timeframe / Instrument.TF_1M) * n_last)).replace(second=0)
        elif timeframe >= Instrument.TF_1S:
            from_date = (today - timedelta(
                seconds=int(timeframe / Instrument.TF_1S) * n_last))

        from_date = from_date.replace(microsecond=0)

    if not to_date:
        # default end date: now plus one period (so the current, still-open
        # period is included)
        today = datetime.now().astimezone(UTC())

        if timeframe == Instrument.TF_MONTH:
            to_date = today + timedelta(months=1)
        else:
            to_date = today + timedelta(seconds=timeframe)

        to_date = to_date.replace(microsecond=0)

    # cascaded generation of candles: chain one generator per higher
    # timeframe, each feeding from the previous one's output
    if cascaded:
        for tf in Fetcher.GENERATED_TF:
            if tf > timeframe:
                # from timeframe greater than initial
                if tf <= cascaded:
                    # until max cascaded timeframe
                    generators.append(CandleGenerator(from_tf, tf))
                    from_tf = tf

                    # store for generation
                    self._last_ohlcs[tf] = []
            else:
                from_tf = tf

    if timeframe > 0:
        self._last_ohlcs[timeframe] = []

    n = 0  # counter since last progress report
    t = 0  # total fetched

    if timeframe == 0:
        # raw trades/ticks path
        for data in self.fetch_trades(market_id, from_date, to_date, None):
            # store (int timestamp in ms, str bid, str ofr, str volume)
            Database.inst().store_market_trade(
                (self.name, market_id, data[0], data[1], data[2], data[3]))

            if generators:
                self._last_ticks.append(
                    (float(data[0]) * 0.001, float(data[1]), float(data[2]),
                     float(data[3])))

            # generate higher candles
            for generator in generators:
                if generator.from_tf == 0:
                    candles = generator.generate_from_ticks(
                        self._last_ticks)

                    if candles:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                        self._last_ohlcs[generator.to_tf] += candles

                    # remove consumed ticks
                    self._last_ticks = []
                else:
                    candles = generator.generate_from_candles(
                        self._last_ohlcs[generator.from_tf])

                    if candles:
                        for c in candles:
                            self.store_candle(market_id, generator.to_tf, c)

                        self._last_ohlcs[generator.to_tf] += candles

                    # remove consumed candles
                    self._last_ohlcs[generator.from_tf] = []

            n += 1
            t += 1

            if n == 10000:
                n = 0
                Terminal.inst().info("%i trades for %s..." % (t, market_id))

                # calm down the storage of tick, if parsing is faster
                while Database.inst().num_pending_ticks_storage(
                ) > Fetcher.MAX_PENDING_TICK:
                    time.sleep(Fetcher.TICK_STORAGE_DELAY
                               )  # wait a little before continue

        logger.info("Fetched %i trades for %s" % (t, market_id))

    elif timeframe > 0:
        # OHLC candles path
        for data in self.fetch_candles(market_id, timeframe, from_date,
                                       to_date, None):
            # store (int timestamp ms, str open bid, high bid, low bid, close bid, open ofr, high ofr, low ofr, close ofr, volume)
            Database.inst().store_market_ohlc(
                (self.name, market_id, data[0], int(timeframe), data[1],
                 data[2], data[3], data[4], data[5], data[6], data[7],
                 data[8], data[9]))

            if generators:
                candle = Candle(float(data[0]) * 0.001, timeframe)

                candle.set_bid_ohlc(float(data[1]), float(data[2]),
                                    float(data[3]), float(data[4]))
                candle.set_ofr_ohlc(float(data[5]), float(data[6]),
                                    float(data[7]), float(data[8]))

                candle.set_volume(float(data[9]))
                candle.set_consolidated(True)

                self._last_ohlcs[timeframe].append(candle)

            # generate higher candles
            for generator in generators:
                candles = generator.generate_from_candles(
                    self._last_ohlcs[generator.from_tf])

                if candles:
                    for c in candles:
                        self.store_candle(market_id, generator.to_tf, c)

                    self._last_ohlcs[generator.to_tf].extend(candles)

                # remove consumed candles
                self._last_ohlcs[generator.from_tf] = []

            n += 1
            t += 1

            if n == 1000:
                n = 0
                Terminal.inst().info(
                    "%i candles for %s in %s..." %
                    (t, market_id, timeframe_to_str(timeframe)))

        logger.info("Fetched %i candles for %s in %s" %
                    (t, market_id, timeframe_to_str(timeframe)))
def fetch_market(self, market_id):
    """
    Fetch and cache the market info for market_id. It rarely changes.

    Builds a Market object from the locally cached Kraken instrument data,
    notifies the strategy service, persists the info for backtesting
    (unless read-only) and returns it. Returns None for an unknown market_id.
    """
    instrument = self._instruments.get(market_id)
    market = None

    if instrument:
        market = Market(market_id, instrument['altname'])

        market.is_open = True
        market.expiry = '-'

        # Kraken AssetPairs fields of interest:
        #   wsname = WebSocket pair name (if available), e.g. "XBT/USD"
        #   pair_decimals = scaling decimal places for pair
        #   lot_decimals = scaling decimal places for volume
        #   lot_multiplier = amount to multiply lot volume by to get currency volume

        # "aclass_base":"currency"
        base_asset = instrument['base']  # XXBT
        market.set_base(base_asset, base_asset, instrument['pair_decimals'])

        # "aclass_quote":"currency"
        quote_asset = instrument['quote']  # ZUSD
        market.set_quote(quote_asset, quote_asset, instrument['lot_decimals'])  # 8

        # tick size at the base asset precision
        market.one_pip_means = math.pow(10.0, -instrument['pair_decimals'])  # 1
        market.value_per_pip = 1.0
        market.contract_size = 1.0
        market.lot_size = 1.0

        # "lot":"unit", "lot_multiplier":1
        # "margin_call":80, "margin_stop":40
        # margin_call = margin call level
        # margin_stop = stop-out/liquidation margin level
        leverages = set(instrument.get('leverage_buy', []))
        # BUGFIX: set.intersection() returns a new set and does not mutate in
        # place; the original discarded the result, so leverages available
        # only on the buy side were never filtered against the sell side.
        leverages.intersection_update(set(instrument.get('leverage_sell', [])))

        market.margin_factor = 1.0 / max(leverages) if len(
            leverages) > 0 else 1.0
        market.set_leverages(leverages)

        size_limit = self._size_limits.get(instrument['altname'], {})
        min_size = size_limit.get('min-size', 1.0)

        # limits as (min, max, step) string triplets; "0.0" means unbounded
        size_limits = [str(min_size), "0.0", str(min_size)]
        notional_limits = ["0.0", "0.0", "0.0"]
        price_limits = ["0.0", "0.0", "0.0"]

        market.set_size_limits(float(size_limits[0]), float(size_limits[1]),
                               float(size_limits[2]))
        market.set_price_limits(float(price_limits[0]), float(price_limits[1]),
                                float(price_limits[2]))
        market.set_notional_limits(float(notional_limits[0]), 0.0, 0.0)

        # "lot":"unit"
        market.unit_type = Market.UNIT_AMOUNT
        market.market_type = Market.TYPE_CRYPTO
        market.contract_type = Market.CONTRACT_SPOT

        market.trade = Market.TRADE_ASSET
        if leverages:
            market.trade |= Market.TRADE_MARGIN
            market.trade |= Market.TRADE_FIFO

        # orders capacities
        market.orders = Order.ORDER_LIMIT | Order.ORDER_MARKET | Order.ORDER_STOP | Order.ORDER_TAKE_PROFIT

        # @todo take the first fee tier, but it might depend on the traded
        # volume per 30 days; request the volume window to refine it.
        # "fees":[[0,0.26],[50000,0.24],...], "fees_maker":[[0,0.16],...]
        fees = instrument.get('fees', [])
        fees_maker = instrument.get('fees_maker', [])

        if fees:
            market.taker_fee = round(fees[0][1] * 0.01, 6)
        if fees_maker:
            market.maker_fee = round(fees_maker[0][1] * 0.01, 6)

        if instrument.get('fee_volume_currency'):
            market.fee_currency = instrument['fee_volume_currency']

        if quote_asset != self.BASE_QUOTE:
            # from XXBTZUSD / XXBTZEUR ...
            # @todo derive base_exchange_rate from the ticker of
            # quote_asset+BASE_QUOTE (or its inverse) once tickers are cached.
            pass
        else:
            market.base_exchange_rate = 1.0

        # @todo contract_size / value_per_pip from mid price
        # volume 24h: not available here

        # notify for strategy
        self.service.notify(Signal.SIGNAL_MARKET_INFO_DATA, self.name,
                            (market_id, market))

        # store the last market info to be used for backtesting
        if not self._read_only:
            Database.inst().store_market_info((
                self.name, market.market_id, market.symbol,
                market.market_type, market.unit_type, market.contract_type,  # type
                market.trade, market.orders,  # type
                market.base, market.base_display, market.base_precision,  # base
                market.quote, market.quote_display, market.quote_precision,  # quote
                market.expiry, int(market.last_update_time * 1000.0),  # expiry, timestamp
                str(market.lot_size), str(market.contract_size), str(market.base_exchange_rate),
                str(market.value_per_pip), str(market.one_pip_means), '-',
                *size_limits,
                *notional_limits,
                *price_limits,
                str(market.maker_fee), str(market.taker_fee),
                str(market.maker_commission), str(market.taker_commission)))

    return market
def __enter__(self):
    """Open a connection and hand back a cursor for the body of the 'with'."""
    connection = Database.get_connection()
    cursor = connection.cursor()
    # Keep both on the instance so __exit__ can clean them up later.
    self.connection = connection
    self.cursor = cursor
    return cursor
def fetch_market(self, epic):
    """
    Fetch and cache it. It rarely changes, except for base exchange rate,
    so assume it once for all.

    Builds a Market from the IG market details for epic, stores it for
    backtesting (unless read-only), notifies the strategy and returns it.
    """
    market_info = self._connector.market(epic)

    instrument = market_info['instrument']
    snapshot = market_info['snapshot']
    dealing_rules = market_info['dealingRules']

    market = Market(epic, instrument['marketId'])

    # cannot interpret this value because IG want it as it is
    market.expiry = instrument['expiry']

    # not perfect but IG does not provides this information: strip the quote
    # currency suffix from the marketId to guess the base symbol
    if instrument["marketId"].endswith(
            instrument["currencies"][0]["name"]):
        base_symbol = instrument[
            "marketId"][:-len(instrument["currencies"][0]["name"])]
    else:
        base_symbol = instrument["marketId"]

    market.base_exchange_rate = instrument['currencies'][0][
        'baseExchangeRate']  # "exchangeRate": 0.77

    market.one_pip_means = float(instrument['onePipMeans'].split(' ')[0])
    market.value_per_pip = float(instrument['valueOfOnePip'])
    market.contract_size = float(instrument['contractSize'])
    market.lot_size = float(instrument['lotSize'])

    # @todo how to determine base precision ?
    market.set_base(base_symbol, base_symbol)
    market.set_quote(instrument["currencies"][0]["name"],
                     instrument["currencies"][0]['symbol'],
                     decimal_place(market.one_pip_means))  # "USD", "$"

    if snapshot:
        market.is_open = snapshot["marketStatus"] == "TRADEABLE"
        market.bid = snapshot['bid']
        market.ofr = snapshot['offer']

    # "marginFactorUnit": "PERCENTAGE" not avalaible if market is down
    if instrument.get('marginFactor') and market.is_open:
        market.margin_factor = float(instrument['marginFactor'])
        margin_factor = instrument['marginFactor']
    elif instrument.get('margin') and market.is_open:
        market.margin_factor = 0.1 / float(instrument['margin'])
        margin_factor = str(market.margin_factor)
    else:
        # we don't want this when market is down because it could overwrite
        # the previous stored value
        margin_factor = None

    # map IG unit to internal unit type
    if instrument['unit'] == 'AMOUNT':
        market.unit_type = Market.UNIT_AMOUNT
    elif instrument['unit'] == 'CONTRACTS':
        market.unit_type = Market.UNIT_CONTRACTS
    elif instrument['unit'] == 'SHARES':
        market.unit_type = Market.UNIT_SHARES

    # map IG instrument type; BINARY OPT_* BUNGEE_* are not handled
    if instrument['type'] == 'CURRENCIES':
        market.market_type = Market.TYPE_CURRENCY
    elif instrument['type'] == 'INDICES':
        market.market_type = Market.TYPE_INDICE
    elif instrument['type'] == 'COMMODITIES':
        market.market_type = Market.TYPE_COMMODITY
    elif instrument['type'] == 'SHARES':
        market.market_type = Market.TYPE_STOCK
    elif instrument['type'] == 'RATES':
        market.market_type = Market.TYPE_RATE
    elif instrument['type'] == 'SECTORS':
        market.market_type = Market.TYPE_SECTOR

    market.trade = Market.TRADE_MARGIN
    market.contract_type = Market.CONTRACT_CFD

    # take minDealSize as tick size
    market.set_size_limits(dealing_rules["minDealSize"]["value"], 0.0,
                           dealing_rules["minDealSize"]["value"])
    # @todo there is some limits in contract size
    market.set_notional_limits(0.0, 0.0, 0.0)
    # @todo maybe decimal_place of onePipMeans for tick_size
    market.set_price_limits(0.0, 0.0, 0.0)

    # commission for stocks
    commission = "0.0"  # @todo

    # store the last market info to be used for backtesting
    if not self._read_only:
        Database.inst().store_market_info((
            self.name, epic, market.symbol,
            market.market_type, market.unit_type, market.contract_type,  # type
            market.trade, market.orders,  # type
            market.base, market.base_display, market.base_precision,  # base
            market.quote, market.quote_display, market.quote_precision,  # quote
            market.expiry, int(market.last_update_time * 1000.0),  # expiry, timestamp
            instrument['lotSize'], instrument['contractSize'], str(market.base_exchange_rate),
            instrument['valueOfOnePip'], instrument['onePipMeans'].split(' ')[0],
            margin_factor,
            dealing_rules["minDealSize"]["value"], "0.0", dealing_rules["minDealSize"]["value"],  # size limits
            "0.0", "0.0", "0.0",  # notional limits
            "0.0", "0.0", "0.0",  # price limits
            "0.0", "0.0", commission, commission)  # fees
        )

    # notify for strategy
    self.service.notify(Signal.SIGNAL_MARKET_INFO_DATA, self.name,
                        (epic, market))

    return market
def on_tick_update(self, item_update):
    """
    Streaming tick subscription callback.

    Parses a 'CHART:<market>:TICK' item; missing fields (UTM/BID/OFR/LTV)
    are filled from the last cached tick for that market. Notifies a tick
    signal, updates/notifies per-timeframe OHLC, and optionally stores the
    trade. Errors are logged, never propagated to the streaming layer.
    """
    name = item_update.get('name', '').split(':')

    try:
        if len(name) == 3 and name[0] == 'CHART' and name[2] == 'TICK':
            values = item_update['values']
            market_id = name[1]

            bid = None
            ofr = None
            utm = None
            ltv = None

            # take each field from the update when present (truthy),
            # otherwise fall back to the cached value for this market
            if values['UTM']:
                utm = values['UTM']
            elif market_id in self._cached_tick:
                utm = self._cached_tick[market_id][0]

            if values['BID']:
                bid = values['BID']
            elif market_id in self._cached_tick:
                bid = self._cached_tick[market_id][1]

            if values['OFR']:
                ofr = values['OFR']
            elif market_id in self._cached_tick:
                ofr = self._cached_tick[market_id][2]

            if values['LTV']:
                ltv = values['LTV']
            elif market_id in self._cached_tick:
                ltv = self._cached_tick[market_id][3]

            if utm is None or bid is None or ofr is None:
                # need all informations, wait the next one
                return

            # cache for when a value is not defined
            self._cached_tick[market_id] = (utm, bid, ofr, ltv)

            # (timestamp seconds, bid, ofr, last traded volume)
            tick = (float(utm) * 0.001, float(bid), float(ofr),
                    float(ltv or "0"))

            # keep last complete tick values for ohlc generation
            self._last_tick[market_id] = tick

            self.service.notify(Signal.SIGNAL_TICK_DATA, self.name,
                                (market_id, tick))

            for tf in Watcher.STORED_TIMEFRAMES:
                # generate candle per each tf, under lock because the OHLC
                # state is shared
                self.lock()
                candle = self.update_ohlc(market_id, tf, tick[0], tick[1],
                                          tick[2], tick[3])
                self.unlock()

                if candle is not None:
                    self.service.notify(Signal.SIGNAL_CANDLE_DATA, self.name,
                                        (market_id, candle))

            # disabled for now
            if not self._read_only and self._store_trade:
                Database.inst().store_market_trade(
                    (self.name, market_id, int(utm), bid, ofr, ltv or 0))

    except Exception as e:
        logger.debug(repr(e))
        error_logger.error(traceback.format_exc())
import shutil
from functools import partial
from PyQt5.QtWidgets import *
from PyQt5.QtGui import *
from PyQt5 import QtCore
from interface.perfilProfessorWindow import *
from database.database import Database
import homeProfessor

# Module-level Qt setup: the "perfil professor" window is built at import time.
# NOTE(review): sys is used below but not imported in this visible chunk --
# presumably imported earlier or provided by a star import; confirm.
app = QtWidgets.QApplication(sys.argv)
MainWindow = QtWidgets.QMainWindow()
tela = Ui_perfilProfessor()
tela.setupUi(MainWindow)
database_professor = Database()


def voltarHome(id):
    """Close this window and go back to the professor home screen."""
    MainWindow.close()
    homeProfessor.startHomeProfessor(id)


def verMaterias(professor):
    """Show a modal message box listing the professor's subjects."""
    msg = QMessageBox()
    msg.setWindowTitle("Matérias")
    msg.setIcon(QMessageBox.Information)
    msg.setText("\n".join(professor.getMateria()))
    msg.show()
    msg.exec_()
def start_engine(): database = Database() size = database.getSize() while size <= maxentries: size = database.getSize() print 'Rows in database: ' + str(size) url = database.getUnvisited() database.update(url) if url is None: break try: finder = LinkFinder(url) if finder.isOkay(): linklist = finder.getLinks() print 'Links found: ' + str(len(linklist)) for link in linklist: if len(link) > 255: linklist.remove(link) try: database.addNew(linklist) except UnicodeEncodeError: print 'Link encoding error' else: database.markBAD(url) print 'Marked link as bad, reason: ' + finder.reason() finder.close() except socket.error: print 'timeout error' database.markBAD(url) except LookupError: print 'lookup error' if die[0]: break database.close()
import urllib2
import re
import time
import sys
import traceback
sys.path.append('.')
from database.database import Database

# init database: make sure the pictures table exists before crawling
# NOTE(review): 'auto_increment' is MySQL syntax -- confirm the backend.
cursor = Database.cursor()
cursor.execute('''create table if not exists pictures (
        id integer primary key auto_increment,
        url text not null,
        status char(10) not null,
        collection_id integer not null,
        filename text,
        foreign key (collection_id) references collections(id)
    )'''
)
Database.commit()


def get_html(url_target, socket_timeout):
    """Download url_target and return the raw response body (Python 2 urllib2)."""
    data = urllib2.urlopen(url = url_target, timeout = socket_timeout)
    return data.read()


def get_pictures(html):
    """Locate the single post-content div in html; raise if not exactly one."""
    pictures_content_reg = re.compile('<div class="postContent">.+?</div>', re.S)
    picture_content = re.findall(pictures_content_reg, html)
    if (len(picture_content) != 1):
        raise Exception('No post content found')
def save_to_mongo(self):
    """Persist this object's JSON representation into the 'users' collection."""
    payload = self.json()
    Database.insert("users", payload)
from PyQt5.QtGui import * #from interface.escolherMateriasDialog import * from interface.escolherMateriasDialog import * from database.database import Database #from interface.cadastroProfessorWindow import * from interface.cadastroProfessorWindow import * #from endereco import * from endereco import * #from professor import * from professor import * import homeAdm import homeServidor database = Database() app = QtWidgets.QApplication(sys.argv) MainWindow = QtWidgets.QMainWindow() tela = Ui_cadastroProfessor() tela.setupUi(MainWindow) Dialog = QtWidgets.QDialog() dialog = Ui_Dialog() dialog.setupUi(Dialog) def voltarHome(id, type): MainWindow.close() if type == "administrador":
#!/usr/bin/python # __author__ = 'jasonsheh' # -*- coding:utf-8 -*- from flask import Flask, render_template, request, redirect from database.database import Database from database.rules import Rules from lib.cel import port_scan, domain_scan, sendir_scan, site_scan app = Flask(__name__) max_domain = Database().count('subdomain') max_port = Database().count('port') max_sendir = Database().count('sendir') max_fingerprint = Rules().count('application') max_task = Database().count('task') max_vul = Database().count('vul') @app.route('/') @app.route('/index') @app.route('/index/<int:page>') def index(page=1): tasks = Database().select_task(page) return render_template('index.html', page=page, max_page=max_task, tasks=tasks, max_domain=max_domain, max_port=max_port, max_sendir=max_sendir,
def __del__(self):
    """
    Destructor
    """
    # Delegate finalization to the Database base class, mirroring the
    # explicit Database.__init__(self, ...) call in the constructor.
    Database.__del__(self)
import urllib2 import re import time import sys import os import hashlib import traceback sys.path.append('.') from database.database import Database if (len(sys.argv) != 2): print 'should get base path' quit() base_path = sys.argv[1] cursor = Database.cursor() cursor.execute('select id, url from pictures where status = "%s"' % 'new') row = cursor.fetchone() socket_timeout = 5 while row: try: update_cursor = Database.cursor() update_cursor.execute('update pictures set status = "%s" where id = %d' % ('waiting', row[0])) Database.commit() print row[1] picture_data = urllib2.urlopen(url = row[1], timeout = socket_timeout).read() sha1sum = hashlib.sha1(picture_data).hexdigest() filename = sha1sum + '.' + row[1].split('.')[-1] print filename picture_file = open(base_path + '/' + filename, 'wb')
def install_market_data(self, market_id, market_data):
    """
    Install a market info data into the database.

    :param market_id: Unique market identifier.
    :param market_data: dict of market properties (symbol, pip values,
        base/quote, unit/type/contract, trade flags, order types, limits,
        fees and commissions). Missing keys fall back to safe defaults.
    """
    market = Market(market_id, market_data.get('symbol', market_id))

    market.base_exchange_rate = market_data.get('base-exchange-rate', 1.0)
    market.one_pip_means = market_data.get('one-pip-means', 1.0)
    market.value_per_pip = market_data.get('value-per-pip', 1.0)
    market.contract_size = market_data.get('contract-size', 1.0)
    market.lot_size = market_data.get('lot-size', 1.0)

    market.expiry = market_data.get('expiry', '-')

    base = market_data.get('base', {})
    market.set_base(base.get('symbol', 'USD'), base.get('display', '$'),
                    base.get('precision', 2))

    quote = market_data.get('quote', {})
    # BUGFIX: this was a second set_base() call, so the quote side of the
    # market was never configured.
    market.set_quote(quote.get('symbol', 'USD'), quote.get('display', '$'),
                     quote.get('precision', 2))

    market.is_open = True

    market.margin_factor = market_data.get('margin-factor', 1.0)

    # unit of the traded quantity (default: contracts)
    unit_types = {
        'amount': Market.UNIT_AMOUNT,
        'contracts': Market.UNIT_CONTRACTS,
        'shares': Market.UNIT_SHARES,
    }
    market.unit_type = unit_types.get(market_data.get('unit'),
                                      Market.UNIT_CONTRACTS)

    # kind of market (default: unknown)
    market_types = {
        'currency': Market.TYPE_CURRENCY,
        'indice': Market.TYPE_INDICE,
        'commodity': Market.TYPE_COMMODITY,
        'stock': Market.TYPE_STOCK,
        'rate': Market.TYPE_RATE,
        'sector': Market.TYPE_SECTOR,
    }
    market.market_type = market_types.get(market_data.get('type'),
                                          Market.TYPE_UNKNOWN)

    # kind of contract (default: spot)
    contract_types = {
        'spot': Market.CONTRACT_SPOT,
        'cfd': Market.CONTRACT_CFD,
        'futur': Market.CONTRACT_FUTUR,
        'option': Market.CONTRACT_OPTION,
        'warrant': Market.CONTRACT_WARRANT,
        'turbo': Market.CONTRACT_TURBO,
    }
    market.contract_type = contract_types.get(market_data.get('contract'),
                                              Market.CONTRACT_SPOT)

    # trade capacities
    market.trade = 0
    if market_data.get('spot', False):
        market.trade |= Market.TRADE_BUY_SELL
    if market_data.get('margin', False):
        market.trade |= Market.TRADE_MARGIN
    if market_data.get('indivisible', False):
        market.trade |= Market.TRADE_IND_MARGIN
    if market_data.get('fifo', False):
        market.trade |= Market.TRADE_FIFO
    if market_data.get('position', False):
        market.trade |= Market.TRADE_POSITION

    # supported order types
    orders = market_data.get(
        'orders', ('market', 'limit', 'stop-market', 'stop-limit',
                   'take-profit-market', 'take-profit-limit'))

    order_flags = {
        'market': Market.ORDER_MARKET,
        'limit': Market.ORDER_LIMIT,
        'stop-market': Market.ORDER_STOP_MARKET,
        'stop-limit': Market.ORDER_STOP_LIMIT,
        'take-profit-market': Market.ORDER_TAKE_PROFIT_MARKET,
        'take-profit-limit': Market.ORDER_TAKE_PROFIT_LIMIT,
        'one-cancel-other': Market.ORDER_ONE_CANCEL_OTHER,
    }
    for order_name, order_flag in order_flags.items():
        if order_name in orders:
            market.orders |= order_flag

    size_limits = market_data.get('size-limits', {
        'min': 0.0,
        'max': 0.0,
        'step': 0.0
    })
    market.set_size_limits(size_limits.get('min', 0.0),
                           size_limits.get('max', 0.0),
                           size_limits.get('step', 0.0))

    notional_limits = market_data.get('notional-limits', {
        'min': 0.0,
        'max': 0.0,
        'step': 0.0
    })
    # BUGFIX: this called set_size_limits(), silently clobbering the size
    # limits set just above and leaving the notional limits unset.
    market.set_notional_limits(notional_limits.get('min', 0.0),
                               notional_limits.get('max', 0.0),
                               notional_limits.get('step', 0.0))

    price_limits = market_data.get('price-limits', {
        'min': 0.0,
        'max': 0.0,
        'step': 0.0
    })
    # BUGFIX: same here -- was set_size_limits(); price limits were never set.
    market.set_price_limits(price_limits.get('min', 0.0),
                            price_limits.get('max', 0.0),
                            price_limits.get('step', 0.0))

    # fees & commissions
    fees = market_data.get('fees', {})
    market.maker_fee = fees.get('maker', 0.0)
    market.taker_fee = fees.get('taker', 0.0)

    commissions = market_data.get('commissions', {})
    market.maker_commission = commissions.get('maker', 0.0)
    # BUGFIX: taker commission previously read the 'maker' key.
    market.taker_commission = commissions.get('taker', 0.0)

    # store the last market info to be used for backtesting
    Database.inst().store_market_info((
        self.name, market.market_id, market.symbol,
        market.market_type, market.unit_type, market.contract_type,  # type
        market.trade, market.orders,  # type
        market.base, market.base_display, market.base_precision,  # base
        market.quote, market.quote_display, market.quote_precision,  # quote
        market.expiry, int(market.last_update_time * 1000.0),  # expiry, timestamp
        str(market.lot_size), str(market.contract_size), str(market.base_exchange_rate),
        str(market.value_per_pip), str(market.one_pip_means), str(market.margin_factor),
        str(market.min_size), str(market.max_size), str(market.step_size),  # size limits
        str(market.min_notional), str(market.max_notional), str(market.step_notional),  # notional limits
        str(market.min_price), str(market.max_price), str(market.tick_price),  # price limits
        str(market.maker_fee), str(market.taker_fee),
        str(market.maker_commission), str(market.taker_commission))  # fees
    )
self.register_fqfiles() self.prepare_SGElist() if self.__array: self.build_ArrayJob() self.writeArrayFiles() self.writeSGE() self.startSGE() sgelist = property(get_sgelist) if __name__ == '__main__': db = None parser = Parser() parser.parse() parser.start_logging() if parser.log: logfile = '{0}{1}'.format(Information.LOGDIRECTORY, getLogfile(basename(getframe().f_code.co_filename.rstrip('.py')))) mainlog = MainLogger(logfile) parser.main() if parser.query: db = Database(Information.DB_HOST, Information.DB_USER, Information.DB_PW, Information.DB) if parser.log: db.start_logging() db.setConnection() trinst = TransportFastq(parser.log, parser.files, parser.output, parser.zip, parser.array, parser.merge, parser.undet, parser.query, db) trinst.main() if parser.query: db.closeConnection()
class TestDatabase(unittest.TestCase):
    """Integration tests for the Database helper (users and ingredients)."""

    def setUp(self):
        """Start every test from an initialized, empty database."""
        initialize_database()
        self.database = Database()
        self.database.delete_all()

    def test_add_user(self):
        """Added users are retrievable; unknown users are not."""
        db = self.database
        db.add_user('testi', 'salis')
        db.add_user('testi123', 'salis')

        self.assertTrue(db.get_user('testi'))
        self.assertTrue(db.get_user('testi123'))
        self.assertFalse(db.get_user('esimerkki'))

    def test_find_all_users_ingredients(self):
        """Ingredient listing is scoped to the requesting user."""
        db = self.database
        db.insert_a_new_ingredient(1619778769, 'tomaatti', 1619778769, 'testi')
        db.insert_a_new_ingredient(1619778769, 'omena', 1619778769, 'testi')
        db.insert_a_new_ingredient(1619778769, 'omena', 1619778769, 'testi123')

        self.assertEqual(len(db.get_all_ingredients_by_a_user('testi')), 2)
        self.assertEqual(len(db.get_all_ingredients_by_a_user('testi123')), 1)

    def test_mark_ingredient_as_eaten(self):
        """Marking eaten keeps the row but sets the flag in column index 4."""
        db = self.database
        db.insert_a_new_ingredient(1619778769, 'tomaatti', 1619778769, 'testi')
        db.mark_ingredient_as_eaten('testi', 1)

        rows = db.get_all_ingredients_by_a_user('testi')
        self.assertEqual(len(rows), 1)
        self.assertEqual(rows[0][4], 1)

    def test_find_ingredient(self):
        """find_ingredient matches only an existing (user, id) pair."""
        db = self.database
        db.insert_a_new_ingredient(1619778769, 'tomaatti', 1619778769, 'testi')

        self.assertTrue(db.find_ingredient('testi', 1))
        self.assertFalse(db.find_ingredient('testi', 2))

    def tearDown(self):
        """Leave the database empty for the next test."""
        self.database.delete_all()
def delete(id, mode):
    """Remove the record identified by id, using the given deletion mode."""
    db = Database()
    db.delete(id, mode)
def application(argv):
    """SIIS process entry point.

    Parses the command line, runs a one-shot tool (fetcher, binarizer,
    optimizer, rebuilder, exporter, importer, or a named tool) when one is
    requested, otherwise boots every service (watchdog, notifier, view,
    database, watcher, trader, strategy, monitor) and runs the interactive
    terminal main loop until the user quits with :q/:quit.

    :param argv: full process argument vector; argv[0] is the program name and
        argv[1] is expected to be the identity name when one is given.
    """
    fix_thread_set_name()

    # init terminal displayer
    Terminal()

    options = {
        'working-path': os.getcwd(),
        'identity': 'real',
        'config-path': './user/config',
        'log-path': './user/log',
        'reports-path': './user/reports',
        'markets-path': './user/markets',
        'log-name': 'siis.log',
    }

    # create initial siis data structure if necessary
    install(options)

    siis_log = SiisLog(options, Terminal().inst().style())
    siis_logger = logging.getLogger('siis')
    traceback_logger = logging.getLogger('siis.traceback')

    # parse process command line
    if len(argv) > 1:
        options['livemode'] = True

        # utc or local datetime ?
        for arg in argv:
            if arg.startswith('--'):
                if arg == '--paper-mode':
                    # livemode but in paper-mode
                    options['paper-mode'] = True
                elif arg == '--fetch':
                    # use the fetcher
                    options['tool'] = "fetcher"
                elif arg == '--binarize':
                    # use the binarizer
                    # NOTE(review): stores "binarize" but the dispatch below
                    # checks for "binarizer" — confirm which is intended.
                    options['tool'] = "binarize"
                elif arg == '--optimizer':
                    # use the optimizer
                    options['tool'] = "optimizer"
                elif arg == '--sync':
                    # use the syncer
                    options['tool'] = "syncer"
                elif arg == '--rebuild':
                    # use the rebuilder
                    options['tool'] = "rebuilder"
                elif arg == '--export':
                    # use the exporter
                    options['tool'] = "exporter"
                elif arg == '--import':
                    # use the importer
                    options['tool'] = "importer"
                elif arg == '--clean':
                    # use the cleaner
                    options['tool'] = "cleaner"
                elif arg.startswith("--tool="):
                    # use a named tool
                    options['tool'] = arg.split('=')[1]
                elif arg == '--no-conf':
                    options['no-conf'] = True
                elif arg == '--zip':
                    options['zip'] = True
                elif arg == '--install-market':
                    options['install-market'] = True
                elif arg == '--initial-fetch':
                    # do the initial OHLC fetch for watchers
                    options['initial-fetch'] = True
                elif arg == '--backtest':
                    # backtest mean always paper-mode
                    options['paper-mode'] = True
                    options['backtesting'] = True
                elif arg.startswith('--timestep='):
                    # backesting timestep, default is 60 second
                    options['timestep'] = float(arg.split('=')[1])
                elif arg.startswith('--time-factor='):
                    # backtesting time-factor
                    options['time-factor'] = float(arg.split('=')[1])
                elif arg.startswith('--filename='):
                    # used with import or export
                    options['filename'] = arg.split('=')[1]
                elif arg.startswith('--from='):
                    # if backtest from date and tools
                    options['from'] = parse_datetime(arg.split('=')[1])
                    if not options['from']:
                        Terminal.inst().error("Invalid 'from' datetime format")
                        sys.exit(-1)
                elif arg.startswith('--to='):
                    # if backtest to date and tools
                    options['to'] = parse_datetime(arg.split('=')[1])
                    if not options['to']:
                        Terminal.inst().error("Invalid 'to' datetime format")
                        sys.exit(-1)
                elif arg.startswith('--last='):
                    # fetch the last n data history
                    options['last'] = int(arg.split('=')[1])
                    if options['last'] <= 0:
                        Terminal.inst().error(
                            "Invalid 'last' value. Must be at least 1")
                        sys.exit(-1)
                elif arg.startswith('--market='):
                    # fetch, binarize, optimize the data history for this market
                    options['market'] = arg.split('=')[1]
                elif arg.startswith('--spec='):
                    # fetcher data history option
                    options['option'] = arg.split('=')[1]
                elif arg.startswith('--broker='):
                    # broker name for fetcher, watcher, optimize, binarize
                    options['broker'] = arg.split('=')[1]
                elif arg.startswith('--timeframe='):
                    # fetch, binarize, optimize base timeframe
                    options['timeframe'] = arg.split('=')[1]
                elif arg.startswith('--cascaded='):
                    # fetch cascaded ohlc generation
                    options['cascaded'] = arg.split('=')[1]
                elif arg.startswith('--target='):
                    # target ohlc generation
                    options['target'] = arg.split('=')[1]
                elif arg == '--watcher-only':
                    # feed only with live data (not compatible with --read-only)
                    options['watcher-only'] = True
                elif arg == '--read-only':
                    # does not write to the database (not compatible with --watcher-only)
                    options['read-only'] = True
                elif arg.startswith('--profile='):
                    # appliances profile name
                    options['profile'] = arg.split('=')[1]
                elif arg == '--version':
                    Terminal.inst().info('%s %s' % (APP_SHORT_NAME, '.'.join(
                        [str(x) for x in APP_VERSION])))
                    sys.exit(0)
                elif arg in ('--help', '-h'):
                    # BUGFIX: was `arg == '--help' or '-h'`, which is always
                    # truthy and swallowed every unrecognized --option.
                    display_cli_help()
                    sys.exit(0)
            else:
                options['identity'] = argv[1]

    # watcher-only read-only mutual exclusion
    if options.get('watcher-only') and options.get('read-only'):
        Terminal.inst().error(
            "Options --watcher-only and --read-only are mutually exclusive !"
        )
        sys.exit(-1)

    # backtesting
    if options.get('backtesting', False):
        if options.get('from') is None or options.get('to') is None:
            del options['backtesting']
            Terminal.inst().error(
                "Backtesting need from= and to= date time")
            sys.exit(-1)

    #
    # binarizer mode
    #

    if options.get('tool') == "binarizer":
        if options.get('market') and options.get('from') and options.get(
                'to') and options.get('broker'):
            from tools.binarizer import do_binarizer
            do_binarizer(options)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # fetcher mode
    #

    if options.get('tool') == "fetcher":
        if options.get('market') and options.get('broker'):
            from tools.fetcher import do_fetcher
            do_fetcher(options)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # optimizer mode
    #

    if options.get('tool') == "optimizer":
        if options.get('market') and options.get('from') and options.get(
                'broker'):
            from tools.optimizer import do_optimizer
            do_optimizer(options)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # rebuilder mode
    #

    if options.get('tool') == "rebuilder":
        if options.get('market') and options.get('from') and options.get(
                'broker') and options.get('timeframe'):
            from tools.rebuilder import do_rebuilder
            do_rebuilder(options)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # exporter mode
    #

    if options.get('tool') == "exporter":
        if options.get('market') and options.get('from') and options.get(
                'broker') and options.get('filename'):
            from tools.exporter import do_exporter
            do_exporter(options)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # importer mode
    #

    if options.get('tool') == "importer":
        if options.get('filename'):
            from tools.importer import do_importer
            do_importer(options)
        else:
            display_cli_help()
        sys.exit(0)

    #
    # tool mode
    #

    if options.get('tool'):
        ToolClazz = Tool.load_tool(options.get('tool'))
        if ToolClazz:
            if ToolClazz.need_identity():
                if options['identity'].startswith('-'):
                    Terminal.inst().error(
                        "First option must be the identity name")
                    Terminal.inst().flush()
                    sys.exit(-1)

            tool = ToolClazz(options)
            if not tool.check_options(options):
                display_cli_help()
                sys.exit(-1)

            if ToolClazz.need_identity():
                Terminal.inst().info(
                    "Starting SIIS %s using %s identity..." %
                    (options.get('tool'), options['identity']))
            else:
                Terminal.inst().info("Starting SIIS %s..." %
                                     options.get('tool'))
            Terminal.inst().flush()

            tool.execute(options)

            Terminal.inst().info(
                "%s done!" %
                (ToolClazz.alias() or options.get('tool')).capitalize())
            Terminal.inst().flush()
            Terminal.terminate()
            sys.exit(0)
        else:
            sys.exit(-1)

    if options['identity'].startswith('-'):
        Terminal.inst().error("First option must be the identity name")

    #
    # normal mode
    #

    Terminal.inst().info("Starting SIIS using %s identity..." %
                         options['identity'])
    Terminal.inst().action("- type ':q<Enter> or :quit<Enter>' to terminate")
    Terminal.inst().action(
        "- type ':h<Enter> or :help<Enter>' to display help")
    Terminal.inst().flush()

    if options.get('backtesting'):
        Terminal.inst().notice("Process a backtesting.")
    if options.get('paper-mode'):
        Terminal.inst().notice("- Using paper-mode trader.")
    else:
        Terminal.inst().notice("- Using live-mode trader.")

    signal.signal(signal.SIGINT, signal_handler)

    #
    # application
    #

    watchdog_service = WatchdogService(options)
    watchdog_service.start(options)

    # application services
    view_service = None
    notifier_service = None
    watcher_service = None
    trader_service = None
    strategy_service = None

    # monitoring service
    Terminal.inst().info("Starting monitor service...")
    monitor_service = MonitorService(options)

    # notifier service
    try:
        notifier_service = NotifierService(options)
        notifier_service.start(options)
    except Exception as e:
        Terminal.inst().error(str(e))
        terminate(watchdog_service, watcher_service, trader_service,
                  strategy_service, monitor_service, view_service,
                  notifier_service)
        sys.exit(-1)

    # view service
    try:
        view_service = ViewService(options)
        watchdog_service.add_service(view_service)
    except Exception as e:
        Terminal.inst().error(str(e))
        terminate(watchdog_service, watcher_service, trader_service,
                  strategy_service, monitor_service, view_service,
                  notifier_service)
        sys.exit(-1)

    # database manager
    try:
        Database.create(options)
        Database.inst().setup(options)
    except Exception as e:
        Terminal.inst().error(str(e))
        terminate(watchdog_service, watcher_service, trader_service,
                  strategy_service, monitor_service, view_service,
                  notifier_service)
        sys.exit(-1)

    # watcher service
    Terminal.inst().info("Starting watcher's service...")
    try:
        watcher_service = WatcherService(options)
        watcher_service.start(options)
        watchdog_service.add_service(watcher_service)
    except Exception as e:
        Terminal.inst().error(str(e))
        terminate(watchdog_service, watcher_service, trader_service,
                  strategy_service, monitor_service, view_service,
                  notifier_service)
        sys.exit(-1)

    # trader service
    Terminal.inst().info("Starting trader's service...")
    try:
        trader_service = TraderService(watcher_service, monitor_service,
                                       options)
        trader_service.start(options)
        watchdog_service.add_service(trader_service)
    except Exception as e:
        Terminal.inst().error(str(e))
        terminate(watchdog_service, watcher_service, trader_service,
                  strategy_service, monitor_service, view_service,
                  notifier_service)
        sys.exit(-1)

    # want to display desktop notification and update views
    watcher_service.add_listener(view_service)

    # want to display desktop notification and update views
    trader_service.add_listener(view_service)

    # trader service listen to watcher service and update views
    watcher_service.add_listener(trader_service)

    # strategy service
    Terminal.inst().info("Starting strategy's service...")
    try:
        strategy_service = StrategyService(watcher_service, trader_service,
                                           monitor_service, options)
        strategy_service.start(options)
        watchdog_service.add_service(strategy_service)
    except Exception as e:
        Terminal.inst().error(str(e))
        terminate(watchdog_service, watcher_service, trader_service,
                  strategy_service, monitor_service, view_service,
                  notifier_service)
        sys.exit(-1)

    # wan't to be notifier of system errors
    watchdog_service.add_listener(notifier_service)

    # strategy service listen to watcher service
    watcher_service.add_listener(strategy_service)

    # strategy service listen to trader service
    trader_service.add_listener(strategy_service)

    # want to display desktop notification, update view and notify on discord
    strategy_service.add_listener(notifier_service)
    strategy_service.add_listener(view_service)

    # want signal and important notifications
    notifier_service.set_strategy_service(strategy_service)
    notifier_service.set_trader_service(trader_service)

    # register terminal commands
    commands_handler = CommandsHandler()
    commands_handler.init(options)

    # cli commands registration
    register_general_commands(commands_handler)
    register_trading_commands(commands_handler, trader_service,
                              strategy_service, monitor_service,
                              notifier_service)
    register_region_commands(commands_handler, strategy_service)

    # setup and start the monitor service
    monitor_service.setup(watcher_service, trader_service, strategy_service)
    try:
        monitor_service.start()
        watchdog_service.add_service(monitor_service)
    except Exception as e:
        Terminal.inst().error(str(e))
        terminate(watchdog_service, watcher_service, trader_service,
                  strategy_service, monitor_service, view_service,
                  notifier_service)
        sys.exit(-1)

    Terminal.inst().message("Running main loop...")
    Terminal.inst().upgrade()
    Terminal.inst().message("Steady...", view='notice')

    if view_service:
        # setup the default views
        try:
            setup_default_views(view_service, watcher_service, trader_service,
                                strategy_service)
        except Exception as e:
            Terminal.inst().error(str(e))
            terminate(watchdog_service, watcher_service, trader_service,
                      strategy_service, monitor_service, view_service,
                      notifier_service)
            sys.exit(-1)

    display_welcome()

    LOOP_SLEEP = 0.016  # in second
    MAX_CMD_ALIVE = 15  # in second

    running = True
    value = None
    value_changed = False
    command_timeout = 0
    prev_timestamp = 0

    try:
        while running:
            # keyboard input commands
            try:
                c = Terminal.inst().read()
                key = Terminal.inst().key()

                if c:
                    # split the commande line
                    args = [
                        arg for arg in (value[1:].split(' ') if value
                                        and value.startswith(':') else [])
                        if arg
                    ]
                    if value and value[-1] == ' ':
                        args.append('')

                    # update the current type command
                    commands_handler.process_char(c, args)

                if key:
                    if key == 'KEY_ESCAPE':
                        # cancel command
                        value = None
                        value_changed = True
                        command_timeout = 0

                    # split the commande line
                    args = [
                        arg for arg in (value[1:].split(' ') if value
                                        and value.startswith(':') else [])
                        if arg
                    ]
                    if value and value[-1] == ' ':
                        args.append('')

                    # process on the arguments
                    args = commands_handler.process_key(
                        key, args,
                        Terminal.inst().mode == Terminal.MODE_COMMAND)

                    if args:
                        # regen the updated commande ligne
                        value = ":" + ' '.join(args)
                        value_changed = True
                        command_timeout = 0

                    view_service.on_key_pressed(key)

                    if key == 'KEY_ESCAPE':
                        # was in command me, now in default mode
                        Terminal.inst().set_mode(Terminal.MODE_DEFAULT)

                # @todo move the rest to command_handler
                if c:
                    if value and value[0] == ':':
                        if c == '\b':
                            # backspace, erase last command char
                            value = value[:-1] if value else None
                            value_changed = True
                            command_timeout = time.time()
                        elif c != '\n':
                            # append to the advanced command value
                            value += c
                            value_changed = True
                            command_timeout = time.time()
                        elif c == '\n':
                            result = commands_handler.process_cli(value)
                            command_timeout = 0

                            if not result:
                                # maybe an application level command
                                if value == ':q' or value == ':quit':
                                    running = False
                                elif value.startswith(':x '):
                                    # manually exit position at market
                                    # @todo move as command
                                    target = value[3:]

                                    if target == "all" or target == "ALL":
                                        Terminal.inst().action(
                                            "Send close to market command for any positions",
                                            view='status')
                                        trader_service.command(
                                            Trader.COMMAND_CLOSE_ALL_MARKET,
                                            {})
                                    else:
                                        Terminal.inst().action(
                                            "Send close to market command for position %s"
                                            % (target, ),
                                            view='status')
                                        trader_service.command(
                                            Trader.COMMAND_CLOSE_MARKET,
                                            {'key': target})

                            # clear command value
                            value_changed = True
                            value = None

                            # use default mode
                            Terminal.inst().set_mode(Terminal.MODE_DEFAULT)
                    elif c != '\n':
                        # initial command value
                        value = "" + c
                        value_changed = True
                        command_timeout = time.time()

                        if value and value[0] == ':':
                            # use command mode
                            Terminal.inst().set_mode(Terminal.MODE_COMMAND)

                    if value and value[0] != ':':
                        # direct key

                        # use default mode
                        Terminal.inst().set_mode(Terminal.MODE_DEFAULT)

                        try:
                            result = commands_handler.process_accelerator(key)

                            # @todo convert to Command object accelerator
                            if not result:
                                result = True

                                # display views @todo must be managed by view_service
                                if value == 'C':
                                    Terminal.inst().clear_content()
                                elif value == 'D':
                                    Terminal.inst().switch_view('debug')
                                elif value == 'I':
                                    Terminal.inst().switch_view('content')
                                elif value == 'F':
                                    Terminal.inst().switch_view('strategy')
                                elif value == 'S':
                                    Terminal.inst().switch_view('stats')
                                elif value == 'P':
                                    Terminal.inst().switch_view('perf')
                                elif value == 'X':
                                    Terminal.inst().switch_view('position')
                                elif value == 'O':
                                    Terminal.inst().switch_view('order')
                                elif value == 'T':
                                    Terminal.inst().switch_view('ticker')
                                elif value == 'A':
                                    Terminal.inst().switch_view('account')
                                elif value == 'M':
                                    Terminal.inst().switch_view('market')
                                elif value == 'Q':
                                    Terminal.inst().switch_view('asset')
                                elif value == 'N':
                                    Terminal.inst().switch_view('signal')
                                elif value == '?':
                                    # ping services and workers
                                    watchdog_service.ping(1.0)
                                elif value == ' ':
                                    # a simple mark on the terminal
                                    Terminal.inst().notice(
                                        "Trading time %s" %
                                        (datetime.fromtimestamp(
                                            strategy_service.timestamp).
                                         strftime('%Y-%m-%d %H:%M:%S')),
                                        view='status')
                                elif value == 'a':
                                    if notifier_service:
                                        notifier_service.command(
                                            Notifier.COMMAND_TOGGLE, {
                                                'notifier': "desktop",
                                                'value': "audible"
                                            })
                                elif value == 'n':
                                    if notifier_service:
                                        notifier_service.command(
                                            Notifier.COMMAND_TOGGLE, {
                                                'notifier': "desktop",
                                                'value': "popup"
                                            })
                                elif value == '%':
                                    if view_service:
                                        view_service.toggle_percent()
                                elif value == ',':
                                    if view_service:
                                        view_service.toggle_group()
                                elif value == '!':
                                    if view_service:
                                        view_service.toggle_datetime_format()
                                else:
                                    result = False

                            if result:
                                value = None
                                value_changed = True
                                command_timeout = 0
                        except Exception as e:
                            siis_logger.error(repr(e))
                            traceback_logger.error(traceback.format_exc())
            except IOError:
                pass
            except Exception as e:
                siis_logger.error(repr(e))
                traceback_logger.error(traceback.format_exc())

            # display advanced command only
            if value_changed:
                if value and value.startswith(':'):
                    Terminal.inst().message("Command: %s" % value[1:],
                                            view='command')
                else:
                    Terminal.inst().message("", view='command')

            # clear input if no char hit during the last MAX_CMD_ALIVE
            if value and not value.startswith(':'):
                if (command_timeout > 0) and (time.time() - command_timeout
                                              >= MAX_CMD_ALIVE):
                    value = None
                    value_changed = True
                    Terminal.inst().info("Current typing canceled",
                                         view='status')

            try:
                # display strategy tarding time (update max once per second)
                if strategy_service.timestamp - prev_timestamp >= 1.0:
                    mode = "live"
                    if trader_service.backtesting:
                        mode = "backtesting"
                    elif trader_service.paper_mode:
                        mode = "paper-mode"

                    Terminal.inst().message(
                        "%s - %s" %
                        (mode,
                         datetime.fromtimestamp(strategy_service.timestamp).
                         strftime('%Y-%m-%d %H:%M:%S')),
                        view='notice')
                    prev_timestamp = strategy_service.timestamp

                # synchronous operations here
                watcher_service.sync()
                trader_service.sync()
                strategy_service.sync()

                if monitor_service:
                    monitor_service.sync()
                if view_service:
                    view_service.sync()
                if notifier_service:
                    notifier_service.sync()

                Terminal.inst().update()
            except BaseException as e:
                siis_logger.error(repr(e))
                traceback_logger.error(traceback.format_exc())

            # don't waste CPU time on main thread
            time.sleep(LOOP_SLEEP)
    finally:
        Terminal.inst().restore_term()

    Terminal.inst().info("Terminate...")
    Terminal.inst().flush()

    commands_handler.terminate(options) if commands_handler else None
    commands_handler = None

    # service terminate
    monitor_service.terminate() if monitor_service else None
    strategy_service.terminate() if strategy_service else None
    trader_service.terminate() if trader_service else None
    watcher_service.terminate() if watcher_service else None
    view_service.terminate() if view_service else None
    notifier_service.terminate() if notifier_service else None

    Terminal.inst().info("Saving database...")
    Terminal.inst().flush()
    Database.terminate()

    watchdog_service.terminate() if watchdog_service else None

    Terminal.inst().info("Bye!")
    Terminal.inst().flush()
    Terminal.terminate()
def subdomain(page=1):
    """Render one page of discovered subdomains (15 rows per page)."""
    records = Database().select_subdomain(page)
    last_page = max_domain // 15 + 1
    return render_template('domain.html',
                           page=page,
                           max_page=last_page,
                           domains=records)
#!/usr/bin/python # -- coding: utf-8 -- from database.database import Database # initialise the dabatase expaToken = "b3a3520509a6dcf3b4da55c65b0b0659addd151983fcfaff4aadf29aecff2ae0" trelloToken = "9f9de4286e6a5f627f083dc3ca8fdf6dceae7307a06c5e9dcedda4212491a4e3" trelloKey = "448b14b4374aaa9429f4a8b979936e2b" db = Database(expaToken, trelloToken, trelloKey)
import psycopg2.extras
from database.database import Database
from config import database_config

# The tests run against the dedicated test database from the config.
database_name = database_config.test_database
database = Database(database_name)


def database_name_exists():
    # NOTE(review): this function lacks the `test_` prefix, so pytest will
    # not collect it — rename to `test_...` if it is meant to run.
    assert database is not None


def test_exists():
    # The test database must already exist.
    assert database.exists() is True


def test_create():
    # Creating (or re-creating) the database reports success.
    assert database.create() is True


def test_get_cursor_with_database():
    # A DictCursor is returned when connecting with the database selected.
    cursor = database.cursor(True)
    assert type(cursor) is psycopg2.extras.DictCursor


def test_get_cursor_without_database(:
    cursor = database.cursor(False)
    assert type(cursor) is psycopg2.extras.DictCursor
def Upload_Withdraw_Data(self):
    """Persist this user's current balance to the user_data collection."""
    selector = {'Username': self.username}
    change = {'$set': {'Balance': self.balance}}
    return Database.update_one('user_data', selector, change)
from models.reviewers import Reviewer from database.database import Database from data.constants import dbDetails, dbUser_Krista Database.initialize(**dbDetails, **dbUser_Krista) Reviewer( 'Krista', 'K', '/Users/Krista/Desktop/blindScoring/Krista_K/toScore_KK', '/Volumes/SharedX/Neuro-Leventhal/data/mouseSkilledReaching/blindedScoring/Krista_K/Scored_KK' ).save_to_db() Reviewer( 'Jen', 'M', '/Volumes/SharedX/Neuro-Leventhal/data/mouseSkilledReaching/blindedScoring/Jen_M/toScore_JM', '/Volumes/SharedX/Neuro-Leventhal/data/mouseSkilledReaching/blindedScoring/Jen_M/Scored_JM' ).save_to_db() Reviewer( 'Dan', 'L', '/Volumes/SharedX/Neuro-Leventhal/data/mouseSkilledReaching/blindedScoring/Dan_L/toScore_DL', '/Volumes/SharedX/Neuro-Leventhal/data/mouseSkilledReaching/blindedScoring/Dan_L/Scored_DL' ).save_to_db() Reviewer( 'Alli', 'B', '/Volumes/SharedX/Neuro-Leventhal/data/mouseSkilledReaching/blindedScoring/Alli_B/toScore_AB', '/Volumes/SharedX/Neuro-Leventhal/data/mouseSkilledReaching/blindedScoring/Alli_B/Scored_AB' ).save_to_db() Reviewer(
def get_by_id(cls, _id):
    """Look up a user document by its id and build an instance from it.

    Returns None when no matching document exists.
    """
    record = Database.find_one("users", {'_id': _id})
    if record is None:
        return None
    return cls(**record)
def Generate_Transaction_Id(self):
    """Build a transaction id from the user's database id plus the
    current epoch second (integer part)."""
    record = Database.find_one("user_data", {'Username': self.username})
    now = int(time.time())
    return str(record['_id']) + str(now)
if event.exception: print('The job did not run') else: print('The job completed @ {}'.format(datetime.now())) def missed_job(event): print('The job was missed. Scheduling a new one to run in one minute') run_time = datetime_to_dict(datetime.now() + timedelta(minutes=1)) scheduler.add_job(run_all, "cron", **run_time) scheduler.print_jobs() if __name__ == "__main__": # Database setup database = Database() year = 2019 session = Session(bind=database.engine) sched_tbl = database.get_table_mappings("sched_{}".format(year)) # Get today and the last day of the season so jobs can be scheduled from today through end of season start_date = datetime.date(datetime.now()) end_date = session.query(sched_tbl.start_time).order_by( sched_tbl.start_time.desc()).first()[0] end_date = datetime.date(end_date) # Get every date between now and the last day of the season date = start_date game_dates = [date] while date <= end_date: date = date + timedelta(days=1)
def Upload_History(self):
    """Write this user's in-memory history back to the user_data collection."""
    selector = {'Username': self.username}
    change = {'$set': {'History': self.history}}
    Database.update_one('user_data', selector, change)
class RESTServer(object):
    """ The rest server itself """
    def __init__(self):
        """ Initialise servers, regexes """
        # Log file and database paths come from the English dictionary's
        # "_server_log_" / "_server_db_" entries, resolved under __here__.
        english = Dictionary.get_language(Dictionary.ENGLISH)
        logging.basicConfig(filename=os.path.join(__here__,
                                                  english.get("_server_log_")),
                            level=logging.DEBUG)
        self.__database = Database(os.path.join(__here__,
                                                english.get("_server_db_")))
        self.__server = responder.Responder()

    def __call__(self, env, start_response):
        """ Entry point """
        # WSGI entry: resolve the session user from cookies, route the
        # request either to a static/html page or to a server.<module>
        # handler, and always finalise + close the DB connection.
        database = None
        user = None
        response = responder.Response()
        # First' check for session cookie
        try:
            database = self.__database.connect()
            if "HTTP_COOKIE" in env:
                #args = post_parser.parse(data=env["HTTP_COOKIE"])
                #if args:
                cookies = cookie.Cookie.restore(env["HTTP_COOKIE"])
                for c in cookies:
                    #cookie = cookie.Cookie(client_string = env["HTTP_COOKIE"])
                    # Stop at the first cookie that maps to a live session.
                    user = database.users().find_session(c)
                    if user:
                        logging.debug(user)
                        break
            # # Check there is a valid path
            # path = env["PATH_INFO"]
            # if path[0:len(self.__root)] != self.__root:
            #     logging.error("Can't handle this path!")
            #     response.set_status_code(response.BAD_REQUEST)
            #     return
            path = env["PATH_INFO"][1:].split("/")
            # Special paths: "", "favicon.ico", "sitemap.xml", "robots.txt"
            special = ["favicon.ico", "sitemap.xml", "robots.txt"]
            if len(path) == 1 and path[0] in special:
                # Rewrite e.g. /favicon.ico -> static/favicon.ico
                path.append(path[0])
                path[0] = "static"
            # if not path[1]:
            #     path[1] = "index.html"
            # if path[1].endswith(".html"):
            #     response.set_body(html_format_file("protein", protein_name.upper()), "text/html")
            #     return response
            try:
                logging.debug("Now serving " + str(path))
                if len(path) == 1:
                    # Single-segment path: serve an html page directly.
                    if not path[0]:
                        path[0] = "index.html"
                    if path[0].endswith(".html"):
                        response.set_html(path[0], user)
                else:
                    # NOTE(review): `else` pairing reconstructed from collapsed
                    # source — assumed to pair with `len(path) == 1`, i.e.
                    # multi-segment paths dispatch to a server.<segment>
                    # module whose class is the title-cased segment name.
                    module = importlib.import_module("server." + path[0])
                    factory = getattr(module, path[0].title())
                    server = factory()
                    response = server.serve(env, path, user, database)
                assert response
            finally:
                pass
        except Exception as exc:
            # Any failure is logged; the default Response is returned below.
            logging.error("Exception in main: " + str(exc))
            logging.error(traceback.format_exc())
            logging.error("--------")
        finally:
            # Always finalise the response and release the DB connection.
            response.finalise(user)
            logging.debug(response.get_status_code())
            if database:
                database.close()
        return self.__server.serve(start_response, response)
def Retrieve_History(self):
    """Fetch and return the stored 'History' value for this user."""
    record = Database.find_one('user_data', {'Username': self.username})
    return record['History']
def port(page=1):
    """Render one page of open-port scan results (15 rows per page)."""
    records = Database().select_port(page)
    last_page = max_port // 15 + 1
    return render_template('port.html',
                           page=page,
                           max_page=last_page,
                           ports=records)
#! /usr/bin/python3 from database.database import Database if __name__ == "__main__": db = Database() db.load() db.add_source("RCSB", "pdb", "http://www.rcsb.org/pdb/download/downloadFile.do?fileFormat=pdb&compression=NO&structureId={0}", "RCSB Protein Data Bank") # db.add_source("raspbian", "pdb", "http://192.168.2.128:8080/static/{0}.pdb", "Local test host") #db.add_user("jrenggli", "*****@*****.**", 42) source = db.find_source("rcsb", "pdb") #source = db.find_source("raspbian", "pdb") if not source: raise Exception("Source not found!") proteins = ["2KXR"]#, "3M3N"] for name in proteins: protein = db.get_protein_info(source, name) if not protein: raise Exception("Protein not found!") for mid in protein.get_models(): model = db.load_model(protein, mid) print(len(model.getvalue())) print("main")
def sendir(page=1):
    """Render one page of sensitive-directory scan results (15 rows per page)."""
    # Local renamed from `sendir` (which shadowed this function's own name).
    records = Database().select_sendir(page)
    last_page = max_sendir // 15 + 1
    return render_template('sendir.html',
                           page=page,
                           max_page=last_page,
                           sendirs=records)
def vul(page=1):
    """Render one page of vulnerability scan results (15 rows per page)."""
    records = Database().select_vul(page)
    last_page = max_vul // 15 + 1
    # NOTE(review): the rows are passed to the template as `sendirs` — this
    # looks copy-pasted from the sendir view; confirm vul.html really reads
    # `sendirs` before renaming the keyword.
    return render_template('vul.html',
                           page=page,
                           max_page=last_page,
                           sendirs=records)
lower_limit = read_req - (read_req * 0.1) print libid, read_req, lower_limit, readcount self.show_log('info', "{0}.{1} - Library(ies) with read requested set to 0 - {2}".format(self.__classname, getframe().f_code.co_name, self.__zeroread)) self.show_log('info', "{0}.{1} - Library(ies) with no tracks - {2}".format(self.__classname, getframe().f_code.co_name, self.__zerotrack)) self.show_log('info', "{0}.{1} - Library(ies) with FQ-files not done - {2}".format(self.__classname, getframe().f_code.co_name, self.__zerofqfile)) ''' check library status for 'sequencing' query reads requested for the libraries if reads_requested - 10% is matched -> set libraries to finished/data delivered if data_delivered -> publish reads question: how find out if finished/data delivered -> via project? what if there are more than one project? ''' if __name__ == '__main__': db = Database(Information.DB_HOST, Information.DB_USER, Information.DB_PW, Information.DB) logfile = '{0}{1}'.format(Information.LOGDIRECTORY, getLogfile(basename(getframe().f_code.co_filename.rstrip('.py')))) mainlog = MainLogger(logfile) db_main = DB_Maintain(db) db_main.start_logging() db.start_logging() db.setConnection() db_main.check_library_delivery_state() #db_main.check_library_sequencing_amount() db.commitConnection() db.closeConnection()
def search(self, start=None, end=None, keywords=None, sort=None):
    """Validates the parameters, search the database, and returns the
    results or an error message page.

    :param start: start of the test-date range (used together with end)
    :param end: end of the test-date range
    :param keywords: whitespace-separated words matched (case-insensitive)
        against patient names, diagnosis and description
    :param sort: 'newest' or 'oldest' by test date; required for any search
    """
    template = self.lookup.get_template('search/search_results.mako')
    (u, c) = getUserInfo()
    # Require either a full date range or keywords, always with a sort.
    if not ((start and end and sort) or (keywords and sort)):
        template = self.lookup.get_template('search/search.mako')
        return template.render(username=u, classtype=c, action="noparams")
    conn = Database()
    session = conn.get()
    user = session.query(User).filter(User.user_name == u).one()
    query = session.query(RadiologyRecord)
    # Check if a date has been passed
    if (start and end):
        query = query.filter(RadiologyRecord.test_date >= start,
                             RadiologyRecord.test_date <= end)
    # Check user's security level
    # ('d' = doctor, 'r' = radiologist, 'p' = patient — presumably; an
    # unmatched classtype sees all records. Confirm against getUserInfo.)
    if (c == 'd'):
        '''
        Checks that both the record's doctor id is the same as the
        current user's id and that the user's id is in the
        family_doctor table for the record's patient id
        '''
        query = query.join(
            FamilyDoctor,
            FamilyDoctor.doctor_id == user.person_id).filter(
                RadiologyRecord.doctor_id == user.person_id).filter(
                    FamilyDoctor.patient_id == RadiologyRecord.patient_id)
    elif (c == 'r'):
        # Radiologists only see their own records.
        query = query.filter(
            RadiologyRecord.radiologist_id == user.person_id)
    elif (c == 'p'):
        # Patients only see records about themselves.
        query = query.filter(
            RadiologyRecord.patient_id == user.person_id)
    # Checks sort type
    if (sort == 'newest'):
        query = query.order_by(desc(RadiologyRecord.test_date))
    elif (sort == 'oldest'):
        query = query.order_by(RadiologyRecord.test_date)
    else:
        query = query
    if (keywords):
        '''
        Split keywords into separate words and search for each word as
        a keyword instead of the whole keyword as one string
        '''
        # Every word must match at least one of the four fields (AND of ORs).
        query = query.join(
            Person, RadiologyRecord.patient_id == Person.person_id)
        for word in keywords.split():
            query = query.filter(or_(
                Person.last_name.ilike("%"+word+"%"),
                Person.first_name.ilike("%"+word+"%"),
                RadiologyRecord.diagnosis.ilike("%"+word+"%"),
                RadiologyRecord.description.ilike("%"+word+"%")))
    results = []
    for entry in query.all():
        # Build a dict to the structure that the template expects
        current = {}
        current['id'] = entry.record_id
        current['patient_name'] = entry.patient.last_name + \
            ", " + entry.patient.first_name
        current['doctor_name'] = entry.doctor.last_name + \
            ", " + entry.doctor.first_name
        current['radiologist_name'] = entry.radiologist.last_name + \
            ", " + entry.radiologist.first_name
        current['test_type'] = entry.test_type
        current['prescribing_date'] = entry.prescribing_date
        current['test_date'] = entry.test_date
        current['diagnosis'] = entry.diagnosis
        current['description'] = entry.description
        current['images'] = []
        if (len(entry.pacsimage) > 0):
            # Ship every image in three sizes, base64-encoded for the page.
            for image in entry.pacsimage:
                current['images'].append(
                    [image.image_id,
                     base64.b64encode(image.thumbnail),
                     base64.b64encode(image.regular_size),
                     base64.b64encode(image.full_size)])
        results.append(current)
    if (len(results) > 0):
        conn.close()
        return template.render(username=u, classtype=c, results=results)
    else:
        conn.close()
        template = self.lookup.get_template('search/search.mako')
        return template.render(username=u, classtype=c, action="fail")