Example No. 1
    def __init__(self, log_path=None, job_id=None):
        self._db = Database(name=self.DB_NAME)
        self.phase_name = None

        if job_id:
            # Load in an existing job from database.
            job_row = self._db.get_one_row('jobs', 'id="{0}"'.format(job_id))
            job_dict = query_result_to_dict([job_row], Constants.configs['tables'][self.DB_NAME]['jobs'])[0]

            # Read in job phase.
            phase_row = self._db.query_table('phases', 'job_id="{0}"'.format(job_dict['id']))
            phase_dict = query_result_to_dict(phase_row, Constants.configs['tables'][self.DB_NAME]['phases'])[-1]
            job_dict['phase_name'] = phase_dict['name']

        else:
            # Create new job and add it to the database.
            job_dict = self._create_job_dict(log_path)
            self._db.insert_row_from_dict('jobs', job_dict)

        # Set instance variables.
        self.id = job_dict['id']
        self.name = job_dict['name']
        self.script = job_dict['script']
        self.version = job_dict['version']
        self.log_path = job_dict['log_path']
        self.elapsed_time = job_dict['elapsed_time']
        self.finish_state = job_dict['finish_state']
        self.start_time = job_dict['start_time']
        self.phase_name = job_dict['phase_name']

        # Initiate the job if it has no phase yet.
        if self.phase_name is None:
            self.update_phase(Job.FIRST_PHASE)
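The query_result_to_dict helper is used throughout these examples but never shown; a minimal sketch of what it likely does, assuming each table schema in Constants.configs['tables'] is an ordered list of column names matching the row tuples returned by the database:

def query_result_to_dict(rows, table_schema):
    # Hypothetical sketch: zip each row tuple with the ordered column names.
    return [dict(zip(table_schema, row)) for row in rows]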
Example No. 2
def log():
    # Returns log text for provided job id.

    # Authenticate.
    client_ip = request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
    if client_ip not in Constants.configs['authorised_ip_address']:
        return response(401, 'Client is not authorised.')

    # Initiate database connection.
    db = Database(name=Job.DB_NAME)

    # Extract any parameters from url.
    params = {x: request.args[x] for x in request.args if x is not None}

    if 'id' in params:
        job_row = db.get_one_row('jobs', 'id="{0}"'.format(params['id']))
        if job_row is None:
            return response(401, 'Job does not exist.')
        job_dict = query_result_to_dict(
            [job_row], Constants.configs['tables'][Job.DB_NAME]['jobs'])[0]
        with open(job_dict['log_path'], 'r') as file:
            data = file.read().replace('\n', '<br>')
        return response(200, data)

    return response(401, 'Job id required.')
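The response helper is also assumed rather than shown; a minimal sketch, assuming a Flask application where the helper wraps a payload with an HTTP status code:

from flask import jsonify


def response(status_code, data):
    # Hypothetical sketch: wrap the payload and status code in a Flask JSON response.
    return jsonify({'status': status_code, 'data': data}), status_code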
Example No. 3
def portfolio():
    # Returns portfolio row as dictionary for provided portfolio id.

    # Authenticate.
    client_ip = request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
    if client_ip not in Constants.configs['authorised_ip_address']:
        return response(401, 'Client is not authorised.')

    # Initiate database connection.
    db = Database()

    # Extract any parameters from url.
    params = {x: request.args[x] for x in request.args if x is not None}

    if 'id' in params:
        # Get portfolio data.
        portfolio_row = db.get_one_row('portfolios',
                                       'id="{0}"'.format(params['id']))
        if portfolio_row is None:
            return response(400, 'Portfolio does not exist.')

        # Initiate portfolio object.
        portfolio_obj = Portfolio(params['id'], db)

        # Package portfolio data.
        data = {
            Portfolio.ID: portfolio_obj.id,
            Portfolio.CASH: float_to_str(portfolio_obj.cash),
            Portfolio.VALUE: float_to_str(portfolio_obj.valuate()),
        }

        return response(200, data)

    return response(401, 'Portfolio id required.')
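float_to_str is another assumed helper; a plausible sketch, assuming it renders monetary floats as fixed-precision strings for the JSON payload:

def float_to_str(value, decimal_places=2):
    # Hypothetical sketch: format a float to a fixed number of decimal places.
    return '{0:.{1}f}'.format(value, decimal_places)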
Example No. 4
def authenticate_profile(username, user_hash):
    # Check the provided hash matches the stored profile hash.
    db = Database(name='user_data')
    profile_row = db.get_one_row('profiles', 'username="{0}"'.format(username))
    if not profile_row:
        return None
    profile_dict = query_result_to_dict(
        [profile_row], Constants.configs['tables']['user_data']['profiles'])[0]
    if user_hash == profile_dict['hash']:
        return profile_dict['id']
    else:
        return None
Example No. 5
def is_script_new(script_name):
    db = Database(name=Job.DB_NAME)
    new_threshold = 10
    no_of_runs_on_latest_version = get_run_count(db, script_name, 'latest')
    return no_of_runs_on_latest_version < new_threshold
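get_run_count is not shown in this listing; a rough sketch of what it might do, assuming the jobs table stores one row per run and that 'latest' refers to the currently configured version:

def get_run_count(db, script_name, version):
    # Hypothetical sketch: count job rows for the script at the given version.
    if version == 'latest':
        version = Constants.configs['version']
    rows = db.query_table('jobs', 'script="{0}" AND version="{1}"'.format(script_name, version))
    return len(rows) if rows else 0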
Example No. 6
def tick_capture_job():
    # Authenticate.
    client_ip = request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
    if client_ip not in Constants.configs['authorised_ip_address']:
        return response(401, 'Client is not authorised.')

    # Initiate database connection.
    db = Database(name=Job.DB_NAME)
    start_time_string, job_id = db.get_one_row(
        'jobs', 'script="{}"'.format('tick_capture'), 'max(start_time), id')
    data = {
        'start_time': format_datetime_str(start_time_string),
        'job_id': job_id
    }

    return response(200, data)
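format_datetime_str is assumed as well; a minimal sketch that re-renders a stored Constants.DATETIME_FORMAT string in a more readable form (the exact output format is a guess):

import datetime


def format_datetime_str(datetime_string):
    # Hypothetical sketch: parse the stored string and re-format it for display.
    parsed = datetime.datetime.strptime(datetime_string, Constants.DATETIME_FORMAT)
    return parsed.strftime('%Y-%m-%d %H:%M:%S')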
Example No. 7
def assets():
    # Returns asset rows as a dictionary for a given portfolio id.

    # Authenticate.
    client_ip = request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
    if client_ip not in Constants.configs['authorised_ip_address']:
        return response(401, 'Client is not authorised.')

    # Extract any parameters from url.
    params = {x: request.args[x] for x in request.args if x is not None}

    if 'id' in params:
        db = Database()
        exchange = AlpacaInterface(Constants.configs['API_ID'],
                                   Constants.configs['API_SECRET_KEY'],
                                   paper=True)
        portfolio_obj = Portfolio(params['id'], db)
        portfolio_obj.sync_with_exchange(exchange)
        for asset in portfolio_obj.assets:
            exposure_as_string = float_to_str(
                portfolio_obj.assets[asset][Portfolio.EXPOSURE])
            portfolio_obj.assets[asset][
                Portfolio.EXPOSURE] = exposure_as_string
        return response(200, portfolio_obj.assets)

    return response(401, 'Portfolio id required.')
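As a usage illustration only (the host, port, and route name are assumptions, since no route decorators are shown), the assets endpoint could be exercised with the requests library:

import requests

# Hypothetical usage: the URL and 'id' value are placeholders for illustration.
resp = requests.get('http://localhost:5000/assets', params={'id': 'some_portfolio_id'})
print(resp.status_code, resp.json())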
Example No. 8
def strategies():
    # Authenticate.
    client_ip = request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
    if client_ip not in Constants.configs['authorised_ip_address']:
        return response(401, 'Client is not authorised.')

    # Initiate database connection.
    db = Database()

    # Extract any parameters from url.
    params = {x: request.args[x] for x in request.args if x is not None}

    if 'id' in params:
        strategies_rows = db.query_table('strategies',
                                         'name="{}"'.format(params['id']))
    else:
        strategies_rows = db.query_table('strategies')

    crumb_timestamp = BreadCrumbsDataLoader.TIMESTAMP
    crumb_type = BreadCrumbsDataLoader.TYPE
    crumb_data = BreadCrumbsDataLoader.DATA

    strategy_table_schema = Constants.configs['tables'][
        Constants.DB_NAME]['strategies']
    strategies_as_dict = query_result_to_dict(strategies_rows,
                                              strategy_table_schema)
    for strategy in strategies_as_dict:
        # Get historical valuations from way points.
        bread_crumbs_data_loader = BreadCrumbsDataLoader()
        bread_crumbs_data_loader.load_bread_crumbs_time_series(
            strategy['name'])
        bread_crumbs = bread_crumbs_data_loader.data[
            BreadCrumbsDataLoader.BREAD_CRUMBS_TIME_SERIES][strategy['name']]
        valuation_type = BreadCrumbs.VALUATION
        valuations = [(b[crumb_timestamp], float(b[crumb_data]))
                      for b in bread_crumbs if b[crumb_type] == valuation_type]

        strategy['historical_valuations'] = [[format_datetime_str(v[0]), v[1]]
                                             for v in valuations]
        strategy['pnl'] = float_to_str(
            float(valuations[-1][1] - valuations[0][1]))

    return response(200, strategies_as_dict)
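To make the pnl calculation above concrete, here is a small self-contained sketch; the tuple layout and the concrete values are purely illustrative, not taken from the source:

# Hypothetical bread crumbs: (timestamp, type, data) tuples, illustrative values only.
crumb_timestamp, crumb_type, crumb_data = 0, 1, 2
bread_crumbs = [
    ('20240101120000', 'VALUATION', '1000.0'),
    ('20240101130000', 'VALUATION', '1010.5'),
]
valuations = [(b[crumb_timestamp], float(b[crumb_data]))
              for b in bread_crumbs if b[crumb_type] == 'VALUATION']
pnl = valuations[-1][1] - valuations[0][1]
print(pnl)  # 10.5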
Example No. 9
def export_strategy_bread_crumbs_to_csv(strategy, csv_file_path, regression_db=None):
    columns_to_print = [3, 2, 4]

    # Load data.
    bread_crumb_loader = BreadCrumbsDataLoader()
    bread_crumb_loader.db = regression_db if regression_db else Database()
    bread_crumb_loader.load_bread_crumbs_time_series(strategy.name)
    bread_crumbs = bread_crumb_loader.data[BreadCrumbsDataLoader.BREAD_CRUMBS_TIME_SERIES][strategy.name]

    # Generate meta data.
    results = evaluate_strategy_bread_crumbs(bread_crumbs)
    meta_data = [
        ['strategy', 'runs', 'signal ratio', 'trade ratio', 'pnl', 'data_warning_count', 'strategy_error_count'],
        [strategy.name, str(results.runs), str(results.signal_ratio), str(results.trade_ratio), str(results.pnl),
         str(results.data_warning_count), str(results.strategy_error_count)]]

    # Generate bread crumb headers.
    headers = Constants.configs['tables'][Constants.APP_NAME][BreadCrumbs.TABLE]

    # Reverse bread crumbs so latest is shown first.
    bread_crumbs.reverse()

    # Write bread crumbs to csv file.
    with open(csv_file_path, 'w', newline='') as csv_file:
        # Initiate writer.
        writer = csv.writer(csv_file)

        # Write meta data.
        writer.writerow(meta_data[0])
        writer.writerow(meta_data[1])

        # Write bread crumb headers.
        writer.writerow([headers[c] for c in columns_to_print])

        # Write bread crumbs.
        for row in bread_crumbs:
            row[3] = format_datetime_str(row[3])
            writer.writerow([row[c] for c in columns_to_print])

    Constants.log.info('Bread crumbs exported to {}'.format(csv_file_path))
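A minimal usage sketch for the export function, assuming a strategy object that exposes the .name attribute the loader keys on; the output path is a placeholder:

# Hypothetical usage: the path is a placeholder; pass a regression database to
# export from a test run instead of the live one.
export_strategy_bread_crumbs_to_csv(strategy, '/tmp/strategy_bread_crumbs.csv')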
Example No. 10
def main():
    # Set up parse options, initiate global constants and logs.
    args = [SUPPRESS_TRADES, EXPORT_CSV]
    Constants.parse_arguments(Constants.APP_NAME, custom_args=args)

    # Setup database.
    db = Database()
    db.log()

    # Initiate Job
    job = Job(log_path=Constants.log_path)
    job.log()

    # Parse strategy xml.
    strategy = parse_strategy_from_xml(Constants.xml.path,
                                       return_object=True,
                                       db=db)
    Constants.log.info("Strategy portfolio: {0}".format(strategy.portfolio.id))
    db.update_value(Strategy.TABLE, 'updated_by', job.id,
                    'name="{}"'.format(strategy.name.lower()))

    # Initiate strategy executor
    strategy_executor = StrategyExecutor(
        strategy,
        job_object=job,
        suppress_trades=Constants.configs[SUPPRESS_TRADES])

    # Run strategy.
    strategy_executor.run()

    # Generate report.
    if Constants.configs[EXPORT_CSV]:
        strategy_executor.generate_strategy_report()

    # Check for any warnings.
    status = Job.SUCCESSFUL
    if strategy.data_loader.warnings:
        status = Job.WARNINGS

    # Finish job.
    job.finished(status=status, condition=strategy_executor.finish_condition)
    return status
Example No. 11
def add_profile(username, user_hash):
    profile_id = generate_unique_id(username)
    db = Database(name='user_data')
    values = [profile_id, username, user_hash]
    db.insert_row('profiles', values)
    return profile_id
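A usage sketch tying add_profile and authenticate_profile together; the hashing scheme is an assumption, since these examples only store and compare a caller-supplied hash:

import hashlib

# Hypothetical usage: sha256 is an assumed hashing scheme, not taken from the source.
user_hash = hashlib.sha256('my_password'.encode()).hexdigest()
profile_id = add_profile('my_username', user_hash)
assert authenticate_profile('my_username', user_hash) == profile_id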
Example No. 12
class Job:
    DB_NAME = 'job_data'

    SUCCESSFUL = 0
    WARNINGS = 2
    FAILED = 1
    STATUS_MAP = {
        SUCCESSFUL: 'finished successfully',
        WARNINGS: 'finished with warnings',
        FAILED: 'failed'
    }
    FIRST_PHASE = 'INITIATED'

    def __init__(self, log_path=None, job_id=None):
        self._db = Database(name=self.DB_NAME)
        self.phase_name = None

        if job_id:
            # Load in an existing job from database.
            job_row = self._db.get_one_row('jobs', 'id="{0}"'.format(job_id))
            job_dict = query_result_to_dict([job_row], Constants.configs['tables'][self.DB_NAME]['jobs'])[0]

            # Read in job phase.
            phase_row = self._db.query_table('phases', 'job_id="{0}"'.format(job_dict['id']))
            phase_dict = query_result_to_dict(phase_row, Constants.configs['tables'][self.DB_NAME]['phases'])[-1]
            job_dict['phase_name'] = phase_dict['name']

        else:
            # Create new job and add it to the database.
            job_dict = self._create_job_dict(log_path)
            self._db.insert_row_from_dict('jobs', job_dict)

        # Set instance variables.
        self.id = job_dict['id']
        self.name = job_dict['name']
        self.script = job_dict['script']
        self.version = job_dict['version']
        self.log_path = job_dict['log_path']
        self.elapsed_time = job_dict['elapsed_time']
        self.finish_state = job_dict['finish_state']
        self.start_time = job_dict['start_time']
        self.phase_name = job_dict['phase_name']

        # Initiate the job if it has no phase yet.
        if self.phase_name is None:
            self.update_phase(Job.FIRST_PHASE)

    @staticmethod
    def _create_job_dict(log_path):
        if Constants.job_name:
            name = Constants.job_name
        else:
            name = '{0}_manual_run'.format(Constants.script)
        return {
            'id': str(abs(hash(name + datetime.datetime.now().strftime(Constants.DATETIME_FORMAT)))),
            'name': name.lower(),
            'script': Constants.script,
            'version': Constants.configs['version'],
            'log_path': log_path,
            'elapsed_time': None,
            'finish_state': None,
            'start_time': datetime.datetime.now().strftime(Constants.DATETIME_FORMAT),
            'phase_name': None
        }

    def _add_phase(self, name):
        phase_id = str(abs(hash(name + self.id)))
        date_time = datetime.datetime.now().strftime(Constants.DATETIME_FORMAT)
        self._db.insert_row('phases', [phase_id, self.id, date_time, name])
        return phase_id

    def log(self, logger=None):
        if logger is None:
            logger = Constants.log
        logger.info('Starting job: {0}'.format(self.id))
        log_hr()

    def update_phase(self, phase):
        self.phase_name = phase.replace(' ', '_').upper()
        phase_id = self._add_phase(self.phase_name)
        # self._db.update_value('jobs', 'phase_id', phase_id, 'id="{0}"'.format(self.id))

    def finished(self, status=SUCCESSFUL, condition=None):
        log_hr()

        # Update job phase.
        if condition is None:
            self.update_phase('TERMINATED_SUCCESSFULLY')
        else:
            Constants.log.warning('Job finished early with condition: "{0}"'.format(condition))
            self.update_phase('TERMINATED_{0}'.format(condition))

        # Update job.
        start_time = self._db.get_one_row('phases', 'job_id="{}" AND name="{}"'.format(self.id, Job.FIRST_PHASE))[2]
        start_time = datetime.datetime.strptime(start_time, Constants.DATETIME_FORMAT)
        run_time = round((datetime.datetime.now() - start_time).total_seconds(), 3)
        self._db.update_value('jobs', 'elapsed_time', run_time, 'id="{0}"'.format(self.id))
        self._db.update_value('jobs', 'finish_state', status, 'id="{0}"'.format(self.id))

        # Log final status.
        if status == Job.SUCCESSFUL or status == Job.WARNINGS:
            Constants.log.info('Job "{0}" {1} in {2} seconds.'.format(self.name, Job.STATUS_MAP[status], run_time))
        elif status == Job.FAILED:
            Constants.log.error('Job "{0}" {1} after {2} seconds.'.format(self.name, Job.STATUS_MAP[status], run_time))
        else:
            Constants.log.info('Job "{0}" finished in {1} seconds.'.format(self.name, run_time))
Example No. 13
def main():
    # Set up parse options, initiate global constants and logs.
    Constants.parse_arguments(Constants.APP_NAME)

    # Initiate Job.
    job = Job(log_path=Constants.log_path)
    job.log()

    # Setup connection to market data database, using the data loader's db name constant.
    db = Database(name=MarketDataLoader.DB_NAME)
    db.log()

    # Parse subscriptions file.
    job.update_phase('parsing subscriptions')
    subscriptions = []
    for tick_requirement in get_xml_root(Constants.xml.path):
        symbol = get_xml_element_attribute(tick_requirement, 'symbol', required=True)
        stale_tick_limit = get_xml_element_attribute(tick_requirement, 'stale_tick_limit')
        if stale_tick_limit:
            subscriptions.append({'symbol': symbol.upper(), 'stale_tick_limit': int(stale_tick_limit)})
        else:
            subscriptions.append({'symbol': symbol.upper()})
    Constants.log.info('Loaded {0} required tickers.'.format(len(subscriptions)))

    # Load data.
    job.update_phase('requesting data')
    ticker_data_source = TickerDataSource()
    warnings = 0
    for ticker in subscriptions:
        data_source_data = ticker_data_source.request_quote(ticker[TickerDataSource.SYMBOL])
        if data_source_data:
            # Add data to ticker dictionary.
            ticker['price'] = data_source_data[TickerDataSource.PRICE]
            ticker['volume'] = data_source_data[TickerDataSource.VOLUME]

            # Carry out checks on ticker.
            ticker_warnings = ticker_checks(ticker)

            # Save tick to database.
            run_time_string = Constants.run_time.strftime(Constants.DATETIME_FORMAT)
            db.insert_row('ticks', [generate_unique_id(ticker['symbol'] + run_time_string),
                                    run_time_string,
                                    ticker['symbol'],
                                    ticker['price'],
                                    ticker['volume']
                                    ]
                          )

            # Log ticks.
            Constants.log.info('symbol: {0}, price: {1}, volume: {2}'.format(ticker['symbol'], ticker['price'], ticker['volume']))
        else:
            ticker_warnings = ['no_data']

        for warning_type in ticker_warnings:
            warning_id = generate_unique_id(ticker['symbol'] + Constants.run_time.strftime(Constants.DATETIME_FORMAT))
            db.insert_row('data_warnings', [warning_id, 'tick', warning_type, ticker['symbol']])
            Constants.log.info('Data warning "{0}" for ticker {1}.'.format(warning_type, ticker['symbol']))
            warnings += 1

    if warnings:
        job.finished(status=Job.WARNINGS, condition='data warnings')
        return Job.WARNINGS
    else:
        job.finished()
        return Job.SUCCESSFUL
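The subscriptions file format is only implied by the parsing loop above; a hypothetical example of its shape, where only the 'symbol' and 'stale_tick_limit' attributes are confirmed by the code and the element names are guesses:

import xml.etree.ElementTree as ElementTree

# Hypothetical XML shape: the element names are guesses; only the attributes
# 'symbol' and 'stale_tick_limit' are confirmed by the parsing code above.
subscriptions_xml = """
<subscriptions>
    <tick_requirement symbol="aapl" stale_tick_limit="3"/>
    <tick_requirement symbol="msft"/>
</subscriptions>
"""

for tick_requirement in ElementTree.fromstring(subscriptions_xml):
    print(tick_requirement.attrib)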
Example No. 14
def main():
    # Set up parse options, initiate global constants and logs.
    args = [CONFIG_FILE, FUNCTIONS]
    Constants.parse_arguments(custom_args=args)

    # Determine which functions will be done.
    if Constants.configs[FUNCTIONS]:
        functions_to_do = [
            f.lower() for f in Constants.configs[FUNCTIONS].split(',')
            if f in ON_BOARDING_FUNCTIONS
        ]
    else:
        # Default to all available on-boarding functions.
        functions_to_do = ON_BOARDING_FUNCTIONS

    if INITIATE_ENVIRONMENT in functions_to_do:
        Constants.log.info('Initiating environment.')
        # Generate resource directories.
        environment_path = get_environment_specific_path(
            Constants.root_path, Constants.environment)
        add_dir(environment_path, backup=True)
        for directory in Constants.RESOURCE_DIRS:
            resource_path = os.path.join(environment_path, directory)
            add_dir(resource_path, backup=True)

        # Move config file to environment specific config path.
        environment_config_path = os.path.join(
            environment_path, 'configs',
            os.path.basename(Constants.configs[CONFIG_FILE]))
        copy_file(Constants.configs[CONFIG_FILE], environment_config_path)

        # Read application configs.
        app_configs = read_json_file(Constants.configs[CONFIG_FILE])

        # Initiate database.
        dbos = [
            initiate_database(Constants.db_path, d, app_configs['tables'][d],
                              Constants.environment)
            for d in app_configs['tables']
        ]
        db = dbos[0]
    else:
        # Initiate database
        db_path = os.path.join(Constants.root_path, Constants.environment,
                               'data')
        db = Database(db_path, Constants.environment)
        # Load application configs.
        app_configs = read_json_file(Constants.configs[CONFIG_FILE])

    if Constants.xml:
        strategy_setup_dict = parse_strategy_setup_from_xml(Constants.xml.path)
        strategy_dict = parse_strategy_from_xml(Constants.xml.path)
    else:
        strategy_setup_dict = None
        strategy_dict = None

    if ON_BOARD_STRATEGIES in functions_to_do:
        if not strategy_setup_dict or not strategy_dict:
            raise Exception('XML file is required to on board a strategy.')

        Constants.log.info('Loading strategy "{}".'.format(
            strategy_dict['name']))

        # Initiate strategy if it does not exist.
        if not db.get_one_row('strategies', 'name="{0}"'.format(
                strategy_dict['name'])):
            # Add portfolio and strategy.
            portfolio_id = add_portfolio(
                db, '_{0}_portfolio'.format(strategy_dict['name']),
                strategy_setup_dict['allocation'], strategy_setup_dict['cash'])
            add_strategy(db, strategy_dict['name'], portfolio_id)

            # Add any assets.
            for asset in strategy_setup_dict['assets']:
                add_assets(db, portfolio_id, asset['symbol'])

        # Copy XML file to strategy directory.
        environment_path = get_environment_specific_path(
            Constants.root_path, Constants.environment)
        strategies_path = os.path.join(environment_path, 'strategies',
                                       '{0}.xml'.format(strategy_dict['name']))
        copy_file(Constants.xml.path, strategies_path)

    if SETUP_CRON_JOBS in functions_to_do:
        Constants.log.info('Setting up cron jobs.')
        if not strategy_setup_dict or not strategy_dict:
            raise Exception('XML file is required to add cron jobs.')

        # Only reset existing jobs when initialising the environment.
        reset = INITIATE_ENVIRONMENT in functions_to_do
        # interpreter = '/home/robot/projects/AlgoTradingPlatform/venv/bin/python3'
        interpreter = 'python3'
        code_path = '/home/robot/projects/AlgoTradingPlatform'

        # Initiate cron object.
        cron = CronTab(user=os.getlogin())
        if reset:
            cron.remove_all()

        # Create cron jobs from strategy schedule.
        for job in strategy_setup_dict['jobs']:
            # Extract details.
            name = job['name']
            script = job['script']
            schedule = job['schedule']

            # Parse script arguments.
            environment_path = get_environment_specific_path(
                Constants.root_path, Constants.environment)
            strategies_path = os.path.join(
                environment_path, 'strategies',
                '{0}.xml'.format(strategy_dict['name']))
            script_args_template = app_configs['script_details'][script][
                'args']
            script_args = parse_wildcards(
                script_args_template, {
                    '%e%': Constants.environment,
                    '%j%': '{0}_scheduled'.format(name),
                    '%r%': Constants.root_path,
                    '%x%': strategies_path
                })

            # Generate command.
            command_template = [
                interpreter,
                os.path.join(code_path, '{0}.py'.format(script)), script_args
            ]
            command = ' '.join(command_template)

            # Create the cron job (renamed to avoid shadowing the loop variable).
            cron_job = cron.new(command=command)
            cron_job.setall(schedule)
            cron.write()

    log_hr()
    Constants.log.info('On-boarding finished.')
    return 0
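parse_wildcards is assumed rather than shown; given how it is called above, a minimal sketch that substitutes each wildcard token in the argument template:

def parse_wildcards(template, wildcard_map):
    # Hypothetical sketch: replace each wildcard token (e.g. '%e%') with its value.
    for wildcard, value in wildcard_map.items():
        template = template.replace(wildcard, str(value))
    return template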