Code example #1
File: core.py Project: runestate/cryptrade
def email(sender,
          receiver,
          title,
          text,
          smtp_host=None,
          smtp_user=None,
          smtp_password=None,
          smtp_port=587):
    try:
        # any SMTP setting not passed in falls back to app configuration
        if smtp_host is None:
            smtp_host = AppConfig.setting('SMTP_HOST')
        if smtp_user is None:
            smtp_user = AppConfig.setting('SMTP_USER')
        if smtp_password is None:
            smtp_password = AppConfig.setting('SMTP_PASSWORD')
        msg = EmailMessage()
        msg.set_content(text)
        msg['Subject'] = title
        msg['From'] = sender  #Address(display_name='Recipient', addr_spec='*****@*****.**')
        msg['To'] = receiver
        Log.t('sending email')
        with smtplib.SMTP(host=smtp_host, port=smtp_port) as smtp_server:
            smtp_server.starttls(context=SSLContext(PROTOCOL_TLSv1_2))
            smtp_server.login(user=smtp_user, password=smtp_password)
            smtp_server.send_message(msg)
            # no explicit quit() needed; the with-block issues QUIT on exit
        Log.t('sent email')
    except Exception as e:
        raise Exception('Failed to send email') from e
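
A minimal call-site sketch, assuming the SMTP_* settings live in AppConfig; the addresses and host below are placeholders, not values from the project:

    # hypothetical usage; smtp_user/smtp_password fall back to AppConfig
    email(sender='alerts@example.com',
          receiver='ops@example.com',
          title='cryptrade alert',
          text='BTC/USD crossed the configured threshold',
          smtp_host='smtp.example.com')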
Code example #2
File: parse_app.py Project: runestate/cryptrade
def __parse_and_persist_as_transaction(row, parser, db):
    parsed = parser.parse(row)
    if parsed is None:
        return None
    transaction_id = db.create_transaction(parsed)  # avoid shadowing the builtin id()
    Log.t('persisted transaction id {}', transaction_id)
    return parsed
Code example #3
	async def subscribe(self):
		try:
			async for response_text in self.__socket_subscribe():
				Log.t('received text: {}', response_text)
				yield response_text
		except Exception as e:
			error_msg = 'Failed to subscribe for handler filepath {}'.format(self.handler_filepath)
			raise Exception(error_msg) from e
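
A sketch of how this async generator might be consumed (the handler object and event-loop wiring are assumed, not shown in the source):

	# hypothetical consumer of subscribe()
	async def run(handler):
		async for response_text in handler.subscribe():
			print(response_text)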
Code example #4
	def reset(self, epoch_time):
		self.low = None
		self.high = None
		self.open = None
		self.close = None
		self.latest = None
		self.is_opening = False
		self.is_closing = False
		self.observation_count = 0
		interval_position_epoch = epoch_time % self.interval_second # find the 'time passed' within the interval
		self.interval_start_epoch = epoch_time - interval_position_epoch
		self.interval_end_epoch = self.interval_start_epoch + self.interval_second
		interval = self.interval_start_epoch / self.interval_second
		assert interval % 1 == 0, 'interval index {} is not an integer'.format(interval)
		self.interval_index = int(interval)
		Log.t('interval {}: {} .. {}',
			self.interval_index,
			datetime.utcfromtimestamp(self.interval_start_epoch),
			datetime.utcfromtimestamp(self.interval_end_epoch))
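
To make the bucketing arithmetic concrete, here is a worked example assuming interval_second = 300 (five-minute intervals; the value is illustrative):

	# epoch_time = 1514764912 is 2018-01-01 00:01:52 UTC
	interval_position_epoch = 1514764912 % 300  # 112 seconds into the interval
	interval_start_epoch = 1514764912 - 112     # 1514764800, 00:00:00 UTC
	interval_end_epoch = 1514764800 + 300       # 1514765100, 00:05:00 UTC
	interval_index = 1514764800 // 300          # 5049216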
Code example #5
File: parse_app.py Project: runestate/cryptrade
def process_nonparsed_api_responses_subset(self, next_min_id=0):
    limit = 1000
    Log.i(
        'processing nonparsed api responses, starting from id {} with limit {}',
        next_min_id, limit)
    total_row_count = 0
    parse_count = 0
    is_to_keep_fetching = True
    while is_to_keep_fetching:
        datasources_frame = self.store.datasources_frame()
        frame = self.store.unparsed_datafetch_api_responses_frame(
            min_id=next_min_id, limit=limit)
        row_count = frame.shape[0]
        if row_count == 0:
            is_to_keep_fetching = False
        else:
            total_row_count += row_count
            for i, row in frame.iterrows():
                row_id = row['id']  # read before the try so the error message can always reference it
                try:
                    datasource_id = row['datasource_id']
                    parser = self.find_parser(datasource_id,
                                              datasources_frame)
                    if ParseUtil.parse_and_persist_as_transaction_maybe(
                            row, parser, self.store):
                        parse_count += 1
                except Exception as e:
                    raise Exception(
                        'Failed to parse row index {} with id {}'.format(
                            i, row_id)) from e
            ids = frame['id']
            max_id = ids.max()
            Log.t('sweep of ids {}..{} returned {} entries', next_min_id,
                  max_id, row_count)
            next_min_id = max_id + 1  # start from the next row
    Log.i('search for nonparsed responses done, parse count: {}/{}',
          parse_count, total_row_count)
    return next_min_id
Code example #6
	def parse(self, result):
		response_text = result['response']
		if not response_text:
			raise ValueError('could not parse empty response text')
		Log.t('parsing text: {}', response_text)
		response_dict = json.loads(response_text)
		type_key = 'type'
		if type_key in response_dict:
			type_value = response_dict[type_key]
			if type_value != 'status':
				raise Exception('Unexpected type value "{}"'.format(type_value))
			Log.t('ignoring status message')
			return None
		event_key = 'event' # assume response can now only be of the 'event' type
		event_value = response_dict[event_key]
		if event_value != 'message':
			raise Exception('Cannot handle event value "{}"'.format(event_value))
		data_value = response_dict['data']
		data_success_value = data_value['success']
		if data_success_value is not True:
			raise Exception('Unexpected success value "{}"'.format(data_success_value))
		display_time_epoch = data_value['timestamp']
		last_price = data_value['last']
		volume = data_value['volume']
		volume_percent = data_value['volume_percent']
		transaction = {
				'datasource_id': result['datasource_id'],
				'exchange_id': result['exchange_id'],
				'amount': 0, # transaction with zero amount indicates the current market value
				'price': last_price,
				'from_currency_id': result['from_currency_id'],
				'to_currency_id': result['to_currency_id'],
				'volume': volume,
				'volume_percent': volume_percent,
				'source_md5hash': result['response_md5hash'],
				'epoch_time': display_time_epoch
			}
		return transaction
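
For reference, a payload shaped like the following would pass the checks above (an assumed example built from the fields the parser reads, not a captured response; a {"type": "status"} message would instead be ignored and parse would return None):

	# hypothetical 'message' event; values are illustrative only
	result = {
		'datasource_id': 1,
		'exchange_id': 1,
		'from_currency_id': 1,
		'to_currency_id': 2,
		'response_md5hash': 'abc123',
		'response': json.dumps({
			'event': 'message',
			'data': {
				'success': True,
				'timestamp': 1514764912,
				'last': 13200.5,
				'volume': 4.2,
				'volume_percent': 0.8,
			},
		}),
	}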
Code example #7
File: generator_app.py Project: runestate/cryptrade
    def process_transaction_subset(self, transaction_min_timestamp, set_size,
                                   hdf5_filepath, job_changed_handler,
                                   is_realtime):
        assert job_changed_handler is not None, 'no job_changed_handler provided'
        window_size = 10
        subset_process_start_time = time.time()
        frame = self.db.transaction_by_timestamp_frame(
            transaction_min_timestamp, set_size, self.from_currency_ids,
            self.to_currency_ids)
        frame.set_index('epoch_time', inplace=True)
        row_count = frame.shape[0]
        Log.d('...time spent fetching subset ({} rows) from db: {:.2f}s',
              row_count,
              time.time() - subset_process_start_time)
        if row_count == 0:
            return None
        row_process_count = 0
        last_epoch_time = None
        Log.d('...processing rows...')
        row_process_start_time = time.time()
        gap_resolver = self.run_config['gap_resolver']
        for epoch_time, row in frame.iterrows():
            is_row_processed = False
            try:
                transaction_id = row['id']
                datasource_id = row['datasource_id']
                exchange_id = row['exchange_id']
                from_currency_id = row['from_currency_id']
                to_currency_id = row['to_currency_id']
                price = np.float64(row['price'])
                volume = np.float64(row['volume'])
                transaction_min_timestamp = epoch_time  #transaction_id + 1
                seconds_since_previous = 0 if last_epoch_time is None else epoch_time - last_epoch_time
                Log.t('seconds since previous epoch time: {}',
                      seconds_since_previous)
                if last_epoch_time is not None:
                    assert epoch_time >= last_epoch_time, 'epoch time ({}) was less than the previous epoch time ({})'.format(
                        epoch_time, last_epoch_time)

                assert seconds_since_previous >= 0, 'seconds_since_previous cannot be a negative value'
                last_epoch_time = epoch_time
                for job in self.jobs:
                    if (job.datasource.id == datasource_id
                            and job.exchange.id == exchange_id
                            and job.from_currency.id == from_currency_id
                            and job.to_currency.id == to_currency_id):
                        is_row_processed = True
                        try:
                            h5frame = job.frame
                            if h5frame is not None:  # perform integrity check on existing, non-empty dataframe
                                assert not h5frame.empty  # should not be possible if the frame has previously been created
                                last_epoch = h5frame.index.values[-1]
                                seconds_since_previous = epoch_time - last_epoch
                                assert seconds_since_previous >= 0
                                max_gap_seconds = 120  # TODO make config setting
                                if seconds_since_previous > max_gap_seconds:
                                    warn_message = 'excessive time (+{}s) passed since previous observation: {}s ({}) between {} ({}) and {} ({})'.format(
                                        max_gap_seconds,
                                        seconds_since_previous,
                                        Timespan.from_seconds(
                                            int(seconds_since_previous)
                                        ).as_string(), last_epoch,
                                        StringExpert.format_timestamp(
                                            last_epoch), epoch_time,
                                        StringExpert.format_timestamp(
                                            epoch_time))
                                    if gap_resolver is None:
                                        raise Exception(warn_message)
                                    Log.w(warn_message)
                                    prev_observation = h5frame.iloc[-1]
                                    df_intermediates = gap_resolver.intermediates_frame(
                                        max_gap_seconds,
                                        from_epoch=last_epoch,
                                        to_epoch=epoch_time,
                                        from_price=prev_observation['latest'],
                                        to_price=price,
                                        from_volume=prev_observation['volume'],
                                        to_volume=volume)
                                    Log.d(
                                        'simulating intermediate observations:\n{}',
                                        df_intermediates)
                                    simulated_count = 0
                                    for intermediate_epoch, intermediate in df_intermediates.iterrows():
                                        job_observation = job.job_observe(
                                            value=intermediate['price'],
                                            epoch_time=intermediate_epoch,
                                            volume=intermediate['volume'],
                                            is_simulated=True,
                                            is_realtime=False)
                                        assert job_observation is not None
                                        simulated_count += 1
                                        if simulated_count % 1000 == 0:
                                            Log.d('..simulated {}/{}..',
                                                  simulated_count,
                                                  len(df_intermediates))
                                    Log.i(
                                        'done simulating {} observations up until epoch {} ({})',
                                        len(df_intermediates), epoch_time,
                                        StringExpert.format_timestamp(
                                            epoch_time))
                            try:
                                job_observation = job.job_observe(
                                    value=price,
                                    epoch_time=epoch_time,
                                    volume=volume,
                                    is_simulated=False,
                                    is_realtime=is_realtime)
                                row = job_observation  # job_observation_to_frame_row(volume, job_observation)
                                assert row is not None
                                job_changed_handler(job)
                            except DoubleObservationError:
                                Log.w(
                                    'epoch already in frame, will be ignored ({})',
                                    epoch_time)
                        except Exception as job_e:
                            raise Exception(
                                'Failed to feed row to job') from job_e
            except Exception as e:
                raise Exception(
                    'Failed to process row index {}'.format(epoch_time)) from e
            if is_row_processed:
                row_process_count += 1
        Log.d('...time spent processing {}/{} rows in time: {:.2f}s',
              row_process_count, frame.shape[0],
              time.time() - row_process_start_time)
        with pd.HDFStore(hdf5_filepath, mode='a') as h5:
            h5_process_start_time = time.time()
            start_observation_epoch = frame.index.values[0]
            for job in self.jobs:
                df_to_append = job.frame[
                    job.frame.index >= start_observation_epoch]
                h5.append(job.uid,
                          df_to_append,
                          format='table',
                          data_columns=True)
                row_count = h5.get_storer(job.uid).nrows
                Log.d('...h5 key {}, row count is {}', job.uid, row_count)
        Log.d('...time spent adding to h5: {:.2f}s',
              time.time() - h5_process_start_time)
        row_processing_time = time.time() - subset_process_start_time
        Log.d('...total time spent on subset: {:.2f}s ({:.2f}s per row)',
              row_processing_time, row_processing_time / row_process_count)
        return transaction_min_timestamp
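
The gap handling above relies on gap_resolver.intermediates_frame(...), whose implementation is not shown here. A minimal linear-interpolation sketch consistent with how the result is consumed (a frame indexed by epoch with 'price' and 'volume' columns, one row per step) might look like:

    # hypothetical stand-in for the project's gap resolver
    import numpy as np
    import pandas as pd

    def intermediates_frame(step_seconds, from_epoch, to_epoch,
                            from_price, to_price, from_volume, to_volume):
        # one simulated observation every step_seconds, endpoints excluded
        epochs = np.arange(from_epoch + step_seconds, to_epoch, step_seconds)
        fraction = (epochs - from_epoch) / (to_epoch - from_epoch)
        return pd.DataFrame(
            {
                'price': from_price + fraction * (to_price - from_price),
                'volume': from_volume + fraction * (to_volume - from_volume),
            },
            index=epochs)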
Code example #8
def h5_to_plot(h5, from_epoch, to_epoch, filterInNth, agents, format_as_image):
    Log.d('============')
    Log.d(agents)
    agent_keys = [a for a in agents.split(',') if a]
    if not agent_keys:
        return 'No agent selected'
    filterInNth = int(filterInNth)
    df_info = ''
    pd.options.display.float_format = '{:.2f}'.format
    df_info += '{}\n\n'.format(h5.info())
    for key in h5:
        where = 'index >= {} and index <= {}'.format(from_epoch, to_epoch)
        Log.d('where: {}', where)
        frame = pd.read_hdf(h5, key, where=where)
        if frame.empty:
            return 'Empty frame'
        df_info += '{}\n\n'.format(frame.describe())
        background_color = '#272822'
        minute_intervals = [
            12 * 60,  # 12 hours
        ]
        fig, ax = plt.subplots(figsize=(23, 12))  # figsize=(28, 21)
        fig.patch.set_facecolor(background_color)
        Log.t('building plot')
        is_image_format = int(format_as_image) == 1

        def label_connect(path_collection, labels, color=None):
            tooltip = mpld3.plugins.PointHTMLTooltip(
                path_collection,
                [
                    '<span class="point-tooltip" style="color: {}">{} <span class="point-tooltip-key">{}</span></span>'
                    .format(color, l, key) for l in labels
                ],
                voffset=100,
                hoffset=0)
            mpld3.plugins.connect(fig, tooltip)

        for agent_key in agent_keys:
            try:
                agent_name = agent_key.split('(')[0]
                Log.d('plotting agent: {} -> {}', agent_key, agent_name)
                agent = agent_map[agent_name]
                plot_title = ''
                col_prefix = 'feature_{}_'.format(agent_key)
                agent_plot = agent.plot(plot_title,
                                        None,
                                        frame,
                                        ax,
                                        is_image_format,
                                        label_connect=label_connect,
                                        filter_in_nth=filterInNth,
                                        cp=col_prefix)
                pe.style_plot(ax, plot_title)
            except KeyError as ke:
                Log.w('Valid keys are: {}', frame.keys())
                raise ke
        plot_dirpath = AppConfig.setting('PLOT_DIRPATH')
        plot_filepath = os.path.join(plot_dirpath,
                                     '{}.png'.format('some plot'))

        fig.patch.set_facecolor(style.backgroundColor)
        fig.tight_layout()
        if is_image_format:
            sio = BytesIO()
            fig.savefig(sio,
                        facecolor=fig.get_facecolor(),
                        edgecolor='none',
                        format="png")
            html = '<img src="data:image/png;base64,{}"/>'.format(
                base64.encodebytes(sio.getvalue()).decode())
            return html
        mpld3.plugins.connect(fig, ZoomSizePlugin())
        return mpld3.fig_to_html(fig)
    raise Exception('h5 store contains no keys to plot')
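
A hypothetical invocation, assuming an HDF5 store produced by the generator above and an agent key present in agent_map (the store path, epoch range, and agent key below are all placeholders):

    # illustrative only
    with pd.HDFStore('/tmp/cryptrade.h5', mode='r') as h5:
        html = h5_to_plot(h5,
                          from_epoch=1514764800,
                          to_epoch=1514851200,
                          filterInNth=10,
                          agents='sma(20)',
                          format_as_image=0)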
Code example #9
def __init__(self, col_prefix, tag=None):
    Log.t('construct')
    super().__init__(tag)
    self.col_prefix = col_prefix