Example #1
    def history(self, scr_no, rq_name, tr_code, _, prev_next):
        kwargs = self.share.get_args(name())
        period = history.get_period(tr_code)

        rec = history.get_record_name_for_its_name(
            tr_code)  # record_name = '종목코드' | '업종코드'
        code = string(self.api.get_comm_data(tr_code, rq_name, 0, rec))

        # Handle trading suspended stock
        if not code:  # code = ''
            code = kwargs['code']

        # Check if wrong data was received
        if code != kwargs['code']:
            raise RuntimeError(
                f"Requested {kwargs['code']}, but the server still sends {code}."
            )

        # Fetch multi data
        data = {key: list() for key in history.outputs(tr_code, MULTI)}
        cnt = self.api.get_repeat_cnt(tr_code, rq_name)
        for i in range(cnt):
            for key, fn in history.preper(tr_code, MULTI):
                data[key].append(
                    fn(self.api.get_comm_data(tr_code, rq_name, i, key)))

        # Update downloaded data
        for key in data.keys():
            self.share.extend_history(code, key, data[key])

        # If the data goes back further than needed, stop downloading.
        if 'start' in kwargs:
            col = history.get_datetime_column(period)
            # To check whether the data is empty.
            if len(data[col]) > 0:
                last = data[col][-1][:len('YYYYMMDD')]
                # Note that data is ordered from newest to oldest
                if date(last) < date(kwargs['start']):
                    prev_next = ''

        # Continue to download
        if prev_next == '2':
            try:
                # Call signal method again, but with prev_next='2'
                bot = self.api.signal('on_receive_tr_data', name())
                bot(code, period=period, prev_next=prev_next)
            except Exception as err:
                args = f"code={code}, period={period}, prev_next={prev_next}"
                self.share.update_single('history', 'error', True)
                print(f"An error at Bot.history({args}).\n\n{format_exc()}")

        # Download done
        else:
            # Sort into chronological order
            df = pd.DataFrame(self.share.get_history(code))[::-1]

            # To make df have datetime index
            col = history.get_datetime_column(period)
            fmt = history.get_datetime_format(period)
            """
                Make the time-related column a pandas Datetime index
            """
            # To handle exceptional time and dates
            if not df.empty and history.is_sector(code) and col == '체결시간':
                # To choose exceptional datetime replacer
                edrfec = history.EXCEPTIONAL_DATETIME_REPLACER_FOR_EXCEPTIONAL_CODE
                replacer = edrfec.get(code, history.EXCEPTIONAL_DATETIME_REPLACER)

                # Find indices of dates with a delayed market opening and of inconvertible values in df
                indices = dict()
                exceptions = list()
                start = date(df[col].iat[0][:len('YYYYMMDD')])
                end = date(df[col].iat[-1][:len('YYYYMMDD')])
                for ymd, delay in history.EXCEPTIONAL_DATES.items():
                    if start <= date(ymd) <= end:
                        day = df[col].loc[df[col].str.match(ymd)]
                        indices[ymd] = day.index

                        # To save original data
                        for regex, datetime in replacer.items():
                            series = day.loc[day.str.contains(regex, regex=True)]
                            series = series.replace(regex={regex: datetime})
                            series = pd.to_datetime(series, format='%Y%m%d%H%M%S')
                            exceptions.append(series)

                # Replace inconvertible values (888888, 999999) with (16:00:00, 18:00:00)
                df[col].replace(regex=replacer, inplace=True)

                # To make the column a pandas datetime series
                df[col] = pd.to_datetime(df[col], format=fmt)

                # Subtract the delay so that the day appears to start at the normal opening time
                for ymd, idx in indices.items():
                    delay = history.EXCEPTIONAL_DATES[ymd]
                    df.loc[idx, col] -= pd.DateOffset(hours=delay)

                # Restore the exceptional times to their saved original values
                for series in exceptions:
                    df.loc[series.index, col] = series

            # Otherwise: col is '일자', or df is empty (for either column)
            else:
                df[col] = pd.to_datetime(df[col], format=fmt)

            # Finally make datetime column as index
            df.set_index(col, inplace=True)
            """
                Close downloading process
            """
            # To get rid of data preceding 'start'
            if 'start' in kwargs:
                df = df.loc[kwargs['start']:]
            # To get rid of data following 'end'
            if 'end' in kwargs:
                df = df.loc[:kwargs['end']]

            # If server sent mixed data
            if not df.index.is_monotonic_increasing:
                raise RuntimeError(
                f'Downloaded data is not monotonically increasing. Error at Server.history() with code={code}.'
                )

            # Rename column
            if period == 'tick':
                df.rename(columns={'현재가': '체결가'}, inplace=True)

            # Save data to csv file
            self.history_to_csv(df, code, kwargs['path'], kwargs['merge'],
                                kwargs['warning'])

            # Once the shared variables have been used, delete them
            self.share.remove_args(name())
            self.share.remove_history(code)

            # Mark successfully downloaded
            self.share.update_single(name(), 'complete', True)

            self.api.disconnect_real_data(scr_no)
            self.api.unloop()
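
The slot above cannot convert every raw tick time directly: some rows carry values that cannot be parsed as times, such as '888888' or '999999', and on a few dates the market opened late. The code therefore regex-replaces those values, converts the column with pd.to_datetime(), and shifts the delayed rows back by the delay. Below is a minimal standalone sketch of that pandas technique; it is not part of the project, and the sample values and the one-hour delay are made up for illustration.

import pandas as pd

# Made-up tick times in 'YYYYMMDDHHMMSS' form; '888888' stands for an
# inconvertible value (cf. EXCEPTIONAL_DATETIME_REPLACER in the slot above).
raw = pd.Series(['20200818090001', '20200818153000', '20200818888888'],
                name='체결시간')

# 1) Regex-replace the inconvertible time with a real clock time (16:00:00)
fixed = raw.replace(regex={r'888888$': '160000'})

# 2) Every value is now parsable, so convert the column to pandas datetimes
fixed = pd.to_datetime(fixed, format='%Y%m%d%H%M%S')

# 3) If the market opened an hour late that day, subtract the delay so the
#    series looks as if trading started at the normal time
#    (the slot above applies this only to rows on the delayed dates)
fixed -= pd.DateOffset(hours=1)

print(fixed)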
Example #2
File: bot.py Project: snapbuy/kiwoom-1
    def history(
            self,
            code,
            period,
            unit=None,
            start=None,
            end=None,
            path=None,
            merge=True,
            warning=True,
            prev_next='0'
    ):
        """
        Download historical market data for the given code and save it as a csv file to the given path

        :param code: str
            unique code of stock or sector
        :param period: str
            one of tick, min, day, week, month and year
        :param unit: int
            1, 3, 5, 10, 30, etc. (cf. 1 bar = unit * period)
        :param start: str
            string of start day in format 'YYYYMMDD'
        :param end: str
            string of end day in format 'YYYYMMDD'. If None, defaults to now.
        :param path: str
            path to save downloaded data
        :param merge: bool
            whether to merge data with existing file or to overwrite it
        :param warning: bool
            turn on/off the warning message if any
        :param prev_next: str
            this param is given by the server's response. Default is '0'.
        """
        # Wait for default request limit, 3600 ms
        QTest.qWait(history.REQUEST_LIMIT_TIME)

        ctype = history.get_code_type(code)  # ctype = 'stock' | 'sector'
        tr_code = history.get_tr_code(period, ctype)

        """
            Set args only once, for the initial request.
        """
        if prev_next == '0':
            # In case path is '' or None
            if not path:
                path = getcwd()

            # To share variables with Slot
            kwargs = effective_args(locals(), remove=['ctype', 'tr_code'])
            self.share.remove_single(name())
            self.share.update_single(name(), 'error', False)
            self.share.update_single(name(), 'restart', False)
            self.share.update_single(name(), 'complete', False)
            self.share.update_single(name(), 'impossible', False)

            # To check format of input dates
            if 'start' in kwargs:
                if not history.is_date(start):
                    raise ValueError(f"Given 'start' {start} is not a valid date.")
            if 'end' in kwargs:
                if not history.is_date(end):
                    raise ValueError(f"Given 'end' {end} is not a valid date.")

            """
                Check 'start' and 'end' points to save downloading time. 
            """
            if merge:
                try:
                    file = join(path, code + '.csv')
                    col = history.get_datetime_column(period)
                    df = read_csv(
                        file,
                        index_col=[col],
                        parse_dates=[col],
                        encoding=config.ENCODING
                    )

                    if period in ['tick', 'min']:
                        # Last tick for stock is 15:30 and for sector is 18:00
                        h, m = (15, 30) if ctype == history.STOCK else (18, 0)  # else for sector
                        last_day = date(df.index[-1])
                        last_tick_of_day = Timestamp(df.index[-1]).replace(hour=h, minute=m)
                        download_completed = last_tick_of_day <= df.index[-1]

                        # To push the 'start' date forward as far as possible. If not given, set it newly.
                        if 'start' not in kwargs or date(kwargs['start']) <= last_day:
                            if download_completed:
                                # Start from the day after last day
                                kwargs['start'] = str((last_day + DateOffset(1)).date()).replace('-', '')
                            else:
                                # Start from the last day
                                kwargs['start'] = str(last_day).replace('-', '')

                        # If downloading is not needed, just return
                        if 'end' in kwargs:
                            if download_completed:
                                if date(kwargs['end']) <= last_day:
                                    self.share.update_single(name(), 'complete', True)
                                    return

                    else:  # if period in ['day', 'week', 'month', 'year']
                        last_day = date(df.index[-1])
                        # To push the 'start' date forward as far as possible. If not given, set it newly.
                        if 'start' not in kwargs or date(kwargs['start']) <= last_day:
                            # Start from the last day
                            kwargs['start'] = str(last_day).replace('-', '')

                        # If downloading is not needed, just return
                        if 'end' in kwargs:
                            if date(kwargs['end']) < last_day:
                                self.share.update_single(name(), 'complete', True)
                                return

                    # Once read, use later in Server.history_to_csv() to increase efficiency
                    self.share.update_single(name(), 'file', df)

                # If any exception, just skip
                except Exception as err:
                    pass

            """
                Update and print arguments. 
            """
            # Done arg setting
            self.share.update_args(name(), kwargs)

            # Print args
            f = lambda key: f"'{kwargs[key]}'" if key in kwargs else None
            print(f"{{code={f('code')}, start={f('start')}, end={f('end')}, period={f('period')}}}")

        """
            Start downloading.
        """
        # Check requesting status
        self.share.single['histories']['nrq'] += 1
        if history.SPEEDING:
            if self.share.get_single('histories', 'nrq') >= history.REQUEST_LIMIT_TRY:
                # Set back to default configuration
                if self.share.get_single('histories', 'cnt') == 0:
                    self.share.update_single(name(), 'impossible', True)
                self.share.update_single(name(), 'restart', True)
                self.api.unloop()
                return

        # Finally, send the data request to the server
        for key, val in history.inputs(tr_code, code, unit, end):
            self.api.set_input_value(key, val)
        scr_no = self.scr.alloc(tr_code, code)

        # If comm_rq_data returns non-zero error code, restart downloading
        if self.api.comm_rq_data(name(), tr_code, prev_next, scr_no) != 0:
            self.share.update_single(name(), 'impossible', True)
            self.share.update_single(name(), 'restart', True)
            self.api.unloop()
            return

        # Wait for the response from the server
        self.api.loop()
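
For reference, a call to the method above might look like the following. This is a hypothetical usage sketch, not code from the repository; it assumes a Bot instance that is already constructed and logged in to the Kiwoom OpenAPI, and the code value and dates are illustrative only.

# bot: an already constructed and logged-in Bot instance (assumption)
bot.history(
    code='005930',      # e.g. Samsung Electronics
    period='day',
    start='20200101',   # 'YYYYMMDD'
    end='20201231',     # optional; None means until now
    path=None,          # None or '' falls back to getcwd()
    merge=True,         # merge with an existing '005930.csv' if present
    warning=True,
)
# When the download finishes, the slot in Example #1 writes '005930.csv'
# under the given path and marks 'complete' in the shared state.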