# Beispiel #1 (scraper artifact: example separator + vote count)
# 0
def save_dataframe(df, code, date):
    """
    Persist a DataFrame as ``<date>.csv`` inside a per-code sub-folder.

    The sub-folder ``<minute_path>/<code>`` is created on demand before
    writing, so callers never need to pre-create it.

    :param df: pandas DataFrame to write.
    :param code: stock code; used as the sub-folder name under ``minute_path``.
    :param date: date string; becomes the csv file name (``<date>.csv``).
    :return: None
    """
    target_dir = os.path.join(minute_path, code)
    make_folder(target_dir)
    csv_file = os.path.join(target_dir, date + '.csv')
    df.to_csv(csv_file)
from PyQt5.QtWidgets import *
from PyQt5.QAxContainer import *
from PyQt5.QtCore import *
import pandas as pd

sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from utils.folder import make_folder

# Directory layout: <base>/data/stock/folder/종목별상세정보_20190323.
# NOTE(review): base_path resolves relative to the process working
# directory ('..'), not this file's location — confirm this is intended.
base_path = os.path.dirname(os.path.abspath('..'))
data_path = os.path.join(base_path, 'data')
stock_path = os.path.join(data_path, 'stock')
folder_path = os.path.join(stock_path, 'folder')
# Per-stock detail-info snapshot dated 2019-03-23 (folder name is Korean).
info_path = os.path.join(folder_path, '종목별상세정보_20190323')

# Create the whole directory chain up front so later writes cannot fail
# on a missing parent.
make_folder(base_path, data_path, stock_path, folder_path, info_path)


class Kiwoom(QAxWidget):
    """
    ActiveX 컨트롤을 하는 Qt CLASS

    """
    def __init__(self):
        """Create the Kiwoom OpenAPI+ COM control and register its event slots."""
        super().__init__()
        # COM instance must exist before any API call can be made.
        self._create_kiwoom_instance()
        self._set_signal_slots()

    def _create_kiwoom_instance(self):
        """
        키움증권의 OpenAPI+를 사용하려면 먼저 COM 오브젝트를 생성해야 합니다.
# Beispiel #3 (scraper artifact: example separator + vote count)
# 0
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from utils.folder import make_folder
from utils.logger import set_logger


# Directory layout: <base>/data/stock/folder/분별매매가격_가상화폐
# ("per-minute trade prices - cryptocurrency", folder name is Korean).
# NOTE(review): base_path resolves relative to the working directory.
base_path = os.path.dirname(os.path.abspath('..'))
data_path = os.path.join(base_path, 'data')
stock_path = os.path.join(data_path, 'stock')
folder_path = os.path.join(stock_path, 'folder')
minute_path = os.path.join(folder_path, '분별매매가격_가상화폐')

# Source-code side folders: <base>/code/crawler.
code_path = os.path.join(base_path, 'code')
crawler_path = os.path.join(code_path, 'crawler')

# Create every folder in one variadic call before crawling starts.
make_folder(base_path, data_path, stock_path, folder_path, minute_path, code_path, crawler_path)
logger = set_logger('minute_logger')


def main(code, dates):
    """
    Crawl Naver Finance per-minute price tables for one stock code.

    For each date, fetch pages 1..42 of the minute-price table and
    collect the parsed <table> elements.

    NOTE(review): this fragment appears truncated — the collected
    ``tables`` list is never saved or returned in the visible code.

    :param code: stock code to crawl.
    :param dates: iterable of date fragments; each is concatenated after
        the hard-coded year "2019" in the request URL.
    """
    for date in dates:
        tables = []

        # Naver paginates the intraday table; 42 pages covers a session.
        for num in range(1, 43):
            page = "https://finance.naver.com/item/sise_time.nhn?code=" + str(code) + "&thistime=2019" + str(date) + "160000&page=" + str(num)
            html = urlopen(page)
            bs_object = BeautifulSoup(html, "html.parser")
            # First <table> on the page holds the minute prices.
            bs_object_table = bs_object.table
            tables.append(bs_object_table)
            sleep(0.1)  # throttle requests to be polite to the server
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from utils.folder import make_folder
from utils.logger import set_logger


# Directory layout: <base>/data/stock/folder/네이버댓글_190414
# ("Naver comments, 2019-04-14", folder name is Korean).
base_path = os.path.dirname(os.path.abspath('..'))
data_path = os.path.join(base_path, 'data')
stock_path = os.path.join(data_path, 'stock')
folder_path = os.path.join(stock_path, 'folder')
comment_path = os.path.join(folder_path, '네이버댓글_190414')

# Source-code side folders: <base>/code/crawler.
code_path = os.path.join(base_path, 'code')
crawler_path = os.path.join(code_path, 'crawler')

# Create every folder in one variadic call before crawling starts.
make_folder(base_path, data_path, stock_path, folder_path, comment_path, code_path, crawler_path)
logger = set_logger('comment_logger')
# Shared KRX client used below to enumerate tickers.
krx = Krx()


def get_naver_comment(code):
    """
    Crawl the Naver Finance discussion board for one stock code.

    Fetches board pages 1..39 and selects the 'span.tah.gray03' elements
    (comment metadata) from each page.

    NOTE(review): this fragment appears truncated — ``comments`` and
    ``views`` are initialised but never filled in the visible code, and
    nothing is returned.

    :param code: stock code whose board is crawled.
    """
    comments = []
    views = []
    page = 40  # page count; rebound to the URL string inside the loop below

    for num in range(1, page):
        # NOTE(review): 'page' is reused here as the URL string — shadowing
        # the loop bound set above (harmless since range() captured it).
        page = "https://finance.naver.com/item/board.nhn?code=" + str(code) + "&page=" + str(num)
        html = urlopen(page)
        soup = BeautifulSoup(html, "html.parser")
        comment = soup.select('span.tah.gray03')
# Beispiel #5 (scraper artifact: example separator + vote count)
# 0
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from utils.folder import make_folder
from utils.logger import set_logger

logger = set_logger("trend")
# Input data layout: <base>/data/stock/folder/일봉_20190323
# ("daily candles, 2019-03-23", folder name is Korean).
base_path = os.path.dirname(os.path.abspath('..'))
data_path = os.path.join(base_path, 'data')
save_path = os.path.join(data_path, 'stock')
folder_path = os.path.join(save_path, 'folder')
day_path = os.path.join(folder_path, '일봉_20190323')

# Output layout: <base>/invest/3.추세주/result ("trending stocks").
invest_path = os.path.join(base_path, 'invest')
trend_path = os.path.join(invest_path, '3.추세주')
result_path = os.path.join(trend_path, 'result')

# Only the output folders are created here; the day-candle input
# folders are assumed to already exist from the crawler scripts.
make_folder(invest_path, trend_path, result_path)


def load_day_data(files, code_list, standard_day):
    """
    Load per-stock daily-candle csv files from ``day_path`` into DataFrames.

    Each file is read, tagged with its stock code (derived from the file
    name, e.g. "005930.csv" -> "005930"), and appended to ``df_list``.

    NOTE(review): this fragment appears truncated — ``code_list``,
    ``standard_day`` and the enumerate index ``i`` are unused and
    ``df_list`` is never returned in the visible code.

    :param files: iterable of csv file names inside ``day_path``.
    :param code_list: unused in the visible portion.
    :param standard_day: unused in the visible portion.
    """
    df_list = []

    for i, file in enumerate(files):
        try:
            new_df = pd.read_csv(os.path.join(day_path, file),
                                 engine='python',
                                 encoding='utf-8')
            # File name stem is the stock code.
            new_df['code'] = file.split('.')[0]
            df_list.append(new_df.copy())
        # HACK: bare except silently skips unreadable/malformed files —
        # this also swallows KeyboardInterrupt; consider narrowing to
        # (OSError, pd.errors.ParserError) and logging the skip.
        except:
            pass
from bs4 import BeautifulSoup

sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))

from utils.folder import make_folder
from utils.logger import set_logger

logger = set_logger('basic_finance_crawler')

# Directory layout: <base>/data/stock/folder/재무제표_20190331
# ("financial statements, 2019-03-31", folder name is Korean).
base_path = os.path.dirname(os.path.abspath('..'))
data_path = os.path.join(base_path, 'data')
stock_path = os.path.join(data_path, 'stock')
folder_path = os.path.join(stock_path, 'folder')
summary_path = os.path.join(folder_path, '재무제표_20190331')

# Create the whole chain up front so later writes cannot fail.
make_folder(base_path, data_path, stock_path, folder_path, summary_path)


def get_summary_finance(code):
    """
    Scrape the financial-highlight tables for one stock from kisline.com.

    Tables 4..7 of the page are concatenated column-wise, the second
    header level is promoted to the column names, and the columns are
    renamed via ``change_column_name``.

    NOTE(review): this fragment appears truncated — the ``try`` has no
    visible ``except`` clause and nothing is returned.

    :param code: stock code appended to the highlight-page URL.
    """
    url = 'http://media.kisline.com/highlight/mainHighlight.nice?nav=1&paper_stock=' + str(
        code)

    try:
        # pd.read_html returns every <table> on the page as a DataFrame.
        tables = pd.read_html(url)

        # Tables 4..7 hold the highlight figures; concat starts from None
        # (pd.concat treats the leading None as an empty frame).
        df = None
        for i in range(4, 4 + 4):
            df = pd.concat([df, tables[i]], axis=1)

        # Keep only the second level of the scraped MultiIndex header.
        df.columns = df.columns.get_level_values(1)
        change_column_name(df)
from pykrx import Krx
import pandas as pd

sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from utils.folder import make_folder
from utils.logger import set_logger


logger = set_logger('day_crawler')
# Directory layout: <base>/data/stock/folder/일별매매가격_190414
# ("daily trade prices, 2019-04-14", folder name is Korean).
base_path = os.path.dirname(os.path.abspath('..'))
data_path = os.path.join(base_path, 'data')
save_path = os.path.join(data_path, 'stock')
folder_path = os.path.join(save_path, 'folder')
day_path = os.path.join(folder_path, '일별매매가격_190414')

# Create the whole chain up front so later writes cannot fail.
make_folder(base_path, data_path, save_path, folder_path, day_path)
# Shared KRX client used by get_day_info and the __main__ block.
krx = Krx()


def get_day_info(code):
    """
    Download daily OHLCV for one stock and persist it as a csv file.

    Fetches the 2010-01-01 .. 2019-04-14 range from KRX and writes it to
    ``<day_path>/<code>.csv``, logging success afterwards.

    :param code: stock code to fetch and save.
    :return: None
    """
    ohlcv = krx.get_market_ohlcv("20100101", "20190414", code)
    csv_path = os.path.join(day_path, code + '.csv')
    ohlcv.to_csv(csv_path)
    logger.info("successfully saved " + str(code))


if __name__ == "__main__":
    # Enumerate every listed ticker to crawl.
    code_list = krx.get_tickers()
    # Debug subset — uncomment to crawl only these codes:
    # code_list = ['033320', '121800', '057680', '036090', '041190', '021080', '027830', '101140']
    logger.info("Succesfully get code list")

    # Shared multiprocessing progress counter ('i' = signed int).
    # NOTE(review): fragment appears truncated after this line.
    counter = Value('i', 0)
"""
모든 종목 코드를 가져오는 코드
"""
import os
import sys
import time

from pykrx import Krx
import pandas as pd

sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from utils.folder import make_folder


if __name__ == "__main__":
    # Fetch every listed ticker from KRX and persist the list as a csv.
    krx = Krx()
    code_list = krx.get_tickers()

    # Directory layout: <base>/data/stock/folder — the same scheme the
    # other crawler scripts in this project use.
    base_path = os.path.dirname(os.path.abspath('..'))
    data_path = os.path.join(base_path, 'data')
    stock_path = os.path.join(data_path, 'stock')
    folder_path = os.path.join(stock_path, 'folder')

    # Create the full chain in one variadic call, consistent with the
    # sibling scripts; previously only stock_path and folder_path were
    # created, which fails when base/data do not exist yet.
    make_folder(base_path, data_path, stock_path, folder_path)

    # One-column frame: each row is a ticker code.
    df = pd.DataFrame({'code': code_list})
    df.to_csv(os.path.join(folder_path, 'code_list.csv'), index=False)