Пример #1
0
    def submit(self):
        """Fetch data for the country typed into the input field and update the UI.

        Reads and clears the country input, downloads the data, and either
        shows the newest entry or an error label when the response cannot
        be parsed.
        """
        self.download_label.hide()
        country = self.input_country.text()
        self.input_country.setText('')

        try:
            response = get_data(country)
        except requests.exceptions.RequestException:
            print('Check your Internet connection!')
            response = None
        if response is not None:
            try:
                data = response.json()
                newest_data = data[-1]
            # KeyError alone was too narrow: response.json() raises a
            # JSONDecodeError (a ValueError subclass) on a non-JSON body,
            # and data[-1] raises IndexError when the result list is empty.
            except (KeyError, IndexError, ValueError):
                self.create_error_label()
                self.info_label.hide()
                self.delete_btn.hide()
                return
            else:
                title, date, message = output_message(newest_data)
                self.info_label.show()
                self.create_info_label(title, date, message)
                self.create_delete_btn()
                self.create_download_btn(data)
                self.error_label.setText('')
Пример #2
0
    def _parse_data_and_insert_db(self, table_name, data):
        """Parse a raw hq.sinajs.cn quote response and bulk-insert the rows.

        Args:
            table_name: destination MySQL table name.
            data: raw response text, split via the "hq_sinajs_cn_list"
                regular-expression key.

        Rows whose field count does not match the column list are logged
        and skipped; nothing is inserted when no valid rows remain.
        """
        ret_array = fetch_data.get_data(
            fetch_data.regular_split("hq_sinajs_cn_list", data))
        if not ret_array:
            # Fixed log typo: message said "hg_sinajs_cn_list" but the
            # regular-expression key used above is "hq_sinajs_cn_list".
            LOG_WARNING("hq_sinajs_cn_list regular %s empty data" % (data))
            return

        into_db_columns = ["share_id","share_name","today_open","yesterday_close","cur","today_high","today_low","compete_buy_price","compete_sale_price",
                "trade_num","trade_price","buy_1_num","buy_1_price","buy_2_num","buy_2_price","buy_3_num","buy_3_price","buy_4_num","buy_4_price",
                "buy_5_num","buy_5_price","sale_1_num","sale_1_price","sale_2_num","sale_2_price","sale_3_num","sale_3_price","sale_4_num","sale_4_price",
                "sale_5_num","sale_5_price","time_date_str","time_str","empty"]
        columns_count = len(into_db_columns)

        data_array = []
        for item in ret_array:
            if len(item) != columns_count:
                LOG_INFO("%s length is not match for column length %d" %(str(item), columns_count))
                continue
            data_array.append(item)

        if not data_array:
            return

        db_manager = mysql_manager()
        conn = db_manager.get_mysql_conn(self._conn_name)
        conn.insert_data(table_name, into_db_columns, data_array)
Пример #3
0
 def _get_all_share_ids(self):
     """Return distinct share ids present in today's trade_info table.

     NOTE(review): the query is restricted to a hard-coded sample of
     share ids — looks like a debugging filter; confirm before relying
     on this in production.
     """
     date_info = time.strftime('%Y_%m_%d')
     trade_table_name = "trade_info_%s" % (date_info)
     sample_ids = ["000001", "000010", "000301", "000601", "000901",
                   "002101", "002401", "002701", "300001", "300301",
                   "600301", "600601", "601801", "603001", "603601",
                   "603901"]
     query = fetch_data.select_db("daily_temp", trade_table_name,
                                  ["share_id"],
                                  {"share_id": [sample_ids, "in"]},
                                  pre="distinct")
     return fetch_data.get_data(query)
Пример #4
0
 def _get_all_share_ids(self):
     """Return distinct share ids recorded in today's trade_info table."""
     date_info = time.strftime('%Y_%m_%d')
     table = "trade_info_%s" % (date_info)
     query = fetch_data.select_db(self._daily_temp_conn_name, table,
                                  ["share_id"], {}, pre="distinct")
     return fetch_data.get_data(query)
Пример #5
0
 def _get_all_share_ids(self):
     """Return distinct share ids from today's trade_info table in daily_temp."""
     date_info = time.strftime('%Y_%m_%d')
     table = "trade_info_%s" % (date_info)
     query = fetch_data.select_db("daily_temp", table,
                                  ["share_id"], {}, pre="distinct")
     return fetch_data.get_data(query)
Пример #6
0
 def _get_daily_info(self, share_id, table_name, start_time, compare):
     """Fetch daily rows for `share_id` relative to `start_time`.

     `compare` is the SQL comparison operator applied to the time
     column; rows come back ordered by time descending.
     """
     conn_name = stock_conn_manager().get_conn_name(share_id)
     query = fetch_data.select_db(conn_name, table_name, self._table_keys,
                                  {"time": [start_time, compare]},
                                  extend="order by time desc")
     return fetch_data.get_data(query)
Пример #7
0
 def _get_all_share_ids_market_type(self):
     """Return (share_id, market_type) rows for all known share ids.

     The ids come back from _get_all_share_ids() as one-element rows and
     are flattened before being used in the IN-clause lookup.
     """
     # Flatten the one-column result rows into a plain id list
     # (comprehension replaces the manual append loop).
     ids = [row[0] for row in self._get_all_share_ids()]
     share_ids = fetch_data.get_data(
         fetch_data.select_db("stock_db", "share_base_info",
                              ["share_id", "market_type"],
                              {"share_id": [ids, "in"]}))
     return share_ids
Пример #8
0
 def _get_ex_dividend_last_time(self, share_id, table_name):
     """Return the most recent `time` value in `table_name`, or 0 when empty."""
     conn_name = stock_conn_manager().get_conn_name(share_id)
     rows = fetch_data.get_data(
         fetch_data.select_db(conn_name, table_name, ["time"], {},
                              extend="order by time desc limit 1"))
     if not rows:
         return 0
     return int(rows[0][0])
Пример #9
0
 def _get_start_time(self, share_id, table_name, ma_empty_start_time):
     """Return local midnight of the 180th stored day at or before
     `ma_empty_start_time`, or `ma_empty_start_time` itself when the
     table has no such rows."""
     conn_name = stock_conn_manager().get_conn_name(share_id)
     rows = fetch_data.get_data(
         fetch_data.select_db(conn_name, table_name, ["time"],
                              {"time": [ma_empty_start_time, "<="]},
                              extend="order by time desc limit 180"))
     if not rows:
         return ma_empty_start_time
     # Rows are newest-first, so the last row is the oldest of the 180.
     oldest = rows[-1][0]
     shanghai = pytz.timezone('Asia/Shanghai')
     day_str = datetime.datetime.fromtimestamp(oldest, shanghai).strftime("%Y%m%d")
     return time.mktime(time.strptime(day_str, '%Y%m%d'))
 def _parse_data_and_insert_db(self, table_name, data):
     """Split a comma-separated market-maker response and bulk-insert it.

     Args:
         table_name: destination MySQL table name.
         data: raw response text, split via the "string_comma_regular"
             regular-expression key.
     """
     data_array = fetch_data.get_data(
         fetch_data.regular_split("string_comma_regular", data))
     # Guard added for consistency with the sibling
     # _parse_data_and_insert_db: skip the DB round-trip entirely when
     # parsing produced no rows.
     if not data_array:
         return
     into_db_columns = [
         "market_type", "share_id", "share_name", "price", "up_percent",
         "market_maker_net_inflow", "market_maker_net_inflow_per",
         "huge_inflow", "huge_inflow_per", "large_inflow",
         "large_inflow_per", "medium_inflow", "medium_inflow_per",
         "small_inflow", "small_inflow_per", "time_str"
     ]
     db_manager = mysql_manager()
     conn = db_manager.get_mysql_conn(self._conn_name)
     conn.insert_data(table_name, into_db_columns, data_array)
    def _bak_market_maker_info(self, share_id):
        """Back up today's market-maker rows for a single share id."""
        date_info = time.strftime('%Y_%m_%d')
        table_name = "market_maker_%s" % (date_info)

        fields_array = [
            "time_str", "price", "up_percent", "market_maker_net_inflow",
            "market_maker_net_inflow_per", "huge_inflow", "huge_inflow_per",
            "large_inflow", "large_inflow_per", "medium_inflow",
            "medium_inflow_per", "small_inflow", "small_inflow_per"
        ]

        query = fetch_data.select_db(self._daily_temp_conn_name, table_name,
                                     fields_array,
                                     {"share_id": [share_id, "="]})
        daily_data = fetch_data.get_data(query)
        self._bak_single_market_maker_info(share_id, daily_data)
Пример #12
0
 def _get_close_volume(self, share_id, table_name, start_time):
     """Return time/close/volume columns since `start_time` as parallel lists."""
     conn_name = stock_conn_manager().get_conn_name(share_id)
     rows = fetch_data.get_data(
         fetch_data.select_db(conn_name, table_name,
                              ["time", "today_close", "volume"],
                              {"time": [start_time, ">="]}))
     times = []
     closes = []
     volumes = []
     # Each row carries exactly the three selected columns.
     for time_int, close, volume in rows:
         times.append(time_int)
         closes.append(close)
         volumes.append(volume)
     return {"time": times, "close": closes, "volume": volumes}
Пример #13
0
 def _filter_data(self, data):
     """Parse a quotes.money.163.com dump into usable rows.

     Skips the header row and any row whose second-to-last field is 0
     (presumably a day with no trading — confirm against the source).
     Each kept row becomes [epoch_seconds, date_str, <remaining fields
     minus the two columns that followed the date>].
     """
     data = data.replace("None", "0")
     rows = fetch_data.get_data(
         fetch_data.regular_split("quotes_money_163", data))
     useful_data = []
     # rows[1:] drops the header line (a no-op on an empty result).
     for row in rows[1:]:
         if int(row[-2]) == 0:
             continue
         date_str = row[0]
         epoch = time.mktime(time.strptime(date_str, '%Y-%m-%d'))
         # Equivalent to the original insert(0)/del-del dance:
         # [date, a, b, rest...] -> [epoch, date, rest...]
         useful_data.append([epoch, date_str] + row[3:])
     return useful_data
Пример #14
0
 def _get_start_time(self, share_id, table_name):
     """Return the epoch of the next weekday after the newest stored row.

     Falls back to 1990-01-01 when the table is empty.
     """
     conn_name = stock_conn_manager().get_conn_name(share_id)
     rows = fetch_data.get_data(
         fetch_data.select_db(conn_name, table_name, ["time"], {},
                              extend="order by time desc limit 1"))
     if not rows:
         return time.mktime(time.strptime("19900101", '%Y%m%d'))
     shanghai = pytz.timezone('Asia/Shanghai')
     day = datetime.datetime.fromtimestamp(rows[0][0], shanghai)
     # Advance at least one day, then keep stepping until Mon-Fri.
     day = day + datetime.timedelta(days=1)
     while day.weekday() >= 5:
         day = day + datetime.timedelta(days=1)
     return time.mktime(time.strptime(day.strftime("%Y%m%d"), '%Y%m%d'))
Пример #15
0
    def _get_average_info(self, share_id, table_name, period = 0):
        """Return recent price/volume rows plus moving-average columns.

        Each row is returned as a column-name -> value dict; the result
        list is ordered oldest-first. `period` > 0 limits the query to
        the most recent `period` rows.
        """
        conn_name = stock_conn_manager().get_conn_name(share_id)
        columns = ["time", "today_close", "today_high", "today_low",
                   "today_open", "yesterday_close", "pchg",
                   "turnover_rate", "volume", "turnover"]
        # One moving-average column per (type, window) combination,
        # e.g. "close_ma5" ... "volume_ma120".
        for ma_type in ("close_ma", "volume_ma"):
            for window in (5, 10, 20, 30, 60, 120):
                columns.append("%s%d" % (ma_type, window))

        extend_str = "order by time desc"
        if period > 0:
            extend_str = "%s limit %d" % (extend_str, period)

        data = fetch_data.get_data(
            fetch_data.select_db(conn_name, table_name, columns, {},
                                 extend=extend_str))
        # The query returns newest-first; building then reversing once
        # matches the original's insert(0, ...) ordering.
        infos = [{columns[i]: row[i] for i in range(len(columns))}
                 for row in data]
        infos.reverse()
        return infos
Пример #16
0
import glob
from PIL import Image
import os
import torch
from torch.utils.data import Dataset, DataLoader
from preprocess import preprocessor, DatalisttoDataset
from fetch_data import get_data
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as transforms
from collections import OrderedDict

# Training configuration.
batch_size = 100
num_epoch = 50
# Load target-domain samples; the semantics of num_ratio/mode are defined
# in the project's fetch_data module — see there for details.
data, label = get_data(10, num_ratio=10, domain="target", mode="choiced")
print("preprocess finished")
dataset = DatalisttoDataset(data, label, transform=None)
# 80/20 train/test split.
train_dataset, test_dataset = torch.utils.data.random_split(
    dataset, [int(len(dataset) * 0.8),
              len(dataset) - int(len(dataset) * 0.8)])
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=True)
#device = "cpu"
# Use the first GPU when available, otherwise fall back to CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")


class Net(nn.Module):
    """Convolutional network (definition appears truncated in this view)."""
    def __init__(self):
        super(Net, self).__init__()
        # 3-channel input -> 32 feature maps, 3x3 kernels.
        self.conv1 = nn.Conv2d(3, 32, 3)
        self.conv2 = nn.Conv2d(32, 32, 3)
Пример #17
0
def get_songs():
	"""Call mc.add_string() `num` times with freshly fetched data.

	NOTE(review): relies on module-level globals `num` and `mc` that are
	not visible in this file — confirm they are defined before calling.
	"""
	for i in range(num):
		mc.add_string(get_data())
Пример #18
0
 def _get_data(self, ids):
     """Fetch raw quote data for `ids` from hq.sinajs.cn.

     The response is GBK-encoded; it is re-encoded to UTF-8 before
     being returned.
     """
     url = "http://hq.sinajs.cn/list=" + ids
     raw = fetch_data.get_data(fetch_data.query_http(url))
     return raw.decode("gbk").encode("utf-8")
Пример #19
0
import glob
from PIL import Image
import os
import torch
from torch.utils.data import Dataset, DataLoader
from preprocess import preprocessor, DatalisttoDataset
from fetch_data import get_data
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as transforms
from collections import OrderedDict
from models import Net_google

# Training configuration.
batch_size = 100
num_epoch = 50
# Target-domain training data with augmentation enabled.
data, label = get_data(10, num_ratio = 10, domain = "target", mode = "choiced", data_aug = True)
print("preprocess finished")
dataset = DatalisttoDataset(data, label, transform = None)
#train_dataset, test_dataset = torch.utils.data.random_split(dataset, [int(len(dataset)*0.8), len(dataset) - int(len(dataset)*0.8)])
# The whole augmented set is used for training; evaluation uses a
# separately fetched, non-augmented "processed" set below.
train_dataset = dataset
data_te, label_te = get_data(10, num_ratio = 10, domain = "target", mode = "processed", data_aug = False)
test_dataset = DatalisttoDataset(data_te, label_te)
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=True)
#device = "cpu"
# Use the first GPU when available, otherwise fall back to CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# NOTE(review): `net` and `model` are two separate Net_google instances;
# only `net` is moved to the device here.
net = Net_google().to(device)
model = Net_google()

#Use this section when doing transfer learning
Пример #20
0
import os
import torch
from torch.utils.data import Dataset, DataLoader
from preprocess import DatalisttoDataset
import torch.nn as nn
import torch.nn.functional as F
import torchvision.transforms as transforms
from collections import OrderedDict
from models import Net_google
from fetch_data import get_data

# Training configuration.
batch_size = 100
num_epoch = 50
# Source-domain data, no augmentation.
data, label = get_data(10,
                       num_ratio=10,
                       domain="source",
                       mode="processed",
                       data_aug=False)
print("preprocess finished")
dataset = DatalisttoDataset(data, label, transform=None)
# 80/20 train/test split.
train_dataset, test_dataset = torch.utils.data.random_split(
    dataset, [int(len(dataset) * 0.8),
              len(dataset) - int(len(dataset) * 0.8)])
train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True)
test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=True)
device = "cpu"
#device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

# Restore previously trained weights from disk.
model = Net_google()
model.load_state_dict(torch.load("net.model"))
#checkpoint = torch.load("net.model")
Пример #21
0
 def _get_data(self, market_type, id, start_time, end_time):
     """Download historical daily data from quotes.money.163.com.

     `market_type` (numeric) and `id` are concatenated into the site's
     "code" query parameter; start/end bound the date range.
     """
     url_template = """http://quotes.money.163.com/service/chddata.html?code=%d%s&start=%s&end=%s&fields=TCLOSE;HIGH;LOW;TOPEN;LCLOSE;PCHG;TURNOVER;VOTURNOVER;VATURNOVER"""
     url = url_template % (market_type, id, start_time, end_time)
     return fetch_data.get_data(fetch_data.query_http(url))
Пример #22
0
def get_wards():
    """Return the result of fetch_data.get_data() called with no arguments.

    NOTE(review): nothing here is ward-specific — the result depends
    entirely on fetch_data.get_data's default behavior; confirm intent.
    """
    return fetch_data.get_data()
 def _get_data(self):
     """Download today's capital-flow (BalFlowMain) listing from dfcfw.com.

     Returns the raw HTTP response body.
     """
     date_info = time.strftime('%Y-%m-%d')
     # Fixed: the local was misspelled "url_fomart" and the result was
     # wrapped in a pointless builtin format() call (a no-op on a str).
     url_format = "http://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx/JS.aspx?type=ct&st=(BalFlowMain)&sr=-1&p=3&ps=%d&js=var%%20vDUaFOen={pages:(pc),date:%%22%s%%22,data:[(x)]}&token=894050c76af8597a853f5b408b759f5d&cmd=C._AB&sty=DCFFITA&rt=49430148"
     url = url_format % (3500, date_info)
     res = fetch_data.get_data(fetch_data.query_http(url))
     return res
Пример #24
0
 def _get_data(self):
     """Fetch the FFRank capital-flow listing (up to 3500 rows) from dfcfw.com."""
     url = r"http://nufm.dfcfw.com/EM_Finance2014NumericApplication/JS.aspx/JS.aspx?type=ct&st=(FFRank)&sr=1&p=1&ps=3500&js=var%20mozselQI={pages:(pc),data:[(x)]}&token=894050c76af8597a853f5b408b759f5d&cmd=C._AB&sty=DCFFITAM&rt=49461817"
     return fetch_data.get_data(fetch_data.query_http(url))