def get_year_of_data(client, station_list, year):
    '''
    Main function: sends calls to the NOAA cdo_api asking for all data within a given year for each of the stations listed.
    API calls return a pandas DataFrame, which is then sent to the raw_weather table in our MySQL database.
    '''
    startdate = datetime(year, 8, 1)
    enddate = datetime((year+1), 2, 1)

    # DataFrame of station ids to iterate over, as the cdo_api function requires
    frame = {'id': station_list}
    stations = pd.DataFrame(data=frame)

    # Create a SQLAlchemy engine from the existing connection, then pass it to pandas.to_sql
    db_engine = create_engine('mysql://', creator=connect)

    for _, station in stations.iterrows():
        station_data = client.get_data_by_station(
            datasetid='GHCND',
            stationid=station['id'],
            startdate=startdate,
            enddate=enddate,
            return_dataframe=True
            # include_station_meta=True
        )
        # Create new DataFrame limiting columns (we don't want ALL of the station data)
        columns = ['station', 'date', 'PRCP', 'SNOW']
        single = pd.DataFrame(station_data, columns=columns)
        # Send to database
        single.to_sql(con=db_engine, name='raw_weather', if_exists='append', index=False)
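
A minimal usage sketch follows; the Client constructor, token and station ids are illustrative assumptions, not part of the original module:

if __name__ == '__main__':
    client = Client('MY_NOAA_TOKEN')                         # token-based cdo_api-style client (assumed)
    stations = ['GHCND:USW00014837', 'GHCND:USW00094846']    # example GHCND station ids
    get_year_of_data(client, stations, 2018)                 # loads Aug 2018 - Feb 2019 into raw_weather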
Example #2
    def get_all(self):
        if self.last_is_complete:
            self.alldata.clear()
            start_Y = self.Sca_start_Y.get()
            start_M = self.Sca_start_M.get()
            start_D = self.Sca_start_D.get()
            start_time = datetime(start_Y, start_M, start_D)
            Least_time = datetime(1999, 11, 10)

            end_Y = self.Sca_end_Y.get()
            end_M = self.Sca_end_M.get()
            end_D = self.Sca_end_D.get()
            end_time = datetime(end_Y, end_M, end_D)
            Max_time = datetime(2016, 6, 8)

            if start_time > end_time or start_time < Least_time or end_time > Max_time:
                showinfo(message="time_error")
                return
            try:
                str_data_type = self.listbox2.get(self.listbox2.curselection())
            except TclError as e:
                showinfo(message="No choice! default value is the first")
                str_data_type = self.listbox2.get((1,))
            x = str(start_time).split(" ")
            y = str(end_time).split(" ")
            str_notice_msg = "#1 "+x[0]+" "+y[0]+" "+str_data_type
            self.tcpCliSock.send(str_notice_msg.encode("utf-8"))
            while True:
                str_res = self.recive1()
                if str_res[0:2] == "#6":
                    break
        else:
            showinfo(message="wait! last mission didn't complete")
Example #3
def displayHistory(name, goalType):
    today = datetime.today()
    todayDT = datetime(today.year, today.month, today.day)

    history = []
    with open('file/savinghistory.txt', 'r') as d_file:
        for dlist in d_file:
            fields = dlist.split(',')
            saveDate = fields[7]
            saveDateStr = saveDate.split('-')
            dd = int(saveDateStr[0])
            mm = int(saveDateStr[1])
            yy = int(saveDateStr[2])
            date = datetime(yy, mm, dd)
            if fields[0] == name and fields[4] == goalType and date <= todayDT:
                h = SavingHistory(fields[0], fields[1], fields[2], fields[3], fields[4],
                                  fields[5], fields[6], fields[7])
                history.append(h)
    return history
 def get_by_trimesters(self, year, trimester):
     min_date = datetime(year, min(trimester), 1)
     max_date = datetime(year, max(trimester) + 1, 1)
     DD = timedelta(days=1)
     max_date = max_date - DD
     return self.filter(date__gte=min_date,
                        date__lte=max_date).order_by('-date')
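
The window arithmetic above can be checked in isolation; a small sketch using plain datetime (no Django required):

from datetime import datetime, timedelta
year, trimester = 2021, (1, 2, 3)
min_date = datetime(year, min(trimester), 1)                           # 2021-01-01
max_date = datetime(year, max(trimester) + 1, 1) - timedelta(days=1)  # 2021-03-31, inclusive upper bound
print(min_date, max_date)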
def get_year_of_data(client, station_list, year):

    startdate = datetime(year, 8, 1)
    enddate = datetime((year + 1), 2, 1)

    # DataFrame of station ids to iterate over, as the cdo_api function requires
    frame = {'id': station_list}
    stations = pd.DataFrame(data=frame)

    # Create a SQLAlchemy engine from the existing connection, then pass it to pandas.to_sql
    db_engine = create_engine('mysql://', creator=connect)

    for _, station in stations.iterrows():
        station_data = client.get_data_by_station(datasetid='GHCND',
                                                  stationid=station['id'],
                                                  startdate=startdate,
                                                  enddate=enddate,
                                                  return_dataframe=True
                                                  # include_station_meta=True
                                                  )
        # Create new DataFrame limiting columns (we don't want ALL of the station data)
        columns = ['station', 'date', 'PRCP', 'SNOW']
        single = pd.DataFrame(station_data, columns=columns)
        # Send to database
        single.to_sql(con=db_engine, name='raw_weather', if_exists='append')
Example #6
    def test_update_employee(self):
        serialized_employee = fixtures.load("unactivated_user")
        employee = EmployeeProvider.deserialize(serialized_employee)
        employee = EmployeeProvider.update_with(employee, name="New Name")
        self.assertEqual(employee.name, "New Name")

        employee = EmployeeProvider.update_with(employee,
                                                employment_date=datetime(
                                                    2019, 1, 30))
        self.assertEqual(employee.employment_date, datetime(2019, 1, 30))
        self.assertEqual(employee.rate, RateCalculator.MAX_DAYS)

        employee = EmployeeProvider.update_with(employee, email="*****@*****.**")
        self.assertEqual(employee.email, "*****@*****.**")

        employee = EmployeeProvider.update_with(employee,
                                                password="******")
        self.assertEqual(employee.password, "new_password")

        employee = EmployeeProvider.activate(employee)
        self.assertEqual(employee.activated, True)

        employee = EmployeeProvider.deactivate(employee)
        self.assertEqual(employee.activated, False)

        self.assertEqual(employee.is_admin, False)

        employee = EmployeeProvider.set_balance_vac(employee, 10)
        self.assertEqual(employee.vacation, 10)
Example #7
def convert_str_to_time(time_str: str) -> datetime:
    # "N 分鐘" means "N minutes ago"
    if len(re.findall(r"\d+\s*分鐘", time_str)) > 0:
        minute = timedelta(minutes=int(re.findall(r"(\d+)\s*分鐘", time_str)[0]))
        return datetime.now() - minute

    # "N 小時" means "N hours ago"
    elif len(re.findall(r"\d+\s*小時", time_str)) > 0:
        hour = timedelta(hours=int(re.findall(r"(\d+)\s*小時", time_str)[0]))
        return datetime.now() - hour

    # "昨天 HH:MM" means "yesterday at HH:MM"
    elif "昨天" in time_str:
        no_in_str = re.findall(r"\d+", time_str)
        hour = int(no_in_str[0])
        minute = int(no_in_str[1])

        # Subtract a timedelta rather than decrementing .day, which would break on the 1st of a month
        yesterday = datetime.now() - timedelta(days=1)
        return datetime(yesterday.year, yesterday.month, yesterday.day, hour, minute, 0)

    # "M月D日 HH:MM" means "month/day at HH:MM" in the current year
    elif len(re.findall(r"\d+\s*月\d+\s*日", time_str)) > 0:
        no_in_str = re.findall(r"\d+", time_str)
        print(time_str)

        month = int(no_in_str[0])
        date = int(no_in_str[1])
        hour = int(no_in_str[2])
        minute = int(no_in_str[3])
        return datetime(datetime.now().year, month, date, hour, minute, 0)

    else:
        return None
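
Example inputs the branches above are meant to handle (results depend on the current time, so none are asserted):

# convert_str_to_time("5 分鐘")         -> now minus 5 minutes
# convert_str_to_time("3 小時")         -> now minus 3 hours
# convert_str_to_time("昨天 14:30")     -> yesterday at 14:30
# convert_str_to_time("3月15日 10:05")  -> 15 March of the current year, 10:05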
def time_mw_to_tiki(mw_time):
    date = datetime(year=int(mw_time[:4]),
                    month=int(mw_time[4:6]),
                    day=int(mw_time[6:8]),
                    hour=int(mw_time[8:10]),
                    minute=int(mw_time[10:12]),
                    second=int(mw_time[12:]))
    epoch = datetime(1970, 1, 1)
    return (date - epoch).total_seconds()
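
A quick check of the conversion: a 14-digit MediaWiki-style timestamp becomes seconds since the Unix epoch.

print(time_mw_to_tiki("20200101120000"))   # 1577880000.0 (2020-01-01 12:00:00)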
Example #9
    def test_do_nothing_if_delta_timestamps_within_expected_range(
            self, log_mock):
        event_store_mock = MagicMock(spec=EventStore)
        event_store_mock.find_last_events.return_value = datetime(
            2000, 1, 1, 12, 6, 33), datetime(2000, 1, 1, 12, 5, 32)
        sut_event_controller = EventController(event_store_mock, 60)

        sut_event_controller.trigger_alarm_event()

        self.assertFalse(log_mock.warn.called)
Example #10
def compare_date(first_date, second_date):
    """Returns '0', '1' or '-1' dependent on equality of parameters"""
    first = first_date.split('-')
    second = second_date.split('-')
    first = datetime(int(first[2]), int(first[1]), int(first[0]))
    second = datetime(int(second[2]), int(second[1]), int(second[0]))
    if first < second:
        return -1
    elif first == second:
        return 0
    return 1
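
The unpacking above implies DD-MM-YYYY input; for example:

print(compare_date("01-05-2020", "02-05-2020"))   # -1 (first date is earlier)
print(compare_date("02-05-2020", "02-05-2020"))   #  0
print(compare_date("03-05-2020", "02-05-2020"))   #  1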
Example #11
    def test_log_warning_if_events_too_close_together(self, log_mock):
        event_store_mock = MagicMock(spec=EventStore)
        event_store_mock.find_last_events.return_value = datetime(
            2000, 1, 1, 12, 6, 30), datetime(2000, 1, 1, 12, 5, 32)
        sut_event_controller = EventController(event_store_mock, 60)

        sut_event_controller.trigger_alarm_event()

        self.assertTrue(log_mock.warn.called)
        last_call_args = log_mock.warn.call_args  # Returns arguments of last call
        self.assertIn(
            'out of heartbeat range',
            last_call_args[0][0])  # last_call_args = tuple containing tuple
Example #12
    def test_deserialize_employee(self):
        serialized_employee = fixtures.load("unactivated_user")
        employee = EmployeeProvider.deserialize(serialized_employee)

        self.assertIsNotNone(employee)
        self.assertEqual(employee.id, 2)
        self.assertEqual(employee.name, 'Unactivated User')
        self.assertEqual(employee.password, 'user')
        self.assertEqual(employee.email, '*****@*****.**')
        self.assertEqual(datetime(2019, 1, 1), employee.employment_date)
        self.assertEqual(datetime(2019, 1, 1), employee.acceptance_date)
        self.assertEqual(employee.vacation, 1.0)
        self.assertEqual(employee.activated, False)
        self.assertEqual(employee.is_admin, False)
Example #13
 def test_serialize_employee(self):
     employee = fixtures.load_instance("unactivated_user", Employee)
     employee.employment_date = datetime(2019, 1, 1)
     employee.acceptance_date = datetime(2019, 1, 1)
     serialized_employee = EmployeeProvider.serialize(employee)
     self.assertIsNotNone(serialized_employee)
     self.assertEqual(serialized_employee['id'], 2)
     self.assertEqual(serialized_employee['name'], 'Unactivated User')
     self.assertEqual(serialized_employee['email'], '*****@*****.**')
     self.assertEqual("2019.01.01", serialized_employee['employment_date'])
     self.assertEqual("2019.01.01", serialized_employee['acceptance_date'])
     self.assertEqual(serialized_employee['vacation'], 1.0)
     self.assertEqual(serialized_employee['activated'], False)
     self.assertEqual(serialized_employee['is_admin'], False)
def ooxmlDateToYMD(date):
    # OOXML stores the date as a serial number of days since 1899-12-31.
    # Note: Excel's 1900 leap-year bug (it treats 1900 as a leap year) can shift the result by one day.
    nullDate = datetime(1899, 12, 31)
    newdate = nullDate + timedelta(int(date))
    date_str = newdate.strftime("%d/%m/%Y")
    return date_str
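
A sanity check of the mechanics relative to the 1899-12-31 base (serial semantics may still differ by one day for later dates, per the leap-year note above):

print(ooxmlDateToYMD(1))   # 01/01/1900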
Example #15
def _parse_date(string_date):
    parsed_date = string_date.split('/')
    month = int(parsed_date[0])
    day = int(parsed_date[1])
    year = int(parsed_date[2])
    date = datetime(year, month, day)
    return date
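
The split order means dates are read as MM/DD/YYYY:

print(_parse_date("7/4/2021"))   # 2021-07-04 00:00:00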
def start_reminder(year, month, day, hour, minute, seconds):
    #print(year,month,day,hour,minute,seconds)
    year = int(year)
    month = int(month)
    day = int(day)
    hour = int(hour)
    minute = int(minute)
    seconds = int(seconds)
    current_datetime = dt.datetime.today()
    reminder_datetime = dt.datetime(year, month, day, hour, minute, seconds, 0)
    time_remaining = reminder_datetime - current_datetime
    messagebox.showinfo("done",
                        "time remaining to reminder\n" + str(time_remaining))
    #print(current_datetime,reminder_datetime)
    #print(time_remaining.total_seconds())
    tp = topic.get()
    print(tp)
    mydb = mysql.connector.connect(host="localhost",
                                   user="******",
                                   passwd="1234",
                                   database="reminder_application_database")
    mycursor = mydb.cursor()
    # Use a parameterized query rather than string concatenation (avoids SQL injection)
    mycursor.execute("select id from reminders where topic=%s", (str(tp),))
    threade_index = mycursor.fetchone()

    # Pass the callable and its arguments to Thread; calling mc.run(...) inline would
    # run it synchronously instead of on the background thread
    t = threading.Thread(target=mc.run,
                         args=(threade_index, abs(time_remaining.total_seconds())))
    t.start()
Example #17
def filename2time(folder, filename):
    if filename.find('.ts') == -1:
        return None

    t = datetime(year=int(folder[0:4]), month=int(folder[4:6]), day=int(folder[6:8]),
                 hour=int(filename[0:2]), minute=int(filename[2:4]), second=int(filename[4:6]))

    return t
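
Example, assuming YYYYMMDD folder names and HHMMSS.ts segment names as above:

print(filename2time("20200101", "123456.ts"))    # 2020-01-01 12:34:56
print(filename2time("20200101", "readme.txt"))   # None (not a .ts file)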
Example #18
def _win_set_time(time_tuple):

    print('setting Windows system time')
    import win32api
    dayOfWeek = datetime(*time_tuple).isocalendar()[2]
    t = time_tuple[:2] + (dayOfWeek, ) + time_tuple[2:]
    win32api.SetSystemTime(*t)
Example #19
def folder2time(folder):
    if len(folder) != 8:
        return None

    t = datetime(year=int(folder[0:4]), month=int(folder[4:6]), day=int(folder[6:8]),
                 hour=0, minute=0)

    return t
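
Example:

print(folder2time("20200101"))   # 2020-01-01 00:00:00
print(folder2time("202001"))     # None (only 8-character YYYYMMDD names are accepted)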
Example #20
def from_string_to_datetime(string_with_date):
    # Input is expected as "DD.MM"; the year is fixed to 2020
    date_parts = ['2020']
    date_parts += string_with_date.split('.')
    int_parts = []
    for i in date_parts:
        int_parts.append(int(i))
    datetime_obj = datetime(int_parts[0], int_parts[2], int_parts[1])
    return datetime_obj
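
Example:

print(from_string_to_datetime("15.07"))   # 2020-07-15 00:00:00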
Example #21
 def __init__(self):
     self._firstName = "Matt"
     self._lastName = "Gerling"
     self._address = "123 Main Street, Urban, Iowa"
     self._phone_number = "515.555.5555 "
     self._salaried = True
     self._start_date = datetime(2019, 1, 1)
     self._salary = 40000
    def test_do_nothing_if_delta_timestamps_within_expected_range(self, log_mock):
        event_store_mock = MagicMock(spec=EventStore)
        event_store_mock.find_last_events.return_value = datetime(2000, 1, 1, 12, 15, 33), datetime(2000, 1, 1, 12, 5, 31)
        sut_event_controller = EventController(event_store_mock, 600, self.mailer, self.sms_sender)

        sut_event_controller.trigger_alarm_event()

        self.assertFalse(log_mock.warning.called)
Example #23
def time_mw_to_jam(mw_time):
    date = datetime(year=int(mw_time[:4]),
                    month=int(mw_time[4:6]),
                    day=int(mw_time[6:8]),
                    hour=int(mw_time[8:10]),
                    minute=int(mw_time[10:12]),
                    second=int(mw_time[12:]))
    return date
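
Same fixed-width slicing as time_mw_to_tiki above, but returning the datetime itself:

print(time_mw_to_jam("20200101120000"))   # 2020-01-01 12:00:00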
    def test_storing_new_event_with_current_utc_timestamp(self, datetime_mock):
        mocked_utc_time = datetime(2000, 1, 1)
        datetime_mock.utcnow.return_value = mocked_utc_time
        event_store_mock = MagicMock(spec=EventStore)
        sut_event_controller = EventController(event_store_mock, 600, self.mailer, self.sms_sender)

        sut_event_controller.trigger_alarm_event()

        event_store_mock.store_event.assert_called_once_with(mocked_utc_time)
    def test_do_nothing_if_less_than_two_events_returned(self, log_mock):
        event_store_mock = MagicMock(spec=EventStore)
        event_store_mock.find_last_events.return_value = datetime(2000, 1, 1),
        sut_event_controller = EventController(event_store_mock, 600, self.mailer, self.sms_sender)

        sut_event_controller.trigger_alarm_event()

        last_call_args = log_mock.info.call_args  # Returns arguments of last call
        self.assertIn('Not enough events', last_call_args[0][0])  # last_call_args = tuple containing tuple
Example #26
    def test_storing_new_event_with_current_utc_timestamp(self, datetime_mock):
        mocked_utc_time = datetime(2000, 1, 1)
        datetime_mock.utcnow.return_value = mocked_utc_time
        event_store_mock = MagicMock(spec=EventStore)
        sut_event_controller = EventController(event_store_mock, 60)

        sut_event_controller.trigger_alarm_event()

        event_store_mock.store_event.assert_called_once_with(mocked_utc_time)
Example #27
 def get_date(self) -> Optional[datetime]:  # requires: from typing import Optional
     if self.is_set():
         return datetime(year=self.year,
                         month=self.month,
                         day=self.date,
                         hour=self.hour,
                         minute=self.minute,
                         second=self.second)
     return None
Example #28
def _linux_set_time(time_tuple):
    import subprocess
    import shlex

    time_string = datetime(*time_tuple).isoformat()

    subprocess.call(shlex.split("timedatectl set-ntp false"))  # May be necessary
    subprocess.call(shlex.split("sudo date -s '%s'" % time_string))
    subprocess.call(shlex.split("sudo hwclock -w"))
    def _customer_cancel(self):
        # Current order state: decide whether it can simply be cancelled
        # Check whether the booked time still falls inside the free-cancellation window
        if self.hotelPackageOrder.process_state == HotelPackageOrder.CUSTOMER_REQUIRE:  # seller has not accepted yet: refund in full
            self.hotelPackageOrder.process_state = HotelPackageOrder.CUSTOMER_CANCEL  # mark as cancelled by the customer
            self.customer.add_customer_points(self.hotelPackageOrder.amount)
            # warn: full refund, generate a bill
        elif self.hotelPackageOrder.process_state == HotelPackageOrder.SELLER_ACCEPT:  # seller has already accepted

            # warn: seller has already accepted
            hotelPackageOrderItems = self.hotelPackageOrder.items.select_subclasses()
            checkin_time = self.hotelPackageOrder.checkin_time

            # 18:00 on the check-in day (the default check-in time)
            deduct_all_point_time = datetime(checkin_time.year, checkin_time.month, checkin_time.day,
                                             hour=18)  # later than this, all points are deducted

            # 14:00 on the day before check-in
            deduct_half_point_time = deduct_all_point_time - timedelta(hours=28)  # after 14:00 the day before check-in, half the points are deducted

            self.hotelPackageOrder.process_state = HotelPackageOrder.CUSTOMER_BACK

            if self.cur_datetime < deduct_half_point_time:  # the point-deduction window has not started yet
                self.hotelPackageOrder.process_state = HotelPackageOrder.CUSTOMER_CANCEL
                self.customer.add_customer_points(self.hotelPackageOrder.amount)
                # warn: likewise treated as a customer cancellation
            else:

                now = datetime.now()
                if self.cur_datetime < deduct_all_point_time:  # deduct half
                    need_deduct_points = hotelPackageOrderItems[0].point * 0.5

                    if len(hotelPackageOrderItems) > 1 and now.hour >= 14:
                        need_deduct_points += hotelPackageOrderItems[1].point * 0.5

                    need_back_to_customer_point = int(self.hotelPackageOrder.amount - need_deduct_points)
                    orderBill = OrderBill.create_for_roomOrder_cancel(roomOrder=self.hotelPackageOrder,
                                                                      refund_amount=need_back_to_customer_point)
                    self.orderBill = orderBill
                else:  # deduct the full day's points; 75% goes to the agent
                    need_deduct_points = hotelPackageOrderItems[0].point

                    if len(hotelPackageOrderItems) > 1 and now.hour >= 14:
                        need_deduct_points += hotelPackageOrderItems[1].point * 0.5
                    need_back_to_customer_point = int(self.hotelPackageOrder.amount - need_deduct_points)

                    orderBill = OrderBill.create_for_roomOrder_cancel(roomOrder=self.hotelPackageOrder,
                                                                      refund_amount=need_back_to_customer_point)

                    self.orderBill = orderBill
                self.hotelPackageOrder.success = True
                self.hotelPackageOrder.settled = True
        # TODO: if ...
        self.hotelPackageOrder.closed = True
        self.hotelPackageOrder.success = True
        self.hotelPackageOrder.settled = True
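
To make the two cut-offs above concrete (illustrative dates, not from the original):

# check-in 2021-05-10  ->  deduct_all_point_time  = 2021-05-10 18:00
#                          deduct_half_point_time = 18:00 - 28h = 2021-05-09 14:00
# cancel before 05-09 14:00: full refund; before 05-10 18:00: half the points; later: the full day's points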
    def test_log_warning_if_events_too_close_together(self, log_mock):
        event_store_mock = MagicMock(spec=EventStore)
        event_store_mock.find_last_events.return_value = datetime(2000, 1, 1, 12, 6, 30), datetime(2000, 1, 1, 12, 5, 32)
        sut_event_controller = EventController(event_store_mock, 600, self.mailer, self.sms_sender)

        sut_event_controller.trigger_alarm_event()

        self.assertTrue(log_mock.warning.called)
        last_call_args = log_mock.warning.call_args  # Returns arguments of last call
        self.assertIn('out of heartbeat range', last_call_args[0][0])  # last_call_args = tuple containing tuple
 def set_time(self, hours: int, minutes: int, seconds: int):
     self.current_time = datetime(100,
                                  1,
                                  1,
                                  hour=hours,
                                  minute=minutes,
                                  second=seconds)
     self.hours = hours
     self.minutes = minutes
     self.seconds = seconds
def linuxsetdate(date):
    import subprocess
    import shlex

    dates = datetime(*date).isoformat()

    subprocess.call(shlex.split("timedatectl set-ntp false"))
    subprocess.call(shlex.split("sudo date -s '%s'" % dates))
    subprocess.call(shlex.split("sudo hwclock -w"))
    return
Example #33
def half_birthday(year, month, day):
    """
    :param year: year that includes last birthday
    :param month: month of birthday
    :param day: birthday day
    :return: half birthday for most recent birthday year
    """
    bday = datetime(year, month, day)
    half_bday = bday + timedelta(days=184)
    return str(half_bday)
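
Example:

print(half_birthday(2020, 1, 15))   # 2020-07-17 00:00:00 (184 days after 15 Jan 2020)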
Example #34
    def __loadFromFile(self, fileName):
        with open(fileName) as file:
            for line in file:
                line = line.strip('\n')
                arguments = line.split(',')

                if self.__validator == MovieValidator:
                    super().save(
                        Movie(int(arguments[0]), arguments[1], arguments[2],
                              arguments[3]))

                if self.__validator == ClientValidator:
                    super().save(Client(int(arguments[0]), arguments[1]))

                if self.__validator == RentalValidator:
                    rentalDate = arguments[3].split(".")
                    dueDate = arguments[4].split(".")

                    if arguments[5] == "-":
                        super().save(
                            Rental(
                                int(arguments[0]), int(arguments[1]),
                                int(arguments[2]),
                                datetime(int(rentalDate[0]),
                                         int(rentalDate[1]),
                                         int(rentalDate[2])),
                                datetime(int(dueDate[0]), int(dueDate[1]),
                                         int(dueDate[2])), None))
                    else:
                        returnedDate = arguments[5].split(".")
                        super().save(
                            Rental(
                                int(arguments[0]), int(arguments[1]),
                                int(arguments[2]),
                                datetime(int(rentalDate[0]),
                                         int(rentalDate[1]),
                                         int(rentalDate[2])),
                                datetime(int(dueDate[0]), int(dueDate[1]),
                                         int(dueDate[2])),
                                datetime(int(returnedDate[0]),
                                         int(returnedDate[1]),
                                         int(returnedDate[2]))))
Example #35
    def test_do_nothing_if_less_than_two_events_returned(self, log_mock):
        event_store_mock = MagicMock(spec=EventStore)
        event_store_mock.find_last_events.return_value = datetime(2000, 1, 1),
        sut_event_controller = EventController(event_store_mock, 60)

        sut_event_controller.trigger_alarm_event()

        last_call_args = log_mock.info.call_args  # Returns arguments of last call
        self.assertIn(
            'Not enough events',
            last_call_args[0][0])  # last_call_args = tuple containing tuple
 def _is_freetime_for_cancel(self):
     if self.hotelPackageOrder.process_state == HotelPackageOrder.CUSTOMER_REQUIRE:  # seller has not accepted the order yet
         return True
     max_later_hours = app_settings.hotelOrder_free_cancel_hours
     checkin_time = self.hotelPackageOrder.checkin_time
     checkin_time = datetime(checkin_time.year, checkin_time.month, checkin_time.day, hour=14)
     delay_date = checkin_time - timedelta(hours=max_later_hours)
     if self.cur_datetime < delay_date:  # the point-deduction window has not started yet
         return True
     else:
         return False
def readCSV(name):
    # Despite the name, this reads an Excel workbook via xlrd
    data = []

    TeamPointWorkbook = xlrd.open_workbook(name)
    sheet = TeamPointWorkbook.sheet_by_index(0)

    for row in range(0, sheet.nrows):
        dateT = xlrd.xldate_as_tuple(sheet.cell_value(row, 1), TeamPointWorkbook.datemode)
        date = datetime(*dateT)
        date = date.strftime("%Y/%m/%d")
        data.append([sheet.cell_value(row, 0), date, sheet.cell_value(row, 2)])

    return data
def getStockData(updateTickerStats=False):
    
    start_time = time.time()
    dBase = stockConfig.dBase

    # cut up to most recent date, accounting for weekends
    maxDate = datetime.now()
    if maxDate.isoweekday() in range(2,7):
        maxDate = np.datetime64(maxDate).astype('datetime64[D]')
    elif maxDate.isoweekday() ==1:
        maxDate = np.datetime64(maxDate).astype('datetime64[D]')-2
    elif maxDate.isoweekday() ==7:
        maxDate = np.datetime64(maxDate).astype('datetime64[D]')-1

    
    tme = datetime.now()
    
    ''' As of 01/11/2017, yahoo API discontinued. Can only trade:
        - US, UK, Canadian and German stocks
        - UK ETFs
        - UK indices?
        Thus keep only these from the relevant exchanges, and include only the 3-letter currencies
    '''

    exchangeMap = dBaseAction(dBase, ''' SELECT * from exchangeMap ''')[0]
    tickersBase = dBaseAction(dBase, ''' SELECT * from tickersNew ''')[0]
    testYahooDaily = readExclude(dBase = dBase, table = 'yahooDailyExclude')
    
    tickers = tickersBase[['category', 'exchange', 'name', 'ticker', 'type']].drop_duplicates().reset_index(drop = True)
    tickers = tickers[tickers.type.isin(['stock', 'etf', 'index', 'mutual_fund','future','warrant','bond','currency'])]
    tickers = tickers.drop_duplicates().reset_index(drop = True)
    tickers = tickers[['exchange', 'ticker', 'type']].drop_duplicates().reset_index(drop = True)

    ccys = tickers[ tickers.ticker.str.contains('=X')].reset_index(drop = True)
    ccys = ccys[ ccys.ticker.str.len() == 5].reset_index(drop = True)
    tickers = pd.merge(tickers, exchangeMap[['yahoo_exchange','Country']], left_on = 'exchange', right_on = 'yahoo_exchange')
    tickers = tickers[ (tickers.type.isin(['stock','index','etf']) & (tickers.Country == 'UK')) | 
                      (tickers.type.isin(['stock','etf']) & (tickers.Country == 'USA')) |
                      (tickers.type.isin(['stock']) & (tickers.Country == 'Germany')) |
                      (tickers.type.isin(['stock']) & (tickers.Country == 'Canada'))]
    filterExchange = stockConfig.filterExchange
    
    tickers = tickers[ tickers.exchange.isin(filterExchange)].reset_index(drop = True)
    tickers = tickers[ ~tickers.ticker.isin(testYahooDaily.tolist())].reset_index(drop = True)
    tickers = pd.concat([tickers, ccys]).reset_index(drop = True)
    tickers = tickers.drop_duplicates(subset='ticker')
    
    if updateTickerStats:
        # Update all market caps once a week. During the week, filter on appropriate market caps and those tickers
        #    that require reporting as "changes", and rerun to get recommendations, earnings dates, etc. every day
        data = getTickerYahooStats(tickers.ticker.values)
        writeDBv2(dBase = dBase, name = 'yahooTickerStats', data = data, 
                        createArgs= '("' + '","'.join(data.columns.tolist()) + '", PRIMARY KEY("ticker"), UNIQUE("ticker"))',
                        indexColName = 'ticker',
                        args ='("' + '","'.join(data.columns.tolist()) + '")' )

    # get unique dates so all pivots have the same index
    uniqueDates = dBaseAction(dBase, ''' select distinct timestamp from yahooDaily ''')[0]
    dates = np.unique(uniqueDates).astype('datetime64[D]')
    dates.sort()
    
    dateTo = datetime.now()
    # get all data for exchanges that we can actually trade, clean and update everyday
    dateFrom = datetime(1996,1,1)
    yahooDailyWrite = []
    notAvailable = []
    track=0
    for i in range(len(tickers.ticker)):   
        time.sleep(random.random()*1.5)
        #-----------------
        # YAHOO daily data
        #-----------------
        dataNew, track = downloadYahoo(ticker = tickers.ticker[i], dailyDateRange = np.array([dateFrom, dateTo]),track = track, adjust = False)
        # Now we have the full set of past time series; if the available series stops more than 10 days ago, we ignore the ticker
        if (dataNew is None):
            notAvailable.append(tickers.ticker[i])
        elif (dataNew.timestamp.values.astype('datetime64[D]').max() < (np.datetime64(datetime.now()) - np.timedelta64(10,'D'))):
            notAvailable.append(tickers.ticker[i])
        else:
            print( ' '.join([str(i),'out of',str(len(tickers.ticker)),'-',tickers.ticker[i]]) + ': - Yahoo Daily - OK')
            dataNew['type'] = tickers.type[i]
            dataNew = dataNew.sort_values('timestamp')
            dataNew['ID'] = dataNew.timestamp + '_' + dataNew.ticker
            yahooDailyWrite.append(dataNew)
        
        # once we get to 4000, save and take a break to go easy on the servers
        if ((i%4000 == 0) & (i > 0)) | (i == (len(tickers.ticker)-1)):
            dataDrop = pd.concat(yahooDailyWrite).reset_index(drop = True)
            writeDBv2(dBase = dBase, name = 'yahooDaily', data = dataDrop, 
                        createArgs= '("' + '","'.join(dataDrop.columns.tolist()) + '", PRIMARY KEY("ID"), UNIQUE("ID"))',
                        indexColName = 'ticker',
                        args ='("' + '","'.join(dataDrop.columns.tolist()) + '")' )
            
            # run cleaning algorithm and update yahooDailyClean
            dataPivot = dataDrop.pivot_table(index = 'timestamp', columns = 'ticker', values = 'adj_close', aggfunc = sum)
            dataPivot.index = dataPivot.index.values.astype('datetime64[D]')       
            # cleanData
            dataClean = DataOps(timeSeriesData = dataPivot)
            ## must be less than today's date; we don't want NaNs filtering in at the latest date for tickers that haven't published "today" yet
            dataClean,_ = dataClean.cleanseTimeSeries(cutMissingDates = True,dates = dates, cores = 4,
                                                        maxDate = maxDate)
            
            if dataClean.shape[1] == 0:
                break

            dataClean.reset_index(inplace = True)
            dataClean = dataClean.rename(columns = {'index':'timestamp'})
            dataClean = pd.melt(dataClean, id_vars = ['timestamp'])
            dataClean = dataClean.rename(columns = {'value':'adj_close'})
            
            dataClean['ID'] = dataClean.timestamp.astype(str) + '_' + dataClean.ticker
            dataClean = pd.merge(dataClean, tickers[['ticker','type']].drop_duplicates(), on = ['ticker']).reset_index(drop = True)
            dataClean.timestamp = dataClean.timestamp.astype(str)
            dataClean = dataClean.replace({np.nan:'N/A',np.inf:'N/A','inf':'N/A', '-inf':'N/A'},regex=True)
            
            # write clean data to its own table so can call from other functions
            writeDBv2(dBase = dBase, name = 'yahooDailyClean', data = dataClean, 
                        createArgs= '("' + '","'.join(dataClean.columns.tolist()) + '", PRIMARY KEY("ID"), UNIQUE("ID"))',
                        indexColName = 'ticker', 
                        args ='("' + '","'.join(dataClean.columns.tolist()) + '")' )
            
            # add tickers that don't have data to a list we can import and ignore
            writeDBv2(dBase = dBase, name = 'yahooDailyExclude', data =  pd.DataFrame(notAvailable, columns = ['ticker']).drop_duplicates().reset_index(drop=True), 
                      createArgs= '(ticker, PRIMARY KEY(ticker), UNIQUE(ticker))',
                      args ='(ticker)' )
            
            yahooDailyWrite = []
            notAvailable = []
            time.sleep(60*3)
 
    print(time.time() - start_time)
    
    # run momentum
    momentum()
Example #39
from other_helpers import ListEnum
from datetime import datetime
from collections import OrderedDict

MIN_DATE = datetime(year=2015, month=1, day=1)
MAX_DATE = datetime(year=2100, month=12, day=31)

#
#
#
LOG_LEVEL_LIST = ListEnum(
    [  # Standard log levels
        'LOG', 'OPERATING_SYSTEM', 'FATAL', 'EXCEPTION_TRACK', 'LEAK', 'WARNING', 'STATISTIC', 'ACTION', 'TRACE',
        'DUMP',
        'MESSAGES', 'UNKNOWN',
        # Map log levels
        'SYST HIGH', 'SYST MEDIUM', 'SYST LOW', 'SYST', 'DEBUG'])  # Mapgen also has a LOG level, we don't duplicate it
ErrorLogLevels = 'OPERATING_SYSTEM|FATAL|EXCEPTION_TRACK|LEAK|STATISTIC|SYST HIGH|SYST MEDIUM'

#
# Enumerates the columns manipulated by the log parser for each session or log line
#
Headers = ListEnum(
    ['file', 'date', 'time', 'type', 'session', 'user', 'application', 'machine', 'group', 'has_crashed', 'category', 'measure', 'level', 'module', 'message',
     'context'])
IndexedHeaders = {x: i + 1 for i, x in enumerate(Headers)}

#
# Enumerates the row type when saving/loading to/from a CSV file
#
RowTypes = ListEnum(['error', 'file', 'session', 'line'])
def parseXmlFiles(path, show=True, showMissing=True):
    """Sparsa vse xml file ppt-ja na poti 'path', ki imajo ime enako
        'dan.mesec.leto.xml' in so podani z 'leta', 'meseci' in 'dnevi'."""
    
    print('Parsing scrap ...')
    
    result = {}
    
    g = open('test_scrap', "w")

    files = listdir(path)
    for file in files:
        print(file)
        
        if not file.endswith('.xml'):
            continue
        
        finish = False
        
        fname_arr = file.split(sep='.')
        day = int(fname_arr[0])
        month = int(fname_arr[1])
        year = int(fname_arr[2])
        
        try:
            tekstovnaPolja = {}
            with open(path + file, 'r') as f:
                i = 0
                data = ""
                for line in f.readlines():
                    i += 1
                    data = line
                    if i == 3:
                        break  ## we only want the 3rd line
                data = data.split("a:p>")

                ## every second element will be inside the tag
                tagOpen = False
                stPolja = 0
                prejsnja = False
                for polje in data:
                    if finish: break
                    if not tagOpen:
                        tagOpen = True
                        continue
                    tagOpen = False
                    toPolje = []
                    if prejsnja:
                        toPolje.append(prejsnja + " ")
                    pol = polje.split("a:t>")
                    if len(pol) < 2:
                        continue
                        ## no text, so this is just a redundant XML construct
                    odprtaZnackaT = False
                    for delec in pol:
                        if not odprtaZnackaT:
                            odprtaZnackaT = True
                            continue
                        odprtaZnackaT = False
                        toPolje.append(delec[:-2])  ## strip the trailing "</"
                    toPolje = "".join(toPolje)
                    if "<" in toPolje:
                        continue
                    if ("Mastertextformat bearbeiten" in toPolje
                        or "Mastertitelformat bearbeiten" in toPolje
                        or ("1" == toPolje and stPolja > 45)):
                        finish = True
                        continue
                    if prejsnja:
                        prejsnja = False
                    elif " /" in toPolje:
                        prejsnja = toPolje
                        continue
                    stPolja += 1

                    tekstovnaPolja[stPolja] = toPolje
                    
                if show:
                    print("Dolzina fila {0}: {1}".format(
                        file,
                        len(tekstovnaPolja)))
                #tekstovnaPolja["datum"] = [day, month, year]
                result[datetime(year, month, day).strftime('%d.%m.%Y')] = tekstovnaPolja
                g.write(str(tekstovnaPolja))
                g.write("\n")
        except IOError:
            print('Exception while opening file: ' + file)

    g.close()
    
    # the ID of our machine is 61282649 KM 1000/1
    offset = 8
    result1 = {}
    for date_str in result:
        result1[date_str] = {}
        fields = result[date_str]
        for shift_num in range(3):
            product = fields[offset + shift_num]
            shift_id = shift_num + 1
            
            if '/' in product:
                # we have two products
                products = product.split(' / ')
                # check if the products appear in the description
                
                shift_start = get_shift_start(shift_id)
                shift_end = get_shift_end(shift_id)
                
                if shift_end == get_shift_end(3):
                    shift_end += 24
                
                interval = float(shift_end - shift_start) / len(products)
                
                out = []
                for i, product in enumerate(products):
                    out.append({
                        'product': product,
                        'start': (shift_start + i*interval) % 24,
                        'end': (shift_start + (i+1)*interval) % 24
                    })
                result1[date_str][shift_id] = out
            else:
                result1[date_str][shift_id] = [{
                    'product': product,
                    'start': get_shift_start(shift_id),
                    'end': get_shift_end(shift_id)                           
                }]
            
    
    return result1
 def get_by_trimesters(self, year, trimester):
     min_date = datetime(year, min(trimester), 1)
     max_date = datetime(year, max(trimester)+1, 1)
     DD = timedelta(days=1)
     max_date = max_date - DD
     return self.filter(date__gte=min_date, date__lte=max_date).order_by('-date')
def initial_data(apps, schema_editor):
  TimeSlotTier = apps.get_model('codepot', 'TimeSlotTier')

  TimeSlotTier.objects.create(
    id='wGSj2UozkT',
    date_from=datetime(2015, 8, 29, 9, 15, 0),
    date_to=datetime(2015, 8, 29, 12, 30, 0),
    day=TimeSlotTierDayName.FIRST.value
  )

  TimeSlotTier.objects.create(
    id='6DNs2lvvZH',
    date_from=datetime(2015, 8, 29, 12, 45, 0),
    date_to=datetime(2015, 8, 29, 13, 45, 0),
    day=TimeSlotTierDayName.FIRST.value
  )

  TimeSlotTier.objects.create(
    id='XurOSgWLtg',
    date_from=datetime(2015, 8, 29, 13, 45, 0),
    date_to=datetime(2015, 8, 29, 14, 45, 0),
    day=TimeSlotTierDayName.FIRST.value
  )

  TimeSlotTier.objects.create(
    id='xMPbefCHK6',
    date_from=datetime(2015, 8, 29, 15, 0, 0),
    date_to=datetime(2015, 8, 29, 18, 15, 0),
    day=TimeSlotTierDayName.FIRST.value
  )

  TimeSlotTier.objects.create(
    id='sCbIKF07yh',
    date_from=datetime(2015, 8, 30, 9, 30, 0),
    date_to=datetime(2015, 8, 30, 12, 45, 0),
    day=TimeSlotTierDayName.SECOND.value
  )

  TimeSlotTier.objects.create(
    id='VZG2dH6HoX',
    date_from=datetime(2015, 8, 30, 13, 0, 0),
    date_to=datetime(2015, 8, 30, 14, 0, 0),
    day=TimeSlotTierDayName.SECOND.value
  )

  TimeSlotTier.objects.create(
    id='Rf0gaLELyI',
    date_from=datetime(2015, 8, 30, 14, 0, 0),
    date_to=datetime(2015, 8, 30, 15, 0, 0),
    day=TimeSlotTierDayName.SECOND.value
  )

  TimeSlotTier.objects.create(
    id='QvzUYBHB98',
    date_from=datetime(2015, 8, 30, 15, 15, 0),
    date_to=datetime(2015, 8, 30, 18, 30, 0),
    day=TimeSlotTierDayName.SECOND.value
  )
    testTick = True
    exchangeMap = dBaseAction(dBase, ''' SELECT * from exchangeMap ''')[0]

    tickers = tickersBase[['category', 'exchange', 'name', 'ticker', 'type']].drop_duplicates().reset_index(drop = True)
    tickers = tickers[tickers.type.isin(['stock', 'etf', 'index', 'mutual_fund','currency'])]
    #tickers = tickers.drop('ID', axis = 1).drop_duplicates().reset_index(drop = True)
    tickers = tickers[['category', 'exchange', 'name', 'ticker', 'type']].drop_duplicates().reset_index(drop = True)
    
    ##### Data Files
    
    #yahooIntraDay = readCSV(yahooIntraDayFile)
    dayRange = '999d'
    
    #yahooDaily = readCSV(yahooDailyFile)
    dateFrom = datetime(1996,1,1)
    dateTo = datetime.now()
    
    #googleIntra = readCSV(googleIntraDayFile)
    #googleDaily = readCSV(googleDailyFile)

    track = 0
    tme = datetime.now()
    
    #googleIntraOutFile = 'C:\\Users\\Nick\\Documents\\project MONEY\\data\\dateFiles\\dataBase_google_intraDay_' + str(np.datetime64(tme.date())).replace('-','') +  '.csv'
    #yahooIntraOutFile = 'C:\\Users\\Nick\\Documents\\project MONEY\\data\\dateFiles\\dataBase_yahoo_intraDay_' + str(np.datetime64(tme.date())).replace('-','') +  '.csv'
    
    start_time = time.time()
    
    yahooIntraWrite = []
    yahooDailyWrite = []