Example #1
def beli_lapangan(id_lapangan):
	if islogin():
		status = True
		d_checkin = request.form['checkin']
		d_checkout = request.form['checkout']
		d_waktuin = request.form['waktuin']
		d_waktuout = request.form['waktuout']
		d_lapangan = lapangan.get(lapangan.id == id_lapangan)
		d_nama = request.form['nama']
		d_ktp = request.form['ktp']

		d_checkin = datetime.datetime.strptime(d_checkin,'%Y-%m-%d')
		d_checkout = datetime.datetime.strptime(d_checkout,'%Y-%m-%d')
		d_waktuin = datetime.datetime.strptime(d_waktuin,'%H:%M')
		d_waktuout = datetime.datetime.strptime(d_waktuout,'%H:%M')
		d_checkin = datetime.datetime(d_checkin.year,d_checkin.month,d_checkin.day,d_waktuin.hour,d_waktuin.minute)
		d_checkout = datetime.datetime(d_checkout.year,d_checkout.month,d_checkout.day,d_waktuout.hour,d_waktuout.minute)

		# check for overlapping bookings
		d_transaksi = transaksi.select().where((transaksi.selesai == False)&(transaksi.id_lapangan == id_lapangan))
		if d_transaksi.exists():
			for data in d_transaksi:
				d_checkin_d = datetime.datetime(data.checkin.year,data.checkin.month,data.checkin.day,data.checkin.hour,data.checkin.minute)
				d_checkout_d = datetime.datetime(data.checkout.year,data.checkout.month,data.checkout.day,data.checkout.hour,data.checkout.minute)

				checkin_now = d_checkin
				checkout_now = d_checkout
				
				time_range = DateTimeRange(checkin_now,checkout_now)
				for date in time_range.range(datetime.timedelta(days=1)):
					hour_range = DateTimeRange(date,checkout_now)
					for hour in hour_range.range(datetime.timedelta(hours=1)):
						minute_range = DateTimeRange(hour,checkout_now)
						for minute in minute_range.range(datetime.timedelta(minutes=1)):
							if d_checkin_d == minute or d_checkout_d == minute:
								# print(True)
								status = False
								break
		if status == True:
			lama_menginap = d_checkout - d_checkin
			# use total_seconds() so stays longer than a day are charged in full (.seconds drops whole days)
			d_total_harga = int(lama_menginap.total_seconds() // 3600) * int(d_lapangan.harga)

			transaksi.create(
				id_user=session['iduser'],
				id_lapangan=id_lapangan,
				nama_pemesan=d_nama,
				no_ktp=d_ktp,
				checkin = d_checkin,
				checkout = d_checkout,
				total_harga=d_total_harga,
				waktu_transaksi=datetimenow())
			return redirect(url_for('index'))
		return redirect(url_for('index'))
	return redirect(url_for('index'))
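The minute-by-minute scan above only flags a clash when an existing booking's exact check-in or check-out minute is hit. A minimal alternative sketch, not part of the original view, lets DateTimeRange test the overlap directly with its is_intersection method:

# Sketch only: overlap test via DateTimeRange.is_intersection() instead of
# iterating minute by minute. Names here are illustrative, not from the app.
import datetime
from datetimerange import DateTimeRange

def bookings_overlap(existing_checkin, existing_checkout, new_checkin, new_checkout):
    existing = DateTimeRange(existing_checkin, existing_checkout)
    requested = DateTimeRange(new_checkin, new_checkout)
    return existing.is_intersection(requested)

# A 10:00-12:00 booking clashes with an 11:00-13:00 request:
print(bookings_overlap(
    datetime.datetime(2021, 5, 1, 10, 0), datetime.datetime(2021, 5, 1, 12, 0),
    datetime.datetime(2021, 5, 1, 11, 0), datetime.datetime(2021, 5, 1, 13, 0)))  # True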
Example #2
def generate_timestamp(start_datetime, sampling_rate, signal_length):
    """

    Parameters
    ----------
    start_datetime :
        
    sampling_rate : float
        
    signal_length : int
        

    Returns
    -------
    list : list of timestamps with length equal to signal_length.
    """
    number_of_seconds = (signal_length - 1)/sampling_rate
    if start_datetime is None:
        start_datetime = dt.datetime.now()
    end_datetime = start_datetime + dt.timedelta(seconds=number_of_seconds)
    time_range = DateTimeRange(start_datetime, end_datetime)
    timestamps = []
    for value in time_range.range(dt.timedelta(seconds=1/sampling_rate)):
        timestamps.append(value)
    if len(timestamps) != signal_length:
        raise Exception("Timestamp series generated is not valid, please "
                        "check sampling rate.")
    return timestamps
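A brief usage sketch for generate_timestamp, assuming the dt alias for the datetime module used in the snippet: at 4 Hz, 8 samples span 1.75 s, so 8 timestamps 0.25 s apart come back.

# Usage sketch; relies on generate_timestamp and its imports defined above.
import datetime as dt

start = dt.datetime(2021, 1, 1, 12, 0, 0)
stamps = generate_timestamp(start, sampling_rate=4.0, signal_length=8)
print(len(stamps))             # 8
print(stamps[1] - stamps[0])   # 0:00:00.250000, one sample period at 4 Hz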
Example #3
def update_training_for_staff_after_absence(sender, instance, **kwargs):
    if instance.abs_type.abbr == "FOR":
        absence_range = instance.datetime_range
        old_absence_start_date = instance.tracker.previous("start_date")
        old_absence_end_date = instance.tracker.previous("end_date")
        old_absence_range = DateTimeRange(old_absence_start_date,
                                          old_absence_end_date)

        years = []
        rt = RightTraining.objects.first()
        if rt:
            for value in absence_range.range(datetime.timedelta(days=1)):
                if value.month in range(rt.start_month, 13):
                    year = value.year
                else:
                    year = value.year - 1
                if year not in years:
                    years.append(year)
            if old_absence_start_date and old_absence_end_date:
                for value in old_absence_range.range(
                        datetime.timedelta(days=1)):
                    if value.month in range(rt.start_month, 13):
                        year = value.year
                    else:
                        year = value.year - 1
                    if year not in years:
                        years.append(year)

            call_command('update_training',
                         years=years,
                         staffs=[instance.staff.id])
Example #4
    def horarios_futuros(self, data, solicitante):
        quando = self.solicitacao_futura(solicitante)
        if quando is not None:
            inicio = localtime(quando.inicio).strftime('%d/%m/%Y às %H:%M')
            raise Exception(
                f"Você já tem uma solicitação prevista para {inicio} na situação {quando.status}"
            )

        if not (max(self.inicio, date.today()) <= data.date() <= self.fim):
            raise Exception(
                f"A data deve ser entre {self.inicio} e {self.fim}, mas você informou a data {data}."
            )

        result = []
        for vaga in self.vaga_set.filter(dia=data.weekday()):
            time_range = DateTimeRange(datetime.combine(data, vaga.inicio),
                                       datetime.combine(data, vaga.fim))
            for dt in time_range.range(timedelta(minutes=self.janela)):
                if make_aware(dt) > now():
                    quantidade = Solicitacao.objects.filter(
                        agenda=self,
                        inicio=dt,
                        status__in=[
                            Solicitacao.Status.DEFERIDO,
                            Solicitacao.Status.SOLICITADO
                        ]).count()
                    if quantidade < vaga.atendimentos:
                        result.append(dt)
        return result[:-1]
Example #5
def merge_schedules(players: Dict[str, List[DateTimeRange]], required: int,
                    start: datetime) -> Dict[datetime, List[Set[str]]]:
    """Merge individual schedules to time slots where people are free.

    Takes a mapping of people's availability and returns a mapping of
    blocks of time (by the start time) to players free in that block.

    E.g. if Jon is free at 12-2pm and Neel is free from 1-2pm and blocks
    are 30 minutes, then return a dictionary of 4 times, with the first
    2 time blocks mapping to (Jon) and the second two to (Jon, Neel).
    """
    if not start:
        start = datetime.now(timezone("Europe/London")).replace(minute=0,
                                                                second=0,
                                                                microsecond=0)
    candidates = DateTimeRange(start, start + timedelta(days=14))

    potentials = defaultdict(list)

    for candidate_times in candidates.range(timedelta(minutes=MINUTES)):
        candidate_players = []
        for player, timeranges in players.items():
            for timerange in timeranges:
                if timerange.start_datetime <= candidate_times < timerange.end_datetime:
                    candidate_players.append(player)

        for combination_length in range(required, len(candidate_players) + 1):
            combos = combinations(candidate_players, combination_length)
            potentials[candidate_times].extend(combos)

    return potentials
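A usage sketch mirroring the Jon/Neel example in the docstring, assuming MINUTES = 30 at module level as the function expects:

# Usage sketch; merge_schedules and MINUTES are assumed to be defined as above.
from datetime import datetime, timedelta
from datetimerange import DateTimeRange

noon = datetime(2021, 6, 1, 12, 0)
players = {
    "Jon":  [DateTimeRange(noon, noon + timedelta(hours=2))],                       # free 12-2pm
    "Neel": [DateTimeRange(noon + timedelta(hours=1), noon + timedelta(hours=2))],  # free 1-2pm
}
slots = merge_schedules(players, required=1, start=noon)
# 12:00 and 12:30 slots hold ('Jon',) only; from 13:00 the combinations also include Neel.
print(slots[noon + timedelta(hours=1)])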
Example #6
def generate_timeslots_dict(work_hours_list, interval):
    """

    Args:
        work_hours_list:
        interval:

    Returns:

    """
    timeslots_dict = dict()
    if len(work_hours_list) == 1:
        time_range = DateTimeRange(work_hours_list[0], work_hours_list[-1])
        timeslots_dict = {
            value: 1
            for value in time_range.range(relativedelta(
                minutes=+int(interval)))
        }
    elif len(work_hours_list) == 4:
        first_time_range = DateTimeRange(work_hours_list[0],
                                         work_hours_list[1])
        first_timeslots_dict = {
            value: 1
            for value in first_time_range.range(
                relativedelta(minutes=+int(interval)))
        }
        second_time_range = DateTimeRange(work_hours_list[1],
                                          work_hours_list[2])
        second_timeslots_dict = {
            value: 0
            for value in second_time_range.range(
                relativedelta(minutes=+int(interval)))
        }
        third_time_range = DateTimeRange(work_hours_list[2],
                                         work_hours_list[3])
        third_timeslots_dict = {
            value: 1
            for value in third_time_range.range(
                relativedelta(minutes=+int(interval)))
        }
        timeslots_dict.update(first_timeslots_dict)
        timeslots_dict.update(second_timeslots_dict)
        timeslots_dict.update(third_timeslots_dict)
    return timeslots_dict
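A usage sketch for the four-entry case, assuming the dateutil/datetimerange imports implied above: a 9:00-17:00 day with a 12:00-13:00 break and 60-minute slots.

# Usage sketch; generate_timeslots_dict is assumed to be defined as above.
import datetime

work_hours = [
    datetime.datetime(2021, 3, 1, 9, 0),    # start of day
    datetime.datetime(2021, 3, 1, 12, 0),   # break starts
    datetime.datetime(2021, 3, 1, 13, 0),   # break ends
    datetime.datetime(2021, 3, 1, 17, 0),   # end of day
]
slots = generate_timeslots_dict(work_hours, interval=60)
# 09:00-12:00 slots map to 1, the 12:00 slot is overwritten to 0 by the break
# range, and 13:00-17:00 map back to 1 (dict.update order decides the boundaries).
print(slots[datetime.datetime(2021, 3, 1, 12, 0)])   # 0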
Example #7
def beli_kamar(id_kamar):
    if islogin():
        status = True
        d_checkin = request.form['checkin']
        d_checkout = request.form['checkout']
        d_kamar = kamar.get(kamar.id == id_kamar)
        d_nama = request.form['nama']
        d_ktp = request.form['ktp']

        d_checkin = datetime.datetime.strptime(d_checkin, '%Y-%m-%d')
        d_checkout = datetime.datetime.strptime(d_checkout, '%Y-%m-%d')

        # check for overlapping bookings
        d_transaksi = transaksi.select().where(
            (transaksi.selesai == False) & (transaksi.id_kamar == id_kamar))
        if d_transaksi.exists():
            for data in d_transaksi:
                d_checkin_d = datetime.datetime(data.checkin.year,
                                                data.checkin.month,
                                                data.checkin.day, 0, 0)
                d_checkout_d = datetime.datetime(data.checkout.year,
                                                 data.checkout.month,
                                                 data.checkout.day, 0, 0)

                checkin_now = datetime.datetime(d_checkin.year,
                                                d_checkin.month, d_checkin.day,
                                                0, 0)
                checkout_now = datetime.datetime(d_checkout.year,
                                                 d_checkout.month,
                                                 d_checkout.day, 0, 0)

                time_range = DateTimeRange(checkin_now, checkout_now)
                for date in time_range.range(datetime.timedelta(days=1)):
                    check = date
                    if d_checkin_d == check or d_checkout_d == check:
                        status = False

        if status == True:
            lama_menginap = datetime.datetime(
                d_checkout.year, d_checkout.month,
                d_checkout.day, 0, 0) - datetime.datetime(
                    d_checkin.year, d_checkin.month, d_checkin.day, 0, 0)

            d_total_harga = int(lama_menginap.days) * int(d_kamar.harga)

            transaksi.create(id_user=session['iduser'],
                             id_kamar=id_kamar,
                             nama_pemesan=d_nama,
                             no_ktp=d_ktp,
                             checkin=d_checkin,
                             checkout=d_checkout,
                             total_harga=d_total_harga,
                             waktu_transaksi=datetimenow())
            return redirect(url_for('index'))
        return redirect(url_for('index'))
    return redirect(url_for('index'))
Example #8
def saveDateRange(start, end):
    givenRange = DateTimeRange(start, end)

    tables = ['Jobs', 'ReviewJobs']
    for table in tables:
        data = []
        query = 'SELECT * FROM ' + table
        cur.execute(query)
        results = [list(each) for each in cur.fetchall()]

        for job in results:
            dateRange = job[15]
            d = [each.lstrip().rstrip() for each in dateRange.split("-")]

            s = d[0].split("/")
            startFormat = str(int(s[2]) + 2000) + "/" + s[1] + "/" + s[0]

            inRange = False

            if len(d) > 1:
                e = d[1].split("/")
                endFormat = str(int(e[2]) + 2000) + "/" + e[1] + "/" + e[0]

                tableRange = DateTimeRange(startFormat, endFormat)

                for day in tableRange.range(relativedelta(days=1)):
                    if day in givenRange:
                        inRange = True

            else:
                inRange = startFormat in givenRange

            if inRange:
                data.append(job)

        columnNames = names.get(table)

        file = "Date Range for " + table + " from " + start.replace("/", "-") + " to " + end.replace("/", "-") + ".csv"

        if len(data) > 0:
            data.insert(0, columnNames)
            data.insert(1, [])

            for i in range(len(data)):
                line = data[i]
                if (i == 0):
                    open(file, 'w+').close()
                with open(file, 'a', newline='') as fp:
                    a = csv.writer(fp, delimiter=',')
                    line = [line]
                    a.writerows(line)
Example #9
def add_cron_job(cron_day, stop_date):
    days_ = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
    cron_day = days_.index(cron_day)
    today_ = datetime.date.today()

    date1 = datetime.datetime.strptime(str(today_), '%Y-%m-%d')
    date2 = datetime.datetime.strptime(stop_date,
                                       '%Y-%m-%d') - datetime.timedelta(days=1)
    time_range = DateTimeRange(date1, date2)

    for value in time_range.range(datetime.timedelta(days=1)):
        if cron_day == value.weekday():
            return True

    return False
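A usage sketch, assuming the same import datetime as the function: any window of a week or more between today and stop_date contains every weekday, so the check returns True here.

# Usage sketch; add_cron_job is assumed to be defined as above.
import datetime

stop = (datetime.date.today() + datetime.timedelta(days=10)).strftime('%Y-%m-%d')
print(add_cron_job("wed", stop))   # True - a 10-day window always contains a Wednesday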
Example #10
def apithree(request, start_date, end_date):
    response = requests.get(
        'https://gitlab.com/-/snippets/2094509/raw/master/sample_json_2.json')
    all_data = response.json()
    time_range_2 = DateTimeRange(start_date, end_date)
    all_date = []
    for value in time_range_2.range(datetime.timedelta(minutes=1)):
        all_date.append(str(value.date()) + ' ' + str(value.time()))

    time_sets = set(all_date)

    runtime = 0
    downtime = 0

    for data in all_data:
        if data['time'] in time_sets:
            if data['runtime'] <= 1021:
                runtime += data['runtime']
            if data['runtime'] > 1021:
                evaluated_downtime = data['runtime'] - 1021
                downtime += evaluated_downtime
                runtime += 1021
    # print(runtime,downtime)
    total_runtime = round(((runtime) / (runtime + downtime)) * 100, 2)
    # conversion on runtime and downtime seconds to hh:mm:ss

    minute, sec = divmod(runtime, 60)
    hour, minute = divmod(minute, 60)
    runtime = ("%dh:%02dm:%02ds" % (hour, minute, sec))

    minute, sec = divmod(downtime, 60)
    hour, minute = divmod(minute, 60)
    downtime = ("%dh:%02dm:%02ds" % (hour, minute, sec))
    # =====================================================
    result = {
        'runtime': runtime,
        'downtime': downtime,
        'utilization': total_runtime
    }
    print(result)

    return JsonResponse(result, safe=False)
Example #11
def get_vacancies_by_search_phrase_async2(search_dict):
    search_str = '&'.join(f'{k}={v}' for k, v in search_dict.items())
    url = f'{VACANCY_URL}?{search_str}'
    print(url)

    start_time = datetime.utcnow().replace(tzinfo=timezone.utc)
    end_time = start_time - timedelta(**TIME_PERIOD)

    step_time_delta = timedelta(**TIME_STEP)
    time_range = DateTimeRange(end_time, start_time)
    time_interval_list = [(start + step_time_delta, start)
                          for start in time_range.range(step_time_delta)][::-1]
    vacancy_id_list = asyncio.run(get_search_res_list(url, time_interval_list))
    vacancy_list = asyncio.run(get_vacancies_coro(list(set(vacancy_id_list))))

    with open(f'{OUTPUT_DIR}/{search_str}{start_time}{end_time}.json',
              mode='w',
              encoding='utf8') as out:
        json.dump(vacancy_list, out, ensure_ascii=False)
    return len(vacancy_list)
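For reference, the interval pairing above produces (end, start) tuples in reverse chronological order; a small sketch outside the module shows the shape:

# Illustration only: a 3-hour span split into 1-hour (end, start) tuples, newest first.
from datetime import datetime, timedelta
from datetimerange import DateTimeRange

rng = DateTimeRange(datetime(2021, 1, 1, 0, 0), datetime(2021, 1, 1, 3, 0))
step = timedelta(hours=1)
pairs = [(s + step, s) for s in rng.range(step)][::-1]
print(pairs[0])   # (datetime.datetime(2021, 1, 1, 4, 0), datetime.datetime(2021, 1, 1, 3, 0))
print(pairs[-1])  # (datetime.datetime(2021, 1, 1, 1, 0), datetime.datetime(2021, 1, 1, 0, 0))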
Example #12
def check_hours(request, query=None):
    picked_date = request.GET["datechoosen"]
    doctor_id = request.GET["id_doctor"]

    all_appointments = appoint_models.Appointment.objects.filter(
        date=picked_date, doctor_id=doctor_id)

    current_appointments = []
    time_list = []
    time_to_display = []

    for appointment in all_appointments:
        # strftime gives zero-padded HH:MM, matching the slot strings built below
        current_appointments.append(appointment.start_time.strftime('%H:%M'))

    time_range = DateTimeRange("2015-01-01T09:00:00", "2015-01-01T17:00:00")
    for value in time_range.range(relativedelta(minutes=30)):
        time_list.append(str(value.time())[0:5])

    for current_time in time_list:
        if current_time not in current_appointments:
            time_to_display.append(current_time)

    result = {
        i: {
            "time": time_to_display[i]
        }
        for i in range(0, len(time_to_display))
    }
    return JsonResponse(result)
Example #13
def jobsInDateRange(start, end):
    givenRange = DateTimeRange(start, end)
    tables = ['Jobs', 'ReviewJobs']
    data = []
    for table in tables:
        query = 'SELECT DateRange, JobID, CountryOfWinner FROM ' + table
        cur.execute(query)
        results = [list(each) for each in cur.fetchall()]

        for job in results:
            dateRange = job[0]
            d = [each.lstrip().rstrip() for each in dateRange.split("-")]

            s = d[0].split("/")
            startFormat = str(int(s[2]) + 2000) + "/" + s[1] + "/" + s[0]

            inRange = False

            endFormat = ''

            if len(d) > 1:
                e = d[1].split("/")
                endFormat = str(int(e[2]) + 2000) + "/" + e[1] + "/" + e[0]

                tableRange = DateTimeRange(startFormat, endFormat)

                for day in tableRange.range(relativedelta(days=1)):
                    if day in givenRange:
                        inRange = True

            else:
                inRange = startFormat in givenRange

            if inRange:
                data.append([job[1], job[2]])

    return data
Example #14
def index(request, start_date, end_date):
    response = requests.get(
        'https://gitlab.com/-/snippets/2094509/raw/master/sample_json_1.json')
    data = response.json()
    data_ = json.dumps(data)

    time_range_2 = DateTimeRange(start_date, end_date)
    all_date = []

    for value in time_range_2.range(datetime.timedelta(minutes=1)):
        all_date.append(str(value.date()) + ' ' + str(value.time()))
    # print(all_date)
    shiftA = []
    shiftB = []
    shiftC = []
    shift1 = DateTimeRange(start_date[:start_date.index('T')] + ' ' + '06:00',
                           start_date[:start_date.index('T')] + ' ' + '14:00')
    for i in shift1.range(datetime.timedelta(minutes=1)):
        shiftA.append(str(i.date()) + ' ' + str(i.time()))

    shift2 = DateTimeRange(start_date[:start_date.index('T')] + ' ' + '14:00',
                           start_date[:start_date.index('T')] + ' ' + '20:00')
    for i in shift2.range(datetime.timedelta(minutes=1)):
        shiftB.append(str(i.date()) + ' ' + str(i.time()))
    shift3 = DateTimeRange(start_date[:start_date.index('T')] + ' ' + '20:00',
                           start_date[:start_date.index('T')] + ' ' + '23:59')
    # print(shift3)
    for i in shift3.range(datetime.timedelta(minutes=1)):
        shiftC.append(str(i.date()) + ' ' + str(i.time()))

    shift3 = DateTimeRange(end_date[:end_date.index('T')] + ' ' + '00:00',
                           end_date[:end_date.index('T')] + ' ' + '06:00')
    # print(shift3)
    for i in shift3.range(datetime.timedelta(minutes=1)):
        shiftC.append(str(i.date()) + ' ' + str(i.time()))
    # print(shiftC)
    res = {}
    count_prodA = 0
    count_prodB = 0
    for i in data:
        if i['time'] in all_date:
            if i['time'] in shiftA:
                if i['production_A'] == True:
                    # print(i)
                    count_prodA += 1

    for i in data:
        if i['time'] in all_date:
            if i['time'] in shiftA:
                if i['production_B'] == True:
                    # print(i)
                    count_prodB += 1

    res['shiftA'] = {
        'production_A_count': count_prodA,
        'production_B_count': count_prodB,
    }
    # print(res)
    count_prodA = 0
    count_prodB = 0
    for i in data:
        if i['time'] in all_date:
            if i['time'] in shiftB:
                if i['production_A'] == True:
                    # print(i)
                    count_prodA += 1
    # print(count)

    for i in data:
        if i['time'] in all_date:
            if i['time'] in shiftB:
                if i['production_B'] == True:
                    # print(i)
                    count_prodB += 1
    # print(count)
    res['shiftB'] = {
        'production_A_count': count_prodA,
        'production_B_count': count_prodB,
    }

    # print(shiftC)
    count_prodA = 0
    count_prodB = 0
    for i in data:
        if i['time'] in all_date:
            if i['time'] in shiftC:
                if i['production_A'] == True:
                    # print(i)

                    count_prodA += 1
    # print(count)

    # print(shiftC)
    print('==============')
    for i in data:
        if i['time'] in all_date:
            if i['time'] in shiftC:

                if i['production_B'] == True:
                    # print(i)
                    count_prodB += 1
    # print(count)
    res['shiftC'] = {
        'production_A_count': count_prodA,
        'production_B_count': count_prodB,
    }
    # print(res)
    return JsonResponse(res, safe=False)
Example #15
def get_grid_geojson(bus_ids,
                     time_range,
                     flip_coordinates=True,
                     stations_source='overpass'):
    """
    Generate the grid GeoJson based on the values saved in 'grid_cells' table.
    This can be used to overlay it on top of the routes in the jupyter notebooks.
    For each hour in the time interval it will generate a grid.
    It also maps the bus stations to each cell in order to provide 'buses_count_subset' and
    'buses_count_total' in the returned GeoJson, that are the frequency of buses (counted at a single station)
    for the given bus_ids in each cell and the frequency of all buses in each cell. This can be used as a ratio
    to show the percentage of the subset in terms of max frequencies.

    Args:
        bus_ids (list): List of the bus ids.
        time_range (tuple): Time range in format ('saturday', '10:03', '11:03').
        flip_coordinates (bool, optional): Flip the coordinates. Defaults to True.
        stations_source (str, optional): The source of the stations' coordinates. 
        Valid values: 'overpass', 'here'. Defaults to 'overpass'.

    Returns:
        dict: The dict that represents the GeoJson.
    """

    conn = sqlite3.connect(db)
    cursor = conn.cursor()

    if stations_source == 'overpass':
        stmt_buses_subset = f"""SELECT cell_id, substr(time, 0, 3) AS interval, count(*) FROM departures d, stations_cells_overpass sc 
            WHERE d.station_id = sc.station_id AND bus_id IN ({','.join(['?'] * len(bus_ids))}) AND day = ? AND time BETWEEN ? AND ? 
            GROUP BY cell_id, interval;"""

        stmt_buses_all = f"""SELECT cell_id, substr(time, 0, 3) as interval, count(*) FROM departures d, stations_cells_overpass sc 
            WHERE d.station_id = sc.station_id AND day = ? AND time BETWEEN ? AND ? GROUP BY cell_id, interval;"""

    elif stations_source == 'here':
        stmt_buses_subset = f"""SELECT cell_id, substr(time, 0, 3) as interval, count(*) FROM departures d, stations_cells_here sc 
            WHERE d.station_id = sc.station_id AND bus_id IN ({','.join(['?'] * len(bus_ids))}) AND day = ? AND time BETWEEN ? AND ? 
            GROUP BY cell_id, interval;"""

        stmt_buses_all = f"""SELECT cell_id, substr(time, 0, 3) as interval, count(*) FROM departures d, stations_cells_here sc 
            WHERE d.station_id = sc.station_id AND day = ? AND time BETWEEN ? AND ? GROUP BY cell_id, interval;"""

    else:
        cursor.close()
        conn.close()
        raise Exception('Invalid stations_source!')

    buses_count_subset = {}
    for cell, interval, count in cursor.execute(
            stmt_buses_subset, (*bus_ids, *time_range)).fetchall():
        d = {}
        d[interval] = count
        buses_count_subset.setdefault(cell, {}).update(d)

    buses_count_total = {}
    for cell, interval, count in cursor.execute(stmt_buses_all,
                                                [*time_range]).fetchall():
        d = {}
        d[interval] = count
        buses_count_total.setdefault(cell, {}).update(d)

    geo_json = {
        "type": "FeatureCollection",
        "properties": {
            "time_range":
            f"{time_range[0]}: {time_range[1]} - {time_range[2]}",
            "bus_ids": bus_ids
        },
        "features": []
    }

    tr = DateTimeRange(*time_range[-2:])

    stmt = 'SELECT id, x_axis, y_axis, upper_left, upper_right, lower_right, lower_left FROM grid_cells;'

    for cell_id, x_axis, y_axis, u_left, u_right, l_right, l_left in cursor.execute(
            stmt).fetchall():
        coordinates_list = [
            list(
                map(
                    float,
                    u_left.split(',')[::-1]
                    if flip_coordinates else u_left.split(','))),
            list(
                map(
                    float,
                    u_right.split(',')[::-1]
                    if flip_coordinates else u_right.split(','))),
            list(
                map(
                    float,
                    l_right.split(',')[::-1]
                    if flip_coordinates else l_right.split(','))),
            list(
                map(
                    float,
                    l_left.split(',')[::-1]
                    if flip_coordinates else l_left.split(',')))
        ]

        for time in tr.range(datetime.timedelta(hours=1)):
            time_hours = time.strftime('%H')
            if cell_id in buses_count_subset and time_hours in buses_count_subset[
                    cell_id]:
                subset = buses_count_subset[cell_id][time_hours]
            else:
                subset = 0

            if cell_id in buses_count_total and time_hours in buses_count_total[
                    cell_id]:
                total = buses_count_total[cell_id][time_hours]
            else:
                total = 0

            perc = subset / total if total else 0
            cell_color = plt.cm.get_cmap('Reds')(
                perc * 1.5)  # 1.5 - to increase the shade
            cell_color = mpl.colors.to_hex(cell_color)
            perc = round(perc * 100, 2)

            cell = {
                "type": "Feature",
                "properties": {
                    "matrix_coordinates": f"({x_axis},{y_axis})",
                    "buses_count_subset": subset,
                    "buses_count_total": total,
                    "popup":
                    f'({x_axis}, {y_axis}) {subset}/{total} ({perc}%)',
                    # "fillColor": cell_color,
                    "style": {
                        'fillColor': cell_color,
                        'color': 'black',
                        'weight': 0.5,
                        'dashArray': '5',
                        'fillOpacity': 0.5
                    },
                    "time": "2020-10-10T" + time.strftime('%H:%M') + ":00"
                },
                "geometry": {
                    "type": "Polygon",
                    "coordinates": [coordinates_list]
                }
            }
            geo_json['features'].append(cell)

    cursor.close()
    conn.close()

    return geo_json
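A hedged usage sketch following the docstring's parameter format; it presumes the sqlite file bound to db and the tables queried above already exist:

# Usage sketch; get_grid_geojson and its db/plotting dependencies are assumed available.
geo = get_grid_geojson(bus_ids=[101, 205],
                       time_range=('saturday', '10:00', '12:00'),
                       flip_coordinates=True,
                       stations_source='overpass')
print(geo['properties']['time_range'])   # "saturday: 10:00 - 12:00"
print(len(geo['features']))              # one feature per grid cell and hour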
Example #16
def get_available_youbike_numbers_dfs_per_weekday(df, weekdays='all'):
    '''
    df: dataframe
    weekdays: all or a list of weekdays, ex: ['Mon', 'Tue']
    '''
    key_column = '可借車數'
    time_range = DateTimeRange("00:00:00", "23:55:00")
    index_selectors = []
    dfs_dict = {}
    start_date = '2018-01-01'
    end_date = '2018-06-15'
    start_date_dict = {
        'Mon': '2018-01-01',
        'Tue': '2018-01-02',
        'Wed': '2018-01-03',
        'Thu': '2018-01-04',
        'Fri': '2018-01-05',
        'Sat': '2018-01-06',
        'Sun': '2018-01-07'
    }

    if weekdays == 'all':
        weekdays = start_date_dict.keys()

    for value in time_range.range(relativedelta(minutes=+5)):
        index_selectors.append(value.time())
    thresh = len(index_selectors) * 0.9

    for weekday in weekdays:
        df_weekday = df[df['星期幾'] == weekday]
        df_weekday = df_weekday[[key_column]]

        date_range = pd.date_range(start=start_date_dict[weekday],
                                   end=end_date,
                                   freq="7D")
        df_new = pd.DataFrame()

        for date in date_range:
            start_date_ = date
            end_date_ = date + DateOffset(days=1)
            mask = (df_weekday.index > start_date_) & (df_weekday.index <=
                                                       end_date_)

            df_ = df_weekday.loc[mask]
            df_ = df_.rename(columns={key_column: str(start_date_.date())})
            df_.index = df_.index.time

            if df_new.empty:
                df_new = df_
            else:
                df_new = pd.concat([df_new, df_], axis=1)

        df_new = (df_new.loc[index_selectors].dropna(
            axis=1,
            thresh=thresh).fillna(method='ffill').fillna(method='bfill'))
        dfs_dict[weekday] = df_new

    return dfs_dict
Example #17
def main():
    db_conn1 = sqlite3.connect(
        r'D:\PythonCodes\vnpy\.vntrader\symb_list.db'
    )  # Connect to a database that hosts all stock symbols (i.e. corporate database)
    db_conn2 = sqlite3.connect(
        r'D:\PythonCodes\vnpy\.vntrader\database.db'
    )  # Connect to the main database that host all stock data (i.e. bar database)

    # dict_example = {'name':['Microsoft Corp.'],
    #             'cname': ['微软公司'],
    #             'symbol':['MSFT']}
    # dict_example = {'name':['Microsoft Corp.','Apple Company'],
    #             'cname': ['微软公司', '苹果公司'],
    #             'symbol':['MSFT', 'AAPL']}
    # dict_example = {'name':['Microsoft Corp.','Apple Company', 'Facebook'],
    #             'cname': ['微软公司', '苹果公司', '脸书'],
    #             'symbol':['MSFT', 'AAPL', 'FB']}
    # dict_example = {'name':['Microsoft Corp.','Apple Company', 'Facebook', 'Amazon'],
    #             'cname': ['微软公司', '苹果公司', '脸书','亚马逊'],
    #             'symbol':['MSFT', 'AAPL', 'FB', 'AMZN']}
    # df_example = pd.DataFrame.from_dict(dict_example)
    # df_example.to_csv(Path.cwd().joinpath('temp.csv'), encoding='utf_8_sig', index=False)
    # df_example = pd.read_csv(Path.cwd().joinpath('temp.csv'), encoding='utf_8_sig')

    # df_allstocks = ak.get_us_stock_name()     # Download all stock symbols using AkShare service. Expect to run this line and update stock symbols periodically.
    # df_allstocks.to_csv(Path.cwd().joinpath('temp.csv'), encoding='utf_8_sig', index=False)   # Save all stock symbols to a csv file. This is for testing purpose.
    df_example = pd.read_csv(
        Path.cwd().joinpath('temp.csv'), encoding='utf_8_sig'
    )  # Load all stock symbols from the csv file. This is for testing purpose.
    # df_example = df_example.iloc[0:2, :]    # Only take a few lines for testing. This is for testing purpose.

    df_example.to_sql(
        "dbcorpdata", db_conn1,
        if_exists='replace')  # Save all stock symbols to corporate database

    df_corpsdata_dl = pd.read_sql_query(
        "SELECT * from dbcorpdata",
        db_conn1)  # Load all stock symbols from corporate database
    df_bardata_ex = pd.read_sql_query(
        "SELECT * from dbbardata",
        db_conn2)  # Load all existing stock data from bardata database

    totalSymbol = len(df_corpsdata_dl.symbol)
    procSymbol = 0
    forcedFullLoad = False

    for s in df_corpsdata_dl.symbol:  # For each symbol read from the corporate database

        try:
            procSymbol += 1
            progress(procSymbol, totalSymbol)
            bars = []

            bar_latestBarOneStock_ex = database_manager.get_newest_bar_data(
                s, Exchange.LOCAL, Interval.DAILY
            )  # Find the latest bar record for that symbol from the bar database
            df_allBarOneStock_dl = ak.stock_us_daily(
                symbol=s, adjust="qfq"
            ).fillna(method='ffill').fillna(
                0
            )  # Download the history data for that symbol using AkShare service.
            # Fill NaN or Null fields with previous value, and then zero.

            if (
                (bar_latestBarOneStock_ex is not None) and (not forcedFullLoad)
            ):  # If the bar database already has this symbol and a full load is not forced, decide whether an incremental or a full update is needed ("~" on a bool does not negate it)
                lastDayDate_ex = bar_latestBarOneStock_ex.datetime.replace(
                    tzinfo=None
                )  # VNPY datetime is aware type, but the AkShare datetime is unaware type
                latestDayDate_dl = df_allBarOneStock_dl.index[
                    -1]  # Be careful of the difference between last day and latest date.

                dailyDataMatched = comp_bar(
                    bar_latestBarOneStock_ex, df_allBarOneStock_dl
                )  # This is a simplified logic check to compare the OHLC prices and see if they are all equal.

                if dailyDataMatched:  # If the close prices from existing and new sources match, we assume data remain correct and will only incrementally update
                    time_range = DateTimeRange(
                        lastDayDate_ex, latestDayDate_dl
                    )  # Find the date range for incremental update
                    for dt in time_range.range(timedelta(days=1)):
                        # print(dt)
                        if dt == latestDayDate_dl:  # When last date equals latest date, there is still a day in the date range, and we need to break the loop
                            # print('I am going to break...')
                            break
                        bar = BarData(
                            symbol=s,
                            exchange=Exchange.LOCAL,
                            datetime=dt,  # Here dt is a native datetime object
                            interval=Interval.DAILY,
                            volume=df_allBarOneStock_dl.loc[dt].volume,
                            open_price=df_allBarOneStock_dl.loc[dt].open,
                            high_price=df_allBarOneStock_dl.loc[dt].high,
                            low_price=df_allBarOneStock_dl.loc[dt].low,
                            close_price=df_allBarOneStock_dl.loc[dt].close,
                            open_interest=0,
                            gateway_name='Sim')
                        bars.append(bar)
                        # print('only add incremental updates for '+s)

                else:  # If the close prices from existing and new sources do not match, we assume data are corrupted and will fully update
                    for i, dt in enumerate(df_allBarOneStock_dl.index):
                        bar = BarData(
                            symbol=s,
                            exchange=Exchange.LOCAL,
                            datetime=dt.to_pydatetime(
                            ),  # Convert to a datetime object
                            interval=Interval.DAILY,
                            volume=df_allBarOneStock_dl.loc[dt].volume,
                            open_price=df_allBarOneStock_dl.loc[dt].open,
                            high_price=df_allBarOneStock_dl.loc[dt].high,
                            low_price=df_allBarOneStock_dl.loc[dt].low,
                            close_price=df_allBarOneStock_dl.loc[dt].close,
                            open_interest=0,
                            gateway_name='Sim')
                        bars.append(bar)
                        # print('correct database data for '+s)

            else:  # If bar database does not have this symbol, or just want to force full load,  we will fully update
                for i, dt in enumerate(df_allBarOneStock_dl.index):
                    bar = BarData(
                        symbol=s,
                        exchange=Exchange.LOCAL,
                        datetime=dt.to_pydatetime(
                        ),  # Convert to a datetime object
                        interval=Interval.DAILY,
                        volume=df_allBarOneStock_dl.loc[dt].volume,
                        open_price=df_allBarOneStock_dl.loc[dt].open,
                        high_price=df_allBarOneStock_dl.loc[dt].high,
                        low_price=df_allBarOneStock_dl.loc[dt].low,
                        close_price=df_allBarOneStock_dl.loc[dt].close,
                        open_interest=0,
                        gateway_name='Sim')
                    bars.append(bar)
                    # print('reload data for '+s)

            database_manager.save_bar_data(
                bars)  # Push the updates to the bar database
            print("Saved stock data of " + s + " into database.")

        except Exception:  # When an exception occurs, assume the database buffer is full and reconnect.
            time.sleep(5)
            print('Exception detected. Now reconnect to the databases.')
            db_conn1.close()
            db_conn2.close()
            db_conn1 = sqlite3.connect(
                r'D:\PythonCodes\vnpy\.vntrader\symb_list.db')
            db_conn2 = sqlite3.connect(
                r'D:\PythonCodes\vnpy\.vntrader\database.db')

    time.sleep(5)
    db_conn1.close()  # When done with the database, close the connection
    db_conn2.close()  # When done with the database, close the connection
Example #18
def apithree(request, start_date, end_date):
    response = requests.get(
        'https://gitlab.com/-/snippets/2094509/raw/master/sample_json_3.json')
    data = response.json()
    data_ = json.dumps(data)
    time_range_2 = DateTimeRange(start_date, end_date)
    all_date = []

    for value in time_range_2.range(datetime.timedelta(minutes=1)):
        all_date.append(str(value.date()) + ' ' + str(value.time()))

    new_data = []
    for d in data:
        if d['time'] in all_date:
            if d['state'] == False:
                d['id'] = int(d['id'][d['id'].index('0') + 1:])
                d['belt2'] = 0
                new_data.append(d)

            if d['state'] == True:
                d['belt1'] = 0
                d['id'] = int(d['id'][d['id'].index('0') + 1:])
                new_data.append(d)

    v = {}
    for i in new_data:
        v[i['id']] = 0

    for i in new_data:
        if i['id'] in v.keys():
            v[i['id']] += i['belt1']
            v[i['id']] += i['belt2']
    print(v)
    s = {}
    for i in new_data:
        s[i['id']] = 0
    for i in new_data:
        if i['id'] in s.keys():
            s[i['id']] += 1
            # v[i['id']]+=1
    print(s)

    for i in v:
        if i in s:
            v[i] = (v[i] // s[i])
    print(v)

    for i in new_data:
        if i['id'] in v:
            if i['belt1'] != 0:
                i['belt1'] = v[i['id']]
    # print(new_data)
            if i['belt2'] != 0:
                i['belt2'] = v[i['id']]
    for i in new_data:
        print(i)
    result_output = []
    seen = set()
    for i in new_data:
        if i['id'] not in seen:
            seen.add(i['id'])
            result_output.append(i)

    for i in result_output:
        i.pop('time')
        i.pop('state')
        i['avg_belt1'] = i.pop('belt1')
        i['avg_belt2'] = i.pop('belt2')

    result_output.sort(key=lambda k: k['id'])
    # print(result_output)
    return JsonResponse(result_output, safe=False)
Example #19
def dateRange(start, end):
    rangeList = []
    time_range = DateTimeRange(start, end)
    for value in time_range.range(datetime.timedelta(days=1)):
        rangeList.append(value.strftime('%Y-%m-%d'))
    return rangeList
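A usage sketch; the range is inclusive of both endpoints:

# Usage sketch; dateRange is assumed to be defined as above.
print(dateRange("2021-01-30", "2021-02-02"))
# ['2021-01-30', '2021-01-31', '2021-02-01', '2021-02-02']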
Example #20
#1. Take a date from the user
#2. Build a schedule every 2 days (every other day)
#3. For 30 days: DD MM YYYY plus the day of the week
#4. If a day falls on a Sunday, move it to Monday and continue every other day

import datetime
# ask the user for a date
date_entry = input("Enter a date in the format DD, MM, YYYY ")

# parse it as a DD MM YYYY date - any other format will raise an error
dt = datetime.datetime.strptime(date_entry, "%d, %m, %Y")

# print the schedule day by day
# install DateTimeRange via pip (https://pypi.org/project/DateTimeRange/#get-iterator)
from datetimerange import DateTimeRange
# date_End - the final date, 30 days later
date_End = datetime.timedelta(days=30)
time_range = DateTimeRange(dt, dt + date_End)
print("Schedule for the month: ")
# the list is printed every other day (days=2)
for value in time_range.range(datetime.timedelta(days=2)):
    # only print days from Monday through Saturday
    if value.weekday() in range(0, 6):
        print(value.strftime("%A, %d %B, %Y"))
Example #21
    def post(self, request):
        print("I'm in POST")
        print("Request body: ", request.body)
        print("Request data: ", request.data)

        serializer = ScheduleSerializer(data=request.data, many=True)
        list_exceptions = []
        list_every = []

        if serializer.is_valid():

            print("Data is valid")

            # serializer.save()
            start_day_list = []
            end_day_list = []
            start_time_list = []
            end_time_list = []

            for i in range(0, len(serializer.validated_data)):
                start_day_list.append(serializer.validated_data[i]['startday'])
                end_day_list.append(serializer.validated_data[i]['endday'])
                start_time_list.append(serializer.validated_data[i]['starttime'])
                end_time_list.append(serializer.validated_data[i]['endtime'])

            scheduler.pause()

            """ For all days which are given, check if it is ...
                ... one exception day (e.g. 2019-08-12) or an interval (e.g. 2019-08-12 - 2019-08-14)
                ... an every day job (e.g. Every Monday

            Make a continuous list for all exception days
                -> 	e.g. '2019-11-19 10:10:00 - 2019-11-22 17:15:00', '2019-11-20 05:20:00 - 2019-11-24 21:45:00'
                becomes  '2019-11-19 10:10:00 - 2019-11-24 21:45:00'

            Make a continuous list for all every day jobs
            -> 	e.g Every Monday 8:00 - 24:00 and Every Tuesday 00:00 - 17:00 becomes Mon-Tues 8:00 - 17:00)

            In the end:
                - Find out if evey day jobs intersect exception jobs
                - Make a list of all start and stop times for the every day jobs, so the exception days can run
                - Add the jobs to the scheduler
            """
            for d in range(0, len(start_day_list)):

                if not start_day_list[d].startswith('Every'):

                    if not end_day_list[d]:

                        start_date = str(start_day_list[d]) + str(' ') + str(start_time_list[d]) + str(':00')
                        start_date = datetime.datetime.strptime(start_date, '%Y-%m-%d %H:%M:%S')

                        # if day ends with 24:00, the end day will be the next day at 00:00
                        stop_date = str(start_day_list[d]) + str(' ') + str(end_time_list[d]) + str(':00')
                        stop_date = change_24_to_00(stop_date)

                        test_range = DateTimeRange(start_date, stop_date)
                        # check for intersections of time ranges in list
                        list_exceptions = check_intersect_with_dates_list(list_exceptions, test_range)

                    else:

                        start_date = str(start_day_list[d]) + str(' ') + str(start_time_list[d]) + str(':00')
                        start_date = datetime.datetime.strptime(start_date, '%Y-%m-%d %H:%M:%S')
                        start_date_interval = str(end_day_list[d]) + str(' ') + str(start_time_list[d]) + str(':00')

                        # if day ends with 24:00, the end day will be the next day at 00:00
                        stop_date = str(end_day_list[d]) + str(' ') + str(end_time_list[d]) + str(':00')
                        stop_date = change_24_to_00(stop_date)

                        stop_date_interval = str(start_day_list[d]) + str(' ') + str(end_time_list[d]) + str(':00')
                        stop_date_interval = change_24_to_00(stop_date_interval)

                        time_range1 = DateTimeRange(start_date, start_date_interval)
                        time_range2 = DateTimeRange(stop_date_interval, stop_date)

                        # Step through the interval one day at a time, because the start and end
                        # times apply to every date between the interval days, and check each
                        # day's range for intersections with the existing ranges

                        for i1, j1 in zip(time_range1.range(datetime.timedelta(days=1)),
                                          time_range2.range(datetime.timedelta(days=1))):
                            test_range = DateTimeRange(i1, j1)
                            list_exceptions = check_intersect_with_dates_list(list_exceptions, test_range)

                if start_day_list[d].startswith('Every'):
                    list_every = check_intersect_with_every_list(get_day_of_every(start_day_list[d]),
                                                                 start_time_list[d], end_time_list[d], list_every)

            # process List_days for 24:00 entries and change it to "12 am the next day"
            if list_every:
                for i in range(0, len(list_every)):
                    if list_every[i][2] == "24:00":
                        list_every[i] = every_change_24_to_00_end(list_every[i][0], list_every[i][1], list_every[i][2])

            list_exceptions = [str(i) for i in list_exceptions]
            sorted_exception_list = sorted(list_exceptions)
            cron_start_stop, new_list_exceptions = split_time_ranges_and_make_job_list(sorted_exception_list,
                                                                                       list_every)
            new_list_exceptions = sorted(list(set(new_list_exceptions)))
            add_all_jobs(cron_start_stop, list_every, new_list_exceptions, scheduler)

            scheduler.print_jobs()
            register_events(scheduler)
            scheduler.resume()

            return Response(serializer.data, status=status.HTTP_201_CREATED)

        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
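check_intersect_with_dates_list is not shown in this snippet; as a hypothetical sketch only (not the project's own helper), merging a new range into a list of existing ranges could use DateTimeRange's is_intersection and encompass methods:

# Hypothetical helper, for illustration: join a new DateTimeRange into a list,
# merging it with any ranges it overlaps.
from datetimerange import DateTimeRange

def merge_range_into_list(ranges, new_range):
    merged = new_range
    kept = []
    for existing in ranges:
        if merged.is_intersection(existing):
            merged = merged.encompass(existing)  # smallest range covering both
        else:
            kept.append(existing)
    kept.append(merged)
    return kept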
Example #22
query = "SELECT DateRange, ConvertedFinalCost FROM Jobs WHERE ConvertedFinalCost != 'None' AND Year = 2018"
cur.execute(query)
results = [list(each) for each in cur.fetchall()]
for j in range(len(results)):
    job = results[j]
    dateRange = job[0]
    d = [each.lstrip().rstrip() for each in dateRange.split("-")]

    s = d[0].split("/")
    startFormat = str(int(s[2]) + 2000) + "/" + s[1] + "/" + s[0]

    inRange = False

    if len(d) > 1:
        e = d[1].split("/")
        endFormat = str(int(e[2]) + 2000) + "/" + e[1] + "/" + e[0]

        tableRange = DateTimeRange(startFormat, endFormat)

        for day in tableRange.range(relativedelta(days=1)):
            if day in givenRange:
                inRange = True

    else:
        inRange = startFormat in givenRange

    if inRange:
        seen += 1
        data.append(float(job[1]))