Example #1
    def divide(self, years=None, months=None, days=None, hours=None, minutes=None, seconds=None):
        """Split every stored range into consecutive sub-ranges no longer
        than the duration given by the keyword arguments."""
        dct = locals()
        keywords = {}
        for key in dct:
            if key != 'self' and dct[key]:
                keywords[key] = dct[key]

        # NOTE: datetime.timedelta accepts days/hours/minutes/seconds (not
        # years/months); those would require dateutil.relativedelta instead.
        delta = td(**keywords)

        new_ranges = []
        for dt_range in self:
            start = dt_range[0]
            end = dt_range[1]
            start2 = start
            end2 = start2 + delta
            if end2 < end:
                while end2 < end:
                    old_start2 = start2
                    old_end2 = end2
                    new_ranges.append([old_start2, old_end2])
                    start2 = old_end2 + td(seconds=1)
                    end2 = start2 + delta
                new_ranges.append([old_end2 + td(seconds=1), end])
            else:
                new_ranges.append([start, end])

        self.__init__(new_ranges)
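
The slicing logic above can be exercised on its own. Below is a minimal standalone sketch (a hypothetical split_range helper, not part of the original class) that mirrors the loop in divide, assuming ranges are inclusive [start, end] datetime pairs:

from datetime import datetime as dt, timedelta as td

def split_range(start, end, delta):
    """Split [start, end] into consecutive pieces of at most `delta`,
    leaving a one-second gap between pieces, as divide() does."""
    pieces = []
    cur = start
    while cur + delta < end:
        pieces.append([cur, cur + delta])
        cur = cur + delta + td(seconds=1)
    pieces.append([cur, end])
    return pieces

# A six-hour range split into two-hour pieces:
print(split_range(dt(2016, 6, 10, 0), dt(2016, 6, 10, 6), td(hours=2)))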
Example #2
 def fix_duration(segment):
     dur = int(segment['duration'].total_seconds()*1000)
     if dur < 0:
         segment['duration'] = td(milliseconds=(86400000 + dur))
     else:
         segment['duration'] = td(milliseconds=dur)
     return segment
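
A negative duration arises when a segment ends past midnight and its end time is computed naively; adding one day's worth of milliseconds (86,400,000) restores the true length. A quick check, assuming fix_duration from the snippet above is in scope:

from datetime import timedelta as td

segment = {'duration': td(minutes=-10)}   # e.g. 23:55 -> 00:05 computed naively
print(fix_duration(segment)['duration'])  # 23:50:00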
Example #3
    def __init__(self, data_window, no_required_data, min_damper_sp,
                 excess_damper_threshold, desired_oaf, cfm, eer, analysis):
        # Initialize data arrays.
        self.oat_values = []
        self.rat_values = []
        self.mat_values = []
        self.oad_values = []
        self.fan_spd_values = []
        self.timestamp = []

        self.economizing = None

        # Application result messages
        self.alg_result_messages = \
            ["The OAD should be at the minimum position but is significantly above this value.",
             "No problems detected.",
             "Inconclusive results, could not verify the status of the economizer."]
        # Map configurable parameters
        self.max_dx_time = td(minutes=60) if td(minutes=60) > data_window else data_window * 3/2
        self.data_window = data_window
        self.no_required_data = no_required_data
        self.min_damper_sp = min_damper_sp
        self.excess_damper_threshold = excess_damper_threshold
        self.economizing_dict = {key: 25.0 for key in self.excess_damper_threshold}
        self.inconsistent_date = {key: 23.2 for key in self.excess_damper_threshold}
        self.desired_oaf = desired_oaf
        self.analysis = analysis
        self.cfm = cfm
        self.eer = eer
Example #4
    def updateTime(self):

        if self.Stop:
            return
        elif self.Reset:
            self.timeFrame.delete(1.0,END)
            self.timeFrame.insert(1.0, "00:00")
            self.timeFrame.tag_add("center",1.0,END)
            return
        else:
            # Allow any fractional second to be added to the time silently
            # Since we are working in whole seconds, the other alternative
            # is to allow fractions of a second to disappear
            # This is bad - in the worst case, it looks like we skip a second
            # at the start of the timing period, which scares people!
            self.seconds = int(math.floor(time.time() - self.initialTime)) + self.interstitial

            self.s = td(seconds = self.seconds)
            d = td(1,1,1) + self.s

            t_str = "%(minutes)02d:%(seconds)02d" % {"minutes" : int(d.seconds/60), "seconds" : d.seconds % 60}

            self.timeFrame.delete(1.0,END)
            self.timeFrame.insert(1.0,t_str)
            self.timeFrame.tag_add("center",1.0,END)
            self.after(50,self.updateTime)
Example #5
    def end_curtail(self, current_time):
        _log.info("Stagger release: {}".format(self.stagger_release))

        if self.stagger_release:
            _log.info("Stagger release enabled.")

            if self.device_group_size is None:
                _log.debug("Run stagger release setup.")
                self.next_curtail_confirm = current_time + self.curtail_confirm
                self.stagger_release_setup()
                self.next_release = current_time + td(minutes=self.current_stagger.pop(0))
                self.reset_devices()

            if current_time >= self.next_release and self.current_stagger:
                _log.debug("Release group stagger.")
                self.reset_devices()
                self.next_release = current_time + td(minutes=self.current_stagger.pop(0))
                _log.debug("Next scheduled release: {}".format(self.next_release))

            if current_time >= self.break_end:
                _log.debug("Release all in contingency.")
                self.reinitialize_stagger()
            return

        _log.debug("Current devices held curtailed: {}".format(self.devices_curtailed))
        self.reinitialize_stagger()
Example #6
 def test_merge___two_calendars__disjoint(self):
     # first calendar
     cal = Calendar()
     cal.events = {
             Event(name='Test #1', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=20)),
             Event(name='Test #2', begin=dt(2016, 6, 10, 20, 50), duration=td(minutes=20)),
             Event(name='Test #3', begin=dt(2016, 6, 10, 21, 30), duration=td(minutes=30)),
             }
     self.calendar.merge(cal)
     pprint(self.calendar.events)
     # second calendar
     cal = Calendar()
     cal.events = {
             Event(name='Test #4', uid='d', begin=dt(2016, 6, 11, 12, 10), duration=td(minutes=20)),
             Event(name='Test #5', uid='e', begin=dt(2016, 6, 11, 12, 40), duration=td(minutes=20)),
             Event(name='Test #6', uid='f', begin=dt(2016, 6, 11, 13, 30), duration=td(minutes=30)),
             }
     self.calendar.merge(cal)
     pprint(self.calendar.events)
     assert self.calendar.events == {
             Event(name='Test #1', uid='0', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=20)),
             Event(name='Test #2', uid='1', begin=dt(2016, 6, 10, 20, 50), duration=td(minutes=20)),
             Event(name='Test #3', uid='2', begin=dt(2016, 6, 10, 21, 30), duration=td(minutes=30)),
             Event(name='Test #4', uid='3', begin=dt(2016, 6, 11, 12, 10), duration=td(minutes=20)),
             Event(name='Test #5', uid='4', begin=dt(2016, 6, 11, 12, 40), duration=td(minutes=20)),
             Event(name='Test #6', uid='5', begin=dt(2016, 6, 11, 13, 30), duration=td(minutes=30)),
             }
Example #7
File: event.py Project: guyzmo/ics.py
 def test_join(self):
     # disjoined events
     event_a = Event(name='Test #1', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=20))
     event_b = Event(name='Test #2', begin=dt(2016, 6, 10, 20, 50), duration=td(minutes=20))
     with pytest.raises(ValueError):
         event_a.join(event_b)
     with pytest.raises(ValueError):
         event_b.join(event_a)
     # intersected events
     event_a = Event(name='Test #1', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=30))
     event_b = Event(name='Test #2', begin=dt(2016, 6, 10, 20, 30), duration=td(minutes=30))
     assert event_a.join(event_b) == Event(name=None, uid=event_a.uid, begin=event_a.begin, end=event_b.end)
     assert event_b.join(event_a) == Event(name=None, uid=event_b.uid, begin=event_a.begin, end=event_b.end)
     event_a = Event(name='Test #1', begin=dt(2016, 6, 10, 20, 30), duration=td(minutes=30))
     event_b = Event(name='Test #2', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=30))
     assert event_a.join(event_b) == Event(name=None, uid=event_a.uid, begin=event_b.begin, end=event_a.end)
     assert event_b.join(event_a) == Event(name=None, uid=event_b.uid, begin=event_b.begin, end=event_a.end)
     # included events
     event_a = Event(name='Test #1', begin=dt(2016, 6, 10, 20, 00), duration=td(minutes=60))
     event_b = Event(name='Test #2', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=30))
     assert event_a.join(event_b) == Event(name=None, uid=event_a.uid, begin=event_a.begin, end=event_a.end)
     assert event_b.join(event_a) == Event(name=None, uid=event_b.uid, begin=event_a.begin, end=event_a.end)
     event_a = Event(name='Test #1', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=30))
     event_b = Event(name='Test #2', begin=dt(2016, 6, 10, 20, 00), duration=td(minutes=60))
     assert event_a.join(event_b) == Event(name=None, uid=event_a.uid, begin=event_b.begin, end=event_b.end)
     assert event_b.join(event_a) == Event(name=None, uid=event_b.uid, begin=event_b.begin, end=event_b.end)
     event = Event(uid='0', name='Test #1', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=30))
     event.join(event)
     assert event == Event(uid='0', name='Test #1', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=30))
Example #8
 def test_has_slot(self):
     self.calendar.events = {
             Event(name=None, uid='0', begin=dt(2016, 6, 10, 20, 00), duration=td(minutes=20)),
             Event(name=None, uid='1', begin=dt(2016, 6, 10, 21, 00), duration=td(minutes=20)),
             Event(name=None, uid='2', begin=dt(2016, 6, 10, 22, 00), duration=td(minutes=180)),
             }
     assert not self.calendar.has_slot(
             Event(name='Test #1', uid='a', begin=dt(2016, 6, 10, 19, 30), duration=td(minutes=60)),
             )
     assert not self.calendar.has_slot(
             Event(name='Test #1', uid='a', begin=dt(2016, 6, 10, 20, 30), duration=td(minutes=60)),
             )
     assert not self.calendar.has_slot(
             Event(name='Test #1', uid='a', begin=dt(2016, 6, 10, 23, 00), duration=td(minutes=60)),
             )
     assert self.calendar.has_slot(
             Event(name='Test #1', uid='a', begin=dt(2016, 6, 10, 19, 30), duration=td(minutes=10)),
             )
     assert self.calendar.has_slot(
             Event(name='Test #1', uid='a', begin=dt(2016, 6, 10, 20, 30), duration=td(minutes=10)),
             )
     assert self.calendar.has_slot(
             Event(name='Test #1', uid='a', begin=dt(2016, 6, 10, 21, 21), duration=td(minutes=10)),
             )
     assert self.calendar.has_slot(
             Event(name='Test #1', uid='a', begin=dt(2016, 6, 11,  1,  1), duration=td(minutes=10)),
             )
Example #9
    def test_merge___three_calendars__overlapping_three_events(self):
        '''
        testing overlapping 3 events (each one in a different calendar):

            <20:00→20:20> <20:10→20:50> <20:40→21:00>
            = <20:00→21:00>

        '''
        cal = Calendar()
        cal.events = {
                Event(name='Test #1', begin=dt(2016, 6, 10, 20, 00), duration=td(minutes=20)),
                }
        self.calendar.merge(cal)
        assert len(self.calendar.events) == 1
        cal = Calendar()
        cal.events = {
                Event(name='Test #2', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=40)),
                }
        self.calendar.merge(cal)
        assert len(self.calendar.events) == 1
        cal = Calendar()
        cal.events = {
                Event(name='Test #3', begin=dt(2016, 6, 10, 20, 40), duration=td(minutes=20)),
                }
        self.calendar.merge(cal)
        assert len(self.calendar.events) == 1
        res = Calendar()
        res.events = {
                Event(name=None, uid='0', begin=dt(2016, 6, 10, 20, 00), duration=td(minutes=60)),
                }
        assert self.calendar.events == res.events
Example #10
    def __init__(self, data_window, no_required_data, excess_oaf_threshold,
                 min_damper_sp, excess_damper_threshold, desired_oaf,
                 cfm, eer, analysis):
        self.oat_values = []
        self.rat_values = []
        self.mat_values = []
        self.oad_values = []
        self.timestamp = []
        self.fan_spd_values = []
        self.economizing = None

        # Application thresholds (Configurable)
        self.cfm = cfm
        self.eer = eer
        self.max_dx_time = td(minutes=60) if td(minutes=60) > data_window else data_window * 3 / 2
        self.data_window = data_window
        self.no_required_data = no_required_data
        self.excess_oaf_threshold = excess_oaf_threshold
        self.min_damper_sp = min_damper_sp
        self.desired_oaf = desired_oaf
        self.excess_damper_threshold = excess_damper_threshold
        self.economizing_dict = {key: 36.0 for key in self.excess_damper_threshold}
        self.invalid_oaf_dict = {key: 31.2 for key in self.excess_damper_threshold}
        self.inconsistent_date = {key: 35.2 for key in self.excess_damper_threshold}
        self.analysis = analysis
Example #11
def get_index(idx):
    """Return the list of rows for the given index, fetched in date-window chunks"""

    if idx not in _INDICES_DICT:
        print("Index %s not found" % idx)
        print("Possible Indices are: %s" % (", ".join(_INDICES_DICT.keys())))
        return None

    start_dt = _INDICES_DICT[idx][1]
    s = dt.strptime(start_dt, DATE_FORMAT)
    e = dt.now()
    e2 = s + td(days=PREF_DAYS)
    delta = e - s
    all_data = []

    while e > s:
        e_ = e2.strftime(DATE_FORMAT)
        s_ = s.strftime(DATE_FORMAT)
        r = _do_get_index(idx, s_, e_)
        if r:
            print(len(r))
            all_data.extend(r)

        time.sleep(random.randint(1, 5))
        s = e2 + td(days=1)
        e2 = s + td(days=PREF_DAYS)
        if e2 > e:
            e2 = e

    return all_data
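
The windowing loop above walks the date range in PREF_DAYS-sized chunks; the same walk can be isolated as a generator (a hypothetical helper, not part of the original module):

from datetime import timedelta as td

def date_chunks(start, end, pref_days):
    """Yield (chunk_start, chunk_end) windows covering [start, end],
    mirroring the loop in get_index above."""
    s = start
    e2 = min(s + td(days=pref_days), end)
    while end > s:
        yield s, e2
        s = e2 + td(days=1)
        e2 = min(s + td(days=pref_days), end)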
Example #12
    def schedule(self, cr, uid, activity_id, date_scheduled=None,
                 context=None):
        """
        If a specific ``date_scheduled`` parameter is not specified,
        the `_POLICY['schedule']` dictionary value will be used to find
        the closest time to the current time from the ones specified
        (0 to 23 hours).

        Then it will call :meth:`schedule<activity.nh_activity.schedule>`

        :returns: ``True``
        :rtype: bool
        """
        if not date_scheduled:
            hour = td(hours=1)
            schedule_times = []
            for s in self._POLICY['schedule']:
                schedule_times.append(
                    dt.now().replace(hour=s[0], minute=s[1],
                                     second=0, microsecond=0))
            date_schedule = dt.now().replace(
                minute=0, second=0, microsecond=0) + td(hours=2)
            utctimes = [fields.datetime.utc_timestamp(
                cr, uid, t, context=context) for t in schedule_times]
            while all([date_schedule.hour != date_schedule.strptime(
                    ut, DTF).hour for ut in utctimes]):
                date_schedule += hour
            date_scheduled = date_schedule.strftime(DTF)
        return super(nh_clinical_patient_observation_weight, self).schedule(
            cr, uid, activity_id, date_scheduled, context=context)
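
Stripped of the Odoo plumbing, the hour-matching loop in the method above reduces to the following standalone sketch (a hypothetical helper; schedule_hours stands in for the hours extracted from _POLICY['schedule']):

from datetime import datetime as dt, timedelta as td

def next_scheduled(now, schedule_hours):
    """Advance from now + 2h in whole hours until the hour matches one
    of the configured schedule hours."""
    candidate = now.replace(minute=0, second=0, microsecond=0) + td(hours=2)
    while candidate.hour not in schedule_hours:
        candidate += td(hours=1)
    return candidate

print(next_scheduled(dt(2016, 6, 10, 14, 25), {6, 18}))  # 2016-06-10 18:00:00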
Example #13
def nextAlarm(ref, config):
    ind = ("monday", "tuesday", "wednesday", "thursday", "friday", "saturday", "sunday")

    imp = safeConfGet(config, "alarm", "impromptu")
    if imp:
        newTime = strToDt(imp, ref)
        if newTime > ref:
            config.remove_option("alarm", "impromptu")
            return newTime

    today = safeConfGet(config, "alarm", ind[ref.weekday()])
    if today:
        newTime = strToDt(today, ref)
        if newTime > ref:
            return newTime

    if imp:
        config.remove_option("alarm", "impromptu")
        return strToDt(imp, ref + td(days=1))

    tomorrow = safeConfGet(config, "alarm", ind[(ref.weekday() + 1) % 7])
    if tomorrow:
        return strToDt(tomorrow, ref + td(days=1))

    return None
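
nextAlarm relies on two helpers that are not shown; minimal sketches consistent with how they are called above (assumptions, not the original implementations):

def safeConfGet(config, section, option):
    """Return the config value, or None when the section/option is absent."""
    try:
        return config.get(section, option)
    except Exception:
        return None

def strToDt(hhmm, ref):
    """Interpret an 'HH:MM' string as a datetime on ref's date."""
    hour, minute = map(int, hhmm.split(":"))
    return ref.replace(hour=hour, minute=minute, second=0, microsecond=0)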
Example #14
def checks(request):
    if request.method == "GET":
        q = Check.objects.filter(user=request.user)
        doc = {"checks": [check.to_dict() for check in q]}
        return JsonResponse(doc)

    elif request.method == "POST":
        check = Check(user=request.user)
        check.name = str(request.json.get("name", ""))
        check.tags = str(request.json.get("tags", ""))
        if "timeout" in request.json:
            check.timeout = td(seconds=request.json["timeout"])
        if "grace" in request.json:
            check.grace = td(seconds=request.json["grace"])

        check.save()

        # This needs to be done after saving the check, because of
        # the M2M relation between checks and channels:
        if request.json.get("channels") == "*":
            check.assign_all_channels()

        return JsonResponse(check.to_dict(), status=201)

    # If request is neither GET nor POST, return "405 Method not allowed"
    return HttpResponse(status=405)
Example #15
def _update(check, spec):
    if "name" in spec:
        check.name = spec["name"]

    if "tags" in spec:
        check.tags = spec["tags"]

    if "timeout" in spec and "schedule" not in spec:
        check.kind = "simple"
        check.timeout = td(seconds=spec["timeout"])

    if "grace" in spec:
        check.grace = td(seconds=spec["grace"])

    if "schedule" in spec:
        check.kind = "cron"
        check.schedule = spec["schedule"]
        if "tz" in spec:
            check.tz = spec["tz"]

    check.save()

    # This needs to be done after saving the check, because of
    # the M2M relation between checks and channels:
    if "channels" in spec:
        if spec["channels"] == "*":
            check.assign_all_channels()
        elif spec["channels"] == "":
            check.channel_set.clear()

    return check
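
For reference, a spec payload of the shape _update consumes might look like this (field names taken from the code above; the values are illustrative):

spec = {
    "name": "backup-job",
    "tags": "prod backups",
    "schedule": "0 4 * * *",   # presence of "schedule" switches kind to "cron"
    "tz": "UTC",
    "grace": 3600,             # seconds, stored as a timedelta
    "channels": "*",           # "*" assigns all channels, "" clears them
}
# check = _update(check, spec)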
Example #16
    def period_selected(self, selected_period_number):
        """Populate the reminders listbox for the selected pay period."""

        t_set = lib.timecards_set()
        if selected_period_number == 1:
            print("Week")
            self.reminders = lib.reminders(
                dt.now() - td(days=REMINDERS_DAYS_BACK), dt.now(), t_set, 'week')
        elif selected_period_number == 2:
            print("BiWeek")
            self.reminders = lib.reminders(
                dt.now() - td(days=REMINDERS_DAYS_BACK), dt.now(), t_set, 'biweek')
        elif selected_period_number == 3:
            print("SemiMonth")
            self.reminders = lib.reminders(
                dt.now() - td(days=REMINDERS_DAYS_BACK), dt.now(), t_set,
                'semimonth')
        elif selected_period_number == 4:
            print("Month")
            self.reminders = lib.reminders(
                dt.now() - td(days=REMINDERS_DAYS_BACK), dt.now(), t_set, 'month')
        else:
            print("Error Bad Selection")
            return
        self.listbox.delete(0, tkinter.END)
        for reminder in self.reminders:
            rmdr = '%s %s %s %s' % (reminder[0].client.name, reminder[0].employee.firstname + ' ' +
                                    reminder[0].employee.lastname,
                                    dt.strftime(reminder[1], tk_forms.DATE_OUTPUT_READABLE_FORMAT),
                                    dt.strftime(reminder[2], tk_forms.DATE_OUTPUT_READABLE_FORMAT))
            self.listbox.insert(tkinter.END, rmdr)
Example #17
def add_pushover(request):
    if settings.PUSHOVER_API_TOKEN is None or settings.PUSHOVER_SUBSCRIPTION_URL is None:
        raise Http404("pushover integration is not available")

    if request.method == "POST":
        # Initiate the subscription
        nonce = get_random_string()
        request.session["po_nonce"] = nonce

        failure_url = settings.SITE_ROOT + reverse("hc-channels")
        success_url = settings.SITE_ROOT + reverse("hc-add-pushover") + "?" + urlencode({
            "nonce": nonce,
            "prio": request.POST.get("po_priority", "0"),
        })
        subscription_url = settings.PUSHOVER_SUBSCRIPTION_URL + "?" + urlencode({
            "success": success_url,
            "failure": failure_url,
        })

        return redirect(subscription_url)

    # Handle successful subscriptions
    if "pushover_user_key" in request.GET:
        if "nonce" not in request.GET or "prio" not in request.GET:
            return HttpResponseBadRequest()

        # Validate nonce
        if request.GET["nonce"] != request.session.get("po_nonce"):
            return HttpResponseForbidden()

        # Validate priority
        if request.GET["prio"] not in ("-2", "-1", "0", "1", "2"):
            return HttpResponseBadRequest()

        # All looks well--
        del request.session["po_nonce"]

        if request.GET.get("pushover_unsubscribed") == "1":
            # Unsubscription: delete all Pushover channels for this user
            Channel.objects.filter(user=request.user, kind="po").delete()
            return redirect("hc-channels")
        else:
            # Subscription
            user_key = request.GET["pushover_user_key"]
            priority = int(request.GET["prio"])

            channel = Channel(user=request.team.user, kind="po")
            channel.value = "%s|%d" % (user_key, priority)
            channel.save()
            channel.assign_all_checks()
            return redirect("hc-channels")

    # Show Integration Settings form
    ctx = {
        "page": "channels",
        "po_retry_delay": td(seconds=settings.PUSHOVER_EMERGENCY_RETRY_DELAY),
        "po_expiration": td(seconds=settings.PUSHOVER_EMERGENCY_EXPIRATION),
    }
    return render(request, "integrations/add_pushover.html", ctx)
Example #18
def convert(blt_file, outfile):

    tagCategory = ["duplicate_announce", "new_prefix", "transit_change", "community_change", "duplicate_withdrawal", "prepending_add", "prepending_change", "prepending_remove", "path_switching", "origin_change", "remove_prefix", "other_change"]

    pickleDictionary = dict()           # pickleDictionary[tagName][time] = count
    for tn in tagCategory:
        pickleDictionary[tn] = dict()

    for blt_pattern in blt_file:
        date = blt_pattern.split("/")[-1].split(".")[0]
        if "_" in date:
            date = date.split("_")[0]
        print("reading " + date + " now")

        blt_files = glob.glob(blt_pattern)
        
        if len(blt_files)==0:
            sys.exit()
                        
        blt_files.sort()

        initialAnalize = True
    #    BGP4MP|1421366399|A|195.66.225.76|251|207.150.172.0/22|251 1239 3257 21840|IGP|195.66.225.76|0|0|1239:321 1239:1000 1239:1004 65020:20202|NAG|| #new_prefix
        for bf in blt_files:
            blt = open(bf, "r")
            for line in blt:
                res = line.split(" #")
                message = res[0]
                _tagNames = res[1:]
                tagNames = list()
                for tagName in _tagNames:
                    tagNames.append(tagName.split("\n")[0])
                for tagName in tagNames:
                    if tagName not in tagCategory:
                        print "There is no [" + tagName + "] in this programs tagCategory."
                        continue

                timeStamp = dt.utcfromtimestamp(float(message.split("|")[1])).strftime("%Y/%m/%d %H:%M")
                timeStamp = dt.strptime(timeStamp, "%Y/%m/%d %H:%M")
                if initialAnalize:
                    preTime = timeStamp
                    initialAnalize = False

                while timeStamp > preTime + td(minutes=1):
                    for tc in tagCategory:
                        pickleDictionary[tc][preTime + td(minutes=1)] = 0
                    preTime += td(minutes=1)

                for tagName in tagNames:
                    if timeStamp not in pickleDictionary[tagName]:
                        for tc in tagCategory:
                            pickleDictionary[tc][timeStamp] = 0

                    pickleDictionary[tagName][timeStamp] += 1
                preTime = timeStamp
        
        
    with open(outfile, "wb") as f:
        pickle.dump(pickleDictionary, f)
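
The resulting pickle maps each tag to a per-minute counter. Reading it back could look like this (the file name is illustrative):

import pickle

with open("counts.pkl", "rb") as f:
    counts = pickle.load(f)
# counts["new_prefix"] maps minute-resolution datetimes to message counts
for ts in sorted(counts["new_prefix"])[:5]:
    print(ts, counts["new_prefix"][ts])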
Example #19
    def test_reminder_forgetting(self, capsys):
        """
        test forgetting a model
        """
        logger.debug('testing reminder forgetting, week')
        ####

        self.args.payroll_run_date = self.payroll_run_date
        self.args.period = 'week'
        r_set = reminders_set(self.session, self.args)
        tbl = []
        tcards = timecards(self.session, self.args)
        t_set = timecards_set(self.session, self.args)
        logger.debug('All timecards ever submitted')
        for t in tcards:
            tbl.append([t[0].id, t[0].period_start, t[0].period_end, t[1].active, t[1].title, t[2].active, t[3].active,
                        1 if timecard_hash(t[0]) in r_set else 0,
                        timecard_hash(t[0])])
        logger.debug(tabulate(tbl,
                              headers=['id', 'start', 'end', 'contract-active', 'contract-title', 'employee-active',
                                       'client-active', 'already-has-reminder', 'timecard-hash']))

        # Reminder outstanding presented to user for selection
        self.args.period = 'week'
        week_reminders_to_be_sent = reminders(self.session, self.payroll_run_date - td(days=30), self.payroll_run_date,
                                         t_set, self.args)
        contract_of_forgotten_reminder = week_reminders_to_be_sent[0][0]
        logger.debug('Pending Week Reminders BEFORE forgetting first reminder')
        tbl = []
        for r in week_reminders_to_be_sent:
            # contract, start, end = t
            tbl.append([r[0].id, r[0].title, '%s %s' % (r[0].employee.firstname, r[0].employee.lastname), r[1], r[2]])
        logger.debug(tabulate(tbl, headers=['id', 'title', 'employee', 'start', 'end']))
        assert 5 == len(week_reminders_to_be_sent)
        self.args.number = 1
        logger.debug('Forgetting first reminder')
        forget_reminder(self.session, self.payroll_run_date - td(days=30), dt.now(), t_set, self.args)

        t_set = timecards_set(self.session, self.args)
        self.args.period = 'week'
        week_reminders_to_be_sent = reminders(self.session, self.payroll_run_date - td(days=30), self.payroll_run_date,
                                         t_set, self.args)
        logger.debug('Pending Week Reminders AFTER forgetting first reminder')
        tbl = []
        for r in week_reminders_to_be_sent:
            tbl.append([r[0].id, r[0].title, '%s %s' % (r[0].employee.firstname, r[0].employee.lastname), r[1], r[2]])
        logger.debug(tabulate(tbl, headers=['id', 'title', 'employee', 'start', 'end']))
        assert 4 == len(week_reminders_to_be_sent)
        # former second reminder is current first reminder, first reminder forgotten
        assert dt(2016, 7, 18) == week_reminders_to_be_sent[0][1]
        assert dt(2016, 7, 24) == week_reminders_to_be_sent[0][2]
        invs = self.session.query(Invoice).all()
        last_inv = invs[len(invs)-1:len(invs)][0]
        logger.debug(last_inv)
        logger.debug(last_inv.period_start)
        assert contract_of_forgotten_reminder == last_inv.contract
        assert date_to_datetime(last_inv.period_start) == dt(2016, 7, 4)
        assert date_to_datetime(last_inv.period_end) == dt(2016, 7, 10)
        assert last_inv.voided is True
Example #20
def align_pv(zonetemp_array, peak_ind, val_ind, dtime):
    """
    align_pv takes the indices of peaks (peak_ind) and indices of
    valleys (val_ind) and ensures that there is only one valley
    in-between two consecutive peaks and only one peak between two
    consecutive valleys.  If there are two or more peaks between
    valleys the largest value is kept.  If there are two or more
    valleys between two peaks then the smallest value is kept.
    :param zonetemp_array:
    :param peak_ind:
    :param val_ind:
    :param dtime:
    :return:
    """
    try:
        reckon = 0
        aligned = False
        find_peak = peak_ind[0] < val_ind[0]
        begin = 0
        while not aligned:
            if find_peak:
                while peak_ind[reckon + 1] < val_ind[reckon + begin]:
                    if zonetemp_array[peak_ind[reckon]] > zonetemp_array[peak_ind[reckon + 1]]:
                        peak_ind = np.delete(peak_ind, reckon + 1)
                    else:
                        peak_ind = np.delete(peak_ind, reckon)
                if (dtime[val_ind[reckon + begin]] - dtime[peak_ind[reckon]]) <= td(minutes=3):
                    val_ind = np.delete(val_ind, reckon + begin)
                    peak_ind = np.delete(peak_ind, reckon + 1)
                else:
                    find_peak = False
                    begin += 1
                    if begin > 1:
                        begin = 0
                        reckon += 1
            else:
                while val_ind[reckon + 1] < peak_ind[reckon + begin]:
                    if zonetemp_array[val_ind[reckon]] > zonetemp_array[val_ind[reckon + 1]]:
                        val_ind = np.delete(val_ind, reckon)
                    else:
                        val_ind = np.delete(val_ind, reckon + 1)
                if (dtime[peak_ind[reckon + begin]] - dtime[val_ind[reckon]]) <= td(minutes=3):
                    val_ind = np.delete(val_ind, reckon + 1)
                    peak_ind = np.delete(peak_ind, reckon + begin)
                else:
                    find_peak = True
                    begin += 1
                    if begin > 1:
                        begin = 0
                        reckon += 1
            if (reckon + 1) == min(val_ind.size, peak_ind.size):
                aligned = True
        if peak_ind.size > val_ind.size:
            peak_ind = np.resize(peak_ind, val_ind.size)
        elif val_ind.size > peak_ind.size:
            val_ind = np.resize(val_ind, peak_ind.size)
        return peak_ind, val_ind
    except Exception:
        # Give up on malformed or exhausted index arrays and return empty results.
        return np.empty(0), np.empty(0)
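
A minimal invocation sketch, assuming candidate indices from some peak detector and evenly spaced timestamps (the inputs here are made up):

import numpy as np
from datetime import datetime as dt, timedelta as td

zonetemp = np.array([70.0, 72.0, 71.5, 69.0, 70.5, 73.0, 71.0])
dtime = [dt(2016, 6, 10, 8, 0) + td(minutes=10 * i) for i in range(len(zonetemp))]
peaks, valleys = align_pv(zonetemp, np.array([1, 5]), np.array([3]), dtime)
print(peaks, valleys)  # trimmed to equal length: one peak per valley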
Example #21
def day_interval(year, month, day, mode = "dt"):
    """
    """
    start, end = dt(year, month, day), dt(year, month, day) + td(days=1) - td(seconds=1)
    if mode == "dt":
        return start, end
    elif mode == "str":
        return str(start), str(end)
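
For reference, the helper yields an inclusive span covering the whole day (assuming dt/td are the datetime/timedelta aliases used throughout these examples):

start, end = day_interval(2016, 6, 10)        # 2016-06-10 00:00:00, 2016-06-10 23:59:59
s, e = day_interval(2016, 6, 10, mode="str")  # the same values as strings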
Example #22
def test_inequality_for_userresult():
    lunch_break = td(minutes=33)
    afternoon_work = td(hours=4, minutes=30)
    out_evening = dt(2016, 6, 2, 17, 32)
    myobj1 = UserResult(td(hours=4), lunch_break, afternoon_work, out_evening)
    myobj2 = UserResult(td(hours=4, minutes=1), lunch_break, afternoon_work, out_evening)

    assert myobj1 != myobj2
Example #23
 def test_join_disjoined(self):
     # disjoined events
     event_a = Event(name='Test #1', begin=dt(2016, 6, 10, 20, 10), duration=td(minutes=20))
     event_b = Event(name='Test #2', begin=dt(2016, 6, 10, 20, 50), duration=td(minutes=20))
     with pytest.raises(ValueError):
         event_a.join(event_b)
     with pytest.raises(ValueError):
         event_b.join(event_a)
Example #24
	def __init__(self,user,limit=1000):
		self.cursor = tweepy.Cursor(myapi.user_timeline, id = user).items(limit)
		self.user = myapi.get_user(user)
		self.tweets = list(self.cursor)
		if self.user.utc_offset is None:
			self.offset = td()
		else:
			self.offset = td(seconds=self.user.utc_offset)
Example #25
def create_feature18(_date, weather_library):
    """Build a feature vector for a forecast made as of 18:00.
    Accuracy differs little from the 16:00 version.
    _date: the target date of the forecast
    """
    print("18:00, feature of ", _date)
    weather_kumamoto = weather_library["47819"]
    weather_asootohime = weather_library["1240"]
    weather_unzendake = weather_library["47818"]
    weather_shimabara = weather_library["0962"]
    pre_date = _date - td(days=1)
    y, m, d = pre_date.year, pre_date.month, pre_date.day
    _feature = []
    _feature += [get_season(_date)]
    _feature += [get_measurement_value(dt(y, m, d,  6), weather_asootohime, "気温")]
    _feature += [get_measurement_value(dt(y, m, d, 14), weather_asootohime, "気温")]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_asootohime, "気温")]
    _feature += [get_measurement_value(dt(y, m, d,  6), weather_kumamoto, "気温")]
    _feature += [get_measurement_value(dt(y, m, d, 14), weather_kumamoto, "気温")]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_kumamoto, "気温")]
    _feature += [get_diff(dt(y, m, d, 14), dt(y, m, d, 18), weather_kumamoto, "気温")]
    _feature += [get_diff(dt(y, m, d, 14), dt(y, m, d, 18), weather_asootohime, "気温")]
    _feature += [get_diff2(dt(y, m, d, 18), weather_unzendake, weather_shimabara, "気温")]
    _feature += [get_average(dt(y, m, d, 18), weather_asootohime, "気温", range(0, 72))]
    _feature += [get_average(dt(y, m, d, 18), weather_asootohime, "降水量", range(0, 72))]
    _feature += [get_average(dt(y, m, d, 18), weather_asootohime, "降水量", range(0, 24))]
    _feature += [get_average(dt(y, m, d, 18), weather_asootohime, "日照時間", range(0, 12))]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_asootohime, "風速")]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_asootohime, "風向")]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_unzendake, "風速")]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_unzendake, "風向")]

    _feature += [get_someone(dt(y, m, d, 18), weather_asootohime, "風速", range(0, 6), max)]
    _feature += [get_someone(dt(y, m, d, 18), weather_asootohime, "気温", range(0, 6), max)]
    _feature += [get_someone(dt(y, m, d, 18), weather_unzendake, "風速", range(0, 6), max)]
    _feature += [get_someone(dt(y, m, d, 18), weather_unzendake, "気温", range(0, 6), max)]

    _feature += [get_average(dt(y, m, d, 18), weather_asootohime, "風速", range(1, 3))]
    _feature += [get_measurement_value(dt(y, m, d,  6), weather_kumamoto, "露点温度")]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_kumamoto, "露点温度")]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_kumamoto, "現地気圧")]
    _feature += [get_TTd(_date, 14, weather_kumamoto)]
    _feature += [get_TTd(_date, 18, weather_kumamoto)]
    _feature += [get_TTd(_date, 14, weather_unzendake)]
    _feature += [get_TTd(_date, 18, weather_unzendake)]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_kumamoto, "蒸気圧")]
    _feature += [get_diff(dt(y, m, d, 15), dt(y, m, d, 18), weather_kumamoto, "現地気圧")]
    _feature += [minus(get_average(dt(y, m, d, 18), weather_unzendake, "現地気圧", range(0, 72)), get_measurement_value(dt(y, m, d, 18), weather_unzendake, "現地気圧"))]
    _feature += [get_measurement_value(dt(y, m, d, 18), weather_kumamoto, "湿度")]
    _feature += [get_measurement_value(dt(y, m, d,  6) - td(days=1), weather_kumamoto, "視程")]
    _feature += [get_measurement_value(dt(y, m, d, 21) - td(days=1), weather_unzendake, "視程")]
    _feature += [get_measurement_value(dt(y, m, d, 21) - td(days=1), weather_kumamoto, "雲量")]
    #print("fuga")
    _feature = [-math.e if x is None else x for x in _feature]  # replace missing values with -e
    _feature = [-math.e if x == "休止中" else x for x in _feature]  # "休止中" (station suspended) also counts as missing
    _feature = [-math.e if x == "#" else x for x in _feature]  # "#" markers likewise
    #print(_feature)
    return np.array(_feature)
Example #26
def test_inequality_for_usertime():
    in_morning = dt(2016, 6, 2, 9, 1)
    out_lunch = dt(2016, 6, 2, 13, 2)
    in_lunch = dt(2016, 6, 2, 13, 35)
    total_hours = td(hours=8)
    myobj1 = UserTime(in_morning, out_lunch, in_lunch, total_hours, td(minutes=30))
    myobj2 = UserTime(in_morning, out_lunch, in_lunch, total_hours, td(minutes=29))

    assert myobj1 != myobj2
Example #27
    def setUp(self):
        super(ListChecksTestCase, self).setUp()

        self.checks = [
            Check(user=self.alice, name="Alice 1", timeout=td(seconds=3600), grace=td(seconds=900)),
            Check(user=self.alice, name="Alice 2", timeout=td(seconds=86400), grace=td(seconds=3600)),
        ]
        for check in self.checks:
            check.save()
Example #28
    def test_no_end_element(self):
        self.assertListEqual(
            [],
            list(generic_range(
                dt(2001, 5, 3), dt(2001, 5, 3), td(seconds=60))))

        self.assertListEqual(
            [dt(2015, 3, 6), dt(2015, 3, 7), dt(2015, 3, 8)],
            list(generic_range(
                dt(2015, 3, 6), dt(2015, 3, 9), td(days=1))))
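
The implementation under test is not shown; a minimal generic_range consistent with both assertions would be (an assumption, not the project's actual code):

def generic_range(start, stop, step):
    """Yield start, start + step, ... while strictly before stop."""
    current = start
    while current < stop:
        yield current
        current += step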
Example #29
def get_url_string_dates(d1, d2):
    d1 = date(*d1)
    d2 = date(*d2)
    days_lst = [d1 + td(days=i) for i in range((d2 - d1).days)]
    urls = ['index.cgi?month=' + str(day.month) + '&day=' + str(day.day) + '&year=' + str(day.year) for day in days_lst]
    urls = [box_url + url for url in urls][1:]
    day_lst2 = [str(d1 + td(days=day)) for day in range((d2 - d1).days)]
    day_s_lst2 = [''.join(str(day)) for day in day_lst2][1:]
    # skip today's entries, hence the [1:]
    return urls, day_s_lst2
Example #30
def test_equality_for_usertime():
    in_morning = dt(2016, 6, 2, 9, 1)
    out_lunch = dt(2016, 6, 2, 13, 2)
    in_lunch = dt(2016, 6, 2, 13, 35)
    total_hours = td(hours=8)
    min_lunch_length = td(minutes=30)
    myobj1 = UserTime(in_morning, out_lunch, in_lunch, total_hours, min_lunch_length)
    myobj2 = UserTime(in_morning, out_lunch, in_lunch, total_hours, min_lunch_length)

    assert myobj1 == myobj2
Example #31
 def emit(self, record):
     if self.send_time + td(seconds=15) < dt.now():
         self._send_last_logs()
         self.send_time = dt.now()
Example #32
def main():
    global in_cal
    print('Init')
    # get the list of added events
    try:
        with open("added.json", 'rb+') as infile:
            in_cal = pickle.load(infile)
    except FileNotFoundError:
        in_cal = []

    graph = gr.GraphAPI(access_token=at)

    # authenticate with google
    home_dir = os.path.expanduser('~')
    credential_dir = os.path.join(home_dir, '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   'fb-calendar.json')

    store = Storage(credential_path)
    creds = store.get()
    if not creds or creds.invalid:
        auth = OAuth2WebServerFlow(client_id=get_file_line("code/g_client_id"),
                            client_secret=get_file_line("code/g_client_secret"),
                            scope="https://www.googleapis.com/auth/calendar",
                            redirect_uri="http://localhost/")
        creds = tools.run_flow(auth, store)


    # now we can use the creds and send our access token along for the ride!
    http = creds.authorize(httplib2.Http())
    service = discovery.build('calendar', 'v3', http=http)

    all_e = graph.get_object(id="me", fields="events{id, name, timezone, start_time, end_time, event_times, rsvp_status}")
    events = all_e['events']['data']

    for e in events:
        ename = e['name']
        eid = e['id']
        etimezone = e['timezone']
        rsvp = e['rsvp_status']
        try:
            # for i in range(0, len(e['event_times']), 1)):
            estart = e['start_time']
            eend = e['end_time']
            if estart == '' and eend == '':
                estart = e['event_times'][0]['start_time']
                eend = e['event_times'][0]['end_time']
        except KeyError:
            # it might be that only a start time is specified, so try to get it
            try:
                estart = e['start_time']
                if estart != '':
                    # problem is, we have no enddate
                    eend = estart
                    eend = dp.parse(eend)
                    eend = eend + td(hours=1)
                    eend = str(eend.isoformat())
            except KeyError:
                estart=''
                eend=''

        if not (eid in in_cal) and rsvp == "attending":
            # we want to add it into the calendar
            add = {
                'summary': ename,
                'start': {
                    'dateTime': estart,
                    'timeZone': etimezone,
                },
                'end': {
                    'dateTime': eend,
                    'timeZone': etimezone,
                },
                'reminders': {
                    'useDefault': False,
                    'overrides': [
                        {'method': 'popup', 'minutes': 60},
                    ],
                },
            }
            # event = service.events().insert(calendarId=cal_id, body=add).execute()
            # print ('Event created: %s' % (event.get('htmlLink')))
            in_cal.append(eid)

        # reset.
        estart = ''
        eend = ''
Example #33
def add_pushover(request):
    if settings.PUSHOVER_API_TOKEN is None or settings.PUSHOVER_SUBSCRIPTION_URL is None:
        raise Http404("pushover integration is not available")

    if request.method == "POST":
        # Initiate the subscription
        nonce = get_random_string()
        request.session["po_nonce"] = nonce

        failure_url = settings.SITE_ROOT + reverse("hc-channels")
        success_url = settings.SITE_ROOT + reverse(
            "hc-add-pushover") + "?" + urlencode(
                {
                    "nonce": nonce,
                    "prio": request.POST.get("po_priority", "0"),
                })
        subscription_url = settings.PUSHOVER_SUBSCRIPTION_URL + "?" + urlencode(
            {
                "success": success_url,
                "failure": failure_url,
            })

        return redirect(subscription_url)

    # Handle successful subscriptions
    if "pushover_user_key" in request.GET:
        if "nonce" not in request.GET or "prio" not in request.GET:
            return HttpResponseBadRequest()

        # Validate nonce
        if request.GET["nonce"] != request.session.get("po_nonce"):
            return HttpResponseForbidden()

        # Validate priority
        if request.GET["prio"] not in ("-2", "-1", "0", "1", "2"):
            return HttpResponseBadRequest()

        # All looks well--
        del request.session["po_nonce"]

        if request.GET.get("pushover_unsubscribed") == "1":
            # Unsubscription: delete all Pushover channels for this user
            Channel.objects.filter(user=request.user, kind="po").delete()
            return redirect("hc-channels")
        else:
            # Subscription
            user_key = request.GET["pushover_user_key"]
            priority = int(request.GET["prio"])

            return do_add_channel(request, {
                "kind": "po",
                "value": "%s|%d" % (user_key, priority),
            })

    # Show Integration Settings form
    ctx = {
        "page": "channels",
        "po_retry_delay": td(seconds=settings.PUSHOVER_EMERGENCY_RETRY_DELAY),
        "po_expiration": td(seconds=settings.PUSHOVER_EMERGENCY_EXPIRATION),
    }
    return render(request, "integrations/add_pushover.html", ctx)
Example #34
    def handle(self, *args, **options):

        if not settings.DEBUG:
            raise Exception("Trying to seed in production.")

        delete = options['delete']
        print("Deleting old entries: " + str(delete))
        if delete:
            FrontPageNews.objects.all().delete()

        if delete:
            User.objects.all().delete()

        print("Creating superuser admin")
        User.objects.create_user(username='******',
                                 password='******',
                                 first_name=fake.first_name(),
                                 last_name=fake.last_name(),
                                 address=fake.address(),
                                 email='*****@*****.**',
                                 about=fake.text(),
                                 birthday=fake.date_time_between_dates(
                                     datetime_start=None,
                                     datetime_end=None,
                                     tzinfo=None),
                                 is_superuser=True,
                                 is_staff=True)

        if delete:
            NablaGroup.objects.all().delete()

        count = random.randint(5, 10)
        print("Creating %d NablaGroups" % count)
        ngroups = [
            NablaGroup.objects.create(name=fake.word() + "-" + str(i) +
                                      "-komitéen") for i in range(count)
        ]

        if delete:
            FysmatClass.objects.all().delete()

        count = random.randint(7, 10)
        print("Creating %d FysmatClasses" % count)
        year = dt.now().year
        classes = [
            FysmatClass.objects.create(starting_year=year - i,
                                       name="kull%d" % (year - i))
            for i in range(count)
        ]

        count = random.randint(50, 100)
        print("Creating %d NablaUsers" % count)
        for i in range(count):
            username = "******" % i

            user = User.objects.create_user(
                username=username,
                first_name=fake.first_name(),
                last_name=fake.last_name(),
                address=fake.address(),
                password='******',
                email=username + '@stud.ntnu.no',
                ntnu_card_number=str(random.randint(int(1E7),
                                                    int(1E10) - 1)),
                about=fake.text(),
                birthday=fake.date_time_between_dates(datetime_start=None,
                                                      datetime_end=None,
                                                      tzinfo=None),
            )

            nabla_group = random.choice(ngroups)
            nabla_group.user_set.add(user)

            fysmat_class = random.choice(classes)
            fysmat_class.user_set.add(user)

        if delete:
            NewsArticle.objects.all().delete()

        count = random.randint(10, 20)
        print("Creating %d News" % count)
        for i in range(count):
            article = NewsArticle.objects.create(headline=s(),
                                                 body=g(),
                                                 lead_paragraph=g())
            f = FrontPageNews()
            f.content_object = article
            f.save()

        if delete:
            Event.objects.all().delete()

        count = random.randint(10, 20)
        print("Creating %d Events" % count)
        for i in range(count):
            start = fake.date_time_between_dates(datetime_start=dt.now(),
                                                 datetime_end=(dt.now() +
                                                               td(30)))

            event = Event.objects.create(headline=s(),
                                         body=g(),
                                         lead_paragraph=g(),
                                         short_name=ss(),
                                         event_start=start,
                                         event_end=start + td(hours=4),
                                         organizer=fake.name(),
                                         location=fake.address(),
                                         registration_required=True,
                                         registration_deadline=(dt.now() +
                                                                td(30)),
                                         registration_start=dt.now(),
                                         places=10)
            f = FrontPageNews()
            f.content_object = event
            f.save()
Example #35
    def load_message_handler(self, peer, sender, bus, topic, headers, message):
        """
        Call back method for building power meter. Calculates the average
        building demand over a configurable time and manages the curtailment
        time and curtailment break times.
        :param peer:
        :param sender:
        :param bus:
        :param topic:
        :param headers:
        :param message:
        :return:
        """
        # Use instantaneous power or average building power.
        data = message[0]
        current_power = data[self.power_point]
        current_time = parse(headers["Date"])

        power_max, power_min = self.generate_power_points(current_power)
        _log.debug("QUANTITIES: max {} - min {} - cur {}".format(
            power_max, power_min, current_power))

        topic_suffix = "/".join([self.logging_topic, "BuildingFlexibility"])
        message = {
            "MaximumPower": power_max,
            "MinimumPower": power_min,
            "AveragePower": current_power
        }
        self.publish_record(topic_suffix, message)

        if self.bldg_power:
            current_average_window = self.bldg_power[-1][0] - self.bldg_power[
                0][0] + td(seconds=15)
        else:
            current_average_window = td(minutes=0)

        if current_average_window >= self.average_building_power_window and current_power > 0:
            self.bldg_power.append(
                (current_time, current_power, power_min, power_max))
            self.bldg_power.pop(0)
        elif current_power > 0:
            self.bldg_power.append(
                (current_time, current_power, power_min, power_max))

        smoothing_constant = 2.0 / (len(self.bldg_power) +
                                    1.0) * 2.0 if self.bldg_power else 1.0
        smoothing_constant = smoothing_constant if smoothing_constant <= 1.0 else 1.0
        power_sort = list(self.bldg_power)
        power_sort.sort(reverse=True)
        avg_power_max = 0.
        avg_power_min = 0.
        avg_power = 0.

        for n in range(len(self.bldg_power)):
            avg_power += power_sort[n][1] * smoothing_constant * (
                1.0 - smoothing_constant)**n
            avg_power_min += power_sort[n][2] * smoothing_constant * (
                1.0 - smoothing_constant)**n
            avg_power_max += power_sort[n][3] * smoothing_constant * (
                1.0 - smoothing_constant)**n
        self.avg_power = avg_power
        self.power_min = avg_power_min
        self.power_max = avg_power_max
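
The smoothing loop above applies an exponentially decaying weight series over the power samples sorted in descending order. A standalone sketch of the same computation (a hypothetical ema_weighted helper):

def ema_weighted(values):
    """Exponentially weighted sum, heaviest weight on the first sample,
    mirroring the loop in load_message_handler above."""
    if not values:
        return 0.0
    alpha = min(2.0 / (len(values) + 1.0) * 2.0, 1.0)
    return sum(v * alpha * (1.0 - alpha) ** n for n, v in enumerate(values))

print(ema_weighted(sorted([425.0, 440.5, 431.2, 428.9, 436.0], reverse=True)))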
Example #36
df = pd.read_excel(r'C:\Users\J20032\Documents\FTTIY_20190221.xlsx')
pn = '261K775G04'

# brute force handling for sampled parts
samps = ['261K775G03', '261K775G04', '255K250G07']

df = df[df['Part'] == str(pn)]
#def get_yields(pn,df):
# NOTE: add handling for sample inspected parts (i.e. columns will have sample inspected values)
date_init = d = df.Date.min()
date_end = df.Date.max()
dates = []
while d < date_end:
    dates = dates + [d]
    d = d + td(days=1)
if pn in samps:
    CL = 0.98
elif len(df['Date'].unique()) > 20:
    CL = get_20day_avg(df)
else:
    tot_insp = df[(df['Part'] == pn) & (df['WCTR_CD'] != 'MYVARWK') &
                  (df['Oper'] == 4500)].shape[0]
    tot_accept = df[(df['Part'] == pn) & (df['WCTR_CD'] != 'MYVARWK') &
                    (df['Oper'] == 4500) &
                    (df['Result'] == 'ACCEPTED')].shape[0]
    CL = tot_accept / tot_insp
fttiy = [[
    'Date', 'QtyInspected', 'QtyAccepted', 'Yield', 'Centerline', 'UCL', 'LCL'
]]
for n in dates:
Example #37
    def test_it_obeys_next_report_date(self):
        self.profile.next_report_date = now() + td(days=1)
        self.profile.save()

        found = Command().handle_one_monthly_report()
        self.assertFalse(found)
Example #38
 def test_get_status_handles_paused_check(self):
     check = Check()
     check.status = "paused"
     check.last_ping = timezone.now() - td(days=1, minutes=30)
     self.assertEqual(check.get_status(), "paused")
Example #39
    def test_get_status_obeys_down_status(self):
        check = Check()
        check.status = "down"
        check.last_ping = timezone.now() - td(minutes=1)

        self.assertEqual(check.get_status(), "down")
Example #40
def _seed_flights_and_task(_db,
                           names: list,
                           days_before: int = None,
                           days_after: int = None,
                           limiter=250):
    print('Seeding Flights and Tasks table')
    count = 0
    df = pd.DataFrame(pd.read_pickle(get_app_data_path('flights.p')))

    if days_before is not None and days_after is not None:
        df = df.loc[(df.TIME >= now() - td(days=days_before))
                    & (df.TIME <= now() + td(days=days_after))].reset_index(
                        drop=True)

    df.rename(columns={
        'FL': 'flight_num',
        'TER': 'terminal',
        'TIME': 'scheduled_time',
        'TYPE': 'type_',
        'PAX': 'pax',
        'CONTAINERS': 'num_containers',
        "BAY": 'bay'
    },
              inplace=True)

    mixture = rng.normal(-2.5, 3, len(df)) + rng.normal(2.5, 3, len(df))
    df['actual_time'] = [
        t + td(minutes=m) for t, m in zip(df.scheduled_time, mixture)
    ]

    otime = now()
    time = otime + td(minutes=1)
    ddf = df.to_dict('records')
    for e in ProgressEnumerate(ddf):
        _db.session.add(Flights(**e))

        nc = e['num_containers']
        if nc > 0:

            _containers = [4 for _ in range(nc // 4)]
            if nc % 4 != 0:
                _containers.append(nc % 4)

            at = e['actual_time']

            if e['type_'] == 'A':
                source = e['flight_num']
                dest = e['terminal'] + 'HOT'
                rt = at + td(minutes=rng.triangular(1, 1.8, 3))
            else:
                source = e['terminal'] + 'HOT'
                dest = e['flight_num']
                rt = at - td(minutes=35)

            ts = rt + td(minutes=rng.uniform(0, 1.5))
            ct = ts + td(minutes=rng.triangular(16, 17, 18))

            ttt = None
            if at <= time:
                status = 'done'
                driver = rng.choice(names)
                ttt = (ct - ts).total_seconds()
            elif ct > now():
                status = 'er'
                driver = rng.choice(names)
                ct = None
            else:
                status = 'ready'
                ct = None
                if at <= otime:
                    driver = rng.choice(names)
                else:
                    driver = None

            for c in _containers:

                task_data = {
                    'status': status,
                    'ready_time': rt,
                    'completed_time': ct,
                    'flight_time': at,
                    'driver': driver,
                    'containers': c,
                    'source': source,
                    'destination': dest,
                    'bay': e['bay'],
                    'task_start_time': ts,
                    'task_time_taken': ttt
                }
                _db.session.add(Tasks(**task_data))

                if count % limiter == 0:
                    _db.session.commit()
                count += 1

    try:
        print("Committing data. This may take a while..")
        _db.session.commit()
    except SQLAlchemyError as e:
        _db.session.rollback()
        print("ERROR: Mass commit failed!!!! ",
              e,
              sep='\n',
              end='\n',
              file=sys.stderr)
Example #41
CODES_SCHEMA: dict = {  # rf_unknown
    _0001: {
        NAME: "rf_unknown",
        I_: r"^00FFFF02(00|FF)$",  # loopback
        W_: r"^(0[0-9A-F]|F[CF])000005(01|05)$",
    },  # TODO: there appears to be a dodgy? RQ/RP for UFC
    _0002: {  # WIP: outdoor_sensor - CODE_IDX_COMPLEX?
        NAME: "outdoor_sensor",
        I_: r"^0[0-4][0-9A-F]{4}(00|01|02|05)$",  # Domoticz sends ^02!!
        RQ: r"^00$",  # NOTE: sent by an RFG100
    },
    _0004: {  # zone_name
        NAME: "zone_name",
        I_: r"^0[0-9A-F]00([0-9A-F]){40}$",  # RP is same, null_rp: xxxx,7F*20
        RQ: r"^0[0-9A-F]00$",
        EXPIRES: td(days=1),
    },
    _0005: {  # system_zones
        NAME: "system_zones",
        #  I --- 34:092243 --:------ 34:092243 0005 012 000A0000-000F0000-00100000
        I_: r"^(00[01][0-9A-F]{5}){1,3}$",
        RQ: r"^00[01][0-9A-F]$",  # f"00{zone_type}", evohome wont respond to 00
        RP: r"^00[01][0-9A-F]{3,5}$",
        EXPIRES: False,
    },
    _0006: {  # schedule_sync  # TODO: what for DHW schedule?
        NAME: "schedule_sync",
        RQ: r"^00$",
        RP: r"^0005[0-9A-F]{4}$",
    },
    _0008: {  # relay_demand, TODO: check RP
Example #42
    def _compute_history(self, history_range):
        to_date = date.today()
        if history_range == "months":
            delta = rd(months=1)
        elif history_range == "weeks":
            delta = rd(weeks=1)
        else:
            delta = rd(days=1)
        last_dates = {}
        last_qtys = {}
        product_ids = []
        for product in self:
            _logger.debug(
                "Computing '%s' history for product: %s",
                history_range,
                product,
            )
            product_ids.append(product.id)
            history_ids = self.env['product.history'].search([
                ('history_range', '=', history_range),
                ('product_id', '=', product.id)
            ])
            if history_ids:
                self.env.cr.execute(
                    """
                    SELECT to_date, end_qty FROM product_history
                    WHERE product_id = %s
                    AND history_range = %s
                    ORDER BY "id" DESC LIMIT 1
                """, (product.id, history_range))
                last_record = self.env.cr.fetchone()
                last_date = last_record and last_record[0]
                last_qty = last_record and last_record[1] or 0
                from_date = last_date + td(days=1)
            else:
                self.env.cr.execute(
                    """
                    SELECT date FROM stock_move
                    WHERE product_id = %s
                    ORDER BY "date" LIMIT 1
                """, (product.id, ))
                fetch = self.env.cr.fetchone()
                from_date = fetch and fetch[0].date() or to_date
                if history_range == "months":
                    from_date = date(from_date.year, from_date.month, 1)
                elif history_range == "weeks":
                    from_date = from_date - td(days=from_date.weekday())
                last_qty = 0
            last_dates[product.id] = from_date
            last_qtys[product.id] = last_qty

        product_ids.sort()
        last_date = min(last_dates.values())

        sql = """
            SELECT
                MIN(sm.id),
                sm.product_id,
                DATE_TRUNC('day', sm.date),
                sm.state,
                SUM(sm.product_qty) AS product_qty,
                orig.usage,
                dest.usage
            FROM stock_move AS sm,
                 stock_location AS orig,
                 stock_location AS dest
            WHERE
                sm.location_id = orig.id
                AND sm.location_dest_id = dest.id
                AND sm.product_id in %s
                AND sm.date >= %s
                AND sm.state != 'cancel'
            GROUP BY
                sm.product_id,
                DATE_TRUNC('day', sm.date),
                sm.state,
                orig.usage,
                dest.usage
            ORDER BY
                sm.product_id,
                DATE_TRUNC('day', sm.date)
        """
        params = (tuple(product_ids), fields.Datetime.to_string(last_date))
        self.env.cr.execute(sql, params)
        stock_moves = self.env.cr.fetchall()

        for product_id in product_ids:
            stock_moves_product = []

            while len(stock_moves):
                if stock_moves[0][1] == product_id:
                    stock_moves_product.append(stock_moves.pop(0))
                else:
                    break

            if not stock_moves_product:
                continue

            product = self.env['product.product'].browse(product_id)
            from_date = last_dates.get(product_id)
            last_qty = last_qtys.get(product_id, 0)
            history_id = False

            while from_date + delta <= to_date:
                stock_moves_product_dates = []
                start_qty = last_qty
                last_date = from_date + delta - td(days=1)
                purchase_qty = sale_qty = loss_qty = 0
                incoming_qty = outgoing_qty = 0

                i_move = 0
                while i_move < len(stock_moves_product):
                    move_date = stock_moves_product[i_move][2].date()
                    if from_date <= move_date <= last_date:
                        stock_moves_product_dates.append(
                            stock_moves_product.pop(i_move))
                    else:
                        i_move += 1

                for move in stock_moves_product_dates:
                    if move[3] == 'done':
                        if move[5] == 'internal':
                            if move[6] == 'supplier':
                                purchase_qty -= move[4]
                            elif move[6] == 'customer':
                                sale_qty -= move[4]
                            elif move[6] == 'inventory':
                                loss_qty -= move[4]
                        elif move[6] == 'internal':
                            if move[5] == 'supplier':
                                purchase_qty += move[4]
                            elif move[5] == 'customer':
                                sale_qty += move[4]
                            elif move[5] == 'inventory':
                                loss_qty += move[4]
                    else:
                        if move[5] == 'internal':
                            if move[6] == 'supplier':
                                incoming_qty -= move[4]
                            elif move[6] == 'customer':
                                outgoing_qty -= move[4]
                            elif move[6] == 'inventory':
                                outgoing_qty -= move[4]
                        elif move[6] == 'internal':
                            if move[5] == 'supplier':
                                incoming_qty += move[4]
                            elif move[5] == 'customer':
                                outgoing_qty += move[4]
                            elif move[5] == 'inventory':
                                outgoing_qty += move[4]

                last_qty = start_qty + purchase_qty + sale_qty + loss_qty

                vals = {
                    'product_id': product_id,
                    'product_tmpl_id': product.product_tmpl_id.id,
                    'location_id': self.env['stock.location'].search([])[0].id,
                    'from_date': dt.strftime(from_date, "%Y-%m-%d"),
                    'to_date': dt.strftime(last_date, "%Y-%m-%d"),
                    'purchase_qty': purchase_qty,
                    'sale_qty': sale_qty,
                    'loss_qty': loss_qty,
                    'start_qty': start_qty,
                    'end_qty': last_qty,
                    'virtual_qty': last_qty + incoming_qty + outgoing_qty,
                    'incoming_qty': incoming_qty,
                    'outgoing_qty': outgoing_qty,
                    'history_range': history_range,
                }
                history_id = self.env['product.history'].create(vals)
                from_date = last_date + td(days=1)

            if history_id:
                if history_range == "months":
                    product.last_history_month = history_id.id
                elif history_range == "weeks":
                    product.last_history_week = history_id.id
                else:
                    product.last_history_day = history_id.id
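
The bucketing above turns on two details: start dates snap to the first of the month or to Monday, and each bucket's to_date is from_date + delta - 1 day. A standalone sketch of that alignment, assuming python-dateutil (as the rd alias above suggests):

from datetime import date, timedelta as td
from dateutil.relativedelta import relativedelta as rd

def align_start(d, history_range):
    # Snap an arbitrary date to the start of its bucket.
    if history_range == "months":
        return date(d.year, d.month, 1)
    if history_range == "weeks":
        return d - td(days=d.weekday())  # Monday of that week
    return d

def buckets(start, end, history_range):
    delta = {"months": rd(months=1), "weeks": rd(weeks=1)}.get(history_range, rd(days=1))
    d = align_start(start, history_range)
    while d + delta <= end:
        yield d, d + delta - td(days=1)  # inclusive end, as in to_date above
        d = d + delta

for frm, to in buckets(date(2024, 1, 10), date(2024, 3, 1), "months"):
    print(frm, "->", to)  # 2024-01-01 -> 2024-01-31, 2024-02-01 -> 2024-02-29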
示例#43
0
    def _search_age(self, operator, value):
        today = fDate.from_string(fDate.today())
        value_days = td(days=value)
        value_date = fDate.to_string(today - value_days)
        return [('date_release', operator, value_date)]
示例#44
0
def dates_list(max_dates=10):
    """Generate a timeseries dates list."""
    now = dt.now()
    return [str(now + td(days=i * 10))[0:10] for i in range(max_dates)]
示例#45
0
    def get_data(self,
                 start_date,
                 end_date,
                 variables,
                 lowerleft_lat,
                 lowerleft_lon,
                 upperright_lat,
                 upperright_lon,
                 update=None):
        '''
        Retrieve data from ncWMS.
        :param start_date:      datetime.datetime: earliest requested date
        :param end_date:        datetime.datetime: latest requested date
        :param variables:       list of variables to retrieve
        :param lowerleft_lat:   latitude (WGS84), degrees N, lower left of bounding box
        :param lowerleft_lon:   longitude (WGS84), degrees E, lower left of bounding box
        :param upperright_lat:  latitude (WGS84), degrees N, upper right of bounding box
        :param upperright_lon:  longitude (WGS84), degrees E, upper right of bounding box
        :param update:          a Qt progress-bar object (optional)
        :return: None; retrieved data are stored in self.results
        '''

        # Validate input params
        self.check_bbox(lower_left_lat=lowerleft_lat,
                        lower_left_lon=lowerleft_lon,
                        upper_right_lat=upperright_lat,
                        upper_right_lon=upperright_lon)
        self.check_vars(variables)
        self.check_times(start_date, end_date)

        self.request_params = {
            'vars': variables,
            'start_date': start_date,
            'end_date': end_date,
            'bbox':
            [lowerleft_lat, lowerleft_lon, upperright_lat, upperright_lon]
        }
        start_dates = pd.date_range(start_date,
                                    end_date,
                                    freq='%dD' %
                                    (self.request_length, )).to_datetime()

        # Create queue of retrievals, and safeguard against over-running dataset end date
        for s in range(0, len(start_dates)):
            if self.killed:
                break

            if s == len(start_dates) - 1:
                end_date_candidate = end_date + td(seconds=3600 * 24 - 1)
                final_date = True
            else:
                end_date_candidate = start_dates[s +
                                                 1] - td(seconds=self.time_res)
                final_date = False

            if end_date_candidate > self.end_date:
                final_date = True
                end_date_candidate = self.end_date

            self.results[start_dates[s]] = self.retrieve(
                start_dates[s], end_date_candidate
            )  # Get data from start date to next start date minus time resolution
            if update is not None:
                update.emit({
                    'progress':
                    100 * float(s) / float(len(start_dates)),
                    'message':
                    ' (%s to %s)' % (start_dates[s].strftime('%Y-%m-%d'),
                                     end_date_candidate.strftime('%Y-%m-%d'))
                })

            if final_date:
                break
        self.convert_to_nc3()
        if update is not None:
            update.emit({'progress': 100, 'message': ' Cleaning up...'})
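
Two notes on the chunking above: DatetimeIndex.to_datetime() has since been removed from pandas (iterating the index, or calling to_pydatetime(), is the modern equivalent), and the final chunk is extended to the last second of the end day. A standalone sketch of the same queue-building arithmetic (request_length and time_res values are assumptions standing in for the class attributes):

from datetime import datetime as dt, timedelta as td
import pandas as pd

start_date = dt(2020, 1, 1)
end_date = dt(2020, 1, 25)
request_length = 10  # days per request, as in self.request_length
time_res = 3600      # dataset time step in seconds, as in self.time_res

starts = pd.date_range(start_date, end_date, freq='%dD' % request_length)
for i, s in enumerate(starts):
    if i == len(starts) - 1:
        # Last chunk: run to the final second of the last requested day.
        e = end_date + td(seconds=3600 * 24 - 1)
    else:
        # Otherwise stop one time step before the next chunk begins.
        e = starts[i + 1] - td(seconds=time_res)
    print(s, '->', e)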
示例#46
0
    def query_historian(self, device_info):
        """
        Query VOLTTRON historian for all points in device_info
        for regression period.  All data will be combined and aggregated
        to a common interval (i.e., 1Min).
        :param device_info: dict; {regression token: query topic}
        :return:
        """
        aggregated_df = None
        rpc_start = self.start
        rpc_end = rpc_start + td(hours=8)
        # get data via query to historian
        # Query loop for device will continue until start > end
        # or all data for regression period is obtained.
        while rpc_start < self.end.astimezone(pytz.UTC):
            df = None
            # If exclude_weekend_holidays is True then do not query for
            # these times.  Reduces rpc calls and message bus traffic.
            if self.exclude_weekends_holidays:
                if is_weekend_holiday(rpc_start, rpc_end, self.local_tz):
                    rpc_start = rpc_start + td(hours=8)
                    rpc_end = rpc_start + td(minutes=479)
                    if rpc_end > self.end.astimezone(pytz.UTC):
                        rpc_end = self.end.astimezone(pytz.UTC)
                    continue

            for token, topic in device_info.items():
                rpc_start_str = format_timestamp(rpc_start)
                rpc_end_str = format_timestamp(rpc_end)
                _log.debug("RPC start {} - RPC end {} - topic {}".format(
                    rpc_start_str, rpc_end_str, topic))
                # Currently historian is limited to 1000 records per query.
                result = self.vip.rpc.call(
                    self.data_source,
                    'query',
                    topic=topic,
                    start=rpc_start_str,
                    end=rpc_end_str,
                    order='FIRST_TO_LAST',
                    count=1000,
                    external_platform=self.external_platform).get(timeout=300)
                _log.debug(result)
                if not result or not result.get("values"):
                    _log.debug(
                        'ERROR: empty RPC return for '
                        'coefficient *%s* at %s', token, rpc_start)
                    break
                # TODO:  check if enough data is present and compensate for significant missing data
                data = pd.DataFrame(result['values'], columns=['Date', token])
                data['Date'] = pd.to_datetime(data['Date'])
                # Data is aggregated to some common frequency.
                # This is important if data has different seconds/minutes.
                # For minute trended data this is set to 1Min.
                data = data.groupby([
                    pd.Grouper(key='Date',
                               freq=self.data_aggregation_frequency)
                ]).mean()
                df = data if df is None else pd.merge(
                    df, data, how='outer', left_index=True, right_index=True)

            if aggregated_df is None:
                aggregated_df = df
            else:
                aggregated_df = pd.concat([aggregated_df, df])

            # Currently 8 hours is the maximum interval that the historian
            # will support for one-minute data: at most 1000 records are
            # returned per query, and each record has two fields
            # (timestamp, value).
            # Note: if trending is at a sub-minute interval, this logic would
            # need to be revised, or the count in the historian increased.
            rpc_start = rpc_start + td(hours=8)
            if rpc_start + td(minutes=479) <= self.end.astimezone(pytz.UTC):
                rpc_end = rpc_start + td(minutes=479)
            else:
                rpc_end = self.end.astimezone(pytz.UTC)
        return aggregated_df
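
A stripped-down sketch of the window arithmetic driving the loop above: 8-hour steps with 479-minute query spans (ending a minute before the next window starts, so the shared boundary minute is not double-counted), clamped to the overall end time (the dates here are made up):

from datetime import datetime as dt, timedelta as td

start = dt(2021, 6, 1, 0, 0)
end = dt(2021, 6, 2, 4, 0)

rpc_start = start
while rpc_start < end:
    # 479 minutes, not 480: the next window begins on the 8-hour mark.
    rpc_end = min(rpc_start + td(minutes=479), end)
    print(rpc_start, '->', rpc_end)
    rpc_start += td(hours=8)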
示例#47
0
from datetime import datetime as dt, timedelta as td

from pytest import mark

from olympusphotosync import utils


now = dt.now().replace(microsecond=0)

@mark.parametrize('spec,expect', [
    ('2017/11/20',          dt(2017, 11, 20, 0, 0)),
    ('2017-11-20',          dt(2017, 11, 20, 0, 0)),
    ('2017-10-29T12:52:09', dt(2017, 10, 29, 12, 52, 9)),
    ('2017-10-29 12:52',    dt(2017, 10, 29, 12, 52, 0)),
    ('12:52:09',            now.replace(hour=12, minute=52, second=9)),
    ('12:52',               now.replace(hour=12, minute=52, second=0)),
    ('today',               now.replace(hour=0, minute=0, second=0)),
    ('1h',                  now - td(hours=1)),
    ('10d',                 now - td(days=10)),
])
def test_parse_timespec(spec, expect):
    assert utils.parse_timespec(spec) == expect


def test_parse_timespec_failure():
    assert utils.parse_timespec('not a date') is None


@mark.parametrize('size,expect', [
    (2048,       '2.00KiB'),
    (123235,     '120.35KiB'),
    (121234624,  '115.62MiB'),
    (2**34,      '16.00GiB'),
示例#48
0
            'power': None,
            'last_change': dt(2000, 1, 1)
        },
        'deadzone': {
            'temp': 0.1,
            'time': 180
        },
        'daycycle': {
            'avgT': 27,
            'deltaT': 2,
            'coldest_hour': 2
        }
    },
    'leds': {
        'state': None,
        'sunrise': td(hours=7),
        'sunset': td(hours=21)
    },
    'thermistors': {
        'adc': ADCPi(0x68, 0x69, 18)
    }
}
runNo = dt.now().strftime("%Y%m%d%H%M")
verbose = False
cycle_log = []
thermistor_volts = []


#
# Initialisation
#
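
Note that sunrise and sunset in the config above are timedeltas measured from midnight. A small sketch (the comparison helper is an assumption, not code from the original) of how such offsets can drive an on/off decision:

from datetime import datetime as dt, timedelta as td

sunrise = td(hours=7)
sunset = td(hours=21)

def leds_should_be_on(now=None):
    now = now or dt.now()
    # Time of day expressed as a timedelta since midnight, so it can be
    # compared directly against the configured sunrise/sunset offsets.
    since_midnight = now - now.replace(hour=0, minute=0, second=0, microsecond=0)
    return sunrise <= since_midnight < sunset

print(leds_should_be_on(dt(2024, 5, 1, 12, 0)))  # True: midday
print(leds_should_be_on(dt(2024, 5, 1, 23, 0)))  # False: after sunset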
示例#49
0
    def __init__(self, config_path, **kwargs):
        super(TransactiveIlcCoordinator, self).__init__(**kwargs)
        config = utils.load_config(config_path)
        campus = config.get("campus", "")
        building = config.get("building", "")
        logging_topic = config.get("logging_topic", "tnc")
        self.target_topic = '/'.join(
            ['record', 'target_agent', campus, building, 'goal'])
        self.logging_topic = '/'.join(
            [logging_topic, campus, building, "TCILC"])
        cluster_configs = config["clusters"]
        self.clusters = ClusterContainer()

        for cluster_config in cluster_configs:
            device_cluster_config = cluster_config["device_cluster_file"]
            load_type = cluster_config.get("load_type", "discreet")

            if device_cluster_config[0] == "~":
                device_cluster_config = os.path.expanduser(
                    device_cluster_config)

            cluster_config = utils.load_config(device_cluster_config)
            cluster = DeviceClusters(cluster_config, load_type)
            self.clusters.add_curtailment_cluster(cluster)

        self.device_topic_list = []
        self.device_topic_map = {}
        all_devices = self.clusters.get_device_name_list()
        occupancy_schedule = config.get("occupancy_schedule", False)
        self.occupancy_schedule = init_schedule(occupancy_schedule)
        for device_name in all_devices:
            device_topic = topics.DEVICES_VALUE(campus=campus,
                                                building=building,
                                                unit=device_name,
                                                path="",
                                                point="all")

            self.device_topic_list.append(device_topic)
            self.device_topic_map[device_topic] = device_name

        power_token = config["power_meter"]
        power_meter = power_token["device"]
        self.power_point = power_token["point"]
        self.current_time = None
        self.power_meter_topic = topics.DEVICES_VALUE(campus=campus,
                                                      building=building,
                                                      unit=power_meter,
                                                      path="",
                                                      point="all")
        self.demand_limit = None
        self.bldg_power = []
        self.avg_power = 0.
        self.last_demand_update = None
        self.demand_curve = None
        self.power_prices = None
        self.power_min = None
        self.power_max = None

        self.average_building_power_window = td(
            minutes=config.get("average_building_power_window", 15))
        self.minimum_update_time = td(
            minutes=config.get("minimum_update_time", 5))
        self.market_name = config.get("market", "electric")
        self.tz = None
        # self.prices = power_prices
        self.oat_predictions = []
        self.comfort_to_dollar = config.get('comfort_to_dollar', 1.0)

        self.prices_from = config.get("prices_from", 'pubsub')
        self.prices_topic = config.get("price_topic", "prices")
        self.prices_file = config.get("price_file")
        self.join_market(self.market_name, BUYER, None, self.offer_callback,
                         None, self.price_callback, self.error_callback)
示例#50
0
    async def _do_private(self, message):
        # remind me of class
        if 'remind' in message.content:
            match = re.search(
                r'((MAA|CSE|MIE|ECO|PHY)[0-9]{3})',
                message.content,
            )

            if not match:
                await self._no(message)
                return

            ccode = match.group(0)

            # create user's calendar
            if message.author.id not in self._calendars:
                await message.channel.send(
                    'I need your Synapses iCal link!')
                return

            # create remind task
            cal = self._calendars[message.author.id]
            cal.remindme(ccode, message.author)

            await message.channel.send(
                f'will remind you of {ccode} 5 minutes before')

        if 'calendar/ical' in message.content:
            async with message.channel.typing():
                match = re.search(
                    r'https://[^\s]+',
                    message.content,
                )

                if not match:
                    await self._no(message)
                    return

                url = match.group(0)
                reminder = await Reminder.from_link(url)
                self._calendars[message.author.id] = reminder

                await message.channel.send(
                    'your calendar is in my mind ;)')

            # boast possibilities
            await message.channel.send((
                'you can now ask:\n'
                ' - "next class",\n'
                ' - "classes today",\n'
                ' - "classes tomorrow",\n'
                ' - "remind me next MAA306",'
            ))

        if 'next class' in message.content:
            if message.author.id not in self._calendars:
                await self._no(message)
                return

            reminder = self._calendars[message.author.id]
            msg = next(iter(reminder.listme()))
            await message.channel.send(msg)

        if 'classes' in message.content:
            if message.author.id not in self._calendars:
                await self._no(message)
                return

            if 'today' in message.content:
                date = dt.today().date()
            elif 'tomorrow' in message.content:
                date = (dt.today() + td(days=1)).date()
            else:
                await self._no(message)
                return

            reminder = self._calendars[message.author.id]
            for msg in reminder.listme(date=date):
                await message.channel.send(msg)
示例#51
0
def run(modelDir, inputDict):
    try:
        ''' Run the model in its directory. '''
        # Check whether model exist or not
        if not os.path.isdir(modelDir):
            os.makedirs(modelDir)
            inputDict["created"] = str(dt.now())
        # MAYBEFIX: remove this data dump. Check showModel in web.py and renderTemplate()
        with open(pJoin(modelDir, "allInputData.json"), "w") as inputFile:
            json.dump(inputDict, inputFile, indent=4)
        # Copy specific climate data into model directory
        inputDict["climateName"], latforpvwatts = zipCodeToClimateName(
            inputDict["zipCode"])
        shutil.copy(
            pJoin(__metaModel__._omfDir, "data", "Climate",
                  inputDict["climateName"] + ".tmy2"),
            pJoin(modelDir, "climate.tmy2"))
        # Ready to run
        startTime = dt.now()
        # Set up SAM data structures.
        ssc = nrelsam2013.SSCAPI()
        dat = ssc.ssc_data_create()
        # Required user inputs.
        ssc.ssc_data_set_string(dat, "file_name", modelDir + "/climate.tmy2")
        ssc.ssc_data_set_number(dat, "system_size",
                                float(inputDict["SystemSize"]))
        # SAM options where we take defaults.
        ssc.ssc_data_set_number(dat, "derate", 0.97)
        ssc.ssc_data_set_number(dat, "track_mode", 0)
        ssc.ssc_data_set_number(dat, "azimuth", 180)
        ssc.ssc_data_set_number(dat, "tilt_eq_lat", 1)
        # Run PV system simulation.
        mod = ssc.ssc_module_create("pvwattsv1")
        ssc.ssc_module_exec(mod, dat)
        # Set the timezone to UTC; this won't affect calculation or display,
        # and the relative offset is handled in pvWatts.html.
        startDateTime = "2013-01-01 00:00:00 UTC"
        # Timestamp output.
        outData = {}
        outData["timeStamps"] = [
            dt.strftime(
                dt.strptime(startDateTime[0:19], "%Y-%m-%d %H:%M:%S") +
                td(**{"hours": x}), "%Y-%m-%d %H:%M:%S") + " UTC"
            for x in range(int(8760))
        ]
        # HACK: makes it easier to calculate some things later.
        outData["pythonTimeStamps"] = [
            dt(2012, 1, 1, 0) + x * td(hours=1) for x in range(8760)
        ]
        # Geodata output.
        outData["city"] = ssc.ssc_data_get_string(dat, "city")
        outData["state"] = ssc.ssc_data_get_string(dat, "state")
        outData["lat"] = ssc.ssc_data_get_number(dat, "lat")
        outData["lon"] = ssc.ssc_data_get_number(dat, "lon")
        outData["elev"] = ssc.ssc_data_get_number(dat, "elev")
        # Weather output.
        outData["climate"] = {}
        outData["climate"][
            "Global Horizontal Radiation (W/m^2)"] = ssc.ssc_data_get_array(
                dat, "gh")
        outData["climate"][
            "Plane of Array Irradiance (W/m^2)"] = ssc.ssc_data_get_array(
                dat, "poa")
        outData["climate"]["Ambient Temperature (F)"] = ssc.ssc_data_get_array(
            dat, "tamb")
        outData["climate"]["Cell Temperature (F)"] = ssc.ssc_data_get_array(
            dat, "tcell")
        outData["climate"]["Wind Speed (m/s)"] = ssc.ssc_data_get_array(
            dat, "wspd")
        # Power generation.
        outData["powerOutputAc"] = ssc.ssc_data_get_array(dat, "ac")

        # TODO: INSERT TJ CODE BELOW
        tjCode(inputDict, outData)
        del outData["pythonTimeStamps"]
        # TODO: INSERT TJ CODE ABOVE

        # Stdout/stderr.
        outData["stdout"] = "Success"
        outData["stderr"] = ""
        # Write the output.
        with open(pJoin(modelDir, "allOutputData.json"), "w") as outFile:
            json.dump(outData, outFile, indent=4)
        # Update the runTime in the input file.
        endTime = dt.now()
        inputDict["runTime"] = str(
            td(seconds=int((endTime - startTime).total_seconds())))
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
    except Exception:
        # If the input range wasn't valid, delete the output and write the error to disk.
        cancel(modelDir)
        thisErr = traceback.format_exc()
        print('ERROR IN MODEL', modelDir, thisErr)
        inputDict['stderr'] = thisErr
        with open(os.path.join(modelDir, 'stderr.txt'), 'w') as errorFile:
            errorFile.write(thisErr)
        with open(pJoin(modelDir, "allInputData.json"), "w") as inFile:
            json.dump(inputDict, inFile, indent=4)
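
Both timestamp lists above follow the same recipe; a compact sketch of generating one non-leap year of hourly stamps with td (8760 hours, as the original assumes):

from datetime import datetime as dt, timedelta as td

start = dt(2013, 1, 1, 0, 0, 0)
stamps = [start + td(hours=x) for x in range(8760)]

print(stamps[0])    # 2013-01-01 00:00:00
print(stamps[-1])   # 2013-12-31 23:00:00
print(len(stamps))  # 8760 = 365 * 24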
示例#52
0
    def test_hours_work(self):
        s = format_hms(td(seconds=62 + 60 * 60))
        self.assertEqual(s, "1 h 1 min 2 sec")

        s = format_hms(td(seconds=60 * 60))
        self.assertEqual(s, "1 h 0 min 0 sec")
示例#53
0
    def __init__(self, config_path, **kwargs):
        """
        Constructor for the regression agent.
        :param config_path:
        :param kwargs:
        """
        super(RegressionAgent, self).__init__(**kwargs)
        config = utils.load_config(config_path)
        self.debug = config.get("debug", True)
        # Read equipment configuration parameters
        self.regression_inprogress = False
        site = config.get('campus', '')
        building = config.get('building', '')
        device = config.get('device', '')
        subdevices = config.get('subdevices', [])
        device_points = config.get('device_points')
        subdevice_points = config.get('subdevice_points')

        # VIP identity for the VOLTTRON historian
        self.data_source = config.get('historian_vip', 'crate.prod')
        # External platform for remote RPC call.
        self.external_platform = config.get("external_platform", "")

        if device_points is None and subdevice_points is None:
            _log.warning('Missing device or subdevice points in config.')
            _log.warning("Cannot perform regression! Exiting!")
            sys.exit()
        if not device and not subdevices:
            _log.warning('Missing device topic(s)!')

        model_struc = config.get('model_structure')
        model_dependent = config.get('model_dependent')
        model_independent = config.get('model_independent')
        regress_hourly = config.get('regress_hourly', True)
        shift_dependent_data = config.get("shift_dependent_data", False)
        post_processing = config.get('post_processing')
        # All parameters related to running in simulation - for time keeping only
        self.simulation = config.get("simulation", False)
        self.simulation_data_topic = config.get("simulation_data_topic",
                                                "devices")
        simulation_interval = config.get("simulation_regression_interval", 15)
        self.simulation_regression_interval = td(days=simulation_interval)
        self.simulation_initial_time = None

        if model_struc is None or model_dependent is None or model_independent is None:
            _log.exception(
                'At least one of the model fields is missing in config')
            sys.exit()

        device_list = subdevices if subdevices else [device]
        self.device_list = {}
        self.regression_list = {}
        for unit in device_list:
            self.device_list[unit] = Device(site, building, device, unit,
                                            device_points, subdevice_points)
            self.regression_list[unit] = Regression(
                model_independent, model_dependent, model_struc,
                regress_hourly, shift_dependent_data, post_processing,
                self.debug)

        # Aggregate data to this pandas resampling frequency (e.g., "h" for hourly).
        self.data_aggregation_frequency = config.get(
            "data_aggregation_frequency", "h")

        # This sets up the cron schedule to run once every 10080 minutes,
        # i.e. once every 7 days.
        self.run_schedule = config.get("run_schedule", "*/10080 * * * *")
        self.training_interval = int(config.get('training_interval', 5))
        if self.training_interval < 5 and "h" in self.data_aggregation_frequency:
            _log.debug("There is a limited number of days in regression!!")
            _log.debug("Update aggregation frequency for hourly to 15 minute!")
            self.data_aggregation_frequency = "15min"

        self.exclude_weekends_holidays = config.get(
            "exclude_weekends_holidays", True)
        self.run_onstart = config.get("run_onstart", True)

        self.one_shot = config.get('one_shot', False)

        self.local_tz = pytz.timezone(config.get('local_tz', 'US/Pacific'))
        # If one shot is true then start and end should be specified
        if self.one_shot:
            self.start = config.get('start')
            self.end = config.get('end')

        self.coefficient_results = {}
        self.exec_start = None
        _log.debug("Validate historian running vip: %s - platform %s",
                   self.data_source, self.external_platform)
示例#54
0
def _update(check, spec):
    # First, validate the supplied channel codes/names
    if "channels" not in spec:
        # If the channels key is not present, don't update check's channels
        new_channels = None
    elif spec["channels"] == "*":
        # "*" means "all project's channels"
        new_channels = Channel.objects.filter(project=check.project)
    elif spec.get("channels") == "":
        # "" means "empty list"
        new_channels = []
    else:
        # expect a comma-separated list of channel codes or names
        new_channels = set()
        available = list(Channel.objects.filter(project=check.project))

        for s in spec["channels"].split(","):
            if s == "":
                raise BadChannelException("empty channel identifier")

            matches = [c for c in available if str(c.code) == s or c.name == s]
            if len(matches) == 0:
                raise BadChannelException("invalid channel identifier: %s" % s)
            elif len(matches) > 1:
                raise BadChannelException("non-unique channel identifier: %s" %
                                          s)

            new_channels.add(matches[0])

    need_save = False
    if check.pk is None:
        # Empty pk means we're inserting a new check,
        # and so do need to save() it:
        need_save = True

    if "name" in spec and check.name != spec["name"]:
        check.name = spec["name"]
        need_save = True

    if "tags" in spec and check.tags != spec["tags"]:
        check.tags = spec["tags"]
        need_save = True

    if "desc" in spec and check.desc != spec["desc"]:
        check.desc = spec["desc"]
        need_save = True

    if "manual_resume" in spec and check.manual_resume != spec["manual_resume"]:
        check.manual_resume = spec["manual_resume"]
        need_save = True

    if "methods" in spec and check.methods != spec["methods"]:
        check.methods = spec["methods"]
        need_save = True

    if "timeout" in spec and "schedule" not in spec:
        new_timeout = td(seconds=spec["timeout"])
        if check.kind != "simple" or check.timeout != new_timeout:
            check.kind = "simple"
            check.timeout = new_timeout
            need_save = True

    if "grace" in spec:
        new_grace = td(seconds=spec["grace"])
        if check.grace != new_grace:
            check.grace = new_grace
            need_save = True

    if "schedule" in spec:
        if check.kind != "cron" or check.schedule != spec["schedule"]:
            check.kind = "cron"
            check.schedule = spec["schedule"]
            need_save = True

    if "tz" in spec and check.tz != spec["tz"]:
        check.tz = spec["tz"]
        need_save = True

    if need_save:
        check.alert_after = check.going_down_after()
        check.save()

    # This needs to be done after saving the check, because of
    # the M2M relation between checks and channels:
    if new_channels is not None:
        check.channel_set.set(new_channels)

    return check
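
A small sketch of how a spec drives the timing branches of _update above: a timeout without a schedule forces simple mode, while a schedule forces cron mode (the FakeCheck stand-in and its values are hypothetical, not the project's model):

from datetime import timedelta as td

class FakeCheck:
    # Minimal stand-in for the Django model used above.
    kind = "simple"
    timeout = td(days=1)
    grace = td(hours=1)
    schedule = "* * * * *"

def apply_timing(check, spec):
    # Mirrors the timeout/grace/schedule branches of _update above.
    if "timeout" in spec and "schedule" not in spec:
        check.kind = "simple"
        check.timeout = td(seconds=spec["timeout"])
    if "grace" in spec:
        check.grace = td(seconds=spec["grace"])
    if "schedule" in spec:
        check.kind = "cron"
        check.schedule = spec["schedule"]
    return check

c = apply_timing(FakeCheck(), {"timeout": 3600, "grace": 900})
print(c.kind, c.timeout, c.grace)  # simple 1:00:00 0:15:00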
示例#55
0
    def test_get_status_handles_past_grace(self):
        check = Check()
        check.status = "up"
        check.last_ping = timezone.now() - td(days=2)

        self.assertEqual(check.get_status(), "down")
示例#56
0
def test_times(date, holidays):
    """holidays harus berupa list"""
    if date in holidays:
        """hari libur"""
        schedule = "holiday"
        hour_in = td(seconds=0)
        hour_out = td(seconds=0)
        check_in = td(seconds=0)
        check_out = td(seconds=0)
        test_late_in(hour_in, check_in)
        test_early_out(hour_in, check_in)
        test_overtime(hour_in, hour_out, check_in, check_out)
        test_worktime(hour_in, hour_out, check_in, check_out)
        test_totaltime(check_in, check_out)
        test_overtype(hour_in, check_in)
        normal = 0

    else:
        if date.weekday() == 6:
            # Sunday: no scheduled hours and no attendance.
            schedule = "sunday"
            hour_in = td(seconds=0)
            hour_out = td(seconds=0)
            check_in = td(seconds=0)
            check_out = td(seconds=0)
            test_late_in(hour_in, check_in)
            test_early_out(hour_in, check_in)
            test_overtime(hour_in, hour_out, check_in, check_out)
            test_worktime(hour_in, hour_out, check_in, check_out)
            test_totaltime(check_in, check_out)
            test_overtype(hour_in, check_in)
            normal = 0

        elif date.weekday() == 5:
            # Saturday: half day, 08:00-13:00.
            schedule = "saturday"
            hour_in = td(hours=8)
            hour_out = td(hours=13)
            check_in = td(hours=ri(7, 8))
            # timedelta has no .hours attribute, and the bare additions were
            # discarded; compare against a timedelta and accumulate with +=.
            if check_in == td(hours=8):
                check_in += td(minutes=ri(0, 15), seconds=ri(0, 59))
            else:
                check_in += td(minutes=ri(0, 59), seconds=ri(0, 59))
            check_out = td(hours=ri(15, 18),
                           minutes=ri(0, 59),
                           seconds=ri(0, 59))
            test_late_in(hour_in, check_in)
            test_early_out(hour_in, check_in)
            test_overtime(hour_in, hour_out, check_in, check_out)
            test_worktime(hour_in, hour_out, check_in, check_out)
            test_totaltime(check_in, check_out)
            test_overtype(hour_in, check_in)
            normal = 1

        else:
            # Monday through Friday: full day, 08:00-16:00.
            schedule = "normal day"
            hour_in = td(hours=8)
            hour_out = td(hours=16)
            check_in = td(hours=ri(7, 8))
            if check_in == td(hours=8):
                check_in += td(minutes=ri(0, 15), seconds=ri(0, 59))
            else:
                check_in += td(minutes=ri(0, 59), seconds=ri(0, 59))
            check_out = td(hours=ri(15, 18),
                           minutes=ri(0, 59),
                           seconds=ri(0, 59))
            test_late_in(hour_in, check_in)
            test_early_out(hour_in, check_in)
            test_overtime(hour_in, hour_out, check_in, check_out)
            test_worktime(hour_in, hour_out, check_in, check_out)
            test_totaltime(check_in, check_out)
            test_overtype(hour_in, check_in)
示例#57
0
    def test_status_works_with_grace_period(self):
        check = Check()
        check.status = "up"
        check.last_ping = timezone.now() - td(days=1, minutes=30)

        self.assertEqual(check.get_status(), "grace")
示例#58
0
def test_overtype(hour_in, check_in):
    # The original compared the function object test_overtime to 0, which is
    # always truthy; checking the computed difference is assumed to be the intent.
    if hour_in - check_in > td(seconds=0):
        return round((hour_in - check_in) / td(hours=1), 2)
    else:
        return " "
示例#59
0
    def _inverse_age(self):
        today = fDate.from_string(fDate.today())
        for book in self.filtered('date_release'):
            # Subtract the age from today; the original reversed the operands
            # (timedelta - date), which raises a TypeError.
            d = today - td(days=book.age_days)
            book.date_release = fDate.to_string(d)
            print(book.date_release)
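
_search_age (示例#43 above) and _inverse_age are intended as mirror images: one maps an age in days to a date_release bound, the other maps a stored age back to a date. A plain-Python sketch of the round trip, with the Odoo field plumbing omitted:

from datetime import date, timedelta as td

today = date(2024, 6, 1)

def age_to_release_date(age_days):
    # Matches _inverse_age after the fix above: subtract the age from today.
    return today - td(days=age_days)

def release_date_to_age(date_release):
    # The forward computation implied by the pair of methods.
    return (today - date_release).days

release = age_to_release_date(10)
print(release)                       # 2024-05-22
print(release_date_to_age(release))  # 10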
示例#60
0
from datetime import timedelta as td

from cachecow.decorators import cached_function
from django.db.models.signals import post_save
from django.http import Http404
from django.shortcuts import get_object_or_404

from canvas.models import UserInfo
from canvas.redis_models import RealtimeChannel
from drawquest.apps.drawquest_auth.models import User
from drawquest.apps.following import models as following_models
from drawquest.apps.quest_comments.models import QuestComment
from drawquest.apps.quests.models import Quest
from website.apps.canvas_auth.models import User as CanvasUser


@cached_function(timeout=td(days=7), key=[
    'user_profile',
    lambda username: username,
])
def user_profile(username):
    user = get_object_or_404(User.objects.select_related('userinfo', 'userinfo__avatar'), username=username)

    if not user.is_active:
        raise Http404("Deactivated user.")

    follow_counts = following_models.counts(user)

    return {
        'user': user.details(),
        'bio': user.userinfo.bio_text,
        'quest_completion_count': Quest.completed_by_user_count(user),