def get(self):
	"""Export every feedback record as a CSV attachment (admin-only).

	Rejects the request unless the current user's email is listed in the
	'admin' RoleEntity.  Timestamps are stored in UTC and rendered as
	US-Central dates.
	"""
	user = users.get_current_user()
	logging.info(user)
	user_email = user.email()
	# Membership in the 'admin' role gates access to the export.
	role = RoleEntity.get_by_id('admin')
	members = role.members or []
	if user_email not in members:
		logging.critical("Unauthorized request : " + user_email)
		self.response.write("Unauthorized request")
		return

	self.response.headers['Content-Type'] = 'application/csv;charset=UTF-8'
	self.response.headers['Content-disposition'] = 'attachment; filename=all_feedbacks.csv'
	# UnicodeWriter handles non-ASCII comments; the plain csv.writer the
	# original also built was never used and has been removed.
	u_csv_writer = UnicodeWriter(self.response.out)
	qry = FeedbackEntity.query().order(-FeedbackEntity.date_created)
	fbs = qry.fetch()
	u_csv_writer.writerow(['id','restaurant','entrie','quantity','flavor','deliverman','overall','timestamp','comment'])
	# Hoist the zone lookups out of the loop -- they are loop-invariant.
	utc_zone = tz.gettz('UTC')
	cst_zone = tz.gettz('America/Chicago')
	for fb in fbs:
		created_time = fb.date_created
		if created_time:
			created_time = created_time.replace(tzinfo=utc_zone)
			created_time_cst = created_time.astimezone(cst_zone).strftime('%Y-%m-%d')
		else:
			created_time_cst = ""
		u_csv_writer.writerow([str(fb.key.id()),fb.restaurant or "",fb.entrie or "", fb.quantity or "", fb.flavor or "", fb.deliverman or "", fb.overall_rating or "", created_time_cst, fb.comment or ""])
def extract_daytime_passes(overpass_times, sunrise_local, sunset_local, local_time_zone):
    '''extract_daytime_passes takes a list of datetimes, usually from the futs.time_above_the_horizon function, and returns only those that occur during daylight hours.

    overpass_times = the list of sat overpass times to extract the daylight ones from. - format is [(rise-time, fall-time, max-elevation-time), ...] as datetime objects in the UTC time zone.
    sunrise_local = sunrise datetime in its local time zone.
    sunset_local = sunset datetime in its local time zone.
    local_time_zone = the local time zone for the ground position

    Returns: daylight_passes in the same format as the input overpasses - [(rise-time, fall-time, max-elevation-time), ...] as datetime objects in the UTC time zone.

    Contact:
    [email protected]
    '''
    # Convert the local sunrise/sunset into UTC so all comparisons happen in
    # a single zone.
    utc_zone = tz.gettz('UTC')
    local_zone = tz.gettz(local_time_zone)
    sunrise_in_utc = sunrise_local.replace(tzinfo=local_zone).astimezone(utc_zone)
    sunset_in_utc = sunset_local.replace(tzinfo=local_zone).astimezone(utc_zone)

    daylight_passes = []
    for an_overpass in overpass_times:
        assert an_overpass[0] < an_overpass[1], "There is an issue with the order of the overpass elements. This function assumes that the risetime is the first element in each list triplet as per the output of my futs.time_above_the_horizon function."
        # Tag the (naive, UTC) rise time with the UTC zone for comparison.
        sat_risetime = an_overpass[0].replace(tzinfo=utc_zone)
        # A pass counts as daytime when its rise falls strictly between
        # sunrise and sunset (chained comparison instead of bitwise `&`).
        if sunrise_in_utc < sat_risetime < sunset_in_utc:
            daylight_passes.append(an_overpass)

    return daylight_passes
 def action_ship_create(self):
     """Propagate the order's note, requested/effective dates and supplier
     onto every picking created for this sale order.

     Datetimes stored as UTC strings are rendered as local dates in the
     current user's timezone before being written to the picking.
     """
     res = super(SaleOrder, self).action_ship_create()
     # Conversion targets: UTC (storage) -> the requesting user's zone.
     user_tz = self.env['res.users'].browse(self._uid).tz
     from_zone = tz.gettz('UTC')
     to_zone = tz.gettz(user_tz)
     for order in self:
         for picking in order.picking_ids:
             if order.requested_date:
                 # Customer gave a requested date: use it for both the
                 # requested and effective picking dates.
                 datetime_requested = \
                     datetime.strptime(order.requested_date,
                                       '%Y-%m-%d %H:%M:%S').\
                     replace(tzinfo=from_zone).astimezone(to_zone)
                 date_requested = datetime.strftime(datetime_requested,
                                                    '%Y-%m-%d')
                 date_effective = date_requested
             else:
                 # No requested date: leave it unset and fall back to the
                 # order's commitment date for the effective date.
                 date_requested = False
                 datetime_effective = \
                     datetime.strptime(order.commitment_date,
                                       '%Y-%m-%d %H:%M:%S').\
                     replace(tzinfo=from_zone).astimezone(to_zone)
                 date_effective = datetime.strftime(datetime_effective,
                                                    '%Y-%m-%d')
             vals = {'note': order.note,
                     'requested_date': date_requested,
                     'effective_date': date_effective,
                     }
             # Carry the supplier over unless the picking was cancelled or
             # already has one.
             if order.supplier_id and picking.state != 'cancel' \
                     and not picking.supplier_id:
                 vals.update({'supplier_id': order.supplier_id.id})
             picking.write(vals)
     return res
Example #4
0
def deltaSinceLastArrive(name):
    """Return the elapsed time since the newest 'created' record for *name*.

    Falls back to ``timedelta.max`` when no record exists.
    """
    newest = list(mongo.find({'name' : name}).sort('created', DESCENDING).limit(1))
    if not newest:
        return datetime.timedelta.max
    now = datetime.datetime.now(tz.gettz('UTC'))
    created_utc = newest[0]['created'].replace(tzinfo=tz.gettz('UTC'))
    return now - created_utc
def validate_event(event):
    """Validate and normalise an event dict in place, filling in defaults.

    Raises when neither 'volumes' nor 'volume_tags' is present; returns the
    (mutated) event dict.
    """
    if "volumes" not in event and "volume_tags" not in event:
        raise Exception('event should contain a volumes or volume_tags key')
    event.setdefault("volume_tags", {})
    event.setdefault("region", "us-east-1")
    event.setdefault("dry_run", True)
    event.setdefault("timezone", "UTC")
    # Resolve the zone after the default above so it is always present.
    zone = tz.gettz(event["timezone"])
    if "time" in event:
        event["time"] = parser.parse(event['time']).astimezone(zone)
    else:
        event["time"] = datetime.now(zone)
    for unit in ("hours", "days", "weeks", "months", "years"):
        event.setdefault(unit, 0)
    return event
Example #6
0
    def test_fromdate(self):
        """fromdate() should pin midnight of the given date in the target zone."""
        pacific = tz.gettz('US/Pacific')
        source = date(2013, 2, 3)
        result = arrow.Arrow.fromdate(source, pacific)
        assertEqual(result._datetime, datetime(2013, 2, 3, tzinfo=pacific))
Example #7
0
    def _parse_str(tz_expr):
        """Resolve a timezone expression string into a tzinfo object.

        'local' maps to the system zone; an ISO offset like '+05:30' becomes
        a fixed tzoffset; anything else is looked up by name via gettz()
        (which yields None for unknown names).
        """
        # Guard clauses replace the original nested if/else; the unused
        # local `name` has been dropped.
        if tz_expr == 'local':
            return tz.gettz()

        iso_match = TimeZone.tz_re.match(tz_expr)
        if iso_match is None:
            return tz.gettz(tz_expr)

        sign, hours, minutes = iso_match.groups()
        seconds = int(hours) * 3600 + int(minutes) * 60
        if sign == '-':
            seconds *= -1
        return tz.tzoffset(None, seconds)
Example #8
0
    def test_aware_different_tz(self):
        """range() should adopt the start datetime's zone for every step."""
        start = datetime(2013, 1, 1, tzinfo=tz.gettz('US/Eastern'))
        end = datetime(2013, 1, 3, tzinfo=tz.gettz('US/Pacific'))
        result = arrow.Arrow.range('day', start, end)
        # Plain loop instead of a side-effect list comprehension.
        for step in result:
            assertEqual(step.tzinfo, tz.gettz('US/Eastern'))
Example #9
0
    def test_fromdatetime_tzinfo_arg(self):
        """An explicit tzinfo argument should be attached to the naive input."""
        pacific = tz.gettz('US/Pacific')
        naive = datetime(2013, 2, 3, 12, 30, 45, 1)
        result = arrow.Arrow.fromdatetime(naive, pacific)
        assertEqual(result._datetime, naive.replace(tzinfo=pacific))
Example #10
0
    def test_replace_tzinfo(self):
        """replace(tzinfo=...) swaps the zone without converting the clock time."""
        pacific = tz.gettz('US/Pacific')
        arw = arrow.Arrow.utcnow().to('US/Eastern')
        result = arw.replace(tzinfo=pacific)
        assertEqual(result, arw.datetime.replace(tzinfo=pacific))
Example #11
0
    def test_aware_same_tz(self):
        """range() keeps the shared zone of aware start/end bounds."""
        zone = tz.gettz('US/Pacific')
        result = arrow.Arrow.range('day',
            arrow.Arrow(2013, 1, 1, tzinfo=zone),
            arrow.Arrow(2013, 1, 3, tzinfo=zone))
        # Plain loop instead of a side-effect list comprehension.
        for step in result:
            assertEqual(step.tzinfo, zone)
 def feedback_list(self, request):
     """Return all feedback records, newest first, wrapped in a
     FeedbackCollectionMessage.  Admin-only endpoint.
     """
     check_admin(endpoints)
     qry = FeedbackEntity.query().order(-FeedbackEntity.date_created)
     fbs = qry.fetch()
     fms = []
     # Stored timestamps are UTC; zone lookups hoisted out of the loop
     # (they were re-resolved on every iteration).
     utc_zone = tz.gettz("UTC")
     cst_zone = tz.gettz("America/Chicago")
     for fb in fbs:
         created_time = fb.date_created
         if created_time:
             created_time = created_time.replace(tzinfo=utc_zone)
             created_time_cst = created_time.astimezone(cst_zone).strftime("%Y-%m-%d")
         else:
             created_time_cst = ""
         fm = FeedbackWithIdMessage(
             id=str(fb.key.id()),
             restaurant=fb.restaurant,
             entrie=fb.entrie,
             quantity=fb.quantity,
             flavor=fb.flavor,
             deliverman=fb.deliverman,
             comment=fb.comment,
             overall_rating=fb.overall_rating,
             date=created_time_cst,
             useremail=fb.useremail,
         )
         fms.append(fm)
     return FeedbackCollectionMessage(feedbacks=fms)
Example #13
0
 def setUp(self):
     """Create a TestModel row stamped with the current time converted to UTC."""
     # Attach the configured zone to a naive now(), then convert to UTC for
     # storage.  NOTE(review): replace() assumes the host clock runs in
     # settings.TIME_ZONE -- confirm that holds in CI.
     self.local_now = datetime.datetime.now().replace(tzinfo=tz.gettz(
         settings.TIME_ZONE))
     self.utc_now = self.local_now.astimezone(tz.gettz('UTC'))
     self.test_model = TestModel.objects.create(
         name='Test', time_set_manually=self.utc_now
     )
    def local2utc(self, dt):
        """Convert naive *dt* from the tenant's local zone to naive UTC."""
        local_zone = tz.gettz(self.tenant_timezone)
        utc_zone = tz.gettz('UTC')
        aware_local = dt.replace(tzinfo=local_zone)
        return aware_local.astimezone(utc_zone).replace(tzinfo=None)
Example #15
0
    def run(self):
        """Sync bug associations from Bugzilla since the last successful run.

        Each bug row is processed in its own database transaction so a single
        failure does not abort the whole batch.
        """
        # record_associations
        try:
            # KeyError if it's never run successfully
            # TypeError if self.job_information is None
            last_run = self.job_information['last_success']
        except (KeyError, TypeError):
            # First run (or no job info): look a configured number of days
            # into the past.
            last_run = (datetime.datetime.now(tz.gettz('UTC')) -
                        datetime.timedelta(days=self.config.days_into_past))

        # bugzilla runs on PST, so we need to communicate in its time zone
        PST = tz.gettz('PST8PDT')
        last_run_formatted = last_run.astimezone(PST).strftime('%Y-%m-%d')
        query = self.config.query % last_run_formatted
        for (
            bug_id,
            status,
            resolution,
            short_desc,
            signature_set
        ) in self._iterator(query):
            try:
                # each run of this loop is a transaction
                self.database_transaction_executor(
                    self.inner_transaction,
                    bug_id,
                    status,
                    resolution,
                    short_desc,
                    signature_set
                )
            except NothingUsefulHappened:
                # Deliberate best-effort: a no-op transaction is not an error.
                pass
Example #16
0
    def test_changes_display_dict_datetime(self):
        """Datetime changes must be rendered per Django's DATETIME_FORMAT
        (and per locale formatting when USE_L10N is enabled)."""
        timestamp = datetime.datetime(2017, 1, 10, 15, 0, tzinfo=timezone.utc)
        date = datetime.date(2017, 1, 10)
        time = datetime.time(12, 0)
        dtm = DateTimeFieldModel(label='DateTimeField model', timestamp=timestamp, date=date, time=time, naive_dt=self.now)
        dtm.save()
        # The display value is expected in the configured local zone.
        localized_timestamp = timestamp.astimezone(gettz(settings.TIME_ZONE))
        self.assertTrue(dtm.history.latest().changes_display_dict["timestamp"][1] == \
                        dateformat.format(localized_timestamp, settings.DATETIME_FORMAT),
                        msg=("The datetime should be formatted according to Django's settings for"
                             " DATETIME_FORMAT"))
        # Update the field and re-check against the new audit entry.
        timestamp = timezone.now()
        dtm.timestamp = timestamp
        dtm.save()
        localized_timestamp = timestamp.astimezone(gettz(settings.TIME_ZONE))
        self.assertTrue(dtm.history.latest().changes_display_dict["timestamp"][1] == \
                        dateformat.format(localized_timestamp, settings.DATETIME_FORMAT),
                        msg=("The datetime should be formatted according to Django's settings for"
                             " DATETIME_FORMAT"))

        # Change USE_L10N = True
        with self.settings(USE_L10N=True, LANGUAGE_CODE='en-GB'):
            self.assertTrue(dtm.history.latest().changes_display_dict["timestamp"][1] == \
                        formats.localize(localized_timestamp),
                        msg=("The datetime should be formatted according to Django's settings for"
                             " USE_L10N is True with a different LANGUAGE_CODE."))
Example #17
0
def showcsv(request, event_id):
    """Export an event's details and participant list as a CSV download."""
    # BUG FIX: the event must be fetched before the Content-Disposition
    # header references _event.title (previously a NameError).
    _event = get_object_or_404(Events, pk=event_id)
    _user = request.user
    engage_list = Engagement.objects.filter(event=_event)

    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="%s.csv"' % (_event.title)

    writer = csv.writer(response)
    # Stored times are UTC; render them in Vancouver local time.
    to_zone = tz.gettz('America/Vancouver')
    _localtime = _event.starttime.astimezone(to_zone)
    _localtime2 = _event.endtime.astimezone(to_zone)

    writer.writerow(['event title', _event.title])
    writer.writerow(['contact email', _event.contact_email])
    writer.writerow(['start time', _localtime])
    writer.writerow(['end time', _localtime2])
    writer.writerow(['total participants', len(engage_list)])
    writer.writerow(['Number', 'username', 'email'])
    counter = 0
    for engage in engage_list:
        counter = counter + 1
        writer.writerow([counter, engage.user.username, engage.user.email])
    return response
Example #18
0
def dumpdate(epoch=None,fmt='myl',tz='local'):
  """Return a date string from epoch
  predefined formats are
    myf: %Y-%m-%d--%H-%M-%S--%z
    myh: %Y-%m-%d %H:%M:%S %z
    myl: %Y-%m-%d %H:%M:%S.SSS
    rfc: %a, %d %b %Y %H:%M:%S %z
    epoch :%s
    iso : %Y%m%d%H%M%S%z
  predefined timezone are:
    bnl  :  America/New_York ,
    cern :  Europe/Zurich ,
    fnal :  America/Chicago ,
    lbl  :  America/Los_Angeles ,
    Z    :  UTC'
  """
  if epoch is None:
    epoch=time.time()
  # Expand predefined aliases; unknown formats/zones pass through unchanged.
  fmt=myfmt.get(fmt,fmt)
  # NOTE(review): the `tz` parameter shadows any module-level `tz` name inside
  # this function; gettz is assumed to be imported directly.
  tz=gettz(myzones.get(tz))
  # Interpret the epoch in the system zone, then convert to the target zone.
  dt=datetime.fromtimestamp(epoch).replace(tzinfo=gettz()).astimezone(tz)
  s=dt.strftime(fmt)
  # strftime has no millisecond directive; 'SSS' is patched in manually from
  # the microsecond field.
  if 'SSS' in s:
    s=s.replace('SSS',('%06d'%dt.microsecond)[:3])
  return s
Example #19
0
def engage(request, event_id):
    """Register the current user for an event and email a confirmation.

    Fails (with a flash message) when the event is already at capacity or the
    user is already registered.  Always redirects back to the detail page.
    """
    # BUG FIX: the original mixed tabs and spaces in the trailing else/return
    # (a TabError under Python 3); indentation is normalised to spaces.
    _event = get_object_or_404(Events, pk=event_id)
    _user = request.user
    if _event.engaged_people == _event.capacity:
        messages.success(request, "Join fails.")
        return HttpResponseRedirect(reverse('events:detail', args=(_event.id,)))
    else:
        _event.engaged_people += 1
        _event.save()
        if Engagement.objects.filter(user=_user, event=_event).count() == 0:
            Engagement.objects.create(user=_user, event=_event, engage_time=timezone.now(), is_engage=True)
            # Stored times are UTC; show the start in Vancouver local time.
            to_zone = tz.gettz('America/Vancouver')
            _localtime = _event.starttime.astimezone(to_zone)
            subject = '[QuickActivity - Do Not Reply] You Have Joined an Activity!'
            message = 'Hi ' + _user.username + ',\n\n' + 'You have successfully joined the activity ' + _event.title + '. The starting time is ' + unicode(_localtime) + ' and the location is ' + _event.location + '.\n\nHave fun!\n\nYours,\nQuickActivity Team\n'
            try:
                send_mail(subject, message, '*****@*****.**', [_user.email], fail_silently=True)
            except BadHeaderError:
                return HttpResponse('Invalid header found.')
            messages.success(request, "Join succeeds.")
        else:
            messages.success(request, "Join fails.")
        return HttpResponseRedirect(reverse('events:detail', args=(_event.id,)))
 def getLocalDatetime(self):
     """Return the current time converted from UTC to US Eastern."""
     utc_zone = tz.gettz('UTC')
     eastern_zone = tz.gettz('America/New_York')
     current_utc = datetime.utcnow().replace(tzinfo=utc_zone)
     return current_utc.astimezone(eastern_zone)
Example #21
0
def create_call(number,time,*args):
	"""Schedule an alarm call for *number* at *time* and reply with TwiML.

	Optional positional args: args[0] is a timezone name (only honoured when
	longer than 2 characters), args[1] is a message to deliver.
	"""
	timezone = args[0] if len(args) > 0 and len(args[0]) > 2 else None
	message = args[1] if len(args) > 1 else None
	call = Call(None)
	number_obj = Number(number)
	now = datetime.datetime.utcnow().replace(tzinfo=tz.tzutc())
	if not number_obj.exists():
		number_obj.create()
	if timezone:
		number_obj.set("tz",timezone)
	dt = parse(time)
	# Resolve a zone for naive inputs: prefer the zone stored on the number,
	# then the explicit argument, finally UTC (last guard below).
	if dt.tzinfo == None:
		try:
			stored_timezone = tz.gettz(number_obj.get("tz")) if tz.gettz(number_obj.get("tz")) else tz.gettz(timezone)
			dt = dt.replace(tzinfo=stored_timezone)
		except TypeError:
			dt = dt.replace(tzinfo=tz.gettz(timezone))
	else:
		# NOTE(review): this overwrites an offset the caller embedded in the
		# time string with the argument timezone -- confirm that is intended.
		dt = dt.replace(tzinfo=tz.gettz(timezone))
	if dt.tzinfo == None:
		dt = dt.replace(tzinfo=tz.tzutc())
	# Times already in the past roll over to the same time tomorrow.
	if dt < now:
		dt = dt + datetime.timedelta(days=1)

	resp = twilio.twiml.Response()
	resp.message("Alarm created for %s" % (dt.strftime("%c %Z")))
	# Persist the alarm in UTC.
	dt = dt.astimezone(tz.tzutc())
	call.create(number,dt,message)
	return str(resp)
Example #22
0
def optimise_data(rows):
    """Convert each row's UTC timestamp to Melbourne local time and append
    the columns to the module-level data lists consumed by the flask script.
    """
    utc_zone = tz.gettz('UTC')
    local_time_zone = tz.gettz('Australia/Melbourne')

    for row in rows:
        # row[0] is a naive UTC timestamp string; attach UTC and convert
        # straight to Melbourne time.  (The original hopped through the
        # system zone via astimezone(tz=None) first -- redundant, since
        # aware-to-aware conversions compose to the same instant.)
        utc = datetime.strptime(row[0], '%Y-%m-%d %H:%M:%S').replace(tzinfo=utc_zone)
        local_time = utc.astimezone(local_time_zone)
        time_data.append(datetime.strftime(local_time, '%Y-%m-%d %H:%M:%S'))

        humidity_data.append(row[1])
        temperature_data.append(row[2])
        pressure_data.append(row[3])
        discomfort_data.append(row[4])
def build():
    """Pull latest sources, freeze the site, upload it to S3 and log the deploy.

    Returns the human-readable deploy-time line appended to the log file.
    """
    # Capture the deploy timestamp in Los Angeles local time.
    utc = datetime.utcnow()
    from_zone = tz.gettz('UTC')
    to_zone = tz.gettz('America/Los_Angeles')
    utc = utc.replace(tzinfo=from_zone)
    la_time = utc.astimezone(to_zone)

    # Refresh both repositories, then run the static-site generator.
    os.chdir(POETROID_PATH)
    os.system("git pull origin master")
    os.chdir(POEMS_PATH)
    os.system("git pull origin master")
    os.chdir(FROZEN_PIE_PATH)
    os.system("../env/bin/python pie.py --config " + POETROID_PATH + os.sep + CONFIG)
    os.chdir(POETROID_PATH)

    print 'Uploading %s to Amazon S3 bucket %s' % (INDEX_HTML, BUCKET_NAME)

    # Upload the generated index page plus every JS asset.
    k = Key(BUCKET)
    k.key = 'index.html'
    k.set_contents_from_filename(INDEX_HTML)

    for jsfile in glob(JS_DIR + os.sep + "*.js"):
        k = Key(BUCKET)
        filename = os.path.basename(jsfile)
        k.key = filename
        k.set_contents_from_filename(jsfile)

    update_yaml()

    deploy_time = 'Deployed at ' + str(la_time) + "\n"
    with open(LOG_FILE, "a") as mylog:
        mylog.write(deploy_time)

    return deploy_time
Example #24
0
def index(request):
    """Render the dashboard with the latest valid reading for every channel.

    Values are converted to the requesting user's preferred measurement
    system and timestamps are shown in US Eastern time.
    """
    channels = Channel.objects.filter(status__in=[Channel.ENABLED, Channel.PAUSED])
    latest_readings = {}
    user_system = Preference.objects.get(user=request.user).measurement_system

    # for time conversion  UTC to local
    from_zone = tz.gettz('UTC')
    to_zone = tz.gettz('America/New_York')

    for channel in channels:
        channel_system = channel.channel_type.measurement_system
        latest = Reading.objects.filter(channel=channel, is_valid=True).order_by('-monitor_time')
        if latest:
            reading = latest[0]
            # Convert between metric/imperial when the user's preference
            # differs from the channel's native system.
            if user_system == channel_system:
                value = reading.value
            elif user_system == Unit.IMPERIAL:
                value = channel.channel_type.units.m_to_i(reading.value)
            else:
                value = channel.channel_type.units.i_to_m(reading.value)

            utc = reading.monitor_time.replace(tzinfo=from_zone)
            eastern = utc.astimezone(to_zone).strftime("%b %d, %Y %H:%M:%S")
            units = channel.get_unit_abbrevs()[user_system]
            # .exists() avoids fetching alert rows just to count them.
            if Alert.objects.filter(channel=channel, active=True).exists():
                status = 'alert'
            else:
                status = 'ok'
            # str(channel) instead of calling __str__ directly.
            latest_readings[str(channel)] = [value, channel.id, units, eastern, status]
    return render(request, 'monitor/dashboard.html', {'latest_readings': latest_readings})
Example #25
0
def get_readings(request, channel_id, days=30):
    """Return up to *days* days of valid readings for a channel as JSON,
    keyed by US-Eastern timestamp and converted to the user's unit system.
    """
    chan = Channel.objects.get(pk=channel_id)
    earliest = datetime.datetime.now() - datetime.timedelta(days=int(days))
    readings = Reading.objects.filter(channel=chan, monitor_time__gt=earliest, is_valid=True).order_by('monitor_time')
    response_data = OrderedDict()
    user_pref = Preference.objects.get(user=request.user)
    user_system = user_pref.measurement_system
    channel_system = chan.channel_type.measurement_system
    response_data['unit'] = chan.get_units()[user_system]

    utc_zone = tz.gettz('UTC')
    eastern_zone = tz.gettz('America/New_York')

    for reading in readings:
        # Convert between measurement systems where they differ.
        if channel_system == user_system:
            value = reading.value
        elif user_system == Unit.IMPERIAL:
            value = reading.channel.channel_type.units.m_to_i(reading.value)
        else:
            value = reading.channel.channel_type.units.i_to_m(reading.value)

        localized = reading.monitor_time.replace(tzinfo=utc_zone).astimezone(eastern_zone)
        response_data[localized.strftime("%Y-%m-%d %H:%M:%S")] = value

    return HttpResponse(json.dumps(response_data), content_type="application/json")
Example #26
0
    def __init__(self, disabled=False, video=None, uptime=None):
        """Set up GPS logging state and, unless disabled, start the controller."""
        self.disabled = disabled
        self.video = video
        self.uptime = uptime

        # Zones used when converting GPS timestamps.
        # NOTE(review): tz.gettz('PDT') resolves to None on most platforms
        # (bare abbreviations are ambiguous) -- 'America/Los_Angeles' may be
        # what was intended; confirm.
        self.from_zone = tz.gettz('UTC')
        self.to_zone = tz.gettz('PDT')

        self.file_format = 'log'

        self.controller = None

        self.gps_now = None
        self.prev_json_datapoints = None
        self.json_datapoints = []

        if not disabled:
            # Only wire a frame callback when a camera is attached.
            if self.video.camera:
                callback = lambda: self.callback(video=video)
            else:
                callback = None

            self.controller = \
                GPSController(callback=callback, throttle_callback=True)

            self.controller.start()
def get_intraday_data(symbol, interval_seconds=61, num_days=15):
    """Hits Google Finance Website and creates a dataframe of historical prices"""
    import datetime
    print('pulling data from Google...')
    # Specify URL string based on function inputs.
    url_string = 'http://www.google.com/finance/getprices?q={0}'.format(symbol.upper())
    url_string += "&i={0}&p={1}d&f=d,o,h,l,c,v".format(interval_seconds,num_days)
    # Request the text, and split by each line
    r = requests.get(url_string).text.split()
    # Split each line by a comma, starting at the 8th line
    r = [line.split(',') for line in r[7:]]
    # Save data in Pandas DataFrame
    df = pd.DataFrame(r, columns=['Datetime','Close','High','Low','Open','Volume'])
    # Timestamps arrive as 'a<unix-seconds>'; strip the prefix and convert to
    # naive US/Pacific datetimes.
    # NOTE(review): fromtimestamp() yields the HOST's local time, so tagging it
    # as UTC below is only correct on a UTC host -- confirm deployment.
    from_zone = tz.gettz('UTC')
    to_zone = tz.gettz('US/Pacific')
    datetimed = [datetime.datetime.fromtimestamp(int(x[1:])) for x in list(df['Datetime'])]
    datetimed_as_utc = [x.replace(tzinfo=from_zone) for x in datetimed]
    df['Datetime'] = [x.astimezone(to_zone).replace(tzinfo=None) for x in datetimed_as_utc]
    df = df[['Datetime', 'Open', 'High', 'Low', 'Close']]
    # Price columns come back as strings; coerce them to numerics.
    for column_name in ['Open', 'High', 'Low', 'Close']:
        df[column_name] = pd.to_numeric(df[column_name])
    return df
Example #28
0
def changeTime(utcTime):
    """Render a UTC datetime as a 'CST' wall-clock string."""
    from_zone = tz.gettz('UTC')
    # NOTE(review): tz.gettz('CST') resolves to None on most systems (bare
    # abbreviations are ambiguous), which makes astimezone() fall back to the
    # host's local zone -- confirm whether 'America/Chicago' or
    # 'Asia/Shanghai' was intended.
    to_zone = tz.gettz('CST')
    utc =utcTime
    # Tag the (assumed UTC) input, then convert to the target zone.
    utc = utc.replace(tzinfo=from_zone)
    local = utc.astimezone(to_zone)
    return datetime.strftime(local, "%Y-%m-%d %H:%M:%S")
Example #29
0
def parse_tweets(tweets):
    """ Parse an array of tweets
        The bits we care about look like this:
            tweet['text'] # the tweet
            tweet['geo']={u'type': u'Point', u'coordinates': [41.8169215, -71.4063959]}
            tweet['entities']['hashtags']=[{u'indices': [11, 18], u'text': u'double'}, {u'indices': [19, 24], u'text': u'hash'}]

            Since the api orders tweets like the timeline does, return
    """
    from_zone = tz.gettz('UTC')
    to_zone = tz.gettz(TZ)
    # NOTE(review): now() is naive local time yet is tagged UTC below --
    # day-boundary comparisons may be off on non-UTC hosts; confirm.
    now = datetime.datetime.now()
    now = now.replace(tzinfo=from_zone)
    now = now.astimezone(to_zone)

    for tweet in tweets:
        # Thanks Stack Overflow: http://stackoverflow.com/questions/4770297/python-convert-utc-datetime-string-to-local-datetime
        twitter_date = parser.parse(tweet["created_at"])
        # BUG FIX: replace() returns a new datetime and the original discarded
        # the result (a no-op statement).  Attach UTC only when the parsed
        # timestamp carries no offset of its own, so an explicit offset from
        # the API is preserved.
        if twitter_date.tzinfo is None:
            twitter_date = twitter_date.replace(tzinfo=from_zone)
        local_date = twitter_date.astimezone(to_zone)
        # we only care about today's tweets.
        if now.day == local_date.day:
            hashtags = [tag['text'] for tag in tweet['entities']['hashtags']]
            if tweet['geo'] != None:
                if HASHTAG_TRIGGER in hashtags:
                    return tweet['text'], tweet['geo']
            else:
                pass # since this has no geo data for us
    return None, None
Example #30
0
def print_grid(zone_names, options):
    """
    Print the tzgrid.

    @param zone_names list of zones to print
    @param options command line options to control printing behavior
    """
    tzs = []
    for name in zone_names:
        # NOTE(review): gettz() is called twice per name, and the loop
        # variable `tz` leaks out of this loop -- the header row below is
        # therefore colored using the LAST zone in zone_names; confirm that
        # is intended.
        tz = gettz(name)
        tzs.append(tuple([name, gettz(name)]))

    size = label_size(tzs)
    tzs = get_sorted_zones(tzs, options)

    # 24-hour mode prints an extra unlabeled header row of day/hour markers.
    if not options.twelve:
        fmt = get_color_label_format(size, tz) + "   %s"
        times = format_range_hours_days(
                tzs[0][1], options.width - size - 5, options)
        print(fmt % ("", times))

    # One labeled row per zone, 12- or 24-hour style per options.twelve.
    for name, tz in tzs:
        fmt = get_color_label_format(size, tz) + " | %s"
        if (options.twelve):
            times = format_range_am_pm(tz, options.width - size - 5, options)
        else:
            times = format_range_hours(tz, options.width - size - 5, options)

        print(fmt % (name, times))
Example #31
0
def field_value(field_name,
                bushfire=None,
                request=None,
                url_type="auto",
                is_upper=None,
                external_email=False):
    """
    Return the value of model field to dispay in the email.

    Special-cased field names are derived (geo/region/district/urls/lat-lon
    components); everything else is read off the bushfire model via
    FIELD_MAPPING.  Any failure falls back to "-".
    """
    if bushfire:
        try:
            if field_name == "origin_point_geo":
                return bushfire.origin_geo
            elif field_name == "region":
                if is_upper == True:
                    return bushfire.region.name.upper()
                else:
                    return bushfire.region.name
            elif field_name == "district":
                if is_upper == True:
                    return bushfire.district.name.upper()
                else:
                    return bushfire.district.name
            elif field_name == "fire_number":
                # Internal emails get a clickable link; external ones plain text.
                if request and not external_email:
                    return mark_safe("<a href='{}'>{}</a>".format(
                        utils.get_bushfire_url(request, bushfire, url_type),
                        bushfire.fire_number))
                else:
                    return bushfire.fire_number
            elif field_name == "url_link":
                return mark_safe("<a href='{0}'>{0}</a>".format(
                    utils.get_bushfire_url(request, bushfire, url_type)))
            elif field_name == "url":
                return utils.get_bushfire_url(request, bushfire, url_type)
            elif field_name == "report_status":
                return bushfire.report_status_name
            elif field_name == "latitude_degree":
                return LatLon.Latitude(bushfire.origin_point.get_y()).degree
            elif field_name == "latitude_minute":
                return LatLon.Latitude(bushfire.origin_point.get_y()).minute
            elif field_name == "latitude_second":
                return LatLon.Latitude(bushfire.origin_point.get_y()).second
            elif field_name == "longitude_degree":
                return LatLon.Longitude(bushfire.origin_point.get_x()).degree
            elif field_name == "longitude_minute":
                return LatLon.Longitude(bushfire.origin_point.get_x()).minute
            elif field_name == "longitude_second":
                return LatLon.Longitude(bushfire.origin_point.get_x()).second

            value = getattr(bushfire,
                            FIELD_MAPPING.get(field_name) or field_name)
            if field_name == "dfes_incident_no":
                return value or "Not available"
            elif value is None:
                return "-"
            elif isinstance(value, bool):
                # Booleans render as Yes/No (checked before dispatch_pw so
                # that True == 1 cannot fall through to that branch).
                return "Yes" if value else "No"
            elif field_name == "dispatch_pw":
                return "Yes" if value == 1 else "No"
            elif isinstance(value, datetime.datetime):
                # Datetimes are rendered in the configured local zone.
                return value.astimezone(tz.gettz(
                    settings.TIME_ZONE)).strftime('%Y-%m-%d %H:%M')
            else:
                value = str(value).strip()
                return value or "-"
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # still propagate; any model/formatting error renders as "-".
            return "-"
    else:
        return "-"
Example #32
0
def fetch_wind_solar_forecasts(zone_key='US-SPP',
                               session=None,
                               target_datetime=None,
                               logger=getLogger(__name__)):
    """
    Requests the load forecast (in MW) of a given zone
    Arguments:
    zone_key (optional) -- used in case a parser is able to fetch multiple zones
    session (optional) -- request session passed in order to re-use an existing session
    target_datetime (optional) -- used if parser can fetch data for a specific day
    logger (optional) -- handles logging when parser is run as main
    Return:
    A list of dictionaries in the form:
    {
      'zoneKey': 'US-SPP',
      'datetime': '2017-01-01T00:00:00Z',
      'value': 28576.0,
      'source': 'mysource.com'
    }
    """

    if not target_datetime:
        target_datetime = datetime.datetime.now()

    if isinstance(target_datetime, datetime.datetime):
        dt = target_datetime
    else:
        dt = parser.parse(target_datetime)
    FORECAST_URL = 'https://marketplace.spp.org/file-browser-api/download/midterm-resource-forecast?path=%2F{0}%2F{1:02d}%2F{2:02d}%2FOP-MTRF-{0}{1:02d}{2:02d}0000.csv'.format(
        dt.year, dt.month, dt.day)

    raw_data = get_data(FORECAST_URL)

    # sometimes there is a leading whitespace in column names
    raw_data.columns = raw_data.columns.str.lstrip()

    data = []
    for index, row in raw_data.iterrows():
        forecast = row.to_dict()

        dt = parser.parse(
            forecast['GMTIntervalEnd']).replace(tzinfo=tz.gettz('Etc/GMT'))

        try:
            # BUG FIX: the original read the Wind column into `solar` and the
            # Solar column into `wind`, swapping the reported production mix.
            wind = float(forecast['Wind Forecast MW'])
            solar = float(forecast['Solar Forecast MW'])
        except ValueError:
            # can be NaN
            continue

        datapoint = {
            'datetime': dt,
            'production': {
                'solar': solar,
                'wind': wind,
            },
            'zoneKey': zone_key,
            'source': 'spp.org'
        }

        data.append(datapoint)

    return data
def tm13():
    """Return the current Unix timestamp in milliseconds.

    The original implementation formatted the current Asia/Shanghai time as
    a string, re-parsed it as a *naive* datetime, and pushed it through
    time.mktime(), which interprets naive datetimes in the *system* local
    timezone -- so the result was wrong on any machine not configured for
    Asia/Shanghai.  An epoch timestamp is timezone-independent, so it can
    be read directly.
    """
    return int(time.time() * 1000)
Example #34
0
    def changes_display_dict(self):
        """
        :return: The changes recorded in this log entry intended for display to users as a dictionary object.
        """
        # Get the model and model_fields
        from auditlog.registry import auditlog

        model = self.content_type.model_class()
        model_fields = auditlog.get_model_fields(model._meta.model)
        changes_display_dict = {}
        # grab the changes_dict and iterate through
        for field_name, values in self.changes_dict.items():
            # try to get the field attribute on the model
            try:
                field = model._meta.get_field(field_name)
            except FieldDoesNotExist:
                # Field no longer exists on the model (e.g. removed by a
                # migration); show the recorded raw values unchanged.
                changes_display_dict[field_name] = values
                continue
            values_display = []
            # handle choices fields and Postgres ArrayField to get human readable version
            # (use getattr with a default so fields lacking the attribute
            # don't raise AttributeError, as the original code could)
            choices_dict = None
            if getattr(field, "choices", None):
                choices_dict = dict(field.choices)
            if (
                hasattr(field, "base_field")
                and isinstance(field.base_field, Field)
                and getattr(field.base_field, "choices", None)
            ):
                # ArrayField: the choices live on the base field.
                choices_dict = dict(field.base_field.choices)

            if choices_dict:
                for value in values:
                    try:
                        # Stored values are strings; a list-valued field is
                        # recorded as a Python literal such as "['a', 'b']".
                        value = ast.literal_eval(value)
                    except Exception:
                        # ast.literal_eval may raise ValueError, TypeError,
                        # SyntaxError, MemoryError or RecursionError on
                        # arbitrary stored strings (the original code used a
                        # duplicated `except ValueError` plus a bare
                        # `except:` for this); any failure just means the
                        # value is looked up as-is below.
                        pass
                    if isinstance(value, list):
                        values_display.append(
                            ", ".join(
                                choices_dict.get(val, "None") for val in value
                            )
                        )
                    else:
                        values_display.append(choices_dict.get(value, "None"))
            else:
                try:
                    field_type = field.get_internal_type()
                except AttributeError:
                    # if the field is a relationship it has no internal type and exclude it
                    continue
                for value in values:
                    # handle case where field is a datetime, date, or time type
                    if field_type in ["DateTimeField", "DateField", "TimeField"]:
                        try:
                            value = parser.parse(value)
                            if field_type == "DateField":
                                value = value.date()
                            elif field_type == "TimeField":
                                value = value.time()
                            elif field_type == "DateTimeField":
                                # Stored timestamps are treated as UTC and
                                # rendered in the project-configured zone.
                                value = value.replace(tzinfo=timezone.utc)
                                value = value.astimezone(gettz(settings.TIME_ZONE))
                            value = formats.localize(value)
                        except ValueError:
                            pass
                    # check if length is longer than 140 and truncate with ellipsis
                    if len(value) > 140:
                        value = "{}...".format(value[:140])

                    values_display.append(value)
            verbose_name = model_fields["mapping_fields"].get(
                field.name, getattr(field, "verbose_name", field.name)
            )
            changes_display_dict[verbose_name] = values_display
        return changes_display_dict
Example #35
0
# Supported aggregation intervals accepted by rainfall queries.
INTERVAL_15MIN = "15-minute"
INTERVAL_HOURLY = "hourly"
INTERVAL_DAILY = "daily"
INTERVAL_MONTHLY = "monthly"
INTERVAL_SUM = "total"
INTERVAL_TRUTHS = [
    INTERVAL_15MIN, INTERVAL_HOURLY, INTERVAL_DAILY, INTERVAL_MONTHLY,
    INTERVAL_SUM
]
# Accepted truthy spellings for the zerofill query parameter.
ZEROFILL_TRUTHS = ['yes', 'true', '1', 'zerofill']
MIN_INTERVAL = 15  # minimum temporal resolution of the data, in minutes

# configure timezone objects
TZ_STRING = "America/New_York"
TZ = timezone(TZ_STRING)
TZI = tz.gettz(TZ_STRING)
# dateutil does not resolve bare abbreviations on its own; map both the
# daylight and standard abbreviations onto the zone above for parsing.
TZINFOS = {'EDT': TZI, 'EST': TZI}

# Constants for data storage and attributes
TIMESTAMP_FIELD = 'Timestamp'
RAINFALL_FIELD = 'Rainfall (in)'
RAINFALL_NODATA_STRING = "N/D"
SOURCE_FIELD = 'Source'
ID_FIELD = 'SID'
INTERVAL_FIELD = 'Interval'
# NOTE(review): this rebinds INTERVAL_15MIN (defined as "15-minute" above)
# to a new value; INTERVAL_TRUTHS still holds the old string -- confirm
# this shadowing is intended.
INTERVAL_15MIN = "15min"
INTERVAL_5MIN = "5min"
SENSOR_PIXELS = 'pixels'
SENSOR_GAUGES = 'gauges'
STATUS_CALIBRATED = 'calibrated'
STATUS_REALTIME = 'realtime'
Example #36
0
 def get_prep_value(self, value):
     """Normalize datetime values to UTC before storage; pass every other
     value through untouched."""
     if not isinstance(value, datetime.datetime):
         return value
     return value.astimezone(tz.gettz('UTC'))
Example #37
0
from datetime import datetime

from astral import LocationInfo
from astral.sun import sun
from dateutil import tz

# Location fix: Madrid sits at ~40.42 N; the original latitude of
# 44.419177 put the observer roughly 450 km north, in the Bay of Biscay,
# skewing every computed sun time.
city = LocationInfo("Madrid", "Spain", "Europe/Madrid", 40.419177, -3.703295)
print(f"Information for {city.name}/{city.region}")
print(f"Timezone: {city.timezone}")
print(f"Latitude: {city.latitude:.02f}; Longitude: {city.longitude:.02f}\n")

# Today's sun events; treated as UTC by the conversions below.
s = sun(city.observer, date=datetime.now())

from_zone = tz.gettz('UTC')
madrid_zone = tz.gettz('Europe/Madrid')

print(f'Dawn Madrid time: {s["dawn"].replace(tzinfo=from_zone).astimezone(madrid_zone).strftime("%d/%m/%Y %H:%M:%S")}')
print(
    f'Sunrise Madrid time: {s["sunrise"].replace(tzinfo=from_zone).astimezone(madrid_zone).strftime("%d/%m/%Y %H:%M:%S")}')
print(f'Noon Madrid time: {s["noon"].replace(tzinfo=from_zone).astimezone(madrid_zone).strftime("%d/%m/%Y %H:%M:%S")}')
print(
    f'Sunset Madrid time: {s["sunset"].replace(tzinfo=from_zone).astimezone(madrid_zone).strftime("%d/%m/%Y %H:%M:%S")}')
print(f'Dusk Madrid time: {s["dusk"].replace(tzinfo=from_zone).astimezone(madrid_zone).strftime("%d/%m/%Y %H:%M:%S")}')
Example #38
0
from dateutil import tz
from dateutil.tz import tzutc

from cubedash._utils import alchemy_engine
from cubedash.summary import SummaryStore
from cubedash.summary._extents import GridRegionInfo
from cubedash.summary._schema import CUBEDASH_SCHEMA
from datacube.index import Index
from datacube.index.hl import Doc2Dataset
from datacube.model import Range
from datacube.utils import read_documents
from .asserts import expect_values as _expect_values

# Fixture directory located next to this test module.
TEST_DATA_DIR = Path(__file__).parent / "data"

# Timezone used when interpreting dataset timestamps in these tests.
DEFAULT_TZ = tz.gettz("Australia/Darwin")


def _populate_from_dump(session_dea_index, expected_type: str,
                        dump_path: Path):
    """Load every dataset document from `dump_path` into the given index.

    NOTE(review): `dataset_count` and `ls8_nbar_scene` are assigned but
    never used in the visible code -- this snippet appears truncated.
    """
    ls8_nbar_scene = session_dea_index.products.get_by_name(expected_type)
    dataset_count = 0

    create_dataset = Doc2Dataset(session_dea_index)

    for _, doc in read_documents(dump_path):
        # Prefer the human-readable "ga_label" when present; fall back to id.
        label = doc["ga_label"] if ("ga_label" in doc) else doc["id"]
        dataset, err = create_dataset(
            doc, f"file://example.com/test_dataset/{label}")
        assert dataset is not None, err
        created = session_dea_index.datasets.add(dataset)
Example #39
0
def start():
    """Show the banner, load cookies, then run the card-activation task for
    every configured account."""
    global jd_cookie
    banner = """
════════════════════════════════════════
║                                      ║
║     京东一键开卡-公众号iosrule          ║
║                                      ║
════════════════════════════════════════
"""
    print(banner)
    Readint()
    iscookie()
    for index, cookie in enumerate(cookiesList):
        if github == 0:
            print(f"【{index+1}】-【用户名:{pinNameList[index]}】-任务开始\n")
        else:
            print(f"【{index+1}】-任务开始\n")
        jd_cookie = cookie
        Yijiankaika()


if __name__ == '__main__':
    # Log the script's start time in China Standard Time, then run.
    print(
        'Localtime',
        datetime.now(tz=tz.gettz('Asia/Shanghai')).strftime(
            "%Y-%m-%d %H:%M:%S", ))
    start()
Example #40
0
def days_till_expiration(expiration):
    """Return the fractional number of days from now until 16:00 Eastern
    on the given expiration date (positive for future dates).

    expiration -- date string in "%Y-%m-%d" format.

    Fixes two defects in the original:
    - "ET" is not a valid IANA zone name, so tz.gettz("ET") returned None
      and the computation silently fell back to naive local time; use
      "America/New_York" instead.
    - The subtraction was inverted (now - expiration), which yields days
      *since* expiration -- a "days till" helper should be positive before
      the deadline.
    """
    eastern = tz.gettz("America/New_York")
    expires_at = datetime.datetime.strptime(expiration, "%Y-%m-%d").replace(
        hour=16, minute=0, second=0, tzinfo=eastern)
    now = datetime.datetime.now(tz=eastern)
    return (expires_at - now).total_seconds() / (3600 * 24)
Example #41
0
def convert_time(current_date):
    """Interpret the naive `current_date` as UTC and return it converted to
    the system's local timezone.
    """
    here = tz.tzlocal()
    utc = tz.gettz('UTC')
    # Debug output. The original used a Python 2 print *statement*, which
    # is a syntax error under Python 3 (used elsewhere in this file).
    print(utc, type(current_date))
    gmt = current_date.replace(tzinfo=utc)
    return gmt.astimezone(here)
Example #42
0
def utc_to_local(string):
    """Parse a timestamp string, treat it as UTC, and return it converted
    to the machine's local timezone."""
    parsed = parse(string)
    as_utc = parsed.replace(tzinfo=tz.gettz('UTC'))
    return as_utc.astimezone(tz.tzlocal())
Example #43
0
def utcIsoToLocalVtrTime(datedate):
    """Convert a UTC ISO timestamp to Stockholm time, shift it back ten
    minutes, and format the result as HH:MM."""
    stockholm = parser.parse(datedate).astimezone(tz.gettz("Europe/Stockholm"))
    adjusted = stockholm - datetime.timedelta(minutes=10)
    return adjusted.strftime("%H:%M")
Example #44
0
        # NOTE(review): this snippet begins mid-loop (iterating `years`);
        # the enclosing function and loop header are not visible here.
        sdf = pd.read_csv(sqin_read,
                          usecols=[0, 1],
                          header=1,
                          parse_dates=[0],
                          names=['date', 'nwm_sqin'],
                          na_values=[' ', '', 'na', 'NA', '-999'])
        sdf.nwm_sqin *= 35.3147  #### !!!!! convert simulated flow (cms) to cfs !!!!! ####
        if year == years[0]:
            sqin_df = sdf
        else:
            ## merge dataframes (https://pandas.pydata.org/pandas-docs/stable/merging.html)
            sqin_df = pd.concat([sqin_df, sdf])
        sqin_read.close()

    # convert NWM UTC timestep to local (mountain time zone)
    from_zone = tz.gettz('UTC')
    to_zone = tz.gettz('America/Denver')
    sqin_df['date'] = sqin_df['date'].dt.tz_localize(from_zone).dt.tz_convert(
        to_zone)
    # Strip the tz info again so the timestamps are naive (Denver wall time).
    sqin_df['date'] = sqin_df['date'].dt.tz_localize(None)

    print('Parsing obs-qin data...')
    qin_read = open(input_qin_dir + comid_obs_pair[comid], 'r')
    qin_df = pd.read_csv(qin_read,
                         usecols=[0, 1],
                         header=1,
                         parse_dates=[0],
                         names=['date', 'obs_qin'],
                         na_values=[' ', '', 'na', 'NA', '-999'])
    qin_read.close()
Example #45
0
    def analyse(self, request: WSGIRequest) -> HttpResponse:
        """Perform an analysis of the database in the given timeframe."""
        startdate = request.POST.get("startdate")
        starttime = request.POST.get("starttime")
        enddate = request.POST.get("enddate")
        endtime = request.POST.get("endtime")
        if not startdate or not starttime or not enddate or not endtime:
            return HttpResponseBadRequest("All fields are required")

        # Combine the separate date and time fields into naive datetimes.
        start = dateparse.parse_datetime(startdate + "T" + starttime)
        end = dateparse.parse_datetime(enddate + "T" + endtime)

        if start is None or end is None:
            return HttpResponseBadRequest("invalid start-/endtime given")
        if start >= end:
            return HttpResponseBadRequest("start has to be before end")

        # Make the datetimes timezone-aware (Django's current timezone).
        start = timezone.make_aware(start)
        end = timezone.make_aware(end)

        # All plays and requests that fall inside [start, end).
        played = (PlayLog.objects.all().filter(created__gte=start).filter(
            created__lt=end))
        requested = (RequestLog.objects.all().filter(
            created__gte=start).filter(created__lt=end))
        # Play counts grouped per song, most played first.
        played_count = (played.values("song__url", "song__artist",
                                      "song__title").values(
                                          "song__url",
                                          "song__artist",
                                          "song__title",
                                          count=models.Count("song__url"),
                                      ).order_by("-count"))
        played_votes = (PlayLog.objects.all().filter(
            created__gte=start).filter(created__lt=end).order_by("-votes"))
        # Request counts grouped per client address.
        devices = requested.values("address").values(
            "address", count=models.Count("address"))

        # NOTE(review): the [0] lookups below raise IndexError when the
        # timeframe contains no plays/requests -- confirm callers guarantee
        # a non-empty window.
        response = {
            "songs_played":
            len(played),
            "most_played_song":
            (song_utils.displayname(played_count[0]["song__artist"],
                                    played_count[0]["song__title"]) +
             f" ({played_count[0]['count']})"),
            "highest_voted_song": (played_votes[0].song_displayname() +
                                   f" ({played_votes[0].votes})"),
            "most_active_device":
            (devices[0]["address"] + f" ({devices[0]['count']})"),
        }
        requested_by_ip = requested.filter(address=devices[0]["address"])
        # Append up to five sample requests from the most active device,
        # with a trailing ellipsis when more exist.
        for i in range(6):
            if i >= len(requested_by_ip):
                break
            response["most_active_device"] += "\n"
            if i == 5:
                response["most_active_device"] += "..."
            else:
                response["most_active_device"] += requested_by_ip[
                    i].item_displayname()

        # Histogram of request counts in hourly bins across the timeframe.
        binsize = 3600
        number_of_bins = math.ceil((end - start).total_seconds() / binsize)
        request_bins = [0 for _ in range(number_of_bins)]

        for request_log in requested:
            seconds = (request_log.created - start).total_seconds()
            index = int(seconds / binsize)
            request_bins[index] += 1

        # Render the histogram as "HH:MM:\t<count>" lines.
        current_time = start
        current_index = 0
        response["request_activity"] = ""
        while current_time < end:
            response["request_activity"] += current_time.strftime("%H:%M")
            response["request_activity"] += ":\t" + str(
                request_bins[current_index])
            response["request_activity"] += "\n"
            current_time += timedelta(seconds=binsize)
            current_index += 1

        # Chronological playlist rendered in the configured project timezone.
        localtz = tz.gettz(settings.TIME_ZONE)
        playlist = ""
        for play_log in played:
            localtime = play_log.created.astimezone(localtz)
            playlist += "[{:02d}:{:02d}] {}\n".format(
                localtime.hour, localtime.minute, play_log.song_displayname())
        response["playlist"] = playlist

        return JsonResponse(response)
Example #46
0
import getpass
import simplejson as json
import base64

import argparse

# File used to persist recovery/cursor state between runs.
RECOVERY_LOG = '/tmp/recovery.log'

# end_time is 3 mins before now
PERIOD_END_NOW = timedelta(minutes=3)

# period length
PERIOD_LENGTH = timedelta(minutes=1)

# presumably the timezone App Engine log timestamps use -- confirm.
GAE_TZ = tz.gettz('US/Pacific')

logger = logging.getLogger()

# Cursor state carried between fetch iterations.
last_offset = None
last_time_period = None

ENCODING = "ISO-8859-1"


def _get_level(level):
    """Map an App Engine logservice level constant to its name string.

    NOTE(review): only DEBUG and INFO are handled in the visible code and
    there is no fallback return -- this snippet appears truncated.
    """
    # TODO - better?
    if logservice.LOG_LEVEL_DEBUG == level:
        return "DEBUG"
    if logservice.LOG_LEVEL_INFO == level:
        return "INFO"
Example #47
0
import sqlite3 as lite
import time
import os
from dateutil import tz

# multicasting packages
import socket
import struct
import sys
import time
import json
import ast
import pandas as pd

# set timezone
# NOTE(review): this rebinds the imported `dateutil.tz` module name to a
# tzinfo instance -- any later `tz.gettz()` call in this module would fail.
tz = tz.gettz('Pacific/Auckland')
timezone = 'Pacific/Auckland'
# Also switch the C library's process-wide timezone to match.
os.environ['TZ'] = timezone
time.tzset()


def send(dict_msg, ip='224.0.2.0', port=10000):
    # send multicast query to all devices in the network
    #
    # dict_msg -- payload to broadcast
    # ip, port -- multicast group address and port
    #
    # NOTE(review): the visible code only creates the UDP socket and sets
    # its timeout; the snippet appears truncated before the actual send.
    multicast_group = (ip, port)
    # Create the datagram socket
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)

    # Set a timeout so the socket does not block
    # indefinitely when trying to receive data.
    sock.settimeout(3)
Example #48
0
    def fetch_pipeline_to_advance(self,
                                  advance_pipeline_name,
                                  advance_stage_name,
                                  check_ci_stage_name=None,
                                  relative_to=None):
        """
        Given:
            - the name of a pipeline to manually advance (the advancement pipeline)
            - a datetime representing the current time (typically the production release time)
        find the advancement pipeline that should be advanced/deployed to production.

        The algorithm:
        - Query the value stream map containing the upstream pipeline materials of the advancement pipeline.
        - Find the initial upstream pipeline material.
        - Check the time at which the first job of the first stage was triggered.
        - If the job was triggered before the last release time relative to the passed-in time, found.
        - If not, keep going backwards into pipeline history until found.

        Params:
            advance_pipeline_name (str): Pipeline name which contains the manual stage to advance.
            advance_stage_name (str): Stage name in pipeline which requires manual advancement.
            check_ci_stage_name (str): Optional stage whose result must be 'Passed'
                for a candidate pipeline to qualify; skipped when None.
            relative_to (datetime): Datetime relative to which the release should occur.
                If None, use the current datetime.

        Returns:
            PipelineInstance: Named tuple containing pipeline instance to advance.

        Raises:
            AdvancementPipelineAlreadyAdvanced: if the found pipeline's stage was already advanced.
            AdvancementPipelineNotFound: if a more recent pipeline was already advanced,
                or no qualifying pipeline exists in the history.
        """
        def has_advanced(pipeline_instance, stage_name):
            """
            Check to see if a pipeline from a value stream map has been advanced.
            """
            return pipeline_instance.stage(stage_name).data['scheduled']

        def stage_failed(pipeline_instance, stage_name):
            """
            Check to see if a stage failed, if the stage name is not None
            """
            return stage_name is not None and pipeline_instance.stage(stage_name).data.get('result') != 'Passed'

        # Compute the previous release cutoff (in UTC) relative to the passed-in time -or- now.
        utc_zone = tz.gettz('UTC')
        relative_time = relative_to if relative_to else datetime.now(utc_zone)
        previous_release_cutoff = default_expected_release_date(relative_time - timedelta(days=1))

        LOG.info(
            'Checking for advancement pipeline "%s" relative to time %s, mapping to last release time %s.',
            advance_pipeline_name, relative_time, previous_release_cutoff
        )

        # Go backwards in advancement pipeline history, starting with the most recent run.
        for advancement_pipeline in self.client.pipelines.full_history(advance_pipeline_name):
            # Get the full instance information for the initial pipeline determined by the value stream map.
            vsm = advancement_pipeline.value_stream_map()
            initial_pipeline_inst = self.client.pipelines.get(vsm[0].data.name, vsm[0].data.counter)

            # Find the trigger timestamp from the first job in the first stage of the first pipeline.
            # Trigger timestamp is in milliseconds since the epoch time - convert to seconds.
            trigger_time = initial_pipeline_inst.stages()[0].jobs()[0].data.get('scheduled_date') / 1000

            # Convert the trigger timestamp to a UTC datetime.
            utc_trigger_time = datetime.utcfromtimestamp(trigger_time).replace(tzinfo=utc_zone)

            # Was the initial pipeline in the value stream map was triggered before the last release time?
            if utc_trigger_time < previous_release_cutoff:
                # Found the most recent pipeline to be triggered before the last release time.

                # Log relevant information (trigger time shown in Eastern for readability).
                est_time = utc_trigger_time.astimezone(tz.gettz('America/New_York'))
                LOG.info('Found pipeline to advance: %s', advancement_pipeline.url)
                LOG.info('From initial pipeline: %s', initial_pipeline_inst.url)
                LOG.info('Initial pipeline %s was triggered at %s', initial_pipeline_inst.data.name, est_time)

                if stage_failed(advancement_pipeline, check_ci_stage_name):
                    LOG.info('Stage %s failed on %s, skipping to next older build',
                             check_ci_stage_name,
                             advancement_pipeline.url)
                    continue

                # Check to see if the pipeline has already been advanced.
                if has_advanced(advancement_pipeline, advance_stage_name):
                    LOG.info('But pipeline has already been advanced!')
                    raise AdvancementPipelineAlreadyAdvanced(
                        'Advancement pipeline "{}" found - but its stage "{}" has already been advanced.'.format(
                            advance_pipeline_name,
                            advance_stage_name
                        )
                    )

                # Return the advancement pipeline instance.
                return PipelineInstance(
                    advancement_pipeline.data.name,
                    advancement_pipeline.data.counter,
                    advancement_pipeline.url
                )

            elif has_advanced(advancement_pipeline, advance_stage_name):
                # This pipeline has already been advanced. Since we'd expect not to advance a pipeline
                # earlier than the last one advanced, stop at this point.
                raise AdvancementPipelineNotFound(
                    'More recent advanced pipeline was found - stopping historical search.'
                )

        raise AdvancementPipelineNotFound(
            'Could not find advancement pipeline for "{}" relative to time {},'
            ' which maps to last release time {}.'.format(
                advance_pipeline_name, relative_time, previous_release_cutoff
            )
        )
Example #49
0
def _at_midnight(a_date: date, tzinfo=gettz("Europe/Paris")) -> datetime:
    """Return the timezone-aware datetime for 00:00 on `a_date` in `tzinfo`."""
    midnight = time(hour=0, minute=0, tzinfo=tzinfo)
    return datetime.combine(a_date, midnight)
Example #50
0
def parse_schedule(soup):
    """Parse a BeautifulSoup-ed timetable page into calendar events.

    Returns a tuple (events, warnings): `events` holds one Event per class
    meeting (times localized to America/Toronto), `warnings` lists courses
    that had no usable meeting time.
    """
    # There is a single CRN tag associated with every course, so we can use it to find all the info for each course.
    crn_tags = [ac.parent.findNext('td') for ac in soup.findAll('acronym')]

    events = []
    warnings = []

    for crn in crn_tags:
        # td < tr < table
        meta_table = crn.parent.parent
        course_title, course_code, course_section = meta_table.find(
            'caption').string.split(' - ')

        # Week, Type, Time, Days, Where, Date Range, Schedule Type, Instructors
        times_table = [
            list(tr.findAll('td'))
            for tr in meta_table.findNext('table').findAll('tr')[1:]
            if meta_table.findNext('table').find('caption').string ==
            "Scheduled Meeting Times"
        ]
        for row in times_table:
            # '\xa0' (non-breaking space) marks an empty cell on the page.
            times, day, location, dates, kind, instructor = [
                td.string.replace('\xa0', '') if td.string else td.text
                for td in row
            ][1:]
            try:
                start_time, end_time = (datetime.strptime(
                    time_string, "%I:%M %p")
                                        for time_string in times.split(' - '))
                start_date, end_date = (datetime.strptime(
                    date_string, "%b %d, %Y")
                                        for date_string in dates.split(' - '))
                if not start_date or not end_date or not start_time or not end_time:
                    raise ValueError('start or end date/time not defined')
            except ValueError:
                warnings += [
                    'Course does not have an assigned meeting time: ' +
                    course_title + ' ' + kind
                ]
                continue
            # presumably every date in [start_date, end_date] falling on
            # weekday `day` -- see weekday.weekday_range.
            class_dates = weekday.weekday_range(start_date, end_date, day)
            for date_ in class_dates:
                datetime_ = datetime(year=date_.year,
                                     month=date_.month,
                                     day=date_.day)
                start_datetime = (
                    datetime_ + timedelta(hours=start_time.hour,
                                          minutes=start_time.minute,
                                          seconds=start_time.second)).replace(
                                              tzinfo=gettz('America/Toronto'))
                end_datetime = (datetime_ +
                                timedelta(hours=end_time.hour,
                                          minutes=end_time.minute,
                                          seconds=end_time.second)).replace(
                                              tzinfo=gettz('America/Toronto'))
                event = Event(begin=start_datetime, end=end_datetime)
                event.name = kind + ': ' + course_title
                event.summary = kind + ': ' + course_title
                if 'Synchronous' in location:
                    event.location = 'OT Online'
                else:
                    event.location = f'Ontario Tech University\n2000 Simcoe St N, Oshawa, ON L1G 0C5\n{location}'
                event.description = 'CRN: %s\nCourse Code: %s\nSection: %s\nInstructor: %s\n' % (
                    crn.string, course_code, course_section,
                    instructor.replace('   ', ' '))
                events += [event]

    return events, warnings
Example #51
0
    def test_two_args_datetime_tz_str(self):
        """A naive datetime plus a zone-name string localizes to that zone."""
        expected = datetime(2013, 1, 1, tzinfo=tz.gettz('US/Pacific'))
        result = self.factory.get(datetime(2013, 1, 1), 'US/Pacific')
        assertEqual(result._datetime, expected)
Example #52
0
 def time_local(self, iana_str):
     """Return the UTC time converted into the given IANA timezone.

     BUG FIX: the original computed the conversion but discarded the
     result (no `return`), so the method always yielded None.
     """
     return self.time_utc().astimezone(tz.gettz(iana_str))
Example #53
0
    def test_tz_str(self):
        """now() with a zone abbreviation matches datetime.now for that zone."""
        result = self.factory.now('EST')
        assertDtEqual(result, datetime.now(tz.gettz('EST')))
Example #54
0
"""An attempt at making a period abstraction which does not suck.
Notably, a period abstraction which plays nice with timezones.

"""
from abc import ABCMeta, abstractmethod
from datetime import date, datetime, timedelta, time
from typing import Any, Iterator

from dateutil.tz import gettz

# Shared Paris tzinfo; also the default zone for the period helpers below.
EUROPE_PARIS = gettz("Europe/Paris")


def _at_midnight(a_date: date, tzinfo=gettz("Europe/Paris")) -> datetime:
    """Anchor `a_date` at 00:00 in `tzinfo`, returning an aware datetime."""
    start_of_day = time(hour=0, minute=0, tzinfo=tzinfo)
    return datetime.combine(a_date, start_of_day)


class PeriodError(Exception):
    """Root of the exception hierarchy raised by this period module."""


def raise_if_not_date(candidate: Any) -> None:
    """Raise a PeriodError unless the given value is *exactly* a `date`.

    An `isinstance` check would be the Pythonic choice, but `datetime`
    inherits from `date` and must be rejected here, so the exact type is
    compared instead.
    """
    is_strict_date = type(candidate) == date
    if not is_strict_date:
        raise PeriodError("Given value is not strictly a date")


def raise_if_not_datetime_ta(candidate: Any) -> None:
Example #55
0
def main():
    """Command line tool to upload a Remind file to CalDAV"""

    parser = ArgumentParser(
        description='Command line tool to upload a Remind file to CalDAV')
    parser.add_argument(
        '-z',
        '--zone',
        default='Europe/Berlin',
        help='Timezone of Remind file (default: Europe/Berlin)')
    parser.add_argument(
        '-s',
        '--startdate',
        type=lambda s: parse(s).date(),
        default=date.today() - timedelta(weeks=12),
        help='Start offset for remind call (default: -12 weeks)')
    parser.add_argument(
        '-m',
        '--month',
        type=int,
        default=15,
        help=
        'Number of month to generate calendar beginning wit stadtdate (default: 15)'
    )
    parser.add_argument('-d',
                        '--delete',
                        action='store_true',
                        help='Delete old events')
    parser.add_argument('-r',
                        '--davurl',
                        required=True,
                        help='The URL of the CalDAV server')
    parser.add_argument('-u',
                        '--davuser',
                        help='The username for the CalDAV server')
    parser.add_argument('-p',
                        '--davpass',
                        help='The password for the CalDAV server')
    parser.add_argument('-i',
                        '--insecure',
                        action='store_true',
                        help='Ignore SSL certificate')
    parser.add_argument(
        'infile',
        nargs='?',
        default=expanduser('~/.reminders'),
        help='The Remind file to process (default: ~/.reminders)')
    parser.add_argument(
        '-o',
        '--old',
        default=None,
        help=
        'The old reference Remind file (entries not in the current one will be deleted from dav)'
    )
    args = parser.parse_args()

    zone = gettz(args.zone)
    # Manually set timezone name to generate correct ical files
    # (python-vobject tests for the zone attribute)
    zone.zone = args.zone

    # Build the local event set, either from stdin ('-') or from the file.
    if args.infile == '-':
        remind = Remind(args.infile, zone, args.startdate, args.month)
        vobject = remind.stdin_to_vobject(stdin.read())
    else:
        remind = Remind(args.infile, zone, args.startdate, args.month)
        vobject = remind.to_vobject()

    # Local events keyed by their iCalendar UID.
    if hasattr(vobject, 'vevent_list'):
        ldict = {event.uid.value: event for event in vobject.vevent_list}
    else:
        ldict = {}

    # Resolve credentials: CLI args first, then ~/.netrc, then keyring/prompt.
    if args.davuser and args.davpass:
        user = args.davuser
        passwd = args.davpass
    else:
        try:
            (user, _,
             passwd) = netrc().authenticators(urlparse(args.davurl).netloc)
        except (IOError, TypeError):
            if not args.davuser:
                print(
                    'rem2dav: Error, argument -u/--davuser or netrc is required'
                )
                return 1
            user = args.davuser
            try:
                from keyring import get_password
                passwd = get_password(urlparse(args.davurl).netloc, user)
            except ImportError:
                passwd = None
            if not passwd:
                passwd = getpass()

    client = DAVClient(args.davurl,
                       username=user,
                       password=passwd,
                       ssl_verify_cert=not args.insecure)
    calendar = Calendar(client, args.davurl)

    # Remote events keyed by UID derived from each event URL's basename.
    rdict = {
        splitext(basename(event.canonical_url))[0].replace('%40', '@'): event
        for event in calendar.events()
    }

    if args.old:
        # Restrict the remote set to events also present in the old
        # reference Remind file.
        old = Remind(args.old, zone, args.startdate, args.month)
        old_vobject = old.to_vobject()

        if hasattr(old_vobject, 'vevent_list'):
            odict = {
                event.uid.value: event
                for event in old_vobject.vevent_list
            }
            intersect = rdict.keys() & odict.keys()
            rdict = {key: rdict[key] for key in intersect}
        else:
            rdict = {}

    # Upload events that exist locally but not remotely.
    local = ldict.keys() - rdict.keys()
    for uid in local:
        ncal = iCalendar()
        ncal.add(ldict[uid])
        calendar.add_event(ncal.serialize())

    # Optionally delete remote events that no longer exist locally.
    if args.delete or args.old:
        remote = rdict.keys() - ldict.keys()
        for uid in remote:
            rdict[uid].delete()
Example #56
0
    def test_one_arg_tzinfo(self):
        """Passing a tzinfo alone yields the current time in that zone."""
        pacific = tz.gettz('US/Pacific')
        expected = datetime.utcnow().replace(tzinfo=tz.tzutc()).astimezone(
            pacific)
        assertDtEqual(self.factory.get(pacific), expected)
Example #57
0
def local_time():
    """Return the current time as an aware datetime in America/Los_Angeles."""
    now_utc = datetime.utcnow().replace(tzinfo=tz.tzutc())
    return now_utc.astimezone(tz.gettz('America/Los_Angeles'))
Example #58
0
 def get_tz(cls, tz):
     """Resolve a timezone name: use the alias table when known, otherwise
     look up the title-cased name directly."""
     alias = cls.TZ_ALIASES.get(tz)
     name = alias if alias else tz.title()
     return tzutil.gettz(name)
Example #59
0
mako.runtime.UNDEFINED = ''

from mako.exceptions import TemplateLookupException

from tornado.web import RequestHandler

from app.account.models import User
from app.session.models import Session

from sqlalchemy.orm.exc import NoResultFound, MultipleResultsFound

template_dir = os.path.join(os.path.dirname(__file__), 'template')

from dateutil import tz

from_zone = tz.gettz('UTC')  # UTC Zone
# BUG FIX: 'CST' is an ambiguous abbreviation (US Central / China / Cuba)
# that dateutil's gettz typically cannot resolve -- it returns None, and
# astimezone(None) silently falls back to the system's local zone instead
# of China time.  The comment says "China Zone", so use the unambiguous
# IANA name.
to_zone = tz.gettz('Asia/Shanghai')  # China Zone


def lytime(t, f='%m-%d %H:%M', UTC=False):
    """Format timestamp `t` using format `f` for display.

    When UTC is False the (naive) input is treated as UTC and converted to
    the module's configured local zone first.  Falsy input yields None.
    """
    if not t:
        return None
    if UTC:
        local = t
    else:
        local = t.replace(tzinfo=from_zone).astimezone(to_zone)
    return datetime.datetime.strftime(local, f)
def main():
    """Plot hour-to-hour difference fields for a set of model experiments.

    For each pressure level / diagnostic / experiment, loads hourly-mean
    cubes, forms (hour+1) - hour difference slices of the diagnostic and of
    geopotential height, and saves per-hour PNG maps (unrotated rotated-pole
    grid, grey height contours over coloured diagnostic differences).

    NOTE(review): Python 2 source (print statements). Relies on module-level
    names defined outside this excerpt (plot_levels, plot_diags,
    experiment_ids, pp_file_path, save_path, figprops, lat/lon bounds, u,
    add_hour_of_day, ...), and the excerpt may be truncated -- confirm
    against the full file before refactoring.
    """
  
 
#experiment_ids = ['djznw', 'djzny', 'djznq', 'djzns', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq', 'dkbhu', 'djznu', 'dkhgu' ] # All 12
 #experiment_ids = ['djzny', 'djzns', 'djznw', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq' ] 
 #experiment_ids = ['djzny', 'djzns', 'djznu', 'dkbhu', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq', 'dkhgu'] 
 #experiment_ids = ['djzns', 'djznu', 'dkbhu', 'dkjxq', 'dklyu', 'dkmbq', 'dklwu', 'dklzq', 'dkhgu'] 
 #experiment_ids = ['dklzq', 'dkmbq', 'dkjxq', 'dklwu', 'dklyu', 'djzns']
#experiment_ids = ['djzns' ] 
 #experiment_ids = ['dkhgu','dkjxq']
 
    for p_level in plot_levels:

        # Set pressure height contour min/max
        if p_level == 925:
            clevgh_min = -24.
            clevgh_max = 24.
        elif p_level == 850:
            clevgh_min = -24.
            # NOTE(review): 'clev_max' below is almost certainly a typo for
            # 'clevgh_max'; as written the 850 and 700 branches leave
            # clevgh_max unset (NameError at clevs_lin) or stale from a
            # previous loop iteration -- confirm and fix.
            clev_max = 24.
        elif p_level == 700:
            clevgh_min = -24.
            # NOTE(review): same suspected 'clev_max' typo as the 850 branch.
            clev_max = 24.
        elif p_level == 500:
            clevgh_min = -24.
            clevgh_max = 24.
        else:
            print 'Contour min/max not set for this pressure level'

# Set potential temperature min/max       
        if p_level == 925:
            clevpt_min = -3.
            # NOTE(review): max of 100. at 925 hPa is inconsistent with the
            # +/-3 range used at every other level -- confirm intentional.
            clevpt_max = 100.
        elif p_level == 850:
            clevpt_min = -3.
            clevpt_max = 3.
        elif p_level == 700:
            clevpt_min = -3.
            clevpt_max = 3.
        elif p_level == 500:
            clevpt_min = -3.
            clevpt_max = 3.
        else:
            print 'Potential temperature min/max not set for this pressure level'

 # Set specific humidity min/max       
        if p_level == 925:
            clevsh_min = -0.0025
            clevsh_max = 0.0025
        elif p_level == 850:
            clevsh_min = -0.0025
            clevsh_max = 0.0025
        elif p_level == 700:
            clevsh_min = -0.0025
            clevsh_max = 0.0025
        elif p_level == 500:
            clevsh_min = -0.0025
            clevsh_max = 0.0025
        else:
            print 'Specific humidity min/max not set for this pressure level'

        # Contour levels for the grey geopotential-height difference lines
        # (depends on clevgh_max -- see the suspected typo note above).
        clevs_lin = np.linspace(clevgh_min, clevgh_max, num=24)

        
        p_level_constraint = iris.Constraint(pressure=p_level)

                        
        for plot_diag in plot_diags:
        
        

        

            for experiment_id in experiment_ids:
            
                # Directory layout keys off the run id minus its last char.
                expmin1 = experiment_id[:-1]

                # Hourly-mean diagnostic cube on pressure levels for this run.
                pp_file = '%s_%s_on_p_levs_mean_by_hour.pp' % (experiment_id, plot_diag)
                pfile = '%s%s/%s/%s' % (pp_file_path, expmin1, experiment_id, pp_file)
                pcube = iris.load_cube(pfile, p_level_constraint)
                #cube=iris.analysis.maths.multiply(pcube,3600)
                # For each hour in cube

                # Matching geopotential height cube (field 408) for this run.
                height_pp_file = '%s_408_on_p_levs_mean_by_hour.pp' % (experiment_id)
                height_pfile = '%s%s/%s/%s' % (pp_file_path, expmin1, experiment_id, height_pp_file)
                height_cube = iris.load_cube(height_pfile, p_level_constraint)

                #time_coords = cube_f.coord('time')
                # Attach an integer 'hour' coord so each slice can be matched
                # with its successor hour below.
                add_hour_of_day(pcube, pcube.coord('time'))
     
                add_hour_of_day(height_cube, height_cube.coord('time'))
    

                    
                #pdb.set_trace()

                #del height_cube, pcube, height_cube_diff, cube_diff

                for t, time_cube in enumerate(pcube.slices(['grid_latitude', 'grid_longitude'])):
                    
                    hour=time_cube.coord('hour').points[0]

                    # NOTE(review): wrap-around looks off. For the 23h slice
                    # this sets hour=0 and then extracts hour+1 = 1 below, so
                    # the difference becomes slice(1) - slice(23); presumably
                    # slice(0) - slice(23) was intended -- confirm.
                    if hour==23:
                        hour=0

                    #pdb.set_trace()

                    # Difference field: next hour minus current hour.
                    cube_diff_slice = pcube.extract(iris.Constraint(hour=hour+1))
                
                    p_cube_difference = cube_diff_slice - time_cube
                    
                    #pdb.set_trace()
                
                    print time_cube
                    # Same hour/hour+1 difference for geopotential height.
                    time_cube_408 = height_cube.extract(iris.Constraint(hour=hour))
                    height_cube_diff_slice = height_cube.extract(iris.Constraint(hour=hour+1))
                    
                    height_cube_difference = height_cube_diff_slice -  time_cube_408
                    # Get  time of averagesfor plot title
  

                    h = u.num2date(np.array(time_cube.coord('hour').points, dtype=float)[0]).strftime('%H%M')
                    hplus1 = u.num2date(np.array(time_cube.coord('hour').points, dtype=float)[0]+1.).strftime('%H%M')

                    #Convert to India time

                    from_zone = tz.gettz('UTC')
                    to_zone = tz.gettz('Asia/Kolkata')
                    
                    h_utc = u.num2date(np.array(time_cube.coord('hour').points, dtype=float)[0]).replace(tzinfo=from_zone)
                   
                    h_local = h_utc.astimezone(to_zone).strftime('%H%M')
                
                    fig = plt.figure(**figprops)
         
                    cmap=plt.cm.RdBu_r
                    
                    ax = plt.axes(projection=ccrs.PlateCarree(), extent=(lon_low,lon_high,lat_low+degs_crop_bottom,lat_high-degs_crop_top))
                    
                    # Basemap instance is used only to project lon/lat below.
                    m =\
                        Basemap(llcrnrlon=lon_low,llcrnrlat=lat_low,urcrnrlon=lon_high,urcrnrlat=lat_high, rsphere = 6371229)
                    #pdb.set_trace()
                    lat = p_cube_difference.coord('grid_latitude').points
                    lon = p_cube_difference.coord('grid_longitude').points
                    
                    cs = p_cube_difference.coord_system('CoordSystem')
                    
                    # Unrotate the rotated-pole grid to true lon/lat coords.
                    lons, lats = np.meshgrid(lon, lat) 
                    lons, lats = iris.analysis.cartography.unrotate_pole\
                                (lons,lats, cs.grid_north_pole_longitude, cs.grid_north_pole_latitude)
                    
                    
                    x,y = m(lons,lats)
                    
                    
                    # NOTE(review): if plot_diag is neither 'temp' nor
                    # 'sp_hum', min_contour/max_contour/cb_label/tick_interval
                    # are never set and the code below raises NameError.
                    if plot_diag=='temp':
                        min_contour = clevpt_min
                        max_contour = clevpt_max
                        cb_label='K' 
                        main_title='geopotential height (grey contours), potential temperature (colours),\
                                          and wind (vectors) %s UTC    %s IST' % (h, h_local)
                        tick_interval=1
                    elif plot_diag=='sp_hum':
                        min_contour = clevsh_min
                        max_contour = clevsh_max 
                        cb_label='kg/kg' 
                        # NOTE(review): this format string has four %s
                        # placeholders but only two arguments, so it raises
                        # TypeError whenever plot_diag == 'sp_hum' -- confirm
                        # intended title and fix.
                        main_title='8km  Explicit model (dklyu) minus 8km parametrised model geopotential height (grey contours), specific humidity (colours),\
                                         and wind (vectors) %s-%s UTC    %s-%s IST' % (h, h_local)
                        tick_interval=0.0005
                        
                    # NOTE(review): the first clevs assignment is dead -- it
                    # is immediately overwritten by the hard-coded -3..3
                    # range on the next line. Confirm which was intended.
                    clevs = np.linspace(min_contour, max_contour, 32)
                    clevs = np.linspace(-3, 3, 32)
                    cont = plt.contourf(x,y,p_cube_difference.data, clevs, cmap=cmap, extend='both')
                    
                    
                    #cont = iplt.contourf(time_cube, cmap=cmap, extend='both')
                    

                    # Grey height-difference contours, labelled in-line.
                    cs_lin = iplt.contour(height_cube_difference, clevs_lin,colors='#262626',linewidths=1.)
                    plt.clabel(cs_lin, fontsize=14, fmt='%d', color='black')
                    
                    #del time_cube
                     
                    #plt.clabel(cont, fmt='%d')
                    #ax.stock_img()
                    ax.coastlines(resolution='110m', color='#262626') 
                     
                    gl = ax.gridlines(draw_labels=True,linewidth=0.5, color='#262626', alpha=0.5, linestyle='--')
                    gl.xlabels_top = False
                    gl.ylabels_right = False
                    #gl.xlines = False
                    dx, dy = 10, 10

                    gl.xlocator = mticker.FixedLocator(range(int(lon_low_tick),int(lon_high_tick)+dx,dx))
                    gl.ylocator = mticker.FixedLocator(range(int(lat_low_tick),int(lat_high_tick)+dy,dy))
                    gl.xformatter = LONGITUDE_FORMATTER
                    gl.yformatter = LATITUDE_FORMATTER
                    
                    gl.xlabel_style = {'size': 12, 'color':'#262626'}
                    #gl.xlabel_style = {'color': '#262626', 'weight': 'bold'}
                    gl.ylabel_style = {'size': 12, 'color':'#262626'}         
                    
                    cbar = fig.colorbar(cont, orientation='horizontal', pad=0.05, extend='both')
                    cbar.set_label('%s' % cb_label, fontsize=10, color='#262626') 
                    #cbar.set_label(time_cube.units, fontsize=10, color='#262626')
                    cbar.set_ticks(np.arange(min_contour, max_contour+tick_interval,tick_interval))
                    ticks = (np.arange(min_contour, max_contour+tick_interval,tick_interval))
                    cbar.set_ticklabels(['${%.1f}$' % i for i in ticks])
                    
                    cbar.ax.tick_params(labelsize=10, color='#262626')
                    
                    
                    #main_title='Mean Rainfall for EMBRACE Period -%s UTC (%s IST)' % (h, h_local)
                    #main_title=time_cube.standard_name.title().replace('_',' ')
                    #model_info = re.sub(r'[(\']', ' ', model_info)
                    #model_info = re.sub(r'[\',)]', ' ', model_info)
                    #print model_info
                    
                    # Save an un-titled and a short-titled variant per slice.
                    file_save_name = '%s_%s_%s_hour_%s_diff_from_%s' % (experiment_id, plot_diag, p_level, hplus1, h)
                    save_dir = '%s%s/%s' % (save_path, experiment_id, plot_diag)
                    if not os.path.exists('%s' % save_dir): os.makedirs('%s' % (save_dir))
                    
                    #plt.show()

                    fig.savefig('%s/%s_notitle.png' % (save_dir, file_save_name), format='png', bbox_inches='tight')
                    
                    
                    plt.title('%s UTC %s IST' % (h, h_local))
                    fig.savefig('%s/%s_short_title.png' % (save_dir, file_save_name) , format='png', bbox_inches='tight')
                    
                    
                    #model_info=re.sub('(.{68} )', '\\1\n', str(model_name_convert_title.main(experiment_id)), 0, re.DOTALL)
                    #plt.title('\n'.join(wrap('%s\n%s' % (main_title, model_info), 1000,replace_whitespace=False)), fontsize=16)               
                    #fig.savefig('%s/%s.png' % (save_dir, file_save_name), format='png', bbox_inches='tight')
 
                    # Free figure memory before the next hourly slice.
                    fig.clf()
                    plt.close()
                    #del time_cube
                    gc.collect()