Example #1
 def test_small(self):
     config = get_config(PATH + 'small.conf')
     comp_config = {
         'calendars': {
             'home': {'path': os.path.expanduser('~/.calendars/home/'),
                      'color': 'dark green', 'readonly': False,
                      'type': 'calendar'},
             'work': {'path': os.path.expanduser('~/.calendars/work/'),
                      'readonly': True, 'color': '',
                      'type': 'calendar'}},
         'sqlite': {'path': os.path.expanduser('~/.local/share/khal/khal.db')},
         'locale': {
             'local_timezone': get_localzone(),
             'default_timezone': get_localzone(),
             'timeformat': '%H:%M',
             'dateformat': '%d.%m.',
             'longdateformat': '%d.%m.%Y',
             'datetimeformat': '%d.%m. %H:%M',
             'longdatetimeformat': '%d.%m.%Y %H:%M',
             'firstweekday': 0,
             'encoding': 'utf-8',
             'unicode_symbols': True,
             'weeknumbers': False,
         },
         'default': {
             'default_calendar': None,
             'default_command': 'calendar',
             'print_new': 'False',
             'show_all_days': False,
             'days': 2,
         }
     }
     for key in comp_config:
         assert config[key] == comp_config[key]
Example #2
File: cf.py Project: cfstacks/stacks
def print_events(conn, stack_name, follow, lines=100, from_dt=datetime.fromtimestamp(0, tz=pytz.UTC)):
    """Prints tabulated list of events"""
    events_display = []
    seen_ids = set()
    next_token = None

    while True:
        events, next_token = get_events(conn, stack_name, next_token)
        status = get_stack_status(conn, stack_name)
        normalize_events_timestamps(events)
        if follow:
            events_display = [(ev.timestamp.astimezone(tzlocal.get_localzone()), ev.resource_status, ev.resource_type,
                               ev.logical_resource_id, ev.resource_status_reason) for ev in events
                              if ev.event_id not in seen_ids and ev.timestamp >= from_dt]
            if len(events_display) > 0:
                print(tabulate(events_display, tablefmt='plain'), flush=True)
                seen_ids |= set([event.event_id for event in events])
            if status not in IN_PROGRESS_STACK_STATES and next_token is None:
                break
            if next_token is None:
                time.sleep(5)
        else:
            events_display.extend([(event.timestamp.astimezone(tzlocal.get_localzone()), event.resource_status,
                                    event.resource_type, event.logical_resource_id, event.resource_status_reason)
                                   for event in events])
            if len(events_display) >= lines or next_token is None:
                break

    if not follow:
        print(tabulate(events_display[:lines], tablefmt='plain'), flush=True)

    return status
Example #3
    def create_from_raw(cls):
        """
        Creates MinuteData from raw EfergyData
        """
        try:
            latest_minute = cls.objects.latest('timestamp')
            start_time = latest_minute.timestamp
            # Why do I have to do this?!?!
            start_time = start_time + timedelta(minutes=1)
            start_time = start_time.astimezone(get_localzone())
        except cls.DoesNotExist:
            start_time = pytz.utc.localize(datetime(2000, 1, 1, 0, 0, 0))

        last_minute = now().replace(second=0, microsecond=0)
        end_time = last_minute.astimezone(get_localzone())

        cursor = connection.cursor()
        # NOTE: This code is MySQL-specific!
        # NOTE: When taking the HOUR() or MINUTE() from a datetime column, it
        # is important to explicitly convert the value to the local timezone
        # first otherwise, the calculations will be for UTC.
        cursor.execute("""
            INSERT INTO efergy_minutedata (
                SELECT
                    NULL,
                    FROM_UNIXTIME(FLOOR(UNIX_TIMESTAMP(`timestamp`)/60)*60) as 'timestamp',
                    HOUR(CONVERT_TZ(`timestamp`, '+00:00', @@global.time_zone)) * 60 + MINUTE(CONVERT_TZ(`timestamp`, '+00:00', @@global.time_zone)) as 'minute',
                    TRUNCATE(AVG(`watts`), 6) as 'watts'
                FROM `efergy_efergydata`
                WHERE `timestamp` >= %s AND `timestamp` < %s
                GROUP BY 2
            ) ON DUPLICATE KEY UPDATE `timestamp`=`timestamp`""",
            (start_time, end_time))
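A minimal sketch of the same local-time bucketing done in Python rather than SQL, assuming naive UTC timestamps as stored in the table above (the function name and layout are illustrative, not part of the project):

from datetime import datetime, timezone
from tzlocal import get_localzone

def minute_of_day(utc_ts):
    """Return minutes since local midnight for a naive UTC timestamp."""
    local = utc_ts.replace(tzinfo=timezone.utc).astimezone(get_localzone())
    return local.hour * 60 + local.minute

print(minute_of_day(datetime(2000, 1, 1, 12, 0)))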
Example #4
def read(subject, library, startdate=None, enddate=None, fields=None):
    '''
    Searches for data requested by params. Returns data as a pandas dataframe, with datetimes converted to the local timezone.

    With no datetimes provided, will return the first 100000 results. With startdate only, will provide 100000 results from
    the startdate. With enddate, will provide everything up to enddate.

    @param: startdate; datetime; start date to query
    @param: enddate; datetime; end date to query
    @param: subject; string; what data to query
    @param: fields; [str]; fields to return
    @param: library; where to store it

    @return: data of 'subject' from 'daterange' with timezones converted to local
    '''

    startdate = addTZ(startdate)
    enddate = addTZ(enddate)

    date_range = DateRange(startdate, enddate)

    df = library.read(subject, date_range, fields)

    if df.index.tzinfo is None:
        df.index = df.index.tz_localize(pytz.utc).tz_convert(tzlocal.get_localzone())
    else:
        df.index = df.index.tz_convert(tzlocal.get_localzone())

    return df
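The addTZ helper is not shown in the snippet above; a plausible sketch, assuming the pytz-style tzlocal API used elsewhere in these examples and that naive inputs should be treated as local time:

import tzlocal

def addTZ(dt):
    # Attach the local timezone to naive datetimes; pass aware ones (and None) through.
    if dt is not None and dt.tzinfo is None:
        return tzlocal.get_localzone().localize(dt)
    return dt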
Example #5
    def _parse_time_range(cls, date, start_time, end_time, now):
        parsed_date = cls._parse_date(date, now)
        parsed_start = cls._parse_time(start_time)
        parsed_end = cls._parse_time(end_time)

        if parsed_date is None:
            if parsed_start is None:
                raise ValueError('Failed to create event: --date or --start options are missing.')
            # set date today or tomorrow
            dt = now.date()
            if (parsed_start.hour, parsed_start.minute) < (now.hour, now.minute):
                dt += timedelta(days=1)
        else:
            if parsed_start is None:
                if parsed_end is not None:
                    raise ValueError('Failed to create event: --date option with --end is set but --start is missing.')
                # all-day event
                t = get_localzone().localize(datetime(parsed_date.year, parsed_date.month, parsed_date.day))
                return EventTime(False, t), EventTime(False, t)
            dt = parsed_date

        # set start and end event time
        start = get_localzone().localize(datetime.combine(dt, parsed_start))

        if parsed_end is None:
            end = start + cls.DEFAULT_CREATE_DURATION
        else:
            end = get_localzone().localize(datetime.combine(dt, parsed_end))
            if parsed_start > parsed_end:
                end += timedelta(days=1)
        return EventTime(True, start), EventTime(True, end)
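A runnable sketch of the "today or tomorrow" rule used above: if the requested start time has already passed today, the event is scheduled for tomorrow (names are illustrative):

from datetime import datetime, timedelta, time

def next_occurrence(start, now):
    d = now.date()
    if (start.hour, start.minute) < (now.hour, now.minute):
        d += timedelta(days=1)
    return datetime.combine(d, start)

print(next_occurrence(time(9, 30), datetime.now()))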
Example #6
    def test_seedset_export(self, mock_rabbit_worker_class):
        mock_rabbit_worker = MagicMock(spec=RabbitWorker)
        mock_rabbit_worker_class.side_effect = [mock_rabbit_worker]

        export = Export.objects.create(user=self.user,
                                       export_type="test_type",
                                       export_format="json",
                                       dedupe=True,
                                       item_date_start=datetime.datetime.now(get_localzone()),
                                       item_date_end=datetime.datetime.now(get_localzone()),
                                       harvest_date_start=datetime.datetime.now(get_localzone()),
                                       harvest_date_end=datetime.datetime.now(get_localzone()))
        export.seed_set = self.seedset
        export.save()

        request_export(export)

        # Export start message sent
        name, args, kwargs = mock_rabbit_worker.mock_calls[0]
        self.assertEqual("send_message", name)
        message = args[0]
        self.assertEqual(message["id"], export.export_id)
        self.assertEqual(message["path"], export.path)
        self.assertEqual(message["type"], export.export_type)
        self.assertEqual(message["format"], export.export_format)
        self.assertTrue(message["dedupe"])
        self.assertEqual(iso8601.parse_date(message["item_date_start"]), export.item_date_start)
        self.assertEqual(iso8601.parse_date(message["item_date_end"]), export.item_date_end)
        self.assertEqual(iso8601.parse_date(message["harvest_date_start"]), export.harvest_date_start)
        self.assertEqual(iso8601.parse_date(message["harvest_date_end"]), export.harvest_date_end)
        self.assertEqual(message["seedset"]["id"], export.seed_set.seedset_id)
        self.assertEqual("export.start.test_platform.test_type", args[1])
Example #7
  def set_heating_trigger(self, proportion, on):
    self.proportional_time = proportion
    if self.heating_trigger is not None:
      try:
        self.heating_trigger.remove()
      except JobLookupError as e:
        pass
      self.heating_trigger = None

    if on:
      if proportion < self.config['heating_settings']['proportional_heating_interval_minutes']:
        run_date = self.time_on + datetime.timedelta(0,self.proportional_time * 60)
        logger.info('New proportional time: ' + str(proportion) + '/' + str(self.config['heating_settings']['proportional_heating_interval_minutes']) +\
          ' mins - will turn off at ' + str(run_date.astimezone(get_localzone())))
        self.heating_trigger = self.sched.add_job(\
          self.process, trigger='date', run_date=run_date, name='Proportional off at ' + str(run_date.astimezone(get_localzone())))
    else:
      if proportion > 0:
        if self.time_off is None:
          self.time_off = pytz.utc.localize(datetime.datetime.utcnow())
        run_date = self.time_off + datetime.timedelta(0,(self.config['heating_settings']['proportional_heating_interval_minutes'] - self.proportional_time) * 60)
        logger.info('New proportional time: ' + str(proportion) + '/' + str(self.config['heating_settings']['proportional_heating_interval_minutes']) +\
          ' mins - will turn on at ' + str(run_date.astimezone(get_localzone())))
        self.heating_trigger = self.sched.add_job(\
          self.process, trigger='date', run_date=run_date, name='Proportional on at ' + str(run_date.astimezone(get_localzone())))
Example #8
 def test_small(self):
     config = get_config(PATH + 'small.conf')
     assert config == {
         'calendars': {
             'home': {'path': os.path.expanduser('~/.calendars/home/'), 'color': 'dark green', 'readonly': False},
             'work': {'path': os.path.expanduser('~/.calendars/work/'), 'readonly': True, 'color': ''}},
         'sqlite': {'path': os.path.expanduser('~/.local/share/khal/khal.db')},
         'locale': {
             'local_timezone': get_localzone(),
             'default_timezone': get_localzone(),
             'timeformat': '%H:%M',
             'dateformat': '%d.%m.',
             'longdateformat': '%d.%m.%Y',
             'datetimeformat': '%d.%m. %H:%M',
             'longdatetimeformat': '%d.%m.%Y %H:%M',
             'firstweekday': 0,
             'encoding': 'utf-8',
             'unicode_symbols': True,
         },
         'default': {
             'default_command': 'calendar',
             'debug': False,
             'default_calendar': 'home',
         }
     }
Example #9
File: s3.py Project: nathants/py-aws
def ls(s3_url,
       recursive=False,
       exit_codes: 'exit 1 if there are no results' = True):
    """
    list bucket contents
    """
    if not s3_url:
        for bucket in _retry(_client().list_buckets)()['Buckets']:
            yield '%s %s' % (str(bucket['CreationDate'].astimezone(tzlocal.get_localzone()))[:-6],
                             bucket['Name'])
    else:
        bucket, *prefix = s3_url.split('s3://')[-1].split('/')
        kw = {'Bucket': bucket,
              'Prefix': '/'.join(prefix),
              'Delimiter': '' if recursive else '/'}
        results = False
        while True:
            resp = _retry(_client().list_objects_v2)(**kw)
            logging.debug(pprint.pformat(resp))
            for pre in resp.get('CommonPrefixes', []):
                results = True
                yield 'PRE %s' % pre['Prefix']
            for key in resp.get('Contents', []):
                results = True
                yield '%s %s %s %s' % (str(key['LastModified'].astimezone(tzlocal.get_localzone()))[:-6],
                                       key['Size'],
                                       key['Key'],
                                       key['StorageClass'])
            if resp['IsTruncated']:
                kw['ContinuationToken'] = resp['NextContinuationToken']
            else:
                break
        if not results and exit_codes:
            sys.exit(1)
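The loop above follows the standard list_objects_v2 pagination pattern; a distilled, runnable sketch with a fake pager standing in for boto3:

def fake_pager():
    pages = iter([
        {'Contents': [1, 2], 'IsTruncated': True, 'NextContinuationToken': 'x'},
        {'Contents': [3], 'IsTruncated': False},
    ])
    return lambda **kw: next(pages)

list_objects = fake_pager()
kw, items = {}, []
while True:
    resp = list_objects(**kw)
    items.extend(resp.get('Contents', []))
    if resp['IsTruncated']:
        kw['ContinuationToken'] = resp['NextContinuationToken']
    else:
        break
print(items)  # [1, 2, 3]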
Example #10
File: toggl.py Project: skarbot/utils
    def get_time_logs(self, ignore_marked=True):
        """Get all time logs from toggl

        *ignore_marked* entries? if True check for the tags in every
        entry and filter them

        """
        start_date = datetime.datetime.now(
            tz=get_localzone()) - datetime.timedelta(self.time_period)

        end_date = datetime.datetime.now(tz=get_localzone())

        # Ignore microseconds
        start_date = start_date.replace(microsecond=0).isoformat()
        end_date = end_date.replace(microsecond=0).isoformat()

        payload = {
            'start_date': start_date,
            'end_date': end_date
            }

        try:
            request_object = requests.get(self.url,
                                          params=payload,
                                          auth=(self.api_key, 'api_token'))
        except Exception:
            logging.error('Failed to make request')
            raise

        if request_object.status_code != 200:
            logging.error('Failed to fetch time logs please see the response:\n{0}'
                          .format(request_object.content))
            return list()

        return self._filter(request_object.json(), ignore_marked)
Example #11
def UtcToLocalTime(utctime):
	if isinstance(utctime, datetime.datetime):
		return utctime.replace(tzinfo=pytz.utc).astimezone(tzlocal.get_localzone())
	elif isinstance(utctime, list):
		return [ t.replace(tzinfo=pytz.utc).astimezone(tzlocal.get_localzone()) for t in utctime ]
	else:
		raise Exception('UTC time ' + str(utctime) + ' has unknown type ' + str(type(utctime)))
Example #12
 def __init__(self, d, t, interaction, winner, quantity, item, cp=False):
     if cp:
         if d:
             month, day = map(int, d.strip('[] ').split('/'))
         else:
             month, day = 1,1
         if t:
             hour, minute = map(int, t.strip('[] ').split(':'))
         else:
             hour, minute = 0, 0
         if tzlocal_present:
             self.datetime = tzlocal.get_localzone().localize(
                 datetime.datetime(year=year, month=month, day=day, hour=hour, minute=minute))
         else:
             self.datetime = datetime.datetime(year=year, month=month, day=day, hour=hour, minute=minute)
     else:
         if tzlocal_present:
             #self.datetime = tzlocal.get_localzone().localize(datetime.datetime.strptime(d+t, '%Y%m%d%H%M%S'))
             self.datetime = tzlocal.get_localzone().localize(datetime.datetime(year=int(d[:4]), month=int(d[4:6]),
                 day=int(d[6:]), hour=int(t[:2]), minute=int(t[2:4]), second=int(t[4:])))
         else:
             #self.datetime = datetime.datetime.strptime(d+t, '%Y%m%d%H%M%S')
             self.datetime = datetime.datetime(year=int(d[:4]), month=int(d[4:6]),
                 day=int(d[6:]), hour=int(t[:2]), minute=int(t[2:4]), second=int(t[4:]))
     
     self.winner = winner or ''
     
     quantity = quantity or ''
     quantity = (int(quantity.strip().replace(',','') or 0) or
                      int(''.join(item.split(' x ')[1:]).replace(',','') or 1))
     
     item = item.split(' x ')[0].rstrip('!.').rsplit(' erhalten', maxsplit=1)[0]
     
     self.interaction = interaction or ''
     
     if interaction in {'lost', 'placed a bet of', 'discarded', 'spent'}:
         self.gain_item = ''
         self.gain_value = 0
         self.loss_item = item
         self.loss_value = quantity * -1
     elif interaction == 'sold':
         item, gain = item.rsplit(' for ', maxsplit=1)
         self.loss_item = item
         self.loss_value = -1
         quantity, item = gain.split(maxsplit=1)
         self.gain_item = item
         self.gain_value = int(quantity.replace(',', ''))
     elif interaction == "didn't win any":
         self.gain_item = item
         self.gain_value = 0
         self.loss_item = ''
         self.loss_value = 0
     else:
         self.gain_item = item
         self.gain_value = quantity
         self.loss_item = ''
         self.loss_value = 0
Example #13
	def updateInfo(self,force=False):
		self.logger.debug("[TvShowSchedule] updateInfo method called")
		v30DaysAgo = datetime.datetime.now(tzlocal.get_localzone()) - datetime.timedelta(days=30)
		if force:
			self['info']['infoUpdate'] = v30DaysAgo
		# If update is forced or info is indicated and newer than 30 days ago
		if force or self['info'].setdefault('infoUpdate',v30DaysAgo) <= v30DaysAgo:
			t = myTvDB.myTvDB()
			if 'overview' in t[self.seriesid].data.keys() and t[self.seriesid].data['overview'] is not None:
				overview = t[self.seriesid].data['overview']
			else:
				overview = ""
			bannerAvailable = (
				self.bannerDir is not None
				and 'banner' in t[self.seriesid].data
				and t[self.seriesid].data['banner'] is not None
			)
			episodesList = t[self.seriesid].getEpisodesList()
			arrList = [0]
			for key in range(1,99):
				if key in episodesList.keys():
					arrList.append(episodesList[key])
				else:
					break
			episodesList = arrList
			info={
				'seriesname':t[self.seriesid].data['seriesname'],
				'overview':overview,
				'infoUpdate':datetime.datetime.now(tzlocal.get_localzone()),
				'banner': bannerAvailable,
				'episodesList':episodesList
			}
			self.set(info=info,autoComplete=False)
			if bannerAvailable and not self.isBannerUpdated():
				self.updateBanner(t[self.seriesid].data['banner'])

			if self['season'] * self['episode'] > 0:
				try:
					t[self.seriesid][self['season']][self['episode']]
				except:
					self.logger.error("[tvShowSchedule] {0} S{1:02}E{2:02} does not exist. Reset to next episode"
						.format(self.seriesid,self['season'],self['episode']))
					self.set(season=0,episode=0,status=0,autoComplete=True)
					return
				episodeData = t[self.seriesid][self['season']][self['episode']]
				firstaired = episodeData['firstaired']
				if isinstance(firstaired,basestring):
					firstaired = dateutil.parser.parse(firstaired)
				if firstaired is None:
					self.logger.error("[tvShowSchedule] No firstaired for {0}".format(unicode(firstaired)))
					raise Exception("No firstaired for {0}".format(unicode(firstaired)))
				if firstaired.tzinfo is None or firstaired.tzinfo.utcoffset(firstaired) is None:
					firstaired = tzlocal.get_localzone().localize(firstaired)
				info['firstaired'] = firstaired
			self.set(info=info,autoComplete=False)
		self.setDefault()
		self.logger.debug("[tvShowSchedule] TvShow has been updated. New value:\n {0}".format(unicode(self.data)))
Example #14
File: titds.py Project: t0mk/titds
def time_entries():
    week_ago = datetime.datetime.now(tz=tzlocal.get_localzone()).replace(microsecond=0) - datetime.timedelta(days=7)
    params = {}
    params = {
        'start_date': week_ago.isoformat(),
        'end_date': datetime.datetime.now(tz=tzlocal.get_localzone()).replace(microsecond=0).isoformat()
    }

    print(req('time_entries', params, 'GET'))
Example #15
 def iscurrent(self,thread,created_utc,days_old=7):
     if thread=="weekly":
         check = datetime.utcnow().replace(tzinfo=tz.utc).astimezone(tzlocal.get_localzone())
         created = datetime.utcfromtimestamp(created_utc).replace(tzinfo=tz.utc)
         created_local = created.astimezone(tzlocal.get_localzone())
         if (check - created_local).total_seconds() <= ((days_old*24)+4)*3600:
             logging.debug("Thread created within last %s days (+4 hour buffer), %s seconds ago: %s.", days_old, (check - created_local).total_seconds(), created.strftime("%Y-%m-%d %I:%M:%S %p %Z"))
             return True
     return False
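A self-contained sketch of the freshness check above: a thread counts as current if it was created within days_old days plus a 4-hour buffer (the timezone conversions in the original only affect logging, not the comparison itself):

from datetime import datetime, timezone

def is_current(created_utc, days_old=7):
    created = datetime.fromtimestamp(created_utc, tz=timezone.utc)
    age = (datetime.now(timezone.utc) - created).total_seconds()
    return age <= ((days_old * 24) + 4) * 3600

print(is_current(datetime.now(timezone.utc).timestamp()))  # True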
Example #16
    def convert(self, writers_schema, value):
        if not isinstance(value, datetime.datetime):
            if isinstance(value, datetime.date):
                value = tzlocal.get_localzone().localize(
                    datetime.datetime(value.year, value.month, value.day, 0, 0, 0, 0))

        if value.tzinfo is None:
            value = tzlocal.get_localzone().localize(value)
        value = (time.mktime(value.utctimetuple()) - EPOCH_TT) + value.microsecond / 1000000.0
        return long(value * 1000000)
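A sketch of an equivalent, simpler conversion using aware datetimes directly, assuming the pytz-based tzlocal used above; EPOCH_TT in the snippet presumably compensates for time.mktime() interpreting the UTC time tuple in local time:

import datetime
import tzlocal

def to_micros(value):
    # Localize naive values, then take microseconds since the Unix epoch.
    if value.tzinfo is None:
        value = tzlocal.get_localzone().localize(value)
    return int(value.timestamp() * 1000000)

print(to_micros(datetime.datetime(2000, 1, 1)))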
Example #17
    def dateoflastweekly(self):
        if self.weeklycheck():
            return datetime.utcnow().replace(tzinfo=tz.utc).astimezone(tzlocal.get_localzone()).date()
        else:
            for d in (datetime.utcnow().replace(tzinfo=tz.utc).astimezone(tzlocal.get_localzone()).date() - timedelta(days=x) for x in range(1, 7)):
                if self.weeklycheck(d):
                    return d

        logging.error("Error in dateoflastweekly(), returning today's date...")
        return datetime.utcnow().replace(tzinfo=tz.utc).astimezone(tzlocal.get_localzone()).date()
Example #18
File: s3.py Project: nathants/py-aws
def ls_versions(s3_url,
                recursive=False,
                latest: 'only show the latest version of a key' = False,
                version_id: 'include version-ids as the last column of output'=False,
                exit_codes: 'exit 1 if there are no results' = True):
    """
    list bucket contents, including versions of keys
    """
    if not s3_url:
        for bucket in _retry(_client().list_buckets)()['Buckets']:
            yield '%s %s' % (str(bucket['CreationDate'].astimezone(tzlocal.get_localzone()))[:-6],
                             bucket['Name'])
    else:
        bucket, *prefix = s3_url.split('s3://')[-1].split('/')
        kw = {'Bucket': bucket,
              'Prefix': '/'.join(prefix),
              'Delimiter': '' if recursive else '/'}
        results = False
        while True:
            resp = _retry(_client().list_object_versions)(**kw)
            logging.debug(pprint.pformat(resp))
            for pre in resp.get('CommonPrefixes', []):
                results = True
                yield 'PRE %s' % pre['Prefix']
            for version in resp.get('Versions', []):
                if not latest or version['IsLatest']:
                    results = True
                    yield '%s %s %s %s %s %s' % (
                        str(version['LastModified'].astimezone(tzlocal.get_localzone()))[:-6],
                        version['Size'],
                        version['Key'],
                        version['StorageClass'],
                        'LATEST' if version['IsLatest'] else 'HISTORICAL',
                        version['VersionId'] if version_id else '',
                    )
            for delete in resp.get('DeleteMarkers', []):
                if not latest or delete['IsLatest']:
                    results = True
                    yield '%s %s %s %s %s %s' % (
                        str(delete['LastModified'].astimezone(tzlocal.get_localzone()))[:-6],
                        '-',
                        delete['Key'],
                        '-',
                        'DELETED' if delete['IsLatest'] else 'HISTORICAL-DELETE',
                        delete['VersionId'] if version_id else '',
                    )
            if resp['IsTruncated']:
                if 'NextKeyMarker' in resp:
                    kw['KeyMarker'] = resp['NextKeyMarker']
                if 'NextVersionIdMarker' in resp:
                    kw['VersionIdMarker'] = resp['NextVersionIdMarker']
            else:
                break
        if not results and exit_codes:
            sys.exit(1)
Example #19
 def gamecheck(self,thisgame=1,gamecount=1,just_get_time=False):
     if self.games[thisgame].get('gamesub'): return True #game thread is already posted
     if self.games[thisgame].get('doubleheader') and str(self.games[thisgame].get('gameNumber'))=='2':
         if self.SETTINGS.get('GAME_THREAD').get('HOLD_DH_GAME2_THREAD') and not just_get_time:
             if self.games[self.games[thisgame].get('othergame')].get('doubleheader') and not self.games[self.games[thisgame].get('othergame')].get('final'):
                 logging.info("Holding doubleheader Game %s until Game %s is final, sleeping for 5 seconds...", self.games[thisgame].get('gameNumber'), self.games[self.games[thisgame].get('othergame')].get('gameNumber'))
                 time.sleep(5)
                 return False
         else:
             logging.debug("Doubleheader Game 2 start time: %s; Game 1 start time: %s", self.games[thisgame].get('gameInfo').get('date_object'), self.games[self.games[thisgame].get('othergame')].get('gameInfo').get('date_object'))
             if self.games[self.games[thisgame].get('othergame')].get('gameInfo').get('date_object') > self.games[thisgame].get('gameInfo').get('date_object'): #game 1 start time is after game 2 start time
                 logging.info("Detected doubleheader Game 2 start time is before Game 1 start time. Using Game 1 start time + 3.5 hours for Game 2...")
                 self.games[thisgame]['gameInfo'].update({'date_object' : self.games[self.games[thisgame].get('othergame')].get('gameInfo').get('date_object') + timedelta(hours=3, minutes=30)}) #use game 1 start time + 3.5 hours for game 2 start time
                 logging.debug("Game 2 start time: %s; Game 1 start time: %s", self.games[thisgame].get('gameInfo').get('date_object'), self.games[self.games[thisgame].get('othergame')].get('gameInfo').get('date_object'))
     if just_get_time:
         post_time = self.games[thisgame].get('gameInfo').get('date_object').replace(hour=self.games[thisgame].get('gameInfo').get('date_object').hour - self.SETTINGS.get('GAME_THREAD').get('HOURS_BEFORE'))
         if self.SETTINGS.get('GAME_THREAD').get('POST_BY'):
             post_by_object = tzlocal.get_localzone().localize(datetime.strptime(datetime.today().strftime("%Y-%m-%d ") + self.SETTINGS.get('GAME_THREAD').get('POST_BY'), "%Y-%m-%d %I%p"))
             if post_by_object < post_time: return post_by_object
         return post_time
     if self.games[thisgame].get('status').get('detailedState').startswith('Postponed') or self.games[thisgame].get('status').get('detailedState').startswith('Suspended') or self.games[thisgame].get('status').get('detailedState').startswith('Cancelled'):
         if self.SETTINGS.get('POST_THREAD').get('ENABLED'):
             logging.info("Game %s is %s, skipping game thread...",thisgame, self.games[thisgame].get('status').get('detailedState'))
             self.games[thisgame].update({'skipflag':True})
         else:
             logging.info("Game %s is %s, overriding hours before setting for game thread since postgame thread is disabled...", thisgame, self.games[thisgame].get('status').get('detailedState'))
         return True #go ahead and post the postgame thread since the game is postponed/suspended/canceled
     if self.games[thisgame].get('status',{}).get('abstractGameState') in ['Final','Live']: return True #game has already started (or ended)
     while True:
         if self.SETTINGS.get('GAME_THREAD').get('POST_BY') and self.pregamecheck(self.SETTINGS.get('GAME_THREAD').get('POST_BY'),persist=False):
             logging.info("POST_BY time reached for Game %s game thread...", thisgame)
             return True
         check = datetime.utcnow().replace(tzinfo=tz.utc).astimezone(tzlocal.get_localzone())
         if self.games[thisgame].get('gameInfo').get('date_object_utc').astimezone(tzlocal.get_localzone()) >= check:
             if (self.games[thisgame].get('gameInfo').get('date_object_utc').astimezone(tzlocal.get_localzone()) - check).seconds <= (self.SETTINGS.get('GAME_THREAD').get('HOURS_BEFORE') * 60 * 60):
                 return True
             else:
                 post_time = self.games[thisgame].get('gameInfo').get('date_object_utc').astimezone(tzlocal.get_localzone()).replace(hour=self.games[thisgame].get('gameInfo').get('date_object_utc').astimezone(tzlocal.get_localzone()).hour - self.SETTINGS.get('GAME_THREAD').get('HOURS_BEFORE'))
                 if self.SETTINGS.get('GAME_THREAD').get('POST_BY'):
                     post_by_object = tzlocal.get_localzone().localize(datetime.strptime(datetime.today().strftime("%Y-%m-%d ") + self.SETTINGS.get('GAME_THREAD').get('POST_BY'), "%Y-%m-%d %I%p"))
                     if post_by_object < post_time: 
                         logging.debug("Time to post game thread (based on POST_BY): %s", post_by_object)
                     else:
                         logging.debug("Time to post game thread (based on HOURS_BEFORE): %s", post_time)
                 if gamecount>1:
                     logging.info("Not time to post Game %s game thread yet, sleeping for 5 seconds...", thisgame)
                     time.sleep(5)
                 return False
         else:
             return True
Example #20
    def __init__(
            self,
            filelist=None,
            period=None,
            duration=None
        ):

        if period > 366:
            # This is completely arbitrary
            raise ValueError("Periods longer than an year are currently not supported")

        uid = 0

        for ctab in self.crontabs:

            for job in ctab:

                if job.is_valid() and job.is_enabled():
                    
                    # Creates an event in ical for every valid job occurence in PERIOD
                    current_period = 0
                    schedule = job.schedule()
                    
                    while (current_period < period):

                        # Calculates the next occurency period from now
                        next_oc = schedule.get_next().replace(tzinfo=tzlocal.get_localzone())
                        current_period = (next_oc - datetime.datetime.now(tzlocal.get_localzone())).days

                        event = icalendar.Event()
                        event.add("uid", uid)

                        # We prefer comments over commands, if possible
                        if job.comment:
                            event.add("summary", job.comment)
                        else:
                            event.add("summary", job.command)

                        event.add("description" , job.command)

                        # For readability, all "events" are given a fixed duration, 10 minutes by default (see default_settings.py)
                        event.add("dtstart", next_oc)
                        event.add("dtend", next_oc + datetime.timedelta(minutes=duration))
                        event.add("dtstamp", datetime.datetime.now(tzlocal.get_localzone()))

                        self.calendar.add_component(event)
                        uid += 1
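A compact, runnable sketch of building one calendar event as done in the loop above, assuming the icalendar package the snippet already uses (the summary and command strings are hypothetical):

import datetime
import icalendar
import tzlocal

cal = icalendar.Calendar()
event = icalendar.Event()
now = datetime.datetime.now(tzlocal.get_localzone())
event.add("uid", 0)
event.add("summary", "backup job")                     # hypothetical job comment
event.add("description", "/usr/local/bin/backup.sh")   # hypothetical command
event.add("dtstart", now)
event.add("dtend", now + datetime.timedelta(minutes=10))
event.add("dtstamp", now)
cal.add_component(event)
print(cal.to_ical().decode())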
Example #21
File: jslib.py Project: valmynd/jslib
	def __init__(self, value=None, month=None, date=1, hours=0, minutes=0, seconds=0, milliseconds=0):
		"""
		Date(value) // -> integer representing a Unix Timestamp
		Date(dateString) // -> string representing an RFC2822 or ISO 8601 date
		Date(year, month, day [, hour, minute, second, millisecond])
		"""
		import pytz # pip install pytz
		import tzlocal # pip install tzlocal
		import dateutil.parser # pip install dateutil

		Object.__init__(self)
		if value is None:
			dt = datetime.datetime.now() # maybe non-standard, works in firefox and chrome, not tested in other browsers
		elif month is not None:
			dt = datetime.datetime(value, month, date, hours, minutes, seconds, milliseconds)
		elif isinstance(value, (int, float, Number)):
			dt = datetime.datetime.fromtimestamp(value)
		else:
			dt = dateutil.parser.parse(value)
		# http://stackoverflow.com/a/4771733/852994
		# http://stackoverflow.com/a/17363006/852994
		# http://stackoverflow.com/a/13287083/852994
		self.ltz = tzlocal.get_localzone()
		self.utz = pytz.utc
		self.ldt = self.ltz.localize(dt) if dt.tzinfo is None else dt.astimezone(self.ltz)
		self.udt = self.ldt.astimezone(self.utz)
		self.value = self.ldt
Example #22
    def pyLocal(self, time_zone=''):

        ''' a method to report a python datetime from a labDT object

        :param time_zone: [optional] string with timezone to report in
        :return: string with date and time info
        '''

    # validate inputs
        get_tz = get_localzone()
        title = 'Timezone input for labDT.pyLocal'
        if time_zone:
            # if time_zone.lower() in ('utc', 'uct', 'universal', 'zulu'):
            #     raise ValueError('time_zone cannot be UTC. %s requires a local timezone value. Try:\nfor tz in pytz.all_timezones:\n  print tz' % title)
            try:
                get_tz = tz.gettz(time_zone)
            except:
                raise ValueError('\n%s is not a valid timezone format. Try:\nfor tz in pytz.all_timezones:\n  print tz' % title)

    # construct python datetime from labDT
        dT = self.astimezone(get_tz)
        dt_kwargs = {
            'year': dT.year,
            'month': dT.month,
            'day': dT.day,
            'hour': dT.hour,
            'minute': dT.minute,
            'second': dT.second,
            'microsecond': dT.microsecond,
            'tzinfo': dT.tzinfo
        }
        return labDT(**dt_kwargs)
Example #23
    def _get_date(self, cr, uid, ids, field_names, arg, context=None):
       """
       _get_date: Get start date and last visit date
        :return res: dictionary value
       """
       # search details, order by DATE
       clsSer = self.pool.get('crea8s.nail.service.package')
       clsSerDet = self.pool.get('crea8s.nail.service.package.detail')
       res = {}
       dCurrentDate = datetime.now(get_localzone())
       strFM = '%Y-%m-%d'
       dCurrDate = dCurrentDate.strftime(strFM)
       for service in self.browse(cr, uid, ids, context=context):
           
           dLastDate = dCurrDate
           dStartDate = dCurrDate
           res[service.id] = {}
           arrIDs = clsSerDet.search(cr, uid, [('service_number','=',service.id)], order='date')
           
           if len(arrIDs) > 0:
               objDet = clsSerDet.browse(cr, uid, arrIDs[0], context=None)
               dStartDate = objDet.date
               objDet = clsSerDet.browse(cr, uid, arrIDs[len(arrIDs)-1], context=None)
               dLastDate = objDet.date
           res[service.id]['start_date'] = dStartDate
           res[service.id]['late_visit_date'] = dLastDate
#            print res
#            res[service.id] = dLastDate # no multi='aaaa'
       return res
Example #24
def setup_scheduler(manager):
    """Configure and start apscheduler"""
    global scheduler
    if logging.getLogger().getEffectiveLevel() > logging.DEBUG:
        logging.getLogger('apscheduler').setLevel(logging.WARNING)
    # Since APScheduler runs in a separate thread, slower devices can sometimes get a DB lock, so use a separate db
    # for the jobs to avoid this
    db_filename = os.path.join(manager.config_base, 'db-%s-jobs.sqlite' % manager.config_name)
    # in case running on windows, needs double \\
    db_filename = db_filename.replace('\\', '\\\\')
    database_uri = 'sqlite:///%s' % db_filename
    jobstores = {'default': SQLAlchemyJobStore(url=database_uri)}
    # If job was meant to run within last day while daemon was shutdown, run it once when continuing
    job_defaults = {'coalesce': True, 'misfire_grace_time': 60 * 60 * 24}
    try:
        timezone = tzlocal.get_localzone()
        if timezone.zone == 'local':
            timezone = None
    except pytz.UnknownTimeZoneError:
        timezone = None
    except struct.error as e:
        # Hiding exception that may occur in tzfile.py seen in entware
        log.warning('Hiding exception from tzlocal: %s', e)
        timezone = None
    if not timezone:
        # The default sqlalchemy jobstore does not work when there isn't a name for the local timezone.
        # Just fall back to utc in this case
        # FlexGet #2741, upstream ticket https://github.com/agronholm/apscheduler/issues/59
        log.info('Local timezone name could not be determined. Scheduler will display times in UTC for any log '
                 'messages. To resolve this, set up /etc/timezone with the correct time zone name.')
        timezone = pytz.utc
    scheduler = BackgroundScheduler(jobstores=jobstores, job_defaults=job_defaults, timezone=timezone)
    setup_jobs(manager)
Example #25
 def test_init_clock(self):
     from tzlocal import get_localzone
     local_tz_name = get_localzone().zone
     clocks = Clocks(local_tz_name)
     assert len(clocks.clock_keys) == 1
     assert local_tz_name == clocks.get_clock_by_idx(0).tz.zone
     return
Example #26
    def _get_timezone(self, tz):
        """
        Find and return the time zone if possible
        """
        # special Local timezone
        if tz == 'Local':
            try:
                return tzlocal.get_localzone()
            except pytz.UnknownTimeZoneError:
                return '?'

        # we can use a country code to get tz
        # FIXME this is broken for multi-timezone countries eg US
        # for now we just grab the first one
        if len(tz) == 2:
            try:
                zones = pytz.country_timezones(tz)
            except KeyError:
                return '?'
            tz = zones[0]

        # get the timezone
        try:
            zone = pytz.timezone(tz)
        except pytz.UnknownTimeZoneError:
            return '?'
        return zone
Example #27
def setup_scheduler(manager):
    """Configure and start apscheduler"""
    global scheduler
    if logging.getLogger().getEffectiveLevel() > logging.DEBUG:
        logging.getLogger('apscheduler').setLevel(logging.WARNING)
    jobstores = {'default': SQLAlchemyJobStore(engine=manager.engine, metadata=Base.metadata)}
    # If job was meant to run within last day while daemon was shutdown, run it once when continuing
    job_defaults = {'coalesce': True, 'misfire_grace_time': 60 * 60 * 24}
    try:
        timezone = tzlocal.get_localzone()
        if timezone.zone == 'local':
            timezone = None
    except pytz.UnknownTimeZoneError:
        timezone = None
    except struct.error as e:
        # Hiding exception that may occur in tzfile.py seen in entware
        log.warning('Hiding exception from tzlocal: %s', e)
        timezone = None
    if not timezone:
        # The default sqlalchemy jobstore does not work when there isn't a name for the local timezone.
        # Just fall back to utc in this case
        # FlexGet #2741, upstream ticket https://bitbucket.org/agronholm/apscheduler/issue/59
        log.info('Local timezone name could not be determined. Scheduler will display times in UTC for any log '
                 'messages. To resolve this, set up /etc/timezone with the correct time zone name.')
        timezone = pytz.utc
    scheduler = BackgroundScheduler(jobstores=jobstores, job_defaults=job_defaults, timezone=timezone)
    setup_jobs(manager)
Example #28
File: __init__.py Project: 2mny/mylar
    def __init__(self, year=None, month=None, day=None, week=None, day_of_week=None, hour=None,
                 minute=None, second=None, start_date=None, end_date=None, timezone=None):
        if timezone:
            self.timezone = astimezone(timezone)
        elif isinstance(start_date, datetime) and start_date.tzinfo:
            self.timezone = start_date.tzinfo
        elif isinstance(end_date, datetime) and end_date.tzinfo:
            self.timezone = end_date.tzinfo
        else:
            self.timezone = get_localzone()

        self.start_date = convert_to_datetime(start_date, self.timezone, 'start_date')
        self.end_date = convert_to_datetime(end_date, self.timezone, 'end_date')

        values = dict((key, value) for (key, value) in six.iteritems(locals())
                      if key in self.FIELD_NAMES and value is not None)
        self.fields = []
        assign_defaults = False
        for field_name in self.FIELD_NAMES:
            if field_name in values:
                exprs = values.pop(field_name)
                is_default = False
                assign_defaults = not values
            elif assign_defaults:
                exprs = DEFAULT_VALUES[field_name]
                is_default = True
            else:
                exprs = '*'
                is_default = True

            field_class = self.FIELDS_MAP[field_name]
            field = field_class(field_name, exprs, is_default)
            self.fields.append(field)
Example #29
File: core.py Project: DavidWittman/jrnl
def check_output_time_inline(context, text):
    out = context.stdout_capture.getvalue()
    local_tz = tzlocal.get_localzone()
    utc_time = date_parser.parse(text)
    date = utc_time + local_tz._utcoffset
    local_date = date.strftime("%Y-%m-%d %H:%M")
    assert local_date in out, local_date
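A sketch of the same conversion without reaching into tzlocal's private _utcoffset attribute, assuming utc_time is a naive UTC datetime as parsed above:

import pytz
import tzlocal
from datetime import datetime

def utc_to_local_str(utc_time):
    aware = pytz.utc.localize(utc_time)
    return aware.astimezone(tzlocal.get_localzone()).strftime("%Y-%m-%d %H:%M")

print(utc_to_local_str(datetime(2020, 1, 1, 12, 0)))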
Example #30
    def humanFriendly(self, time_zone='', include_day=True, include_time=True):

        ''' a method to report a human friendly string from a labDT object

        :param time_zone: [optional] string with timezone to report in
        :return: string with date and time info
        '''

    # validate inputs
        zeroHourPattern = re.compile(r'\s0\d:')
        title = 'Timezone input for labDT.humanFriendly'
        human_format = ''
        if include_day:
            human_format += '%A, '
        human_format += '%B %d, %Y'
        if include_time:
            human_format += ' %I:%M%p %Z'
        get_tz = get_localzone()
        if time_zone:
            # if time_zone.lower() in ('utc', 'uct', 'universal', 'zulu'):
            #     raise ValueError('time_zone cannot be UTC. %s requires a local timezone value. Try:\nfor tz in pytz.all_timezones:\n  print tz' % title)
            try:
                get_tz = tz.gettz(time_zone)
            except:
                raise ValueError('%s is not a valid timezone format. Try:\nfor tz in pytz.all_timezones:\n  print tz' % title)
        
    # construct human friendly string from labDT
        dtLocal = self.astimezone(get_tz)
        dtString = format(dtLocal, human_format)
        zeroHour = zeroHourPattern.findall(dtString)
        if zeroHour:
            noZero = zeroHour[0].replace(' 0',' ')
            dtString = zeroHourPattern.sub(noZero,dtString)
        return dtString
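A runnable sketch of the zero-hour cleanup above, which turns " 09:30AM" into " 9:30AM" (the sample string is illustrative):

import re

zero_hour = re.compile(r'\s0\d:')
s = "Monday, August 05, 2019 09:30AM CEST"
found = zero_hour.findall(s)
if found:
    s = zero_hour.sub(found[0].replace(' 0', ' '), s)
print(s)  # Monday, August 05, 2019 9:30AM CEST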
Example #31
 def __init__(self, run_date=None, timezone=None):
     timezone = astimezone(timezone) or get_localzone()
     if run_date is not None:
         self.run_date = convert_to_datetime(run_date, timezone, 'run_date')
     else:
         self.run_date = datetime.now(timezone)
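A standalone sketch of the fallback above, assuming zoneinfo-style tzinfo objects (modern tzlocal) in place of apscheduler's astimezone()/convert_to_datetime helpers:

from datetime import datetime
from tzlocal import get_localzone

def resolve_run_date(run_date=None, timezone=None):
    # An explicit run_date is normalized to the given zone; otherwise use "now".
    tz = timezone or get_localzone()
    if run_date is not None:
        return run_date if run_date.tzinfo else run_date.replace(tzinfo=tz)
    return datetime.now(tz)

print(resolve_run_date())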
Example #32
    def delete_service(self, serviceId):
        method = "DELETE"
        url = self.base + "/" + serviceId
        data = "{\"globalSubscriberId\":\"Demonstration\", \"serviceType\":\"vIMS\"}"
        t1 = datetime.datetime.now(tzlocal.get_localzone())
        response = self.client.request(method,
                                       url,
                                       name=self.base,
                                       headers=self.headers,
                                       data=data)
        t2 = datetime.datetime.now(tzlocal.get_localzone())
        delta = t2 - t1
        data = collections.OrderedDict()
        data['datetime'] = datetime.datetime.now(
            tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
        data['method'] = method
        data['url'] = self.base
        data['status_code'] = response.status_code
        data['transaction_time'] = (delta.seconds * 10**6 + delta.microseconds) / 1000
        fcntl.flock(self.transaction_file, fcntl.LOCK_EX)
        self.transaction_file.write(
            json.dumps(data, default=self.myconverter) + "\n")
        self.transaction_file.flush()
        os.fsync(self.transaction_file)
        fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
        operationId = response.json()['operationId']

        # Get the request status
        method = "GET"
        url = self.base + "/" + serviceId + "/operations/" + operationId
        url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
        count = 1
        while count < 50:
            tt1 = datetime.datetime.now(tzlocal.get_localzone())
            response = self.client.request(method,
                                           url,
                                           name=url1,
                                           headers=self.headers)
            tt2 = datetime.datetime.now(tzlocal.get_localzone())
            delta = tt2 - tt1
            result = response.json()['operationStatus']['result']
            progress = response.json()['operationStatus']['progress']
            data = collections.OrderedDict()
            data['datetime'] = datetime.datetime.now(
                tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
            data['method'] = method
            data['url'] = url1
            data['status_code'] = response.status_code
            data['count'] = count
            data['result'] = result
            data['progress'] = progress
            data['transaction_time'] = (delta.seconds * 10**6 + delta.microseconds) / 1000
            fcntl.flock(self.transaction_file, fcntl.LOCK_EX)
            self.transaction_file.write(
                json.dumps(data, default=self.myconverter) + "\n")
            self.transaction_file.flush()
            os.fsync(self.transaction_file)
            fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
            if result == "finished" or result == "error":
                break
            else:
                time.sleep(1)
                count = count + 1

        if result == "finished":
            result = "success"
        else:
            result = "failure"
        t3 = datetime.datetime.now(tzlocal.get_localzone())
        delta = t3 - t1
        data = collections.OrderedDict()
        data['datetime'] = t1.strftime("%Y-%m-%dT%H:%M:%S%Z")
        data['operation'] = "volte_delete"
        data['result'] = result
        data['duration'] = round(
            delta.seconds + Decimal(delta.microseconds / 1000000.0), 3)
        fcntl.flock(self.operation_file, fcntl.LOCK_EX)
        self.operation_file.write(
            json.dumps(data, default=self.myconverter) + "\n")
        self.operation_file.flush()
        os.fsync(self.operation_file)
        fcntl.flock(self.operation_file, fcntl.LOCK_UN)
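The transaction_time fields above are total elapsed microseconds divided by 1000, i.e. milliseconds; a minimal sketch showing that timedelta.total_seconds() yields the same figure more directly:

import datetime

t1 = datetime.datetime.now()
t2 = t1 + datetime.timedelta(seconds=1, microseconds=250000)
delta = t2 - t1
print((delta.seconds * 10**6 + delta.microseconds) / 1000)  # 1250.0
print(delta.total_seconds() * 1000)                         # 1250.0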
Example #33
    def create_service(self):
        # Post a E2E service instantiation request to SO
        method = "POST"
        url = self.base
        service_instance_name = ''.join(
            random.choice(string.ascii_uppercase + string.digits)
            for _ in range(10))
        data = self.service_creation_body % service_instance_name

        t1 = datetime.datetime.now(tzlocal.get_localzone())
        response = self.client.request(method,
                                       url,
                                       headers=self.headers,
                                       data=data)
        t2 = datetime.datetime.now(tzlocal.get_localzone())
        delta = t2 - t1
        data = collections.OrderedDict()
        data['datetime'] = datetime.datetime.now(
            tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
        data['method'] = method
        data['url'] = url
        data['status_code'] = response.status_code
        data['transaction_time'] = (delta.seconds * 10**6 + delta.microseconds) / 1000
        fcntl.flock(self.transaction_file, fcntl.LOCK_EX)
        self.transaction_file.write(
            json.dumps(data, default=self.myconverter) + "\n")
        self.transaction_file.flush()
        os.fsync(self.transaction_file)
        fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
        serviceId = response.json()['service']['serviceId']
        operationId = response.json()['service']['operationId']

        # Get the request status
        method = "GET"
        url = self.base + "/" + serviceId + "/operations/" + operationId
        url1 = "/ecomp/mso/infra/e2eServiceInstances/v3/{serviceId}/operations/{operationId}"
        count = 1
        while count < 50:
            tt1 = datetime.datetime.now()
            response = self.client.request(method,
                                           url,
                                           name=url1,
                                           headers=self.headers)
            tt2 = datetime.datetime.now()
            delta = tt2 - tt1
            result = response.json()['operationStatus']['result']
            progress = response.json()['operationStatus']['progress']
            data = collections.OrderedDict()
            data['datetime'] = datetime.datetime.now(
                tzlocal.get_localzone()).strftime("%Y-%m-%dT%H:%M:%S%Z")
            data['method'] = method
            data['url'] = url1
            data['status_code'] = response.status_code
            data['count'] = count
            data['result'] = result
            data['progress'] = progress
            data['transaction_time'] = (delta.seconds * 10**6 + delta.microseconds) / 1000
            fcntl.flock(self.transaction_file, fcntl.LOCK_EX)
            self.transaction_file.write(
                json.dumps(data, default=self.myconverter) + "\n")
            self.transaction_file.flush()
            os.fsync(self.transaction_file)
            fcntl.flock(self.transaction_file, fcntl.LOCK_UN)
            if result == "finished" or result == "error":
                break
            else:
                time.sleep(1)
                count = count + 1

        if result == "finished":
            result = "success"
        else:
            result = "failure"
        t3 = datetime.datetime.now(tzlocal.get_localzone())
        delta = t3 - t1
        data = collections.OrderedDict()
        data['datetime'] = t1.strftime("%Y-%m-%dT%H:%M:%S%Z")
        data['operation'] = "volte_create"
        data['result'] = result
        data['duration'] = round(
            delta.seconds + Decimal(delta.microseconds / 1000000.0), 3)
        fcntl.flock(self.operation_file, fcntl.LOCK_EX)
        self.operation_file.write(
            json.dumps(data, default=self.myconverter) + "\n")
        self.operation_file.flush()
        os.fsync(self.operation_file)
        fcntl.flock(self.operation_file, fcntl.LOCK_UN)

        self.delete_service(serviceId)
Example #34
def mvOverview():
    # extract MVSense over view data for a camera from the analytics API
    MVZones = []
    animation_option = {"startup": True, "duration": 1000, "easing": 'out'}

    #First we have the logic for creating the page with all the historical details of a zone below;
    #further down is the logic to show the overview of all cameras and their respective zones
    if request.method == 'POST':
        # This is for the historical detail
        zoneDetails = request.form['zone_details']
        print("zoneDetails=",zoneDetails)
        #zoneDetailsTuple contains: [camera serial, camera name, zone id, zone name]
        zoneDetailsTuple=zoneDetails.split(',')
        theSERIAL=zoneDetailsTuple[0]
        theCameraNAME=zoneDetailsTuple[1]
        theZoneID=zoneDetailsTuple[2]
        theZoneNAME=zoneDetailsTuple[3]

        data = getMVHistory(theSERIAL,theZoneID)
        if data != 'link error':


            print("getMVHistory returned:", data)

            MVHistory = json.loads(data)
            # add a chart

            # now create the chart object, titled with the camera and zone names
            mv_history_chart = ColumnChart("mvhistorychart", options={"title": "Camera: "+theCameraNAME+" Zone: "+theZoneNAME,
                                                                                 "width": 1000,
                                                                                 "height": 500,
                                                                                 "hAxis.title": "Hour",
                                                                                 "animation": animation_option})
            mv_history_chart.add_column("string", "Zone")
            mv_history_chart.add_column("number", "Visitors")
            print(data)
            the_rows = []
            theHoursDict = dict()
            theHoursMaxEntrancesDict = dict()
            theHoursMaxEntrancesTimestampDict = dict()
            theLocalHoursMaxEntrancesTimestampDict = dict()

            for j in range(len(MVHistory)):
                # grab all events in MVHistory, then
                # tabulate and summarize in hour blocks
                # example startTS: "2019-08-05T17:06:46.312Z" example endTs: "2019-08-05T17:07:46.312Z"
                # also, for each hour that has entrances, select the timeframe where there are
                # the most and extract a snapshot 30 seconds after that timestamp to show below in the page

                thisStartTs = MVHistory[j]["startTs"]
                thisEndTs = MVHistory[j]["endTs"]

                thisHour = thisEndTs.partition('T')[2][:2]

                theEndTsTimeStamp=datetime.strptime(thisEndTs, "%Y-%m-%dT%H:%M:%S.%fZ")

                thisMinuteMedTimestamp= time.mktime(theEndTsTimeStamp.timetuple())-30
                thisMinuteMedISOts=datetime.fromtimestamp(thisMinuteMedTimestamp).isoformat()+"Z"

                #convert to localtimezone
                local_timezone = tzlocal.get_localzone()  # get pytz tzinfo
                local_timezone_str = str(local_timezone)
                theLocalEndTsTimeStamp = theEndTsTimeStamp.replace(tzinfo=pytz.utc).astimezone(local_timezone)

                thislocalMinuteMedTimestamp= time.mktime(theLocalEndTsTimeStamp.timetuple())-30
                thislocalMinuteMedISOts = datetime.fromtimestamp(thislocalMinuteMedTimestamp).isoformat() + "Z"
                localHour = thislocalMinuteMedISOts.partition('T')[2][:2]

                #print("Timestamp string:",thisEndTs )

                #print("Numerical equivalent: ", thisMinuteMedTimestamp)
                #print("Local Numerical equivalent: ", thislocalMinuteMedTimestamp)
                #print("ISO equivalent: ", thisMinuteMedISOts)
                #print("local ISO equivalent: ", thislocalMinuteMedISOts)

                thisEntrances = MVHistory[j]["entrances"]

                # Now we will use localHour instead of thisHour as the Dict key to hold the accounting for body
                # detection per hour since that is what is shown on the graph, it should behave the same otherwise
                # as when we used thisHour originally, but show a local hour instead of UTC which was confusing.

                if localHour in theHoursDict.keys():
                    #increase the number of entrances of this hour slot
                    theHoursDict[localHour]=theHoursDict[localHour]+thisEntrances
                    #check to see if the entrances for this minute are the most for this hour
                    if thisEntrances>theHoursMaxEntrancesDict[localHour]:
                        #if so, make these entrances the most for the timeframe and save the timestamp for the
                        #middle of the minute with the most entrances
                        theHoursMaxEntrancesDict[localHour]=thisEntrances
                        theHoursMaxEntrancesTimestampDict[localHour]=thisMinuteMedISOts
                        #keep track of local version as well
                        theLocalHoursMaxEntrancesTimestampDict[localHour]=thislocalMinuteMedISOts

                else:
                    #if this is the first time we see this timeslot, make the current entrances
                    #the starting balance for the dict entry
                    theHoursDict[localHour] = thisEntrances
                    theHoursMaxEntrancesDict[localHour] = thisEntrances
                    #only keep timestamp if there is at least one entry detected
                    if thisEntrances>0:
                        theHoursMaxEntrancesTimestampDict[localHour] = thisMinuteMedISOts
                        theLocalHoursMaxEntrancesTimestampDict[localHour] = thislocalMinuteMedISOts
                    else:
                        theHoursMaxEntrancesTimestampDict[localHour]=''
                        theLocalHoursMaxEntrancesTimestampDict[localHour]=''
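                # Worked trace of the accounting above (hypothetical values): two samples in
                # local hour "14" with entrances 3 then 5 leave theHoursDict["14"] == 8,
                # theHoursMaxEntrancesDict["14"] == 5, and both timestamp dicts pointing at
                # the mid-minute of the second sample.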


            for dEntryKey in theHoursDict.keys():
                the_rows.append([dEntryKey, theHoursDict[dEntryKey]])

            mv_history_chart.add_rows(the_rows)
            charts.register(mv_history_chart)

            print("Max Entrances Timestamps: ", theHoursMaxEntrancesTimestampDict)
            print("Max Local Entrances Timestamps: ", theLocalHoursMaxEntrancesTimestampDict)

            #theScreenshots is an array of arrays in the format [ timestamp string, snapshot URL ]
            #this is to be passed to the form that will render them
            theScreenshots=[]

            for dTimeStampKey in theHoursMaxEntrancesTimestampDict.keys():
                if theHoursMaxEntrancesTimestampDict[dTimeStampKey]!='':
                    screenShotURLdata=getCameraScreenshot(theSERIAL,theHoursMaxEntrancesTimestampDict[dTimeStampKey])
                    print("getCameraSCreenshot returned: ",screenShotURLdata)
                    if  screenShotURLdata != 'link error':
                        screenShotURL = json.loads(screenShotURLdata)
                        #Passing theLocalHoursMaxEntrancesTimestampDict[dTimeStampKey] instead of theHoursMaxEntrancesTimestampDict[dTimeStampKey] below
                        #to show a local timestamp we calculated in a previous loop
                        theScreenshots.append([ theLocalHoursMaxEntrancesTimestampDict[dTimeStampKey], screenShotURL["url"]])

            # wait for the URLs to be valid
            print("Waiting 10 seconds...")
            time.sleep(10)
            return render_template("mvHistory.html", historyChart=mv_history_chart, snapshotsArray=theScreenshots, localTimezone=local_timezone_str)
    else:

        devices_data=getDevices()
        if devices_data != 'link error':

            AllDevices=json.loads(devices_data)

            #theDeviceCharts is just a list (array) of the names of the charts constructed with the
            #google charts flask library. They are to be iterated through to place on the History/details page
            theDeviceCharts=[]
            #theDeviceDetails is a list of the details of each camera device. Each entry has a serial number, a label and
            #a list of zones, each with a zoneId and label
            theDeviceDetails=[]

            theChartNum=0

            for theDevice in AllDevices:
                theModel=theDevice["model"]

                if theModel[:4] not in COLLECT_CAMERAS_MVSENSE_CAPABLE:
                    continue

                data=getMVOverview(theDevice["serial"])
                if data == 'link error':
                    continue

                print("getMVOverview returned:" , data)
                MVZones=json.loads(data)

                zonesdetaildata=getMVZones(theDevice["serial"])
                if zonesdetaildata == 'link error':
                    continue

                print("getMVZones returned:" , zonesdetaildata)
                MVZonesDetails=json.loads(zonesdetaildata)

                # add a chart
                #first add the name of the chart to the list of charts to be displayed in the page
                theDeviceCharts.append("chart"+str(theChartNum))

                #now append the top level details of the camera for this chart to theDeviceDetails
                theDeviceDetails.append([theDevice["serial"],theDevice["name"],[]])

                #now create the chart object using the serial as the name and the name of the device as the title
                mv_overview_chart = ColumnChart("chart"+str(theChartNum), options={"title": theDevice["name"],
                                                                          "width": 800,
                                                                          "height": 400,
                                                                          "hAxis.title": "Hour",
                                                                          "animation": animation_option})
                mv_overview_chart.add_column("string", "Zone")
                mv_overview_chart.add_column("number", "Visitors")
                print(data)
                the_rows = []
                for j in range(len(MVZones)):
                    thisZone=MVZones[j]
                    #assuming the number of zone overview entries matches the number of zones here
                    thisZoneDetails=MVZonesDetails[j]
                    the_rows.append([ str(thisZoneDetails["label"]), thisZone["entrances"] ])
                    # store away the zoneID and serial of the camera to pass to the form so when someone clicks
                    # on a bar or button to expand detail, it comes back to this function in the POST section
                    # to know which zone from which camera to use


                    #we are assuming below that there is one chart per MV-capable camera; if that
                    # changes we need to figure out another way to index theDeviceDetails or use
                    # some other method to store/retrieve the data besides a list of lists
                    theDeviceDetails[theChartNum][2].append([thisZoneDetails["zoneId"], thisZoneDetails["label"]])

                mv_overview_chart.add_rows(the_rows)
                charts.register(mv_overview_chart)

                theChartNum+=1

            print("Rendering overview form with:")
            print("allTheDetails=",theDeviceDetails)

            return render_template("mvOverview.html",allTheCharts=theDeviceCharts,allTheDetails=theDeviceDetails)

        else:
            return render_template('error.html'), 404
示例#35
0
def _tz_localize(s: pd.Series) -> pd.Series:
    return s.dt.tz_localize('utc').dt.tz_convert(
        get_localzone().zone).dt.tz_localize(None)
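# A minimal usage sketch (hypothetical data; assumes a pytz-based tzlocal where
# get_localzone() exposes a .zone name, as _tz_localize above requires):
import pandas as pd
from tzlocal import get_localzone  # used inside _tz_localize

_sample = pd.Series(pd.to_datetime(['2021-01-01 12:00', '2021-06-01 12:00']))
# Interpret the naive values as UTC, shift them to local wall time, drop the tz.
_sample_local = _tz_localize(_sample)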
示例#36
0
                               '/StreamingHistory*.json')

    # - Make a df called streams with all the streaming history
    streams = pd.DataFrame()
    for i in streaming_hist:
        i_df = pd.read_json(i)
        streams = pd.concat([i_df, streams])

    # Sort the df by endTime
    # streams.sort_values('endTime', inplace=True)
    # streams.reset_index(drop=True, inplace=True)
    # Removing all rows where 0 msPlayed is recorded
    # streams = streams[streams['msPlayed'] != 0].copy()

    # Get local timezone of where script is run
    local_tz = str(get_localzone())
    # Convert endTime to local timezone
    streams['endTime'] = pd.to_datetime(streams["endTime"]).dt.tz_localize(
        'Europe/Stockholm').dt.tz_convert(local_tz)
    # Drop timezone
    streams['endTime'] = streams['endTime'].dt.tz_localize(None)

    # Convert msPlayed to a Timedelta series called playTime
    streams['playTime'] = pd.TimedeltaIndex(streams['msPlayed'], unit='ms')
    # Get startTime from endTime - playTime
    streams['startTime'] = streams['endTime'] - streams['playTime']

    # Create inLibrary column - True if the track is saved in the library, False if it is not
    streams['inLibrary'] = (
        streams['artistName'].isin(tracks['artist'])
        & streams['trackName'].isin(tracks['track'])).astype(bool)
示例#37
0
from flask import request, jsonify
import datetime, pytz, tzlocal, yaml, collections, logging, re

local_timezone = tzlocal.get_localzone()

# need to format it ourselves as python logging doesn't support
# anachronistic log messages
def format_client_time(t):
    t = datetime.datetime.utcfromtimestamp(t/1000.)
    ms = t.microsecond/1000.
    t -= datetime.timedelta(microseconds=t.microsecond)
    t = t.replace(tzinfo=pytz.utc).astimezone(local_timezone)
    return "{0}.{1:03}".format(t.strftime("%Y-%m-%d %H:%M:%S"),int(ms))
        
def safe_filename(fn):
    return "".join([x for x in fn if re.match(r'[\w.-]',x)])

class BAIDebugEndpoint(object):
    def __init__(self,debug_mode_path):
        self.debug_mode_path = debug_mode_path
    
    def post(self):
        with open(self.debug_mode_path) as f:
            debug_config = yaml.safe_load(f)
            streams = []
            datasets = collections.defaultdict(list)
            received = request.get_json()
            inst = received['instance_id']
            
            blackbox_logger = logging.getLogger("blackbox")
            
示例#38
0
"""Globals and Helpers for TMDB content discovery."""
from random import shuffle

import tmdbsimple as tmdb
from conreq.utils import cache, log
from conreq.utils.generic import is_key_value_in_list
from conreq.utils.multiprocessing import ReturnThread
from tzlocal import get_localzone

# TMDB API key is safe to hard-code. It can only access publicly available data.
tmdb.API_KEY = "112fd4c96274603f68620c78067d5422"
_logger = log.get_logger(__name__)
_timezone = get_localzone().zone

# Globals
ANIME_CHECK_FALLBACK = True
LANGUAGE = "en-US"
MAX_RECOMMENDED_PAGES = 10
MAX_SHUFFLED_PAGES = 30
# Days, Hours, Minutes, Seconds
EXTERNAL_ID_CACHE_TIMEOUT = 7 * 24 * 60 * 60
DISCOVER_CACHE_TIMEOUT = 3 * 24 * 60 * 60
GET_BY_TMDB_ID_CACHE_TIMEOUT = 4 * 60 * 60
GET_BY_TVDB_ID_CACHE_TIMEOUT = 4 * 60 * 60
GET_GENRES_CACHE_TIMEOUT = 30 * 24 * 60 * 60
IS_ANIME_CACHE_TIMEOUT = 7 * 24 * 60 * 60
RECOMMENDED_CACHE_TIMEOUT = 14 * 24 * 60 * 60
SIMILAR_CACHE_TIMEOUT = 14 * 24 * 60 * 60
COLLECTION_CACHE_TIMEOUT = 14 * 24 * 60 * 60
PERSON_CACHE_TIMEOUT = 14 * 24 * 60 * 60
SHUFFLED_PAGE_CACHE_TIMEOUT = 1 * 24 * 60 * 60
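# For reference, each day-based timeout above multiplies out to seconds, e.g.
# EXTERNAL_ID_CACHE_TIMEOUT = 7 * 24 * 60 * 60 = 604800 seconds (one week).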
示例#39
0
def parse(args):
    """
    Define the available arguments
    """
    try:
        timezone = get_localzone()
    except Exception:  # pragma: no cover
        timezone = 'UTC'
    if timezone == 'local':
        timezone = 'UTC'
    parser = argparse.ArgumentParser(
        description="""Bootstrap a django CMS project.
Major usage modes:

- wizard: djangocms -w -p /path/whatever project_name: ask for all the options through a
          CLI wizard.

- batch: djangocms project_name: runs with the default values plus any
         additional option provided (see below) with no question asked.

- config file: djangocms_installer --config-file /path/to/config.ini project_name: reads values
               from an ini-style config file.

Check https://djangocms-installer.readthedocs.io/en/latest/usage.html for detailed usage
information.
""",
        formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('--config-file',
                        dest='config_file',
                        action='store',
                        default=None,
                        help='Configuration file for djangocms_installer')
    parser.add_argument('--config-dump',
                        dest='config_dump',
                        action='store',
                        default=None,
                        help='Dump configuration file with current args')
    parser.add_argument('--db',
                        '-d',
                        dest='db',
                        action=DbAction,
                        default='sqlite://localhost/project.db',
                        help='Database configuration (in URL format). '
                        'Example: sqlite://localhost/project.db')
    parser.add_argument('--i18n',
                        '-i',
                        dest='i18n',
                        action='store',
                        choices=('yes', 'no'),
                        default='yes',
                        help='Activate Django I18N / L10N setting; this is '
                        'automatically activated if more than one '
                        'language is provided')
    parser.add_argument('--use-tz',
                        '-z',
                        dest='use_timezone',
                        action='store',
                        choices=('yes', 'no'),
                        default='yes',
                        help='Activate Django timezone support')
    parser.add_argument(
        '--timezone',
        '-t',
        dest='timezone',
        required=False,
        default=timezone,
        action='store',
        help='Optional default time zone. Example: Europe/Rome')
    parser.add_argument('--reversion',
                        '-e',
                        dest='reversion',
                        action='store',
                        choices=('yes', 'no'),
                        default='yes',
                        help='Install and configure reversion support '
                        '(only for django CMS 3.2 and 3.3)')
    parser.add_argument('--permissions',
                        dest='permissions',
                        action='store',
                        choices=('yes', 'no'),
                        default='no',
                        help='Activate CMS permission management')
    parser.add_argument('--pip-options',
                        help='pass custom pip options',
                        default='')
    parser.add_argument(
        '--languages',
        '-l',
        dest='languages',
        action='append',
        help=
        'Languages to enable. Option can be provided multiple times, or as a '
        'comma separated list. Only language codes supported by Django can '
        'be used here. Example: en, fr-FR, it-IT')
    parser.add_argument('--django-version',
                        dest='django_version',
                        action='store',
                        choices=data.DJANGO_SUPPORTED,
                        default=data.DJANGO_DEFAULT,
                        help='Django version')
    parser.add_argument('--cms-version',
                        '-v',
                        dest='cms_version',
                        action='store',
                        choices=data.DJANGOCMS_SUPPORTED,
                        default=data.DJANGOCMS_DEFAULT,
                        help='django CMS version')
    parser.add_argument('--parent-dir',
                        '-p',
                        dest='project_directory',
                        default='',
                        action='store',
                        help='Optional project parent directory')
    parser.add_argument('--bootstrap',
                        dest='bootstrap',
                        action='store',
                        choices=('yes', 'no'),
                        default='no',
                        help='Use Twitter Bootstrap Theme')
    parser.add_argument('--templates',
                        dest='templates',
                        action='store',
                        default='no',
                        help='Use custom template set')
    parser.add_argument(
        '--starting-page',
        dest='starting_page',
        action='store',
        choices=('yes', 'no'),
        default='no',
        help='Load a starting page with examples after installation '
        '(english language only). Choose "no" if you use a '
        'custom template set.')
    parser.add_argument(dest='project_name',
                        action='store',
                        help='Name of the project to be created')

    # Command that lists the supported plugins in verbose description
    parser.add_argument(
        '--list-plugins',
        '-P',
        dest='plugins',
        action='store_true',
        help='List plugins that are going to be installed and configured')

    # Command that dumps the requirements that would be installed
    parser.add_argument(
        '--dump-requirements',
        '-R',
        dest='dump_reqs',
        action='store_true',
        help='Dump the requirements that would be installed according to the '
        'given parameters. Useful together with the --requirements argument '
        'for customizing the virtualenv')

    # Advanced options. These have a predefined default and are not asked
    # by config wizard.
    parser.add_argument(
        '--no-input',
        '-q',
        dest='noinput',
        action='store_true',
        default=True,
        help='Don\'t run the configuration wizard, just use the '
        'provided values')
    parser.add_argument('--wizard',
                        '-w',
                        dest='wizard',
                        action='store_true',
                        default=False,
                        help='Run the configuration wizard')
    parser.add_argument(
        '--verbose',
        dest='verbose',
        action='store_true',
        default=False,
        help='Be more verbose and don\'t swallow subcommands output')
    parser.add_argument('--filer',
                        '-f',
                        dest='filer',
                        action='store_true',
                        default=True,
                        help='Install and configure django-filer plugins '
                        '- Always enabled')
    parser.add_argument('--requirements',
                        '-r',
                        dest='requirements_file',
                        action='store',
                        default=None,
                        help='Externally defined requirements file')
    parser.add_argument('--no-deps',
                        '-n',
                        dest='no_deps',
                        action='store_true',
                        default=False,
                        help='Don\'t install package dependencies')
    parser.add_argument('--no-plugins',
                        dest='no_plugins',
                        action='store_true',
                        default=False,
                        help='Don\'t install plugins')
    parser.add_argument('--no-db-driver',
                        dest='no_db_driver',
                        action='store_true',
                        default=False,
                        help='Don\'t install database package')
    parser.add_argument('--no-sync',
                        '-m',
                        dest='no_sync',
                        action='store_true',
                        default=False,
                        help='Don\'t run syncdb / migrate after bootstrapping')
    parser.add_argument('--no-user',
                        '-u',
                        dest='no_user',
                        action='store_true',
                        default=False,
                        help='Don\'t create the admin user')
    parser.add_argument('--template',
                        dest='template',
                        action='store',
                        default=None,
                        help='The path or URL to load the django project '
                        'template from.')
    parser.add_argument(
        '--extra-settings',
        dest='extra_settings',
        action='store',
        default=None,
        help='The path to an file that contains extra settings.')
    parser.add_argument('--skip-empty-check',
                        '-s',
                        dest='skip_project_dir_check',
                        action='store_true',
                        default=False,
                        help='Skip the check if project dir is empty.')
    parser.add_argument('--delete-project-dir',
                        '-c',
                        dest='delete_project_dir',
                        action='store_true',
                        default=False,
                        help='Delete project directory on creation failure.')
    parser.add_argument('--utc',
                        dest='utc',
                        action='store_true',
                        default=False,
                        help='Use UTC timezone.')

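    # If --utc was passed, override the default of the --timezone action (via
    # argparse internals) before parsing, so UTC wins over the detected timezone.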
    if '--utc' in args:
        for action in parser._positionals._actions:
            if action.dest == 'timezone':
                action.default = 'UTC'

    # If config_args then pretend that config args came from the stdin and run parser again.
    config_args = ini.parse_config_file(parser, args)
    args = parser.parse_args(config_args + args)
    if not args.wizard:
        args.noinput = True
    else:
        args.noinput = False

    if not args.project_directory:
        args.project_directory = args.project_name
    args.project_directory = os.path.abspath(args.project_directory)

    # First of all, check if the project name is valid
    if not validate_project(args.project_name):
        sys.stderr.write(
            'Project name "{0}" is not a valid app name, or it\'s already defined. '
            'Please use only numbers, letters and underscores.\n'.format(
                args.project_name))
        sys.exit(3)

    # Checking the given path
    setattr(args, 'project_path',
            os.path.join(args.project_directory, args.project_name).strip())
    if not args.skip_project_dir_check:
        if (os.path.exists(args.project_directory) and [
                path for path in os.listdir(args.project_directory)
                if not path.startswith('.')
        ]):
            sys.stderr.write(
                'Path "{0}" already exists and is not empty, please choose a different one\n'
                'If you want to use this path anyway use the -s flag to skip this check.\n'
                ''.format(args.project_directory))
            sys.exit(4)

    if os.path.exists(args.project_path):
        sys.stderr.write(
            'Path "{0}" already exists, please choose a different one\n'.
            format(args.project_path))
        sys.exit(4)

    if args.config_dump and os.path.isfile(args.config_dump):
        sys.stdout.write(
            'Cannot dump because given configuration file "{0}" exists.\n'.
            format(args.config_dump))
        sys.exit(8)

    args = _manage_args(parser, args)

    # what do we want here?!
    # * if languages are given as multiple arguments, let's use it as is
    # * if no languages are given, use a default and stop handling it further
    # * if languages are given as a comma-separated list, split it and use the
    #   resulting list.

    if not args.languages:
        try:
            args.languages = [locale.getdefaultlocale()[0].split('_')[0]]
        except Exception:  # pragma: no cover
            args.languages = ['en']
    elif isinstance(args.languages, six.string_types):
        args.languages = args.languages.split(',')
    elif len(args.languages) == 1 and isinstance(args.languages[0],
                                                 six.string_types):
        args.languages = args.languages[0].split(',')

    args.languages = [lang.strip().lower() for lang in args.languages]
    if len(args.languages) > 1:
        args.i18n = 'yes'
    args.aldryn = False
    args.filer = True

    # Convert version to numeric format for easier checking
    try:
        django_version, cms_version = supported_versions(
            args.django_version, args.cms_version)
        cms_package = data.PACKAGE_MATRIX.get(
            cms_version, data.PACKAGE_MATRIX[data.DJANGOCMS_LTS])
    except RuntimeError as e:  # pragma: no cover
        sys.stderr.write(compat.unicode(e))
        sys.exit(6)
    if django_version is None:  # pragma: no cover
        sys.stderr.write(
            'Please provide a Django supported version: {0}. Only Major.Minor '
            'version selector is accepted\n'.format(', '.join(
                data.DJANGO_SUPPORTED)))
        sys.exit(6)
    if cms_version is None:  # pragma: no cover
        sys.stderr.write(
            'Please provide a django CMS supported version: {0}. Only Major.Minor '
            'version selector is accepted\n'.format(', '.join(
                data.DJANGOCMS_SUPPORTED)))
        sys.exit(6)

    if not getattr(args, 'requirements_file'):
        requirements = []

        # django CMS version check
        if args.cms_version == 'develop':
            requirements.append(cms_package)
            warnings.warn(data.VERSION_WARNING.format('develop', 'django CMS'))
        elif args.cms_version == 'rc':  # pragma: no cover
            requirements.append(cms_package)
        elif args.cms_version == 'beta':  # pragma: no cover
            requirements.append(cms_package)
            warnings.warn(data.VERSION_WARNING.format('beta', 'django CMS'))
        else:
            requirements.append(cms_package)

        if args.cms_version in ('rc', 'develop'):
            requirements.extend(data.REQUIREMENTS['cms-master'])
        elif LooseVersion(cms_version) >= LooseVersion('3.5'):
            requirements.extend(data.REQUIREMENTS['cms-3.5'])
        elif LooseVersion(cms_version) >= LooseVersion('3.4'):
            requirements.extend(data.REQUIREMENTS['cms-3.4'])

        if not args.no_db_driver:
            requirements.append(args.db_driver)
        if not args.no_plugins:
            if args.cms_version in ('rc', 'develop'):
                requirements.extend(data.REQUIREMENTS['plugins-master'])
            elif LooseVersion(cms_version) >= LooseVersion('3.5'):
                requirements.extend(data.REQUIREMENTS['plugins-3.5'])
            elif LooseVersion(cms_version) >= LooseVersion('3.4'):
                requirements.extend(data.REQUIREMENTS['plugins-3.4'])
            requirements.extend(data.REQUIREMENTS['filer'])

        if args.aldryn:  # pragma: no cover
            requirements.extend(data.REQUIREMENTS['aldryn'])

        # Django version check
        if args.django_version == 'develop':  # pragma: no cover
            requirements.append(data.DJANGO_DEVELOP)
            warnings.warn(data.VERSION_WARNING.format('develop', 'Django'))
        elif args.django_version == 'beta':  # pragma: no cover
            requirements.append(data.DJANGO_BETA)
            warnings.warn(data.VERSION_WARNING.format('beta', 'Django'))
        else:
            requirements.append('Django<{0}'.format(
                less_than_version(django_version)))

        # Timezone support
        if args.use_timezone:
            requirements.append('pytz')

        if django_version == '1.8':
            requirements.extend(data.REQUIREMENTS['django-1.8'])
        elif django_version == '1.9':
            requirements.extend(data.REQUIREMENTS['django-1.9'])
        elif django_version == '1.10':
            requirements.extend(data.REQUIREMENTS['django-1.10'])
        elif django_version == '1.11':
            requirements.extend(data.REQUIREMENTS['django-1.11'])
        elif django_version == '2.0':  # pragma: no cover
            requirements.extend(data.REQUIREMENTS['django-2.0'])

        requirements.extend(data.REQUIREMENTS['default'])

        setattr(args, 'requirements', '\n'.join(requirements).strip())

    # Convenient shortcuts
    setattr(args, 'cms_version', cms_version)
    setattr(args, 'django_version', django_version)
    setattr(
        args, 'settings_path',
        os.path.join(args.project_directory, args.project_name,
                     'settings.py').strip())
    setattr(
        args, 'urlconf_path',
        os.path.join(args.project_directory, args.project_name,
                     'urls.py').strip())

    if args.config_dump:
        ini.dump_config_file(args.config_dump, args, parser)

    return args
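
# A hedged usage sketch (hypothetical arguments; relies on the surrounding
# djangocms_installer imports such as data, ini and validate_project):
#
#   args = parse(['--db', 'sqlite://localhost/project.db', 'my_project'])
#   print(args.timezone, args.cms_version, args.requirements)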
示例#40
0
# -*- coding: utf-8 -*-
"""Helper functions to create test content and work around API
inconsistencies among Archetypes and Dexterity.
"""
from datetime import datetime
from datetime import timedelta
from tzlocal import get_localzone

today = datetime.today()
tomorrow = today + timedelta(days=1)

TZNAME = get_localzone().zone


def create_standard_content_for_tests(portal):
    """Create one instance of each standard content type, at least."""
    from DateTime import DateTime
    from plone import api

    with api.env.adopt_roles(['Manager']):
        api.content.create(
            container=portal,
            type='Collection',
            title=u'Mandelbrot set',
            description=u'Image gallery of a zoom sequence',
            query=[{
                'i': 'Type',
                'o': 'plone.app.querystring.operation.string.is',
                'v': ['Image'],
            }],
        )
示例#41
0
import sys
import time

import pytz
from tzlocal import get_localzone

#import html2text

#max = not more than 30 000 000

#num_digits = 8

base_url = 'https://api.github.com/users?per_page=100&since='

dig_arr = []

max_digits = 0

utc = pytz.timezone('UTC')
system_tz = get_localzone()


#fancy countdown function
def countdown(t):
    t = int(t)
    while t:
        mins, secs = divmod(t, 60)
        timeformat = '{:02d}:{:02d}'.format(mins, secs)
        #python2
        #print('\r'+ timeformat, end="")
        sys.stdout.write('\r' + timeformat)
        sys.stdout.flush()
        #python3
        #print(timeformat, end='\r')
        time.sleep(1)
        t -= 1
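
# Usage sketch: render a 90-second countdown on one terminal line.
#
#   countdown(90)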
示例#42
0
 def localZone(self):   
     from tzlocal import get_localzone         
     self.scr.insert(tk.INSERT, get_localzone())
示例#43
0
 def test_server_timezone(self):
     assert h.get_display_timezone() == tzlocal.get_localzone()
示例#44
0
    def create_recurrence(self, schedule_data, schedule_result):
        """
        Creates Mediasite schedule recurrences. Specifically, these are the datetimes
        at which a recording schedule will produce presentations.

        params:
            schedule_data: dictionary containing various necessary data for creating mediasite scheduling
            schedule_result: data provided from Mediasite after a schedule is produced

        returns:
            resulting response from the mediasite web api request
        """
        logging.info("Creating schedule recurrence(s) for '" +
                     schedule_data["schedule_name"])

        #convert duration minutes to milliseconds as required by Mediasite system
        recurrence_duration = int(
            schedule_data["schedule_duration"]) * 60 * 1000

        #translate various values gathered from the UI to Mediasite-friendly conventions
        recurrence_type = self.mediasite.model.translate_schedule_recurrence_pattern(
            schedule_data["schedule_recurrence"])

        #creates a recurrence using post_data created below
        def request_create_recurrence(post_data):
            result = self.mediasite.api_client.request(
                "post",
                "Schedules('" + schedule_result["Id"] + "')/Recurrences", "",
                post_data).json()

            if self.mediasite.experienced_request_errors(result):
                return result
            else:
                if "odata.error" in result:
                    logging.error(result["odata.error"]["code"] + ": " +
                                  result["odata.error"]["message"]["value"])
                else:
                    self.mediasite.model.add_recurrence(result)

        result = ""

        #for one-time recurrence creation
        if recurrence_type == "None":
            post_data = {
                "MediasiteId":
                schedule_result["Id"],
                "RecordDuration":
                recurrence_duration,
                "StartRecordDateTime":
                schedule_data["schedule_start_datetime_utc_string"],
                "EndRecordDateTime":
                schedule_data["schedule_end_datetime_utc_string"],
                "RecurrencePattern":
                recurrence_type,
                "RecurrenceFrequency":
                schedule_data["schedule_recurrence_freq"],
                "DaysOfTheWeek":
                self.translate_schedule_days_of_week(schedule_data)
            }

            result = request_create_recurrence(post_data)

        elif recurrence_type == "Weekly":
            #for weekly recurrence creation
            """
            NOTE: due to bugs in Mediasite system we defer to using one-time schedules for each day of the week
            specified within the provided time frame. This produces more accurate and stable results as of the time
            of writing this in Feb 2018.
            """

            #determine date range for use in creating single instances which are less error-prone
            datelist = self.recurrence_datelist_generator(schedule_data)

            #for each date in the list produced above, we create post data and request a one-time schedule recurrence
            for date in datelist:

                ms_friendly_datetime_start_utc = self.convert_datetime_local_to_utc(
                    date)
                ms_friendly_datetime_start = ms_friendly_datetime_start_utc.strftime(
                    "%Y-%m-%dT%H:%M:%S")

                #find our current timezone
                local_tz = tzlocal.get_localzone()

                #check if current datetime is dst or not
                now = datetime.datetime.now()
                #is_now_dst = now.astimezone(local_tz).dst() != datetime.timedelta(0)
                is_now_dst = local_tz.localize(
                    now).dst() != datetime.timedelta(0)

                #check if future datetime is dst or not
                #is_later_dst = date.astimezone(local_tz).dst() != datetime.timedelta(0)
                is_later_dst = local_tz.localize(
                    date).dst() != datetime.timedelta(0)

                #compare between the current and future datetimes to find differences and adjust
                if is_now_dst != is_later_dst:

                    #convert future datetime to be an hour more or less based on the dst comparison above
                    if is_now_dst and not is_later_dst:
                        ms_friendly_datetime_start = (
                            ms_friendly_datetime_start_utc +
                            datetime.timedelta(hours=1)
                        ).strftime("%Y-%m-%dT%H:%M:%S")

                    elif not is_now_dst and is_later_dst:
                        ms_friendly_datetime_start = (
                            ms_friendly_datetime_start_utc -
                            datetime.timedelta(hours=1)
                        ).strftime("%Y-%m-%dT%H:%M:%S")

                post_data = {
                    "MediasiteId": schedule_result["Id"],
                    "RecordDuration": recurrence_duration,
                    "StartRecordDateTime": ms_friendly_datetime_start,
                    "RecurrencePattern": "None",
                }

                request_create_recurrence(post_data)

            return result

        return result
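
    # The DST handling above hinges on comparing .dst() offsets between "now" and a
    # future naive datetime. A self-contained sketch of that check (assumes a
    # pytz-based tzlocal):
    #
    #   import datetime, tzlocal
    #   tz = tzlocal.get_localzone()
    #   now = datetime.datetime.now()
    #   later = now + datetime.timedelta(days=120)
    #   crosses_dst = tz.localize(now).dst() != tz.localize(later).dst()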
示例#45
0
def get_stream_data():
    '''
    Pulls data from MyData/EndSong.json and returns a df of the data
    '''
    df = pd.read_json('MyData/EndSong.json', lines=True)
    df = df[[
        'ts', 'username', 'ms_played', 'master_metadata_track_name',
        'master_metadata_album_artist_name',
        'master_metadata_album_album_name', 'reason_start', 'reason_end',
        'shuffle', 'offline', 'incognito_mode', 'episode_name',
        'episode_show_name'
    ]].copy()

    local_tz = str(get_localzone())
    df['ts_utc'] = pd.to_datetime(df['ts'])
    df['ts_tz'] = df['ts_utc'].dt.tz_convert(local_tz)
    df = df.sort_values('ts_utc').reset_index(drop=True).copy()
    df = df.fillna('').copy()

    df = df.rename(columns=dict(master_metadata_track_name='track',
                                master_metadata_album_artist_name='artist',
                                master_metadata_album_album_name='album',
                                episode_show_name='show')).copy()

    # - Removes all entries with null tracks and episodes - accounts for 1.5 percent of ms_played sum
    # - Not sure what these records are
    df = df[(df['track'] != '') | (df['episode_name'] != '')].copy()

    # - make audio type column - podcast or music
    def audio_kind(row):
        if row['track'] == '' and row['artist'] == '' and row['album'] == '':
            return 'Podcast'
        elif row['episode_name'] == '' and row['show'] == '':
            return 'Music'
        else:
            return 'Other'

    df['audio_kind'] = df.apply(audio_kind, axis=1)

    # - make skipped column: True when under 30 seconds (30000 ms) played, regardless of audio kind
    def skipped(row):
        return row['ms_played'] < 30000

    df['skipped'] = df.apply(skipped, axis=1)

    # - Create season column
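    # - (meteorological mapping: Mar-May Spring, Jun-Aug Summer, Sep-Nov Fall, Dec-Feb Winter)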
    def season(x):
        if 3 <= x <= 5:
            return 'Spring'
        elif 6 <= x <= 8:
            return 'Summer'
        elif 9 <= x <= 11:
            return 'Fall'
        else:
            return 'Winter'

    df['season'] = df['ts_tz'].dt.month.apply(season)

    # - Move datetime columns to the farthest left of the df
    df = df[['ts_utc', 'ts_tz'] + list(df.columns[1:-5]) +
            list(df.columns[-3:])].copy()

    # - Add month and week column
    df['month'] = df['ts_tz'].dt.strftime("%b '%y")
    df['week'] = df['ts_tz'].dt.strftime("Week %W '%y")
    df['weekday_#'] = df['ts_tz'].dt.weekday
    df['weekday'] = df['weekday_#'].apply(lambda x: list(calendar.day_name)[x])

    # - Get song/podcast total time from Spotify API
    # - Change bash brothers to Lonely Island
    df['artist'] = df['artist'].replace(
        'The Unauthorized Bash Brothers Experience', 'The Lonely Island')
    return df
示例#46
0
    def run_until_complete(
        cls,
        service_files: Union[List, set],
        configuration: Optional[Dict] = None,
        watcher: Any = None,
    ) -> None:
        def stop_services() -> None:
            asyncio.ensure_future(_stop_services())

        async def _stop_services() -> None:
            if cls._close_waiter and not cls._close_waiter.done():
                cls._close_waiter.set_result(None)
                for service in cls.services:
                    try:
                        service.stop_service()
                    except Exception:
                        pass
                if cls._stopped_waiter:
                    cls._stopped_waiter.set_result(None)
            if cls._stopped_waiter:
                await cls._stopped_waiter

        def sigintHandler(*args: Any) -> None:
            sys.stdout.write("\b\b\r")
            sys.stdout.flush()
            logging.getLogger("system").warning(
                "Received <ctrl+c> interrupt [SIGINT]")
            cls.restart_services = False

        def sigtermHandler(*args: Any) -> None:
            logging.getLogger("system").warning(
                "Received termination signal [SIGTERM]")
            cls.restart_services = False

        logging.basicConfig(level=logging.DEBUG)

        loop = asyncio.get_event_loop()
        if loop and loop.is_closed():
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)

        for signame in ("SIGINT", "SIGTERM"):
            loop.add_signal_handler(getattr(signal, signame), stop_services)

        signal.siginterrupt(signal.SIGTERM, False)
        signal.siginterrupt(signal.SIGUSR1, False)
        signal.signal(signal.SIGINT, sigintHandler)
        signal.signal(signal.SIGTERM, sigtermHandler)

        if watcher:

            async def _watcher_restart(
                    updated_files: Union[List, set]) -> None:
                cls.restart_services = True

                for file in service_files:
                    try:
                        ServiceImporter.import_service_file(file)
                    except (SyntaxError, IndentationError) as e:
                        logging.getLogger("exception").exception(
                            "Uncaught exception: {}".format(str(e)))
                        logging.getLogger("watcher.restart").warning(
                            "Service cannot restart due to errors")
                        cls.restart_services = False
                        return

                pre_import_current_modules = [m for m in sys.modules.keys()]
                cwd = os.getcwd()
                for file in updated_files:
                    if file.lower().endswith(".py"):
                        module_name = file[:-3].replace("/", ".")
                        module_name_full_path = "{}/{}".format(
                            os.path.realpath(cwd),
                            file)[:-3].replace("/", ".")
                        try:
                            for m in pre_import_current_modules:
                                if m == module_name or (
                                        len(m) > len(file)
                                        and module_name_full_path.endswith(m)):
                                    ServiceImporter.import_module(file)
                        except (SyntaxError, IndentationError) as e:
                            logging.getLogger("exception").exception(
                                "Uncaught exception: {}".format(str(e)))
                            logging.getLogger("watcher.restart").warning(
                                "Service cannot restart due to errors")
                            cls.restart_services = False
                            return

                logging.getLogger("watcher.restart").warning(
                    "Restarting services")
                stop_services()

            watcher_future = loop.run_until_complete(
                watcher.watch(loop=loop, callback_func=_watcher_restart))

        cls.restart_services = True
        init_modules = [m for m in sys.modules.keys()]
        safe_modules = [
            "__future__",
            "__main__",
            "_abc",
            "_asyncio",
            "_bisect",
            "_blake2",
            "_bootlocale",
            "_bz2",
            "_cares",
            "_cares.lib",
            "_cffi_backend",
            "_codecs",
            "_collections",
            "_collections_abc",
            "_compat_pickle",
            "_compression",
            "_contextvars",
            "_ctypes",
            "_cython_0_29_21",
            "_datetime",
            "_decimal",
            "_elementtree",
            "_frozen_importlib",
            "_frozen_importlib_external",
            "_functools",
            "_hashlib",
            "_heapq",
            "_imp",
            "_io",
            "_json",
            "_locale",
            "_lzma",
            "_markupbase",
            "_opcode",
            "_operator",
            "_pickle",
            "_posixsubprocess",
            "_queue",
            "_random",
            "_sha3",
            "_sha512",
            "_signal",
            "_sitebuiltins",
            "_socket",
            "_sre",
            "_ssl",
            "_stat",
            "_string",
            "_struct",
            "_thread",
            "_uuid",
            "_warnings",
            "_weakref",
            "_weakrefset",
            "abc",
            "aioamqp",
            "aioamqp.channel",
            "aioamqp.constants",
            "aioamqp.envelope",
            "aioamqp.exceptions",
            "aioamqp.frame",
            "aioamqp.properties",
            "aioamqp.protocol",
            "aioamqp.version",
            "aiobotocore",
            "aiobotocore._endpoint_helpers",
            "aiobotocore.args",
            "aiobotocore.client",
            "aiobotocore.config",
            "aiobotocore.credentials",
            "aiobotocore.endpoint",
            "aiobotocore.eventstream",
            "aiobotocore.hooks",
            "aiobotocore.paginate",
            "aiobotocore.parsers",
            "aiobotocore.response",
            "aiobotocore.session",
            "aiobotocore.signers",
            "aiobotocore.utils",
            "aiobotocore.waiter",
            "aiodns",
            "aiodns.error",
            "aiohttp",
            "aiohttp._frozenlist",
            "aiohttp._helpers",
            "aiohttp._http_parser",
            "aiohttp._http_writer",
            "aiohttp._websocket",
            "aiohttp.abc",
            "aiohttp.base_protocol",
            "aiohttp.client",
            "aiohttp.client_exceptions",
            "aiohttp.client_proto",
            "aiohttp.client_reqrep",
            "aiohttp.client_ws",
            "aiohttp.connector",
            "aiohttp.cookiejar",
            "aiohttp.formdata",
            "aiohttp.frozenlist",
            "aiohttp.hdrs",
            "aiohttp.helpers",
            "aiohttp.http",
            "aiohttp.http_exceptions",
            "aiohttp.http_parser",
            "aiohttp.http_websocket",
            "aiohttp.http_writer",
            "aiohttp.locks",
            "aiohttp.log",
            "aiohttp.multipart",
            "aiohttp.payload",
            "aiohttp.payload_streamer",
            "aiohttp.resolver",
            "aiohttp.signals",
            "aiohttp.streams",
            "aiohttp.tcp_helpers",
            "aiohttp.tracing",
            "aiohttp.typedefs",
            "aiohttp.web",
            "aiohttp.web_app",
            "aiohttp.web_exceptions",
            "aiohttp.web_fileresponse",
            "aiohttp.web_log",
            "aiohttp.web_middlewares",
            "aiohttp.web_protocol",
            "aiohttp.web_request",
            "aiohttp.web_response",
            "aiohttp.web_routedef",
            "aiohttp.web_runner",
            "aiohttp.web_server",
            "aiohttp.web_urldispatcher",
            "aiohttp.web_ws",
            "aioitertools",
            "aioitertools.__version__",
            "aioitertools.asyncio",
            "aioitertools.builtins",
            "aioitertools.helpers",
            "aioitertools.itertools",
            "aioitertools.types",
            "argparse",
            "async_timeout",
            "asyncio",
            "asyncio.base_events",
            "asyncio.base_futures",
            "asyncio.base_subprocess",
            "asyncio.base_tasks",
            "asyncio.constants",
            "asyncio.coroutines",
            "asyncio.events",
            "asyncio.exceptions",
            "asyncio.format_helpers",
            "asyncio.futures",
            "asyncio.locks",
            "asyncio.log",
            "asyncio.protocols",
            "asyncio.queues",
            "asyncio.runners",
            "asyncio.selector_events",
            "asyncio.sslproto",
            "asyncio.staggered",
            "asyncio.streams",
            "asyncio.subprocess",
            "asyncio.tasks",
            "asyncio.transports",
            "asyncio.trsock",
            "asyncio.unix_events",
            "atexit",
            "attr",
            "attr._compat",
            "attr._config",
            "attr._funcs",
            "attr._make",
            "attr._next_gen",
            "attr._version_info",
            "attr.converters",
            "attr.exceptions",
            "attr.filters",
            "attr.setters",
            "attr.validators",
            "base64",
            "binascii",
            "bisect",
            "botocore",
            "botocore.args",
            "botocore.auth",
            "botocore.awsrequest",
            "botocore.client",
            "botocore.compat",
            "botocore.config",
            "botocore.configloader",
            "botocore.configprovider",
            "botocore.credentials",
            "botocore.discovery",
            "botocore.docs",
            "botocore.docs.bcdoc",
            "botocore.docs.bcdoc.docstringparser",
            "botocore.docs.bcdoc.restdoc",
            "botocore.docs.bcdoc.style",
            "botocore.docs.client",
            "botocore.docs.docstring",
            "botocore.docs.example",
            "botocore.docs.method",
            "botocore.docs.paginator",
            "botocore.docs.params",
            "botocore.docs.service",
            "botocore.docs.shape",
            "botocore.docs.sharedexample",
            "botocore.docs.utils",
            "botocore.docs.waiter",
            "botocore.endpoint",
            "botocore.errorfactory",
            "botocore.eventstream",
            "botocore.exceptions",
            "botocore.handlers",
            "botocore.history",
            "botocore.hooks",
            "botocore.httpsession",
            "botocore.loaders",
            "botocore.model",
            "botocore.monitoring",
            "botocore.paginate",
            "botocore.parsers",
            "botocore.regions",
            "botocore.response",
            "botocore.retries",
            "botocore.retries.adaptive",
            "botocore.retries.base",
            "botocore.retries.bucket",
            "botocore.retries.quota",
            "botocore.retries.special",
            "botocore.retries.standard",
            "botocore.retries.throttling",
            "botocore.retryhandler",
            "botocore.serialize",
            "botocore.session",
            "botocore.signers",
            "botocore.translate",
            "botocore.utils",
            "botocore.validate",
            "botocore.vendored",
            "botocore.vendored.requests",
            "botocore.vendored.requests.exceptions",
            "botocore.vendored.requests.packages",
            "botocore.vendored.requests.packages.urllib3",
            "botocore.vendored.requests.packages.urllib3.exceptions",
            "botocore.vendored.six",
            "botocore.vendored.six.moves",
            "botocore.vendored.six.moves.urllib",
            "botocore.vendored.six.moves.urllib.request",
            "botocore.vendored.six.moves.urllib_parse",
            "botocore.waiter",
            "builtins",
            "bz2",
            "calendar",
            "cchardet",
            "cchardet._cchardet",
            "cchardet.version",
            "certifi",
            "certifi.core",
            "cgi",
            "codecs",
            "collections",
            "collections.abc",
            "colorama",
            "colorama.ansi",
            "colorama.ansitowin32",
            "colorama.initialise",
            "colorama.win32",
            "colorama.winterm",
            "concurrent",
            "concurrent.futures",
            "concurrent.futures._base",
            "contextlib",
            "contextvars",
            "copy",
            "copyreg",
            "ctypes",
            "ctypes._endian",
            "cython_runtime",
            "datetime",
            "dateutil",
            "dateutil._common",
            "dateutil._version",
            "dateutil.parser",
            "dateutil.parser._parser",
            "dateutil.parser.isoparser",
            "dateutil.relativedelta",
            "dateutil.tz",
            "dateutil.tz._common",
            "dateutil.tz._factories",
            "dateutil.tz.tz",
            "decimal",
            "dis",
            "email",
            "email._encoded_words",
            "email._parseaddr",
            "email._policybase",
            "email.base64mime",
            "email.charset",
            "email.encoders",
            "email.errors",
            "email.feedparser",
            "email.header",
            "email.iterators",
            "email.message",
            "email.parser",
            "email.quoprimime",
            "email.utils",
            "encodings",
            "encodings.aliases",
            "encodings.latin_1",
            "encodings.utf_8",
            "enum",
            "errno",
            "fnmatch",
            "functools",
            "genericpath",
            "getopt",
            "getpass",
            "gettext",
            "google",
            "google.protobuf",
            "grp",
            "hashlib",
            "heapq",
            "hmac",
            "html",
            "html.entities",
            "html.parser",
            "http",
            "http.client",
            "http.cookies",
            "http.server",
            "idna",
            "idna.core",
            "idna.idnadata",
            "idna.intranges",
            "idna.package_data",
            "importlib",
            "importlib._bootstrap",
            "importlib._bootstrap_external",
            "importlib.abc",
            "importlib.machinery",
            "importlib.resources",
            "importlib.util",
            "inspect",
            "io",
            "ipaddress",
            "itertools",
            "jmespath",
            "jmespath.ast",
            "jmespath.compat",
            "jmespath.exceptions",
            "jmespath.functions",
            "jmespath.lexer",
            "jmespath.parser",
            "jmespath.visitor",
            "json",
            "json.decoder",
            "json.encoder",
            "json.scanner",
            "keyword",
            "linecache",
            "locale",
            "logging",
            "logging.handlers",
            "lzma",
            "marshal",
            "math",
            "mimetypes",
            "multidict",
            "multidict._abc",
            "multidict._compat",
            "multidict._multidict",
            "multidict._multidict_base",
            "netrc",
            "ntpath",
            "numbers",
            "opcode",
            "operator",
            "os",
            "os.path",
            "pamqp",
            "pamqp.body",
            "pamqp.constants",
            "pamqp.decode",
            "pamqp.encode",
            "pamqp.exceptions",
            "pamqp.frame",
            "pamqp.header",
            "pamqp.heartbeat",
            "pamqp.specification",
            "pathlib",
            "pickle",
            "platform",
            "posix",
            "posixpath",
            "pwd",
            "pycares",
            "pycares._cares",
            "pycares._version",
            "pycares.errno",
            "pycares.utils",
            "pyexpat",
            "pyexpat.errors",
            "pyexpat.model",
            "pytz",
            "pytz.exceptions",
            "pytz.lazy",
            "pytz.tzfile",
            "pytz.tzinfo",
            "queue",
            "quopri",
            "random",
            "re",
            "reprlib",
            "select",
            "selectors",
            "shlex",
            "shutil",
            "signal",
            "site",
            "six",
            "six.moves",
            "socket",
            "socketserver",
            "sre_compile",
            "sre_constants",
            "sre_parse",
            "ssl",
            "stat",
            "string",
            "struct",
            "subprocess",
            "sys",
            "tempfile",
            "termios",
            "threading",
            "time",
            "token",
            "tokenize",
            "tomodachi",
            "tomodachi.__version__",
            "tomodachi.cli",
            "tomodachi.config",
            "tomodachi.container",
            "tomodachi.envelope",
            "tomodachi.envelope.json_base",
            "tomodachi.protocol.json_base",
            "tomodachi.envelope.proto_build",
            "tomodachi.envelope.proto_build.protobuf",
            "tomodachi.envelope.proto_build.protobuf.sns_sqs_message_pb2",
            "tomodachi.envelope.protobuf_base",
            "tomodachi.helpers",
            "tomodachi.helpers.crontab",
            "tomodachi.helpers.dict",
            "tomodachi.helpers.execution_context",
            "tomodachi.helpers.logging",
            "tomodachi.helpers.middleware",
            "tomodachi.importer",
            "tomodachi.invoker",
            "tomodachi.invoker.base",
            "tomodachi.invoker.decorator",
            "tomodachi.launcher",
            "tomodachi.protocol",
            "tomodachi.protocol.json_base",
            "tomodachi.protocol.protobuf_base",
            "tomodachi.transport",
            "tomodachi.transport.amqp",
            "tomodachi.transport.aws_sns_sqs",
            "tomodachi.transport.http",
            "tomodachi.transport.schedule",
            "tomodachi.watcher",
            "traceback",
            "types",
            "typing",
            "typing.io",
            "typing.re",
            "typing_extensions",
            "tzlocal",
            "tzlocal.unix",
            "tzlocal.utils",
            "unicodedata",
            "urllib",
            "urllib.error",
            "urllib.parse",
            "urllib.request",
            "urllib.response",
            "urllib3",
            "urllib3._collections",
            "urllib3._version",
            "urllib3.connection",
            "urllib3.connectionpool",
            "urllib3.contrib",
            "urllib3.contrib._appengine_environ",
            "urllib3.exceptions",
            "urllib3.fields",
            "urllib3.filepost",
            "urllib3.packages",
            "urllib3.packages.six",
            "urllib3.packages.six.moves",
            "urllib3.packages.six.moves.http_client",
            "urllib3.packages.six.moves.urllib",
            "urllib3.packages.six.moves.urllib.parse",
            "urllib3.packages.ssl_match_hostname",
            "urllib3.poolmanager",
            "urllib3.request",
            "urllib3.response",
            "urllib3.util",
            "urllib3.util.connection",
            "urllib3.util.queue",
            "urllib3.util.request",
            "urllib3.util.response",
            "urllib3.util.retry",
            "urllib3.util.ssl_",
            "urllib3.util.timeout",
            "urllib3.util.url",
            "urllib3.util.wait",
            "uu",
            "uuid",
            "warnings",
            "weakref",
            "wrapt",
            "wrapt.decorators",
            "wrapt.importer",
            "wrapt.wrappers",
            "xml",
            "xml.etree",
            "xml.etree.ElementPath",
            "xml.etree.ElementTree",
            "xml.etree.cElementTree",
            "yarl",
            "yarl._quoting",
            "yarl._quoting_c",
            "yarl._url",
            "zipimport",
            "zlib",
        ]

        restarting = False
        while cls.restart_services:
            init_timestamp = time.time()
            init_timestamp_str = datetime.datetime.utcfromtimestamp(
                init_timestamp).isoformat() + "Z"

            process_id = os.getpid()

            event_loop_alias = ""
            event_loop_version = ""
            try:
                if "uvloop." in str(loop.__class__):
                    event_loop_alias = "uvloop"
                    import uvloop  # noqa  # isort:skip

                    event_loop_version = str(uvloop.__version__)
                elif "asyncio." in str(loop.__class__):
                    event_loop_alias = "asyncio"
                else:
                    event_loop_alias = "{}.{}".format(
                        loop.__class__.__module__, loop.__class__.__name__)
            except Exception:
                event_loop_alias = str(loop)

            clear_services()
            clear_execution_context()
            set_execution_context({
                "tomodachi_version": tomodachi.__version__,
                "python_version": platform.python_version(),
                "system_platform": platform.system(),
                "process_id": process_id,
                "init_timestamp": init_timestamp_str,
                "event_loop": event_loop_alias,
            })

            if event_loop_alias == "uvloop" and event_loop_version:
                set_execution_context({
                    "uvloop_version": event_loop_version,
                })

            if watcher:
                tz: Any = None
                utc_tz: Any = None

                try:
                    import pytz  # noqa  # isort:skip
                    import tzlocal  # noqa  # isort:skip

                    utc_tz = pytz.UTC
                    try:
                        tz = tzlocal.get_localzone()
                        if not tz:
                            tz = pytz.UTC
                    except Exception:
                        tz = pytz.UTC
                except Exception:
                    pass

                init_local_datetime = (
                    datetime.datetime.fromtimestamp(init_timestamp)
                    if tz and tz is not utc_tz and str(tz) != "UTC" else
                    datetime.datetime.utcfromtimestamp(init_timestamp))

                print("---")
                print("Starting tomodachi services (pid: {}) ...".format(
                    process_id))
                for file in service_files:
                    print("* {}".format(file))

                print()
                print("Current version: tomodachi {} on Python {}".format(
                    tomodachi.__version__, platform.python_version()))
                print("Event loop implementation: {}{}".format(
                    event_loop_alias, " {}".format(event_loop_version)
                    if event_loop_version else ""))
                if tz:
                    print("Local time: {} {}".format(
                        init_local_datetime.strftime(
                            "%B %d, %Y - %H:%M:%S,%f"), str(tz)))
                print("Timestamp in UTC: {}".format(init_timestamp_str))
                print()
                print(
                    "File watcher is active - code changes will automatically restart services"
                )
                print("Quit running services with <ctrl+c>")
                print()

            cls._close_waiter = asyncio.Future()
            cls._stopped_waiter = asyncio.Future()
            cls.restart_services = False

            try:
                cls.services = set([
                    ServiceContainer(ServiceImporter.import_service_file(file),
                                     configuration) for file in service_files
                ])
                result = loop.run_until_complete(
                    asyncio.wait([
                        asyncio.ensure_future(service.run_until_complete())
                        for service in cls.services
                    ]))
                exception = [
                    v.exception()
                    for v in [value for value in result if value][0]
                    if v.exception()
                ]
                if exception:
                    raise cast(Exception, exception[0])
            except tomodachi.importer.ServicePackageError:
                pass
            except Exception as e:
                logging.getLogger("exception").exception(
                    "Uncaught exception: {}".format(str(e)))

                if isinstance(e, ModuleNotFoundError):  # pragma: no cover
                    missing_module_name = str(getattr(e, "name", None) or "")
                    if missing_module_name:
                        color = ""
                        color_reset = ""
                        try:
                            import colorama  # noqa  # isort:skip

                            color = colorama.Fore.WHITE + colorama.Back.RED
                            color_reset = colorama.Style.RESET_ALL
                        except Exception:
                            pass

                        print("")
                        print(
                            "{}[fatal error] The '{}' package is missing or cannot be imported.{}"
                            .format(color, missing_module_name, color_reset))
                        print("")

                if restarting:
                    logging.getLogger("watcher.restart").warning(
                        "Service cannot restart due to errors")
                    logging.getLogger("watcher.restart").warning(
                        "Trying again in 1.5 seconds")
                    loop.run_until_complete(asyncio.wait([asyncio.sleep(1.5)]))
                    if cls._close_waiter and not cls._close_waiter.done():
                        cls.restart_services = True
                    else:
                        for signame in ("SIGINT", "SIGTERM"):
                            loop.remove_signal_handler(getattr(
                                signal, signame))
                else:
                    for signame in ("SIGINT", "SIGTERM"):
                        loop.remove_signal_handler(getattr(signal, signame))

            # Purge every module imported while the services ran, keeping those
            # recorded at startup (init_modules) or whitelisted in safe_modules,
            # so a watcher-triggered restart re-imports service code from disk.
            current_modules = list(sys.modules.keys())
            for m in current_modules:
                if m not in init_modules and m not in safe_modules:
                    del sys.modules[m]

            importlib.reload(tomodachi.container)
            importlib.reload(tomodachi.invoker)
            importlib.reload(tomodachi.invoker.base)
            importlib.reload(tomodachi.importer)

            restarting = True

        if watcher:
            if not watcher_future.done():
                try:
                    watcher_future.set_result(None)
                except RuntimeError:  # pragma: no cover
                    watcher_future.cancel()
                if not watcher_future.done():  # pragma: no cover
                    try:
                        loop.run_until_complete(watcher_future)
                    except (Exception, CancelledError):
                        pass
Example #47
class BlindsSchedule:
    # class constants for days of the week, these values should NOT be changed
    SUNDAY = "sunday"
    MONDAY = "monday"
    TUESDAY = "tuesday"
    WEDNESDAY = "wednesday"
    THURSDAY = "thursday"
    FRIDAY = "friday"
    SATURDAY = "saturday"

    DAYS_OF_WEEK = [
        MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY
    ]

    # object attributes describing the schedule
    _default_mode = None
    _default_pos = None
    _timezone = get_localzone()
    _schedule = {
        SUNDAY: [],
        MONDAY: [],
        TUESDAY: [],
        WEDNESDAY: [],
        THURSDAY: [],
        FRIDAY: [],
        SATURDAY: []
    }
    '''
    Constructor for BlindsSchedule. Initializes it with the default mode and position, and a 
    schedule of time blocks if provided. 
    Calls self.validate at the end to ensure that the created object is valid.
    The schedule is also sorted and checked for conflicts.  
    '''
    def __init__(self,
                 default_mode,
                 default_pos=None,
                 schedule=None,
                 timezone=None):
        self._default_mode = default_mode
        self._default_pos = default_pos

        if timezone is not None:
            self._timezone = timezone

        if schedule is not None:
            self._schedule = schedule

        # validation and conflict checking are separated so that the time blocks can be sorted without error
        self.validate()
        self.sortScheduleBlocks()
        self.checkHasNoTimeConflicts()

    '''
    Sort the schedule using sortedTimeBlockList
    '''

    def sortScheduleBlocks(self):
        for day in BlindsSchedule.DAYS_OF_WEEK:
            self._schedule[day] = BlindsSchedule.sortedTimeBlockList(
                self._schedule[day])

    '''
    Validates the BlindsSchedule object. Returns True if the BlindsSchedule is properly defined, and throws exceptions otherwise. 
    InvalidBlindsScheduleException is thrown when the parameters of the object itself are invalid, such as 
    having an improperly defined schedule or improperly set default behaviour. 

    The validation assumes that the ScheduleTimeBlocks themselves are valid, as these should be validated upon their
    creation and update, and thus should be valid by the time they are used in the BlindsSchedule object. This is a
    conscious choice to improve performance of the validate calls.

    Does NOT check for time conflicts
    '''

    def validate(self):
        # checks for default mode and position
        if not isinstance(self._default_mode, BlindMode):
            raise InvalidBlindsScheduleException(
                "default mode must be a value from the BlindMode enum")

        if self._default_mode == BlindMode.MANUAL and self._default_pos is None:
            raise InvalidBlindsScheduleException(
                "default position must be specified when using BlindMode.MANUAL"
            )
        elif self._default_mode == BlindMode.MANUAL and (
                self._default_pos > 100 or self._default_pos < -100):
            raise InvalidBlindsScheduleException(
                "default position must a value from -100 to 100")

        # checks for _schedule being well formatted
        if not isinstance(self._schedule, dict) or (set(
                self._schedule.keys()) != set(BlindsSchedule.DAYS_OF_WEEK)):
            raise InvalidBlindsScheduleException(
                "schedule must be a dictionary with keys being the days of the week and values being lists of ScheduleTimeBlocks"
            )

        for day in BlindsSchedule.DAYS_OF_WEEK:
            timeBlockList = self._schedule[day]
            if not all(
                    map(lambda x: isinstance(x, ScheduleTimeBlock),
                        timeBlockList)):
                raise InvalidBlindsScheduleException(
                    "schedule must be a dictionary with keys being the days of the week and values being lists of ScheduleTimeBlocks"
                )

        return True

    '''
    Checks for scheduling conflicts within a BlindsSchedule object. 
    Will raise BlindSchedulingException when there are conflicting time blocks on a given day. 
    This should only be called after the time blocks for each day are sorted. 
    Otherwise, returns True when there are no conflicts. 
    '''

    def checkHasNoTimeConflicts(self):
        for day in BlindsSchedule.DAYS_OF_WEEK:
            # checks for time conflicts
            if BlindsSchedule.hasConflict(self._schedule[day]):
                raise BlindSchedulingException(
                    "schedule has conflicting time blocks on " + day)

        return True

    '''
    Returns timeBlockList of ScheduleTimeBlocks sorted by start times. Does NOT check for time conflicts.
    Assumes that all the time blocks are valid. 
    '''

    @staticmethod
    def sortedTimeBlockList(timeBlockList):
        return sorted(timeBlockList, key=lambda x: x._start)

    '''
    Checks for time block conflicts given a list of ScheduleTimeBlocks sorted by start time.
    A time conflict is defined as having time blocks with overlapping durations.
    Assumes that all the time blocks are valid. 

    Returns True if there is a conflict, False otherwise. 
    '''

    @staticmethod
    def hasConflict(timeBlockList):
        lastTimeBlock = None
        for timeBlock in timeBlockList:
            if lastTimeBlock is not None and timeBlock._start < lastTimeBlock._end:
                return True
            lastTimeBlock = timeBlock

        return False
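
    # A minimal usage sketch (hypothetical ScheduleTimeBlock instances with
    # _start/_end attributes): blocks spanning 9:00-10:00 and 9:30-11:00 overlap,
    # so hasConflict(sortedTimeBlockList([a, b])) returns True, while back-to-back
    # blocks such as 9:00-10:00 and 10:00-11:00 do not conflict, since the
    # comparison uses a strict "<".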

    '''
    Parses a timezone formatted like GMT-0600 and returns a pytz timezone for it. 
    Currently does not support half hour differences from GMT, as these are not included in the 
    list of supported timezones in pytz for Etc/GMT-X. While they do exist for the specific place names, the conversion 
    is much more difficult.
    '''

    @staticmethod
    def tzFromGmtString(gmtStr):
        tzLocationRegex = r"\S+\/\S+"  # matches things like Canada/Mountain
        matches = re.search(tzLocationRegex, gmtStr)
        if matches:
            try:
                fullStr = matches.group(0)
                tz = timezone(fullStr)
                return tz
            except Exception as e:
                raise InvalidTimeZoneStringException(
                    "Invalid timezone string: " + gmtStr +
                    ", causing error: " + str(e))

        tzRegex = r"GMT([+-])(\d{4})"  # matches things like GMT-0600
        gmtTimezoneBase = "Etc/GMT"

        matches = re.search(tzRegex, gmtStr)

        if not matches:
            raise InvalidTimeZoneStringException("Invalid timezone string: " +
                                                 gmtStr)

        plusminus = matches.group(1)
        offset = int(matches.group(2)) / 100

        # pytz only provides Etc/GMT+0 .. Etc/GMT+12 and Etc/GMT-0 .. Etc/GMT-14,
        # so after the sign flip below the valid inputs are GMT-1200 .. GMT+1400
        if (offset != int(offset) or (plusminus == "+" and offset > 14)
                or (plusminus == "-" and offset > 12)):
            raise InvalidTimeZoneStringException(
                "Offset must be an integer number of hours between -12 and +14")

        # Reverse sign since POSIX timezones have opposite notion of ISO 8601 convention
        # See: https://en.wikipedia.org/wiki/Tz_database
        if plusminus == "+":
            plusminus = "-"
        elif plusminus == "-":
            plusminus = "+"

        return timezone(gmtTimezoneBase + plusminus + str(int(offset)))
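
    # A usage sketch of the parsing above (inputs hypothetical):
    #   tzFromGmtString("GMT-0600")        -> pytz timezone "Etc/GMT+6" (UTC-6)
    #   tzFromGmtString("Canada/Mountain") -> pytz timezone "Canada/Mountain"
    #   tzFromGmtString("GMT-0630")        -> raises InvalidTimeZoneStringException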

    '''
    Converts a pytz timezone of the type Etc/GMT-X to a string formatted like "GMT+XXXX",
    reversing the POSIX sign flip applied by tzFromGmtString so that the two
    functions are inverses of each other.
    '''

    @staticmethod
    def tzToGmtString(tz):
        tzGmtNameRegex = r"Etc/GMT([+-])(\d+)"
        matches = re.search(tzGmtNameRegex, tz.zone)

        if matches:
            plusminus = matches.group(1)
            offset = int(matches.group(2))

            # Reverse the sign again: Etc/GMT+6 is UTC-6, so it serializes
            # as "GMT-0600"
            plusminus = "-" if plusminus == "+" else "+"

            # convert the offset to the 4 digit form
            offset_str = str(offset * 100).zfill(4)

            return "GMT" + plusminus + offset_str

        else:
            return tz.zone
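
    # Round-trip sketch: tzToGmtString(timezone("Etc/GMT+6")) yields "GMT-0600",
    # and tzFromGmtString("GMT-0600") returns Etc/GMT+6 again, so a schedule's
    # timezone survives the toDict/fromDict cycle below unchanged.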

    '''
    Returns a JSON-like dictionary representation of the BlindsSchedule object.
    '''

    @staticmethod
    def toDict(schedule):
        jsonDict = dict()
        if schedule is not None:
            jsonDict["default_mode"] = schedule._default_mode.name

            jsonDict["default_pos"] = schedule._default_pos

            jsonDict["timezone"] = BlindsSchedule.tzToGmtString(
                schedule._timezone)

            jsonDict["schedule"] = dict()
            for day in BlindsSchedule.DAYS_OF_WEEK:
                jsonDict["schedule"][day] = list(
                    map(lambda x: ScheduleTimeBlock.toDict(x),
                        schedule._schedule[day]))

        return jsonDict

    '''
    Returns a BlindsSchedule object based on the provided JSON-like dictionary. 
    Raises InvalidBlindsScheduleException for missing keys.
    '''

    @staticmethod
    def fromDict(jsonDict):
        try:
            default_mode = BlindMode[jsonDict["default_mode"]]
            default_pos = jsonDict["default_pos"]
            # account for the integer case
            if default_pos is not None:
                default_pos = int(default_pos)

            tz = BlindsSchedule.tzFromGmtString(jsonDict["timezone"])

            parsed_sched = jsonDict["schedule"]

            blindsSchedule = BlindsSchedule(default_mode,
                                            default_pos,
                                            timezone=tz)
            for day in BlindsSchedule.DAYS_OF_WEEK:
                blindsSchedule._schedule[day] = list(
                    map(lambda x: ScheduleTimeBlock.fromDict(x),
                        parsed_sched[day]))

            blindsSchedule.validate()
            blindsSchedule.sortScheduleBlocks()
            blindsSchedule.checkHasNoTimeConflicts()

            return blindsSchedule

        except KeyError as error:
            raise InvalidBlindsScheduleException("Missing key in json: " +
                                                 str(error))

    '''
    Returns a JSON representation of a valid BlindsSchedule object.
    Provides additional keyword arguments for pretty printing and sorting the json output's keys. 
    '''

    @staticmethod
    def toJson(schedule, pretty=False, sortKeys=False):
        if pretty:
            return json.dumps(BlindsSchedule.toDict(schedule),
                              sort_keys=sortKeys,
                              indent=4)
        else:
            return json.dumps(BlindsSchedule.toDict(schedule),
                              sort_keys=sortKeys)

    '''
    Parses a json representation of the schedule and returns a new BlindsSchedule object. 
    Raises InvalidBlindsScheduleException for missing keys in the JSON string. 
    '''

    @staticmethod
    def fromJson(scheduleJson):
        parsedDict = json.loads(scheduleJson)

        return BlindsSchedule.fromDict(parsedDict)
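
    # A minimal round-trip sketch (assuming `sched` is a valid BlindsSchedule):
    #   as_json = BlindsSchedule.toJson(sched, pretty=True)
    #   restored = BlindsSchedule.fromJson(as_json)
    # `restored` is re-validated, sorted and conflict-checked inside fromDict.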
Example #48
import bisect
import os
import dateutil
import tzlocal

DEFAULT_TIME_ZONE_NAME = tzlocal.get_localzone().zone  # 'Europe/London'
TIME_ZONE_DATA_SOURCE = '/usr/share/zoneinfo/'


class TimezoneError(Exception):
    pass


class tzfile(dateutil.tz.tzfile):
    def _find_ttinfo(self, dtm, laststd=0):
        """Faster version of parent class's _find_ttinfo() as this uses bisect rather than a linear search."""
        if dtm is None:
            # This will happen, for example, when a datetime.time object gets utcoffset() called.
            raise ValueError(
                'tzinfo object can not calculate offset for date %s' % dtm)
        ts = ((dtm.toordinal() - dateutil.tz.EPOCHORDINAL) * 86400 +
              dtm.hour * 3600 + dtm.minute * 60 + dtm.second)
        idx = bisect.bisect_right(self._trans_list, ts)
        if len(self._trans_list) == 0 or idx == len(self._trans_list):
            return self._ttinfo_std
        if idx == 0:
            return self._ttinfo_before
        if laststd:
            # laststd: walk back to the most recent standard-time (non-DST)
            # transition, mirroring the lookup in dateutil's own tzfile
            while idx > 0:
                tti = self._trans_idx[idx - 1]
                if not tti.isdst:
                    return tti
                idx -= 1
            return self._ttinfo_std
        return self._trans_idx[idx - 1]
Example #49
File: commands.py Project: bbeck/stack
# the tz default is evaluated once, at function definition time
def _log_stack_event(event, tz=tzlocal.get_localzone()):
    print(event["Timestamp"].astimezone(tz).strftime("%Y-%m-%dT%H:%M:%S"),
          event["ResourceStatus"],
          event["ResourceType"],
          event["LogicalResourceId"],
          event.get("ResourceStatusReason", ""))
Example #50
def cli(ctx, db):
    ctx.obj = LocalCalendar(open_db(db), tzlocal.get_localzone())
Example #51
# http://stackoverflow.com/questions/36932/how-can-i-represent-an-enum-in-python
def enum(*sequential, **named):
    enums = dict(zip(sequential, range(len(sequential))), **named)
    return type('Enum', (), enums)


def tuple_builder(answer=None, text=None):
    # instantiate the namedtuple rather than mutating the class itself
    Response = namedtuple('Response', ['answer', 'text'])
    return Response(answer=answer, text=text if text is not None else '')
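
# Usage sketch: tuple_builder(answer=True, text='ok') returns a Response with
# .answer == True and .text == 'ok'; enum('RED', 'GREEN', BLUE=10) builds a
# class with RED == 0, GREEN == 1 and BLUE == 10.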


OPENING_HOUR = 10
CLOSING_HOUR = 18
LOCAL_TZ = tzlocal.get_localzone()


def during_business_hours(time):
    '''
    Checks if a given time is within business hours. Currently is true
    from 10:00 to 17:59. Also checks to make sure that the day is a weekday.

    Args:
        time (Datetime): A datetime object to check.
    '''
    if time.tzinfo is not None:
        here = time.astimezone(LOCAL_TZ)
    else:
        here = time.replace(tzinfo=pytz.utc).astimezone(LOCAL_TZ)
    return (OPENING_HOUR <= here.hour < CLOSING_HOUR
            and here.weekday() < 5)  # weekday() < 5 covers Monday to Friday
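
# A usage sketch (hypothetical): during_business_hours(datetime(2020, 1, 6, 11, 0))
# treats the naive timestamp as UTC, converts it to LOCAL_TZ, and returns True
# only if that local time falls on a weekday between 10:00 and 17:59.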
Example #52
 def end_date(self):
     date_string = self.vacation_json.get("endDate")
     utc_date = dateutil.parser.parse(date_string)
     local_tz = get_localzone()
     return utc_date.replace(tzinfo=timezone.utc).astimezone(local_tz)
Example #53
def elastic_to_dataframe(es,
                         index,
                         query="*",
                         start=None,
                         end=None,
                         sort=None,
                         timestampfield="@timestamp",
                         datecolumns=[],
                         _source=[],
                         size=None,
                         scrollsize=5000):
    """Convert an elastic collection to a dataframe.
    Parameters:
    es -- The elastic connection object
    query -- (optional) The elastic query
    start -- (optional) The time range start if any
    end -- (optional) The time range start if any
    sort -- (optional) The column we want to sort on
    timestampfield -- (optional) The timestamp field used by the start and stop parameters
    datecolumns -- (optional) A collection of columns that must be converted to dates
    _source -- (optional) columns we want to retrieve
    size -- (optional) The max number of recrods we want to retrieve
    scrollsize -- (optional) The size of the scroll to use
    """

    logger = logging.getLogger()
    array = []
    recs = []
    scroll_ids = []

    version = int(get_es_info(es).get('version').get('number').split('.')[0])

    finalquery = {
        "_source": _source,
        "query": {
            "bool": {
                "must": [{
                    "query_string": {
                        "query": query,
                        "analyze_wildcard": True
                    }
                }]
            }
        }
    }

    if start is not None:
        finalquery["query"]["bool"]["must"].append({
            "range": {
                timestampfield: {
                    "gte": int(start.timestamp()) * 1000,
                    "lte": int(end.timestamp()) * 1000,
                    "format": "epoch_millis"
                }
            }
        })

    if sort is not None:
        finalquery["sort"] = sort

    logger.debug(finalquery)

    if size is not None and size < scrollsize:
        scrollsize = size

    res = es.search(index=index, size=scrollsize, scroll='1m', body=finalquery)

    sid = res['_scroll_id']
    scroll_ids.append(sid)
    scroll_size = None
    if version < 7:
        scroll_size = res['hits']['total']
    else:
        scroll_size = res['hits']['total']['value']

    array = []
    for res2 in res["hits"]["hits"]:
        res2["_source"]["_id"] = res2["_id"]
        res2["_source"]["_index"] = res2["_index"]

        array.append(res2["_source"])

    recs = len(res['hits']['hits'])

    break_flag = False

    while (scroll_size > 0):
        res = es.scroll(scroll_id=sid, scroll='2m')
        sid = res['_scroll_id']
        scroll_ids.append(sid)
        scroll_size = len(res['hits']['hits'])
        logger.info("scroll size: " + str(scroll_size))
        logger.info("Next page:" + str(len(res['hits']['hits'])))
        recs += len(res['hits']['hits'])

        for res2 in res["hits"]["hits"]:
            if size is not None and len(array) >= size:
                break_flag = True
                break

            res2["_source"]["_id"] = res2["_id"]
            res2["_source"]["_index"] = res2["_index"]
            array.append(res2["_source"])

        if break_flag:
            break

    df = pd.DataFrame(array)

    if len(datecolumns) > 0 and len(df) > 0:
        containertimezone = pytz.timezone(tzlocal.get_localzone().zone)

        for col in datecolumns:
            if col not in df.columns:
                df[col] = None
            else:
                if df[col].dtype == "int64":
                    df[col] = pd.to_datetime(
                        df[col], unit='ms',
                        utc=True).dt.tz_convert(containertimezone)
                else:
                    df[col] = pd.to_datetime(
                        df[col], utc=True).dt.tz_convert(containertimezone)

    es.clear_scroll(body={'scroll_id': scroll_ids})
    return df
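
# A minimal usage sketch (assumes an Elasticsearch client `es` and a matching
# index pattern; names are illustrative only):
#   df = elastic_to_dataframe(es, "logs-*", query="status:200",
#                             datecolumns=["@timestamp"], size=10000)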
Example #54
    def run(self):
        logger.info('Starting recording\n'
                    'Channel name      => {0}\n'
                    'Channel number    => {1}\n'
                    'Program title     => {2}\n'
                    'Start date & time => {3}\n'
                    'End date & time   => {4}'.format(self._recording.channel_name,
                                                      self._recording.channel_number,
                                                      self._recording.program_title,
                                                      self._recording.start_date_time_in_utc.astimezone(
                                                          get_localzone()).strftime('%Y-%m-%d %H:%M:%S'),
                                                      self._recording.end_date_time_in_utc.astimezone(
                                                          get_localzone()).strftime('%Y-%m-%d %H:%M:%S')))
        actual_start_date_time_in_utc = datetime.now(pytz.utc)

        self._create_recording_directory_tree()
        persisted_recording_id = '{0}'.format(uuid.uuid4())
        self._save_manifest_file(None,
                                 actual_start_date_time_in_utc.strftime('%Y-%m-%d %H:%M:%S%z'),
                                 persisted_recording_id,
                                 None,
                                 'Started')

        for number_of_times_attempted_to_download_playlist_m3u8 in range(1, 11):
            try:
                # <editor-fold desc="Download playlist.m3u8">
                playlist_m3u8_content = SmoothStreamsProxy.download_playlist_m3u8('127.0.0.1',
                                                                                  '/live/playlist.m3u8',
                                                                                  self._recording.channel_number,
                                                                                  self._id,
                                                                                  'hls')
                # </editor-fold>

                self._save_manifest_file(None,
                                         actual_start_date_time_in_utc.strftime('%Y-%m-%d %H:%M:%S%z'),
                                         persisted_recording_id,
                                         None,
                                         'In Progress')

                playlist_m3u8_object = m3u8.loads(playlist_m3u8_content)
                chunks_url = '/live/{0}'.format(playlist_m3u8_object.data['playlists'][0]['uri'])

                break
            except requests.exceptions.HTTPError:
                time_to_sleep_before_next_attempt = math.ceil(
                    number_of_times_attempted_to_download_playlist_m3u8 / 5) * 5

                logger.error('Attempt #{0}\n'
                             'Failed to download playlist.m3u8\n'
                             'Will try again in {1} seconds'.format(number_of_times_attempted_to_download_playlist_m3u8,
                                                                    time_to_sleep_before_next_attempt))

                time.sleep(time_to_sleep_before_next_attempt)
        else:
            logger.error('Exhausted attempts to download playlist.m3u8')

            logger.info('Canceling recording\n'
                        'Channel name      => {0}\n'
                        'Channel number    => {1}\n'
                        'Program title     => {2}\n'
                        'Start date & time => {3}\n'
                        'End date & time   => {4}'.format(self._recording.channel_name,
                                                          self._recording.channel_number,
                                                          self._recording.program_title,
                                                          self._recording.start_date_time_in_utc.astimezone(
                                                              get_localzone()).strftime('%Y-%m-%d %H:%M:%S'),
                                                          self._recording.end_date_time_in_utc.astimezone(
                                                              get_localzone()).strftime('%Y-%m-%d %H:%M:%S')))

            self._save_manifest_file(datetime.now(pytz.utc).strftime('%Y-%m-%d %H:%M:%S%z'),
                                     actual_start_date_time_in_utc.strftime('%Y-%m-%d %H:%M:%S%z'),
                                     persisted_recording_id,
                                     None,
                                     'Canceled')

            return

        vod_playlist_m3u8_object = None
        downloaded_segment_file_names = []

        while not self._stop_recording_event.is_set():
            try:
                # <editor-fold desc="Download chunks.m3u8">
                chunks_url_components = urllib.parse.urlparse(chunks_url)
                chunks_query_string_parameters = dict(urllib.parse.parse_qsl(chunks_url_components.query))

                channel_number_parameter_value = chunks_query_string_parameters.get('channel_number', None)
                client_uuid_parameter_value = chunks_query_string_parameters.get('client_uuid', None)
                nimble_session_id_parameter_value = chunks_query_string_parameters.get('nimblesessionid', None)
                smooth_streams_hash_parameter_value = chunks_query_string_parameters.get('wmsAuthSign', None)

                nimble_session_id_parameter_value = SmoothStreamsProxy.map_nimble_session_id(
                    '127.0.0.1',
                    chunks_url_components.path,
                    channel_number_parameter_value,
                    client_uuid_parameter_value,
                    nimble_session_id_parameter_value,
                    smooth_streams_hash_parameter_value)

                chunks_m3u8_content = SmoothStreamsProxy.download_chunks_m3u8('127.0.0.1',
                                                                              chunks_url_components.path,
                                                                              channel_number_parameter_value,
                                                                              client_uuid_parameter_value,
                                                                              nimble_session_id_parameter_value)
                # </editor-fold>
                chunks_m3u8_download_date_time_in_utc = datetime.now(pytz.utc)
                chunks_m3u8_total_duration = 0
                chunks_m3u8_object = m3u8.loads(chunks_m3u8_content)

                if not vod_playlist_m3u8_object:
                    vod_playlist_m3u8_object = chunks_m3u8_object

                indices_of_skipped_segments = []
                for (segment_index, segment) in enumerate(chunks_m3u8_object.segments):
                    segment_url = '/live/{0}'.format(segment.uri)
                    segment_url_components = urllib.parse.urlparse(segment_url)
                    segment_query_string_parameters = dict(urllib.parse.parse_qsl(segment_url_components.query))
                    segment_file_name = re.sub(r'(/.*)?(/)(.*\.ts)', r'\3', segment_url_components.path)

                    chunks_m3u8_total_duration += segment.duration

                    if segment_file_name not in downloaded_segment_file_names:
                        try:
                            # <editor-fold desc="Download ts file">
                            channel_number_parameter_value = segment_query_string_parameters.get('channel_number', None)
                            client_uuid_parameter_value = segment_query_string_parameters.get('client_uuid', None)
                            nimble_session_id_parameter_value = segment_query_string_parameters.get('nimblesessionid',
                                                                                                    None)

                            ts_file_content = SmoothStreamsProxy.download_ts_file('127.0.0.1',
                                                                                  segment_url_components.path,
                                                                                  channel_number_parameter_value,
                                                                                  client_uuid_parameter_value,
                                                                                  nimble_session_id_parameter_value)
                            # </editor-fold>
                            logger.debug('Downloaded segment\n'
                                         'Segment => {0}'.format(segment_file_name))

                            downloaded_segment_file_names.append(segment_file_name)
                            self._save_segment_file(segment_file_name, ts_file_content)

                            segment.uri = '{0}?client_uuid={1}&program_title={2}'.format(
                                segment_file_name,
                                client_uuid_parameter_value,
                                urllib.parse.quote(self._recording.base_recording_directory))

                            if segment not in vod_playlist_m3u8_object.segments:
                                vod_playlist_m3u8_object.segments.append(segment)
                        except requests.exceptions.HTTPError:
                            logger.error('Failed to download segment\n'
                                         'Segment => {0}'.format(segment_file_name))
                    else:
                        logger.debug('Skipped segment since it was already downloaded\n'
                                     'Segment => {0} '.format(segment_file_name))

                        indices_of_skipped_segments.append(segment_index)

                # delete in reverse order so earlier deletions do not shift the
                # indices of the segments still to be removed
                for segment_index_to_delete in reversed(indices_of_skipped_segments):
                    del chunks_m3u8_object.segments[segment_index_to_delete]
            except requests.exceptions.HTTPError:
                logger.error('Failed to download chunks.m3u8')

                return

            current_date_time_in_utc = datetime.now(pytz.utc)
            wait_duration = chunks_m3u8_total_duration - (
                    current_date_time_in_utc - chunks_m3u8_download_date_time_in_utc).total_seconds()
            if wait_duration > 0:
                self._stop_recording_event.wait(wait_duration)

        if vod_playlist_m3u8_object:
            vod_playlist_m3u8_object.playlist_type = 'VOD'
            self._save_playlist_file('playlist.m3u8', '{0}\n'
                                                      '{1}'.format(vod_playlist_m3u8_object.dumps(), '#EXT-X-ENDLIST'))

        self._save_manifest_file(datetime.now(pytz.utc).strftime('%Y-%m-%d %H:%M:%S%z'),
                                 actual_start_date_time_in_utc.strftime('%Y-%m-%d %H:%M:%S%z'),
                                 persisted_recording_id,
                                 'playlist.m3u8',
                                 'Completed')

        SmoothStreamsProxy.delete_active_recording(self._recording)

        logger.info('Finished recording\n'
                    'Channel name      => {0}\n'
                    'Channel number    => {1}\n'
                    'Program title     => {2}\n'
                    'Start date & time => {3}\n'
                    'End date & time   => {4}'.format(self._recording.channel_name,
                                                      self._recording.channel_number,
                                                      self._recording.program_title,
                                                      self._recording.start_date_time_in_utc.astimezone(
                                                          get_localzone()).strftime('%Y-%m-%d %H:%M:%S'),
                                                      self._recording.end_date_time_in_utc.astimezone(
                                                          get_localzone()).strftime('%Y-%m-%d %H:%M:%S')))
Example #55
    def GDAX_LoadHistoricData(self, startTimestamp, stopTimestamp):

        print("Init to retrieve Historic Data from %s to %s" %
              (datetime.fromtimestamp(startTimestamp).isoformat(),
               datetime.fromtimestamp(stopTimestamp).isoformat()))
        print("---------")
        # Reset read index are we will overwrite the buffer
        self.HistoricDataReadIndex = 0

        local_tz = get_localzone()
        print("GDAX - Local timezone found: %s" % local_tz)
        tz = pytz.timezone(str(local_tz))

        stopSlice = 0
        startSlice = startTimestamp
        self.HistoricDataRaw = []
        self.HistoricData = []

        # Progression measurement
        granularityInSec = round(
            self.GDAX_HISTORIC_DATA_MIN_GRANULARITY_IN_SEC)
        nbIterationsToRetrieveEverything = (
            (stopTimestamp - startTimestamp) /
            (round(self.GDAX_HISTORIC_DATA_MIN_GRANULARITY_IN_SEC))) / round(
                self.GDAX_MAX_HISTORIC_PRICES_ELEMENTS)
        print("GDAX - Nb Max iterations to retrieve everything: %s" %
              nbIterationsToRetrieveEverything)
        nbLoopIterations = 0

        while (stopSlice < stopTimestamp):

            stopSlice = startSlice + self.GDAX_MAX_HISTORIC_PRICES_ELEMENTS * granularityInSec
            if (stopSlice > stopTimestamp):
                stopSlice = stopTimestamp
            print("GDAX - Start TS : %s  stop TS : %s" %
                  (startSlice, stopSlice))

            startTimestampSliceInISO = datetime.fromtimestamp(startSlice,
                                                              tz).isoformat()
            stopTimestampSliceInISO = datetime.fromtimestamp(stopSlice,
                                                             tz).isoformat()
            print("GDAX - Retrieving Historic Data from %s to %s" %
                  (startTimestampSliceInISO, stopTimestampSliceInISO))
            if (self.IsConnectedAndOperational == "True"):
                # this branch uses the authenticated client; the original log
                # lines had the private/public labels swapped
                print(
                    "GDAX - Using private client to retrieve historic prices")
                HistoricDataSlice = self.clientAuth.get_product_historic_rates(
                    self.productStr,
                    granularity=granularityInSec,
                    start=startTimestampSliceInISO,
                    end=stopTimestampSliceInISO)
                # Only sleep if reloop condition is met
                if (stopSlice < stopTimestamp):
                    time.sleep(0.350)
            else:
                HistoricDataSlice = self.clientPublic.get_product_historic_rates(
                    self.productStr,
                    granularity=granularityInSec,
                    start=startTimestampSliceInISO,
                    end=stopTimestampSliceInISO)
                # Only sleep if reloop condition is met
                if (stopSlice < stopTimestamp):
                    time.sleep(0.250)
                print("GDAX - Using public client to retrieve historic prices")

            print("GDAX - Size of HistoricDataSlice: %s" %
                  len(HistoricDataSlice))

            try:  # sometimes reversed() crashes here; perhaps no data in the slice?
                for slice in reversed(HistoricDataSlice):
                    self.HistoricDataRaw.append(slice)
            except BaseException as e:
                print("GDAX - Exception when reversing historic data slice: %s" % e)
            #print("Historic : %s " % HistoricDataSlice)

            startSlice = stopSlice  # Prepare next iteration

            # Progress report
            nbLoopIterations = nbLoopIterations + 1
            percentage = round(nbLoopIterations * 100 /
                               nbIterationsToRetrieveEverything)
            if (percentage > 100):
                percentage = 100
            self.theUIGraph.UIGR_updateLoadingDataProgress(str(percentage))

        # Clean buffer so that only data in the chronological order remains
        print(
            "GDAX - LoadHistoricData - Cleaning buffer. Nb elements before cleaning : %s"
            % len(self.HistoricDataRaw))
        tempIterationIndex = 0
        currentBrowsedTimestamp = 0
        while (tempIterationIndex < len(self.HistoricDataRaw)):
            if (self.HistoricDataRaw[tempIterationIndex][0] <=
                    currentBrowsedTimestamp + 1):
                # Useless data : do not copy into final buffer
                pass
            else:
                currentBrowsedTimestamp = self.HistoricDataRaw[
                    tempIterationIndex][0]
                self.HistoricData.append(
                    self.HistoricDataRaw[tempIterationIndex])

            #print(self.HistoricData[tempIterationIndex][0])
            tempIterationIndex = tempIterationIndex + 1

# DEBUG
#         tempIterationIndex = 0
#         while (tempIterationIndex < len(self.HistoricData)):
#             print(self.HistoricData[tempIterationIndex][0])
#             tempIterationIndex = tempIterationIndex + 1
#
        print(
            "GDAX - %s Historical samples have been retrieved (after cleaning)"
            % len(self.HistoricData))
Example #56
    def _need_build(self, service: str, inspect_data: dict) -> bool:
        """Check if service need build.

        This check use the following parameters from yml file:

        **watch_for_build_using_files**

            Will check if any of the files listed in this parameter has
            its modification date more recent than the service docker image build date.

        **watch_for_build_using_git**

            Will check if any of the services listed in this parameter has
            its last commit date more recent than the service docker image build date.

        :param service: service name as defined in docker-compose yml.

        :param inspect_data: docker inspect data.

        :return: bool
        """
        test_date = None
        image_name = inspect_data["Config"]["Image"]
        image_data = self.run(
            "docker inspect {} 2>/dev/null".format(image_name),
            get_stdout=True)
        if image_data:
            image_data = json.loads(image_data)[0]

            # Get current UTC offset
            time_now = datetime.datetime.now()
            time_now_utc = datetime.datetime.utcnow()
            time_offset_seconds = (time_now - time_now_utc).total_seconds()
            utc_offset = time.gmtime(abs(time_offset_seconds))
            utc_string = "{}{}".format(
                "-" if time_offset_seconds < 0 else "+",
                time.strftime("%H%M", utc_offset),
            )
            date = image_data.get("Created")[:-4] + " " + utc_string
            test_date = datetime.datetime.strptime(date,
                                                   "%Y-%m-%dT%H:%M:%S.%f %z")

        label = inspect_data["Config"]["Labels"].get(
            "com.docker.compose.service")
        if not label:
            return False
        full_path = None
        if test_date and self.compose.get_from_service(label, "build"):
            build_path = self.compose.get_from_service(label, "build")
            if isinstance(build_path, dict):
                build_path = build_path.get("context")
            full_path = get_path(build_path, self.compose.base_path)
            list_dates = [
                datetime.datetime.fromtimestamp(os.path.getmtime(
                    os.path.join(full_path, file)),
                                                tz=get_localzone())
                for file in self.files_to_watch
                if os.path.isfile(os.path.join(full_path, file))
            ]
            if list_dates:
                if max(list_dates) > test_date:
                    return True

        # Check for build using commit
        # Ex.: 2018-02-23 18:31:45 -0300
        if service in self.services_to_check_git and full_path:
            git_log = self.run(
                'cd {} && git log -1 --pretty=format:"%cd" --date=iso'.format(
                    full_path),
                get_stdout=True,
            )
            date_fmt = "%Y-%m-%d %H:%M:%S %z"
            if git_log:
                commit_date = datetime.datetime.strptime(git_log, date_fmt)
                if commit_date > test_date:
                    return True

        return False
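
# A hedged sketch of the yml parameters this check consumes (exact layout
# assumed, since the surrounding config loader is not shown here):
#   web:
#     build: ./web
#     watch_for_build_using_files:
#       - requirements.txt
#       - Dockerfile
#     watch_for_build_using_git:
#       - web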
Example #57
class TestBaseConfig(unittest.TestCase):
    base_dir = os.path.dirname(os.path.dirname(__file__))
    config_dir = os.path.join(base_dir, 'tests/fixtures/configs')
    args = ['--config-file', '-s', '-q', 'example_prj']
    if (3, 3) <= sys.version_info <= (3, 4):
        django_version = '1.8'
    else:
        django_version = DJANGO_VERSION_MATRIX['lts']
    config_fixture = Namespace(
        **{
            'bootstrap': False,
            'cms_version': CMS_VERSION_MATRIX['lts'],
            'db': 'sqlite://localhost/project.db',
            'django_version': django_version,
            'dump_reqs': False,
            'extra_settings': None,
            'filer': True,
            'i18n': 'yes',
            'languages': ['en'],
            'no_db_driver': False,
            'no_deps': False,
            'noinput': True,
            'no_sync': False,
            'no_user': False,
            'permissions': 'yes',
            'pip_options': '',
            'plugins': False,
            'project_directory': os.path.abspath('.'),
            'project_name': 'example_prj',
            'requirements_file': None,
            'reversion': 'yes',
            'skip_project_dir_check': True,
            'starting_page': False,
            'template': None,
            'templates': False,
            'timezone': get_localzone(),
            'use_timezone': 'yes',
            'utc': False,
            'no_plugins': False,
            'verbose': False,
            'wizard': False,
            'delete_project_dir': False,
        })

    def __init__(self, *args, **kwargs):
        self.config_not_exists = self.conf('config-dump.ini')
        super(TestBaseConfig, self).__init__(*args, **kwargs)

    def tearDown(self):
        if os.path.isfile(self.config_not_exists):
            os.remove(self.config_not_exists)

    def conf(self, filename):
        return os.path.join(self.config_dir, filename)

    def unused(self, config_data):
        """Remove not configurable keys."""
        for attr in ('aldryn', 'config_dump', 'config_file', 'db_driver',
                     'db_parsed', 'project_path', 'settings_path',
                     'urlconf_path'):
            delattr(config_data, attr)
        # When `requirements` arg is used then requirements attr isn't set.
        if hasattr(config_data, 'requirements'):
            delattr(config_data, 'requirements')

    def test_parse_config_file(self, *args):
        """Tests .config.__init__._parse_config_file function."""
        with self.assertRaises(SystemExit) as error:
            config.parse(self.args[0:1] +
                         [self.conf('config-not-exists.ini')] + self.args[1:])
            self.assertEqual(7, error.exception.code)

        args = self.args[0:1] + [self.conf('config-01.ini')] + self.args[1:]
        config_data = config.parse(args)
        self.unused(config_data)
        self.assertEqual(self.config_fixture, config_data)

        test_data = [
            ('config-02.ini', 'db', 'postgres://*****:*****@host:54321/dbname'),
            ('config-03.ini', 'i18n', 'no'),
            ('config-04.ini', 'use_timezone', 'no'),
            ('config-05.ini', 'timezone', 'Europe/London'),
            ('config-06.ini', 'reversion', 'no'),
            ('config-07.ini', 'permissions', 'no'),
            ('config-08.ini', None, (('i18n', 'no'), ('languages', ['ru']))),
            ('config-09.ini', None, (('i18n', 'yes'), ('languages',
                                                       ['en', 'ru']))),
            ('config-10.ini', 'django_version', '1.8'),
            ('config-11.ini', 'project_directory', '/test/me'),
            ('config-12.ini', 'bootstrap', True),
            ('config-13.ini', 'templates', '.'),
            ('config-14.ini', 'starting_page', True),
            ('config-15.ini', 'plugins', True),
            ('config-16.ini', 'dump_reqs', True),
            ('config-17.ini', 'noinput', True),
            ('config-18.ini', 'filer', True),
            ('config-19.ini', 'requirements_file', '/test/reqs'),
            ('config-20.ini', 'no_deps', True),
            ('config-21.ini', 'no_db_driver', True),
            ('config-22.ini', 'no_sync', True),
            ('config-23.ini', 'no_user', True),
            ('config-24.ini', 'template', '/test/template'),
            ('config-25.ini', 'extra_settings', '/test/extra_settings'),
            ('config-26.ini', 'skip_project_dir_check', True),
            ('config-27.ini', 'utc', True),
            ('config-28.ini', 'no_plugins', True),
            ('config-30.ini', 'verbose', True),
            ('config-32.ini', 'delete_project_dir', True),
        ]
        fixture = copy.copy(self.config_fixture)
        for filename, key, val in test_data:
            if type(val) == tuple:
                for subkey, subval in val:
                    setattr(fixture, subkey, subval)  # Change value.
            else:
                setattr(fixture, key, val)  # Change value.
            args = self.args[0:1] + [self.conf(filename)
                                     ] + self.args[1:]  # Load new config.
            config_data = config.parse(args)
            self.unused(config_data)
            self.assertEqual(
                fixture,
                config_data)  # Check if config value and changed value equals.

    @patch('sys.stdout')
    @patch('sys.stderr')
    def test_dump_config_file(self, *args):
        """Tests .config.ini.dump_config_file function."""
        config_exists = self.conf('config-01.ini')

        with self.assertRaises(SystemExit) as error:
            config.parse(['--config-dump', config_exists] + self.args[1:] +
                         ['-p', '.'])
            self.assertEqual(8, error.exception.code)

        config.parse(['--config-dump', self.config_not_exists] +
                     self.args[1:] + ['-p', '.'])
        self.assertTrue(os.path.isfile(self.config_not_exists))

        fixture = copy.copy(self.config_fixture)
        setattr(fixture, 'timezone', get_localzone().zone)
        # Load dumped config.
        args = self.args[0:1] + [self.config_not_exists] + self.args[1:]
        config_data = config.parse(args)
        self.unused(config_data)
        self.assertEqual(fixture, config_data)
Example #58
	dialog = xbmcgui.Dialog()
	index = dialog.select("Select zone", cs)
	if index != -1:
		if index == 0:
			xbmc.executebuiltin('RunPlugin(%s)'%addon.build_plugin_url({'mode': 'auto_set_tz'}))
		else:
			zs = constants.get_zones_by_cat(cs[index])
			index = dialog.select("Select zone", zs)
			if index != -1:
				control.set_setting('timezone_new', str(constants.get_zone_idx(zs[index])))
				control.infoDialog('Timezone set: ' + str(zs[index]))
				control.refresh()

elif mode[0]=='auto_set_tz':
	from tzlocal import get_localzone # $ pip install tzlocal
	local_tz = get_localzone()
	control.set_setting('timezone_new', str(constants.get_zone_idx(local_tz.zone)))
	control.infoDialog('Timezone set: ' + local_tz.zone)
	control.refresh()



##################################################################################################################################
##################################################################################################################################

elif mode[0]=='reddit':
	from resources.lib.modules import subreddits
	items = subreddits.get_subreddits()
	for item in items:

		delete = addon.build_plugin_url({'mode':'delete_subreddit','reddit':item})
Example #59
from datetime import datetime
from enum import Enum
from typing import Optional, Sequence, List
from tzlocal import get_localzone

from mongoengine import DateTimeField, Document, FloatField, StringField, connect

from coinbase.trader.constant import Exchange, Interval
from coinbase.trader.object import BarData, TickData

from .database import BaseDatabaseManager, Driver, DB_TZ

LOCAL_TZ = get_localzone()


def init(_: Driver, settings: dict):
    database = settings["database"]
    host = settings["host"]
    port = settings["port"]
    username = settings["user"]
    password = settings["password"]
    authentication_source = settings["authentication_source"]

    if not username:  # if username == '' or None, skip username
        username = None
        password = None
        authentication_source = None

    connect(
        db=database,
        host=host,
        port=port,
        # credentials are optional; mongoengine accepts None values here
        username=username,
        password=password,
        authentication_source=authentication_source,
    )
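
# A minimal usage sketch (settings values hypothetical; the Driver member name
# is assumed from the import above):
#   init(Driver.MONGODB, {"database": "quant", "host": "localhost", "port": 27017,
#        "user": "", "password": "", "authentication_source": "admin"})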
Example #60
def format_exception_details(exc_type,
                             exc_value,
                             exc_traceback,
                             error_uid=None):
    """
	Format exception details to be show to a human. This should include enough
	information about the type of error that occurred and the system on which
	it was triggered to allow someone to attempt to debug and fix it. The first
	three parameters to this function directly correspond to the values
	returned from the :py:func:`sys.exc_info` function.

	:param exc_type: The type of the exception.
	:param exc_value: The exception instance.
	:param exc_traceback: The traceback object corresponding to the exception.
	:param error_uid: A unique identifier for this exception.
	:type error_uid: str, :py:class:`uuid.UUID`
	:return: A formatted message containing the details about the exception and environment.
	:rtype: str
	"""
    if isinstance(error_uid, uuid.UUID):
        error_uid = str(error_uid)
    elif error_uid is None:
        error_uid = 'N/A'
    elif not isinstance(error_uid, str):
        raise TypeError(
            'error_uid must be an instance of either str, uuid.UUID or None')
    pversion = 'UNKNOWN'
    if its.on_linux:
        pversion = 'Linux: ' + ' '.join(platform.linux_distribution())
    elif its.on_windows:
        pversion = 'Windows: ' + ' '.join(platform.win32_ver())
        if its.frozen:
            pversion += ' (Frozen=True)'
        else:
            pversion += ' (Frozen=False)'
    exc_name = format_exception_name(exc_type)
    rpc_error_details = 'N/A (Not a remote RPC error)'
    if isinstance(
            exc_value,
            advancedhttpserver.RPCError) and exc_value.is_remote_exception:
        rpc_error_details = "Name: {0}".format(
            exc_value.remote_exception['name'])
        if exc_value.remote_exception.get('message'):
            rpc_error_details += " Message: '{0}'".format(
                exc_value.remote_exception['message'])
    current_tid = threading.current_thread().ident
    thread_info = ("{0: >4}{1} (alive={2} daemon={3})".format(
        ('=> ' if thread.ident == current_tid else ''), thread.name,
        thread.is_alive(), thread.daemon) for thread in threading.enumerate())
    thread_info = '\n'.join(thread_info)
    details = EXCEPTION_DETAILS_TEMPLATE.format(
        error_details=repr(exc_value),
        error_type=exc_name,
        error_uid=error_uid,
        rpc_error_details=rpc_error_details,
        king_phisher_version=version.version,
        platform_version=pversion,
        python_version="{0}.{1}.{2}".format(*sys.version_info),
        gtk_version="{0}.{1}.{2}".format(Gtk.get_major_version(),
                                         Gtk.get_minor_version(),
                                         Gtk.get_micro_version()),
        stack_trace=''.join(
            traceback.format_exception(exc_type, exc_value, exc_traceback)),
        thread_info=thread_info,
        timezone=tzlocal.get_localzone().zone)
    details = details.strip() + '\n'
    # add on additional details for context as necessary
    if isinstance(exc_value, errors.KingPhisherGraphQLQueryError):
        details += '\nGraphQL Exception Information:\n=============================\n\n'
        if exc_value.errors:
            details += 'GraphQL Errors:\n---------------\n'
            details += '\n'.join(error.strip()
                                 for error in exc_value.errors) + '\n\n'
        details += 'GraphQL Query:\n--------------\n'
        details += textwrap.dedent(exc_value.query) + '\n'
    return details
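
# A minimal usage sketch (relies on the surrounding king-phisher environment):
#   try:
#       risky_operation()
#   except Exception:
#       details = format_exception_details(*sys.exc_info(), error_uid=uuid.uuid4())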