def get_passes(self):


        passes = []

        # use time of 4PM today for all calculations so that it always gets next rise and set times for this evening

        mytz = pytz.timezone(self.tz)
        eptz = pytz.timezone('UTC')

        now = datetime.date.today()
        afternoon = mytz.localize(datetime.datetime(now.year, now.month, now.day) + datetime.timedelta(hours=16))
        eptafternoon = afternoon.astimezone(eptz)
        # print "eptafternoon", eptafternoon

        # setup current location
        here = ephem.Observer()
        here.lon = str(self.lon)
        here.lat = str(self.lat)
        here.elev = self.alt
        here.date = eptafternoon
        # print here

        # do lookup from NASA website:
       
        url = params.nasa_url
        
        req = urllib2.Request(url)
        response = urllib2.urlopen(req)
        data = response.read()

        # look for TWO LINE MEAN ELEMENT SET in file
        table = data.split("TWO LINE MEAN ELEMENT SET")[1]
        lines = table.splitlines()
        line1 = lines[3]
        line2 = lines[4]
        # print "line 1:", line1
        # print "line 2:", line2
        
        iss = ephem.readtle('ISS', line1, line2)


        # get the next 5 passes; there will never be more than 5 passes after 4 PM
        for apass in range(5):
            iss.compute(here)

            iss_np = here.next_pass(iss)
            iss_r = ephem.localtime(iss_np[0])
            iss_s = ephem.localtime(iss_np[4])
            # print "pass n: iss rise, set:", apass, iss_r, iss_s

        
            # Store the data in a list      
            passes.append({"begin_time": iss_r, "end_time": iss_s})

            # advance the observer past this pass's set time so that
            # next_pass() finds the following pass on the next iteration
            here.date = iss_np[4]
        
        # Return all the data
        return passes
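
For context, a minimal driver sketch: it assumes the method above lives on a class exposing tz, lat, lon, and alt (the ISSPasses name and the Chicago coordinates are made up for illustration), and that params.nasa_url points at a page containing a "TWO LINE MEAN ELEMENT SET" block.

class ISSPasses(object):
    def __init__(self, tz, lat, lon, alt):
        self.tz = tz      # IANA zone name, e.g. 'America/Chicago'
        self.lat = lat    # degrees north
        self.lon = lon    # degrees east
        self.alt = alt    # metres above sea level

    get_passes = get_passes  # reuse the function above as a method

predictor = ISSPasses('America/Chicago', 41.88, -87.63, 180)
for p in predictor.get_passes():
    print "rise:", p["begin_time"], "set:", p["end_time"]  # Python 2, matching urllib2 above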
Example #2
def ip2lt(ip):
    try:
        reader = d.Reader('./db/GeoLite2-City.mmdb')
    except FileNotFoundError:
        db.update()
        sys.exit(0)
    try:
        try:
            my_ip = localip.localip()
            myip = reader.city(my_ip)
            local_tz = pytz.timezone(str(myip.location.time_zone))
        except pytz.exceptions.UnknownTimeZoneError:
            print("Unable to detect your local time zone, try using\n'localt.ip2lt2(your time zone, ip)' instead")
            sys.exit(0)
        try:
            tz = timezone.timezone(ip)
            resp_tz = pytz.timezone(tz)
        except pytz.exceptions.UnknownTimeZoneError:
            print("Unable to detect given IP's time zone, try using\n'localt.tz2lt(time zone)' instead")
            sys.exit(0)
        local_time = localtime()
        local_dt = local_tz.localize(
            datetime(local_time.tm_year, local_time.tm_mon, local_time.tm_mday,
                     local_time.tm_hour, local_time.tm_min, local_time.tm_sec),
            is_dst=isdst.isdstip(ip))
        resp_dt = local_dt.astimezone(resp_tz)
        fmt = '%H:%M'
    except ValueError:
        print("Please insert a valid IP Address")
        sys.exit(0)
    return resp_dt.strftime(fmt)
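
A one-line usage sketch, assuming the project-local helpers the snippet imports (d for the geoip2 database reader, localip, timezone, isdst) and the GeoLite2 database file are in place:

print(ip2lt('8.8.8.8'))  # wall-clock time at the IP's location, e.g. '14:05'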
Example #3
def encode_date_optional_time(obj):
    """
    ISO encode timezone-aware datetimes
    """
    if isinstance(obj, datetime.datetime):
        return timezone("UTC").normalize(obj.astimezone(timezone("UTC"))).strftime('%Y-%m-%dT%H:%M:%SZ')
    raise TypeError("{0} is not JSON serializable".format(repr(obj)))
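
The function follows the json.dumps default-hook protocol: serialize what it recognises, raise TypeError for everything else. A short usage sketch, assuming pytz's timezone is in scope as in the snippet:

import json
import datetime
from pytz import timezone

payload = {'created': timezone('US/Eastern').localize(datetime.datetime(2011, 1, 1, 19, 0))}
print(json.dumps(payload, default=encode_date_optional_time))
# {"created": "2011-01-02T00:00:00Z"}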
Example #4
    def test_linearRegression(self):
        original = functions.evaluateTarget
        try:
            # series covers 180 to 480 seconds past the epoch (six datapoints)
            # steps are every 60 seconds
            savedSeries = TimeSeries('test.value',180,480,60,[3,None,5,6,None,8]),
            functions.evaluateTarget = lambda x, y: savedSeries

            # input values will be ignored and replaced by regression function
            inputSeries = TimeSeries('test.value',1200,1500,60,[123,None,None,456,None,None,None])
            inputSeries.pathExpression = 'test.value'
            results = functions.linearRegression({
                'startTime': datetime(1970, 1, 1, 0, 20, 0, 0, pytz.timezone(settings.TIME_ZONE)),
                'endTime': datetime(1970, 1, 1, 0, 25, 0, 0, pytz.timezone(settings.TIME_ZONE)),
                'localOnly': False,
                'data': [],
            }, [ inputSeries ], '00:03 19700101', '00:08 19700101')

            # regression function calculated from datapoints on minutes 3 to 8
            expectedResult = [
                TimeSeries('linearRegression(test.value, 180, 480)',1200,1500,60,[20.0,21.0,22.0,23.0,24.0,25.0,26.0])
            ]

            self.assertEqual(results, expectedResult)
        finally:
            functions.evaluateTarget = original
Example #5
 def __init__(self, lat=None, lon=None, tzname="UTC"):
     if lat is not None and lon is not None:
         with nostdout():
             w = tzwhere.tzwhere()
         self.tz = timezone(w.tzNameAt(lat, lon))
     else:
         self.tz = timezone(tzname)
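
A usage sketch, assuming tzwhere's data is installed (the Location class name is hypothetical; the tzwhere lookup is slow to initialise, which is why the snippet silences stdout):

berlin = Location(lat=52.52, lon=13.405)
print(berlin.tz)        # <DstTzInfo 'Europe/Berlin' ...>

sao_paulo = Location(tzname='America/Sao_Paulo')
print(sao_paulo.tz)     # falls back to the tzname argument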
Example #6
def humanize(obj, type=None, length=None):
    if obj is None:
        obj = ''
    elif type and type.startswith('time'):
        tz = type[len('time'):].lstrip('-')
        tz = timezone(tz) if tz else current_app.timezone or utc
        obj = format_time(float(obj), tz) if obj else ''
    elif type and type.startswith('natural-time'):
        tz = type[len('natural-time'):].lstrip('-')
        tz = timezone(tz) if tz else current_app.timezone or utc
        delta = datetime.now(tz) - datetime.fromtimestamp(float(obj), tz)
        if delta < timedelta(days=1):
            obj = format_timedelta(delta, locale='en_US') + ' ago'
        else:
            obj = format_time(float(obj), tz) if obj else ''
    elif isinstance(obj, string_types) and not re.match(UUID_REGEX, obj):
        obj = obj.replace('-', ' ').replace('_', ' ')
        obj = re.sub('|'.join(KEYWORDS_UP),
                     lambda m: m.group(0).upper(), obj)
        if obj and obj not in KEYWORDS_DOWN:
            obj = obj[0].upper() + obj[1:]
    elif isinstance(obj, list):
        if all(isinstance(x, (int, float) + string_types) for x in obj):
            obj = ', '.join(map(str, obj))
    if length is not None and len(obj) > length:
        obj = obj[:length - 4] + ' ...'
    return obj
Example #7
    def test_date_range_timestamp_equiv_explicit_pytz(self):
        rng = date_range('20090415', '20090519',
                         tz=pytz.timezone('US/Eastern'))
        stamp = rng[0]

        ts = Timestamp('20090415', tz=pytz.timezone('US/Eastern'), freq='D')
        assert ts == stamp
Example #8

 def now(self):
     """
     Returns a timezone aware datetime localized to the account's timezone.
     """
     now = datetime.datetime.utcnow().replace(tzinfo=pytz.timezone("UTC"))
     timezone = settings.TIME_ZONE if not self.timezone else self.timezone
     return now.astimezone(pytz.timezone(timezone))
Example #9

    def filter_time_series_by_minute_of_hour(self, minute, data_frame, in_tz=None, out_tz=None):
        """
        filter_time_series_by_minute_of_hour - Filter time series by minute of hour

        Parameters
        ----------
        minute : int
            minute of hour
        data_frame : DataFrame
            data frame to be filtered
        in_tz : str (optional)
            time zone of input data frame
        out_tz : str (optional)
            time zone of output data frame

        Returns
        -------
        DataFrame
        """
        if out_tz is not None:
            if in_tz is not None:
                data_frame = data_frame.tz_localize(pytz.timezone(in_tz))

            data_frame = data_frame.tz_convert(pytz.timezone(out_tz))

            # change internal representation of time
            data_frame.index = pandas.DatetimeIndex(data_frame.index.values)

        data_frame = data_frame[data_frame.index.minute == minute]

        return data_frame
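
The same filtering logic as a standalone sketch, assuming pandas and pytz; the column name and zones are arbitrary:

import pandas
import pytz

# ten hours of minutely points, localized to UTC and viewed in New York time
idx = pandas.date_range('2021-01-04', periods=600, freq='min')
data_frame = pandas.DataFrame({'price': range(600)}, index=idx)
data_frame = data_frame.tz_localize(pytz.timezone('UTC')).tz_convert(pytz.timezone('America/New_York'))
data_frame.index = pandas.DatetimeIndex(data_frame.index.values)  # strip tz info, mirroring the method above
print(data_frame[data_frame.index.minute == 30])                  # one row per hour, at half past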
Example #10
def remind(willie, trigger):
    """Gives you a reminder in the given amount of time."""
    duration = 0
    message = re.split(r'(\d+ ?(?:' + periods + r')) ?', trigger.group(2))[1:]
    reminder = ''
    stop = False
    for piece in message:
        grp = re.match(r'(\d+) ?(.*) ?', piece)
        if grp and not stop:
            length = float(grp.group(1))
            factor = scaling.get(grp.group(2), 60)
            duration += length * factor
        else:
            reminder = reminder + piece
            stop = True
    if duration == 0:
        return willie.reply("Sorry, didn't understand the input.")

    if duration % 1:
        duration = int(duration) + 1
    else:
        duration = int(duration)
    tzi = timezone('UTC')
    if willie.db and trigger.nick in willie.db.preferences:
        tz = willie.db.preferences.get(trigger.nick, 'tz') or 'UTC'
        tzi = timezone(tz)
    create_reminder(willie, trigger, duration, reminder, tzi)
Example #11
def get_with_timezone(
        datetime_object: datetime.datetime,
        to_timezone: str = '',
        default_from_timezone: str = 'UTC',
) -> datetime.datetime:
    """
    Change timezone of a date
    :param datetime_object: datetime to update
    :param to_timezone: timezone name; if empty, try to grab the current
    user's timezone. If no timezone name is given and there is no current
    user timezone, return the original datetime
    :param default_from_timezone: timezone to assume for the original
    datetime if it is naive
    :return: updated datetime
    """
    # If no to_timezone, try to grab from current user
    if not to_timezone and tmpl_context.current_user:
        to_timezone = tmpl_context.current_user.timezone

    # If no to_timezone, return original datetime
    if not to_timezone:
        return datetime_object

    # If datetime_object is naive, localize it to default_from_timezone
    if not datetime_object.tzinfo:
        from_tzinfo = pytz.timezone(default_from_timezone)
        datetime_object = from_tzinfo.localize(datetime_object)

    new_tzinfo = pytz.timezone(to_timezone)
    return datetime_object.astimezone(new_tzinfo)
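
A quick usage sketch with an explicit target zone, so the tmpl_context request state is not needed; the naive input is interpreted as UTC via default_from_timezone:

import datetime

naive = datetime.datetime(2020, 6, 1, 12, 0)
print(get_with_timezone(naive, to_timezone='Europe/Paris'))
# 2020-06-01 14:00:00+02:00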
Example #12
def convert_utc_to_user_timezone(utc_timestamp):
	from pytz import timezone, UnknownTimeZoneError
	utcnow = timezone('UTC').localize(utc_timestamp)
	try:
		return utcnow.astimezone(timezone(get_user_time_zone()))
	except UnknownTimeZoneError:
		return utcnow
Example #13
def utc_timestamp(cr, uid, timestamp, context=None):
    """Returns the given timestamp converted to the client's timezone.
       This method is *not* meant for use as a _defaults initializer,
       because datetime fields are automatically converted upon
       display on the client side. For _defaults,
       :meth:`fields.datetime.now` should be used instead.

       :param datetime timestamp: naive datetime value (expressed in the
                                  client's local time) to be converted to UTC
       :param dict context: the 'tz' key in the context should give the
                            name of the User/Client timezone (otherwise
                            UTC is used)
       :rtype: datetime
       :return: timestamp converted to timezone-aware datetime in UTC
    """
    assert isinstance(timestamp, datetime), 'Datetime instance expected'
    if context and context.get('tz'):
        tz_name = context['tz']  
    else:
        registry = RegistryManager.get(cr.dbname)
        tz_name = registry.get('res.users').read(cr, SUPERUSER_ID, uid, ['tz'])['tz']
    if tz_name:
        try:
            utc = pytz.timezone('UTC')
            context_tz = pytz.timezone(tz_name)
            context_timestamp = context_tz.localize(timestamp, is_dst=False) # UTC = no DST
            return context_timestamp.astimezone(utc)
        except Exception:
            _logger.debug("failed to compute context/client-specific timestamp, "
                          "using the UTC value",
                          exc_info=True)
    return timestamp
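
The is_dst=False passed to localize() matters during the autumn overlap, when one wall-clock time occurs twice. A standalone illustration with plain pytz:

import pytz
from datetime import datetime

paris = pytz.timezone('Europe/Paris')
ambiguous = datetime(2002, 10, 27, 2, 30)       # occurs twice as DST ends
print(paris.localize(ambiguous, is_dst=True))   # 2002-10-27 02:30:00+02:00 (CEST)
print(paris.localize(ambiguous, is_dst=False))  # 2002-10-27 02:30:00+01:00 (CET)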
Example #14
File: ir_http.py Project: Vauxoo/odoo
    def _add_dispatch_parameters(cls, func):

        # Force website with query-string parameter, typically set from the website selector in the frontend navbar
        force_website_id = request.httprequest.args.get('fw')
        if (force_website_id and request.session.get('force_website_id') != force_website_id and
                request.env.user.has_group('website.group_multi_website') and
                request.env.user.has_group('website.group_website_publisher')):
            request.env['website']._force_website(request.httprequest.args.get('fw'))

        context = {}
        if not request.context.get('tz'):
            context['tz'] = request.session.get('geoip', {}).get('time_zone')
            try:
                pytz.timezone(context['tz'] or '')
            except pytz.UnknownTimeZoneError:
                context.pop('tz')

        request.website = request.env['website'].get_current_website()  # can use `request.env` since auth methods are called
        context['website_id'] = request.website.id
        # This is mainly to avoid access errors in website controllers where there is no
        # context (eg: /shop), and it's not going to propagate to the global context of the tab
        context['allowed_company_ids'] = [request.website.company_id.id]

        # modify bound context
        request.context = dict(request.context, **context)

        super(Http, cls)._add_dispatch_parameters(func)

        if request.routing_iteration == 1:
            request.website = request.website.with_context(request.context)
Example #15
def rss():
    items = []
    last_update_date = datetime(1970, 1, 1, 0, 0, 0, tzinfo=timezone('UTC'))
    now = datetime.now(tz=timezone('UTC'))

    for article_id, article_meta in articles_list.items():
        date = article_meta['published_time']

        if date > last_update_date:
            last_update_date = date

        func_name = 'article_by_id_en' if article_meta['locale'] == 'en' else 'article_by_id_de'

        meta = {
            'title': article_meta['title'],
            'url': url_for(func_name, article_id=article_id),
            'date': date,
        }

        # TODO: filter for fewer days
        if (now - date).days <= 365:  # pragma: no cover
            items.append(meta)

    items = sorted(items, key=lambda item: item['date'], reverse=True)

    for item in items:
        item['date'] = item['date'].strftime('%a, %d %b %Y %H:%M:%S %z')

    resp = Response(render_template('rss.xml', items=items,
                                    last_update_date=last_update_date.strftime('%a, %d %b %Y %H:%M:%S %z')),
                    mimetype='application/rss+xml')
    return resp
Example #16
def utc2server(date, naive=True):
    date = date.replace(tzinfo=None)
    servertz = info.HelperMaKaCInfo.getMaKaCInfoInstance().getTimezone()
    servertime = timezone('UTC').localize(date).astimezone(timezone(servertz))
    if naive:
        return servertime.replace(tzinfo=None)
    return servertime
Example #17
 def handlePlotlyMessages(self, msg):
     ''' Write the msg data to the associated plotly stream
     msg is a json string of the form:
         {"x": 1, "y": 1, "token": "ibagefhdci"}
         or
         {"y": 1, "token": "ibagefhdci", "timezone": "America/Montreal"}
     If it's the latter form, we'll compute the time and assign that as x
     '''
     data = json.loads(msg)
     if data['token'] in self.streams:
         token = data['token']
         del data['token']
         if 'x' not in data and 'timezone' in data:
             try:
                 tz = timezone(data['timezone'])
             except Exception:
                 c.logger.warning("Unrecognized timezone: {timezone}\n"
                     "Defaulting to America/Montreal".format(timezone=data['timezone']))
                 c.logger.debug(traceback.format_exc())
                 tz = timezone("America/Montreal")
             del data['timezone']
             data['x'] = tz.localize(datetime.datetime.now())
         self.streams[token].write(data)
     else:
         c.logger.warning("Recieved the token: {token} which has no associated stream.\n"\
             "Add the token {token} to your config.json file to register this stream.".format(token=data['token']))
Example #18

def forwards_func(apps, schema_editor):
    '''
    Derive and set timestamps.
    If the issue number is -1, set the timestamp to after the deadline;
    if it is not -1, set it to before the deadline.
    '''
    Issue = apps.get_model('document_system', 'Issue')
    db_alias = schema_editor.connection.alias

    issues_null_timestamp = Issue.objects.using(db_alias).filter(models.Q(updated_at__exact=None) | models.Q(created_at__exact=None))
    for issue in issues_null_timestamp:
        posted_date = issue.meeting.meeting_date - timedelta(days=2)
        if issue.issue_order < 0:
            posted_hour = time(hour=22, tzinfo=pytz.timezone('Asia/Tokyo'))
        else:
            posted_hour = time(hour=20, tzinfo=pytz.timezone('Asia/Tokyo'))
        timestamp = datetime.combine(posted_date, posted_hour)

        # Disable the auto_now behaviour of updated_at; otherwise the field
        # would be overwritten with the date and time of the migrate run
        for field in issue._meta.local_fields:
            if field.name == "updated_at":
                field.auto_now = False

        issue.updated_at = timestamp
        issue.created_at = timestamp
        issue.save()
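
One caveat on the time(..., tzinfo=pytz.timezone(...)) pattern above: attaching a pytz zone directly to a time or datetime picks up the zone's earliest recorded (local mean time) offset rather than JST. A sketch of the difference with plain pytz:

import datetime
import pytz

tokyo = pytz.timezone('Asia/Tokyo')
naive = datetime.datetime(2020, 1, 10, 22, 0)
print(tokyo.localize(naive))        # 2020-01-10 22:00:00+09:00 (JST)
print(naive.replace(tzinfo=tokyo))  # 2020-01-10 22:00:00+09:19 (LMT offset, rarely what you want)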
Example #19
  def testPeopleRants(self):
    """This test contains various things which people rant about."""

    # Tests some of the pitfalls discussed at
    # http://www.enricozini.org/2009/debian/using-python-datetime/
    ############################################################################

    # That's right, the datetime object created by a call to datetime.datetime
    # constructor now seems to think that Finland uses the ancient "Helsinki
    # Mean Time" which was obsoleted in the 1920s.
    #
    # Well not anymore!
    eurhel = pytz.timezone("Europe/Helsinki")
    a = datetime_tz.datetime_tz(2008, 6, 23, 18, 2, 31, 101025, eurhel)
    self.assertEqual(repr(a),
                     "datetime_tz(2008, 6, 23, 18, 2, 31, 101025,"
                     " tzinfo=<DstTzInfo 'Europe/Helsinki' EEST+3:00:00 DST>)")

    # Timezone-aware datetime objects have other bugs: for example, they fail to
    # compute Unix timestamps correctly. The following example shows two
    # timezone-aware objects that represent the same instant but produce two
    # different timestamps.
    #
    # Well not anymore!
    utc = pytz.timezone("UTC")
    a = datetime_tz.datetime_tz(2008, 7, 6, 5, 4, 3, tzinfo=utc)
    self.assertEqual(str(a), "2008-07-06 05:04:03+00:00")
    self.assertEqual(a.totimestamp(), 1215320643.0)
    # FIXME(tansell): %s is affected by the TZ environment value.
    #self.assertEqual(a.strftime("%s"), "1215284643")

    italy = pytz.timezone("Europe/Rome")
    b = a.astimezone(italy)
    self.assertEqual(str(b), "2008-07-06 07:04:03+02:00")
    self.assertEqual(b.totimestamp(), 1215320643.0)
Example #20
def main(args, options):
    logging.info('Looking for builds...')
    if args[0] == 'latest':
        commands = ['triggerjobs %s' %
                    builds.BuildCache().find_latest_build(options.branch)]
    else:
        if re.match(r'\d{14}', args[0]):
            # build id
            build_time = datetime.datetime.strptime(args[0], '%Y%m%d%H%M%S')
            start_time = build_time
            end_time = build_time
        else:
            start_time = from_iso_date_or_datetime(args[0])
            if len(args) > 1:
                end_time = from_iso_date_or_datetime(args[1])
            else:
                end_time = datetime.datetime.now()
        if not start_time.tzinfo:
            start_time = start_time.replace(tzinfo=pytz.timezone('US/Pacific'))
        if not end_time.tzinfo:
            end_time = end_time.replace(tzinfo=pytz.timezone('US/Pacific'))
        commands = ['triggerjobs %s' % url for url in
                    builds.BuildCache().find_builds(start_time, end_time, 
                                                    options.branch)]
    logging.info('Connecting to autophone server...')
    commands.append('exit')
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((options.ip, options.port))
    logging.info('- %s' % s.recv(1024).strip())
    for c in commands:
        logging.info('%s' % c)
        s.sendall(c + '\n')
        logging.info('- %s' % s.recv(1024).strip())
    return 0
Example #21
 def test_simple_config(self):
     config = get_config(PATH + 'simple.conf')
     comp_config = {
         'calendars': {
             'home': {'path': os.path.expanduser('~/.calendars/home/'),
                      'readonly': False, 'color': '', 'type': 'calendar'},
             'work': {'path': os.path.expanduser('~/.calendars/work/'),
                      'readonly': False, 'color': '', 'type': 'calendar'},
         },
         'sqlite': {'path': os.path.expanduser('~/.local/share/khal/khal.db')},
         'locale': {
             'local_timezone': pytz.timezone('Europe/Berlin'),
             'default_timezone': pytz.timezone('Europe/Berlin'),
             'timeformat': '%H:%M',
             'dateformat': '%d.%m.',
             'longdateformat': '%d.%m.%Y',
             'datetimeformat': '%d.%m. %H:%M',
             'longdatetimeformat': '%d.%m.%Y %H:%M',
             'firstweekday': 0,
             'encoding': 'utf-8',
             'unicode_symbols': True,
             'weeknumbers': False,
         },
         'default': {
             'default_command': 'calendar',
             'default_calendar': None,
             'show_all_days': False,
             'print_new': 'False',
             'days': 2,
         }
     }
     for key in comp_config:
         assert config[key] == comp_config[key]
Example #22

    def testOldPickles(self):
        # Ensure that applications serializing pytz instances as pickles
        # have no troubles upgrading to a new pytz release. These pickles
        # were created with pytz2006j
        east1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'US/Eastern'\np2\nI-18000\n"
            "I0\nS'EST'\np3\ntRp4\n."
            ))
        east2 = pytz.timezone('US/Eastern').localize(
            datetime(2006, 1, 1)).tzinfo
        self.assertTrue(east1 is east2)

        # Confirm changes in name munging between 2006j and 2007c cause
        # no problems.
        pap1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'America/Port_minus_au_minus_Prince'"
            "\np2\nI-17340\nI0\nS'PPMT'\np3\ntRp4\n."))
        pap2 = pytz.timezone('America/Port-au-Prince').localize(
            datetime(1910, 1, 1)).tzinfo
        self.assertTrue(pap1 is pap2)

        gmt1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'Etc/GMT_plus_10'\np2\ntRp3\n."))
        gmt2 = pytz.timezone('Etc/GMT+10')
        self.assertTrue(gmt1 is gmt2)
Example #23
 def test_timezone(self):
     dt = datetime(2009, 11, 10, 23, 0, 0, 123456)
     utc = UTC.localize(dt)
     berlin = timezone('Europe/Berlin').localize(dt)
     eastern = berlin.astimezone(timezone('US/Eastern'))
     data = {
         "points": [
             {"measurement": "A", "fields": {"val": 1},
              "time": 0},
             {"measurement": "A", "fields": {"val": 1},
              "time": "2009-11-10T23:00:00.123456Z"},
             {"measurement": "A", "fields": {"val": 1}, "time": dt},
             {"measurement": "A", "fields": {"val": 1}, "time": utc},
             {"measurement": "A", "fields": {"val": 1}, "time": berlin},
             {"measurement": "A", "fields": {"val": 1}, "time": eastern},
         ]
     }
     self.assertEqual(
         line_protocol.make_lines(data),
         '\n'.join([
             'A val=1i 0',
             'A val=1i 1257894000123456000',
             'A val=1i 1257894000123456000',
             'A val=1i 1257894000123456000',
             'A val=1i 1257890400123456000',
             'A val=1i 1257890400123456000',
         ]) + '\n'
     )
Example #24
File: locale.py Project: fulfilio/nereid
def get_timezone():
    """
    Returns the timezone that should be used for this request as
    `pytz.timezone` object.  This returns `None` if used outside of
    a request.
    """
    ctx = _request_ctx_stack.top
    tzinfo = getattr(ctx, 'babel_tzinfo', None)
    if tzinfo is None:
        babel = ctx.app.extensions['babel']
        if babel.timezone_selector_func is None:
            if not current_user.is_anonymous and current_user.timezone:
                tzinfo = timezone(current_user.timezone)
            elif current_website.company.timezone:
                tzinfo = timezone(current_website.company.timezone)
            else:
                tzinfo = babel.default_timezone
        else:
            rv = babel.timezone_selector_func()
            if rv is None:
                tzinfo = babel.default_timezone
            else:
                if isinstance(rv, basestring):
                    tzinfo = timezone(rv)
                else:
                    tzinfo = rv
        ctx.babel_tzinfo = tzinfo
    return tzinfo
Example #25
File: forms.py Project: shollmann/remo
    def clean(self):
        """Clean form."""
        super(EventForm, self).clean()

        cdata = self.cleaned_data

        cdata['budget_bug'] = cdata.get('budget_bug_form', None)
        cdata['swag_bug'] = cdata.get('swag_bug_form', None)
        if self.editable_owner:
            cdata['owner'] = cdata.get('owner_form', None)
        else:
            cdata['owner'] = self.instance.owner

        # Set timezone
        t = timezone(cdata['timezone'])
        if 'start_form' in cdata:
            start = make_naive(cdata['start_form'],
                               timezone(settings.TIME_ZONE))
            cdata['start'] = t.localize(start)

        if 'end_form' in cdata:
            end = make_naive(cdata['end_form'],
                             timezone(settings.TIME_ZONE))
            cdata['end'] = t.localize(end)

        # Directly write to self.errors as
        # ValidationError({'start_form': ['Error message']}) doesn't
        # seem to work.
        if cdata['start'] >= cdata['end']:
            self.errors['start_form'] = (u'Start date should come '
                                         'before end date.')
            raise ValidationError({'start_form': ['Error']})

        return cdata
Example #26
def make_plots(nc):
    ''' Generate some plots '''
    sts = compute_sts(nc)
    lats = nc.variables['lat'][:]
    lons = nc.variables['lon'][:]
    rts = (sts.astimezone(pytz.timezone("America/Chicago"))).strftime(
                                                            "%d %b %Y %H %p")
    for i, tm in enumerate(nc.variables['time'][:]):
        dt = sts + datetime.timedelta(minutes=float(tm))
        if dt.minute != 0:
            continue
        fhour = int(tm / 60.0)
        fts = (dt.astimezone(pytz.timezone("America/Chicago"))).strftime(
                                                            "%d %b %Y %H %p")
        for pvar in PVARS:
            m = MapPlot(title='ISUMM5/Bridget Modelled %s' % (
                                                    PVARS[pvar]['title'],),
                        subtitle='Model Run: %s Forecast Valid: %s' % (rts, fts))
            vals = nc.variables[pvar][i,:,:]
            if pvar == 'bdeckt':
                vals = temperature(vals, 'K').value('F')
            m.pcolormesh(lons, lats, vals, PVARS[pvar]['levels'], units='mm')
            pqstr = "plot c %s model/frost/bridget/%02i/%s_%02i_f%03i.png bogus png" % (
                                        sts.strftime("%Y%m%d%H%M"), sts.hour,
                                        pvar, sts.hour, fhour)
            m.postprocess(pqstr=pqstr)
            m.close()
Example #27
    def check_uncheck_sampo(self):
        action = self.request.GET.get('action')
        time = self.request.GET.get('time')

        if time:
            hhmm = map(lambda x: int(x), self.request.GET['time'].split(':'))
        else:
            hhmm = [0, 0]

        date_str = self.request.GET.get('date')

        if date_str:
            date = map(lambda x: int(x), date_str.split('.'))
            date_params = dict(
                zip(('day', 'month', 'year', 'hour', 'minute'), date+hhmm)
            )
            now = make_aware(datetime.datetime(**date_params), timezone(TIME_ZONE))
        else:
            now = make_aware(datetime.datetime.now(), timezone(TIME_ZONE)).replace(hour=hhmm[0], minute=hhmm[1], second=0, microsecond=0)

        if action == 'check':
            new_usage = SampoPassUsage(
                sampo_pass_id=int(self.request.GET['pid']),
                date=now
            )

            new_usage.save()

            passes, payments, _ = get_sampo_details(now)

            _json = json.dumps({
                'payments': payments
            })

            return HttpResponse(_json)

        elif action == 'uncheck':
            # TODO: if a system admin deletes a record from here for any day other than today, the wrong record gets deleted!
            # TODO: this is better solved by passing the correct date into this function...
            last_usage = SampoPassUsage.objects.filter(
                sampo_pass_id=int(self.request.GET['pid']),
                date__range=(
                    now.replace(hour=0, minute=0, second=0, microsecond=0),
                    now.replace(hour=23, minute=59, second=59, microsecond=999999)
                )
            ).last()

            if last_usage:
                last_usage.delete()

            passes, payments, _ = get_sampo_details(now)

            _json = json.dumps({
                'payments': payments
            })

            return HttpResponse(_json)

        else:
            return HttpResponseServerError('failed')
Example #28
    def start_requests(self):
        """
            default Scrapy method to send requests
        """

        # if spider already active
        if self.settings['active'] == 'T':
            log.msg('[OVERLAP] - at %s EST' % (datetime.now(timezone('US/Eastern')).strftime("%Y-%m-%d %H:%M:%S")), level=log.INFO)
            # Close the spider
            raise exceptions.CloseSpider('Recon Spider already active')

        # Set spider is activating
        ReconSpiderSettings(self.site).write_active('T')

        log.msg('[START_ID] - %s at %s EST' % (str(self.settings['recon_startid']), datetime.now(timezone('US/Eastern'))
                .strftime("%Y-%m-%d %H:%M:%S")), level=log.INFO)
        log.msg('[CYCLES] - %s at %s EST' % (
            str(self.settings['cycles']), datetime.now(timezone('US/Eastern')).strftime("%Y-%m-%d %H:%M:%S")), level=log.INFO)

        # requires a new recon_startid, if not, close the spider
        if self.settings['recon_startid'] == -1:
            # Close the spider and ask the user to provide an initial start_id
            raise exceptions.CloseSpider('Provide a start_id value via the start_id parameter for initializing')

        # Generate ids list for reconnoitering
        url_ids = generate_ids(self.site)
        
        # Send URL requests
        for id in url_ids:
            req = Request("".join((self.base_url, str(id))), dont_filter=True, callback=self.parse)
            # save url_id for calling back
            req.meta['url_id'] = id
            yield req
Example #29
File: forms.py Project: danyjavierb/remo
    def __init__(self, *args, **kwargs):
        """Initialize form.

        Dynamically set choices for country field.
        """
        if 'editable_owner' in kwargs:
            self.editable_owner = kwargs['editable_owner']
            del kwargs['editable_owner']

        super(EventForm, self).__init__(*args, **kwargs)

        # Dynamic countries field.
        countries = product_details.get_regions('en').values()
        countries.sort()
        country_choices = ([('', "Country")] +
                           [(country, country) for country in countries])
        self.fields['country'].choices = country_choices

        # Dynamic owner field.
        if self.editable_owner:
            self.fields['owner_form'] = forms.ModelChoiceField(
                queryset=User.objects.filter(
                    userprofile__registration_complete=True,
                    groups__name='Rep'),
                empty_label='Owner', initial=self.instance.owner.pk)
        else:
            self.fields['owner_form'] = forms.CharField(
                required=False, initial=get_full_name(self.instance.owner),
                widget=forms.TextInput(attrs={'readonly': 'readonly',
                                              'class': 'input-text big'}))

        instance = self.instance
        # Dynamically set the year portion of the datetime widget
        now = datetime.now()
        start_year = getattr(self.instance.start, 'year', now.year)
        end_year = getattr(self.instance.end, 'year', now.year)
        self.fields['start_form'] = forms.DateTimeField(
            widget=SplitSelectDateTimeWidget(
                years=range(start_year, now.year + 10), minute_step=5),
            validators=[validate_datetime])
        self.fields['end_form'] = forms.DateTimeField(
            widget=SplitSelectDateTimeWidget(
                years=range(end_year, now.year + 10), minute_step=5),
            validators=[validate_datetime])
        # Make times local to venue
        if self.instance.start:
            start = make_naive(instance.local_start,
                               timezone(instance.timezone))
            self.fields['start_form'].initial = start

        if self.instance.end:
            end = make_naive(instance.local_end, timezone(instance.timezone))
            self.fields['end_form'].initial = end

        # Use of intermediate fields to translate between bug.id and
        # bug.bug_id
        if instance.budget_bug:
            self.fields['budget_bug_form'].initial = instance.budget_bug.bug_id
        if instance.swag_bug:
            self.fields['swag_bug_form'].initial = instance.swag_bug.bug_id
Example #30
    def make_utc_datetime(**kwargs):
        """
        Helper function to convert the local (Chicago) time as scraped
        to a UTC datetime object.

        Expected in the kwargs:

        :param int show_year: Year of the concert start time.
        :param int show_month: Month of the concert start time.
        :param int show_day: Day of the concert start time.
        :param int show_hour: Hour of the concert start time.
        :param int show_minute: Minute of the concert start time.
        :returns: UTC datetime object.
        """

        naive_time_obj = datetime.datetime(
            year=kwargs['show_year'],
            month=kwargs['show_month'],
            day=kwargs['show_day'],
            hour=kwargs['show_hour'],
            minute=kwargs['show_minute'],
        )

        chicago_tz = pytz.timezone('US/Central')
        utc_tz = pytz.timezone('UTC')

        localized = chicago_tz.localize(naive_time_obj)
        utc_time = localized.astimezone(utc_tz)
        return utc_time
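
A usage sketch; the keyword names follow the show_*-prefixed keys the code actually reads:

when = make_utc_datetime(show_year=2019, show_month=7, show_day=4,
                         show_hour=20, show_minute=30)
print(when)  # 2019-07-05 01:30:00+00:00, since 8:30 PM in Chicago is CDT (UTC-5)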
Example #31
def load_time_zone_status(data_dir: str, chat: str, readable: bool):
    '''
    Load a chat's time zone setting from the launchbot SQLite database.

    Args:
        data_dir (str): directory containing launchbot-data.db
        chat (str): chat identifier to look up
        readable (bool): if True, return a human-readable UTC offset string
            such as '+5:30'; if False, return the offset in hours as a float

    Returns:
        float or str: the chat's UTC offset (0 or '+0' if no zone is set)
    '''
    conn = sqlite3.connect(os.path.join(data_dir, 'launchbot-data.db'))
    cursor = conn.cursor()

    try:
        cursor.execute(
            "SELECT time_zone, time_zone_str FROM chats WHERE chat = ?",
            (chat, ))
    except sqlite3.OperationalError:
        # chats table does not exist yet: create it, then retry
        create_chats_db(db_path=data_dir, cursor=cursor)
        conn.commit()
        cursor.execute(
            "SELECT time_zone, time_zone_str FROM chats WHERE chat = ?",
            (chat, ))

    query_return = cursor.fetchall()
    conn.close()

    if len(query_return) != 0:
        time_zone_string_found = bool(query_return[0][1] is not None)

    if not readable:
        if len(query_return) == 0:
            return 0

        if not time_zone_string_found:
            if query_return[0][0] is None:
                return 0

            return float(query_return[0][0])

        timezone = pytz.timezone(query_return[0][1])
        user_local_now = datetime.datetime.now(timezone)
        utc_offset = user_local_now.utcoffset().total_seconds() / 3600
        return utc_offset

    if len(query_return) == 0:
        return '+0'

    if not time_zone_string_found:
        if query_return[0][0] is None:
            return '+0'

        status = float(query_return[0][0])

        mins = int(60 * (abs(status) % 1))
        hours = int(abs(status))
        sign = '+' if status >= 0 else '-'

        return f'{sign}{hours}' if mins == 0 else f'{sign}{hours}:{mins}'

    timezone = pytz.timezone(query_return[0][1])
    user_local_now = datetime.datetime.now(timezone)
    user_utc_offset = user_local_now.utcoffset().total_seconds() / 3600

    if user_utc_offset % 1 == 0:
        user_utc_offset = int(user_utc_offset)
        utc_offset_str = f'+{user_utc_offset}' if user_utc_offset >= 0 else f'{user_utc_offset}'
    else:
        utc_offset_hours = int(abs(user_utc_offset))
        utc_offset_minutes = int((abs(user_utc_offset) % 1) * 60)
        sign = '+' if user_utc_offset >= 0 else '-'
        utc_offset_str = f'{sign}{utc_offset_hours}:{utc_offset_minutes}'

    return utc_offset_str
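
The underlying offset arithmetic, as a standalone sketch with plain pytz:

import datetime
import pytz

tz = pytz.timezone('Asia/Kolkata')
offset = datetime.datetime.now(tz).utcoffset().total_seconds() / 3600
print(offset)  # 5.5, which the code above renders as '+5:30'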
Example #32
from log_into_wiki import *
import mwparserfromhell, dateutil.parser, pytz

site = login('me', 'smite-esports')  # Set wiki
summary = 'EST -> PST'  # Set summary

limit = -1
# startat_page = 'asdf'
this_template = site.pages['Template:GameSchedule5']  # Set template
pages = this_template.embeddedin()
startat = -1

pst = pytz.timezone('America/Los_Angeles')
est = pytz.timezone('America/New_York')

lmt = 0
for page in pages:
    if lmt == limit:
        break
    lmt += 1
    if lmt < startat:
        print("Skipping page %s" % page.name)
    else:
        text = page.text()
        wikitext = mwparserfromhell.parse(text)
        for template in wikitext.filter_templates():
            if template.name.matches('GameSchedule5'):
                if template.has('timezone') and template.has('time'):
                    if template.get('timezone').value.strip() == 'EST':
                        date = template.get("date").value.strip()
                        time = template.get("time").value.strip()
Example #33
class DateFieldTest(BaseFieldTestMixin, FieldTestCase):
    field_class = fields.Date

    def test_defaults(self):
        field = fields.Date()
        assert not field.required
        assert field.__schema__ == {'type': 'string', 'format': 'date'}

    def test_with_default(self):
        field = fields.Date(default='2014-08-25')
        assert field.__schema__ == {'type': 'string', 'format': 'date', 'default': '2014-08-25'}
        self.assert_field(field, None, '2014-08-25')

    def test_with_default_as_date(self):
        field = fields.Date(default=date(2014, 8, 25))
        assert field.__schema__ == {'type': 'string', 'format': 'date', 'default': '2014-08-25'}

    def test_with_default_as_datetime(self):
        field = fields.Date(default=datetime(2014, 8, 25))
        assert field.__schema__ == {'type': 'string', 'format': 'date', 'default': '2014-08-25'}

    def test_min(self):
        field = fields.Date(min='1984-06-07')
        assert 'minimum' in field.__schema__
        assert field.__schema__['minimum'] == '1984-06-07'
        assert 'exclusiveMinimum' not in field.__schema__

    def test_min_as_date(self):
        field = fields.Date(min=date(1984, 6, 7))
        assert 'minimum' in field.__schema__
        assert field.__schema__['minimum'] == '1984-06-07'
        assert 'exclusiveMinimum' not in field.__schema__

    def test_min_as_datetime(self):
        field = fields.Date(min=datetime(1984, 6, 7, 1, 2, 0))
        assert 'minimum' in field.__schema__
        assert field.__schema__['minimum'] == '1984-06-07'
        assert 'exclusiveMinimum' not in field.__schema__

    def test_min_exclusive(self):
        field = fields.Date(min='1984-06-07', exclusiveMin=True)
        assert 'minimum' in field.__schema__
        assert field.__schema__['minimum'] == '1984-06-07'
        assert 'exclusiveMinimum' in field.__schema__
        assert field.__schema__['exclusiveMinimum'] is True

    def test_max(self):
        field = fields.Date(max='1984-06-07')
        assert 'maximum' in field.__schema__
        assert field.__schema__['maximum'] == '1984-06-07'
        assert 'exclusiveMaximum' not in field.__schema__

    def test_max_as_date(self):
        field = fields.Date(max=date(1984, 6, 7))
        assert 'maximum' in field.__schema__
        assert field.__schema__['maximum'] == '1984-06-07'
        assert 'exclusiveMaximum' not in field.__schema__

    def test_max_as_datetime(self):
        field = fields.Date(max=datetime(1984, 6, 7, 1, 2, 0))
        assert 'maximum' in field.__schema__
        assert field.__schema__['maximum'] == '1984-06-07'
        assert 'exclusiveMaximum' not in field.__schema__

    def test_max_exclusive(self):
        field = fields.Date(max='1984-06-07', exclusiveMax=True)
        assert 'maximum' in field.__schema__
        assert field.__schema__['maximum'] == '1984-06-07'
        assert 'exclusiveMaximum' in field.__schema__
        assert field.__schema__['exclusiveMaximum'] is True

    @pytest.mark.parametrize('value,expected', [
        (date(2011, 1, 1), '2011-01-01'),
        (datetime(2011, 1, 1), '2011-01-01'),
        (datetime(2011, 1, 1, 23, 59, 59), '2011-01-01'),
        (datetime(2011, 1, 1, 23, 59, 59, 1000), '2011-01-01'),
        (datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc), '2011-01-01'),
        (datetime(2011, 1, 1, 23, 59, 59, 1000, tzinfo=pytz.utc), '2011-01-01'),
        (datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.timezone('CET')), '2011-01-01')
    ])
    def test_value(self, value, expected):
        self.assert_field(fields.Date(), value, expected)

    def test_unsupported_value_format(self):
        self.assert_field_raises(fields.Date(), 'xxx')
Example #34
class DatetimeFieldTest(BaseFieldTestMixin, FieldTestCase):
    field_class = fields.DateTime

    def test_defaults(self):
        field = fields.DateTime()
        assert not field.required
        assert field.__schema__ == {'type': 'string', 'format': 'date-time'}
        self.assert_field(field, None, None)

    def test_with_default(self):
        field = fields.DateTime(default='2014-08-25')
        assert field.__schema__ == {'type': 'string', 'format': 'date-time', 'default': '2014-08-25T00:00:00'}
        self.assert_field(field, None, '2014-08-25T00:00:00')

    def test_with_default_as_datetime(self):
        field = fields.DateTime(default=datetime(2014, 8, 25))
        assert field.__schema__ == {'type': 'string', 'format': 'date-time', 'default': '2014-08-25T00:00:00'}
        self.assert_field(field, None, '2014-08-25T00:00:00')

    def test_with_default_as_date(self):
        field = fields.DateTime(default=date(2014, 8, 25))
        assert field.__schema__ == {'type': 'string', 'format': 'date-time', 'default': '2014-08-25T00:00:00'}
        self.assert_field(field, None, '2014-08-25T00:00:00')

    def test_min(self):
        field = fields.DateTime(min='1984-06-07T00:00:00')
        assert 'minimum' in field.__schema__
        assert field.__schema__['minimum'] == '1984-06-07T00:00:00'
        assert 'exclusiveMinimum' not in field.__schema__

    def test_min_as_date(self):
        field = fields.DateTime(min=date(1984, 6, 7))
        assert 'minimum' in field.__schema__
        assert field.__schema__['minimum'] == '1984-06-07T00:00:00'
        assert 'exclusiveMinimum' not in field.__schema__

    def test_min_as_datetime(self):
        field = fields.DateTime(min=datetime(1984, 6, 7, 1, 2, 0))
        assert 'minimum' in field.__schema__
        assert field.__schema__['minimum'] == '1984-06-07T01:02:00'
        assert 'exclusiveMinimum' not in field.__schema__

    def test_min_exclusive(self):
        field = fields.DateTime(min='1984-06-07T00:00:00', exclusiveMin=True)
        assert 'minimum' in field.__schema__
        assert field.__schema__['minimum'] == '1984-06-07T00:00:00'
        assert 'exclusiveMinimum' in field.__schema__
        assert field.__schema__['exclusiveMinimum'] is True

    def test_max(self):
        field = fields.DateTime(max='1984-06-07T00:00:00')
        assert 'maximum' in field.__schema__
        assert field.__schema__['maximum'] == '1984-06-07T00:00:00'
        assert 'exclusiveMaximum' not in field.__schema__

    def test_max_as_date(self):
        field = fields.DateTime(max=date(1984, 6, 7))
        assert 'maximum' in field.__schema__
        assert field.__schema__['maximum'] == '1984-06-07T00:00:00'
        assert 'exclusiveMaximum' not in field.__schema__

    def test_max_as_datetime(self):
        field = fields.DateTime(max=datetime(1984, 6, 7, 1, 2, 0))
        assert 'maximum' in field.__schema__
        assert field.__schema__['maximum'] == '1984-06-07T01:02:00'
        assert 'exclusiveMaximum' not in field.__schema__

    def test_max_exclusive(self):
        field = fields.DateTime(max='1984-06-07T00:00:00', exclusiveMax=True)
        assert 'maximum' in field.__schema__
        assert field.__schema__['maximum'] == '1984-06-07T00:00:00'
        assert 'exclusiveMaximum' in field.__schema__
        assert field.__schema__['exclusiveMaximum'] is True

    @pytest.mark.parametrize('value,expected', [
        (date(2011, 1, 1), 'Sat, 01 Jan 2011 00:00:00 -0000'),
        (datetime(2011, 1, 1), 'Sat, 01 Jan 2011 00:00:00 -0000'),
        (datetime(2011, 1, 1, 23, 59, 59),
         'Sat, 01 Jan 2011 23:59:59 -0000'),
        (datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc),
         'Sat, 01 Jan 2011 23:59:59 -0000'),
        (datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.timezone('CET')),
         'Sat, 01 Jan 2011 22:59:59 -0000')
    ])
    def test_rfc822_value(self, value, expected):
        self.assert_field(fields.DateTime(dt_format='rfc822'), value, expected)

    @pytest.mark.parametrize('value,expected', [
        (date(2011, 1, 1), '2011-01-01T00:00:00'),
        (datetime(2011, 1, 1), '2011-01-01T00:00:00'),
        (datetime(2011, 1, 1, 23, 59, 59),
         '2011-01-01T23:59:59'),
        (datetime(2011, 1, 1, 23, 59, 59, 1000),
         '2011-01-01T23:59:59.001000'),
        (datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.utc),
         '2011-01-01T23:59:59+00:00'),
        (datetime(2011, 1, 1, 23, 59, 59, 1000, tzinfo=pytz.utc),
         '2011-01-01T23:59:59.001000+00:00'),
        (datetime(2011, 1, 1, 23, 59, 59, tzinfo=pytz.timezone('CET')),
         '2011-01-01T23:59:59+01:00')
    ])
    def test_iso8601_value(self, value, expected):
        self.assert_field(fields.DateTime(dt_format='iso8601'), value, expected)

    def test_unsupported_format(self):
        field = fields.DateTime(dt_format='raw')
        self.assert_field_raises(field, datetime.now())

    def test_unsupported_value_format(self):
        field = fields.DateTime(dt_format='raw')
        self.assert_field_raises(field, 'xxx')
Example #35
    def test_run(self):
        conn_id = connections.ensure_connection(self)

        # run in check mode
        check_job_name = runner.run_check_mode(self, conn_id)

        # verify check  exit codes
        exit_status = menagerie.get_exit_status(conn_id, check_job_name)
        menagerie.verify_check_exit_status(self, exit_status, check_job_name)

        # verify the tap discovered the right streams
        found_catalogs = [
            fc for fc in menagerie.get_catalogs(conn_id)
            if fc['tap_stream_id'] in self.expected_check_streams()
        ]

        self.assertGreaterEqual(
            len(found_catalogs),
            1,
            msg="unable to locate schemas for connection {}".format(conn_id))

        found_catalog_names = set(
            map(lambda c: c['tap_stream_id'], found_catalogs))
        diff = self.expected_check_streams().symmetric_difference(
            found_catalog_names)
        self.assertEqual(
            len(diff),
            0,
            msg="discovered schemas do not match: {}".format(diff))

        # verify that persisted streams have the correct properties
        test_catalog = found_catalogs[0]

        self.assertEqual('postgres_logical_replication_test',
                         test_catalog['stream_name'])

        print("discovered streams are correct")

        additional_md = [{
            "breadcrumb": [],
            "metadata": {
                'replication-method': 'LOG_BASED'
            }
        }]
        # don't select our_text_2
        selected_metadata = connections.select_catalog_and_fields_via_metadata(
            conn_id, test_catalog,
            menagerie.get_annotated_schema(conn_id, test_catalog['stream_id']),
            additional_md, ['our_text_2'])

        # clear state
        menagerie.set_state(conn_id, {})

        sync_job_name = runner.run_sync_mode(self, conn_id)

        # verify tap and target exit codes
        exit_status = menagerie.get_exit_status(conn_id, sync_job_name)
        menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)

        record_count_by_stream = runner.examine_target_output_file(
            self, conn_id, self.expected_sync_streams(), self.expected_pks())

        self.assertEqual(record_count_by_stream,
                         {'postgres_logical_replication_test': 4})
        records_by_stream = runner.get_records_from_target_output()

        table_version = records_by_stream['postgres_logical_replication_test'][
            'table_version']

        self.assertEqual(
            records_by_stream['postgres_logical_replication_test']['messages']
            [0]['action'], 'activate_version')

        self.assertEqual(
            records_by_stream['postgres_logical_replication_test']['messages']
            [1]['action'], 'upsert')

        self.assertEqual(
            records_by_stream['postgres_logical_replication_test']['messages']
            [2]['action'], 'upsert')

        self.assertEqual(
            records_by_stream['postgres_logical_replication_test']['messages']
            [3]['action'], 'upsert')

        self.assertEqual(
            records_by_stream['postgres_logical_replication_test']['messages']
            [4]['action'], 'upsert')

        self.assertEqual(
            records_by_stream['postgres_logical_replication_test']['messages']
            [5]['action'], 'activate_version')

        # verify state and bookmarks
        state = menagerie.get_state(conn_id)

        bookmark = state['bookmarks'][
            'dev-public-postgres_logical_replication_test']
        self.assertIsNone(state['currently_syncing'],
                          msg="expected state's currently_syncing to be None")

        self.assertIsNotNone(bookmark['lsn'],
                             msg="expected bookmark for stream to have an lsn")
        lsn_1 = bookmark['lsn']

        self.assertEqual(bookmark['version'],
                         table_version,
                         msg="expected bookmark for stream to match version")

        #----------------------------------------------------------------------
        # invoke the sync job again after adding a record
        #----------------------------------------------------------------------
        print("inserting a record 5")

        with db_utils.get_test_connection('dev') as conn:
            conn.autocommit = True
            with conn.cursor() as cur:
                #insert fixture data 3
                our_ts = datetime.datetime(1993, 3, 3, 3, 3, 3, 333333)
                nyc_tz = pytz.timezone('America/New_York')
                our_ts_tz = nyc_tz.localize(our_ts)
                our_time = datetime.time(3, 4, 5)
                our_time_tz = our_time.isoformat() + "-04:00"
                our_date = datetime.date(1933, 3, 3)
                my_uuid = str(uuid.uuid1())

                #STRINGS:
                #OUR TS: '1993-03-03 03:03:03.333333'
                #OUR TS TZ: '1993-03-03 08:03:03.333333+00'
                #'OUR TIME': '03:04:05'
                #'OUR TIME TZ': '03:04:05+00'
                self.rec_5 = {
                    'our_varchar': "our_varchar 5",  # str
                    'our_varchar_10': "varchar13",  # str
                    'our_text': "some text 3",  #str
                    'our_text_2': "NOT SELECTED",
                    'our_integer': 96000,  #int
                    'our_smallint': 3,  # int
                    'our_bigint': 3000000,  #int
                    'our_decimal': decimal.Decimal(
                        '1234567890.03'
                    ),  #1234567890.03 / our_decimal is a <class 'float'>
                    quote_ident('OUR TS', cur):
                    our_ts,  # str '1993-03-03 03:03:03.333333'
                    quote_ident('OUR TS TZ', cur):
                    our_ts_tz,  #str '1993-03-03 08:03:03.333333+00'
                    quote_ident('OUR TIME', cur): our_time,  # str '03:04:05'
                    quote_ident('OUR TIME TZ', cur):
                    our_time_tz,  # str '03:04:05+00'
                    quote_ident('OUR DATE', cur):
                    our_date,  #1933-03-03 / OUR DATE is a <class 'str'>
                    'our_double': 3.3,  #3.3 / our_double is a <class 'float'>
                    'our_real': 6.6,  #6.6 / our_real is a <class 'float'>
                    'our_boolean': True,  #boolean
                    'our_bit': '1',  #string
                    'our_json': json.dumps({'secret': 33}),  #string
                    'our_jsonb': json.dumps(['burgers make me hungry']),
                    'our_uuid': my_uuid,  #string
                    'our_store': 'jumps=>"high",name=>"betty"',  #string
                    'our_citext': 'maGICKal 3',
                    'our_cidr': '192.168.102.128/32',
                    'our_inet': '192.168.102.128/32',
                    'our_mac': '08:00:2b:01:02:05',
                    'our_money': '$412.1234'
                }

                insert_record(cur, test_table_name, self.rec_5)

        sync_job_name = runner.run_sync_mode(self, conn_id)

        # verify tap and target exit codes
        exit_status = menagerie.get_exit_status(conn_id, sync_job_name)
        menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)

        record_count_by_stream = runner.examine_target_output_file(
            self, conn_id, self.expected_sync_streams(), self.expected_pks())

        self.assertEqual(record_count_by_stream,
                         {'postgres_logical_replication_test': 1})
        records_by_stream = runner.get_records_from_target_output()

        self.assertTrue(len(records_by_stream) > 0)

        for stream, recs in records_by_stream.items():
            # verify the persisted schema was correct
            self.assertEqual(
                recs['schema'],
                expected_schemas[stream],
                msg=
                "Persisted schema did not match expected schema for stream `{}`."
                .format(stream))

        self.assertEqual(
            1,
            len(records_by_stream['postgres_logical_replication_test']
                ['messages']))
        actual_record_1 = records_by_stream[
            'postgres_logical_replication_test']['messages'][0]['data']

        expected_inserted_record = {
            'our_text': 'some text 3',
            'our_real': decimal.Decimal('6.6'),
            '_sdc_deleted_at': None,
            'our_store': {
                'name': 'betty',
                'jumps': 'high'
            },
            'our_bigint': 3000000,
            'our_varchar': 'our_varchar 5',
            'our_double': decimal.Decimal('3.3'),
            'our_bit': True,
            'our_uuid': self.rec_5['our_uuid'],
            'OUR TS': '1993-03-03T03:03:03.333333+00:00',
            'OUR TS TZ': '1993-03-03T08:03:03.333333+00:00',
            'OUR TIME': '03:04:05',
            'OUR TIME TZ': '03:04:05-04:00',
            'OUR DATE': '1933-03-03T00:00:00+00:00',
            'our_decimal': decimal.Decimal('1234567890.03'),
            'id': 5,
            'our_varchar_10': 'varchar13',
            'our_json': '{"secret": 33}',
            'our_jsonb': self.rec_5['our_jsonb'],
            'our_smallint': 3,
            'our_integer': 96000,
            'our_boolean': True,
            'our_citext': 'maGICKal 3',
            'our_cidr': self.rec_5['our_cidr'],
            'our_inet': '192.168.102.128',
            'our_mac': self.rec_5['our_mac'],
            'our_alignment_enum': None,
            'our_money': '$412.12'
        }
        self.assertEqual(set(actual_record_1.keys()),
                         set(expected_inserted_record.keys()),
                         msg="keys for expected_record_1 are wrong: {}".format(
                             set(actual_record_1.keys()).symmetric_difference(
                                 set(expected_inserted_record.keys()))))

        for k, v in actual_record_1.items():
            self.assertEqual(v,
                             expected_inserted_record[k],
                             msg="{} != {} for key {}".format(
                                 v, expected_inserted_record[k], k))

        self.assertEqual(
            records_by_stream['postgres_logical_replication_test']['messages']
            [0]['action'], 'upsert')
        print("inserted record is correct")

        state = menagerie.get_state(conn_id)
        chicken_bookmark = state['bookmarks'][
            'dev-public-postgres_logical_replication_test']
        self.assertIsNone(state['currently_syncing'],
                          msg="expected state's currently_syncing to be None")

        self.assertIsNotNone(
            chicken_bookmark['lsn'],
            msg=
            "expected bookmark for stream public-postgres_logical_replication_test to have an lsn"
        )
        lsn_2 = chicken_bookmark['lsn']

        self.assertTrue(lsn_2 >= lsn_1)

        #table_version does NOT change
        self.assertEqual(
            chicken_bookmark['version'],
            table_version,
            msg=
            "expected bookmark for stream public-postgres_logical_replication_test to match version"
        )

        #----------------------------------------------------------------------
        # invoke the sync job again after deleting a record
        #----------------------------------------------------------------------
        print("delete row from source db")
        with db_utils.get_test_connection('dev') as conn:
            with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
                cur.execute("DELETE FROM {} WHERE id = 3".format(
                    canonicalized_table_name(test_schema_name, test_table_name,
                                             cur)))

        sync_job_name = runner.run_sync_mode(self, conn_id)

        # verify tap and target exit codes
        exit_status = menagerie.get_exit_status(conn_id, sync_job_name)
        menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)

        record_count_by_stream = runner.examine_target_output_file(
            self, conn_id, self.expected_sync_streams(), self.expected_pks())

        self.assertEqual(record_count_by_stream,
                         {'postgres_logical_replication_test': 2})
        records_by_stream = runner.get_records_from_target_output()

        for stream, recs in records_by_stream.items():
            # verify the persisted schema was correct
            self.assertEqual(
                recs['schema'],
                expected_schemas[stream],
                msg=
                "Persisted schema did not match expected schema for stream `{}`."
                .format(stream))

        # self.assertEqual(len(records_by_stream['postgres_logical_replication_test']['messages']), 1)
        #the 1st message will be the previous insert
        insert_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][0]['data']

        self.assertEqual(set(insert_message.keys()),
                         set(expected_inserted_record.keys()),
                         msg="keys for expected_record_1 are wrong: {}".format(
                             set(insert_message.keys()).symmetric_difference(
                                 set(expected_inserted_record.keys()))))

        for k, v in insert_message.items():
            self.assertEqual(v,
                             expected_inserted_record[k],
                             msg="{} != {} for key {}".format(
                                 v, expected_inserted_record[k], k))

        #the 2nd message will be the delete
        delete_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][1]
        self.assertEqual(delete_message['action'], 'upsert')

        sdc_deleted_at = delete_message['data'].get('_sdc_deleted_at')
        self.assertIsNotNone(sdc_deleted_at)
        self.assertEqual(delete_message['data']['id'], 3)
        print("deleted record is correct")

        state = menagerie.get_state(conn_id)
        bookmark = state['bookmarks'][
            'dev-public-postgres_logical_replication_test']
        self.assertIsNone(state['currently_syncing'],
                          msg="expected state's currently_syncing to be None")

        self.assertIsNotNone(
            bookmark['lsn'],
            msg="expected bookmark for stream public-postgres_logical_replication_test to have an lsn")

        lsn_3 = bookmark['lsn']
        self.assertTrue(lsn_3 >= lsn_2)
        #----------------------------------------------------------------------
        # invoke the sync job again after deleting a record using the 'id IN (SELECT ...)' format
        #----------------------------------------------------------------------
        print("delete row from source db")
        with db_utils.get_test_connection('dev') as conn:
            with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
                cur.execute(
                    "DELETE FROM {} WHERE id IN (SELECT id FROM {} WHERE id=2)"
                    .format(
                        canonicalized_table_name(test_schema_name,
                                                 test_table_name, cur),
                        canonicalized_table_name(test_schema_name,
                                                 test_table_name, cur)))

        sync_job_name = runner.run_sync_mode(self, conn_id)

        # verify tap and target exit codes
        exit_status = menagerie.get_exit_status(conn_id, sync_job_name)
        menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)

        record_count_by_stream = runner.examine_target_output_file(
            self, conn_id, self.expected_sync_streams(), self.expected_pks())

        self.assertEqual(record_count_by_stream,
                         {'postgres_logical_replication_test': 2})
        records_by_stream = runner.get_records_from_target_output()

        for stream, recs in records_by_stream.items():
            # verify the persisted schema was correct
            self.assertEqual(
                recs['schema'],
                expected_schemas[stream],
                msg=
                "Persisted schema did not match expected schema for stream `{}`."
                .format(stream))

        #first record will be the previous delete
        delete_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][0]
        sdc_deleted_at = delete_message['data'].get('_sdc_deleted_at')
        self.assertIsNotNone(sdc_deleted_at)
        self.assertEqual(delete_message['data']['id'], 3)

        #the 2nd message will be the more recent delete
        delete_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][1]
        self.assertEqual(delete_message['action'], 'upsert')

        sdc_deleted_at = delete_message['data'].get('_sdc_deleted_at')
        self.assertIsNotNone(sdc_deleted_at)
        self.assertEqual(delete_message['data']['id'], 2)
        print("deleted record is correct")

        state = menagerie.get_state(conn_id)
        bookmark = state['bookmarks'][
            'dev-public-postgres_logical_replication_test']
        self.assertIsNone(state['currently_syncing'],
                          msg="expected state's currently_syncing to be None")

        self.assertIsNotNone(
            bookmark['lsn'],
            msg="expected bookmark for stream public-postgres_logical_replication_test to have an lsn")

        lsn_4 = bookmark['lsn']
        self.assertTrue(lsn_4 >= lsn_3)

        #table_version does NOT change
        self.assertEqual(
            bookmark['version'],
            table_version,
            msg=
            "expected bookmark for stream postgres_logical_replication_test to match version"
        )
        #----------------------------------------------------------------------
        # invoke the sync job again after deleting a record using the 'id IN (<id>, <id>)' format
        #----------------------------------------------------------------------
        print("delete row from source db")
        with db_utils.get_test_connection('dev') as conn:
            with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
                cur.execute("DELETE FROM {} WHERE id IN (4, 5)".format(
                    canonicalized_table_name(test_schema_name, test_table_name,
                                             cur)))

        sync_job_name = runner.run_sync_mode(self, conn_id)

        # verify tap and target exit codes
        exit_status = menagerie.get_exit_status(conn_id, sync_job_name)
        menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)

        record_count_by_stream = runner.examine_target_output_file(
            self, conn_id, self.expected_sync_streams(), self.expected_pks())

        self.assertEqual(record_count_by_stream,
                         {'postgres_logical_replication_test': 3})
        records_by_stream = runner.get_records_from_target_output()

        for stream, recs in records_by_stream.items():
            # verify the persisted schema was correct
            self.assertEqual(
                recs['schema'],
                expected_schemas[stream],
                msg=
                "Persisted schema did not match expected schema for stream `{}`."
                .format(stream))

        #first record will be the previous delete
        delete_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][0]
        sdc_deleted_at = delete_message['data'].get('_sdc_deleted_at')
        self.assertIsNotNone(sdc_deleted_at)
        self.assertEqual(delete_message['data']['id'], 2)

        #the 2nd message will be the more recent delete
        delete_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][1]
        self.assertEqual(delete_message['action'], 'upsert')

        sdc_deleted_at = delete_message['data'].get('_sdc_deleted_at')
        self.assertIsNotNone(sdc_deleted_at)
        self.assertEqual(delete_message['data']['id'], 4)
        print("deleted record is correct")

        #the 3rd message will be the more recent delete
        delete_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][2]
        self.assertEqual(delete_message['action'], 'upsert')

        sdc_deleted_at = delete_message['data'].get('_sdc_deleted_at')
        self.assertIsNotNone(sdc_deleted_at)
        self.assertEqual(delete_message['data']['id'], 5)
        print("deleted record is correct")

        state = menagerie.get_state(conn_id)
        bookmark = state['bookmarks'][
            'dev-public-postgres_logical_replication_test']
        self.assertIsNone(state['currently_syncing'],
                          msg="expected state's currently_syncing to be None")

        self.assertIsNotNone(
            bookmark['lsn'],
            msg="expected bookmark for stream public-postgres_logical_replication_test to have an lsn")

        lsn_5 = bookmark['lsn']
        self.assertTrue(lsn_5 >= lsn_4)

        #table_version does NOT change
        self.assertEqual(
            bookmark['version'],
            table_version,
            msg=
            "expected bookmark for stream postgres_logical_replication_test to match version"
        )

        #----------------------------------------------------------------------
        # invoke the sync job again after updating a record
        #----------------------------------------------------------------------
        print("updating row from source db")
        with db_utils.get_test_connection('dev') as conn:
            with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
                cur.execute(
                    "UPDATE {} SET our_varchar = 'THIS HAS BEEN UPDATED', our_money = '$56.811', our_decimal = 'NaN', our_real = '+Infinity', our_double = 'NaN' WHERE id = 1"
                    .format(
                        canonicalized_table_name(test_schema_name,
                                                 test_table_name, cur)))

        sync_job_name = runner.run_sync_mode(self, conn_id)
        # verify tap and target exit codes
        exit_status = menagerie.get_exit_status(conn_id, sync_job_name)
        menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)

        record_count_by_stream = runner.examine_target_output_file(
            self, conn_id, self.expected_sync_streams(), self.expected_pks())
        self.assertEqual(record_count_by_stream,
                         {'postgres_logical_replication_test': 3})
        records_by_stream = runner.get_records_from_target_output()
        for stream, recs in records_by_stream.items():
            # verify the persisted schema was correct
            self.assertEqual(
                recs['schema'],
                expected_schemas[stream],
                msg=
                "Persisted schema did not match expected schema for stream `{}`."
                .format(stream))

        self.assertEqual(
            len(records_by_stream['postgres_logical_replication_test']
                ['messages']), 3)
        #first record will be the previous first delete
        delete_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][0]
        sdc_deleted_at = delete_message['data'].get('_sdc_deleted_at')
        self.assertIsNotNone(sdc_deleted_at)
        self.assertEqual(delete_message['data']['id'], 4)

        #second record will be the previous second delete
        delete_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][1]
        sdc_deleted_at = delete_message['data'].get('_sdc_deleted_at')
        self.assertIsNotNone(sdc_deleted_at)
        self.assertEqual(delete_message['data']['id'], 5)

        #third record will be the new update
        update_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][2]
        self.assertEqual(update_message['action'], 'upsert')

        expected_updated_rec = {
            'our_varchar': 'THIS HAS BEEN UPDATED',
            'id': 1,
            'our_varchar_10': "varchar_10",
            'our_text': "some text",
            'our_integer': 44100,
            'our_smallint': 1,
            'our_bigint': 1000000,
            'our_decimal': None,
            'OUR TS': '1997-02-02T02:02:02.722184+00:00',
            'OUR TS TZ': '1997-02-02T07:02:02.722184+00:00',
            'OUR TIME': '12:11:10',
            'OUR TIME TZ': '12:11:10-04:00',
            'OUR DATE': '1998-03-04T00:00:00+00:00',
            'our_double': None,
            'our_real': None,
            'our_boolean': True,
            'our_bit': False,
            'our_json': '{"secret": 55}',
            'our_jsonb': self.rec_1['our_jsonb'],
            'our_uuid': self.rec_1['our_uuid'],
            '_sdc_deleted_at': None,
            'our_store': {
                'name': 'betty',
                'size': 'small'
            },
            'our_citext': 'maGICKal',
            'our_cidr': self.rec_1['our_cidr'],
            'our_inet': self.rec_1['our_inet'],
            'our_mac': self.rec_1['our_mac'],
            'our_alignment_enum': 'bad',
            'our_money': '$56.81'
        }

        self.assertEqual(
            set(update_message['data'].keys()),
            set(expected_updated_rec.keys()),
            msg="keys for expected_record_1 are wrong: {}".format(
                set(update_message['data'].keys()).symmetric_difference(
                    set(expected_updated_rec.keys()))))

        for k, v in update_message['data'].items():
            self.assertEqual(v,
                             expected_updated_rec[k],
                             msg="{} != {} for key {}".format(
                                 v, expected_updated_rec[k], k))

        print("updated record is correct")

        #check state again
        state = menagerie.get_state(conn_id)
        chicken_bookmark = state['bookmarks'][
            'dev-public-postgres_logical_replication_test']
        self.assertIsNone(state['currently_syncing'],
                          msg="expected state's currently_syncing to be None")
        self.assertIsNotNone(
            chicken_bookmark['lsn'],
            msg=
            "expected bookmark for stream public-postgres_logical_replication_test to have an lsn"
        )
        lsn_6 = chicken_bookmark['lsn']
        self.assertTrue(lsn_6 >= lsn_5)

        #table_version does NOT change
        self.assertEqual(
            chicken_bookmark['version'],
            table_version,
            msg=
            "expected bookmark for stream public-postgres_logical_replication_test to match version"
        )

        #----------------------------------------------------------------------
        # invoke the sync job one last time. should only get the PREVIOUS update
        #----------------------------------------------------------------------
        sync_job_name = runner.run_sync_mode(self, conn_id)
        # verify tap and target exit codes
        exit_status = menagerie.get_exit_status(conn_id, sync_job_name)
        menagerie.verify_sync_exit_status(self, exit_status, sync_job_name)

        record_count_by_stream = runner.examine_target_output_file(
            self, conn_id, self.expected_sync_streams(), self.expected_pks())
        #we will get the previous update record again
        self.assertEqual(record_count_by_stream,
                         {'postgres_logical_replication_test': 1})
        records_by_stream = runner.get_records_from_target_output()
        update_message = records_by_stream[
            'postgres_logical_replication_test']['messages'][0]
        self.assertEqual(update_message['action'], 'upsert')

        self.assertEqual(
            set(update_message['data'].keys()),
            set(expected_updated_rec.keys()),
            msg="keys for expected_record_1 are wrong: {}".format(
                set(update_message['data'].keys()).symmetric_difference(
                    set(expected_updated_rec.keys()))))

        for k, v in update_message['data'].items():
            self.assertEqual(v,
                             expected_updated_rec[k],
                             msg="{} != {} for key {}".format(
                                 v, expected_updated_rec[k], k))

        #check state again
        state = menagerie.get_state(conn_id)
        chicken_bookmark = state['bookmarks'][
            'dev-public-postgres_logical_replication_test']
        self.assertIsNone(state['currently_syncing'],
                          msg="expected state's currently_syncing to be None")
        self.assertIsNotNone(
            chicken_bookmark['lsn'],
            msg=
            "expected bookmark for stream public-postgres_logical_replication_test to have an lsn"
        )
        lsn_7 = chicken_bookmark['lsn']
        self.assertTrue(lsn_7 >= lsn_6)

        #table_version does NOT change
        self.assertEqual(
            chicken_bookmark['version'],
            table_version,
            msg=
            "expected bookmark for stream public-postgres_logical_replication_test to match version"
        )
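The expected records above imply several type coercions on the way from Postgres to the target output: hstore text becomes a dict, BIT(1) becomes a boolean, money is rounded to cents, and NaN/Infinity numerics surface as None. A minimal illustrative sketch of two of those mappings follows; parse_hstore_literal and json_safe_number are hypothetical helper names, not the tap's actual implementation (which relies on psycopg2's type adapters):

import decimal
import math

def parse_hstore_literal(raw):
    # Parse a simple hstore literal like 'jumps=>"high",name=>"betty"' into a
    # dict; assumes no embedded commas or escaped quotes.
    result = {}
    for pair in raw.split(','):
        key, _, value = pair.partition('=>')
        result[key.strip().strip('"')] = value.strip().strip('"')
    return result

def json_safe_number(value):
    # JSON has no representation for NaN or +/-Infinity, so map them to None.
    if isinstance(value, decimal.Decimal):
        return None if value.is_nan() or value.is_infinite() else value
    if isinstance(value, float):
        return None if math.isnan(value) or math.isinf(value) else value
    return value

assert parse_hstore_literal('jumps=>"high",name=>"betty"') == {'jumps': 'high', 'name': 'betty'}
assert json_safe_number(decimal.Decimal('NaN')) is None
assert json_safe_number(float('+inf')) is None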
Example #36
    def setUp(self):
        db_utils.ensure_db('dev')
        self.maxDiff = None
        creds = {}
        missing_envs = [
            x for x in [
                os.getenv('TAP_POSTGRES_HOST'),
                os.getenv('TAP_POSTGRES_USER'),
                os.getenv('TAP_POSTGRES_PASSWORD'),
                os.getenv('TAP_POSTGRES_PORT'),
                os.getenv('TAP_POSTGRES_DBNAME')
            ] if x is None
        ]
        if len(missing_envs) != 0:
            #pylint: disable=line-too-long
            raise Exception(
                "set TAP_POSTGRES_HOST, TAP_POSTGRES_DBNAME, TAP_POSTGRES_USER, TAP_POSTGRES_PASSWORD, TAP_POSTGRES_PORT"
            )

        with db_utils.get_test_connection('dev') as conn:
            conn.autocommit = True
            with conn.cursor(cursor_factory=psycopg2.extras.DictCursor) as cur:
                cur.execute(""" SELECT EXISTS (SELECT 1
                                                FROM  pg_replication_slots
                                               WHERE  slot_name = 'stitch') """
                            )

                old_slot = cur.fetchone()[0]
                with db_utils.get_test_connection('dev', True) as conn2:
                    with conn2.cursor() as cur2:
                        if old_slot:
                            cur2.drop_replication_slot("stitch")
                        cur2.create_replication_slot('stitch',
                                                     output_plugin='wal2json')

                cur.execute(
                    """SELECT EXISTS (
                                          SELECT 1
                                          FROM  information_schema.tables
                                          WHERE  table_schema = %s
                                          AND  table_name =   %s);""",
                    [test_schema_name, test_table_name])
                old_table = cur.fetchone()[0]

                if old_table:
                    cur.execute("DROP TABLE {}".format(
                        canonicalized_table_name(test_schema_name,
                                                 test_table_name, cur)))

                cur = conn.cursor()
                cur.execute(
                    """ SELECT installed_version FROM pg_available_extensions WHERE name = 'hstore' """
                )
                if cur.fetchone()[0] is None:
                    cur.execute(""" CREATE EXTENSION hstore; """)

                cur.execute(
                    """ CREATE EXTENSION IF NOT EXISTS citext WITH SCHEMA public;"""
                )
                cur.execute(""" DROP TYPE IF EXISTS ALIGNMENT CASCADE """)
                cur.execute(
                    """ CREATE TYPE ALIGNMENT AS ENUM ('good', 'bad', 'ugly') """
                )

                create_table_sql = """
CREATE TABLE {} (id            SERIAL PRIMARY KEY,
                our_varchar    VARCHAR,
                our_varchar_10 VARCHAR(10),
                our_text       TEXT,
                our_text_2     TEXT,
                our_integer    INTEGER,
                our_smallint   SMALLINT,
                our_bigint     BIGINT,
                our_decimal    NUMERIC(12,2),
                "OUR TS"       TIMESTAMP WITHOUT TIME ZONE,
                "OUR TS TZ"    TIMESTAMP WITH TIME ZONE,
                "OUR TIME"     TIME WITHOUT TIME ZONE,
                "OUR TIME TZ"  TIME WITH TIME ZONE,
                "OUR DATE"     DATE,
                our_double     DOUBLE PRECISION,
                our_real       REAL,
                our_boolean    BOOLEAN,
                our_bit        BIT(1),
                our_json       JSON,
                our_jsonb      JSONB,
                our_uuid       UUID,
                our_store      HSTORE,
                our_citext     CITEXT,
                our_cidr       cidr,
                our_inet       inet,
                our_mac            macaddr,
                our_alignment_enum ALIGNMENT,
                our_money          money)
                """.format(
                    canonicalized_table_name(test_schema_name, test_table_name,
                                             cur))

                cur.execute(create_table_sql)

                #insert fixture data 1
                our_ts = datetime.datetime(1997, 2, 2, 2, 2, 2, 722184)
                nyc_tz = pytz.timezone('America/New_York')
                our_ts_tz = nyc_tz.localize(our_ts)
                our_time = datetime.time(12, 11, 10)
                our_time_tz = our_time.isoformat() + "-04:00"
                our_date = datetime.date(1998, 3, 4)
                my_uuid = str(uuid.uuid1())

                self.rec_1 = {
                    'our_varchar': "our_varchar",
                    'our_varchar_10': "varchar_10",
                    'our_text': "some text",
                    'our_text_2': "NOT SELECTED",
                    'our_integer': 44100,
                    'our_smallint': 1,
                    'our_bigint': 1000000,
                    'our_decimal': decimal.Decimal('1234567890.01'),
                    quote_ident('OUR TS', cur): our_ts,
                    quote_ident('OUR TS TZ', cur): our_ts_tz,
                    quote_ident('OUR TIME', cur): our_time,
                    quote_ident('OUR TIME TZ', cur): our_time_tz,
                    quote_ident('OUR DATE', cur): our_date,
                    'our_double': 1.1,
                    'our_real': 1.2,
                    'our_boolean': True,
                    'our_bit': '0',
                    'our_json': json.dumps({'secret': 55}),
                    'our_jsonb': json.dumps(['burgers are good']),
                    'our_uuid': my_uuid,
                    'our_store': 'size=>"small",name=>"betty"',
                    'our_citext': 'maGICKal',
                    'our_cidr': '192.168.100.128/25',
                    'our_inet': '192.168.100.128/24',
                    'our_mac': '08:00:2b:01:02:03',
                    'our_alignment_enum': 'bad'
                }

                insert_record(cur, test_table_name, self.rec_1)

                #insert fixture data 2
                our_ts = datetime.datetime(1987, 3, 3, 3, 3, 3, 733184)
                nyc_tz = pytz.timezone('America/New_York')
                our_ts_tz = nyc_tz.localize(our_ts)
                our_time = datetime.time(10, 9, 8)
                our_time_tz = our_time.isoformat() + "-04:00"
                our_date = datetime.date(1964, 7, 1)
                my_uuid = str(uuid.uuid1())

                self.rec_2 = {
                    'our_varchar': "our_varchar 2",
                    'our_varchar_10': "varchar_10",
                    'our_text': "some text 2",
                    'our_text_2': "NOT SELECTED",
                    'our_integer': 44101,
                    'our_smallint': 2,
                    'our_bigint': 1000001,
                    'our_decimal': decimal.Decimal('9876543210.02'),
                    quote_ident('OUR TS', cur): our_ts,
                    quote_ident('OUR TS TZ', cur): our_ts_tz,
                    quote_ident('OUR TIME', cur): our_time,
                    quote_ident('OUR TIME TZ', cur): our_time_tz,
                    quote_ident('OUR DATE', cur): our_date,
                    'our_double': 1.1,
                    'our_real': 1.2,
                    'our_boolean': True,
                    'our_bit': '1',
                    'our_json': json.dumps({'nymn': 77}),
                    'our_jsonb': json.dumps({'burgers': 'good++'}),
                    'our_uuid': my_uuid,
                    'our_store': 'dances=>"floor",name=>"betty"',
                    'our_citext': 'maGICKal 2',
                    'our_cidr': '192.168.101.128/25',
                    'our_inet': '192.168.101.128/24',
                    'our_mac': '08:00:2b:01:02:04',
                }

                insert_record(cur, test_table_name, self.rec_2)

                #insert fixture data 3
                our_ts = datetime.datetime(1997, 2, 2, 2, 2, 2, 722184)
                nyc_tz = pytz.timezone('America/New_York')
                our_ts_tz = nyc_tz.localize(our_ts)
                our_time = datetime.time(12, 11, 10)
                our_time_tz = our_time.isoformat() + "-04:00"
                our_date = datetime.date(1998, 3, 4)
                my_uuid = str(uuid.uuid1())

                self.rec_3 = {
                    'our_varchar': "our_varchar 3",
                    'our_varchar_10': "varchar_10",
                    'our_text': "some text",
                    'our_text_2': "NOT SELECTED",
                    'our_integer': 44100,
                    'our_smallint': 1,
                    'our_bigint': 1000000,
                    'our_decimal': decimal.Decimal('1234567890.01'),
                    quote_ident('OUR TS', cur): our_ts,
                    quote_ident('OUR TS TZ', cur): our_ts_tz,
                    quote_ident('OUR TIME', cur): our_time,
                    quote_ident('OUR TIME TZ', cur): our_time_tz,
                    quote_ident('OUR DATE', cur): our_date,
                    'our_double': 1.1,
                    'our_real': 1.2,
                    'our_boolean': True,
                    'our_bit': '0',
                    'our_json': json.dumps({'secret': 55}),
                    'our_jsonb': json.dumps(['burgers are good']),
                    'our_uuid': my_uuid,
                    'our_store': 'size=>"small",name=>"betty"',
                    'our_citext': 'maGICKal',
                    'our_cidr': '192.168.100.128/25',
                    'our_inet': '192.168.100.128/24',
                    'our_mac': '08:00:2b:01:02:03',
                    'our_alignment_enum': 'bad'
                }

                insert_record(cur, test_table_name, self.rec_3)

                #insert fixture data 4
                our_ts = datetime.datetime(1987, 3, 3, 3, 3, 3, 733184)
                nyc_tz = pytz.timezone('America/New_York')
                our_ts_tz = nyc_tz.localize(our_ts)
                our_time = datetime.time(10, 9, 8)
                our_time_tz = our_time.isoformat() + "-04:00"
                our_date = datetime.date(1964, 7, 1)
                my_uuid = str(uuid.uuid1())

                self.rec_4 = {
                    'our_varchar': "our_varchar 4",
                    'our_varchar_10': "varchar_10",
                    'our_text': "some text 2",
                    'our_text_2': "NOT SELECTED",
                    'our_integer': 44101,
                    'our_smallint': 2,
                    'our_bigint': 1000001,
                    'our_decimal': decimal.Decimal('9876543210.02'),
                    quote_ident('OUR TS', cur): our_ts,
                    quote_ident('OUR TS TZ', cur): our_ts_tz,
                    quote_ident('OUR TIME', cur): our_time,
                    quote_ident('OUR TIME TZ', cur): our_time_tz,
                    quote_ident('OUR DATE', cur): our_date,
                    'our_double': 1.1,
                    'our_real': 1.2,
                    'our_boolean': True,
                    'our_bit': '1',
                    'our_json': json.dumps({'nymn': 77}),
                    'our_jsonb': json.dumps({'burgers': 'good++'}),
                    'our_uuid': my_uuid,
                    'our_store': 'dances=>"floor",name=>"betty"',
                    'our_citext': 'maGICKal 2',
                    'our_cidr': '192.168.101.128/25',
                    'our_inet': '192.168.101.128/24',
                    'our_mac': '08:00:2b:01:02:04',
                }

                insert_record(cur, test_table_name, self.rec_4)
Example #37
    def create_efaktur(self):
        context = dict(self._context or {})
        data = {}
        active_ids = self.env.context.get('active_ids')
        delimiter = ','
        data['form'] = active_ids
        user = self.env.user
        tz = pytz.timezone(user.tz) if user.tz else pytz.utc
        now = pytz.utc.localize(datetime.now()).astimezone(tz)
        download_time = datetime.strftime(now, "%d-%m-%Y_%H:%M")

        if self.export == "ppn":
            filename = "faktur_pajak_" + download_time + ".csv"

            output_head = '"FM"' + delimiter + '"KD_JENIS_TRANSAKSI"' + delimiter + '"FG_PENGGANTI"' + delimiter + '"NOMOR_FAKTUR"' + delimiter + '"MASA_PAJAK"' + delimiter + '"TAHUN_PAJAK"' + delimiter + "TANGGAL_FAKTUR" + delimiter
            output_head += '"NPWP"' + delimiter + '"NAMA"' + delimiter + '"ALAMAT_LENGKAP"' + delimiter + '"JUMLAH_DPP"' + delimiter + '"JUMLAH_PPN"' + delimiter + '"JUMLAH_PPNBM"' + delimiter + '"IS_CREDITABLE"' + '\n'

            for p in self.env['ka_account.payment'].browse(data['form']):
                if p.efaktur_url is False:
                    raise UserError(_("Please Fill E-Faktur URL"))
                else:
                    barcode = urllib2.urlopen(p.efaktur_url).read()
                    if not barcode:
                        return

                    kdJenisTransaksi = self.find_between(
                        barcode, "<kdJenisTransaksi>", "</kdJenisTransaksi>")
                    fgPengganti = self.find_between(barcode, "<fgPengganti>",
                                                    "</fgPengganti>")
                    nomorFaktur = self.find_between(barcode, "<nomorFaktur>",
                                                    "</nomorFaktur>")
                    tanggalFaktur = datetime.strftime(
                        datetime.strptime(
                            self.find_between(barcode, "<tanggalFaktur>",
                                              "</tanggalFaktur>"), "%d/%m/%Y"),
                        "%Y-%m-%d")
                    npwpPenjual = self.find_between(barcode, "<npwpPenjual>",
                                                    "</npwpPenjual>")
                    namaPenjual = self.find_between(barcode, "<namaPenjual>",
                                                    "</namaPenjual>")
                    alamatPenjual = self.find_between(barcode,
                                                      "<alamatPenjual>",
                                                      "</alamatPenjual>")
                    jumlahDpp = self.find_between(barcode, "<jumlahDpp>",
                                                  "</jumlahDpp>")
                    jumlahPpn = self.find_between(barcode, "<jumlahPpn>",
                                                  "</jumlahPpn>")
                    jumlahPpnBm = self.find_between(barcode, "<jumlahPpnBm>",
                                                    "</jumlahPpnBm>")

                    output_head += '"FM"' + delimiter + '"' + kdJenisTransaksi + '"' + delimiter + '"' + fgPengganti + '"' + delimiter + '"' + nomorFaktur + '"' + delimiter
                    output_head += '"' + datetime.strftime(
                        now, "%m"
                    ) + '"' + delimiter + '"' + datetime.strftime(
                        now, "%Y"
                    ) + '"' + delimiter + '"' + tanggalFaktur + '"' + delimiter + '"' + npwpPenjual + '"' + delimiter
                    output_head += '"' + namaPenjual + '"' + delimiter + '"' + alamatPenjual + '"' + delimiter + '"' + str(
                        jumlahDpp) + '"' + delimiter + '"' + str(
                            jumlahPpn) + '"' + delimiter
                    output_head += '"' + jumlahPpnBm + '"' + delimiter + '"1"' + '\n'
        elif self.export == "pph":
            filename = "bukti_tagihan_pph_" + download_time + ".csv"

            output_head = ''

            for p in self.env['ka_account.payment'].browse(data['form']):
                if p.state != 'paid':
                    raise UserError(_("Status Tagihan Belum Dibayar!"))
                else:
                    tgl_bayar = p.date_paid[-2:] + "/" + p.date_paid[
                        5:-3] + "/" + p.date_paid[:4]
                    output_head += '"F113304"' + delimiter + '"' + p.date_paid[
                        5:
                        -3] + '"' + delimiter + '"' + p.date_paid[:4] + '"' + delimiter + '"0"' + delimiter + '"' + str(
                            p.no_npwp
                        ) + '"' + delimiter + '"' + p.partner_id.name + '"' + delimiter
                    output_head += '"' + p.partner_id.street + '"' + delimiter + '"' + str(
                        self.no_urut
                    ) + '"' + delimiter + '"' + tgl_bayar + '"' + delimiter
                    output_head += '"0"' + delimiter + '"0,25"' + delimiter + '"0"' + delimiter
                    output_head += '"0"' + delimiter + '"0,1"' + delimiter + '"0"' + delimiter
                    output_head += '"0"' + delimiter + '"0,3"' + delimiter + '"0"' + delimiter
                    output_head += '"0"' + delimiter + '"0,45"' + delimiter + '"0"' + delimiter
                    output_head += '"Farmasi"' + delimiter + '"0"' + delimiter + '"0,25"' + delimiter + '"0"' + delimiter
                    output_head += '""' + delimiter
                    output_head += '"0"' + delimiter + '"0"' + delimiter + '"0"' + delimiter
                    output_head += '""' + delimiter
                    output_head += '"0"' + delimiter + '"0"' + delimiter + '"0"' + delimiter
                    output_head += '"PERKEBUNAN"' + delimiter + '"' + str(
                        int(p.amount_dpp)
                    ) + '"' + delimiter + '"0,25"' + delimiter + '"' + str(
                        int(p.amount_pph)) + '"' + delimiter
                    output_head += '""' + delimiter
                    output_head += '"0"' + delimiter + '"0,25"' + delimiter + '"0"' + delimiter
                    output_head += '""' + delimiter
                    output_head += '"0"' + delimiter + '"0"' + delimiter + '"0"' + delimiter
                    output_head += '""' + delimiter
                    output_head += '"0"' + delimiter + '"0"' + delimiter + '"0"' + delimiter
                    output_head += '"' + str(int(
                        p.amount_dpp)) + '"' + delimiter + '"' + str(
                            int(p.amount_pph)) + '"' + '\n'
                    self.no_urut += 1

        my_utf8 = output_head.encode("utf-8")
        out = base64.b64encode(my_utf8)
        self.write({'state_x': 'get', 'data_x': out, 'name': filename})
        ir_model_data = self.env['ir.model.data']
        form_res = ir_model_data.get_object_reference(
            'ka_account', 'ka_account_payment_export_faktur_pajak_form'
        )  #module name and wizard form XML id
        form_id = form_res and form_res[1] or False
        return {
            'name': _('Download csv'),
            'view_type': 'form',
            'view_mode': 'form',
            'res_model':
            'ka_account.payment.export.faktur.pajak.wiz',  #model wizard
            'res_id': self.id,  #id wizard
            'view_id': False,
            'views': [(form_id, 'form')],
            'type': 'ir.actions.act_window',
            'target': 'current'
        }
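find_between is referenced throughout the method above but not included in the snippet. A common implementation it appears to assume (hypothetical, shown only so the XML-extraction calls read clearly):

def find_between(s, first, last):
    # Return the substring of s between the first occurrence of `first`
    # and the next occurrence of `last`; '' if either marker is missing.
    try:
        start = s.index(first) + len(first)
        end = s.index(last, start)
        return s[start:end]
    except ValueError:
        return ''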
Example #38
def _append_tz(t):
    tz = pytz.timezone(settings.TIME_ZONE)
    return tz.localize(t)
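A minimal usage sketch for the helper above, assuming settings.TIME_ZONE names a valid zone such as 'Europe/Warsaw'; the stand-in settings class is illustrative, not the Django object:

import datetime
import pytz

class settings:  # stand-in for django.conf.settings in this sketch
    TIME_ZONE = 'Europe/Warsaw'

def _append_tz(t):
    tz = pytz.timezone(settings.TIME_ZONE)
    return tz.localize(t)

aware = _append_tz(datetime.datetime(2020, 6, 1, 12, 0))
print(aware.isoformat())  # 2020-06-01T12:00:00+02:00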
Example #39
File: base.py  Project: tklos/collect
USE_L10N = True

USE_TZ = True


# Messages
from django.contrib import messages
MESSAGE_TAGS = {
    messages.ERROR: 'danger',
}


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/

STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, '../public/static/')
STATICFILES_DIRS = [
    os.path.join(BASE_DIR, 'static'),
]


# Timezone
LOCAL_TIMEZONE = pytz.timezone('Europe/Warsaw')


# Pagination
MEASUREMENTS_PAGINATE_BY = 30
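A small usage sketch for the LOCAL_TIMEZONE setting above; the consuming code is an assumption, not part of this settings file:

import datetime
import pytz

LOCAL_TIMEZONE = pytz.timezone('Europe/Warsaw')

# e.g. render a stored UTC measurement timestamp in local time
utc_dt = datetime.datetime(2021, 1, 10, 8, 30, tzinfo=pytz.utc)
print(utc_dt.astimezone(LOCAL_TIMEZONE).strftime('%Y-%m-%d %H:%M'))  # 2021-01-10 09:30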

Example #40
# -*- coding: utf-8 -*-
import scraperwiki
import lxml.html
#import requests
from datetime import date, datetime
from dateutil.relativedelta import relativedelta
from pytz import timezone
from collections import defaultdict
from lxml.html import fromstring,tostring
import re

# http://www.kettering.gov.uk/site/scripts/planning_list.php?fromDay=05&fromMonth=04&fromYear=2012&toDay=12&toMonth=04&toYear=2012&submitDateWeekly=Go

DATE_FORMAT = "%d/%m/%Y"
tz = timezone('Europe/London')
rfc3339_date = "%Y-%m-%dT%H:%M:%SZ"
minus_60_days = (date.today() + relativedelta( days = -60 )).strftime(DATE_FORMAT)
test_date = '01/01/1998'

today_date = date.today().strftime(DATE_FORMAT)
is_date_regex = re.compile("^(3[01]|[12][0-9]|0[1-9])/(1[0-2]|0[1-9])/[0-9]{4}$") # match dd/mm/yyyy, requiring leading zeros (http://answers.oreilly.com/topic/226-how-to-validate-traditional-date-formats-with-regular-expressions/)
replacements = {' ':'_','(':'',')':'','º'.decode('utf-8'):''}


def replace_all(text, dic):
    for i, j in dic.iteritems():
        text = text.replace(i, j)
    return text

def table_to_list(table):
    dct = table_to_2d_dict(table)
Example #41
def test_datetime_with_named_time_zone_output(cypher_eval):
    value = cypher_eval("RETURN datetime('1976-06-13T12:34:56.789012345"
                        "[Europe/London]')")
    assert isinstance(value, DateTime)
    dt = DateTime(1976, 6, 13, 12, 34, 56.789012345)
    assert value == timezone("Europe/London").localize(dt)
Example #42
File: fe.py  Project: firth/radcomp
"""
 Proctor the reprocessing of NEXRAD data provided to me by NCDC
"""

import datetime
import subprocess
import pytz

sts = datetime.datetime(2003, 1, 1, 0, 0)
sts = sts.replace(tzinfo=pytz.timezone("UTC"))
ets = datetime.datetime(2003, 2, 1, 0, 0)
ets = ets.replace(tzinfo=pytz.timezone("UTC"))
interval = datetime.timedelta(minutes=5)

now = sts
while now < ets:
    print now
    if now.hour == 0 and now.minute == 0:
        # Extract tomorrow
        cmd = "python extract.py %s" % (now.strftime("%Y %m %d"), )
        subprocess.call(cmd, shell=True)

    cmd = "csh n0r.csh %s n0r 1" % (now.strftime("%Y %m %d %H %M"), )
    subprocess.call(cmd, shell=True)
    if now.hour == 23 and now.minute == 55:
        subprocess.call("rm -rf /tmp/nexrad/NIDS/*", shell=True)
        subprocess.call("rm -rf /tmp/nexrad3-herz", shell=True)
        # reextract today :(
        cmd = "python extract.py %s" % (now.strftime("%Y %m %d"), )
        subprocess.call(cmd, shell=True)
    now += interval
Example #43
def widgets_for_event_qs(request, qs, user, nmax):
    widgets = []

    # Get set of events where we have the permission to show the # of orders
    events_with_orders = set(qs.filter(
        Q(organizer_id__in=user.teams.filter(all_events=True, can_view_orders=True).values_list('organizer', flat=True))
        | Q(id__in=user.teams.filter(can_view_orders=True).values_list('limit_events__id', flat=True))
    ).values_list('id', flat=True))

    tpl = """
        <a href="{url}" class="event">
            <div class="name">{event}</div>
            <div class="daterange">{daterange}</div>
            <div class="times">{times}</div>
        </a>
        <div class="bottomrow">
            {orders}
            <a href="{url}" class="status-{statusclass}">
                {status}
            </a>
        </div>
    """

    events = qs.prefetch_related(
        '_settings_objects', 'organizer___settings_objects'
    ).select_related('organizer')[:nmax]
    for event in events:
        tzname = event.cache.get_or_set('timezone', lambda: event.settings.timezone)
        tz = pytz.timezone(tzname)
        if event.has_subevents:
            if event.min_from is None:
                dr = pgettext("subevent", "No dates")
            else:
                dr = daterange(
                    (event.min_from).astimezone(tz),
                    (event.max_fromto or event.max_to or event.max_from).astimezone(tz)
                )
        else:
            if event.date_to:
                dr = daterange(event.date_from.astimezone(tz), event.date_to.astimezone(tz))
            else:
                dr = date_format(event.date_from.astimezone(tz), "DATE_FORMAT")

        if event.has_ra:
            status = ('danger', _('Action required'))
        elif not event.live:
            status = ('warning', _('Shop disabled'))
        elif event.presale_has_ended:
            status = ('default', _('Sale over'))
        elif not event.presale_is_running:
            status = ('default', _('Soon'))
        else:
            status = ('success', _('On sale'))

        widgets.append({
            'content': tpl.format(
                event=escape(event.name),
                times=_('Event series') if event.has_subevents else (
                    ((date_format(event.date_admission.astimezone(tz), 'TIME_FORMAT') + ' / ')
                     if event.date_admission and event.date_admission != event.date_from else '')
                    + (date_format(event.date_from.astimezone(tz), 'TIME_FORMAT') if event.date_from else '')
                ) + (
                    ' <span class="fa fa-globe text-muted" data-toggle="tooltip" title="{}"></span>'.format(tzname)
                    if tzname != request.timezone and not event.has_subevents else ''
                ),
                url=reverse('control:event.index', kwargs={
                    'event': event.slug,
                    'organizer': event.organizer.slug
                }),
                orders=(
                    '<a href="{orders_url}" class="orders">{orders_text}</a>'.format(
                        orders_url=reverse('control:event.orders', kwargs={
                            'event': event.slug,
                            'organizer': event.organizer.slug
                        }),
                        orders_text=ungettext('{num} order', '{num} orders', event.order_count or 0).format(
                            num=event.order_count or 0
                        )
                    ) if user.has_active_staff_session(request.session.session_key) or event.pk in events_with_orders else ''
                ),
                daterange=dr,
                status=status[1],
                statusclass=status[0],
            ),
            'display_size': 'small',
            'priority': 100,
            'container_class': 'widget-container widget-container-event',
        })
        """
            {% if not e.live %}
                <span class="label label-danger">{% trans "Shop disabled" %}</span>
            {% elif e.presale_has_ended %}
                <span class="label label-warning">{% trans "Presale over" %}</span>
            {% elif not e.presale_is_running %}
                <span class="label label-warning">{% trans "Presale not started" %}</span>
            {% else %}
                <span class="label label-success">{% trans "On sale" %}</span>
            {% endif %}
        """
    return widgets
Example #44
def load_bulk_tz_offset(data_dir: str, chat_id_set: set) -> dict:
    '''
	Function returns a chat_id:(tz_offset_flt, tz_offset_str) dictionary, so the tz offset
	calls can be done in a single sql select.
	'''
    # db conn
    conn = sqlite3.connect(os.path.join(data_dir, 'launchbot-data.db'))
    conn.row_factory = sqlite3.Row
    cursor = conn.cursor()

    # verify we actually have chats to pull data for
    if len(chat_id_set) == 0:
        conn.close()
        return {}
    ''' Construct the sql select string: this isn't ideal, but it's the only way I've
    gotten the sql IN () select statement to work in Python. Oh well. '''
    chats_str, set_len = '', len(chat_id_set)
    for enum, chat_id in enumerate(chat_id_set):
        chats_str += f"'{chat_id}'"
        if enum < set_len - 1:
            chats_str += ','

    # execute our fancy select
    try:
        cursor.execute(
            f'SELECT chat, time_zone, time_zone_str FROM chats WHERE chat IN ({chats_str})'
        )
    except sqlite3.OperationalError:
        return {}

    query_return = cursor.fetchall()
    conn.close()

    if len(query_return) == 0:
        raise Exception(
            'Error pulling time zone information from chats database!')

    tz_offset_dict = {}
    for chat_row in query_return:
        # if we found the time zone in string format, it should be parsed in its own way
        time_zone_string_found = bool(chat_row['time_zone_str'] is not None)

        if not time_zone_string_found:
            # if there's no basic time zone found either, use 0 as chat's UTC offset
            if chat_row['time_zone'] is None:
                tz_offset_dict[chat_row['chat']] = (float(0), '+0')
                continue

            # if we arrived here, simply use the regular time zone UTC offset
            tz_offset = float(chat_row['time_zone'])

            # generate the time zone string; truncate toward zero so e.g. -5.5 -> '-5:30'
            mins, hours = int(60 * (abs(tz_offset) % 1)), int(tz_offset)
            prefix = '+' if hours >= 0 else ''
            tz_str = f'{prefix}{hours}' if mins == 0 else f'{prefix}{hours}:{mins}'

            # store in the dict
            tz_offset_dict[chat_row['chat']] = (tz_offset, tz_str)
            continue

        # chat has a time_zone_string: parse with pytz
        timezone = pytz.timezone(chat_row['time_zone_str'])
        local_now = datetime.datetime.now(timezone)
        utc_offset = local_now.utcoffset().total_seconds() / 3600

        if utc_offset % 1 == 0:
            utc_offset = int(utc_offset)
            utc_offset_str = f'+{utc_offset}' if utc_offset >= 0 else f'{utc_offset}'
        else:
            utc_offset_hours = int(utc_offset)
            utc_offset_minutes = int((abs(utc_offset) % 1) * 60)
            utc_offset_str = f'{utc_offset_hours}:{utc_offset_minutes}'
            utc_offset_str = f'+{utc_offset_str}' if utc_offset >= 0 else f'{utc_offset_str}'

        # store in the dict
        tz_offset_dict[chat_row['chat']] = (utc_offset, utc_offset_str)

    return tz_offset_dict
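A hedged usage sketch for the function above, assuming it is in scope and that a launchbot-data.db with a chats table exists under data_dir (the path and chat id are placeholders):

offsets = load_bulk_tz_offset(data_dir='data', chat_id_set={'123456789'})
for chat_id, (offset_hours, offset_str) in offsets.items():
    print(chat_id, offset_hours, offset_str)  # e.g. 123456789 5.5 +5:30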
Example #45
def parse_events(content, start=None, end=None, default_span=timedelta(days=7)):
    """
    Query the events occurring in a given time range.

    :param content: iCal URL/file content as String
    :param start: start date for search, default today
    :param end: end date for search
    :param default_span: default query length (one week)
    :return: events as list
    """
    if not start:
        start = now()

    if not end:
        end = start + default_span

    if not content:
        raise ValueError("Content is invalid!")

    calendar = Calendar.from_ical(content)

    # Keep track of the timezones defined in the calendar
    timezones = {}

    # Parse non standard timezone name
    if "X-WR-TIMEZONE" in calendar:
        x_wr_timezone = str(calendar["X-WR-TIMEZONE"])
        timezones[x_wr_timezone] = get_timezone(x_wr_timezone)

    for c in calendar.walk("VTIMEZONE"):
        name = str(c["TZID"])
        try:
            timezones[name] = c.to_tz()
        except IndexError:
            # This happens if the VTIMEZONE doesn't
            # contain start/end times for daylight
            # saving time. Get the system pytz
            # value from the name as a fallback.
            timezones[name] = timezone(name)

    # If there's exactly one timezone in the file,
    # assume it applies globally, otherwise UTC
    if len(timezones) == 1:
        cal_tz = get_timezone(list(timezones)[0])
    else:
        cal_tz = UTC

    start = normalize(start, cal_tz)
    end = normalize(end, cal_tz)

    found = []
    recurrence_ids = []

    # Skip dates that are stored as exceptions.
    exceptions = {}
    for component in calendar.walk():
        if component.name == "VEVENT":
            e = create_event(component, cal_tz)

            if "RECURRENCE-ID" in component:
                recurrence_ids.append(
                    (e.uid, component["RECURRENCE-ID"].dt, e.sequence)
                )

            if "EXDATE" in component:
                # Deal with the fact that sometimes it's a list and
                # sometimes it's a singleton
                exlist = []
                if isinstance(component["EXDATE"], list):
                    exlist = component["EXDATE"]
                else:
                    exlist.append(component["EXDATE"])
                for ex in exlist:
                    exdate = ex.to_ical().decode("UTF-8")
                    exceptions[exdate[0:8]] = exdate

            # Attempt to work out what timezone is used for the start
            # and end times. If the timezone is defined in the calendar,
            # use it; otherwise, attempt to load the rules from pytz.
            start_tz = None
            end_tz = None

            if e.start.tzinfo != UTC:
                if str(e.start.tzinfo) in timezones:
                    start_tz = timezones[str(e.start.tzinfo)]
                else:
                    start_tz = e.start.tzinfo

            if e.end.tzinfo != UTC:
                if str(e.end.tzinfo) in timezones:
                    end_tz = timezones[str(e.end.tzinfo)]
                else:
                    end_tz = e.end.tzinfo

            # If we've been passed or constructed start/end values
            # that are timezone naive, but the actual appointment
            # start and end times are in a timezone, convert start
            # and end to have a timezone. Otherwise, python will
            # raise an exception for comparing timezone naive
            # and offset-aware values.
            if e.start.tzinfo and not start.tzinfo:
                start = normalize(start, e.start.tzinfo)
            if e.start.tzinfo and not end.tzinfo:
                end = normalize(end, e.start.tzinfo)

            duration = e.end - e.start
            if e.recurring:
                # Unfold recurring events according to their rrule
                rule = parse_rrule(component, cal_tz)
                [after] = adjust_timezone(component, [start - duration], start_tz)
                [end] = adjust_timezone(component, [end], start_tz)

                for dt in rule.between(after, end, inc=True):
                    if start_tz is None:
                        # Shrug. If we couldn't work out the timezone, it is what it is.
                        ecopy = e.copy_to(dt, e.uid)
                    else:
                        # Recompute the start time in the current timezone *on* the
                        # date of *this* occurrence. This handles the case where the
                        # recurrence has crossed over the daylight savings time boundary.
                        naive = datetime(
                            dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second
                        )
                        dtstart = normalize(naive, tz=start_tz)

                        ecopy = e.copy_to(dtstart, e.uid)

                        # We're effectively looping over the start time; we might need
                        # to adjust the end time too, but we don't have its recurred value.
                        # Make sure it's adjusted by constructing it from the meeting
                        # duration. Pro: it'll be right. Con: if it was in a different
                        # timezone from the start time, we'll have lost that.
                        ecopy.end = dtstart + duration

                    exdate = "%04d%02d%02d" % (
                        ecopy.start.year,
                        ecopy.start.month,
                        ecopy.start.day,
                    )
                    if exdate not in exceptions:
                        found.append(ecopy)
            elif e.end >= start and e.start <= end:
                exdate = "%04d%02d%02d" % (e.start.year, e.start.month, e.start.day)
                if exdate not in exceptions:
                    found.append(e)
    # Filter out all events that are moved as indicated by the recurrence-id prop
    return [
        event
        for event in found
        if event.sequence is None
        or (event.uid, event.start, event.sequence) not in recurrence_ids
    ]
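A usage sketch for parse_events, assuming content holds the raw bytes of an .ics file (placeholder path) and that the Event objects built by the surrounding module's create_event expose start, end and summary:

from datetime import datetime

with open('calendar.ics', 'rb') as f:  # placeholder path
    content = f.read()

for ev in parse_events(content,
                       start=datetime(2021, 1, 1),
                       end=datetime(2021, 1, 8)):
    print(ev.start, ev.end, ev.summary)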
Example #46
 def unix_to_localtime(cls, unixtime, timezone):
     tt = time.gmtime(unixtime)
     dt = pytz.utc.localize(datetime(tt[0], tt[1], tt[2], tt[3], tt[4], tt[5]))
     return dt.astimezone(pytz.timezone(timezone))
Example #47
 def localtime_to_unix(cls, year, month, day, hour, minute, second, timezone):
     dt = pytz.timezone(timezone).localize(datetime(year, month, day, hour, minute, second)).astimezone(pytz.utc)
     return calendar.timegm((dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second))
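A round-trip sketch mirroring the two classmethods above with plain pytz/calendar calls (illustrative only; not the original class):

import calendar
import time
from datetime import datetime

import pytz

# local -> unix
dt = pytz.timezone('Europe/London').localize(
    datetime(2021, 6, 1, 12, 0, 0)).astimezone(pytz.utc)
ts = calendar.timegm((dt.year, dt.month, dt.day, dt.hour, dt.minute, dt.second))

# unix -> local
tt = time.gmtime(ts)
back = pytz.utc.localize(datetime(*tt[:6])).astimezone(pytz.timezone('Europe/London'))
assert back.hour == 12 and back.minute == 0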
Example #48
load_dotenv()  # All environment variables are stored in '.env' file
TOKEN = os.getenv('DISCORD_TOKEN')
DEBUG = os.getenv('DEBUG')
CHANNEL_ADMIN = int(os.getenv('CHANNEL_ADMIN'))
ADMIN_ROLE = os.getenv('ADMIN_ROLE')

bot = commands.Bot(command_prefix='!')
client = discord.Client()

number_emoji = (':zero:', ':one:', ':two:', ':three:', ':four:', ':five:',
                ':six:', ':seven:', ':eight:', ':nine:', ':keycap_ten:')
errors_text = {
    1: '`Error` No such faction. Please check faction name and try again.'
}
frontier_tz = pytz.timezone('UTC')
frontier_time = datetime.now(frontier_tz)
'''What I do on startup'''


@bot.event
async def on_ready():
    print(f'{bot.user.name} is connected to the following guilds:')
    for guild in bot.guilds:
        print(f'"{guild.name}" with id: {guild.id}')
    print('\n')
    await bot_start()


'''What I can do on my own'''
Example #49
def process_replaytv_list(character, start=0):
    now = datetime.datetime.now(pytz.timezone("Europe/Amsterdam"))
    sevendays = datetime.datetime.now(pytz.timezone("Europe/Amsterdam")) - datetime.timedelta(days=7)
    nowstamp = int((now - datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)).total_seconds())
    sevendaysstamp = int((sevendays - datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)).total_seconds())

    data = api_get_channels()

    prefs = load_prefs(profile_id=1)
    channels_ar = []

    if prefs:
        for row in prefs:
            currow = prefs[row]

            if not check_key(data, unicode(row)):
                continue

            if int(currow['replay']) == 1:
                channels_ar.append(row)

    data = api_get_list_by_first(first=character, start=nowstamp, end=sevendaysstamp, channels=channels_ar)

    start = int(start)
    items = []
    count = 0
    item_count = 0

    if not data:
        return {'items': items, 'count': item_count, 'count2': count, 'total': 0}

    for currow in data:
        row = data[currow]

        if item_count == 51:
            break

        count += 1

        if count < start + 1:
            continue

        item_count += 1

        label = unicode(row['title'])
        idtitle = unicode(currow)
        icon = unicode(row['icon'])

        items.append(plugin.Item(
            label = label,
            info = {
                'sorttitle': label.upper(),
            },
            art = {
                'thumb': icon,
                'fanart': icon
            },
            path = plugin.url_for(func_or_url=replaytv_item, label=label, idtitle=idtitle, start=0),
        ))

    returnar = {'items': items, 'count': item_count, 'count2': count, 'total': len(data)}

    return returnar
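On Python 3 the epoch arithmetic above is equivalent to calling .timestamp() on an aware datetime; the manual subtraction also works on Python 2, which these Kodi snippets appear to target (note the unicode() calls). A quick Python 3 sanity check:

import datetime
import pytz

now = datetime.datetime.now(pytz.timezone("Europe/Amsterdam"))
manual = int((now - datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)).total_seconds())
assert manual == int(now.timestamp())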
Example #50
 def create_localtime(cls, year, month, day, hour, minute, second, timezone):
     return pytz.timezone(timezone).localize(
         datetime(year, month, day, hour, minute, second))
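localize() picks whatever UTC offset is in force on the given date, so daylight saving is handled per call. A small demonstration, rewritten as a plain function with illustrative dates:

from datetime import datetime
import pytz

def create_localtime(year, month, day, hour, minute, second, timezone):
    return pytz.timezone(timezone).localize(
        datetime(year, month, day, hour, minute, second))

print(create_localtime(2021, 1, 15, 12, 0, 0, "Europe/London"))  # ...+00:00 (GMT)
print(create_localtime(2021, 7, 15, 12, 0, 0, "Europe/London"))  # ...+01:00 (BST)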
Example #51
import datetime
import time

import pytz
from inky.auto import auto
from PIL import Image, ImageDraw

##  -- Display type is set automatically in the newer library
# Don't change any values of RED to yellow, as red just equals "colour"
inky_display = auto(ask_user=True, verbose=True)
## --   ----------------------------------------------------

inky_display.set_border(inky_display.WHITE)
img = Image.new("P", (inky_display.WIDTH, inky_display.HEIGHT))
draw = ImageDraw.Draw(img)

# find current time and convert to year month day etc
the_now = datetime.datetime.now(datetime.timezone.utc)
the_now_local = the_now.astimezone(pytz.timezone('Europe/London'))

# use the localized time, otherwise the fields below would be UTC
the_year = the_now_local.year
the_month = the_now_local.month
the_hour = the_now_local.hour
the_day = the_now_local.day
if the_now_local.minute < 30:
    the_segment = 0
else:
    the_segment = 1

print('segment:')
print(the_segment)

# select from db where record == the above
cur.execute(
Example #52
def process_replaytv_content(station, day=0, start=0):
    day = int(day)
    start = int(start)
    curdate = datetime.date.today() - datetime.timedelta(days=day)

    startDate = convert_datetime_timezone(datetime.datetime(curdate.year, curdate.month, curdate.day, 0, 0, 0), "Europe/Amsterdam", "UTC")
    endDate = convert_datetime_timezone(datetime.datetime(curdate.year, curdate.month, curdate.day, 23, 59, 59), "Europe/Amsterdam", "UTC")
    startTimeStamp = int((startDate - datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)).total_seconds())
    endTimeStamp = int((endDate - datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)).total_seconds())

    data = api_get_epg_by_date_channel(date=curdate.strftime('%Y') + curdate.strftime('%m') + curdate.strftime('%d'), channel=station)

    items = []
    count = 0
    item_count = 0

    if not data:
        return {'items': items, 'count': item_count, 'count2': count, 'total': 0}

    for currow in data:
        row = data[currow]

        if item_count == 51:
            break

        count += 1

        if count < start + 1:
            continue

        context = []
        item_count += 1
        channel = unicode(row['channel'])

        startT = datetime.datetime.fromtimestamp(int(row['start']))
        startT = convert_datetime_timezone(startT, "Europe/Amsterdam", "Europe/Amsterdam")
        endT = datetime.datetime.fromtimestamp(int(row['end']))
        endT = convert_datetime_timezone(endT, "Europe/Amsterdam", "Europe/Amsterdam")

        if endT < (datetime.datetime.now(pytz.timezone("Europe/Amsterdam")) - datetime.timedelta(days=7)):
            continue

        label = startT.strftime("%H:%M") + " - " + unicode(row['title'])

        description = unicode(row['description'])

        duration = int((endT - startT).total_seconds())

        program_image = unicode(row['icon'])
        program_image_large = unicode(row['icon'])
        program_id = unicode(row['program_id'])

        if CONST_WATCHLIST:
            context.append((_.ADD_TO_WATCHLIST, 'RunPlugin({context_url})'.format(context_url=plugin.url_for(func_or_url=add_to_watchlist, id=program_id, type='item')), ))

        items.append(plugin.Item(
            label = label,
            info = {
                'plot': description,
                'duration': duration,
                'mediatype': 'video',
                'sorttitle': label.upper(),
            },
            art = {
                'thumb': program_image,
                'fanart': program_image_large
            },
            path = plugin.url_for(func_or_url=play_video, type='program', channel=channel, id=program_id),
            context = context,
            playable = True,
        ))

    returnar = {'items': items, 'count': item_count, 'count2': count, 'total': len(data)}

    return returnar
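convert_datetime_timezone() is not defined in these excerpts. Given how it is called, a plausible reconstruction (hypothetical, not the plugin's actual code) is: localize the naive datetime in the source zone, then convert to the target zone:

from datetime import datetime
import pytz

def convert_datetime_timezone(dt, tz_from, tz_to):
    # Hypothetical helper: attach the source zone, then convert.
    aware = pytz.timezone(tz_from).localize(dt)
    return aware.astimezone(pytz.timezone(tz_to))

# With identical zones, as in the snippets above, this simply makes the
# naive fromtimestamp() result timezone-aware without shifting it.
print(convert_datetime_timezone(datetime(2021, 1, 1, 12, 0), "Europe/Amsterdam", "Europe/Amsterdam"))
# 2021-01-01 12:00:00+01:00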
import logging
import os

logging.basicConfig(level=logging.INFO)
import common.application as ap
import pytz
#from simulator import place_order as simulator
from common.SlackUtil import sendMessage

app_properties = ap.app_properties
token_mappings = ap.token_mappings
csv_mapping = ap.csv_mapping

api_key = app_properties['api_key']
api_secret = app_properties['api_secret']
token ="1510401"
os.makedirs(token, exist_ok=True)
file_name = "trades.csv"
tz = pytz.timezone('Asia/Kolkata')
logger = logging.getLogger('algo_tester')
# global define
profit = 0.019
stop_loss = 0.0095
historical_data = ""
rsi = 0.00
rsi_slope = 0.00
last_close = 0.00
last_high = 0.00
last_low = 0.00
last_price = get_price(token, 0.00)  # get_price is defined elsewhere in this project
current_price = 0.00
holding = ""
order_id = ""
swing = False
Example #54
def process_replaytv_list_content(label, idtitle, start=0):
    start = int(start)

    now = datetime.datetime.now(pytz.timezone("Europe/Amsterdam"))
    sevendays = datetime.datetime.now(pytz.timezone("Europe/Amsterdam")) - datetime.timedelta(days=7)
    nowstamp = int((now - datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)).total_seconds())
    sevendaysstamp = int((sevendays - datetime.datetime(1970, 1, 1, tzinfo=pytz.utc)).total_seconds())

    data = api_get_channels()

    prefs = load_prefs(profile_id=1)
    channels_ar = []
    channels_ar2 = {}

    if prefs:
        for row in prefs:
            currow = prefs[row]

            if not check_key(data, unicode(row)):
                continue

            channels_ar2[unicode(row)] = data[unicode(row)]['name']

            if int(currow['replay']) == 1:
                channels_ar.append(row)

    data = api_get_epg_by_idtitle(idtitle=idtitle, start=nowstamp, end=sevendaysstamp, channels=channels_ar)

    items = []
    count = 0
    item_count = 0

    if not data:
        return {'items': items, 'count': item_count, 'count2': count, 'total': 0}

    for currow in data:
        row = data[currow]

        if item_count == 51:
            break

        count += 1

        if count < start + 1:
            continue

        context = []
        item_count += 1

        channel = unicode(row['channel'])

        startT = datetime.datetime.fromtimestamp(int(row['start']))
        startT = convert_datetime_timezone(startT, "Europe/Amsterdam", "Europe/Amsterdam")
        endT = datetime.datetime.fromtimestamp(int(row['end']))
        endT = convert_datetime_timezone(endT, "Europe/Amsterdam", "Europe/Amsterdam")

        if xbmc.getLanguage(xbmc.ISO_639_1) == 'nl':
            itemlabel = '{weekday} {day} {month} {yearhourminute} '.format(weekday=date_to_nl_dag(startT), day=startT.strftime("%d"), month=date_to_nl_maand(startT), yearhourminute=startT.strftime("%Y %H:%M"))
        else:
            itemlabel = startT.strftime("%A %d %B %Y %H:%M ").capitalize()

        itemlabel += unicode(row['title'])

        try:
            itemlabel += " (" + unicode(channels_ar2[channel]) + ")"
        except KeyError:
            pass

        description = unicode(row['description'])
        duration = int((endT - startT).total_seconds())
        program_image = unicode(row['icon'])
        program_image_large = unicode(row['icon'])
        program_id = unicode(row['program_id'])

        if CONST_WATCHLIST:
            context.append((_.ADD_TO_WATCHLIST, 'RunPlugin({context_url})'.format(context_url=plugin.url_for(func_or_url=add_to_watchlist, id=program_id, type='item')), ))

        items.append(plugin.Item(
            label = itemlabel,
            info = {
                'plot': description,
                'duration': duration,
                'mediatype': 'video',
                'sorttitle': itemlabel.upper(),
            },
            art = {
                'thumb': program_image,
                'fanart': program_image_large
            },
            path = plugin.url_for(func_or_url=play_video, type='program', channel=channel, id=program_id),
            playable = True,
            context = context
        ))

    returnar = {'items': items, 'count': item_count, 'count2': count, 'total': len(data)}

    return returnar
Example #55
File: utils.py Project: liougeal/beem
def addTzInfo(t, timezone="UTC"):
    """Returns a datetime object with tzinfo added"""
    if t and isinstance(t, (datetime, date, time)) and t.tzinfo is None:
        utc = pytz.timezone(timezone)
        t = utc.localize(t)
    return t
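Called with a naive value it attaches the zone; aware values pass through untouched. A quick check, assuming pytz semantics:

from datetime import datetime, date, time
import pytz

def addTzInfo(t, timezone="UTC"):
    """Returns a datetime object with tzinfo added"""
    if t and isinstance(t, (datetime, date, time)) and t.tzinfo is None:
        utc = pytz.timezone(timezone)
        t = utc.localize(t)
    return t

naive = datetime(2021, 6, 1, 8, 30)
aware = addTzInfo(naive)
print(aware)                      # 2021-06-01 08:30:00+00:00
assert addTzInfo(aware) is aware  # tzinfo already set, returned as-is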
Example #56
 def customTime(*args):
     utc_dt = utc.localize(datetime.utcnow())
     my_tz = timezone(time_zone)
     converted = utc_dt.astimezone(my_tz)
     return converted.timetuple()
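The (*args) signature and timetuple return value match what logging.Formatter expects from its converter hook, which is presumably how this is used. A hedged wiring sketch; time_zone is assumed to be a module-level setting:

import logging
from datetime import datetime
from pytz import timezone, utc

time_zone = "Asia/Kolkata"  # assumption: configured elsewhere in the real module

def customTime(*args):
    utc_dt = utc.localize(datetime.utcnow())
    my_tz = timezone(time_zone)
    converted = utc_dt.astimezone(my_tz)
    return converted.timetuple()

logging.Formatter.converter = customTime  # all %(asctime)s values become local
logging.basicConfig(format="%(asctime)s %(message)s", level=logging.INFO)
logging.info("timestamped in %s", time_zone)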
Example #57
File: p52.py Project: jamayfieldjr/iem
def plotter(fdict):
    """ Go """
    pgconn = get_dbconn('postgis')
    ctx = get_autoplot_context(fdict, get_description())
    station = ctx['station']
    sts = ctx['sdate']
    sts = datetime.datetime(sts.year, sts.month, sts.day)
    days = ctx['days']

    tz = pytz.timezone(ctx['_nt'].sts[station]['tzname'])

    sts = tz.localize(sts)
    ets = sts + datetime.timedelta(days=days)
    df = read_sql("""
        SELECT phenomena, significance, eventid,
        min(issue at time zone 'UTC') as minissue,
        max(expire at time zone 'UTC') as maxexpire,
        max(coalesce(init_expire, expire) at time zone 'UTC') as maxinitexpire,
        extract(year from product_issue) as year
        from warnings
        WHERE wfo = %s and issue > %s and issue < %s
        GROUP by phenomena, significance, eventid, year
        ORDER by minissue ASC
    """,
                  pgconn,
                  params=(station, sts, ets),
                  index_col=None)
    if df.empty:
        raise NoDataFound("No events were found for WFO and time period.")

    events = []
    labels = []
    types = []
    for i, row in df.iterrows():
        endts = max(row[4], row[5]).replace(tzinfo=pytz.utc)
        events.append((row[3].replace(tzinfo=pytz.utc), endts, row[2]))
        labels.append(vtec.get_ps_string(row[0], row[1]))
        types.append("%s.%s" % (row[0], row[1]))

    # If we have lots of WWAs, we need to expand vertically a bunch; let's
    # assume we can plot 5 WWAs per 100 pixels
    if len(events) > 20:
        height = int(len(events) / 6.0) + 1
        (fig, ax) = plt.subplots(figsize=(8, height))
        fontsize = 8
    else:
        (fig, ax) = plt.subplots(figsize=(8, 6))
        fontsize = 10

    used = []

    def get_label(i):
        if types[i] in used:
            return ''
        used.append(types[i])
        return "%s (%s)" % (labels[i], types[i])

    halfway = sts + datetime.timedelta(days=days / 2.)

    for i, e in enumerate(events):
        secs = abs((e[1] - e[0]).days * 86400.0 + (e[1] - e[0]).seconds)
        ax.barh(i + 1,
                secs / 86400.0,
                left=e[0],
                align='center',
                fc=vtec.NWS_COLORS.get(types[i], 'k'),
                ec=vtec.NWS_COLORS.get(types[i], 'k'),
                label=get_label(i))
        align = 'left'
        xpos = e[0] + datetime.timedelta(seconds=secs + 3600)
        if xpos > halfway:
            align = 'right'
            xpos = e[0] - datetime.timedelta(minutes=90)
        textcolor = vtec.NWS_COLORS.get(
            types[i] if types[i] != 'TO.A' else 'X', 'k')
        ax.text(xpos,
                i + 1,
                labels[i].replace("Weather", "Wx") + " " + str(e[2]),
                color=textcolor,
                ha=align,
                va='center',
                bbox=dict(color='white', boxstyle='square,pad=0'),
                fontsize=fontsize)

    ax.set_ylabel("Sequential Product Number")
    ax.set_title(("%s-%s NWS %s\nissued Watch/Warning/Advisories") %
                 (sts.strftime("%-d %b %Y"), ets.strftime("%-d %b %Y"),
                  ctx['_nt'].sts[station]['name']))
    ax.set_ylim(0.4, len(events) + 1)
    ax.xaxis.set_minor_locator(mdates.DayLocator(interval=1, tz=tz))
    xinterval = int(days / 7) + 1
    ax.xaxis.set_major_locator(mdates.DayLocator(interval=xinterval, tz=tz))
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%-d %b', tz=tz))

    ax.grid(True)

    ax.set_xlim(sts, ets)

    # Shrink current axis's height by 10% on the bottom
    box = ax.get_position()
    ax.set_position(
        [box.x0, box.y0 + box.height * 0.2, box.width, box.height * 0.8])

    ax.legend(loc='upper center',
              bbox_to_anchor=(0.5, -0.1),
              fancybox=True,
              shadow=True,
              ncol=3,
              scatterpoints=1,
              fontsize=8)

    return fig, df
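The tz.localize() call near the top of plotter() matters with pytz: attaching a zone via datetime.replace() silently uses the zone's base offset (often the pre-1900 local mean time), while localize() resolves the offset actually in force on that date. A minimal illustration with an arbitrary zone:

from datetime import datetime
import pytz

eastern = pytz.timezone("US/Eastern")
dt = datetime(2021, 6, 1, 12, 0)

print(dt.replace(tzinfo=eastern))  # 2021-06-01 12:00:00-04:56 (LMT artifact)
print(eastern.localize(dt))        # 2021-06-01 12:00:00-04:00 (correct EDT)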
Example #58
File: utils.py Project: liougeal/beem
def parse_time(block_time):
    """Take a string representation of time from the blockchain, and parse it
       into datetime object.
    """
    utc = pytz.timezone("UTC")
    return utc.localize(datetime.strptime(block_time, timeFormat))
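timeFormat is a module-level constant defined elsewhere in beem's utils; it should match the blockchain's second-resolution ISO-style timestamps. A usage sketch under that assumption:

from datetime import datetime
import pytz

timeFormat = "%Y-%m-%dT%H:%M:%S"  # assumption: mirrors the constant in beem.utils

def parse_time(block_time):
    utc = pytz.timezone("UTC")
    return utc.localize(datetime.strptime(block_time, timeFormat))

print(parse_time("2021-03-04T05:06:07"))  # 2021-03-04 05:06:07+00:00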
Example #59
 def __init__(self):
     self.timezone = pytz.timezone('UTC')
def _compose_ltz(dt, tz):
    ret = ZERO_EPOCH + timedelta(seconds=float(dt))
    return pytz.utc.localize(ret).astimezone(
        pytz.timezone(tz))
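ZERO_EPOCH is not defined in this excerpt; given the arithmetic, it is presumably the naive Unix epoch. A runnable sketch under that assumption:

from datetime import datetime, timedelta
import pytz

ZERO_EPOCH = datetime(1970, 1, 1)  # assumption: naive UTC epoch

def _compose_ltz(dt, tz):
    # Seconds since the epoch -> aware UTC datetime -> target zone.
    ret = ZERO_EPOCH + timedelta(seconds=float(dt))
    return pytz.utc.localize(ret).astimezone(pytz.timezone(tz))

print(_compose_ltz(1609459200, "Asia/Kolkata"))
# 2021-01-01 05:30:00+05:30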