Example No. 1
    def testOldPickles(self):
        # Ensure that applications serializing pytz instances as pickles
        # have no troubles upgrading to a new pytz release. These pickles
        # were created with pytz2006j
        east1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'US/Eastern'\np2\nI-18000\n"
            "I0\nS'EST'\np3\ntRp4\n."
            ))
        east2 = pytz.timezone('US/Eastern').localize(
            datetime(2006, 1, 1)).tzinfo
        self.assertTrue(east1 is east2)

        # Confirm changes in name munging between 2006j and 2007c cause
        # no problems.
        pap1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'America/Port_minus_au_minus_Prince'"
            "\np2\nI-17340\nI0\nS'PPMT'\np3\ntRp4\n."))
        pap2 = pytz.timezone('America/Port-au-Prince').localize(
            datetime(1910, 1, 1)).tzinfo
        self.assertTrue(pap1 is pap2)

        gmt1 = pickle.loads(_byte_string(
            "cpytz\n_p\np1\n(S'Etc/GMT_plus_10'\np2\ntRp3\n."))
        gmt2 = pytz.timezone('Etc/GMT+10')
        self.assertTrue(gmt1 is gmt2)
Example No. 2
    def testOldPickles(self):
        # Ensure that applications serializing pytz instances as pickles
        # have no troubles upgrading to a new pytz release. These pickles
        # were created with pytz2006j
        east1 = pickle.loads(
            _byte_string("cpytz\n_p\np1\n(S'US/Eastern'\np2\nI-18000\n"
                         "I0\nS'EST'\np3\ntRp4\n."))
        east2 = pytz.timezone('US/Eastern').localize(datetime(2006, 1,
                                                              1)).tzinfo
        self.assertTrue(east1 is east2)

        # Confirm changes in name munging between 2006j and 2007c cause
        # no problems.
        pap1 = pickle.loads(
            _byte_string(
                "cpytz\n_p\np1\n(S'America/Port_minus_au_minus_Prince'"
                "\np2\nI-17340\nI0\nS'PPMT'\np3\ntRp4\n."))
        pap2 = pytz.timezone('America/Port-au-Prince').localize(
            datetime(1910, 1, 1)).tzinfo
        self.assertTrue(pap1 is pap2)

        gmt1 = pickle.loads(
            _byte_string("cpytz\n_p\np1\n(S'Etc/GMT_plus_10'\np2\ntRp3\n."))
        gmt2 = pytz.timezone('Etc/GMT+10')
        self.assertTrue(gmt1 is gmt2)
Example No. 3
    def save(self):
        """
        Save the collected observation in the text/csv;subtype=istSOS/2.0.0

        with a file name composed of
          - name procedure
          - underscore _
          - datetime in UTC
          - extension (.dat)
        """
        self.log("End position: %s" % self.getIOEndPosition())
        if len(self.observations) > 0:
            if self.getIOEndPosition() == None:
                f = open(
                    os.path.join(
                        self.folderOut, "%s_%s.dat" %
                        (self.name,
                         datetime.strftime(
                             self.observations[-1].getEventime().astimezone(
                                 timezone('UTC')), "%Y%m%d%H%M%S%f"))), 'w')
            else:
                if self.getIOEndPosition() < self.observations[-1].getEventime(
                ):
                    raise IstSOSError(
                        "End position (%s) cannot be before the last observation event time (%s)"
                        % (self.getIOEndPosition(),
                           self.observations[-1].getEventime()))
                f = open(
                    os.path.join(
                        self.folderOut, "%s_%s.dat" %
                        (self.name,
                         datetime.strftime(
                             self.getIOEndPosition().astimezone(
                                 timezone('UTC')), "%Y%m%d%H%M%S%f"))), 'w')
            f.write("%s\n" % ",".join(self.obsindex))
            #self.observations.sort(key=lambda x: x.__eventime, reverse=True)
            #self.observations = sorted(self.observations, key=lambda observation: observation.__eventime)
            self.observations = sorted(self.observations,
                                       key=methodcaller('getEventime'))
            for o in self.observations:
                f.write("%s\n" % o.csv(",", self.obsindex))
        else:
            # End position is used to advance the sampling time in cases where
            # there is a "no data" observation (rain)
            self.addMessage("No data, but end position updated")
            if self.getIOEndPosition() == None:
                raise IstSOSError(
                    "The file has no observations, if this happens, you shall use the setEndPosition function to set the endPosition manually"
                )
            f = open(
                os.path.join(
                    self.folderOut, "%s_%s.dat" %
                    (self.name,
                     datetime.strftime(
                         self.getIOEndPosition().astimezone(timezone('UTC')),
                         "%Y%m%d%H%M%S%f"))), 'w')
            f.write("%s\n" % ",".join(self.obsindex))

        f.flush()
        f.close()
Example No. 4
    def testUnicodeTimezone(self):
        # We need to ensure that cold lookups work for both Unicode
        # and traditional strings, and that the desired singleton is
        # returned.
        self.clearCache()
        eastern = pytz.timezone(unicode('US/Eastern'))
        self.assertTrue(eastern is pytz.timezone('US/Eastern'))

        self.clearCache()
        eastern = pytz.timezone('US/Eastern')
        self.assertTrue(eastern is pytz.timezone(unicode('US/Eastern')))
Example No. 6
 def setEndPositionFromFilename(self, fileName):
     """
     Extract from file name the EndPosition Date, usefull with irregular 
     procedures like rain (tipping bucket) that can have no data when it's 
     not raining.
     
     In the config there shall be this type of configuration:
         
         "filenamedate": {
             "format": '%y%m%d%H%M',
             "tz": '+02:00',
             "remove": ['Calcaccia_A_','.dat']
         }
         
     """
     
     if "filenamedate" in self.config:
         
         dt = fileName;
         for rem in self.config["filenamedate"]["remove"]:
             dt = dt.replace(rem,'')
             
         dt = datetime.strptime(dt,self.config["filenamedate"]["format"])
         dt = dt.replace(tzinfo=timezone('UTC'))
         
         if "tz" in self.config["filenamedate"]:
             offset = self.config["filenamedate"]["tz"].split(":")
             dt = dt - timedelta(hours=int(offset[0]), minutes=int(offset[1]))
         
         self.setEndPosition(dt)
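
A minimal, self-contained sketch of the filename-to-end-position logic above, using the configuration shown in the docstring; the file name below is hypothetical and only for illustration:

from datetime import datetime, timedelta

from pytz import timezone

config = {
    "filenamedate": {
        "format": '%y%m%d%H%M',
        "tz": '+02:00',
        "remove": ['Calcaccia_A_', '.dat'],
    }
}

file_name = "Calcaccia_A_1406301500.dat"  # hypothetical tipping-bucket file
dt = file_name
for rem in config["filenamedate"]["remove"]:
    dt = dt.replace(rem, '')

# Parse the remaining digits, tag them as UTC, then subtract the configured
# offset to obtain the true UTC end position.
dt = datetime.strptime(dt, config["filenamedate"]["format"])
dt = dt.replace(tzinfo=timezone('UTC'))
offset = config["filenamedate"]["tz"].split(":")
dt = dt - timedelta(hours=int(offset[0]), minutes=int(offset[1]))
print(dt.isoformat())  # 2014-06-30T13:00:00+00:00
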
Example No. 7
class LondonHistoryStartTestCase(USEasternDSTStartTestCase):
    # The first known timezone transition in London was in 1847, when
    # clocks were synchronized to GMT. However, we currently only
    # understand v1 format tzfile(5) files, which do not handle years
    # this far in the past, so our earliest known transition is in
    # 1916.
    tzinfo = pytz.timezone('Europe/London')
    # transition_time = datetime(1847, 12, 1, 1, 15, 00, tzinfo=UTC)
    # before = {
    #     'tzname': 'LMT',
    #     'utcoffset': timedelta(minutes=-75),
    #     'dst': timedelta(0),
    #     }
    # after = {
    #     'tzname': 'GMT',
    #     'utcoffset': timedelta(0),
    #     'dst': timedelta(0),
    #     }
    transition_time = datetime(1916, 5, 21, 2, 00, 00, tzinfo=UTC)
    before = {
        'tzname': 'GMT',
        'utcoffset': timedelta(0),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'BST',
        'utcoffset': timedelta(hours=1),
        'dst': timedelta(hours=1),
    }
Example No. 8
    def testDst(self):
        tz = pytz.timezone('Europe/Amsterdam')
        dt = datetime(2004, 2, 1, 0, 0, 0)

        for localized_tz in tz._tzinfos.values():
            self._roundtrip_tzinfo(localized_tz)
            self._roundtrip_datetime(dt.replace(tzinfo=localized_tz))
Example No. 10
    def testDatabaseFixes(self):
        # Hack the pickle to make it refer to a timezone abbreviation
        # that does not match anything. The unpickler should be able
        # to repair this case
        tz = pytz.timezone('Australia/Melbourne')
        p = pickle.dumps(tz)
        tzname = tz._tzname
        hacked_p = p.replace(_byte_string(tzname),
                             _byte_string('?' * len(tzname)))
        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertTrue(tz is unpickled_tz)

        # Simulate a database correction. In this case, the incorrect
        # data will continue to be used.
        p = pickle.dumps(tz)
        new_utcoffset = tz._utcoffset.seconds + 42

        # Python 3 introduced a new pickle protocol where numbers are stored in
        # hexadecimal representation. Here we extract the pickle
        # representation of the number for the current Python version.
        old_pickle_pattern = pickle.dumps(tz._utcoffset.seconds)[3:-1]
        new_pickle_pattern = pickle.dumps(new_utcoffset)[3:-1]
        hacked_p = p.replace(old_pickle_pattern, new_pickle_pattern)

        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertEqual(unpickled_tz._utcoffset.seconds, new_utcoffset)
        self.assertTrue(tz is not unpickled_tz)
Example No. 11
    def testDatabaseFixes(self):
        # Hack the pickle to make it refer to a timezone abbreviation
        # that does not match anything. The unpickler should be able
        # to repair this case
        tz = pytz.timezone('Australia/Melbourne')
        p = pickle.dumps(tz)
        tzname = tz._tzname
        hacked_p = p.replace(_byte_string(tzname),
                             _byte_string('?'*len(tzname)))
        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertTrue(tz is unpickled_tz)

        # Simulate a database correction. In this case, the incorrect
        # data will continue to be used.
        p = pickle.dumps(tz)
        new_utcoffset = tz._utcoffset.seconds + 42

        # Python 3 introduced a new pickle protocol where numbers are stored in
        # hexadecimal representation. Here we extract the pickle
        # representation of the number for the current Python version.
        old_pickle_pattern = pickle.dumps(tz._utcoffset.seconds)[3:-1]
        new_pickle_pattern = pickle.dumps(new_utcoffset)[3:-1]
        hacked_p = p.replace(old_pickle_pattern, new_pickle_pattern)

        self.assertNotEqual(p, hacked_p)
        unpickled_tz = pickle.loads(hacked_p)
        self.assertEqual(unpickled_tz._utcoffset.seconds, new_utcoffset)
        self.assertTrue(tz is not unpickled_tz)
Example No. 12
def _get_localzone():
    tzname = os.popen("systemsetup -gettimezone").read().replace("Time Zone: ", "").strip()
    if not tzname or tzname not in pytz.all_timezones_set:
        # link will be something like /usr/share/zoneinfo/America/Los_Angeles.
        link = os.readlink("/etc/localtime")
        tzname = link[link.rfind("zoneinfo/") + 9:]
    return pytz.timezone(tzname)
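
For reference, a tiny sketch of the /etc/localtime fallback used above, with a hypothetical symlink target showing how the zone name is sliced out of the path:

link = "/usr/share/zoneinfo/America/Los_Angeles"  # what os.readlink() might return
tzname = link[link.rfind("zoneinfo/") + 9:]       # len("zoneinfo/") == 9
print(tzname)  # America/Los_Angeles
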
Example No. 13
def _get_localzone():
    tzname = os.popen("systemsetup -gettimezone").read().replace(
        "Time Zone: ", "").strip()
    if not tzname or tzname not in pytz.all_timezones_set:
        # link will be something like /usr/share/zoneinfo/America/Los_Angeles.
        link = os.readlink("/etc/localtime")
        tzname = link[link.rfind("zoneinfo/") + 9:]
    return pytz.timezone(tzname)
Example No. 14
 def test_only_localtime(self):
     local_path = os.path.split(__file__)[0]
     tz = tzlocal.unix._get_localzone(
         _root=os.path.join(local_path, 'test_data', 'localtime'))
     self.assertEqual(tz.zone, 'local')
     dt = datetime(2012, 1, 1, 5)
     self.assertEqual(
         pytz.timezone('Africa/Harare').localize(dt), tz.localize(dt))
Example No. 15
 def no_testCreateLocaltime(self):
     # It would be nice if this worked, but it doesn't.
     tz = pytz.timezone('Europe/Amsterdam')
     dt = datetime(2004, 10, 31, 2, 0, 0, tzinfo=tz)
     self.assertEqual(
             dt.strftime(fmt),
             '2004-10-31 02:00:00 CET+0100'
             )
Example No. 16
    def testNormalize(self):
        tz = pytz.timezone('US/Eastern')
        dt = datetime(2004, 4, 4, 7, 0, 0, tzinfo=UTC).astimezone(tz)
        dt2 = dt - timedelta(minutes=10)
        self.assertEqual(dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
                         '2004-04-04 02:50:00 EDT-0400')

        dt2 = tz.normalize(dt2)
        self.assertEqual(dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
                         '2004-04-04 01:50:00 EST-0500')
Example No. 17
    def test_astimezone(self):
        other_tz = pytz.timezone('Europe/Paris')
        self.assertTrue(self.tz is not other_tz)

        dt = datetime(2012, 3, 26, 12, 0)
        other_dt = other_tz.localize(dt)

        local_dt = other_dt.astimezone(self.tz)

        self.assertTrue(local_dt.tzinfo is not other_dt.tzinfo)
        self.assertNotEqual(
            local_dt.replace(tzinfo=None), other_dt.replace(tzinfo=None))
Example No. 18
    def test_astimezone(self):
        other_tz = pytz.timezone('Europe/Paris')
        self.assertTrue(self.tz is not other_tz)

        dt = datetime(2012, 3, 26, 12, 0)
        other_dt = other_tz.localize(dt)

        local_dt = other_dt.astimezone(self.tz)

        self.assertTrue(local_dt.tzinfo is not other_dt.tzinfo)
        self.assertNotEqual(local_dt.replace(tzinfo=None),
                            other_dt.replace(tzinfo=None))
Example No. 19
    def save(self):
        """
        Save the collected observation in the text/csv;subtype=istSOS/2.0.0
        
        with a file name composed of 
          - name procedure
          - underscore _
          - datetime in UTC
          - extension (.dat)
        .astimezone(pytz.utc).isoformat()
        """
        self.log("End position: %s" % self.getIOEndPosition())
        if len(self.observations)>0:
            if self.getIOEndPosition() == None:
                f = open(os.path.join(self.folderOut,"%s_%s.dat" %(
                    self.name,

                    datetime.strftime(self.observations[-1].getEventime().astimezone(timezone('UTC')), "%Y%m%d%H%M%S%f"))), 'w')
            else:
                if self.getIOEndPosition() < self.observations[-1].getEventime():
                    raise IstSOSError("End position (%s) cannot be before the last observation event time (%s)" % (
                        self.getIOEndPosition(), self.observations[-1].getEventime()))
                f = open(os.path.join(self.folderOut,"%s_%s.dat" %(
                    self.name,
                    datetime.strftime(self.getIOEndPosition().astimezone(timezone('UTC')), "%Y%m%d%H%M%S%f"))), 'w')
            f.write("%s\n" % ",".join(self.obsindex))
            for o in self.observations:
                f.write("%s\n" % o.csv(",",self.obsindex))
        else:
            # End position is used to advance the sampling time in cases where 
            # there is a "no data" observation (rain)
            if self.getIOEndPosition() == None:
                raise IstSOSError("The file has no observations, if this happens, you shall use the setEndPosition function to set the endPosition manually")
            f = open(os.path.join(self.folderOut,"%s_%s.dat" % (
                self.name,
                datetime.strftime(self.getIOEndPosition().astimezone(timezone('UTC')), "%Y%m%d%H%M%S%f"))), 'w')
            f.write("%s\n" % ",".join(self.obsindex))
        f.flush()
        f.close()
Example No. 20
    def testUnknownOffsets(self):
        # This tzinfo behavior is required to make
        # datetime.time.{utcoffset, dst, tzname} work as documented.

        dst_tz = pytz.timezone('US/Eastern')

        # This information is not known when we don't have a date,
        # so return None per API.
        self.assertTrue(dst_tz.utcoffset(None) is None)
        self.assertTrue(dst_tz.dst(None) is None)
        # We don't know the abbreviation, but this is still a valid
        # tzname per the Python documentation.
        self.assertEqual(dst_tz.tzname(None), 'US/Eastern')
Example No. 21
class USEasternDSTEndTestCase(USEasternDSTStartTestCase):
    tzinfo = pytz.timezone('US/Eastern')
    transition_time = datetime(2002, 10, 27, 6, 0, 0, tzinfo=UTC)
    before = {
        'tzname': 'EDT',
        'utcoffset': timedelta(hours=-4),
        'dst': timedelta(hours=1),
    }
    after = {
        'tzname': 'EST',
        'utcoffset': timedelta(hours=-5),
        'dst': timedelta(hours=0),
    }
Example No. 23
def play_archive(stream_id, playTime, title, duration):
    try:
        timestamp = int(
            time.mktime(
                datetime.strptime(playTime, "%Y-%m-%d:%H-%M").timetuple()))
    except TypeError:
        timestamp = int((time.mktime(time.strptime(playTime,
                                                   "%Y-%m-%d:%H-%M"))))

    serverTzString = vaderClass.user_info['server_info']['timezone']
    tzObj = pytz.timezone(serverTzString)
    newPlayTime = datetime.fromtimestamp(int(timestamp),
                                         tz=tzObj).strftime('%Y-%m-%d:%H-%M')
    play_archive_adjusted(stream_id, newPlayTime, title, duration)
Example No. 24
    def testNormalize(self):
        tz = pytz.timezone('US/Eastern')
        dt = datetime(2004, 4, 4, 7, 0, 0, tzinfo=UTC).astimezone(tz)
        dt2 = dt - timedelta(minutes=10)
        self.assertEqual(
                dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
                '2004-04-04 02:50:00 EDT-0400'
                )

        dt2 = tz.normalize(dt2)
        self.assertEqual(
                dt2.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
                '2004-04-04 01:50:00 EST-0500'
                )
Example No. 25
class NoumeaDSTEndTestCase(USEasternDSTStartTestCase):
    # Noumea dropped DST in 1997.
    tzinfo = pytz.timezone('Pacific/Noumea')
    transition_time = datetime(1997, 3, 1, 15, 00, 00, tzinfo=UTC)
    before = {
        'tzname': 'NCST',
        'utcoffset': timedelta(hours=12),
        'dst': timedelta(hours=1),
    }
    after = {
        'tzname': 'NCT',
        'utcoffset': timedelta(hours=11),
        'dst': timedelta(0),
    }
Example No. 26
class TahitiTestCase(USEasternDSTStartTestCase):
    # Tahiti has had a single transition in its history.
    tzinfo = pytz.timezone('Pacific/Tahiti')
    transition_time = datetime(1912, 10, 1, 9, 58, 16, tzinfo=UTC)
    before = {
        'tzname': 'LMT',
        'utcoffset': timedelta(hours=-9, minutes=-58),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'TAHT',
        'utcoffset': timedelta(hours=-10),
        'dst': timedelta(0),
    }
Example No. 27
def convert_to_datetime(input, tz, arg_name):
    """
    Converts the given object to a timezone aware datetime object.
    If a timezone aware datetime object is passed, it is returned unmodified.
    If a naive datetime object is passed, it is given the specified timezone.
    If the input is a string, it is parsed as a datetime with the given timezone.

    Date strings are accepted in three different forms: date only (Y-m-d),
    date with time (Y-m-d H:M:S) or with date+time with microseconds
    (Y-m-d H:M:S.micro).

    :param str|datetime input: the datetime or string to convert to a timezone aware datetime
    :param datetime.tzinfo tz: timezone to interpret ``input`` in
    :param str arg_name: the name of the argument (used in an error message)
    :rtype: datetime
    """

    if input is None:
        return
    elif isinstance(input, datetime):
        datetime_ = input
    elif isinstance(input, date):
        datetime_ = datetime.combine(input, time())
    elif isinstance(input, six.string_types):
        m = _DATE_REGEX.match(input)
        if not m:
            raise ValueError('Invalid date string')
        values = [(k, int(v or 0)) for k, v in m.groupdict().items()]
        values = dict(values)
        datetime_ = datetime(**values)
    else:
        raise TypeError('Unsupported type for %s: %s' %
                        (arg_name, input.__class__.__name__))

    if datetime_.tzinfo is not None:
        return datetime_
    if tz is None:
        raise ValueError(
            'The "tz" argument must be specified if %s has no timezone information'
            % arg_name)
    if isinstance(tz, six.string_types):
        tz = timezone(tz)

    try:
        return tz.localize(datetime_, is_dst=None)
    except AttributeError:
        raise TypeError(
            'Only pytz timezones are supported (need the localize() and normalize() methods)'
        )
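
A hedged usage sketch of the pytz step at the end of convert_to_datetime: a naive datetime is made timezone-aware with localize(is_dst=None), which raises AmbiguousTimeError or NonExistentTimeError instead of guessing. The zone and date below are arbitrary:

from datetime import datetime

import pytz

tz = pytz.timezone('Europe/Zurich')
aware = tz.localize(datetime(2012, 6, 1, 8, 30), is_dst=None)
print(aware.isoformat())  # 2012-06-01T08:30:00+02:00
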
Example No. 28
def unpickler(zone, utcoffset=None, dstoffset=None, tzname=None):
    """Factory function for unpickling pytz tzinfo instances.

    This is shared for both StaticTzInfo and DstTzInfo instances, because
    database changes could cause a zone's implementation to switch between
    these two base classes and we can't break pickles on a pytz version
    upgrade.
    """
    # Raises a KeyError if zone no longer exists, which should never happen
    # and would be a bug.
    tz = pytz.timezone(zone)

    # A StaticTzInfo - just return it
    if utcoffset is None:
        return tz

    # This pickle was created from a DstTzInfo. We need to
    # determine which of the list of tzinfo instances for this zone
    # to use in order to restore the state of any datetime instances using
    # it correctly.
    utcoffset = memorized_timedelta(utcoffset)
    dstoffset = memorized_timedelta(dstoffset)
    try:
        return tz._tzinfos[(utcoffset, dstoffset, tzname)]
    except KeyError:
        # The particular state requested in this timezone no longer exists.
        # This indicates a corrupt pickle, or the timezone database has been
        # corrected violently enough to make this particular
        # (utcoffset,dstoffset) no longer exist in the zone, or the
        # abbreviation has been changed.
        pass

    # See if we can find an entry differing only by tzname. Abbreviations
    # get changed from the initial guess by the database maintainers to
    # match reality when this information is discovered.
    for localized_tz in tz._tzinfos.values():
        if (localized_tz._utcoffset == utcoffset
                and localized_tz._dst == dstoffset):
            return localized_tz

    # This (utcoffset, dstoffset) information has been removed from the
    # zone. Add it back. This might occur when the database maintainers have
    # corrected incorrect information. datetime instances using this
    # incorrect information will continue to do so, exactly as they were
    # before being pickled. This is purely an overly paranoid safety net - I
    # doubt this will ever be needed in real life.
    inf = (utcoffset, dstoffset, tzname)
    tz._tzinfos[inf] = tz.__class__(inf, tz._tzinfos)
    return tz._tzinfos[inf]
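
A small sketch of what unpickler() guarantees in practice: pickling and unpickling a localized datetime hands back the exact tzinfo singleton stored in tz._tzinfos, so identity checks keep working across the roundtrip (zone and date chosen arbitrarily):

import pickle
from datetime import datetime

import pytz

tz = pytz.timezone('US/Eastern')
aware = tz.localize(datetime(2006, 1, 1))
restored = pickle.loads(pickle.dumps(aware))
print(restored.tzinfo is aware.tzinfo)  # True
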
Example No. 30
    def testPartialMinuteOffsets(self):
        # The UTC offset in Amsterdam was not a whole number of minutes until
        # 1937. However, we fudge this by rounding it, as the Python datetime
        # library does.
        tz = pytz.timezone('Europe/Amsterdam')
        utc_dt = datetime(1914, 1, 1, 13, 40, 28, tzinfo=UTC)  # correct
        utc_dt = utc_dt.replace(second=0)  # But we need to fudge it
        loc_dt = utc_dt.astimezone(tz)
        self.assertEqual(loc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
                         '1914-01-01 14:00:00 AMT+0020')

        # And get back...
        utc_dt = loc_dt.astimezone(UTC)
        self.assertEqual(utc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
                         '1914-01-01 13:40:00 UTC+0000')
Example No. 31
class SamoaInternationalDateLineChange(USEasternDSTStartTestCase):
    # At the end of 2011, Samoa will switch from being east of the
    # international dateline to the west. There will be no Dec 30th
    # 2011 and it will switch from UTC-10 to UTC+14.
    tzinfo = pytz.timezone('Pacific/Apia')
    transition_time = datetime(2011, 12, 30, 10, 0, 0, tzinfo=UTC)
    before = {
        'tzname': 'SDT',
        'utcoffset': timedelta(hours=-10),
        'dst': timedelta(hours=1),
    }
    after = {
        'tzname': 'WSDT',
        'utcoffset': timedelta(hours=14),
        'dst': timedelta(hours=1),
    }
Example No. 32
class VilniusWMTEndTestCase(USEasternDSTStartTestCase):
    # At the end of 1916, Vilnius changed timezones putting its clock
    # forward by 11 minutes 35 seconds. Neither timezone was in DST mode.
    tzinfo = pytz.timezone('Europe/Vilnius')
    instant = timedelta(seconds=31)
    transition_time = datetime(1916, 12, 31, 22, 36, 00, tzinfo=UTC)
    before = {
        'tzname': 'WMT',
        'utcoffset': timedelta(hours=1, minutes=24),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'KMT',
        'utcoffset': timedelta(hours=1, minutes=36),  # Really 1:35:36
        'dst': timedelta(0),
    }
Example No. 33
class WarsawWMTEndTestCase(USEasternDSTStartTestCase):
    # In 1915, Warsaw changed from Warsaw Mean Time to Central European Time.
    # This involved the clocks being set backwards, causing an
    # end-of-DST-like situation without DST being involved.
    tzinfo = pytz.timezone('Europe/Warsaw')
    transition_time = datetime(1915, 8, 4, 22, 36, 0, tzinfo=UTC)
    before = {
        'tzname': 'WMT',
        'utcoffset': timedelta(hours=1, minutes=24),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'CET',
        'utcoffset': timedelta(hours=1),
        'dst': timedelta(0),
    }
Example No. 34
class VilniusCESTStartTestCase(USEasternDSTStartTestCase):
    # In 1941, Vilnius changed from MSK to CEST, switching to summer
    # time while simultaneously reducing its UTC offset by two hours,
    # causing the clocks to go backwards for this summer time
    # switchover.
    tzinfo = pytz.timezone('Europe/Vilnius')
    transition_time = datetime(1941, 6, 23, 21, 00, 00, tzinfo=UTC)
    before = {
        'tzname': 'MSK',
        'utcoffset': timedelta(hours=3),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'CEST',
        'utcoffset': timedelta(hours=2),
        'dst': timedelta(hours=1),
    }
Example No. 35
def astimezone(obj):
    """
    Interprets an object as a timezone.

    :rtype: tzinfo
    """

    if isinstance(obj, six.string_types):
        return timezone(obj)
    if isinstance(obj, tzinfo):
        if not hasattr(obj, 'localize') or not hasattr(obj, 'normalize'):
            raise TypeError('Only timezones from the pytz library are supported')
        if obj.zone == 'local':
            raise ValueError('Unable to determine the name of the local timezone -- use an explicit timezone instead')
        return obj
    if obj is not None:
        raise TypeError('Expected tzinfo, got %s instead' % obj.__class__.__name__)
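
A quick illustration of the duck-typing check above, assuming only pytz and the standard library: pytz zones expose localize()/normalize(), while stdlib datetime.timezone objects do not, which is exactly what astimezone() relies on to reject them:

from datetime import timezone as stdlib_timezone

import pytz

print(hasattr(pytz.timezone('UTC'), 'localize'))  # True
print(hasattr(stdlib_timezone.utc, 'localize'))   # False
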
Example No. 36
class NoumeaHistoryStartTestCase(USEasternDSTStartTestCase):
    # Noumea adopted a whole hour offset in 1912. Previously
    # it was 11 hours, 5 minutes and 48 seconds off UTC. However,
    # due to limitations of the Python datetime library, we need
    # to round that to 11 hours 6 minutes.
    tzinfo = pytz.timezone('Pacific/Noumea')
    transition_time = datetime(1912, 1, 12, 12, 54, 12, tzinfo=UTC)
    before = {
        'tzname': 'LMT',
        'utcoffset': timedelta(hours=11, minutes=6),
        'dst': timedelta(0),
    }
    after = {
        'tzname': 'NCT',
        'utcoffset': timedelta(hours=11),
        'dst': timedelta(0),
    }
Example No. 37
def _tz_from_env(tzenv):
    if tzenv[0] == ':':
        tzenv = tzenv[1:]

    # TZ specifies a file
    if os.path.exists(tzenv):
        with open(tzenv, 'rb') as tzfile:
            return pytz.tzfile.build_tzinfo('local', tzfile)

    # TZ specifies a zoneinfo zone.
    try:
        tz = pytz.timezone(tzenv)
        # That worked, so we return this:
        return tz
    except pytz.UnknownTimeZoneError:
        raise pytz.UnknownTimeZoneError(
            "tzlocal() does not support non-zoneinfo timezones like %s. \n"
            "Please use a timezone in the form of Continent/City" % tzenv)
Example No. 39
def convert_to_datetime(input, tz, arg_name):
    """
    Converts the given object to a timezone aware datetime object.
    If a timezone aware datetime object is passed, it is returned unmodified.
    If a naive datetime object is passed, it is given the specified timezone.
    If the input is a string, it is parsed as a datetime with the given timezone.

    Date strings are accepted in three different forms: date only (Y-m-d),
    date with time (Y-m-d H:M:S) or with date+time with microseconds
    (Y-m-d H:M:S.micro).

    :param str|datetime input: the datetime or string to convert to a timezone aware datetime
    :param datetime.tzinfo tz: timezone to interpret ``input`` in
    :param str arg_name: the name of the argument (used in an error message)
    :rtype: datetime
    """

    if input is None:
        return
    elif isinstance(input, datetime):
        datetime_ = input
    elif isinstance(input, date):
        datetime_ = datetime.combine(input, time())
    elif isinstance(input, six.string_types):
        m = _DATE_REGEX.match(input)
        if not m:
            raise ValueError('Invalid date string')
        values = [(k, int(v or 0)) for k, v in m.groupdict().items()]
        values = dict(values)
        datetime_ = datetime(**values)
    else:
        raise TypeError('Unsupported type for %s: %s' % (arg_name, input.__class__.__name__))

    if datetime_.tzinfo is not None:
        return datetime_
    if tz is None:
        raise ValueError('The "tz" argument must be specified if %s has no timezone information' % arg_name)
    if isinstance(tz, six.string_types):
        tz = timezone(tz)

    try:
        return tz.localize(datetime_, is_dst=None)
    except AttributeError:
        raise TypeError('Only pytz timezones are supported (need the localize() and normalize() methods)')
Example No. 40
    def test_fromutc(self):
        # naive datetime.
        dt1 = datetime(2011, 10, 31)

        # localized datetime, same timezone.
        dt2 = self.tz.localize(dt1)

        # Both should give the same results. Note that the standard
        # Python tzinfo.fromutc() only supports the second.
        for dt in [dt1, dt2]:
            loc_dt = self.tz.fromutc(dt)
            loc_dt2 = pytz.utc.localize(dt1).astimezone(self.tz)
            self.assertEqual(loc_dt, loc_dt2)

        # localized datetime, different timezone.
        new_tz = pytz.timezone('Europe/Paris')
        self.assertTrue(self.tz is not new_tz)
        dt3 = new_tz.localize(dt1)
        self.assertRaises(ValueError, self.tz.fromutc, dt3)
Example No. 41
    def testPartialMinuteOffsets(self):
        # The UTC offset in Amsterdam was not a whole number of minutes until
        # 1937. However, we fudge this by rounding it, as the Python datetime
        # library does.
        tz = pytz.timezone('Europe/Amsterdam')
        utc_dt = datetime(1914, 1, 1, 13, 40, 28, tzinfo=UTC) # correct
        utc_dt = utc_dt.replace(second=0) # But we need to fudge it
        loc_dt = utc_dt.astimezone(tz)
        self.assertEqual(
                loc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
                '1914-01-01 14:00:00 AMT+0020'
                )

        # And get back...
        utc_dt = loc_dt.astimezone(UTC)
        self.assertEqual(
                utc_dt.strftime('%Y-%m-%d %H:%M:%S %Z%z'),
                '1914-01-01 13:40:00 UTC+0000'
                )
Example No. 42
class LondonHistoryEndTestCase(USEasternDSTStartTestCase):
    # Timezone switchovers are projected into the future, even
    # though no official statements exist or could be believed even
    # if they did exist. We currently only check the last known
    # transition in 2037, as we are still using v1 format tzfile(5)
    # files.
    tzinfo = pytz.timezone('Europe/London')
    # transition_time = datetime(2499, 10, 25, 1, 0, 0, tzinfo=UTC)
    transition_time = datetime(2037, 10, 25, 1, 0, 0, tzinfo=UTC)
    before = {
        'tzname': 'BST',
        'utcoffset': timedelta(hours=1),
        'dst': timedelta(hours=1),
    }
    after = {
        'tzname': 'GMT',
        'utcoffset': timedelta(0),
        'dst': timedelta(0),
    }
Example No. 44
def astimezone(obj):
    """
    Interprets an object as a timezone.

    :rtype: tzinfo
    """

    if isinstance(obj, six.string_types):
        return timezone(obj)
    if isinstance(obj, tzinfo):
        if not hasattr(obj, 'localize') or not hasattr(obj, 'normalize'):
            raise TypeError(
                'Only timezones from the pytz library are supported')
        if obj.zone == 'local':
            raise ValueError(
                'Unable to determine the name of the local timezone -- use an explicit timezone instead'
            )
        return obj
    if obj is not None:
        raise TypeError('Expected tzinfo, got %s instead' %
                        obj.__class__.__name__)
Example No. 45
def get_localzone():
    """Returns the zoneinfo-based tzinfo object that matches the Windows-configured timezone."""
    global _cache_tz
    if _cache_tz is None:
        _cache_tz = pytz.timezone(get_localzone_name())
    return _cache_tz
Example No. 46
def reload_localzone():
    """Reload the cached localzone. You need to call this if the timezone has changed."""
    global _cache_tz
    _cache_tz = pytz.timezone(get_localzone_name())
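
A hedged usage sketch of the two helpers above (assuming the tzlocal package is installed on Windows): the zone is looked up once, cached in _cache_tz, and refreshed only on request. The printed zone name is just an example:

from tzlocal import get_localzone, reload_localzone

tz = get_localzone()   # computed once, then served from the _cache_tz global
print(tz.zone)         # e.g. 'Europe/Zurich'
reload_localzone()     # force a re-read after the system timezone changes
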
Example No. 47
    def testLocalize(self):
        loc_tz = pytz.timezone('Europe/Amsterdam')

        loc_time = loc_tz.localize(datetime(1930, 5, 10, 0, 0, 0))
        # Actually +00:19:32, but Python datetime rounds this
        self.assertEqual(loc_time.strftime('%Z%z'), 'AMT+0020')

        loc_time = loc_tz.localize(datetime(1930, 5, 20, 0, 0, 0))
        # Actually +01:19:32, but Python datetime rounds this
        self.assertEqual(loc_time.strftime('%Z%z'), 'NST+0120')

        loc_time = loc_tz.localize(datetime(1940, 5, 10, 0, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'NET+0020')

        loc_time = loc_tz.localize(datetime(1940, 5, 20, 0, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'CEST+0200')

        loc_time = loc_tz.localize(datetime(2004, 2, 1, 0, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'CET+0100')

        loc_time = loc_tz.localize(datetime(2004, 4, 1, 0, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'CEST+0200')

        tz = pytz.timezone('Europe/Amsterdam')
        loc_time = loc_tz.localize(datetime(1943, 3, 29, 1, 59, 59))
        self.assertEqual(loc_time.strftime('%Z%z'), 'CET+0100')


        # Switch to US
        loc_tz = pytz.timezone('US/Eastern')

        # End of DST ambiguity check
        loc_time = loc_tz.localize(datetime(1918, 10, 27, 1, 59, 59), is_dst=1)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EDT-0400')

        loc_time = loc_tz.localize(datetime(1918, 10, 27, 1, 59, 59), is_dst=0)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')

        self.assertRaises(pytz.AmbiguousTimeError,
            loc_tz.localize, datetime(1918, 10, 27, 1, 59, 59), is_dst=None
            )

        # Start of DST non-existent times
        loc_time = loc_tz.localize(datetime(1918, 3, 31, 2, 0, 0), is_dst=0)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')

        loc_time = loc_tz.localize(datetime(1918, 3, 31, 2, 0, 0), is_dst=1)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EDT-0400')

        self.assertRaises(pytz.NonExistentTimeError,
            loc_tz.localize, datetime(1918, 3, 31, 2, 0, 0), is_dst=None
            )

        # Weird changes - war time and peace time both is_dst==True

        loc_time = loc_tz.localize(datetime(1942, 2, 9, 3, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'EWT-0400')

        loc_time = loc_tz.localize(datetime(1945, 8, 14, 19, 0, 0))
        self.assertEqual(loc_time.strftime('%Z%z'), 'EPT-0400')

        loc_time = loc_tz.localize(datetime(1945, 9, 30, 1, 0, 0), is_dst=1)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EPT-0400')

        loc_time = loc_tz.localize(datetime(1945, 9, 30, 1, 0, 0), is_dst=0)
        self.assertEqual(loc_time.strftime('%Z%z'), 'EST-0500')

        # Weird changes - ambiguous time (end-of-DST like) but is_dst==False
        for zonename, ambiguous_naive, expected in [
                ('Europe/Warsaw', datetime(1915, 8, 4, 23, 59, 59),
                 ['1915-08-04 23:59:59 WMT+0124',
                  '1915-08-04 23:59:59 CET+0100']),
                ('Europe/Moscow', datetime(2014, 10, 26, 1, 30),
                 ['2014-10-26 01:30:00 MSK+0400',
                  '2014-10-26 01:30:00 MSK+0300'])]:
            loc_tz = pytz.timezone(zonename)
            self.assertRaises(pytz.AmbiguousTimeError,
                loc_tz.localize, ambiguous_naive, is_dst=None
                )
            # Also test non-boolean is_dst in the weird case
            for dst in [True, timedelta(1), False, timedelta(0)]:
                loc_time = loc_tz.localize(ambiguous_naive, is_dst=dst)
                self.assertEqual(loc_time.strftime(fmt), expected[not dst])
Example No. 48
def execute(args, conf=None):
    def log(message):
        if conf is not None and 'logger' in conf:
            conf['logger'].log(message)
        else:
            print message

    def addMessage(self, message):
        if 'logger' in conf:
            conf['logger'].addMessage(message)
        else:
            print message

    def addWarning(self, message):
        if 'logger' in conf:
            conf['logger'].addWarning(message)
        else:
            print message

    def addException(self, message):
        if 'logger' in conf:
            conf['logger'].addException(message)
        else:
            print message

    pp = pprint.PrettyPrinter(indent=2)

    try:

        # Initializing URLs
        url = args['u']

        # Service instance name
        service = args['s']

        # Quality index
        quality = '100'
        if 'q' in args:
            quality = args['q']

        # Procedures
        procs = args['p']

        # Working directory, where the CSV files are located
        wd = args['wd']

        # File extension
        ext = '.dat'
        if 'e' in args:
            ext = args['e']

        debug = False
        if 'v' in args:
            debug = args['v']

        test = False
        if 't' in args:
            test = args['t']

        user = None
        if 'user' in args and args['user'] is not None:
            user = args['user']

        password = None
        if 'password' in args and args['password'] is not None:
            password = args['password']

        auth = None
        if user and password:
            auth = HTTPBasicAuth(user, password)

        noqi = False  # False means that the quality index is also migrated
        if 'noqi' in args:
            if args['noqi'] is True:
                noqi = True

        maxobs = 5000
        if 'm' in args:
            maxobs = int(args['m'])

        #req = requests.session()
        req = requests

        for proc in procs:

            log("\nProcedure: %s" % proc)

            if conf is not None and 'description' in conf:
                data = conf['description']
            else:
                # Load procedure description
                res = req.get("%s/wa/istsos/services/%s/procedures/%s" %
                              (url, service, proc),
                              auth=auth,
                              verify=False)

                data = res.json()

                if data['success'] is False:
                    raise Exception(
                        "Description of procedure %s can not be loaded: %s" %
                        (proc, data['message']))
                else:
                    log("%s > %s" % (proc, data['message']))

                data = data['data']

            aid = data['assignedSensorId']

            # Getting observed properties from describeSensor response
            op = []
            for out in data['outputs']:
                if not noqi or not ':qualityIndex' in out['definition']:
                    op.append(out['definition'])

            # Load of a getobservation request
            res = req.get("%s/wa/istsos/services/%s/operations/getobservation/"
                          "offerings/%s/procedures/%s/observedproperties/%s/ev"
                          "enttime/last" %
                          (url, service, 'temporary', proc, ','.join(op)),
                          auth=auth,
                          verify=False)

            data = res.json()

            if data['success'] is False:
                raise Exception("Last observation of procedure %s can not be "
                                "loaded: %s" % (proc, data['message']))
            else:
                log("%s > %s" % (proc, data['message']))

            data = data['data'][0]
            data['AssignedSensorId'] = aid

            # Set values array empty, can contain 1 value if
            # procedure not empty
            if len(data['result']['DataArray']['values']) == 1:
                lastMeasure = data['result']['DataArray']['values'][0]
            else:
                lastMeasure = None

            data['result']['DataArray']['values'] = []

            # discover json observed property disposition
            jsonindex = {}
            for pos in range(0, len(data['result']['DataArray']['field'])):
                field = data['result']['DataArray']['field'][pos]
                if not noqi:
                    jsonindex[field['definition']] = pos
                elif not ':qualityIndex' in field['definition']:
                    jsonindex[field['definition']] = pos
                elif ':qualityIndex' in field['definition'] and noqi:
                    data['result']['DataArray']['field'].pop(pos)

            log("Searching: %s" % os.path.join(wd, "%s_[0-9]*%s" %
                                               (proc, ext)))

            files = glob.glob(os.path.join(wd, "%s_*%s" % (proc, ext)))
            files.sort()

            if debug:
                log("%s > %s %s found" %
                    (proc, len(files), "Files" if len(files) > 1 else "File"))

            if len(files) > 0:
                for f in files:

                    # open file
                    file = open(f, 'rU')

                    # loop lines
                    lines = file.readlines()

                    obsindex = lines[0].strip(' \t\n\r').split(",")

                    # Check if all the observedProperties of the procedure are
                    # included in the CSV file (quality index is optional)
                    for k, v in jsonindex.iteritems():
                        if k in obsindex:
                            continue
                        elif ':qualityIndex' in k:
                            continue
                        else:
                            raise Exception(
                                "Mandatory observed property %s is not present"
                                " in the CSV." % k)

                    # loop lines (skipping header)
                    for i in range(1, len(lines)):
                        try:
                            line = lines[i]
                            lineArray = line.strip(' \t\n\r').split(",")

                            # Creating an empty array where the values will
                            # be inserted
                            observation = [''] * len(jsonindex)

                            for k, v in jsonindex.iteritems():
                                val = None
                                if k in obsindex:
                                    val = lineArray[obsindex.index(k)]
                                elif ':qualityIndex' in k:
                                    # Quality index is not present in the CSV
                                    # so the default value will be set
                                    val = quality

                                observation[v] = val

                            # attach to object
                            data['result']['DataArray']['values'].append(
                                observation)

                        except Exception as e:
                            raise Exception("Errore alla riga: %s - %s\n%s" %
                                            (i, lines[i], str(e)))

                log("Before insert ST: %s" % proc)
                if 'beginPosition' in data["samplingTime"]:
                    log(" > Begin: %s" % data["samplingTime"]["beginPosition"])
                if 'endPosition' in data["samplingTime"]:
                    log("   + End: %s" % data["samplingTime"]["endPosition"])

                dtstr = os.path.split(f)[1].replace("%s_" % proc,
                                                    "").replace(ext, "")

                offset = False

                if '+' in dtstr:
                    offset = dtstr[dtstr.index('+'):]
                    offset = [offset[0:3], offset[3:5]]
                    dtstr = dtstr[:dtstr.index('+')]

                elif '-' in dtstr:
                    offset = dtstr[dtstr.index('-'):]
                    offset = [offset[0:3], offset[3:5]]
                    dtstr = dtstr[:dtstr.index('-')]

                ep = datetime.strptime(
                    dtstr, "%Y%m%d%H%M%S%f").replace(tzinfo=timezone('UTC'))

                if offset:
                    ep = ep - timedelta(hours=int(offset[0]),
                                        minutes=int(offset[1]))

                # A kick in the brain code snippet
                # If there is at least one measure:
                if len(data['result']['DataArray']['values']) > 0:
                    # taking first observation as begin position
                    bp = iso.parse_datetime(data['result']['DataArray']
                                            ['values'][0][jsonindex[isourn]])

                else:
                    # otherwise this can be an irregular procedure where just
                    # the end position is moved forward
                    if ep > iso.parse_datetime(
                            data["samplingTime"]["endPosition"]):
                        bp = ep
                    else:
                        raise Exception(
                            "Something is wrong with begin position..")

                data["samplingTime"] = {
                    "beginPosition": bp.isoformat(),
                    "endPosition": ep.isoformat()
                }

                log("Insert ST: %s" % proc)
                log(" > Begin: %s" % bp.isoformat())
                log("   + End: %s" % ep.isoformat())
                log(" > Values: %s" %
                    len(data['result']['DataArray']['values']))

                if not test and len(files) > 0:  # send to wa

                    if len(data['result']['DataArray']['values']) > maxobs:

                        total = len(data['result']['DataArray']['values'])
                        inserted = last = maxobs

                        while len(data['result']['DataArray']['values']) > 0:

                            tmpData = copy.deepcopy(data)
                            tmpData['result']['DataArray']['values'] = (
                                data['result']['DataArray']['values'][:last])
                            data['result']['DataArray']['values'] = (
                                data['result']['DataArray']['values'][last:])

                            if len(data['result']['DataArray']['values']) > 0:
                                tmpData["samplingTime"] = {
                                    "beginPosition":
                                    tmpData['result']['DataArray']['values'][0]
                                    [jsonindex[isourn]],
                                    "endPosition":
                                    data['result']['DataArray']['values'][0][
                                        jsonindex[isourn]]
                                }
                            else:
                                tmpData["samplingTime"] = {
                                    "beginPosition":
                                    tmpData['result']['DataArray']['values'][0]
                                    [jsonindex[isourn]],
                                    "endPosition":
                                    ep.isoformat()
                                }

                            res = req.post("%s/wa/istsos/services/%s/"
                                           "operations/insertobservation" %
                                           (url, service),
                                           auth=auth,
                                           verify=False,
                                           data=json.dumps({
                                               "ForceInsert":
                                               "true",
                                               "AssignedSensorId":
                                               aid,
                                               "Observation":
                                               tmpData
                                           }))

                            # read response
                            res.raise_for_status()
                            log(" > Insert observation success of %s/%s (%s / "
                                "%s) observations: %s" %
                                (inserted, total,
                                 tmpData["samplingTime"]["beginPosition"],
                                 tmpData["samplingTime"]["endPosition"],
                                 res.json()['success']))

                            if not res.json()['success']:
                                log(res.json()['message'])

                            if len(data['result']['DataArray']
                                   ['values']) < maxobs:
                                last = len(
                                    data['result']['DataArray']['values'])
                            inserted += last

                    else:

                        res = req.post("%s/wa/istsos/services/%s/operations/"
                                       "insertobservation" % (url, service),
                                       auth=auth,
                                       verify=False,
                                       data=json.dumps({
                                           "ForceInsert": "true",
                                           "AssignedSensorId": aid,
                                           "Observation": data
                                       }))

                        # read response
                        res.raise_for_status()
                        log(" > Insert observation success: %s" %
                            (res.json()['success']))

                        if not res.json()['success']:
                            log(res.json()['message'])

        pass

    except requests.exceptions.HTTPError as eh:
        #addException(str(eh))
        traceback.print_exc()

    except Exception as e:
        #addException(str(e))
        traceback.print_exc()
Example No. 49
 def test_only_localtime(self):
     local_path = os.path.split(__file__)[0]
     tz = tzlocal.unix._get_localzone(_root=os.path.join(local_path, 'test_data', 'localtime'))
     self.assertEqual(tz.zone, 'local')
     dt = datetime(2012, 1, 1, 5)
     self.assertEqual(pytz.timezone('Africa/Harare').localize(dt), tz.localize(dt))
Ejemplo n.º 50
0
    def testRoundtrip(self):
        dt = datetime(2004, 2, 1, 0, 0, 0)
        for zone in pytz.all_timezones:
            tz = pytz.timezone(zone)
            self._roundtrip_tzinfo(tz)
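
The _roundtrip_tzinfo helper is not included in this snippet; a minimal sketch of what such a pickle round-trip check could look like (the body and the assertion message are assumptions, not taken from the source):

    def _roundtrip_tzinfo(self, tz):
        # Pickle the tzinfo and load it back; pytz interns its timezone
        # instances, so the unpickled object should be the very same object.
        p = pickle.dumps(tz)
        unpickled_tz = pickle.loads(p)
        self.assertTrue(tz is unpickled_tz, '%s did not roundtrip' % tz)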
Ejemplo n.º 51
0
    def __delay(self, procedureName, servicedb):
        """
            Get the delay status (check last observation and sampling time)
            return a dict containing status, last observation, delay (s), and cycle delay
        """
        sql = """
        SELECT p.etime_prc as time, p.time_acq_prc as delay
        FROM  %s.procedures p
        WHERE
        """ % (self.service, )

        sql += """ p.name_prc = %s;
        """
        par = (procedureName, )
        row = servicedb.select(sql, par)
        statusDict = {}

        # if the procedure exists
        if len(row) == 1:

            lastDate = row[0]['time']
            if row[0]['delay'] is None:
                return None

            delay = int(row[0]['delay'])

            if delay == 0:
                return None

            nowDate = datetime.now().replace(tzinfo=timezone(time.tzname[0]))

            limitDelay = timedelta(seconds=delay).total_seconds()
            tmpDelay = 0
            tmpCycle = 0

            if lastDate is None:
                status = "No observation"
                lastDate = "No observation"
            else:
                tmpDelta = (nowDate - lastDate).total_seconds()
                if (tmpDelta > limitDelay) and delay > 0:
                    status = "NOT OK"
                    tmpDelay = tmpDelta
                else:
                    status = "OK"
                    tmpDelay = limitDelay - tmpDelta

                if limitDelay > 0:
                    tmpCycle = tmpDelta / limitDelay
                else:
                    tmpCycle = 0
                    tmpDelay = 0

            statusDict['status'] = status
            statusDict['lastObservation'] = lastDate
        else:
            statusDict['status'] = "No observation"
            statusDict['lastObservation'] = "No observation"

        statusDict['delay'] = tmpDelay
        statusDict['cycle'] = tmpCycle

        return statusDict
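
A hedged sketch of how a caller might use the returned dictionary; the procedure name, the servicedb handle and the messages below are hypothetical, not taken from the snippet:

        # check the acquisition delay of a (hypothetical) procedure
        status = self.__delay('T_LUGANO', servicedb)
        if status is None:
            # no acquisition interval configured (delay is NULL or 0)
            pass
        elif status['status'] == 'NOT OK':
            # last observation is older than the configured acquisition interval
            print "Procedure is late by %s seconds (cycle %.2f)" % (
                status['delay'], status['cycle'])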
Ejemplo n.º 52
0
    def getDateTimeWithTimeZone(self, dt, tz):
        dt = dt.replace(tzinfo=timezone('UTC'))
        offset = tz.split(":")
        return dt - timedelta(hours=int(offset[0]), minutes=int(offset[1]))
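
A small usage sketch, assuming tz is an offset string such as "+02:00"; the sample values are illustrative only:

        # a reading taken at 14:30 local time at UTC+02:00 becomes 12:30 UTC
        utc_dt = self.getDateTimeWithTimeZone(datetime(2015, 6, 1, 14, 30), "+02:00")
        print utc_dt.isoformat()  # 2015-06-01T12:30:00+00:00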
Ejemplo n.º 53
0
def execute (args, logger=None):
    
    def log(message):
        if logger:
            logger.log(message)
        else:
            print message
    
    pp = pprint.PrettyPrinter(indent=2)
    
    try:
    
        # Initializing URLs
        url = args['u']
        
        # Service instance name
        service = args['s']
        
        # Quality index
        quality = '100'
        if 'q' in args:
            quality = args['q']
        
        # Procedures
        procs = args['p']
        
        # Working directory, where the CSV files are located
        wd = args['wd']
        
        # File extension
        ext = '.dat'
        if 'e' in args:
            ext = args['e']
        
        debug = False
        if 'v' in args:
            debug = args['v']
        
        test = False
        if 't' in args:
            test = args['t']
        
        user = None
        if 'user' in args:
            user = args['user']
        password = None
        if 'password' in args:
            password = args['password']
            
        auth = None
        if user and password:
            auth = HTTPBasicAuth(user, password)
        
        noqi = False  # False means that the quality index is also migrated
        if 'noqi' in args:
            if args['noqi'] == True: 
                noqi = True
            
        #print "noqi: %s" % noqi
            
        maxobs = 5000
        if 'm' in args:
            maxobs = int(args['m'])
            
        #req = requests.session()
        req = requests
        
        for proc in procs:
            
            log("\nProcedure: %s" % proc)
            
            # Load procedure description                
            res = req.get("%s/wa/istsos/services/%s/procedures/%s" % (
                url,
                service,
                proc
                ), auth=auth, verify=False)
                
            data = res.json()
                
            if data['success']==False:
                raise Exception ("Description of procedure %s can not be loaded: %s" % (proc, data['message']))
            else:
                print " > %s" % data['message']
            
            data = data['data']
            
            aid = data['assignedSensorId']
            
            # Getting observed properties from describeSensor response
            op = []
            for out in data['outputs']:
                if not noqi or not ':qualityIndex' in out['definition']:
                    op.append(out['definition'])
                    
            # Load of a getobservation request
            res = req.get("%s/wa/istsos/services/%s/operations/getobservation/offerings/%s/procedures/%s/observedproperties/%s/eventtime/last" % (
                url,
                service,
                'temporary',
                proc,
                ','.join(op)
                ), auth=auth, verify=False)
            
            data = res.json()
            
            if data['success']==False:
                raise Exception ("Last observation of procedure %s can not be loaded: %s" % (proc, data['message']))
            else:
                print " > %s" % data['message']
                    
            data = data['data'][0]
            data['AssignedSensorId'] = aid
            
            # Set values array empty (can contain 1 value if procedure not empty)
            lastMeasure = data['result']['DataArray']['values'][0] if len(data['result']['DataArray']['values'])==1 else None
            data['result']['DataArray']['values'] = []
            
            # discover json observed property disposition
            jsonindex = {}
            for pos in range(0, len(data['result']['DataArray']['field'])):
                field = data['result']['DataArray']['field'][pos]
                if not noqi:
                    jsonindex[field['definition']] = pos
                elif not ':qualityIndex' in field['definition']:
                    jsonindex[field['definition']] = pos
                elif ':qualityIndex' in field['definition'] and noqi:
                    data['result']['DataArray']['field'].pop(pos)
            
            log ("Searching: %s" % os.path.join(wd, "%s_[0-9]*%s" % (proc,ext)))
                
            files = glob.glob(os.path.join(wd, "%s_*%s" % (proc,ext)))
            files.sort()
            
            if debug:
                print " > %s %s found" % (len(files), "Files" if len(files)>1 else "File")
                
            if len(files)>0:
                for f in files:
                
                    # open file and read all lines
                    csvfile = open(f, 'rU')
                    lines = csvfile.readlines()
                    csvfile.close()
                    
                    obsindex = lines[0].strip(' \t\n\r').split(",")
                    
                    # Check if all the observedProperties of the procedure are included in the CSV file (quality index is optional)
                    for k, v in jsonindex.iteritems():
                        if k in obsindex:
                            continue
                        elif ':qualityIndex' in k:
                            continue
                        else:
                            raise Exception ("Mandatory observed property %s is not present in the CSV." % k)
                    
                    # loop lines (skipping header)
                    for i in range(1, len(lines)):
                        try:
                            line = lines[i]
                            lineArray = line.strip(' \t\n\r').split(",")
                            
                            # Creating an empty array where the values will be inserted
                            observation =  ['']*len(jsonindex)
                            
                            for k, v in jsonindex.iteritems():
                                val = None
                                if k in obsindex:
                                    val = lineArray[obsindex.index(k)]
                                elif ':qualityIndex' in k: # Quality index is not present in the CSV so the default value will be set
                                    val = quality
                                    
                                observation[v] = val
                                
                            # attach to object
                            data['result']['DataArray']['values'].append(observation)
                            
                        except Exception as e:
                            print "Errore alla riga: %s - %s)" % (i, lines[i])
                            traceback.print_exc()
                            raise e
                            
                log ("Before insert ST:")
                if 'beginPosition' in data["samplingTime"]:
                    log (" > Begin: %s" % data["samplingTime"]["beginPosition"])
                if 'endPosition' in data["samplingTime"]:
                    log ("   + End: %s" % data["samplingTime"]["endPosition"])
                
                ep = datetime.strptime(
                    os.path.split(f)[1].replace("%s_" % proc, "").replace(ext, ""),"%Y%m%d%H%M%S%f"
                ).replace(tzinfo=timezone('UTC'))
                
                # Work out the sampling time period (begin/end position) for this insert
                
                # If there is at least one measure:
                if len(data['result']['DataArray']['values'])>0:
                
                    # taking first observation as begin position
                    bp = iso.parse_datetime(
                        data['result']['DataArray']['values'][0][jsonindex['urn:ogc:def:parameter:x-istsos:1.0:time:iso8601']]
                    )
                        
                else:  # otherwise this can be an irregular procedure where just the end position is moved forward
                
                    if ep > iso.parse_datetime(data["samplingTime"]["endPosition"]):
                        bp = ep
                    else:
                        raise Exception("Something is wrong with begin position..")
                        
                data["samplingTime"] = {
                    "beginPosition": bp.isoformat(),
		            "endPosition":  ep.isoformat()
                }
                
                #data["result"]["DataArray"]["elementCount"] = str(len(data['result']['DataArray']['values']))
                
                log ("Insert ST:")
                log (" > Begin: %s" % bp.isoformat())
                log ("   + End: %s" % ep.isoformat())
                log (" > Values: %s" % len( data['result']['DataArray']['values']))
                    
                if not test and len(files)>0: # send to wa
                
                    if len(data['result']['DataArray']['values']) > maxobs:
                    
                        import copy
                        
                        total = len(data['result']['DataArray']['values'])
                        inserted = last = maxobs
                        
                        while len(data['result']['DataArray']['values'])>0:
                            
                            tmpData = copy.deepcopy(data)                            
                            tmpData['result']['DataArray']['values'] = data['result']['DataArray']['values'][:last]                            
                            data['result']['DataArray']['values']    = data['result']['DataArray']['values'][last:]
                            
                            if len(data['result']['DataArray']['values'])>0:
                                tmpData["samplingTime"] = {
                                    "beginPosition": tmpData['result']['DataArray']['values'][0][jsonindex['urn:ogc:def:parameter:x-istsos:1.0:time:iso8601']],
			                        "endPosition":      data['result']['DataArray']['values'][0][jsonindex['urn:ogc:def:parameter:x-istsos:1.0:time:iso8601']]
                                }
                            else:
                                tmpData["samplingTime"] = {
                                    "beginPosition": tmpData['result']['DataArray']['values'][0][jsonindex['urn:ogc:def:parameter:x-istsos:1.0:time:iso8601']],
			                        "endPosition":   ep.isoformat()
                                }
                                
                            res = req.post("%s/wa/istsos/services/%s/operations/insertobservation" % (
                                url,
                                service), 
                                auth=auth,
                                verify=False,
                                data=json.dumps({
                                    "ForceInsert": "true",
                                    "AssignedSensorId": aid,
                                    "Observation": tmpData
                                })
                            )
                            
                            # read response
                            res.raise_for_status()
                            log (" > Insert observation success of %s/%s (%s / %s) observations: %s" % (inserted,total,tmpData["samplingTime"]["beginPosition"],tmpData["samplingTime"]["endPosition"],res.json()['success']))
                            if not res.json()['success']:
                                log (res.json()['message'])
                                
                            if len(data['result']['DataArray']['values'])<maxobs:
                                last = len(data['result']['DataArray']['values'])
                            inserted += last
                            
                    
                    else:
                
                        res = req.post("%s/wa/istsos/services/%s/operations/insertobservation" % (
                            url,
                            service), 
                            auth=auth,
                            verify=False,
                            data=json.dumps({
                                "ForceInsert": "true",
                                "AssignedSensorId": aid,
                                "Observation": data
                            })
                        )
                        # read response
                        res.raise_for_status()
                        log (" > Insert observation success: %s" % res.json()['success'])
                        if not res.json()['success']:
                            log (res.json()['message'])
                        
                    
                    print "~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~*~"
        pass
    
    except requests.exceptions.HTTPError as eh:
        print "ERROR: %s\n\n" % eh
        traceback.print_exc()
        pass
    except Exception as e:    
        print "ERROR: %s\n\n" % e
        traceback.print_exc()
        pass
        
    pass
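
A hedged sketch of how this entry point might be invoked; the URL, service name, procedure list and working directory below are placeholders, not values taken from the snippet:

execute({
    'u': 'http://localhost/istsos',  # istSOS base URL (placeholder)
    's': 'demo',                     # service instance name (placeholder)
    'p': ['T_LUGANO'],               # procedures whose CSV files will be migrated (placeholder)
    'wd': '/tmp/csv',                # folder containing the <procedure>_<datetime>.dat files
    'm': 5000,                       # max observations per insertobservation request
    't': True                        # test mode: parse the files but do not POST
})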
Ejemplo n.º 54
0
    def __delay(self, procedureName, servicedb):
        """
            Get the delay status (check last observation and sampling time)
            return a dict containing status, last observation, delay (s), and cycle delay
        """
        sql = """
        SELECT p.etime_prc as time, p.time_acq_prc as delay
        FROM  %s.procedures p
        WHERE
        """ % (self.service,)

        sql += """ p.name_prc = %s;
        """
        par = (procedureName,)
        row = servicedb.select(sql, par)
        statusDict = {}

        # if the procedure exists
        if len(row) == 1:

            lastDate = row[0]['time']
            if row[0]['delay'] is None:
                return None

            delay = int(row[0]['delay'])

            if delay == 0:
                return None

            nowDate = datetime.now().replace(tzinfo=timezone(time.tzname[0]))

            limitDelay = timedelta(seconds=delay).total_seconds()
            tmpDelay = 0
            tmpCycle = 0

            if lastDate is None:
                status = "No observation"
                lastDate = "No observation"
            else:
                tmpDelta = (nowDate - lastDate).total_seconds()
                if (tmpDelta > limitDelay) and delay > 0:
                    status = "NOT OK"
                    tmpDelay = tmpDelta
                else:
                    status = "OK"
                    tmpDelay = limitDelay - tmpDelta

                if limitDelay > 0:
                    tmpCycle = tmpDelta / limitDelay
                else:
                    tmpCycle = 0
                    tmpDelay = 0

            statusDict['status'] = status
            statusDict['lastObservation'] = lastDate
        else:
            statusDict['status'] = "No observation"
            statusDict['lastObservation'] = "No observation"

        statusDict['delay'] = tmpDelay
        statusDict['cycle'] = tmpCycle

        return statusDict
Ejemplo n.º 55
0
from lib.pytz import reference
from lib.pytz.tzfile import _byte_string
from lib.pytz.tzinfo import DstTzInfo, StaticTzInfo

# I test for expected version to ensure the correct version of pytz is
# actually being tested.
EXPECTED_VERSION='2015.7'
EXPECTED_OLSON_VERSION='2015g'

fmt = '%Y-%m-%d %H:%M:%S %Z%z'

NOTIME = timedelta(0)

# GMT is a tzinfo.StaticTzInfo--the class we primarily want to test--while
# UTC is reference implementation.  They both have the same timezone meaning.
UTC = pytz.timezone('UTC')
GMT = pytz.timezone('GMT')
assert isinstance(GMT, StaticTzInfo), 'GMT is no longer a StaticTzInfo'

def prettydt(dt):
    """datetime as a string using a known format.

    We don't use strftime as it doesn't handle years earlier than 1900
    per http://bugs.python.org/issue1777412
    """
    if dt.utcoffset() >= timedelta(0):
        offset = '+%s' % (dt.utcoffset(),)
    else:
        offset = '-%s' % (-1 * dt.utcoffset(),)
    return '%04d-%02d-%02d %02d:%02d:%02d %s %s' % (
        dt.year, dt.month, dt.day,
        dt.hour, dt.minute, dt.second,
        dt.tzname(), offset)
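
A short usage sketch; the sample date is illustrative and assumes the datetime import of the full test module:

print(prettydt(UTC.localize(datetime(1812, 6, 18, 9, 30, 0))))
# -> 1812-06-18 09:30:00 UTC +0:00:00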
Ejemplo n.º 56
0
def getDateTimeWithTimeZone(dt, tz):
    dt = dt.replace(tzinfo=timezone('UTC'))
    offset = tz.split(":")
    return dt - timedelta(hours=int(offset[0]), minutes=int(offset[1]))
Ejemplo n.º 57
0
def _get_localzone(_root='/'):
    """Tries to find the local timezone configuration.

    This method prefers finding the timezone name and passing that to pytz,
    over passing in the localtime file, as in the latter case the zoneinfo
    name is unknown.

    The parameter _root makes the function look for files like /etc/localtime
    beneath the _root directory. This is primarily used by the tests.
    In normal usage you call the function without parameters."""

    tzenv = _try_tz_from_env()
    if tzenv:
        return tzenv

    # Now look for distribution specific configuration files
    # that contain the timezone name.
    for configfile in ('etc/timezone', 'var/db/zoneinfo'):
        tzpath = os.path.join(_root, configfile)
        if os.path.exists(tzpath):
            with open(tzpath, 'rb') as tzfile:
                data = tzfile.read()

                # Issue #3 was that /etc/timezone was a zoneinfo file.
                # That's a misconfiguration, but we need to handle it gracefully:
                if data[:5] == b'TZif2':
                    continue

                etctz = data.strip().decode()
                # Get rid of host definitions and comments:
                if ' ' in etctz:
                    etctz, dummy = etctz.split(' ', 1)
                if '#' in etctz:
                    etctz, dummy = etctz.split('#', 1)
                return pytz.timezone(etctz.replace(' ', '_'))

    # CentOS has a ZONE setting in /etc/sysconfig/clock,
    # OpenSUSE has a TIMEZONE setting in /etc/sysconfig/clock and
    # Gentoo has a TIMEZONE setting in /etc/conf.d/clock
    # We look through these files for a timezone:

    zone_re = re.compile(r'\s*ZONE\s*=\s*"')
    timezone_re = re.compile(r'\s*TIMEZONE\s*=\s*"')
    end_re = re.compile(r'"')

    for filename in ('etc/sysconfig/clock', 'etc/conf.d/clock'):
        tzpath = os.path.join(_root, filename)
        if not os.path.exists(tzpath):
            continue
        with open(tzpath, 'rt') as tzfile:
            data = tzfile.readlines()

        for line in data:
            # Look for the ZONE= setting.
            match = zone_re.match(line)
            if match is None:
                # No ZONE= setting. Look for the TIMEZONE= setting.
                match = timezone_re.match(line)
            if match is not None:
                # Some setting existed
                line = line[match.end():]
                etctz = line[:end_re.search(line).start()]

                # We found a timezone
                return pytz.timezone(etctz.replace(' ', '_'))

    # systemd distributions use symlinks that include the zone name,
    # see manpage of localtime(5) and timedatectl(1)
    tzpath = os.path.join(_root, 'etc/localtime')
    if os.path.exists(tzpath) and os.path.islink(tzpath):
        tzpath = os.path.realpath(tzpath)
        start = tzpath.find("/")+1
        while start != 0:
            tzpath = tzpath[start:]
            try:
                return pytz.timezone(tzpath)
            except pytz.UnknownTimeZoneError:
                pass
            start = tzpath.find("/")+1

    # No explicit setting existed. Use localtime
    for filename in ('etc/localtime', 'usr/local/etc/localtime'):
        tzpath = os.path.join(_root, filename)

        if not os.path.exists(tzpath):
            continue
        with open(tzpath, 'rb') as tzfile:
            return pytz.tzfile.build_tzinfo('local', tzfile)

    raise pytz.UnknownTimeZoneError('Can not find any timezone configuration')
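
A hedged usage sketch, assuming datetime is imported; the zone that is found depends entirely on the host configuration:

tz = _get_localzone()
print(tz)  # e.g. Europe/Zurich, depending on /etc/timezone or /etc/localtime
dt = tz.localize(datetime(2015, 7, 1, 12, 0, 0))
print(dt.isoformat())  # e.g. 2015-07-01T12:00:00+02:00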