def is_iso8601(time):
    """Check whether *time* is a valid ISO-8601 time string.

    @param time the time string we wish to validate
    @return boolean False if it is invalid, else True
    """
    try:
        iso8601.parse(time)
    except ValueError:
        return False
    # parse() succeeded without raising, so the value is valid.
    return True
def __get_object_from_node(self, node, obj_type = ''):
    """Copy values from an XML DOM node into a new object.

    @param node     XML element whose child elements become attributes
    @param obj_type name of the class to instantiate; when empty, the
                    class named by self.__name__ is used
    @return the newly constructed, populated object
    """
    # NOTE(review): block nesting reconstructed from flattened source —
    # verify against upstream before relying on edge-case behavior.
    # Resolve the constructor by name from module scope.
    if obj_type == '':
        constructor = globals()[self.__name__]
    else:
        constructor = globals()[obj_type]
    obj = constructor(self.api_key, self.sub_domain)
    for childnodes in node.childNodes:
        # XML element names use dashes; Python attribute names use underscores.
        node_name = childnodes.nodeName.replace('-', '_')
        if childnodes.nodeType == 1 and not node_name == '':  # 1 == ELEMENT_NODE
            if node_name in self.__attribute_types__:
                # Registered complex attribute: hand the raw XML to _applyS.
                obj.__setattr__(node_name, self._applyS(childnodes.toxml(encoding="utf-8"), self.__attribute_types__[node_name], node_name))
            else:
                node_value = self.__get_xml_value(childnodes.childNodes)
                if "type" in childnodes.attributes.keys():
                    node_type = childnodes.attributes["type"]
                    if node_value:
                        # NOTE(review): assumes iso8601.parse returns epoch
                        # seconds — confirm against the iso8601 module in use.
                        if node_type.nodeValue == 'datetime':
                            node_value = datetime.datetime.fromtimestamp(iso8601.parse(node_value))
                obj.__setattr__(node_name, node_value)
    log.log(100, '__get_object_from_node: obj[%s]' % obj)
    #log.log(100, '__get_object_from_node: obj[%s]' % dir(obj))
    return obj
def __get_object_from_node(self, node, obj_type=''):
    """Instantiate the appropriate class and populate its attributes
    from the child elements of the given XML DOM node."""
    # An explicit obj_type wins; otherwise use the class named by self.__name__.
    target = obj_type if obj_type != '' else self.__name__
    constructor = globals()[target]
    obj = constructor(self.api_key, self.sub_domain)
    for child in node.childNodes:
        # Only element nodes (nodeType 1) with a non-empty name are of interest.
        if child.nodeType != 1 or child.nodeName == '':
            continue
        name = child.nodeName
        if name in self.__attribute_types__:
            # Registered complex attribute: delegate the raw XML to _applyS.
            handler = self.__attribute_types__[name]
            obj.__setattr__(name, self._applyS(child.toxml(), handler, name))
            continue
        value = self.__get_xml_value(child.childNodes)
        if "type" in child.attributes.keys():
            declared = child.attributes["type"]
            if value and declared.nodeValue == 'datetime':
                value = datetime.datetime.fromtimestamp(iso8601.parse(value))
        obj.__setattr__(name, value)
    return obj
def __get_object_from_node(self, node, obj_type=''):
    """Build a new object of the right class and copy attribute values
    into it from the children of the supplied XML DOM node."""
    # Choose which class to construct: explicit obj_type, else self.__name__.
    if obj_type == '':
        cls = globals()[self.__name__]
    else:
        cls = globals()[obj_type]
    obj = cls(self.api_key, self.sub_domain)
    for element in node.childNodes:
        # Skip everything that is not a named element node (nodeType 1).
        if element.nodeType != 1 or element.nodeName == '':
            continue
        attr = element.nodeName
        if attr in self.__attribute_types__:
            # Complex attribute with a registered handler type.
            obj.__setattr__(attr, self._applyS(element.toxml(encoding='utf-8'),
                                               self.__attribute_types__[attr],
                                               attr))
            continue
        val = self.__get_xml_value(element.childNodes)
        if "type" in element.attributes.keys():
            type_attr = element.attributes["type"]
            if val and type_attr.nodeValue == 'datetime':
                val = datetime.datetime.fromtimestamp(iso8601.parse(val))
        obj.__setattr__(attr, val)
    return obj
def asynk_ts_parse(ts):
    """Parse an ASynK created/modified timestamp into a python datetime.

    For historical reasons (IOW bugs), ASynK versions have stored these
    stamps in two distinct text representations; normalize to an ISO-8601
    string first, then parse that.
    """
    normalized = asynk_ts_to_iso8601(ts)
    return iso8601.parse(normalized)
def asynk_ts_parse(ts):
    """Gracefully parse either historical ASynK timestamp text format.

    Older ASynK releases wrote created/modified stamps in two different
    representations; convert whichever one we got into ISO-8601 and then
    parse it into a python datetime object, which is returned.
    """
    return iso8601.parse(asynk_ts_to_iso8601(ts))
def asynk_ts_to_iso8601(ts):
    """Convert a timestamp in ASynK's internal (BBDB) text format into
    ISO-8601 format with a zone specifier.

    If ts is already a parseable ISO-8601 string it is returned as-is;
    otherwise the 'YYYY-MM-DD HH:MM:SS ...' form is rewritten as
    'YYYY-MM-DDTHH:MM:SSZ'.
    """
    ## FIXME: All of these assume the timestamps are in UTC. Bad things can
    ## happen if some other timezone is provided.
    try:
        ## Eliminate the case where the input string is already in iso8601
        ## format...
        iso8601.parse(ts)
        return ts
    # Fix: the Python-2-only 'except ValueError, e' syntax bound an unused
    # name and breaks on Python 3; the portable form works on both.
    except ValueError:
        return re.sub(r'(\d\d\d\d-\d\d-\d\d) (\d\d:\d\d:\d\d).*$',
                      r'\1T\2Z', ts)
def iso8601ToTimestamp(date):
    """Convert an ISO-8601 date string to an integer Unix timestamp.

    @type date: str
    """
    # FIXME: Google passes dtstart like 2008-12-10T00:00:00 (no zone);
    # append 'Z' so such values are treated as UTC.
    # Fix: the original indexed date[-6] unconditionally, which raised
    # IndexError for strings shorter than six characters.
    if not date.endswith('Z') and (len(date) < 6 or date[-6] not in ('+', '-')):
        date += 'Z'
    return int(iso8601.parse(date))
def asynk_ts_to_iso8601(ts):
    """Convert an internal ASynK (BBDB) text timestamp into iso8601
    format with a Zone Specifier.

    The text timestamps in ASynK are stored in a format readily usable
    by BBDB; as an intermediate step for other conversions we turn them
    into iso8601 so the iso8601 libraries can be leveraged.  Strings
    that already parse as iso8601 are returned unchanged.
    """
    ## FIXME: All of these assume the timestamps are in UTC. Bad things can
    ## happen if some other timezone is provided.
    try:
        ## Eliminate the case where the input string is already in iso8601
        ## format...
        iso8601.parse(ts)
        return ts
    # Fix: replaced the Python-2-only 'except ValueError, e' (unused
    # binding, SyntaxError on Python 3) with the portable form.
    except ValueError:
        return re.sub(r'(\d\d\d\d-\d\d-\d\d) (\d\d:\d\d:\d\d).*$',
                      r'\1T\2Z', ts)
def __get_object_from_node(self, node, obj_type=''):
    """Copy values from an XML DOM node into a new object.

    Handles three element shapes: registered complex attributes
    (delegated to _applyS), 'array'-typed elements (recursively built as
    a list of child objects), and simple values (optionally coerced by
    their declared 'type' attribute or a per-class converter table).

    @param node     XML element whose children become attributes
    @param obj_type class name to instantiate; self.__name__ when empty
    @return the populated object
    """
    # NOTE(review): nesting reconstructed from flattened source; also
    # Python-2-only (dict.has_key was removed in Python 3).
    if obj_type == '':
        constructor = globals()[self.__name__]
    else:
        constructor = globals()[obj_type]
    obj = constructor(self.api_key, self.sub_domain)
    for childnodes in node.childNodes:
        if childnodes.nodeType == 1 and not childnodes.nodeName == '':  # ELEMENT_NODE
            if childnodes.nodeName in self.__attribute_types__:
                # Complex attribute: pass raw XML to the registered handler.
                obj.__setattr__(
                    childnodes.nodeName,
                    self._applyS(
                        childnodes.toxml(),
                        self.__attribute_types__[childnodes.nodeName],
                        childnodes.nodeName))
            elif "type" in childnodes.attributes.keys(
            ) and childnodes.attributes["type"].nodeValue == "array":
                # Array element: recurse into each child and collect a list.
                children = list()
                for subChildNode in childnodes.childNodes:
                    children.append(
                        self.__get_object_from_node(
                            subChildNode,
                            self.__attribute_types__.get(
                                subChildNode.nodeName)))
                obj.__setattr__(childnodes.nodeName, children)
            else:
                node_value = self.__get_xml_value(childnodes.childNodes)
                if "type" in childnodes.attributes.keys():
                    # Coerce by the element's declared 'type' attribute.
                    node_type = childnodes.attributes["type"]
                    if node_value:
                        if node_type.nodeValue == 'datetime':
                            # NOTE(review): assumes iso8601.parse returns
                            # epoch seconds — confirm.
                            node_value = datetime.datetime.fromtimestamp(
                                iso8601.parse(node_value))
                        elif node_type.nodeValue == 'integer':
                            node_value = int(node_value)
                        elif node_type.nodeValue == 'boolean':
                            node_value = True if node_value == "true" else False
                        elif node_type.nodeValue == 'decimal':
                            node_value = Decimal(node_value)
                elif obj.__single_value_attribute_types__.has_key(
                        childnodes.nodeName):
                    # No declared type: fall back to the per-class converter.
                    node_value = obj.__single_value_attribute_types__.get(
                        childnodes.nodeName)(node_value)
                obj.__setattr__(childnodes.nodeName, node_value)
    return obj
def iso8601_to_datetime(time):
    """Convert an ISO8601 string to a python datetime object.

    @param time ISO8601 string
    @return python datetime object without tzinfo
    @raise exceptions.DatetimeConversionError if the string is invalid
    """
    try:
        seconds = iso8601.parse(time)
    except ValueError:
        raise exceptions.DatetimeConversionError()
    return datetime.datetime.utcfromtimestamp(seconds)
def parse_vcard_time(self, t):
    """Return a datetime object containing the native UTC timestamp
    based on the specified vCard REV timestamp string.

    IMP: the time is assumed to be UTC — whatever zone information the
    string actually carries is ignored.  FIXME, as in the original.
    """
    m = re.search(r'(\d\d\d\d\d\d\d\dT\d\d\d\d\d\d).*', t)
    if m is None:
        # Not in compact vCard form: fall back to ISO-8601 parsing.
        return datetime.datetime.utcfromtimestamp(iso8601.parse(t))
    return datetime.datetime.strptime(m.group(1), '%Y%m%dT%H%M%S')
def _add_dates_to_olprops(self, olprops):
    """Append creation-time, birthday and wedding-anniversary MAPI
    properties (when present on this contact) to the olprops list."""
    created = self.get_created()
    if created:
        olprops.append((mt.PR_CREATION_TIME, iso8601.parse(created)))
    birthday = self.get_birthday()
    if birthday:
        olprops.append((mt.PR_BIRTHDAY, yyyy_mm_dd_to_pytime(birthday)))
    anniversary = self.get_anniv()
    if anniversary:
        olprops.append((mt.PR_WEDDING_ANNIVERSARY,
                        yyyy_mm_dd_to_pytime(anniversary)))
def _add_dates_to_olprops(self, olprops):
    """Push this contact's date-valued MAPI properties onto olprops.

    Only dates that are actually set (created, birthday, anniversary)
    are appended.
    """
    stamp = self.get_created()
    if stamp:
        stamp = iso8601.parse(stamp)
        olprops.append((mt.PR_CREATION_TIME, stamp))
    bd = self.get_birthday()
    if bd:
        olprops.append((mt.PR_BIRTHDAY, yyyy_mm_dd_to_pytime(bd)))
    ann = self.get_anniv()
    if ann:
        olprops.append((mt.PR_WEDDING_ANNIVERSARY, yyyy_mm_dd_to_pytime(ann)))
def parse_vcard_time(self, t):
    """Return a datetime object containing the native UTC timestamp
    based on the specified vCard REV timestamp string.

    IMP: the value is assumed to be UTC; zone information in the string
    is ignored (FIXME, as in the original).
    """
    match = re.search(r'(\d\d\d\d\d\d\d\dT\d\d\d\d\d\dZ).*', t)
    if match:
        compact = match.group(1)
        return datetime.datetime.strptime(compact, '%Y%m%dT%H%M%SZ')
    # Otherwise assume a full ISO-8601 string and convert via epoch seconds.
    return datetime.datetime.utcfromtimestamp(iso8601.parse(t))
def get_until(self):
    """Return the UNTIL value of self.rrule as a Unix timestamp plus one
    second, or -1 when there is no rrule / no parseable UNTIL value.

    Accepts the two RFC-5545 basic forms: YYYYMMDD (len 8) and
    YYYYMMDDTHHMMSS[Z] (len 15).
    """
    # Fix: use 'is None' rather than '== None'; also removed a stray
    # debug 'print rules' statement (Python-2-only syntax, polluted stdout).
    if self.rrule is None or self.rrule == "":
        return -1
    rules = self.rrule.split(";")
    d = ""
    for r in rules:
        if r.startswith("UNTIL="):
            d = r[len("UNTIL="):]
            break
    if len(d) == 8:
        # Date-only form: YYYYMMDD -> YYYY-MM-DD
        iso_date = d[:4] + "-" + d[4:6] + "-" + d[6:]
    elif len(d) == 15:
        # Date-time form: YYYYMMDDTHHMMSS -> YYYY-MM-DDTHH:MM:SS
        iso_date = d[:4] + "-" + d[4:6] + "-" + d[6:11] \
            + ":" + d[11:13] + ":" + d[13:]
    else:
        logger.append("event %s: strange until value: %s" % (self.title, d))
        return -1  # ??
    return int(iso8601.parse(iso_date)) + 1
def getCapabilities(req, params, config, lastUpdateTime):
    """ Returns the Capabilities document.

    req = mod_python request object or WMS.FakeModPythonRequest object
    params = wmsUtils.RequestParser object containing the request parameters
    config = ConfigParser object containing configuration info for this WMS
    lastUpdateTime = time at which cache of data and metadata was last updated
    """
    # NOTE(review): formatting reconstructed from a flattened source dump.
    version = params.getParamValue("version", "")
    format = params.getParamValue("format", "")  # NOTE: shadows builtin 'format'
    # TODO: deal with version and format
    # Check the UPDATESEQUENCE (used for cache consistency)
    updatesequence = params.getParamValue("updatesequence", "")
    if updatesequence != "":
        try:
            us = iso8601.parse(updatesequence)
            if round(us) == round(lastUpdateTime):
                # Equal to the nearest second
                raise CurrentUpdateSequence(updatesequence)
            elif us > lastUpdateTime:
                raise InvalidUpdateSequence(updatesequence)
        except ValueError:
            # Client didn't supply a valid ISO8601 date
            # According to the spec, InvalidUpdateSequence is not the
            # right error code here so we use a generic exception
            raise WMSException("UPDATESEQUENCE must be a valid ISO8601 date")
    # Build the whole capabilities XML in an in-memory buffer.
    output = StringIO()
    output.write("<?xml version=\"1.0\" encoding=\"UTF-8\"?>\n")
    output.write("<WMS_Capabilities version=\"" + wmsUtils.getWMSVersion() + "\"")
    # UpdateSequence is accurate to the nearest second
    output.write(" updateSequence=\"%s\"" % iso8601.tostring(round(lastUpdateTime)))
    output.write(" xmlns=\"http://www.opengis.net/wms\"")
    output.write(" xmlns:xlink=\"http://www.w3.org/1999/xlink\"")
    # The next two lines should be commented out if you wish to load this document
    # in Cadcorp SIS from behind the University of Reading firewall
    output.write(" xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"")
    output.write(" xsi:schemaLocation=\"http://www.opengis.net/wms http://schemas.opengis.net/wms/1.3.0/capabilities_1_3_0.xsd\"")
    output.write(">")
    # <Service> metadata section, populated from the server configuration.
    output.write("<Service>")
    output.write("<Name>WMS</Name>")
    output.write("<Title>%s</Title>" % config.title)
    output.write("<Abstract>%s</Abstract>" % config.abstract)
    output.write("<KeywordList>")
    for keyword in config.keywords:
        output.write("<Keyword>%s</Keyword>" % keyword)
    output.write("</KeywordList>")
    output.write("<OnlineResource xlink:type=\"simple\" xlink:href=\"%s\"/>" % config.url)
    output.write("<ContactInformation>")
    output.write("<ContactPersonPrimary>")
    output.write("<ContactPerson>%s</ContactPerson>" % config.contactName)
    output.write("<ContactOrganization>%s</ContactOrganization>" % config.contactOrg)
    output.write("</ContactPersonPrimary>")
    output.write("<ContactVoiceTelephone>%s</ContactVoiceTelephone>" % config.contactTel)
    output.write("<ContactElectronicMailAddress>%s</ContactElectronicMailAddress>" % config.contactEmail)
    output.write("</ContactInformation>")
    output.write("<Fees>none</Fees>")
    output.write("<AccessConstraints>none</AccessConstraints>")
    output.write("<LayerLimit>%d</LayerLimit>" % getmap.getLayerLimit())
    output.write("<MaxWidth>%d</MaxWidth>" % config.maxImageWidth)
    output.write("<MaxHeight>%d</MaxHeight>" % config.maxImageHeight)
    output.write("</Service>")
    # <Capability> section: supported requests, formats and layers.
    output.write("<Capability>")
    output.write("<Request>")
    output.write("<GetCapabilities>")
    output.write("<Format>text/xml</Format>")
    # Base URL back to this server, stripped of any query string.
    url = "http://%s%s?" % (req.server.server_hostname, req.unparsed_uri.split("?")[0])
    output.write("<DCPType><HTTP><Get><OnlineResource xlink:type=\"simple\" xlink:href=\"" + url + "\"/></Get></HTTP></DCPType>")
    output.write("</GetCapabilities>")
    output.write("<GetMap>")
    for format in getmap.getSupportedImageFormats():
        output.write("<Format>%s</Format>" % format)
    output.write("<DCPType><HTTP><Get><OnlineResource xlink:type=\"simple\" xlink:href=\"" + url + "\"/></Get></HTTP></DCPType>")
    output.write("</GetMap>")
    if config.allowFeatureInfo:
        output.write("<GetFeatureInfo>")
        for format in getfeatureinfo.getSupportedFormats():
            output.write("<Format>%s</Format>" % format)
        output.write("<DCPType><HTTP><Get><OnlineResource xlink:type=\"simple\" xlink:href=\"" + url + "\"/></Get></HTTP></DCPType>")
        output.write("</GetFeatureInfo>")
    output.write("</Request>")
    # TODO: support more exception types
    output.write("<Exception>")
    for ex_format in getmap.getSupportedExceptionFormats():
        output.write("<Format>%s</Format>" % ex_format)
    output.write("</Exception>")
    # Write the top-level container layer
    output.write("<Layer>")
    output.write("<Title>%s</Title>" % config.title)
    # TODO: add styles
    for crs in grids.getSupportedCRSs().keys():
        output.write("<CRS>" + crs + "</CRS>")
    # Now for the dataset layers
    datasets = config.datasets
    for dsid in datasets.keys():
        # Write a container layer for this dataset.  Container layers
        # do not have a Name
        output.write("<Layer>")
        output.write("<Title>%s</Title>" % datasets[dsid].title)
        # Now write the displayable data layers
        vars = datareader.getVariableMetadata(datasets[dsid].location)
        for vid in vars.keys():
            output.write("<Layer")
            if config.allowFeatureInfo and datasets[dsid].queryable:
                output.write(" queryable=\"1\"")
            output.write(">")
            output.write("<Name>%s%s%s</Name>" % (dsid, wmsUtils.getLayerSeparator(), vid))
            output.write("<Title>%s</Title>" % vars[vid].title)
            output.write("<Abstract>%s</Abstract>" % vars[vid].abstract)
            # Set the bounding box
            minLon, minLat, maxLon, maxLat = vars[vid].bbox
            output.write("<EX_GeographicBoundingBox>")
            output.write("<westBoundLongitude>%s</westBoundLongitude>" % str(minLon))
            output.write("<eastBoundLongitude>%s</eastBoundLongitude>" % str(maxLon))
            output.write("<southBoundLatitude>%s</southBoundLatitude>" % str(minLat))
            output.write("<northBoundLatitude>%s</northBoundLatitude>" % str(maxLat))
            output.write("</EX_GeographicBoundingBox>")
            output.write("<BoundingBox CRS=\"CRS:84\" ")
            output.write("minx=\"%f\" maxx=\"%f\" miny=\"%f\" maxy=\"%f\"/>" % (minLon, maxLon, minLat, maxLat))
            # Set the level dimension
            if vars[vid].zvalues is not None:
                output.write("<Dimension name=\"elevation\" units=\"%s\"" % vars[vid].zunits)
                # Use the first value in the array as the default
                # If the default value is removed, you also need to edit
                # the data reading code (e.g. DataReader.java) to
                # disallow default z values
                output.write(" default=\"%s\">" % vars[vid].zvalues[0])
                firstTime = 1
                for z in vars[vid].zvalues:
                    if firstTime:
                        firstTime = 0
                    else:
                        output.write(",")
                    output.write(str(z))
                output.write("</Dimension>")
            # Set the time dimension
            if vars[vid].tvalues is not None:
                output.write("<Dimension name=\"time\" units=\"ISO8601\">")
                # If we change this to support the "current" attribute
                # we must also change the data reading code
                firstTime = 1
                for t in vars[vid].tvalues:
                    if firstTime:
                        firstTime = 0
                    else:
                        output.write(",")
                    output.write(iso8601.tostring(t))
                output.write("</Dimension>")
            output.write("</Layer>")  # end of variable Layer
        output.write("</Layer>")  # end of dataset layer
    output.write("</Layer>")  # end of top-level container layer
    output.write("</Capability>")
    output.write("</WMS_Capabilities>")
    req.content_type = "text/xml"
    req.write(output.getvalue())
    output.close()  # Free the buffer
    return
def string_to_dt(s):
    """Convert an ISO8601-formatted date string to a datetime object."""
    parsed = iso8601.parse(s)
    return datetime.datetime.fromtimestamp(parsed)
def getCalendar(config, dataset, varID, dateTime):
    """ returns an HTML calendar for the given dataset and variable.
    dateTime is a string in ISO 8601 format with the required 'focus time' """
    # NOTE(review): formatting reconstructed from a flattened source dump.
    datasets = config.datasets
    # Get an array of time axis values in seconds since the epoch
    tValues = datareader.getVariableMetadata(datasets[dataset].location)[varID].tvalues
    # TODO: is this the right thing to do here?
    if tValues is None:
        return ""
    # NOTE: 'str' shadows the builtin for the rest of this function.
    str = StringIO()
    prettyDateFormat = "%d %b %Y"
    # Find the closest time step to the given dateTime value
    # TODO: binary search would be more efficient
    reqTime = iso8601.parse(dateTime)  # Gives seconds since the epoch
    diff = 1e20
    for i in xrange(len(tValues)):
        testDiff = math.fabs(tValues[i] - reqTime)
        if testDiff < diff:
            # Axis is monotonic so we should move closer and closer
            # to the nearest value
            diff = testDiff
            nearestIndex = i
        elif i > 0:
            # We've moved past the closest date
            break
    str.write("<root>")
    str.write("<nearestValue>%s</nearestValue>" % iso8601.tostring(tValues[nearestIndex]))
    str.write("<prettyNearestValue>%s</prettyNearestValue>" % time.strftime(prettyDateFormat, time.gmtime(tValues[nearestIndex])))
    str.write("<nearestIndex>%d</nearestIndex>" % nearestIndex)
    # create a struct_time tuple with zero timezone offset (i.e. GMT)
    nearesttime = time.gmtime(tValues[nearestIndex])
    # Now print out the calendar in HTML
    str.write("<calendar>")
    str.write("<table><tbody>")
    # Add the navigation buttons at the top of the month view
    str.write("<tr>")
    str.write("<td><a href=\"#\" onclick=\"javascript:setCalendar('%s','%s','%s'); return false\"><<</a></td>" % (dataset, varID, _getYearBefore(nearesttime)))
    str.write("<td><a href=\"#\" onclick=\"javascript:setCalendar('%s','%s','%s'); return false\"><</a></td>" % (dataset, varID, _getMonthBefore(nearesttime)))
    str.write("<td colspan=\"3\">%s</td>" % _getHeading(nearesttime))
    str.write("<td><a href=\"#\" onclick=\"javascript:setCalendar('%s','%s','%s'); return false\">></a></td>" % (dataset, varID, _getMonthAfter(nearesttime)))
    str.write("<td><a href=\"#\" onclick=\"javascript:setCalendar('%s','%s','%s'); return false\">>></a></td>" % (dataset, varID, _getYearAfter(nearesttime)))
    str.write("</tr>")
    # Add the day-of-week headings
    str.write("<tr><th>M</th><th>T</th><th>W</th><th>T</th><th>F</th><th>S</th><th>S</th></tr>")
    # Add the calendar body
    tValIndex = 0  # index in tvalues array
    for week in calendar.monthcalendar(nearesttime[0], nearesttime[1]):
        str.write("<tr>")
        for day in week:
            if day > 0:
                # Search through the t axis and find out whether we have
                # any data for this particular day
                found = 0
                calendarDay = (nearesttime[0], nearesttime[1], day, 0, 0, 0, 0, 0, 0)
                while not found and tValIndex < len(tValues):
                    axisDay = time.gmtime(tValues[tValIndex])
                    res = _compareDays(axisDay, calendarDay)
                    if res == 0:
                        found = 1  # Found data on this day
                    elif res < 0:
                        tValIndex = tValIndex + 1  # Date on axis is before target day
                    else:
                        break  # Date on axis is after target day: no point searching further
                if found:
                    tValue = iso8601.tostring(tValues[tValIndex])
                    prettyTValue = time.strftime(prettyDateFormat, axisDay)
                    str.write("<td id=\"t%d\"><a href=\"#\" onclick=\"javascript:getTimesteps('%s','%s','%d','%s','%s'); return false\">%d</a></td>" % (tValIndex, dataset, varID, tValIndex, tValue, prettyTValue, day))
                else:
                    str.write("<td>%d</td>" % day)
            else:
                # Day belongs to an adjacent month: render an empty cell.
                str.write("<td></td>")
        str.write("</tr>")
    str.write("</tbody></table>")
    str.write("</calendar>")
    str.write("</root>")
    s = str.getvalue()
    str.close()
    return s
def prep_sync_lists (self, destid, sl, synct_sto=None, cnt=0):
    """See the documentation in folder.Folder

    Walk the MAPI contents table and classify every contact entry into
    sl's new / modified / unmodified buckets, based on the per-profile
    sync tag and the last-sync timestamps.

    @param destid    dbid of the remote database being synced against
    @param sl        sync-list object collecting the classification
    @param synct_sto optional override for the last-sync-stop timestamp
    @param cnt       when non-zero, stop after processing cnt entries
    """
    pname = sl.get_pname()
    stag = self.get_config().make_sync_label(pname, destid)
    logging.info('Querying MAPI for status of Contact Entries')
    ## Sort the DBIds so dest1 has the 'lower' ID
    dest1 = self.get_db().get_dbid()
    if dest1 > destid:
        dest2 = dest1
        dest1 = destid
    else:
        dest2 = destid
    ctable = self.get_contents()
    ## FIXME: This needs to be fixed. The ID will be different based on
    ## the actual remote database, of course.
    stp = self.get_proptags().sync_tags[stag]
    # Restrict the table to just the columns we classify on.
    cols = (mt.PR_ENTRYID, mt.PR_LAST_MODIFICATION_TIME,
            mt.PR_DISPLAY_NAME, stp)
    ctable.SetColumns(cols, 0)
    i = 0
    pname = sl.get_pname()
    synct_str = self.get_config().get_last_sync_start(pname)
    if not synct_sto:
        synct_sto = self.get_config().get_last_sync_stop(pname)
    synct = iso8601.parse(synct_sto)
    logging.debug('Last Start iso str : %s', synct_str)
    logging.debug('Last Stop iso str : %s', synct_sto)
    logging.debug('Current Time : %s', iso8601.tostring(time.time()))
    logging.info('Data obtained from MAPI. Processing...')
    while True:
        # One row at a time; an empty result means we are done.
        rows = ctable.QueryRows(1, 0)
        #if this is the last row then stop
        if len(rows) != 1:
            break
        ((entryid_tag, entryid), (tt, modt),
         (name_tag, name), (gid_tag, gid)) = rows[0]
        b64_entryid = base64.b64encode(entryid)
        sl.add_entry(b64_entryid, gid)
        if mt.PROP_TYPE(gid_tag) == mt.PT_ERROR:
            # Was not synced for whatever reason.
            sl.add_new(b64_entryid)
        else:
            if mt.PROP_TYPE(tt) == mt.PT_ERROR:
                logging.debug('Impossible! Entry has no timestamp. i = %d', i)
            else:
                # Entries untouched since the last sync stop are unmodified.
                if utils.utc_time_to_local_ts(modt) <= synct:
                    sl.add_unmod(b64_entryid)
                else:
                    sl.add_mod(b64_entryid, gid)
        i += 1
        if cnt != 0 and i >= cnt:
            break
    # Restore the default column set for other users of the table.
    ctable.SetColumns(self.get_def_cols(), 0)
def prep_sync_lists (self, destid, sl, synct_sto=None, cnt=0):
    """See the documentation in folder.Folder

    Classify Outlook contact entries into new / modified / unmodified /
    deleted buckets on sl, by comparing the current MAPI contents table
    against the item-id map saved from the previous sync.

    @param destid    dbid of the remote database being synced against
    @param sl        sync-list object collecting the classification
    @param synct_sto optional override for the last-sync-stop timestamp
    @param cnt       when non-zero, stop after processing cnt entries
    """
    pname = sl.get_pname()
    conf = self.get_config()
    pdb1id = conf.get_profile_db1(pname)
    # Item ids recorded at the end of the previous sync run.
    oldi = conf.get_itemids(pname)
    stag = conf.make_sync_label(pname, destid)
    logging.info('Querying MAPI for status of Contact Entries')
    ## Sort the DBIds so dest1 has the 'lower' ID
    dest1 = self.get_db().get_dbid()
    if dest1 > destid:
        dest2 = dest1
        dest1 = destid
    else:
        dest2 = destid
    ctable = self.get_contents()
    stp = self.get_proptags().sync_tags[stag]
    cols = (mt.PR_ENTRYID, mt.PR_LAST_MODIFICATION_TIME,
            mt.PR_DISPLAY_NAME, stp)
    ctable.SetColumns(cols, 0)
    i = 0
    synct_str = self.get_config().get_last_sync_start(pname)
    if not synct_sto:
        synct_sto = self.get_config().get_last_sync_stop(pname)
    synct = iso8601.parse(synct_sto)
    logging.debug('Last Start iso str : %s', synct_str)
    logging.debug('Last Stop iso str : %s', synct_sto)
    logging.debug('Current Time : %s', iso8601.tostring(time.time()))
    logging.info('Data obtained from MAPI. Processing...')
    # Item ids seen in this pass; used below to detect deletions.
    newi = {}
    while True:
        rows = ctable.QueryRows(1, 0)
        #if this is the last row then stop
        if len(rows) != 1:
            break
        ((entryid_tag, entryid), (tt, modt),
         (name_tag, name), (gid_tag, gid)) = rows[0]
        b64_entryid = base64.b64encode(entryid)
        newi.update({b64_entryid : gid})
        if mt.PROP_TYPE(gid_tag) == mt.PT_ERROR:
            # Was not synced for whatever reason.
            logging.debug('New Outlook Contact: %20s %s', name, b64_entryid)
            sl.add_new(b64_entryid)
        else:
            if mt.PROP_TYPE(tt) == mt.PT_ERROR:
                logging.debug('Impossible! Entry has no timestamp. i = %d', i)
            else:
                if utils.utc_time_to_local_ts(modt) <= synct:
                    sl.add_unmod(b64_entryid)
                else:
                    logging.debug('Modified Outlook Contact: %20s %s',
                                  name, b64_entryid)
                    sl.add_mod(b64_entryid, gid)
        i += 1
        if cnt != 0 and i >= cnt:
            break
    # Restore the default column set for other users of the table.
    ctable.SetColumns(self.get_def_cols(), 0)
    # Anything present in the old map but missing now was deleted in Outlook.
    kss = newi.keys()
    for x, y in oldi.iteritems():
        if not x in kss and not y in kss:
            logging.debug('Deleted Outlook Contact: %s:%s', x, y)
            # The (local, remote) ordering of the pair depends on which
            # database is db1 in this profile.
            if pdb1id == self.get_dbid():
                sl.add_del(x, y)
            else:
                sl.add_del(y,x)
def test(self):
    """Exercise parse()/parse_time() over dates, week numbers, ordinal
    days, bare times, fractional seconds and zone-qualified datetimes."""
    self.assertEqual(parse('2012'), date(2012, 1, 1))
    self.assertEqual(parse('2012-05-03'), date(2012, 5, 3))
    self.assertEqual(parse('20120503'), date(2012, 5, 3))
    self.assertEqual(parse('2012-05'), date(2012, 5, 1))
    # Week numbers
    self.assertEqual(parse('2012-W05'), date(2012, 1, 30))
    self.assertEqual(parse('2012W05'), date(2012, 1, 30))
    self.assertEqual(parse('2012-W05-5'), date(2012, 2, 3))
    self.assertEqual(parse('2012W055'), date(2012, 2, 3))
    # Ordinal days
    self.assertEqual(parse('2012-007'), date(2012, 1, 7))
    self.assertEqual(parse('2012007'), date(2012, 1, 7))
    # Times
    self.assertEqual(parse('00:00'), time(0, 0))
    self.assertEqual(parse('12:04:23'), time(12, 4, 23))
    self.assertEqual(parse('120423'), time(12, 4, 23))
    self.assertEqual(parse('12:04'), time(12, 4, 0))
    # A bare 4-digit string is ambiguous: parse() reads it as a year,
    # parse_time() as HHMM.
    self.assertEqual(parse('1204'), date(1204, 1, 1))
    self.assertEqual(parse_time('1204'), time(12, 4, 0))
    self.assertEqual(parse('12'), time(12, 0, 0))
    self.assertEqual(parse('02'), time(2, 0, 0))
    self.assertEqual(parse('12:04:23.450686'), time(12, 4, 23, 450686))
    # Combined
    self.assertEqual(parse('2008-09-03T20:56:35.450686'), datetime(2008, 9, 3, 20, 56, 35, 450686))
    self.assertEqual(parse('2008-09-03T20:56:35.450686Z'), datetime(2008, 9, 3, 20, 56, 35, 450686, TimeZone(timedelta())))
    self.assertEqual(parse('2008-09-03T20:56:35.450686+01'), datetime(2008, 9, 3, 20, 56, 35, 450686, TimeZone(timedelta(minutes=60))))
    self.assertEqual(parse('2008-09-03T20:56:35.450686+0100'), datetime(2008, 9, 3, 20, 56, 35, 450686, TimeZone(timedelta(minutes=60))))
    self.assertEqual(parse('2008-09-03T20:56:35.450686+01:30'), datetime(2008, 9, 3, 20, 56, 35, 450686, TimeZone(timedelta(minutes=60 + 30))))
    self.assertEqual(parse('2008-09-03T20:56:35.450686-01:30'), datetime(2008, 9, 3, 20, 56, 35, 450686, TimeZone(timedelta(minutes=-(60 + 30)))))
    self.assertEqual(parse('2013-03-28T02:30:24+00:00'), datetime(2013, 3, 28, 2, 30, 24, tzinfo=TimeZone(timedelta(minutes=0))))
def prep_sync_lists(self, destid, sl, synct_sto=None, cnt=0):
    """See the documentation in folder.Folder

    Walk the MAPI contents table and classify every Outlook contact as
    new / modified / unmodified on sl; then diff against the saved item
    map to find deletions.

    @param destid    dbid of the remote database being synced against
    @param sl        sync-list object collecting the classification
    @param synct_sto optional override for the last-sync-stop timestamp
    @param cnt       when non-zero, stop after processing cnt entries
    """
    pname = sl.get_pname()
    conf = self.get_config()
    pdb1id = conf.get_profile_db1(pname)
    # Item ids recorded at the end of the previous sync run.
    oldi = conf.get_itemids(pname)
    stag = conf.make_sync_label(pname, destid)
    logging.info('Querying MAPI for status of Contact Entries')
    ## Sort the DBIds so dest1 has the 'lower' ID
    dest1 = self.get_db().get_dbid()
    if dest1 > destid:
        dest2 = dest1
        dest1 = destid
    else:
        dest2 = destid
    ctable = self.get_contents()
    stp = self.get_proptags().sync_tags[stag]
    cols = (mt.PR_ENTRYID, mt.PR_LAST_MODIFICATION_TIME,
            mt.PR_DISPLAY_NAME, stp)
    ctable.SetColumns(cols, 0)
    i = 0
    synct_str = self.get_config().get_last_sync_start(pname)
    if not synct_sto:
        synct_sto = self.get_config().get_last_sync_stop(pname)
    synct = iso8601.parse(synct_sto)
    logging.debug('Last Start iso str : %s', synct_str)
    logging.debug('Last Stop iso str : %s', synct_sto)
    logging.debug('Current Time : %s', iso8601.tostring(time.time()))
    logging.info('Data obtained from MAPI. Processing...')
    # Item ids seen in this pass; used below to detect deletions.
    newi = {}
    while True:
        rows = ctable.QueryRows(1, 0)
        #if this is the last row then stop
        if len(rows) != 1:
            break
        ((entryid_tag, entryid), (tt, modt),
         (name_tag, name), (gid_tag, gid)) = rows[0]
        b64_entryid = base64.b64encode(entryid)
        newi.update({b64_entryid: gid})
        if mt.PROP_TYPE(gid_tag) == mt.PT_ERROR:
            # Was not synced for whatever reason.
            logging.debug('New Outlook Contact: %20s %s', name, b64_entryid)
            sl.add_new(b64_entryid)
        else:
            if mt.PROP_TYPE(tt) == mt.PT_ERROR:
                logging.debug('Impossible! Entry has no timestamp. i = %d', i)
            else:
                if utils.utc_time_to_local_ts(modt) <= synct:
                    sl.add_unmod(b64_entryid)
                else:
                    logging.debug('Modified Outlook Contact: %20s %s',
                                  name, b64_entryid)
                    sl.add_mod(b64_entryid, gid)
        i += 1
        if cnt != 0 and i >= cnt:
            break
    # Restore the default column set for other users of the table.
    ctable.SetColumns(self.get_def_cols(), 0)
    # Anything in the old map that is no longer present was deleted.
    kss = newi.keys()
    for x, y in oldi.iteritems():
        if not x in kss and not y in kss:
            logging.debug('Deleted Outlook Contact: %s:%s', x, y)
            # Pair ordering depends on which database is db1 in the profile.
            if pdb1id == self.get_dbid():
                sl.add_del(x, y)
            else:
                sl.add_del(y, x)
def _decode_object(self, obj): if 'timestamp' in obj: obj['timestamp'] = iso8601.parse(obj['timestamp']) return obj