def inner_parse(id):
    """Fetch a dict from the metaWeblog service, parse its date fields
    and save it as a ``Gbobject``.

    The four timestamp fields come back as XML-RPC ``DateTime`` values;
    each is normalised via ``make_comparable`` and re-parsed into a
    native ``datetime.datetime`` before saving.

    :param id: identifier passed to ``srv.metaWeblog.dict_id``
               (name kept for caller compatibility even though it
               shadows the builtin).
    """
    data = srv.metaWeblog.dict_id(id)
    # YYYYMMDD<sep>HH<sep>MM<sep>SS -- separators are matched loosely with '.'
    pattern = r"^(\d{4})(\d{2})(\d{2}).(\d{2}).(\d{2}).(\d{2})$"
    # The same normalise/match/convert pipeline applies to all four
    # timestamp fields, so run it in one loop instead of four copies.
    for key in ('creation_date', 'last_update',
                'end_publication', 'start_publication'):
        # make_comparable returns a pair; index [1] is the string form.
        raw = DateTime().make_comparable(data[key])[1]
        parts = re.search(pattern, raw).group(1, 2, 3, 4, 5, 6)
        data[key] = datetime(*(int(p) for p in parts))
    Gbobject(**data).save()
def import_segment(epoch_group, segment, sources, protocol=None, equipment_setup_root=None):
    """Import a neo ``Segment`` into the given epoch group as a new Epoch.

    Creates (or looks up) the NEO protocol, computes the epoch duration
    from the segment's analog signals, then imports analog signals,
    timeline annotations and spike trains into the new epoch.

    NOTE(review): AnalogSignalArrays and bare Spikes are detected but not
    imported -- only warnings are emitted for them.
    """
    ctx = epoch_group.getDataContext()
    if protocol is None:
        # Reuse an existing NEO protocol if present, otherwise create it.
        protocol = ctx.getProtocol(NEO_PROTOCOL)
        if protocol is None:
            protocol = ctx.insertProtocol(NEO_PROTOCOL, NEO_PROTOCOL_TEXT)
    # Epoch duration = latest stop time across all analog signals.
    segment_duration = max(arr.t_stop for arr in segment.analogsignals)
    segment_duration.units = 'ms' #milliseconds
    start_time = DateTime(epoch_group.getStart())
    # Java HashMaps expected by the insertEpoch API.
    inputSources = Maps.newHashMap()
    outputSources = Maps.newHashMap()
    for s in sources:
        if s:
            s = asclass("Source", s)
            inputSources.put(s.getLabel(), s)
    # Prefix every segment annotation with the equipment-setup root so the
    # keys are namespaced as device parameters.
    device_parameters = dict(("{}.{}".format(equipment_setup_root, k), v) for (k,v) in segment.annotations.items())
    epoch = epoch_group.insertEpoch(inputSources,
                                    outputSources,
                                    start_time,
                                    start_time.plusMillis(int(segment_duration)),
                                    protocol,
                                    to_map(segment.annotations),
                                    to_map(device_parameters)
                                    )
    if segment.index is not None:
        epoch.addProperty('index', box_number(segment.index))
    if len(segment.analogsignalarrays) > 0:
        log_warning("Segment contains AnalogSignalArrays. Import of AnalogSignalArrays is currently not supported")
    for analog_signal in segment.analogsignals:
        import_analog_signal(epoch, analog_signal, equipment_setup_root)
    import_timeline_annotations(epoch, segment, start_time)
    if len(segment.spikes) > 0:
        logging.warning("Segment contains Spikes. Import of individual Spike data is not yet implemented (but SpikeTrains are).")
    import_spiketrains(epoch, protocol, segment)
def sync(self, tracker):
    """Synchronise issues from a Roundup tracker over XML-RPC.

    Fetches every issue whose activity is newer than the tracker's last
    update, mirrors its fields into the local ``Issue`` model, and fires
    the ``post_issue_sync`` / ``post_tracker_sync`` signals.

    Returns True on completion.
    """
    server = ServerProxy(tracker.config, allow_none=True, use_datetime=datetime)
    # Roundup expects an xmlrpclib.DateTime for the activity filter.
    last_update = DateTime(time.mktime(tracker.last_update.timetuple()))
    users = self._get_users(server)
    # IDs of issues changed since last_update.
    ids = map(int, server.filter('issue', None, {'activity': str(last_update)}))
    for issue_id in ids:
        data = server.display('issue%d' % issue_id, 'title', 'creation',
                              'creator', 'assignedto', 'activity', 'messages', 'status')
        issue = Issue.by_tracker_id(tracker.id, issue_id)
        issue.no = issue_id
        issue.set_title(data.get('title', ''))
        issue.set_description(
            self._get_description(server, data.get('messages', [])))
        # creator/assignedto come back as user ids; map through the cache.
        issue.reporter = users[int(data['creator'])]
        issue.owner = users[int(data['assignedto'])]
        issue.last_change = _roundup_date_to_datetime(data.get('activity'))
        status = int(data.get('status', -1))
        issue.active = status in ACTIVE_STATUS
        issue.tracker = tracker
        if not issue.id:
            # Newly created local record.
            issue.created = datetime.now()
        issue.updated = datetime.now()
        issue.save()
        post_issue_sync.send(sender=self, issue=issue)
        self._update_user_data(server, data, issue, users)
    # Back-date by one day so the next sync overlaps and misses nothing.
    tracker.last_update = datetime.now() - timedelta(days=1)
    tracker.save()
    post_tracker_sync.send(sender=self, tracker=tracker)
    return True
def marshall_param(data):
    """Converts all `Entity` instances to dicts, recursively.

    Dicts, lists, and tuples are kept, generators are wrapped by
    `itertools.imap`, `datetime.datetime` objects are converted to
    `xmlrpclib.DateTime`. Other data types are left as-is.
    """
    # Guard-return dispatch: first matching type wins.
    if isinstance(data, Entity):
        return dict((name, marshall_param(item)) for (name, item) in data)
    if isinstance(data, dict):
        return dict(
            (name, marshall_param(item))
            for name, item in data.iteritems())
    if isinstance(data, list):
        return map(marshall_param, data)
    if isinstance(data, tuple):
        return tuple(map(marshall_param, data))
    if isinstance(data, GeneratorType):
        # Stay lazy: wrap rather than exhaust the generator.
        return imap(marshall_param, data)
    if isinstance(data, datetime):
        return DateTime(data)
    return data
def post_structure(entry, site):
    """A post structure with extensions"""
    author = entry.authors.all()[0]
    entry_url = '%s://%s%s' % (PROTOCOL, site.domain,
                               entry.get_absolute_url())
    structure = {
        'title': entry.title,
        'description': unicode(entry.html_content),
        'link': entry_url,
    }
    # Basic Extensions
    structure['permaLink'] = entry_url
    structure['categories'] = [cat.title for cat in entry.categories.all()]
    structure['dateCreated'] = DateTime(entry.creation_date.isoformat())
    structure['postid'] = entry.pk
    structure['userid'] = author.username
    # Useful Movable Type Extensions
    structure['mt_excerpt'] = entry.excerpt
    structure['mt_allow_comments'] = int(entry.comment_enabled)
    structure['mt_allow_pings'] = (int(entry.pingback_enabled) or
                                   int(entry.trackback_enabled))
    structure['mt_keywords'] = entry.tags
    # Useful Wordpress Extensions
    structure['wp_author'] = author.username
    structure['wp_author_id'] = author.pk
    structure['wp_author_display_name'] = author.username
    structure['wp_password'] = entry.password
    structure['wp_slug'] = entry.slug
    structure['sticky'] = entry.featured
    return structure
def convIn(self, objType, data, keepDict = False):
    """Recursively convert outgoing values for XML-RPC transport.

    Objects exposing ``__convert__`` (or instances of *objType*) are
    flattened into dicts keyed by their ``__slots__`` attributes,
    skipping ``Missing`` values; datetimes become xmlrpclib DateTime;
    lists/tuples are converted element-wise; anything else passes
    through unchanged.
    """
    has = hasattr(data, "__convert__")
    if has or isinstance(data, objType):
        if has:
            # Convertible object: recurse using its own class's slots.
            objType = data.__class__
        #endif
        if keepDict:
            # Data is already a mapping; convert values in place.
            return dict([ (key, self.convIn(objType, value), ) \
                for key, value in data.iteritems() if value != Missing])
        #endif
        result = {}
        for attr in objType.__slots__:
            value = getattr(data, attr)
            if value != Missing:
                result[attr] = self.convIn(objType, value)
            #endif
        #endfor
        return result
    elif isinstance(data, datetime):
        # Transport datetimes as epoch-second DateTime values.
        return DateTime(int(mktime(data.timetuple())))
    elif isinstance(data, ListType):
        return [self.convIn(objType, value) for value in data]
    elif isinstance(data, TupleType):
        return tuple([self.convIn(objType, value) for value in data])
    else:
        return data
def test_dumps_datetime(self):
    """A DateTime value marshals to a dateTime.iso8601 XML-RPC payload."""
    expected = ("<params><param><value><dateTime.iso8601>"
                "19980717T14:08:55"
                "</dateTime.iso8601></value></param></params>")
    actual = dumps((DateTime("19980717T14:08:55"), ))
    self.assert_eq(actual, expected)
    return
def test_DateTime_to_str(monkeypatch):
    """DateTime_to_str formats a DateTime; invalid input yields ''."""
    # Pin the timezone so the formatted output is deterministic.
    monkeypatch.setenv('TZ', 'EST+05EDT,M4.1.0,M10.5.0')
    time.tzset()
    # Non-DateTime input is swallowed and rendered as the empty string.
    assert BlogIt.DateTime_to_str('invalid input') == ''
    assert BlogIt.DateTime_to_str(
        DateTime('20090628T17:38:58'),
        '%a %b %d %H:%M:%S %Y') == 'Sun Jun 28 17:38:58 2009'
def testXmlRpc(self):
    """End-to-end check of the /xmlrpc/ handler over a real ServerProxy.

    Exercises scalar, composite and datetime return values, argument
    passing, unknown-method faults and handler-raised faults.
    """
    # Harness may not define a scheme; default to plain http.
    scheme = 'http'
    try:
        scheme = self.harness.scheme
    except AttributeError:
        pass
    if scheme == 'https':
        url = 'https://%s:%s/xmlrpc/' % (self.interface(), self.PORT)
        proxy = ServerProxy(url, transport=HTTPSTransport())
    else:
        url = 'http://%s:%s/xmlrpc/' % (self.interface(), self.PORT)
        proxy = ServerProxy(url)
    # Plain HTTP GET to the handler still works alongside XML-RPC.
    self.getPage('/xmlrpc/foo')
    self.assertBody('Hello world!')
    self.assertEqual(proxy.return_single_item_list(), [42])
    self.assertNotEqual(proxy.return_single_item_list(), 'one bazillion')
    self.assertEqual(proxy.return_string(), 'here is a string')
    # Tuples come back over the wire as lists.
    self.assertEqual(proxy.return_tuple(), list(
        ('here', 'is', 1, 'tuple')))
    self.assertEqual(proxy.return_dict(), {'a': 1, 'c': 3, 'b': 2})
    self.assertEqual(proxy.return_composite(), [{
        'a': 1,
        'z': 26
    }, 'hi', ['welcome', 'friend']])
    self.assertEqual(proxy.return_int(), 42)
    self.assertEqual(proxy.return_float(), 3.14)
    self.assertEqual(proxy.return_datetime(),
                     DateTime((2003, 10, 7, 8, 1, 0, 1, 280, -1)))
    self.assertEqual(proxy.return_boolean(), True)
    self.assertEqual(proxy.test_argument_passing(22), 44)
    # A server-side TypeError should surface as an xmlrpclib.Fault.
    try:
        proxy.test_argument_passing({})
    except Exception:
        x = sys.exc_info()[1]
        self.assertEqual(x.__class__, Fault)
        self.assertEqual(
            x.faultString,
            "unsupported operand type(s) for *: 'dict' and 'int'")
    else:
        self.fail('Expected xmlrpclib.Fault')
    # An unknown method name should also raise an xmlrpclib.Fault.
    try:
        proxy.non_method()
    except Exception:
        x = sys.exc_info()[1]
        self.assertEqual(x.__class__, Fault)
        self.assertEqual(x.faultString,
                         'method "non_method" is not supported')
    else:
        self.fail('Expected xmlrpclib.Fault')
    # A Fault raised deliberately by the handler is propagated as-is.
    try:
        proxy.test_returning_Fault()
    except Exception:
        x = sys.exc_info()[1]
        self.assertEqual(x.__class__, Fault)
        self.assertEqual(x.faultString, 'custom Fault response')
    else:
        self.fail('Expected xmlrpclib.Fault')
def testXmlRpc(self):
    """End-to-end check of the /xmlrpc/ handler (scheme from self.scheme).

    Near-duplicate of the harness-based variant: covers return types,
    argument passing, unknown-method faults and handler-raised faults.
    """
    scheme = self.scheme
    if scheme == "https":
        url = 'https://%s:%s/xmlrpc/' % (self.interface(), self.PORT)
        proxy = ServerProxy(url, transport=HTTPSTransport())
    else:
        url = 'http://%s:%s/xmlrpc/' % (self.interface(), self.PORT)
        proxy = ServerProxy(url)
    # begin the tests ...
    self.getPage("/xmlrpc/foo")
    self.assertBody("Hello world!")
    self.assertEqual(proxy.return_single_item_list(), [42])
    self.assertNotEqual(proxy.return_single_item_list(), 'one bazillion')
    self.assertEqual(proxy.return_string(), "here is a string")
    # Tuples come back over the wire as lists.
    self.assertEqual(proxy.return_tuple(),
                     list(('here', 'is', 1, 'tuple')))
    self.assertEqual(proxy.return_dict(), {'a': 1, 'c': 3, 'b': 2})
    self.assertEqual(proxy.return_composite(),
                     [{'a': 1, 'z': 26}, 'hi', ['welcome', 'friend']])
    self.assertEqual(proxy.return_int(), 42)
    self.assertEqual(proxy.return_float(), 3.14)
    self.assertEqual(proxy.return_datetime(),
                     DateTime((2003, 10, 7, 8, 1, 0, 1, 280, -1)))
    self.assertEqual(proxy.return_boolean(), True)
    self.assertEqual(proxy.test_argument_passing(22), 22 * 2)
    # Test an error in the page handler (should raise an
    # xmlrpclib.Fault)
    try:
        proxy.test_argument_passing({})
    except Exception:
        x = sys.exc_info()[1]
        self.assertEqual(x.__class__, Fault)
        self.assertEqual(x.faultString, ("unsupported operand type(s) "
                                         "for *: 'dict' and 'int'"))
    else:
        self.fail("Expected xmlrpclib.Fault")
    # http://www.cherrypy.org/ticket/533
    # if a method is not found, an xmlrpclib.Fault should be raised
    try:
        proxy.non_method()
    except Exception:
        x = sys.exc_info()[1]
        self.assertEqual(x.__class__, Fault)
        self.assertEqual(x.faultString,
                         'method "non_method" is not supported')
    else:
        self.fail("Expected xmlrpclib.Fault")
    # Test returning a Fault from the page handler.
    try:
        proxy.test_returning_Fault()
    except Exception:
        x = sys.exc_info()[1]
        self.assertEqual(x.__class__, Fault)
        self.assertEqual(x.faultString, ("custom Fault response"))
    else:
        self.fail("Expected xmlrpclib.Fault")
def xml_wrap(value, version):
    """
    Wrap all ``str`` in ``xmlrpc.client.Binary``.

    Because ``xmlrpc.client.dumps()`` will itself convert all ``unicode``
    instances into UTF-8 encoded ``str`` instances, we don't do it here.

    So in total, when encoding data for an XML-RPC packet, the following
    transformations occur:

        * All ``str`` instances are treated as binary data and are wrapped in
          an ``xmlrpc.client.Binary()`` instance.

        * Only ``unicode`` instances are treated as character data. They get
          converted to UTF-8 encoded ``str`` instances (although as mentioned,
          not by this function).

    Also see `xml_unwrap()`.

    :param value: The simple scalar or simple compound value to wrap.
    """
    # NOTE: branch order matters -- sequences/dicts recurse first, then
    # exact-type checks, then isinstance checks.
    if type(value) in (list, tuple):
        return tuple(xml_wrap(v, version) for v in value)
    if isinstance(value, dict):
        return dict(
            (k, xml_wrap(v, version)) for (k, v) in value.items()
        )
    if type(value) is bytes:
        return Binary(value)
    if type(value) is Decimal:
        # transfer Decimal as a string
        return unicode(value)
    if isinstance(value, six.integer_types) and (value < MININT or value > MAXINT):
        # Integers outside the XML-RPC i4 range are sent as strings.
        return unicode(value)
    if isinstance(value, DN):
        return str(value)
    # Encode datetime.datetime objects as xmlrpc.client.DateTime objects
    if isinstance(value, datetime.datetime):
        if capabilities.client_has_capability(version, 'datetime_values'):
            return DateTime(value)
        else:
            # Older clients get an LDAP generalized-time string instead.
            return value.strftime(LDAP_GENERALIZED_TIME_FORMAT)
    if isinstance(value, DNSName):
        if capabilities.client_has_capability(version, 'dns_name_values'):
            return {'__dns_name__': unicode(value)}
        else:
            return unicode(value)
    if isinstance(value, Principal):
        return unicode(value)
    if isinstance(value, crypto_x509.Certificate):
        # Certificates travel as base64-encoded DER.
        return base64.b64encode(value.public_bytes(
            x509_Encoding.DER)).decode('ascii')
    # Anything reaching here must already be an XML-RPC-native scalar.
    assert type(value) in (unicode, float, bool, type(None)) + six.integer_types
    return value
def test_loads_datetime(self):
    """A dateTime.iso8601 payload unmarshals to a DateTime value."""
    payload = ("<params><param><value><dateTime.iso8601>"
               "19980717T14:08:55"
               "</dateTime.iso8601></value></param></params>")
    expected = ((DateTime("19980717T14:08:55"), ), None)
    self.confirm_loads_map({payload: expected})
    return
def test_str_to_DateTime(monkeypatch):
    """str_to_DateTime parses strings into DateTime; round-trips with
    DateTime_to_str."""
    # Pin the timezone so parsing/formatting is deterministic.
    monkeypatch.setenv('TZ', 'EST+05EDT,M4.1.0,M10.5.0')
    time.tzset()
    # With no arguments it returns "now" as a DateTime.
    assert isinstance(BlogIt.str_to_DateTime(), DateTime)
    assert str(BlogIt.str_to_DateTime('Sun Jun 28 19:38:58 2009',
                                      '%a %b %d %H:%M:%S %Y'))\
        == '20090628T19:38:58'
    # Round-trip: format then parse preserves the timestamp.
    assert str(BlogIt.str_to_DateTime(BlogIt.DateTime_to_str(
        DateTime('20090628T17:38:58'))))\
        == '20090628T17:38:58'
def xmlrpc_status(self):
    """ Return hellanzb's current status text """
    from Hellanzb.NZBQueue import listQueue
    s = {}
    totalSpeed = Hellanzb.getCurrentRate()
    s['time'] = DateTime()
    s['uptime'] = secondsToUptime(time.time() - Hellanzb.BEGIN_TIME)
    s['is_paused'] = Hellanzb.downloadPaused
    s['rate'] = totalSpeed
    s['queued_mb'] = Hellanzb.queue.totalQueuedBytes / 1024 / 1024
    if totalSpeed == 0:
        # Avoid ZeroDivisionError when nothing is downloading.
        s['eta'] = 0
    else:
        s['eta'] = int((Hellanzb.queue.totalQueuedBytes / 1024) / totalSpeed)
    s['percent_complete'] = 0
    currentNZBs = Hellanzb.queue.currentNZBs()
    if len(currentNZBs):
        currentNZB = currentNZBs[0]
        s['percent_complete'] = currentNZB.getPercentDownloaded()
    # FIX: compare against None with `is`, not `==`.
    if Hellanzb.ht.readLimit is None or Hellanzb.ht.readLimit == 0:
        s['maxrate'] = 0
    else:
        s['maxrate'] = Hellanzb.ht.readLimit / 1024
    s['total_dl_nzbs'] = Hellanzb.totalArchivesDownloaded
    s['total_dl_files'] = Hellanzb.totalFilesDownloaded
    s['total_dl_segments'] = Hellanzb.totalSegmentsDownloaded
    s['total_dl_mb'] = Hellanzb.totalBytesDownloaded / 1024 / 1024
    s['config_file'] = Hellanzb.CONFIG_FILENAME
    s['hostname'] = Hellanzb.HOSTNAME
    s['version'] = Hellanzb.version
    s['currently_downloading'] = [self.makeNZBStruct(nzb) for nzb in currentNZBs]
    # FIX: guarantee the lock is released even if makeNZBStruct raises,
    # otherwise every later status call would deadlock.
    Hellanzb.postProcessorLock.acquire()
    try:
        s['currently_processing'] = [self.makeNZBStruct(processor) for processor in \
                                     Hellanzb.postProcessors]
    finally:
        Hellanzb.postProcessorLock.release()
    s['queued'] = listQueue()
    s['log_entries'] = [{getLevelName(entry[0]): self.cleanLog(entry[1])} \
                        for entry in Hellanzb.recentLogs]
    return s
def parse(module=None, instance=None, id=None):
    """Parses and saves instances"""
    try:
        instances = srv.metaWeblog.show_instance(module, instance, id)
        # Map the remote module path string onto the local module object.
        if module == "objectapp.models":
            module = objmodels
        if module == "gstudio.models":
            module = gstmodels
        for i in instances:
            # YYYYMMDD<sep>HH<sep>MM<sep>SS timestamp pattern.
            pattern = "^(\d{4})(\d{2})(\d{2}).(\d{2}).(\d{2}).(\d{2})$"
            # Strip Django/MPTT internal cache fields that must not be
            # passed to the model constructor.
            if "_tags_cache" in i:
                del i["_tags_cache"]
            if "_state" in i:
                del i["_state"]
            if "_altnames_cache" in i:
                del i["_altnames_cache"]
            if "_mptt_cached_fields" in i:
                del i["_mptt_cached_fields"]
            def group(value):
                return value.group(1, 2, 3, 4, 5, 6)
            def str_to_int(string):
                return [int(x) for x in string]
            # Weird check for DateTime objects
            for key in i.keys():
                # Duck-type detection: xmlrpclib DateTime-like values
                # expose make_comparable.
                if "make_comparable" in dir(i[key]):
                    dt = DateTime().make_comparable(i[key])[1]
                    dt = str_to_int(group(re.search(pattern, dt)))
                    i[key] = datetime(*dt)
            class_checker(module)[instance](**i).save()
    except (ObjectDoesNotExist, IntegrityError):
        sys.stderr.write("sync-instances.py:55: "
                         "Object matching query does not exist\n")
    except ValueError:
        sys.stderr.write("sync-instances.py:93: "
                         "Object already exists\n")
def test_marshall_datetime_result(self):
    """marshall_result converts DateTime values to datetime, recursively."""
    py_dt = datetime(2014, 4, 14, 16, 27, 00)
    xml_dt = DateTime('20140414T16:27:00+0200')  # format used by Sklik
    cases = [
        (MockEntity, xml_dt, py_dt),
        (MockEntity, [1234, xml_dt, 'abcdef'], [1234, py_dt, 'abcdef']),
        (MockEntity, (1234, xml_dt, 'abcdef'), (1234, py_dt, 'abcdef')),
        (MockEntity, [{'a': xml_dt}], [MockEntity(a=py_dt)]),
        (None, {'X': [{'a': xml_dt}]}, {'X': [{'a': py_dt}]}),
    ]
    for obj_type, data, expected in cases:
        self.assertEqual(marshall_result(data, obj_type), expected)
def post_structure(entry, site):
    """A post structure with extensions"""
    author = User.objects.get(pk=2)
    structure = {
        'title': entry.title,
        'description': unicode(entry.html_body),
        'link': '%s://%s%s' % (PROTOCOL, site.domain,
                               reverse('blog_post', args=[entry.slug])),
    }
    # Basic Extensions
    structure['permaLink'] = '%s://%s%s' % (PROTOCOL, site.domain,
                                            entry.get_absolute_url())
    structure['categories'] = [cat.title for cat in entry.categories.all()]
    structure['tags'] = [tag.title for tag in entry.tags.all()]
    structure['dateCreated'] = DateTime(entry.pub_date.isoformat())
    structure['postid'] = entry.pk
    structure['userid'] = author.username
    structure['mt_allow_comments'] = int(entry.comment_enabled)
    structure['wp_slug'] = entry.slug
    return structure
def post_structure(post):
    """Build a MetaWeblog/WordPress post structure from a blog post.

    Returns the dict the XML-RPC API expects, including Movable Type
    (``mt_*``) and WordPress (``wp_*``) extension fields.
    """
    return {
        'title': post.title,
        'description': post.content,
        'dateCreated': DateTime(post.date),
        'categories': [cate.name for cate in Category.objects.all()],
        'link': settings.BLOG_DOMAIN + post.get_absolute_url(),
        'permalink': settings.BLOG_DOMAIN + post.get_absolute_url(),
        'postid': post.pk,
        'userid': post.author.username,
        'mt_excerpt': post.excerpt_content,
        'mt_allow_comments': post.allow_comment,
        # FIX: was a duplicate 'mt_allow_comments' key that silently
        # overwrote the comment flag with the pingback flag; the
        # pingback setting belongs under 'mt_allow_pings'.
        'mt_allow_pings': post.allow_pingback,
        'mt_keywords': post.tags,
        'sticky': post.sticky,
        'wp_password': post.password,
        'wp_slug': post.slug,
        'wp_author': post.author.username,
        'wp_author_id': post.author.pk,
        'wp_author_display_name': post.author.username,
    }
except socket.gaierror, err: self._log.error('Network error: %s', err) return False except ProtocolError, err: self._log.error('Protocol error: %s %s' % (err.errcode, err.errmsg)) return False except Error, err: self._log.error('Error: %s' % err) return False except: self._log.error('Unknown error: %s' % str(sys.exc_info()[1])) return False self._log.info('Trac XMLRPC-API version: %s' % '.'.join(map(str, api_version))) d = DateTime(time.mktime(tracker.last_update.timetuple())) self._log.info('Last update: %s' % d) tids = server.ticket.getRecentChanges(d) self._log.info('Issue updates: %s' % ` tids `) for tid in tids: issue = Issue.by_tracker_id(tracker.id, tid) id_, cr_date, last_change, data = server.ticket.get(tid) #if isinstance(cr_date, int): #cr_date = datetime.fromtimestamp(cr_date) #last_change = datetime.fromtimestamp(last_change) issue.no = id_ issue.set_title(data.get('summary', '')) issue.set_description(data.get('description', '')) issue.reporter = data.get('reporter') issue.owner = data.get('owner') issue.last_change = last_change
def return_datetime(self):
    """Return a fixed DateTime for 2003-10-07 08:01:00 (time-tuple form)."""
    stamp = (2003, 10, 7, 8, 1, 0, 1, 280, -1)
    return DateTime(stamp)
def rescan(hostid):
    """Rescan a host, coordinating concurrent callers via a lock and a
    start-time marker file.

    If no rescan is in flight, this process creates the marker, rescans,
    then removes the marker. If a marker already exists, it waits for
    that rescan to finish; a rescan that started after our own start
    time already covers us, otherwise we retry our own rescan.
    """
    try:
        try:
            # get the current time, call it x
            curr_time = datetime.utcnow()
            # acquire common lock
            l = lock.Lock(RESCAN_LOCK_NAME, HOST_LOCK_NAME_FORMAT % hostid)
            l.acquire()
            while 1:
                # check if starttime_anything exists
                tryRescan = False
                files = glob.glob(START_TIME_FILE_PATH_FORMAT % (hostid, "*"))
                if len(files) == 0:
                    # if not, create starttime_x
                    path = START_TIME_FILE_PATH_FORMAT % (hostid, str(curr_time))
                    path = path.replace(" ", "_")
                    open(path, "w").close()
                    # release common lock
                    l.release()
                    # perform host rescan
                    _rescan_hostID(hostid)
                    # acquire common lock
                    l.acquire()
                    # remove starttime_x
                    os.unlink(path)
                    # release common lock and exit
                    l.release()
                    break
                else:
                    # if it does
                    # read the start time
                    start_time = files[0].split(START_TIME_FILE_PATH_FORMAT % (hostid, ""))[1]
                    start_time = DateTime(start_time.replace("__", " "))
                    while 1:
                        # stick around till start_time exists
                        # drop common lock
                        l.release()
                        # sleep for a sec
                        time.sleep(1)
                        # acquire common lock
                        l.acquire()
                        # check if start time exists
                        if len(glob.glob(START_TIME_FILE_PATH_FORMAT % (hostid, "*"))) == 0:
                            tryRescan = False
                            if DateTime(str(curr_time)) < start_time:
                                # we are cool, this started before the rescan
                                # drop common lock and go home
                                l.release()
                            else:
                                # try to start a rescan
                                tryRescan = True
                            break
                        # else continue by default
                    if not tryRescan:
                        break
        except Exception, e:
            util.SMlog("Failed to perform rescan of host: %s. "
                       "Error: %s" % (hostid, str(e)))
    finally:
        # NOTE(review): if Lock() itself raised, `l` is unbound here and
        # this release would raise NameError -- confirm upstream.
        l.release()
SATELLITE_URL = "http://%s/rpc/api" % SATELLITE_HOST SATELLITE_LOGIN = parser.get("rhof", "rhns_user") SATELLITE_PASSWORD = parser.get("rhof", "rhns_password") # the statusfile to store temporary status information of scheduled actions # Note: To make parallel scheduling possible we add as a suffix to the filename # defined below the releaseid or if not set the date-time of the action (YYMMDDHHMM) global statusfile statusfile = "/var/tmp/schedule_status" # define some variables systemlist = [] success_ops = 0 failed_ops = 0 failed_systems = [] date8601 = DateTime(Time()) needed = 'provisioning_entitled' ############################################################################################### # # SCRIPT OPTIONS # ############################################################################################### scriptparser.add_option( "-u", "--user", dest="scriptuser", default="root", help="Username to run the remote script. Default: root") scriptparser.add_option( "-g",
def rescan(hostid):
    """Rescan a host, coordinating concurrent callers via a lock and a
    start-time marker file (single-quote variant of the same routine).

    If no rescan is in flight, creates the marker, rescans, removes the
    marker; otherwise waits for the in-flight rescan and either reuses
    its result (it started after us) or retries.
    """
    try:
        try:
            # get the current time, call it x
            curr_time = datetime.utcnow()
            # acquire common lock
            l = lock.Lock(RESCAN_LOCK_NAME, HOST_LOCK_NAME_FORMAT % hostid)
            l.acquire()
            while(1):
                # check if starttime_anything exists
                tryRescan = False
                files = glob.glob(START_TIME_FILE_PATH_FORMAT % (hostid, '*'))
                if len(files) == 0:
                    # if not, create starttime_x
                    path = START_TIME_FILE_PATH_FORMAT % (hostid, str(curr_time))
                    path = path.replace(' ', '_')
                    open(path, 'w').close()
                    # release common lock
                    l.release()
                    # perform host rescan
                    _rescan_hostID(hostid)
                    # acquire common lock
                    l.acquire()
                    # remove starttime_x
                    os.unlink(path)
                    # release common lock and exit
                    l.release()
                    break
                else:
                    # if it does
                    # read the start time
                    start_time = files[0].split(START_TIME_FILE_PATH_FORMAT % (hostid, ''))[1]
                    start_time = DateTime(start_time.replace('__', ' '))
                    while(1):
                        # stick around till start_time exists
                        # drop common lock
                        l.release()
                        # sleep for a sec
                        time.sleep(1)
                        # acquire common lock
                        l.acquire()
                        # check if start time exists
                        if len(glob.glob(START_TIME_FILE_PATH_FORMAT % \
                                         (hostid, '*'))) == 0:
                            tryRescan = False
                            if DateTime(str(curr_time)) < start_time:
                                # we are cool, this started before the rescan
                                # drop common lock and go home
                                l.release()
                            else:
                                # try to start a rescan
                                tryRescan = True
                            break
                        # else continue by default
                    if not tryRescan:
                        break
        except Exception, e:
            util.SMlog("Failed to perform rescan of host: %s. "\
                       "Error: %s" % (hostid, str(e)))
    finally:
        # NOTE(review): if Lock() itself raised, `l` is unbound here and
        # this release would raise NameError -- confirm upstream.
        l.release()
def _get_story_data(cls, story, site=None):
    """Build the WordPress XML-RPC post structure for a story.

    Maps publication state to a WordPress post_status, re-embeds media
    items as HTML in the body, and fills in MT/WP extension fields.
    """
    url = story.get_absolute_url(site=site)
    preview_url = get_preview_url(story) or url
    # See http://codex.wordpress.org/Post_Status_Transitions
    if story.is_published:
        if story.pub_date > datetime.datetime.now():
            post_status = 'future'
        else:
            post_status = 'publish'
    else:
        post_status = 'draft'
    # unescaping as inlines are escaped.
    story_body = unescape_entities(story.raw_body)
    # Add media image items as HTML in the story body.
    # They'll get converted back when saving the story.
    images = list(story.images.all())
    videos = list(story.videos.all())
    story_body = cls._create_media_html(images, videos) + story_body
    return {
        'dateCreated': DateTime(story.pub_date),
        'userid': str(story.author.id),
        'postid': str(story.id),
        'description': story_body,
        'title': story.headline,
        'link': url,
        'permaLink': preview_url,
        'categories': [smart_unicode(cat) for cat in story.categories.all()],
        'mt_excerpt': story.get_short_summary(),
        'mt_text_more': '',
        'wp_more_text': '',
        'mt_allow_comments': int(story.comments.enabled),
        'mt_allow_pings': 0,
        'mt_keywords': ', '.join((smart_unicode(tag) for tag in story.tags)),
        'wp_slug': story.slug,
        'wp_password': '',
        'wp_author_id': str(story.author.id),
        'wp_author_display_name': story.author.username,
        # GMT variants accompany the local timestamps per the WP API.
        'date_created_gmt': DateTime(to_gmt(story.pub_date)),
        'post_status': post_status,
        'custom_fields': [],
        'wp_post_format': 'standard',
        'date_modified': DateTime(story.updated_date or story.pub_date),
        'date_modified_gmt': DateTime(to_gmt(story.updated_date or story.pub_date)),
    }