def save(self):
    """Persists the current job instance to its corresponding Redis key."""
    fields = {"created_at": times.format(self.created_at, "UTC")}
    # Optional timestamps are serialized as UTC strings.
    for field, stamp in (("enqueued_at", self.enqueued_at),
                         ("ended_at", self.ended_at)):
        if stamp is not None:
            fields[field] = times.format(stamp, "UTC")
    # Scalar attributes are stored verbatim when present.
    for field, value in (("origin", self.origin),
                         ("description", self.description),
                         ("exc_info", self.exc_info),
                         ("timeout", self.timeout),
                         ("result_ttl", self.result_ttl),
                         ("status", self._status)):
        if value is not None:
            fields[field] = value
    # Pickled payloads.
    if self.func_name is not None:
        fields["data"] = dumps(self.job_tuple)
    if self._result is not None:
        fields["result"] = dumps(self._result)
    if self.meta:
        fields["meta"] = dumps(self.meta)
    self.connection.hmset(self.key, fields)
def dump(self):
    """Returns a serialization of the current job instance"""
    serialized = {
        'created_at': times.format(self.created_at or times.now(), 'UTC'),
    }
    # Plain attributes go in unchanged when they are set.
    for field, value in (('origin', self.origin),
                         ('description', self.description),
                         ('exc_info', self.exc_info),
                         ('timeout', self.timeout),
                         ('result_ttl', self.result_ttl),
                         ('status', self._status),
                         ('dependency_id', self._dependency_id)):
        if value is not None:
            serialized[field] = value
    # Timestamps are serialized as UTC strings.
    for field, stamp in (('enqueued_at', self.enqueued_at),
                         ('ended_at', self.ended_at)):
        if stamp is not None:
            serialized[field] = times.format(stamp, 'UTC')
    # Pickled payloads.
    if self.func_name is not None:
        serialized['data'] = dumps(self.job_tuple)
    if self._result is not None:
        serialized['result'] = dumps(self._result)
    if self.meta:
        serialized['meta'] = dumps(self.meta)
    return serialized
def save(self):
    """Persists the current job instance to its corresponding Redis key."""
    payload = {'created_at': times.format(self.created_at, 'UTC')}

    def put(field, value):
        # Only materialized attributes are written to the hash.
        if value is not None:
            payload[field] = value

    put('data', dumps(self.job_tuple) if self.func_name is not None else None)
    put('origin', self.origin)
    put('description', self.description)
    if self.enqueued_at is not None:
        payload['enqueued_at'] = times.format(self.enqueued_at, 'UTC')
    if self.ended_at is not None:
        payload['ended_at'] = times.format(self.ended_at, 'UTC')
    if self._result is not None:
        payload['result'] = dumps(self._result)
    put('exc_info', self.exc_info)
    put('timeout', self.timeout)
    put('result_ttl', self.result_ttl)
    put('status', self._status)
    if self.meta:
        payload['meta'] = dumps(self.meta)
    self.connection.hmset(self.key, payload)
def save(self, pipeline=None):
    """Persists the current job instance to its corresponding Redis key.

    When ``pipeline`` is given, the write is queued on it instead of
    being issued on the default connection.
    """
    connection = self.connection if pipeline is None else pipeline
    fields = {'created_at': times.format(self.created_at or times.now(), 'UTC')}
    if self.func_name is not None:
        fields['data'] = dumps(self.job_tuple)
    # Plain scalar attributes are stored verbatim when set.
    for name, value in (('origin', self.origin),
                        ('description', self.description),
                        ('exc_info', self.exc_info),
                        ('timeout', self.timeout),
                        ('result_ttl', self.result_ttl),
                        ('status', self._status)):
        if value is not None:
            fields[name] = value
    # Timestamps become UTC strings.
    for name, stamp in (('enqueued_at', self.enqueued_at),
                        ('ended_at', self.ended_at)):
        if stamp is not None:
            fields[name] = times.format(stamp, 'UTC')
    if self._result is not None:
        fields['result'] = dumps(self._result)
    if self.meta:
        fields['meta'] = dumps(self.meta)
    connection.hmset(self.key, fields)
def on_callback(self, request):
    """Handles the Draft (draftin.com) publish webhook.

    Validates the request method and shared secret, writes the document
    as a Markdown post, then runs the configured rebuild command and
    reports the outcome back to the caller.
    """
    # Guard clauses replace the original nested if/else pyramid.
    if request.method != 'POST':
        request.respond('This hook only supports POST method.')
        return
    # The shared secret arrives as a query-string parameter.
    if request.GET.get('secret', [None])[0] != self.bot.config.draftin_secret:
        request.respond('Wrong secret was specified')
        return

    payload = anyjson.deserialize(request.POST['payload'][0])
    title = payload['name']
    content = payload['content']
    slug = slugify(title)
    created_at = times.to_universal(payload['created_at'])
    updated_at = times.to_universal(payload['updated_at'])
    timezone = self.bot.config.timezone

    filename = os.path.join(self.bot.config.documents_dir, slug + '.md')
    with open(filename, 'w') as f:
        post_content = self.template.format(
            title=title,
            content=content,
            slug=slug,
            created_at=times.format(created_at, timezone, '%Y-%m-%d %H:%M'),
            updated_at=times.format(updated_at, timezone, '%Y-%m-%d %H:%M'))
        f.write(post_content.encode('utf-8'))

    try:
        subprocess.check_output(self.bot.config.update_command,
                                stderr=subprocess.STDOUT,
                                shell=True)
    # BUG FIX: "except E, e" is Python-2-only syntax; "as" is valid on
    # Python 2.6+ and Python 3.
    except subprocess.CalledProcessError as e:
        request.respond(u'I tried to update a blog, but there was an error: '
                        + e.output.encode('utf-8'))
    else:
        request.respond('Done, published')
def save(self):
    """Persists the current job instance to its corresponding Redis key."""
    fields = {'created_at': times.format(self.created_at, 'UTC')}
    if self.func_name is not None:
        fields['data'] = dumps(self.job_tuple)
    # Timestamps are serialized as UTC strings.
    for name, stamp in (('enqueued_at', self.enqueued_at),
                        ('ended_at', self.ended_at)):
        if stamp is not None:
            fields[name] = times.format(stamp, 'UTC')
    # Note: 'result' is stored as-is here (this variant does not pickle it).
    for name, value in (('origin', self.origin),
                        ('description', self.description),
                        ('result', self._result),
                        ('exc_info', self.exc_info),
                        ('timeout', self.timeout)):
        if value is not None:
            fields[name] = value
    self.connection.hmset(self.key, fields)
def test_clean_rq(self):
    """clean_rq drops stale job keys but keeps fresh and ambiguous ones."""
    conn = get_redis_connection()
    self.assertEqual(len(conn.keys("rq:job:*")), 0)
    # One key without a timestamp, one fresh, one ten days old.
    conn.hmset("rq:job:abc", {"bar": "baz"})
    conn.hmset("rq:job:def", {"created_at": times.format(times.now(), "UTC")})
    stale_stamp = times.format(times.now() - timedelta(days=10), "UTC")
    conn.hmset("rq:job:123", {"created_at": stale_stamp})
    self.assertEqual(len(conn.keys("rq:job:*")), 3)
    call_command("clean_rq")
    # Only the ten-day-old job should have been removed.
    self.assertEqual(len(conn.keys("rq:job:*")), 2)
def test_format_without_tzinfo(self):
    """Format times without timezone info"""
    universal = self.sometime_univ
    # (zone name, expected rendering) pairs, checked in order.
    for zone_name, expected in (
            ('Pacific/Auckland', '2012-02-02 00:56:31+1300'),
            ('Europe/Amsterdam', '2012-02-01 12:56:31+0100'),
            ('EST', '2012-02-01 06:56:31-0500')):
        self.assertEquals(times.format(universal, pytz.timezone(zone_name)),
                          expected)
def test_format_without_tzinfo(self):  # noqa
    """Format times without timezone info"""
    universal = self.sometime_univ
    expectations = (
        ('Pacific/Auckland', '2012-02-02T00:56:31+13:00'),
        ('Europe/Amsterdam', '2012-02-01T12:56:31+01:00'),
        ('EST', '2012-02-01T06:56:31-05:00'),
    )
    # This variant passes plain zone-name strings instead of tzinfo objects.
    for zone, expected in expectations:
        self.assertEquals(times.format(universal, zone), expected)
def test_clean_rq(self):
    """Only sufficiently old rq job keys are removed by clean_rq."""
    client = get_redis_connection()
    self.assertEqual(len(client.keys('rq:job:*')), 0)
    # Seed three jobs: no timestamp, fresh, and ten days old.
    client.hmset('rq:job:abc', {'bar': 'baz'})
    client.hmset('rq:job:def',
                 {'created_at': times.format(times.now(), 'UTC')})
    old = times.now() - timedelta(days=10)
    client.hmset('rq:job:123', {'created_at': times.format(old, 'UTC')})
    self.assertEqual(len(client.keys('rq:job:*')), 3)
    call_command('clean_rq')
    self.assertEqual(len(client.keys('rq:job:*')), 2)
def test_clean_rq(self):
    """clean_rq removes only the sufficiently old rq job hash."""
    client = redis.Redis(**settings.REDIS)
    self.assertEqual(len(client.keys('rq:job:*')), 0)
    # Three fixtures: missing timestamp, current, and ten days stale.
    client.hmset('rq:job:abc', {'bar': 'baz'})
    client.hmset('rq:job:def',
                 {'created_at': times.format(times.now(), 'UTC')})
    stale = times.now() - timedelta(days=10)
    client.hmset('rq:job:123', {'created_at': times.format(stale, 'UTC')})
    self.assertEqual(len(client.keys('rq:job:*')), 3)
    call_command('clean_rq')
    self.assertEqual(len(client.keys('rq:job:*')), 2)
def test_convert_unix_time_to_datetime(self):  # noqa
    """Can convert from UNIX time to universal time."""
    unix_time = 1328257004.456  # as returned by time.time()
    self.assertEquals(times.from_unix(unix_time),
                      datetime(2012, 2, 3, 8, 16, 44, 456000))
    # The same instant rendered in three zones.
    for zone, expected in (
            ('UTC', '2012-02-03T08:16:44.456000+00:00'),
            ('Europe/Amsterdam', '2012-02-03T09:16:44.456000+01:00'),
            ('Pacific/Auckland', '2012-02-03T21:16:44.456000+13:00')):
        self.assertEquals(times.format(times.from_unix(unix_time), zone),
                          expected)
def convert(tzs):
    """Recursively shifts tzs by the enclosing tzoffset.

    Accepts a string (delegated to times.format), an int (presumably a
    millisecond value -- confirm with callers), or a list of either.
    """
    if isinstance(tzs, list):
        return map(convert, tzs)
    if isinstance(tzs, int):
        return tzs + int(3600 * tzoffset * 1000)
    if isinstance(tzs, basestring):
        return times.format(tzs, tzoffset)
def callback_perform_job(self, result):
    """Finalizes a successfully performed job: logs the outcome and
    persists the pickled result back to Redis, honouring result_ttl.

    ``result`` is a ``(return_value, job)`` pair.  The ``yield``s suggest
    this runs inside a Twisted inlineCallbacks-style generator (the
    decorator is not visible in this chunk -- confirm).
    """
    rv, job = result
    # The job function may itself have returned a Deferred; wait for it.
    if isinstance(rv, defer.Deferred):
        rv = yield rv
    pickled_rv = dumps(rv)
    job._status = Status.FINISHED
    job.ended_at = times.now()
    if LOGGING_OK_JOBS:
        meta = ','.join([item for item in job.meta.values()])
        if rv is None:
            self.log.msg('[%s] Job OK. %s' % (meta, job))
        else:
            self.log.msg('[%s] Job OK. %s. result = %r' % (meta, job, rv))
    # A per-job TTL overrides the worker-wide default.
    result_ttl = self.default_result_ttl if job.result_ttl is None else job.result_ttl
    if result_ttl == 0:
        # TTL of zero: discard the job (and its result) immediately.
        yield job.delete()
        #self.log.msg('Result discarded immediately.')
    else:
        yield self.connection.hset(job.key, 'result', pickled_rv)
        yield self.connection.hset(job.key, 'ended_at', times.format(job.ended_at, 'UTC'))
        yield self.connection.hset(job.key, 'status', job._status)
        # A negative TTL means "keep forever" -- no expiry is set.
        if result_ttl > 0:
            yield self.connection.expire(job.key, result_ttl)
def facebook_event(id):
    """Creates or updates the Facebook event mirroring local event ``id``,
    invites pending Facebook contacts, then chains to Google syncing.

    On a missing/expired Facebook token, redirects through the OAuth
    authorize flow and returns to this endpoint afterwards.
    """
    event = current_user.event_or_404(id)
    if event.is_facebook_involved():
        try:
            api = facebook.create_api()
            payload = {
                'name': event.name,
                'description': event.description or '',
                'location': event.venue or '',
                'start_time': times.format(event.starts_at, current_user.timezone, '%Y-%m-%dT%H:%M:%S'),
            }
            if event.facebook_id:
                # Already mirrored on Facebook -- update it in place.
                api.post(path='/' + event.facebook_id, **payload)
            else:
                # First sync: create the event and remember its id.
                data = api.post(path='/events', **payload)
                with db.transaction:
                    event.facebook_id = data['id']
            contacts_to_invite = list(event.contacts_facebook_to_invite)
            if contacts_to_invite:
                ids = ','.join([c.facebook_id for c in contacts_to_invite])
                api.post(path='/' + event.facebook_id + '/invited?users=' + ids)
                # Mark invitations as sent only after the API call succeeded.
                with db.transaction:
                    for contact in contacts_to_invite:
                        event.set_invitation_sent(contact)
        except (facebook.ConnectionError, facebook.OAuthError):
            return redirect(facebook.create_authorize_url(
                action_url=url_for('facebook_event', id=event.id),
                error_url=url_for('edit_event', id=event.id),
                scope='create_event'
            ))
    return redirect(url_for('google_event', id=event.id))
def convert(tzs):
    """Applies the enclosing tzoffset to a string, an int, or a list
    of either (lists are converted element-wise)."""
    if isinstance(tzs, basestring):
        result = times.format(tzs, tzoffset)
    elif isinstance(tzs, int):
        # Presumably a millisecond-based value -- confirm with callers.
        result = tzs + int(3600 * tzoffset * 1000)
    elif isinstance(tzs, list):
        result = map(convert, tzs)
    else:
        result = None
    return result
def test_convert_unix_time_to_datetime(self):  # noqa
    """Can convert from UNIX time to universal time."""
    unix_time = 1328257004.456  # as returned by time.time()
    expected_dt = datetime(2012, 2, 3, 8, 16, 44, 456000)
    self.assertEquals(times.from_unix(unix_time), expected_dt)
    # Render the same instant into three different timezones.
    cases = (
        ('UTC', '2012-02-03T08:16:44.456000+00:00'),
        ('Europe/Amsterdam', '2012-02-03T09:16:44.456000+01:00'),
        ('Pacific/Auckland', '2012-02-03T21:16:44.456000+13:00'),
    )
    for zone, rendered in cases:
        self.assertEquals(times.format(times.from_unix(unix_time), zone),
                          rendered)
def register_death(self):
    """Marks this worker as dead in Redis."""
    self.log.debug('Registering death')
    with self.connection._pipeline() as pipeline:
        # Assigning self.state = 'dead' here would roll the pipeline
        # back, so the hash field is written by hand instead.
        pipeline.srem(self.redis_workers_keys, self.key)
        pipeline.hset(self.key, 'death', times.format(times.now(), 'UTC'))
        # Keep the dead worker's key around briefly, then let it expire.
        pipeline.expire(self.key, 60)
        pipeline.execute()
def tz_choices():
    """Prepares timezone choices for use in forms."""
    choices = []
    for tz in common_timezones:
        # "Europe/Prague" -> "Prague, Europe" with underscores as spaces.
        label = ', '.join(reversed(tz.split('/'))).replace('_', ' ')
        local_time = times.format(times.now(), tz, '%H:%M')
        choices.append((tz, local_time + u' – ' + label))
    # Sort by the rendered "HH:MM – label" text.
    choices.sort(key=lambda choice: choice[1])
    return choices
def save(self):
    """Persists the current job instance to its corresponding Redis key."""
    fields = {'created_at': times.format(self.created_at, 'UTC')}
    if self.func_name is not None:
        fields['data'] = dumps(self.job_tuple)
    # Timestamps are serialized as UTC strings.
    for name, stamp in (('enqueued_at', self.enqueued_at),
                        ('ended_at', self.ended_at)):
        if stamp is not None:
            fields[name] = times.format(stamp, 'UTC')
    # Scalar attributes are written verbatim when present.
    for name, value in (('origin', self.origin),
                        ('description', self.description),
                        ('exc_info', self.exc_info),
                        ('timeout', self.timeout),
                        ('result_ttl', self.result_ttl),
                        ('status', self._status)):
        if value is not None:
            fields[name] = value
    if self._result is not None:
        fields['result'] = dumps(self._result)
    # Store additional attributes from the job instance into Redis.  This
    # lets third-party libraries using RQ keep extra data directly on
    # ``Job`` instances, e.g.::
    #
    #     job = Job.create(func)
    #     job.foo = 'bar'
    #     job.save()  # Will persist the 'foo' attribute
    for extra in set(self.__dict__).difference(JOB_ATTRS):
        fields[extra] = getattr(self, extra)
    self.connection.hmset(self.key, fields)
def on_callback(self, request):
    """Handles the Draft (draftin.com) publish webhook.

    Checks method and shared secret, writes the Markdown post, runs the
    configured rebuild command, and reports the outcome.
    """
    # Early returns replace the original nested conditionals.
    if request.method != 'POST':
        request.respond('This hook only supports POST method.')
        return
    if request.GET.get('secret', [None])[0] != self.bot.config.draftin_secret:
        request.respond('Wrong secret was specified')
        return

    payload = anyjson.deserialize(request.POST['payload'][0])
    title = payload['name']
    content = payload['content']
    slug = slugify(title)
    created_at = times.to_universal(payload['created_at'])
    updated_at = times.to_universal(payload['updated_at'])
    timezone = self.bot.config.timezone

    with open(
            os.path.join(self.bot.config.documents_dir, slug + '.md'),
            'w') as f:
        post_content = self.template.format(
            title=title,
            content=content,
            slug=slug,
            created_at=times.format(created_at, timezone, '%Y-%m-%d %H:%M'),
            updated_at=times.format(updated_at, timezone, '%Y-%m-%d %H:%M'))
        f.write(post_content.encode('utf-8'))

    try:
        subprocess.check_output(self.bot.config.update_command,
                                stderr=subprocess.STDOUT,
                                shell=True)
    # BUG FIX: "except E, e" is Python-2-only syntax; "as" is valid on
    # Python 2.6+ and Python 3.
    except subprocess.CalledProcessError as e:
        request.respond(
            u'I tried to update a blog, but there was an error: '
            + e.output.encode('utf-8'))
    else:
        request.respond('Done, published')
def convert(tzs, tzoffset=None):
    """Shifts tzs by tzoffset (hundredths of hours; 'system' means the
    local UTC offset).  Handles datetimes, strings, ints, and lists."""
    if tzoffset == 'system':
        # time.timezone is seconds west of UTC; flip sign and scale.
        tzoffset = (time.timezone / -(60 * 60) * 100)
    if not tzoffset:
        return tzs
    # The remaining branches are mutually exclusive on type.
    if isinstance(tzs, datetime):
        return tzs + timedelta(hours=float(tzoffset) / 100)
    if isinstance(tzs, basestring):
        return times.format(tzs, int(tzoffset))
    if isinstance(tzs, int):
        return tzs + int(3600 * float(tzoffset) / 100)
    if isinstance(tzs, list):
        return map(lambda item: convert(item, float(tzoffset)), tzs)
def convert(tzs, tzoffset=None):
    """Applies a tz offset (in hundredths of hours) to datetimes,
    strings, ints, or lists thereof; 'system' uses the local offset."""
    if tzoffset == 'system':
        tzoffset = (time.timezone / -(60*60) * 100)
    # A falsy offset is a no-op.
    if not tzoffset:
        return tzs
    hours = float(tzoffset) / 100
    if isinstance(tzs, datetime):
        return tzs + timedelta(hours=hours)
    if isinstance(tzs, basestring):
        return times.format(tzs, int(tzoffset))
    if isinstance(tzs, int):
        return tzs + int(3600 * hours)
    if isinstance(tzs, list):
        return map(lambda item: convert(item, float(tzoffset)), tzs)
def perform_job(self, job):
    """Performs the actual work of a job. Will/should only be called
    inside the work horse's process.

    Returns True on success and False when the job raised (the exception
    is handled and persisted via handle_exception before returning).
    """
    self.procline('Processing %s from %s since %s' % (
        job.func_name,
        job.origin, time.time()))
    try:
        # Abort the job if it runs longer than its timeout (180s default).
        with death_penalty_after(job.timeout or 180):
            rv = job.perform()
        # Pickle the result in the same try-except block since we need to
        # use the same exc handling when pickling fails
        pickled_rv = dumps(rv)
        job._status = Status.FINISHED
        job.ended_at = times.now()
    except:
        # Use the public setter here, to immediately update Redis
        job.status = Status.FAILED
        self.handle_exception(job, *sys.exc_info())
        return False
    if rv is None:
        self.log.info('Job OK')
    else:
        self.log.info('Job OK, result = %s' % (yellow(unicode(rv)),))
    # How long we persist the job result depends on the value of
    # result_ttl:
    # - If result_ttl is 0, cleanup the job immediately.
    # - If it's a positive number, set the job to expire in X seconds.
    # - If result_ttl is negative, don't set an expiry to it (persist
    #   forever)
    result_ttl = self.default_result_ttl if job.result_ttl is None else job.result_ttl  # noqa
    if result_ttl == 0:
        job.delete()
        self.log.info('Result discarded immediately.')
    else:
        # All result fields are written atomically via a pipeline.
        p = self.connection.pipeline()
        p.hset(job.key, 'result', pickled_rv)
        p.hset(job.key, 'status', job._status)
        p.hset(job.key, 'ended_at', times.format(job.ended_at, 'UTC'))
        if result_ttl > 0:
            p.expire(job.key, result_ttl)
            self.log.info('Result is kept for %d seconds.' % result_ttl)
        else:
            self.log.warning('Result will never expire, clean up result key manually.')
        p.execute()
    return True
def save(self):
    """Persists the current job instance to its corresponding Redis key."""
    hash_fields = {'created_at': times.format(self.created_at, 'UTC')}
    if self.func_name is not None:
        hash_fields['data'] = dumps(self.job_tuple)
    for name, stamp in (('enqueued_at', self.enqueued_at),
                        ('ended_at', self.ended_at)):
        if stamp is not None:
            hash_fields[name] = times.format(stamp, 'UTC')
    for name, value in (('origin', self.origin),
                        ('description', self.description),
                        ('exc_info', self.exc_info),
                        ('timeout', self.timeout),
                        ('result_ttl', self.result_ttl)):
        if value is not None:
            hash_fields[name] = value
    if self._result is not None:
        hash_fields['result'] = dumps(self._result)
    # Persist any extra attributes set on the instance.  This allows
    # third-party libraries using RQ to store additional data directly
    # on ``Job`` instances, e.g.::
    #
    #     job = Job.create(func)
    #     job.foo = 'bar'
    #     job.save()  # Will persist the 'foo' attribute
    for extra in set(self.__dict__).difference(JOB_ATTRS):
        hash_fields[extra] = getattr(self, extra)
    self.connection.hmset(self.key, hash_fields)
def save(self):
    """Persists the current job instance to its corresponding Redis key."""
    key = self.key
    obj = {}
    # NOTE(review): spelled 'create_at' (attribute and hash field) while
    # sibling implementations use 'created_at' -- confirm which spelling
    # this class actually defines before renaming either side.
    obj['create_at'] = times.format(self.create_at, 'UTC')
    if self.func_name is not None:
        obj['data'] = dumps(self.job_tuple)  # pickled (func, args, kwargs)
    if self.origin is not None:
        obj['origin'] = self.origin
    if self.description is not None:
        obj['description'] = self.description
    if self.enqueued_at is not None:
        obj['enqueued_at'] = times.format(self.enqueued_at, 'UTC')
    if self.ended_at is not None:
        obj['ended_at'] = times.format(self.ended_at, 'UTC')
    if self._result is not None:
        obj['result'] = self._result  # stored raw here, not pickled
    if self.exc_info is not None:
        obj['exc_info'] = self.exc_info
    if self.timeout is not None:
        obj['timeout'] = self.timeout
    self.connection.hmset(key, obj)
def register_birth(self):  # noqa
    """Registers its own birth."""
    self.log.debug('Registering birth of worker %s' % (self.name,))
    # Refuse to start when a live (no recorded death) worker already
    # holds this name.
    already_active = (self.connection.exists(self.key) and
                      not self.connection.hexists(self.key, 'death'))
    if already_active:
        raise ValueError('There exists an active worker named \'%s\' '
                         'already.' % (self.name,))
    queues = ','.join(self.queue_names())
    with self.connection._pipeline() as pipe:
        pipe.delete(self.key)
        pipe.hset(self.key, 'birth', times.format(times.now(), 'UTC'))
        pipe.hset(self.key, 'queues', queues)
        pipe.sadd(self.redis_workers_keys, self.key)
        pipe.expire(self.key, self.default_worker_ttl)
        pipe.execute()
def strip_milliseconds(date):
    """Drops sub-second precision by round-tripping through a UTC string."""
    formatted = times.format(date, "UTC")
    return times.to_universal(formatted)
def default(self, o):
    """Serializes datetimes as Zulu (UTC) strings and plain dates as
    ISO-8601 strings; everything else goes to the base encoder."""
    # Order matters: datetime is a subclass of date.
    if isinstance(o, datetime.datetime):
        return times.format(o, 'Zulu')
    if isinstance(o, datetime.date):
        return o.isoformat()
    return super(JSONDateTimeMixin, self).default(o)
def test_format_refuses_local_times(self):
    """Format refuses local time input"""
    target_zone = pytz.timezone('Pacific/Auckland')
    # A naive local datetime must be rejected outright.
    with self.assertRaises(ValueError):
        times.format(self.time_in_ams, target_zone)
def test_custom_format(self):
    """Hour-only strftime patterns honour the target timezone."""
    universal = self.sometime_univ
    for zone_name, hour in (('Pacific/Auckland', '00'), ('EST', '06')):
        rendered = times.format(universal, pytz.timezone(zone_name), '%H')
        self.assertEquals(rendered, hour)
# Flickr scraping configuration.  API credentials come from the loaded
# ``config`` mapping; search results, images and data are cached in
# local directories relative to the working dir.
TAG = 'philMeta'
API_KEY = config['flickr_api_key']
API_SECRET = config['flickr_api_secret']
REST_ENDPOINT = 'https://api.flickr.com/services/rest/'
SEARCHES_DIR = './search'
IMG_URL = 'http://farm%s.staticflickr.com/%s/%s_%s_z.jpg'
IMG_FNAME = './images/%s/%s-%s.jpg'  # query/id-query.jpg
IMG_URL_S = 'http://farm%s.staticflickr.com/%s/%s_%s_q.jpg'
IMG_FNAME_S = './images/%s/%s_square-%s.jpg'  # query/id-query.jpg
IMG_DIR = './images/%s'  # query
DATA_DIR = './data'
DATA_FNAME = './data/%s.json'  # query
# Timestamp captured once at import time, rendered for New York.
NOW = times.now()
TZ = 'America/New_York'
YMD = times.format(NOW, TZ, fmt='%Y-%m-%d')

flickr_api.set_keys(api_key=API_KEY, api_secret=API_SECRET)


def unjsonpify(jsonp):
    """Strips the JSONP function wrapper from a Flickr API response."""
    return jsonp[14:-1]  # totally hacky strip off jsonp func


def get_photo_info(photo):
    """Fetches flickr.photos.getInfo for one photo record, throttled to
    avoid hammering the API.

    NOTE(review): the function appears truncated in this chunk (the
    response is fetched but never returned here).
    """
    params = {'api_key': API_KEY,
              'photo_id': photo['id'],
              'secret': photo['secret'],
              'method': 'flickr.photos.getInfo',
              'format': 'json'}
    response = requests.get(REST_ENDPOINT, params=params)
    time.sleep(0.5)
def rfc_utc_now():
    """
    Return the RFC 3339 format of current UTC time.
    """
    stamp = times.format(times.now(), "UTC", "%Y-%m-%dT%H:%M:%S%z")
    # strftime's %z yields "+0000"; RFC 3339 wants a colon: "+00:00".
    return "%s:%s" % (stamp[:-2], stamp[-2:])
def datetime(dt):
    """Formats datetime objects."""
    # Fall back to the app-wide default when the user has no timezone.
    display_tz = getattr(current_user, 'timezone',
                         app.config['DEFAULT_TIMEZONE'])
    return times.format(dt, display_tz, '%x, %H:%M')
def default(self, o):
    """Serializes datetimes as Zulu (UTC) strings and plain dates as
    ISO-8601 strings; everything else is deferred to the base encoder."""
    if isinstance(o, datetime.datetime):
        return times.format(o, 'Zulu')
    if isinstance(o, datetime.date):
        # BUG FIX: times.format() expects a datetime; feeding it a plain
        # date crashed.  Dates are serialized via isoformat() instead.
        return o.isoformat()
    # BUG FIX: the original dropped the base-class return value.
    return super(JSONDateTimeMixin, self).default(o)
def strip_milliseconds(date):
    """Removes sub-second precision via a UTC string round-trip."""
    return times.to_universal(
        times.format(date, 'UTC'))
def serialize_date(dt):
    """Renders dt in the active timezone; None passes through unchanged."""
    return None if dt is None else times.format(dt, get_tz())
# Flickr scraping configuration; credentials come from the preloaded
# ``config`` mapping and downloads are cached under local directories.
TAG = 'philMeta'
API_KEY = config['flickr_api_key']
API_SECRET = config['flickr_api_secret']
REST_ENDPOINT = 'https://api.flickr.com/services/rest/'
SEARCHES_DIR = './search'
IMG_URL = 'http://farm%s.staticflickr.com/%s/%s_%s_z.jpg'
IMG_FNAME = './images/%s/%s-%s.jpg'  # query/id-query.jpg
IMG_URL_S = 'http://farm%s.staticflickr.com/%s/%s_%s_q.jpg'
IMG_FNAME_S = './images/%s/%s_square-%s.jpg'  # query/id-query.jpg
IMG_DIR = './images/%s'  # query
DATA_DIR = './data'
DATA_FNAME = './data/%s.json'  # query
# Snapshot of "now", rendered as a New York date, taken at import time.
NOW = times.now()
TZ = 'America/New_York'
YMD = times.format(NOW, TZ, fmt='%Y-%m-%d')

flickr_api.set_keys(api_key=API_KEY, api_secret=API_SECRET)


def unjsonpify(jsonp):
    """Strips the JSONP function wrapper from a Flickr API response."""
    return jsonp[14:-1]  # totally hacky strip off jsonp func


def get_photo_info(photo):
    """Builds the flickr.photos.getInfo request parameters for one photo.

    NOTE(review): this function is visibly truncated in this chunk --
    the request itself is issued beyond the visible source.
    """
    params = {
        'api_key': API_KEY,
        'photo_id': photo['id'],
        'secret': photo['secret'],
        'method': 'flickr.photos.getInfo',
        'format': 'json'
    }
def test_format_refuses_local_times(self):
    """Format refuses local time input"""
    # Zone given as a plain name string in this variant.
    zone = 'Pacific/Auckland'
    with self.assertRaises(ValueError):
        times.format(self.time_in_ams, zone)
def local_time_format(value):
    """Formats a timestamp for display in EET local time."""
    display_pattern = '%d %b %Y, %H:%M:%S'
    return times.format(value, timezone='EET', fmt=display_pattern)
def test_custom_format(self):
    """Hour-only formatting respects the target timezone name."""
    universal = self.sometime_univ
    for zone, expected_hour in (('Pacific/Auckland', '00'), ('EST', '06')):
        self.assertEquals(times.format(universal, zone, '%H'), expected_hour)
def serialize_date(dt):
    """Serializes dt as a UTC string; None passes through unchanged."""
    return None if dt is None else times.format(dt, 'UTC')
def __make_revision_text(rev):
    """Renders one Perforce revision record as a single aligned line:
    rev number, changelist, timestamp, user, description."""
    # Collapse the multi-line change description onto one line.
    desc = rev.desc.rstrip().replace('\n', ' ')
    time_format = '%Y-%m-%d %H:%M:%S'
    # Display timezone is read from the Vim variable g:p4#timezone.
    time_str = times.format(rev.time, vim.eval('g:p4#timezone'), time_format)
    return '{:4}{:10d} {} {:<10} {}'.format(rev.rev, rev.change, time_str, rev.user, desc)