def daemon(db, settings, queue):
    """Run the job-processing loop until interrupted.

    Pops jobs from *queue*, processes each via prepare_job(...).process(),
    and appends any follow-up job it returns.  On KeyboardInterrupt or
    SystemExit the in-flight job is re-queued and the loop stops.  On any
    other exception the job is retried (incrementing job['attempt']) up to
    settings.MAX_QUEUE_ATTEMPTS times, after which it is moved to the
    bad-jobs queue.

    :param db: database handle passed through to prepare_job
    :param settings: settings object; MAX_QUEUE_ATTEMPTS bounds retries
    :param queue: job queue with popleft/append/append_bad
    """
    log.info('Starting daemon')
    daemon_started = True
    while daemon_started:
        job = queue.popleft(100)
        try:
            next_job = prepare_job(job, db, settings).process()
        except KeyboardInterrupt:
            log.info('Daemon interrupted')
            queue.append(job)  # re-queue so the interrupted job isn't lost
            daemon_started = False
        except SystemExit:
            # If job was interrupted, don't toss job.
            queue.append(job)
            log.info('Daemon interrupted')
            daemon_started = False
        except Exception:
            # print_exc() replaces the redundant sys.exc_info() +
            # print_tb() pair and also includes the exception message.
            traceback.print_exc()
            if job['attempt'] <= settings.MAX_QUEUE_ATTEMPTS:
                job['attempt'] += 1
                queue.append(job)
            else:
                # What should it do? Send a notification, record an error?
                # Don't lose the task.
                log.info('Adding job %s to bad jobs' % job['key'])
                queue.append_bad(job)
        else:
            if next_job:
                queue.append(next_job)
    log.info("Finishing daemon")
def process(self):
    """Apply this job's tags to every picture listed in data['keys']."""
    payload = self.data
    matched = list(self.db.pictures.by_keys(payload['keys']))
    log.info("Tagging %s pictures" % len(matched))
    new_tags = payload['tags']
    for pic in matched:
        self.db.tags.change_for_picture(pic['id'], new_tags)
    log.info("Done")
def flickr_upload(self):
    """Upload this picture to Flickr and persist the resulting URL and id."""
    tags = self.data['tags']
    key = self.key
    upload_result = flickr.upload(self.settings, self.filename,
                                  self.full_filepath, tags)
    flickr_url, photo_id = upload_result
    payload = json.dumps({
        'url': flickr_url,
        'id': photo_id
    })
    self.db.pictures.update(key, 'flickr', payload)
    log.info("Uploaded %s to Flickr" % key)
    return self.data
def process(self):
    """Tag every picture taken on the day given in this job's data."""
    info = self.data
    day_query = {
        'year': info['year'],
        'month': info['month'],
        'day': info['day']
    }
    day_pictures = list(self.db.pictures.find(day_query))
    log.info("Tagging day: %s-%s-%s (%s pictures)" %
             (info['year'], info['month'], info['day'], len(day_pictures)))
    day_tags = info['tags']
    for pic in day_pictures:
        self.db.tags.change_for_picture(pic['id'], day_tags)
    log.info("Done")
def gphotos_upload(self):
    """Upload this picture to Gphotos, targeting its batch album if one exists."""
    batch_id = self.data['batch_id']
    if batch_id:
        album_url = base.batch_2_album(batch_id, self.settings, section='feed')
    else:
        album_url = None
    feed_xml = gphotos.upload(self.settings, self.full_filepath,
                              self.filename, album_url)
    self.db.pictures.update(self.key, 'gphotos',
                            json.dumps({'xml': feed_xml}))
    log.info("Uploaded %s to Gphotos" % self.key)
    return self.data
def process(self):
    """Run (or skip) the job's current workflow step and advance it.

    Looks up the task for job['step'] in self.steps, executes it unless the
    step is listed in job['skip'], and moves the job to the next step with
    a reset attempt counter.  Returns the updated job, or whatever falsy
    value the task returned when the pipeline is finished.
    """
    job = self.data
    step = job['step']
    task_name, next_step = self.steps[step]

    # Skipped step: advance without running the task.
    if step in job.get('skip', []):
        job['step'] = next_step
        job['attempt'] = 0  # Step completed. Start next job fresh
        log.info('Skipping %s - Step: %s (%s)' % (self.key, step, self.filename))
        return job

    log.info('Processing %s - Step: %s (%s)' % (self.key, step, self.filename))
    if job['attempt'] > 0:
        log.info('Attempt %s for %s - %s' % (job['attempt'], step, self.key))
    job = getattr(self, task_name)()
    if job:
        job['step'] = next_step
        job['attempt'] = 0  # Step completed. Start next job fresh
    else:
        log.info('Finished %s (%s)' % (self.key, self.filename))
        # if self.data['is_last']:
        #     batch_id = self.data['batch_id']
        #     base.end_batch(batch_id, self.settings)
        #     log.info("Batch %s ended" % batch_id)
    return job
def process(self):
    """Apply a per-picture date change for each (key, date) pair.

    self.data['changes'] is an iterable of (picture_key, datetime) pairs.
    Each picture's stored date fields are rewritten from its new datetime.
    Handles an empty change list gracefully (the original zip(*changes)
    unpacking raised ValueError on []).
    """
    changes = self.data['changes']
    if not changes:
        log.info("Changing dates for 0 pictures")
        return
    # Only the keys are needed up front; the dates are consumed in the loop.
    keys = [key for key, _ in changes]
    pictures = {p['key']: p for p in self.db.pictures.by_keys(keys)}
    log.info("Changing dates for %s pictures" % len(pictures))
    for key, date in changes:
        self.db.pictures.change_date(key, {
            'year': date.year,
            'month': date.month,
            'day': date.day,
            'taken_time': mktime(date.timetuple()),
            'date_taken': date.strftime('%Y-%m-%d')
        })
    log.info("Done")
def process(self):
    """Move every picture from the origin day onto the target day."""
    origin = self.data['origin']
    target = self.data['target']
    day_query = {
        'year': origin.year,
        'month': origin.month,
        'day': origin.day
    }
    for pic in list(self.db.pictures.find(day_query)):
        # Build a fresh date payload per picture, as change_date receives
        # its own dict each call.
        self.db.pictures.change_date(pic['key'], {
            'year': target.year,
            'month': target.month,
            'day': target.day,
            'taken_time': mktime(target.timetuple()),
            'date_taken': target.strftime('%Y-%m-%d')
        })
    log.info("Done")
def local_process(self):
    """
    Collapses quick jobs so each picture doesn't get queued up in case of
    long batches: runs exif read, thumbnail generation, S3 upload and the
    local store in one pass.
    """
    src_name = self.original_filename
    pic_key = self.key
    stages = (
        ('read_exif', self._read_exif),
        ('thumbs', self._generate_thumbs),
        ('s3_upload', self._s3_upload),
        ('local_store', self._local_store),
    )
    for label, run in stages:
        log.info('Processing %s - Step: %s (%s)' % (pic_key, label, src_name))
        run()
    return self.data
def get_token(settings):
    """Return (access_token, token_type) for the Gphotos service.

    Reuses the token stored in TokensDB when present, refreshing it if
    stale; otherwise exchanges the configured access code for a new token.
    """
    tokens = TokensDB(settings.DB_FILE)
    stored = tokens.get_token(SERVICE)
    token_type = "Bearer"

    if not stored:
        # First run: trade the one-time access code for a token.
        log.info("Obtaining Gphotos token")
        access_token = exchange_token(
            tokens,
            settings.GPHOTOS_CLIENT_ID,
            settings.GPHOTOS_SECRET,
            settings.GPHOTOS_ACCESS_CODE
        )
        log.info("Token obtained")
        return access_token, token_type

    access_token = stored["access_token"]
    token_type = stored["token_type"]
    if tokens.needs_refresh(SERVICE, access_token):
        log.info("Refreshing Gphotos token...")
        access_token = refresh_access_token(
            tokens,
            settings.GPHOTOS_CLIENT_ID,
            settings.GPHOTOS_SECRET,
            stored["refresh_token"]
        )
        log.info("Token refreshed")
    return access_token, token_type