def test_get_first(self):
    """util.get_first returns the first element of the value at a key,
    None when the key is missing or its value is empty/None, and the
    explicit default when given."""
    # Renamed the loop variable from `dict` to `d` — it shadowed the builtin.
    for d, expected in (
        ({}, None),
        ({9: 9}, None),        # key 0 missing
        ({0: None}, None),
        ({0: []}, None),
        ({0: [3]}, 3),
        ({0: (3, 4, 5)}, 3),   # works on tuples as well as lists
    ):
        self.assertEqual(expected, util.get_first(d, 0))

    # An explicit default is returned when the key is missing.
    self.assertEqual('default', util.get_first({}, 0, 'default'))
def alter_count(self):
    '''
    Update the `count` field of the last row (highest uid) in id_queue.

    Effectively runs:
        UPDATE id_queue set count=new_count WHERE 1 ORDER BY uid DESC LIMIT 1

    The new value comes from get_first() — presumably the current head
    record fetched elsewhere; verify against the caller.  NOTE(review):
    get_first's contract is not visible in this file.
    :return:
    '''
    first = get_first()
    # MySQL-specific form: ORDER BY uid DESC LIMIT 1 restricts the update
    # to the single most recently inserted row.  `count` is backquoted
    # because it collides with a SQL function name.
    query = "UPDATE id_queue set `count`=(%s) WHERE 1 ORDER BY uid DESC LIMIT 1"
    self.cursor.execute(query, (first.get('count'), ))
    self.cnx.commit()
def main(args):
    """Read the first nonempty stripped line from stdin and print one
    tab-separated marker row per genotype column in that line.

    Each output row is: marker name ('SNP_<i>'), chromosome (hard-coded
    '1'), genetic distance in morgans (hard-coded '0.0'), and base-pair
    position (i+1).  Looks like PLINK .map-style output — confirm against
    the consumer.

    :param args: unused command-line args (kept for interface compatibility)
    """
    line = util.get_first(util.gen_nonempty_stripped_lines(sys.stdin))
    # First whitespace-separated token is the OTU name; the rest are genotypes.
    otu_name, genotype_string = line.split(None, 1)
    genotypes = genotype_string.split()
    for i, genotype in enumerate(genotypes):
        name = 'SNP_' + str(i)
        chromosome = '1'
        morgans = '0.0'
        bases = i + 1
        row = [name, chromosome, morgans, bases]
        # Parenthesized print: the original used the Python-2-only statement
        # form; the single-argument call form behaves identically on 2 and 3.
        print('\t'.join(str(x) for x in row))
def post(self):
    """Task handler: refetch a single post and backfeed its responses.

    Request params:
      type: optional; only 'event' is accepted as a non-empty value
      post_id: required id of the post/event to refetch

    Python 2 code (note the `except Exception, e` syntax below).
    """
    logging.debug('Params: %s', self.request.params)

    # NOTE(review): shadows the builtin `type`; left as-is.
    type = self.request.get('type')
    if type:
        assert type in ('event', )

    source = util.load_source(self)
    if not source or source.status == 'disabled' or 'listen' not in source.features:
        # Dropping (returning 200) so the task queue does not retry.
        logging.error('Source not found or disabled. Dropping task.')
        return
    logging.info('Source: %s %s, %s', source.label(), source.key.string_id(),
                 source.bridgy_url(self))

    post_id = util.get_required_param(self, 'post_id')

    source.updates = {}

    try:
        if type == 'event':
            activities = [source.gr_source.get_event(post_id)]
        else:
            activities = source.get_activities(fetch_replies=True, fetch_likes=True,
                                               fetch_shares=True, activity_id=post_id,
                                               user_id=source.key.id())

        if not activities or not activities[0]:
            logging.info('Post %s not found.', post_id)
            return
        # We asked for exactly one activity id, so exactly one must come back.
        assert len(activities) == 1, activities
        self.backfeed(source, activities={activities[0]['id']: activities[0]})

        obj = activities[0].get('object') or activities[0]
        in_reply_to = util.get_first(obj, 'inReplyTo')
        if in_reply_to:
            # This post is a reply: queue a discover task for its parent.
            parsed = util.parse_tag_uri(in_reply_to.get(
                'id', ''))  # TODO: fall back to url
            if parsed:
                util.add_discover_task(source, parsed[1])
    except Exception, e:
        code, body = util.interpret_http_exception(e)
        if (code and (code in source.RATE_LIMIT_HTTP_CODES
                      or code in ('400', '404')
                      or int(code) / 100 == 5)
                or util.is_connection_failure(e)):
            # Known transient/permanent API failure: log and return an error
            # status so the task queue retries/backs off without a stack trace.
            logging.error('API call failed; giving up. %s: %s\n%s', code, body, e)
            self.abort(util.ERROR_HTTP_RETURN_CODE)
        else:
            raise
def dispatch_request(self):
    """Flask task handler: refetch a single post and backfeed its responses.

    Request params:
      type: optional; only 'event' is accepted as a non-empty value
      post_id: required id of the post/event to refetch (KeyError -> 400
               if missing, since request.values is indexed directly)

    Returns '' to drop the task, 'OK' on success.
    """
    logger.debug(f'Params: {list(request.values.items())}')
    # These HTTP codes are treated as transient by the surrounding
    # error-handling machinery (set on flask.g for this request).
    g.TRANSIENT_ERROR_HTTP_CODES = ('400', '404')

    # NOTE(review): shadows the builtin `type`; left as-is.
    type = request.values.get('type')
    if type:
        assert type in ('event', )

    source = g.source = util.load_source()
    if not source or source.status == 'disabled' or 'listen' not in source.features:
        # Dropping (returning 200 with empty body) so the queue won't retry.
        logger.error('Source not found or disabled. Dropping task.')
        return ''
    logger.info(
        f'Source: {source.label()} {source.key_id()}, {source.bridgy_url()}')

    post_id = request.values['post_id']

    source.updates = {}

    if type == 'event':
        activities = [source.gr_source.get_event(post_id)]
    else:
        activities = source.get_activities(fetch_replies=True, fetch_likes=True,
                                           fetch_shares=True, activity_id=post_id,
                                           user_id=source.key_id())

    if not activities or not activities[0]:
        logger.info(f'Post {post_id} not found.')
        return ''
    # We asked for exactly one activity id, so exactly one must come back.
    assert len(activities) == 1, activities
    activity = activities[0]
    activities = {activity['id']: activity}

    # STATE: propagate tasks created by backfeed() here get started before
    # their Response entities get created/updated, so they fail with
    # https://github.com/snarfed/bridgy/issues/237 , but that's a red herring,
    # it's really that activities_json and urls_to_activity are empty
    # is poll transactional somehow, and this isn't?
    # no more transactional tasks. https://github.com/googleapis/python-tasks/issues/26
    # they're still supported in the new "bundled services" thing, but that
    # seems like a dead end.
    # https://groups.google.com/g/google-appengine/c/22BKInlWty0/m/05ObNEdsAgAJ
    self.backfeed(source, responses=activities, activities=activities)

    obj = activity.get('object') or activity
    in_reply_to = util.get_first(obj, 'inReplyTo')
    if in_reply_to:
        # This post is a reply: queue a discover task for its parent.
        parsed = util.parse_tag_uri(in_reply_to.get(
            'id', ''))  # TODO: fall back to url
        if parsed:
            util.add_discover_task(source, parsed[1])

    return 'OK'
def post(self):
    """Task handler (older variant): refetch a post and backfeed responses.

    Unlike the util.load_source() variant, this one loads the source from a
    'source_key' request param via an ndb urlsafe key.

    Request params:
      type: optional; only 'event' is accepted as a non-empty value
      source_key: required urlsafe ndb key of the source entity
      post_id: required id of the post/event to refetch

    Python 2 code (note the `except Exception, e` syntax below).
    """
    logging.debug('Params: %s', self.request.params)

    # NOTE(review): shadows the builtin `type`; left as-is.
    type = self.request.get('type')
    if type:
        assert type in ('event',)

    key = util.get_required_param(self, 'source_key')
    source = ndb.Key(urlsafe=key).get()
    if not source or source.status == 'disabled' or 'listen' not in source.features:
        # Dropping (returning 200) so the task queue does not retry.
        logging.error('Source not found or disabled. Dropping task.')
        return
    logging.info('Source: %s %s, %s', source.label(), source.key.string_id(),
                 source.bridgy_url(self))

    post_id = util.get_required_param(self, 'post_id')

    source.updates = {}

    try:
        if type == 'event':
            activities = [source.gr_source.get_event(post_id)]
        else:
            activities = source.get_activities(
                fetch_replies=True, fetch_likes=True, fetch_shares=True,
                activity_id=post_id, user_id=source.key.id())

        if not activities or not activities[0]:
            logging.info('Post %s not found.', post_id)
            return
        # We asked for exactly one activity id, so exactly one must come back.
        assert len(activities) == 1, activities
        self.backfeed(source, activities={activities[0]['id']: activities[0]})

        # NOTE(review): indexes ['object'] directly — this variant assumes the
        # activity always has an object, unlike the `.get('object') or` form
        # used elsewhere in this file.
        in_reply_to = util.get_first(activities[0]['object'], 'inReplyTo')
        if in_reply_to:
            # This post is a reply: queue a discover task for its parent.
            parsed = util.parse_tag_uri(in_reply_to.get('id', ''))  # TODO: fall back to url
            if parsed:
                util.add_discover_task(source, parsed[1])
    except Exception, e:
        code, body = util.interpret_http_exception(e)
        if (code and (code in util.HTTP_RATE_LIMIT_CODES
                      or code in ('400', '404')
                      or int(code) / 100 == 5)
                or util.is_connection_failure(e)):
            # Known transient/permanent API failure: log and return an error
            # status so the task queue retries/backs off without a stack trace.
            logging.error('API call failed; giving up. %s: %s\n%s', code, body, e)
            self.abort(util.ERROR_HTTP_RETURN_CODE)
        else:
            raise
def insert_first(self):
    """Seed id_queue with an initial row, but only when the table is empty."""
    # alter table id_queue AUTO_INCREMENT = 1
    self.cursor.execute("select count(*) from id_queue")
    row_total = self.cursor.fetchone()[0]
    if row_total != 0:
        # Table already has rows; nothing to seed.
        return
    seed = get_first()
    insert_sql = ("INSERT INTO id_queue"
                  "(id, next_id, count, area, next_area)"
                  "VALUES (%s, %s, %s, %s, %s)")
    values = tuple(
        seed.get(field)
        for field in ('id', 'next_id', 'count', 'area', 'next_area'))
    self.cursor.execute(insert_sql, values)
    self.cnx.commit()
def post(self):
    """Task handler (Python 3 webapp2 variant): refetch a single post and
    backfeed its responses.

    Request params:
      type: optional; only 'event' is accepted as a non-empty value
      post_id: required id of the post/event to refetch
    """
    logging.debug('Params: %s', list(self.request.params.items()))

    # NOTE(review): shadows the builtin `type`; left as-is.
    type = self.request.get('type')
    if type:
        assert type in ('event', )

    source = self.source = util.load_source(self)
    if not source or source.status == 'disabled' or 'listen' not in source.features:
        # Dropping (returning 200) so the task queue does not retry.
        logging.error('Source not found or disabled. Dropping task.')
        return
    logging.info('Source: %s %s, %s', source.label(), source.key_id(),
                 source.bridgy_url(self))

    post_id = util.get_required_param(self, 'post_id')

    source.updates = {}

    if type == 'event':
        activities = [source.gr_source.get_event(post_id)]
    else:
        activities = source.get_activities(fetch_replies=True, fetch_likes=True,
                                           fetch_shares=True, activity_id=post_id,
                                           user_id=source.key_id())

    if not activities or not activities[0]:
        logging.info('Post %s not found.', post_id)
        return
    # We asked for exactly one activity id, so exactly one must come back.
    assert len(activities) == 1, activities
    activity = activities[0]
    activities = {activity['id']: activity}
    self.backfeed(source, responses=activities, activities=activities)

    obj = activity.get('object') or activity
    in_reply_to = util.get_first(obj, 'inReplyTo')
    if in_reply_to:
        # This post is a reply: queue a discover task for its parent.
        parsed = util.parse_tag_uri(in_reply_to.get(
            'id', ''))  # TODO: fall back to url
        if parsed:
            util.add_discover_task(source, parsed[1])