def parse(self):
    """Parses the request arguments.

    Returns a ``(parsed_kwargs, raw_kwargs)`` pair: ``parsed_kwargs``
    maps each argument's destination name to its converted value (or
    its default when the argument is absent); ``raw_kwargs`` maps the
    original argument name to its raw representation when one exists.
    """
    parsed_kwargs = {}
    raw_kwargs = {}
    # self.argmap maps argument names to argument descriptor objects;
    # iteritems() is the py2/py3 compatibility helper used elsewhere here.
    for argname, argobj in iteritems(self.argmap):
        # An argument may declare an alternate destination key via .dest.
        dest = argobj.dest if argobj.dest is not None else argname
        parsed_value = self.parse_arg(argname, argobj)
        if parsed_value is not self.MISSING:
            try:
                # Coerce the raw request value to the declared type.
                parsed_kwargs[dest] = self.convert(parsed_value, argobj.type)
            except Exception as e:
                msg = ("The parameter '%s' specified in the request "
                       "URI is not supported. %s" % (argname, e))
                try:
                    # abort(400) raises (presumably a Flask/werkzeug
                    # HTTPException -- confirm); it is caught immediately
                    # below so the conversion error text can be attached.
                    abort(400)
                except:
                    exc_type, exc_value, tb = sys.exc_info()
                    exc_value.data = msg
                    # Re-raise with tb.tb_next so the abort() frame itself
                    # is trimmed from the reported traceback.
                    reraise(exc_type, exc_value, tb.tb_next)
        else:
            # Argument absent from the request: fall back to its default.
            parsed_kwargs[dest] = argobj.default
        raw_value = argobj.raw_value(parsed_kwargs[dest])
        if raw_value is not None:
            raw_kwargs[argname] = raw_value
    return parsed_kwargs, raw_kwargs
def handle_error(self):
    """Report the currently active exception and terminate.

    Click control-flow exceptions, and any exception while ``self.debug``
    is set, are re-raised with the current frame trimmed from the
    traceback; anything else is printed to stderr and the process exits
    with status 1.
    """
    etype, evalue, trace = sys.exc_info()
    is_click_exc = isinstance(evalue, (click.ClickException, click.Abort))
    if self.debug or is_click_exc:
        reraise(etype, evalue, trace.tb_next)
    sys.stderr.write(u"\nError: %s\n" % evalue)
    sys.exit(1)
def proxy_old_api(path):
    """Forward *path* to the legacy API backend.

    If the backend cannot be reached (``IOError``), raise a 502 error
    whose ``data`` attribute carries a description of the failure.
    """
    try:
        return proxy_request(path, proxy_host, proxy_port,
                             proxy_prefix, proxy_auth)
    except IOError as io_error:
        detail = ("502 Bad Gateway. %s ('%s:%d')"
                  % (io_error, proxy_host, proxy_port))
        try:
            # abort() raises; caught right away to attach the message.
            abort(502)
        except:
            etype, evalue, trace = sys.exc_info()
            evalue.data = detail
            # tb_next trims the abort() frame from the traceback.
            reraise(etype, evalue, trace.tb_next)
def copy_table(ctx, table, from_conn, to_conn, pk=None):
    """Copy all rows of ``table`` from ``from_conn`` to ``to_conn``.

    When ``pk`` (a primary-key column) is given, the copy resumes after
    the highest pk already present in the destination and streams rows
    in pk-ordered windows of ``ctx.chunk``.  When ``ctx.pg_copy`` is set
    and the destination is a psycopg2 dialect, rows are loaded through
    ``pg_bulk_insert`` (PostgreSQL COPY) instead of plain INSERTs.
    """
    # Use the fast COPY path only when asked for AND the destination
    # dialect is psycopg2.
    use_pg_copy = False
    if hasattr(ctx, 'pg_copy') and ctx.pg_copy:
        if hasattr(to_conn.dialect, 'psycopg2_version'):
            use_pg_copy = True

    insert_query = table.insert()
    select_query = select([table])
    count_query = select([func.count()]).select_from(table)

    if pk is not None:
        # Resume support: only count rows beyond the destination's
        # current maximum pk.
        min_pk = to_conn.execute(select([func.max(pk)])).scalar()
        if min_pk is not None:
            count_query = count_query.where(pk > min_pk)
        # Fix: use a distinct loop variable so the generator expression
        # no longer shadows the ``pk`` parameter (was ``for pk in ...``).
        select_query = select_query.order_by(
            *(order_by_func(col) for col in get_primary_keys(table))
        )

    # Fix: local renamed from the misspelled ``total_lenght``.
    total_length = from_conn.execute(count_query).scalar()
    if total_length == 0:
        return
    select_query = select_query.execution_options(stream_results=True)

    def log(progress):
        # Single-line progress display: '\r' rewinds, nl=False keeps it.
        percentage = blue("%s/%s" % (progress, total_length))
        message = yellow('\r copy') + ' ~> table %s (%s)'
        ctx.log(message % (table.name, percentage), nl=False)

    def fetch_stream():
        # Yields one generator of rows per fetched page.
        def gen_pagination_with_pk(chunk):
            # Build BETWEEN windows of size ``chunk`` covering
            # (destination max pk, source max pk] inclusively.
            max_pk_query = select([func.max(pk)])
            min_pk_query = select([func.min(pk)])
            max_pk = from_conn.execute(max_pk_query).scalar() or 0
            min_pk = from_conn.execute(min_pk_query).scalar() or 0
            # Resume after the destination's max pk when it has rows.
            min_pk = to_conn.execute(max_pk_query).scalar() or (min_pk - 1)
            # Bug fix: the ranges previously stopped at ``max_pk``
            # exclusive (range(min_pk + 1, max_pk, chunk)), silently
            # dropping the rows of the final partial window -- e.g. a
            # single new row produced no window at all.  Extending both
            # bounds by one step guarantees ``max_pk`` is covered.
            left_seq = range(min_pk + 1, max_pk + 1, chunk)
            right_seq = range(min_pk + chunk, max_pk + chunk + 1, chunk)
            for min_id, max_id in zip(left_seq, right_seq):
                yield select_query.where(pk.between(min_id, max_id))

        if pk is not None:
            queries = gen_pagination_with_pk(ctx.chunk)
        else:
            queries = [select_query]

        progress = 0
        for query in queries:
            page = 0
            while True:
                q = query.offset(page * ctx.chunk).limit(ctx.chunk)
                rows = from_conn.execute(q)
                if rows.rowcount == 0:
                    break
                progress += rows.rowcount
                # The source may have grown since total_length was
                # computed; clamp so the display never overshoots.
                if progress > total_length:
                    progress = total_length
                log(progress)
                yield (i for i in rows)
                page += 1
        ctx.log("")

    if not use_pg_copy:
        for rows in fetch_stream():
            to_conn.execute(insert_query, list(rows))
    else:
        from oar.lib.psycopg2 import pg_bulk_insert
        columns = None
        for rows in fetch_stream():
            if columns is None:
                # Derive the column list from the first fetched row,
                # then put that row back at the front of the stream.
                first = next(rows, None)
                columns = ["%s" % k for k in first.keys()]
                rows = chain((first,), rows)
            try:
                with to_conn.begin():
                    cursor = to_conn.connection.cursor()
                    pg_bulk_insert(cursor, table, rows, columns,
                                   binary=ctx.pg_copy_binary)
            except:
                # Re-raise with the current frame trimmed from the
                # traceback (py2/py3-compatible six-style reraise).
                exc_type, exc_value, tb = sys.exc_info()
                reraise(exc_type, exc_value, tb.tb_next)