Example #1
def main():
    parser = create_parser(description=__doc__)
    parser.add_argument(
        "-t",
        dest="template",
        default=template,
        type=unicode,
        help=("The template to use. " "(default: '%s', possible keys: %s)" % (template, possible_attrs)),
    )
    args = parser.parse_args()
    load_from_args(args)
    instances = get_instances(args)

    query = User.all_q()

    if instances is not None:
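        # keep only users with an unexpired membership in one of the given instances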
        instance_ids = [instance.id for instance in instances]
        query = query.filter(
            User.memberships.any(
                and_(
                    Membership.instance_id.in_(instance_ids),
                    or_(Membership.expire_time == None, Membership.expire_time > datetime.utcnow()),
                )
            )
        )

    for user in query:
        userinfo = user_info(user)
        s = args.template.format(**userinfo)
        print s.encode("utf-8")
Example #2
def main():
    parser = create_parser(description=__doc__)
    parser.add_argument("-t",
                        dest="template",
                        default=template,
                        type=unicode,
                        help=("The template to use. "
                              "(default: '%s', possible keys: %s)" %
                              (template, possible_attrs)))
    args = parser.parse_args()
    load_from_args(args)
    instances = get_instances(args)

    query = User.all_q()

    if instances is not None:
        instance_ids = [instance.id for instance in instances]
        query = query.filter(
            User.memberships.any(
                and_(
                    Membership.instance_id.in_(instance_ids),
                    or_(Membership.expire_time == None,
                        Membership.expire_time > datetime.utcnow()))))

    for user in query:
        userinfo = user_info(user)
        s = args.template.format(**userinfo)
        print s.encode('utf-8')
Example #3
def main():
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('-f',
                        dest='force',
                        default=False,
                        action='store_true',
                        help="force deletion without asking for confirmation")
    args = parser.parse_args()

    if not args.force:
        input = raw_input('Delete all data? No backup will be done! '
                          'If so type "yes": ')
        if input != 'yes':
            print 'Answer not "yes", but: "%s"\nAborting.' % input
            exit(1)

    config = config_from_args(args)
    engine = get_engine(config, echo=True)
    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, e.g. PostgreSQL, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.

    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
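            # a named, column-less constraint is enough for DropConstraint below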
            fks.append(ForeignKeyConstraint((), (), name=fk['name']))
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
Example #4
def main():
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('-f', dest='force', default=False, action='store_true',
                        help="force deletion without asking for confirmation")
    args = parser.parse_args()

    if not args.force:
        input = raw_input('Delete all data? No backup will be done! '
                          'If so type "yes": ')
        if input != 'yes':
            print 'Answer not "yes", but: "%s"\nAborting.' % input
            exit(1)

    config = config_from_args(args)
    engine = get_engine(config, echo=True)
    conn = engine.connect()

    # the transaction only applies if the DB supports
    # transactional DDL, e.g. PostgreSQL, MS SQL Server
    trans = conn.begin()

    inspector = reflection.Inspector.from_engine(engine)

    # gather all data first before dropping anything.
    # some DBs lock after things have been dropped in
    # a transaction.

    metadata = MetaData()

    tbs = []
    all_fks = []

    for table_name in inspector.get_table_names():
        fks = []
        for fk in inspector.get_foreign_keys(table_name):
            if not fk['name']:
                continue
            fks.append(
                ForeignKeyConstraint((), (), name=fk['name'])
                )
        t = Table(table_name, metadata, *fks)
        tbs.append(t)
        all_fks.extend(fks)

    for fkc in all_fks:
        conn.execute(DropConstraint(fkc))

    for table in tbs:
        conn.execute(DropTable(table))

    trans.commit()
Example #5
def main():
    parser = create_parser(description=__doc__)
    args = parser.parse_args()
    load_from_args(args)
    instances = get_instances(args)

    for instance in instances:
        proposals = Proposal.all_q(instance=instance)
        proposals = sorted(proposals, key=lambda x: x.comment_count(),
                           reverse=True)

        print instance.label
        for proposal in proposals:
            print "%s: %s" % (proposal.comment_count(), proposal.title)
Example #6
def main():
    parser = create_parser(description=__doc__)
    args = parser.parse_args()
    load_from_args(args)
    instances = get_instances(args)

    for instance in instances:
        proposals = Proposal.all_q(instance=instance)
        proposals = sorted(proposals,
                           key=lambda x: x.comment_count(),
                           reverse=True)

        print instance.label
        for proposal in proposals:
            print "%s: %s" % (proposal.comment_count(), proposal.title)
Example #7
def main():
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument(
        "username",
        help=("The name of the user who should become a global admin"))
    args = parser.parse_args()
    load_from_args(args)

    user = User.find(args.username)
    if user is None:
        print 'Cannot find user %s\n' % args.username
        parser.exit()

    # the global membership is the one not tied to any instance
    global_membership = [membership for membership in user.memberships if
                         membership.instance is None][0]
    admin_group = Group.by_code(Group.CODE_ADMIN)
    global_membership.group = admin_group
    meta.Session.commit()
Example #8
def main():
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('--dump',
                        default=None,
                        required=True,
                        help="Path to the SQL dump file.")
    args = parser.parse_args()

    # check and cleanup dump file
    dump_path = os.path.join(os.getcwd(), args.dump)
    if not os.path.exists(dump_path):
        parser.error('SQL dump file "%s" does not exist.' % args.dump)

    # get an engine to get the driver type and connection details.
    engine = get_engine(config_from_args(args))
    drivername = engine.url.drivername

    error = False
    if drivername == 'postgresql':
        # use the psql command line script for imports.
        # pg_dump by default emits COPY ... FROM STDIN statements
        # which the psycopg2 driver can't handle.
        # pg_dump can emit inserts (--inserts), but that's
        # dead slow to import.
        vars = engine.url.__dict__.copy()
        vars['dump_path'] = dump_path
        command = ('psql -U {username} -h {host} -p {port} -'
                   'd {database} -f {dump_path}').format(**vars)
        print 'Executing command: %s' % command
        if engine.url.password is not None:
            print 'Prefixing it with PGPASSWORD="******"'
            command = 'PGPASSWORD="%s" %s' % (engine.url.password, command)

        error = subprocess.call(command, shell=True)
    else:
        print(
            'Action for driver "%s" is not defined.\n'
            "Note: sqlite3 has a non-standard executescript() method."
            % drivername)
        exit(1)

    if error:
        print 'Process exited with Error: %s' % error
        exit(error)
Example #9
def main():
    parser = create_parser(description=__doc__, use_instance=False)
    parser.add_argument('--dump', default=None, required=True,
                        help="Path to the SQL dump file.")
    args = parser.parse_args()

    # check and cleanup dump file
    dump_path = os.path.join(os.getcwd(), args.dump)
    if not os.path.exists(dump_path):
        parser.error('SQL dump file "%s" does not exist.' % args.dump)

    # get an engine to get the driver type and connection details.
    engine = get_engine(config_from_args(args))
    drivername = engine.url.drivername

    error = False
    if drivername == 'postgresql':
        # use the psql command line script for imports.
        # pg_dump by default emits COPY ... FROM STDIN statements
        # which the psycopg2 driver can't handle.
        # pg_dump can emit inserts (--inserts), but that's
        # dead slow to import.
        vars = engine.url.__dict__.copy()
        vars['dump_path'] = dump_path
        command = ('psql -U {username} -h {host} -p {port} -'
                   'd {database} -f {dump_path}').format(**vars)
        print 'Executing command: %s' % command
        if engine.url.password is not None:
            print 'Prefixing it with PGPASSWORD="******"'
            command = 'PGPASSWORD="%s" %s' % (engine.url.password, command)

        error = subprocess.call(command, shell=True)
    else:
        print ('Action for driver "%s" is not defined.\n'
               "Note: sqlite3 has a non-standard executescript() method."
               % drivername)
        exit(1)

    if error:
        print 'Process exited with Error: %s' % error
        exit(error)
Example #10
def parse_args():
    p = common.create_parser()
    return p.parse_args()
Example #11
    queue_handler: QueueHandler = QueueHandler(queue)
    top_log.addHandler(queue_handler)
    top_log.setLevel(level)

    formatter: logging.Formatter = logging.Formatter(fmt=fmt, style=style)
    console: logging.StreamHandler = logging.StreamHandler(stream=stdout)
    console.setFormatter(formatter)

    # the listener drains the queue and hands each record to the console handler
    listener: QueueListener = QueueListener(queue, console)

    return top_log, listener


if __name__ == "__main__":

    PARSER: ArgumentParser = create_parser()
    PARSER.add_argument("-p",
                        "--processes",
                        type=int,
                        required=False,
                        default=1)
    PARSER.add_argument(
        "-ed",
        "--emission_delay",
        type=float,
        required=False,
        default=1.0,
        help="The time (in seconds) between sending messages",
    )
    ARGS: Namespace = PARSER.parse_args()
Example #12
def parse_args():
    p = common.create_parser()
    p.add_argument('--all-tenants', action='store_true')
    return p.parse_args()
Example #13
def parse_args():
    p = common.create_parser()
    return p.parse_args()
Example #14
    elif FLAGS.model == 'mlp':
        model = bag_of_words_MLP_model
    elif FLAGS.model == 'rnn':
        model = rnn_model
    else:
        raise ValueError('unknown model')

    classifications = predict(x_query, query_lengths, model, FLAGS)
    for i, query in enumerate(queries):
        print('The model classifies "{}" as a member of the class {}.'.format(
            query, classes['class'][classifications[i]]))


# Run script ##############################################
if __name__ == "__main__":
    parser = create_parser()
    parser.add_argument(
        '--embed-dim',
        type=int,
        default=EMBEDDING_DIM,
        help='Number of dimensions in the embedding, '
        'i.e. the number of nodes in the hidden embedding layer (default: {})'.
        format(EMBEDDING_DIM))
    parser.add_argument('model_dir', help='The directory of the trained model')
    parser.add_argument('model',
                        help='Which model, e.g. perceptron, mlp, etc...')
    parser.add_argument(
        '--query-file',
        default=QUERY_FILENAME,
        help='Name of the queries file (default: {})'.format(QUERY_FILENAME))
    parser.add_argument(
Example #15
def parse_args():
    p = common.create_parser()
    p.add_argument('--all-tenants',
                   action='store_true')
    return p.parse_args()
Example #16
def main():
    os.chdir(os.path.join(os.path.dirname(__file__), os.pardir))

    parser = common.create_parser(True)
    args = parser.parse_args()

    # Clone depot_tools
    if not os.path.exists("depot_tools"):
        subprocess.check_call([
            "git", "clone",
            "https://chromium.googlesource.com/chromium/tools/depot_tools.git",
            "depot_tools"
        ])

    # Clone Skia
    match = re.match('(m\\d+)(?:-([0-9a-f]+)(?:-([1-9][0-9]*))?)?',
                     args.version)
    if not match:
        raise Exception('Expected --version "m<ver>-<sha>", got "' +
                        args.version + '"')
    branch = "chrome/" + match.group(1)
    commit = match.group(2)
    iteration = match.group(3)

    if os.path.exists("skia"):
        os.chdir("skia")
        if subprocess.check_output(["git", "branch", "--list", branch]):
            print("> Advancing", branch)
            subprocess.check_call(["git", "checkout", "-B", branch])
            subprocess.check_call(["git", "fetch"])
            subprocess.check_call(
                ["git", "reset", "--hard", "origin/" + branch])
        else:
            print("> Fetching", branch)
            subprocess.check_call(["git", "reset", "--hard"])
            subprocess.check_call([
                "git", "fetch", "origin", branch + ":remotes/origin/" + branch
            ])
            subprocess.check_call(["git", "checkout", branch])
    else:
        print("> Cloning", branch)
        subprocess.check_call([
            "git", "clone", "https://skia.googlesource.com/skia", "--quiet",
            "--branch", branch, "skia"
        ])
        os.chdir("skia")

    # Checkout commit
    print("> Checking out", commit)
    subprocess.check_call(
        ["git", "-c", "advice.detachedHead=false", "checkout", commit])

    # Apply patches
    subprocess.check_call(["git", "reset", "--hard"])
    for x in pathlib.Path(os.pardir, 'patches').glob('*.patch'):
        print("> Applying", x)
        subprocess.check_call(["git", "apply", str(x)])

    # git deps
    if 'windows' == common.system():
        env = os.environ.copy()
        env['PYTHONHTTPSVERIFY'] = '0'
        subprocess.check_call(["python", "tools/git-sync-deps"], env=env)
    else:
        subprocess.check_call(["python2", "tools/git-sync-deps"])

    return 0
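
Note: none of the examples above show create_parser itself. A minimal sketch of what such a helper might look like, assuming it simply wraps argparse.ArgumentParser and that use_instance only toggles an extra option (the option names below are illustrative, not taken from the projects above):

import argparse


def create_parser(description=None, use_instance=True):
    # hypothetical sketch of the shared command line parser
    parser = argparse.ArgumentParser(description=description)
    # illustrative common option; the real helpers may add different ones
    parser.add_argument('-c', dest='config', default=None,
                        help="path to the configuration file")
    if use_instance:
        # scripts that operate per instance accept an instance selector
        parser.add_argument('-i', dest='instance', default=None,
                            help="restrict the run to a single instance")
    return parser

Callers then extend the returned parser as in Example #1: parser = create_parser(description=__doc__), followed by their own parser.add_argument(...) calls.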