Code Example #1
def check_file(filename, sexp_cache, args):
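    """Execute a Coq file via SerAPI, collecting every vernacular command, its AST, and the names of its proofs."""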
    coq_filename = os.path.splitext(filename)[0] + '.v'
    fields = coq_filename.split(os.path.sep)
    loc2code = get_code(open(coq_filename, 'rb').read())
    file_data = {
        'filename': os.path.sep.join(fields[2:]),
        'coq_project': fields[1],
        'vernac_cmds': [],
        'num_extra_cmds': None,
        'proofs': [],
    }
    meta = open(filename).read()

    with SerAPI(args.timeout, args.debug) as serapi:
        # set the path
        file_data['vernac_cmds'].extend(set_paths(meta, serapi, sexp_cache))
        file_data['num_extra_cmds'] = len(file_data['vernac_cmds'])

        # extract the coq code
        coq_code = extract_code(meta, loc2code)

        # process the coq code
        for num_executed, (code_line, tags) in enumerate(coq_code):
            if 'PROOF_NAME' in tags:
                if tags['PROOF_NAME'] not in file_data['proofs']:
                    file_data['proofs'].append(tags['PROOF_NAME'])
            # execute until the proof editing mode starts
            if args.debug:
                print('%d: %s' % (num_executed, code_line))
            _, ast = serapi.execute(code_line, return_ast=True)
            file_data['vernac_cmds'].append(
                (code_line, tags['VERNAC_TYPE'], sexp_cache.dump(ast)))

    return file_data
Code Example #2
File: extract_proof.py Project: brando90/TacTok
def get_proof(sexp_cache, args):
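    """Re-execute the Coq file with SerAPI and extract the data for the single proof named in args.proof."""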
    coq_filename = os.path.splitext(args.file)[0] + '.v'
    fields = coq_filename.split(os.path.sep)
    loc2code = get_code(open(coq_filename, 'rb').read())
    meta = open(args.file).read()
    coq_code = extract_code(meta, loc2code)
    file_data = json.load(open(os.path.join(args.data_path, args.file[13:-5] + '.json')))

    with SerAPI(args.timeout, args.debug) as serapi:
        num_extra_cmds = len(set_paths(meta, serapi, sexp_cache))
        
        # process the coq code
        proof_start_lines = []
        in_proof = False
        for num_executed, (code_line, tags) in enumerate(coq_code):
            assert code_line == file_data['vernac_cmds'][num_extra_cmds + num_executed][0]
            if 'PROOF_NAME' in tags and tags['PROOF_NAME'] == args.proof:  # the proof ends
                serapi.pop() 
                line_nb = proof_start_lines[-1]
                proof_data = record_proof(num_extra_cmds, line_nb, coq_code[line_nb + 1:], sexp_cache, serapi, args)
                break
            # execute the code
            if args.debug:
                print('%d: %s' % (num_executed, code_line))
            serapi.execute(code_line)
            if serapi.has_open_goals():
                if not in_proof:  # the proof starts
                    in_proof = True
                    proof_start_lines.append(num_executed)
                    serapi.push()
            else:
                in_proof = False

    return proof_data 
Code Example #3
File: languages.py Project: ia03/vummer
def run_code(arg, input_data, attachment, lang_id, channel_id):
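    """Submit the code for execution with the given stdin and post its status, output, and errors to the channel."""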
    code = get_code(arg, attachment)
    print('Running code: ', code.decode())
    submission = api.submission.submit(client, code, lang_id,
        stdin=input_data.encode())
    status = submission.status
    output = submission.stdout
    errors = submission.stderr
    compile_output = submission.compile_output
    if output:
        output = output.decode()
    if errors:
        errors = errors.decode()
    if compile_output:
        compile_output = compile_output.decode()

    message = 'Status: ' + status['description'] + '\n'

    if output:
        message += 'Output: ```\n' + output + '\n```'
    else:
        message += 'No output sent.\n'
    if errors:
        message += 'Errors: ```\n' + errors + '\n```'
    if compile_output:
        message += ('Compiler output: ```\n' + compile_output
            + '\n```\n')

    message += stats(submission.time, submission.memory)
    send_message(channel_id, message)
Code Example #4
File: check_proofs.py Project: princeton-vl/CoqGym
def check_file(filename, sexp_cache, args):
    print("\n" + filename)
    coq_filename = os.path.splitext(filename)[0] + ".v"
    fields = coq_filename.split(os.path.sep)
    loc2code = get_code(open(coq_filename, "rb").read())
    file_data = {
        "filename": os.path.sep.join(fields[2:]),
        "coq_project": fields[1],
        "vernac_cmds": [],
        "num_extra_cmds": None,
        "proofs": [],
    }
    meta = open(filename).read()

    with SerAPI(args.timeout, args.debug) as serapi:
        # set the path
        file_data["vernac_cmds"].extend(set_paths(meta, serapi, sexp_cache))
        file_data["num_extra_cmds"] = len(file_data["vernac_cmds"])

        # extract the coq code
        coq_code = extract_code(meta, loc2code)

        # process the coq code
        for num_executed, (code_line, tags) in enumerate(coq_code):
            if "PROOF_NAME" in tags:
                if tags["PROOF_NAME"] not in file_data["proofs"]:
                    file_data["proofs"].append(tags["PROOF_NAME"])
            # execute until the proof editing mode starts
            if args.debug:
                print("%d: %s" % (num_executed, code_line))
            _, ast = serapi.execute(code_line, return_ast=True)
            file_data["vernac_cmds"].append(
                (code_line, tags["VERNAC_TYPE"], sexp_cache.dump(ast)))

    return file_data
Code Example #5
File: views.py Project: profkumarr/bug-binder
def forget(request):
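    """Email a password-reset code to the user identified by email or username."""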
    # Comment out in production
    if settings.EMAIL_HOST_PASSWORD == "":
        raise ValueError(
            "Email password is missing. Set password in EMAIL_HOST_PASSWORD at settings.py")

    email = request.POST.get("email")
    try:
        user = User.objects.filter(
            Q(email=email) | Q(username=email)
        ).first()
        code = get_code()
        if Code.objects.filter(user=user).count() > 0:
            obj = Code.objects.filter(user=user).first()
        else:
            obj = Code()
        obj.code = get_hash(code)
        obj.user = user
        obj.save()
        username = user.username
        subject = "Bugbinder | Reset Password."
        message = f"Dear {username},\nYou recently requested to reset your password for your Bugbinder account.\n\nCODE: {code}\n\nIf you didn't request a password reset, please ignore this email.\n\nThanks,\nBugbinder"
        async_send_mail(subject, message, settings.EMAIL_HOST_USER, user.email)
        return JsonResponse({'status': 200})
    except Exception:
        return JsonResponse({'status': 403})
Code Example #6
def get_proof(args):
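    """Extract every proof in the file that also appears in args.file0, dumping each one to its own JSON file."""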

    coq_filename = os.path.splitext(args.file)[0] + '.v'
    fields = coq_filename.split(os.path.sep)
    loc2code = get_code(open(coq_filename, 'rb').read())
    meta = open(args.file).read()
    coq_code = extract_code(meta, loc2code)
    with open(os.path.join(args.data_path, args.file[13:-5] + '.json'),
              "r") as json_data:
        file_data = json.load(json_data)
    with open(args.file0, "r") as json_data:
        proof_data0 = json.load(json_data)
    proof_data0 = [tempdic['name'] for tempdic in proof_data0['proofs']]
    for tempproof in file_data['proofs']:
        args.proof = tempproof
        if tempproof not in proof_data0:
            continue
        if os.path.isfile(os.path.join(dirname, args.proof + '.json')):  # tempproof != 'loc_unconstrained_satisf':
            continue
        db_path = os.path.join(dirname, args.proof + '-sexp_cache')
        sexp_cache = SexpCache(db_path)

        with SerAPI(args.timeout, args.debug) as serapi:
            num_extra_cmds = len(set_paths(meta, serapi, sexp_cache))

            # process the coq code
            proof_start_lines = []
            in_proof = False
            for num_executed, (code_line, tags) in enumerate(coq_code):
                assert code_line == file_data['vernac_cmds'][num_extra_cmds + num_executed][0]
                if 'PROOF_NAME' in tags and tags['PROOF_NAME'] == args.proof:  # the proof ends
                    serapi.pop()
                    line_nb = proof_start_lines[-1]

                    proof_data = record_proof(num_extra_cmds, line_nb,
                                              coq_code[line_nb + 1:],
                                              sexp_cache, serapi, args)
                    if proof_data is not None:
                        dump(proof_data, args)
                    break
                # execute the code
                if args.debug:
                    print('%d: %s' % (num_executed, code_line))
                serapi.execute(code_line)
                if serapi.has_open_goals():
                    if not in_proof:  # the proof starts
                        in_proof = True
                        proof_start_lines.append(num_executed)
                        serapi.push()
                else:
                    in_proof = False

    return
Code Example #7
File: hashmaker.py Project: lerry/fileshare
    def has_value(self, hash_value):
        '''check if there is a file with the given hash value'''
        with self.connect() as conn:
            cur = conn.cursor()
            # parameterized query avoids SQL injection (the qmark placeholder assumes an sqlite3-style driver)
            cur.execute('SELECT * FROM hash_table WHERE hash=?', (hash_value,))
            result = cur.fetchall()
            if result:
                return result[0][1].encode(utils.get_code())
            else:
                return False
Code Example #8
File: languages.py Project: ia03/vummer
def test_code(arg, attachment, lang_id, problem_name, channel_id):
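    """Run the code against each test case of the named problem and report the judged status for every case."""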
    code = get_code(arg, attachment)
    print('Running test code: ', code.decode())
    cases = get_problem(problem_name).cases
    message = ''
    for stdin in cases:
        expected_output = cases[stdin]
        submission = api.submission.submit(client, code, lang_id,
            stdin=stdin.encode(), expected_output=expected_output.encode())
        status = submission.status
        message += 'Status: ' + status['description'] + '\n'
        message += stats(submission.time, submission.memory)
    send_message(channel_id, message)
Code Example #9
File: cli.py Project: caioariede/docker-run-build
def main(image, output, no_assert_hostname, mount):
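    """Run code read from stdin inside a container based on `image` and, on success, commit the result back to an image."""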
    click.secho('\nUpdating image {} (to: {})'.format(
        image, output if output else 'same image'),
                fg='green')

    if not output:
        output = image

    with warnings.catch_warnings():
        warnings.simplefilter("ignore")

        cli = dockerapi.get_cli(assert_hostname=not no_assert_hostname)

        # cache old options
        options = u.get_old_options(cli, image)

        # read code from stdin
        code = u.get_code()

        # execute code inside a container
        click.secho('Running code..., output:', fg='green')
        container_id = u.run_code_in_container(
            cli, image, code, mount, entrypoint=options['entrypoint'])

        # get logs
        exitcode = cli.wait(container=container_id)

        logs = cli.logs(container_id, stdout=True, stderr=True)

        if logs:
            if exitcode < 1:
                click.echo(logs)
            else:
                click.secho(logs, fg='red')
        else:
            click.secho('Nothing', bold=True)

        # commit running container to image
        if exitcode < 1:
            cli.commit(container=container_id, repository=output)
            cli.stop(container=container_id)

        cli.remove_container(container=container_id)

        # restore container's entrypoint and cmd
        if exitcode < 1:
            u.restore_image_options(cli, output, options)
Code Example #10
    def __init__(self, scope: Construct, id: str, props: LambdaProps,
                 **kwargs) -> None:
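        """Define the approval, DynamoDB-status, config-rule, and replication Lambda functions used by the stack."""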
        super().__init__(scope, id)

        common_runtimes = dict(runtime=_lambda.Runtime.PYTHON_3_7,
                               handler='index.handler',
                               timeout=Duration.seconds(30))

        self.email_approval = _lambda.Function(
            self,
            'RespondEmailApproval',
            code=_lambda.Code.inline(get_code('lambda_approval.js')),
            runtime=_lambda.Runtime.NODEJS_12_X,
            handler='index.handler',
            timeout=Duration.seconds(30),
            environment={'DYNAMOTABLE': props.table_name})
        self.send_email_approval = _lambda.Function(
            self,
            'SendEmailApproval',
            code=_lambda.Code.inline(get_code('sendEmailApproval.js')),
            runtime=_lambda.Runtime.NODEJS_12_X,
            handler='index.handler',
            timeout=Duration.seconds(30),
            environment={
                'SNSTOPIC': props.sns_topic_arn,
                'DYNAMOTABLE': props.table_name,
                'APIURL': props.api
            })

        self.my_lambda = _lambda.Function(
            self,
            'custom_config_ES',
            code=_lambda.Code.inline(get_code('restrict_es_custom_config.py')),
            **common_runtimes)

        self.check_status_dynamo = _lambda.Function(
            self,
            'CheckStatus',
            code=_lambda.Code.inline(get_code('check_dynamo_status.py')),
            **common_runtimes,
            environment={'DYNAMOTABLE': props.table_name})
        self.replicate_to_global = _lambda.Function(
            self,
            'replicate_stream_global',
            code=_lambda.Code.from_inline(get_code('index.py')),
            **common_runtimes)
        self.stream_lambda_source(props.table, self.replicate_to_global)

        self.restric_es_policy = _lambda.Function(
            self,
            'RestricESpolicy',
            **common_runtimes,
            code=_lambda.Code.inline(get_code('restrict_es_policy.py')),
            environment={'DYNAMOTABLE': props.table_name})

        self.restric_rds_policy = _lambda.Function(
            self,
            'RestricRDS',
            **common_runtimes,
            code=_lambda.Code.inline(get_code('restrict_rds.py')),
            # environment={
            #     'DYNAMOTABLE' : props.table_name
            # }
        )

        self.custom_config_rds = _lambda.Function(
            self,
            'custom_config_RDS',
            code=_lambda.Code.inline(get_code('public_rds_custom_config.py')),
            **common_runtimes)

        self.add_role_restric_es()
        self.add_role_restrict_rds()
Code Example #11
    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
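        """Wire up the clickstream pipeline: Kinesis stream and producer Lambda, Firehose to S3/Glue, and an Elasticsearch sink."""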
        super().__init__(scope, id, **kwargs)

        # Kinesis to lambda
        self.stream_lambda = kinesis_lambda.KinesisStreamsToLambda(
            self,
            'clickstream',
            lambda_function_props=_lambda.FunctionProps(
                runtime=_lambda.Runtime.PYTHON_3_7,
                handler='index.lambda_handler',
                code=_lambda.Code.inline(
                    get_code('send_data_to_firehose.py'))),
            kinesis_stream_props=kinesis.StreamProps(
                stream_name='clickstream',
                retention_period=core.Duration.days(1),
                shard_count=4),
            kinesis_event_source_props=lambda_sources.KinesisEventSourceProps(
                starting_position=_lambda.StartingPosition.TRIM_HORIZON,
                batch_size=1))

        # Lambda to produce data
        self.produce_fake_data = _lambda.Function(
            self,
            'produce_data',
            runtime=_lambda.Runtime.PYTHON_3_7,
            timeout=core.Duration.seconds(90),
            handler='index.lambda_handler',
            code=_lambda.Code.inline(get_code('produce_data.py')),
            environment={
                'STREAM_NAME': self.stream_lambda.kinesis_stream.stream_name
            })
        self.stream_lambda.kinesis_stream.grant_read_write(
            self.produce_fake_data)

        # EventBridge to activate my function above
        self.event_rule = events.Rule(
            self,
            'scheduledRule',
            schedule=events.Schedule.expression('rate(1 minute)'))
        self.event_rule.add_target(
            targets.LambdaFunction(self.produce_fake_data))

        # S3 Bucket
        self.bucket = s3.Bucket(self,
                                'data-clicks-lake',
                                removal_policy=core.RemovalPolicy.DESTROY,
                                auto_delete_objects=True)

        # Glue
        self.glue_db_analytical = glue.Database(
            self,
            'analytic_clickstream',
            database_name='clickstream_db',
            location_uri=None,
        )

        self.glue_table_analytical = glue.Table(
            self,
            'analytical-table',
            table_name='analytical-table',
            columns=[
                glue_column('custid', 'int'),
                glue_column('trafficfrom', 'string'),
                glue_column('url', 'string'),
                glue_column('device', 'string'),
                glue_column('touchproduct', 'int'),
                glue_column('trans_timestamp', 'string')
            ],
            database=self.glue_db_analytical,
            data_format=glue.DataFormat.PARQUET,
            bucket=self.bucket,
            s3_prefix='kinesis/',
        )

        # Firehose
        iam_role_firehose_analytical = self.create_firehose_role()
        self.bucket.grant_read_write(iam_role_firehose_analytical)

        firehose_props = FirehoseProps(
            bucket=self.bucket,
            role=iam_role_firehose_analytical,
            stream=self.stream_lambda.kinesis_stream,
            glue_db=self.glue_db_analytical,
            glue_table=self.glue_table_analytical)

        self.firehose = FirehoseLib(self, 'firehose_clickstream',
                                    firehose_props)

        # Elasticsearh
        self.es_domain = ElasticsearchLib(self,
                                          'ES-clickstream-domain').es_domain

        # Lambda to send data to Elasticsearch
        self.send_data_to_elasticsearch = lambda_python.PythonFunction(
            self,
            'clickstream_to_es',
            entry='./analytics_ml_flow/lambda/lambda_with_requirements/',
            handler='handler',
            timeout=core.Duration.seconds(180),
            index='Kinesis_ES.py',
            environment={
                'ES_HOST_HTTP': self.es_domain.domain_endpoint,
                'ES_INDEX': 'clickstream',
                'ES_IND_TYPE': 'transactions',
                'ES_REGION': 'us-west-2',
            })
        self.es_domain.grant_index_read_write('clickstream',
                                              self.send_data_to_elasticsearch)
        self.es_domain.grant_read_write(self.send_data_to_elasticsearch)

        stream_source = lambda_sources.KinesisEventSource(
            self.stream_lambda.kinesis_stream,
            starting_position=_lambda.StartingPosition.TRIM_HORIZON,
            batch_size=1)

        self.stream_lambda.kinesis_stream.grant_read(
            self.send_data_to_elasticsearch)
        self.send_data_to_elasticsearch.add_event_source(stream_source)

        # Glue Crawler
        crawler_role = self.create_crawler_permissions()
        glue_props = GlueCrawlerProps(bucket=self.bucket, role=crawler_role)
        self.glue_crawler = GlueCrawlerLib(self, 'glueCrawler', glue_props)