Example #1
    def validate(self, discretization, parallelization):

        # get parallelization configuration
        xpesize, ypesize, zpesize = parallelization.reshape(3)

        # check if number of cells is not smaller than block size
        if discretization['NX'] < self.bs * xpesize:
            message = 'mesh resolution NX / xpesize is smaller than block size'
            details = '%d / %d < %d.' % (discretization['NX'], xpesize,
                                         self.bs)
            helpers.error(message, details)
        if discretization['NY'] < self.bs * ypesize:
            message = 'mesh resolution NY / ypesize is smaller than block size'
            details = '%d / %d < %d.' % (discretization['NY'], ypesize,
                                         self.bs)
            helpers.error(message, details)
        if discretization['NZ'] < self.bs * zpesize:
            message = 'mesh resolution NZ / zpesize is smaller than block size'
            details = '%d / %d < %d.' % (discretization['NZ'], zpesize,
                                         self.bs)
            helpers.error(message, details)

        # check if number of blocks is not smaller than available threads
        blocks_x = discretization['NX'] / (self.bs * xpesize)
        blocks_y = discretization['NY'] / (self.bs * ypesize)
        blocks_z = discretization['NZ'] / (self.bs * zpesize)
        blocks = blocks_x * blocks_y * blocks_z
        if blocks < parallelization.threads:
            message = 'number of blocks is smaller than available threads: %d < %d.' % (
                blocks, parallelization.threads)
            details = 'Discretization: %s' % str(discretization)
            advice = 'Parallelization: %s ranks, %d threads' % (str(
                parallelization.reshape(3)), parallelization.threads)
            helpers.warning(message, details, advice)
            helpers.query('Continue with sub-optimal parallelization?')
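A quick numeric illustration of the constraint validate() enforces, using hypothetical values for the block size, mesh resolution and rank count:

# Hypothetical numbers for the block-size check in validate():
NX, bs, xpesize = 512, 32, 8      # cells in x, block size, ranks along x
assert NX >= bs * xpesize         # otherwise helpers.error() would be triggered
blocks_x = NX // (bs * xpesize)   # 512 // 256 = 2 blocks along x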
Example #2
    def simulation(self):

        # report config
        self.config.report()

        # query for progress
        helpers.query('Continue?')

        # check for consistency
        if self.params.restart and self.params.proceed:
            helpers.error(
                'Both \'-r\' (--restart) and \'-p\' (--proceed) were specified.',
                advice='Use either of the options, not both.')

        # report scheduler
        self.config.scheduler.report()

        # initial phase
        if self.params.restart:
            self.init()

        # proceed with the existing simulation (no modification in setup)
        if self.params.proceed:
            self.proceed()

        # recursive updating phase
        self.update()

        # query for progress
        helpers.query('Quit simulation?')
Example #3
def capture_pattern(device=None, backend=None):
    inst = get_instrument(device, backend=backend)
    for cmd in setup_cmds.strip().split('\n'):
        if cmd.endswith('?'):
            query(inst, cmd)
        else:
            write(inst, cmd)
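setup_cmds is defined elsewhere in that module; from its use it is presumably a newline-separated string of SCPI commands, where anything ending in '?' is sent as a query and everything else as a plain write. A hedged sketch with hypothetical commands:

setup_cmds = """
*RST
:ACQuire:STATe RUN
:ACQuire:STATe?
"""

for cmd in setup_cmds.strip().split('\n'):
    action = 'query' if cmd.endswith('?') else 'write'
    print(action, cmd)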
Example #4
    def do_select_dataset(self, idx):
        self.ds = "http://dbpedia.org/ontology/{}".format(
            self.ret[int(idx)][0])
        print("{} selected!".format(self.ds))
        print("Process dataset... please wait!")
        query(self.ds)
        print('Done!')
Example #5
    def proceed(self):

        # load MLMC simulation
        self.load()

        # report number of samples used so far
        self.config.samples.report()

        # distribute required samples
        self.config.scheduler.distribute()

        # query for progress
        helpers.query('Submit jobs?')

        # compute required samples
        self.run()

        # save MLMC simulation
        self.save()

        # for clusters: if non-interactive session -> exit
        if local.cluster and not self.params.interactive:
            print
            print ' :: INFO: Non-interactive mode specified -> exiting.'
            print '  : -> Run PyMLMC with \'-i\' option for an interactive mode.'
            print
            sys.exit()
Example #6
File: app.py Project: Domi0tech/BDO-Market
def details(data="default"):
    if data == "default":
        item_name = request.form.get("item_name")
        item = helpers.query(item_name, 1)
        item = item[0]
        item_name = item["Name"]
        price_history = db.execute(
            "SELECT prices.Price, Date FROM prices JOIN items ON (prices.Id = items.Id) WHERE items.Name = ? ORDER BY prices.Date ASC",
            item_name)
        print(price_history)
        print(400)
        return render_template("details.html",
                               item=item,
                               price_history=price_history)
    else:
        item_name = data
        item = helpers.query(item_name, 1)
        item = item[0]
        item_name = item["Name"]
        price_history = db.execute(
            "SELECT prices.Price, Date FROM prices JOIN items ON (prices.Id = items.Id) WHERE items.Name = ? ORDER BY prices.Date ASC",
            item_name)
        return render_template("details.html",
                               item=item,
                               price_history=price_history)
Example #7
def search_by_type(request, type_):
    """This is a search into either addons or libraries."""
    page = request.GET.get('page', 1)
    q = (request.GET.get('q', ''))
    try:
        data = query(q, type_, user=request.user, page=page)
    except EmptyPage:
        data = query(q, type_, user=request.user)
    data.update(q=q, type=type_)
    return render(request, 'results.html', data)
Example #8
def get_screenshot(filename=None, device=None, backend=None):
    inst = get_instrument(device, backend=backend)
    if not filename:
        filename = dt.now().replace(microsecond=0).isoformat('_').replace(':', '-') + '.png'
    if query(inst, "ACQuire:STATe?") not in ("STOP", "BRE"):
        write(inst, "STOP")
    if query(inst, "HCOPy:LANGuage?") != "PNG":
        write(inst, "HCOPy:LANGuage PNG")
    data = inst.query_raw("HCOPy:DATA?")
    data = ieee_488_2_block_data(data)
    with open(filename, 'wb') as f:
        f.write(data)
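ieee_488_2_block_data() is not shown in the snippet. Assuming it decodes the standard IEEE 488.2 definite-length block ('#', one digit giving the number of length digits, the length itself, then the payload), a possible sketch, not the original implementation:

def ieee_488_2_block_data(raw):
    # '#' + N (one digit) + N digits of payload length + payload bytes
    if raw[0:1] != b'#':
        raise ValueError('not an IEEE 488.2 block')
    ndigits = int(raw[1:2])
    length = int(raw[2:2 + ndigits])
    start = 2 + ndigits
    return raw[start:start + length]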
Example #9
def combined(request):
    """This aggregates the first results from add-ons and libraries."""
    q = request.GET.get('q', '')

    addons = query(q, user=request.user, type_='addon', limit=5)
    libs = query(q, user=request.user, type_='library', limit=5)
    addons.update(q=q,
            addons=addons['pager'].object_list,
            libraries=libs['pager'].object_list,
            total=addons.get('total', 0) + libs.get('total', 0)
            )
    return render(request, 'aggregate.html', addons)
Example #10
def results(request):
    """This aggregates the first results from add-ons and libraries."""
    q = alphanum_space(request.GET.get('q', ''))

    if q:
        addons = query(q, user=request.user, type_='addon', limit=5)
        libs = query(q, user=request.user, type_='library', limit=5)
        addons.update(q=q,
                addons=addons['pager'].object_list,
                libraries=libs['pager'].object_list
                )
        return render(request, 'aggregate.html', addons)
    else:
        return render(request, 'blank.html', {'page': 'search'})
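alphanum_space() is defined elsewhere; from its use here it presumably whitelists letters, digits and spaces in the raw query string. A hypothetical one-liner with that behaviour:

import re

def alphanum_space(s):
    return re.sub(r'[^A-Za-z0-9 ]', '', s)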
Example #11
def insert_file(col_id, file_path):
    file_resource_uuid = helpers.generate_uuid()
    file_name = col_id + ".png"
    file_resource_uri = "share://Histograms/" + file_name
    upload_resource_uuid = helpers.generate_uuid()
    upload_resource_uri = "<http://mu.semte.ch/services/file-service/files/{uuid}>".format(
        uuid=upload_resource_uuid)
    file_size = os.stat(file_path).st_size

    q = """
        PREFIX ext: <http://mu.semte.ch/vocabularies/ext/>
        PREFIX dct: <http://purl.org/dc/terms/>
        PREFIX nfo: <http://www.semanticdesktop.org/ontologies/2007/03/22/nfo#>
        PREFIX dbpedia: <http://dbpedia.org/ontology/>
        PREFIX nie: <http://www.semanticdesktop.org/ontologies/2007/01/19/nie#>
        PREFIX mu: <http://mu.semte.ch/vocabularies/core/>

        INSERT DATA {{
            GRAPH <http://mu.semte.ch/application> {{
                {uploadUri} a nfo:FileDataObject ;
                    mu:uuid "{uploadUuid}";
                    nfo:fileName "{fileName}";
                    nfo:fileSize "{size}"^^xsd:integer;
                    dct:created "{created}"^^xsd:dateTime;
                    dbpedia:fileExtension "png".
                <{resourceUri}> a nfo:FileDataObject ;
                    mu:uuid "{resourceUuid}";
                    nfo:fileName "{fileName}";
                    nie:dataSource {uploadUri};
                    dct:created "{created}"^^xsd:dateTime;
                    nfo:fileSize "{size}"^^xsd:integer;
                    dbpedia:fileExtension "png".
                    
            }}
        }}
        """.format(uploadUri=upload_resource_uri,
                   uploadUuid=upload_resource_uuid,
                   resourceUuid=file_resource_uuid,
                   resourceUri=file_resource_uri,
                   fileName=file_name,
                   created=datetime.now(
                       timezone('Europe/Brussels')).isoformat(),
                   size=file_size)

    print("Insert file")
    helpers.query(q)

    return upload_resource_uri
Example #12
def get_column_by_uuid(uuid):
    """
        Queries the database for all the data of the column with given uuid
        """

    query = """
            PREFIX ext: <http://mu.semte.ch/vocabularies/ext/>
            PREFIX mu: <http://mu.semte.ch/vocabularies/core/>
            PREFIX dct: <http://purl.org/dc/terms/>

            SELECT DISTINCT ?name, ?dataType, ?quantityKind, ?recordCount, ?missingCount, ?nullCount, ?min, ?max, ?mean, ?median, ?commonValues, ?unitSpecificInfo, ?unit, ?job{{
                GRAPH <http://mu.semte.ch/application> {{
                            ?column mu:uuid "{uuid}";
                                ext:name ?name;
                                ext:recordCount ?recordCount;
                                ext:missingCount ?missingCount;
                                ext:nullCount ?nullCount;
                                ext:min ?min;
                                ext:max ?max;
                                ext:mean ?mean;
                                ext:median ?median;
                                ext:commonValues ?commonValues.
                            ?job ext:column ?column.
                            OPTIONAL {{?column ext:dataType ?dataType.}}.
                            OPTIONAL {{?column ext:quantityKind ?quantityKind.}}.
                            OPTIONAL {{?column ext:unitSpecificInfo ?unitSpecificInfo.}}.
                            OPTIONAL {{?column ext:unit ?unit.}}.

                }}
            }}
            """.format(uuid=uuid)

    result = helpers.query(query)
    print("RESULT: ", result)
    return result
Example #13
def evaluate_theme(theme):
    """Calculate & insert topic-fingerprint for a given theme"""
    themes_query = build_topicscore_query(theme)
    try:
        topicscores = helpers.query(themes_query)["results"]["bindings"]
        #helpers.log(topicscores)
    except Exception as e:
        helpers.log("Querying SPARQL-endpoint failed:\n" + str(e))
        return
    topichash = {}
    for topicscore in topicscores:
        score = float(topicscore["score"]["value"])
        topicuri = topicscore["topic"]["value"]
        try:
            topichash[topicuri] += score
        except KeyError:
            topichash[topicuri] = score

    # weighting: divide by the sum of all scores (so that the topicprints sum to 1)
    weightedhash = {
        k: v / sum(topichash.values())
        for k, v in topichash.items()
    }

    try:
        helpers.update(build_topicprint_update_query(theme, weightedhash))
        helpers.log(
            'Calculated & inserted topic-fingerprint for theme "{}" ({} topicprints)'
            .format(theme, len(topichash)))
    except Exception as e:
        helpers.log("Querying SPARQL-endpoint failed, exiting:\n" + str(e))
        return
Example #14
def load_mail(uri):
    # this query will find the mail (if it exists)
    select_query = "SELECT DISTINCT ?uuid ?from ?to ?ready ?subject ?content\n"
    select_query += "WHERE \n{\n"
    select_query += "<" + str(uri) + "> <http://mail.com/from> ?from;\n"
    select_query += "a <http://mail.com/Mail>;\n"
    select_query += "<http://mail.com/content> ?content;\n"
    select_query += "<http://mail.com/subject> ?subject;\n"
    select_query += "<http://mail.com/to> ?to;\n"
    select_query += "<http://mail.com/ready> ?ready;\n"
    select_query += "<http://mu.semte.ch/vocabularies/core/uuid> ?uuid.\n"
    select_query += "}"

    # execute the query...
    result = helpers.query(select_query)

    # if the result array is empty we return an empty dict
    if len(result['results']['bindings']) < 1:
        return {}

    # I should probably check here but for a quick test application
    # it doesn't matter that much. If there is more than 1 result
    # that would indicate a data error
    bindings = result['results']['bindings'][0]

    # we extract an object
    mail = dict()
    mail['uuid'] = bindings['uuid']['value']
    mail['from'] = bindings['from']['value']
    mail['ready'] = bindings['ready']['value']
    mail['to'] = bindings['to']['value']
    mail['subject'] = bindings['subject']['value']
    mail['content'] = bindings['content']['value']

    return mail
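For reference, helpers.query() in these mu-semtech services returns the standard SPARQL 1.1 JSON results layout that load_mail() unpacks. A minimal, hypothetical example of that shape:

result = {
    "head": {"vars": ["uuid", "subject"]},
    "results": {"bindings": [
        {"uuid": {"type": "literal", "value": "a1b2c3"},
         "subject": {"type": "literal", "value": "Hello"}},
    ]},
}
print(result['results']['bindings'][0]['subject']['value'])  # -> Hello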
Example #15
def run():
    # Query sparql
    select_query = build_select_query()
    try:
        results = helpers.query(select_query)["results"]["bindings"]
    except Exception as e:
        helpers.log("Querying SPARQL-endpoint failed:\n" + str(e))
        return
    contents = {result["subject"]["value"]: result["text"]["value"] \
        for result in results}  # key names depend on the ?...-names in the query
    # prepare MALLET run (text from store -> files)
    mallet_input_dir = os.getenv('INPUT_PATH')
    os.makedirs(mallet_input_dir, exist_ok=True)
    fn_map = write_mallet_input(contents, mallet_input_dir)
    # Run MALLET
    try:
        mallet_command = "/start.sh"
        subprocess.run(mallet_command, check=True)  # check=True so a failure raises CalledProcessError
    except subprocess.CalledProcessError as e:
        helpers.log("Failed to run MALLET ...\n" + str(e))
    # Read in MALLET results from files
    mallet_output = mallet_tools.process_file(
        os.path.join(os.getenv('OUTPUT_PATH'), 'output.txt'))
    # Make a map of weights by subject (map back from uuid to subject-url)
    weights_by_subject = {fn_map[os.path.basename(path)]: topics \
        for nr, path, topics in mallet_output}
    insert_querys = build_insert_query(weights_by_subject)
    for q in insert_querys:
        try:
            helpers.log(q)
            helpers.update(q)
        except Exception as e:
            helpers.log("Querying SPARQL-endpoint failed:\n" + str(e))
Example #16
def get_document_by_uuid(uuid):
    query_str = construct_get_document_by_uuid(uuid)
    document_results = query(query_str)['results']['bindings']
    if not document_results:
        raise NoQueryResultsException("No document found by uuid '{}'".format(uuid))
    document_uri = document_results[0]["uri"]["value"]
    return document_uri
Example #17
def generate_integration_url(signinghub_session: SigningHubSession,
                             signflow_uri: str, piece_uri: str,
                             collapse_panels: bool):
    pieces = signing_flow.get_pieces(signflow_uri)
    piece = query_result_helpers.ensure_1(pieces)
    if piece["uri"] != piece_uri:
        raise exceptions.InvalidStateException(
            f"Piece <{piece_uri}> is not linked to signflow <{signflow_uri}>.")

    query_string = _query_signinghub_document.substitute(
        graph=sparql_escape_uri(APPLICATION_GRAPH),
        signflow=sparql_escape_uri(signflow_uri),
        piece=sparql_escape_uri(piece_uri))

    signinghub_document_result = query(query_string)
    signinghub_documents = query_result_helpers.to_recs(
        signinghub_document_result)
    signinghub_document = signinghub_documents[0]
    signinghub_package_id = signinghub_document["signinghub_package_id"]

    integration_url = signinghub_session.get_integration_link(
        signinghub_package_id,
        {
            "language": "nl-NL",
            # "user_email": "*****@*****.**", # Know through SSO login?
            # "callback_url":"https://web.signinghub.com/", # default configured fir the app.
            "collapse_panels": "true" if collapse_panels else "false",
            # "usercertificate_id": "31585" # Undocumented
        })

    return integration_url
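query_result_helpers is project code that is not shown here. Judging by its use, to_recs() flattens SPARQL JSON bindings into plain dicts and ensure_1() insists on exactly one record; hypothetical sketches:

def to_recs(result):
    return [{k: v["value"] for k, v in binding.items()}
            for binding in result["results"]["bindings"]]

def ensure_1(records):
    if len(records) != 1:
        raise Exception("expected exactly 1 record, got {}".format(len(records)))
    return records[0]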
Example #18
def get_physical_file(uri):
    """
    Fetches the physical file on disk that belongs to a certain
    virtual file object (virtual files can link to files that are not in the
    database, but every file in the database has a virtual file object that acts
    as an identifier).
    """
    # Get file based on ?file
    file_query = """
        PREFIX mu: <http://mu.semte.ch/vocabularies/core/>
        PREFIX nfo: <http://www.semanticdesktop.org/ontologies/2007/03/22/nfo#>
        PREFIX dbpedia: <http://dbpedia.org/ontology/>
        PREFIX dct: <http://purl.org/dc/terms/>
        PREFIX nie: <http://www.semanticdesktop.org/ontologies/2007/01/19/nie#>

        SELECT (?uuid as ?id) ?logical_file ?extension
        WHERE{{
            GRAPH <http://mu.semte.ch/application>{{
            {uri} a nfo:FileDataObject ;
                  dbpedia:fileExtension ?extension;
                  mu:uuid ?uuid .
            ?logical_file nie:dataSource {uri} .
            }}
        }}
        LIMIT 20
        """.format(uri=escape_helpers.sparql_escape_uri(uri))

    result = helpers.query(file_query)
    pprint(flask.jsonify(result))

    logical_file = extract_from_query(result, "logical_file")
    extension = extract_from_query(result, "extension")

    return logical_file, extension
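extract_from_query() is not included in these snippets; presumably it pulls the first value bound to a given variable out of the SPARQL JSON results. A hypothetical sketch:

def extract_from_query(result, var):
    bindings = result["results"]["bindings"]
    return bindings[0][var]["value"] if bindings else None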
Example #19
def user_post_feed(user_id, max_cursor=0):
    request_url = Constants.BASE_URL + Constants.USER_POST_FEED_ENDP.format(
        user_id, max_cursor) + helpers.query(Constants.DEVICE_VARS)
    # as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    # request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
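In these API wrappers helpers.query() appears to serialize a dict of device parameters into a URL query string (a different helper from the SPARQL helpers.query used elsewhere on this page). A hedged sketch of such a serializer; whether a leading '?' or '&' is needed depends on the endpoint templates:

from urllib.parse import urlencode

def query(params):
    # e.g. {'device_type': 'SM-G900F', 'os_version': '9'} -> 'device_type=SM-G900F&os_version=9'
    return urlencode(params)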
Example #20
def get_job_uri(uuid):
    """
    Queries the database for SchemaAnalysisJob objects that
    have the uuid as specified by "uuid".
    """
    job_query = """
        PREFIX ext: <http://mu.semte.ch/vocabularies/ext/>
        PREFIX mu: <http://mu.semte.ch/vocabularies/core/>
        PREFIX dct: <http://purl.org/dc/terms/>

        SELECT DISTINCT ?job, ?file, ?created WHERE {{
             GRAPH <http://mu.semte.ch/application> {{
                        ?job mu:uuid "{uuid}" ;
                        dct:created ?created ;
                         ext:file ?file ;
                           a ext:SchemaAnalysisJob.
             }}
        }}  OFFSET 0
            LIMIT 20
            """.format(uuid=uuid)

    result = helpers.query(job_query)

    file_uri = extract_from_query(result, "file")
    job_uri = extract_from_query(result, "job")

    return file_uri, job_uri
Example #21
def hashtag_search(text):
    request_url = Constants.BASE_URL + Constants.HASHTAG_SEARCH_ENDP.format(
        text) + helpers.query(Constants.DEVICE_VARS)
    # as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    # request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
Example #22
def get_following(target_user_id):
    request_url = Constants.BASE_URL + Constants.USER_FOLLOWING_FNDP.format(
        target_user_id) + helpers.query(Constants.DEVICE_VARS)
    # as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    # request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
Example #23
def get_live_feed(live_room_id):
    request_url = Constants.BASE_URL + Constants.LIVE_ROOM_ENDP.format(
        live_room_id) + helpers.query(Constants.DEVICE_VARS)
    # as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    # request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
Example #24
def get_mandatee_by_id(mandatee_id):
    query_str = construct_get_mandatee_by_id(mandatee_id)
    mandatee_results = query(query_str)['results']['bindings']
    if not mandatee_results:
        raise NoQueryResultsException(
            "No mandatee found by id '{}'".format(mandatee_id))
    return mandatee_results[0]["uri"]["value"]
Example #25
def get_signing_flow_by_uuid(uuid):
    query_str = construct_by_mu_uuid(uuid)
    signflow_results = query(query_str)['results']['bindings']
    if not signflow_results:
        raise NoQueryResultsException(
            "No signflow found by uuid '{}'".format(uuid))
    signflow_uri = signflow_results[0]["signflow"]["value"]
    return signflow_uri
Example #26
def run_batch(batch_size, graph):

    documents = list(
        map(
            lambda res: res['doc']['value'],
            list(
                helpers.query(construct_select_docs_query(
                    batch_size, graph))['results']['bindings'])))

    res = helpers.query(construct_list_doc_versions_query(
        documents, graph))['results']['bindings']

    res_by_doc = itertools.groupby(res, lambda res: res['doc']['value'])

    triples = []
    for doc_uri, results in res_by_doc:
        results = list(results)
        for i in range(len(results)):
            res = results[i]
            try:
                title = res['stuknummerVR']['value']
            except KeyError:
                title = res['title']['value']
            versioned_title = title + LATIN_ADVERBIAL_NUMERALS[int(
                res['num']['value'])]
            triples.append((
                escape_helpers.sparql_escape_uri(results[i]['ver']['value']),
                'dct:title',
                escape_helpers.sparql_escape_string(versioned_title),
            ))
            if i > 0:
                triples.append((
                    escape_helpers.sparql_escape_uri(
                        results[i]['ver']['value']),
                    'pav:previousVersion',
                    escape_helpers.sparql_escape_uri(
                        results[i - 1]['ver']['value']),
                ))
    if triples:
        query = construct_insert_triples(triples, graph)
        res = helpers.update(query)

    query = construct_migrate_docs(documents, graph)
    res = helpers.update(query)

    return documents
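LATIN_ADVERBIAL_NUMERALS is defined elsewhere in that migration script; judging from its use it maps a version number to the suffix appended to the document title. A hypothetical mapping (the real values may differ):

LATIN_ADVERBIAL_NUMERALS = {
    1: '',          # first version keeps the bare title
    2: 'bis',
    3: 'ter',
    4: 'quater',
    5: 'quinquies',
}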
Example #27
def get_user_info(user_id):
    request_url = Constants.BASE_URL + Constants.USER_INFO_ENDP.format(
        user_id) + helpers.query(Constants.DEVICE_VARS)
    #as_cp = ptts.signature_gen.generate_as_cp(request_url, helpers.get_timestamp())
    #request_url = request_url + "&as={:s}&cp={:s}".format(as_cp[0], as_cp[1])
    #request_url = request_url + "&as=a1qwert123&cp=cbfhckdckkde1&mas=01937dea4a12a8c410eb526555c121d44decec4c0ccc0c8666c61c"
    request_response = helpers.make_request(request_url, request_type="get")
    return request_response.json() if request_response else None
Example #28
def me_by_type(request, type_):
    if not request.user.is_authenticated():
        return redirect(reverse('search.results') + '?' +
                        request.META['QUERY_STRING'])
    q = request.GET.get('q', '')
    data = query(q, type_, user=request.user, filter_by_user=True)
    data.update(q=q, type=type_)
    return render(request, 'results.html', data)
Example #29
def me(request):
    if not request.user.is_authenticated():
        return redirect(reverse('search.combined') + '?' +
                        request.META['QUERY_STRING'])
    q = (request.GET.get('q', ''))
    data = query(q, user=request.user, filter_by_user=True)
    data.update(q=q)
    return render(request, 'aggregate.html', data)
Example #30
def get_mandatee(mandatee_uri):
    query_str = construct_get_mandatee(mandatee_uri)
    mandatee_results = query(query_str)['results']['bindings']
    if not mandatee_results:
        raise NoQueryResultsException(
            "No mandatee found by uri <{}>".format(mandatee_uri))
    mandatee = {k: v["value"] for k, v in mandatee_results[0].items()}
    return mandatee
Example #31
def reset_default_password():
    if hookenv.leader_get('default_admin_password_changed'):
        hookenv.log('Default admin password already changed')
        return

    # Cassandra ships with well known credentials, rather than
    # providing a tool to reset credentials. This is a huge security
    # hole we must close.
    try:
        # We need a big timeout here, as the cassandra user actually
        # springs into existence some time after Cassandra has started
        # up and is accepting connections.
        with helpers.connect('cassandra',
                             'cassandra',
                             timeout=120,
                             auth_timeout=120) as session:
            # But before we close this security hole, we need to use these
            # credentials to create a different admin account for the
            # leader, allowing it to create accounts for other nodes as they
            # join. The alternative is restarting Cassandra without
            # authentication, which this charm will likely need to do in the
            # future when we allow Cassandra services to be related together.
            helpers.status_set('maintenance',
                               'Creating initial superuser account')
            username, password = helpers.superuser_credentials()
            pwhash = helpers.encrypt_password(password)
            helpers.ensure_user(session, username, pwhash, superuser=True)
            helpers.set_unit_superusers([hookenv.local_unit()])

            helpers.status_set('maintenance',
                               'Changing default admin password')
            helpers.query(session, 'ALTER USER cassandra WITH PASSWORD %s',
                          cassandra.ConsistencyLevel.ALL, (host.pwgen(), ))
    except cassandra.AuthenticationFailed:
        hookenv.log('Default superuser account already reset')
        try:
            with helpers.connect():
                hookenv.log("Leader's superuser account already created")
        except cassandra.AuthenticationFailed:
            # We have no known superuser credentials. Create the account
            # the hard, slow way. This will be the normal method
            # of creating the service's initial account when we allow
            # services to be related together.
            helpers.create_unit_superuser_hard()

    hookenv.leader_set(default_admin_password_changed=True)
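In this charm helpers.query() wraps the DataStax driver rather than SPARQL. A rough sketch of such a wrapper, assuming the cassandra-driver API (hypothetical, not the charm's actual code):

from cassandra.query import SimpleStatement

def query(session, statement, consistency_level, args=None):
    stmt = SimpleStatement(statement, consistency_level=consistency_level)
    return session.execute(stmt, args)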
Example #32
def reset_default_password():
    if hookenv.leader_get('default_admin_password_changed'):
        hookenv.log('Default admin password already changed')
        return

    # Cassandra ships with well known credentials, rather than
    # providing a tool to reset credentials. This is a huge security
    # hole we must close.
    try:
        # We need a big timeout here, as the cassandra user actually
        # springs into existence some time after Cassandra has started
        # up and is accepting connections.
        with helpers.connect('cassandra', 'cassandra',
                             timeout=120, auth_timeout=120) as session:
            # But before we close this security hole, we need to use these
            # credentials to create a different admin account for the
            # leader, allowing it to create accounts for other nodes as they
            # join. The alternative is restarting Cassandra without
            # authentication, which this charm will likely need to do in the
            # future when we allow Cassandra services to be related together.
            helpers.status_set('maintenance',
                               'Creating initial superuser account')
            username, password = helpers.superuser_credentials()
            pwhash = helpers.encrypt_password(password)
            helpers.ensure_user(session, username, pwhash, superuser=True)
            helpers.set_unit_superusers([hookenv.local_unit()])

            helpers.status_set('maintenance',
                               'Changing default admin password')
            helpers.query(session, 'ALTER USER cassandra WITH PASSWORD %s',
                          cassandra.ConsistencyLevel.ALL, (host.pwgen(),))
    except cassandra.AuthenticationFailed:
        hookenv.log('Default superuser account already reset')
        try:
            with helpers.connect():
                hookenv.log("Leader's superuser account already created")
        except cassandra.AuthenticationFailed:
            # We have no known superuser credentials. Create the account
            # the hard, slow way. This will be the normal method
            # of creating the service's initial account when we allow
            # services to be related together.
            helpers.create_unit_superuser_hard()

    hookenv.leader_set(default_admin_password_changed=True)
Example #33
def query():
    """Example query: Returns all the triples in the application graph in a JSON
    format."""
    q = " SELECT *"
    q += " WHERE{"
    q += "   GRAPH <http://mu.semte.ch/application> {"
    q += "     ?s ?p ?o"
    q += "   }"
    q += " }"
    return flask.jsonify(helpers.query(q))
Example #34
def query():
    """Example query: Returns all the triples in the application graph in a JSON
    format."""
    q =  " SELECT *"
    q += " WHERE{"
    q += "   GRAPH <http://mu.semte.ch/application> {"
    q += "     ?s ?p ?o"
    q += "   }"
    q += " }"
    return flask.jsonify(helpers.query(q))
Example #35
def get_mandatee_by_email(mandatee_email):
    query_str = construct_get_mandatee_by_email(mandatee_email)
    mandatee_results = query(query_str)['results']['bindings']
    if not mandatee_results:
        raise NoQueryResultsException(
            "No mandatee found by e-mail address '{}'".format(mandatee_email))
    if len(mandatee_results) > 1:
        log("Multiple mandatees found for e-mail address '{}'. Picking one.".
            format(mandatee_email))
    mandatee = {k: v["value"] for k, v in mandatee_results[0].items()}
    return mandatee
Example #36
def __get_signflow_record(query_command: str):
    result = query(query_command)
    records = query_result_helpers.to_recs(result)
    record = query_result_helpers.ensure_1(records)

    record = {
        "id": record["signflow_id"],
        "uri": record["signflow"],
        "sh_package_id": record["sh_package_id"],
    }

    return record
Example #37
    def query(self):

        message = 'specify the required tolerance'
        hint = 'press ENTER to leave tol=%.1e' % self.tol
        tol = helpers.query(message,
                            hint=hint,
                            type=float,
                            default=self.tol,
                            format='%.1e',
                            exit=0)
        modified = tol != self.tol
        self.tol = tol
        return modified
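Here helpers.query() acts as an interactive prompt with a type, a default and a format. A rough Python 2 sketch consistent with that call (hypothetical; the real helper also handles the yes/no prompts seen in the other PyMLMC examples):

def query(message, hint=None, type=str, default=None, format='%s', exit=1):
    prompt = message + ((' [%s]: ' % hint) if hint else ': ')
    answer = raw_input(prompt).strip()
    return type(answer) if answer else default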
Example #38
File: scraping.py Project: ankitr/sec-ner
def initiate_indexes(connection):
    """Collects the indexes from the SEC server for processing over. Should be used for initialization."""
    logging.debug('Collecting indexes.')
    logging.debug('Checking if indexes directory exists.')
    if os.path.exists('./bin/indexes'):
        logging.debug('indexes directory exists. Asking for permission to remove.')
        if helpers.query('The indexes directory already exists. Would you like to remove it?'):
            sys.stdout.write('Deleting...\n\n')
            status = os.system('rm -rf ./bin/indexes')
            if status == 256:
                logging.error('User is not authorized to delete indexes.')
                sys.exit('Permission denied. Please run from a super user state. (Maybe sudo python secner [command]%s if you\'re an admin)\n' % (' inspect' if __main__.inspect else str()))
            elif status != 0:
                logging.error('Something went wrong while removing the indexes directory.')
                sys.exit('Something went wrong while removing the indexes directory.\n')
            logging.debug('indexes directory removed successfully.')
        else:
            logging.error('User permission not granted to remove indexes directory.')
            sys.exit('Process stopped by user.\n')
    else: logging.debug('indexes directory does not exist. Proceeding normally.')
    logging.debug('Making indexes directory.')
    sys.stdout.write('Setting up for index download.\n')
    status = os.system('mkdir ./bin/indexes')
    if status == 256:
        logging.error('User is not authorized to create an indexes directory.')
        sys.exit('Permission denied. Please run from a super user state. (Maybe sudo python secner [command]%s if you\'re an admin)\n' % (' inspect' if __main__.inspect else str()))
    elif status != 0:
        logging.error('Something went wrong while creating the indexes directory.')
        sys.exit('Something went wrong while creating the indexes directory.\n')
    logging.debug('New indexes directory created successfully.')
    logging.debug('Moving to indexes directory.')
    os.chdir('./bin/indexes')
    logging.debug('Checking for wget...')
    status, output = commands.getstatusoutput('wget')
    if status == 32512:
        logging.debug('wget is not installed.')
        sys.exit('wget (a prerequisite of this program) is not installed. Please install before running again.\n')
    logging.debug('wget is installed.')
    sys.stdout.write('Preparing to download the indexes for the server. Depending on your connection this may take a couple hours.\nDO NOT SEVER THE CONNECTION DURING THIS TIME PERIOD. LEAVE THE MACHINE RUNNING FOR THIS TIME.\n\n\n')
    if not helpers.query('Are you ready to begin?'):
        logging.debug('Process ceased by user. Download not started')
        sys.exit('Process stopped by user.\n')
    sys.stdout.write('\n\n\n')
    sys.stdout.write('Starting')
    for i in range(3):
        sys.stdout.write('.')
        sys.stdout.flush()
        time.sleep(0.75)
    sys.stdout.write('\n\n')
    logging.info('Starting server initiation.')
    if __main__.inspect:
        logging.debug('Inspecting.')
        status = os.system('wget -r ftp://ftp.sec.gov/edgar/daily-index/')
    else:
        status = os.system('wget -r --quiet ftp://ftp.sec.gov/edgar/daily-index/*')
        logging.debug(status)
        logging.debug(output)
    if status == 1024:
        logging.error('Unable to connect to the server from wget.')
        sys.exit('wget failed to connect with the server. Please check your network connection or try again later.\n')
    elif status == 256:
        logging.error('Permission denied to copy indexes.')
        sys.exit('Permission denied. Please run from a super user state. (Maybe sudo python secner [command]%s if you\'re an admin)\n' % (' inspect' if __main__.inspect else str()))
    elif status != 0:
        logging.critical('wget failed. Need to clean up indexes.')
        sys.exit('\nCritical failure. Run clean before attempting to download again.\n')
    logging.info('Indexes created.')
    sys.stdout.write('Created indexes. Thank you for your patience.')