Example #1
 def preprocess(self):
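     """Filter self.all_data_file down to sentences that pass judge_zh and write them to self.all_data_clean."""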
     contents = []
     with open(self.all_data_file, "r", encoding="utf-8") as f:
         for line in f:
             line = line.strip().lower()
             if judge_zh(line):
                 sentences = sentence_seg(line)
                 for sen in sentences:
                     if judge_zh(sen):
                         contents.append(sen)
     update_file(contents, self.all_data_clean)
Example #2
    def updateStreamView(self, boss):
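        """Write the current boss, wipe/win counts, and progress to the stream-view text files, then rebuild the live HTML page."""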
        if boss in BOSS_MAP:
            boss = BOSS_MAP[boss]

        utils.update_file(r'%s\cur_boss.txt' % self.dir, boss)

        sql = "SELECT timestamp FROM raid_events WHERE boss=? AND type='WIPE'"
        self.cur.execute(sql, (boss, ))
        wipes = self.cur.fetchall()
        utils.update_file(r'%s\boss_wipe.txt' % self.dir, len(wipes))

        today = datetime.today().strftime('%Y-%m-%d')
        wipes_today = list()
        for timestamp, in wipes:
            if today in timestamp:
                wipes_today.append(timestamp)
        utils.update_file(r'%s\boss_wipe_today.txt' % self.dir,
                          len(wipes_today))

        sql = "SELECT timestamp FROM raid_events WHERE boss=? AND type='WIN'"
        self.cur.execute(sql, (boss, ))
        wins = self.cur.fetchall()
        utils.update_file(r'%s\boss_kill.txt' % self.dir, len(wins))

        sql = "SELECT min(prog) FROM raid_events WHERE boss=?"
        self.cur.execute(sql, (boss, ))
        prog = str(self.cur.fetchone()[0]) + '%'
        utils.update_file(r'%s\boss_prog.txt' % self.dir, prog)

        raid_type = 'PROG'
        if len(wins) > 0:
            raid_type = 'FARM'

        if raid_type == 'PROG':
            h1 = 'WIPES'
            c1 = len(wipes_today)
            h2 = 'BEST'
            c2 = prog
        else:
            h1 = 'WINS'
            c1 = len(wins)
            h2 = 'WIPES'
            c2 = len(wipes_today)

        with open(self.html_file_base, 'r') as b:
            html = b.read()
        with open(self.html_file_live, 'w') as l:
            l.write(html % (raid_type, boss, h1, c1, h2, c2))
Example #3
    def get_known_words(self):
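        """Merge the words from every .txt dictionary under all_dicts_path into the known-words file."""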
        file_names = os.listdir(self.all_dicts_path)
        known_words = []

        for name in file_names:
            if name.endswith(".txt"):
                dict_file = os.path.join(self.all_dicts_path, name)
                words = get_lines(dict_file)
                self.logger.info("file:{}, num of words:{}".format(
                    name, len(words)))
                update_file(words, dict_file)
                known_words.extend(words)

        update_file(known_words, self.known_words_file)
        self.logger.info("num of known words: {}".format(len(known_words)))
Example #4
def add_item_bid():
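    """Record a new bid on an item and persist the updated items list."""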
    item_id = request.json.get('item_id', 0)
    bid_amount = request.json.get('bid_amount', 0)
    user_id = request.json.get('user_id', 0)
    # do something useful with this data
    items = get_items()
    for i in items:
        if i['id'] == item_id:
            i['bids'].append({
                'user_id': user_id,
                'amount': bid_amount,
                'id': 5
            })
            update_file(items)
            return Response(dumps({'msg': 'Created'}), status=201)
    return Response(dumps({'msg': 'This item does not exist'}), status=404)
Example #5
    def _upload(self, project_slug, repository_name, repository_branch, resource_path, import_file_path):
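        """Rename the import file to carry the resource's extension, upload it to Crowdin, and log the outcome."""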
        renamed_import_file_path = import_file_path + os.path.splitext(resource_path)[1]
        os.rename(import_file_path, renamed_import_file_path)
        crowdin_resource_path = resource_path

        d = {
            "operation": "ResourceUpload",
            "resource_full_path": os.path.join(repository_name, resource_path),
            "status_code": "N/A",
            "project_slug": project_slug,
            "crowdin_resource_path": crowdin_resource_path,
            "new_strings": "N/A",
            "mod_strings": "N/A",
            "del_strings": "N/A"
            }
        if utils.update_file(self._crowdin_project_key, project_slug, repository_branch, crowdin_resource_path, renamed_import_file_path):
            os.rename(renamed_import_file_path, renamed_import_file_path + '_crowdin_imported')
            d['results'] = 'SUCCESS'
            logger.info("ExecStats='{}'.".format(json.dumps(d)))
            return True
        else:
            os.rename(renamed_import_file_path, renamed_import_file_path + '_import_failed')
            d['results'] = 'FAILURE'
            logger.info("ExecStats='{}'.".format(json.dumps(d)))
            return False
Example #6
    def post(self):
        '''Add a file to the cache (done on reads), or update it if it already
        exists in the cache (done on writes).
        Request format:
        {
            'file_name'
            'file_content'
            'file_version'
        }
        '''

        print('post received, cache size is: ', file_map.count())

        if not request.is_json:
            abort(400)
        data = request.json
        file_content = data['file_content']
        file_name = data['file_name']
        file_version = data['file_version']
        # check whether the file already exists in the cache
        # if the cache is full, evict the least recently used file
        # create the file in the cache
        # add it to the timestamp map

        if file_map.find_one({'file_name': file_name}) is None:
            if cache_is_full():
                LRU = get_LRU_file()
                file_map.remove({'file_name': LRU})
                delete_file(LRU, cf.CACHE_FILE_PATH)

        try:
            update_file(file_name, cf.CACHE_FILE_PATH, file_content)
            file_map.update_one(
                {'file_name': file_name},
                {
                    '$set':{
                        'file_name': file_name,
                        'timestamp': datetime.now(),
                        'file_version': file_version
                    }
                },
                upsert=True
            )
        except Exception:
            abort(403)
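
        # Assumption: the original snippet returned nothing; an empty 200
        # response is added here so the Flask view has a valid return value.
        return '', 200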
Example #7
def revision_form_submit(request, form, code):
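    """Validate the revision form, push the edited code to the temp repo, and save the revision."""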
    dajax = Dajax()
    form = RevisionForm(deserialize_form(form))

    dajax.remove_css_class('#revision-form textarea', 'error')
    dajax.remove('.error-message')

    if form.is_valid():

        commit_message = form.cleaned_data['commit_message']
        username, email = request.user.username, request.user.email

        # push changes to temp repo
        # update_file returns True if the push is success.
        commit_sha = utils.update_file(
            request.session['filepath'],
            commit_message,
            base64.b64encode(code),
            [username, email],
            main_repo=False,
        )

        if commit_sha is not None:
            # everything is fine

            # save the revision info in database
            rev = TextbookCompanionRevision(
                example_file_id=request.session['example_file_id'],
                commit_sha=commit_sha,
                commit_message=commit_message,
                committer_name=username,
                committer_email=email,
            )
            rev.save(using='scilab')

            dajax.alert(
                'submitted successfully! \nYour changes will be visible after review.'
            )
            dajax.script('$("#submit-revision-wrapper").trigger("close")')
    else:
        for error in form.errors:
            dajax.add_css_class('#id_{0}'.format(error), 'error')
        for field in form:
            for error in field.errors:
                message = '<div class="error-message">* {0}</div>'.format(
                    error)
                dajax.append('#id_{0}_wrapper'.format(field.name), 'innerHTML',
                             message)
        # non field errors
        if form.non_field_errors():
            message = '<div class="error-message"><small>{0}</small></div>'.format(
                form.non_field_errors())
            dajax.append('#non-field-errors', 'innerHTML', message)

    return dajax.json()
Example #8
def push_revision(request, code):
    """
    code: from code editor on review interface
    """
    dajax = Dajax()
    revision = TextbookCompanionRevision.objects.using('scilab').get(
        id=request.session['revision_id'])

    print('pushing to repo')
    utils.update_file(revision.example_file.filepath,
                      revision.commit_message,
                      base64.b64encode(code),
                      [revision.committer_name, revision.committer_email],
                      branch='master',
                      main_repo=True)

    print('update push_status')
    revision.push_status = True
    revision.save()

    dajax.alert('pushed successfully!')
    dajax.script('location.reload()')

    return dajax.json()
Example #9
    final_line = original_line.replace(to_replace_from_level, new_from_level)
    final_line = final_line.replace(to_replace_to_level, new_to_level)
    return final_line


# put those line editing functions into the dictionary that utils uses to
# actually do the editing
utils.edit_line_dict["refinement"] = edit_line_refinement

# ==============================================================================
#
# The actual work is done here!!!
#
# ==============================================================================
config_updates = [
    utils.CheckLine("directory:outputs", "dir"),
    #   utils.CheckLine("directory:logs", "dir"),
    utils.CheckLine("snapshot-epochs", "epochs"),
    #   utils.CheckLine("refinement", "none"),
    utils.CheckLine("auni-stop", "float"),
    utils.CheckLine("max-dark-matter-level", "int"),
    utils.CheckLine("sf:min-level", "int")
]
# We don't want to update the log directory because it should be automatically
# generated in the submit script, as we want fresh log directories for each
# run.
# Refinement isn't done either, as it does not change throughout the history
# of a given run

utils.update_file(config_filepath, config_updates)
Example #10
import sys
SRC_PATH = "/home/lavelld/Documents/SS/Internet_Apps/DFS/src"
sys.path.append(SRC_PATH)
from utils import get_file_read, update_file, get_files_in_dir, split_path, clear_path, add_and_get_file

if __name__ == '__main__':
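    # Each numbered test exercises one helper imported from utils.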
    test = int(sys.argv[1])
    if test == 0:
        test_path = SRC_PATH + '/FS_3'
        print(get_file_read("D1/1.1", test_path))

    elif test == 1:
        test_path = SRC_PATH + '/' + 'cache'
        file_name = 'D1/1.1'
        file_content = 'Hello test'
        print(update_file(file_name, test_path, file_content))

    elif test == 2:
        test_path = SRC_PATH + '/' + 'FS_1/D1'
        print(get_files_in_dir(test_path))
    elif test == 3:
        path = 'D1/1.1'
        print(split_path(path)[1])

    elif test == 4:
        test_path = SRC_PATH + '/' + 'temp'
        print(clear_path(test_path))

    elif test == 5:
        test_path = SRC_PATH + '/' + 'temp'
        file_name = 'D1/1.1'
Example #11
import sys
import os

import utils

home_dir = os.path.abspath(sys.argv[1])
defs_file = home_dir + os.sep + "defs.h"

defs_updates = [utils.CheckLine("#define num_refinement_levels", "int")]
utils.update_file(defs_file, defs_updates)

Example #12
    def post(self):
        '''write to file
        request format:
        {
            file_name:
            file_content:
            replicate: (if replicate = true send request onto the replication server)
            new_file:
        }'''

        content = request.json
        file_name = content['file_name']
        new_file = content['new_file']
        file_version = 0
        file = get_file_write(file_name, FILE_SERVER_PATH)
        print("Received request for " + file_name)
        if file is None and not new_file:
            abort(404)
        else:

            file_content = content["file_content"]
            if new_file:
                print("Creating new file")
                try:
                    update_file(file_name, FILE_SERVER_PATH, file_content)
                    #Send alert to dir server
                    dir_port = get_port('dir_server')
                    req = format_file_req(dir_port)
                    data = {
                        'file_name': file_name,
                        'file_server_port': str(SERVER_PORT),
                        'new_file': new_file,
                        'file_content': file_content
                    }
                    response = json.loads(
                        requests.post(req,
                                      data=json.dumps(data),
                                      headers=cf.JSON_HEADER).content.decode())
                    file_version = response['file_version']

                except Exception:
                    print('Creation failed')
                    # assumption: return an explicit error instead of None
                    return {'error': 'creation failed'}, 500
            else:
                print("Performing write")
                try:
                    file.write(file_content)
                    if content['replicate']:
                        print('replicating write')
                        #Find replication port
                        dir_server_port = get_port('dir_server')
                        #Send post onto replication server
                        req = format_file_req(dir_server_port)
                        data = {
                            'file_name': file_name,
                            'file_content': content['file_content'],
                            'file_server_port': str(SERVER_PORT),
                            'new_file': new_file
                        }
                        response = json.loads(
                            requests.post(
                                req,
                                data=json.dumps(data),
                                headers=cf.JSON_HEADER).content.decode())
                        file_version = response['file_version']

                except Exception:
                    print('write failed')
                    # assumption: return an explicit error instead of None
                    return {'error': 'write failed'}, 500
            response = {"file_version": file_version}

            return response
Example #13
def main(nqubits_list: List[int],
         type: str,
         device: Optional[str] = "/CPU:0",
         accelerators: Optional[Dict[str, int]] = None,
         fuse: bool = False,
         nlayers: Optional[int] = None,
         gate_type: Optional[str] = None,
         params: Dict[str, float] = {},
         nshots: Optional[int] = None,
         directory: Optional[str] = None,
         name: Optional[str] = None,
         compile: bool = False):
    """Runs benchmarks for the Quantum Fourier Transform.

    If `directory` is specified this saves an `.h5` file that contains the
    following keys:
        * nqubits: List with the number of qubits that were simulated.
        * simulation_time: List with simulation times for each number of qubits.
        * compile_time (optional): List with compile times for each number of
            qubits. This is saved only if `compile` is `True`.

    Args:
        nqubits_list: List with the number of qubits to run for.
        type: Type of Circuit to use.
            See ``benchmark_models.py`` for available types.
        device: Tensorflow logical device to use for the benchmark.
            If ``None`` the first available device is used.
        nlayers: Number of layers for supremacy-like or gate circuits.
            If a different circuit is used ``nlayers`` is ignored.
        gate_type: Type of gate for gate circuits.
            If a different circuit is used ``gate_type`` is ignored.
        params: Gate parameter for gate circuits.
            If a non-parametrized circuit is used then ``params`` is ignored.
        nshots: Number of measurement shots.
        directory: Directory to save the log files.
            If ``None`` then logs are not saved.
        name: Name of the run to be used when saving logs.
            This should be specified if a directory is given; otherwise it
            is ignored.
        compile: If ``True`` then the Tensorflow graph is compiled using
            ``circuit.compile()``. In this case the compile time is also logged.

    Raises:
        FileExistsError: If a file with the specified `name` already exists in
            the given `directory`.
    """
    if device is None:
        device = tf.config.list_logical_devices()[0].name

    if directory is not None:
        if name is None:
            raise ValueError(
                "A run name should be given in order to save logs.")

        # Generate log file name
        log_name = [name]
        if compile:
            log_name.append("compiled")
        log_name = "{}.h5".format("_".join(log_name))
        # Generate log file path
        file_path = os.path.join(directory, log_name)
        if os.path.exists(file_path):
            raise FileExistsError("File {} already exists in {}."
                                  "".format(log_name, directory))

        print("Saving logs in {}.".format(file_path))

    # Create log dict
    logs = {"nqubits": [], "simulation_time": [], "creation_time": []}
    if compile:
        logs["compile_time"] = []

    # Set circuit type
    print("Running {} benchmarks.".format(type))

    for nqubits in nqubits_list:
        kwargs = {"nqubits": nqubits, "circuit_type": type}
        params = {k: v for k, v in params.items() if v is not None}
        if params: kwargs["params"] = params
        if nlayers is not None: kwargs["nlayers"] = nlayers
        if gate_type is not None: kwargs["gate_type"] = gate_type
        if accelerators is not None:
            kwargs["accelerators"] = accelerators
            kwargs["memory_device"] = device

        start_time = time.time()
        circuit = circuits.CircuitFactory(**kwargs)
        if fuse:
            circuit = circuit.fuse()
        logs["creation_time"].append(time.time() - start_time)

        try:
            actual_backend = circuit.queue[0].einsum.__class__.__name__
        except AttributeError:
            actual_backend = "Custom"

        print("\nBenchmark parameters:", kwargs)
        print("Actual backend:", actual_backend)
        with tf.device(device):
            if compile:
                start_time = time.time()
                circuit.compile()
                # Try executing here so that compile time is not included
                # in the simulation time
                final_state = circuit.execute(nshots=nshots)
                logs["compile_time"].append(time.time() - start_time)

            start_time = time.time()
            final_state = circuit.execute(nshots=nshots)
            logs["simulation_time"].append(time.time() - start_time)

        logs["nqubits"].append(nqubits)

        # Write updated logs in file
        if directory is not None:
            utils.update_file(file_path, logs)

        # Print results during run
        print("Creation time:", logs["creation_time"][-1])
        if compile:
            print("Compile time:", logs["compile_time"][-1])
        print("Simulation time:", logs["simulation_time"][-1])
        print("Final dtype:", final_state.dtype)