Example #1
    def decode(self, message):
        mask = int(message[0])  # event mask
        filename = message[2]  # file name
        path = message[3]  # path
        # If the message signals a folder creation, call the folder-creation
        # helper, and so on for every other mask.
        if mask == notify.CREATE_DIR:
            util.create_folder(path, filename)
        elif mask == notify.DELETE_DIR:
            util.delete_folder(path, filename)
        elif mask == notify.CREATE_FILE:
            util.create_file(path, filename)
        elif mask == notify.DELETE_FILE:
            util.delete_file(path, filename)
        elif mask == notify.MODIFY_FILE:
            BkpSync.flag_send = 1
            time.sleep(0.5)
            util.modify_file(path, filename, self.conn)
            time.sleep(1)
            BkpSync.flag_send = 0
        elif mask == notify.DIR_MOVED_FROM:
            util.delete_folder(path, filename)
        elif mask == notify.DIR_MOVED_TO:
            util.create_folder(path, filename)
        elif mask == notify.FILE_MOVED_FROM:
            util.delete_file(path, filename)
        elif mask == notify.FILE_MOVE_TO:
            BkpSync.flag_send = 1
            time.sleep(0.5)
            util.modify_file(path, filename, self.conn)
            time.sleep(1)
            BkpSync.flag_send = 0
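Every example on this page calls a create_folder (or util.create_folder) helper whose body is not shown here, and whose exact signature varies between the quoted projects (one, two, or even three path components). Purely as a point of reference, a minimal sketch of such a helper, assuming it simply joins its arguments and wraps os.makedirs, could look like this:

import os


def create_folder(*parts):
    # Hypothetical stand-in, not any of the quoted projects' actual helper:
    # join the given path components, create the directory along with any
    # missing parents, and do nothing if it already exists.
    path = os.path.join(*parts)
    os.makedirs(path, exist_ok=True)
    return path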
Example #2
    def __init__(self,
                 directory,
                 filename,
                 monitor='val_loss',
                 verbose=0,
                 save_best_only=False,
                 save_weights_only=False,
                 mode='auto',
                 period=1):

        # make folder with the current time as name
        now = datetime.datetime.now()
        current_time = "{}_{}_{}_{}_{}_{}".format(now.day, now.month, now.year,
                                                  now.hour, now.minute,
                                                  now.second)
        constants.SAVE_DIR = os.path.join(directory, current_time)

        create_folder(constants.SAVE_DIR)

        # forward all checkpoint options (including verbose) to the base class
        ModelCheckpoint.__init__(self,
                                 os.path.join(constants.SAVE_DIR, filename),
                                 monitor=monitor,
                                 verbose=verbose,
                                 save_best_only=save_best_only,
                                 save_weights_only=save_weights_only,
                                 mode=mode,
                                 period=period)
Example #3
def save_grid(lines, lv, ss, nodes, folder=''):
    util.create_folder(folder + r'ProcessedData')
    print('Saving Full data:\n\tLines: {}\n\tML/LV: {}\n\tUnique Nodes:{}'.format(len(lines), len(lv), len(unique_nodes(lines))))
    lv.to_csv(folder + r'ProcessedData\\' +  'MVLV_full.csv')
    ss.to_csv(folder + r'ProcessedData\\' +  'SS_full.csv')
    lines.to_csv(folder + r'ProcessedData\\' +  'MVLines_full.csv')
    nodes.to_csv(folder + r'ProcessedData\\' +  'Nodes_full.csv')
Example #4
    def save_results(self, outputfolder=None):
        if outputfolder is None:
            of = self.outputfolder
        else:
            of = outputfolder
        util.create_folder(of)
        self.res_v.to_csv(of + r'/vm_pu.csv')
        self.global_res.to_csv(of + r'/global_res.csv')

        if self.feeder_pq:
            self.p_feeder.to_csv(of + r'/p_feeder.csv')
            self.q_feeder.to_csv(of + r'/q_feeder.csv')
Example #5
def save_curated_data(content: dict, metadata: dict):
    """
    Creates a CSV file for the content extracted from 25 papers.

    :param content: Data extracted from the 25 papers
    :param metadata: Other relevant information
    :return: None
    """
    df = pd.DataFrame(content)
    root_path = os.path.join(os.getcwd(), 'Curated_Data')
    create_folder(root_path)
    df.to_csv(os.path.join(
        root_path, '{}_{}_{}.csv'.format(metadata['Subject'],
                                         metadata['Month'], metadata['Year'])),
              index=False)
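A hypothetical call (the content and metadata values below are illustrative, not taken from the project) would write Curated_Data/Physics_May_2021.csv under the current working directory:

save_curated_data(
    content={'Title': ['Paper A', 'Paper B'], 'DOI': ['10.1000/1', '10.1000/2']},
    metadata={'Subject': 'Physics', 'Month': 'May', 'Year': 2021})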
Example #6
    def save_data_pp(self, event=None):
        now = datetime.datetime.now()
        fn = r'Data_{}_{}'.format(self.ss.index[0], str(now.date()))
        if len(self.outputfolder) > 0:
            if not self.outputfolder[-2:] == r'\\':
                self.outputfolder += r'\\'
        util.create_folder(self.outputfolder + fn)
        print(
            'Saving data:\n\tLines: {}\n\tML/LV: {}\n\tUnique Nodes:{}'.format(
                len(self.lines), len(self.lv), len(self.nodes)))
        self.lv.to_csv(self.outputfolder + fn + r'\\' + 'MVLV.csv')
        self.ss.to_csv(self.outputfolder + fn + r'\\' + 'SS.csv')
        self.lines.to_csv(self.outputfolder + fn + r'\\' + 'MVLines.csv')
        self.nodes.to_csv(self.outputfolder + fn + r'\\' + 'Nodes.csv')
        print('Saved data in folder {}'.format(fn))
Example #7
def random_word():
    words = get_words_list()
    word, meaning = random.choice(words.items())
    # TODO: process result data and get word depending on the history of it too
    click.echo(click.style(word + ": ", bold=True))
    raw_input('<Enter> to show meaning')
    click.echo(meaning)

    # check if the user knows about this word or not
    result = raw_input('Did you know / remember the meaning?\n')
    correct = 0
    if result == 'y' or result == 'yes':
        correct = 1
    util.create_folder(VOCABULARY_CONFIG_FOLDER_PATH)
    with open(VOCABULARY_CONFIG_FOLDER_PATH + '/results.txt', 'a') as fp:
        timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        fp.write('{} {} {}\n'.format(timestamp, word, correct))
Example #8
def setup():
    util.create_folder(MONEY_CONFIG_FOLDER_PATH)

    if util.ask_overwrite(MONEY_CONFIG_FILE_PATH):
        return

    chalk.blue('Enter default currency code:')
    currency_code = (raw_input().strip())
    click.echo(currency_rates.get_rates(currency_code))
    click.echo(currency_codes.get_symbol(currency_code))
    click.echo(currency_codes.get_currency_name(currency_code))

    chalk.blue('Enter initial amount:')
    initial_money = int(raw_input().strip())

    setup_data = dict(currency_code=currency_code, initial_money=initial_money)

    util.input_data(setup_data, MONEY_CONFIG_FILE_PATH)
Example #9
def expense():
    util.create_folder(MONEY_CONFIG_FOLDER_PATH)
    with open(MONEY_CONFIG_FOLDER_PATH + '/expenditures.txt', 'a') as fp:
        request.query = raw_input()
        click.echo('output: ')
        response = request.getresponse().read()
        output = json.loads(response)
        # click.echo(output)
        currency_name = output['result']['parameters']['currency-name']
        item = output['result']['parameters']['any'] if len(
            output['result']['parameters']['any'].split()) == 1 else (
                '"' + output['result']['parameters']['any'] + '"')
        number = output['result']['parameters']['number']

        timestamp = datetime.datetime.fromtimestamp(
            time.time()).strftime('%Y-%m-%d %H:%M:%S')
        fp.write('{} {} {} {}\n'.format(timestamp, currency_name, number,
                                        item))
Example #10
    def save(self, epoch, total_loss, total_acc, res_dfs, train=True):
        net_state_dir = os.path.join(self.log_dir, 'state')
        log_dir = os.path.join(self.log_dir, 'train' if train else 'test')

        util.create_folder(net_state_dir)
        util.create_folder(log_dir)

        if not train:
            torch.save(
                self.net.state_dict(),
                os.path.join(net_state_dir,
                             f'{epoch:03d}-{self.exp_name}-state.pkl'))

        res_dfs.to_csv(
            os.path.join(
                log_dir,
                f'{epoch:03d}-{self.exp_name}-loss_{total_loss:0.6f}-acc_{total_acc:0.6f}.csv'
            ))
Example #11
def new_set_fc(name):
    if name:
        if len(name.split()) > 1:
            chalk.red('The name should be a single word')
        else:
            sets = get_set_statuses()
            if not sets:
                # there is no file, so create one
                util.create_folder(FLASHCARDS_CONFIG_FOLDER_PATH)

                description = raw_input('Enter a description:\n')

                with open(FLASHCARDS_CONFIG_FOLDER_PATH + '/sets.txt', 'a') as fp:
                    fp.write('{}-{}-{}\n'.format(name, 1, description))
            else:
                # assuming that the set exists. if it doesn't, catch
                try:
                    if (sets[name] != 0 and sets[name] != 1):
                        chalk.red('Set already exists')
                except KeyError:
                    util.create_folder(FLASHCARDS_CONFIG_FOLDER_PATH)

                    description = raw_input('Enter a description:\n')

                    with open(FLASHCARDS_CONFIG_FOLDER_PATH + '/sets.txt', 'a') as fp:
                        fp.write('{}-{}-{}\n'.format(name, 1, description))

                    # create folder for the set to add cards to it
                    util.create_folder(FLASHCARDS_CONFIG_FOLDER_PATH + '/' + name)

                    chalk.red('Set added')
    else:
        chalk.red('Please enter the name of new study set after the command')
Example #12
def setup():
    util.create_folder(LOVE_CONFIG_FOLDER_PATH)

    if util.ask_overwrite(LOVE_CONFIG_FILE_PATH):
        return

    chalk.blue('Enter their name:')
    name = (raw_input().strip())

    chalk.blue('Enter sex (M/F):')
    sex = (raw_input().strip())

    chalk.blue('Where do they live?')
    place = (raw_input().strip())

    setup_data = dict(
        name=name,
        place=place,
        sex=sex
    )

    util.input_data(setup_data, LOVE_CONFIG_FILE_PATH)
Example #13
        logger.error("Error while running '%s'." %
                     "dpkg -l | grep '%s' | grep -E -o '%s'" %
                     (args.ide, supportedIDEs[args.ide][1]))
        sys.exit(-1)
    if result != version.group() and result != "":
        print("There is a newer version (%s) than installed (%s) available!" %
              (version.group(), result))
        sys.exit(1)
    if result == "":
        print("%s %s is not installed." % (args.ide, args.edition))
    sys.exit(0)

# Checking folders
if not util.check_folder(os.path.join(util.get_script_path(), "output"),
                         logger, False, True):
    if not util.create_folder(os.path.join(util.get_script_path(), "output")):
        logger.error("%s does not exist and can not be created." %
                     os.path.join(util.get_script_path(), "output"))
        sys.exit(-1)

if util.check_folder(os.path.join(util.get_script_path(), "tmp"), logger,
                     False, True):
    if not util.delete_folder(os.path.join(util.get_script_path(), "tmp"),
                              logger, True):
        logger.error("%s does exist and can not be deleted." %
                     os.path.join(util.get_script_path(), "tmp"))
        sys.exit(-1)

for folder in [
        os.path.join(util.get_script_path(), "tmp"),
        os.path.join(util.get_script_path(), "tmp", "root", "usr", "share",
Example #14
def create_temporary_folder_and_store_path(identifier='folder_path'):
    session[identifier] = util.generate_unused_folder_path()
    util.create_folder(session[identifier])
Example #15
def today_entry_check():
    util.create_folder(DIARY_CONFIG_FOLDER_PATH)
Example #16
    result = util.run_cmd("dpkg -l | grep '%s' | grep -E -o '%s' | cat" %
                          (args.ide, supportedIDEs[args.ide][1]), logger, True, True).decode('utf-8').replace("\n", "")
    if result is None:
        logger.error("Error while running '%s'." % "dpkg -l | grep '%s' | grep -E -o '%s'" %
                     (args.ide, supportedIDEs[args.ide][1]))
        sys.exit(-1)
    if result != version.group() and result != "":
        print("There is a newer version (%s) than installed (%s) available!" % (version.group(), result))
        sys.exit(1)
    if result == "":
        print("%s %s is not installed." % (args.ide, args.edition))
    sys.exit(0)

# Checking folders
if not util.check_folder(os.path.join(util.get_script_path(), "output"), logger, False, True):
    if not util.create_folder(os.path.join(util.get_script_path(), "output")):
        logger.error("%s does not exist and can not be created." % os.path.join(util.get_script_path(), "output"))
        sys.exit(-1)

if util.check_folder(os.path.join(util.get_script_path(), "tmp"), logger, False, True):
    if not util.delete_folder(os.path.join(util.get_script_path(), "tmp"), logger, True):
        logger.error("%s does exist and can not be deleted." % os.path.join(util.get_script_path(), "tmp"))
        sys.exit(-1)

for folder in [os.path.join(util.get_script_path(), "tmp"),
               os.path.join(util.get_script_path(), "tmp", "root", "usr", "share", "jetbrains", args.ide),
               os.path.join(util.get_script_path(), "tmp", "root", "usr", "share", "applications"),
               os.path.join(util.get_script_path(), "tmp", "root", "usr", "bin"),
               os.path.join(util.get_script_path(), "tmp", "root", "etc", args.ide),
               os.path.join(util.get_script_path(), "tmp", "root", "etc", "sysctl.d"),
               os.path.join(util.get_script_path(), "tmp", "root", "DEBIAN")]:
Example #17
def verify_configurations(conf: dict):
    """
    Verify that the content loaded from the configuration file is correct. The check runs at
    startup so that configuration errors surface before they cause failures later in the code.

    :param conf: content of the configuration file
    :return: None
    """
    # TODO: Add checks for content of the configuration file.
    pass


def main():
    config = load_config()
    verify_configurations(config)
    start_web_scrape(config)


if __name__ == '__main__':
    create_folder(os.path.join(os.getcwd(), 'logs'))
    logging.basicConfig(filename='logs/DataCuration.log',
                        filemode='w',
                        level=logging.INFO,
                        format='%(asctime)s: '
                        '%(filename)s: '
                        '%(levelname)s: '
                        '%(lineno)d:\t'
                        '%(message)s')

    main()
Example #18
def main():
    """Determine build-order and start game"""

    self_race_string = race_to_string[self_race]
    enemy_race_string = race_to_string[enemy_race]

    folder = folder_buildorder + self_race_string + race_bot_separator + enemy_race_string + ending_folder + map_name_strategy + ending_folder
    path_strategy = folder + file_strategy

    # Logging based on: https://docs.python.org/3/howto/logging-cookbook.html
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    logger_strategy = logging.getLogger("sc2.strategy")

    loggers = [
        logging.getLogger("sc2.bot_ai"),
        logging.getLogger("sc2.controller"),
        logging.getLogger("sc2.main"),
        logging.getLogger("sc2.maps"),
        logging.getLogger("sc2.paths"),
        logging.getLogger("sc2.sc2process"),
        logging.getLogger("sc2.protocol"),
        logging.getLogger("root"),
        logging.getLogger("sc2.command"), logger_strategy,
        logging.getLogger("sc2.performance")
    ]

    for i in range(eval_number_games):

        print("Evaluation number: {0} of {1}".format(i + 1, eval_number_games))

        hash = get_buildorder_hash(path_strategy, method)

        path = folder + hash + ending_csv
        time_string = str(round(time.time()))
        id = map_name + self_race_string + race_bot_separator + enemy_race_string + time_string + "_" + hash

        subpath = method_experiment_name + ending_folder + map_name + ending_folder

        create_folder(folder_bot_replays + subpath)
        create_folder(folder_bot_logs + subpath)

        output_replay = folder_bot_replays + subpath + id + ending_sc2replay
        log_file_path = folder_bot_logs + subpath + id + ending_logs

        fh = logging.FileHandler(log_file_path, mode="w")
        fh.setFormatter(formatter)
        fh.setLevel(logging.DEBUG)

        # add output to path to loggers
        for logger in loggers:
            logger.addHandler(fh)

        logger_strategy.info("Log file: {0}".format(log_file_path))
        logger_strategy.info("Selected build-order hash: {0}".format(hash))
        logger_strategy.info("Outputfile will be {0}".format(output_replay))
        logger_strategy.info("ID: {0}".format(id))

        # Start game
        run_game(
            maps.get(map_name),
            [
                Bot(
                    self_race,
                    Bot_AI_Extended(path, output_replay, logger_strategy,
                                    method_experiment_name, map_name)),
                #Bot(self_race, Bot_AI_Extended(path, output_replay, logger_strategy))
                Computer(enemy_race, enemy_difficulty)
            ],
            realtime=False,
            save_replay_as=output_replay,
            game_time_limit=max_gametime)

        for logger in loggers:
            logger.removeHandler(fh)

        fh.flush()
        fh.close()

        logging.shutdown()
Example #19
                   lv=bt,
                   GPS=True,
                   geo=polys,
                   tech=tech,
                   nodes=nodes,
                   outputfolder=folder)

#%% Save data!
# Reduced data w/o non connected
htared = hta[~hta.Feeder.isnull()]
ns = unique_nodes(htared)
fnodesred = fnodes.loc[ns]
btred = bt[bt.node.isin(fnodesred.index)]
psred = ps[ps.node.isin(fnodesred.index)]

util.create_folder(folder + r'ProcessedData')
print(
    'Saving reduced data:\n\tLines: {}\n\tML/LV: {}\n\tUnique Nodes:{}'.format(
        len(htared), len(btred), len(ns)))
btred.to_csv(folder + r'ProcessedData\\' + 'MVLV.csv')
psred.to_csv(folder + r'ProcessedData\\' + 'SS.csv')
htared.to_csv(folder + r'ProcessedData\\' + 'MVLines.csv')
fnodesred.to_csv(folder + r'ProcessedData\\' + 'Nodes.csv')

print('Saving Full data:\n\tLines: {}\n\tML/LV: {}\n\tUnique Nodes:{}'.format(
    len(hta), len(bt), len(unique_nodes(hta))))
bt.to_csv(folder + r'ProcessedData\\' + 'MVLV_full.csv')
ps.to_csv(folder + r'ProcessedData\\' + 'SS_full.csv')
hta.to_csv(folder + r'ProcessedData\\' + 'MVLines_full.csv')
fnodes.to_csv(folder + r'ProcessedData\\' + 'Nodes_full.csv')
Example #20
def generate_backlinks_files(notes_folder: str, backlinks_folder: str) -> None:
    logger: Logger = get_logger()

    file_names: List[str] = markdown_filenames(folder_path=notes_folder)
    logger.info(f'Found {len(file_names)} files in {notes_folder}')

    util.create_folder(location=backlinks_folder)
    logger.info(f'Will put backlinks into: {backlinks_folder}/')

    # find all of the files that have changed since the last script run by
    # looking into the JSON state file to speed up the backlinks generation
    state_file: dict = util.read_existing_json_state_file(
        location=backlinks_folder)
    relevant_file_names: Set[str] = set()
    for file_name in file_names:
        key: str = util.strip_file_extension(file_name)
        if state_file['files'][key]['last_checked'] == state_file['runtime']:
            relevant_file_names.add(file_name)
            # ensure that we also refresh the backlinks for the files that are
            # referenced by this file (since the links go two ways)
            with open(util.path(notes_folder, file_name), 'r') as f:
                contents = f.read()
                # the results of re.findall() will look something like
                # [('Page B', 'pageB.md')]
                # where the link in markdown would've been [Page B](pageB.md)
                for _, link in util.md_links.findall(contents):
                    if util.is_md(link):
                        relevant_file_names.add(link)

    # create the backlinks files
    for file_name in relevant_file_names:
        logger.info(f'refreshing backlinks for {file_name}')
        # a list of all of the files that reference this one
        references = []

        # look in all of the other files for references and put them in the
        # above list if we find any
        for other_file in file_names:
            if other_file == file_name:
                continue
            if other_file == 'index.md':
                # the index file is supposed to reference a lot of stuff
                # so I don't want it to pollute the backlinks
                continue

            with open(f'{notes_folder}/{other_file}', 'r') as f:
                contents = f.read()
                # the results of re.findall() will look something like
                # [('Page B', 'pageB.md')]
                # where the link in markdown would've been [Page B](pageB.md)
                for _, link in util.md_links.findall(contents):
                    if link == file_name:
                        logger.debug(
                            f'{file_name}: referenced by {other_file}')
                        title = util.note_title(f'{notes_folder}/{other_file}')
                        references.append((other_file, title))

        # write out all of the backlinks using some properly styled markdown.
        # this bit will be appended to the original note later on when it is
        # converted to a standalone HTML page
        backlinks_file_path = f'{backlinks_folder}/{file_name}.backlinks'
        with open(backlinks_file_path, 'w') as f:
            f.write(backlinks_html(refs=references))
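The comments in this example refer to a util.md_links regular expression that is defined elsewhere in the project; a plausible stand-in (an assumption, not the project's actual pattern) that makes re.findall() return (link text, target) tuples such as [('Page B', 'pageB.md')] is:

import re

# Matches inline Markdown links like [Page B](pageB.md); with two capture
# groups, findall() yields a list of (text, target) tuples.
md_links = re.compile(r'\[([^\]]+)\]\(([^)]+)\)')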
Example #21
ending_folder = "/"
ending_csv = ".csv"
ending_sc2replay = ".SC2Replay"
ending_logs = ".log"

# Strings for file names
race_terran_string = "Terran"
race_protoss_string = "Protoss"
race_zerg_string = "Zerg"
race_bot_separator = "vs"

folder_bot_replays = folder_sc2replays + "bot-replays/"
folder_bot_logs = folder_sc2replays + "bot-logs/"
folder_human_vs_bot = folder_sc2replays + "human-vs-bot/"

create_folder(folder_bot_replays)
create_folder(folder_bot_logs)
create_folder(folder_human_vs_bot)

no_hash = "NoBuildOrder"


folder_buildorder = folder_sc2replays + "buildorders-csv/"
file_strategy = "strategy" + ending_csv

file_bot_results = folder_bot_replays + "results.csv"

# Strategy util constants -----------------------------------------------------

vespene_buildings = ("REFINERY", "ASSIMILATOR", "EXTRACTOR")
main_buildings = ("COMMANDCENTER", "NEXUS", "HATCHERY")
Example #22
    url = "https://www.zhihu.com/question/" + qid
    answer_url = "https://www.zhihu.com/api/v4/questions/" + qid + "/answers"
    # print(answer_url)

    val = init(url)

    session = val[0]
    question_name = val[1]

    # localPath = get_desktop() + "\\" + question_name[0] + "\\"
    # print(localPath)
    # if not os.path.exists(localPath):
    #     os.mkdir(localPath)

    localPath = create_folder(get_desktop(), question_name[0])

    is_end = False
    start = 0

    while not is_end:
        answer_res = get_answer_data(session, answer_url, 5, start)

        data = answer_res.json()['data']
        is_end = answer_res.json()['paging']['is_end']
        start += 5

        for answer in data:
            imgs = get_imgurl(answer['content'])
            author = answer['author']
            if author['name'] == "匿名用户" or author['name'] == "知乎用户":
Example #23
    modstr = ''
# To set a single overnight ToU window: set start_tous = end_tous
# To model a full off-peak day (no ToU): set everything to 0
h_tous = 6  # hours of off-peak pricing
start_tous = 22  # off-peak window start hour (22:00)
end_tous = 3  # off-peak window end hour (03:00)
delta_tous = (end_tous - start_tous) % 24  # window length in hours, wrapping past midnight

# Results folder
outputfolder = '{}{}{}_EV{:02d}_W{:02d}'.format(ev_type, modstr,
                                                '_ToU' if tou else '',
                                                int(ev_penetration * 10),
                                                int(ev_work_ratio * 10))

# Make sure the results folder exists, creating it if needed:
util.create_folder(dir_results, outputfolder, 'Images')

#%% Results

global_data = {}
ev_data = {}
ev_load_day = {}
ev_load_night = {}

#%% Run for multiple ToU overnights

f1, ax1 = plt.subplots()
f1.set_size_inches(7, 6)
f2, ax2 = plt.subplots()
f2.set_size_inches(7, 6)
counter = 0
Example #24
def do_pandoc_generation(notes_folder: str, temp_folder: str, html_folder: str) -> None:
    logger: Logger = get_logger()

    for folder in [notes_folder, temp_folder, html_folder]:
        logger.info('creating folder: \'%s\' if it doesn\'t exist already', folder)
        util.create_folder(folder)

    # only queue up files for pandoc generation if they (or the files that
    # point to them) have been modified recently, so that we don't have to
    # regenerate everything each time we make one change in one file.
    state_file: dict = util.read_existing_json_state_file(location=temp_folder)
    relevant_file_names: Set[str] = set()
    for file_name in os.listdir(notes_folder):
        if not util.is_md(file_name):
            continue
        key: str = util.strip_file_extension(file_name)
        if state_file['files'][key]['last_checked'] == state_file['runtime']:
            relevant_file_names.add(file_name)
            # ensure that we also refresh the backlinks for the files that are
            # referenced by this file (since the links go two ways)
            with open(util.path(notes_folder, file_name), 'r') as f:
                contents = f.read()
                # the results of re.findall() will look something like
                # [('Page B', 'pageB.md')]
                # where the link in markdown would've been [Page B](pageB.md)
                for _, link in util.md_links.findall(contents):
                    if util.is_md(link):
                        relevant_file_names.add(link)

    for file in relevant_file_names:
        # the path to the note is always gonna be in the notes_folder
        file_full_path: str = util.path(notes_folder, file)
        note_title = util.note_title(file_full_path)

        # the output HTML file should have the same name as the note but with
        # the .html suffix and it should be in the html folder
        file_html: str = util.path(html_folder, file)
        file_html: str = util.change_file_extension(file_html, '.html')

        # the backlinks file should have the same name as the note but with
        # the .md.backlinks suffix, and it should be in the temp folder
        file_backlinks: str = util.path(temp_folder, file + '.backlinks')

        logger.info('converting %s to html, title=%s', file, note_title)
        util.do_run(cmd=[
            'pandoc',
            file_full_path, file_backlinks,
            f'--defaults=pandoc.yaml',
            f'--id-prefix={util.to_footnote_id(file)}',
            f'--output={file_html}',
            f'--metadata=pagetitle:{note_title}'
        ])

    # if the index.md was generated in the temp folder, pandocify it
    index_file_name = 'index.md'
    generated_index_file = util.path(temp_folder, index_file_name)
    if util.check_file_exists(generated_index_file):
        output_file = util.path(
            html_folder, util.change_file_extension(index_file_name, '.html'))
        index_title = util.note_title(generated_index_file)
        logger.debug('converting %s to html, title=%s', generated_index_file, index_title)
        util.do_run(cmd=[
            'pandoc',
            generated_index_file,
            f'--defaults=pandoc.yaml',
            f'--id-prefix={util.to_footnote_id(index_file_name)}',
            f'--output={output_file}',
            f'--metadata=pagetitle:{index_title}'
        ])
Example #25
                         for k in penalty_threshold:
                             params = dict(av_payment=av_payment, 
                                           ut_payment=ut_payment,
                                           nevents=nact, 
                                           days_of_service=days_of_service,
                                           conf=j, 
                                           service_time=i,
                                           min_delivery=k, 
                                           min_bid=minbid, 
                                           nscenarios=nscenarios)
                             V1G_bids, V1G_payments, V1G_und = fpf.compute_payments(flex_V1G[i], 
                                                                                    **params)
                             V2G_bids, V2G_payments, V2G_und = fpf.compute_payments(flex_V2G[i], 
                                                                                **params)
                             # Save raw data points:                
                             util.create_folder(folder + r'raw\\')
 #                            filename = 'fleet{}_avw{}_ev{}_nact{}_servt{}_confth{}_penaltyth{}'.format(s, a, nevs_fleet, nact, f, j, k)
 #                            np.save(folder_raw + filename,
 #                                    (V1G_bids, V1G_payments, V1G_und, V2G_bids, V2G_payments, V2G_und))
                             
                             fpf_params = dict(percentile=95)
                             statsb = fpf.get_stats(V1G_bids, **fpf_params)
                             statsu = fpf.get_stats(V1G_und, **fpf_params)
                             for p in penalty_values:
                                 V1G_payments = V1G_payments - V1G_bids.repeat(nscenarios) * V1G_und * p
                                 statsp = fpf.get_stats(V1G_payments, **fpf_params)
                                 stats_VxG['v1g', nevs_fleet, nact, f, j, k, p, sensi, v] = (statsb + statsp + statsu)
                             statsb = fpf.get_stats(V2G_bids, **fpf_params)
                             statsp = fpf.get_stats(V2G_payments, **fpf_params)
                             statsu = fpf.get_stats(V2G_und, **fpf_params)
                             for p in penalty_values: