Code example #1
def simulated_annealing(scenario, time_limit=20000):
    # simulated annealing (https://en.wikipedia.org/wiki/Simulated_annealing)
    # is an optimization algorithm that tweaks the state a little at a time
    # until it reaches an optimum. to avoid getting stuck in local optima, the process
    # sometimes accepts tweaks with high energy (a bad score) in order to shake things up
    # and open new pathways. as the optimization proceeds, the "temperature" drops and
    # high-energy tweaks become less and less likely.

    # the basic version only mutates the route using swaps, i.e. exchanging two consecutive nodes

    # time_limit: allowed computation time in milliseconds

    start_time = time_stamp()

    scenario_len = scenario.shape[0]

    # normalizer that makes the scale (width and height) of the scenario irrelevant
    distance_normalizer = get_average_edge_length(scenario)

    # initialize a route randomly
    route = scenario.copy()
    np.random.shuffle(route)

    # score of each possible swap
    energy_deltas = get_energy_deltas(route, distance_normalizer,
                                      np.arange(scenario_len))

    while time_stamp() < start_time + time_limit:
        # compute the current temperature
        stamp = time_stamp()
        temperature = get_temperature(stamp - start_time, time_limit)

        # get the thresholds for all swaps
        thresholds = get_thresholds(energy_deltas, temperature)

        # get a random number
        random_num = np.random.random()

        # get a random swap with threshold above the random number
        available_swaps = np.arange(
            thresholds.shape[0])[thresholds > random_num]

        if available_swaps.size > 0:
            winner_swap = np.random.choice(available_swaps, 1)[0]

            # swap the nodes
            i, j = swap_index_to_node_indices(winner_swap, scenario_len)
            route[[i, j]] = route[[j, i]]

            # recalculate the affected energies
            affected_indices = get_indices_affected_by_swap(
                winner_swap, scenario_len)
            new_energy_deltas = get_energy_deltas(route, distance_normalizer,
                                                  affected_indices)
            energy_deltas[affected_indices] = new_energy_deltas

    # turn the coordinate route into an index route, with each index pointing
    # to its corresponding node in the original scenario array.
    return xy_route_to_indices_route(scenario, route)
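
The helpers get_temperature and get_thresholds are called above but not shown on this page. A minimal sketch of what they could look like, assuming a linear cooling schedule and the standard Metropolis acceptance rule (both the schedule and the rule are assumptions, not the project's actual code):

import numpy as np

def get_temperature(elapsed_ms, time_limit_ms, start_temp=1.0):
    # assumed linear cooling: temperature falls from start_temp to 0 over the time budget
    return start_temp * max(0.0, 1.0 - elapsed_ms / time_limit_ms)

def get_thresholds(energy_deltas, temperature):
    # Metropolis rule: improving swaps (delta < 0) get threshold 1.0 (always acceptable),
    # while worsening swaps are acceptable with probability exp(-delta / T)
    with np.errstate(over="ignore"):
        return np.minimum(1.0, np.exp(-energy_deltas / max(temperature, 1e-9)))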
Code example #2
def simulated_annealing(scenario, time_limit=40000, use_flips=True, use_pops=True):
    # simulated annealing (https://en.wikipedia.org/wiki/Simulated_annealing)
    # is an optimization algorithm that tweaks the state a little at a time
    # until it reaches an optimum. to avoid getting stuck in local optima, the process
    # sometimes accepts tweaks with high energy (a bad score) in order to shake things up
    # and open new pathways. as the optimization proceeds, the "temperature" drops and
    # high-energy tweaks become less and less likely.

    # the advanced version is essentially an improvement over local_search: it checks for both
    # segment flips and node pops.

    # time_limit: allowed computation time in milliseconds

    start_time = time_stamp()
    counter = 0
    # normalizer that makes the scale (width and height) of the scenario irrelevant
    distance_normalizer = get_average_edge_length(scenario)

    # initialize a route randomly
    route = scenario.copy()
    np.random.shuffle(route)

    while time_stamp() < start_time + time_limit:
        # compute the current temperature
        stamp = time_stamp()
        temperature = get_temperature(stamp - start_time, time_limit)

        # get a random threshold; route-length changes below it will be accepted
        random_threshold = get_random_threshold(temperature, distance_normalizer)

        if use_flips:
            # the two endpoints of the segment to flip
            segment_start, segment_end = find_segment_flip(route, threshold=random_threshold)

            # if a flip below the threshold was found
            if segment_start is not None:
                route[segment_start + 1: segment_end] = route[segment_end - 1: segment_start: -1]
                counter += 1

        if use_pops:
            pop_from, place_at = find_pop(route, threshold=random_threshold)
            # if a pop below the threshold was found
            if pop_from is not None:
                # simulate a Python list pop() and insert() using slice arithmetic

                if pop_from > place_at:
                    route[place_at:pop_from + 1] = route[np.r_[pop_from, place_at:pop_from]]
                else:
                    route[pop_from:place_at] = route[np.r_[pop_from + 1:place_at, pop_from]]
                continue
    # turn the coordinate route into an index route, with each index pointing
    # to its corresponding node in the original scenario array.
    return xy_route_to_indices_route(scenario, route)
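
The in-place slice assignments used above for the flip and the pop are compact but easy to misread. A standalone demonstration of both moves on a small array (illustrative only, not part of the project):

import numpy as np

route = np.arange(8)  # stand-in for a route of nodes

# segment flip (2-opt move): reverse the nodes strictly between the two endpoints
segment_start, segment_end = 1, 6
route[segment_start + 1:segment_end] = route[segment_end - 1:segment_start:-1]
print(route)  # [0 1 5 4 3 2 6 7]

# node pop: remove the node at pop_from and reinsert it at place_at (pop_from > place_at case)
route = np.arange(8)
pop_from, place_at = 5, 2
route[place_at:pop_from + 1] = route[np.r_[pop_from, place_at:pop_from]]
print(route)  # [0 1 5 2 3 4 6 7]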
Code example #3
File: db.py Project: rwth-i6/drink-kiosk
    def drinker_pay(self, drinker_name, amount):
        """
        Drinker ``drinker_name`` pays some amount ``amount``.
        This function would be called via RPC somehow.

        :param str drinker_name:
        :param Decimal|int|str amount:
        :return: updated Drinker
        :rtype: Drinker
        """
        amount = Decimal(amount)
        print("%s: %s pays %s %s." %
              (time_stamp(), drinker_name, amount, self.currency))
        with self.lock:
            drinker = self.get_drinker(drinker_name)
            drinker.credit_balance += amount
            if drinker.credit_balance >= 0:
                # Reset counts in this case.
                drinker.buy_item_counts.clear()
            self._save_drinker(drinker)
            # We want to have a Git commit right after (after the lock release), so enforce this now.
            self._add_git_commit_drinkers_task(wait_time=0)
            self.admin_cash_position.cash_position += amount
            self._save_admin_cash_position()
        for cb in self.update_drinker_callbacks:
            cb(drinker_name)
        return drinker
Code example #4
File: db.py Project: rwth-i6/drink-kiosk
 def drinker_buy_item(self, drinker_name, item_name, amount=1):
     """
     :param str drinker_name:
     :param str item_name: intern name
     :param int amount: can be negative, to undo drinks
     :return: updated Drinker
     :rtype: Drinker
     """
     print("%s: %s drinks %s (amount: %i)." %
           (time_stamp(), drinker_name, item_name, amount))
     assert isinstance(amount, int)
     with self.lock:
         drinker = self.get_drinker(drinker_name)
         item = self._get_buy_item_by_intern_name(item_name)
         drinker.buy_item_counts.setdefault(item_name, 0)
         drinker.buy_item_counts[item_name] += amount
         drinker.total_buy_item_counts.setdefault(item_name, 0)
         drinker.total_buy_item_counts[item_name] += amount
         drinker.credit_balance -= item.price * amount
         self._save_drinker(drinker)
         if amount != 1:
             # We want to have a Git commit right after (after the lock release), so enforce this now.
             self._add_git_commit_drinkers_task(wait_time=0)
     for cb in self.update_drinker_callbacks:
         cb(drinker_name)
     return drinker
Code example #5
 def __init__(self, s_dim, t_dim, a_dim, architecture, type):
     super().__init__()
     self.architecture = architecture
     self._s_dim = s_dim
     self._t_dim = t_dim
     self._a_dim = a_dim
     self._type = type
     assert type in ['multitask'], 'Invalid agent type.'
     self._id = time_stamp()
Code example #6
 def create_token(self, special: bool = False):
     salt = random_string(4)
     if special:
         h = _hash(salt, self.__hw_pin, None)
         return salt + ":" + b2a_base64_trimed(h)
     else:
         tm = str(time_stamp())
         h = _hash(salt, self.__dev_sec, tm)
         return salt + ":" + b2a_base64_trimed(h) + ":" + tm
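
The helpers _hash and b2a_base64_trimed are not shown in this example. A plausible minimal sketch, assuming a SHA-256 digest over the concatenated fields and base64 output with padding stripped (both are assumptions about this project, not confirmed by the snippet):

import hashlib
from binascii import b2a_base64

def _hash(salt, secret, tm):
    # assumption: digest of salt + secret (+ timestamp, when given)
    data = salt + secret + (tm if tm is not None else "")
    return hashlib.sha256(data.encode()).digest()

def b2a_base64_trimed(h):
    # base64-encode, then trim the trailing newline and "=" padding
    return b2a_base64(h).decode().strip().rstrip("=")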
Code example #7
 def _get_data(self, b):
     self.__handler_data = {  #NOSONAR
         'd': self.name,
         'tm': time_stamp(),
         's': [{
             'n': 'alert',
             'v': 1
         }]
     }
Code example #8
    def __init__(self, concept_architecture, first_level_agent, s_dim, a_dim, t_dim):  
        super().__init__()    
        
        self.concept_architecture = concept_architecture
        self.first_level_agent = first_level_agent
        freeze(self.first_level_agent)

        self._s_dim = s_dim
        self._a_dim = a_dim
        self._t_dim = t_dim
        self._id = time_stamp()
Code example #9
def local_search(scenario,
                 initiate_greedy=True,
                 use_segment_flip=True,
                 use_pop=True,
                 time_limit=np.inf):

    stop_time = time_stamp() + time_limit

    # initialize a route
    if initiate_greedy:
        route = scenario[greedy(scenario)]
    else:
        route = scenario[random_walk(scenario)]

    while time_stamp() < stop_time:
        if use_segment_flip:
            # the two endpoints of the segment to flip
            segment_start, segment_end = find_segment_flip(route)

            if segment_start is not None:
                route[segment_start +
                      1:segment_end] = route[segment_end - 1:segment_start:-1]
                continue

        if use_pop:
            pop_from, place_at = find_pop(route)
            # if a pop that shortens the route was found
            if pop_from is not None:
                # simulate a Python list pop() and insert() using slice arithmetic

                if pop_from > place_at:
                    route[place_at:pop_from +
                          1] = route[np.r_[pop_from, place_at:pop_from]]
                else:
                    route[pop_from:place_at] = route[np.r_[pop_from +
                                                           1:place_at,
                                                           pop_from]]

                continue
        break
    return xy_route_to_indices_route(scenario, route)
Code example #10
    def __init__(self, n_actions, second_level_architecture, first_level_actor, noop_action, temporal_ratio=5):  
        super().__init__()    
        
        self.second_level_architecture = second_level_architecture
        self.first_level_actor = first_level_actor
        freeze(self.first_level_actor)

        self._n_actions = n_actions + int(noop_action)
        self._first_level_a_dim = self.first_level_actor._a_dim
        self._noop = noop_action
        self._temporal_ratio = temporal_ratio
        self._id = time_stamp()
Code example #11
def store_database(database, n_parts):
    part_size = len(database.buffer) // n_parts
    DB_ID = time_stamp()

    os.makedirs(SAVE_PATH + DB_ID)

    for i in range(0, n_parts):
        PATH = SAVE_PATH + DB_ID + '/SAC_training_level2_database_part_' + str(i) + '.p'

        if (i+1) < n_parts:
            pickle.dump(list(itertools.islice(database.buffer, part_size*i, part_size*(i+1))), open(PATH, 'wb'))
        else:
            pickle.dump(list(itertools.islice(database.buffer, part_size*i, None)), open(PATH, 'wb'))
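
No loader is shown alongside store_database; a hypothetical counterpart that reads the parts back in order and rebuilds the buffer (the function name and signature are inventions for illustration):

import pickle

def load_database_parts(save_path, db_id, n_parts):
    # read each pickled part in order and concatenate into one list
    buffer = []
    for i in range(n_parts):
        path = save_path + db_id + '/SAC_training_level2_database_part_' + str(i) + '.p'
        with open(path, 'rb') as f:
            buffer.extend(pickle.load(f))
    return buffer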
Code example #12
 async def async_sensor_values(self):
     '''
     Fetch sensor values in asynchronous mode.
     '''
     from uasyncio import sleep_ms
     if self.__prepare is not None:
         self.__prepare()
         await sleep_ms(self.__delay)
     l = []
     for s in self.__sensors:
         l.append(s.value)
     v = {}
     v['d'] = self.__name
     v['s'] = l
     v['tm'] = time_stamp()
     return v
Code example #13
File: clean.py Project: specialprocedures/matanga
#%%
print("Importing modules")
import pandas as pd

import os
import datetime
from datetime import date, timedelta
from forex_python.bitcoin import BtcConverter
from forex_python.converter import CurrencyRates
from sales import caluclate_sales
from utils import time_stamp, consolidate_data, label_df, recategorise, add_cuts

#%%
TIME_STAMP = time_stamp()
SOURCE_PATH = "DATA/INPUT/SCRAPED"
SAVE_PATH = "DATA/OUTPUT"

#%%
print("Consolidating scraped data")
# Pulling raw scraped data from CSVs, I really need to get into SQL
df = consolidate_data(SOURCE_PATH, SAVE_PATH, TIME_STAMP)

# Consistent records only exist from Feb 4th
START_DATE = datetime.datetime(2020, 2, 4)
END_DATE = pd.to_datetime(datetime.date.today())

df = df[(df.time_stamp >= START_DATE) & (df.time_stamp < END_DATE)]

# Stripping out and reordering columns
df = df[[
    "source",
Code example #14
 def __init__(self, s_dim, latent_dim, n_concepts, noisy=True, lr=1e-4):
     super().__init__(s_dim, latent_dim, n_concepts, noisy, lr)
     self._id = time_stamp()
Code example #15
    arg('-n', '--n-img', type=int, default=20),

    # pipeline
    arg('-is', '--sch', action='store_true'),
    arg('-rs', '--rsch', action='store_true'),
    arg('-pred', action='store_true'),

    # use saved
    arg('-load-urls'),
    arg('-load-preds'),
)

name = opts.name + '__' if is_(opts.name) else ''
RESULT_PREFIX = osp.join('reverse-img-final-preds',
                         '%s_to_%s' % (opts.src, opts.target),
                         name + time_stamp())
mkdir_p(RESULT_PREFIX)

fh = init_logging(file=osp.join(RESULT_PREFIX, 'log.log'), stdout=True)
LOGGER = get_logger(__name__, main=True)

from nlp_utils import get_words
from image_search import image_search
from reverse_image_search import reverse_search_urls

queries = []
if is_(opts.file):
    queries.extend(get_words(opts.file, i=opts.start, j=opts.stop))
if is_(opts.query):
    queries.extend(opts.query)
Code example #16
File: ppmac_ra.py Project: deepu9/wrasc
    def setup(
        self,
        fetch_cmds=None,
        pass_conds=None,
        cry_cmds=None,
        cry_retries=None,
        cry_pretries=None,
        celeb_cmds=None,
        pass_logs=None,
        csv_file_path=None,
        log_while_waiting=None,
        ongoing=None,
        wait_after_celeb=None,
        **kwargs,
    ):
        """
        Sets up class parameters:
        fetch_cmds: commands to run when pass values are invalid. These might be establishing a connection.
        pass_conds: pass conditions as a list of texts. A pass_cond=True always passes.
        In case of [] or None or not passing, cry_cmds will be used to create the verification condition:
        "=" in statements will be replaced by "==", and non-statement commands will be ignored.
        """

        if kwargs:
            # merge with existing
            self.kwargs = {**self.kwargs, **kwargs}

        if wait_after_celeb:
            self.wait_after_celeb = wait_after_celeb

        if ongoing:
            self.ongoing = ongoing

        # every one of the cmds and conds passes this point,
        # so it's best to expand them here

        self.fetch_cmds = expand_pmac_stats(
            fetch_cmds if fetch_cmds else self.fetch_cmds, **self.kwargs)
        self.fetch_cmds_parsed = parse_cmds(self.fetch_cmds)

        self.cry_cmds = expand_pmac_stats(
            cry_cmds if cry_cmds else self.cry_cmds, **self.kwargs)
        self.cry_cmds_parsed = parse_cmds(self.cry_cmds)

        self.celeb_cmds = expand_pmac_stats(
            celeb_cmds if celeb_cmds else self.celeb_cmds, **self.kwargs)
        self.celeb_cmds_parsed = parse_cmds(self.celeb_cmds)

        if cry_retries:
            self.cry_retries = cry_retries

        if cry_pretries:
            self.cry_pretries = cry_pretries

        # TODO fix: this makes an arbitrary pass_cond when cry_cmds are changed
        # via setup, i.e. when no NEW pass_conds are supplied.
        # A dirty fix would be to check whether pass_conds were already parsed,
        # and in that case leave them unchanged.

        if (not self.pass_conds) and (not pass_conds) and (cry_cmds):
            # an empty pass-cond (but not a None) means: check for all of the command statements:
            pass_conds = stats_to_conds(cry_cmds)

        self.pass_conds = expand_pmac_stats(
            pass_conds if pass_conds else self.pass_conds, **self.kwargs)
        self.pass_conds_parsed = parse_stats(self.pass_conds)

        # pass_logs_parsed needs to be fetched with pass_conds_parsed, stored, and logged at celeb.
        self.pass_logs = expand_pmac_stats(
            pass_logs if pass_logs else self.pass_logs, **self.kwargs)
        self.pass_logs_parsed = parse_stats(self.pass_logs)

        if csv_file_path:
            self.csv_file_name = csv_file_path

            # set up the headers; they get written when (and only if) the first set of readings is ready
            if self.pass_logs_parsed:
                headers = ["Time"] + list(list(zip(*self.pass_logs_parsed))[2])
                # remove and reshape special-character headers

                headers = [normalise_header(header) for header in headers]

                self.csvcontent = ",".join(map(str, headers)) + "\n"

                if self.csvcontent and self.csv_file_name:

                    # time_stamp the filename
                    self.csv_file_stamped = utils.time_stamp(
                        self.csv_file_name)

                    # if file exists, make a backup of the existing file
                    # do not leave until the file doesn't exist!
                    n_copies = 0
                    while os.path.exists(self.csv_file_stamped):
                        name, ext = os.path.splitext(self.csv_file_stamped)
                        modif_time_str = time.strftime(
                            "%y%m%d_%H%M",
                            time.localtime(
                                os.path.getmtime(self.csv_file_stamped)),
                        )
                        n_copies_str = f"({n_copies})" if n_copies > 0 else ""
                        try:
                            os.rename(
                                self.csv_file_stamped,
                                f"{name}_{modif_time_str}{n_copies_str}{ext}",
                            )
                        except FileExistsError:
                            # forget it... the file is already archived...
                            # TODO or do we need to be fussy and break execution for this?
                            n_copies += 1

                    open(self.csv_file_stamped, "w+")
            else:
                # self.log_vals = []
                self.csvcontent = None

        if log_while_waiting:
            self.log_while_waiting = log_while_waiting

        # TODO change this crap solution
        # floating digits used for == comparison
        self.ndigits = 6

        super().setup(**self.kwargs)

        return self
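
Note that utils.time_stamp here takes a file name and returns a time-stamped variant of it, unlike the zero-argument time_stamp in the other examples. A minimal sketch of such a helper, with an assumed timestamp format:

import os
import time

def time_stamp(file_name):
    # insert a timestamp before the extension, e.g. data.csv -> data_210314_1527.csv
    name, ext = os.path.splitext(file_name)
    return f"{name}_{time.strftime('%y%m%d_%H%M')}{ext}"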
Code example #17
 def publish_op_log(self, p, c, ret):
     x = {'p': p, 'c': c, 'r': ret, 'tm': time_stamp()}
     if ENCRYPTED_OUTPUT:
         st = Sec()
         x = st.enc_paylaod(x)
     return self.publish(x, topic = OP_LOG_TOPIC)