Example #1
    def changelist_view(self, request, extra_context=None):
        t = Timing('WorklogReport')

        # selected range of months
        year = get_year_param(request)
        month_list = [
            date(year, 1 + i, 1) for i in range(BaseReportAdmin.COLUMNS)
        ]
        stop_date = get_slice_param(request)
        last_date = month_list[-1] + monthdelta(1)
        if stop_date and stop_date > last_date:
            # reset the parameter if a data set in the past was selected
            stop_date = None

        qs = self.get_queryset(request).filter(
            startdate__range=(month_list[0], stop_date or last_date))

        # load the year's worklog and store it on the request
        # (optimization to avoid reloading the frame in the filters)
        worklogframe = WorklogFrame().load(qs)
        # restrict the data set to issues the user actually worked on
        if not request.user.has_perm('jiradata.view_all'):
            worklogframe = worklogframe.filter(author=request.user.username)

        request.worklogframe = worklogframe
        rows = worklogframe.rows()

        # populate the filters in the base class
        response = super().changelist_view(request,
                                           extra_context=extra_context)

        try:
            cl = response.context_data['cl']
            qs = cl.queryset
        except (AttributeError, KeyError):
            return response

        # filter by the selected employee
        user = get_user_param(request)
        worklogframe = worklogframe.filter(author=user)

        # list of working-time norms
        month_norma = calc_month_norma(month_list, stop_date=stop_date)

        seconds = t.step()

        response.context_data['months'] = month_list
        response.context_data['member'] = JiraUser.objects.filter(
            user_name=user).first()
        response.context_data['summary'], response.context_data[
            'total'] = worklogframe.aggr_month_budget(month_list, month_norma)
        response.context_data['norma'] = month_norma
        response.context_data['slice'] = stop_date
        response.context_data['year'] = year
        response.context_data[
            'stat'] = f'{rows} rows processed in {seconds:.2f} s'

        return response
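This view only assumes that Timing(label) records a start time and that t.step() returns the seconds elapsed since it; a minimal sketch compatible with that usage (an assumption, not the WorklogReport project's actual class) might be:

import time

class Timing:
    def __init__(self, label):
        self.label = label                 # free-form label, e.g. 'WorklogReport'
        self._last = time.monotonic()

    def step(self):
        # seconds elapsed since construction (or since the previous step() call)
        now = time.monotonic()
        elapsed, self._last = now - self._last, now
        return elapsed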
Example #2
 def getSatXYZ_m(self, prn, t_start, t_end):
     time = Timing()
     df_ephemeris = pd.DataFrame(self.nav_data, columns=['prn', 'year', 'month', 'day', 'hour', 'min', 'sec',
                                                         'sv_clock_bias', 'sv_clock_drift', 'sv_clock_drift_rate',
                                                         'IODE', 'Crs', 'Delta_n', 'M0', 'Cuc', 'e', 'Cus', 'sqrt_A',
                                                         'Toe', 'Cic', 'OMEGA', 'CIS', 'i0', 'Crc', 'omega',
                                                         'OMEGA_DOT', 'IDOT', 'Codes_L2_channel', 'GPS_week',
                                                         'L2_P_data_flag', 'SV_accuracy', 'SV_health', 'TGD',
                                                         'IODC', 'transmission_time', 'fit_interval'])
     df_ephemeris["UnixGPSTime"] = time.iso_ToGPSUnixTime(df_ephemeris.year, df_ephemeris.month, df_ephemeris.day)
     subDataFrame = df_ephemeris.query('prn == @prn')
Example #3
    def __init__(self, gridsize):
        self._timing = Timing()

        initt = self._timing.routine('simulation setup')

        initt.start('building grid')

        self._initgrid(gridsize)

        self.tiles = {}
        for v in self._grid.faces:
            x, y, z = v
            lat = 180/pi * atan2(z, sqrt(x*x + y*y))
            lon = 180/pi * atan2(y, x)
            self.tiles[v] = Tile(lat, lon)

        for t in self.tiles.values():
            t.emptyocean(self.seafloor())
            t.climate = t.seasons = None

        initt.start('building indexes')
        self.shapes = []
        self.adj = Adjacency(self._grid)
        self._glaciationt = 0
        self.initindexes()
        self.populated = {}
        self.agricultural = set()
        self.fauna = []
        self.plants = []
        self.trees = []
        self._species = None

        initt.done()
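This and the later simulation examples share a phase-timer interface: routine(name) returns an object whose start(phase) closes the previous phase and done() closes the last one. A minimal sketch of that interface (assumed behaviour, not the simulation project's code):

import time

class _Routine:
    def __init__(self, name):
        self.name = name
        self._phase = None
        self._started = None

    def start(self, phase):
        # close the previous phase, if any, and begin a new one
        self._close()
        self._phase, self._started = phase, time.perf_counter()

    def done(self):
        self._close()

    def _close(self):
        if self._phase is not None:
            elapsed = time.perf_counter() - self._started
            print(f"{self.name} / {self._phase}: {elapsed:.3f} s")
            self._phase = None


class Timing:
    def routine(self, name):
        return _Routine(name)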
Example #4
File: bot.py  Project: premivm/forex_python
 def __init__(self):    
     self.log = LogWrapper("TradingBot")
     self.tech_log = LogWrapper("TechnicalsBot")
     self.trade_pairs = Settings.get_pairs()
     self.settings = Settings.load_settings()
     self.api = OandaAPI()
     self.timings = { p: Timing(self.api.last_complete_candle(p, GRANULARITY)) for p in self.trade_pairs }
     self.log_message(f"Bot started with\n{pprint.pformat(self.settings)}")
     self.log_message(f"Bot Timings\n{pprint.pformat(self.timings)}")
Example #5
    def buildDoubleDifferences(self):
        """Construire les doubles differences"""
        coord_sat_tmp = [0.0, 0.0, 0.0]
        t = Timing()
        flag = 0
        # The pivot satellite has to be chosen first
        for i in range(0, self.obs[0].rxm_raw.shape[0]):

            for j in range(0, self.eph.nav_data.shape[0]):

                year = int(self.eph.nav_data[j, 1]) + 2000
                month = int(self.eph.nav_data[j, 2])
                day = int(self.eph.nav_data[j, 3])
                hour = int(self.eph.nav_data[j, 4])
                minute = int(self.eph.nav_data[j, 5])
                sec = int(self.eph.nav_data[j, 6])

                if self.obs[0].rxm_raw[i,7] == self.eph.nav_data[j,0] and math.fabs(t.weekToW_ToGPSUnixTime(
                        self.obs[0].rxm_raw[i,1], self.obs[0].rxm_raw[i,0]) - t.iso_ToGPSUnixTime(
                        year, month, day, hour, minute, sec
                        )) < 7200000:
                    coord_sat_tmp = self.eph.getSatXYZ(int(self.obs[0].rxm_raw[i, 0] / 1000), self.eph.nav_data[j])
                    print(coord_sat_tmp)
                    elevation_deg = float(180 * self.getSatElevation(
                        coord_sat_tmp[0], coord_sat_tmp[1], coord_sat_tmp[2],
                        self.coord_pivot[0], self.coord_pivot[1], self.coord_pivot[2]) / math.pi)
                    if 10 < elevation_deg:

                        print(elevation_deg)
                        print(self.eph.nav_data[j,0])
                        print(str(year)+"/"+str(month)+"/"+str(day)+"/"+str(hour)+"/"+str(minute)+"/"+str(sec))
                        print(str(self.obs[0].rxm_raw[i,0])+"/"+str(self.obs[0].rxm_raw[i,1]))
                        flag = 1
                        break
            if flag == 1:
                break

        for i in range(1, len(self.obs)):
            # Number of DDs = number of fixed cubes x number of mobile cubes
            print("OK2")
Example #6
 def parse_timings(self, time_dict:dict):
     timings = []
     for key in time_dict.keys():
         timing = Timing(
             test_uuid=self.uuid,
             function_name=key,
             function_id=self.find_lambda_id(key),
             total_time=time_dict[key]['total_time'],
             exe_time=time_dict[key]['exe_time'],
             latency=time_dict[key]['latency'],
             memory_limit=time_dict[key]['memory'],
             log_stream_name=self.parse_log_stream_name(time_dict[key]['log_stream_name'])
         )
         timings.append(timing)
     return timings
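Here Timing is used as a plain record built from keyword arguments; if it is not an ORM model, a dataclass with the same fields would behave equivalently (the field types below are guesses):

from dataclasses import dataclass
from typing import Optional

@dataclass
class Timing:
    test_uuid: str
    function_name: str
    function_id: Optional[str]
    total_time: float
    exe_time: float
    latency: float
    memory_limit: int
    log_stream_name: str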
Example #7
File: log.py  Project: waaaaaaz/TestRelay2
class Log(object):
    # Define logfile directory
    log_dir = os.path.join(PROJECT_PATH, "logs")

    # Define default logfile format.

    file_name_format = Timing.get_current_time_for_log()
    console_msg_format = '%(asctime)s %(levelname)-8s: %(message)s'
    file_msg_format = '%(asctime)s %(levelname)-8s: %(message)s'

    # Define the log level
    log_level = logging.INFO

    @staticmethod
    def logger(logger_name=None):
        # Create the root logger.
        logger = logging.getLogger(logger_name)
        logger.setLevel(Log.log_level)

        # Validate the given directory.
        Log.log_dir = os.path.normpath(Log.log_dir)

        # Create a folder for the logfile.
        TXT.make_dir(Log.log_dir)

        # Build the logfile name
        filename = Log.file_name_format + ".log"
        filename = os.path.join(Log.log_dir, filename)

        # Set up logging to the logfile
        file_handler = RotatingFileHandler(
            filename=filename
            # ,maxBytes=Log.max_bytes, backupCount=Log.backup_count
        )
        file_handler.setLevel(Log.log_level)
        file_formatter = logging.Formatter(Log.file_msg_format)
        file_handler.setFormatter(file_formatter)
        logger.addHandler(file_handler)

        # Set up logging to console
        stream_handler = logging.StreamHandler()
        stream_handler.setLevel(Log.log_level)
        stream_formatter = logging.Formatter(Log.console_msg_format)
        stream_handler.setFormatter(stream_formatter)
        logger.addHandler(stream_handler)

        return logger
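Log only relies on Timing.get_current_time_for_log() returning a timestamp string that is safe to use in a file name; a plausible sketch (an assumption, not the TestRelay2 implementation):

from datetime import datetime

class Timing:
    @staticmethod
    def get_current_time_for_log():
        # e.g. '2024-01-31_13-45-07' -- avoids ':' so the name is valid on all platforms
        return datetime.now().strftime("%Y-%m-%d_%H-%M-%S")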
Example #8
        self.set_scene_coords_projection()

    def finish_drag(self):
        if not self._is_dragging:
            return

        self._viewport_fixed_center = self.viewport_center
        self._is_dragging = False
        self.set_scene_coords_projection()

    def cancel_drag(self):
        self._is_dragging = False
        self.set_scene_coords_projection()


timing = Timing()

timing.set_value('main_opacity', 1.0)
timing.set_value('main_wireframe_color', Interface.HIDDEN_WIREFRAME_COLOR)
timing.set_value('point_border_color', Interface.HIDDEN_POINT_BORDER_COLOR)
timing.set_value('point_fill_color', Interface.HIDDEN_POINT_FILL_COLOR)
timing.set_value('target_wireframe_color', Interface.VISIBLE_TARGET_COLOR)

interface = Interface()
context = Context(config=config, interface=interface, timing=timing)


def display():
    timing.update_time()

    glClearColor(0.1, 0.1, 0.1, 1)
Example #9
def main(plpy, args: str):
    # some config parameter
    # the map is passed down to the functions
    config = {'src': '/benchmark/sql',
              'dataset': 'tpch',
              'query_types': ['bv', 'cv', 'dv', 'ev', 'fv'],
              'queries': ['q1', 'q3', 'q6', 'q15', 'q20'],
              'batch_size': 1000,
              'max_batch_size': 10000}

    # perform some basic argument parsing
    args = args.split()
    operation = args[0]

    timing = Timing(config)
    db = Database(plpy, timing)

    # load the TPC-H relations
    if operation == 'setup':
        SetupPublic(db, config).execute()
        db.commit()

    # create auxiliary tables, views and functions for the given maintenance approach
    elif operation == 'setup_query' and len(args) == 3:
        clear_query(db, config, args[1], args[2])
        setup_query(db, config, args[1], args[2])

    # check for correctness of the given query
    elif operation == 'compare' and len(args) == 2:
        compare(db, config, args[1])

    # check correctness of all available queries
    elif operation == 'compare_all' and len(args) == 2:
        config['batch_size'] = int(args[1])
        for query in config['queries']:
            compare(db, config, query)

    # benchmark the given query for the obtained batch_size
    elif operation == 'benchmark' and len(args) == 3:
        benchmark(db, config, args[1], args[2], True)
        timing.save(db)

    # benchmark all queries and all maintenance approaches for the given batch size
    elif operation == 'benchmark_all' and len(args) == 3:
        config['batch_size'] = int(args[2])
        for query_type in config['query_types']:
            for query in config['queries']:

                # warmup (discard first three iterations)
                benchmark(db, config, query_type, query, False)
                benchmark(db, config, query_type, query, False)
                benchmark(db, config, query_type, query, False)

                for i in range(int(args[1])):
                    benchmark(db, config, query_type, query, True)

        # write execution times to the database
        timing.save(db)

    # clear everything, including TPC-H relations
    elif operation == 'clear':
        for query_type in config['query_types']:
            for query in config['queries']:
                clear_query(db, config, query_type, query)
        ClearPublic(db, config).execute()
        db.commit()

    else:
        raise RuntimeError('Missing arguments!')
Example #10
File: main.py  Project: caoanle13/Rise
# temperature
from temperature_sensor import TemperatureSensor

temperature_data = []
temperature = TemperatureSensor()

# humidity
from temperature_sensor import HumiditySensor

humidity_data = []
humidity = HumiditySensor()

# Timing set up
time_data = []
t = Timing()

# mqtt setup
import paho.mqtt.client as mqtt

piTopic = "IC.embedded/tEEEm/TO_PI"
appTopic = "IC.embedded/tEEEm/TO_APP"

# constants on piTopic
SPEECH_TRIGGER = 0
TIME_SET = 1
SUNRISE = 0
AT = 1
ASK_RESULTS = 2
RECEIVED_START_ALARM = 3
RECEIVED_STOP_ALARM = 4
Example #11
    def __init__(self, r, dt):
        """Create a simulation for a planet of radius r km and timesteps of dt
        million years.
        """

        self._timing = Timing()

        initt = self._timing.routine('simulation setup')

        # max speed is 100km per million years
        self._dp = 100.0/r * dt

        self._build = dt/5.0
        self._erode = dt

        tilearea = 4 * pi * r**2

        initt.start('building grid')

        grid = Grid()
        while grid.size < 6:
            grid = Grid(grid)
            grid.populate()
        self._grid = grid

        self.tiles = {}
        for v in self._grid.faces:
            x, y, z = v
            lat = 180/pi * atan2(y, sqrt(x*x + z*z))
            lon = 180/pi * atan2(-x, z)
            self.tiles[v] = Tile(lat, lon)

        initt.start('building indexes')

        self.initindexes()

        initt.start('creating initial landmass')

        tilearea /= len(self._indexedtiles)

        # the numerator of the split probability, where
        # the number of tiles in the shape is the denominator:
        # a 50M km^2 continent has a 50/50 chance of splitting in a given step
        self._splitnum = 25e6/tilearea

        # initial location
        p = (0, 1, 0)

        # 0 velocity vector
        v = (0, 0, 0)

        # orienting point
        o = (1, 0, 0)

        r = 1.145
        shape = [(r*random.uniform(0.9,1.1)*cos(th),
                  r*random.uniform(0.9,1.1)*sin(th))
                 for th in [i*pi/8 for i in range(16)]]

        shape = Shape(shape, p, o, v).projection()

        self._shapes = [Group([t for t in self.tiles.values() if shape.contains(t.vector)], v)]

        # initial landmass starts at elevation based on distance from center
        c = self._indexedtiles[self._index.nearest(p)[0]]
        r2 = r*r

        # on land, one random tile is the center of a felsic chunk
        f = random.choice(self._shapes[0].tiles)

        for t in self._indexedtiles:
            if t in self._shapes[0].tiles:
                dc = t.distance(c)
                df = t.distance(f)

                r = igneous.extrusive(max(0.5, 1 - df*df/r2))
                h = 1 - dc*dc/r2

                t.emptyland(r, h)
            else:
                t.emptyocean(self.seafloor())

        for t in self.tiles.values():
            t.climate = None

        initt.done()

        self._atmosphere = self._life = False

        self._climatemappings = {}
        self._climateprof = None

        self.dirty = True
Example #12
File: main.py  Project: majojos/timing
"""
Example usage of module timing
"""

from timing import Timing
import time

my_outer_process = Timing("My outer process")
my_inner_process = Timing("My inner process")

my_outer_process.start()
time.sleep(2)

my_inner_process.start()
time.sleep(2)
my_inner_process.stop()

time.sleep(1)
my_outer_process.stop()

#Output:
# 'My outer process' started.
# 'My inner process' started.
# 'My inner process' finished.
# ++++++++++++SUMMARY+EXECUTION+TIME+'My inner process'+++++++++++++
# Elapsed time 'My inner process':0 min, 2.000 sec
# 'My outer process' finished.
# ++++++++++++SUMMARY+EXECUTION+TIME+'My outer process'+++++++++++++
# Elapsed time 'My outer process':0 min, 5.000 sec
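The commented output implies a named start()/stop() timer that prints a banner and the elapsed time when stopped, with nested timers kept independent; a minimal sketch reproducing that behaviour (assumed, not necessarily the majojos/timing implementation):

import time

class Timing:
    def __init__(self, name):
        self.name = name
        self._started = None

    def start(self):
        self._started = time.monotonic()
        print(f"'{self.name}' started.")

    def stop(self):
        elapsed = time.monotonic() - self._started
        minutes, seconds = divmod(elapsed, 60)
        print(f"'{self.name}' finished.")
        print(f"++++++++++++SUMMARY+EXECUTION+TIME+'{self.name}'+++++++++++++")
        print(f"Elapsed time '{self.name}':{int(minutes)} min, {seconds:.3f} sec")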
Example #13
def app(unused_argv):
    tf.logging.debug("Starting app")

    # Start action server
    action_server = ActionServer()
    action_server.start()

    # Init midi ports, keep direct references to output_ports for
    # direct sending without the hub player
    if platform.system() == "Windows":
        input_ports = [
            port for port in midi_hub.get_available_input_ports()
            if MIDI_INPUT_PORT in port
        ]
        output_ports = [
            port for port in midi_hub.get_available_output_ports()
            if MIDI_OUTPUT_PORT in port
        ]
        if len(input_ports) != 1 or len(output_ports) != 1:
            raise Exception(f"Need exactly 1 midi input ({input_ports}) "
                            f"matching {MIDI_INPUT_PORT} "
                            f"and 1 midi output port ({output_ports}) "
                            f"matching {MIDI_OUTPUT_PORT}, "
                            f"you can use LoopMIDI for that")
    else:
        input_ports = [MIDI_INPUT_PORT]
        output_ports = [MIDI_OUTPUT_PORT]
    hub = midi_hub.MidiHub(input_ports, output_ports, None)
    output_port = hub._outport.ports[0]

    # Panic to stop all current messages (note off everywhere)
    [output_port.send(message) for message in mido.ports.panic_messages()]

    # Synchronise event for all the loopers, controlled by the metronome
    bar_start_event = threading.Event()

    # Common stuff
    qpm = 80
    timing = Timing(qpm)

    loopers = []
    try:
        # Init and start the loopers, they block on the event
        drum_looper = SequenceLooper("drums",
                                     bar_start_event,
                                     action_server,
                                     hub,
                                     "drum_kit_rnn",
                                     "drum_kit",
                                     timing,
                                     midi_channel=9,
                                     bar_per_loop=2)
        melody_looper = SequenceLooper("melody",
                                       bar_start_event,
                                       action_server,
                                       hub,
                                       "attention_rnn",
                                       "attention_rnn",
                                       timing,
                                       midi_channel=0,
                                       bar_per_loop=4)

        loopers.append(drum_looper)
        loopers.append(melody_looper)
        [looper.start() for looper in loopers]

        tf.logging.debug("Loopers started " +
                         str([("drum_looper", drum_looper),
                              ("melody_looper", melody_looper)]))

        # Start metronome (wait to make sure everything is started)
        time.sleep(1)
        metronome = Metronome(bar_start_event, timing)
        loopers.append(metronome)
        metronome.start()

        tf.logging.debug("Metronome started " +
                         str([("metronome", metronome)]))

        # Wait for the loopers
        [looper.join() for looper in loopers]
    except KeyboardInterrupt:
        print("SIGINT received, stopping action server, loopers and stuff")
        action_server.stop()
        [looper.stop() for looper in loopers]
        return 1

    return 0
Example #14
        if confidence > args["confidence"]:
            # compute the coordinates of the bounding box for the detection
            box = detections[0, 0, i, 3:7] * np.array([w, h, w, h])
            (startX, startY, endX, endY) = box.astype("int")

            # draw the bounding box and display the confidence
            text = "{:.2f}%".format(confidence * 100)
            y = startY - 10 if startY - 10 > 10 else startY + 10
            cv2.rectangle(image, (startX, startY), (endX, endY), (0, 0, 255),
                          2)
            cv2.putText(image, text, (startX, y), cv2.FONT_HERSHEY_DUPLEX,
                        0.45, (0, 0, 255), 2)
    # cv2.imshow("Output", image)
    cv2.imwrite(f"./outC/detected_{os.path.splitext(image_path)[0]}.jpg",
                image)
    # cv2.imwrite(f"./outC/detected_{image_path}", image)


timer = Timing("1 image test")
detect_faces(images1)
timer.end_log()

timer = Timing("10 image test")
for i in images10:
    detect_faces(i)
timer.end_log()

timer = Timing("100 image test")
for i in images:
    detect_faces(i)
timer.end_log()
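These benchmarks (and the video-stream loop in a later example) only need Timing(label) to capture a start time and end_log() to print the elapsed time; a compatible sketch (an assumption, not the project's code):

import time

class Timing:
    def __init__(self, label):
        self.label = label
        self._start = time.perf_counter()

    def end_log(self):
        print(f"{self.label}: {time.perf_counter() - self._start:.3f} s")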
Example #15
# -*- coding: utf-8 -*-
import pytz
from datetime import datetime

import redis

from timing import Timing
from .models import TimingTask
from . import models
time_task = Timing()

# Priority calculation


class TimingPriority():
    def calculate(self, name, type, priority, setime, timing_number,
                  timing_type, url, start_minutes, end_minutes):
        # calculate the priority
        level = 0
        # e-commerce site types get extra priority
        if type == "jd_products" or type == "tb_products":
            level += 2
        else:
            level += 0
        # add in the task's own priority
        level = level + int(priority)

        # compute the current time
        tz = pytz.timezone('Asia/Shanghai')
        now = datetime.now(tz)
        now_time = now.strftime("%m/%d/%Y")
Example #16
class LifeformsSimulation(object):
    mean_temprange = (-25.0, 50.0)
    seasons = [-1, -0.5, 0, 0.5, 1, 0.5, 0, -0.5]

    def __init__(self, gridsize):
        self._timing = Timing()

        initt = self._timing.routine('simulation setup')

        initt.start('building grid')

        self._initgrid(gridsize)

        self.tiles = {}
        for v in self._grid.faces:
            x, y, z = v
            lat = 180/pi * atan2(z, sqrt(x*x + y*y))
            lon = 180/pi * atan2(y, x)
            self.tiles[v] = Tile(lat, lon)

        for t in self.tiles.values():
            t.emptyocean(self.seafloor())
            t.climate = t.seasons = None

        initt.start('building indexes')
        self.shapes = []
        self.adj = Adjacency(self._grid)
        self._glaciationt = 0
        self.initindexes()
        self.populated = {}
        self.agricultural = set()
        self.fauna = []
        self.plants = []
        self.trees = []
        self._species = None

        initt.done()

    def _initgrid(self, gridsize):
        grid = Grid()
        while grid.size < gridsize:
            grid = Grid(grid)
            grid.populate()
        self._grid = grid

    def initindexes(self):
        self._indexedtiles = []
        for t in self.tiles.values():
            self._indexedtiles.append(t)

        self._tileadj = dict()
        for v in self._grid.faces:
            self._tileadj[self.tiles[v]] = set([self.tiles[nv] for nv in self.adj[v]])

        self._index = PointTree(dict([[self._indexedtiles[i].vector, i]
                                      for i in range(len(self._indexedtiles))]))

    def nearest(self, loc):
        return self._indexedtiles[self._index.nearest(loc)[0]]

    @property
    def grid(self):
        return self._grid

    def classify(self):
        c = climate(self.tiles, self.adj, self.seasons, self.cells, self.spin, self.tilt, temprange(self.mean_temprange, self.glaciation), self.glaciation, True, {})
        for v, tile in self.tiles.items():
            tile.climate = c[v]['classification']
            tile.seasons = c[v]['seasons']

    def settle(self):
        timing = self._timing.routine('settling species')
        timing.start('classifying climate')
        self.classify()

        self.fauna = []
        self.plants = []
        self.trees = []
        lifeformsmethod.settle(self.fauna, self.plants, self.trees, self.tiles, self.adj, timing)

        self._species = None
        timing.done()

    def species(self):
        if not self._species:
            timing = self._timing.routine('indexing species')
            types = self.fauna, self.plants, self.trees
            pops = [{} for _ in types]
            for t in range(len(types)):
                p = pops[t]
                for s in types[t]:
                    for f, ss in s.seasonalrange(len(self.seasons)).items():
                        for i in ss:
                            if f not in p:
                                p[f] = [set() for _ in self.seasons]
                            p[f][i].add(s)
            self._species = pops
            timing.done()
        return self._species

    @property
    def glaciation(self):
        return self._glaciation if hasattr(self, '_glaciation') else 0.5

    @glaciation.setter
    def glaciation(self, value):
        self._glaciation = value
        self.settle()

    @staticmethod
    def seafloor():
        return igneous.extrusive(0.5)

    def loaddata(self, data):
        random.setstate(data['random'])
        self._initgrid(data['gridsize'])
        self.spin, self.cells, self.tilt = [data[k] for k in ['spin', 'cells', 'tilt']]
        self.tiles = data['tiles']
        self.shapes = data['shapes']
        self.populated = data['population']
        self.agricultural = data['agricultural']
        self._glaciationt = data['glaciationtime']
        self.initindexes()
        self.settle()

    def load(self, filename):
        self.loaddata(Data.load(filename))

    def savedata(self):
        return Data.savedata(random.getstate(), self._grid.size, 0, self.spin, self.cells, self.tilt, None, None, None, self.tiles, self.shapes, self._glaciationt, self.populated, self.agricultural, True, True, False, [], {}, {}, [], {}, {}, [], [])

    def save(self, filename):
        Data.save(filename, self.savedata())
Example #17
class PrehistorySimulation(object):
    coastprox = 2
    range = 6
    seasons = [-1, -0.5, 0, 0.5, 1, 0.5, 0, -0.5]
    mean_temprange = (-25.0, 50.0)
    minriverelev = 5
    minriverprecip = 0.5
    glaciationstep = 16
    anthroglacial = 6

    def __init__(self, gridsize, spin, cells, tilt):
        self._timing = Timing()

        initt = self._timing.routine('simulation setup')

        self.spin, self.cells, self.tilt = spin, cells, tilt

        initt.start('building grid')

        self._initgrid(gridsize)

        self.tiles = {}
        for v in self._grid.faces:
            x, y, z = v
            lat = 180/pi * atan2(z, sqrt(x*x + y*y))
            lon = 180/pi * atan2(y, x)
            self.tiles[v] = Tile(lat, lon)

        for t in self.tiles.values():
            t.emptyocean(self.seafloor())
            t.climate = t.seasons = None
            t.candidate = False

        initt.start('building indexes')
        self.shapes = []
        self.adj = Adjacency(self._grid)
        self._glaciationt = 0
        self.initindexes()
        self.populated = {}
        self.agricultural = set()

        initt.done()

    def _initgrid(self, gridsize):
        grid = Grid()
        while grid.size < gridsize:
            grid = Grid(grid)
            grid.populate()
        self._grid = grid

    def initindexes(self):
        self._indexedtiles = []
        for t in self.tiles.values():
            self._indexedtiles.append(t)

        self._tileadj = dict()
        for v in self._grid.faces:
            self._tileadj[self.tiles[v]] = set([self.tiles[nv] for nv in self.adj[v]])

        self._index = PointTree(dict([[self._indexedtiles[i].vector, i]
                                      for i in range(len(self._indexedtiles))]))

    def nearest(self, loc):
        return self._indexedtiles[self._index.nearest(loc)[0]]

    def newrace(self):
        # TODO make unique
        vs, cs = phonemes()
        name = output.write(random.choice(list(lexicon(vs, cs, round(random.gauss(-0.5, 1)), 0.5, 0.5, None, 1000))))
        return name[0].upper() + name[1:]

    def update(self):
        stept = self._timing.routine('simulation step')

        stept.start('identifying glaciers')
        gs = [sum([1 for t in s.tiles if t.climate and t.climate.koeppen[0] == u'E']) for s in self.shapes]

        stept.start('iterating climate')
        glaciation = 0.5 - math.cos(self._glaciationt*math.pi/self.glaciationstep)/2
        c = climate(self.tiles, self.adj, self.seasons, self.cells, self.spin, self.tilt, temprange(self.mean_temprange, glaciation), glaciation, True, {})
        for v, tile in self.tiles.items():
            tile.climate = c[v]['classification']
            tile.seasons = c[v]['seasons']
            if not habitable(tile) and tile in self.populated:
                del self.populated[tile]

        stept.start('applying isostasy')
        for s, g in zip(self.shapes, gs):
            dg = sum([1 for t in s.tiles if t.climate and t.climate.koeppen[0] == u'E']) - g
            dh = 0.6 * dg / len(s.tiles)
            for t in s.tiles:
                t.isostasize(dh)

        self._glaciationt += 1

        if not self.populated:
            stept.start('genesis')
            self.populated = eden(self.tiles, self._tileadj, self.newrace())

        stept.start('running rivers')
        rivers = run(self.tiles.values(), self._tileadj, self.minriverelev, self.minriverprecip)

        stept.start('sparking agriculture')
        gfactor = math.pow(glaciation, 2)  # Agriculture more likely in interglacial period
        for r in rivers:
            for t in r:
                if t in self.populated and t not in self.agricultural and random.random() < gfactor * agprob(t.climate.koeppen):
                    self.agricultural.add(self.populated[t])

        popcache = {}
        for i in range(self.anthroglacial):
            stept.start('migration {}'.format(i))
            if not expandpopulation(rivers, self._tileadj, self.populated, self.agricultural, self.range, self.coastprox, popcache):
                break
        stept.start('identifying distinct populations')
        racinate(self.tiles.values(), self._tileadj, self.populated, self.newrace, self.agricultural, self.range)

        stept.done()

    @property
    def grid(self):
        return self._grid

    @property
    def peoples(self):
        return len({p for p in self.populated.values()})

    @staticmethod
    def seafloor():
        return igneous.extrusive(0.5)

    def loaddata(self, data):
        random.setstate(data['random'])
        self._initgrid(data['gridsize'])
        self.spin, self.cells, self.tilt = [data[k] for k in ['spin', 'cells', 'tilt']]
        self.tiles = data['tiles']
        self.shapes = data['shapes']
        self.populated = data['population']
        self.agricultural = data['agricultural']
        self._glaciationt = data['glaciationtime']
        self.initindexes()

    def load(self, filename):
        self.loaddata(Data.load(filename))

    def savedata(self):
        return Data.savedata(random.getstate(), self._grid.size, 0, self.spin, self.cells, self.tilt, None, None, None, self.tiles, self.shapes, self._glaciationt, self.populated, self.agricultural, True, True, False, [], {}, {}, [], {}, {}, [], [])

    def save(self, filename):
        Data.save(filename, self.savedata())
Example #18
import pygame
from pygame.locals import *

from kernels import *
from vector import Vec

#from timing import print_timing
from timing import Timing
timings = Timing()


#@print_timing
@timings
def density_update(sphp, particles):
    #brute force
    for pi in particles:
        pi.dens = 0.
        for pj in particles:
            r = pi.pos - pj.pos
            #print r
            if mag(r) > pi.h: continue
            #pi.dens += pj.mass*Wpoly6(pi.h, r)
            pi.dens += pj.mass * sphp.kernels.poly6(r)


#@print_timing
@timings
def force_update(sphp, particles):
    #brute force
    rho0 = sphp.rho0
    K = sphp.K
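Using the instance as a decorator (@timings) only requires Timing() to be callable on a function and to return a wrapper; a sketch of such a per-function accumulator (the project may implement it differently):

import time
from collections import defaultdict
from functools import wraps

class Timing:
    def __init__(self):
        self.totals = defaultdict(float)       # function name -> accumulated seconds

    def __call__(self, func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            start = time.perf_counter()
            try:
                return func(*args, **kwargs)
            finally:
                self.totals[func.__name__] += time.perf_counter() - start
        return wrapper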
Example #19
from light.http_light_writer import HttpLightWriter
from light.light_timing import LightStepListener
from light.priority_light_writer import PriorityLightWriterFactory
from timing import Timing
from midi.midi_output import MidiOutputBpm, MidiOutputTime
import time
import midi.midi_bindings as bindings
from midi.midi_input import *


DEFAULT_IP = "192.168.1.72"
DEFAULT_PORT = 80
DEFAULT_GPIO = 12

if __name__ == "__main__":
    timing = Timing(4 * 8, 1.0 / 4.0)
    midi_binding = bindings.APC_KEY_25

    light_writer = HttpLightWriter(DEFAULT_IP, DEFAULT_PORT, DEFAULT_GPIO)
    priority_light_writer_factory = PriorityLightWriterFactory(light_writer)

    light_step_listener = LightStepListener(timing, priority_light_writer_factory.low())

    output_steps = MidiOutputTime(timing, midi_binding)
    output_bpm = MidiOutputBpm(timing, midi_binding)
    output_bpm.start_bpm_thread()

    input_steps = MidiInputSteps(timing, midi_binding)
    input_steps.start_listening()

    generic_midi_input = MidiGenericInputListener(midi_binding)
Example #20
class PlanetSimulation(object):
    temprange = (-25.0, 50.0)

    def __init__(self, r, gridsize, spin, cells, tilt, landr, dt, atmdt, lifedt):
        """Create a simulation for a planet with the given characteristics. """

        self._timing = Timing()

        initt = self._timing.routine('simulation setup')

        self.spin, self.cells, self.tilt = spin, cells, tilt

        # max speed is 100km per million years
        self._dp = 100.0/r * dt

        self._build = dt/5.0
        self._erode = dt

        tilearea = 4 * pi * r**2

        initt.start('building grid')

        self._initgrid(gridsize)

        self.tiles = {}
        for v in self._grid.faces:
            x, y, z = v
            lat = 180/pi * atan2(z, sqrt(x*x + y*y))
            lon = 180/pi * atan2(y, x)
            self.tiles[v] = Tile(lat, lon)

        initt.start('building indexes')

        self.initindexes()

        initt.start('creating initial landmass')

        tilearea /= len(self._indexedtiles)

        # the numerator of the split probability, where
        # the number of tiles in the shape is the denominator:
        # a 50M km^2 continent has a 50/50 chance of splitting in a given step
        self._splitnum = 25e6/tilearea

        # initial location
        p = [random.uniform(-1, 1) for i in range(3)]
        p /= norm(p)

        # 0 velocity vector
        v = (0, 0, 0)

        # orienting point
        o = [0, 0, 0]
        mini = min(range(len(p)), key=lambda i: abs(p[i]))
        o[mini] = 1 if p[mini] < 0 else -1

        shape = SphericalPolygon([rotate(rotate(p, o, landr*random.uniform(0.9,1.1)), p, th)
                                  for th in [i*pi/8 for i in range(16)]])

        self._shapes = [Group([t for t in self.tiles.values() if shape.contains(t.vector)], v)]

        # initial landmass starts at elevation based on distance from center
        c = self._indexedtiles[self._index.nearest(p)[0]]
        r2 = landr*landr

        # on land, one random tile is the center of a felsic chunk
        f = random.choice(self._shapes[0].tiles)

        for t in self._indexedtiles:
            if t in self._shapes[0].tiles:
                dc = t.distance(c)
                df = t.distance(f)

                r = igneous.extrusive(max(0.5, 1 - df*df/r2))
                h = 1 - dc*dc/r2

                t.emptyland(r, h)
            else:
                t.emptyocean(self.seafloor())

        for t in self.tiles.values():
            t.climate = t.seasons = None

        initt.done()

        self._atmosphereticks = atmdt / dt
        self._lifeticks = lifedt / dt

        self._climatemappings = {}
        self._climateprof = None

        self.dirty = True

    def _initgrid(self, gridsize):
        grid = Grid()
        while grid.size < gridsize:
            grid = Grid(grid)
            grid.populate()
        self._grid = grid

    @property
    def grid(self):
        return self._grid

    @property
    def hasatmosphere(self):
        return self._atmosphereticks == 0

    @property
    def haslife(self):
        return self._lifeticks == 0

    @staticmethod
    def seafloor():
        return igneous.extrusive(0.5)

    def initindexes(self):
        self._indexedtiles = []
        for t in self.tiles.values():
            self._indexedtiles.append(t)

        self.adj = Adjacency(self._grid)
                
        self._tileadj = dict()
        for v in self._grid.faces:
            self._tileadj[self.tiles[v]] = set([self.tiles[nv] for nv in self.adj[v]])
       
        self._index = PointTree(dict([[self._indexedtiles[i].vector, i]
                                      for i in range(len(self._indexedtiles))]))

    def nearest(self, loc):
        return self._indexedtiles[self._index.nearest(loc)[0]]

    @property
    def continents(self):
        return len(self._shapes)

    @property
    def land(self):
        return int(100.0*sum([len(s.tiles) for s in self._shapes])/len(self._indexedtiles) + 0.5)

    def loaddata(self, data):
        random.setstate(data['random'])
        self._initgrid(data['gridsize'])
        self.spin, self.cells, self.tilt = [data[k] for k in ['spin', 'cells', 'tilt']]
        self._dp = data['dp']
        self._build = data['build']
        self._splitnum = data['splitnum']
        self.tiles = data['tiles']
        self._shapes = data['shapes']
        self._atmosphereticks = data['atmt']
        self._lifeticks = data['lifet']

        self.initindexes()
        self.dirty = True

    def load(self, filename):
        self.loaddata(Data.load(filename))

    def savedata(self):
        return Data.savedata(random.getstate(), self._grid.size, 0, self.spin, self.cells, self.tilt, self._dp, self._build, self._splitnum, self.tiles, self._shapes, 0, {}, set(), self._atmosphereticks, self._lifeticks, False, [], {}, {}, [], {}, {}, [], [])

    def save(self, filename):
        Data.save(filename, self.savedata())

    def update(self):
        """Update the simulation by one timestep."""

        stept = self._timing.routine('simulation step')

        stept.start('determining tile movements')

        old = set([t for shape in self._shapes for t in shape.tiles])
        new = dict()

        overlapping = {}
        for t in self._indexedtiles:
            overlapping[t] = []

        for i in range(len(self._shapes)):
            speed = norm(self._shapes[i].v)
            group, v = move(self._indexedtiles,
                            self._shapes[i].tiles,
                            self._shapes[i].v,
                            self._tileadj,
                            self._index)
            self._shapes[i] = Group(list(group.keys()), v)
            for dest, sources in group.items():
                if dest in new:
                    new[dest].append(TileMovement(sources, speed))
                else:
                    new[dest] = [TileMovement(sources, speed)]
                overlapping[dest].append(i)

        stept.start('applying tile movements')

        collisions = {}

        newe = {}

        seen = set()
        for dest, movements in new.items():
            # get all the source tiles contributing to this one
            newe[dest] = NextTileValue(movements)
            if not dest in seen:
                try:
                    old.remove(dest)
                except KeyError:
                    # calculate the amount to build up the leading edge
                    newe[dest].build(self._build * sum([m.speed for m in movements])/len(movements))
                seen.add(dest)

            for pair in combinations(overlapping[dest], 2):
                if pair in collisions:
                    collisions[pair] += 1
                else:
                    collisions[pair] = 1

        # apply the new values
        for t, e in newe.items():
            e.apply(t)

        # clear out abandoned tiles
        for t in old:
            t.emptyocean(self.seafloor())

        # record each continent's total pre-erosion above-sea size
        heights = [sum([t.elevation for t in s.tiles]) for s in self._shapes]

        if self.hasatmosphere:
            stept.start('"simulating" climate')

            seasons = [0.1*v for v in list(range(-10,10,5)) + list(range(10,-10,-5))]
            c = climate(self.tiles, self.adj, seasons, self.cells, self.spin, self.tilt, self.temprange, 0.5, self.haslife, self._climatemappings, self._climateprof)

            if self._climateprof:
                self._climateprof.dump_stats('climate.profile')

            for v, tile in self.tiles.items():
                tile.climate = c[v]['classification']
                tile.seasons = c[v]['seasons']

            stept.start('determining erosion')

            erosion = erode(self.tiles, self.adj)

            for t in self.tiles.values():
                t.erode(erosion, self._erode)

            for t in self.tiles.values():
                # if the tile is in at least one shape, apply the erosion materials
                if len(overlapping[t]) > 0:
                    if len(erosion[t].materials) > 0:
                        t.deposit(sedimentary.deposit(erosion[t].materials, self.haslife, False, t.climate))
                # otherwise, require a certain threshold
                elif sum([m.amount for m in erosion[t].materials]) > 1.5:
                    t.deposit(sedimentary.deposit(erosion[t].materials, self.haslife, True, t.climate))
                    sourceshapes = set()
                    for e in erosion[t].sources:
                        for shape in overlapping[e]:
                            sourceshapes.add(shape)
                    for s in sourceshapes:
                        if not t in self._shapes[s].tiles:
                            self._shapes[s].tiles.append(t)
                    overlapping[t] = list(sourceshapes)
            if self._lifeticks:
                self._lifeticks -= 1
        else:
            self._atmosphereticks -= 1

        stept.start('applying isostatic effects')

        for s, h in zip(self._shapes, heights):
            dh = (h - sum([t.elevation for t in s.tiles]))/float(len(s.tiles))
            for t in s.tiles:
                t.isostasize(dh)

        stept.start('performing random intrusions')

        for t in self.tiles.values():
            if t.subduction > 0:
                if random.random() < 0.1:
                    t.intrude(igneous.intrusive(max(0, min(1, random.gauss(0.85, 0.15)))))
                    t.transform(metamorphic.contact(t.substance[-1], t.intrusion))

        stept.start('applying regional metamorphism')

        for t in self.tiles.values():
            t.transform(metamorphic.regional(t.substance[-1], t.subduction > 0))

        for t in self.tiles.values():
            t.cleartemp()

        stept.start('merging overlapping shapes')

        # merge shapes that overlap a lot
        groups = []
        for pair, count in collisions.items():
            if count > min([len(self._shapes[i].tiles) for i in pair])/10:
                for group in groups:
                    if pair[0] in group:
                        group.add(pair[1])
                        break
                    elif pair[1] in group:
                        group.add(pair[0])
                        break
                else:
                    groups.append(set(pair))

        gone = []
        for group in groups:
            largest = max(group, key=lambda i: len(self._shapes[i].tiles))
            tiles = list(self._shapes[largest].tiles)
            v = array(self._shapes[largest].v) * len(tiles)
            for other in group:
                if other != largest:
                    v += array(self._shapes[other].v) * len(self._shapes[other].tiles)
                    tiles += self._shapes[other].tiles
                    gone.append(self._shapes[other])
            self._shapes[largest].tiles = list(set(tiles))
            v /= len(tiles)
            self._shapes[largest].v = v
        for s in gone:
            self._shapes.remove(s)

        stept.start('randomly splitting shapes')

        # occasionally split big shapes
        for i in range(len(self._shapes)):
            if random.uniform(0,1) > self._splitnum / len(self._shapes[i].tiles):
                self._shapes[i:i+1] = [Group(ts, self._shapes[i].v + v * self._dp)
                                       for ts, v in split(self._shapes[i].tiles)]

        stept.done()

        self.dirty = True
Example #21
from imutils.video import VideoStream
import imutils
import time
import cv2
from timing import Timing

print("[INFO] loading cascade file stream...")
haarcascade_path = cv2.data.haarcascades + "haarcascade_frontalface_default.xml"
face_cascade = cv2.CascadeClassifier(haarcascade_path)
print("[INFO] starting video stream...")
vs = VideoStream(usePiCamera=True).start()
time.sleep(2.0)

frame_count = 0
while True:
    timer = Timing(f"Frame {frame_count} timing")
    # grab the current frame of video and resize it to 400px wide
    frame = vs.read()
    frame = imutils.resize(frame, width=400)

    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    faces = face_cascade.detectMultiScale(gray, 1.3, 5)
    for (x, y, w, h) in faces:
        frame = cv2.rectangle(frame, (x, y), (x + w, y + h), (255, 0, 0), 2)
        roi_gray = gray[y:y + h, x:x + w]
        roi_color = frame[y:y + h, x:x + w]
    cv2.imshow("Frame", frame)
    timer.end_log()
    frame_count = frame_count + 1
    # if the `q` key was pressed, break from the loop
Example #22
    def __init__(self, r, gridsize, spin, cells, tilt, landr, dt, atmdt, lifedt):
        """Create a simulation for a planet with the given characteristics. """

        self._timing = Timing()

        initt = self._timing.routine('simulation setup')

        self.spin, self.cells, self.tilt = spin, cells, tilt

        # max speed is 100km per million years
        self._dp = 100.0/r * dt

        self._build = dt/5.0
        self._erode = dt

        tilearea = 4 * pi * r**2

        initt.start('building grid')

        self._initgrid(gridsize)

        self.tiles = {}
        for v in self._grid.faces:
            x, y, z = v
            lat = 180/pi * atan2(z, sqrt(x*x + y*y))
            lon = 180/pi * atan2(y, x)
            self.tiles[v] = Tile(lat, lon)

        initt.start('building indexes')

        self.initindexes()

        initt.start('creating initial landmass')

        tilearea /= len(self._indexedtiles)

        # the numerator of the split probability, where
        # the number of tiles in the shape is the denominator:
        # a 50M km^2 continent has a 50/50 chance of splitting in a given step
        self._splitnum = 25e6/tilearea

        # initial location
        p = [random.uniform(-1, 1) for i in range(3)]
        p /= norm(p)

        # 0 velocity vector
        v = (0, 0, 0)

        # orienting point
        o = [0, 0, 0]
        mini = min(range(len(p)), key=lambda i: abs(p[i]))
        o[mini] = 1 if p[mini] < 0 else -1

        shape = SphericalPolygon([rotate(rotate(p, o, landr*random.uniform(0.9,1.1)), p, th)
                                  for th in [i*pi/8 for i in range(16)]])

        self._shapes = [Group([t for t in self.tiles.values() if shape.contains(t.vector)], v)]

        # initial landmass starts at elevation based on distance from center
        c = self._indexedtiles[self._index.nearest(p)[0]]
        r2 = landr*landr

        # on land, one random tile is the center of a felsic chunk
        f = random.choice(self._shapes[0].tiles)

        for t in self._indexedtiles:
            if t in self._shapes[0].tiles:
                dc = t.distance(c)
                df = t.distance(f)

                r = igneous.extrusive(max(0.5, 1 - df*df/r2))
                h = 1 - dc*dc/r2

                t.emptyland(r, h)
            else:
                t.emptyocean(self.seafloor())

        for t in self.tiles.values():
            t.climate = t.seasons = None

        initt.done()

        self._atmosphereticks = atmdt / dt
        self._lifeticks = lifedt / dt

        self._climatemappings = {}
        self._climateprof = None

        self.dirty = True
Example #23
def timing_test(Container=SortedList, t=None):
    tk = Toolkit(rs, Container=Container)
    note = f"(Using {Container.__name__} as container type)"
    Timing.test(brute_force, tk, note=note, t=t)
    print()
Example #24
# coding=utf-8
"""
Demonstrates how to use the background scheduler to schedule a job that executes on 3 second
intervals.
"""

from timing import Timing

if __name__ == '__main__':
    task = Timing()
    #print task
    task.start()
    



Example #25
            system.at['part_solver'].updateMeshValues(
                old_system.at['electrons'], extent=2)
            system.at['part_solver'].updateMeshValues(
                old_system.at['photoelectrons'], extent=2)
            system.at['part_solver'].updateMeshValues(old_system.at['protons'],
                                                      extent=2)
            out.saveVTK(system.at['mesh'], old_system.at, system.arrangeVTK())
        if system.at['ts'] % 10000 == 0:
            out.saveParticlesTXT(old_system.at, system.arrangeParticlesTXT())
        if system.at['ts'] % 10000 == 0:
            out.particleTracker(old_system.at['ts'], old_system.at['protons'],
                                old_system.at['electrons'],
                                old_system.at['photoelectrons'])

        #Updating previous state
        deepcopy = Timing(copy.deepcopy)
        old_system = deepcopy(system)

        #Execution time of loop step and storage
        t1 = time.perf_counter()
        getattr(Timing, 'time_dict')['Global'] = t1 - t0
        if system.at['ts'] % 10 == 0:
            out.saveTimes(system.at['ts'], getattr(Timing, 'time_dict'))
        Timing.reset_dict()

        #Advance in timestep
        system.at['ts'] += 1

except KeyboardInterrupt:
    out.savePickle(old_system.at, old_system.arrangePickle())
    print('Process aborted')
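This loop wraps copy.deepcopy in Timing and relies on a class-level time_dict plus a reset_dict() hook; a minimal sketch consistent with that usage (assumed, not the project's implementation):

import time

class Timing:
    time_dict = {}                             # per-timestep accumulator, keyed by name

    def __init__(self, func):
        self._func = func

    def __call__(self, *args, **kwargs):
        start = time.perf_counter()
        result = self._func(*args, **kwargs)
        key = getattr(self._func, '__name__', repr(self._func))
        Timing.time_dict[key] = Timing.time_dict.get(key, 0.0) + time.perf_counter() - start
        return result

    @classmethod
    def reset_dict(cls):
        cls.time_dict.clear()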
Example #26
File: mpc.py  Project: adbuerger/stcs-mimpc
    def run(self):
        def send_controls(timing, solver):

            control = Control(timing=timing, previous_solver=solver)
            control.apply()

        def solve_short_term_problem(timing, previous_solver, queue_st):

            ambient = Ambient(timing=timing)
            ambient.update()

            state = State()
            state.update()

            predictor = Predictor(timing=timing, ambient=ambient, \
                state=state, previous_solver=previous_solver, \
                solver_name="predictor_bin_" \
                    + str(timing.grid_position_cursor))
            predictor.solve()

            timing.increment_grid_position_cursor()

            if timing.grid_position_cursor >= timing.N_short_term:

                timing.shift_time_grid()
                ambient = Ambient(timing=timing)
                ambient.update()

            nlpsolver_bin = NLPSolverBin( \
                timing=timing, ambient=ambient, \
                previous_solver=previous_solver, predictor=predictor, \
                solver_name = "nlpsolver_bin_" \
                    + str(timing.grid_position_cursor))
            nlpsolver_bin.set_solver_max_cpu_time(time_point_to_finish=\
                timing.time_points[timing.grid_position_cursor])
            nlpsolver_bin.solve()
            nlpsolver_bin.reduce_object_memory_size()

            timing.sleep_until_time_grid_point("solve_short_term_problem", \
                timing.grid_position_cursor)

            queue_st.put(nlpsolver_bin)

            send_controls(timing, nlpsolver_bin)

            nlpsolver_bin.save_results()

        def generate_initial_controls(timing, queue_st):

            ambient = Ambient(timing=timing)
            ambient.update()

            state = State()
            state.update()

            simulator = Simulator( \
                timing=timing, ambient=ambient, state=state)

            simulator.solve()

            queue_st.put(simulator)

            simulator.save_results()

        def solve_long_term_problem(timing, previous_solver, queue_lt):

            ambient = Ambient(timing=timing)
            ambient.update()

            state = State()
            state.update()

            predictor = Predictor(timing=timing, ambient=ambient, \
                state=state, previous_solver=previous_solver, \
                solver_name="predictor_rel")
            predictor.solve(n_steps=timing.N_short_term)

            timing_next_interval = copy.deepcopy(timing)
            timing_next_interval.shift_time_grid()

            ambient = Ambient(timing=timing_next_interval)
            ambient.update()

            nlpsolver_rel = NLPSolverRel( \
                timing=timing_next_interval, ambient=ambient, \
                previous_solver=previous_solver, predictor=predictor, \
                solver_name="nlpsolver_rel")
            nlpsolver_rel.solve()
            nlpsolver_rel.save_results()

            binapprox = BinaryApproximation( \
                timing=timing_next_interval, previous_solver=nlpsolver_rel, \
                predictor=predictor, solver_name="binapprox")
            binapprox.set_solver_max_cpu_time(time_point_to_finish=\
                timing_next_interval.time_points[0])
            binapprox.solve()
            binapprox.save_results()

            timing.increment_grid_position_cursor(n_steps=timing.N_short_term -
                                                  1)
            timing.sleep_until_grid_position_cursor_time_grid_point(
                "solve_long_term_problem")

            queue_lt.put((timing_next_interval, binapprox))

        timing = Timing(startup_time=time.time())

        queue_lt = mp.Queue()
        queue_st = mp.Queue()

        p = mp.Process(target=generate_initial_controls,
                       args=(timing, queue_st))
        p.start()

        while timing.mpc_iteration_count < self._MAX_MPC_ITERATIONS:

            previous_solver_st = queue_st.get()

            timing.define_initial_switch_positions(previous_solver_st)

            p = mp.Process(target=solve_long_term_problem, \
                args=(timing, previous_solver_st, queue_lt))
            p.start()

            for k in range(timing.N_short_term - 1):

                p = mp.Process(target=solve_short_term_problem, \
                    args=(timing, previous_solver_st, queue_st))
                p.start()

                timing.increment_grid_position_cursor()

                previous_solver_st = queue_st.get()

            # Retrieve results of long-term optimization

            timing_next_interval, previous_solver_lt = queue_lt.get()

            p = mp.Process(target=solve_short_term_problem, \
                args=(timing, previous_solver_lt, queue_st))
            p.start()

            timing = copy.deepcopy(timing_next_interval)
            timing.increment_mpc_iteration_count()