Example #1
    def __init__(self, b, x0, y0, e, te, s, ks, rs):
        super(NFW, self).__init__(x0, y0, e, te)
        self.b = b
        self.ks = ks
        self.rs = rs
        self.q = 1.0 - self.e
        self.integrator = Integrator(self.q, self.kappa, self.kappa_prime,
                                     self.phi_r)
        # replace a core radius of s == 0 with 1e-4 to avoid division by
        # zero in the potential calculation
        self.s = s if s != 0.0 else 1e-4
Example #2
class TLD_IVMIT:
    def __init__(self, frame, window, init_frames_count=20):
        self.buffer = [cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)]
        self.position = Position(self.buffer, *window)
        self.learning_component = LearningComponent(self.position.calculate_patch())
        self.detector = Detector(self.position, self.learning_component)
        self.tracker = Tracker(self.position)
        self.is_visible = True
        self.integrator = Integrator(self.learning_component)
        self.init_frames_count = init_frames_count
        self.detected_windows = None
        self.tracked_window = None

    def start(self, frame):
        frame = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        if self.init_frames_count == 0:
            start = time()
            self.tracked_window = self.tracker.track(frame, self.position)
            self.buffer[0] = frame
            print("Tracking:", time() - start)

            start = time()
            self.detected_windows = self.detector.detect(self.position, self.tracked_window is not None)
            print("Detected windows count:", len(self.detected_windows))
            print("Detection:", time() - start)

            start = time()
            # filtered_detected_windows = [(window, patch, proba) for window, patch, proba in self.detected_windows if proba > 0.7]
            single_window, self.is_visible = self.integrator.get_single_window(self.position, self.detected_windows, self.tracked_window)
            print("Integration:", time() - start)

            if self.is_visible:
                self.position.update(*single_window)
            # start = time()
            # self.learning_component.n_expert()
            # self.learning_component.p_expert()
            # print("Update training set:", time() - start)
        else:
            self.tracked_window = self.tracker.track(frame, self.position)
            self.buffer[0] = frame
            if self.tracked_window is not None:
                # sample 5 random window positions that do not intersect the
                # tracked window and use them as negative training examples
                i = 0
                while i < 5:
                    self.position.update(x=np.random.randint(0, self.buffer[0].shape[1] - self.position.width))
                    if self.position.is_correct() and windows_intersection(self.position.get_window(), self.tracked_window) == 0:
                        self.learning_component.update_negatives(self.position.calculate_patch())
                        i += 1

                self.position.update(*self.tracked_window)
                self.learning_component.update_positives(self.position.calculate_patch())

                self.init_frames_count -= 1
            else:
                self.init_frames_count = 0
                self.is_visible = False

        return self.position
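
A minimal usage sketch for the tracker above. Everything outside TLD_IVMIT.start() and Position.get_window() is a hypothetical assumption: the capture source, the initial (x, y, w, h) window, and the display loop are illustrative only.

import cv2

cap = cv2.VideoCapture(0)        # hypothetical capture source
ok, frame = cap.read()
window = (100, 100, 80, 60)      # assumed initial (x, y, w, h) bounding box
tld = TLD_IVMIT(frame, window)

while True:
    ok, frame = cap.read()
    if not ok:
        break
    position = tld.start(frame)
    x, y, w, h = position.get_window()  # assuming the same (x, y, w, h) layout
    cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
    cv2.imshow("TLD-IVMIT", frame)
    if cv2.waitKey(1) & 0xFF == ord("q"):
        break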
Example #4
    def __init__(self, n_threads=None):
        """Default constructor.

        Keyword arguments:
        n_threads -- number of threads (optional)

        """
        self.logger = get_sublogger(__name__)
        self.integrator = Integrator()
        self.n_threads = WMI.DEF_THREADS if n_threads is None else n_threads
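
A minimal usage sketch for the constructor above (WMI.DEF_THREADS, get_sublogger, and Integrator are assumed to be defined in the surrounding module):

wmi = WMI()                # falls back to WMI.DEF_THREADS
wmi_mt = WMI(n_threads=4)  # explicit thread count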
Example #7
class SRNParser(Loggable):

    # field descriptors in the raw dataset
    EDGE_FIELD = "LinkDescription"
    TP_FIELD = "TimePeriod"
    AVGJT_FIELD = "AverageJT"
    SELECT_FIELDS = [EDGE_FIELD, TP_FIELD, AVGJT_FIELD]

    # default parameters for the distribution fitting and time partitioning
    DEF_N_PARTITIONS = 12
    MAX_TIME = 60 * 3

    ERR_FIT = "Can't fit the distribution"

    def __init__(self, n_partitions=None):
        """Default constructor, sets the parameters for the time partitioning
        and the distribution fitting.

        Keyword arguments:
        n_partitions -- number of partitions of the day (optional)

        """
        self.init_sublogger(__name__)

        self.integrator = Integrator()

        if n_partitions:
            self.n_partitions = n_partitions
        else:
            self.n_partitions = SRNParser.DEF_N_PARTITIONS

        assert self.n_partitions < SRNParser.MAX_TIME,\
            "Each partition must be at least 1 minute long."

        delta = int(SRNParser.MAX_TIME / self.n_partitions)
        self.partitions = [(i * delta) for i in range(self.n_partitions)]
        self.partitions.append(SRNParser.MAX_TIME)

    @staticmethod
    def write_conditional_plan(path, plan):
        with open(path, 'w') as f:
            for src, i, dest in plan:
                nxt = plan[(src, i, dest)]
                f.write(",".join(map(str, [src, i, dest, nxt])) + "\n")

    @staticmethod
    def read_conditional_plan(path):
        plan = {}
        with open(path, 'r') as f:
            for line in f:
                try:
                    src, i, dst, nxt = line.strip().split(",")
                    i = int(i)
                    plan[(src, i, dst)] = nxt
                except ValueError:
                    continue
        return plan

    def compute_conditional_plan(self, graph):
        msg = "Computing conditional plan: |G.nodes| = {} |G.edges| = {}"
        G = nx.DiGraph()
        n_nodes = len(graph.nodes())
        n_edges = len(graph.edges())
        self.logger.info(msg.format(n_nodes, n_edges))
        computed = 0
        msg = "Computing conditional plan: building auxiliary graph G' {}/{}"
        for a, b in graph.edges():
            self.logger.debug(msg.format(computed, n_edges))
            for i in range(self.n_partitions):
                pi, pf = self.partitions[i], self.partitions[i + 1]
                wp = graph.get_edge_data(a, b)[i]['avg']
                next_t = ((pf - pi) / 2.0) + wp
                j = self._tp_to_partition(next_t)
                if j is not None:
                    G.add_edge((a, i), (b, j), weight=wp)

            computed += 1

        self.logger.debug(msg.format(computed, n_edges))
        msg = "Computing conditional plan: |G'.nodes| = {} |G'.edges| = {}"
        self.logger.info(msg.format(len(G.nodes()), len(G.edges())))

        self.logger.debug("Computing conditional plan: all-pairs Dijkstra")
        paths = dict(nx.all_pairs_dijkstra_path(G, cutoff=None))
        plan = {}

        msg = "Computing conditional plan: computing mapping from paths {}/{}"
        n_entries = self.n_partitions * (n_nodes**2)
        computed = 0

        nodes = {n for n, _ in paths}
        for src, i in paths:
            self.logger.debug(msg.format(computed, n_entries))
            for dst in nodes:
                shortest = None
                nxt = None
                for j in range(self.n_partitions):
                    aux_dst = (dst, j)
                    try:
                        lngt = SRNParser._length(G, paths[(src, i)][aux_dst])
                    except KeyError:
                        # reaching dst at interval j, starting from
                        # src at interval i is unfeasible
                        continue

                    if shortest is None or lngt < shortest:
                        shortest = lngt
                        try:
                            # next node on the shortest path
                            nxt = paths[(src, i)][aux_dst][1][0]
                        except IndexError:
                            # src == dst: the path has a single node
                            nxt = paths[(src, i)][aux_dst][0][0]

                if nxt is not None:
                    plan[(src, i, dst)] = nxt
                computed += 1
        self.logger.debug(msg.format(computed, n_entries))
        return plan

    @staticmethod
    def _length(graph, path):
        lngt = 0

        for i in range(len(path) - 1):
            lngt += graph.get_edge_data(path[i], path[i + 1])['weight']

        return lngt

    def read_raw_dataset(self, path, preprocessed_path=None):
        """Reads and preprocess a raw dataset. Since the preprocessing may
        take some time, the optional parameter 'preprocessed_path' can be
        specified in order to dump the preprocessed data for future uses.

        Keyword arguments:
        path -- the path of the raw dataset to be read
        preprocessed_path -- output path for the preprocessed data
                             (default: None)

        """
        entries = {}
        select_query = SRNParser.SELECT_FIELDS
        rows = SRNParser._read_raw_csv(path, select=select_query)
        for row in rows:
            edge = SRNParser._parse_edge(row[SRNParser.EDGE_FIELD])
            if edge and (edge[0] != edge[1]):
                x, y = edge
                # time periods are 15 minutes long
                tp = int(row[SRNParser.TP_FIELD]) * 15
                # average journey times are expressed in seconds
                avgjt = float(row[SRNParser.AVGJT_FIELD]) / 60.0
                partition = self._tp_to_partition(tp)
                if partition is None:
                    continue

                if (x, y) not in entries:
                    entries[(x, y)] = {}
                if partition not in entries[(x, y)]:
                    entries[(x, y)][partition] = [avgjt]
                else:
                    entries[(x, y)][partition].append(avgjt)

        preprocessed_data = self._preprocess(entries)
        # if preprocessed_path is given, dump the aggregated data
        if preprocessed_path:
            SRNParser._write_preprocessed_data(preprocessed_path,
                                               preprocessed_data,
                                               self.partitions)

        return preprocessed_data, self.partitions

    @staticmethod
    def read_preprocessed_dataset(path):
        """Reads a preprocessed dataset.

        Keyword arguments:
        path -- the path of the raw dataset to be read

        """
        with open(path, "r") as f:
            preprocessed_data = {}
            partitions = None

            for line in f:
                if not partitions:
                    partitions = list(map(int, line.strip().split(",")))

                else:
                    fields = line.strip().split(",")
                    src, dst = fields[:2]
                    part = int(fields[2])
                    avg = float(fields[3])
                    r_min, r_max = map(float, fields[4:6])
                    rng = r_min, r_max
                    coeffs = list(map(float, fields[6:]))
                    if (src, dst) not in preprocessed_data:
                        preprocessed_data[(src, dst)] = {}
                    if part not in preprocessed_data[(src, dst)]:
                        preprocessed_data[(src, dst)][part] = (avg, rng,
                                                               coeffs)
                    else:
                        assert(False),\
                            "Malformed file: (src, dst, partition) not unique."

            return preprocessed_data, partitions

    def _fit_distribution(self, datapoints):
        frequencies, bin_edges = histogram(datapoints, bins="sturges")
        x = [(bin_edges[i + 1] + bin_edges[i]) / 2.
             for i in range(len(bin_edges) - 1)]
        # fitting data with a quadratic polynomial
        coefficients = polyfit(x, frequencies, 2)
        edges = [bin_edges[0], bin_edges[-1]]

        pos = (coefficients[0] > 0)
        zeros = sorted(list({z.real for z in roots(coefficients)}))
        if len(zeros) in [0, 1]:
            if pos:
                rng = edges
            else:
                self.logger.error(SRNParser.ERR_FIT)
                raise WMIParsingError(SRNParser.ERR_FIT)
        elif len(zeros) == 2:
            if pos:
                # changing the 0-th order coefficient by -k
                m = (zeros[1] - zeros[0]) / 2.0
                a, b, c = coefficients
                k = a * (m**2) + b * m + c
                coefficients[2] += abs(k)
                rng = edges
            else:
                rng = zeros
        else:
            self.logger.error(SRNParser.ERR_FIT)
            raise WMIParsingError(SRNParser.ERR_FIT)

        rng[0] = max(0, rng[0])

        # normalize the distribution
        integral = self.integrator.integrate_raw(coefficients, rng)
        if integral <= 0:
            self.logger.error(SRNParser.ERR_FIT)
            raise WMIParsingError(SRNParser.ERR_FIT)

        coefficients = [c / integral for c in coefficients]

        return rng, coefficients

    @staticmethod
    def _parse_edge(description):
        query = ".* between (.+) and (.+) \(.*\)"
        match = re.search(query, description)

        if match:
            return match.group(1), match.group(2)
        else:
            return None

    def _preprocess(self, entries):
        # build the full graph and identify the SCCs
        full_graph = nx.DiGraph()
        full_graph.add_edges_from(entries.keys())
        sccs = nx.strongly_connected_components(full_graph)

        # keep the biggest SCC
        biggest_scc = None
        size = 0
        nsccs = 0

        for scc in sccs:
            nsccs += 1
            if len(scc) > size:
                size = len(scc)
                biggest_scc = scc

        preprocessed_data = {}
        n_entries = len(entries)
        computed = 0
        msg = "Preprocessing data: {}/{}"

        for src, dst in entries:
            self.logger.debug(msg.format(computed, n_entries))

            if (src in biggest_scc) and (dst in biggest_scc):

                if (src, dst) not in preprocessed_data:
                    preprocessed_data[(src, dst)] = {}

                for partition in entries[(src, dst)]:
                    avg_jts = entries[(src, dst)][partition]
                    avg = sum(avg_jts) / len(avg_jts)
                    rng, coefficients = self._fit_distribution(avg_jts)
                    entry = (avg, rng, coefficients)
                    preprocessed_data[(src, dst)][partition] = entry

            computed += 1

        return preprocessed_data

    @staticmethod
    def _read_raw_csv(path, select=None):
        with open(path, "r") as f:
            header = f.readline().strip().split(",")

            for line in f:
                values = line.strip().split(",")
                assert(len(header) == len(values)),\
                    "Line has a different number of fields than the header"
                yield {
                    header[i]: values[i]
                    for i in range(len(header))
                    if not select or (header[i] in select)
                }

    def _tp_to_partition(self, tp):
        for i in range(self.n_partitions):
            if (self.partitions[i] <= tp) and (tp < self.partitions[i + 1]):
                return i
        return None

    @staticmethod
    def _write_preprocessed_data(path, preprocessed_data, partitions):
        with open(path, "w") as f:
            f.write(",".join(map(str, partitions)) + "\n")

            for src, dst in preprocessed_data:

                for partition in preprocessed_data[(src, dst)]:
                    entry = preprocessed_data[(src, dst)][partition]
                    avg, rng, coefficients = entry
                    f.write("{},{},{},{},{},{},{}\n".format(
                        src, dst, partition, avg, rng[0], rng[1],
                        ",".join(map(str, coefficients))))
Example #8
import sys
import os
import logging
from datetime import timedelta

from flask import Flask

# make sibling modules importable before the local imports below
sys.path.append(os.path.join(os.path.dirname(__file__)))

from integration import Integrator
from integration import local_solr_server
from integration import remote_solr_server
from integration import IntegrationException
from IndustryTermRecogniser import IndustryTermRecogniser

basestring = (str, bytes)

app = Flask(__name__)

integration = Integrator()

_logger = logging.getLogger("IntegrationService")


def crossdomain(origin=None, methods=None, headers=None,
                max_age=21600, attach_to_all=True,
                automatic_options=True):
    if methods is not None:
        methods = ', '.join(sorted(x.upper() for x in methods))
    if headers is not None and not isinstance(headers, basestring):
        headers = ', '.join(x.upper() for x in headers)
    if not isinstance(origin, basestring):
        origin = ', '.join(origin)
    if isinstance(max_age, timedelta):
        max_age = max_age.total_seconds()
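
The decorator body is cut off above; in the standard Flask crossdomain recipe it is applied on top of a route, roughly like this (the route path and handler are hypothetical):

@app.route("/terms", methods=["GET", "OPTIONS"])
@crossdomain(origin="*")
def terms():
    return "OK"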
Example #9
class NFW(BaseModel):
    """Navarro-Frenk-White profile"""

    def __init__(self, b, x0, y0, e, te, s, ks, rs):
        super(NFW, self).__init__(x0, y0, e, te)
        self.b = b
        self.ks = ks
        self.rs = rs
        self.q = 1.0 - self.e
        self.integrator = Integrator(self.q, self.kappa, self.kappa_prime, self.phi_r)
        # replace a core radius of s == 0 with 1e-4 to avoid division by
        # zero in the potential calculation
        self.s = s if s != 0.0 else 1e-4

    @BaseModel.standard_frame_rotation
    def phiarray(self, x, y, *args, **kwargs):
        if not isinstance(x, Iterable) and not isinstance(y, Iterable):
            x = [x]
            y = [y]

        potential, phix, phiy, phixx, phiyy, phixy = [[] for _ in range(6)]
        print("calculating phiarray for {0} (x, y) pairs".format(len(x)))
        for local_x, local_y in zip(x, y):
            potential.append(0)
            phix.append(self.integrator.phi_x(local_x, local_y))
            phiy.append(self.integrator.phi_y(local_x, local_y))
            phixx.append(self.integrator.phi_xx(local_x, local_y))
            phiyy.append(self.integrator.phi_yy(local_x, local_y))
            phixy.append(self.integrator.phi_xy(local_x, local_y))
        return np.array((potential, phix, phiy, phixx, phiyy, phixy))

    @staticmethod
    def funcF(x):
        dx = x ** 2 - 1.0
        if abs(x) < 1e-2:
            # series with O(x^6) error
            log2x = np.log(2.0 / x)
            return log2x + x ** 2 * (0.5 * log2x - 0.25) * x ** 4 * (0.375 * log2x - 0.21875)
        elif abs(dx) < 1e-2:
            # series with O(dx^6) error
            return 1.0 - (dx / 3.0) + (dx ** 2 / 5.0) - (dx ** 3 / 7.0) + (dx ** 4 / 9.0) - (dx ** 5 / 11.0)
        elif x > 1.0:
            tmp = np.sqrt(x ** 2 - 1.0)
            return np.arctan(tmp) / tmp
        else:
            tmp = np.sqrt(1.0 - x ** 2)
            return np.arctanh(tmp) / tmp

    @staticmethod
    def funcF_prime(x):
        return (1.0 - x ** 2 * NFW.funcF(x)) / (x * (x ** 2 - 1.0))

    def kappa(self, r):
        x = r / self.rs
        return 2.0 * self.ks * ((1.0 - NFW.funcF(x)) / (x ** 2.0 - 1.0))

    def kappa_prime(self, r):
        x = r / self.rs
        numerator = (2.0 * self.rs * self.ks * ((self.rs ** 2 - r ** 2) * NFW.funcF_prime(x)
                                                + 2.0 * self.rs * r * NFW.funcF(x)
                                                - 2.0 * self.rs * r))
        denominator = (self.rs ** 2 - r ** 2) ** 2
        return numerator / denominator

    def phi(self, r):
        x = r / self.rs
        return 2.0 * self.ks * self.rs ** 2 * (np.log(x / 2.0) ** 2 - np.arctanh(np.sqrt(1.0 - x ** 2)) ** 2)

    def phi_r(self, x):
        """Spherical deflection"""
        return 4.0 * self.ks * self.rs * ((np.log(x / 2.0) + self.funcF(x)) / x)
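
A minimal usage sketch for the profile above. The numeric parameters are illustrative only, and it assumes BaseModel's constructor and the standard_frame_rotation decorator pass these coordinates through unchanged:

import numpy as np

nfw = NFW(b=1.0, x0=0.0, y0=0.0, e=0.1, te=0.0, s=0.0, ks=0.2, rs=1.5)
x = np.linspace(0.5, 2.0, 4)
y = np.linspace(0.5, 2.0, 4)
potential, phix, phiy, phixx, phiyy, phixy = nfw.phiarray(x, y)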
Example #11
"""
Authors: Masafumi Endo
Date: 04/10/2021
Content: main function to sense line and follow it by PID controller
"""

import sys
sys.path.append('..')

import time

from picarx_organized import PicarX
from photosensing import PhotoSensor, PhotoInterpretor
from controller import Controller
from integration import Integrator

if __name__ == '__main__':
    # instantiate the car, sensing, control, and integration components
    car = PicarX()
    sensor = PhotoSensor()
    interpretor = PhotoInterpretor()
    controller = Controller()
    integrator = Integrator(car, sensor, interpretor, controller, speed=30)

    integrator.line_trace()