    def testTransformSegmentParameter1(self):
        p1 = Pslg.GridPoint(1, 2)
        p2 = Pslg.GridPoint(3, -2)
        s = Pslg.Segment(p1, p2)

        result = Integrator.transformSegementParameter(s, -1)
        expected = [1, 2]
        self.assertAlmostEqual(result[0], expected[0], 9)
        self.assertAlmostEqual(result[1], expected[1], 9)

        result = Integrator.transformSegementParameter(s, -0.5)
        expected = [1.5, 1]
        self.assertAlmostEqual(result[0], expected[0], 9)
        self.assertAlmostEqual(result[1], expected[1], 9)

        result = Integrator.transformSegementParameter(s, 0)
        expected = [2, 0]
        self.assertAlmostEqual(result[0], expected[0], 9)
        self.assertAlmostEqual(result[1], expected[1], 9)

        result = Integrator.transformSegementParameter(s, 0.5)
        expected = [2.5, -1]
        self.assertAlmostEqual(result[0], expected[0], 9)
        self.assertAlmostEqual(result[1], expected[1], 9)

        result = Integrator.transformSegementParameter(s, 1)
        expected = [3, -2]
        self.assertAlmostEqual(result[0], expected[0], 9)
        self.assertAlmostEqual(result[1], expected[1], 9)
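The test above exercises Integrator.transformSegementParameter, which maps a parameter t in [-1, 1] onto the segment from p1 to p2 (t = -1 at p1, t = 0 at the midpoint, t = 1 at p2). A minimal sketch of such an affine map, assuming plain coordinate pairs rather than the project's GridPoint/Segment types, that reproduces the expected values in the assertions:

def transform_segment_parameter(p1, p2, t):
    # Affine map: t = -1 -> p1, t = +1 -> p2, t = 0 -> midpoint (an assumption, not the project's code).
    x = 0.5 * (1 - t) * p1[0] + 0.5 * (1 + t) * p2[0]
    y = 0.5 * (1 - t) * p1[1] + 0.5 * (1 + t) * p2[1]
    return [x, y]

# e.g. transform_segment_parameter((1, 2), (3, -2), -0.5) -> [1.5, 1.0]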
    def test_widoco_lang_conf(self):
        sec = ''.join([
            random.choice(string.ascii_letters + string.digits)
            for _ in range(4)
        ])
        folder_name = 'newconf-wlang-' + sec
        # delete the folder if it exists
        comm = "rm" + " -Rf " + os.path.join(autoncore.home, folder_name)
        print(comm)
        call(comm, shell=True)
        abs_repo_dir = autoncore.clone_repo(self.cloning_url,
                                            folder_name,
                                            dosleep=True,
                                            branch="main")
        self.assertTrue(
            os.path.exists(os.path.join(abs_repo_dir, "ALo", "aLo.owl")))
        self.assertTrue(
            os.path.exists(
                os.path.join(abs_repo_dir, "GeO", "geoLinkedData.owl")))
        self.assertFalse(
            os.path.exists(
                os.path.join(abs_repo_dir, "OnToology", "ALo", "aLo.owl")))
        self.assertFalse(
            os.path.exists(
                os.path.join(abs_repo_dir, "OnToology", "GeO",
                             "geoLinkedData.owl")))
        conf_content = """
[owl2jsonld]
enable = false

[widoco]
enable = true
webvowl = false
languages = en,es

[themis]
enable = false

[ar2dtool]
enable = false

[oops]
enable = false

"""
        conf_alo = Integrator.create_of_get_conf(
            os.path.join("ALo", "aLo.owl"), abs_repo_dir)
        conf_file_abs_alo = os.path.join(abs_repo_dir, "OnToology", "ALo",
                                         "aLo.owl", "OnToology.cfg")
        # print("abs alo dir: "+conf_file_abs_alo)
        f = open(conf_file_abs_alo, 'w')
        f.write(conf_content)
        f.close()

        conf_alo = Integrator.create_of_get_conf(
            os.path.join("ALo", "aLo.owl"), abs_repo_dir)
        # print("\n\n conf alo: ")
        print(conf_alo)
        self.assertEqual('en,es', ",".join(conf_alo['widoco']['languages']))
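The final assertion expects conf_alo['widoco']['languages'] to be a list parsed from the comma-separated languages value written to OnToology.cfg above. A minimal sketch of such parsing with the standard configparser module, as an assumption about the behaviour rather than the actual create_of_get_conf implementation:

import configparser

def read_widoco_languages(cfg_path):
    # Parse "languages = en,es" from the [widoco] section into ['en', 'es'] (assumed behaviour).
    parser = configparser.ConfigParser()
    parser.read(cfg_path)
    raw = parser.get('widoco', 'languages', fallback='en')
    return [lang.strip() for lang in raw.split(',')]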
    def test_create_new_conf(self):
        """
        :return:
        """
        cloning_url = self.cloning_url
        sec = ''.join([
            random.choice(string.ascii_letters + string.digits)
            for _ in range(4)
        ])
        folder_name = 'newconf-' + sec
        # delete the folder if it exists
        comm = "rm" + " -Rf " + os.path.join(autoncore.home, folder_name)
        print(comm)
        call(comm, shell=True)
        abs_repo_dir = autoncore.clone_repo(cloning_url,
                                            folder_name,
                                            dosleep=True,
                                            branch="main")
        self.assertTrue(
            os.path.exists(os.path.join(abs_repo_dir, "ALo", "aLo.owl")))
        self.assertTrue(
            os.path.exists(
                os.path.join(abs_repo_dir, "GeO", "geoLinkedData.owl")))
        self.assertFalse(
            os.path.exists(
                os.path.join(abs_repo_dir, "OnToology", "ALo", "aLo.owl")))
        self.assertFalse(
            os.path.exists(
                os.path.join(abs_repo_dir, "OnToology", "GeO",
                             "geoLinkedData.owl")))
        conf_file_abs_alo = os.path.join(abs_repo_dir, "OnToology", "ALo",
                                         "aLo.owl")
        conf_file_abs_geo = os.path.join(abs_repo_dir, "OnToology", "GeO",
                                         "geoLinkedData.owl")
        conf_alo = Integrator.create_of_get_conf(
            os.path.join("ALo", "aLo.owl"), abs_repo_dir)
        conf_geo = Integrator.create_of_get_conf(
            os.path.join("GeO", "geoLinkedData.owl"), abs_repo_dir)
        #
        # conf_alo = autoncore.get_auton_config(conf_file_abs_alo, from_string=False)
        # conf_geo = autoncore.get_auton_config(conf_file_abs_geo, from_string=False)
        self.assertTrue(
            os.path.exists(
                os.path.join(abs_repo_dir, "OnToology", "ALo", "aLo.owl",
                             "OnToology.cfg")))
        self.assertTrue(
            os.path.exists(
                os.path.join(abs_repo_dir, "OnToology", "GeO",
                             "geoLinkedData.owl", "OnToology.cfg")))
        t_alo = open(
            os.path.join(abs_repo_dir, "OnToology", "ALo", "aLo.owl",
                         "OnToology.cfg")).read()
        t_geo = open(
            os.path.join(abs_repo_dir, "OnToology", "GeO", "geoLinkedData.owl",
                         "OnToology.cfg")).read()
        # print("t_alo: \n"+t_alo)
        # print("t_geo: \n"+t_geo)
        self.assertFalse(t_alo.strip() == "")
        self.assertFalse(t_geo.strip() == "")
Example #4
def yat15():

    dt = 0.05
    nsteps = 1200
    
    detectorDict = { "norm": np.array([1,0,0]),
                     "dist": 15,
                     "v": np.array([0,1,0]),
                     "w": np.array([0,0,1]),
                     "width": 20,
                     "height": 20  }
    
    thvals = np.linspace(0,np.pi,200)
    yfine = []
    ycoarse = []
    
    for th in thvals:
        detectorDict["norm"] = np.array([np.sin(th), 0, np.cos(th)])
        detectorDict["w"] = np.cross(detectorDict["norm"],detectorDict["v"])
        p0 = 20000 * detectorDict["norm"]
        x0 = np.append([0,0,0],p0)
    
        Params.UseFineBField = False
        traj_coarse = Integrator.rk4(x0, Integrator.traverseBField, dt, nsteps)
        Params.UseFineBField = True
        traj_fine = Integrator.rk4(x0, Integrator.traverseBField, dt, nsteps)
    
        intersect_coarse = Detector.FindIntersection(traj_coarse,detectorDict)
        ycoarse.append(100*intersect_coarse[0][1])
        intersect_fine = Detector.FindIntersection(traj_fine,detectorDict)
        yfine.append(100*intersect_fine[0][1])
    
    yfine = np.array(yfine)
    ycoarse = np.array(ycoarse)
    thvals *= 180/np.pi
    
    plt.figure(3)
    
    plt.subplot(1,1,1)
    ax1 = plt.gca()
    ax2 = ax1.twinx()
    ax2.plot(thvals,abs(yfine-ycoarse), '-', color='0.75')
    ax1.plot(thvals, yfine, '-b', linewidth=2)
    ax1.plot(thvals, ycoarse, '--r', linewidth=2)
    ax1.set_ylabel('y (cm)')
    ax2.set_ylabel('diff (cm)')
    
    ax1.set_xlabel(r'$\theta$ (deg)')
    ax1.set_title(r'y at r=15m, 20 GeV muon')
    
    plt.savefig("/home/users/bemarsh/public_html/backup/B-integrator/fine_vs_coarse/deltaR_allTheta_nointerp.png")
    

    plt.show()
Example #5
def compareTimeSteps(dt1, dt2):
    
    Params.UseFineBField=True

    total_time = 35
    nsteps1 = int(round(total_time/dt1))
    nsteps2 = int(round(total_time/dt2))

    p0 = [20000, 0, 0]
    x0 = np.array([0,0,0]+p0)
    
    detectorDict = { "norm": np.array([1,0,0]),
                     "dist": 0,
                     "v": np.array([0,1,0]),
                     "w": np.array([0,0,1]),
                     "width": 20,
                     "height": 20  }
    
    traj1 = Integrator.rk4(x0, Integrator.traverseBField, dt1, nsteps1)
    traj2 = Integrator.rk4(x0, Integrator.traverseBField, dt2, nsteps2)
    
    y1 = []
    y2 = []
    rvals = np.linspace(0,8.5,100)
    for r in rvals:
        detectorDict["dist"] = r
        
        intersect1 = Detector.FindIntersection(traj1,detectorDict)
        y1.append(100*intersect1[0][1])
        intersect2 = Detector.FindIntersection(traj2,detectorDict)
        y2.append(100*intersect2[0][1])
        
    y1 = np.array(y1)
    y2 = np.array(y2)
        
    plt.figure(1)

    plt.subplot(1,1,1)
    ax1 = plt.gca()
    fplot, = ax1.plot(rvals, y1, '-b', linewidth=2, label='dt='+str(dt1)+' ns')
    cplot, = ax1.plot(rvals, y2, '--r', linewidth=2, label='dt='+str(dt2)+' ns')
    ax2 = ax1.twinx()
    dplot, = ax2.plot(rvals,abs(y1-y2), '-', color='0.75', label='Difference')
    ax1.set_ylabel('y (cm)')
    ax2.set_ylabel('diff (cm)')
        
    ax1.set_xlabel('x (m)')
    ax1.set_title(r'20 GeV muon trajectory ($\eta=0$)')
    
    plt.legend(handles=[fplot,cplot,dplot],loc='lower left')
    
    plt.savefig("/home/users/bemarsh/public_html/backup/B-integrator/fine_vs_coarse/comp_dt_eta0_interp.png")
    
    plt.show()
    def test_fix_old_conf(self):
        """
        :return:
        """
        sec = ''.join([
            random.choice(string.ascii_letters + string.digits)
            for _ in range(4)
        ])
        folder_name = 'fix-old-conf-' + sec
        # delete the folder if it exists
        comm = "rm" + " -Rf " + os.path.join(autoncore.home, folder_name)
        print(comm)
        call(comm, shell=True)
        abs_repo_dir = autoncore.clone_repo(self.cloning_url,
                                            folder_name,
                                            dosleep=True,
                                            branch="main")
        self.assertTrue(
            os.path.exists(os.path.join(abs_repo_dir, "ALo", "aLo.owl")))
        self.assertTrue(
            os.path.exists(
                os.path.join(abs_repo_dir, "GeO", "geoLinkedData.owl")))
        self.assertFalse(
            os.path.exists(
                os.path.join(abs_repo_dir, "OnToology", "ALo", "aLo.owl")))
        self.assertFalse(
            os.path.exists(
                os.path.join(abs_repo_dir, "OnToology", "GeO",
                             "geoLinkedData.owl")))
        conf_content = """
[widoco]
enable = false

[ar2dtool]
enable = false

[oops]
enable = false

        """
        conf_alo = Integrator.create_of_get_conf(
            os.path.join("ALo", "aLo.owl"), abs_repo_dir)
        conf_file_abs_alo = os.path.join(abs_repo_dir, "OnToology", "ALo",
                                         "aLo.owl", "OnToology.cfg")
        f = open(conf_file_abs_alo, 'w')
        f.write(conf_content)
        f.close()

        conf_alo = Integrator.create_of_get_conf(
            os.path.join("ALo", "aLo.owl"), abs_repo_dir)
        print(conf_alo)
        self.assertEqual(['en'], conf_alo['widoco']['languages'])
        self.assertIn('themis', conf_alo)
Example #7
    def __init__(self, *statevars):
        self.integrator = Integrator()
        name1 = 's'
        port1 = '/dev/ttySAC0'
        self.statevars = []  # example: ['roll', 'pitch', 'yaw']
        for state in statevars:
            self.statevars.append(state)
        # All states available:
        # ['roll', 'pitch', 'yaw']
        # ['health', 'roll', 'pitch', 'yaw'] #'mag_proc_x', 'mag_proc_y', 'mag_proc_z', 'mag_raw_x', 'mag_raw_y',
        # 'mag_raw_z', #'accel_raw_x', 'accel_raw_y', 'accel_raw_z', 'accel_proc_x', 'accel_proc_y', 'accel_proc_z',
        # 'gyro_proc_x', 'gyro_proc_y', #'gyro_proc_z', 'accel_raw_time', 'accel_proc_time', 'euler_time']

        self.s1 = um7.UM7(name1, port1, self.statevars, baud=115200)
        try:
            print('IMU initialization process:')
            # self.s1.reset_to_factory()
            print('GET_FW_REVISION=' + '{}'.format(self.s1.get_fw_revision()))
            print('ZERO_GYROS ' + ('ok.' if self.s1.zero_gyros() else 'failed.'))
            time.sleep(1.1)
            print('RESET_EKF ' + ('ok.' if self.s1.reset_ekf() else 'failed.'))
            print('SET_MAG_REFERENCE ' + ('ok.' if self.s1.set_mag_reference() else 'failed.'))
            time.sleep(1.1)
			# print('SET_HOME_POSITION ' + 'ok.' if self.s1.set_home_position() else 'failed.')
            # print('RESET_EKF ' + 'ok.' if self.s1.reset_ekf() else 'failed.')

			# print('FLASH_COMMIT ' + 'ok.' if self.s1.flash_commit() else 'failed.')
        # time.sleep(3)
        except Exception:
            print('------------!ERROR occurred!--------------\n')

        # readings view format:
        self.fs = ''
        self.hs = ''
        for i in self.statevars:
            self.hs += '{:>9.9s} '
            if i == 'health':
                self.fs += ' {0[' + i + ']:08b} '
            else:
                self.fs += '{0[' + i + ']:9.3f} '

        self.sv = ['roll', 'pitch', 'yaw']

        self.prev_sample = 0
        self.drift = 0
        self.gravity_vector = (0, 0, 0)
        # init
        time.sleep(1)
        self.catchSamples()
        angles = self.makeAnglesDict(self.getSample('roll'), self.getSample('pitch'), self.getSample('yaw'))
        vec = self.makeVector(self.getSample('accel_proc_x'), self.getSample('accel_proc_y'),
                              self.getSample('accel_proc_z'))
        self.gravity_vector = self.rotate(angles, vec, True)
    def testGetTransformationFactor1(self):
        p1 = Pslg.GridPoint(1, 2)
        p2 = Pslg.GridPoint(3, -2)
        s = Pslg.Segment(p1, p2)

        result = Integrator.getTransformationFactor(s)
        expected = math.sqrt(5)
        self.assertAlmostEqual(result, expected, 9)
    def testTransformOrigin2(self):
        p1 = Pslg.GridPoint(3, 4)
        p2 = Pslg.GridPoint(5, 4)
        p3 = Pslg.GridPoint(3, 4.5)
        e = ElementAwarePslg.Element(p1, p2, p3, 0, 0)

        values = Integrator.transformOrigin(e, 0, 0)
        self.assertAlmostEqual(values[0], 3)
        self.assertAlmostEqual(values[1], 4)

        values = Integrator.transformOrigin(e, 1, 0)
        self.assertAlmostEqual(values[0], 5)
        self.assertAlmostEqual(values[1], 4)

        values = Integrator.transformOrigin(e, 0, 1)
        self.assertAlmostEqual(values[0], 3)
        self.assertAlmostEqual(values[1], 4.5)

        values = Integrator.transformOrigin(e, 0.5, 0)
        self.assertAlmostEqual(values[0], 4)
        self.assertAlmostEqual(values[1], 4)

        values = Integrator.transformOrigin(e, 0, 0.5)
        self.assertAlmostEqual(values[0], 3)
        self.assertAlmostEqual(values[1], 4.25)

        values = Integrator.transformOrigin(e, 0.5, 0.5)
        self.assertAlmostEqual(values[0], 4)
        self.assertAlmostEqual(values[1], 4.25)

        values = Integrator.transformOrigin(e, 0.25, 0.5)
        self.assertAlmostEqual(values[0], 3.5)
        self.assertAlmostEqual(values[1], 4.25)
    def testTransformOrigin3(self):
        p1 = Pslg.GridPoint(4, 4)
        p2 = Pslg.GridPoint(7, 5)
        p3 = Pslg.GridPoint(5, 7)
        e = ElementAwarePslg.Element(p1, p2, p3, 0, 0)

        values = Integrator.transformOrigin(e, 0, 0)
        self.assertAlmostEqual(values[0], 4)
        self.assertAlmostEqual(values[1], 4)

        values = Integrator.transformOrigin(e, 1, 0)
        self.assertAlmostEqual(values[0], 7)
        self.assertAlmostEqual(values[1], 5)

        values = Integrator.transformOrigin(e, 0, 1)
        self.assertAlmostEqual(values[0], 5)
        self.assertAlmostEqual(values[1], 7)

        values = Integrator.transformOrigin(e, 0.5, 0)
        self.assertAlmostEqual(values[0], 5.5)
        self.assertAlmostEqual(values[1], 4.5)

        values = Integrator.transformOrigin(e, 0, 0.5)
        self.assertAlmostEqual(values[0], 4.5)
        self.assertAlmostEqual(values[1], 5.5)

        values = Integrator.transformOrigin(e, 0.5, 0.5)
        self.assertAlmostEqual(values[0], 6)
        self.assertAlmostEqual(values[1], 6)

        values = Integrator.transformOrigin(e, 0.25, 0.5)
        self.assertAlmostEqual(values[0], 4 + 5.0 / 4.0)
        self.assertAlmostEqual(values[1], 4 + 7.0 / 4.0)
    def testIntegration1D6(self):
        p1 = Pslg.GridPoint(1, 2)
        p2 = Pslg.GridPoint(3, -2)
        s = Pslg.Segment(p1, p2)

        def f(x, y):
            return 7.0

        result = Integrator.integrate1D(f, s)
        expected = 14 * math.sqrt(5)
        self.assertAlmostEqual(result, expected, 9)
    def testSecondDegreePolynomial1(self):
        p1 = Pslg.GridPoint(0, 0)
        p2 = Pslg.GridPoint(1, 0)
        p3 = Pslg.GridPoint(0, 1)
        e = ElementAwarePslg.Element(p1, p2, p3, 0, 0)

        def f(x, y):
            return x * x

        result = Integrator.integrate2D(f, e)
        self.assertAlmostEqual(result, 1.0 / 12.0, 9)
    def testFirstDegreePolynomial5(self):
        p1 = Pslg.GridPoint(0, 0)
        p2 = Pslg.GridPoint(1, 0)
        p3 = Pslg.GridPoint(0, 1)
        e = ElementAwarePslg.Element(p1, p2, p3, 0, 0)

        def f(x, y):
            return 2.0 * x + 3.0 * y + 4.0

        result = Integrator.integrate2D(f, e)
        self.assertAlmostEqual(result, 2.0 / 6.0 + 3.0 / 6.0 + 2.0, 9)
    def testConstantFunction1(self):
        p1 = Pslg.GridPoint(0, 0)
        p2 = Pslg.GridPoint(1, 0)
        p3 = Pslg.GridPoint(0, 1)
        e = ElementAwarePslg.Element(p1, p2, p3, 0, 0)

        def f(x, y):
            return 1

        result = Integrator.integrate2D(f, e)
        self.assertAlmostEqual(result, 0.5, 9)
    def testIntegration1D7(self):
        p1 = Pslg.GridPoint(1, 2)
        p2 = Pslg.GridPoint(3, -2)
        s = Pslg.Segment(p1, p2)

        def f(x, y):
            return 2.0 * x * x + 3.0 * x * y + 4.0 * y * y + 5.0 * x + 6.0 * y + 7.0

        result = Integrator.integrate1D(f, s)
        expected = 58 * math.sqrt(5)
        self.assertAlmostEqual(result, expected, 9)
    def testIntegration1D5(self):
        p1 = Pslg.GridPoint(1, 2)
        p2 = Pslg.GridPoint(3, -2)
        s = Pslg.Segment(p1, p2)

        def f(x, y):
            return 6 * y

        result = Integrator.integrate1D(f, s)
        expected = 0
        self.assertAlmostEqual(result, expected, 9)
    def testSecondDegreePolynomial7(self):
        p1 = Pslg.GridPoint(0, 0)
        p2 = Pslg.GridPoint(1, 0)
        p3 = Pslg.GridPoint(0, 1)
        e = ElementAwarePslg.Element(p1, p2, p3, 0, 0)

        def f(x, y):
            return 2.0 * x * x + 3.0 * x * y + 4.0 * y * y + 5.0 * x + 6.0 * y + 7.0

        result = Integrator.integrate2D(f, e)
        expected = 5.9583333333333339
        self.assertAlmostEqual(result, expected, 9)
    def testIntegration6(self):
        p1 = Pslg.GridPoint(2, 2)
        p2 = Pslg.GridPoint(5, 4)
        p3 = Pslg.GridPoint(5, 6)
        e = ElementAwarePslg.Element(p1, p2, p3, 0, 0)

        def f(x, y):
            return 7.0

        result = Integrator.integrate2D(f, e)
        expected = 21
        self.assertAlmostEqual(result, expected, 9)
    def testIntegration7(self):
        p1 = Pslg.GridPoint(2, 2)
        p2 = Pslg.GridPoint(5, 4)
        p3 = Pslg.GridPoint(5, 6)
        e = ElementAwarePslg.Element(p1, p2, p3, 0, 0)

        def f(x, y):
            return 2.0 * x * x + 3.0 * x * y + 4.0 * y * y + 5.0 * x + 6.0 * y + 7.0

        result = Integrator.integrate2D(f, e)
        expected = 600.5
        self.assertAlmostEqual(result, expected, 9)
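The expected value 600.5 above can be checked independently with the three-point edge-midpoint rule, which is exact for polynomials of total degree at most two on a triangle; this sketch is only a verification aid, not the project's Integrator.integrate2D:

def midpoint_rule_triangle(f, a, b, c):
    # Triangle area from the cross product, then average f over the three edge midpoints.
    area = 0.5 * abs((b[0] - a[0]) * (c[1] - a[1]) - (c[0] - a[0]) * (b[1] - a[1]))
    mids = [((a[0] + b[0]) / 2.0, (a[1] + b[1]) / 2.0),
            ((b[0] + c[0]) / 2.0, (b[1] + c[1]) / 2.0),
            ((a[0] + c[0]) / 2.0, (a[1] + c[1]) / 2.0)]
    return area / 3.0 * sum(f(x, y) for x, y in mids)

# midpoint_rule_triangle(lambda x, y: 2*x*x + 3*x*y + 4*y*y + 5*x + 6*y + 7,
#                        (2, 2), (5, 4), (5, 6))  # -> 600.5 (area 3, midpoint sum 600.5)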
Example #20
def main():

    # Initialize log
    logFile = open('log.txt', 'w')  # 'w' creates the file if it does not already exist
    logFile.write('Test Log for Numerical-Analysis \n')
    logFile.write(str(datetime.datetime.now()) + '\n')

    # Root Finding
    print('Root Finding')
    print('Test 1:', RealSolver.realSolver(f1, 1))
    print('Test 2:', RealSolver.realSolver(f2, 10))
    print('Test 3:', RealSolver.realSolver(f3, -2, 2))
    print('Test 4:', RealSolver.realSolver(f4, 1))
    print()

    # Integration
    print('Integration')
    print('Test 1:', Integrator.integrator(f1, 0, pi / 4))
    print('Test 2:', Integrator.integrator(f2, 0, 0.8))
    print('Test 3:', Integrator.integrator(f3, 1, 10))
    print('Test 4:', Integrator.integrator(f4, 10, 20))
    print()
    #
    #    # Differentiation
    #    print('Differentiation')
    #    print('Test 1:', Differentiator.diff(f1, pi/2))
    #    print('Test 2:', Differentiator.diff(f2, 1))
    #    print('Test 3:', Differentiator.diff(f3, 0))
    #    print('Test 4:', Differentiator.diff(f4, 0))
    #    print()

    temp = Integrator.Integrator(f1, 0, pi)
    temp.findIntegral()
    print(temp.toString())

    return None
    def __init__(self):
        threading.Thread.__init__(self)
        self.lock = threading.Lock()
        self.roll_PID = PID()
        self.pitch_PID = PID()
        self.yaw_PID = PID()
        self.velocity_PID = PID()

        self.integrator = Integrator()
        global motors_speed_diff_pid
        self.IMU = None
        self.roll_diff, self.pitch_diff, self.yaw_diff, self.velocity_diff = 0, 0, 0, 0

        max_sum_output = 900.
        self.roll_PID.setMaxOutput(max_sum_output / 4)
        self.pitch_PID.setMaxOutput(max_sum_output / 4)
        self.yaw_PID.setMaxOutput(max_sum_output / 4)
        self.velocity_PID.setMaxOutput(max_sum_output / 4)

        self.pid_motors_speeds_update = [0, 0, 0, 0, 0]
Example #22
def computeProductionVector(z, slopeFunctions, parameters):
    #Return value
    P = scipy.sparse.lil_matrix((parameters.n, 1))

    #Functions
    beta = Function.FunctionWrapper(parameters.productionEffciency, "B")
    rho = Function.ConstantFunction(parameters.productionThreshold)

    #For each element
    for element in parameters.omegaThree:
        #Build a sum of non-zero functions on this element
        sum = Function.ConstantFunction(0)
        for i in range(0, 3):
            #Get the point
            xi = element.points[i]
            #The coefficient
            zl = z[xi.index, 0]
            #The shape function
            slopeL = slopeFunctions[element.index][i]

            zFunction = Function.ConstantFunction(zl)
            slopeLWrapper = Function.FunctionWrapper(slopeL.evaluate,
                                                     str(slopeL))
            sumElement = zFunction * slopeLWrapper
            sum = sum + sumElement

        #Build a 'B(x)(rho - sum)+' term
        g = beta * Function.PositivePartFunction(rho - sum)

        #Integrate
        for i in range(0, 3):
            xi = element.points[i]
            slopeK = slopeFunctions[element.index][i]
            slopeKWrapper = Function.FunctionWrapper(slopeK.evaluate,
                                                     str(slopeK))
            h = g * slopeKWrapper
            I = Integrator.integrate2D(h, element)
            P[xi.index, 0] += I

    return P.tocsc()
Example #23
def computeProductionVector(z, slopeFunctions, parameters):
    #Return value
    P = scipy.sparse.lil_matrix((parameters.n, 1))
    
    #Functions
    beta = Function.FunctionWrapper(parameters.productionEffciency, "B")
    rho = Function.ConstantFunction(parameters.productionThreshold)
    
    #For each element
    for element in parameters.omegaThree:
        #Build a sum of non-zero functions on this element       
        sum = Function.ConstantFunction(0)
        for i in range(0,3):
            #Get the point
            xi = element.points[i]
            #The coefficient
            zl = z[xi.index, 0]
            #The shape function
            slopeL = slopeFunctions[element.index][i]
            
            zFunction = Function.ConstantFunction(zl)
            slopeLWrapper = Function.FunctionWrapper(slopeL.evaluate, str(slopeL))
            sumElement = zFunction * slopeLWrapper
            sum = sum + sumElement
            
        #Build a 'B(x)(rho - sum)+' term
        g = beta * Function.PositivePartFunction(rho - sum)
        
        #Integrate
        for i in range(0,3):
            xi = element.points[i]
            slopeK = slopeFunctions[element.index][i]
            slopeKWrapper = Function.FunctionWrapper(slopeK.evaluate, str(slopeK))
            h = g * slopeKWrapper
            I = Integrator.integrate2D(h, element)
            P[xi.index, 0] += I
      
    return P.tocsc()
Example #24
    # eta distribution is uniform for small eta
    eta = np.random.rand()*(etahigh-etalow) + etalow

    th = 2*np.arctan(np.exp(-eta))
    # magp = magp/np.sin(th)
    phimin, phimax =  rp.phibounds
    phi = np.random.rand() * (phimax-phimin) + phimin
    Params.Q *= np.random.randint(2)*2 - 1 
    phi *= Params.Q/abs(Params.Q)

    # convert to cartesian momentum
    p = 1000*magp * np.array([np.sin(th)*np.cos(phi),np.sin(th)*np.sin(phi),np.cos(th)])
    x0 = np.array([0,0,0,p[0],p[1],p[2]])
    
    # simulate until nsteps steps is reached, or the particle passes x=10
    traj,tvec = Integrator.rk4(x0, dt, nsteps, cutoff=rp.cutoff, cutoffaxis=3, use_var_dt=rp.use_var_dt)
    ntotaltrajs += 1
    if mode=="VIS":
        trajs.append(traj)

    # print np.linalg.norm(traj[:3,-1]), np.linalg.norm(traj[3:,-1])

    # compute the intersection. Will return None if no intersection
    findIntersection = Detector.FindIntersection
    if rp.useCustomIntersectionFunction:
        findIntersection = rp.intersectFunction
    intersection, t, theta, thW, thV, pInt = findIntersection(traj, tvec, detectorDict)
    if intersection is not None:
        intersects.append(intersection)
        print len(trajs), ": p =",magp, ", eta =", eta, ", phi =", phi, ", eff =", float(len(intersects))/ntotaltrajs
        if mode=="VIS":
Example #25
Pxy = np.linalg.norm(p0[:2])
E = np.sqrt(P**2+Params.m**2)
Q = 1.
B = 1.
c = 2.9979e-1
R = Pxy/(300*Q*B)
vxy = Pxy/E * 2.9979e-1
T = 2*np.pi*R/vxy
w = 2*np.pi/T

x0 = np.array([0,R,0]+p0)

dt = 0.4
nsteps = 1000

traj = Integrator.rk4(x0, dt, nsteps)

time = np.arange(0,dt*nsteps+1e-10, dt)

predx = R*np.sin(w*time) 
predy = R*np.cos(w*time)
predz = p0[0]/np.linalg.norm(p0) * c * time

plt.plot(time,predx,'-b', label='x pred')
plt.plot(time[::5], traj[0,::5],'ob', label='x sim')

#plt.plot(time,traj[1,:]-predy,'-r')
plt.plot(time,predy,'-r', label='y pred')
plt.plot(time[::5], traj[1,::5],'or', label='y sim')

plt.xlabel('Time (ns)')
Example #26
def git_magic(target_repo, user, changed_filesss):
    logger_fname = prepare_logger(user)
    global g
    global parent_folder
    global log_file_dir
    parent_folder = user
    if not settings.test_conf['local']:
        prepare_log(user)
    dolog(
        '############################### magic #############################')
    dolog('target_repo: ' + target_repo)
    change_status(target_repo, 'Preparing')
    from models import Repo
    drepo = Repo.objects.get(url=target_repo)
    # so the tool user can takeover and do stuff
    username = os.environ['github_username']
    password = os.environ['github_password']
    g = Github(username, password)
    local_repo = target_repo.replace(target_repo.split('/')[-2], ToolUser)
    if not settings.test_conf['local']:
        delete_repo(local_repo)
        time.sleep(refresh_sleeping_secs)
    dolog('repo deleted')
    if not settings.test_conf['local'] or settings.test_conf[
            'fork']:  # in case it is not test or test with fork option
        dolog('will fork the repo')
        change_status(target_repo, 'forking repo')
        forked_repo = fork_repo(target_repo)
        cloning_url = forked_repo.ssh_url
        time.sleep(refresh_sleeping_secs)
        dolog('repo forked')
        drepo.progress = 10.0
        drepo.save()
    else:
        print "no fork"
    if not settings.test_conf['local'] or settings.test_conf['clone']:
        change_status(target_repo, 'cloning repo')
        clone_repo(cloning_url, user)
        dolog('repo cloned')
        drepo.progress = 20.0
    files_to_verify = []
    # print "will loop through changed files"
    if log_file_dir is None:
        prepare_log(user)
    Integrator.tools_execution(changed_files=changed_filesss,
                               base_dir=os.path.join(home, user),
                               logfile=log_file_dir,
                               target_repo=target_repo,
                               g_local=g,
                               dolog_fname=logger_fname,
                               change_status=change_status,
                               repo=drepo)

    exception_if_exists = ""
    files_to_verify = [
        c for c in changed_filesss if c[-4:] in ontology_formats
    ]
    for c in changed_filesss:
        if c[-4:] in ontology_formats:
            print "file to verify: " + c
        else:
            print "c: %s c-4: %s" % (c, c[-4:])

    # After the loop
    dolog("number of files to verify %d" % (len(files_to_verify)))
    if len(files_to_verify) == 0:
        print "files: " + str(files_to_verify)
        change_status(target_repo, 'Ready')
        drepo.progress = 100
        drepo.save()
        return
    # if not test or test with push
    if not settings.test_conf['local'] or settings.test_conf['push']:
        commit_changes()
        dolog('changes committed')
    else:
        print 'No push for testing'
    remove_old_pull_requests(target_repo)
    if exception_if_exists == "":  # no errors
        change_status(target_repo, 'validating')
    else:
        change_status(target_repo, exception_if_exists)
        # in case there is an error, create the pull request as well
    # Now to enabled
    # This kind of verification is too naive and need to be eliminated
    # for f in files_to_verify:
    #     repo = None
    #     if use_database:
    #         from models import Repo
    #         repo = Repo.objects.get(url=target_repo)
    #     try:
    #         verify_tools_generation_when_ready(f, repo)
    #         dolog('verification is done successfully')
    #     except Exception as e:
    #         dolog('verification have an exception: ' + str(e))

    if use_database:
        if Repo.objects.get(url=target_repo).state != 'validating':
            r = Repo.objects.get(url=target_repo)
            s = r.state
            s = s.replace('validating', '')
            r.state = s
            r.save()
            # The below "return" is commented so pull request are created even if there are files that are not generated
    # if not testing or testing with pull enabled
    if not settings.test_conf['local'] or settings.test_conf['pull']:
        change_status(target_repo, 'creating a pull request')
        try:
            r = send_pull_request(target_repo, ToolUser)
            dolog('pull request is sent')
            change_status(target_repo, 'pull request is sent')
            change_status(target_repo, 'Ready')
        except Exception as e:
            exception_if_exists += str(e)
            dolog('failed to create pull request: ' + exception_if_exists)
            change_status(target_repo, 'failed to create a pull request')
    else:
        print 'No pull for testing'
    drepo.progress = 100
    drepo.save()
Example #27
import Integrator_euler
import Integrator
#  simulate(destination_directory, continue,
#	save_suffix=, init_conds_name=, init_conds_directory=)
init_file="init_conds.txt"
#direc = "/home/ug/c1672922/results/run1"
direc = "results/1x5_seed10/"
#init_dir = "./results/test"

Integrator.simulate(direc,
                    CONT_PREVIOUS=False,
                    init_conds_name=init_file,
                    report_pos=10)

#Integrator.simulate(
#        destination_directory, ----> Directory to save results in
#        CONT_PREVIOUS,		----> Continue from previous data
#        save_suffix=,		----> eg "masses{save_suffix}.csv"
#        init_conds_name=,	----> Name of initial conditions file
#        init_conds_directory=,	----> Location of initial conds file
#        source_directory=,	----> Location of previous data for continuing
#        report_pos=100		----> Frequency of data saves (in time steps)
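A hypothetical follow-up call, based only on the keyword arguments listed in the comments above (left commented out, since the data locations here are example-specific):

# Integrator.simulate(direc,
#                     CONT_PREVIOUS=True,
#                     save_suffix="_cont",
#                     source_directory=direc,
#                     report_pos=100)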


"""

run1 = some old test. Who knows?
run2 = Test for plotting bug. Getting diagonal line
run3 = Tweaking of x-coords
run4 = increased X x100 and y,z x10
run5 = 10,000 au cluster seperation
Example #28
def circle( xy, radius, color="lightsteelblue", facecolor="green", alpha=.6, ax=None ):

    e = plt.Circle( xy=xy, radius=radius )
    if ax is None:
        ax = plt.gca()
    ax.add_artist(e)
    e.set_clip_box(ax.bbox)
    e.set_edgecolor( color )
    e.set_linewidth(3)
    e.set_facecolor( facecolor )
    e.set_alpha( alpha )

c = ContainerInitializer.ContainerInitializer("tri_lattice").getContainer()
f = Force.Force(c)
i = Integrator.Integrator(0.01, f)

state_list = []
count = 0

plt.figure(1)
plt.clf()
plt.ion()
plt.xlim((0, 10))
plt.ylim((0, 10))
plt.grid()
ax = plt.gca()
plt.show()

while count < 4000:
    #print "--------- BEGIN TIMESTEP " + str(count) + " --------------"
Example #29
class IMUClass():  # runnable :)) # java fried my brain

    def __init__(self, *statevars):
        self.integrator = Integrator()
        name1 = 's'
        port1 = '/dev/ttySAC0'
        self.statevars = []  # example: ['roll', 'pitch', 'yaw']
        for state in statevars:
            self.statevars.append(state)
        # All states available:
        # ['roll', 'pitch', 'yaw']
        # ['health', 'roll', 'pitch', 'yaw'] #'mag_proc_x', 'mag_proc_y', 'mag_proc_z', 'mag_raw_x', 'mag_raw_y',
        # 'mag_raw_z', #'accel_raw_x', 'accel_raw_y', 'accel_raw_z', 'accel_proc_x', 'accel_proc_y', 'accel_proc_z',
        # 'gyro_proc_x', 'gyro_proc_y', #'gyro_proc_z', 'accel_raw_time', 'accel_proc_time', 'euler_time']

        self.s1 = um7.UM7(name1, port1, self.statevars, baud=115200)
        try:
            print('IMU initialization process:')
            # self.s1.reset_to_factory()
            print('GET_FW_REVISION=' + '{}'.format(self.s1.get_fw_revision()))
            print('ZERO_GYROS ' + ('ok.' if self.s1.zero_gyros() else 'failed.'))
            time.sleep(1.1)
            print('RESET_EKF ' + ('ok.' if self.s1.reset_ekf() else 'failed.'))
            print('SET_MAG_REFERENCE ' + ('ok.' if self.s1.set_mag_reference() else 'failed.'))
            time.sleep(1.1)
			# print('SET_HOME_POSITION ' + 'ok.' if self.s1.set_home_position() else 'failed.')
            # print('RESET_EKF ' + 'ok.' if self.s1.reset_ekf() else 'failed.')

			# print('FLASH_COMMIT ' + 'ok.' if self.s1.flash_commit() else 'failed.')
        # time.sleep(3)
        except Exception:
            print('------------!ERROR occurred!--------------\n')

        # readings view format:
        self.fs = ''
        self.hs = ''
        for i in self.statevars:
            self.hs += '{:>9.9s} '
            if i == 'health':
                self.fs += ' {0[' + i + ']:08b} '
            else:
                self.fs += '{0[' + i + ']:9.3f} '

        self.sv = ['roll', 'pitch', 'yaw']

        self.prev_sample = 0
        self.drift = 0
        self.gravity_vector = (0, 0, 0)
        # init
        time.sleep(1)
        self.catchSamples()
        angles = self.makeAnglesDict(self.getSample('roll'), self.getSample('pitch'), self.getSample('yaw'))
        vec = self.makeVector(self.getSample('accel_proc_x'), self.getSample('accel_proc_y'),
                              self.getSample('accel_proc_z'))
        self.gravity_vector = self.rotate(angles, vec, True)

    # x = 0
    # y = 0
    # z = 0
    # for i in range(50):
    #	self.catchSamples()
    #	xs = self.getSample('accel_proc_x')
    #	ys = self.getSample('accel_proc_y')
    #	zs = self.getSample('accel_proc_z')

    #	x+=xs
    #	y+=ys
    #	z+=zs
    #	time.sleep(0.05)

    # self.gravity = np.sqrt(np.power(x/50,2)+np.power(y/50,2)+np.power(z/50,2))
    # print('GRAVITY:  ', self.gravity)
    # self.prev_sample = 0

    def catchSamples(self):
        self.s1.catchallsamples(self.sv, 1.0)
        self.printSamples(False)

    def getSamples(self):
        samples = {}
        for state in self.statevars:
            samples[state] = self.getSample(state)

        samples['vel_x'] = self.getSample('vel_x')  # just for now
        return samples

    def getSample(self, sample):
        if (sample == 'roll' or sample == 'pitch'):
            state = self.s1.state[sample]
            return self.correctAngles(state)
        elif sample == 'yaw':
            #state = self.s1.state[sample]
            state = self.integrator.integrate(self.getSample('gyro_proc_z'))
            # return self.correctAngles(state)
            return self.correctAngles(self.debugDrift(state))

        elif (sample == 'vel_x'):
            # return self.integrator.integrate(self.s1.state['accel_raw_x'])
            # acc = self.s1.state['accel_proc_x'] * scipy.cos(self.getSample('pitch')/180*3.14)+self.s1.state['accel_proc_z']*scipy.sin(self.getSample('pitch')/180*3.14)
            # acc = self.s1.state['accel_proc_x'] - self.gravity/scipy.cos(self.getSample('yaw')/180 *3.14)/scipy.sin(self.getSample('roll')/180 *3.14)
            # acc_x = self.s1.state['accel_proc_x']
            # acc_y = self.s1.state['accel_proc_y']
            # acc_z = self.s1.state['accel_proc_z']
            angles = self.makeAnglesDict(self.getSample('roll'), self.getSample('pitch'), self.getSample('yaw'))
            vec = self.makeVector(self.getSample('accel_proc_x'), self.getSample('accel_proc_y'),
                                  self.getSample('accel_proc_z'))
			# vec = tuple(map(lambda x: abs(x), vec))
			# self.gravity_vector = tuple(map(lambda x: abs(x), self.gravity_vector))
            # TODO: finish this!!!
            acc_vec = self.debugAcceleration(angles, vec)
            acc_x = acc_vec[0]
            if abs(acc_x) > 0.01:
                acc_x = 0
            vel = self.integrator.integrate(acc_x)
            #return self.integrator.integrate(vel)
            return acc_x
        elif (sample == 'vel_y'):
            return self.integrator.integrate(self.s1.state['accel_raw_y'])
        elif (sample == 'vel_z'):
            return self.integrator.integrate(self.s1.state['accel_raw_z'])

        return self.s1.state[sample]

    def correctAngles(self, state):
        if (state > 180):
            # print(str(360 - state))
            return (-360 + state)
        if (state < -180):
            # print("state < -180")
            return (360 + state)
        return state

    def debugDrift(self, sample):
        result = 0
        d = sample - self.prev_sample
        if abs(d) < 0.2 :
            self.drift += d
            result = sample - self.drift
            self.prev_sample = sample
        else:
            result = sample - self.drift
            self.prev_sample = sample

        return result

    def printSamples(self, headerFlag):
        # printing in terminal for testing
        if headerFlag:
            print(self.hs.format(*self.statevars))
        print(self.getSample('roll'), ' ', self.getSample('pitch'), ' ', self.getSample('yaw'), ' ',
              self.getSample('vel_x'))

    def startSendingSamples(self, connectionObject):  # without printing
        # this method can be a target -> in Thread constructor
        # c = 0
        while True:
            # self.catchSamples()
            # self.printSamples(c % 50 == 0)
            # c += 1
            connectionObject.setDataFrame(self.s1.state)

    def makeAnglesDict(self, roll, pitch, yaw):
        return {'roll': roll, 'pitch': pitch, 'yaw': yaw}

    def makeVector(self, x, y, z):
        return x, y, z

    def rotate(self, angles, vector, transposeFlag=False):

        angles = dict(map(lambda x: (x, math.radians(angles[x])), angles))

        rotMatrixYPR = np.array(
            [[math.cos(angles['yaw']) * math.cos(angles['pitch']),
              math.cos(angles['yaw']) * math.sin(angles['pitch']) * math.sin(angles['roll']) - math.sin(
                  angles['yaw']) * math.cos(angles['roll']),
              math.cos(angles['yaw']) * math.sin(angles['pitch']) * math.cos(angles['roll']) + math.sin(
                  angles['yaw']) * math.sin(angles['roll']),
              ],
             [math.sin(angles['yaw']) * math.cos(angles['pitch']),
              math.sin(angles['yaw']) * math.sin(angles['pitch']) * math.sin(angles['roll']) + math.cos(
                  angles['yaw']) * math.cos(angles['roll']),
              math.sin(angles['yaw']) * math.sin(angles['pitch']) * math.cos(angles['roll']) - math.cos(
                  angles['yaw']) * math.sin(angles['roll']),
              ],
             [-math.sin(angles['pitch']),
              math.cos(angles['pitch']) * math.sin(angles['roll']),
              math.cos(angles['pitch']) * math.cos(angles['roll']),
              ]], np.float_)

        if transposeFlag:
            rotMatrixYPR = np.transpose(rotMatrixYPR)

        vectorMatrix = np.asarray(vector, np.float_)
        # vectorMatrix[1], vectorMatrix[2] = vectorMatrix[2], vectorMatrix[1]

        return rotMatrixYPR.dot(vectorMatrix)

    def debugAcceleration(self, angles, vector):
        gravity_vec_rotated = self.rotate(angles, self.gravity_vector, False)
		# vec = tuple(map(lambda x: abs(vector[vector.index(x)] - gravity_vec_rotated[vector.index(x)]), vector))
        vec = tuple(map(lambda x: vector[vector.index(x)] - gravity_vec_rotated[vector.index(x)], vector))

        return vec
Example #30
def precomputeMatrices(slopeFunctions, parameters):
    G = scipy.sparse.lil_matrix((parameters.n, parameters.n))
    A = scipy.sparse.lil_matrix((parameters.n, parameters.n))
    BPrime = scipy.sparse.lil_matrix((parameters.n, parameters.n))
      
    for element in parameters.omega:
        for i in range(0,3):
            for j in range(0,3):
                #Get the points
                xi = element.points[i]
                xj = element.points[j]
                
                #Get slope functions
                slopeI = slopeFunctions[element.index][i]
                slopeJ = slopeFunctions[element.index][j]
                                
                #Compute G matrix
                fI = Function.FunctionWrapper(slopeI.evaluate, str(slopeI))
                fJ = Function.FunctionWrapper(slopeJ.evaluate, str(slopeJ))
                f = fI * fJ
                I = Integrator.integrate2D(f, element)
                G[xi.index, xj.index] += I
    
                #Compute A matrix
                sum = 0
                slopeIDerivates = slopeI.getDerivates()
                slopeJDerivates = slopeJ.getDerivates()
                for l in range(0,2):
                    for k in range(0,2):
                        #a_lk
                        tensor = Function.FunctionWrapper(parameters.diffusionTensor[l][k], "a_" + str(l) + str(k))
                        #vj/dx_k 
                        derivate1 = Function.FunctionWrapper(slopeJDerivates[k], "v" + str(xj.index) + "/dx_" + str(k))
                        #vi/dx_l            
                        derivate2 = Function.FunctionWrapper(slopeIDerivates[l], "v" + str(xi.index) + "/dx_" + str(l))
                        f = tensor * derivate1 * derivate2
                        I = Integrator.integrate2D(f, element)
                        sum += I
                A[xi.index, xj.index] -= sum
    
    #Lump the G matrix
    for i in range(0, parameters.n):
        sum = 0
        for j in range(0, parameters.n):
            sum += G[i,j]
            G[i,j] = 0
        G[i,i] = sum
    
    for boundarySegmentDesc in parameters.omegaD:
        segment = boundarySegmentDesc[0]
        element = boundarySegmentDesc[1]
        for i in range(0,2):
            for j in range(0,2):
                xi = segment.points[i]
                xj = segment.points[j]
                slopeI = slopeFunctions[element.index][element.pointToElementIndex(xi)]
                slopeJ = slopeFunctions[element.index][element.pointToElementIndex(xj)]
                fI = Function.FunctionWrapper(slopeI.evaluate, str(slopeI))
                fJ = Function.FunctionWrapper(slopeJ.evaluate, str(slopeJ))
                f = fI * fJ
                I = Integrator.integrate1D(f, segment)
                BPrime[xj.index, xi.index] -= I
                
    return (G.tocsc(), A.tocsc(), BPrime.tocsc())
Example #31
def precomputeMatrices(slopeFunctions, parameters):
    G = scipy.sparse.lil_matrix((parameters.n, parameters.n))
    A = scipy.sparse.lil_matrix((parameters.n, parameters.n))
    BPrime = scipy.sparse.lil_matrix((parameters.n, parameters.n))

    for element in parameters.omega:
        for i in range(0, 3):
            for j in range(0, 3):
                #Get the points
                xi = element.points[i]
                xj = element.points[j]

                #Get slope functions
                slopeI = slopeFunctions[element.index][i]
                slopeJ = slopeFunctions[element.index][j]

                #Compute G matrix
                fI = Function.FunctionWrapper(slopeI.evaluate, str(slopeI))
                fJ = Function.FunctionWrapper(slopeJ.evaluate, str(slopeJ))
                f = fI * fJ
                I = Integrator.integrate2D(f, element)
                G[xi.index, xj.index] += I

                #Compute A matrix
                sum = 0
                slopeIDerivates = slopeI.getDerivates()
                slopeJDerivates = slopeJ.getDerivates()
                for l in range(0, 2):
                    for k in range(0, 2):
                        #a_lk
                        tensor = Function.FunctionWrapper(
                            parameters.diffusionTensor[l][k],
                            "a_" + str(l) + str(k))
                        #vj/dx_k
                        derivate1 = Function.FunctionWrapper(
                            slopeJDerivates[k],
                            "v" + str(xj.index) + "/dx_" + str(k))
                        #vi/dx_l
                        derivate2 = Function.FunctionWrapper(
                            slopeIDerivates[l],
                            "v" + str(xi.index) + "/dx_" + str(l))
                        f = tensor * derivate1 * derivate2
                        I = Integrator.integrate2D(f, element)
                        sum += I
                A[xi.index, xj.index] -= sum

    #Lump the G matrix
    for i in range(0, parameters.n):
        sum = 0
        for j in range(0, parameters.n):
            sum += G[i, j]
            G[i, j] = 0
        G[i, i] = sum

    for boundarySegmentDesc in parameters.omegaD:
        segment = boundarySegmentDesc[0]
        element = boundarySegmentDesc[1]
        for i in range(0, 2):
            for j in range(0, 2):
                xi = segment.points[i]
                xj = segment.points[j]
                slopeI = slopeFunctions[element.index][
                    element.pointToElementIndex(xi)]
                slopeJ = slopeFunctions[element.index][
                    element.pointToElementIndex(xj)]
                fI = Function.FunctionWrapper(slopeI.evaluate, str(slopeI))
                fJ = Function.FunctionWrapper(slopeJ.evaluate, str(slopeJ))
                f = fI * fJ
                I = Integrator.integrate1D(f, segment)
                BPrime[xj.index, xi.index] -= I

    return (G.tocsc(), A.tocsc(), BPrime.tocsc())
Example #32
def yvsx():
    dt = 0.05
    nsteps = 800
    
    p0 = [20000, 0, 0]
    x0 = np.array([0,0,0]+p0)
    
    detectorDict = { "norm": np.array([1,0,0]),
                     "dist": 0,
                     "v": np.array([0,1,0]),
                     "w": np.array([0,0,1]),
                     "width": 20,
                     "height": 20  }
    
    Params.UseFineBField = False
    traj_coarse = Integrator.rk4(x0, Integrator.traverseBField, dt, nsteps)
    Params.UseFineBField = True
    traj_fine = Integrator.rk4(x0, Integrator.traverseBField, dt, nsteps)
    
    yfine = []
    ycoarse = []
    zfine = []
    zcoarse = []
    rvals = np.linspace(0,8.5,100)
    for r in rvals:
        detectorDict["dist"] = r
        
        intersect_coarse = Detector.FindIntersection(traj_coarse,detectorDict)
        ycoarse.append(100*intersect_coarse[0][1])
        zcoarse.append(100*intersect_coarse[0][2])
        intersect_fine = Detector.FindIntersection(traj_fine,detectorDict)
        yfine.append(100*intersect_fine[0][1])
        zfine.append(100*intersect_fine[0][2])
        
    yfine = np.array(yfine)
    ycoarse = np.array(ycoarse)
    zfine = np.array(zfine)
    zcoarse = np.array(zcoarse)
        
    plt.figure(1)

    plt.subplot(1,1,1)
    ax1 = plt.gca()
    fplot, = ax1.plot(rvals, yfine, '-b', linewidth=2, label='1 cm field')
    cplot, = ax1.plot(rvals, ycoarse, '--r', linewidth=2, label='10 cm field')
    ax2 = ax1.twinx()
    dplot, = ax2.plot(rvals,abs(yfine-ycoarse), '-', color='0.75', label='Difference')
    ax1.set_ylabel('y (cm)')
    ax2.set_ylabel('diff (cm)')
    
    # plt.subplot(2,1,2)
    # ax1 = plt.gca()
    # ax1.plot(rvals, zfine, '-b')
    # ax1.plot(rvals, zcoarse, '--r')
    # ax2 = ax1.twinx()
    # ax2.plot(rvals,abs(zfine-zcoarse), '-g')
    # ax1.set_ylabel('y (cm)')
    # ax2.set_ylabel('diff (cm)')
    
    ax1.set_xlabel('x (m)')
    ax1.set_title(r'20 GeV muon trajectory ($\eta=0$)')
    
    plt.legend(handles=[fplot,cplot,dplot],loc='lower left')
    
    plt.savefig("/home/users/bemarsh/public_html/backup/B-integrator/fine_vs_coarse/r_vs_x_eta0_interp.png")
    
    plt.show()
Example #33
def git_magic(target_repo, user, cloning_repo, changed_filesss):
    logger_fname = prepare_logger(user)
    global g
    global parent_folder
    global log_file_dir
    parent_folder = user
    if not settings.TEST:
        prepare_log(user)
    dolog(
        '############################### magic #############################')
    dolog('target_repo: ' + target_repo)
    change_status(target_repo, 'Preparing')
    # so the tool user can takeover and do stuff
    username = os.environ['github_username']
    password = os.environ['github_password']
    g = Github(username, password)
    local_repo = target_repo.replace(target_repo.split('/')[-2], ToolUser)
    if not settings.TEST or not settings.test_conf['local']:
        delete_repo(local_repo)
    dolog('repo deleted')
    if not settings.TEST or not settings.test_conf['local']:
        dolog('will fork the repo')
        change_status(target_repo, 'forking repo')
        fork_repo(target_repo, username, password)
        dolog('repo forked')
    if not settings.TEST or not settings.test_conf['local']:
        change_status(target_repo, 'cloning repo')
        clone_repo(cloning_repo, user)
        dolog('repo cloned')
    files_to_verify = []
    print "will loop through changed files"
    Integrator.tools_execution(changed_files=changed_filesss,
                               base_dir=os.path.join(home, user),
                               logfile=log_file_dir,
                               target_repo=target_repo,
                               g_local=g,
                               dolog_fname=logger_fname)
    # for chf in changed_filesss:
    #     print "chf: "+chf
    #     auton_conf = {'ar2dtool_enable': False, 'widoco_enable': False,
    #                   'oops_enable': False, 'owl2jsonld_enable': False}
    #     if chf[-4:] not in ontology_formats:  # validate ontology formats
    #         # for now, do not detect the configuration
    #         continue
    #         # print 'check conf file changed is: %s'%(chf)
    #         dolog('check conf file changed is: %s' % chf)
    #         if get_file_from_path(chf) == 'OnToology.cfg':
    #             dolog('OnToology.cfg is changed')
    #             fi = get_level_up(chf)
    #             fi = fi[6:]
    #             dolog('ont file is: ' + fi)
    #             changed_files = [fi]
    #             auton_conf = get_auton_configuration(fi)
    #         elif get_file_from_path(chf) in ar2dtool.ar2dtool_config_types:
    #             auton_conf['ar2dtool_enable'] = True
    #             fi = get_level_up(chf)
    #             fi = get_level_up(fi)
    #             fi = get_level_up(fi)
    #             fi = fi[6:]
    #             changed_files = [fi]
    #             dolog('change in AR2DTool file %s' % fi)
    #         elif 'widoco.conf' in get_file_from_path(chf):
    #             fi = get_level_up(chf)
    #             fi = get_level_up(fi)
    #             fi = fi[6:]
    #             changed_files = [fi]
    #             dolog('change in Widoco file %s' % fi)
    #     else:  # chf is ontology file
    #         dolog('working with: ' + chf)
    #         changed_files = [chf]
    #         auton_conf = get_auton_configuration(chf)
    #         # The below three lines is to add files to verify their output
    #         # later on
    #         ftvcomp = auton_conf
    #         ftvcomp['file'] = chf
    #         files_to_verify.append(ftvcomp)
    #     dolog(str(auton_conf))
    #     exception_if_exists = ""

    # if auton_conf['ar2dtool_enable']:
    #     dolog('ar2dtool_enable is true')
    #     change_status(target_repo, 'drawing diagrams for ' + changed_files[0])
    #     try:
    #         ar2dtool.draw_diagrams(changed_files)
    #         dolog('diagrams drawn successfully')
    #     except Exception as e:
    #         exception_if_exists += chf + ": " + str(e) + "\n"
    #         dolog('diagrams not drawn: ' + str(e))
    # else:
    #     dolog('ar2dtool_enable is false')
    # if auton_conf['widoco_enable']:
    #     dolog('ar2dtool_enable is false')
    #     change_status(target_repo, 'generating documents for ' + changed_files[0])
    #     try:
    #         generate_widoco_docs(changed_files)
    #         dolog('generated docs')
    #     except Exception as e:
    #         exception_if_exists += str(e)
    #         dolog('exception in generating documentation: ' + str(e))
    # else:
    #     dolog('widoco_enable is false')
    # if auton_conf['oops_enable']:
    #     dolog('oops_enable is true')
    #     change_status(
    #         target_repo, 'OOPS is checking for errors for ' + changed_files[0])
    #     try:
    #         oops_ont_files(target_repo, changed_files)
    #         dolog('oops checked ontology for pitfalls')
    #     except Exception as e:
    #         exception_if_exists += str(e)
    #         dolog('exception in generating oops validation document: ' + str(e))
    # else:
    #     dolog('oops_enable is false')
    # if auton_conf['owl2jsonld_enable']:
    #     dolog('owl2jsonld_enable is true')
    #     change_status(target_repo, 'generating context document for ' + changed_files[0])
    #     try:
    #         generate_owl2jsonld_file(changed_files)
    #         dolog('generated context')
    #     except Exception as e:
    #         exception_if_exists += str(e)
    #         dolog('exception in generating context documentation: ' + str(e))
    # else:
    #     dolog('owl2jsonld_enable is false')

    exception_if_exists = ""
    files_to_verify = [
        c for c in changed_filesss if c[-4:] in ontology_formats
    ]
    for c in changed_filesss:
        if c[-4:] in ontology_formats:
            print "file to verify: " + c
        else:
            print "c: %s c-4: %s" % (c, c[-4:])

    # After the loop
    dolog("number of files to verify %d" % (len(files_to_verify)))
    if len(files_to_verify) == 0:
        change_status(target_repo, 'Ready')
        return
    # if not settings.TEST or not settings.test_conf['local']:
    commit_changes()
    dolog('changes committed')
    remove_old_pull_requests(target_repo)
    if exception_if_exists == "":  # no errors
        change_status(target_repo, 'validating')
    else:
        change_status(target_repo, exception_if_exists)
        # in case there is an error, create the pull request as well
    # Now to enabled
    for f in files_to_verify:
        repo = None
        if use_database:
            from models import Repo
            repo = Repo.objects.get(url=target_repo)
        try:
            verify_tools_generation_when_ready(f, repo)
            dolog('verification is done successfully')
        except Exception as e:
            dolog('verification have an exception: ' + str(e))

    if use_database:
        if Repo.objects.get(url=target_repo).state != 'validating':
            r = Repo.objects.get(url=target_repo)
            s = r.state
            s = s.replace('validating', '')
            r.state = s
            r.save()
            # The below "return" is commented so pull request are created even if there are files that are not generated
    # if not settings.TEST or not settings.test_conf['local']:
    if True:
        change_status(target_repo, 'creating a pull request')
        try:
            r = send_pull_request(target_repo, ToolUser)
        except Exception as e:
            exception_if_exists += str(e)
        dolog('pull request is sent')
    change_status(target_repo, 'Ready')
Example #34
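# Propagate a charged particle through a magnetic field with RK4
# (Integrator.traverseBField) and compare the simulated x/y coordinates
# against the analytic helix prediction.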
Pxy = np.linalg.norm(p0[:2])
E = np.sqrt(P**2+Params.m**2)
Q = 1.
B = 1.
c = 2.9979e-1
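# Gyroradius in metres (assuming pT in MeV, B in Tesla and unit charge):
# R[m] = pT[MeV] / (300 * q * B[T])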
R = Pxy/(300*Q*B)
vxy = Pxy/E * 2.9979e-1
T = 2*np.pi*R/vxy
w = 2*np.pi/T

x0 = np.array([0,R,0]+p0)

dt = 0.4
nsteps = 1000

traj = Integrator.rk4(x0, Integrator.traverseBField, dt, nsteps)

time = np.arange(0,dt*nsteps+1e-10, dt)

predx = R*np.sin(w*time) 
predy = R*np.cos(w*time)
predz = p0[0]/np.linalg.norm(p0) * c * time

plt.plot(time,predx,'-b', label='x pred')
plt.plot(time[::5], traj[0,::5],'ob', label='x sim')

#plt.plot(time,traj[1,:]-predy,'-r')
plt.plot(time,predy,'-r', label='y pred')
plt.plot(time[::5], traj[1,::5],'or', label='y sim')

plt.xlabel('Time (ns)')
Example #35
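# Molecular-dynamics example: point particles in a fixed box interacting via a
# Morse potential, advanced with velocity Verlet (Integrator.VelVerlet) while
# recording the potential and kinetic energy at every step.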
if (sys.argv[1] == "e"):
    # Inicializo parametros
    # Caja
    L = 2
    bx = Box.Box([0, 0, 0], [L, L, L], "Fixed")
    # Particulas
    Npart = int(sys.argv[2])
    N = int(sys.argv[3])
    part = Particles.PointParticles(Npart)
    pos = particulas(Npart, L)
    part.x = pos
    part.mass = np.zeros((Npart, 1), dtype=np.float32) + 1
    # Interaccion
    morse = morse.Morse(1.1, 1, 0.2, 0.25)
    # Integrador
    verlet = Integrator.VelVerlet(0.01)
    # Vecinos
    vecinos = Neighbour.Neighbour()
    pares = vecinos.build_list(part.x, "None")

    # Termalizamos
    Epot = np.zeros(N)
    Ecin = np.zeros(N)
    part.f, Epot[0] = morse.forces(part.x, part.v)
    for i in range(N):
        part.x, part.v = verlet.first_step(part.x, part.v, part.f)
        bx.wrap_boundary(part.x, part.v)
        part.f, Epot[i] = morse.forces(part.x, part.v, pares)
        part.x, part.v = verlet.last_step(part.x, part.v, part.f)
        Ecin[i] = energia_cinetica(part.v)
        part.f, Epot[i] = morse.forces(part.x, part.v, pares)
Example #36
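# git_magic: fork and clone the target repository, run the configured tools
# (ar2dtool, widoco, oops, owl2jsonld) over the changed ontology files via
# Integrator.tools_execution, commit the output and open a pull request.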
def git_magic(target_repo, user, cloning_repo, changed_filesss):
    logger_fname = prepare_logger(user)
    global g
    global parent_folder
    global log_file_dir
    parent_folder = user
    if not settings.TEST:
        prepare_log(user)
    dolog('############################### magic #############################')
    dolog('target_repo: ' + target_repo)
    change_status(target_repo, 'Preparing')
    # so the tool user can takeover and do stuff
    username = os.environ['github_username']
    password = os.environ['github_password']
    g = Github(username, password)
    local_repo = target_repo.replace(target_repo.split('/')[-2], ToolUser)
    if not settings.TEST or not settings.test_conf['local']:
        delete_repo(local_repo)
    dolog('repo deleted')
    if not settings.TEST or not settings.test_conf['local']:
        dolog('will fork the repo')
        change_status(target_repo, 'forking repo')
        fork_repo(target_repo, username, password)
        dolog('repo forked')
    if not settings.TEST or not settings.test_conf['local']:
        change_status(target_repo, 'cloning repo')
        clone_repo(cloning_repo, user)
        dolog('repo cloned')
    files_to_verify = []
    print "will loop through changed files"
    Integrator.tools_execution(changed_files=changed_filesss, base_dir=os.path.join(home, user), logfile=log_file_dir,
                               target_repo=target_repo, g_local=g, dolog_fname=logger_fname)
    # for chf in changed_filesss:
    #     print "chf: "+chf
    #     auton_conf = {'ar2dtool_enable': False, 'widoco_enable': False,
    #                   'oops_enable': False, 'owl2jsonld_enable': False}
    #     if chf[-4:] not in ontology_formats:  # validate ontology formats
    #         # for now, do not detect the configuration
    #         continue
    #         # print 'check conf file changed is: %s'%(chf)
    #         dolog('check conf file changed is: %s' % chf)
    #         if get_file_from_path(chf) == 'OnToology.cfg':
    #             dolog('OnToology.cfg is changed')
    #             fi = get_level_up(chf)
    #             fi = fi[6:]
    #             dolog('ont file is: ' + fi)
    #             changed_files = [fi]
    #             auton_conf = get_auton_configuration(fi)
    #         elif get_file_from_path(chf) in ar2dtool.ar2dtool_config_types:
    #             auton_conf['ar2dtool_enable'] = True
    #             fi = get_level_up(chf)
    #             fi = get_level_up(fi)
    #             fi = get_level_up(fi)
    #             fi = fi[6:]
    #             changed_files = [fi]
    #             dolog('change in AR2DTool file %s' % fi)
    #         elif 'widoco.conf' in get_file_from_path(chf):
    #             fi = get_level_up(chf)
    #             fi = get_level_up(fi)
    #             fi = fi[6:]
    #             changed_files = [fi]
    #             dolog('change in Widoco file %s' % fi)
    #     else:  # chf is ontology file
    #         dolog('working with: ' + chf)
    #         changed_files = [chf]
    #         auton_conf = get_auton_configuration(chf)
    #         # The below three lines is to add files to verify their output
    #         # later on
    #         ftvcomp = auton_conf
    #         ftvcomp['file'] = chf
    #         files_to_verify.append(ftvcomp)
    #     dolog(str(auton_conf))
    #     exception_if_exists = ""

        # if auton_conf['ar2dtool_enable']:
        #     dolog('ar2dtool_enable is true')
        #     change_status(target_repo, 'drawing diagrams for ' + changed_files[0])
        #     try:
        #         ar2dtool.draw_diagrams(changed_files)
        #         dolog('diagrams drawn successfully')
        #     except Exception as e:
        #         exception_if_exists += chf + ": " + str(e) + "\n"
        #         dolog('diagrams not drawn: ' + str(e))
        # else:
        #     dolog('ar2dtool_enable is false')
        # if auton_conf['widoco_enable']:
        #     dolog('widoco_enable is true')
        #     change_status(target_repo, 'generating documents for ' + changed_files[0])
        #     try:
        #         generate_widoco_docs(changed_files)
        #         dolog('generated docs')
        #     except Exception as e:
        #         exception_if_exists += str(e)
        #         dolog('exception in generating documentation: ' + str(e))
        # else:
        #     dolog('widoco_enable is false')
        # if auton_conf['oops_enable']:
        #     dolog('oops_enable is true')
        #     change_status(
        #         target_repo, 'OOPS is checking for errors for ' + changed_files[0])
        #     try:
        #         oops_ont_files(target_repo, changed_files)
        #         dolog('oops checked ontology for pitfalls')
        #     except Exception as e:
        #         exception_if_exists += str(e)
        #         dolog('exception in generating oops validation document: ' + str(e))
        # else:
        #     dolog('oops_enable is false')
        # if auton_conf['owl2jsonld_enable']:
        #     dolog('owl2jsonld_enable is true')
        #     change_status(target_repo, 'generating context document for ' + changed_files[0])
        #     try:
        #         generate_owl2jsonld_file(changed_files)
        #         dolog('generated context')
        #     except Exception as e:
        #         exception_if_exists += str(e)
        #         dolog('exception in generating context documentation: ' + str(e))
        # else:
        #     dolog('owl2jsonld_enable is false')

    exception_if_exists = ""
    files_to_verify = [c for c in changed_filesss if c[-4:] in ontology_formats]
    for c in changed_filesss:
        if c[-4:] in ontology_formats:
            print "file to verify: "+c
        else:
            print "c: %s c-4: %s"%(c, c[-4:])

    # After the loop
    dolog("number of files to verify %d" % (len(files_to_verify)))
    if len(files_to_verify) == 0:
        change_status(target_repo, 'Ready')
        return
    # if not settings.TEST or not settings.test_conf['local']:
    commit_changes()
    dolog('changes committed')
    remove_old_pull_requests(target_repo)
    if exception_if_exists == "":  # no errors
        change_status(target_repo, 'validating')
    else:
        change_status(target_repo, exception_if_exists)
        # in case there is an error, create the pull request as well
    # Now verify the output of the enabled tools
    for f in files_to_verify:
        repo = None
        if use_database:
            from models import Repo
            repo = Repo.objects.get(url=target_repo)
        try:
            verify_tools_generation_when_ready(f, repo)
            dolog('verification is done successfully')
        except Exception as e:
            dolog('verification raised an exception: ' + str(e))

    if use_database:
        if Repo.objects.get(url=target_repo).state != 'validating':
            r = Repo.objects.get(url=target_repo)
            s = r.state
            s = s.replace('validating', '')
            r.state = s
            r.save()
            # The below "return" is commented so pull request are created even if there are files that are not generated
    # if not settings.TEST or not settings.test_conf['local']:
    if True:
        change_status(target_repo, 'creating a pull request')
        try:
            r = send_pull_request(target_repo, ToolUser)
        except Exception as e:
            exception_if_exists += str(e)
        dolog('pull request is sent')
    change_status(target_repo, 'Ready')
Example #37
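# For each event in the ROOT tree, build the cartesian momentum from
# (pT, eta, phi), propagate the particle with Integrator.rk4 and look for an
# intersection with the detector (Detector.FindIntersection by default).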
rootfile = ROOT.TFile(rp.infile)
tree = rootfile.Get('EventTree')
nevents = tree.GetEntries()
for event in tree:

    # convert to cartesian momentum
    p = event.pT * np.array(
        [np.cos(event.phi),
         np.sin(event.phi),
         np.sinh(event.eta)])
    x0 = np.array([0, 0, 0, p[0], p[1], p[2]])

    # simulate until nsteps steps is reached, or the particle passes x=10
    traj, tvec = Integrator.rk4(x0,
                                dt,
                                nsteps,
                                cutoff=rp.cutoff,
                                cutoffaxis=3,
                                use_var_dt=rp.use_var_dt)

    # compute the intersection. Will return None if no intersection
    findIntersection = Detector.FindIntersection
    if rp.useCustomIntersectionFunction:
        findIntersection = rp.intersectFunction
    intersection, t, theta, thW, thV, pInt = findIntersection(
        traj, tvec, detectorDict)
    if intersection is not None:
        intersects.append(intersection)
        print len(trajs), ": p =", magp, ", eta =", eta, ", phi =", phi, \
            ", eff =", float(len(intersects)) / ntotaltrajs
Example #38
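# Multiple-scattering study: first propagate without scattering
# (Params.MSCtype = "none") to get the reference crossing point, then repeat
# Nsamp trajectories with the "kuhn" scattering model and fill an r:theta ntuple.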
    print p

    if isBarrell:
        p0 = [p, 0, 0]
    else:
        th = np.arctan(1.0 / 4)
        p0 = [p * np.sin(th) * np.cos(np.pi / 4), p * np.sin(th) * np.sin(np.pi / 4), p * np.cos(th)]

    x0 = np.array([0, 0, 0] + p0)

    rvals = []
    thvals = []

    ## get the trajectory & end coords with no multiple scattering
    Params.MSCtype = "none"
    traj_noMSC = Integrator.rk4(x0, Integrator.traverseBField, dt, nsteps, cutoff=co, cutoffaxis=coa)
    if isBarrell:
        k = (8 - getr(traj_noMSC[:, -2])) / (getr(traj_noMSC[:, -1]) - getr(traj_noMSC[:, -2]))
    else:
        k = (14 - traj_noMSC[2, -2]) / (traj_noMSC[2, -1] - traj_noMSC[2, -2])
    cross_noMSC = traj_noMSC[:3, -2] + k * (traj_noMSC[:3, -1] - traj_noMSC[:3, -2])
    projth_noMSC = np.arctan2(traj_noMSC[1, -1], traj_noMSC[0, -1])

    Params.MSCtype = "kuhn"

    ntuple = ROOT.TNtuple("p" + str(p) + "_data", "title", "r:theta")

    for i in range(Nsamp):

        traj = Integrator.rk4(x0, Integrator.traverseBField, dt, nsteps, cutoff=co, cutoffaxis=coa)
Example #39
class IMUClass():
    """Class for setting up, control and also calculate and validate IMU's input
    This class is kind of runnable class ( like in java ) - can be used as target for Thread object"""
    def __init__(self, *statevars):
        self.gyro_integrator = Integrator()
        self.acc_integrator = Integrator()
        self.vel_integrator = Integrator()
        self.i = 0
        self.gyro_filter = 0.2
        self.integrator = Integrator()
        name1 = 's'
        port1 = '/dev/ttySAC0'
        self.statevars = []  # example: ['roll', 'pitch', 'yaw']
        for state in statevars:
            self.statevars.append(state)
        # All states available:
        # ['roll', 'pitch', 'yaw']
        # ['health', 'roll', 'pitch', 'yaw'] #'mag_proc_x', 'mag_proc_y', 'mag_proc_z', 'mag_raw_x', 'mag_raw_y',
        # 'mag_raw_z', #'accel_raw_x', 'accel_raw_y', 'accel_raw_z', 'accel_proc_x', 'accel_proc_y', 'accel_proc_z',
        # 'gyro_proc_x', 'gyro_proc_y', #'gyro_proc_z', 'accel_raw_time', 'accel_proc_time', 'euler_time']

        self.s1 = um7.UM7(name1, port1, self.statevars, baud=115200)
        try:
            print('IMU initialization process:')
            # self.s1.reset_to_factory()
            print('GET_FW_REVISION=' + '{}'.format(self.s1.get_fw_revision()))
            print('ZERO_GYROS ' + ('ok.' if self.s1.zero_gyros() else 'failed.'))
            time.sleep(1.1)
            print('RESET_EKF ' + ('ok.' if self.s1.reset_ekf() else 'failed.'))
            print('SET_MAG_REFERENCE ' +
                  ('ok.' if self.s1.set_mag_reference() else 'failed.'))
            time.sleep(1.1)
            # print('SET_HOME_POSITION ' + 'ok.' if self.s1.set_home_position() else 'failed.')
            # print('RESET_EKF ' + 'ok.' if self.s1.reset_ekf() else 'failed.')

            # print('FLASH_COMMIT ' + 'ok.' if self.s1.flash_commit() else 'failed.')
            # time.sleep(3)
        except Exception:
            print('------------!ERROR occurred!--------------\n')

        # readings view format:
        self.fs = ''
        self.hs = ''
        for i in self.statevars:
            self.hs += '{:>9.9s} '
            if i == 'health':
                self.fs += ' {0[' + i + ']:08b} '
            else:
                self.fs += '{0[' + i + ']:9.3f} '

        self.sv = ['roll', 'pitch', 'yaw']

        self.prev_sample = 0
        self.drift = 0
        self.gravity_vector = (0, 0, 0)
        # init
        time.sleep(1)
        self.catchSamples()
        angles = self.makeAnglesDict(self.getSample('roll'),
                                     self.getSample('pitch'),
                                     self.getSample('yaw'))
        vec = self.makeVector(self.getSample('accel_proc_x'),
                              self.getSample('accel_proc_y'),
                              self.getSample('accel_proc_z'))
        self.gravity_vector = self.rotate(angles, vec, True)

    # x = 0
    # y = 0
    # z = 0
    # for i in range(50):
    #	self.catchSamples()
    #	xs = self.getSample('accel_proc_x')
    #	ys = self.getSample('accel_proc_y')
    #	zs = self.getSample('accel_proc_z')

    #	x+=xs
    #	y+=ys
    #	z+=zs
    #	time.sleep(0.05)

    # self.gravity = np.sqrt(np.power(x/50,2)+np.power(y/50,2)+np.power(z/50,2))
    # print('GRAVITY:  ', self.gravity)
    # self.prev_sample = 0

    def catchSamples(self):
        self.s1.catchallsamples(self.sv, 1.0)
        self.printSamples(False)

    def getSamples(self):
        samples = {}
        for state in self.statevars:
            samples[state] = self.getSample(state)

        samples['vel_x'] = self.getSample('vel_x')  # just for now
        return samples

    # Method that returns validated and precalculated IMU state values
    def getSample(self, sample):
        if (sample == 'roll' or sample == 'pitch'):
            state = self.s1.state[sample]
            return self.correctAngles(state)
        elif sample == 'yaw':
            #state = self.s1.state[sample]
            state = self.getSample('gyro_raw_z')
            if state > -self.gyro_filter and state < self.gyro_filter:
                state = 0
            return self.correctAngles(
                self.gyro_integrator.integrate(state) / scipy.pi * 18)
            # return self.correctAngles(state)
            #return self.correctAngles(self.debugDrift(state))
            #return state

        elif (sample == 'vel_x'):
            # It was made for estimating the velocity of the AUV, but it doesn't work properly
            # Not used because of issues

            self.i = self.i + 1
            if self.i <= 10:
                return 0

            angles = self.makeAnglesDict(self.getSample('roll'),
                                         self.getSample('pitch'),
                                         self.getSample('yaw'))
            vec = self.makeVector(self.getSample('accel_proc_x'),
                                  self.getSample('accel_proc_y'),
                                  self.getSample('accel_proc_z'))
            acc = self.rotate(angles, vec)
            acc_x = acc[0]
            if abs(acc_x) < 0.02:
                acc_x = 0
            velocity = self.vel_integrator.integrate(acc_x)
            #if abs(velocity) < 0.02:
            #    velocity = 0
            return velocity
# vec = tuple(map(lambda x: abs(x), vec))
# self.gravity_vector = tuple(map(lambda x: abs(x), self.gravity_vector))
# acc_vec = self.debugAcceleration(angles, vec)
# acc_x = acc_vec[0]

# vel = self.acc_integrator.integrate(acc_x)
#return self.integrator.integrate(vel)
#return self.debugDrift(vel)
# return acc_x

        return self.s1.state[sample]

    # Method that maps raw angles provided by the IMU to values from -180 to 180 degrees
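    # e.g. 270 -> -90, -200 -> 160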
    def correctAngles(self, state):
        if (state > 180):
            # print(str(360 - state))
            return (-360 + state)
        if (state < -180):
            # print("state < -180")
            return (360 + state)
        return state

    # Method that fixes low frequency drift
    def debugDrift(self, sample):
        result = 0
        d = sample - self.prev_sample

        if abs(d) < FILTER_VALUE:
            self.drift += d
            result = sample - self.drift
            self.prev_sample = sample
        else:
            result = sample - self.drift
            self.prev_sample = sample

        return result

    def printSamples(self, headerFlag):
        # printing in terminal for testing
        if headerFlag:
            print(self.hs.format(*self.statevars))
        print(self.getSample('roll'), ' ', self.getSample('pitch'), ' ',
              self.getSample('yaw'), ' ', self.getSample('vel_x'))

    def startSendingSamples(self, connectionObject):  # without printing
        # this method can be a target -> in Thread constructor
        # Just for visualizing reasons, not being used now
        # c = 0
        while True:
            # self.catchSamples()
            # self.printSamples(c % 50 == 0)
            # c += 1
            connectionObject.setDataFrame(self.s1.state)

    def makeAnglesDict(self, roll, pitch, yaw):
        return {'roll': roll, 'pitch': pitch, 'yaw': yaw}

    def makeVector(self, x, y, z):
        return x, y, z

    # Method for rotating the AUV state, used for removing the gravity acceleration vector from IMU values
    def rotate(self, angles, vector, transposeFlag=False):

        angles = dict(map(lambda x: (x, math.radians(angles[x])), angles))

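        # Z-Y-X (yaw, pitch, roll) rotation matrix built from the current Euler angles.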
        rotMatrixYPR = np.array(
            [[
                math.cos(angles['yaw']) * math.cos(angles['pitch']),
                math.cos(angles['yaw']) * math.sin(angles['pitch']) *
                math.sin(angles['roll']) -
                math.sin(angles['yaw']) * math.cos(angles['roll']),
                math.cos(angles['yaw']) * math.sin(angles['pitch']) *
                math.cos(angles['roll']) +
                math.sin(angles['yaw']) * math.sin(angles['roll']),
            ],
             [
                 math.sin(angles['yaw']) * math.cos(angles['pitch']),
                 math.sin(angles['yaw']) * math.sin(angles['pitch']) *
                 math.sin(angles['roll']) +
                 math.cos(angles['yaw']) * math.cos(angles['roll']),
                 math.sin(angles['yaw']) * math.sin(angles['pitch']) *
                 math.cos(angles['roll']) -
                 math.cos(angles['yaw']) * math.sin(angles['roll']),
             ],
             [
                 -math.sin(angles['pitch']),
                 math.cos(angles['pitch']) * math.sin(angles['roll']),
                 math.cos(angles['pitch']) * math.cos(angles['roll']),
             ]], np.float_)

        if transposeFlag:
            rotMatrixYPR = np.transpose(rotMatrixYPR)

        vectorMatrix = np.asarray(vector, np.float_)
        # vectorMatrix[1], vectorMatrix[2] = vectorMatrix[2], vectorMatrix[1]

        return rotMatrixYPR.dot(vectorMatrix)

    def debugAcceleration(self, angles, vector):
        gravity_vec_rotated = self.rotate(angles, self.gravity_vector, False)
        # vec = tuple(map(lambda x: abs(vector[vector.index(x)] - gravity_vec_rotated[vector.index(x)]), vector))
        vec = tuple(
            map(
                lambda x: vector[vector.index(x)] - gravity_vec_rotated[
                    vector.index(x)], vector))

        return vec
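# Minimal usage sketch (hypothetical; assumes a UM7 IMU on /dev/ttySAC0 and a
# `connection` object exposing setDataFrame): stream samples from a
# background thread.
#
#   import threading
#   imu = IMUClass('roll', 'pitch', 'yaw')
#   sender = threading.Thread(target=imu.startSendingSamples, args=(connection,))
#   sender.daemon = True
#   sender.start()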
Example #40
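# Scan a range of momenta: for each p, average the bending angle between the
# initial and final momentum and the impact parameter of the outgoing straight
# line over nth initial directions in the transverse plane.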
dt = 0.1
nsteps = 5000
thvals = []
bvals = []

pvals = np.arange(2000.,20001.,100.)

nth = 30.

for p in pvals:
    thvals.append(0)
    bvals.append(0)
    for thi in np.arange(0,360-360/nth+1,360/nth):
        p0 = [p*np.cos(thi),p*np.sin(thi),0]
        x0 = np.array([0,0,0]+p0)
        traj = Integrator.rk4(x0, Integrator.traverseBField, dt, nsteps, cutoff=9, cutoffaxis=3)
        
        pf = traj[3:,-1]
        xf = traj[:3,-1]
        
        t = -np.dot(pf,xf)/np.dot(pf,pf)
        b = np.linalg.norm(xf+t*pf)
        bvals[-1] += b
        
        th = abs( np.arccos(np.dot(pf,p0)/np.linalg.norm(p0)/np.linalg.norm(pf)) * 180/np.pi )
        thvals[-1] += th
    thvals[-1] /= nth
    bvals[-1] /= nth

print thvals[-1]
        
Example #41
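# Bokeh app setup: data sources for the force arrow and its label, the
# oscillation parameters, an Integrator driving the two-mass (topMass/mainMass)
# system, and the displacement-time plots for both masses.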
Arrow_source = ColumnDataSource(data = dict(xS=[], xE=[], yS=[], yE=[]))
# create ColumnDataSource for force label
ForceLabel_source = ColumnDataSource(data=dict(x=[0],y=[0],t=[0]))
# create vector of forces applied during simulation
ForceList = [0,0]
# create initial simulation parameters
Active = False
oscForceAngle = pi/2
oscAmp = 200.0
omega = 1
alpha = 0.5
Omega0 = 0.01
dt = 0.1
t = 0.0
# create integrator
Int = Integrator([topMass,mainMass],oscAmp,dashpot)

'''
###############################################################################
Define the displacement-time diagram for both masses
###############################################################################
'''
mainMass_displacementTime_source = ColumnDataSource(data=dict(x=[0],y=[0])) # Default values
topMass_displacementTime_source = ColumnDataSource(data=dict(x=[0],y=[0])) # Default values

# Initial space and time plot-boundaries
displacement_range = Range1d(-2,2)
time_range = Range1d(0,10)

displacementTime_plot = figure(
                                plot_width = 1100,