def main():
    """Command-line entry point for the backup tool.

    Parses options, sets up logging, loads the config, then either lists
    the configured groups or synchronizes the selected ones.

    Returns a process exit status: 0 on success (or after --list),
    1 when there is nothing to synchronize, otherwise whatever the
    chosen user interface's sync returns.
    """
    opt_parser = OptionParser()
    opt_parser.add_option("--log", dest="log", default="-",
                          help="write log to FILE", metavar="FILE")
    opt_parser.add_option("-l", "--list", action="store_true", dest="list",
                          help="list groups and repositories")
    opt_parser.add_option("-i", "--interactive", action="store_true", dest="interactive",
                          help="prompt before actions")
    opt_parser.add_option("-a", "--all", action="store_true", dest="all",
                          help="Backup all repositories")
    opt_parser.add_option("-q", "--quiet", action="store_true", dest="quiet",
                          help="only log errors and warnings")
    opt_parser.add_option("-c", "--config", dest="config", default=core.CONFIG_FILE,
                          help="config file to use (default to %s)" % core.CONFIG_FILE)
    opts, extra_args = opt_parser.parse_args()

    core.setup_logger(opts.log, opts.quiet)
    config = core.load_config(opts.config)

    # --list short-circuits every other action.
    if opts.list:
        do_list(config.groups.values())
        return 0

    # --all takes every configured group; otherwise resolve the positional
    # arguments against the config.
    groups = config.groups.values() if opts.all else core.get_group_list(config, extra_args)
    if not groups:
        logging.error("Nothing to synchronize")
        return 1

    ui = TextUserInterface() if opts.interactive else SilentUserInterface()
    return ui.do_sync(groups)
def main():
    """Bootstrap the proxy server from the bundled JSON configuration.

    Reads the config file, builds a core.Point from it, and starts it.
    Logs an error and returns early when the config file is missing.
    """
    init()
    config_path = r'config/vpoint_socks_vmess.json'
    try:
        with open(config_path, 'r') as config_file:
            config = core.load_config(config_file.read())
    except FileNotFoundError:
        logger.error('Unable to read config file %s' % config_path)
        return
    logger.info("Started the server from %s ..." % config.port)
    server = core.Point(config)
    server.start()
def start():
    """
    Starts this manager and calls its routine loop which monitors
    heartbeat messages.
    """
    config = load_config()
    with Storage() as storage, MqttClient("keeperheartbeater", config) as mqtt_client, \
            Heartbeater(config, storage, mqtt_client) as heartbeater:
        # The config has been fully consumed by the constructors above;
        # drop the reference so it can be collected during the long loop.
        del config
        try:
            loop(heartbeater, mqtt_client)
        except Exception:
            # During an orderly shutdown (module-level `running` flag cleared)
            # the exception is expected and swallowed; otherwise re-raise.
            # Bare `raise` preserves the original traceback, unlike `raise ex`.
            if running:
                raise
def start():
    """
    Starts this manager and calls its routine loop which monitors
    mqtt connections.
    """
    config = load_config()
    with Storage() as storage, MqttClient("keeperconnector", config) as mqtt_client, \
            Connector(config, storage, mqtt_client) as connector:
        # The config has been fully consumed by the constructors above;
        # drop the reference so it can be collected during the long loop.
        del config
        try:
            loop(connector, mqtt_client)
        except Exception:
            # During an orderly shutdown (module-level `running` flag cleared)
            # the exception is expected and swallowed; otherwise re-raise.
            # Bare `raise` preserves the original traceback, unlike `raise ex`.
            if running:
                raise
def run():
    """Entry point: parse CLI options, set up the process, and run the
    periodically scheduled task until shutdown is requested."""
    opts = create_optparser().parse_args()
    # Fork into the background before installing logging/signal handlers.
    if opts.daemonize:
        daemonize(opts.pid_file, opts.error_log)
    configure_logging()
    install_signal_handlers()
    config = loadconfig if False else load_config(opts.config)
    LOG.info('Starting')
    # The lambda re-reads the host from config on each invocation.
    schedule(lambda: perform_useless_task(config['server']['host']),
             period=config['period'],
             run_now=False)
    wait_for_shutdown()
def main():
    """CLI entry point: configure logging, parse arguments, load the
    config, and kick off datapoint creation."""
    # Log everything both to ./run.log and to stdout.
    logging.basicConfig(
        handlers=[
            logging.FileHandler(os.path.join(".", "run.log")),
            logging.StreamHandler(sys.stdout),
        ],
        level=logging.DEBUG,
        format="%(asctime)s %(name)s %(levelname)s %(message)s",
    )

    parser = argparse.ArgumentParser(
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        "--config", type=str, default=None, help="Path to config file")
    parser.add_argument(
        "--crop", type=int, nargs="+", default=None,
        help="Width (int) and height (int) of pieces image needs to be cropped to")
    args = parser.parse_args()

    # read config
    logger.info("Reading config file: {}".format(args.config))
    config = load_config(args.config)

    # start processing
    logger.info("Start creating datapoints")
    create_datapoints(config, args.crop)
traceColors = [ "rgb(211,255,255)", "rgb(159,216,255)", "rgb(119,181,255)", "rgb(76,148,253)", "#0074D9" ] #################### # # Read in data # #################### path, attack_types, df_reader = download(sample=True) df = load_dataframe(path, df_reader) df['attack_type'] = df['class'].map(attack_types) df1 = df config = load_config() #################### # # Exploration of the Dataset with Plotly # #################### def event_data_table(df): """ """ nb_examples = 1000 df = df[0:nb_examples] color_vals = ['rgb(30, 30, 30)'] * nb_examples # high data entries :
def start():
    """Regenerate the spacecraft TLE (from GPS data when available) and
    compute the magnetorquer dipole command from the attitude error.

    Side effects: rewrites the module-level orbit-tracking globals, writes
    the TJREVERB TLE file and its backup, and prints diagnostics.
    """
    # Orbit bookkeeping shared across calls via module-level globals.
    global epoch
    global revnum
    global lastmeananom
    global lasttime
    global lastmeanmot
    # NOTE(review): these globals are reset on every call, discarding any
    # state from a previous run — confirm that is intentional.
    revnum = 0
    lasttime = datetime(2018, 4, 4)
    lastmeananom = 0
    lastmeanmot = 15.5
    gain = 2*(10**(-5))  # control gain applied to the attitude error below
    config = load_config()  # Load the data from the YAML.

    # If GPS is on, get Cartesian (position, velocity) vectors and UTC time
    # from the GPS, convert them to a Keplerian Elements array, and generate
    # a new TLE from the KOE.
    if gps_is_on():
        # If we ask for GPS coordinates and the GPS responds:
        # data is a list (cache) of dictionaries, one per timestep.
        data = gps_dummy.get_data()
        if gps_dummy.data_is_valid(data):
            i = len(data)-1  # index of the last (newest) dictionary in the cache
            # Position state vector (ECI frame).
            r = [data[i]['x_pos'], data[i]['y_pos'], data[i]['z_pos']]
            # Velocity state vector.
            vel = [data[i]['x_vel'], data[i]['y_vel'], data[i]['z_vel']]
            epoch = data[i]['time']  # datetime object representing the epoch

            # Convert state vectors into an array representing the KOE.
            koe_array = cart_to_kep(r, vel)
            koe_list = koe_array.tolist()
            # koe_array = np.insert(koe_array, 0, epoch)  # Add the datetime object epoch to the beginning.
            koe_list.insert(0, epoch)
            # koe_array = np.append(koe_array, data['adcs']['tledata']['bstardrag'])  # Append the B-star drag coefficient
            koe_list.append(config['adcs']['sc']['bstardrag'])

            # Generate the new TLE (also advances the mean motion / mean
            # anomaly / time bookkeeping).
            temp_tle, lastmeanmot, lastmeananom, lasttime = tle_points.propagate(
                koe_list, lastmeanmot, lastmeananom, lasttime, revnum)
            # print(koe_list)
            # print(temp_tle)

            # Open the main TJREVERB TLE for writing.
            tjreverbtle = open(config['adcs']['tlefiles']['tjreverb'], "w")
            tjreverbtle.write(temp_tle)  # write the new TLE
            tjreverbtle.close()

            # Backup the TLE data.
            backuptle = open(config['adcs']['tlefiles']['backup'], "w")
            backuptle.write(temp_tle)
            backuptle.close()

            lla = tle_dummy.get_lla(epoch)  # pull LLA data from TJREVERB TLE
        else:
            # GPS is on but the data is invalid:
            epoch = datetime.utcnow()  # fall back to system time
            # Uses PyOrbital to propagate the TLE using epoch, which returns its LLA.
            lla = tle_dummy.get_lla(epoch)
    # If GPS is off, use the system time and TJREVERB TLE to propagate the
    # current LLA.
    else:
        # We asked for GPS coordinates and the GPS did not respond:
        epoch = datetime.utcnow()  # fall back to system time
        # Uses PyOrbital to propagate the TLE using epoch, which returns its LLA.
        lla = tle_dummy.get_lla(epoch)

    # needed to incremement revnum
    print(tle_dummy.get_xyz(epoch)['xyz_pos'])
    print(tle_dummy.get_xyz(epoch)['xyz_vel'])
    print(tle_dummy.get_lla(epoch))

    # Scale position/velocity by 1000 — presumably km -> m; confirm the
    # units returned by tle_dummy.get_xyz.
    pos = []
    vel = []
    for i in tle_dummy.get_xyz(epoch)['xyz_pos']:
        pos.append(i*1000)
    for j in tle_dummy.get_xyz(epoch)['xyz_vel']:
        vel.append(j*1000)
    poskep = cart_to_kep(pos, vel)
    print("KOE (from newly generated TLE): "+str(poskep))

    # if (poskep[4]>0 and oldargp<=0):
    #     revnum=revnum+1
    # oldargp = poskep[4]
    # write_config('config_adcs.yaml', utc_to_jul(epoch))
    # config['adcs']['sc']['jd0'] = utc_to_jul(epoch)

    # Instantiate the world magnetic model from the bundled coefficient file.
    gm = WrldMagM((Path(__file__).parent.resolve() / config['adcs']['wrldmagm']))

    # Calculate the magnetic field vector in ECEF.
    # NOTE(review): an earlier comment said altitude is multiplied to convert
    # meters to feet, but no conversion is visible on lla['alt'] here —
    # confirm the units wrldmagm expects.
    magECEF = gm.wrldmagm(lla['lat'], lla['lon'], lla['alt'], date.today())
    magECEF = np.squeeze(np.asarray(magECEF))
    magECI = ecef2eci(magECEF, epoch)  # magnetic field in the inertial frame

    # NOTE(review): the original comment claims a tesla<->nanotesla
    # conversion, but 1.0*(10e-09) equals 1e-8, not 1e-9 — off by 10x if
    # that was the intent.  Harmless here because bI is normalized on the
    # next line, but worth confirming.
    bI = 1.0*(10e-09) * magECI
    bI = bI/np.linalg.norm(bI)
    bI = np.asmatrix(bI)
    bI = bI.getH()  # conjugate transpose -> column vector

    # Sun vector in the inertial frame; argument is Julian days elapsed
    # since 1980-01-06 (the GPS epoch).
    sI = sun_vec(utc_to_jul(epoch)-utc_to_jul(datetime(1980, 1, 6, 0, 0, 0)))
    sI = sI/np.linalg.norm(sI)  # normalize sI
    print(bI)
    print(sI)

    # bV and sV data are taken from the onboard magnetometer and sunsensors.
    # NOTE(review): these are hard-coded placeholder readings.
    bV = [1, 1, 2]
    sV = [1, 2, 1]
    dcm = get_dcm(bV, sV, bI, sI)  # attitude DCM from the two vector pairs
    print("DCM: "+str(dcm))
    q = dcm_to_q(dcm)  # measured attitude quaternion
    print("Quaternion: "+str(q))
    qref = get_q_ref_nadir(poskep)  # nadir-pointing reference quaternion
    print("Reference Quaternion: "+str(qref))
    qerr = get_q_err(q, qref)
    print("Quaternion Error: "+str(qerr))
    thetaerr = get_theta_err(qerr)  # attitude error angles
    print("Theta Error (radians): "+str(thetaerr.getH()))

    mmax = .2  # maximum commandable magnetic dipole magnitude
    mtrans = np.matrix([[1, 0, 0], [0, 1, 0], [0, 0, 1]])  # identity torquer transform
    ctcomm = -1*gain*thetaerr.getH()  # commanded control torque
    # print(ctcomm)
    magdip = get_mc(ctcomm.getH(), np.matrix([bV]).getH(), mmax, mtrans)
    print("Magnetic Dipole (sent to imtq): "+str(magdip))
    ctprod = np.cross(magdip, bV)  # torque actually produced: m x B
    print("Control Torque Produced: "+str(ctprod))
# Log into AliExpress with a headless browser and persist the session cookies.
from splinter import Browser
import time

login_url = "https://login.aliexpress.com/"
ali_home = "/home/makefu/.ali"
config = ali_home + '/config.json'
from core import load_config, save_db
# NOTE(review): the path assigned to `config` above is immediately
# overwritten here; load_config() takes no argument, so the path is unused
# unless core reads it implicitly — confirm against core.load_config.
config = load_config()
user_agent = config['user_agent']

with Browser(user_agent=user_agent) as browser:
    aconf = config['ali']  # AliExpress credentials + cookie file location
    login_name = aconf['user_name']
    password = aconf['password']
    browser.visit(login_url)
    # give it a bit more time :)
    time.sleep(5)
    # The login form is embedded in an iframe.
    with browser.get_iframe('alibaba-login-box') as iframe:
        iframe.find_by_id('fm-login-id').fill(login_name)
        iframe.find_by_id('fm-login-password').fill(password)
        button = iframe.find_by_id('fm-login-submit')
        button.click()
    # we are at the main page again
    # The cart element only renders for an authenticated session.
    if browser.is_element_present_by_css('.nav-cart'):
        print("logged in!")
        # Persist the session cookies for later non-interactive use.
        save_db(browser.cookies.all(), aconf['cookie_file'])
    else:
        print("not logged in:(")