def main():
    """Command-line entry point: build the `myason` parser and dispatch.

    Sub-commands: agent, collector, ifconfig, keygen.
    """
    parser = argparse.ArgumentParser(prog="myason")
    subparsers = parser.add_subparsers(help="", dest="app")
    subparsers.required = True

    # "agent" sub-command and its configuration-file options.
    agent_parser = subparsers.add_parser(name="agent", help="agent help")
    agent_parser.add_argument("-lc", "--agent-logger-conf", default="config/agent_logger.yml")
    agent_parser.add_argument("-ac", "--agent-conf", default="config/agent.yml")

    # "collector" sub-command and its configuration-file options.
    collector_parser = subparsers.add_parser(name="collector", help="collector help")
    collector_parser.add_argument("-lc", "--collector-logger-conf", default="config/collector_logger.yml")
    collector_parser.add_argument("-cc", "--collector-conf", default="config/collector.yml")

    # Argument-less sub-commands.
    subparsers.add_parser(name="ifconfig", help="Prints list of available adapters")
    subparsers.add_parser(name="keygen", help="Generates a Fernet key")

    arguments = parser.parse_args()

    if arguments.app == "agent":
        agent.agent(
            agent_conf_fn=arguments.agent_conf,
            logger_conf_fn=arguments.agent_logger_conf,
        )
    elif arguments.app == "collector":
        collector.collector(
            collector_conf_fn=arguments.collector_conf,
            logger_conf_fn=arguments.collector_logger_conf,
        )
    elif arguments.app == "ifconfig":
        adapters.get_adapters()
    elif arguments.app == "keygen":
        keygen.get_key()
def startup(client, params):
    """Locate (or create) this container's collector in the portal, then install it."""
    found = collector.find_collector(client, params)
    if found:
        logging.debug('Collector found')
        c = found
        # Note on the fs that an existing collector was found so a future
        # cleanup leaves it alone — but only on the very first container run;
        # later runs would otherwise always "find" the one created below.
        if not os.path.isfile(config.FIRST_RUN):
            util.touch(config.COLLECTOR_FOUND)
    else:
        logging.debug('Collector not found')
        c = collector.create_collector(client, collector.collector(client, params))

    # let subsequent runs know that this isn't the first container run
    util.touch(config.FIRST_RUN)

    # Already installed? Just tidy up and stop — no second install.
    if os.path.isdir(config.INSTALL_PATH + config.AGENT_DIRECTORY):
        logging.debug('Collector already installed.')
        util.cleanup()
        return

    collector.install_collector(client, c, params)
def __init__(self, target_port, target_host = '127.0.0.1'):
    """Initialise the client thread that talks to (target_host, target_port).

    Args:
        target_port: TCP port of the peer to connect to.
        target_host: peer host; defaults to the local machine.
    """
    threading.Thread.__init__(self)
    self.server_addr = (target_host, target_port)
    self.rpc_wait = {}  # Key: RPC ID, value: handler awaiting that response
    self.notify_wait = {}  # Key: Notify ID, value: handler
    self.socket = None  # NOTE(review): presumably created on connect — not visible here
    # collector reassembles '\n'-separated wire data and hands each message
    # to handle_msg ("seperater" is the collector API's own spelling).
    self.collector = collector.collector(handler = self.handle_msg, seperater = '\n')
    self._stop = False  # cooperative stop flag (checked by the thread loop — not visible here)
def __init__(self,): self.context = zmq.Context() #sender self.sender = self.context.socket(zmq.PUSH) self.sender.bind('tcp://*:5557') #collector self.collector = collector() self.collector.start()
def collector_submit():
    """Validate the form fields and create a new employee record.

    Reads the Tk entry widgets, checks the input against the existing CSV
    database (when present), and reports the outcome in the ``output`` widget.

    Fixes over the previous version:
    - the bare ``except`` only printed "Failure" and left ``output_string``
      unbound, so the final ``output.insert`` raised NameError;
    - in the no-database branch an invalid phone number displayed an error
      that was immediately cleared, and the record was created anyway.
    """
    Id = id_entry.get()
    UserName = username_entry.get()
    PhoneNumber = phonenumber_entry.get()
    EmailId = emailid_entry.get()

    output.delete(0.0, END)

    # Input validation applies whether or not a database exists yet.
    if len(PhoneNumber) != 10:
        output.insert(END, 'Input Validation Error: Wrong Phone Number')
        return

    if os.path.exists('./EmployeeDetails/employee_details.csv'):
        df = pd.read_csv('./EmployeeDetails/employee_details.csv')
        # Reject records that duplicate an existing user.
        if int(Id) in df['Ids'].values:
            output.insert(END, 'Database Error: User ID Already Exists')
            return
        if int(PhoneNumber) in df['PhoneNumbers'].values:
            output.insert(END, 'Database Error: Phone Number Already Exist')
            return
        if EmailId in df['EmailIds'].values:
            output.insert(END, 'Database Error: Email Id Already Exist')
            return

    try:
        collector(Id, UserName, PhoneNumber, EmailId)
        output_string = 'New User Created: ' + UserName
    except Exception:
        # Best-effort: report failure in the widget instead of crashing the GUI.
        output_string = "Failed"
    output.insert(END, output_string)
def __init__(self, port):
    """Initialise the server's select-loop state for the given listen port."""
    self.port = port
    self.sessions = {}
    self.inputs = []
    self.outputs = []
    self.exceptions = []
    self.timeout = 1
    self._stop = False
    # Reassembles '\n'-separated wire data; no per-message handler here —
    # messages are pulled out and dispatched elsewhere.
    self.collector = collector.collector(handler=None, seperater='\n')
    self.notifications = {}
    self.server_notify = notification(0)
    # Built-in RPC methods, keyed by their wire names (key == method name).
    self.methods = {
        'getMethods': self.getMethods,
        'addNotification': self.addNotification,
        'addNotify': self.addNotify,
        'subscribe': self.subscribe,
        'unSubscribe': self.unSubscribe,
    }
def startup(client, params): c = None # if the kubernetes param is specified, assume this is part of a # collector set and parse the id accordingly, bypassing other id lookups if params['kubernetes']: logging.debug('Kubernetes mode enabled. Parsing id from environment') collector_id = kubernetes.get_collector_id() logging.debug('Parsed id ' + str(collector_id)) params['collector_id'] = collector_id # detect if the collector already exists in the portal f = collector.find_collector(client, params) if not f: logging.debug('Collector not found') if params['kubernetes']: err = 'Running in kubernetes mode but existing collector not found' util.fail(err) c = collector.collector(client, params) c = collector.create_collector(client, c) else: logging.debug('Collector found') c = f # we want to make a note on the fs that we found an existing collector # so that we don't remove it during a future cleanup, but we should # only make this note if this is the first time the container is run # (otherwise, every subsequent should detect the existing collector # that we're going to create below. Not the behavior we want) if not os.path.isfile(config.FIRST_RUN): util.touch(config.COLLECTOR_FOUND) # let subsequent runs know that this isn't the first container run util.touch(config.FIRST_RUN) # detect if collector is already installed if os.path.isdir(config.INSTALL_PATH + config.AGENT_DIRECTORY): logging.debug('Collector already installed.') util.cleanup() return collector.install_collector(client, c, params)
def main(**kwargs):
    """Entry point for the program.

    Sets up file + console logging, streams records through the filter,
    and batch-inserts them into the database — committing on success and
    rolling back on any error.

    Kwargs:
        logfile: path of the log file appended to by the file handler.
    """
    # set up logging: DEBUG to the file, a terser INFO stream to the console
    logger = logging.getLogger('formMailer')
    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
                        datefmt='%m-%d %H:%M',
                        filename=kwargs['logfile'],
                        filemode='a')
    console = logging.StreamHandler()
    console.setLevel(logging.INFO)
    formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
    console.setFormatter(formatter)
    logging.getLogger('').addHandler(console)

    starttime = datetime.datetime.now()  # run start timestamp (currently unused)
    logger.info('Begin processing data...')
    logger.debug('Current run inputs: {0}'.format(locals()))

    # FIX: renamed the local from "collector" so it no longer shadows the
    # collector callable it is built from.
    record_collector = collector()
    recFilter = recordFilter(record_collector)
    db = dbInserter.dbInserter()

    # build the first record individually, to create the necessary schema
    rec = recFilter.next()
    db.insertRec(rec, firstrec=True)

    # loop over the rest of the records and insert them
    try:
        for c, rec in enumerate(recFilter):
            db.insertRec(rec)
            if not (c % 5000) and (c > 0):
                logger.info('Added {0} total records to session so far'.format(c))
            # flush the session periodically to bound memory use
            if not ((c + 1) % 30000):
                db.flush()
        db.flush()
        db.updatesAndViews()
        db.commit()
    except Exception as e:  # FIX: "except Exception, e" is Py2-only syntax
        logger.error(str(e))
        db.rollback()
#!/usr/bin/env python
"""Run the collector for MPR121 address 0x5D, passing this script's base name."""
import os

from collector import collector

collector(0x5D, os.path.basename(__file__))
#!/usr/bin/env python
"""Collect from the MPR121 at 0x5C; the second argument is this script's base name."""
import os

from collector import collector

collector(0x5C, os.path.basename(__file__))
#!/usr/bin/env python
"""Run the collector for MPR121 address 0x5B, passing this script's base name."""
import os

from collector import collector

collector(0x5B, os.path.basename(__file__))
#!/usr/bin/env python
"""Collect data from a single MPR121, selected by address letter a-d."""
import argparse
import sys  # retained: previously used by the (now removed) fallback branch
import os

from collector import collector

filename = os.path.basename(__file__)

# Address letter -> MPR121 I2C address.
ADDRESSES = {'a': 0x5A, 'b': 0x5B, 'c': 0x5C, 'd': 0x5D}

parser = argparse.ArgumentParser(description='Collect data from a single MPR121.')
parser.add_argument('address', metavar='addr', nargs='?', default='a',
                    choices=['a', 'b', 'c', 'd'],
                    help='valid address for MPR121: a - d')
args = parser.parse_args()

# argparse's "choices" already rejects anything outside a-d (exiting with an
# error), so the old unreachable else branch (print + sys.exit) is gone and
# the if/elif chain collapses to a single table lookup.
collector(ADDRESSES[args.address], filename)
#!/usr/bin/env python
"""Invoke the collector for address 0x5d with second argument 2."""
from collector import collector

collector(0x5d, 2)
#!/usr/bin/env python
"""Run the collector for MPR121 address 0x5A, passing this script's base name."""
import os

from collector import collector

collector(0x5A, os.path.basename(__file__))
#!/usr/bin/env python
"""Collect from the MPR121 at 0x5D; the second argument is this script's base name."""
import os

from collector import collector

collector(0x5D, os.path.basename(__file__))
"""Collect data from a single MPR121, selected by address letter a-d."""
import argparse
import sys  # retained: previously used by the (now removed) fallback branch
import os

from collector import collector

filename = os.path.basename(__file__)

# Address letter -> MPR121 I2C address.
ADDRESSES = {'a': 0x5A, 'b': 0x5B, 'c': 0x5C, 'd': 0x5D}

parser = argparse.ArgumentParser(
    description='Collect data from a single MPR121.')
parser.add_argument('address', metavar='addr', nargs='?', default='a',
                    choices=['a', 'b', 'c', 'd'],
                    help='valid address for MPR121: a - d')
args = parser.parse_args()

# argparse's "choices" already rejects anything outside a-d (exiting with an
# error), so the old unreachable else branch (print + sys.exit) is gone and
# the if/elif chain collapses to a single table lookup.
collector(ADDRESSES[args.address], filename)
#!/usr/bin/env python
"""Invoke the collector for address 0x5c with second argument 2."""
from collector import collector

collector(0x5c, 2)
#!/usr/bin/env python
"""Invoke the collector for address 0x5a with second argument 2."""
from collector import collector

collector(0x5a, 2)
"""Quick manual check: list the collector's scenes and fetch one feature vector."""
from source import source
from collector import collector

# Left over from a one-off run that built the dataset (kept for reference):
# so = source("datasets/TUT-acoustic-scenes-2016-development/")
# so.source_save()

coll = collector()
print(coll.get_scenes())
print(coll.get_feature_vector_array(sound_scene="car", limit_num=1))
#!/usr/bin/env python
"""Collect from the MPR121 at 0x5B; the second argument is this script's base name."""
import os

from collector import collector

collector(0x5B, os.path.basename(__file__))
#!/usr/bin/env python
"""Invoke the collector for address 0x5c."""
from collector import collector

collector(0x5c)
def collector_worker():
    """Run collector(), logging any failure instead of letting it propagate."""
    try:
        collector()
    except Exception:
        # Record the full traceback via the module-level LOGGING logger and
        # keep the failure from killing the host thread/process.
        LOGGING.exception('!!!!!collector failed!!!!!')
#!/usr/bin/env python
"""Invoke the collector for address 0x5a."""
from collector import collector

collector(0x5a)
def test_01(self):
    """Collector's path on fixture 0 must pass the checker at the given score."""
    coins, mags, n, points = tests[0]
    print('solution points: ', points)
    my_path = collector(coins, mags, n)
    print('my path: ', my_path)
    self.assertTrue(check(coins, mags, n, my_path, points))
def test_03(self):
    """Collector's path on fixture 2 must pass the checker at the given score."""
    coins, mags, n, points = tests[2]
    result = collector(coins, mags, n)
    print('my path: ', result)
    self.assertTrue(check(coins, mags, n, result, points))
def test_08(self):
    """Collector's path on fixture 7 must pass the checker at the given score."""
    coins, mags, n, points = tests[7]
    found_path = collector(coins, mags, n)
    self.assertTrue(check(coins, mags, n, found_path, points))
#!/usr/bin/env python
"""Invoke the collector for address 0x5b."""
from collector import collector

collector(0x5b)
"""Fan items out to worker processes via a queue and collect their results."""
import multiprocessing  # FIX: was missing, so multiprocessing.Queue raised NameError
from time import sleep

from worker import worker
from feeder import feeder
from collector import collector

DEBUG = True
WANTED = 250
WORKERS_CNT = 5
MAX_QUEUE_SIZE = 200
STOP_MARKER = '::QUIT::'

if __name__ == '__main__':
    input_queue = multiprocessing.Queue(MAX_QUEUE_SIZE)
    results_queue = multiprocessing.Queue(MAX_QUEUE_SIZE)

    # Start the worker pool first so items are consumed as soon as they arrive.
    for _ in range(WORKERS_CNT):
        p = multiprocessing.Process(
            target=worker,
            args=(input_queue, results_queue, STOP_MARKER, DEBUG))
        p.start()

    sleep(1)

    f = multiprocessing.Process(
        target=feeder,
        args=(input_queue, WANTED, STOP_MARKER, DEBUG))
    f.start()

    # Drain results in the main process; collector's exit condition is
    # defined in its own module (it receives the stop marker and worker count).
    collector(results_queue, STOP_MARKER, WORKERS_CNT, DEBUG)
#!/usr/bin/env python
"""Invoke the collector for address 0x5d."""
from collector import collector

collector(0x5d)
#!/usr/bin/env python
"""Run the collector for MPR121 address 0x5C, passing this script's base name."""
import os

from collector import collector

collector(0x5C, os.path.basename(__file__))