コード例 #1
0
ファイル: manager.py プロジェクト: davidecarson/NASA-Project
    def _download_and_parse_input_file(self, task):
        """Fetch the task's input file from S3 and build a Task from its JSON body.

        The file referenced by ``task.input_file_s3_path`` is downloaded into an
        in-memory buffer, decoded as JSON, and combined with fields carried on
        the incoming ``task`` (uuid, days, n) into a new ``utils.Task``.
        """
        buffer = StringIO.StringIO()
        self._logger.debug(
            'Downloading input file as object. local_uuid: {0}'.format(
                task.local_uuid))
        self._s3_client.download_file_as_object(
            bucket=self._project_bucket,
            key=task.input_file_s3_path,
            file_object=buffer)

        # The S3 object is a JSON document keyed with dash-separated names.
        payload = json.loads(buffer.getvalue())
        return utils.Task(
            uuid=task.local_uuid,
            start_time=payload['start-date'],
            end_time=payload['end-date'],
            speed_threshold=payload['speed-threshold'],
            diameter_threshold=payload['diameter-threshold'],
            miss_threshold=payload['miss-threshold'],
            n_days=task.days,
            n=task.n)
コード例 #2
0
ファイル: DCFSL-salinas.py プロジェクト: Li-ZK/DCFSL-2021
            source_data, source_label = source_iter.next()
        except Exception as err:
            source_iter = iter(source_loader)
            source_data, source_label = source_iter.next()

        try:
            target_data, target_label = target_iter.next()
        except Exception as err:
            target_iter = iter(target_loader)
            target_data, target_label = target_iter.next()

        # source domain few-shot + domain adaptation
        if episode % 2 == 0:
            '''Few-shot claification for source domain data set'''
            # get few-shot classification samples
            task = utils.Task(metatrain_data, CLASS_NUM, SHOT_NUM_PER_CLASS,
                              QUERY_NUM_PER_CLASS)  # 5, 1,15
            support_dataloader = utils.get_HBKC_data_loader(
                task,
                num_per_class=SHOT_NUM_PER_CLASS,
                split="train",
                shuffle=False)
            query_dataloader = utils.get_HBKC_data_loader(
                task,
                num_per_class=QUERY_NUM_PER_CLASS,
                split="test",
                shuffle=True)

            # sample datas
            supports, support_labels = support_dataloader.__iter__().next(
            )  # (5, 100, 9, 9)
            querys, query_labels = query_dataloader.__iter__().next(
コード例 #3
0
    def CheckHost(self):
	'Check one host using nmap.'
	#
	# Create a tmp file for storing nmap output
	#
	# The tempfile module from python 1.5.2 is stupid
	# two processes runing at aprox the same time gets 
	# the same tempfile...
	# For this reason I use a random suffix for the tmp-file
	# Still not 100% safe, but reduces the risk significally
	# I also inserted checks at various places, so that
	# _if_ two processes in deed get the same tmp-file
	# the only result is a normal error message to nagios
	#
	self.tmp_file=tempfile.mktemp('.%s') % random.randint(0,100000)
	if self.debug:
	    print 'Tmpfile is: %s'%self.tmp_file
	#
	# If a range is given, only run nmap on this range
	#
	if self.ranges<>'':
	    global nmap_cmd # needed, to avoid error on next line
	                    # since we assigns to nmap_cmd :)
	    nmap_cmd='%s -p %s' %(nmap_cmd,self.ranges)	
	#
	# Prepare a task
	#
	t=utils.Task('%s %s' %(nmap_cmd,self.host))
	#
	# Configure a time-out handler
	#
	th=utils.TimeoutHandler(t.Kill, time_to_live=self.timeout, 
	                        debug=self.debug)
	#
	#  Fork of nmap cmd
	#
	t.Run(detach=0, stdout=self.tmp_file,stderr='/dev/null')
	#
	# Wait for completition, error or timeout
	#
	nmap_exit_code=t.Wait(idlefunc=th.Check, interval=1)
	#
	# Check for timeout
	#
	if th.WasTimeOut():
	    self.exit_code=self.CRITICAL
	    self.exit_msg='CRITICAL - Plugin timed out after %s seconds' % self.timeout
	    return
	#
	# Check for exit status of subprocess
	# Must do this after check for timeout, since the subprocess
	# also returns error if aborted.
	#
	if nmap_exit_code <> 0:
	    self.exit_code=self.UNKNOWN
	    self.exit_msg='nmap program failed with code %s' % nmap_exit_code
	    return
	#
	# Read output
	#
	try:
	    f = open(self.tmp_file, 'r')
	    output=f.readlines()
	    f.close()
	except:
	    self.exit_code=self.UNKNOWN
            self.exit_msg='Unable to get output from nmap'
	    return

	#
	# Store open ports in list
	#  scans for lines where first word contains '/'
	#  and stores part before '/'
	#
	self.active_ports=[]
	try:
	    for l in output:
		if len(l)<2:
		    continue
		s=string.split(l)[0]
		if string.find(s,'/')<1:
		    continue
		p=string.split(s,'/')[0]
		if string.find(l,'open')>1:
		    self.active_ports.append(int(p))
	except:
	    # failure due to strange output...
	    pass

	if self.debug:
	    print 'Ports found by nmap:   ',self.active_ports
	#
	# Filter out optional ports, we don't check status for them...
	#
	try:
	    for p in self.opt_ports:
		self.active_ports.remove(p)
	    
	    if self.debug and len(self.opt_ports)>0:
		print 'optional ports removed:',self.active_ports
	except:
	    # under extreame loads the remove(p) above failed for me
	    # a few times, this exception hanlder handles
	    # this bug-alike situation...
	    pass

	opened=self.CheckOpen()	
	closed=self.CheckClosed()
	
	if opened <>'':
	    self.exit_code=self.CRITICAL
            self.exit_msg='PORTS CRITICAL - Open:%s Closed:%s'%(opened,closed)
	elif closed <>'':
	    self.exit_code=self.WARNING
	    self.exit_msg='PORTS WARNING - Closed:%s'%closed
	else:
	    self.exit_code=self.OK
	    self.exit_msg='PORTS ok - Only defined ports open'