Example #1
def main():
    args = parse_args()
    projectOut = os.path.join(args.dest[0], args.projName[0])
    os.mkdir(projectOut)
    genSrc(projectOut, args.projName[0], ["iostream", "string", "unistd.h", "vector", "cstdint", "cstdio", "cstddef", "utility", "map", "unordered_map", "algorithm"])
    CC = genHelper.determineCC(args)
    CXX = genHelper.determineCXX(args)
    external = "external"
    outname = args.projName[0]
    prefix = "./"
    installName = args.projName[0]
    neededLibs = "none"        
    if args.externalLoc:
        external = os.path.realpath(args.externalLoc[0])
    if args.neededLibs:
        neededLibs = args.neededLibs[0].split(",")
    genHelper.generateCompfileFull(os.path.join(projectOut, "compfile.mk"), external, CC, CXX, outname, installName, prefix, neededLibs)
    with open(os.path.join(projectOut, "configure.py"), "w") as configFile:
        if(args.neededLibs):
            configFile.write(genHelper.mkConfigFileStr(outname, args.neededLibs[0]))
        else:
            configFile.write(genHelper.mkConfigFileStr(outname, ""))
    os.chmod(os.path.join(projectOut, "configure.py"), stat.S_IXGRP | stat.S_IXOTH | stat.S_IXUSR | stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR)
    exFrom = os.path.abspath(os.path.dirname(__file__))
    cpSetUpCmd = exFrom + "/copySetUpFiles.py -from " + exFrom +"/../../ -to " + projectOut
    print(CT.boldBlack(cpSetUpCmd))
    Utils.run(cpSetUpCmd)
    cpMakefilesCmd = "cp " + exFrom + "/../cppSetUpFiles/*akefile* " + projectOut
    print(CT.boldBlack(cpMakefilesCmd))
    Utils.run(cpMakefilesCmd)
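Example 1's parse_args is not shown; below is a minimal sketch consistent with how the flags are read above (hypothetical; the real script defines its own parser, plus whatever genHelper.determineCC/determineCXX read):

import argparse

def parse_args():
    # Every option is read as args.<name>[0] above, hence nargs=1 throughout.
    parser = argparse.ArgumentParser(description="generate a C++ project skeleton")
    parser.add_argument("-projName", nargs=1, required=True, help="name of the project to create")
    parser.add_argument("-dest", nargs=1, required=True, help="directory to create the project in")
    parser.add_argument("-externalLoc", nargs=1, help="location of external libraries")
    parser.add_argument("-neededLibs", nargs=1, help="comma-separated list of required libraries")
    return parser.parse_args()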
Example #2
    def send_file_with_oob_tcp(s, filename):
        sending_file = open(filename, 'rb')
        filesize = os.stat(filename).st_size
        oob_sent = 0
        try:
            bytes_sent = int(s.recv(Constants.FILE_CHUNK_SIZE))
            print "Already sent {0} / {1}".format(bytes_sent, filesize)
        except:
            print 'Lost Connection'
            return 0
        sending_file.seek(int(bytes_sent), 0)

        while True:
            chunk = sending_file.read(Constants.FILE_CHUNK_SIZE)
            if not chunk:
                break
            try:
                s.settimeout(Constants.DEFAULT_TIMEOUT)
                s.send(chunk)
            except socket.error:
                print('Transfer fail')
                return 0
            bytes_sent += len(chunk)  # the last chunk may be shorter than FILE_CHUNK_SIZE
            percent = int(float(bytes_sent) * 100 / float(filesize))
            print "{0} / {1} Kb sent ({2}%)".format(Utils.to_kilobytes(bytes_sent),
                                                    Utils.to_kilobytes(filesize), percent)
            sys.stdout.write('\033M')
            if percent % 10 == 0 and oob_sent != percent and percent < 91:
                oob_sent = percent
                sys.stdout.write('\033D')
                print('\033[37;1;41m Urgent flag sent at {0}% \033[0m'.format(percent))
                s.send(str(percent // 10).encode(), socket.MSG_OOB)

        sending_file.close()
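The receiving side is not shown; here is a minimal sketch of the counterpart under the protocol implied above (an assumption: the receiver first reports its resume offset, then reads chunks while polling the socket's exceptional state for urgent bytes; chunk_size stands in for Constants.FILE_CHUNK_SIZE):

import select
import socket

def recv_file_with_oob_tcp(s, out_path, chunk_size=4096):
    s.send(b'0')  # resume offset: nothing received yet
    with open(out_path, 'wb') as out_file:
        while True:
            # TCP urgent (OOB) data is reported through the "exceptional" set.
            _, _, exceptional = select.select([], [], [s], 0)
            if exceptional:
                print('Urgent byte: %r' % s.recv(1, socket.MSG_OOB))
            chunk = s.recv(chunk_size)
            if not chunk:
                break
            out_file.write(chunk)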
Example #3
 def test_flight(self):
     while not self.copter_commander.is_commander_link_set():
         print("In Test Flight Mode ...")
         time.sleep(0.3)
     Utils.test_flight_for_short_duration(CopterConfigs.TEST_FLIGHT_TIME, self.crazyflie,
                                          CopterControlParams(thrust=25000))
     print("Test Flight Success.")
Example #4
	def test_sync_files(self):
		sourceFiles = ['/movie/transformers.mp4']
		target = '/users/willsam100/Desktop/'
		ftpMock = FTP('')
		ftpMock.retrbinary = MagicMock()

		now = util.now()
		targetFile = util.join(target, util.basename(sourceFiles[0]))
		self.__cleanUp__(targetFile)
		
		utilMock = Utils()
		utilMock.splitDirectoriesAndFiles = MagicMock(return_value=([], sourceFiles))
		utilMock.exists = MagicMock(return_value=False)
		utilMock.computeSpeed = MagicMock(return_value=40)
		utilMock.now = MagicMock(return_value=now)

		transfer = Transfer(ftpMock, utilMock, target)
		transfer.sync(sourceFiles)

		utilMock.splitDirectoriesAndFiles.assert_called_with(ftpMock, sourceFiles)
		utilMock.exists.assert_called_with(targetFile)
		ftpMock.retrbinary.assert_called_with('RETR ' + sourceFiles[0], mock.ANY)
		self.assertTrue(util.exists(targetFile))
		self.__cleanUp__(targetFile)
		utilMock.computeSpeed.assert_called_with(now, targetFile)
Example #5
    def send_file_multicast(s, filename):
        connections = {}
        filesize = os.stat(filename).st_size
        try:
            while True:
                readable, _, _ = select.select([s], [], [])
                for rd in readable:
                    bytes_sent = 0
                    package, client_address = s.recvfrom(Constants.FILE_CHUNK_SIZE)
                    unpacked_package = Utils.unpack_package(package)

                    if client_address not in connections or connections[client_address] is None:
                        connections[client_address] = open(filename, 'rb')

                    if unpacked_package['command'] == Constants.INIT_TRANSMIT:
                        bytes_sent = int(unpacked_package['payload'])
                        connections[client_address].seek(bytes_sent)
                        data = connections[client_address].read(Constants.FILE_CHUNK_SIZE)
                        if not data:
                            rd.sendto(Utils.pack_package(Constants.FIN, ''), client_address)
                            connections[client_address].close()
                            connections[client_address] = None
                            continue  # file finished for this client
                        rd.sendto(Utils.pack_package(Constants.ACK, data), client_address)

                        bytes_sent += len(data)
                        percent = int(float(bytes_sent) * 100 / float(filesize))

                        print("{0} / {1} Kb sent to client {2} ({3}%)".format(
                            Utils.to_kilobytes(bytes_sent), Utils.to_kilobytes(filesize),
                            client_address, percent))
                        sys.stdout.write('\033M')

        except socket.error as value:
            print(value)
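Utils.pack_package and Utils.unpack_package are not shown in this example; a minimal pair consistent with the 'command'/'payload' fields used above (an illustrative assumption, not the project's actual wire format, and assuming integer command codes):

import struct

def pack_package(command, payload):
    # 4-byte command tag followed by the raw payload bytes.
    if isinstance(payload, str):
        payload = payload.encode()
    return struct.pack('!I', command) + payload

def unpack_package(package):
    # Inverse of pack_package: datagram bytes -> {'command': ..., 'payload': ...}
    command, = struct.unpack('!I', package[:4])
    return {'command': command, 'payload': package[4:]}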
Example #6
 def process(self, job_definitions):
     Utils.execute_ansible_playbook(self.job_name, 'collectd.yaml', self.options)
     files_to_copy = [{"src":"/tmp/rundeck/ansible/%s__fping_monitor.py" % self.job_name, "dst":"/home/%s/fping_monitor.py" % self.options['osType']},
         {"src":"/tmp/rundeck/ansible/%s__collectd.conf" % self.job_name, "dst":"/home/%s/collectd.conf" % self.options['osType']},
         {"src":"/tmp/rundeck/ansible/%s__killer.sh" % self.job_name, "dst":"/home/%s/killer.sh" % self.options['osType']}]
     job_definitions['copyfiles'] = files_to_copy
     print(job_definitions)
Example #7
    def create_new_user(self, rawname, role, email=""):
        """ Creates a new Person record with a unique username, or returns a person wiht the matching username"""

        ut = Utils()
        username = ut.cleanUserName(rawname)

        if ut.validateUserName(username):

            # check for username
            person = self.get_person(username)

            # create a new person if we have not got this one
            if person is None:
                person = Person()
                person.email = email
                person.passhash = ""
                # no passhash if not got a direct logon
                person.username = username
                person.displayname = ut.tidyUserName(
                    rawname
                )  # allow the username to be updated on the myHoots or profile or summink
                person.before_put()
                person.role = role
                person.put()

            self._person = person

            return username
        else:
            return ""
Example #8
    def processMacArthur(self, url):
        utils = Utils()
        r = requests.get(url)
        soup = BeautifulSoup(r.text)

        file_name = self.get_file_name(self.subject + "/macArthur-all-fellows", '')
        file_lines = self.countFileLineNum(file_name)
        f = self.open_db(file_name + ".tmp")
        self.count = 0

        for table in soup.find_all('table', class_='multicol'):
            sp = BeautifulSoup(table.prettify())
            for li in sp.find_all('li'):
                url = ''
                if li.a is not None:
                    url = 'https://en.wikipedia.org' + li.a['href']
                data = utils.removeDoubleSpace(li.text.strip().replace('\n', ''))
                title = data[0 : data.find(',')].strip()
                desc = "description:" + data[data.find(',') + 1 :].strip()
                print(title)
                self.count += 1
                self.write_db(f, 'macArthur-fellow-' + str(self.count), title, url, desc)

        self.close_db(f)
        if file_lines != self.count and self.count > 0:
            self.do_upgrade_db(file_name)
            print "before lines: " + str(file_lines) + " after update: " + str(self.count) + " \n\n"
        else:
            self.cancel_upgrade(file_name)
            print "no need upgrade\n"
Example #9
def requestBuilder(data):
    prop=Properties.getProperties()
    if isinstance(data,dict):
      try:
        req=APIRequest()
        if IS_FULL_URL(data['requestPath'].strip()):
           req.url=data['requestPath'].strip()
        else:
           req.url='{}{}'.format(_url(prop.get('tomcat.host'),prop.get('tomcat.port')),data['requestPath'].strip())
        if str(data.get('requestMethod')).upper() in METHODS:
           req.method=data['requestMethod']
        else:
           req.method=DEFUALT_METHOD
        if data.get('requestParameters'):
            req.params = Utils.to_json(data['requestParameters'].strip())
        if data.get('requestBody'):
            req.data = Utils.to_json(data['requestBody'].strip())
        if data.get('requestJson'):
            req.json = Utils.to_json(data['requestJson'].strip())
        if data.get('requestHeader'):
            req.headers = Utils.to_json(data['requestHeader'].strip())
        if data.get('keywords'):
            if 'needUserToken:Y'.upper() in data.get('keywords').upper():
               if 'mogoroom-partner' in data['requestPath'] and not prop.get('status.partner'):
                    if req.data:
                        for key,value in prop.get('partner.token').items():
                            req.data[key]=value
                    else:
                        req.data=prop.get('partner.token')
        return req.prepare()
      except Exception:
        raise ValueError("Wrong args for building HTTP request.")
Example #10
    def __init__(
            self, answers, APP, nodeps=False, update=False, target_path=None,
            dryrun=False, answers_format=ANSWERS_FILE_SAMPLE_FORMAT, **kwargs):
        self.dryrun = dryrun
        self.kwargs = kwargs

        app = APP  # FIXME

        self.nulecule_base = Nulecule_Base(
            nodeps, update, target_path, dryrun, answers_format)

        if os.path.exists(app):
            logger.info("App path is %s, will be populated to %s", app, target_path)
            app = self._loadApp(app)
        else:
            logger.info("App name is %s, will be populated to %s", app, target_path)

        printStatus("Loading app %s ." % app)
        if not target_path:
            if self.nulecule_base.app_path:
                self.nulecule_base.target_path = self.nulecule_base.app_path
            else:
                self.nulecule_base.target_path = os.getcwd()

        self.utils = Utils(self.nulecule_base.target_path)

        self.nulecule_base.app = app

        self.answers_file = answers
        self.docker_cli = Utils.getDockerCli(self.dryrun)
Example #11
    def processTR35(self):
        utils = Utils()
        for i in range(0, 3):
            year = str(2013 + i)
            r = requests.get('http://www.technologyreview.com/lists/innovators-under-35/' + year)
            soup = BeautifulSoup(r.text)
            ul = soup.find('ul', class_='people')
            soup = BeautifulSoup(ul.prettify())

            file_name = self.get_file_name(self.subject + "/mit-tr35/tr35-" + year + "#", '')
            file_name = file_name[0 : file_name.find('#')]
            file_lines = self.countFileLineNum(file_name)
            f = self.open_db(file_name + ".tmp")
            self.count = 0

            for li in soup.find_all('li'):
                data = utils.removeDoubleSpace(li.text.strip().replace('\t', '').replace('\n', ''))
                title = data[0 : data.find(',')].strip()
                desc = 'description:' + data[data.find(',') + 1 :].strip() 
                print(title)
                print(desc)
                self.count += 1
                self.write_db(f, 'tr35-' + year + '-' + str(self.count), title, 'http://www.technologyreview.com/' + li.a['href'], desc)
            self.close_db(f)
            if file_lines != self.count and self.count > 0:
                self.do_upgrade_db(file_name)
                print "before lines: " + str(file_lines) + " after update: " + str(self.count) + " \n\n"
            else:
                self.cancel_upgrade(file_name)
                print "no need upgrade\n"
Example #12
def updateWeeklyTop10Collection(top10, startDate, endDate):
    if top10 is not None and startDate is not None and endDate is not None:
        if len(top10) > 0:
            result = ttDB['week'].replace_one(
                {'startDateStr': startDate['dateStr'], 'endDateStr': endDate['dateStr']},
                {
                    'startDateStr': str(startDate['dateStr']),
                    'startDay': str(startDate['day']),
                    'startMonth': str(startDate['month']),
                    'startYear': str(startDate['year']),
                    'endDateStr': str(endDate['dateStr']),
                    'endDay': str(endDate['day']),
                    'endMonth': str(endDate['month']),
                    'endYear': str(endDate['year']),
                    'top10': [top10[i].getObject() for i in range(10)]
                },
                upsert=True
            )
            if not result.acknowledged:
                Utils.emitWarning([str(datetime.utcnow()),"Failed to save weekly top10. Acknowledgment was False."])
    else:
        Utils.emitWarning([str(datetime.utcnow()),"Params were not as expected when trying to save weekly top10."])
Example #13
def updateMonthlyTop10Collection(top10, month, year):
    if top10 is not None and month is not None and year is not None:
        if len(top10) > 0:
            result = ttDB['month'].replace_one(
                {'month': month, 'year': year},
                {
                    'month': month,
                    'year': year,
                    'top10': [top10[i].getObject() for i in range(10)]
                },
                upsert=True
            )
            if not result.acknowledged:
                Utils.emitWarning([str(datetime.utcnow()),"Failed to save monthly top10. Acknowledgment was False."])
    else:
        Utils.emitWarning([str(datetime.utcnow()),"Params were not as expected when trying to save monthly top10."])
Example #14
def updateDailyTop10Collection(top10, date):
    if (date['dateStr'] is not None and date['day'] is not None
            and date['month'] is not None and date['year'] is not None and top10 is not None):
        if len(top10) > 0:
            result = ttDB['day'].replace_one(
                date,
                {
                    'dateStr': str(date['dateStr']),
                    'day': str(date['day']),
                    'month': str(date['month']),
                    'year': str(date['year']),
                    'top10': [top10[i].getObject() for i in range(10)]
                },
                upsert=True
            )
            if not result.acknowledged:
                Utils.emitWarning([str(datetime.utcnow()),"Failed to save daily top10. Acknowledgment was False."])
    else:
        Utils.emitWarning([str(datetime.utcnow()),"Params were not as expected when trying to save daily top10."])
Example #15
def updateAllTimeTop10Collection(top10):
    if top10 is not None:
        if len(top10) > 0:
            result = ttDB['alltime'].replace_one(
                {'replace_key': 'AllTime'},
                {
                    'replace_key': 'AllTime',
                    'top10': [top10[i].getObject() for i in range(10)]
                },
                upsert=True
            )
            if not result.acknowledged:
                Utils.emitWarning([str(datetime.utcnow()),"Failed to save alltime top10. Acknowledgment was False."])
    else:
        Utils.emitWarning([str(datetime.utcnow()),"Params were not as expected when trying to save alltime top10."])
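Examples 12 through 15 repeat the same replace-with-upsert pattern; a consolidated helper could look like the sketch below (a hypothetical refactor built from the names already used above, not code from the project):

def replaceTop10(collectionName, filterDoc, extraFields, top10, label):
    if top10 is None or len(top10) == 0:
        Utils.emitWarning([str(datetime.utcnow()),
                           "Params were not as expected when trying to save %s top10." % label])
        return
    doc = dict(extraFields)
    doc['top10'] = [top10[i].getObject() for i in range(10)]
    result = ttDB[collectionName].replace_one(filterDoc, doc, upsert=True)
    if not result.acknowledged:
        Utils.emitWarning([str(datetime.utcnow()),
                           "Failed to save %s top10. Acknowledgment was False." % label])

updateAllTimeTop10Collection, for instance, would then reduce to replaceTop10('alltime', {'replace_key': 'AllTime'}, {'replace_key': 'AllTime'}, top10, 'alltime').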
Example #16
 def __init__(self, interesting_columns):
     Utils.__init__(self)
     self.training_list = self.get_training_list()
     self.test_list = self.get_test_list()
     self.stores_list = self.get_stores_list()
     self.classifier = []
     self.interesting_columns = interesting_columns
Example #17
class snapshot(unittest.TestCase):
    def setUp(self):
        self.configfile = sys.argv[1]
        self.utils = Utils()
        self.all_config = self.utils.init_allconfig(self.configfile)

        self.utils.deploy_usx(self.all_config)

        self.amc_ip = self.all_config['amc_ip']
        self.tests = Ha(self.configfile)
        self.tools = Tools(self.amc_ip)

    def tearDown(self):
        clean_testbed_op = ["clean_testbed:"]
        self.tests._exec(clean_testbed_op)
        print("done!!!!!!!!!!!!")

    def snapshot(self):
        volume_type_list = self.tests._get_volume_type_list_from_config()
        for volume_type in volume_type_list:
            self.assertEqual(self.test_snapshot(volume_type), True)
  #      self.assertEqual(self.test_snapshot('simplememory'), True)

    def test_snapshot(self, volume):

        daily_schedule_snapshot_op = ["create_daily_schedule_snapshot:'vols'[" + volume + "][0]:"]
        self.tests._exec(daily_schedule_snapshot_op)
        
        check_snapshot_by_lvs_op = ["check_snapshot_by_lvs:'vols'[" + volume + "][0]:"]
        self.tests._exec(check_snapshot_by_lvs_op)

        check_snapshot_number_op = ["check_snapshot_number:'vols'[" + volume + "][0]:1"]
        return self.tests._exec(check_snapshot_number_op)
Example #18
 def process_actions(self):
     self._get_actions()
     for action in self.actions:
         action_class_name = Utils.convert_action_to_class_name(action)
         action_class = Utils.load_class(action_class_name)(self.job_name, self.options)
         action_class.process(self.job_definitions)
     return self.job_definitions
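Utils.convert_action_to_class_name and Utils.load_class are not shown; a plausible implementation of this snake_case-to-CamelCase dynamic dispatch using importlib (an assumption about the helpers, including the module name):

import importlib

def convert_action_to_class_name(action):
    # e.g. 'copy_files' -> 'CopyFiles'
    return ''.join(part.capitalize() for part in action.split('_'))

def load_class(class_name, module_name='actions'):  # module name is hypothetical
    # Import the module and fetch the class object by name.
    return getattr(importlib.import_module(module_name), class_name)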
Example #19
def runAnalysis(originalFile, saveDir, transform, metric, step, mergeLimit, centralDirectory, transformName, metricName):

    array = utils.get_array(originalFile)

    transformed = transform(array)

    
    print("size: " + str(len(transformed)) + " X " + str(len(transformed[0])) + " (pixels: " + str(len(transformed) * len(transformed[0])) + ")")
   
    ws = ws_map.build_index_map(utils.get_gradient(transformed))

    edge_merges = dict()
    
    edge_map = ws.get_edge_weight_map(metric)
    
    try:
        FileWriter.writeSparseMatrix(saveDir + getFilename(originalFile) + '-' + transformName + '-' + metricName + '.data', edge_map)
        if len(centralDirectory) != 0:
            FileWriter.writeSparseMatrix(centralDirectory + getFilename(originalFile) + '-' + transformName + '-' + metricName + '.data', edge_map)
    except IOError:
        sys.stderr.write("Unable to write file\n")
      
    
    s = 0
    while s < mergeLimit:
        edge_merges = WatershedMerge.merge_watersheds_at_limit(edge_merges, ws, s, edge_map)
        utils.write_image(ws.get_watershed_picture(edge_merges,len(transformed), len(transformed[0])), saveDir + str(s) + '.png')
        s += step
Example #20
    def adj_west(self):
        """Adjust the FoV position toward the west direction."""
         
        entries_GWsky = self.load_entries("GWsky_entries")
        fov_center_ra, fov_center_dec = entries_GWsky[0::2], entries_GWsky[1::2]

        for ra_start, dec_start in zip (fov_center_ra, fov_center_dec):
            ra_start, dec_start = float(ra_start), float(dec_start)

            aladin.select("P:"+str(ra_start) + ',' + str(dec_start))
             
            ra_distance = self.ra0ra1((0 - self.SHIFT_CORRECTION + self.shift_right),
                                        float(dec_start), float(dec_start))
            
            aladin.select("P:"+str(ra_start) + ',' + str(dec_start))
                
            west_adj = [(float(ra_start) - ra_distance), (float(dec_start) + 0)]
            ra, dec = west_adj[0], west_adj[1]

            aladin.set_target(ra, dec)
            aladin.set_plane_id("P:"+str(ra) + ',' + str(dec))       

            new_sky_pos = [ra,dec] # cycle variables
            self.entries_GWsky_new.extend(new_sky_pos)

            Utils.delete_pointing(infile="GWsky_pointings.txt",
                                  ra=str(ra_start), dec=str(dec_start))  

        with open('GWsky_entries', 'wb') as data:
            pickle.dump(self.entries_GWsky_new, data)
Example #21
	def setViewIndex(self, index):
		old_index = self.viewIndex()
		if index == old_index:
			return

		self.stacked.setCurrentIndex(index)
		if index == 0:
			self.setWindowTitle( "Filter/Plot panel" )
		elif index == 1:
			self.setWindowTitle( "Classification panel" )
		elif index == 2:
			self.setWindowTitle( "Processing panel" )
		else:
			self.setWindowTitle( "GEM-MT panel" )

		# show/hide layers
		QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
		prev_render_flag = Utils.iface.mapCanvas().renderFlag()
		Utils.iface.mapCanvas().setRenderFlag( False )
		try:
			if index in (1, 2):
				# add the layer with classified data
				Utils.addVectorLayer( Utils.classifiedVl() )
				# show or hide the events layer respectively when the classification or processing panel is shown
				Utils.iface.legendInterface().setLayerVisible( Utils.eventsVl(), index == 1 )
		finally:
			# restore render flag state and cursor
			Utils.iface.mapCanvas().setRenderFlag( prev_render_flag )
			QApplication.restoreOverrideCursor()
Example #22
    def clicked(self, event):
        """Moving the user-defined FoV footprint."""
        
        run_sequence = ShowSkyCoverage()
        
        if event.widget == self.B02:
            run_sequence.north()         # north

        if event.widget == self.B12:
            move_fov = ShiftFoV()
            move_fov.north_shift()       # ↕↔
                      
        if event.widget == self.B30:
            run_sequence.east()          # east

        if event.widget == self.B31:
            move_fov = ShiftFoV()
            move_fov.east_shift()        # ↕↔
            
        if event.widget == self.B32:
            new_starting_fov = StartingFoV()  # start FoV

        if event.widget == self.B33:
            move_fov = ShiftFoV()
            move_fov.west_shift()        # ↕↔
            
        if event.widget == self.B34:   
            run_sequence.west()          # west
            
        if event.widget == self.B42:
            move_fov = ShiftFoV()
            move_fov.south_shift()       # ↕↔
            
        if event.widget == self.B52:    
            run_sequence.south()         # south

        if event.widget == self.B60:     # ↞        
            adj = Adjustments()
            adj.adj_east()
            
        if event.widget == self.B61:     # ↠       
            adj = Adjustments()
            adj.adj_west()            
                
        if event.widget == self.B63:     # ↟          
            adj = Adjustments()
            adj.adj_north()

        if event.widget == self.B64:     # ↡           
            adj = Adjustments()
            adj.adj_south()         
               
        if event.widget == self.B62:     # ✓ Accept
            adj = Adjustments()
            adj.adj_accept()

        if event.widget == self.B72:     # ▶ Folder
            Utils.move_to_folder(planes=['Q:*','P:*'],
                                 folders=['Queries','FoV'])
Example #23
def test(X, y, learned_params):
    
    N = np.shape(X)[0] #no of instances
    X = np.append(np.ones((N,1)), X,1) #appending a column of ones as bias (used in logistic regression weights prediction)
    F = np.shape(X)[1] #no of features+1
    
    p_old = 1
    class_prob = []
    for w in learned_params.keys():
        p = Utils.logistic_transformation( learned_params[w], X )
        class_prob.append(p_old-p)
        p_old = p
    class_prob.append(p_old)
    


    max_prob = np.max(class_prob, 0)

    predicted_y = []
    output_label = range(min_class_label, max_class_label+1)
    for i in range(np.size(max_prob)):
            class_label = np.where(np.asarray(class_prob)[:, i] == max_prob[i])[0]
            #print class_label
            predicted_y.append(output_label[class_label[0]])
    
    #print "predicted y :", predicted_y
    #print "Actual y:", y
    accuracy = Utils.calculate_accuracy(np.array(y), np.array(predicted_y))
    f_score_mean, f_score_std = Utils.calculate_average_F1score(np.array(y), np.array(predicted_y), min_class_label, max_class_label)
    return (accuracy, f_score_mean, f_score_std)
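The loop above turns cumulative logistic outputs into per-class probabilities by successive differences, with the last class taking the remaining mass; a small numeric illustration of just that step (values invented):

# Suppose the learned models give cumulative probabilities P(y > k) of
# 0.9, 0.6 and 0.2 for one instance (three thresholds -> four classes).
cumulative = [0.9, 0.6, 0.2]
p_old, class_prob = 1.0, []
for p in cumulative:
    class_prob.append(p_old - p)   # P(y == k) = P(y > k-1) - P(y > k)
    p_old = p
class_prob.append(p_old)           # remaining mass goes to the last class
print(class_prob)                  # ~[0.1, 0.3, 0.4, 0.2], which sums to 1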
Example #24
 def create_server(cls, nova_client, name, network_id, data):
     image = "5cebb13a-f783-4f8c-8058-c4182c724ccd"      # Ubuntu 12.04
     flavor = 6      # 8GB
     server = None
     
     while server is None:
         try:
             server = nova_client.servers.create(
                     name = name,
                     image = image,
                     flavor = flavor,
                     nics = [
                         {"net-id": default_nics[0]},
                         {"net-id": default_nics[1]},
                         {"net-id": network_id},
                         ],
                     files = {
                         "/root/.ssh/authorized_keys": \
                             Utils.read_data("/root/.ssh/id_rsa.pub"),
                         "/etc/prep.sh": \
                             Utils.read_data("vm_scripts/prep.sh"),
                         "/root/upgrade.sh": \
                             Utils.read_data("vm_scripts/upgrade.sh"),
                         "/root/install_oc.sh": data,
                         }
                     )
             
             msg = "Scheduled server creation: %s | %s" % \
                     (server.id, server.name)
             logger.info(msg)
         except Exception as e:
             logger.error(str(e))
             logger.error("Retrying in 10 secs...")
             sleep(10)
Example #25
 def draw(self):
     if self.buttons[0].selected: #PvP
         self.buttons[0].selected = False
         self.window.player1.isComputer = False
         self.window.player1.name = "Player 1"
         self.window.player2.isComputer = False
         self.window.player2.name = "Player 2"
         self.window.setupScreenP1.resetBottomText()
         self.window.setupScreenP2.resetBottomText()
         self.window.currentScreen = self.window.setupScreenP1
     elif self.buttons[1].selected: #PvPC
         self.window.player1.isComputer = False
         self.window.player1.name = "Player"
         self.window.player2.isComputer = True
         self.window.player2.name = "Computer"
         self.window.setupScreenP1.resetBottomText()
         self.window.currentScreen = self.window.setupScreenP1
         self.buttons[1].selected = False
     elif self.buttons[2].selected: #PCvPC
         self.window.player1.isComputer = True
         self.window.player1.name = "Computer 1"
         self.window.player2.isComputer = True
         self.window.player2.name = "Computer 2"
         self.window.currentScreen = self.window.setupScreenP1
         self.buttons[2].selected = False
     else:
         for button in self.buttons:
             Utils.drawButton(button)              
         pyglet.text.Label('Stratego-Ascension',
                           font_name='Arial',
                           font_size=30,
                           x=self.width/2, y= self.height-200,
                           anchor_x='center', anchor_y='center').draw()
Example #26
 def convert_json(self, object_type=None, detail=True, file_name='contrail_debug_output.json'):
     print_list = self._get_objects_from_context(object_type)
     print_list = Utils.remove_none(print_list)
     with open(file_name, 'w') as fp:
         json.dump(print_list, fp)
Example #27
 def convert_to_file_structure(self, object_type=None, cur_path='./', console_print=False):
     convert_dict = self._get_objects_from_context(object_type)
     Utils.dict_to_filesystem({'visited_vertexes':convert_dict['visited_vertexes']},
                              cur_path=cur_path,
                              console=console_print, depth=3)
     Utils.dict_to_filesystem({'summary_of_visited_vertexes': convert_dict['summary_of_visited_vertexes']},
                              cur_path=cur_path, console=console_print, depth=1)
Example #28
 def play(self):
   if len(self.hand) > 1:
     pick = int(Utils.read('Card to Play? '))
     played = self.hand[pick]
     self.discard(played)
     Utils.write('')
     played.effect(self, self._other_players())
Example #29
	def test_sync_directories(self):
		sourceFiles = ['/movie/transformers', '/movie/transformers/transformers.mp4']
		target = '/users/willsam100/Desktop/'
		ftpMock = FTP('')
		ftpMock.retrbinary = MagicMock()
		ftpMock.nlst = MagicMock(return_value=[])

		now = util.now()
		targetDir = util.join(target, util.basename(sourceFiles[0]))
		targetFile = util.join(targetDir, util.basename(sourceFiles[1]))
		self.__cleanUp__(targetDir)
		
		utilMock = Utils()
		def splitDirectoriesAndFiles(*args):
			def secondCall_splitDirectoriesAndFiles(*args):
				return ([], sourceFiles[1:])
			utilMock.splitDirectoriesAndFiles.side_effect = secondCall_splitDirectoriesAndFiles
			return ([sourceFiles[0]], [])

		utilMock.splitDirectoriesAndFiles = MagicMock(side_effect=splitDirectoriesAndFiles)
		utilMock.exists = MagicMock(return_value=False)
		utilMock.computeSpeed = MagicMock(return_value=40)
		utilMock.now = MagicMock(return_value=now)

		transfer = Transfer(ftpMock, utilMock, target)
		transfer.sync(sourceFiles)

		
		utilMock.splitDirectoriesAndFiles.assert_called_with(ftpMock, [])
		self.assertEqual(utilMock.exists.call_args_list,
			[mock.call(targetDir), mock.call(targetFile)])
		ftpMock.retrbinary.assert_called_with('RETR ' + sourceFiles[1], mock.ANY)
		self.assertTrue(util.exists(targetFile))
		self.__cleanUp__(targetDir)
		utilMock.computeSpeed.assert_called_with(now, targetFile)
Example #30
    def adj_south(self):
        """Adjust the FoV position toward the south direction."""
         
        entries_GWsky = self.load_entries("GWsky_entries")        
        fov_center_ra, fov_center_dec = entries_GWsky[0::2], entries_GWsky[1::2]
            
        for ra_start, dec_start in zip (fov_center_ra, fov_center_dec):
            ra_start, dec_start = float(ra_start), float(dec_start)
               
            aladin.select("P:"+str(ra_start) + ',' + str(dec_start))
                
            dist = self.intercardinal_distance(ra_start, dec_start,
                                               self.shift_down, shift_right_left=0)
            south_adj = [(dist),
                         (dec_start + 0 - self.shift_down)]
             
            ra, dec = south_adj[0], south_adj[1]
                
            aladin.set_target(ra, dec)
            aladin.set_plane_id("P:"+str(ra) + ',' + str(dec))
                
            new_sky_pos = [ra,dec] # cycle variables
            self.entries_GWsky_new.extend(new_sky_pos)
            
            #aladin.remove("Q:"+str(ra_start)+"/"+str(dec_start))
            Utils.delete_pointing(infile="GWsky_pointings.txt",
                                  ra=str(ra_start), dec=str(dec_start))  

        with open('GWsky_entries', 'wb') as data:
            pickle.dump(self.entries_GWsky_new, data)
Example #31
class TestHtmlAnalysis(FunctionalTest):
    def setUp(self):
        super(TestHtmlAnalysis, self).setUp()
        print "Initializing Test Fixture..."
        self.test_db = UnitTestDatabase()
        self.test_db.load_historical_data_for_week(2013, 1)
        self.test_db.load_historical_data_for_week(2013, 2)
        self.utils = Utils(self.browser, self.server_url)
        self.utils.unlock_game_scores(2013, 1)
        self.utils.unlock_game_scores(2013, 2)

        # load and submit the update games page
        self.player = self.utils.get_player_from_ss_name(
            2013, 'Reams, Byron L')
        self.utils.login_assigned_user(name='Byron',
                                       password='******',
                                       player=self.player)
        self.utils.update_games_page(year=2013, week_number=1)
        self.assertEqual(self.browser.title, 'Week 1 Update Games')
        self.__verify_user_logged_in("Byron")
        self.utils.click_input_button('submit_form')
        # ensure update page has logged in user
        # Looks like this behaves differently in a cygwin environment.
        # I believe what happened is that the webdriver did not return
        # control until after the cache update was completed, which means
        # it went straight to the Week 1 Leaderboard page.
        if self.browser.title != 'Week 1 Leaderboard':
            self.assertEqual(self.browser.title, 'Week 1 Page Update')
            self.__verify_user_logged_in("Byron")

        # wait for page to redirect to week results within 3 minutes
        # verify still logged in
        self.utils.wait_for_page('Week 1 Leaderboard', timeout=60 * 3)
        self.__verify_user_logged_in("Byron")

        self.utils.landing_page()

    def test_check_overall_results_weeknotstarted_page(self):
        self.test_db.setup_week_not_started(2013, 3)
        self.utils.set_pick_deadline_to_expired(2013, 3)
        self.utils.overall_results_page(2013)
        db = Database()
        self.assertEqual(db.get_pool_state(2013), 'week_not_started')
        soup = BeautifulSoup(self.browser.page_source, 'lxml')
        tags = soup.find_all(id='weeknotstarted-pool-state')
        self.assertEqual(len(tags), 1)
        self.assertEqual(
            tags[0].text,
            'week 3 pick entry deadline has passed, no games have started')
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))
        self.test_db.delete_database()

    def test_check_overall_results_enterpicks_page(self):
        self.test_db.setup_week_not_started(2013, 3)
        self.utils.set_pick_deadline_not_expired(2013, 3)
        self.utils.unlock_picks(2013, 3)
        self.utils.overall_results_page(2013)
        db = Database()
        self.assertEqual(db.get_pool_state(2013), 'enter_picks')
        soup = BeautifulSoup(self.browser.page_source, 'lxml')
        tags = soup.find_all(id='enterpicks-pool-state')
        self.assertEqual(len(tags), 1)
        self.assertEqual(tags[0].text, 'currently entering picks for week 3')
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))
        self.test_db.delete_database()

    def test_check_overall_results_weekinprogress_page(self):
        self.test_db.setup_week_in_progress(2013, 3)
        self.utils.set_pick_deadline_to_expired(2013, 3)
        self.utils.overall_results_page(2013)
        db = Database()
        self.assertEqual(db.get_pool_state(2013), 'week_in_progress')
        soup = BeautifulSoup(self.browser.page_source, 'lxml')
        tags = soup.find_all(id='weekinprogress-pool-state')
        self.assertEqual(len(tags), 1)
        self.assertEqual(tags[0].text, 'week 3 in progress')
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))
        self.test_db.delete_database()

    def test_check_overall_results_weekfinal_page(self):
        self.utils.overall_results_page(2013)
        db = Database()
        self.assertEqual(db.get_pool_state(2013), 'week_final')
        soup = BeautifulSoup(self.browser.page_source, 'lxml')
        tags = soup.find_all(id='weekfinal-pool-state')
        self.assertEqual(len(tags), 1)
        self.assertEqual(tags[0].text, 'week 2 final')
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))
        self.test_db.delete_database()

    def test_check_overall_results_final_page(self):
        self.test_db.setup_week_final(2013, 13)
        self.utils.overall_results_page(2013)
        db = Database()
        self.assertEqual(db.get_pool_state(2013), 'end_of_year')
        soup = BeautifulSoup(self.browser.page_source, 'lxml')
        tags = soup.find_all(id='final-pool-state')
        self.assertEqual(len(tags), 1)
        self.assertEqual(tags[0].text, 'final results')
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))
        self.test_db.delete_database()

    def test_check_week_results_weeknotstarted_page(self):
        self.test_db.setup_week_not_started(2013, 3)
        self.utils.set_pick_deadline_to_expired(2013, 3)
        self.utils.week_results_page(2013, 3)
        cwr = CalculateWeekResults(2013, 3, True)
        self.assertEqual(cwr.get_week_state(), NOT_STARTED)
        soup = BeautifulSoup(self.browser.page_source, 'lxml')
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))
        self.test_db.delete_database()

    def test_check_week_results_weekinprogress_page(self):
        self.test_db.setup_week_in_progress(2013, 3)
        self.utils.set_pick_deadline_to_expired(2013, 3)
        self.utils.week_results_page(2013, 3)
        cwr = CalculateWeekResults(2013, 3, True)
        soup = BeautifulSoup(self.browser.page_source, 'lxml')
        self.assertEqual(cwr.get_week_state(), IN_PROGRESS)
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))
        self.test_db.delete_database()

    def test_check_week_results_weekfinal_page(self):
        self.utils.week_results_page(2013, 2)
        cwr = CalculateWeekResults(2013, 2, True)
        soup = BeautifulSoup(self.browser.page_source, 'lxml')
        self.assertEqual(cwr.get_week_state(), FINAL)
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))
        self.test_db.delete_database()

    def test_check_player_results_page(self):
        self.utils.player_results_page(2013, 2, self.player.id)
        soup = BeautifulSoup(self.browser.page_source, 'lxml')
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))
        self.test_db.delete_database()

    def __verify_user_logged_in(self, name):
        logged_in_text = self.browser.find_element_by_id('ident_id').text
        expected = 'Logged in as: %s' % (name)
        self.assertEqual(expected, logged_in_text)
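Each test above repeats the same duplicate-id scan; within this class the block could be folded into one helper, as in the sketch below (a hypothetical refactor of the repeated code, not part of the original suite):

    def __assert_no_duplicate_ids(self, soup):
        # Same scan as in the tests above, shared in one place.
        all_ids_counter = Counter(
            [elem.get('id') for elem in soup.find_all(id=True)])
        duplicate_ids = [
            id for id in all_ids_counter if all_ids_counter[id] > 1
        ]
        self.longMessage = True
        self.assertEqual(
            duplicate_ids, [],
            'The following id attributes are duplicate: \n%s' % '\n'.join(
                ['%s: %d' % (id, all_ids_counter[id])
                 for id in duplicate_ids]))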
Example #32
class WPGenerator:
    """ High level object to entirely setup a WP sites with some users.

        It makes use of the lower level object (WPSite, WPUser, WPConfig)
        and provides methods to access and control the DB
    """

    DB_NAME_LENGTH = 32
    MYSQL_USER_NAME_LENGTH = 16
    MYSQL_PASSWORD_LENGTH = 20

    MYSQL_DB_HOST = Utils.get_mandatory_env(key="MYSQL_DB_HOST")
    MYSQL_SUPER_USER = Utils.get_mandatory_env(key="MYSQL_SUPER_USER")
    MYSQL_SUPER_PASSWORD = Utils.get_mandatory_env(key="MYSQL_SUPER_PASSWORD")

    WP_ADMIN_USER = Utils.get_mandatory_env(key="WP_ADMIN_USER")
    WP_ADMIN_EMAIL = Utils.get_mandatory_env(key="WP_ADMIN_EMAIL")

    def __init__(self, site_params, admin_password=None):
        """
        Class constructor

        Argument keywords:
        site_params -- dict with a row coming from the CSV file (source of truth)
                    - Field wp_tagline can be:
                    None    -> No information
                    String  -> Same tagline for all languages
                    Dict    -> key=language, value=tagline for language
        admin_password -- (optional) Password to use for 'admin' account
        """

        self._site_params = site_params

        # set the default values
        if 'unit_name' in self._site_params and 'unit_id' not in self._site_params:
            logging.info(
                "WPGenerator.__init__(): Please use 'unit_id' from CSV file (now recovered from 'unit_name')"
            )
            self._site_params['unit_id'] = self.get_the_unit_id(
                self._site_params['unit_name'])

        # if it's not given (it can happen), we initialize the title with a default value so we will be able, later, to
        # set a translation for it.
        if 'wp_site_title' not in self._site_params:
            self._site_params['wp_site_title'] = 'Title'

        # tagline
        if 'wp_tagline' not in self._site_params:
            self._site_params['wp_tagline'] = None
        else:
            # if the information is not already in a dict (this happens when it comes from the source of
            # truth, in which we only have the tagline in the primary language)
            if not isinstance(self._site_params['wp_tagline'], dict):
                wp_tagline = {}
                # We loop through languages to generate dict
                for lang in self._site_params['langs'].split(','):
                    # We set tagline for current language
                    wp_tagline[lang] = self._site_params['wp_tagline']
                self._site_params['wp_tagline'] = wp_tagline

        if self._site_params.get('installs_locked', None) is None:
            self._site_params[
                'installs_locked'] = settings.DEFAULT_CONFIG_INSTALLS_LOCKED

        if self._site_params.get('updates_automatic', None) is None:
            self._site_params[
                'updates_automatic'] = settings.DEFAULT_CONFIG_UPDATES_AUTOMATIC

        if self._site_params.get('from_export', None) is None:
            self._site_params['from_export'] = False

        if self._site_params.get('theme', None) is None:
            self._site_params['theme'] = settings.DEFAULT_THEME_NAME

        if self._site_params.get('theme_faculty', '') == '':
            self._site_params['theme_faculty'] = None

        if self._site_params.get('openshift_env') is None:
            self._site_params['openshift_env'] = settings.OPENSHIFT_ENV

        # validate input
        self.validate_mockable_args(self._site_params['wp_site_url'])
        validate_openshift_env(self._site_params['openshift_env'])

        validate_theme(self._site_params['theme'])

        if self._site_params['theme_faculty'] is not None:
            validate_theme_faculty(self._site_params['theme_faculty'])

        # create WordPress site and config
        self.wp_site = WPSite(self._site_params['openshift_env'],
                              self._site_params['wp_site_url'],
                              wp_site_title=self._site_params['wp_site_title'],
                              wp_tagline=self._site_params['wp_tagline'])

        self.wp_config = WPConfig(
            self.wp_site,
            installs_locked=self._site_params['installs_locked'],
            updates_automatic=self._site_params['updates_automatic'],
            from_export=self._site_params['from_export'])

        # prepare admin for exploitation / maintenance
        self.wp_admin = WPUser(self.WP_ADMIN_USER, self.WP_ADMIN_EMAIL)
        self.wp_admin.set_password(password=admin_password)

        # create mysql credentials
        self.wp_db_name = Utils.generate_name(self.DB_NAME_LENGTH,
                                              prefix='wp_').lower()
        self.mysql_wp_user = Utils.generate_name(
            self.MYSQL_USER_NAME_LENGTH).lower()
        self.mysql_wp_password = Utils.generate_password(
            self.MYSQL_PASSWORD_LENGTH)
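
    # Usage sketch (hypothetical values; site_params normally comes from the CSV
    # source of truth):
    #   params = {'wp_site_url': 'https://www.example.org/mysite',
    #             'openshift_env': 'int', 'langs': 'fr,en'}
    #   WPGenerator(params, admin_password='***').generate()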

    def __repr__(self):
        return repr(self.wp_site)

    def default_lang(self):
        """
        Returns default language for generated website
        :return:
        """
        return self._site_params['langs'].split(',')[0]

    def run_wp_cli(self,
                   command,
                   encoding=sys.getdefaultencoding(),
                   pipe_input=None,
                   extra_options=None):
        """
        Execute a WP-CLI command using method present in WPConfig instance.

        Argument keywords:
        command -- WP-CLI command to execute. The command doesn't have to start with "wp ".
        pipe_input -- Elements to give to the command using a pipe (ex: echo "elem" | wp command ...)
        extra_options -- Options to add at the end of the command line. Their value is taken from STDIN so they
                         have to be at the end of the command line (after --path)
        encoding -- encoding to use
        """
        return self.wp_config.run_wp_cli(command,
                                         encoding=encoding,
                                         pipe_input=pipe_input,
                                         extra_options=extra_options)

    def run_mysql(self, command):
        """
        Execute MySQL request using DB information stored in instance

        Argument keywords:
        command -- Request to execute in DB.
        """
        mysql_connection_string = "mysql -h {0.MYSQL_DB_HOST} -u {0.MYSQL_SUPER_USER}" \
            " --password={0.MYSQL_SUPER_PASSWORD} ".format(self)
        return Utils.run_command(mysql_connection_string + command)

    def list_plugins(self, with_config=False, for_plugin=None):
        """
        List plugins (and configuration) for WP site

        Keyword arguments:
        with_config -- (Bool) to specify if plugin config has to be displayed
        for_plugin -- Used only if 'with_config'=True. Allow to display only configuration for one given plugin.
        """
        logging.info(
            "WPGenerator.list_plugins(): Add parameter for 'batch file' (YAML)"
        )
        # Batch config file (config-lot1.yml) needs to be replaced by something clean as soon as we have "batch"
        # information in the source of truth!
        plugin_list = WPPluginList(settings.PLUGINS_CONFIG_GENERIC_FOLDER,
                                   'config-lot1.yml',
                                   settings.PLUGINS_CONFIG_SPECIFIC_FOLDER,
                                   self._site_params)

        return plugin_list.list_plugins(self.wp_site.name, with_config,
                                        for_plugin)

    def generate(self, deactivated_plugins=None):
        """
        Generate a complete and fully working WordPress website

        :param deactivated_plugins: List of plugins to let in 'deactivated' state after installation.
        """
        # check if we have a clean place first
        if self.wp_config.is_installed:
            logging.warning("%s - WordPress files already found", repr(self))
            return False

        # create specific mysql db and user
        logging.info("%s - Setting up DB...", repr(self))
        if not self.prepare_db():
            logging.error("%s - could not set up DB", repr(self))
            return False

        # download, config and install WP
        logging.info("%s - Downloading WP...", repr(self))
        if not self.install_wp():
            logging.error("%s - could not install WP", repr(self))
            return False

        # install and configure theme (default is settings.DEFAULT_THEME_NAME)
        logging.info("%s - Installing all themes...", repr(self))
        WPThemeConfig.install_all(self.wp_site)
        logging.info("%s - Activating theme '%s'...", repr(self),
                     self._site_params['theme'])
        theme = WPThemeConfig(self.wp_site, self._site_params['theme'],
                              self._site_params['theme_faculty'])
        if not theme.activate():
            logging.error("%s - could not activate theme '%s'", repr(self),
                          self._site_params['theme'])
            return False

        # install, activate and config mu-plugins
        # must be done before plugins if automatic updates are disabled
        logging.info("%s - Installing mu-plugins...", repr(self))
        self.generate_mu_plugins()

        # delete all widgets, inactive themes and demo posts
        self.delete_widgets()
        self.delete_inactive_themes()
        self.delete_demo_posts()

        # install, activate and configure plugins
        logging.info("%s - Installing plugins...", repr(self))
        if deactivated_plugins:
            logging.info("%s - %s plugins have to stay deactivated...",
                         repr(self), len(deactivated_plugins))
        self.generate_plugins(deactivated_plugins=deactivated_plugins)

        # flag success
        return True

    def prepare_db(self):
        """
        Prepare the DB to store WordPress configuration.
        """
        # create htdocs path
        if not Utils.run_command("mkdir -p {}".format(self.wp_site.path)):
            logging.error("%s - could not create tree structure", repr(self))
            return False

        # create MySQL DB
        command = "-e \"CREATE DATABASE {0.wp_db_name} CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci;\""
        if not self.run_mysql(command.format(self)):
            logging.error("%s - could not create DB", repr(self))
            return False

        # create MySQL user
        command = "-e \"CREATE USER '{0.mysql_wp_user}' IDENTIFIED BY '{0.mysql_wp_password}';\""
        if not self.run_mysql(command.format(self)):
            logging.error("%s - could not create user", repr(self))
            return False

        # grant privileges
        command = "-e \"GRANT ALL PRIVILEGES ON \`{0.wp_db_name}\`.* TO \`{0.mysql_wp_user}\`@'%';\""
        if not self.run_mysql(command.format(self)):
            logging.error("%s - could not grant privileges to user",
                          repr(self))
            return False

        # flag success by returning True
        return True

    def install_wp(self):
        """
        Execute WordPress installation
        """
        # install WordPress
        if not self.run_wp_cli("core download --version={}".format(
                self.wp_site.WP_VERSION)):
            logging.error("%s - could not download", repr(self))
            return False

        # config WordPress
        command = "config create --dbname='{0.wp_db_name}' --dbuser='******'" \
            " --dbpass='******' --dbhost={0.MYSQL_DB_HOST}"
        # Generate options to add PHP code in wp-config.php file to switch to ssl if proxy is in SSL.
        # Also allow the unfiltered_upload capability to be set, this is used just during export, the
        # capability is explicitly removed after the export.
        extra_options = "--extra-php <<PHP \n" \
            "if (isset( \$_SERVER['HTTP_X_FORWARDED_PROTO'] ) && \$_SERVER['HTTP_X_FORWARDED_PROTO'] == 'https'){\n" \
            "\$_SERVER['HTTPS']='on';} \n" \
            "define('ALLOW_UNFILTERED_UPLOADS', true);"
        if not self.run_wp_cli(command.format(self),
                               extra_options=extra_options):
            logging.error("%s - could not create config", repr(self))
            return False

        # fill out first form in install process (setting admin user and permissions)
        command = "--allow-root core install --url={0.url} --title='{0.wp_site_title}'" \
            " --admin_user={1.username} --admin_password='******'"\
            " --admin_email='{1.email}'"
        if not self.run_wp_cli(command.format(self.wp_site, self.wp_admin),
                               encoding="utf-8"):
            logging.error("%s - could not setup WP site", repr(self))
            return False

        # Set Tagline (blog description) if we have one. If we don't have a tagline and set it to an empty
        # value, it won't be available for translation in Polylang, so we keep the default value set by WordPress
        if self._site_params['wp_tagline']:
            # The command is in single quotes and the tagline in double quotes to avoid problems if the
            # tagline text contains a single quote. We initialize blogdescription with the default language
            if not self.run_wp_cli('option update blogdescription "{}"'.format(
                    self._site_params['wp_tagline'][self.default_lang()]),
                                   encoding="utf-8"):
                logging.error("%s - could not configure blog description",
                              repr(self))
                return False

        # Configure permalinks
        command = "rewrite structure '/%postname%/' --hard"
        if not self.run_wp_cli(command):
            logging.error("%s - could not configure permalinks", repr(self))
            return False

        # Configure TimeZone
        command = "option update timezone_string Europe/Zurich"
        if not self.run_wp_cli(command):
            logging.error("%s - could not configure time zone", repr(self))
            return False

        # Configure Time Format 24H
        command = "option update time_format H:i"
        if not self.run_wp_cli(command):
            logging.error("%s - could not configure time format", repr(self))
            return False

        # Configure Date Format d.m.Y
        command = "option update date_format d.m.Y"
        if not self.run_wp_cli(command):
            logging.error("%s - could not configure date format", repr(self))
            return False

        # Add french for the admin interface
        command = "language core install fr_FR"
        self.run_wp_cli(command)

        # remove unfiltered_upload capability. Will be reactivated during
        # export if needed.
        command = 'cap remove administrator unfiltered_upload'
        self.run_wp_cli(command)

        # flag success by returning True
        return True

    def delete_widgets(self, sidebar="homepage-widgets"):
        """
        Delete all widgets from the given sidebar.

        There are 2 sidebars:
        - One sidebar for the homepage. In this case the sidebar parameter is "homepage-widgets".
        - Another sidebar for all other pages. In this case the sidebar parameter is "page-widgets".
        """
        cmd = "widget list {} --fields=id --format=csv".format(sidebar)
        # The result is sliced to drop the first element, which is the field name header (id).
        # Because the WP-CLI command can output several fields, it always prints a header row.
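        # A hypothetical example of the WP-CLI output parsed below:
        #   id
        #   text-2
        #   calendar-3
        # i.e. a header row followed by one widget id per line, hence the [1:] slice.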
        widgets_id_list = self.run_wp_cli(cmd).split("\n")[1:]
        for widget_id in widgets_id_list:
            cmd = "widget delete " + widget_id
            self.run_wp_cli(cmd)
        logging.info("%s - All widgets deleted", repr(self))

    def validate_mockable_args(self, wp_site_url):
        """ Call validators in an independant function to allow mocking them """
        if Utils.get_domain(wp_site_url) != settings.HTTPD_CONTAINER_NAME:
            URLValidator()(wp_site_url)

    def get_the_unit_id(self, unit_name):
        """
        Get unit id via LDAP Search
        """
        if unit_name is not None:
            return get_unit_id(unit_name)

    def delete_inactive_themes(self):
        """
        Delete all inactive themes
        """
        cmd = "theme list --fields=name --status=inactive --format=csv"
        themes_name_list = self.run_wp_cli(cmd).split("\n")[1:]
        for theme_name in themes_name_list:
            cmd = "theme delete {}".format(theme_name)
            self.run_wp_cli(cmd)
        logging.info("%s - All inactive themes deleted", repr(self))

    def delete_demo_posts(self):
        """
        Delete 'welcome blog' and 'sample page'
        """
        cmd = "post list --post_type=page,post --field=ID --format=csv"
        posts_list = self.run_wp_cli(cmd).split("\n")
        for post in posts_list:
            cmd = "post delete {} --force".format(post)
            self.run_wp_cli(cmd)
        logging.info("%s - All demo posts deleted", repr(self))

    def get_number_of_pages(self):
        """
        Return the number of pages
        """
        cmd = "post list --post_type=page --fields=ID --format=csv"
        return len(self.run_wp_cli(cmd).split("\n")[1:])

    def generate_mu_plugins(self):
        # TODO: add those plugins into the general list of plugins (with the class WPMuPluginConfig)
        WPMuPluginConfig(self.wp_site, "epfl-functions.php").install()
        WPMuPluginConfig(self.wp_site, "EPFL_custom_editor_menu.php").install()

        if self.wp_config.installs_locked:
            WPMuPluginConfig(self.wp_site,
                             "EPFL_installs_locked.php").install()

        # If the site is created from a Jahia export, automatic updates are disabled and will be re-enabled
        # after the export process is done.
        if self.wp_config.updates_automatic and not self.wp_config.from_export:
            WPMuPluginConfig(self.wp_site,
                             "EPFL_enable_updates_automatic.php").install()
        else:
            WPMuPluginConfig(self.wp_site,
                             "EPFL_disable_updates_automatic.php").install()

    def enable_updates_automatic_if_allowed(self):
        if self.wp_config.updates_automatic:
            WPMuPluginConfig(self.wp_site,
                             "EPFL_enable_updates_automatic.php").install()

    def generate_plugins(self,
                         only_one=None,
                         force=True,
                         deactivated_plugins=None,
                         **kwargs):
        """
        Get plugin list for WP site and do appropriate actions on them
        - During WordPress site creation, 'only_one' and 'force' are not given. Plugins are installed/configured
        as described in the plugin structure (generic+specific)
        - If the WordPress site already exists, updates are performed on installed plugins, depending on information
        present in the plugin structure. Those updates can target a single plugin ('only_one') and be either
        non-intrusive (only add new options, deactivate instead of delete) or intrusive (overwrite existing
        options, deactivate AND delete)

        Arguments keywords
        :param only_one: Plugin name for which we do the action. If not given, all plugins are processed
        :param force:    True|False
                           - if False
                              - Plugin(s) to be uninstalled will be only deactivated
                              - Only new options will be added to plugin(s)
                           - if True
                              - Plugin(s) to be uninstalled will be deactivated AND uninstalled (deleted)
                              - New plugin options will be added and existing ones will be overwritten
        :param deactivated_plugins: List of plugins to leave in 'deactivated' state after installation.
        """
        logging.warning("%s - Add parameter for 'batch file' (YAML)",
                        repr(self))
        # The batch config file (config-lot1.yml) needs to be replaced by something clean as soon as we have
        # "batch" information in the source of truth!
        plugin_list = WPPluginList(settings.PLUGINS_CONFIG_GENERIC_FOLDER,
                                   'config-lot1.yml',
                                   settings.PLUGINS_CONFIG_SPECIFIC_FOLDER,
                                   self._site_params)

        # Looping through plugins to install
        for plugin_name, config_dict in plugin_list.plugins(
                self.wp_site.name).items():

            # If a filter on plugin was given and it's not the current plugin, we skip
            if only_one is not None and only_one != plugin_name:
                continue

            # Fetch proper PluginConfig class and create instance
            plugin_class = Utils.import_class_from_string(
                config_dict.config_class)
            plugin_config = plugin_class(self.wp_site, plugin_name,
                                         config_dict)

            # If we have to uninstall the plugin
            if config_dict.action == settings.PLUGIN_ACTION_UNINSTALL:

                logging.info("%s - Plugins - %s: Uninstalling...", repr(self),
                             plugin_name)
                if plugin_config.is_installed:
                    if force:
                        plugin_config.uninstall()
                        logging.info("%s - Plugins - %s: Uninstalled!",
                                     repr(self), plugin_name)
                    else:
                        logging.info(
                            "%s - Plugins - %s: Deactivated only! (use --force to uninstall)",
                            repr(self), plugin_name)
                        plugin_config.set_state(False)
                else:
                    logging.info("%s - Plugins - %s: Not installed!",
                                 repr(self), plugin_name)

            else:  # We have to install the plugin (or it is already installed)

                # We may have to install or do nothing (if we only want to deactivate plugin)
                if config_dict.action == settings.PLUGIN_ACTION_INSTALL:
                    logging.info("%s - Plugins - %s: Installing...",
                                 repr(self), plugin_name)
                    if not plugin_config.is_installed:
                        plugin_config.install()
                        logging.info("%s - Plugins - %s: Installed!",
                                     repr(self), plugin_name)
                    else:
                        logging.info("%s - Plugins - %s: Already installed!",
                                     repr(self), plugin_name)

                # By default, after installation, the plugin is deactivated. So if it has to stay deactivated,
                # we skip the "change state" process
                if deactivated_plugins and plugin_name in deactivated_plugins:
                    logging.info(
                        "%s - Plugins - %s: Deactivated state forced...",
                        repr(self), plugin_name)

                else:
                    logging.info("%s - Plugins - %s: Setting state...",
                                 repr(self), plugin_name)
                    plugin_config.set_state()

                    if plugin_config.is_activated:
                        logging.info("%s - Plugins - %s: Activated!",
                                     repr(self), plugin_name)
                    else:
                        logging.info("%s - Plugins - %s: Deactivated!",
                                     repr(self), plugin_name)

                # Configure plugin
                plugin_config.configure(force=force)

    def update_plugins(self, only_one=None, force=False):
        """
        Update plugin list:
        - Install missing plugins
        - Update plugin state (active/inactive)
        - For plugins that are not required anymore
          + if force -> Deactivate & uninstall
          + if not force -> Deactivate only
        - For plugin options
          + if force -> Overwrite existing options
          + not force -> only add new options

        Note: This function exists to be overridden if necessary in a child class.

        Arguments keywords
        only_one -- (optional) name of the single plugin to update. If not given, all plugins are updated.
        force -- True|False, tells if we really have to uninstall a plugin marked as "uninstall".
                 If False, the plugin is only deactivated.
        """
        # check we have a clean place first
        if not self.wp_config.is_installed:
            logging.error("{} - Wordpress site doesn't exists".format(
                repr(self)))
            return False

        self.generate_plugins(only_one=only_one, force=force)
        return True

    def clean(self):
        """
        Completely clean a WordPress install, DB and files.
        """
        # retrieve db_infos
        try:
            db_name = self.wp_config.db_name
            db_user = self.wp_config.db_user

            # clean db
            logging.info("%s - cleaning up DB", repr(self))
            if not self.run_mysql(
                    '-e "DROP DATABASE IF EXISTS {};"'.format(db_name)):
                logging.error("%s - could not drop DATABASE %s", repr(self),
                              db_name)

            if not self.run_mysql('-e "DROP USER {};"'.format(db_user)):
                logging.error("%s - could not drop USER %s", repr(self),
                              db_name, db_user)

            # clean directories before files
            logging.info("%s - removing files", repr(self))
            for dir_path in settings.WP_DIRS:
                path = os.path.join(self.wp_site.path, dir_path)
                if os.path.exists(path):
                    shutil.rmtree(path)

            # clean files
            for file_path in settings.WP_FILES:
                path = os.path.join(self.wp_site.path, file_path)
                if os.path.exists(path):
                    os.remove(path)

        # handle case where no wp_config found
        except (ValueError, subprocess.CalledProcessError) as err:
            logging.warning("%s - could not clean DB or files: %s", repr(self),
                            err)

    def active_dual_auth(self):
        """
        Activate dual authentication (for development only)
        """
        cmd = "option update plugin:epfl_tequila:has_dual_auth 1"
        self.run_wp_cli(cmd)
        logging.debug("Dual authenticate is activated")

    def install_basic_auth_plugin(self):
        """
        Install and activate the basic auth plugin.

        This plugin is used to communicate with the REST API of the WordPress site.
        """
        zip_path = os.path.join(settings.EXPORTER_DATA_PATH, 'Basic-Auth.zip')
        cmd = "plugin install --activate {}".format(zip_path)
        self.run_wp_cli(cmd)
        logging.debug("Basic-Auth plugin is installed and activated")

    def uninstall_basic_auth_plugin(self):
        """
        Uninstall basic auth plugin

        This plugin is used to communicate with the REST API of the WordPress site.
        """
        # Uninstall basic-auth plugin
        cmd = "plugin uninstall Basic-Auth --deactivate"
        self.run_wp_cli(cmd)
        logging.debug("Basic-Auth plugin is uninstalled")
Exemplo n.º 33
0
def main():
    name = "MIPWrangler"
    libs = "seekdeep:v3.0.0"
    args = genHelper.parseNjhConfigureArgs()
    cmd = genHelper.mkConfigCmd(name, libs, sys.argv, private=True)
    Utils.run(cmd)
Exemplo n.º 34
0
    def run(self, argv):

        #==================================================
        # Process/Load command-line args and config file
        #==================================================

        self.parse_parameters(argv)

        # load the config file
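        # Note on the merge below: in dict(temp2.items() + temp1.items()) the
        # command-line values (temp1) come last, so they overwrite duplicate keys
        # from the config file (temp2). This list-concatenation idiom is Python 2 only.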
        if (self.config["config_filename"] is not None):
            temp1 = self.config
            temp2 = Utils.load_config(self.config["config_filename"])
            self.config = dict(temp2.items() + temp1.items())
        else:
            if Utils.is_readable("default.cfg"):
                self.display.error(
                    "a CONFIG FILE was not specified...  defaulting to [default.cfg]"
                )
                print
                temp1 = self.config
                temp2 = Utils.load_config("default.cfg")
                self.config = dict(temp2.items() + temp1.items())
            else:
                self.display.error("a CONFIG FILE was not specified...")
                print
                sys.exit()

        # set verbosity level
        if (self.config['verbose'] >= 1):
            self.display.enableVerbose()
        if (self.config['verbose'] > 1):
            self.display.enableDebug()

        # set logging path
        self.logpath = os.getcwd() + "/" + self.config[
            "domain_name"] + "_" + self.config["phishing_domain"] + "/"
        if not os.path.exists(os.path.dirname(self.logpath)):
            os.makedirs(os.path.dirname(self.logpath))

        self.display.setLogPath(self.logpath)
        #print self.logpath
        self.db = MyDB(sqlite_file=self.logpath)

        self.display.log("STARTTIME=%s\n" %
                         (time.strftime("%Y/%m/%d %H:%M:%S")),
                         filename="INFO.txt")
        self.display.log("TARGETDOMAIN=%s\n" % (self.config["domain_name"]),
                         filename="INFO.txt")
        self.display.log("PHISHINGDOMAIN=%s\n" %
                         (self.config["phishing_domain"]),
                         filename="INFO.txt")

        #==================================================
        # Load/Gather target email addresses
        #==================================================

        if ((self.config["email_list_filename"] is not None)
                or (self.config["gather_emails"] == True)):
            print
            self.display.output("Obtaining list of email targets")
            if (self.config["always_yes"]
                    or self.display.yn("Continue", default="y")):

                # if an external emaillist file was specified, read it in
                if self.config["email_list_filename"] is not None:
                    file = open(self.config["email_list_filename"], 'r')
                    temp_list = file.read().splitlines()
                    self.display.verbose(
                        "Loaded [%s] email addresses from [%s]" %
                        (len(temp_list), self.config["email_list_filename"]))
                    self.email_list += temp_list

                # gather email addresses
                if self.config["gather_emails"] == True:
                    if (self.config["domain_name"] == ""):
                        self.display.error(
                            "No target domain specified.  Can not gather email addresses."
                        )
                    else:
                        self.display.verbose(
                            "Gathering emails via built-in methods")
                        self.display.verbose(Gather.get_sources())
                        if (not self.gather):
                            self.gather = Gather(self.config["domain_name"],
                                                 display=self.display)
                        temp_list = self.gather.emails()
                        self.display.verbose(
                            "Gathered [%s] email addresses from the Internet" %
                            (len(temp_list)))
                        self.email_list += temp_list
                        print

                        # gather email addresses from external sources
                        if (self.config["gather_emails"]
                                == True) and (self.config["enable_externals"]
                                              == True):
                            # theHarvester
                            self.display.verbose(
                                "Gathering emails via theHarvester")
                            thr = theHarvester(
                                self.config["domain_name"],
                                self.config["theharvester_path"],
                                display=self.display)
                            out = thr.run()
                            if (not out):
                                temp_list = thr.emails()
                                self.display.verbose(
                                    "Gathered [%s] email addresses from theHarvester"
                                    % (len(temp_list)))
                                self.email_list += temp_list
                            else:
                                self.display.error(out)
                            print

                            # Recon-NG (disabled)
                            # self.display.verbose("Gathering emails via Recon-NG")
                            # temp_list = reconng(self.config["domain_name"], self.config["reconng_path"]).gather()
                            # self.display.verbose("Gathered [%s] email addresses from Recon-NG" % (len(temp_list)))
                            # self.email_list += temp_list

                # sort/unique email list
                self.email_list = Utils.unique_list(self.email_list)
                self.email_list.sort()

                self.db.addUsers(self.email_list)

                # print list of email addresses
                self.display.verbose("Collected [%s] unique email addresses" %
                                     (len(self.email_list)))
                self.display.print_list("EMAIL LIST", self.email_list)
                for email in self.email_list:
                    self.display.log(email + "\n",
                                     filename="email_targets.txt")

        #==================================================
        # Gather dns hosts
        #==================================================

        if (self.config["gather_dns"] == True):
            print
            self.display.output("Obtaining list of host on the %s domain" %
                                (self.config["domain_name"]))
            self.display.verbose("Gathering hosts via built-in methods")

            # Gather hosts from internet search
            self.display.verbose(Gather.get_sources())
            if (not self.gather):
                self.gather = Gather(self.config["domain_name"],
                                     display=self.display)
            temp_list = self.gather.hosts()
            self.display.verbose(
                "Gathered [%s] hosts from the Internet Search" %
                (len(temp_list)))
            self.hostname_list += temp_list

            # Gather hosts from DNS lookups
            temp_list = Dns.xfr(self.config["domain_name"])
            self.display.verbose("Gathered [%s] hosts from DNS Zone Transfer" %
                                 (len(temp_list)))
            self.hostname_list += temp_list

            temp_list = Dns.ns(self.config["domain_name"])
            temp_list = Utils.filterList(temp_list, self.config["domain_name"])
            self.display.verbose("Gathered [%s] hosts from DNS NS lookups" %
                                 (len(temp_list)))
            self.hostname_list += temp_list

            temp_list = Dns.mx(self.config["domain_name"])
            temp_list = Utils.filterList(temp_list, self.config["domain_name"])
            self.display.verbose("Gathered [%s] hosts from DNS MX lookups" %
                                 (len(temp_list)))
            self.hostname_list += temp_list

            # Gather hosts from dictionary lookup
            temp_list = Dns.brute(self.config["domain_name"],
                                  display=self.display)
            self.display.verbose(
                "Gathered [%s] hosts from DNS BruteForce/Dictionay Lookup" %
                (len(temp_list)))
            self.hostname_list += temp_list

            # sort/unique hostname list
            self.hostname_list = Utils.unique_list(self.hostname_list)
            self.hostname_list.sort()

            self.db.addHosts(self.hostname_list)

            # print list of hostnames
            self.display.verbose("Collected [%s] unique host names" %
                                 (len(self.hostname_list)))
            self.display.print_list("HOST LIST", self.hostname_list)

        #==================================================
        # Perform Port Scans
        #==================================================

        if (self.config["gather_dns"] == True):
            self.display.output(
                "Performing basic port scans of any identified hosts.")
            self.server_list[80] = []
            self.server_list[443] = []
            self.server_list[110] = []
            self.server_list[995] = []
            self.server_list[143] = []
            self.server_list[993] = []
            self.server_list[25] = []

            for host in self.hostname_list:
                openports = portscan.scan(host,
                                          [25, 80, 110, 143, 443, 993, 995])
                found = False
                for port in openports:
                    self.db.addPort(port, host)
                    if (port == 80):
                        self.display.verbose("Found website at: %s 80" %
                                             (host))
                        self.server_list[80].append(host)
                        found = True
                    elif (port == 443):
                        self.display.verbose("Found website at: %s 443" %
                                             (host))
                        self.server_list[443].append(host)
                        found = True
                    elif (port == 110):
                        self.display.verbose("Found POP at    : %s 110" %
                                             (host))
                        self.server_list[110].append(host)
                        found = True
                    elif (port == 995):
                        self.display.verbose("Found POPS at   : %s 995" %
                                             (host))
                        self.server_list[995].append(host)
                        found = True
                    elif (port == 143):
                        self.display.verbose("Found IMAP at   : %s 143" %
                                             (host))
                        self.server_list[143].append(host)
                        found = True
                    elif (port == 993):
                        self.display.verbose("Found IMAPS at  : %s 993" %
                                             (host))
                        self.server_list[993].append(host)
                        found = True
                    elif (port == 25):
                        self.display.verbose("Found SMTP at   : %s 25" %
                                             (host))
                        self.server_list[25].append(host)
                        found = True
                # log the host once if any of the scanned ports was open
                if (found):
                    self.display.log(host + "\n", filename="hosts.txt")

        #==================================================
        # Profile Web Sites
        #==================================================

        if (self.config["profile_domain"] == True):
            self.display.output(
                "Determining if any of the identified hosts have web servers.")

            for host in self.server_list[80]:
                p = profiler()
                profile_results = p.run("http://" + host, debug=False)
                if (profile_results and (len(profile_results) > 0)):
                    max_key = ""
                    max_value = 0
                    for key, value in profile_results:
                        if (value.getscore() > max_value):
                            max_key = key
                            max_value = value.getscore()
                    if (max_value > 0):
                        self.display.verbose(
                            "POSSIBLE MATCH FOR [http://%s] => [%s]" %
                            (host, max_key))
                        self.profile_valid_web_templates.append(max_key)
                else:
                    if (p.hasLogin("http://" + host)):
                        self.profile_dynamic_web_templates.append("http://" +
                                                                  host)

            for host in self.server_list[443]:
                p = profiler()
                profile_results = p.run("https://" + host, debug=False)
                if (profile_results and (len(profile_results) > 0)):
                    max_key = ""
                    max_value = 0
                    for key, value in profile_results:
                        if (value.getscore() > max_value):
                            max_key = key
                            max_value = value.getscore()
                    if (max_value > 0):
                        self.display.verbose(
                            "POSSIBLE MATCH FOR [https://%s] => [%s]" %
                            (host, max_key))
                        self.profile_valid_web_templates.append(max_key)
                else:
                    if (p.hasLogin("https://" + host)):
                        self.display.verbose(
                            "POSSIBLE DYNAMIC TEMPLATE SITE [https://%s]" %
                            (host))
                        self.profile_dynamic_web_templates.append("https://" +
                                                                  host)

            self.profile_valid_web_templates = Utils.unique_list(
                self.profile_valid_web_templates)
            self.profile_valid_web_templates.sort()
            # print list of valid templates
            self.display.verbose("Collected [%s] valid web templates" %
                                 (len(self.profile_valid_web_templates)))
            self.display.print_list("VALID TEMPLATE LIST",
                                    self.profile_valid_web_templates)

            self.profile_dynamic_web_templates = Utils.unique_list(
                self.profile_dynamic_web_templates)
            self.profile_dynamic_web_templates.sort()

            # print list of dynamic templates
            self.display.verbose("Collected [%s] dynamic web templates" %
                                 (len(self.profile_dynamic_web_templates)))
            self.display.print_list("DYNAMIC TEMPLATE LIST",
                                    self.profile_dynamic_web_templates)

            self.display.output("Cloning any DYNAMIC sites")
            for template in self.profile_dynamic_web_templates:
                sc = SiteCloner(clone_dir=self.logpath)
                tdir = sc.cloneUrl(template)
                self.display.verbose("Cloning [%s] to [%s]" % (template, tdir))
                self.db.addWebTemplate(ttype="dynamic",
                                       src_url=template,
                                       tdir=tdir)

            for f in os.listdir(self.config["web_template_path"]):
                template_file = os.path.join(self.config["web_template_path"],
                                             f) + "/CONFIG"
                #                self.db.addWebTemplate(ttype="static", src_url="", tdir=os.path.join(self.config["web_template_path"], f))
                for line in open(template_file).readlines():
                    for tem in self.profile_valid_web_templates:
                        if re.match("^VHOST=\s*" + tem + "\s*$", line,
                                    re.IGNORECASE):
                            self.db.addWebTemplate(
                                ttype="static",
                                src_url="",
                                tdir=os.path.join(
                                    self.config["web_template_path"], f))
                            break

        #==================================================
        # Load web sites
        #==================================================

        if self.config["enable_web"] == True:
            print
            self.display.output("Starting phishing webserver")
            if (self.config["always_yes"]
                    or self.display.yn("Continue", default="y")):

                path = os.path.dirname(os.path.realpath(__file__))
                # Start process
                cmd = [path + "/../web.py", Utils.compressDict(self.config)]
                self.webserver = subprocess.Popen(cmd,
                                                  shell=False,
                                                  stdout=subprocess.PIPE)

                # monitor output to gather website information
                while True:
                    line = self.webserver.stdout.readline()
                    line = line.strip()
                    if line == 'Websites loaded and launched.':
                        break
                    if line != '':
                        self.display.verbose(line)
                        match = re.search("Started website", line)
                        VHOST = ""
                        PORT = ""
                        if match:
                            parts = line.split("[")
                            VHOST = parts[1].split("]")
                            VHOST = VHOST[0].strip()
                            PORT = parts[2].split("]")
                            PORT = PORT[0].strip()
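                            # PORT currently holds 'http://host:port'; drop the
                            # 7-character 'http://' prefix to keep only 'host:port'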
                            PORT = PORT[7:]
                            # keep the URL clean
                            # if port is 80, then it does not need to be included in the URL
                            if (PORT[-3:] == ":80"):
                                PORT = PORT[:-3]

                            self.config[VHOST + "_port"] = PORT
                            self.config[VHOST + "_vhost"] = VHOST
                            Utils.screenCaptureWebSite(
                                "http://" + PORT,
                                self.logpath + PORT + "_" + VHOST + ".png")
                            Utils.screenCaptureWebSite(
                                "http://" + VHOST + "." +
                                self.config["phishing_domain"],
                                self.logpath + VHOST + "." +
                                self.config["phishing_domain"] + ".png")

                # Write PID file
                pidfilename = os.path.join(self.pid_path, "spfwebsrv.pid")
                pidfile = open(pidfilename, 'w')
                pidfile.write(str(self.webserver.pid))
                pidfile.close()
                self.webserverpid = self.webserver.pid
                self.display.verbose("Started WebServer with pid = [%s]" %
                                     self.webserver.pid)

        #==================================================
        # Build array of email templates
        #==================================================

        if (((self.email_list is not None) and (self.email_list))
                and ((self.config["enable_email_sending"] == True) or
                     (self.config["simulate_email_sending"] == True))):
            print
            self.display.verbose("Locating phishing email templates")
            if (self.config["always_yes"]
                    or self.display.yn("Continue", default="y")):

                # loop over each email template
                for f in os.listdir("templates/email/"):
                    template_file = os.path.join("templates/email/", f)
                    self.display.debug(
                        "Found the following email template: [%s]" %
                        template_file)

                    if ((Utils.is_readable(template_file))
                            and (os.path.isfile(template_file))):
                        # read in the template SUBJECT, TYPE, and BODY
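                        # A hypothetical sketch of the expected template format
                        # (keys match the regexes below, values are made up):
                        #   TYPE="owa"
                        #   SUBJECT="Action required: verify your mailbox"
                        #   BODY="Please sign in at [[TARGET]] to keep your account active."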
                        TYPE = ""
                        SUBJECT = ""
                        BODY = ""
                        with open(template_file, "r") as myfile:
                            for line in myfile.readlines():
                                match = re.search("TYPE=", line)
                                if match:
                                    TYPE = line.replace('"', "")
                                    TYPE = TYPE.split("=")
                                    TYPE = TYPE[1].lower().strip()
                                match2 = re.search("SUBJECT=", line)
                                if match2:
                                    SUBJECT = line.replace('"', "")
                                    SUBJECT = SUBJECT.split("=")
                                    SUBJECT = SUBJECT[1].strip()
                                match3 = re.search("BODY=", line)
                                if match3:
                                    BODY = line.replace('"', "")
                                    BODY = BODY.replace(r'\n', "\n")
                                    BODY = BODY.split("=")
                                    BODY = BODY[1].strip()
                        self.email_templates[TYPE].append(
                            EmailTemplate(TYPE, SUBJECT, BODY))

        #==================================================
        # Generate/Send phishing emails
        #==================================================

        if ((self.config["enable_email_sending"] == True)
                or (self.config["simulate_email_sending"] == True)):
            if ((self.config["determine_smtp"] == "1")
                    and (self.config["use_specific_smtp"] == "1")):
                self.display.error(
                    "ONLY 1 of DETERMINE_SMTP or USE_SPECIFIC_SMTP can be enabled at a time."
                )
            else:
                print
                self.display.output("Sending phishing emails")
                if (self.config["always_yes"]
                        or self.display.yn("Continue", default="y")):

                    templates_logged = []
                    # do we have any emails to send?
                    if self.email_list:
                        temp_target_list = self.email_list
                        temp_delay = 1
                        if (self.config["email_delay"] is not None):
                            temp_delay = int(self.config["email_delay"])
                        send_count = 0
                        # while there are still target email address, loop
                        while (temp_target_list
                               and (send_count <
                                    (int(self.config["emails_max"])))):
                            # inc number of emails we have attempted to send
                            send_count = send_count + 1
                            # delay requested amount of time between sending emails
                            time.sleep(temp_delay)
                            # for each type of email (citrix, owa, office365, ...)
                            for key in self.email_templates:
                                # double check
                                if temp_target_list:
                                    # for each email template of the given type
                                    for template in self.email_templates[key]:
                                        # double check
                                        if temp_target_list:
                                            # grab a new target email address
                                            target = temp_target_list.pop(0)
                                            self.display.verbose(
                                                "Sending Email to [%s]" %
                                                target)
                                            #FROM = "support@" + self.config["phishing_domain"]
                                            FROM = self.config["smtp_fromaddr"]

                                            SUBJECT = template.getSUBJECT()
                                            BODY = template.getBODY()

                                            # perform necessary SEARCH/REPLACE
                                            if self.config[
                                                    "enable_host_based_vhosts"] == "1":
                                                BODY = BODY.replace(
                                                    r'[[TARGET]]', "http://" +
                                                    key + "." + self.
                                                    config["phishing_domain"])
                                                if self.config[
                                                        "default_web_port"] != "80":
                                                    BODY += ":" + self.config[
                                                        "default_web_port"]
                                            else:
                                                BODY = BODY.replace(
                                                    r'[[TARGET]]', "http://" +
                                                    self.config[key + "_port"])

                                            # log
                                            if (key not in templates_logged):
                                                self.display.log(
                                                    "----------------------------------------------\n\n"
                                                    + "TO: <XXXXX>\n" +
                                                    "FROM: " + FROM + "\n" +
                                                    "SUBJECT: " + SUBJECT +
                                                    "\n\n" + BODY + "\n\n" +
                                                    "----------------------------------------------\n\n"
                                                    + "TARGETS:\n" +
                                                    "--------\n",
                                                    filename="email_template_"
                                                    + key + ".txt")
                                                templates_logged.append(key)
                                            self.display.log(
                                                target + "\n",
                                                filename="email_template_" +
                                                key + ".txt")

                                            # send the email
                                            if (self.config[
                                                    "simulate_email_sending"]
                                                    == True):
                                                self.display.output(
                                                    "Would have sent an email to [%s] with subject of [%s], but this was just a test."
                                                    % (target, SUBJECT))
                                            else:
                                                try:
                                                    if self.config[
                                                            "determine_smtp"] == "1":
                                                        emails.send_email_direct(
                                                            target,
                                                            FROM,
                                                            SUBJECT,
                                                            BODY,
                                                            debug=True)
                                                    if self.config[
                                                            "use_specific_smtp"] == "1":
                                                        #self.display.error("[USE_SPECIFIC_SMTP] not implemented")
                                                        print self.config[
                                                            "smtp_fromaddr"]
                                                        emails.send_email_account(
                                                            self.config[
                                                                "smtp_server"],
                                                            int(self.config[
                                                                "smtp_port"]),
                                                            self.config[
                                                                "smtp_user"],
                                                            self.config[
                                                                "smtp_pass"],
                                                            target,
                                                            self.config[
                                                                "smtp_fromaddr"],
                                                            SUBJECT,
                                                            BODY,
                                                            debug=True)
                                                except:
                                                    self.display.error(
                                                        sys.exc_info()[0])

        #==================================================
        # Monitor web sites
        #==================================================

        if self.config["enable_web"] == True:
            print
            self.display.output("Monitoring phishing website activity!")
            self.display.alert(
                "(Press CTRL-C to stop collection and generate report!)")
            if (self.webserver):
                while True:
                    line = self.webserver.stdout.readline()
                    line = line.strip()
                    if (self.config["pillage_email"]):
                        self.pillage(line)
                    self.display.output(line)
Exemplo n.º 35
0
    def parse_parameters(self, argv):
        parser = argparse.ArgumentParser()

        #==================================================
        # Required Args
        #==================================================
        #        requiredgroup = parser.add_argument_group('required arguments')
        #        requiredgroup.add_argument("-d",
        #                            metavar="<domain>",
        #                            dest="domain",
        #                            action='store',
        #                            required=True,
        #                            help="domain name to phish")

        #==================================================
        # Input Files
        #==================================================
        filesgroup = parser.add_argument_group('input files')
        filesgroup.add_argument(
            "-f",
            metavar="<list.txt>",
            dest="email_list_file",
            action='store',
            #                            type=argparse.FileType('r'),
            help="file containing list of email addresses")
        filesgroup.add_argument(
            "-C",
            metavar="<config.txt>",
            dest="config_file",
            action='store',
            #                            type=argparse.FileType('r'),
            help="config file")

        #==================================================
        # Enable Flags
        #==================================================
        enablegroup = parser.add_argument_group('enable flags')
        enablegroup.add_argument(
            "--all",
            dest="enable_all",
            action='store_true',
            help="enable ALL flags... same as (-g --external -s -w -v -v -y)")
        enablegroup.add_argument(
            "--test",
            dest="enable_test",
            action='store_true',
            help=
            "enable all flags EXCEPT sending of emails... same as (-g --external --simulate -w -y -v -v)"
        )
        enablegroup.add_argument(
            "--recon",
            dest="enable_recon",
            action='store_true',
            help=
            "gather info (i.e. email addresses, dns hosts, websites, etc...) same as (-e --dns)"
        )
        enablegroup.add_argument("--external",
                                 dest="enable_external",
                                 action='store_true',
                                 help="enable external tool utilization")
        enablegroup.add_argument(
            "--dns",
            dest="enable_gather_dns",
            action='store_true',
            help="enable automated gathering of dns hosts")
        enablegroup.add_argument(
            "-g",
            dest="enable_gather_email",
            action='store_true',
            help="enable automated gathering of email targets")
        enablegroup.add_argument(
            "-s",
            dest="enable_send_email",
            action='store_true',
            help="enable automated sending of phishing emails to targets")
        enablegroup.add_argument(
            "--simulate",
            dest="simulate_send_email",
            action='store_true',
            help="simulate the sending of phishing emails to targets")
        enablegroup.add_argument(
            "-w",
            dest="enable_web",
            action='store_true',
            help="enable generation of phishing web sites")
        enablegroup.add_argument(
            "-W",
            dest="daemon_web",
            action='store_true',
            help="leave web server running after termination of spf.py")

        #==================================================
        # Advanced Flags
        #==================================================
        advgroup = parser.add_argument_group('ADVANCED')
        advgroup.add_argument(
            "--adv",
            dest="enable_advanced",
            action='store_true',
            help=
            "perform all ADVANCED features same as (--dns --profile --pillage)"
        )
        advgroup.add_argument(
            "--profile",
            dest="profile_domain",
            action='store_true',
            help="profile the target domain (requires the --dns flag)")
        advgroup.add_argument(
            "--pillage",
            dest="pillage_email",
            action='store_true',
            help="auto pillage email accounts (requires the --dns flag)")

        #==================================================
        # Optional Args
        #==================================================
        parser.add_argument("-d",
                            metavar="<domain>",
                            dest="domain",
                            action='store',
                            help="domain name to phish")
        parser.add_argument("-p",
                            metavar="<domain>",
                            dest="phishdomain",
                            default="example.com",
                            action='store',
                            help="newly registered 'phish' domain name")
        parser.add_argument("-c",
                            metavar="<company's name>",
                            dest="company",
                            action='store',
                            help="name of company to phish")
        parser.add_argument("--ip",
                            metavar="<IP address>",
                            dest="ip",
                            default=Utils.getIP(),
                            action='store',
                            help="IP of webserver defaults to [%s]" %
                            (Utils.getIP()))
        parser.add_argument("-v",
                            "--verbosity",
                            dest="verbose",
                            action='count',
                            help="increase output verbosity")

        #==================================================
        # Misc Flags
        #==================================================
        miscgroup = parser.add_argument_group('misc')
        miscgroup.add_argument(
            "-y",
            dest="always_yes",
            action='store_true',
            help="automatically answer yes to all questions")

        args = parser.parse_args()

        # convert parameters to values in the config dict
        self.config["domain_name"] = args.domain
        if (self.config["domain_name"] is None):
            self.config["domain_name"] = ""
        self.config["phishing_domain"] = args.phishdomain
        if (self.config["phishing_domain"] is None):
            self.config["phishing_domain"] = "example.com"
        self.config["company_name"] = args.company
        self.config["ip"] = args.ip
        self.config["config_filename"] = args.config_file
        self.config["email_list_filename"] = args.email_list_file
        self.config["verbose"] = args.verbose
        self.config["gather_emails"] = args.enable_gather_email
        self.config["gather_dns"] = args.enable_gather_dns
        self.config["profile_domain"] = args.profile_domain
        self.config["pillage_email"] = args.pillage_email
        self.config["enable_externals"] = args.enable_external
        self.config["enable_web"] = args.enable_web
        self.config["enable_email_sending"] = args.enable_send_email
        self.config["simulate_email_sending"] = args.simulate_send_email
        self.config["daemon_web"] = args.daemon_web
        self.config["always_yes"] = args.always_yes

        if (args.enable_recon == True):
            self.config["gather_emails"] = True
            self.config["gather_dns"] = True

        if (args.enable_all == True):
            self.config["gather_emails"] = True
            self.config["enable_externals"] = True
            self.config["enable_web"] = True
            self.config["enable_email_sending"] = True
            self.config["verbose"] = 2
            self.config["always_yes"] = True

        if (args.enable_test == True):
            self.config["gather_emails"] = True
            self.config["enable_externals"] = True
            self.config["simulate_email_sending"] = True
            self.config["enable_web"] = True
            self.config["always_yes"] = True
            self.config["verbose"] = 2

        if (args.enable_advanced == True):
            self.config["gather_dns"] = True
            self.config["profile_domain"] = True
            self.config["pillage_email"] = True

        if (self.config["profile_domain"] and not self.config["gather_dns"]):
            self.config["profile_domain"] = False
            self.display.error(
                "--profile requires the --dns option to be enabled as well.")

        if (self.config["pillage_email"] and not self.config["gather_dns"]):
            self.config["pillage_email"] = False
            self.display.error(
                "--pillage requires the --dns option to be enabled as well.")

        good = False
        if (self.config["gather_emails"] or self.config["enable_externals"]
                or self.config["enable_web"]
                or self.config["enable_email_sending"]
                or self.config["simulate_email_sending"]
                or self.config["gather_dns"] or self.config["profile_domain"]
                or self.config["pillage_email"]):
            good = True
        if (not good):
            self.display.error(
                "Please enable at least one of the following parameters: -g --external --dns -s --simulate -w ( --all --test --recon --adv )"
            )
            print
            parser.print_help()
            sys.exit(1)
Exemplo n.º 36
0
class Preview(BaseExtension):
    def __init__(self):
        BaseExtension.__init__(self)
        self.utils = Utils()
        self.preview_url_args = ''  #'?start=' #'?start=0&tag='
        self.preview_next_page = ''
        self.preview_page_step = 1
        self.preview_page_start = 1
        self.preview_page_max = 10
        self.preview_frame_width = 471
        self.preview_frame_height = 700
        self.preview_frame_check = True

    def initArgs(self, url, resourceType):
        if url.startswith('http') == False and url.find('[') != -1:
            url = url[url.find('(') + 1:url.find(')')]
        self.preview_url_args = Config.preview_url_args  #'?start=' #'?start=0&tag='
        self.preview_page_step = Config.preview_page_step
        self.preview_page_start = Config.preview_page_start
        self.preview_page_max = Config.preview_page_max
        self.preview_frame_width = Config.preview_frame_width
        self.preview_frame_height = Config.preview_frame_height
        self.preview_frame_check = Config.preview_frame_check

        for k, v in Config.preview_dict.items():
            if url.lower().find(k.lower()) != -1 or (
                    resourceType != '' and k.lower() == resourceType.lower()):
                print 'matched:' + k
                print v
                if v.has_key('url_args'):
                    self.preview_url_args = v['url_args']
                if v.has_key('next_page'):
                    self.preview_next_page = v['next_page']
                if v.has_key('page_step'):
                    self.preview_page_step = v['page_step']
                if v.has_key('page_start'):
                    self.preview_page_start = v['page_start']
                if v.has_key('page_max'):
                    self.preview_page_max = v['page_max']
                if v.has_key('frame_width'):
                    self.preview_frame_width = v['frame_width']
                if v.has_key('frame_height'):
                    self.preview_frame_height = v['frame_height']
                if v.has_key('frame_check'):
                    self.preview_frame_check = v['frame_check']
                #if self.preview_smart_engine == '' and self.utils.search_engin_dict.has_key(k):
                #    self.preview_smart_engine = k
                break

    def previewPages(self, texts, urls):

        htmlList, notSuportLink = self.utils.genAllInOnePage(
            texts,
            urls,
            frameWidth=self.preview_frame_width,
            frameHeight=self.preview_frame_height,
            frameCheck=self.preview_frame_check,
            changeBG=False)
        if len(htmlList) > 0:
            print htmlList[0]
            return htmlList[0]
        return ''

    def excute(self, form_dict):
        rID = form_dict['rID'].encode('utf8')
        url = form_dict['url'].encode('utf8')
        screenWidth = form_dict['screenWidth'].encode('utf8')
        screenHeight = form_dict['screenHeight'].encode('utf8')
        print 'screenWidth: ' + screenWidth
        print 'screenHeight: ' + screenHeight
        self.initArgs(url, '')
        texts = []
        urls = []
        if self.preview_url_args != '' or self.preview_next_page != '':
            if self.preview_url_args != '':

                for page in range(self.preview_page_start,
                                  self.preview_page_max + 1,
                                  self.preview_page_step):
                    texts.append(str(page))
                    urls.append(url + self.preview_url_args + str(page))

            return self.previewPages(texts, urls)
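
        # Template sketch: a url like 'http://site/[a,b]/page' expands to
        # 'http://site/a/page' and 'http://site/b/page', while '[k1,k2](engine)'
        # runs each key through the named search engine (assuming splitText
        # splits the comma-separated keys).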

        if url.find('[') != -1 and url.find(']') != -1:
            keys = []
            value = ''

            if url.startswith('['):
                keys = self.utils.splitText(url[1:url.find(']')])
                value = url[url.find('(') + 1:url.find(')')]
            else:
                part1 = url[0:url.find('[')]
                part2 = url[url.find(']') + 1:]
                keys = self.utils.splitText(url[url.find('[') +
                                                1:url.find(']')])
                value = part1 + '%s' + part2

            for k in keys:
                texts.append(k.replace('%20', ' '))
                if value.startswith('http'):
                    urls.append(value.replace('%s', k))
                else:
                    urls.append(
                        self.utils.toQueryUrl(self.utils.getEnginUrl(value),
                                              k))

            return self.previewPages(texts, urls)
        elif url.find(',') != -1:
            urls = url.split(',')
            return self.previewPages(urls, urls)

        if url == '':
            url = self.utils.toSmartLink(form_dict['rTitle'].encode('utf8'))
        src = ''
        width = str(int(screenWidth) / 3 + 50)
        height = str(int(screenHeight) / 3 + 50)
        column = form_dict['column']
        if url.startswith('file') or url.startswith('/User'):
            subprocess.check_output(
                "/Applications/Google\ Chrome.app/Contents/MacOS/Google\ Chrome "
                + url,
                shell=True)
            return 'ok'

        if column == '1':
            width = str(int(screenWidth) - 70)
            height = str(int(screenHeight) - 150)
        elif column == '2':
            width = str(int(screenWidth) / 2 - 20)
            height = str(int(screenHeight) / 2 - 50)

        if url.find('youtube') != -1 and url.find('watch') != -1:
            src = "https://www.youtube.com/embed/" + url[url.rfind('v=') + 2:]
            if column == '1':
                width = str(int(screenWidth) / 3 + 200)
                height = str(int(screenHeight) / 2)
        elif url.find('163') != -1:
            src = url.replace('open', 'v')
        elif rID.find('arxiv') != -1:
            arxiv_id = rID[rID.find('arxiv-') + 6:].replace('-', '.')
            version = self.utils.get_last_arxiv_version(arxiv_id)
            src = 'http://arxiv.org/pdf/' + arxiv_id + version
        else:
            src = url
            if self.utils.suportFrame(url, 5) == False:
                return url

        html = '<div class="ref"><br><iframe width="' + width + '" height="' + height + '" src="' + self.getUrl(
            src) + '" frameborder="0" allowfullscreen></iframe>'
        if url.find('youtube') != -1 and url.find('watch') != -1:
            r = requests.get(url)
            soup = BeautifulSoup(r.text)
            div = soup.find('div', id='watch-uploader-info')
            div2 = soup.find('div', id='watch-description-text')
            div_watch = soup.find('div', class_='watch-view-count')
            div_user = soup.find('div', class_='yt-user-info')

            text = div_user.text.strip()
            html += '<br><br><div style="background-color:#F8F8FF; border-radius: 5px 5px 5px 5px; width:auto;">'
            html += '<a target="_blank" href="' + 'https://www.youtube.com' + div_user.a[
                'href'] + '">' + text + '</a>'
            count = 0
            html +=  ' ' + div_watch.text.strip() + '<br>' +\
              div.text.strip() + '<br>' + div2.text.strip() + '<br><br>'
            for type in ['videos', 'playlists']:

                ref_divID = form_dict['divID'].encode('utf8')
                rID = form_dict['rID']
                ref_divID += '-' + type
                linkID = 'a-' + ref_divID[ref_divID.find('-') + 1:]
                appendID = count
                count += 1
                link = 'https://www.youtube.com' + div_user.a[
                    'href'] + '/' + type
                script = self.utils.genMoreEnginScript(
                    linkID, ref_divID,
                    "loop-" + rID.replace(' ', '-') + '-' + str(appendID),
                    text, link, '-')

                if script != "":
                    html += '<div>'
                html += '<a target="_blank" href="' + link + '">' + text + '</a>' + " 's " + type
                if script != "":
                    html += self.utils.genMoreEnginHtml(
                        linkID, script.replace("'", '"'), '...', ref_divID, '',
                        False)
                    html += '</div>'
            html += '</div>'

        return html + '</div>'

    def getUrl(self, url):
        '''
        if 'weixin' in url:
            r = requests.get(url)
            soup = BeautifulSoup(r.text)
            p = soup.find('p', class_='tit')

            url = p.a['href']

        print 'getUrl:' + url
        '''
        return url

    def check(self, form_dict):
        url = form_dict['url'].encode('utf8')
        return (url != None and url != '' and url.startswith('http')
                and url.find(Config.ip_adress) == -1) or url.find('[') != -1
Exemplo n.º 37
0
def main(argv):
    """" Main function
    
    This is the flow of actions of this main
    0: Initial steps
    1: Load data (X and y_emb) and needed dictionaries (activity-to-int, etc.)    
    2: Generate K partitions of the dataset (KFold cross-validation)
    3: For each partition (train, test):
       3.1: Build the LSTM model
       3.2: Manage imbalanced data in the training set (SMOTE?)
       3.3: Train the model with the imbalance-corrected training set and use the test set to validate
       3.4: Store the generated learning curves and metrics with the best model (ModelCheckpoint? 
               If results get worse with epochs, use EarlyStopping)
    4: Calculate the mean and std for the metrics obtained for each partition and store
    """
    # 0: Initial steps
    print_configuration_info()
    # fix random seed for reproducibility
    np.random.seed(7)
    # Make an instance of the class Utils
    utils = Utils()

    # Obtain the file number
    maxnumber = utils.find_file_maxnumber(RESULTS + DATASET + '/')
    filenumber = maxnumber + 1
    print('file number: ', filenumber)

    # 1: Load data (X and y_emb)
    print('Loading data')

    # Load activity_dict where every activity name has its associated word embedding
    with open(ACTIVITY_EMBEDDINGS) as f:
        activity_dict = json.load(f)

    # Load the activity indices
    with open(ACTIVITY_TO_INT) as f:
        activity_to_int_dict = json.load(f)

    # Load the index to activity relations
    with open(INT_TO_ACTIVITY) as f:
        int_to_activity = json.load(f)

    # Load embedding matrix, X and y sequences (for y, load both, the embedding and index version)
    embedding_matrix = np.load(EMBEDDING_WEIGHTS)
    X = np.load(X_FILE)
    y_emb = np.load(Y_EMB_FILE)
    # We need the following two lines for StratifiedKFold
    y_index_one_hot = np.load(Y_INDEX_FILE)
    y_index = np.argmax(y_index_one_hot, axis=1)

    # To use oversampling methods in imbalance-learn, we need an activity_index:embedding relation
    # Build it using INT_TO_ACTIVITY and ACTIVITY_EMBEDDINGS files
    activity_index_to_embedding = {}
    for key in int_to_activity:
        activity_index_to_embedding[key] = activity_dict[int_to_activity[key]]

    max_sequence_length = X.shape[
        1]  # TODO: change this to fit the maximum sequence length of all the datasets
    #total_activities = y_train.shape[1]
    ACTION_MAX_LENGTH = embedding_matrix.shape[1]

    print('X shape:', X.shape)
    print('y shape:', y_emb.shape)
    print('y index shape:', y_index.shape)

    print('max sequence length:', max_sequence_length)
    print('features per action:', embedding_matrix.shape[0])
    print('Action max length:', ACTION_MAX_LENGTH)

    # 2: Generate K partitions of the dataset (KFold cross-validation)
    # TODO: Decide between KFold or StratifiedKFold
    # if StratifiedKFold
    skf = StratifiedKFold(n_splits=FOLDS)

    # if KFold
    #kf = KFold(n_splits = FOLDS)

    fold = 0
    # 3: For each partition (train, test):
    metrics_per_fold = utils.init_metrics_per_fold()
    best_epochs = []

    #for train, test in kf.split(X):
    for train, test in skf.split(X, y_index):
        print("%d Train: %s,  test: %s" % (fold, len(train), len(test)))
        X_train = X[train]
        y_train = y_emb[train]
        y_train_index = y_index[train]
        X_val = X[test]
        y_val = y_emb[test]
        y_val_index = y_index_one_hot[test]
        print('Activity distribution %s' % Counter(y_index))

        #   3.1: Build the LSTM model
        print('Building model...')
        sys.stdout.flush()

        model = Sequential()

        model.add(
            Embedding(input_dim=embedding_matrix.shape[0],
                      output_dim=embedding_matrix.shape[1],
                      weights=[embedding_matrix],
                      input_length=max_sequence_length,
                      trainable=EMB_TRAINABLE))
        # Change input shape when using embeddings
        model.add(
            LSTM(512,
                 return_sequences=False,
                 recurrent_dropout=DROPOUT,
                 dropout=DROPOUT,
                 input_shape=(max_sequence_length, embedding_matrix.shape[1])))
        # For regression use a linear dense layer with embedding_matrix.shape[1] size (300 in this case)
        # TODO: consider the need of normalization before calculating the loss (we may use a Lambda layer with L2 norm)
        model.add(Dense(embedding_matrix.shape[1]))
        # TODO: check different regression losses; cosine_proximity could be the best one for us?
        #model.compile(loss='mean_squared_error', optimizer='adam', metrics=['mse', 'mae'])
        model.compile(loss=LOSS,
                      optimizer=OPTIMIZER,
                      metrics=['cosine_proximity', 'mse', 'mae'])
        print('Model built')
        print(model.summary())
        sys.stdout.flush()

        #   3.2: Manage imbalanced data in the training set (SMOTE?) -> Conf option TREAT_IMBALANCE
        # NOTE: We may have a problem with SMOTE, since there are some classes with only 1-3 samples and SMOTE needs n_samples < k_neighbors (~5)
        # NOTE: RandomOverSampler could do the trick, however it generates just copies of current samples
        # TODO: Think about a combination between RandomOverSampler for n_samples < 5 and SMOTE?
        # TODO: First attempt without imbalance management
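        # One possible combination (untested sketch; assumes SMOTE is imported
        # from imblearn.over_sampling alongside the RandomOverSampler used below):
        #   counts = Counter(y_train_index)
        #   targets = {c: max(n, 6) for c, n in counts.items()}  # lift rare classes above k_neighbors
        #   ros = RandomOverSampler(sampling_strategy=targets, random_state=42)
        #   X_tmp, y_tmp = ros.fit_resample(X_train, y_train_index)
        #   X_train_res, y_train_index_res = \
        #       SMOTE(k_neighbors=5, random_state=42).fit_resample(X_tmp, y_tmp)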
        if (TREAT_IMBALANCE == True):
            ros = RandomOverSampler(
                random_state=42
            )  # sampling_strategy={4:10, 12:10, 14:10, 8:10, 13:10}
            print('Original dataset samples for training %s' %
                  len(y_train_index))
            print('Original dataset shape for training %s' %
                  Counter(y_train_index))
            X_train_res, y_train_index_res = ros.fit_resample(
                X_train, y_train_index)
            print('Resampled dataset samples for training %s' %
                  len(y_train_index_res))
            print('Resampled dataset shape for training %s' %
                  Counter(y_train_index_res))
            y_train_res = []
            # j is already the resampled class index; use it directly as the key
            for j in y_train_index_res:
                y_train_res.append(activity_index_to_embedding[str(j)])
            y_train_res = np.array(y_train_res)
            print("y_train_res shape: ", y_train_res.shape)
        else:
            X_train_res = X_train
            y_train_res = y_train

        #   3.3: Train the model with the imbalance-corrected training set and use the test set to validate
        print('Training...')
        sys.stdout.flush()
        # Define the callbacks to be used (EarlyStopping and ModelCheckpoint)
        # TODO: Do we need EarlyStopping here?
        #earlystopping = EarlyStopping(monitor='val_loss', patience=100, verbose=0)
        # TODO: improve file naming for multiple architectures
        weights_file = WEIGHTS + DATASET + '/' + str(filenumber).zfill(
            2) + '-' + EXPERIMENT_ID + '-fold' + str(fold) + WEIGHTS_FILE_ROOT
        modelcheckpoint = ModelCheckpoint(weights_file,
                                          monitor='val_loss',
                                          save_best_only=True,
                                          verbose=0)
        callbacks = [modelcheckpoint]
        history = model.fit(X_train_res,
                            y_train_res,
                            batch_size=BATCH_SIZE,
                            epochs=EPOCHS,
                            validation_data=(X_val, y_val),
                            shuffle=True,
                            callbacks=callbacks)
        #   3.4: Store the generated learning curves and metrics with the best model (ModelCheckpoint?) -> Conf option SAVE
        plot_filename = PLOTS + DATASET + '/' + str(filenumber).zfill(
            2) + '-' + EXPERIMENT_ID + '-fold' + str(fold)
        #plot_training_info(['loss'], True, history.history, plot_filename)
        if SAVE == True:
            utils.plot_training_info(['loss'], True, history.history,
                                     plot_filename)
            print("Plots saved in " + PLOTS + DATASET + '/')
        print("Training finished")

        # Print the best val_loss
        min_val_loss = min(history.history['val_loss'])
        min_val_loss_index = history.history['val_loss'].index(min_val_loss)
        print("Validation loss: " + str(min_val_loss) + " (epoch " +
              str(history.history['val_loss'].index(min_val_loss)) + ")")
        best_epochs.append(min_val_loss_index)
        model.load_weights(weights_file)
        yp = model.predict(X_val, batch_size=BATCH_SIZE, verbose=1)
        # yp has the embedding predictions of the regressor network
        # Obtain activity labels from embedding predictions
        ypreds = obtain_class_predictions(yp, activity_dict,
                                          activity_to_int_dict,
                                          int_to_activity)
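        # (Assumption: obtain_class_predictions maps each predicted embedding
        # back to an activity label, e.g. via nearest-neighbour / highest
        # cosine similarity against the stored activity embeddings.)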

        # Calculate the metrics
        ytrue = np.argmax(y_val_index, axis=1)
        print("ytrue shape: ", ytrue.shape)
        print("ypreds shape: ", ypreds.shape)

        # Use scikit-learn metrics to calculate confusion matrix, accuracy, precision, recall and F-Measure
        """
        cm = confusion_matrix(ytrue, ypreds)
    
        # Normalize the confusion matrix by row (i.e by the number of samples
        # in each class)
        cm_normalized = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        np.set_printoptions(precision=3, linewidth=1000, suppress=True)
        
        # Save also the cm to a txt file
        results_file_root = RESULTS + DATASET + '/' + str(filenumber).zfill(2) + '-' + EXPERIMENT_ID + '-fold' + str(fold)
        np.savetxt(results_file_root + '-cm.txt', cm, fmt='%.0f')   
        
        np.savetxt(results_file_root+'-cm-normalized.txt', cm_normalized, fmt='%.3f')
        print("Confusion matrices saved in " + RESULTS + DATASET + '/')
        """
        # Plot non-normalized confusion matrix -> Conf option SAVE
        if SAVE == True:
            results_file_root = RESULTS + DATASET + '/' + str(
                filenumber).zfill(2) + '-' + EXPERIMENT_ID + '-fold' + str(
                    fold)
            utils.plot_heatmap(
                ytrue,
                ypreds,
                classes=activity_to_int_dict.keys(),
                title='Confusion matrix, without normalization, fold ' +
                str(fold),
                path=results_file_root + '-cm.png')

            # Plot normalized confusion matrix
            utils.plot_heatmap(ytrue,
                               ypreds,
                               classes=activity_to_int_dict.keys(),
                               normalize=True,
                               title='Normalized confusion matrix, fold ' +
                               str(fold),
                               path=results_file_root + '-cm-normalized.png')

        #Dictionary with the values for the metrics (precision, recall and f1)
        metrics = utils.calculate_evaluation_metrics(ytrue, ypreds)
        metrics_per_fold = utils.update_metrics_per_fold(
            metrics_per_fold, metrics)
        # Update fold counter
        fold += 1

    # 4: Calculate the mean and std for the metrics obtained for each partition and store (always)
    metrics_per_fold = utils.calculate_aggregate_metrics_per_fold(
        metrics_per_fold)
    metrics_filename = RESULTS + DATASET + '/' + str(filenumber).zfill(
        2) + '-' + EXPERIMENT_ID + '-complete-metrics.json'
    with open(metrics_filename, 'w') as fp:
        json.dump(metrics_per_fold, fp, indent=4)
    print("Metrics saved in " + metrics_filename)
    print("Avg best epoch: " + str(np.mean(best_epochs)) + ", min: " +
          str(min(best_epochs)) + ", max: " + str(max(best_epochs)))
Exemplo n.º 38
0
import matplotlib.pyplot as plt
import pickle as cp
import numpy as np
import argparse
from utils import Utils, ArgumentError

u = Utils()


def _build_plot_description(metric):
    """
    Build the plot description based on the metric
    :param metric:  One of accuracy or rmse
    :return:        Title, x label and y label
    """
    if metric == "accuracy":
        title = "Median accuracy over 10 runs using linear regression"
        y_label = "Accuracy (%)"
    elif metric == "rmse":
        title = "Median RMSE over 10 runs using linear regression"
        y_label = "RMSE"
    else:
        raise ArgumentError("Metric argument '%s' invalid" % metric)
    x_label = "Relative time"

    return title, x_label, y_label
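
# Usage sketch: _build_plot_description("accuracy") returns
#   ("Median accuracy over 10 runs using linear regression",
#    "Relative time", "Accuracy (%)")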


def plot_data(data, title, x_label, y_label, args):
    """
    Plot the data.
Exemplo n.º 39
0
class Botnet:
    ut = Utils()

    def __init__(self, player):
        self.username = player.username
        self.password = player.password
        self.uhash = player.uhash
        self.botNetServers = 3
        self.botnet = []
        self.p = player
        self.ofwhat = ["fw", "av", "smash", "mwk"]
        self.energy = 0
        self._initbot()

    def _initbot(self):
        """
        Grab the number of bots in the botnet
        and populate an array of Bot objects.
        :return: none
        """
        data = self._botnetInfo()
        bots = json.loads(data)
        self.botnet = []
        if int(bots['count']) > 0:
            for i in bots['data']:
                bot = Bot(i['running'], self.ofwhat[random.randint(0, 3)],
                          self.energy, i['hostname'], self.username,
                          self.password, self.uhash)
                self.botnet.append(bot)

    def printbots(self):
        """
        Print a list of player PCs in the botnet
        :return: None
        """
        for bot in self.botnet:
            logger.info(bot)

    def getbotnetdata(self):
        """
        Return an array of bot class.
        Contains all the bots in the botnet.
        :return: list of bot class
        """
        return self.botnet

    def getInfo(self):
        """
        Get info about the entire botnet.
        Including if you can attack bot net servers etc.
        Also botnet PC info.
        :return: list of vHack servers that can be hacked.
                 ['1','2','1']. '1' = can be hacked, '2' time not elapsed.
        """
        response = self.ut.requestString(self.username, self.password,
                                         self.uhash, "vh_botnetInfo.php")
        response = json.loads(response)
        return response

    def attack(self):
        """
        Check if vHack server botnet is attackable,
        then attack if can.
        :return: none
        """
        self._initbot()
        logger.info("Trying Bot Net")
        cinfo = self.getInfo()

        for i in range(1, self.botNetServers + 1):
            if cinfo[i - 1] == '1':
                logger.debug('I am attacking #{}'.format(i))
                if i == 1:
                    response = self.ut.requestString(self.username,
                                                     self.password,
                                                     self.uhash,
                                                     "vh_attackCompany.php",
                                                     company=str(i))
                else:
                    response = self.ut.requestString(self.username,
                                                     self.password,
                                                     self.uhash,
                                                     "vh_attackCompany" +
                                                     str(i) + ".php",
                                                     company=str(i))
                logger.debug('I attacked #{} with response {}'.format(
                    i, response))
                if response == '0':
                    logger.info('#{} Netcoins gained'.format(i))
                else:
                    logger.info('#{} Failed! No netcoins...'.format(i))
            else:
                logger.info("Botnet #{} not hackable yet".format(i))

    def upgradebotnet(self, hostname, running, count):
        """
        Check if there is enough money to upgrade a botnet PC.
        Cycle through and upgrade until no money.
        :return: None
        """
        ofwhat = self.ofwhat[random.randint(0, 3)]
        logger.info("Prepare attempting to upgrade botnet PC " + hostname +
                    " [upgrading: " + ofwhat + "]")
        get_infobot = self.getInfo()

        if (int(get_infobot['data'][count]['strength']) == 3000):
            logger.info("The bot '" + hostname +
                        "' is on max strength [max strength 3000] ")
            return True

        if (int(get_infobot['data'][count]['running']) == 0):
            new_bal = self.upgradesinglebot(hostname, ofwhat)
            if new_bal:
                logger.info("Waiting! Doing updates for bot '" + hostname +
                            "' ..")
                return True
            else:
                logger.info("You don't have enough energy to upgrade '" +
                            hostname + "'! :(")
                return False
        else:
            logger.info("Waiting! Doing updates for bot '" + hostname + "' ..")
            return False

    def _botnetInfo(self):
        """
        Get the botnet information including vHack servers and PC data.
        :return: string
        '{"count":"14",
        "data":[{"bID":"1","bLVL":"100","bSTR":"100","bPRICE":"10000000"},
        {"bID":"2","bLVL":"100","bSTR":"100","bPRICE":"10000000"}],
        "strength":23,"resethours1":"","resetminutes1":"14","resethours2":"4","resetminutes2":"15",
        "resethours3":"3","resetminutes3":"15",
        "canAtt1":"2","canAtt2":"2","canAtt3":"2"}'
        """
        temp = self.ut.requestString(self.username, self.password, self.uhash,
                                     "vh_botnetInfo.php")
        return temp

    def upgradesinglebot(self, hostname, ofwhat):
        """
        Pass in bot class object and call upgrade function based on bot ID.
        Example response details:
        {u'strength': u'22', u'old': u'30', u'mm': u'68359859',
        u'money': u'66259859', u'costs': u'2100000',
        u'lvl': u'21', u'new': u'22'}
        current lvl, bot number, x, x, upgrade cost, lvl, next lvl
        :return: bool - True if the upgrade succeeded, False otherwise
        """
        response = self.ut.requestString(self.username,
                                         self.password,
                                         self.uhash,
                                         "vh_upgradePC.php",
                                         hostname=hostname,
                                         ofwhat=ofwhat,
                                         inst="0",
                                         much="1")
        jsons = json.loads(response)
        if int(jsons['result']) == 0:
            return True
        else:
            logger.error("Upgrades on " + hostname + " Failed !")
            return False

    def __repr__(self):
        return "Botnet details: vHackServers: {0}, Bot Net PC's: {1}".format(
            self.botNetServers, self.botnet)
Exemplo n.º 40
0
from flask import Flask, render_template
from utils import Utils, get_total_risk
from db_utils import save_data

app = Flask(__name__)
risk_db = Utils('data/risks.json')
dashboard_db = Utils('data/dashboard_risks.json')
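
# (Assumption: Utils here wraps a JSON file as a small data store, exposing
# .load() to refresh from disk and .data for the parsed contents, as used in
# the routes below.)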


@app.route('/')
def welcome_page():
    return render_template('welcome.html')


@app.route('/login')
def login_page():
    return render_template('login.html')


@app.route('/dashboard')
def dashboard_page():
    dashboard_db.load()
    return render_template('dashboard.html',
                           risk_score=get_total_risk(dashboard_db.data))


@app.route('/dashboard/risks')
def dashboard_risks():
    dashboard_db.load()
    return render_template('dashboard_risks.html', risks=dashboard_db.data)
Exemplo n.º 41
0
 def get(self, user):
     """Handle GET Requests."""
     posts = Utils.toJson(user.posts, host=self.request.host)
     self.response.write(json.dumps(posts))
Exemplo n.º 42
0
class Bot:
    ut = Utils()

    def __init__(self, running, ofwhat, energy, hostname, username, password,
                 uhash):
        self.username = username
        self.uhash = uhash
        self.password = password
        self.running = int(running)
        self.ofwhat = ofwhat
        self.energy = energy
        self.hostname = hostname

    def botupgradable(self, running):
        """
        Determine if botnet PC is at max level or not.
        :return: Bool
        """
        if running == 0:
            return True
        else:
            return False

    def nextlevelcostenergy(self):
        """
        Return the cost of upgrading bot to the next level
        :return:int
        """
        return self.energy

    def parse_json_stream(self, stream):
        decoder = json.JSONDecoder()
        while stream:
            obj, idx = decoder.raw_decode(stream)
            yield obj
            stream = stream[idx:].lstrip()
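
    # Usage sketch (assumption: the server may answer with concatenated JSON
    # objects such as '{"result":"0"}{"mm":"1"}'):
    #   for obj in self.parse_json_stream(response):
    #       logger.info(obj)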

    def upgradesinglebot(self, hostname, ofwhat):
        """
        Pass in bot class object and call upgrade function based on bot ID.
        Example response details:
        {u'strength': u'22', u'old': u'30', u'mm': u'68359859',
        u'money': u'66259859', u'costs': u'2100000',
        u'lvl': u'21', u'new': u'22'}
        current lvl, bot number, x, x, upgrade cost, lvl, next lvl
        :return: bool (currently always True; response parsing is disabled)
        """
        response = self.ut.requestString(self.username,
                                         self.password,
                                         self.uhash,
                                         "vh_upgradePC.php",
                                         hostname=hostname,
                                         ofwhat=ofwhat)
        #response = response.split('}{')[0] + '}'
        #jsons = json.loads(response)
        #logger.info(jsons)
        return True

    def __repr__(self):

        return "Bot details: running: {0}, energy: {1}, upgrade: {2}, botname: {3}".format(
            self.running, self.energy, self.ofwhat, self.hostname)
Exemplo n.º 43
0
 def __init__(self, id, strength):
     self.net = torchvision.models.squeezenet1_1(pretrained=True).features
     self.net.type(Utils.getDtype())
     for param in self.net.parameters():
         param.requires_grad = False
     self.init_hyper_params(id, 2**((strength - 50.0) / 50))
Exemplo n.º 44
0
    def post(self, user):
        """Handle POST Requests."""
        body = json.loads(self.request.body)
        post_data = body['post']
        institution_key = post_data['institution']

        institution = ndb.Key(urlsafe=institution_key).get()

        Utils._assert(not institution.is_active(),
                      "This institution is not active", NotAuthorizedException)

        permission = get_permission(post_data)

        user.key.get().check_permission(
            permission, "You don't have permission to publish post.",
            institution_key)

        @ndb.transactional(xg=True, retries=10)
        def create_post(post_data, user, institution):
            created_post = PostFactory.create(post_data, user.key,
                                              institution.key)
            user.add_post(created_post)

            institution.add_post(created_post)

            return created_post

        post = create_post(post_data, user, institution)

        entity_type = PostFactory.get_type(post_data)

        params = {
            'sender_key': user.key.urlsafe(),
            'entity_key': post.key.urlsafe(),
            'entity_type': entity_type,
            'institution_key': post.institution.urlsafe(),
            'current_institution': user.current_institution.urlsafe()
        }

        enqueue_task('notify-followers', params)

        if (post.shared_post):
            shared_post = post.shared_post.get()
            entity_type = 'SHARED_POST'
            params = {
                'receiver_key': shared_post.author.urlsafe(),
                'sender_key': user.key.urlsafe(),
                'entity_key': shared_post.key.urlsafe(),
                'entity_type': entity_type,
                'current_institution': user.current_institution.urlsafe(),
                'sender_institution_key': shared_post.institution.urlsafe()
            }

            enqueue_task('post-notification', params)
        elif post.shared_event:
            shared_event = post.shared_event.get()
            if shared_event.author_key != user.key:
                notification_message = post.create_notification_message(
                    user_key=user.key,
                    current_institution_key=user.current_institution,
                    sender_institution_key=shared_event.institution_key)
                send_message_notification(
                    receiver_key=shared_event.author_key.urlsafe(),
                    notification_type='SHARED_EVENT',
                    entity_key=post.key.urlsafe(),
                    message=notification_message)

        self.response.write(json.dumps(post.make(self.request.host)))
Exemplo n.º 45
0
 def show_image(self, img, iteration):
     print('Iteration {}'.format(iteration))
     plt.axis('off')
     plt.imshow(Utils.deprocess(img.data.cpu()))
     plt.show()
Exemplo n.º 46
0
    def get_sizes(m_df, m_df_spy=None):
        """Return candle size indicators and up/down run counts per row.

        Computes relative sizes for the open/close body and for the
        candle-to-candle columns; for SPY only the mean of open and close
        is computed.

        Args:
            m_df: OHLC dataframe of candles.
            m_df_spy: optional OHLC dataframe for SPY. Defaults to None.

        Returns:
            tuple: (m_df with the added indicator columns, m_df_spy)
        """

        if m_df_spy is not None and len(m_df_spy) > 0:
            m_df_spy["oc_mean"] = ((m_df_spy.close + m_df_spy.open)/2)
            
        m_df = sdf.retype(m_df)
        m_df.get("boll")
        m_df = FinI.add_sma(9, m_df)
        m_df = FinI.add_sma(20, m_df)
        m_df = FinI.add_weekday(m_df)
        m_df = FinI.add_week_of_month(m_df)
        m_df = FinI.add_levels(m_df)

        m_df["size_top"] = m_df.apply(lambda row: Utils.calc_perc(
            row.open, row.high) if row.open > row.close else Utils.calc_perc(row.close, row.high), axis=1)


        m_df["size_btm"] = m_df.apply(lambda row: Utils.calc_perc(
            row.low, row.close) if row.open > row.close else Utils.calc_perc(row.low, row.open), axis=1)

        m_df["size_body"] = m_df.apply(lambda row: Utils.calc_perc(row.open, row.close), axis=1)
        m_df["size_sma9"] = m_df.apply(lambda row: Utils.calc_perc(row.sma9, row.close), axis=1)
        m_df["size_sma20"] = m_df.apply(lambda row: Utils.calc_perc(row.sma20, row.close), axis=1)
        m_df["size_boll"] = m_df.apply(
            lambda row: Utils.calc_perc(row.boll, row.close), axis=1)
        m_df["size_boll_ub"] = m_df.apply(
            lambda row: Utils.calc_perc(row.boll_ub, row.close), axis=1)
        m_df["size_boll_lb"] = m_df.apply(
            lambda row: Utils.calc_perc(row.boll_lb, row.close), axis=1)

        m_df["size_top-1"] = m_df.shift(1).size_top

        m_df["size_btm-1"] = m_df.shift(1).size_btm

        m_df["size_body-1"] = m_df.shift(1).size_body

        m_df["size_top-2"] = m_df.shift(2).size_top

        m_df["size_btm-2"] = m_df.shift(2).size_btm

        m_df["size_body-2"] = m_df.shift(2).size_body

        m_df["size_top-3"] = m_df.shift(3).size_top

        m_df["size_btm-3"] = m_df.shift(3).size_btm

        m_df["size_body-3"] = m_df.shift(3).size_body
        
        m_df["size_prev_chng"] = (
            m_df.open - m_df.shift(1).close) / (m_df.shift(1).close/100)

        m_df = FinI.get_up_down_sum_in_row(m_df)
        m_df = FinI.get_green_red_sum_in_row(m_df)

        return m_df, m_df_spy
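
    # Usage sketch (assumption: get_sizes is a static helper on FinI and the
    # inputs are OHLC dataframes):
    #   candles, spy = FinI.get_sizes(df_ohlc, df_spy)
    #   print(candles[["size_top", "size_btm", "size_body"]].tail())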
Exemplo n.º 47
0
def createImage(options):
    (validImage, config) = verifyImageTypeAndConfig(options.config_file, options.img_name)
    if not validImage:
        raise Exception("Image type/config not supported")

    if 'ova' in config['artifacttype'] and shutil.which("ovftool") is None:
        raise Exception("ovftool is not available")

    install_config = config['installer']

    image_type = config['image_type']
    image_name = config.get('image_name', 'photon-' + image_type)
    workingDir = os.path.abspath(options.stage_path + "/" + image_type)
    if os.path.exists(workingDir) and os.path.isdir(workingDir):
        shutil.rmtree(workingDir)
    os.mkdir(workingDir)
    script_dir = os.path.dirname(os.path.realpath(__file__))

    grub_script = replaceScript(script_dir, image_type, "mk-setup-grub.sh", options.installer_path)
    install_config['setup_grub_script'] = grub_script

    # Set absolute path for 'packagelist_file'
    if 'packagelist_file' in install_config:
        plf = install_config['packagelist_file']
        if not plf.startswith('/'):
            plf = os.path.join(options.generated_data_path, plf)
        install_config['packagelist_file'] = plf

    os.chdir(workingDir)

    if 'log_level' not in install_config:
        install_config['log_level'] = options.log_level

    install_config['search_path'] = [
        os.path.abspath(os.path.join(script_dir, image_type)),
        os.path.abspath(script_dir),
    ]

    # if 'photon_docker_image' is defined in config_<img>.json then ignore
    # commandline param 'PHOTON_DOCKER_IMAGE' and 'config.json' value
    if 'photon_docker_image' not in install_config:
        install_config['photon_docker_image'] = options.photon_docker_image

    if 'size' in config and 'disks' in config:
        raise Exception("Both 'size' and 'disks' key should not be defined together.Please use 'disks' for defining multidisks only.")
    elif 'size' in config:
        # 'BOOTDISK' key name doesn't matter. It is just a name given for better understanding
        config['disks'] = {"BOOTDISK": config['size']}
    elif 'disks' not in config:
        raise Exception("Disk size not defined!!")

    image_file = []
    loop_device = {}
    # Create disk image
    for ndisk, k in enumerate(config['disks']):
        image_file.append(workingDir + "/" + image_name + "-" + str(ndisk) + ".raw")
        Utils.runshellcommand(
            "dd if=/dev/zero of={} bs=1024 seek={} count=0".format(image_file[ndisk], config['disks'].get(k) * 1024))
        Utils.runshellcommand(
            "chmod 755 {}".format(image_file[ndisk]))
        # Associating loopdevice to raw disk and save the name as a target's 'disk'
        loop_device[k] = (Utils.runshellcommand(
            "losetup --show -f {}".format(image_file[ndisk]))).rstrip('\n')

    # Assigning first loop device as BOOTDISK
    install_config['disk'] = loop_device[next(iter(loop_device))]

    # Mapping the given disks to the partition table disk
    # Assigning the appropriate loop device to the partition 'disk'
    if 'partitions' in install_config:
        for partition in install_config['partitions']:
            if len(loop_device) == 1:
                partition['disk'] = install_config['disk']
            elif 'disk' in partition:
                if partition['disk'] in loop_device.keys():
                    partition['disk'] = loop_device[partition['disk']]
                else:
                    cleanup(loop_device.values(), image_file)
                    raise Exception("disk name:{} defined in partition table not found in list of 'disks'!!".format(partition['disk']))
            else:
                cleanup(loop_device.values(), image_file)
                raise Exception("disk name must be defined in partition table for multidisks!!")

    # No return value, it throws exception on error.
    runInstaller(options, install_config, workingDir)

    # Detaching loop device from vmdk
    for loop_dev in loop_device.values():
        Utils.runshellcommand("losetup -d {}".format(loop_dev))

    os.chdir(script_dir)
    imagegenerator.createOutputArtifact(
                                image_file,
                                config,
                                options.src_root,
                                options.src_root + '/tools/bin/'
                              )
Exemplo n.º 48
0
    def generate(self,
                 content_img,
                 style_img,
                 content_size=192,
                 style_size=512,
                 init_random=False):
        self.CONTENT_SIZE = content_size
        self.STYLE_SIZE = style_size

        content_img = Utils.preprocess(content_img, size=self.CONTENT_SIZE)
        style_img = Utils.preprocess(style_img, size=self.STYLE_SIZE)

        features = Utils.extract_features(content_img, self.net)
        content_target = features[self.CONTENT_LAYER].clone()

        features = Utils.extract_features(style_img, self.net)
        style_activations = []
        for index in self.STYLE_LAYERS:
            style_activations.append(Utils.gramMatrix(features[index].clone()))

        if init_random:
            img = torch.Tensor(content_img.size()).uniform_(0, 1).type(
                Utils.getDtype())
        else:
            img = content_img.clone().type(Utils.getDtype())

        img.requires_grad_()

        optimizer = torch.optim.Adam([img], lr=self.LEARNING_RATE)
        # test with pyplot
        '''f, axarr = plt.subplots(1, 2)
        axarr[0].axis('off')
        axarr[1].axis('off')
        axarr[0].set_title('Content Source Img.')
        axarr[1].set_title('Style Source Img.')
        axarr[0].imshow(Utils.deprocess(content_img.cpu()))
        axarr[1].imshow(Utils.deprocess(style_img.cpu()))
        plt.show()
        plt.figure()'''

        for t in range(self.ITERATIONS):
            if t < (self.ITERATIONS / 2):
                img.data.clamp_(-1.5, 1.5)
            optimizer.zero_grad()

            forward_activations = Utils.extract_features(img, self.net)

            content_loss = Utils.contentLoss(
                self.CONTENT_WEIGHT, forward_activations[self.CONTENT_LAYER],
                content_target)
            style_loss = Utils.styleLoss(forward_activations,
                                         self.STYLE_LAYERS, style_activations,
                                         self.STYLE_WEIGHTS)
            tv_loss = Utils.tvLoss(img, self.TV_WEIGHT)
            loss = content_loss + style_loss + tv_loss

            loss.backward()

            if t == self.DECAY_LR_AT:
                optimizer = torch.optim.Adam([img], lr=self.DECAY_LR)
            optimizer.step()

            if t % 50 == 0 and self.verbose:
                self.show_image(img, t)
        #self.show_image(img, t)
        self.generated_image = np.array(Utils.deprocess(img.data.cpu()))
Exemplo n.º 49
0
import sys
import tweepy
import json

from utils import Utils

# emoji unicode causes the program to crash, attempted this solution:
# https://stackoverflow.com/questions/47436649/trying-to-extract-tweets-with-tweepy-but-emojis-make-it-crash
# but it didn't work; instead went with outright removing the emojis as shown here:
# https://stackoverflow.com/questions/33404752/removing-emojis-from-a-string-in-pytho

if __name__ == "__main__":
    username = "******"  # the twitter handle of an account to scrub the timeline of
    api = Utils.get_api()
    user = Utils.get_user(api, username)
    print(user)
Exemplo n.º 50
0
class Figures(BaseExtension):
    def __init__(self):
        BaseExtension.__init__(self)
        self.semanticscholar = Semanticscholar()
        self.utils = Utils()
        self.category_obj = Category()
        self.record = None
        self.category = ''
        self.img_style = "-webkit-border-radius: 8px; -moz-border-radius: 8px; border-radius: 8px; background: #f8f8f8; border-top:1px solid #ccc; border-right:1px solid #666; border-bottom:2px solid #999; border-left:1px solid #ccc; padding: 0px;"
        self.img_style_2 = 'border-radius:50%;'
        self.img_style_3 = '-webkit-filter: blur(1px); -moz-filter: blur(30px); -ms-filter: blur(30px); filter: blur(1px); filter:progid:DXImageTransform.Microsoft.Blur(PixelRadius=30, MakeShadow=false);'

    def excute(self, form_dict):
        name = form_dict['rTitle']
        fileName = form_dict['fileName'].encode('utf8')
        rID = form_dict['rID'].encode('utf8')
        figures = []
        links = []
        if fileName.find('papers') != -1:
            figures = self.semanticscholar.getFigures(name)
        else:
            figures, links = self.getRandomFigures(name)

        thumbs = ''
        if rID.find('arxiv') >= 0 or form_dict['url'].find('arxiv') != -1:
            thumbs = "http://www.arxiv-sanity.com/static/thumbs/" + self.getPid(
                form_dict['url'])
            version = self.utils.get_last_arxiv_version(
                rID[rID.find('arxiv-') + 6:].replace('-', '.'))
            jpg = '.pdf.jpg'
            thumbs = thumbs + version + jpg
            print 'thumbs ' + thumbs
            return self.genHtml(figures, form_dict['column'], links, thumbs)
        return self.getRefImage(rID, form_dict['url']) + self.genHtml(
            figures, form_dict['column'], links, thumbs)

    def getRefImage(self, rID, url):
        html = ''
        if rID.find('arxiv') >= 0:
            return html
        if Config.disable_reference_image == False and url.strip() != '':
            user_agent = {'User-agent': 'Mozilla/5.0'}
            r = requests.get(url, headers=user_agent)
            soup = BeautifulSoup(r.text)
            count = 0
            for img in soup.find_all('img'):
                if img['src'].endswith('.gif'):
                    continue
                if count == 0:
                    html += '<div>'
                count += 1
                html += '<img src="' + self.utils.fixUrl(
                    url, img['src']
                ) + '" width="50" height="50" style="' + self.img_style_2 + '"/>&nbsp;'
                if count > 5:
                    count = 0
                    html += '</div><div>'

            html += '</div>'
        return html

    def getPid(self, url):
        return url[url.rfind('/') + 1:].replace('.pdf', '').strip()

    def getRandomFigures(self, title):
        return self.getPinterestImg(title)
        #return self.getGoogleImage(title)

    def getGoogleImage(self, title):
        r = requests.get(
            'https://www.google.com/search?q=%s&newwindow=1&biw=1435&bih=481&source=lnms&tbm=isch&sa=X'
            .replace('%s', title.replace(' ', '%20')))
        soup = BeautifulSoup(r.text)
        figures = []
        links = []
        for div in soup.find_all('div', class_='rg_di rg_bx rg_el ivg-i'):
            links.append('http://www.google.com' + div.a['href'])
            figures.append('http://www.google.com' + div.a['href'])
        return figures, links

    def getPinterestImg(self, title):
        r = requests.get('https://www.pinterest.com/search/pins/?q=' + title)
        soup = BeautifulSoup(r.text)
        figures = []
        links = []
        for div in soup.find_all('div', class_='pinHolder'):
            links.append('https://www.pinterest.com' + div.a['href'])
            sp = BeautifulSoup(div.prettify())
            img = sp.find('img')
            figures.append(img['src'])
        return figures, links

    def genHtml(self, figures, column, links=[], thumb=''):
        html = '<div>'
        width = "100"
        height = "100"
        thumb_width = '570px'
        row_count = 5
        space = ''
        if column == '3':
            width = "100"
            height = "100"
            thumb_width = '450px'
            row_count = 4
            space = ''
        if column == '2':
            width = "130"
            height = "130"
            thumb_width = '563px'
            row_count = 4
            space = 3 * '&nbsp;'
        if column == '1':
            width = "150"
            height = "150"
            thumb_width = '600px'
            row_count = 7
            space = 2 * '&nbsp;'
        if thumb != '':
            html += self.utils.enhancedLink(
                thumb,
                '',
                img='<img width="' + thumb_width + '" src="' + thumb +
                '" style="' + self.img_style + '"/>',
                module='figures') + '<br/>'
        if figures != None:
            count = 0
            for fig in figures:
                count += 1
                if len(links) > 0:
                    html += self.utils.enhancedLink(
                        links[count - 1],
                        '',
                        img='<img class="demo-img pos-center" height="' +
                        height + '" width="' + width + '" src="' + fig +
                        '" style="' + self.img_style + '"/>',
                        module='figures') + space
                else:
                    html += self.utils.enhancedLink(
                        fig,
                        '',
                        img='<img height="' + height + '" width="' + width +
                        '" src="' + fig + '" style="' + self.img_style + '"/>',
                        module='figures') + space
                if count % row_count == 0:
                    html += '<br/>'

        html += '</div>'
        return html

    def check(self, form_dict):
        return True
Exemplo n.º 51
0
    def doWork(self):
        utils = Utils()
        wiki_dict = {'google' : 'http://en.wikipedia.org/wiki/List_of_mergers_and_acquisitions_by_Google',\
                     'facebook' : 'http://en.wikipedia.org/wiki/List_of_mergers_and_acquisitions_by_Facebook',\
                     'microsoft' : 'http://en.wikipedia.org/wiki/List_of_mergers_and_acquisitions_by_Microsoft',\
                     'apple' : 'http://en.wikipedia.org/wiki/List_of_mergers_and_acquisitions_by_Apple',\
                     'ibm' : 'http://en.wikipedia.org/wiki/List_of_mergers_and_acquisitions_by_IBM',\
                     'yahoo' : 'http://en.wikipedia.org/wiki/List_of_mergers_and_acquisitions_by_Yahoo!',\
                     'twitter' : 'http://en.wikipedia.org/wiki/List_of_mergers_and_acquisitions_by_Twitter'}
        for key, url in wiki_dict.items():
            r = requests.get(url)
            soup = BeautifulSoup(r.text)
            table = soup.find('table', class_='wikitable')
            #print table
            soup = BeautifulSoup(table.prettify())
            count = 0
            title = ''
            desc = 'description:'
            file_name = self.get_file_name('economics/' + key, self.school)
            file_lines = self.countFileLineNum(file_name)
            f = self.open_db(file_name + ".tmp")
            self.count = 0
            item_id = key + '-merger-'

            for td in soup.find_all('td'):
                count += 1

                if key == 'google':
                    if count > 8:
                        print title
                        count = 1
                        self.count += 1
                        self.write_db(f, item_id + str(self.count), title, '',
                                      utils.removeDoubleSpace(desc))
                        title = ''
                        desc = 'description:'
                        print '----------------------------------'
                    if count != 1 and count != 8:
                        if count == 2:
                            desc += td.text.strip()[td.text.strip().
                                                    find(' '):].strip() + ' '
                        elif count == 3:
                            title = utils.removeDoubleSpace(td.text.strip())
                        else:
                            desc += td.text.strip() + ' '
                if key == 'facebook':
                    if count > 10:
                        count = 1
                        print title
                        self.count += 1
                        self.write_db(f, item_id + str(self.count), title, '',
                                      utils.removeDoubleSpace(desc))
                        title = ''
                        desc = 'description:'
                        print '----------------------------------'
                    if count != 1 and count != 9 and count != 10:
                        if count == 2:
                            desc += td.text.strip()[td.text.strip().
                                                    find(' '):].strip() + ' '
                        elif count == 3:
                            title = utils.removeDoubleSpace(td.text.strip())
                        elif count == 5 and td.a != None:
                            desc += td.a.text.strip() + ' '
                        else:
                            desc += td.text.strip() + ' '
                if key == 'microsoft':
                    if count > 7:
                        count = 1
                        print title
                        self.count += 1
                        self.write_db(f, item_id + str(self.count), title, '',
                                      utils.removeDoubleSpace(desc))
                        title = ''
                        desc = 'description:'
                        print '----------------------------------'
                    if count != 1 and count != 7:
                        if count == 2:
                            desc += td.text.strip()[td.text.strip().
                                                    find(' '):].strip() + ' '
                        elif count == 3:
                            title = utils.removeDoubleSpace(td.text.strip())
                        else:
                            desc += td.text.strip() + ' '
                if key == 'apple':
                    if count > 8:
                        print title
                        count = 1
                        self.count += 1
                        self.write_db(f, item_id + str(self.count), title, '',
                                      utils.removeDoubleSpace(desc))
                        title = ''
                        desc = 'description:'
                        print '----------------------------------'
                    if count != 1 and count != 7 and count != 8:
                        if count == 2:
                            desc += td.text.strip()[td.text.strip().
                                                    find(' '):].strip() + ' '
                        elif count == 3:
                            title = utils.removeDoubleSpace(td.text.strip())
                        else:
                            desc += td.text.strip() + ' '
                if key == 'ibm':
                    if count > 6:
                        print title
                        count = 1
                        self.count += 1
                        self.write_db(f, item_id + str(self.count), title, '',
                                      utils.removeDoubleSpace(desc))
                        title = ''
                        desc = 'description:'
                        print '----------------------------------'
                    if count != 6:
                        if count == 1:
                            desc += td.text.strip()[td.text.strip().
                                                    find(' '):].strip() + ' '
                        elif count == 2:
                            title = utils.removeDoubleSpace(td.text.strip())
                        else:
                            desc += td.text.strip().replace('\n', '') + ' '
                if key == 'yahoo':
                    if count > 8:
                        count = 1
                        print title
                        self.count += 1
                        self.write_db(f, item_id + str(self.count), title, '',
                                      utils.removeDoubleSpace(desc))
                        title = ''
                        desc = 'description:'
                        print '----------------------------------'
                    if count != 1 and count != 8:
                        if count == 2:
                            desc += td.text.strip()[td.text.strip().
                                                    find(' '):].strip() + ' '
                        elif count == 3:
                            title = utils.removeDoubleSpace(td.text.strip())
                        else:
                            desc += td.text.strip() + ' '
                if key == 'twitter':
                    if count > 8:
                        count = 1
                        print title
                        self.count += 1
                        self.write_db(f, item_id + str(self.count), title, '',
                                      utils.removeDoubleSpace(desc))
                        title = ''
                        desc = 'description:'
                        print '----------------------------------'
                    if count != 1 and count != 8:
                        if count == 2:
                            desc += td.text.strip()[td.text.strip().
                                                    find(' '):].strip() + ' '
                        elif count == 3:
                            title = utils.removeDoubleSpace(td.text.strip())
                        else:
                            desc += td.text.strip() + ' '

            self.close_db(f)
            if file_lines != self.count and self.count > 0:
                self.do_upgrade_db(file_name)
                print "before lines: " + str(
                    file_lines) + " after update: " + str(self.count) + " \n\n"
            else:
                self.cancel_upgrade(file_name)
                print "no need upgrade\n"
Exemplo n.º 52
0
def cleanup(loop_devices, raw_image):
    for i,loop_dev in enumerate(loop_devices):
        Utils.runshellcommand("losetup -d {}".format(loop_dev))
        os.remove(raw_image[i])
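
A hypothetical setup counterpart for cleanup() above, assuming that
Utils.runshellcommand returns the command's stdout (the helper itself is not
shown in this example):

# Hypothetical usage: attach raw images to loop devices, use them, clean up.
raw_images = ["/tmp/disk0.img", "/tmp/disk1.img"]
loop_devices = [Utils.runshellcommand("losetup --find --show {}".format(img)).strip()
                for img in raw_images]
# ... partition, format or mount the loop devices here ...
cleanup(loop_devices, raw_images)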
Exemplo n.º 53
0
    def __init__(self, clientsocket, addr, ecfg):
        self.clientsocket = clientsocket
        self.addr = addr
        self.ECONFIG = ecfg
        self.utils = Utils(ecfg)
Exemplo n.º 54
0

def execute_cli(args):
    Utils.click_validate_required_options(click.get_current_context(), conf)
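    # Resolve the handler by convention (an inferred reading of the lookup
    # below): module "tail_toolkit.modules.<cmd>", class "<Cmd>", and the
    # method "<action>_<cmd>" with dashes replaced by underscores.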
    module = click.get_current_context().info_name
    myclass = __import__("tail_toolkit.modules." + module)
    clazz = getattr(getattr(myclass.modules, module), module.title())
    getattr(clazz(conf, args),
            args['action'].replace("-", "_") + "_" + module)().save_config()


@click.group()
def cli(**kwargs):
    pass


@cli.command()
@click.argument('action',
                required=True,
                type=click.Choice(Utils.click_get_command_choice("tail",
                                                                 conf)))
@click.option('--loggroupname', '-l', help="Define the loggroupname.")
@Utils.docstring_parameter(conf)
def tail(**kwargs):
    execute_cli(kwargs)


print("Initializing tail-toolkit CLI (v" + __version__ + ") - Region: " +
      conf.region)
cli()
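
The reflection in execute_cli implies a naming contract for command modules;
a minimal module satisfying it might look like the following (a hypothetical
tail_toolkit/modules/tail.py, not the project's real module):

# Hypothetical tail_toolkit/modules/tail.py: the command "tail" with action
# "add-trigger" resolves to Tail.add_trigger_tail().
class Tail:
    def __init__(self, conf, args):
        self.conf = conf
        self.args = args

    def add_trigger_tail(self):
        # do the work, then return an object exposing save_config()
        return self

    def save_config(self):
        pass  # persist self.conf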
Exemplo n.º 55
0
from emoji import Emoji
from utils import Utils

utils = Utils()
emoji = Emoji(utils.get_api_key())

message = ''

print('Emoji converter terminal test app')
print("Press 'q' or 'Q' to quit")

while message.lower() != 'q':
    message = input('> ')
    if message.lower() != 'q':
        print(emoji.get_emoji(message))
Exemplo n.º 56
0
class Platform:
    def __init__(self, conf):
        self.conf = conf
        self.utils = Utils(conf)

        # Which logs will be collected from the nodes
        self.logs = {
            "files": [],
            "dirs": ["/var/log/pods"],
            "services": ["kubelet"]
        }

        # Files that will be deleted during the cleanup stage
        self.tmp_files = []

    @step
    def cleanup(self):
        """Clean up"""
        try:
            self._cleanup_platform()
        except Exception as ex:
            logger.warning(
                "Received the following error '{}'\nAttempting to finish cleanup"
                .format(ex))
            raise Exception("Failure(s) during cleanup")
        finally:
            self.utils.cleanup_files(self.tmp_files)
            self.utils.ssh_cleanup()

    @timeout(600)
    @step
    def gather_logs(self):
        logging_errors = False

        node_ips = {
            "master": self.get_nodes_ipaddrs("master"),
            "worker": self.get_nodes_ipaddrs("worker")
        }

        if not os.path.isdir(self.conf.log_dir):
            os.mkdir(self.conf.log_dir)
            logger.info(f"Created log dir {self.conf.log_dir}")

        for node_type in node_ips:
            for ip_address in node_ips[node_type]:
                node_log_dir = self._create_node_log_dir(
                    ip_address, node_type, self.conf.log_dir)
                logging_error = self.utils.collect_remote_logs(
                    ip_address, self.logs, node_log_dir)

                if logging_error:
                    logging_errors = logging_error

        platform_log_error = self._get_platform_logs()

        if platform_log_error:
            logging_errors = platform_log_error

        return logging_errors

    def get_lb_ipaddr(self):
        """
        Get the IP of the Load Balancer
        :return:
        """
        pass

    def get_nodes_ipaddrs(self, role):
        """
        Get the IP addresses of the given type of node
        :param role: the type of node
        :return:
        """
        return []

    def get_num_nodes(self, role):
        """
        Get the number of nodes of a given type
        :param role: the type of node
        :return: num of nodes
        """
        pass

    @step
    def provision(self, num_master=-1, num_worker=-1, retries=4):
        """Provision a cluster"""
        if num_master > -1 or num_worker > -1:
            logger.warning("Overriding number of nodes")
            if num_master > -1:
                self.conf.master.count = num_master
                logger.warning("   Masters:{} ".format(num_master))

            if num_worker > -1:
                self.conf.worker.count = num_worker
                logger.warning("   Workers:{} ".format(num_worker))

        # TODO: define the number of retries as a configuration parameter
        for i in range(0, retries):
            retry = i + 1

            try:
                self._provision_platform()
                break
            except Exception as ex:
                logger.warning(f"Provision attempt {retry}/{retries} failed")
                if retry == retries:
                    raise Exception(
                        f"Failed {self.__class__.__name__} deployment") from ex

    def ssh_run(self, role, nr, cmd):
        ip_addrs = self.get_nodes_ipaddrs(role)
        if nr >= len(ip_addrs):
            raise ValueError(f'Node {role}-{nr} not deployed in platform')

        return self.utils.ssh_run(ip_addrs[nr], cmd)

    @staticmethod
    def _create_node_log_dir(ip_address, node_type, log_dir_path):
        node_log_dir_path = os.path.join(
            log_dir_path, f"{node_type}_{ip_address.replace('.', '_')}")

        if not os.path.isdir(node_log_dir_path):
            os.mkdir(node_log_dir_path)
            logger.info(f"Created log dir {node_log_dir_path}")

        return node_log_dir_path

    def _cleanup_platform(self):
        """Platform specific cleanup. Expected to be overridden by platforms"""

    def _env_setup_cmd(self):
        """Returns the command for setting up the platform environment"""
        return ""

    def _provision_platform(self):
        """Platform specific provisioning"""

    def _get_platform_logs(self):
        """Platform specific logs to collect. Expected to be overridden by platforms"""
        return False
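
Platform reads as a template-method base class: provision(), cleanup() and
gather_logs() drive the flow, while the underscore-prefixed hooks hold the
platform specifics. A hypothetical subclass (names and addresses invented)
would override just those hooks:

class ExamplePlatform(Platform):
    # Hypothetical concrete platform; all values are placeholders.
    def _provision_platform(self):
        # bring the cluster up and register files for the cleanup stage
        self.tmp_files.append("/tmp/example-platform-state.json")

    def _cleanup_platform(self):
        pass  # tear the cluster down

    def get_nodes_ipaddrs(self, role):
        return {"master": ["10.0.0.10"], "worker": ["10.0.0.20"]}.get(role, [])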
Exemplo n.º 57
0
class ScapyDriver(object):
    def __init__(self, port, dry=False, dbg=0, logger=None):
        self.port = port
        self.dry = dry
        self.dbg = dbg
        self.errs = []
        self.finished = False
        self.logger = logger or Logger()
        self.utils = Utils(self.dry, logger=self.logger)
        self.iface = port.iface
        self.iface_status = None
        self.packet = ScapyPacket(port.iface, dry=self.dry, dbg=self.dbg,
                                  logger=self.logger)
        self.rxInit()
        self.txInit()
        self.statState.set()

    def __del__(self):
        self.logger.debug("ScapyDriver {} exiting...".format(self.iface))
        self.cleanup()
        del self.packet

    def get_alerts(self):
        errs = []
        errs.extend(self.errs)
        errs.extend(self.packet.get_alerts())
        self.errs = []
        return errs

    def cleanup(self):
        print("ScapyDriver {} cleanup...".format(self.iface))
        self.finished = True
        self.captureState.clear()
        self.statState.clear()
        self.txState.clear()
        self.protocolState.clear()
        self.packet.cleanup()

    def rxInit(self):
        self.captureQueueInit()
        self.captureState = threading.Event()
        self.captureState.clear()
        self.protocolState = threading.Event()
        self.protocolState.clear()
        self.statState = threading.Event()
        self.statState.clear()
        self.rxThread = threading.Thread(target=self.rxThreadMain, args=())
        self.rxThread.daemon = True
        self.rxThread.start()
        #self.linkThread = threading.Thread(target=self.linkThreadMain, args=())
        #self.linkThread.daemon = True
        #self.linkThread.start()
        self.linkThread = None

    def captureQueueInit(self):
        self.pkts_captured = []

    def startCapture(self):
        self.logger.debug("start-cap: {}".format(self.iface))
        self.pkts_captured = []
        self.captureState.set()

    def stopCapture(self):
        self.logger.debug("stop-cap: {}".format(self.iface))
        self.captureState.clear()
        time.sleep(1)
        return len(self.pkts_captured)

    def clearCapture(self):
        self.logger.debug("clear-cap: {}".format(self.iface))
        self.captureState.clear()
        time.sleep(3)
        self.pkts_captured = []
        return len(self.pkts_captured)

    def getCapture(self):
        self.logger.debug("get-cap: {}".format(self.iface))
        retval = []
        for pkt in self.pkts_captured:
            (data, hex_bytes) = (str(pkt), [])
            for index in range(len(data)):
                hex_bytes.append("%02X"% ord(data[index]))
            retval.append(hex_bytes)
        return retval

    def rx_any_enable(self):
        return self.captureState.is_set() or self.statState.is_set() or self.protocolState.is_set()

    def rxThreadMain(self):
        while not self.finished:
            # wait till captures or stats collection is enabled
            self.logger.debug("RX Thread {} start {}/{}/{}".format(self.iface,
                self.captureState.is_set(), self.statState.is_set(), self.protocolState.is_set()))
            while not self.rx_any_enable():
                time.sleep(1)
                OrEvent(self.captureState, self.statState, self.protocolState).wait()

            # read packets
            while self.rx_any_enable():
                try:
                    packet = self.packet.readp(iface=self.iface)
                    if packet:
                        self.handle_recv(None, packet)
                except Exception as e:
                    if str(e) != "[Errno 100] Network is down":
                        self.logger.debug(e, traceback.format_exc())
                    self.logger.debug("Driver(%s): '%s' - ignoring", self.iface, str(e))
                    while self.rx_any_enable() and not self.is_up():
                        time.sleep(1)

    def is_up(self):
        if self.linkThread:
            return bool(self.iface_status)
        return isLinkUp(self.iface)

    def linkThreadMain(self):
        self.iface_status = isLinkUp(self.iface)
        while not self.finished:
            time.sleep(2)
            status = isLinkUp(self.iface)
            if status != self.iface_status:
                self.packet.set_link(status)
            self.iface_status = status

    def handle_stats(self, packet):
        pktlen = 0 if not packet else len(packet)
        framesReceived = self.port.incrStat('framesReceived')
        self.port.incrStat('bytesReceived', pktlen)
        if self.dbg > 2:
            self.logger.debug("{} framesReceived: {}".format(self.iface, framesReceived))
        if pktlen > 1518:
            self.port.incrStat('oversizeFramesReceived')
        for stream in self.port.track_streams:
            if self.packet.match_stream(stream, packet):
                stream.incrStat('framesReceived')
                stream.incrStat('bytesReceived', pktlen)
                break # no need to check in other streams

    def handle_capture(self, packet):
        self.pkts_captured.append(packet)

    def handle_recv(self, hdr, packet):
        if self.statState.is_set():
            self.handle_stats(packet)
        if self.captureState.is_set():
            self.handle_capture(packet)

    def txInit(self):
        self.txState = threading.Event()
        self.txState.clear()
        self.txStateAck = dict()
        self.stream_pkts = dict()
        self.txThread = threading.Thread(target=self.txThreadMain, args=())
        self.txThread.daemon = True
        self.txThread.start()

    def set_stream_enable2(self, handle, value, duration, msg):
        requested_stream = None
        for stream_id, stream in self.port.streams.items():
            if not handle:
                stream.enable2 = value
                if value: self.stream_pkts[stream_id] = 0
                self.logger.debug("{}-all: {} {} PKTS: {}".format(
                    msg, self.iface, stream_id, self.stream_pkts[stream_id]))
            elif stream_id == handle:
                requested_stream = stream
                stream.enable2 = value
                if value: self.stream_pkts[stream_id] = 0
                self.logger.debug("{}: {} {} PKTS: {}".format(
                    msg, self.iface, stream_id, self.stream_pkts[stream_id]))
            if duration > 0:
                stream.kws["duration2"] = duration
            else:
                stream.kws.pop("duration2", 0)
        return requested_stream

    def stop_ack_wait(self, handle):
        if handle in self.txStateAck:
            self.txStateAck[handle].set()

    def start_ack_wait(self, handle, wait=0):
        if handle not in self.txStateAck:
            self.txStateAck[handle] = threading.Event()
            self.txStateAck[handle].clear()
        elif wait <= 0:
            self.txStateAck[handle].clear()

        if wait:
            self.txStateAck[handle].wait(wait)

    def startTransmit(self, **kws):
        self.logger.debug("start-tx: {} {}".format(self.iface, kws))

        # enable selected streams
        handle = kws.get('handle', None)
        duration = self.utils.intval(kws, 'duration', 0)
        self.set_stream_enable2(handle, True, duration, "tx-enable")

        # signal the start
        self.logger.debug("signal-tx: {} {}".format(self.iface, kws))
        self.start_ack_wait(handle, 0)

        threading.Timer(1.0, self.txState.set).start()
        #self.txState.set()

    def startTransmitComplete(self, **kws):

        # wait for first packet to be sent
        handle = kws.get('handle', None)
        self.logger.debug("start-tx-ack-0: {} {}".format(self.iface, kws))
        self.start_ack_wait(handle, 10)
        self.logger.debug("start-tx-ack-1: {} {}".format(self.iface, kws))

        # check if all streams are non-continuous
        duration = self.utils.intval(kws, 'duration', 0)
        non_continuous = False
        for stream in self.port.streams.values():
            if stream.kws.get("transmit_mode", "continuous") != "continuous":
                non_continuous = True

        if non_continuous:
            # wait up to 30 seconds for transmission to finish
            for _ in range(30):
                time.sleep(1)
                if not self.txState.is_set():
                    self.logger.debug("TX Completed waiting 3 sec for RX")
                    time.sleep(3)
                    break
        elif duration > 0:
            self.logger.debug("waiting for duration: {}".format(duration))
            time.sleep(duration)
            self.set_stream_enable2(handle, False, duration, "tx-disable")
            if not handle: self.txState.clear()
        else:
            self.logger.debug("waiting 3 sec")
            time.sleep(3)

        self.logger.debug("start-tx-finished: {} {}".format(self.iface, kws))

    def stopTransmit(self, **kws):

        # disable selected streams
        handle = kws.get('handle', None)
        if handle:
            for stream_id, stream in self.port.streams.items():
                if stream_id == handle:
                    stream.enable2 = False
            return

        if not self.txState.is_set():
            return
        self.logger.debug("stop-tx: {}".format(self.iface))
        self.txState.clear()
        for _ in range(10):
            time.sleep(1)
            if not self.txState.is_set():
                break

    def clear_stats(self):
        self.packet.clear_stats()

    def txThreadMain(self):
        while not self.finished:
            while not self.txState.is_set():
                self.logger.debug("txThreadMain {} Wait".format(self.iface))
                self.txState.wait()
            try:
                self.txThreadMainInner()
            except Exception as e:
                self.logger.log_exception(e, traceback.format_exc())
            self.txState.clear()

    def txThreadMainInnerStart(self, pwa_list, sids):
        if self.dbg > 2:
            self.logger.debug("txThreadMainInnerStart {} {}".format(self.iface, sids.keys()))
        try:
            for stream in self.port.streams.values():
                if stream.stream_id in sids:
                    continue
                if self.dbg > 2:
                    self.logger.debug(" start {} {}/{}".format(stream.stream_id, stream.enable, stream.enable2))
                if stream.enable and stream.enable2:
                    pwa = self.packet.build_first(stream)
                    pwa.tx_time = time.clock()
                    pwa_list.append(pwa)
                    sids[stream.stream_id] = 0
                    self.stop_ack_wait(stream.stream_id)
        except Exception as exp:
            self.logger.log_exception(exp, traceback.format_exc())

        return bool(pwa_list)

    def txThreadMainInner(self):

        sids = {}
        pwa_list = []
        self.logger.debug("txThreadMainInner {} start {}".format(self.iface, self.port.streams.keys()))
        if not self.txThreadMainInnerStart(pwa_list, sids):
            self.logger.debug("txThreadMainInner {} Nothing Todo".format(self.iface))
            return

        tx_count = 0
        while (self.txState.is_set()):
            # call start again to see if new streams are created
            # while there are transmitting streams
            if not self.txThreadMainInnerStart(pwa_list, sids):
                break

            # sort based on next packet send time
            pwa_list.sort(key=self.pwa_sort)

            pwa_next_list = []
            for pwa in pwa_list:
                if not pwa.stream.enable or not pwa.stream.enable2:
                    continue
                self.pwa_wait(pwa)
                try:
                    send_start_time = time.clock()
                    pkt = self.send_packet(pwa, pwa.stream.stream_id)
                    bytesSent = len(pkt)
                    send_time = time.clock() - send_start_time

                    # increment port counters
                    framesSent = self.port.incrStat('framesSent')
                    self.port.incrStat('bytesSent', bytesSent)
                    if self.dbg > 2:
                        self.logger.debug("{} framesSent: {}".format(self.iface, framesSent))
                    pwa.stream.incrStat('framesSent')
                    pwa.stream.incrStat('bytesSent', bytesSent)
                    tx_count = tx_count + 1

                    # increment stream counters
                    stream_tx = self.stream_pkts[pwa.stream.stream_id] + 1
                    self.stream_pkts[pwa.stream.stream_id] = stream_tx
                    if self.dbg > 2 or (self.dbg > 1 and stream_tx%100 == 99):
                        self.logger.debug("{}/{} framesSent: {}".format(self.iface,
                                            pwa.stream.stream_id, stream_tx))
                except Exception as e:
                    self.logger.log_exception(e, traceback.format_exc())
                    pwa.stream.enable2 = False
                else:
                    build_start_time = time.clock()
                    pwa = self.packet.build_next(pwa)
                    if not pwa: continue
                    build_time = time.clock() - build_start_time
                    ipg = self.packet.build_ipg(pwa)
                    pwa.tx_time = time.clock() + ipg - build_time - send_time
                    pwa_next_list.append(pwa)
            pwa_list = pwa_next_list
        self.logger.debug("txThreadMainInner {} Completed {}".format(self.iface, tx_count))

    def pwa_sort(self, pwa):
        return pwa.tx_time

    def pwa_wait(self, pwa):
        delay = pwa.tx_time - time.clock()
        if self.dbg > 2 or (self.dbg > 1 and pwa.left != 0):
            self.logger.debug("stream: {} delay: {} pps: {}".format(pwa.stream.stream_id, delay, pwa.rate_pps))
        if delay <= 0:
            # yield
            time.sleep(0)
        elif delay > 1.0/10:
            self.utils.msleep(delay * 1000, 10)
        elif delay > 1.0/100:
            self.utils.msleep(delay * 1000, 1)
        elif delay > 1.0/200:
            self.utils.usleep(delay * 1000 * 1000, 100)
        elif delay > 1.0/500:
            self.utils.usleep(delay * 1000 * 1000, 10)
        else:
            self.utils.usleep(delay * 1000 * 1000)

    def send_packet(self, pwa, stream_name):
        return self.packet.send_packet(pwa, self.iface, stream_name, pwa.left)

    def createInterface(self, intf):
        return self.packet.if_create(intf)

    def deleteInterface(self, intf):
        return self.packet.if_delete(intf)

    def ping(self, intf, ping_dst, index=0):
        return self.packet.ping(intf, ping_dst, index)

    def send_arp(self, intf, index=0):
        return self.packet.send_arp(intf, index)

    def apply_bgp(self, op, enable, intf):
        return self.packet.apply_bgp(op, enable, intf)

    def apply_bgp_route(self, enable, route):
        return self.packet.apply_bgp_route(enable, route)

    def config_igmp(self, mode, intf, host):
        return self.packet.config_igmp(mode, intf, host)
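
rxThreadMain above waits on OrEvent(...), which is not defined in this
excerpt. A common recipe, shown here as an assumption rather than the
original helper, composes several threading.Event objects into one event
that is set whenever any member is set:

import threading

def OrEvent(*events):
    # Sketch: an Event that mirrors "any of the given events is set".
    or_event = threading.Event()

    def changed():
        if any(e.is_set() for e in events):
            or_event.set()
        else:
            or_event.clear()

    def patch(event):
        original_set, original_clear = event.set, event.clear
        def set_():
            original_set()
            changed()
        def clear_():
            original_clear()
            changed()
        event.set, event.clear = set_, clear_

    for e in events:
        patch(e)
    changed()
    return or_event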
Exemplo n.º 58
0
class UserClient:
    def __init__(self, clientsocket, addr, ecfg):
        self.clientsocket = clientsocket
        self.addr = addr
        self.ECONFIG = ecfg
        self.utils = Utils(ecfg)
        # per-connection state; set here so instances do not share mutable
        # class-level lists and the timestamps are taken per client
        self.userkey = None
        self.joinedservers = []
        self.latestmessagejson = json.loads("{}")
        self.latestmessageserver = ""
        self.lastgetmessagerequest = datetime.datetime.now().timestamp()
        self.lastgetmessagetotalrequest = datetime.datetime.now().timestamp()

    def register(self, usernm, passwd):
        if os.path.exists(self.ECONFIG.providerstorage + os.sep + "users" + os.sep + usernm):
            return '{"resp": false, "reason": "user already exists"}'
        # build the dict directly so special characters in passwd cannot break the JSON
        userstartjson = {"passwd": passwd, "timesloggedin": 0, "joinedservers": []}
        self.utils.putjson(userstartjson, self.ECONFIG.providerstorage +
                           os.sep + "users" + os.sep + usernm)
        return '{"resp": true}'

    def login(self, usernm, passwd):
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "users" + os.sep + usernm):
            return '{"resp": false, "reason":"username does not exist"}'

        userfile = open(self.ECONFIG.providerstorage +
                        os.sep + "users" + os.sep + usernm, "r")
        userfiletext = userfile.read()
        userfilejson = json.loads(userfiletext)
        userfilejson["timesloggedin"] = userfilejson["timesloggedin"] + 1
        userfile.close()
        if userfilejson["passwd"] == passwd:
            self.utils.putjson(userfilejson, self.ECONFIG.providerstorage +
                               os.sep + "users" + os.sep + usernm)
            userfile.close()
            self.usernm = usernm
            self.passwd = passwd
            self.joinedservers = userfilejson["joinedservers"]
            self.userkey = UserAuth(usernm, passwd, self.ECONFIG)
            return '{"resp": true, "key": "' + self.userkey.userkey.key + '"}'
        else:
            return '{"resp": false, "reason":"password is incorrect"}'

    def createserver(self, name, spasswd, owner):
        if os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + name):
            return '{"resp": false, "reason":"server already exists by that name"}'

        os.mkdir(self.ECONFIG.providerstorage +
                 os.sep + "servers" + os.sep + name)
        serverjson = json.loads('{}')
        serverjson["owner"] = owner
        serverjson["passwd"] = spasswd
        serverjson["channels"] = []
        serverjson["users"] = []
        self.utils.putjson(serverjson, self.ECONFIG.providerstorage +
                           os.sep + "servers" + os.sep + name + os.sep + "info")

        self.createchannel("general", name, owner)
        self.joinserver(name, spasswd, owner)

        return '{"resp": true}'

    def createchannel(self, name, servername, key):
        if os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername + os.sep + name):
            return '{"resp": false, "reason":"channel already exists"}'

        serverinfojson = self.utils.openjson(self.ECONFIG.providerstorage + os.sep +
                                             "servers" + os.sep + servername + os.sep + "info")

        # check ownership before touching the filesystem so a rejected
        # request leaves no stray channel directory behind
        if not self.userkey.userkey.usernm == serverinfojson["owner"]:
            return '{"resp": false, "reason":"only owner can create channels"}'

        os.mkdir(self.ECONFIG.providerstorage + os.sep +
                 "servers" + os.sep + servername + os.sep + name)
        serverinfojson["channels"].append(name)

        self.utils.putjson(serverinfojson, self.ECONFIG.providerstorage +
                           os.sep + "servers" + os.sep + servername + os.sep + "info")

        channelmessagesjson = json.loads('{}')
        channelmessagesjson["messages"] = 0

        self.utils.putjson(channelmessagesjson, self.ECONFIG.providerstorage + os.sep +
                           "servers" + os.sep + servername + os.sep + name + os.sep + "messages")

        return '{"resp": true}'

    def joinserver(self, servername, serverpasswd, recip):
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername + os.sep + "info"):
            return '{"resp": false, "reason":"server does not exist"}'

        serverinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                         os.sep + "servers" + os.sep + servername + os.sep + "info")
        userfile = self.utils.openjson(
            self.ECONFIG.providerstorage + os.sep + "users" + os.sep + recip)

        if not serverinfo["passwd"] == serverpasswd:
            return '{"resp": false, "reason":"incorrect server password"}'

        serverinfo["users"].append(recip)
        self.utils.putjson(serverinfo, self.ECONFIG.providerstorage +
                           os.sep + "servers" + os.sep + servername + os.sep + "info")

        userfile["joinedservers"].append(servername)
        self.utils.putjson(userfile, self.ECONFIG.providerstorage +
                           os.sep + "users" + os.sep + recip)

        return '{"resp": true}'

    def deleteserver(self, servername, serverpasswd):
        # TODO: only owner can delete server
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername + os.sep + "info"):
            return '{"resp": false, "reason":"server does not exist"}'

        serverinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                         os.sep + "servers" + os.sep + servername + os.sep + "info")
        serverusers = serverinfo["users"]

        for i in range(0, len(serverusers)):
            serveruserinfo = self.utils.openjson(
                self.ECONFIG.providerstorage + os.sep + "users" + os.sep + serverusers[i])
            serveruserinfo["joinedservers"].remove(servername)
            self.utils.putjson(serveruserinfo, self.ECONFIG.providerstorage +
                               os.sep + "users" + os.sep + serverusers[i])

        shutil.rmtree(self.ECONFIG.providerstorage +
                      os.sep + "servers" + os.sep + servername)
        return '{"resp": true}'

    def leaveserver(self, servername):
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername + os.sep + "info"):
            return '{"resp": false, "reason":"server does not exist"}'

        userinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                       os.sep + "users" + os.sep + self.userkey.userkey.usernm)
        serverinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                         os.sep + "servers" + os.sep + servername + os.sep + "info")
        userinfo["joinedservers"].remove(servername)
        serverinfo["users"].remove(self.userkey.userkey.usernm)

        self.utils.putjson(userinfo, self.ECONFIG.providerstorage +
                           os.sep + "users" + os.sep + self.userkey.userkey.usernm)
        self.utils.putjson(serverinfo, self.ECONFIG.providerstorage +
                           os.sep + "servers" + os.sep + servername + os.sep + "info")

        return '{"resp": true}'

    def message(self, msg, servername, channel):
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername + os.sep + "info"):
            return '{"resp": false, "reason":"server does not exist"}'

        serverinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                         os.sep + "servers" + os.sep + servername + os.sep + "info")
        channelmessages = self.utils.openjson(self.ECONFIG.providerstorage + os.sep +
                                              "servers" + os.sep + servername + os.sep + channel + os.sep + "messages")

        if not channel in serverinfo["channels"]:
            return '{"resp": false, "reason":"channel does not exist"}'

        channelmessages["messages"] = channelmessages["messages"] + 1

        self.utils.putjson(channelmessages, self.ECONFIG.providerstorage + os.sep +
                           "servers" + os.sep + servername + os.sep + channel + os.sep + "messages")

        messagejson = json.loads("{}")
        messagejson["message"] = msg
        messagejson["timestamp"] = datetime.datetime.now().timestamp()
        messagejson["author"] = self.userkey.userkey.usernm

        self.utils.putjson(messagejson, self.ECONFIG.providerstorage + os.sep + "servers" + os.sep +
                           servername + os.sep + channel + os.sep + str(channelmessages["messages"] - 1))

        messagejson["call"] = "newmessage"

        self.latestmessagejson = messagejson
        self.latestmessageserver = servername

        if self.ECONFIG.sendotherusersdata:
            self.sendotherusers(self.latestmessagejson,
                                self.latestmessageserver)

        return '{"resp": true}'

    def getmessages(self, servername, channel, begin, end, key):
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername + os.sep + "info"):
            return '{"resp": false, "reason":"server does not exist"}'
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername + os.sep + channel + os.sep + "messages"):
            return '{"resp": false, "reason":"channel does not exist"}'
        if not type(begin) is int:
            return '{"resp": false, "reason":"begin is not an int"}'
        if not type(end) is int:
            return '{"resp": false, "reason":"end is not an int"}'
        if end < begin:
            return '{"resp": false, "reason":"begin is greater than end"}'
        if end < 0 or begin < 0:
            return '{"resp": false, "reason":"end or begin is less than 0"}'

        channelmessages = self.utils.openjson(self.ECONFIG.providerstorage + os.sep +
                                              "servers" + os.sep + servername + os.sep + channel + os.sep + "messages")
        serverinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                         os.sep + "servers" + os.sep + servername + os.sep + "info")

        if end > channelmessages["messages"] or begin > channelmessages["messages"]:
            return '{"resp": false, "reason":"end or begin is greater than the total message count"}'
        if not self.userkey.userkey.usernm in serverinfo["users"]:
            return '{"resp": false, "reason":"user not in server"}'

        outjson = json.loads("{}")
        outjson["resp"] = True
        outjson["messages"] = []

        for i in range(0, end - begin):
            messagedata = self.utils.openjson(self.ECONFIG.providerstorage + os.sep + "servers" +
                                              os.sep + servername + os.sep + channel + os.sep + str(begin + i))
            outjson["messages"].append(messagedata)

        return json.dumps(outjson)

    def gettotalmessages(self, servername, channel):
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername):
            return '{"resp": false, "reason":"server does not exist"}'
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername + os.sep + channel + os.sep + "messages"):
            return '{"resp": false, "reason":"channel does not exist"}'

        channelmessages = self.utils.openjson(self.ECONFIG.providerstorage + os.sep +
                                              "servers" + os.sep + servername + os.sep + channel + os.sep + "messages")

        outjson = json.loads("{}")
        outjson["resp"] = True
        outjson["amount"] = channelmessages["messages"]

        return json.dumps(outjson)

    def getjoinedservers(self):
        userinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                       os.sep + "users" + os.sep + self.userkey.userkey.usernm)

        outjson = json.loads("{}")
        outjson["resp"] = True
        outjson["servers"] = userinfo["joinedservers"]

        return json.dumps(outjson)

    def getserverchannels(self, key, servername):
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername):
            return '{"resp": false, "reason":"server does not exist"}'

        serverinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                         os.sep + "servers" + os.sep + servername + os.sep + "info")

        if not self.userkey.userkey.usernm in serverinfo["users"]:
            return '{"resp": false, "reason":"user not in server"}'

        outjson = json.loads("{}")
        outjson["resp"] = True
        outjson["channels"] = serverinfo["channels"]

        return json.dumps(outjson)

    def deletechannel(self, name, servername, key):
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername):
            return '{"resp": false, "reason":"server does not exist"}'
        if not os.path.exists(self.ECONFIG.providerstorage + os.sep + "servers" + os.sep + servername + os.sep + name + os.sep + "messages"):
            return '{"resp": false, "reason":"channel does not exist"}'

        serverinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                         os.sep + "servers" + os.sep + servername + os.sep + "info")

        if not self.userkey.userkey.usernm == serverinfo["owner"]:
            return '{"resp": false, "reason":"user not owner of the server"}'

        serverinfo["channels"].remove(name)

        self.utils.putjson(serverinfo, self.ECONFIG.providerstorage +
                           os.sep + "servers" + os.sep + servername + os.sep + "info")

        shutil.rmtree(self.ECONFIG.providerstorage + os.sep +
                      "servers" + os.sep + servername + os.sep + name)

        return '{"resp": true}'

    def getprovidername(self):
        providerinfo = json.loads("{}")
        providerinfo["resp"] = True
        providerinfo["name"] = self.ECONFIG.providername
        providerinfo["welcome"] = self.ECONFIG.providerwelcome
        if self.ECONFIG.providericondata is not None:
            providerinfo["icon"] = self.ECONFIG.providericondata

        return json.dumps(providerinfo)

    def setserverimage(self, servernm, img):
        serverinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                         os.sep + "servers" + os.sep + servernm + os.sep + "info")
        if not self.userkey.userkey.usernm == serverinfo["owner"]:
            return '{"resp": false, "reason": "not the owner of the server"}'

        serverinfo["img"] = img

        self.utils.putjson(serverinfo, self.ECONFIG.providerstorage +
                           os.sep + "servers" + os.sep + servernm + os.sep + "info")
        return '{"resp":true}'

    def getserverimage(self, servernm):
        serverinfo = self.utils.openjson(self.ECONFIG.providerstorage +
                                         os.sep + "servers" + os.sep + servernm + os.sep + "info")
        if not self.userkey.userkey.usernm in serverinfo["users"]:
            return '{"resp": false, "reason": "not in the server"}'

        outjson = json.loads("{}")
        outjson["resp"] = True
        outjson["img"] = serverinfo["img"]

        print(json.dumps(outjson))

        return json.dumps(outjson)

    def rawcommand(self, command):
        if command == "":
            self.clientsocket.close()
            return "CLIENT UNGRACEFULLY CLOSED"

        commandjson = json.loads(command)
        call = commandjson["call"]

        # print(commandjson)
        # print(call)
        # print(commandjson["usernm"])
        # print(commandjson["passwd"])

        try:
            if call == "login":
                return self.login(commandjson["usernm"], commandjson["passwd"])
            elif call == "register":
                return self.register(commandjson["usernm"], commandjson["passwd"])
            elif call == "createserver":
                return self.createserver(commandjson["servername"], commandjson["serverpasswd"], commandjson["usernm"])
            elif call == "joinserver":
                return self.joinserver(commandjson["servername"], commandjson["serverpasswd"], commandjson["usernm"])
            elif call == "createchannel":
                return self.createchannel(commandjson["name"], commandjson["servername"], commandjson["usernm"])
            elif call == "deleteserver":
                return self.deleteserver(commandjson["servername"], commandjson["serverpasswd"])
            elif call == "leaveserver":
                return self.leaveserver(commandjson["servername"])
            elif call == "message":
                requesttimediff = datetime.datetime.now().timestamp() - \
                    self.lastgetmessagerequest
                # print(requesttimediff) #prints time difference between message requests
                self.lastgetmessagerequest = datetime.datetime.now().timestamp()
                if requesttimediff > 0.75:
                    return self.message(commandjson["msg"], commandjson["servername"], commandjson["channel"])
                else:
                    return '{"resp": false, "reason":"requesting too many times, only request every 0.75 seconds"}'
            elif call == "getjoinedservers":
                return self.getjoinedservers()
            elif call == "getmessages":
                return self.getmessages(commandjson["servername"], commandjson["channel"], commandjson["begin"], commandjson["end"], commandjson["usernm"])
            elif call == "gettotalmessages":
                requesttimediff = datetime.datetime.now().timestamp() - \
                    self.lastgetmessagetotalrequest
                # print(requesttimediff) #prints time difference between message total requests
                self.lastgetmessagetotalrequest = datetime.datetime.now().timestamp()
                if requesttimediff > 0.75:
                    return self.gettotalmessages(commandjson["servername"], commandjson["channel"])
                else:
                    return '{"resp": false, "reason":"requesting too many times, only request every 0.75 seconds"}'
            elif call == "getprovidername":
                return self.getprovidername()
            elif call == "getserverchannels":
                return self.getserverchannels(commandjson["usernm"], commandjson["servername"])
            elif call == "deletechannel":
                return self.deletechannel(commandjson["name"], commandjson["servername"], commandjson["usernm"])
            elif call == "setserverimage":
                return self.setserverimage(commandjson["servername"], commandjson["img"])
            elif call == "getserverimage":
                return self.getserverimage(commandjson["servername"])
            else:
                out = self.msg(command)
                print("UNKNOWN CALL:")
                print(json.dumps(commandjson))
                return out
        except KeyError as err:
            print(err)
            return '{"resp": false, "reason":"incorrect request values"}'

    def msg(self, msg):
        return str(msg)

    def sendotherusers(self, jsondata, servername):
        from elemental import transmit
        transmit(jsondata, servername, self)
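
rawcommand() above dispatches on the "call" field of a JSON document. A
hypothetical client-side exchange (host, port and the one-document-per-send
framing are assumptions) might look like:

import json
import socket

# Hypothetical client for the JSON protocol served by rawcommand().
sock = socket.create_connection(("localhost", 5000))
request = {"call": "login", "usernm": "alice", "passwd": "secret"}
sock.sendall(json.dumps(request).encode())
response = json.loads(sock.recv(4096).decode())
if response["resp"]:
    print("logged in, key:", response["key"])
sock.close()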
Exemplo n.º 59
0
    def __init__(self):
        self.__utils = Utils()
Exemplo n.º 60
0
    def main(self, config_path: str, auth_file: str, auto_confirm: bool):
        """
        Creates new config files within the default config file dir.
        Uses both user input and an authentication file for auth information.

        Args:
            config_path (str): Config file DIR
            auth_file (str): File with pairs of authentication data
            auto_confirm (bool): Skip any confirm messages
        """

        fileDirPath = dirname(sys.argv[0])
        logPath = join(fileDirPath, "logs", "installLog.txt")

        global LOGGER_NAME
        LOGGER_NAME = 'configFileLogger'
        global LOGGER
        LOGGER = Utils.setupLogger(LOGGER_NAME, logPath)

        Utils.printRow()
        Utils.auto_confirm = auto_confirm
        Utils.LOGGER = LOGGER
        signal.signal(signal.SIGINT, Utils.signalHandler)

        LOGGER.info("> Checking for sudo rights")
        # os.geteuid() only exists on POSIX systems, so guard the root check.
        if os.name == 'posix':
            if os.geteuid() == 0:
                print("Already root")
            else:
                print("Root rights required to run script.")
                subprocess.call(['sudo', 'python3', *sys.argv])
                sys.exit()

        LOGGER.info("> Generating new Config files")

        # ### Config dir setup
        config_path = realpath(config_path)
        LOGGER.info(
            f"> All new configurations files will be written into the directory:\n {config_path}"
        )

        # ### authFile setup
        try:
            if (not auth_file):
                LOGGER.info("> No authentification file specifed")
                Utils.setupAuthFile(None)
            else:  # take none if not exists, otherwise take auth path
                Utils.setupAuthFile(auth_file)
        except Exception as error:
            LOGGER.info(f"> Setup of auth-file failed due error: {error}")

        # ########## EXECUTION ################

        LOGGER.info("> You may add multiple SPP-Server now.")
        print("> Each server requires it's own config file")

        try:
            while (Utils.confirm(
                    "\nDo you want to to add a new SPP-Server now?")):

                config_file_path: str = ""
                server_name: str = ""
                while (not config_file_path or not server_name):
                    # Servername for filename and config
                    server_name = Utils.prompt_string(
                        "What is the name of the SPP-Server? (Human Readable, no Spaces)",
                        filter=(lambda x: " " not in x))
                    # build the config file path from the server name
                    config_file_path = join(realpath(config_path),
                                            server_name + ".conf")

                    if (isfile(config_file_path)):
                        LOGGER.info(
                            f"> There is already a file at {config_file_path}."
                        )
                        if (not Utils.confirm("Do you want to replace it?")):
                            LOGGER.info(
                                "> Please re-enter a different server name")
                            # remove content to allow loop to continue
                            config_file_path = ""
                            server_name = ""
                        else:
                            LOGGER.info("> Overwriting old config file")

                os.system("touch " + config_file_path)
                os.system("sudo chmod 600 " + config_file_path)
                LOGGER.info(f"> Created config file under {config_file_path}")

                # Overwrite existing file
                with open(config_file_path, "w") as config_file:
                    LOGGER.info(
                        f"> Accessed config file under {config_file_path}")

                    # Structure of the config file
                    configs: Dict[str, Any] = {}

                    # #################### SERVER ###############################
                    Utils.printRow()
                    LOGGER.info("> collecting server information")

                    # Saving config
                    configs["sppServer"] = ConfigFileSetup.createServerDict()

                    LOGGER.info("> finished collecting server information")
                    # #################### influxDB ###############################
                    Utils.printRow()
                    LOGGER.info("> collecting influxDB information")

                    # Saving config
                    configs["influxDB"] = ConfigFileSetup.createInfluxDict(
                        server_name)

                    LOGGER.info("> finished collecting influxdb information")
                    # #################### ssh clients ###############################
                    Utils.printRow()
                    LOGGER.info("> collecting ssh client information")

                    ssh_clients: List[Dict[str, Any]] = []

                    print("")
                    print(
                        "> NOTE: You will now be asked for multiple ssh logins"
                    )
                    print(
                        "> You may test all these logins yourself by logging in via ssh"
                    )
                    print("> Following categories will be asked:")
                    # server excluded here
                    ssh_types: List[str] = [
                        "vsnap", "vadp", "cloudproxy", "other"
                    ]
                    LOGGER.info("> server, " + ", ".join(ssh_types))
                    print("> Please add all clients accordingly.")
                    print()
                    print(
                        "> If you misstyped anything you may edit the config file manually afterwards"
                    )
                    print(
                        "> NOTE: It is highly recommended to add at least one vSnap client"
                    )

                    if (not Utils.confirm("Do you want to continue now?")):
                        json.dump(configs, config_file, indent=4)
                        LOGGER.info(
                            f"> saved all information into file {config_file_path}"
                        )
                        LOGGER.info("> finished setup for this server.")
                        continue  # Continuing to the next server config file loop

                    # #################### ssh clients: SERVER ###############################
                    Utils.printRow()
                    LOGGER.info("> Collecting SPP-Server ssh information")

                    ssh_server: Dict[str, Any] = {}

                    print(
                        "> Test the requested logins by logging into the SPP-Server via ssh yourself."
                    )
                    ssh_server["name"] = server_name
                    spp_server_dict: Dict[str, Any] = configs["sppServer"]
                    ssh_server["srv_address"] = spp_server_dict["srv_address"]
                    ssh_server["srv_port"] = int(
                        Utils.prompt_string(
                            f"Please enter the SSH port of the SPP server",
                            "22",
                            filter=(lambda x: x.isdigit())))
                    ssh_server["username"] = Utils.prompt_string(
                        "Please enter the SPP-Server SSH username (equal to login via ssh)"
                    )
                    ssh_server["password"] = Utils.prompt_string(
                        "Please enter the SPP-Server SSH user password (equal to login via ssh)",
                        is_password=True)
                    ssh_server["type"] = "server"

                    # Saving config
                    ssh_clients.append(ssh_server)

                    # #################### ssh clients all other ###############################
                    for ssh_type in ssh_types:
                        try:
                            ssh_clients.extend(
                                ConfigFileSetup.addSshClient(ssh_type))
                        except ValueError as err:
                            LOGGER.error(err)
                            LOGGER.info(
                                "Skipped this type of SSH-Client. Continuing with next type."
                            )

                    # save all ssh-clients
                    configs["sshclients"] = ssh_clients
                    print("> Finished setting up SSH Clients")

                    # #################### SAVE & EXIT ###############################
                    LOGGER.info("> Writing into config file")
                    json.dump(configs, config_file, indent=4)
                    LOGGER.info(
                        f"> Configuraton saved into the file:\n{config_file_path}"
                    )
                    Utils.printRow()
                    continue  # Continuing to the next server config file loop
        except ValueError as err:
            LOGGER.error(err)

        LOGGER.info("> Finished config file creation")