Example #1
def parse_radius_log(radius_log_file, out_dir):
    log_file = open(radius_log_file, 'r')
    outfile_name = "RADIUS_MAC2NSP_%s.txt" % (DATE.strftime('%Y%m%d'))
    output_file = open(os.path.join(out_dir, outfile_name), 'w')
    existing = set()
    try:
        for record in rad_recv_record_extractor(log_file):
            for converter_func, check_func in CONVERTER_CHAIN:
                if check_func(record):
                    try:
                        mac, nssp = converter_func(record)
                    except Exception:
                        log.exception('Error parsing radius record')
                        continue
                    mac2nsp = "%s %s" % (mac, nssp)
                    if mac2nsp in existing:
                        log.warn("Duplicate record found: %s, ignoring..." % mac2nsp)
                    else:
                        log.info("Matched: %s %s" % (mac, nssp))
                        output_file.write("%s %s\n" % (mac, nssp))
                        existing.add(mac2nsp)
                    break
            else:
                log.info("Radius record not matching rule: " + repr(record))
    finally:
        log_file.close()
        output_file.close()
Example #2
    def selectionchange(self, i):
        log.info("Items in the list: %d" % i)

        for count in range(self.ui.lenses_comboBox.count()):
            log.debug(self.ui.lenses_comboBox.itemText(count))
        
        log.debug("Current index %d: selection changed %s" % (i,self.ui.lenses_comboBox.currentText()))
Example #3
    def __init__(
        self, xml_state, root_dir, allow_existing=False
    ):
        """
            setup and host bind new root system at given root_dir directory
        """
        log.info('Setup root directory: %s', root_dir)
        root = RootInit(
            root_dir, allow_existing
        )
        root.create()
        root_bind = RootBind(
            root
        )
        root_bind.setup_intermediate_config()
        root_bind.mount_kernel_file_systems()
        root_bind.mount_shared_directory()

        self.xml_state = xml_state
        self.profiles = xml_state.profiles
        self.root_bind = root_bind

        # A list of Uri references is stored inside of the System instance
        # in order to delay the Uri destructors until the System instance
        # dies. This is needed to keep bind mounted Uri locations alive
        # for System operations
        self.uri_list = []
Example #4
 def __init__(self, usersettings=None):
     settings = {}
     
     # Set all settings first by deep-copying the defaults. These are in an authoritative
     # format and guaranteed to work with the current Toolkit version
     for key, value in defaultsettings.items():
         settings[key] = copy.deepcopy(value)
     
     # Now incorporate any saved settings. Here we have to be careful, because the stuff
     # we're sucking in might have screwy stuff in it from the old days. The point of the
     # incorporations framework is to try and move as much stuff from the user into the
     # settings as possible without screwing up the fine structure of the settings and making
     # the toolkit inoperable:
     settings = incorporatebykeydict({
             "tonecolors" : incorporatepositionallist({}),
             "extraquickaccesscolors" : incorporatepositionallist({}),
             "candidateFieldNamesByKey" : incorporatebykeydict({})
         })(settings, usersettings or {})
     
     log.info("Initialized configuration with settings %s", settings)
     self.settings = settings
     
     # /Transient/ flag recording whether Google translate appears to be up. To begin
     # with, we aren't sure
     self.__googletranslateworking = None
Example #5
    def __init__(self, *args):
        QMainWindow.__init__(self, *args)
        log.debug("main window initialization starting")
        # these are all categories apple is providing for now
        self.categories = [(self.tr('Just added'), '/trailers/home/feeds/just_added.json'),
                     (self.tr('Exclusive'), '/trailers/home/feeds/exclusive.json'),
                     (self.tr('Only HD'), '/trailers/home/feeds/just_hd.json'),
                     (self.tr('Most popular'), '/trailers/home/feeds/most_pop.json'),
                     (self.tr('Search'), '/trailers/home/scripts/quickfind.php?&q=')]

        # pick sane defaults
        self.config = configparser.SafeConfigParser({'downloadDir':'/tmp',
                                       'filters':json.dumps([y for x, y in PyTrailerSettings.filters]),
                                       'readAhead':'4',
                                       'parallelDownload':'2',
                                       'player':'mplayer -user-agent %%a %%u'})

        log.info("settings loaded: %s" % self.config.items("DEFAULT"))
        # run initializations
        self.player_proc = None
        self.list_loader = None
        self.list_loader_p = None
        self.movieDict = {}
        self.config.read(self.configPath)
        self.load_cache()
        self.init_preloaders()
        self.init_widget()
        self.init_menus()
        self.downloader.start()
        signal.signal(signal.SIGTERM, PyTrailerWidget.term_handler)
        signal.signal(signal.SIGINT, PyTrailerWidget.term_handler)
        log.debug("main window initialization done")
Example #6
    def send(self):
        log.info("FindRequest.send(host=%s, query=%s) called" % (self.store.host, self.query))

        self.cachedResult = cache.get(self.cacheKey)
        if self.cachedResult is not None:
            log.info("FindRequest(host=%s, query=%s) using cached result" % (self.store.host, self.query))
            return

        self.connection = HTTPConnectionWithTimeout(self.store.host)
        self.connection.timeout = settings.REMOTE_FIND_TIMEOUT

        query_params = [
            ('local', '1'),
            ('format', 'pickle'),
            ('query', self.query.pattern),
        ]
        if self.query.startTime:
            query_params.append( ('from', self.query.startTime) )

        if self.query.endTime:
            query_params.append( ('until', self.query.endTime) )

        query_string = urlencode(query_params)

        try:
            self.connection.request('GET', '/metrics/find/?' + query_string)
        except Exception:
            log.exception("FindRequest.send(host=%s, query=%s) exception during request" % (self.store.host, self.query))
            self.store.fail()
            self.failed = True
Example #7
 def delete_packages(self, manager, packages, force=False):
     """
         delete one or more packages using the package manager inside
         of the new root directory
     """
     log.info('Deleting system packages (chroot)')
     all_delete_items = self.__setup_requests(
         manager, packages
     )
     if all_delete_items:
         process = CommandProcess(
             command=manager.process_delete_requests(force),
             log_topic='system'
         )
         try:
             process.poll_show_progress(
                 items_to_complete=all_delete_items,
                 match_method=process.create_match_method(
                     manager.match_package_deleted
                 )
             )
         except Exception as e:
             raise KiwiSystemDeletePackagesFailed(
                 'Package deletion failed: %s' % format(e)
             )
Example #8
def get_libraries_impl(path, ctxitems, ctx):
	libraries = []

	libs = get_libraries_raw(ctxitems)
	root = get_document_root(path)
	log.info('libs are %s' % libs)
	if libs is not None:
		for lib in libs:
			lib_path = get_lib_path_by_name(path, lib, ctx)
			if lib_path:
				libraries.append(lib_path)
		if not libraries:
			log.info('there are no preconfigured libs to include, try defaults ...')
			libraries = libraries_default(path, ctx)

	import coffeescript
	def patch_coffeescript_lib(lib):
		if re.search(r'\.coffee$', lib):
			root = get_document_root(lib)
			fullpath = get_full_path(root, lib)
			return coffeescript.compile2js(None, lib, fullpath)
		return lib
	libraries = list(map(patch_coffeescript_lib, libraries))

	return libraries
Example #9
def create_new_users(mycursor, usertype=None):
	"""Takes a cursor to the Bixby DB and Staff or Student as the usertype.
	I'd like to fix this so that by default it creates all types. BEH
	The usertype thing is BROKEN BEH"""
	log.info('######## Creating New Users ########')
	mycursor.execute(queries.get_new_users, (usertype,))
	new_users = mycursor.fetchall()
	ac = google.appsclient.AppsClient()

	for user in new_users:
		uid, user_domain, user_type, first_name, last_name, external_usernumber = user
		new_username = unique_username(mycursor, uid)
		if user_type == 'Student':
			new_user_password = config.STUDENT_PASSWORD_PREFIX+external_usernumber
		else:
			new_user_password = config.STAFF_PASSWORD_PREFIX

		log.info('Creating %s User: %s (UID: %d)' %(user_type, new_username+'@'+user_domain, uid))

		try:
			ac.create_user(user_type, new_username, last_name, first_name, new_user_password)
		except gdata.apps.service.AppsForYourDomainException as e:
			if e.error_code == 1300:
				# Entity Exists
				mycursor.execute(queries.update_username, (new_username, new_user_password, 0, uid))
			log.exception('Error: %d Group: %s Reason: %s User: %s' %(e.error_code, e.invalidInput, e.reason, new_username))

		mycursor.execute(queries.update_username, (new_username, new_user_password, 2, uid)) # Mark Created and Insert Username
		time.sleep(config.SLEEP_TIME_SECONDS)
Example #10
 def reading(self, sentence):
     log.info("Requested reading for %s", sentence)
     
     def addword(words, _text, readingtokens):
         words.append(Word(*readingtokens))
     
     return self.mapparsedtokens(sentence, addword)
Example #11
 def wrapper(*args, **kwargs):
     start_time = time.time()
     ret = func(*args, **kwargs)
     end_time = time.time()
     log.info("{} executed in {}".format(
         func_name, end_time - start_time))
     return ret
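The wrapper above closes over func and func_name from an enclosing decorator that the snippet does not show. A minimal sketch of what that outer function might look like, assuming the usual decorator pattern (the name timed is an assumption, and log is the module-level logger the other snippets also assume):

import functools
import time

def timed(func):
    # Hypothetical enclosing decorator: capture the name once, then return
    # a wrapper equivalent to the snippet above.
    func_name = func.__name__

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        start_time = time.time()
        ret = func(*args, **kwargs)
        end_time = time.time()
        log.info("{} executed in {}".format(func_name, end_time - start_time))
        return ret

    return wrapper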
Example #12
 def showCurrentMode(self):
     if self._isEnabled:
         log.info('Sixth Sense Message enabled')
         Utils.addClientMessage(sm_settings.get('EnableSystemMsg'), True)
     else:
         log.info('Sixth Sense Message disabled')
         Utils.addClientMessage(sm_settings.get('DisableSystemMsg'), True)
Example #13
    def showSixthSenseIndicator(self):
        if not self._isEnabled or not self._activeParams:
            log.info('sixth sense message is disabled or nothing to do.')
            return
        currentTime = Utils.getTime()
        cooldownTime = self._getCooldownTime(currentTime, self._cooldownInterval)
        if cooldownTime > 0:
            log.info('[time:{:.1f}] invoke sixth sense, but it\'s not time yet. (rest {:.1f}s)'.format(currentTime, cooldownTime))
            Utils.addClientMessage(sm_settings.get('CooldownMsg').format(rest=int(math.ceil(cooldownTime))))
            return
        log.info('[time:{:.1f}] invoke sixth sense.'.format(currentTime))

        player = Utils.getPlayer()
        teamAmount = Utils.getTeamAmount()
        cellIndex = MinimapInfo.getCellIndexByPosition(Utils.getPos())
        
        messenger = IngameMessanger()
        log.info('current chat channel: {}'.format(messenger.getChannelLabels()))
        log.info('current team amount: {}'.format(teamAmount))

        self._isDone = {}
        for index, param in enumerate(self._activeParams):
            self._currentIndex = index
            self._currentParam = param
            self._doSixthSense(messenger, currentTime, player, cellIndex, teamAmount)
        if self._isDone:
            log.debug('success commands, update last activity.')
            self._lastActivity = currentTime
Example #14
 def cancel_command(self, signum, frame):
     """
     Exits program with return value of 1
     """
     print()
     log.info("Exiting...")
     sys.exit(1)
Example #15
 def _deactivate(self, names, tag=None):
     for name in names:
         if self._domains.get(name):
             addrs = self._domains.remove(name, tag)
             if addrs:
                 for addr in addrs:
                     log.info('removed %s -> %s', name.idna(), addr)
Example #16
 def save(self):
     log.info("GPSWindow SAVE")
     self.close()
     return self.latitude, self.longitude
Example #17
    def __init__(self):
        # init config reader
        self._config = ConfigMonitor()
        # start debugger
        log.debug(self._config.getVar('logging','dir'))

        # init serverpush
        self._push = Serverpush(self._config.getVar('cluster','host'))

        # basic information
        info = BasicInformations()

        # register on server
        if self._config.getVar('cluster','id')!='':
            print(self._config.getVar('cluster','id'))
        else:
            log.info("reciving new id")
            response = self._push.push('register',json.dumps(info.getInfo()))

            print(response)


        # start webserver
        s = WebServer(self._config.getVar("webserver","host"),self._config.getVar("webserver","port"))
        s.start()
Example #18
    def download(self,pbar):
        completed = 0

        log.info("download")
        while completed < 100:
            completed += 0.001
            pbar.setValue(completed)
Example #19
    def readFiles(self,files, item_list,pbar):

        log.info("Filemanager.readFiles")
        log.debug("Filemanager.readFiles: %s" % files)
        
        completed = 0
        pbar.setValue(0)
        pbar.setMaximum(len(files))
        for file in files:
            completed += 1
            pbar.setValue(completed)
            
            st_ino = os.stat(file)[1]
            if not exif.inode_inlist(item_list, st_ino):
                if exif.readJson(file):
                    exif.readJsonCreateDate()
                    item_list.append([1,st_ino,
                                        Path(file).name,
                                        exif.fileModifyDate,
                                        exif.exifCreateDate,
                                        str(exif.LensModel),
                                        str(exif.FNumber),
                                        exif.GPSLatitude,
                                        exif.GPSLongitude])
        
        return item_list
Example #20
    def actionDateTime_trigger(self):
        global CreateDate
        global item_list

        log.info("====== DateTime ======")
        self.iw=DateTimeWindow("","","",0)
        self.iw.exec_()
        res = self.iw.result()
        log.debug("resultat=%d" % res)
        if res == 1:
            CreateDate, incSecond = self.iw.save()
            log.info("CreateDate is %s, incSeconds %s" % (CreateDate, incSecond))

            for i in range(self.ui.treeWidget.topLevelItemCount()):
                item = self.ui.treeWidget.topLevelItem(i)
                if item.checkState(0) == 2:
                    exif.initArglist()
                    exif.appendCreateDate(CreateDate)
                    exif.appendArglist(SourceDir + os.path.sep + item.text(2))
                    exif.executeExiftool()
                    CreateDate = self.iw.inctime(CreateDate)


            item_list.clear()
            item_list = filemgr.readFiles(Filelist, item_list,self.ui.progressBar)                                
            self.redrawTreeWidget(item_list)
Example #21
    def openSource(self):
        global SourceDir
        global mime
        global Filelist
        global item_list
        
        
        options = QFileDialog.Options()
        file_path, file_type = QFileDialog.getOpenFileNames(self, 
                                                            "Get images...", 
                                                            SourceDir , 
                                                            "Images (*.arw *.jpg *.png *.tif)",
                                                            options=options)        
        if file_path:
            Filelist = file_path
            # Change SourceDir to current directory from first file in list
            SourceDir = os.path.dirname(file_path[0])
            self.ui.sourcedir_label.setText(SourceDir)
            log.info("directory is %s" % SourceDir)
            
            self.ui.toggleall_checkBox.setCheckState(Qt.Checked)
            self.ui.togglenolens_checkBox.setCheckState(Qt.Checked)

            item_list = filemgr.readFiles(file_path, item_list,self.ui.progressBar)
            
            self.redrawTreeWidget(item_list)
Example #22
def api_ledticker():
	"""
	if request.method == 'GET':
		text = request.args.get('text', '')
		ticker = ledticker.LedTicker()
		ticker.add_item(text)
		return ticker.get_output()

	if request.method == 'POST':
	"""

	text = getParam('text')
	lowPriority = isTrue(getParam('lowpriority')) if paramExists('lowpriority') else False

	ticker = ledticker.LedTicker(LEDTICKER_FILE)

	longAgo = False
	if lowPriority:
		mtime = stat(LEDTICKER_FILE).st_mtime
		longAgo = (datetime.datetime.fromtimestamp(mtime) + datetime.timedelta(seconds=30)) < datetime.datetime.now()

	if not lowPriority or (ticker.items_available() == 0 and longAgo):
		if not lowPriority:
			log.info("Received high priority message: %s" % text)
		ticker.add_item(text)
		ledtickerEvent.set()
		ledtickerEvent.clear()
	return jsonify({ 'success': True })
Example #23
    def fetch_users(self):
        untouched_user_list = get_user_cursor(self.db, EXPLORE_USER_COUNT)
        for user in untouched_user_list:
            user = user["user_id"]
            self.insert_user_all_answers(user)

        log.info("Finished crawling {} users.".format(len(EXPLORE_USER_COUNT)))
Example #24
    def __init__(self, config=Config, result_handler=None, **kwargs):
        self.config = config

        if result_handler and getattr(result_handler, 'handle', None):
            self.result_handler = result_handler
        else:
            from resultHandler import xmlHandler
            self.result_handler = xmlHandler()
        testdir = self.config.testdir
        # scan test files
        self.testcases = {}        # manages the test cases
        self.file2testcase = {}    # maps test files to testcase names

        for testfile in self.get_test_files():
            # build the test case and add it; decide whether a new timer is needed
            testcase = TestCase()
            file_name = os.path.sep.join((os.getcwd(), testfile))
            try:
                xml = self.parsexml(testfile)
                testcase.fromxml(xml)
                if testcase.is_valid():
                    self.testcases[testcase.name] = testcase
                    self.file2testcase[file_name] = testcase.name
                else:
                    raise Exception('no testcase found or empty testcase')
            except Exception:
                log_exce('invalid testcase definition in file: %s, will be discarded.' % testfile)

        log.info('load test file done! there are %d valid testcases.' % len(self.testcases))
Example #25
    def update_question_insert_answer(self, question_id):
        change_question_status(db=self.db, question_id=question_id, status=FLAG.IN_USE)
        log.info("Updating untouched question {}".format(question_id))

        # Update question detail
        question_url = 'http://www.zhihu.com/question/' + question_id
        question_content = self.session.get(question_url).content.decode('utf-8')

        q_soup = BeautifulSoup(question_content, BS_PARSER)
        if q_soup.find(Magic.Question.title) is not None:
            self.update_question_detail(q_soup, question_id)
        else:
            return  # 404

        # First 50 answers
        answers = q_soup.findAll('div', class_=Magic.Question.answer_div)
        if len(answers) > 0:
            for answer in answers:
                answer_id = int(Magic.answer_id_in_answer.findall(str(answer))[0])

                # TODO: save different version?
                # if self.db.answers.find({'answer_id': answer_id}).count() == 1:
                #     continue  # jump by this answer

                author, comments_count, content = self.process_answer(answer)
                insert_answer(self.db, answer_id, author, question_id, comments_count, content)

        # Users
        users = set(Magic.Question.mentioned_userid.findall(str(question_content)))
        for user in users:
            insert_new_user(self.db, user)

        change_question_status(db=self.db, question_id=question_id, status=FLAG.FINISHED)
Example #26
def backup_mysql():
	"""Backups the DB until things get very large I am going to do this every time.
	Or until I am sure my code is good."""
	dnsdt = str(time.strftime('%Y%m%d%H%M%S', time.localtime()))
	log.info("""Creating mysqldump: BIXBY_DB_Back.'%s'.sql""" %dnsdt)
	os.system("""mysqldump -h '%s' -u '%s' -p'%s' '%s' | gzip -9 > DB_Backups/BIXBY_DB_Back.'%s'.sql.gz""" \
		%(config.MySQL_Host, config.MySQL_User, config.MySQL_Password, config.MySQL_DB, dnsdt))
Example #27
 def execute(self, args):
     if len(args) != 2:
         self.parser.error(
             'you must specify an <image_id> and <destination_directory>')
     image_id, destdir = args
     self.ec2.download_image_files(image_id, destdir)
     log.info("Finished downloading AMI: %s" % image_id)
Example #28
    def fetch_questions(self):
        questions_list_cursor = get_untouched_question_cursor(self.db, EXPLORE_QUESTION_COUNT)
        for question_item in questions_list_cursor:
            question = question_item['question_id']
            self.update_question_insert_answer(question)

        log.info("Finished crawling {} questions.".format(EXPLORE_QUESTION_COUNT))
Example #29
    def find_confusion_nulls(self, corpus, 
                                   ngrams_prefix=None, 
                                   levels=LEVELS,
                                   min_count=5):
        """
        Finds <null> positions for given lists of n-grams at certain processing
        levels, e.g. tokens, part-of-speech tags and/or automatic word classes.
        """
        self.__load_ngrams(ngrams_prefix, levels, min_count)
        files = self.__tag_file(corpus, levels)
        n = 0

        for level in levels:
            files[level] = open(files[level])

        with open(corpus) as corpus_io:
            for s, line in enumerate(corpus_io):
                err_toks, edits = self.parse_corpus_line(line)
                all_tags = { level : next(files[level]).strip().lower().split()
                             for level in levels }

                confs = self.__find_nulls(err_toks, all_tags, edits)
                for i, j, err, cor in confs:
                    n += 1
                    yield (s, i, j, err, cor)
        
        log.info("found {} confusion examples".format(n))

        for level in levels:
            files[level].close()
Example #30
def handle_outbound_queue():
    while thread_loop_active:
        try:
            reply = outbound_messages.get(block=0)
            if reply and getattr(reply, 'msg', None) and getattr(reply, 'addr', None):
                if reply.msg_type != ClientRTP:
                    log.info('server sends %s to %s' 
                        % (reply.msg_type, repr(reply.addr)))
                else:
                    log.debug('server sends %s to %s' 
                        % (reply.msg_type, repr(reply.addr)))
                    
                try:
                    data = reply.msg.pack()
                    reactor.callFromThread(
                        servers_pool.send_to,reply.addr, data)
                        
                except Exception:
                    log.exception('exception')
                    
        except Queue.Empty:
            time.sleep(0.010)
        except Exception:
            log.exception('exception')
            
    log.info('terminating thread: handle_outbound_queue')
Example #31
 def create_day2_cluster(self, name: str, cluster_uuid: str, **cluster_params) -> models.cluster.Cluster:
     cluster = models.AddHostsClusterCreateParams(name=name, id=cluster_uuid, **cluster_params)
     log.info("Creating day 2 cluster with params %s", cluster.__dict__)
     result = self.client.register_add_hosts_cluster(new_add_hosts_cluster_params=cluster)
     return result
Example #32
 def deregister_host(self, infra_env_id: str, host_id: str):
     log.info(f"Deleting host {host_id} in infra_env {infra_env_id}")
     self.client.v2_deregister_host(infra_env_id=infra_env_id, host_id=host_id)
Example #33
 def delete_cluster(self, cluster_id: str):
     log.info("Deleting cluster %s", cluster_id)
     self.client.v2_deregister_cluster(cluster_id=cluster_id)
Example #34
 def update_cluster(self, cluster_id, update_params) -> models.cluster.Cluster:
     log.info("Updating cluster %s with params %s", cluster_id, update_params)
     return self.client.v2_update_cluster(cluster_id=cluster_id, cluster_update_params=update_params)
Example #35
 def set_pull_secret(self, cluster_id: str, pull_secret: str) -> models.cluster.Cluster:
     log.info("Setting pull secret for cluster %s", cluster_id)
     update_params = models.ClusterUpdateParams(pull_secret=pull_secret)
     return self.update_cluster(cluster_id=cluster_id, update_params=update_params)
Example #36
 def update_infra_env(self, infra_env_id: str, infra_env_update_params):
     log.info("Updating infra env %s with values %s", infra_env_id, infra_env_update_params)
     self.client.update_infra_env(infra_env_id=infra_env_id, infra_env_update_params=infra_env_update_params)
Example #37
 def delete_infra_env(self, infra_env_id: str) -> None:
     log.info("Deleting infra_env %s", infra_env_id)
     self.client.deregister_infra_env(infra_env_id=infra_env_id)
Example #38
def countdown(seconds):
    log.info("sleeping: " + str(seconds) + " seconds")
    for i in range(seconds, 0, -1):
        # print("\x1b[2K\r" + str(i) + " ")
        time.sleep(1)
    log.info("waking up")
Example #39
def execute_day1_flow(cluster_name):
    client = None
    cluster = {}
    if args.managed_dns_domains:
        args.base_dns_domain = args.managed_dns_domains.split(":")[0]

    if not args.vm_network_cidr:
        net_cidr = IPNetwork('192.168.126.0/24')
        net_cidr += args.ns_index
        args.vm_network_cidr = str(net_cidr)

    if not args.vm_network_cidr6:
        net_cidr = IPNetwork('1001:db8::/120')
        net_cidr += args.ns_index
        args.vm_network_cidr6 = str(net_cidr)

    if not args.network_bridge:
        args.network_bridge = f'tt{args.ns_index}'

    set_tf_config(cluster_name)
    image_path = None
    image_type = args.iso_image_type

    if not args.image:
        utils.recreate_folder(consts.IMAGE_FOLDER, force_recreate=False)
        client = assisted_service_api.create_client(
            url=utils.get_assisted_service_url_by_args(args=args))
        if args.cluster_id:
            cluster = client.cluster_get(cluster_id=args.cluster_id)
        else:

            cluster = client.create_cluster(cluster_name,
                                            ssh_public_key=args.ssh_key,
                                            **_cluster_create_params())

        image_path = os.path.join(consts.IMAGE_FOLDER,
                                  f'{args.namespace}-installer-image.iso')

        if args.with_static_network_config:
            tf_folder = utils.get_tf_folder(cluster_name, args.namespace)
            static_network_config = static_network.generate_static_network_data_from_tf(
                tf_folder)
        else:
            static_network_config = None

        client.generate_and_download_image(
            cluster_id=cluster.id,
            image_path=image_path,
            image_type=image_type,
            ssh_key=args.ssh_key,
            static_network_config=static_network_config,
        )

    # Iso only, cluster will be up and iso downloaded but vm will not be created
    if not args.iso_only:
        try:
            nodes_flow(client, cluster_name, cluster)
        finally:
            if not image_path or args.keep_iso:
                return
            log.info('deleting iso: %s', image_path)
            os.unlink(image_path)

    return cluster.id
Example #40
def prob(probability):
    rando = random.random()
    log.info("prob: " + str(probability) + " rolled: " + str(rando))
    return rando < probability
Example #41
def create_nodes(tf):
    log.info('Start running terraform')
    with utils.file_lock_context():
        return tf.apply()
Example #42
    for line in disallowed_subs_obj:
        DISALLOWED_SUBS.append(line.lower().strip())

def get_args():
  parser = argparse.ArgumentParser(description='The bot needs stuff')
  parser.add_argument('-u','--username', default=os.environ.get('REDDIT_USERNAME'))
  parser.add_argument('-p','--password', default=os.environ.get('REDDIT_PASSWORD'))
  parser.add_argument('-c','--clientid', default=os.environ.get('REDDIT_CLIENT_ID'))
  parser.add_argument('-s','--secret', default=os.environ.get('REDDIT_SECRET'))
  parser.add_argument('-a','--useragent', default=os.environ.get('REDDIT_USER_AGENT'))
  parser.add_argument('-l','--sublist', default=os.environ.get('REDDIT_SUBREDDITS'))
  return  parser.parse_args()

if get_args().sublist: # Prefer subreddit list from envars
  SUBREDDIT_LIST = get_args().sublist.strip().split(",")
  log.info("Getting subreddit list from envar or args")
else:
  log.info('Using subreddit list from utils.py')

log.info(SUBREDDIT_LIST)

def get_current_epoch():
    return int(time.time())

def convert_size_to_bytes(size_str):
    """Convert human filesizes to bytes.
    https://stackoverflow.com/questions/44307480/convert-size-notation-with-units-100kb-32mb-to-number-of-bytes-in-python
    Special cases:
     - singular units, e.g., "1 byte"
     - byte vs b
     - yottabytes, zetabytes, etc.
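The body of convert_size_to_bytes is cut off above. A minimal sketch of the suffix-table approach the docstring and the linked Stack Overflow answer describe; the unit table is an assumption and stops at terabytes:

def convert_size_to_bytes(size_str):
    # Sketch: turn strings like "100KB", "32 mb" or "1 byte" into a byte count.
    units = {'b': 1, 'kb': 1024, 'mb': 1024 ** 2, 'gb': 1024 ** 3, 'tb': 1024 ** 4}
    s = size_str.strip().lower().replace('bytes', 'b').replace('byte', 'b')
    # Check longer suffixes first so "kb" is not mistaken for a bare "b";
    # extend the table for pb/eb/zb/yb as needed.
    for suffix in sorted(units, key=len, reverse=True):
        if s.endswith(suffix):
            return int(float(s[:-len(suffix)].strip()) * units[suffix])
    return int(s)  # plain number, already in bytes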
Example #43
            since = ex.parse8601(f'{y}-{m}-01T00:00:00Z')
            data = ex.fetch_ohlcv(pair, timeframe=freq, since=since)
        # data = ex.fetch_ohlcv(pair, timeframe=freq)
    except Exception as e:
        log.error("Error fetch_ohlcv: " + pair + " " + freq + ", e: " + str(e))
        return pd.DataFrame()
    df = pd.DataFrame(columns=['date', 'open', 'high', 'low', 'close', 'vol'])
    for d in data:
        df = df.append(
            {
                "date": int(d[0]),
                "open": d[1],
                "high": d[2],
                "low": d[3],
                "close": d[4],
                "vol": d[5]
            },
            ignore_index=True)
    log.debug(pair)
    log.debug(df.iloc[-3:])
    return df


if __name__ == '__main__':
    log.reset('../log/log', log.INFO)
    log.reset('', log.DEBUG)
    log.info('test')
    for exchange_id in exchanges:
        getData(exchange_id, '1h')
        getData(exchange_id, '1d')
Example #44
def nodes_flow(client, cluster_name, cluster):
    tf_folder = utils.get_tf_folder(cluster_name, args.namespace)
    nodes_details = utils.get_tfvars(tf_folder)
    if cluster:
        nodes_details["cluster_inventory_id"] = cluster.id
        utils.set_tfvars(tf_folder, nodes_details)

    tf = terraform_utils.TerraformUtils(working_dir=tf_folder)
    machine_net = MachineNetwork(args.ipv4, args.ipv6, args.vm_network_cidr,
                                 args.vm_network_cidr6, args.ns_index)

    create_nodes_and_wait_till_registered(inventory_client=client,
                                          cluster=cluster,
                                          nodes_details=nodes_details,
                                          tf=tf)

    is_ipv4 = machine_net.has_ip_v4 or not machine_net.has_ip_v6
    main_cidr = args.vm_network_cidr if is_ipv4 else args.vm_network_cidr6
    secondary_cidr = machine_net.provisioning_cidr_v4 if is_ipv4 else machine_net.provisioning_cidr_v6

    if client:
        cluster_info = client.cluster_get(cluster.id)
        macs = utils.get_libvirt_nodes_macs(
            nodes_details["libvirt_network_name"])
        if is_none_platform_mode():
            macs += utils.get_libvirt_nodes_macs(
                nodes_details["libvirt_secondary_network_name"])

        if not (cluster_info.api_vip and cluster_info.ingress_vip):
            utils.wait_till_hosts_with_macs_are_in_status(
                client=client,
                cluster_id=cluster.id,
                macs=macs,
                statuses=[
                    consts.NodesStatus.INSUFFICIENT,
                    consts.NodesStatus.PENDING_FOR_INPUT,
                    consts.NodesStatus.KNOWN
                ],
            )

            if args.master_count == 1:
                tf.change_variables({
                    "single_node_ip":
                    helper_cluster.Cluster.get_ip_for_single_node(
                        client, cluster.id, main_cidr, ipv4_first=is_ipv4)
                })
                set_cluster_machine_cidr(client,
                                         cluster.id,
                                         machine_net,
                                         set_vip_dhcp_allocation=False)
            elif is_none_platform_mode():
                set_cluster_vips(client, cluster.id, machine_net)
            elif args.vip_dhcp_allocation:
                set_cluster_machine_cidr(client, cluster.id, machine_net)
            else:
                set_cluster_vips(client, cluster.id, machine_net)
        else:
            log.info("VIPs already configured")

        set_hosts_roles(client, cluster, nodes_details, machine_net, tf,
                        args.master_count, args.with_static_network_config)

        if is_none_platform_mode() and args.master_count > 1:
            master_ips = helper_cluster.Cluster.get_master_ips(
                client, cluster.id,
                main_cidr) + helper_cluster.Cluster.get_master_ips(
                    client, cluster.id, secondary_cidr)
            load_balancer_ip = _get_host_ip_from_cidr(
                machine_net.cidr_v6 if machine_net.has_ip_v6
                and not machine_net.has_ip_v4 else machine_net.cidr_v4)
            lb_controller = LoadBalancerController(tf)
            lb_controller.set_load_balancing_config(load_balancer_ip,
                                                    master_ips)

        utils.wait_till_hosts_with_macs_are_in_status(
            client=client,
            cluster_id=cluster.id,
            macs=macs,
            statuses=[consts.NodesStatus.KNOWN],
        )

        if args.install_cluster:
            time.sleep(10)
            install_cluster.run_install_flow(
                client=client,
                cluster_id=cluster.id,
                kubeconfig_path=consts.DEFAULT_CLUSTER_KUBECONFIG_PATH,
                pull_secret=args.pull_secret,
                tf=tf)
            # Validate DNS domains resolvability
            validate_dns(client, cluster.id)
            if args.wait_for_cvo:
                cluster_info = client.cluster_get(cluster.id)
                log.info("Start waiting till CVO status is available")
                api_vip = helper_cluster.get_api_vip_from_cluster(
                    client, cluster_info)
                config_etc_hosts(cluster_info.name,
                                 cluster_info.base_dns_domain, api_vip)
                utils.wait_for_cvo_available()
Example #45
def random_reply():
    log.info("making random reply")
    # Choose a random submission from /r/all that is currently hot
    if SUBREDDIT_LIST:
        subreddit = random.choice(SUBREDDIT_LIST)
        submission = random.choice(list(api.subreddit(subreddit).hot()))
    else:
        submission = random.choice(list(api.subreddit("all").hot()))

    submission.comments.replace_more(
        limit=0
    )  # Replace the "MoreReplies" with all of the submission replies

    sub_name = submission.subreddit.display_name
    brain = "{}/{}.db".format(DB_DIR, sub_name)
    log.info(brain)
    if not glob.glob(brain):
        learn(sub_name)

    reply_brain = bot.Brain(brain)

    try:
        #         if prob(.1): # small chance we advertise
        #           content = share()
        #           comment = random.choice(submission.comments.list())
        #           log.info('sharing - thanks for helping out!')
        #           sharing = '{} {}'.format(content['comment'], content['url'])
        #           reply = comment.reply(sharing)
        #           log.info("Replied to comment: {}".format(comment.body))
        #           log.info("Replied with: {}".format(reply))
        #           return
        if prob(.35):  # There's a larger chance that we'll reply to a comment.
            log.info("replying to a comment")
            comment = random.choice(submission.comments.list())
            response = reply_brain.reply(comment.body)

            # We might not be able to learn enough from the subreddit to reply
            # If we don't, then pull a reply from the general database.
            if "I don't know enough to answer you yet!" in response:
                log.info(
                    "I don't know enough from {}, using main brain db to reply"
                    .format(sub_name))
                brain = "{}/{}.db".format(DB_DIR, "brain")
                reply_brain = bot.Brain(brain)
                response = reply_brain.reply(comment.body)

            reply = comment.reply(response)
            log.info("Replied to comment: {}".format(comment.body))
            log.info("Replied with: {}".format(response))
        else:
            log.info("replying to a submission")
            # Pass the users comment to chatbrain asking for a reply
            response = reply_brain.reply(submission.title)

            # same as above. nobody will ever see this so it's fine.
            if "I don't know enough to answer you yet!" in response:
                log.info(
                    "I don't know enough from {}, using main brain db to reply"
                    .format(sub_name))
                brain = "{}/{}.db".format(DB_DIR, "brain")
                reply_brain = bot.Brain(brain)
                response = reply_brain.reply(submission.title)

            submission.reply(response)
            log.info("Replied to Title: {}".format(submission.title))
            log.info("Replied with: {}".format(response))
    except praw.exceptions.APIException as e:
        raise e
    except Exception as e:
        log.error(e, exc_info=False)
Example #46
                        default='')
    parser.add_argument('--platform',
                        help='VMs platform mode (\'baremetal\' or \'none\')',
                        type=str,
                        default='baremetal')
    parser.add_argument(
        "--bootstrap-in-place",
        help="single node cluster with bootstrap in place flow",
        action="store_true",
    )
    parser.add_argument(
        "--proxy",
        help="use http proxy with default values",
        type=distutils.util.strtobool,
        nargs='?',
        const=True,
        default=False,
    )

    oc_utils.extend_parser_with_oc_arguments(parser)
    args = parser.parse_args()
    if not args.pull_secret:
        raise Exception(
            "Can't install cluster without pull secret, please provide one")

    if args.master_count == 1:
        log.info("Master count is 1, setting workers to 0")
        args.number_of_workers = 0

    main()
Example #47
 def __del__(self):
     if self.temp_image_dir and os.path.exists(self.temp_image_dir):
         log.info('Cleaning up %s instance', type(self).__name__)
         Path.wipe(self.temp_image_dir)
Example #48
#     matches = re.match(r'.+\"(.+)\", \"(.+)\".+',param)
#     req.add_header(matches.group(1), matches.group(2))

#     try:
#         body = b"{\"device\":{\"id\":\"\",\"type\":\"WEB\"},\"channel\":{\"name\":\"Western Union\",\"type\":\"WEB\",\"version\":\"9Z00\",\"device_identifier\":\"RESPONSIVE_WEB\",\"is_responsive\":\"true\"},\"external_reference_no\":\"1\",\"locale\":{\"country_code\":\"us\",\"language_code\":\"en\"},\"security\":{\"black_box_data\":{\"data\":\"0400CIfeAe15Cx8Nf94lis1ztjXGwoPOvePAXyx2B+S3GHv4zk88XIRRraRLunIct8bff+0OYje9QG3QyfTeh05zWmOBOa6UBu99tJnG7aBZufFTM+276GxRfLnYpgrFwByeqHV8lCsVIFbWfvi2lw6riEVfH5Uu1Pa6eRLHB1R9v2d8MjlSDtQj+Jtpu4sEMnf8VWodMUtjVJMS2ITlncEvFXsdUywEcm1K+F9+VNHlcz2VwAGEd3H4L952RAUPKwCzLQAfZaA7jSiwHHrDW3HSYxnPse2ZJCsq4e3f1u/ETzp5VpgkQXTQzZ2bCkUkx/iDf2bn3Sw5z10y0BqqmpPWK5eIIfzGRmuyVIAW7uDQjXP1CYi+TBqr3g2vHHGOZYr7JaKI7U4dmdRZBrj3LMq6iXlL9ZZMAKELmgn6CSbaXfRSaQNEG0FLiAWeQwVS3/hrPOYe85lXFCRN2krK3MiK1aItlVfahFwAQkjj/Bf3sgBpWZmpMPu3J+3HIgrXK7aMtUplBKI7zAe5LqBNk4kdSgpA2sdPp297uf8vNtTCr7/8HBzsKudZ5X1CE/Fgu0JWx7yacWI+PSKq5dbmqXWqTFN9ILpoIeXbhWKXcVtbrPCixPMqjFULp9I8UWAJoym9T+HoQVdeZcQIrULZXTBjG6UIdAMpjc1v/zE8byqzj+BO44oNAQPdfLI+vS3zO1UWOjx45x9ZlwMJdmNWP4mnL4jqdQ4rX1XOyAfUMZ5LKFqFl81CC76WZKXakoCa7XCW4IhJrOz08Pzf+x4pw6W9esbGEpq7CoRYvFQ3SxgXyvTx/y4JwLTE4Nxv4KowoTq90t/25gxtwQokB0ojkm20iUzI+mPNi65X5WFPYlDG9N0Lbh5nOj3u3DXqRCiKCUrsEkMt8z9fxO9pLLGVQUKIYR2wTw53CiWK96FOpPevDWtH2XR0QkfOd02D73n81x6hEMCy0s3hRLn08Th9FlNHDMJBqLj+Tz8r0O/E9ABp/9e7Ass1MT2qnLUp3jXsoo0BtaQs3aTWzAj/ypZq/h+ZaTb+e2ERnQvcpeWgW0V2eQpdgS6+ebIBT9vO/g2GqbAcLd4t9XvVSBPuppttgqi/0yfOq1vG2COf7bsfFYzAaFLxVajijX/olA2vHHGOZYr7JaKI7U4dmdRZBrj3LMq6iXlL9ZZMAKELmgn6CSbaXfRSaQNEG0FLiAihm4Lf/K7HlU99UoH2xklR3N/aeGt5EB/svVUJi7EGUSnZsY8TL9w0j8/lZ9+74sNpkoqaTrmhoW0aLFYgm2YxWGZYhQabfSN8veAtIY6ioVg6fdSBnYKijDqHtCq29GqemMBz8q5Len0BJTgLAg0K0EGZNc9EzZa3c3jVldoE\",\"length\":1380},\"client_ip\":\"103082043065\"},\"bashPath\":\"/us/en\"}"
#         response = urllib.request.urlopen(req, body)
#         data = response.read()
#         if not data:
#             exit('Session id could not be created. Empty response from server. Exiting...')
#         session_id = json.loads(data.decode('utf-8'))['security']['session']['id']
#     except Exception as e:
#         log.info(e)
#         exit(1)

req, session_id = get_random_session_request()
log.info('session id created: {}'.format(session_id))

# Test the urls from APIS_FILE
urls_200 = []
for url in urls_to_check:
    if 'EmailValidation' in req.full_url:
        req.full_url = '{}?timestamp={}'.format(url,
                                                str(int(time.time() * 1000)))

    #GetUserChallenges
    else:
        req.full_url = url

    body = json.dumps({
        'email': email,
        'security': {
Example #49
 def host_get_next_step(self, cluster_id, host_id):
     log.info(
         f"Getting next step for host: {host_id} in cluster: {cluster_id}")
     return self.client.get_next_steps(cluster_id=cluster_id,
                                       host_id=host_id)
Example #50
def random_submission():
    log.info("making random submission")
    # Get a random submission from a random subreddit
    END_DATE_PY = datetime.datetime.now() - datetime.timedelta(
        days=NUMBER_DAYS_FOR_POST_TOBE_OLD)
    ED = END_DATE_PY.strftime("%s")

    START_DATE_PY = END_DATE_PY - datetime.timedelta(days=1)
    SD = START_DATE_PY.strftime("%s")

    log.info(START_DATE_PY)
    log.info(END_DATE_PY)
    log.info(SD)
    log.info(ED)
    DATE_DIFF = ""

    log.info("choosing subreddits")
    if SUBREDDIT_LIST:
        log.info('using SUBREDDIT_LIST: {}'.format(SUBREDDIT_LIST))
        subreddits = []
        for subname in SUBREDDIT_LIST:
            subreddits.append(
                subreddit(name=subname,
                          rank=1,
                          url="https://example.com",
                          subscribers=1000,
                          type="what"))
    else:
        log.info("using get_top_subreddits")
        subreddits = get_top_subreddits()
        log.info(subreddits)

    total_posts = []

    for sub in subreddits[:TOP_SUBREDDIT_NUM]:
        big_upvote_posts = []
        log.info("\n{}\n{}".format("#" * 20, sub))
        tops = get_submissions(SD, ED, sub.name)
        big_upvote_posts = list(
            filter(lambda item: item["score"] >= MIN_SCORE, tops))
        total_posts += big_upvote_posts
        log.info("found {} posts with score >= {}".format(
            len(big_upvote_posts), MIN_SCORE))
        del big_upvote_posts

    post_to_repost = random.choice(total_posts)
    # print(post_to_repost)
    # print("doing submission")
    rand_sub = api.submission(id=post_to_repost["id"])

    log.info(rand_sub.title)
    log.info(str(rand_sub))

    # Check if there's any items in the submissions list. If not display error
    if rand_sub:
        try:
            # Check if the we're reposting a selfpost or a link post.
            # Set the required params accodingly, and reuse the content
            # from the old post
            log.info("submission title: " + rand_sub.title)
            log.info("posting to: {}".format(rand_sub.subreddit.name))
            if rand_sub.is_self:
                params = {
                    "title": rand_sub.title,
                    "selftext": rand_sub.selftext
                }
            else:
                params = {"title": rand_sub.title, "url": rand_sub.url}

            # Submit the same content to the same subreddit. Prepare your salt picks
            api.subreddit(rand_sub.subreddit.display_name).submit(**params)
        except praw.exceptions.APIException as e:
            raise e
        except Exception as e:
            log.info(e)
    else:
        log.error("something broke")
Example #51
 def get_cluster_discovery_ignition(self, cluster_id):
     log.info("Getting discovery ignition for cluster %s", cluster_id)
     return self.client.get_discovery_ignition(cluster_id=cluster_id, )
Example #52
 def print_results(self):
     if self.result_files:
         log.info('Result files:')
          for key, value in self.result_files.items():
             if value:
                 log.info('--> %s: %s', key, value)
Example #53
 def get_cluster_install_config(self, cluster_id):
     log.info("Getting install-config for cluster %s", cluster_id)
     return self.client.get_cluster_install_config(cluster_id=cluster_id)
Example #54
 def register_host(self, cluster_id, host_id):
     log.info(f"Registering host: {host_id} to cluster: {cluster_id}")
     host_params = models.HostCreateParams(host_id=host_id)
     self.client.register_host(cluster_id, host_params)
Example #55
 def reset_cluster_install(self, cluster_id):
     log.info("Reset installation of cluster %s", cluster_id)
     return self.client.reset_cluster(cluster_id=cluster_id)
Example #56
 def patch_cluster_discovery_ignition(self, cluster_id, ignition_info):
     log.info("Patching cluster %s discovery ignition", cluster_id)
     return self.client.update_discovery_ignition(
         cluster_id=cluster_id,
         discovery_ignition_params=models.DiscoveryIgnitionParams(
             config=json.dumps(ignition_info)))
Example #57
 def update_cluster_install_config(self, cluster_id, update_params):
     log.info("Updating cluster install config with %s", update_params)
     return self.client.update_cluster_install_config(
         cluster_id, json.dumps(update_params))
Example #58
 def disable_host(self, cluster_id, host_id):
     log.info(f"Disabling host: {host_id}, in cluster id: {cluster_id}")
     return self.client.disable_host(cluster_id=cluster_id, host_id=host_id)
Example #59
    def download_cluster_events(self, cluster_id, output_file):
        log.info("Downloading cluster events to %s", output_file)

        with open(output_file, "wb") as _file:
            _file.write(
                json.dumps(self.get_events(cluster_id), indent=4).encode())
Example #60
 def cancel_cluster_install(self, cluster_id):
     log.info("Canceling installation of cluster %s", cluster_id)
     return self.client.cancel_installation(cluster_id=cluster_id)