def buff_end_proc(this, e):
    """Buff timeout callback: deactivate this buff, drop it from the shared
    registry, log any remaining same-name stack, and turn the modifier off."""
    log('buff', this.name, '%s: %.2f' % (this.mod_type, this.value()), this.name + ' buff end <timeout>')
    # NOTE(review): '__active'/'__stored'/'__value' look name-mangled; this def
    # presumably lives inside a class body -- confirm before moving it.
    this.__active = 0
    if this.__stored:
        # remove this exact buff object from the shared registry (scan backwards)
        idx = len(this._static.all_buffs)
        while 1:
            idx -= 1
            if idx < 0:
                break
            if this == this._static.all_buffs[idx]:
                this._static.all_buffs.pop(idx)
                break
        this.__stored = 0
    # count buffs with the same name that are still active, and log the stack size
    stack = 0
    for i in this._static.all_buffs:
        if i.name == this.name:
            if i.__active != 0:
                stack += 1
    if stack > 0:
        log('buff', this.name, '%s: %.2f' % (this.mod_type, this.__value * stack), this.name + ' buff stack <%d>' % stack)
    this.modifier.off()
def _http_serve(self):
    """Serve the video UI over HTTP; no-op when no bind address is available."""
    if self._video_address is None:
        core.log("could not get ip of usb0, video server not starting")
        return
    self._httpd = HTTPServer((self._video_address, self._video_port), VideoHandler)
    core.log("ui available at http://%s:%d/" % (self._video_address, self._video_port))
    self._httpd.serve_forever()
def __init__(self, config, state=None):
    """Initialise the display module from the ``ui.display`` config subtree.

    Args:
        config: full configuration dict; only ``config['ui']['display']`` is read.
        state: optional shared state dict forwarded to the base class.
            (BUG FIX: was a mutable default argument ``state={}``, shared
            across all instances constructed without an explicit state.)
    """
    super(Display, self).__init__(config, {} if state is None else state)

    display_cfg = config['ui']['display']
    self._enabled = display_cfg['enabled']
    self._rotation = display_cfg['rotation']
    self._video_enabled = display_cfg['video']['enabled']
    self._video_port = display_cfg['video']['port']
    self._video_address = display_cfg['video']['address']
    self._display_type = display_cfg['type']
    self._display_color = display_cfg['color']
    self.full_refresh_count = 0
    self.full_refresh_trigger = display_cfg['refresh']
    self._render_cb = None
    self._display = None
    self._httpd = None
    self.canvas = None

    if self._enabled:
        self._init_display()
    else:
        # even when disabled we still register the render hook so state stays consistent
        self.on_render(self._on_view_rendered)
        core.log("display module is disabled")

    if self._video_enabled:
        # serve the UI preview in a background thread
        _thread.start_new_thread(self._http_serve, ())
def on(this, name, rate, duration=None):
    """Attempt to apply this crowd-control affliction.

    Only one CC can be active at a time; a new CC of a different type must
    win a random roll against its proc rate to displace the active one.
    Returns 1 on success, 0 on a miss/refresh, or Afflic.on(this) when no
    CC was previously active.
    """
    this.rate = rate
    if duration:
        this.duration = duration
    if this._static.active_cc and this._static.active_cc.get():
        # some CC effect is currently running
        if this._static.active_name == this.name:
            # same CC type: just restart the running dot timer
            this.cc.on()
            return 0
        else:
            r = Afflic.on(this)
            if random.random() < r:
                # new CC wins the roll: replace the currently active one
                this._static.active_cc.off()
                this.cc = Dot('o_%s_%s' % (name, this.name), 0, this.duration, this.duration + 0.001)
                this.cc.cb_end = this.cb_end
                this.cc.on()
                this._static.active_name = this.name
                this._static.active_cc = this.cc
                return 1
            else:
                log('debug', 'cc', 'miss %f' % r, '%s_%s' % (name, this.name))
                return 0
    else:  # clean now
        log('debug', 'cc', 'clean')
        this.cc = Dot('o_%s_%s' % (name, this.name), 0, this.duration, this.duration + 0.001)
        this.cc.cb_end = this.cb_end
        this.cc.on()
        this._static.active_name = this.name
        this._static.active_cc = this.cc
        return Afflic.on(this)
def recoverdp(this, number):
    """Restore dragon points, capped at 100; ignored while dragon-shifted."""
    if this.d.getdoing().name == 'd':
        return
    this.dp = min(this.dp + number, 100)
    log('debug', 'dp', this.dp)
def do_GET(self):
    """HTTP handler: '/' serves the index page, '/ui' the latest PNG frame,
    anything else 404s."""
    if self.path == '/':
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.end_headers()
        try:
            # NOTE(review): the template is filled with a fixed host/port
            # ('localhost', 1000) -- presumably placeholders; confirm.
            self.wfile.write(
                bytes(self._index % ('localhost', 1000), "utf8"))
        except BaseException:
            # client went away mid-write; best effort
            pass
    elif self.path.startswith('/ui'):
        # the lock serialises reads against the renderer writing the frame
        with self._lock:
            self.send_response(200)
            self.send_header('Content-type', 'image/png')
            self.end_headers()
            try:
                with open(
                        "/tmp/pwnagotchi-{rand}.png".format(
                            rand=id(CustomVideoHandler)), 'rb') as fp:
                    shutil.copyfileobj(fp, self.wfile)
            except BaseException:
                core.log("could not open preview")
    else:
        self.send_response(404)
def unload(self, connection: psclient.PSConnection, module: str, force: bool = False) -> str:
    """Unloads a module

    Args:
        connection (Connection): the Connection object to unload the module into
        module (string): the ID of the module to unload
        force (bool, optional): whether or not to force-unload modules that are not loaded. Defaults to False.

    Returns:
        string: a description of what happened
    """
    if module not in connection.modules and not force:
        return f"The ``{module}`` module isn't loaded."
    try:
        # instantiate the module once and strip each of its commands
        instance = importlib.import_module(module).Module()  # type: ignore
        for command in instance.commands.keys():
            del connection.commands[command]
        connection.modules.remove(module)
        return f"Successfully unloaded the ``{module}`` module."
    except Exception as err:
        response = f"Error unloading module: {str(err)}."
        core.log(f"I: admin.unload(): {response}")
        return response
def l_melee_fs(this, e):
    """Handle a melee force-strike: log, deal damage, run procs, recover SP."""
    log('fs', 'succ')
    coef = this.conf.fs.dmg
    this.dmg_make('fs', coef)
    this.fs_proc(e)
    this.think_pin('fs')
    this.charge('fs', this.conf.fs.sp)
def twovlines(article, text):
    """Remove doubled vertical bars ('||') from wiki links in *text*.

    Image/file links are skipped, since they legitimately use '|' separators.
    Returns (errorcount, fixed_text, edit_summary, zeroedit).
    """
    errorcount = 0
    text = str(text)
    oldtext = text
    saves = ''
    zeroedit = 0
    brackets = re.findall(r"\[(.*?)\]", text)
    for item in brackets:
        if '||' in item and 'Kuva:' not in item and 'Tiedosto:' not in item and 'Image:' not in item and 'File:' not in item:
            errorcount += 1
            olditem = '[' + item + ']]'
            item = '[' + item + ']]'
            item = item.replace('||', '|')
            log('twovlines invalid link found: ' + article + '\n' + olditem + ' --> ' + item)
            text = text.replace(olditem, item)
    if text != oldtext:
        zeroedit = 1
        printlog('fixvlines error found: ' + article)
    if errorcount > 1 and lang == 'fi':
        saves = u"Botti poisti ylimääräiset pystyviivat linkeistä. "
    elif errorcount == 1 and lang == 'fi':
        saves = u"Botti poisti ylimääräisen pystyviivan linkistä. "
    elif errorcount > 1 and lang == 'en':
        # BUG FIX: the singular/plural English summaries were swapped
        saves = u"Bot has removed excessive vertical lines from links. "
    elif errorcount == 1 and lang == 'en':
        saves = u"Bot has removed excessive vertical line from link. "
    elif errorcount == 0:
        # BUG FIX: the zero-errors branch claimed an error was found
        printlog('fixvlines error not found: ' + article)
    return errorcount, text, saves, zeroedit
def authenticateUser(session, req, username, password): """authenticate the username/password combination. Only used if config.AUTH_TYPE=='FORM'. This sets session['username'], iff authentication is successful. This should raise an Exception if authentication fails (the caller must make sure to sanitize any error message, since there's no guarantee it won't contain passwords or other sensitive information). """ if password=='': raise Exception("empty password") #the ldap bind does not fail for empty password, so must catch it before import ldap ldap.set_option(ldap.OPT_DEBUG_LEVEL,255) try: try: authenticated = False for host in ('dc2-rc', 'dc3-rc'): try: l = ldap.initialize("ldaps://%s:636/" % host) l.protocol_version = ldap.VERSION3 l.simple_bind_s( core.getStdout("/n/sw/rc/bin/username2ldapatts -a distinguishedName %s" % core.shQuote(username)).strip(), password ) #will raise ldap.INVALID_CREDENTIALS in case of failure authenticated = True break except ldap.SERVER_DOWN, e: msg = "got ldap.SERVER_DOWN for [%s]: %s; will retry other hosts if available" % (host, e) core.log(msg, session, req) continue if not authenticated: raise Exception("cannot contact LDAP server(s)") except ldap.INVALID_CREDENTIALS: raise
def update(self):
    """Recompute the combined proc chance as 1 - P(no stack procs)."""
    self.uptime()
    miss_all = 1.0
    for stack_chance in self.stacks:
        miss_all *= 1.0 - stack_chance
    self._get = 1.0 - miss_all
    log("affliction", self.name, self._get)
def on(self):
    """Roll this affliction once, tracking the full probability distribution
    over (active stack timers, accumulated resistance) states.

    Returns the overall success probability of this particular roll.
    """
    timer = Timer(self.stack_end, self.duration).on()
    if self.states is None:
        # initial distribution: no stacks, base resistance, probability 1
        self.states = defaultdict(lambda: 0.0)
        self.states[self.State(frozenset(), self.resist)] = 1.0
    states = defaultdict(lambda: 0.0)
    total_p = 0.0
    for start_state, start_state_p in self.states.items():
        res = start_state.resist - self.res_modifier
        if res >= self.rate or res >= 1 or len(
                start_state.timers) >= self.stack_cap:
            # can't land from this state: resistance too high or stacks capped
            states[start_state] += start_state_p
        else:
            rate_after_res = min(1, self.rate - res)
            succeed_timers = frozenset(list(start_state.timers) + [timer])
            # on success, resistance grows by the tolerance step (capped at 1)
            state_on_succeed = self.State(succeed_timers, min(1.0, res + self.tolerance))
            overall_succeed_p = start_state_p * rate_after_res
            overall_fail_p = start_state_p * (1.0 - rate_after_res)
            total_p += overall_succeed_p
            states[state_on_succeed] += overall_succeed_p
            if overall_fail_p > 0:
                states[start_state] += overall_fail_p
    self.states = states
    self.update()
    self.event.rate = total_p
    self.event()
    self.start_rate = round(self.get(), 3)
    log('cc', self.name, self.start_rate or 'fail')
    return total_p
def load(self, connection: psclient.PSConnection, module: str, force: bool = False) -> str:
    """Loads a module

    Args:
        connection (Connection): the Connection object to load the module into
        module (string): the ID of the module to load
        force (bool, optional): whether or not to force-load modules that are already loaded. Defaults to False.

    Returns:
        string: a description of what happened
    """
    if module in connection.modules and not force:
        return f"The ``{module}`` module is already loaded -- did you mean to hotpatch it?"
    try:
        # instantiate the module once and register each of its commands
        instance = importlib.import_module(module).Module()  # type: ignore
        connection.commands.update(instance.commands)
        connection.modules.add(module)
        return f"Successfully loaded the ``{module}`` module."
    except Exception as err:
        response = f"Error loading module: {str(err)}."
        core.log(f"I: admin.load(): {response}")
        return response
def DateAdd(t, n, d):
    """Return datetime *d* advanced by *n* units of *t*.

    Args:
        t: unit name -- "days", "hours", "minutes" or "seconds" (case-insensitive).
        n: number of units to add (may be fractional).
        d: a datetime, or a "%Y-%m-%d %H:%M:%S" string; "" means 2000-01-01 00:00:00.

    Returns:
        datetime: d + n units.
    """
    try:
        if type(d) is str:
            if d == "":
                d = "2000-01-01 00:00:00"
            d = datetime.datetime.strptime(d, "%Y-%m-%d %H:%M:%S")
    except:
        # BUG FIX: message previously said "DateDiff"; the unreachable
        # 'return' after 'raise' has also been removed.
        log("DateAdd ERROR: Got an error converting strings to datetimes, make sure they are in the format of Y-m-d H:M:S!")
        raise
    unit = t.lower()
    if unit == "days":
        ret = d + datetime.timedelta(0, float(((n * 60) * 60) * 24))
    elif unit == "hours":
        ret = d + datetime.timedelta(0, float((n * 60) * 60))
    elif unit == "minutes":
        ret = d + datetime.timedelta(0, float(n * 60))
    elif unit == "seconds":
        ret = d + datetime.timedelta(0, float(n))
    return ret
def update_cache(groups_limit=3000):
    """Warm today's timetable cache for the most populous groups.

    Queries the users table for the top *groups_limit* groups by member count,
    fetches each group's timetable for today, and returns an HTML status
    report (in Ukrainian) listing each group's outcome.
    """
    # NOTE(review): groups_limit is interpolated into the SQL; safe for the
    # int default, but confirm callers never pass untrusted strings.
    query = 'SELECT u_group, count(*) FROM users ' \
            'GROUP BY u_group ORDER BY count(*) DESC LIMIT {}'.format(groups_limit)
    groups = core.DBManager.execute_query(query)
    start_time = time.time()
    today = datetime.date.today().strftime('%d.%m.%Y')
    ans = 'Завантажено розклад для {} груп:\n\n'.format(len(groups))
    for group in groups:
        timetable = get_timetable_to_cache(group=group[0], sdate=today, edate=today)
        # an empty list still counts as a successful fetch (group has no lessons)
        if timetable or isinstance(timetable, list):
            if not len(timetable):
                ans += '\U00002705 \U0001F534 {} - {}\n'.format(
                    group[0], group[1])  # No lessons
            else:
                ans += '\U00002705 \U0001F535 {} - {}\n'.format(
                    group[0], group[1])
        else:
            # fetch failed for this group
            ans += '\U0000274E {} - {}\n'.format(group[0], group[1])
    ans += '\n\U0001F552 <b>Час:</b> {} с.'.format(
        round(time.time() - start_time, 2))
    core.log(m='Завантаження кешу. Кількість груп: {}, час: {}'.format(
        len(groups), round(time.time() - start_time, 2)))
    return ans
def twovlines(article, text):
    """Remove doubled vertical bars ('||') from wiki links in *text*.

    Image/file links are skipped, since they legitimately use '|' separators.
    Returns (errorcount, fixed_text, edit_summary, zeroedit).
    """
    errorcount = 0
    text = str(text)
    oldtext = text
    saves = ''
    zeroedit = 0
    brackets = re.findall(r"\[(.*?)\]", text)
    for item in brackets:
        if '||' in item and 'Kuva:' not in item and 'Tiedosto:' not in item and 'Image:' not in item and 'File:' not in item:
            errorcount += 1
            olditem = '['+item+']]'
            item = '['+item+']]'
            item = item.replace('||', '|')
            log('twovlines invalid link found: '+article+'\n'+olditem+' --> '+item)
            text = text.replace(olditem, item)
    if text != oldtext:
        zeroedit = 1
        printlog('fixvlines error found: '+ article)
    if errorcount > 1 and lang == 'fi':
        saves = u"Botti poisti ylimääräiset pystyviivat linkeistä. "
    elif errorcount == 1 and lang == 'fi':
        saves = u"Botti poisti ylimääräisen pystyviivan linkistä. "
    elif errorcount > 1 and lang == 'en':
        # BUG FIX: the singular/plural English summaries were swapped
        saves = u"Bot has removed excessive vertical lines from links. "
    elif errorcount == 1 and lang == 'en':
        saves = u"Bot has removed excessive vertical line from link. "
    elif errorcount == 0:
        # BUG FIX: the zero-errors branch claimed an error was found
        printlog('fixvlines error not found: '+ article)
    return errorcount, text, saves, zeroedit
def dmg_make(this, name, dmg_coef, dtype=None):
    """Compute, log and apply one hit of damage.

    Args:
        name: label used for logging and proc dispatch.
        dmg_coef: damage coefficient fed to the damage formula.
        dtype: damage type for the formula; defaults to *name*.

    Returns:
        the computed damage amount.
    """
    if dtype is None:  # idiom fix: was '== None'
        dtype = name
    count = this.dmg_formula(dtype, dmg_coef)
    log('dmg', name, count)
    this.dmg_proc(name, count)
    return count
def log(self, step, prefix=None, print_terminal_info=True):
    """ Log stored data to disk and tensorboard """
    # Delegates to the module-level log() helper; the bare name resolves to
    # the module function, not this method, since methods aren't in scope here.
    log(self._logger, self._writer, self._model_name, prefix=prefix, step=step, print_terminal_info=print_terminal_info)
def _exec(self, tree, thing, result_dir):
    """Dispatch execution to the configured runner ('run' command or 'pymod')."""
    if "run" in self.info:
        return self._exec_run(tree, thing, result_dir)
    if "pymod" in self.info:
        stamp = "%H:%M:%S.%f"
        core.log("START", datetime.datetime.now().strftime(stamp))
        ret, desc = self._exec_pyfunc(tree, thing, result_dir)
        core.log("END", datetime.datetime.now().strftime(stamp))
        return ret, "", "", desc
def send(self, binary, base64, json, json_base64):
    """Log the sizes and the encoded forms of one outgoing payload."""
    parts = [
        "Output: sizes - binary(%d) base64(%d) json(%d) json_base64(%d)\n"
        % (len(binary), len(base64), len(json), len(json_base64)),
        " BASE64: " + base64 + "\n",
        " JSON BASE64: " + json_base64 + "\n",
        " JSON:\n" + json,
    ]
    log(self.level, "".join(parts))
def _cb_act_end(this, e):
    """Action-end callback: if this action is still current, retire it and go idle."""
    if this.getdoing() == this:
        if loglevel >= 2:
            log('ac_end', this.name)
        this.status = -2  # mark action as finished
        this._setprev()  # turn this from doing to prev
        this._static.doing = this.nop
        this.idle_event()
def render(img):
    """Persist the rendered frame for the HTTP preview endpoint (best effort)."""
    with CustomVideoHandler._lock:
        frame_path = "/tmp/pwnagotchi-{rand}.png".format(rand=id(CustomVideoHandler))
        try:
            img.save(frame_path, format='PNG')
        except BaseException:
            core.log("could not write preview")
def _sender(self):
    """Broadcast the advertisement frame once per period while running."""
    core.log("started advertiser thread (period:%s sid:%s) ..." % (str(self._period), self._me.session_id))
    while self._running:
        try:
            sendp(self._frame, iface=self._iface, verbose=False, count=5, inter=self._period)
        except Exception as err:
            # keep advertising even if one send fails
            core.log("error: %s" % err)
        time.sleep(self._period)
def charge(this, name, sp):
    """Add skill points (scaled by the SP modifier for *name*) to all three skills."""
    sp = sp * this.sp_mod(name)
    this.s1.charge(sp)
    this.s2.charge(sp)
    this.s3.charge(sp)
    this.think_pin('sp')
    # log the resulting charged/required SP of each skill
    log('sp', name, sp, '%d/%d, %d/%d, %d/%d' % (
        this.s1.charged, this.s1.sp, this.s2.charged, this.s2.sp, this.s3.charged, this.s3.sp))
def _cb_act_end(this, e):
    """Action-end callback (dict-backed variant): if this action is still
    current, retire it and trigger the idle event."""
    if this.getdoing() == this:
        if loglevel >= 2:
            log("ac_end", this.name)
        this.status = -2  # mark action as finished
        this._setprev()  # turn this from doing to prev
        this._static['doing'] = this.nop
        this.e_idle.trigger()
def charge_gauge(this, value):
    """Fill the dragon gauge (sword wielders gain 15% extra), clamped at 100."""
    if this.status == -1:
        return
    if this.adv.slots.c.wt == 'sword':
        gain = value * 1.15
    else:
        gain = value
    this.dragon_gauge = min(this.dragon_gauge + gain, 100)
    log('dragon', 'gauge', '{:.2f} / 100'.format(this.dragon_gauge))
def d_shift_end(this, t):
    """End of dragon-shift: drop the dracolith modifier and return to idle."""
    log('debug', 'dshift_end', 'duration {:.2f}'.format(this.shift_end_timer.timing - this.shift_start_time))
    this.dracolith_mod.off()
    this.has_skill = True  # skills usable again after shifting back
    this.status = -2
    this._setprev()  # turn this from doing to prev
    this._static.doing = this.nop
    this.idle_event()
def setup_events(self):
    """Connect to bettercap and silence the configured event tags (best effort)."""
    core.log("connecting to %s ..." % self.url)
    for tag in self._config['bettercap']['silence']:
        try:
            self.run('events.ignore %s' % tag, verbose_errors=False)
        except Exception:
            # deliberately best-effort: failing to silence one tag is not
            # fatal (removed the unused 'as e' binding)
            pass
def next(self):
    """Close the current epoch: snapshot stats, compute the reward, log a
    summary, and reset per-epoch counters for the next one."""
    # an epoch with no activity and no handshakes counts as inactive
    if self.any_activity is False and self.did_handshakes is False:
        self.inactive_for += 1
        self.active_for = 0
    else:
        self.active_for += 1
        self.inactive_for = 0
    now = time.time()
    cpu = pwnagotchi.cpu_load()
    mem = pwnagotchi.mem_usage()
    temp = pwnagotchi.temperature()
    self.epoch_duration = now - self.epoch_started
    # cache the state of this epoch for other threads to read
    self._epoch_data = {
        'duration_secs': self.epoch_duration,
        'slept_for_secs': self.num_slept,
        'blind_for_epochs': self.blind_for,
        'inactive_for_epochs': self.inactive_for,
        'active_for_epochs': self.active_for,
        'missed_interactions': self.num_missed,
        'num_hops': self.num_hops,
        'num_deauths': self.num_deauths,
        'num_associations': self.num_assocs,
        'num_handshakes': self.num_shakes,
        'cpu_load': cpu,
        'mem_usage': mem,
        'temperature': temp
    }
    self._epoch_data['reward'] = self._reward(self.epoch + 1, self._epoch_data)
    self._epoch_data_ready.set()
    core.log(
        "[epoch %d] duration=%s slept_for=%s blind=%d inactive=%d active=%d hops=%d missed=%d "
        "deauths=%d assocs=%d handshakes=%d cpu=%d%% mem=%d%% temperature=%dC reward=%s" % (
            self.epoch, core.secs_to_hhmmss(self.epoch_duration),
            core.secs_to_hhmmss(self.num_slept), self.blind_for,
            self.inactive_for, self.active_for, self.num_hops,
            self.num_missed, self.num_deauths, self.num_assocs,
            self.num_shakes, cpu * 100, mem * 100, temp,
            self._epoch_data['reward']))
    # roll over to the next epoch and clear per-epoch counters
    self.epoch += 1
    self.epoch_started = now
    self.did_deauth = False
    self.num_deauths = 0
    self.did_associate = False
    self.num_assocs = 0
    self.num_missed = 0
    self.did_handshakes = False
    self.num_shakes = 0
    self.num_hops = 0
    self.num_slept = 0
    self.any_activity = False
def performBackup( dataID ):
    """Run the backup job for one data container: rsync it, record the job in
    the database, optionally archive the result, and mail a status report."""
    log("trying to perform backup for dataID " + str( dataID) )
    c = config()
    MAINVOLUME = c.getMainVolume()
    debug = c.getDebug()
    data = database()
    container = data.getDataContainer( dataID )[0]
    if( container.type == "rsync" ):
        # with no custom options, make sure the target directory layout exists
        if container.options == "" or container.options == None:
            checkDirs( container )
        rsync_cmd = "rsync -avz " + container.remotePath + " " + MAINVOLUME + "/" + container.localPath + "/cur/"
        returnValue = 0
        id = 0
        #get directory size before backup
        start_size = getDirectorySize( MAINVOLUME + "/" + container.localPath + "/cur/" )
        log( rsync_cmd )
        id = data.startJob( "rsync" , int(dataID))
        returnValue, errorMessage, output = executeCommand( rsync_cmd )
        #if len(errorMessage) == 0:
        #    errorMessage = output
        log( "backup command returned: " + str(returnValue ))
        #get directory size after backup
        final_size = getDirectorySize( MAINVOLUME + "/" + container.localPath + "/cur/" )
        transferredSize = final_size - start_size
        log( "transferred " + str(transferredSize) + "kb")
        if int(returnValue) == 0:
            data.finishJob(int(dataID), int(id), "finished", errorMessage, output, transferredSize)
            #start to archive the backup, if necessary
            archive , method , compress,ttl = data.getArchiveInfo( int(dataID) )
            if archive != "disabled":
                id = data.startJob( "archive" , int(dataID))
                archiveFolder( container , method , compress )
                data.finishJob( int(dataID),int(id), "finished","","", 0)
            mailBody = "Backup finished on host '" + str(c.getHostname()) + "'\n"
            mailBody += "Job for dataID " + str(dataID) + " was succesful: " + str(output)
            notifyByMail( mailBody )
        else:
            #Oh, the backup was not successful. Maybe we should try again later?
            data.finishJob( int(dataID), int(id), "aborted", errorMessage, output, transferredSize )
            mailBody = "Backup aborted on host '" + str(c.getHostname()) + "'\n"
            mailBody +="Job for dataID " + str(dataID) + " was not succesful: " + str(errorMessage)
            notifyByMail(mailBody)
    else:
        log("Unsupported container type: %s" % container.type)
    # NOTE(review): placed at function level so monitoring syncs after every
    # run, including unsupported types -- confirm against original layout.
    syncMonitorData()
def index():
    """Bootstrap endpoint: ensure DB tables exist, re-register the Telegram
    webhook, and notify the admin chat. Returns 'ok'."""
    core.User.create_user_table_if_not_exists()
    core.MetricsManager.create_metrics_table_if_not_exists()
    core.create_audience_db_if_not_exists()
    # drop and re-register the webhook so the URL is always current
    bot.delete_webhook()
    bot.set_webhook(settings.WEBHOOK_URL + settings.WEBHOOK_PATH, max_connections=1)
    # NOTE(review): hard-coded admin chat id -- consider moving to settings
    bot.send_message('204560928', 'Running...')
    core.log(m='Webhook is setting: {} by run url'.format(bot.get_webhook_info().url))
    return 'ok'
def ticker(tag):
    """Log the time elapsed since the previous tick and since start, then
    remember this tick as the new reference point."""
    global last_time
    global start_time
    now = time.time()
    if do_print:
        core.log("[{0}] {1} , total = {2}".format(tag, now - last_time, now - start_time))
    last_time = now
def on_handshake(agent, filename, access_point, client_station):
    """When the plugin is running, dump the current GPS fix next to the capture file."""
    if not running:
        return
    info = agent.session()
    gps = info['gps']
    gps_filename = filename.replace('.pcap', '.gps.json')
    core.log("saving GPS to %s (%s)" % (gps_filename, gps))
    with open(gps_filename, 'w+t') as fp:
        json.dump(gps, fp)
def off(this):
    """Deactivate this buff early; returns self, or None if already inactive."""
    if this.__active == 0:
        return
    # NOTE(review): '__active' looks name-mangled; this def presumably
    # belongs inside a class body -- confirm before moving it.
    log('buff', this.name, '%s: %.2f' % (this.mod_type, this.value()), this.name + ' buff end <turn off>')
    this.__active = 0
    this.modifier.off()
    this.buff_end_timer.off()
    return this
def _on_error(self, who, e): error = "%s" % e # when we're trying to associate or deauth something that is not in range anymore # (if we are moving), we get the following error from bettercap: # error 400: 50:c7:bf:2e:d3:37 is an unknown BSSID or it is in the association skip list. if 'is an unknown BSSID' in error: self._on_miss(who) else: core.log("error: %s" % e)
def startServer( self ): os.chdir( "/usr/lib/datenfresser/web") try: pid = os.fork() if pid > 0: return except OSError, e: print >>sys.stderr, "fork #2 failed: %d (%s)" % (e.errno, e.strerror) log("Starting webserver failed: " + e) sys.exit(1)
def Request():
    """Emit the current core timestamp as an HTML response and log it."""
    import core
    Header("Content-type: text/html")
    stamp = core.Utils().Timestamp()
    msg = "calling core time time %s" % (stamp)
    core.log(msg)
    Write(msg)
def create():
    """Prompt for keywords until an empty line; log each one with the download dir."""
    global globalInteger
    location = 'Placeholder'
    while location:
        location = input("Keyword> ")
        if location:
            globalInteger = globalInteger + 1
            entry = "{}\n{}".format(downloadDir, location)
            core.log('info', entry)
    # re-read the accumulated entries (side effect of core.read is preserved)
    data = core.read('info')
    return
def createPlaylist():
    """Prompt for playlist URLs until an empty line; log and process each one."""
    global otherGlobalInteger
    playlistUrl = 'Placeholder'
    while playlistUrl:
        playlistUrl = input("Playlist Url> ")
        if playlistUrl:
            otherGlobalInteger = otherGlobalInteger + 1
            entry = "{}\n{}".format(downloadDir, playlistUrl)
            core.log('playlistUrls', entry)
            playlist(playlistUrl)
    # re-read the accumulated entries (side effect of core.read is preserved)
    playlistUrls = core.read('playlistUrls')
    return
def search(data):
    """For each stored keyword, run a YouTube search and log a watch URL."""
    global globalInteger
    il = globalInteger  # number of keywords to process
    ii = 1  # entries appear to alternate with another field -- TODO confirm stride of 2
    while il > 0:
        location = data[ii]
        query_string = urllib.parse.urlencode({"search_query" : location})
        html_content = urllib.request.urlopen("http://www.youtube.com/results?" + query_string)
        search_results = re.findall(r'href=\"\/watch\?v=(.{11})', html_content.read().decode())
        # NOTE(review): prints result [searchInteger] (a global not set here)
        # but logs result [0] -- presumably both should use the same index;
        # confirm intent before changing.
        print("Adding video1: " + search_results[searchInteger])
        core.log('videos',("http://www.youtube.com/watch?v=" + search_results[0]))
        ii = ii + 2
        il = il - 1
    videos = core.read('videos')
    return
def playlist(playlistUrl):
    """Scrape video ids from a playlist page and log each one as a watch URL."""
    html_content = urllib.request.urlopen(playlistUrl)
    search_results = re.findall(r'href=\"\/watch\?v=(.{11})', html_content.read().decode())
    for video_id in search_results:
        print("Adding video2: " + video_id)
        core.log('videos', "http://www.youtube.com/watch?v=" + video_id)
    return
def fix2brackets(article, text):
    """Strip redundant double square brackets from external links in wiki text.

    Returns (errorcount, fixed_text, edit_summary, zeroedit).
    """
    errorcount = 0
    text = str(text)
    oldtext = text
    saves = ''
    zeroedit = 0
    twobrackets = re.findall(r"\[(.*?)\]", text)
    for item in twobrackets:
        location = text.index(item)
        # only touch external http(s) links that open with an extra '[' and
        # are not image/file links
        if '[' in item[0:2]:
            if 'https://' in item[0:10] or 'http://' in item[0:10]:
                if 'Tiedosto:' not in item and 'Kuva:' not in item and 'File:' not in item and 'Image:' not in item:
                    errorcount += 1
                    location = text.index(item)+len(item)
                    if ']' in text[location+1:location+2]:
                        # closing side is doubled too: replace '[[...]]' with '[...]'
                        olditem = '['+str(item)+']]'
                        item = item.replace('[', '')
                        item = '['+item+']'
                        log('fix2brackets: '+article+'\n'+olditem+' --> '+item)
                        text = text.replace(olditem, str(item))
                    else:
                        # only the opening bracket is doubled
                        olditem = '['+str(item)+']'
                        item = item.replace('[', '')
                        item = '['+item+']'
                        log('fix2brackets: '+article+'\n'+olditem+' --> '+item)
                        text = text.replace(olditem, str(item))
    if text != oldtext:
        zeroedit = 1
        printlog('fix2brackets error found: '+ article)
    if errorcount > 1 and lang == 'fi':
        saves = u"Botti poisti ylimääräiset hakasulkeet ulkoisista linkeistä. "
    elif errorcount == 1 and lang == 'fi':
        saves = u"Botti poisti ylimääräiset hakasulkeet ulkoisesta linkistä. "
    elif errorcount > 1 and lang == 'en':
        saves = u"Bot has removed excessive brackets from external links. "
    elif errorcount == 1 and lang == 'en':
        saves = u"Bot has removed excessive brackets from external link. "
    elif errorcount == 0:
        printlog('fix2brackets error not found: '+ article)
    return errorcount, text, saves, zeroedit
def DateDiff(t, d1, d2):
    """Return d1 - d2 expressed in units of *t*.

    Args:
        t: unit name -- "days", "hours", "minutes" or "seconds" (case-insensitive).
        d1, d2: datetimes, or "%Y-%m-%d %H:%M:%S" strings; "" means
            2000-01-01 00:00:00.

    Returns:
        the (possibly fractional) difference; 0 when the instants are equal.
    """
    try:
        if type(d1) is str:
            if d1 == "":
                d1 = "2000-01-01 00:00:00"
            d1 = datetime.datetime.strptime(d1, "%Y-%m-%d %H:%M:%S")
        if type(d2) is str:
            if d2 == "":
                d2 = "2000-01-01 00:00:00"
            d2 = datetime.datetime.strptime(d2, "%Y-%m-%d %H:%M:%S")
    except:
        # unreachable 'return' after 'raise' removed
        log("DateDiff ERROR: Got an error converting strings to datetimes, make sure they are in the format of Y-m-d H:M:S!")
        raise
    try:
        # renamed from 'sum' (shadowed the builtin); seconds between the instants
        delta = time.mktime(d1.timetuple()) - time.mktime(d2.timetuple())
        if delta == 0:
            return 0
        unit = t.lower()
        if unit == "days":
            ret = delta / 86400
        elif unit == "hours":
            ret = (delta / 86400) * 24
        elif unit == "minutes":
            ret = ((delta / 86400) * 24) * 60
        elif unit == "seconds":
            ret = (((delta / 86400) * 24) * 60) * 60
        return ret
    except:
        # unreachable 'return' after 'raise' removed
        log("DateDiff ERROR: Got an error converting to " + t)
        raise
def archiveFolder( container , method , compress ):
    """Archive the 'cur' backup of *container* using *method*: 'tar' (optionally
    bzip2-compressed), 'hardlinks' (cp -al snapshot) or 'btrfs snapshot'."""
    c = config()
    MAINVOLUME = c.getMainVolume()
    localPath = MAINVOLUME + "/" + container.localPath
    log("archive folder " + localPath + " with " + method )
    #be sure that the path ends with a "/"
    if localPath[-1] != "/":
        localPath = localPath + "/"
    # timestamp suffix: year_month_day_hour_minute (UTC)
    dateTupel = gmtime(time())
    dateString = str(dateTupel[0]) + "_" + str(dateTupel[1]) + "_" + str(dateTupel[2]) + "_" + str(dateTupel[3]) + "_" + str(dateTupel[4])
    # NOTE(review): all branches build shell command strings from container
    # names/paths and run them with shell=True -- unsafe if those values can
    # contain shell metacharacters; consider an argument list instead.
    if method == "tar":
        if compress == "on":
            tar_cmd = "tar -jcf " + localPath + "archived/" + container.name + "_" + dateString + ".tar.bz2 " + localPath + "cur/*"
        else:
            tar_cmd = "tar -cf " + localPath + "archived/" + container.name + "_" + dateString + ".tar " + localPath + "cur/*"
        log( tar_cmd , "verbose" )
        subprocess.Popen(tar_cmd,shell=True, stdout=subprocess.PIPE).wait()
    if method == "hardlinks":
        # see http://www.mikerubel.org/computers/rsync_snapshots/
        if sys.platform == "darwin":
            #"gcp" comes with the coreutils package from macports..
            cp_command = "gcp"
        else:
            cp_command = "cp"
        cmd = cp_command + " -al " + localPath + "cur/" + " " + localPath + "snapshots/" + container.name + "_" + dateString
        log( cmd , "verbose" )
        subprocess.Popen(cmd,shell=True, stdout=subprocess.PIPE).wait()
    if method == "btrfs snapshot":
        cmd = "btrfsctl -s " + localPath + "snapshots/" + container.name + "_" + dateString + " " + localPath + "cur/"
        log( cmd , "verbose" )
        subprocess.Popen(cmd,shell=True, stdout=subprocess.PIPE).wait()
def syncMonitorData():
    """Push local changes to the monitoring server when the monitor client is
    enabled (Python 2 except syntax preserved)."""
    c = config()
    if c.getMonitorClientEnabled() == "False":
        return
    #push changes to the monitoring server
    log( "trying monitorSync " + str(c.getMonitorClientEnabled()))
    try:
        monitorClient = datenfresserMonitorClient()
        monitorClient.sync()
    except Exception, e:
        #ie.print_exc(file=sys.stdout)
        log( str( sys.exc_info()[0] ) )
        log("Exception during monitor sync: " + str(e) )
def main():
    """Drive the full flow: collect keywords and playlists, resolve videos, download."""
    clear()
    create()
    info = core.read('info')
    core.log('log', info)
    info = core.split(info)
    createPlaylist()
    urls = core.read('playlistUrls')
    core.log('log', urls)
    urls = core.split(urls)
    search(info)
    vids = core.read('videos')
    core.log('log', vids)
    vids = core.split(vids)
    download(info, vids)
    clear()
def log_test(s):
    """Smoke-test helper: forward *s* straight to the core logger."""
    core.log(s)
def fixblink(article ,text):
    """Repair bracketed bare-domain links in wiki text by prefixing 'http://'
    (and normalising odd 'www'-like first labels).

    Returns (errorcount, fixed_text, edit_summary, zeroedit).
    """
    errorcount = 0
    saves = ''
    zeroedit = 0
    linkpartlist = []
    fixedlinks = []
    invalidlinks = []
    text = str(text)
    oldtext = text
    characters = 'abcdefghijklmnopqrstuvxyzäöABCDEFGHIJKLMNOPQRSTUVXYZŽÄÖ!?*[]{}()0123456789'
    special = '!?*[]{}()'
    twobrackets = re.findall(r"\[(\S+)\]", text)
    for hit in twobrackets:
        link = str(hit)
        # does the bracketed text look like a bare domain name?
        matches = re.search(r'(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}', link)
        if 'http://' not in link and 'https://' not in link and matches != None and 'ref' not in link and '@' not in link and '[' not in link and '{' not in link[0:2]:
            orglink = '['+link+']'
            errorcount += 1
            linkpartlist = link.split('.')
            if len(linkpartlist) >= 3 and 'w' in linkpartlist[0] and linkpartlist[0] != 'www':
                # first label resembles a mistyped 'www'
                if any((char in linkpartlist[0]) for char in characters):
                    if any((char in linkpartlist[0]) for char in special):
                        # contains special characters: leave this link alone
                        continue
                    else:
                        if len(linkpartlist[0]) != 3:
                            linkpartlist[0] = 'www'
                        # rejoin the dotted labels into one hostname
                        time = 0
                        finallink = ''
                        for item in linkpartlist:
                            time += 1
                            if time != len(linkpartlist):
                                finallink = finallink+item+'.'
                            else:
                                finallink = finallink+item
                        link = '[http://'+finallink+']'
                        log('fixblink invalid link found: '+article+'\n'+orglink+' --> '+link)
                        text = text.replace(orglink, link)
                        fixedlinks.append(link)
                        invalidlinks.append(orglink)
                else:
                    printlog('www fix error')
            else:
                # plain bare domain: just add the scheme
                link = '[http://'+link+']'
                log('fixblink invalid link found: '+article+'\n'+orglink+' --> '+link)
                fixedlinks.append(link)
                invalidlinks.append(orglink)
                text = text.replace(orglink, link)
    if text != oldtext:
        zeroedit = 1
        printlog('fixblinks error found: '+ article)
        printlog(str(errorcount)+' invalid links found')
    if errorcount > 1 and lang == 'fi':
        saves = u"Botti korjasi linkkejä. "
    elif errorcount == 1 and lang == 'fi':
        saves = u"Botti korjasi linkin. "
    elif errorcount > 1 and lang == 'en':
        saves = u"Bot has fixed links. "
    elif errorcount == 1 and lang == 'en':
        saves = u"Bot has fixed link. "
    elif errorcount == 0:
        printlog('fixblinks error not found: '+ article)
    return errorcount, text, saves, zeroedit
def log(filefrom, fileto):
    """Copy the contents of one core store into another via the core logger."""
    contents = core.read(filefrom)
    core.log(fileto, contents)
def attach_fitnesse(s):
    """Attach the lab volume to instance *s* and announce the lab is up."""
    core.attach_volume('vol-1fb7d673', s)
    core.log( "SP2013 Lab has been started!" )
def startServer( self ):
    """Run the monitoring server: a select()-based loop speaking a small
    line protocol (auth / host / checkDataID / pushDataContainer / data /
    commit / getLastID / exit) with support for chunked 'data' transfers.

    Python 2 code (print statements and str/bytes mixing preserved).
    """
    log( "Starting datenfresser monitoring server on port %s" % self.port )
    #or logEntry in self.dataBase.getLogs(0):
    server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    server.bind(("", int(self.port)))
    server.listen(1)
    clients = []
    state = {}  # per (ip, port): a stateContainer with auth/chunking state
    try:
        while True:
            read, write, oob = select.select([server] + clients, [], [])
            for sock in read:
                if sock is server:
                    # new incoming connection
                    client, addr = server.accept()
                    clients.append(client)
                    log( "#Client %s connected" % addr[0] )
                else:
                    result = "ok"
                    while 1:
                        ip = sock.getpeername()[0]
                        ipPortTuple = sock.getpeername()
                        if ipPortTuple in state.keys():
                            print "before recv; waiting for " + str(state[ ipPortTuple ].length) + " bytes "
                        else:
                            print "before recv"
                        #receive outstanding data
                        message = sock.recv(1024)
                        if ipPortTuple in state.keys() and state[ ipPortTuple ].length == 0:
                            print "message: " + message
                        if len(message) == 0:
                            self.reply( sock, "zero length" )
                            break
                        #check if this is a part of a chunked transfer
                        lastChunk = False
                        if ipPortTuple in state.keys() and state[ ipPortTuple ].state == "recv":
                            state[ ipPortTuple ].length -= len(message)
                            state[ ipPortTuple ].data += message
                            if state[ ipPortTuple ].length <= 0:
                                #this was the last chunk, handle the event later
                                lastChunk = True
                            else:
                                #omit a reply and recv the other chunks..
                                continue
                        #check if this was the last chunk
                        if lastChunk:
                            state[ ipPortTuple ].state = ""
                            state[ ipPortTuple ].length = 0
                            self.reply(sock, "recv ok")
                            break
                        #every client has to authenticate itself at the
                        #beginning of a transmission
                        if message[0:4] == "auth":
                            parts = message.split(" ")
                            user = parts[1].strip()
                            password = parts[2].strip()
                            if user == self.config.getLocalMonitorUser() and password == self.config.getLocalMonitorPassword():
                                newState = stateContainer()
                                newState.authState = "authenticated"
                                state[ ipPortTuple ] = newState
                            else:
                                print "wrong credentials"
                                sock.close()
                                clients.remove(sock)
                            print "[%s] %s" % (ip, message)
                            print state
                            result = "auth ok"
                        else:
                            if ipPortTuple in state.keys() and state[ ipPortTuple ].authState == "authenticated":
                                pass
                            else:
                                print "not authenticated"
                                log( "#Connection to %s closed" % ip )
                                #sock.close()
                                #clients.remove(sock)
                            if message[0:4] == "host":
                                #announce the clients host name
                                parts = message.split(" ")
                                state[ ipPortTuple ].hostname = parts[1]
                            if message[0:11] == "checkDataID":
                                #checkDataID id checksum
                                #used to check if a data container with this id already exists
                                parts = message.split(" ")
                                id = parts[1]
                                checksum = parts[2]
                                if self.database.checkForRemoteContainer( state[ ipPortTuple ].hostname, id , checksum ):
                                    result = "dataID known"
                                else:
                                    result = "dataID unknown"
                            if message[0:17] == "pushDataContainer":
                                #checkDataID id checksum
                                #used to check if a data container with this id already exists
                                parts = message.split(" ")
                                id = parts[1]
                                checksum = parts[2]
                                if self.database.checkForRemoteContainer( id, state[ ipPortTuple ].hostname , checksum ):
                                    result = "dataID known"
                                else:
                                    result = "dataID unknown"
                            if message[0:4] == "data":
                                #format: 'data sizeof(x) 01100011....'
                                print "Adding data was requested"
                                parts = message.split(" ")
                                #store the size of the whole data
                                state[ ipPortTuple ].length = int(parts[1])
                                #calculate the payload size
                                startOfData = find( message, parts[1])
                                startOfData = startOfData + len(parts[1])
                                payload = message[startOfData:]
                                payloadSize = len(payload)
                                #we have already received the first chunk
                                state[ ipPortTuple ].length -= payloadSize
                                #if we're receiving multiple packates, change state of this recv. thread
                                if state[ ipPortTuple ].length > 0:
                                    state[ ipPortTuple ].state = "recv"
                                state[ ipPortTuple ].data = payload
                            if message[0:6] == "commit":
                                print "Committing your data"
                                print "Data before commit: " + state[ ipPortTuple ].data
                                m = self.xmlHandler.parseXml( state[ ipPortTuple ].data )
                                if m.classname == "monitorLog":
                                    self.database.insertMonitorLog( m )
                                if m.classname == "dataContainer":
                                    self.database.addRemoteDataContainer( state[ ipPortTuple ].hostname , m)
                                result = "commit ok"
                            if message[0:9] == "getLastID":
                                parts = message.split(" ")
                                host = parts[1].strip()
                                print "Getting last id for host " + host
                                result = str(self.database.getLastRemoteLogID( host ))
                            if message[0:4] == "exit":
                                log( "#Connection to %s closed" % ip )
                                result = "exit"
                                sock.send("bye")
                                sock.close()
                                del state[ipPortTuple]
                                clients.remove(sock)
                                break
                        # short read means the request is complete: answer it
                        if len(message) == 0 or len(message) < 1024 and result != "exit" and result != "recv ok":
                            self.reply( sock, result )
                            break
    finally:
        # always release the sockets, even on a crash
        for c in clients:
            c.close()
        server.close()
#!/usr/bin/python import core if __name__ == "__main__": try: core.init() core.start_dc() core.start_spots() core.tune_sql() core.tune_wss() except Exception, e: print e.message core.log( e.message )
# Google crawler driver: runs ggcrawler once for each donation-related
# search keyword, using connection settings from the project SETTINGS.
#
# NOTE(review): os, sys and datetime were used below without an explicit
# import — presumably they leaked in via ``from core.log import *``.
# Import them explicitly so the script does not depend on that side effect.
import datetime
import os
import sys

from core.google import ggcrawler
from core.log import *
from settings import SETTINGS

# Make the script's own directory importable.
# NOTE(review): this runs AFTER the core/settings imports above, so it
# cannot help resolve them — confirm whether it was meant to come first.
current_path = os.path.split(os.path.realpath(sys.argv[0]))[0]
sys.path.append(current_path)

# Search keyword variants: donation amounts, schools, companies, executives.
keywords = [
    '捐赠 元',
    '捐赠 万元 OR 亿元',
    '捐赠 元 小学 OR 中学 OR 大学 OR 学校 OR 学院 OR 班',
    '捐赠 元 集团 OR 有限公司 OR 有限责任公司 OR 基金会 -学校 -学院 -小学 -中学 -大学 -班',
    '捐赠 元 总经理 OR 董事长'
]

start = datetime.datetime.now()
log(NOTICE, 'Google Crawler Initializing...')
for keyword in keywords:
    # One crawl per keyword; all connection parameters come from SETTINGS.
    ggcrawler(keyword, SETTINGS['project'], SETTINGS['address'],
              SETTINGS['port'], SETTINGS['username'], SETTINGS['password'])
log(NOTICE, 'Mission completes. Time: %d sec(s)' % (int((datetime.datetime.now() - start).seconds)))

if __name__ == '__main__':
    # All work happens at import time above; guard kept for compatibility.
    pass
#!/usr/bin/python
import core
import os

# Launch a spot instance from the "selenium" image, point the selenium
# A-record under quest.com at its private IP via cli53, and log where
# the instance came up.
if __name__ == "__main__":
    core.init()
    image = core.get_image("selenium")
    spot_request = core.start_spot(image.id, 'm1.small')
    instance_id = core.get_instance(spot_request)
    core.wait_running(instance_id)
    instance = core.get_instance_by_id(instance_id)
    dns_command = "cli53 rrcreate quest.com selenium A %s --replace" % instance.private_ip_address
    os.system(dns_command)
    core.log('selenium started at %s' % instance.public_dns_name)
subprocess.Popen("kill -9 %s" % (b), shell=True).wait() ipaddr.connect(('localhost', 80)) if ipaddr: print "[*] Sorry hoss, couldn't kill it, check whats running on 80 and restart SET!" sys.exit() if not ipaddr: print "[*] Success, the stale process has been terminated and SET is running normally.." else: print core.bcolors.GREEN + "[*] If you want to use Apache, edit the config/set_config" print core.bcolors.ENDC + "Exit whatever is listening and restart SET.\n" + core.bcolors.ENDC sys.exit() # if apache is set to run let the user know we are good to go if apache == 1: print core.bcolors.GREEN + "\n[*] Apache appears to be running, moving files into Apache's home."+ core.bcolors.ENDC except Exception, e: core.log(e) # if we don't have anything running on 80 and we want apache, then flag an error. if apache == 1: print core.bcolors.RED + "\n[*] Error! Apache does not appear to be running.\nStart it or turn APACHE off in config/set_config" + core.bcolors.ENDC # see if they want an option to turn it on pause = raw_input("[*] Do you want SET to try and start it for you? yes or no: ") if pause == "yes" or pause == "y": apache_counter = 0 if os.path.isfile("/etc/init.d/apache2"): subprocess.Popen("/etc/init.d/apache2 start", shell=True).wait() apache_counter = 1 if os.path.isfile("/etc/init.d/httpd"): subprocess.Popen("/etc/init.d/httpd start", shell=True).wait() apache_counter = 1 if apache_counter == 0: print "[!] Unable to start Apache through SET, please turn Apache off in the set_config or turn it on manually!"
def main():
    """Interactive driver for the wiki fixer bot.

    Reads a list-for-bots (.lfb) file of article titles, runs every fixer
    method over each article's wikitext, optionally shows a review diff,
    and saves accepted changes via pywikibot.

    Relies on module globals: ``testmode``, ``reviewmode``, ``printlog``,
    ``log``, ``glob``, ``adiffer``, ``pywikibot`` and the fixer functions
    named in ``methods`` (each returns (fixcount, text, summary, zeroedit)).
    """
    YEL = '\033[93m'  # ANSI yellow, used to highlight edit summaries
    END = '\033[0m'   # ANSI reset
    # Fixer functions, resolved by name from globals() below.
    methods = ['fix2brackets', 'fixpiped', 'fixreflink', 'fixblink',
               'twovlines', 'brfix', 'centerfix', 'smallfix', 'typofix',
               'reftosrc', 'fixreflist']
    start_time = datetime.datetime.now()
    fixcount = 0
    zeroedit = 1
    if testmode == 0:
        print('test mode disabled')
    else:
        print('test mode enabled')
    if reviewmode == 1:
        print('review mode enabled\n')
    else:
        print('review mode disabled\n')
    filename = input('list for bots file name: ')
    filenamef = 'core/lfb/'+filename+'.lfb'  # list of articles
    try:
        articles = open(filenamef, 'r')
    except FileNotFoundError:
        print('error: file not found')
        listfiles = glob('core/lfb/*.lfb')
        print('\navailable lists:\n')
        for item in listfiles:
            print(item.replace('core/lfb/', '').replace('.lfb', ''))
        print()
        # BUGFIX: must return here. The original fell through after the
        # recursive retry, reaching the loop below with ``articles``
        # unbound and raising NameError.
        return main()
    # check article
    for article in articles:
        zeroedit = 1
        saves = ''
        site = pywikibot.Site()
        page = pywikibot.Page(site, article)
        try:
            text = str(page.text)
        except pywikibot.exceptions.InvalidTitle:
            continue
        if text == '':
            printlog("this page is empty or it doesn't exist")
            continue
        oldtext = text
        # Apply every fixer in sequence; each one threads the text through.
        for method in methods:
            func = globals()[method]
            infoback = func(article, text)
            text = infoback[1]
            fixcount += infoback[0]
            saves += infoback[2]
            zeroedit -= infoback[3]
        if text != oldtext and reviewmode == 1:
            # Review mode: show a diff and ask for confirmation.
            # Answering 'p' prints the full resulting text first.
            adiffer.show_diff(oldtext, text)
            print(YEL+saves+END)
            answer = input('do you agree these changes? [Y/N] ')
            if answer == 'p':
                print(text)
                answer = input('do you agree these changes? [Y/N] ')
            if answer == 'y' or answer == 'Y':
                pass
            else:
                continue
        else:
            print(YEL+saves+END)
        if testmode == 1:
            if saves != '':
                log(saves)
            if fixcount > 0:
                log('found something')
        # write changes (only outside test mode, and only when at least
        # one fixer reported a non-zero edit)
        if text != oldtext and zeroedit < 1 and testmode == 0:
            page.text = text
            try:
                page.save(saves)
                log(saves)
            except pywikibot.exceptions.OtherPageSaveError:
                printlog('cannot save article: '+article+' because bots are not allowed')
                continue
        if text != oldtext and zeroedit == 1:
            printlog("bot didn't make changes to "+article+" because zeroedit")
    printlog('fixcount: '+str(fixcount))
    stop_time = datetime.datetime.now()
    total_time = stop_time - start_time
    printlog("bot duration: "+str(total_time))
def fixreflink(article ,text):
    """Repair bare ``<ref>`` citations whose content is a domain without a
    URL scheme, rewriting them as ``<ref>http://...</ref>`` (or
    ``<ref>[http://...`` for bracketed links).

    Returns (errorcount, text, saves, zeroedit) — the fixer-protocol tuple
    consumed by main(): number of links fixed, the (possibly) rewritten
    wikitext, an edit summary, and a flag marking a real edit.

    Relies on module globals: BeautifulSoup, re, html, log, printlog and
    ``lang`` (presumably 'fi' or 'en'; selects the summary language).
    """
    errorcount = 0
    saves = ''
    zeroedit = 0
    linkpartlist = []
    fixedlinks = []    # rewritten <ref> replacements, parallel to invalidlinks
    invalidlinks = []  # original <ref> spans to be replaced
    text = str(text)
    oldtext = text
    # Characters allowed in a would-be "www" first label; ``special`` marks
    # the subset that aborts the fix (wiki markup, punctuation).
    characters = 'abcdefghijklmnopqrstuvxyzäöABCDEFGHIJKLMNOPQRSTUVXYZŽÄÖ!?*[]{}()0123456789'
    special = '!?*[]{}()'
    soup = BeautifulSoup(text, "lxml")
    for hit in soup.findAll('ref'):
        link = str(hit)
        orglink = link  # keep the original span for the final replace
        link = link.replace('<ref>', '').replace('</ref>', '')
        # Domain-shaped content (labels separated by dots, 2-6 letter TLD).
        matches = re.search(r'(?:[a-zA-Z0-9](?:[a-zA-Z0-9\-]{,61}[a-zA-Z0-9])?\.)+[a-zA-Z]{2,6}', link)
        # Only touch refs that look like a scheme-less domain and are not
        # nested refs, emails, templates, quoted text or ftp links.
        if 'http://' not in link and 'https://' not in link and matches != None and 'ref' not in link and '@' not in link and '{' not in link[0:2] and '[' not in link[1:2] and 'ftp://' not in link and "'" not in link[0:2]:
            errorcount += 1
            if '[' in link and ']' in link:
                # Bracketed external-link form: [domain.tld label]
                linkpartlist = link.split('.')
                if ' ' in linkpartlist[0][1:] or ' ' in linkpartlist[1][0:1]:
                    continue
                if len(linkpartlist) >= 3 and 'w' in linkpartlist[0] and linkpartlist[0] != 'www':
                    # First label looks like a misspelled "www" — normalize it.
                    if '[' not in linkpartlist[0][0:1] and any((char in linkpartlist[0]) for char in characters):
                        if any((char in linkpartlist[0]) for char in special):
                            log('special mark found getting out')
                            continue
                        else:
                            if len(linkpartlist[0]) != 3 or '[' in linkpartlist[0]:
                                linkpartlist[0] = 'www'
                            # Re-join the labels with dots (no dot after the last).
                            time = 0
                            finallink = ''
                            for item in linkpartlist:
                                time += 1
                                if time != len(linkpartlist):
                                    finallink = finallink+item+'.'
                                else:
                                    finallink = finallink+item
                            link = '<ref>[http://'+finallink+'</ref>'
                            log('fixreflink invalid link found: '+article+'\n'+orglink+' --> '+link)
                            fixedlinks.append(link)
                            invalidlinks.append(orglink)
                    else:
                        printlog('www fix error: '+ str(linkpartlist))
                else:
                    # Bracketed link with a sane first label: just add the scheme.
                    link = link.replace('[','')
                    link = '<ref>[http://'+link+'</ref>'
                    log('fixreflink invalid link found: '+article+'\n'+orglink+' --> '+link)
                    fixedlinks.append(link)
                    invalidlinks.append(orglink)
            else:
                # Unbracketed bare-domain form; same logic without brackets.
                linkpartlist = link.split('.')
                if ' ' in linkpartlist[0][1:] or ' ' in linkpartlist[1][0:1]:
                    continue
                if len(linkpartlist) >= 3 and 'w' in linkpartlist[0] and linkpartlist[0] != 'www':
                    if any((char in linkpartlist[0]) for char in characters):
                        if any((char in linkpartlist[0]) for char in special):
                            continue
                        else:
                            print(linkpartlist[0])
                            if len(linkpartlist[0]) != 3:
                                linkpartlist[0] = 'www'
                            time = 0
                            finallink = ''
                            for item in linkpartlist:
                                time += 1
                                if time != len(linkpartlist):
                                    finallink = finallink+item+'.'
                                else:
                                    finallink = finallink+item
                            link = '<ref>http://'+finallink+'</ref>'
                            log('fixreflink invalid link found: '+article+'\n'+orglink+' --> '+link)
                            fixedlinks.append(link)
                            invalidlinks.append(orglink)
                    else:
                        printlog('www fix error: '+ str(linkpartlist))
                else:
                    link = '<ref>http://'+link+'</ref>'
                    log('fixreflink invalid link found: '+article+'\n'+orglink+' --> '+link)
                    fixedlinks.append(link)
                    invalidlinks.append(orglink)
    # Apply the collected replacements. Unescape both sides because the
    # soup round-trip may have entity-encoded characters that appear
    # literally in the wikitext.
    for fixedlink, invalidlink in zip(fixedlinks, invalidlinks):
        i = html.unescape(str(invalidlink))
        f = html.unescape(str(fixedlink))
        text = text.replace(i, f)
    if text != oldtext:
        zeroedit = 1
        printlog('fixreflinks error found')
    printlog(str(errorcount)+' invalid links found')
    # Edit summary in the wiki's language (``lang`` is a module global).
    if errorcount > 1 and lang == 'fi':
        saves = u"Botti korjasi linkkejä. "
    elif errorcount == 1 and lang == 'fi':
        saves = u"Botti korjasi linkin. "
    elif errorcount > 1 and lang == 'en':
        saves = u"Bot has fixed links. "
    elif errorcount == 1 and lang == 'en':
        saves = u"Bot has fixed link. "
    elif errorcount == 0:
        printlog('fixreflinks error not found: '+ article)
    return errorcount, text, saves, zeroedit