def verifyredditcodeposted(request):
    """Verify that a signed verification code was posted as a Reddit comment.

    Expects POST fields 'code' and 'sig' (a signed cookie for the code).
    Raises ValueError if the signature does not validate. Returns a dict:
    success=False when no comment contains the code, otherwise success=True
    plus the author of the earliest matching comment and a signed cookie
    for that username.
    """
    code = request.POST['code']
    sig = request.POST['sig']
    # Reject requests whose code does not match its signed value.
    if code != tornado_cookies.validate_cookie('code', sig):
        log.error("verifyredditcodeposted signature failed with (code, sig)", code, sig)
        raise ValueError('Invalid signature')
    content = get_reddit_content()
    comments = content[1]['data']['children']
    comments_with_code = []
    for c in comments:
        try:
            if code in c['data']['body']:
                comments_with_code.append(c)
        except KeyError:
            # Some listing children carry no 'body' (e.g. "more" stubs).
            log.debug("verifyredditcodeposted could not parse comment", c)
            continue
    if not comments_with_code:
        log.debug("verifyredditcodeposted found no comment with code", code)
        return dict(success=False)
    # The earliest comment containing the code is treated as the original.
    comments_with_code.sort(key=lambda x: x['data']['created_utc'])
    original_comment = comments_with_code[0]
    username = original_comment['data']['author']
    return dict(success=True, username=username,
                usersig=tornado_cookies.generate_secure_cookie('username', username))
def loop():
    """Receive length-prefixed frames from `sock` and feed them to `protocol`.

    Wire format: a 4-byte big-endian unsigned length, then that many bytes
    of payload. Fixes in this revision:
    - buffered bytes left over after a frame are kept for the next frame
      (previously `_buf = sock.recv(4)` discarded them, corrupting
      pipelined messages);
    - an empty recv (peer closed) now disconnects instead of busy-looping
      on `continue`;
    - payload bytes are only read while actually needed, so a zero-length
      or already-buffered frame no longer blocks on an extra recv.
    """
    _buf = b""
    while True:
        try:
            # Accumulate the 4-byte length prefix.
            while len(_buf) < 4:
                chunk = sock.recv(4096)
                if not chunk:
                    # Peer closed the connection.
                    log.warning("%r disconnect: connection closed", addr)
                    sock.close()
                    return
                _buf += chunk
            len_, = struct.unpack(">I", _buf[:4])
            _buf = _buf[4:]
            # Accumulate the payload.
            while len(_buf) < len_:
                chunk = sock.recv(4096)
                if not chunk:
                    log.warning("%r disconnect: connection closed", addr)
                    sock.close()
                    return
                _buf += chunk
            ctx = _buf[:len_]
            _buf = _buf[len_:]  # keep any bytes of the next frame
            timeout.reset_timeout()
            protocol.data_received(ctx)
        except Timeout:
            log.warning("%r disconnect timeout", addr)
            sock.close()
            break
        except Exception as e:
            log.error("%r disconnect error:%s", addr, repr(e))
            sock.close()
            break
def assemble(args):
    """Loop thru every blast/query pair and run an assembly for each one."""
    with util.make_temp_dir(
            where=args['temp_dir'],
            prefix='atram_',
            keep=args['keep_temp_dir']) as temp_dir:
        util.update_temp_dir(temp_dir, args)
        queries = split_queries(args)
        for blast_db in args['blast_db']:
            with db.connect(blast_db, check_version=True) as cxn:
                for query in queries:
                    # Fresh auxiliary DB + clean state per blast_db/query pair.
                    db.aux_db(cxn, args['temp_dir'], blast_db, query)
                    clean_database(cxn)
                    log.setup(args['log_file'], blast_db, query)
                    assembler = assembly.factory(args, cxn)
                    try:
                        assembly_loop(args, assembler, blast_db, query)
                    except (TimeoutError, RuntimeError):
                        # Expected failure modes; skip to the next query.
                        pass
                    except Exception as err:
                        log.error('Exception: {}'.format(err))
                    finally:
                        # Always flush whatever output exists, even on error.
                        assembler.write_final_output(blast_db, query)
                    db.aux_detach(cxn)
def __setInfo(self):
    """Sets the Debian project info"""
    # Returns [computer name, OS name, OS version, architecture, kernel].
    uname = platform.uname()
    comp_name = uname[1]
    # platform.architecture works when run from source, but when compiled
    # it always returns 32-bit.
    #os_arch = platform.architecture()[0]
    # Infer the architecture from the apt-get version banner instead.
    apt_version = commands.getoutput('apt-get -v')
    first_line = apt_version.split('\n')[0]
    if 'i386' in first_line:
        os_arch = '32bit'
    elif 'amd64' in first_line:
        os_arch = '64bit'
    else:
        log.error(_('unable to detect architecture. Defaulting to 32-bit. ' \
            'If this is incorrect, edit your project\'s debian.conf.'))
        os_arch = '32bit'
    os_kern = uname[2]
    try:
        # /etc/issue.net carries e.g. "Debian GNU/Linux 5.0".
        status = commands.getstatusoutput('cat /etc/issue.net')
        if status[0] != 0:
            log.error(_('Problem retrieving Debian version'))
        temp = status[1].split()
        os_name = temp[0]
        os_ver = temp[1]
    except:
        # Any failure reading the file leaves name/version unknown.
        os_name = os_ver = _('Unknown')
    return [comp_name, os_name, os_ver, os_arch, os_kern]
def name(searchString):
    """Return True when some running process name contains *searchString*."""
    needle = str(searchString).lower()
    for proc in psutil.process_iter():
        try:
            if needle in str(proc.name).lower():
                return True
        except psutil.error.NoSuchProcess:
            # Process exited while we were iterating.
            continue
        except psutil.error.AccessDenied:
            error("Must run this as root/administrator to get process connection information\n")
    return False
def load(self):
    """Load every pickled genome from the configured population directory.

    Appends each successfully-unpickled genome to self.genomes. Raises
    Exception when the directory holds no '*_genome.obj' files at all;
    individual truncated files are logged and skipped.
    """
    population_path = config.config['main']['populationPath']
    genome_files = glob.glob(population_path + '*_genome.obj')
    if len(genome_files) == 0:
        raise Exception('no genomes found under ' + population_path)
    for genome_file in genome_files:
        try:
            # 'rb' + context manager: pickles are binary data and the old
            # bare open() leaked the file handle.
            with open(genome_file, 'rb') as handle:
                genome = pickle.load(handle)
            self.genomes.append(genome)
        except EOFError:
            # Truncated/corrupt pickle: skip this file.
            log.error("could not load genome from file %s" % genome_file)
    log.info('loaded %d genomes' % len(self.genomes))
def initRepository(repository_dir, repository_url, build_dir):
    """Clone *repository_url* into *build_dir* unless *repository_dir* exists.

    NOTE(review): log.info/log.error are called with print-style end=/flush=
    keyword arguments — presumably a custom print-like logger; confirm.
    """
    if not os.path.isdir(repository_dir):
        log.info("Clone repository: {} ... ".format(repository_url), end='', flush=True)
        cloneResult = helper.execCommand(
            u"git clone {}".format(repository_url), build_dir)
        if cloneResult.returncode == 0:
            log.info(u"done", flush=True)
        else:
            # NOTE(review): git writes errors to stderr; this reads stdout —
            # verify execCommand merges the streams.
            log.error(u"error: {}".format(cloneResult.stdout.decode("utf-8")), flush=True)
def iter_stream(stream_url, params=None):
    """Yield decoded JSON messages from a streaming HTTP endpoint.

    Blank keep-alive lines are skipped; lines that fail to parse are
    logged and dropped.
    """
    url = prepare_request(stream_url, params)
    log.debug('Connecting to %s', url)
    resp = requests.get(url, stream=True)
    for line in resp.iter_lines():
        if line.strip():
            try:
                message = json.loads(line)
            except ValueError as e:
                # ValueError covers json.JSONDecodeError on Python 3 and the
                # plain ValueError Python 2's json raises. The original
                # `except json.JSONDecodeError, e` was py2-only syntax AND
                # referenced an attribute py2's stdlib json does not have.
                log.error('Invalid JSON: %s: %r', e, line)
            else:
                yield message
def portlistportitemremoteport(searchString):
    """Return True if any process connection's remote (addr, port) tuple contains the port."""
    needle = str(searchString).lower()
    for proc in psutil.process_iter():
        try:
            for connection in proc.get_connections():
                if int(needle) in connection.remote_address:
                    return True
        except psutil.error.NoSuchProcess:
            # Process exited mid-iteration.
            continue
        except psutil.error.AccessDenied:
            error("Must run this as root/administrator to get process connection information\n")
    return False
def localport(searchString):
    """Return True if any process connection's local (addr, port) tuple contains the port."""
    needle = str(searchString).lower()
    for proc in psutil.process_iter():
        try:
            for connection in proc.get_connections():
                if int(needle) in connection.local_address:
                    return True
        except psutil.error.NoSuchProcess:
            # Process exited mid-iteration.
            continue
        except psutil.error.AccessDenied:
            error("Must run this as root/administrator to get process connection information\n")
    return False
def installPacks(self, projdir, packnames):
    """Install the named packages via apt-get running as root in an xterm.

    dir::cache::archives points APT at the project's local 'packages'
    directory so previously downloaded .debs are used. Returns True on
    success, False when the root xterm could not be started.
    """
    packsdir = os.path.join(projdir, 'packages')
    # The escaping here is layered: python %-format -> sh -c string ->
    # inner quoted apt-get option value. Edit with care.
    run = self.__runRoot('xterm', '-e sh -c \"apt-get -y -o dir::cache::archives=\\\"%s\\\" ' \
        '--allow-unauthenticated install %s; echo \\\"Press [ENTER] to exit.\\\"; ' \
        'read x\"' % (packsdir, packnames))
    if run[0] != 0:
        log.error(_('exit code:%i\n%s' % (run[0], run[1])))
        log.info(_('Failed to start Xterm as root'))
        return False
    #log.info(_('installCache: success'))
    return True
def add_widget_complete():
    """ Complete the process of adding a new widget. """
    user_id = session.get('user_id')
    user_email = session.get('user_email')
    user = db_users.find_by_email(user_email)
    if user is None:
        # Session references a user that does not exist: force re-login.
        log.error(
            'widget.py::add_widget_complete',
            'ERROR: invalid state - no user record for user_email: ' + user_email)
        flash(
            'Please sign in. If you are not already a QuantumRocket user, please sign up.'
        )
        return redirect(url_for('home.index'))
    widget_name = request.form['widget_name']
    # check to see if the widget name already exists
    existing_widget: Optional[Dict[Any, Any]] = \
        db_widgets.find_by_user_id_and_widget_name(user_id, widget_name)
    if existing_widget is not None:
        flash("You already have a Widget with the name '" + widget_name +
              "'. Please try a different name as Widget names are unique.")
        # Re-render the form with the user's submitted values preserved.
        keep_widget: Optional[Dict[
            str, Optional[Any]]] = get_widget_from_request(request)
        return render_template('add_widget.html', widget=keep_widget)
    description = request.form['description']
    widget = {}
    widget['widget_name'] = request.form['widget_name']
    widget['user_id'] = user_id
    widget['user_email'] = user_email
    widget['description'] = description
    widget_id = db.insert('widgets', widget)
    flash("Widget '" + widget_name + "' added.")
    log.debug('widget.py::add_widget_complete',
              'Added new widget, widget_id [' + widget_id + '].')
    return redirect(url_for('widget.my_widgets'))
def mode_preprocess():
    """The main preprocessing entrypoint"""
    start_time = time.time()
    preprocessed = []
    io = get_io()
    logline("preprocessing")
    enter_group()
    logline("reading input paths")
    enter_group()
    input_paths = collect_input_paths(io)
    for input_path in input_paths:
        logline('found path: "{}"'.format(input_path))
    exit_group()
    logline("iterating files")
    enter_group()
    for file in get_files(input_paths):
        if not file:
            error("no files")
            return None
        features = gen_features(file)
        outputs = gen_outputs(file, io)
        # Flatten the feature/output objects into plain array rows.
        feature_arr = list(map(lambda x: x.to_arr(), features))
        output_arr = list(map(lambda x: x.to_arr(), outputs))
        # Sanity-check row widths before persisting.
        assert np.array(feature_arr).shape[1] == Features.length()
        assert np.array(output_arr).shape[1] == OUT_VEC_SIZE
        preprocessed.append({
            "file_name": file.name,
            "features": feature_arr,
            "outputs": output_arr
        })
        logline('done with file: "{}"'.format(file.name))
        file.close()
    exit_group()
    logline("done iterating files")
    # Persist everything as a single pickle.
    with open(io.get("output_file"), "wb+") as file:
        pickle.dump(preprocessed, file)
    logline("wrote output to file: {}".format(io.get("output_file")))
    exit_group()
    logline("done preprocessing, runtime is {}".format(
        Timer.stringify_time(Timer.format_time(time.time() - start_time))))
def name(searchString):
    """Return True if any running process name contains *searchString*."""
    searchString = str(searchString).lower()
    for process in psutil.process_iter():
        try:
            if searchString in str(process.name).lower():
                return True
        except psutil.error.NoSuchProcess as e:
            # Process exited while iterating.
            continue
        except psutil.error.AccessDenied as e:
            error(
                "Must run this as root/administrator to get process connection information\n"
            )
    return False
def OnNewProject(self, event):  # wxGlade: startDialog.<event_handler>
    """Create and open a new project from the dialog's name/plugin fields."""
    projName = self.nameTextCtrl.GetLineText(0)
    plugin = self.choice.GetSelection()
    if projName == '' or plugin == -1:
        wx.MessageBox(
            _("Please enter a project name and select an OS plugin."),
            _("Error"))
        return
    # Append new project
    # NOTE(review): the appended Project is never popped on the failure
    # paths below (compare openPath) — possible stale entry.
    lib.project.projects.append(Project())
    project = lib.project.projects[len(lib.project.projects) - 1]
    # Grab the plugin entry matching the selected name.
    plugin_index = -1
    name = self.choice.GetItems()[plugin]
    for i in range(len(plugins.OSPluginList)):
        if plugins.OSPluginList[i][0] == name:
            plugin_index = i
    if plugin_index == -1:
        wx.MessageBox(_("Unable to find plugin"), _("Error"))
        return
    if not plugins.OSPluginList[plugin_index][1].IsOS():
        wx.MessageBox(
            _("Please create the project on the plugin's supported OS."),
            _("Error"))
        return
    # Create project
    success, filename = project.CreateKeryx(
        projName, plugins.OSPluginList[plugin_index][0],
        plugins.OSPluginList[plugin_index][1])
    if not success:
        wx.MessageBox(
            _("Unable to create project. Make sure a project by this name does not already exist."
              ), _("Error"))
        return
    success = project.OpenKeryx(filename)
    if success:
        self.EndModal(wx.ID_OK)
    else:
        log.error(_("Unable to open project"))
        wx.MessageBox(
            _("Make sure you have a plugin loaded to support this project."
              ), _("Unable to open project."))
def run(self):
    """Try to assemble the input."""
    try:
        log.info('Assembling shards with {}: iteration {}'.format(
            self.args['assembler'], self.state['iteration']))
        self.assemble()
    except TimeoutError:
        # Re-raise with a human-readable HH:MM:SS duration attached.
        elapsed = datetime.timedelta(seconds=self.args['timeout'])
        msg = 'Time ran out for the assembler after {} (HH:MM:SS)'.format(elapsed)
        log.error(msg)
        raise TimeoutError(msg)
    except CalledProcessError as cpe:
        # Surface subprocess failures as runtime errors.
        msg = 'The assembler failed with error: ' + str(cpe)
        log.error(msg)
        raise RuntimeError(msg)
def create_snap(self, name):
    """Create a snapshot of the test VM and wait for it to leave SAVING."""
    add_test_info.sub_test_info('3', 'Create VM snap')
    snapshot = self.nova_server.vm_snap(self.vm.vm, name=name)
    assert snapshot.status, 'Vm snap creation error'
    self.snap = self.nova_server.get_snap(snapshot.snap)
    log.info('VM snap name: %s' % self.snap.snap.name)
    time.sleep(10)
    # Poll until the snapshot transitions out of SAVING.
    self.timer.wait_for_state_change(self.snap.snap.status, 'SAVING')
    if self.snap:
        # NOTE(review): logs the wrapper object, not a name — probably
        # intended self.snap.snap.name.
        log.info('Snap %s created' % self.snap)
    else:
        log.error('Snap creation failed')
    add_test_info.sub_test_completed_info()
def create_snap(self, name):
    """Create a snapshot of the test VM and wait for it to leave SAVING."""
    add_test_info.sub_test_info("3", "Create VM snap")
    snapshot = self.nova_server.vm_snap(self.vm.vm, name=name)
    assert snapshot.status, "Vm snap creation error"
    self.snap = self.nova_server.get_snap(snapshot.snap)
    log.info("VM snap name: %s" % self.snap.snap.name)
    time.sleep(10)
    # Poll until the snapshot transitions out of SAVING.
    self.timer.wait_for_state_change(self.snap.snap.status, "SAVING")
    if self.snap:
        # NOTE(review): logs the wrapper object, not a name — probably
        # intended self.snap.snap.name.
        log.info("Snap %s created" % self.snap)
    else:
        log.error("Snap creation failed")
    add_test_info.sub_test_completed_info()
def delete_vol(self):
    """Delete the cinder volume and verify it no longer exists."""
    add_test_info.sub_test_info('5', 'Delete volume')
    vol_delete = self.cinder_vol.delete_volume(self.volume)
    assert vol_delete.execute, "volume delete initialize error"
    time.sleep(10)  # give the delete time to propagate before re-checking
    volume_exists = self.cinder_vol.get_volume(self.volume)
    if not volume_exists.status:
        log.info('volume deleted')
    else:
        log.error('volume status: %s' % volume_exists.volume.status)
        raise AssertionError("volume still exists")
    add_test_info.sub_test_completed_info()
def openPath(self, path):
    """Open the project file at *path*; discard the placeholder on failure."""
    lib.project.projects.append(Project())
    project = lib.project.projects[len(lib.project.projects) - 1]
    if project.OpenKeryx(path):
        log.info(_("Opened project") + ": " + project.name)
        return True
    # Loading failed: drop the placeholder project and notify the user.
    lib.project.projects.pop()
    log.error(_("Unable to open project"))
    wx.MessageBox(_("Make sure you have a plugin loaded to support this project."),
                  _("Unable to open project."))
    return False
def get_top_rank(self, count):
    """Return the top *count* accounts ordered by score, descending.

    Each entry is a dict with 'user_id', 'score' and 'nickname'. On any
    database error the traceback is logged and whatever was collected so
    far (possibly an empty list) is returned.
    """
    ret = []
    try:
        cursor = self.col_account.find(
            {}, {"_id": False, "key": False}
        ).sort([("score", pymongo.DESCENDING)]).limit(count)
        for i in cursor:
            ret.append({
                'user_id': i['account'],
                'score': i['score'],
                'nickname': i['nickname'],
            })
    except Exception:
        # `except Exception` instead of a bare except so SystemExit and
        # KeyboardInterrupt are not swallowed.
        log.error(traceback.format_exc())
    return ret
def __setInfo(self):
    # Sets the Debian project info: returns [computer name, OS name,
    # OS version, architecture, kernel version].
    uname = platform.uname()
    comp_name = uname[1]
    os_arch = platform.architecture()[0]
    os_kern = uname[2]
    try:
        # /etc/issue.net carries e.g. "Debian GNU/Linux 5.0".
        status = commands.getstatusoutput('cat /etc/issue.net')
        if status[0] != 0:
            log.error(_('Problem retrieving Debian version'))
        temp = status[1].split()
        os_name = temp[0]
        os_ver = temp[1]
    except:
        # Any failure leaves name/version unknown.
        os_name = os_ver = _('Unknown')
    return [comp_name, os_name, os_ver, os_arch, os_kern]
def loadLocalPackageList(self, dir, arch):
    """Merge every downloaded package-list file into one packages dict.

    Resolves the list filenames for this architecture from the project's
    sources, then feeds each file through __readPackages, accumulating
    into `packages`. Unreadable files are logged and skipped.
    """
    listDir = os.path.join(dir, 'lists')
    debs = self.__parseSources(dir)
    filenames = self.__filesFromDebs(debs, arch)
    installed = self.__getInstalled(dir)
    packages = {}
    for item in filenames:
        try:
            # `with` closes the handle even when __readPackages raises
            # (the old code leaked the handle on any exception).
            with open(os.path.join(listDir, item), 'rb') as data:
                # Reconstruct the repository base URL from the file name.
                url = 'http://' + item.split('_dists')[0].replace('_', '/')
                packages = self.__readPackages(data, installed, packages, url)
        except Exception as exc:
            # `as exc` replaces the Python-2-only `except Exception, exc`.
            log.error(_('Unable to open file: ') + str(exc))
def url2ip(url):
    """
    get the url ip
    if url use cdn, then return random one of the cdn ips

    Returns the resolved address string, or False when resolution fails
    with a known DNS error. Unknown errors now propagate: the old
    `finally: return url_ip` swallowed them (and raised NameError on the
    unbound local instead of the real exception).
    """
    parser = dns.resolver.Resolver()
    parser.lifetime = parser.timeout = time_out
    # Hoist: get_valid_dns() was previously called three times.
    servers = get_valid_dns()
    # Pick a server deterministically from the url's md5 digest.
    prefix = int(hashlib.md5(url).hexdigest()[-1], 16) % len(servers)
    parser.nameservers = [servers[prefix]]
    try:
        url_ip = parser.query(url)[0].address
    except (dns.resolver.NXDOMAIN, dns.resolver.NoNameservers,
            dns.exception.Timeout):
        url_ip = False
        log.error('dns(' + servers[prefix] + ') parse url(' + url + ') error.')
    return url_ip
def rec(u, v, a, beta):
    """
    Recombination function.

    Returns the point t*u + (1-t)*v with t chosen so that a·x == beta.
    Raises ZeroDivisionError (after logging the inputs) when a is
    orthogonal to u - v, i.e. no such t exists.
    """
    assert len(u) == len(v)
    assert len(v) == len(a)
    numerator = beta - dot_product(a, v)
    direction = [(u[i] - v[i]) for i in range(len(u))]
    denominator = dot_product(a, direction)
    if denominator == 0:
        log.error("**** STOP in rec() ****")
        log.debug("u = %s", prettyprint_vector(u))
        log.debug("v = %s", prettyprint_vector(v))
        log.debug("a = %s", prettyprint_vector(a))
        raise ZeroDivisionError()
    t = numerator / denominator
    return [(t * u[i] + (1 - t) * v[i]) for i in range(len(u))]
def delete_server(self):
    """Delete the test VM and verify it no longer exists."""
    add_test_info.sub_test_info('4', 'Delete server')
    vm_delete = self.nova_server.vm_delete(self.vm.vm.id)
    assert vm_delete.execute, "Server delete initialize error"
    time.sleep(5)  # give the delete time to propagate before re-checking
    vm_exists = self.nova_server.vm_details(self.vm.vm)
    if not vm_exists.status:
        log.info('Server deleted')
    else:
        log.error('Server status: %s' % vm_exists.vm.status)
        raise AssertionError("Server still exists")
    add_test_info.sub_test_completed_info()
def processConfigFile(configfile):
    """Load [core] settings (iocServer, logfile, loglevel) from *configfile*.

    Updates options.iocServer and the module logger's logfile/loglevel for
    each option present. Logs an error when the path is not a file.
    """
    if os.path.isfile(configfile):
        debug("reading config file: %s" % (configfile))
        config = ConfigParser.ConfigParser()
        # Context manager closes the handle; the old bare open() leaked it.
        with open(configfile) as fh:
            config.readfp(fh)
        if config.has_option('core', 'iocServer'):
            options.iocServer = config.get('core', 'iocServer')
            debug('set options.iocServer: %s' % (options.iocServer))
        if config.has_option('core', 'logfile'):
            log.logfile = config.get('core', 'logfile')
            debug('set logfile: %s' % (log.logfile))
        if config.has_option('core', 'loglevel'):
            # Map the symbolic level name to its numeric value.
            log.loglevel = log.loglevels[config.get('core', 'loglevel')]
            debug('set loglevel: %s' % (log.loglevel))
    else:
        error('bad config file %s' % (configfile))
def processConfigFile(configfile):
    """Load [core] settings (iocServer, logfile, loglevel) from *configfile*."""
    if os.path.isfile(configfile):
        debug("reading config file: %s"%(configfile))
        config = ConfigParser.ConfigParser()
        # NOTE(review): the handle from open() is never closed.
        config.readfp(open(configfile))
        if config.has_option('core','iocServer'):
            #info("iocServer: %s"%(config.get('core','iocServer')))
            options.iocServer=config.get('core','iocServer')
            debug('set options.iocServer: %s'% (options.iocServer))
        if config.has_option('core','logfile'):
            log.logfile = config.get( 'core', 'logfile' )
            debug('set logfile: %s' % (log.logfile))
        if config.has_option('core','loglevel'):
            # Map the symbolic level name to its numeric value.
            log.loglevel = log.loglevels[config.get( 'core', 'loglevel' )]
            debug('set loglevel: %s' % (log.loglevel))
    else:
        error('bad config file %s'%(configfile))
def openPath(self, path):
    """Open the project file at *path*; discard the placeholder on failure."""
    # Append a new project
    lib.project.projects.append(Project())
    project = lib.project.projects[len(lib.project.projects) - 1]
    # Load the project from file
    if project.OpenKeryx(path):
        log.info(_("Opened project") + ": " + project.name)
        return True
    else:
        # Remove project placeholder appended above
        lib.project.projects.pop()
        log.error(_("Unable to open project"))
        wx.MessageBox(
            _("Make sure you have a plugin loaded to support this project."
              ), _("Unable to open project."))
        return False
def url2ips(url):
    """
    get the url ip
    if url use cdn, then return ip list

    Queries every configured DNS server and collects the distinct
    addresses returned; per-server failures are logged and skipped.
    """
    resolver = dns.resolver.Resolver()
    resolver.lifetime = resolver.timeout = time_out
    collected = []
    for server in dns_ips:
        resolver.nameservers = [server]
        try:
            address = resolver.query(url)[0].address
            if address not in collected:
                collected.append(str(address))
        except (dns.resolver.NXDOMAIN, dns.resolver.NoNameservers,
                dns.exception.Timeout):
            log.error('dns(' + server + ') parse url(' + url + ') error.')
    return collected
def isValidData(self, data):
    """
    Checks the validity of the provided serialized data

    Valid data is a dict containing at least a field "type" whose value
    equals self.getType(). Returns True/False. The previous leading
    asserts duplicated the check, made the log+False branch unreachable,
    and disappeared entirely under `python -O`; the function now actually
    reports invalid data instead of asserting.
    """
    log.debug(function=self.isValidData, args=(data, self.getType()))
    if data != None and isinstance(
            data, dict) and "type" in data and data["type"] == self.getType():
        return True
    log.error(function=self.isValidData, args=data, returns=False)
    # perhaps, we should raise an exception here, for now, False is returned
    return False
def image_delete(self):
    """Delete the glance image and verify it is gone."""
    add_test_info.sub_test_info("4", "Delete glance image")
    image_to_delete = self.glance_image.delete_image(self.img.id)
    assert image_to_delete.execute, "Image deletion failure"
    # NOTE(review): deletes self.img but polls self.image — presumably two
    # handles to the same image; confirm. Waiting on "active" after a
    # delete also looks suspect (a deleting/deleted state was expected?).
    self.timer.wait_for_state_change(self.image.image.status, "active")
    image_exists = self.glance_image.get_image(self.image.image)
    if not image_exists.status:
        log.info("Image deleted")
    else:
        log.error("Image status: %s" % image_exists.image.status)
        raise AssertionError("Image still exists")
    add_test_info.sub_test_completed_info()
def __get__(self, inst, owner):
    """Descriptor hook: return an enqueueing proxy for the wrapped method.

    Each (instance, method) pair gets its own Queue plus a daemon worker
    thread running self._thread_loop; calling the returned lambda enqueues
    the call's (args, kwargs) for that worker instead of executing inline.
    """
    if inst is None:
        # Accessed on the class itself.
        return self
    # The bound method is the per-instance key for queue/thread lookup.
    key = self._func.__get__(inst, type(inst))
    if key not in self._queues:
        try:
            self._queues[key] = queue.Queue()
            self._threads[key] = threading.Thread(target=self._thread_loop,
                                                  args=[inst, key])
            # Daemon: worker must not keep the process alive.
            self._threads[key].daemon = True
            self._threads[key].start()
        except Exception as err:
            log.error('Could not start thread for %s: %s' % (key.__name__, err))
    return lambda *a, **k: self._queues[key].put((a, k))
def loadLocalPackageList(self, dir, arch):
    """Merge every downloaded package-list file into one packages dict."""
    listDir = os.path.join(dir, 'lists')
    debs = self.__parseSources(dir)
    filenames = self.__filesFromDebs(debs, arch)
    installed = self.__getInstalled(dir)
    # Initialize vars
    packages = {}
    for item in filenames:
        try:
            # NOTE(review): the handle is never closed.
            data = open(os.path.join(listDir, item), 'rb')
            # Reconstruct the repository base URL from the list file name.
            url = 'http://%s' % item.split('_dists')[0].replace('_', '/')
            # Append all packages to list
            packages = self.__readPackages(data, installed, packages, url)
        except Exception, exc:
            log.error(_('Unable to open file: ') + str(exc))
def send_mail(url, sources, title=None, dry_run=False):
    """Render the thresholder email for *url* and send it via Mandrill.

    *sources* is a list of (source_url, timestamp) pairs. With dry_run the
    rendered HTML body is returned without sending. Returns True on a
    'sent' response, False otherwise.
    """
    ctx = {
        'title': title or ellipsize(url, 50),
        'url': url,
        'start_date': rel_date(sources[0][1]),
        'count': len(sources),
        'sources': [{
            'url': source_url,
            'timestamp': rel_date(ts),
            'name': parse_source_name(source_url),
        } for source_url, ts in sources],
    }
    ctx['sources'][-1]['last'] = 1  # for prettier emails
    with open('resources/email.html.mustache') as f:
        template = f.read()
    body = pystache.render(template, ctx)
    if dry_run:
        return body
    subject = u'Thresholderbot: {}'.format(ctx['title'])
    m = mandrill.Mandrill(os.environ['MANDRILL_APIKEY'])
    resp = m.messages.send({
        'from_email': os.environ['FROM_ADDRESS'],
        'to': [{
            'email': os.environ['TO_ADDRESS']
        }],
        'subject': subject,
        'html': body,
        'inline_css': True,
        'auto_text': False,
        'track_opens': False,
        'track_clicks': False,
    })
    if resp[0]['status'] != 'sent':
        log.error('Error sending mail: %r', resp)
        return False
    return True
def delete_widget(widget_id):
    """ Delete a widget. """
    try:
        cur = con.cursor()
        cur.execute("DELETE FROM widgets WHERE widget_id = %s", (widget_id, ))
        con.commit()
        return
    except Exception as e:
        log.error('db_widgets.py::delete_widget', str(e))
        # Roll back the failed transaction instead of committing it — the
        # old `con.commit()` here could persist a partially-failed change
        # and leaves the connection in an inconsistent transaction state.
        con.rollback()
        return
def get_user_id_by_widget_id(widget_id):
    """
    Return the user_id of a widget, retrieved by its widget ulid.
    """
    try:
        widget = find_by_id(widget_id)
        if widget is not None:
            return widget['user_id']
        return None
    except Exception as e:
        # Lookup failed; report None to the caller.
        log.error('db_widgets.py::get_user_id_by_widget_id', str(e))
        return None
def select_single(table, where, order_by, cols):
    """
    Call the select(...) function and return the first list element if the
    list is not empty, otherwise return None.
    """
    try:
        rows = select(table, where, order_by, cols, False)
        return rows[0] if rows else None
    except Exception as e:
        log.error('db.py::select_single', 'Error [' + str(e) + '].')
        raise e
def select_all_like(table, where, order_by, cols):
    """
    Call the select(...) function and return all list elements if the list
    is not empty, otherwise return None.
    """
    try:
        rows = select(table, where, order_by, cols, True)
        return rows if rows else None
    except Exception as e:
        log.error('db.py::select_all_like', 'Error [' + str(e) + '].')
        raise e
def delete_vol(self):
    """Delete the cinder volume and verify it is gone."""
    add_test_info.sub_test_info('3', 'delete volume')
    vol_delete = self.cinder_volume.delete_volume(self.volume)
    assert vol_delete.execute, "volume delete initialize error"
    volume_exists = self.cinder_volume.get_volume(self.volume)
    # Wait for the volume to leave the 'deleting' state.
    self.timer.wait_for_state_change(volume_exists.volume.status, 'deleting')
    volume_exists = self.cinder_volume.get_volume(self.volume)
    if not volume_exists.status:
        log.info('volume deleted')
    else:
        log.error('volume status: %s' % volume_exists.volume.status)
        raise AssertionError("volume still exists")
    add_test_info.sub_test_completed_info()
def delete_snap(self):
    """Delete the VM snapshot and verify it reaches DELETED."""
    add_test_info.sub_test_info('6', 'Delete VM snap')
    snap_to_delete = self.nova_server.snap_delete(self.snap.snap)
    assert snap_to_delete.execute, 'Snap deletion error'
    snapshot = self.nova_server.get_snap(self.snap.snap)
    # Wait for the snapshot to leave ACTIVE after the delete request.
    self.timer.wait_for_state_change(snapshot.snap.status, 'ACTIVE')
    log.info('status: %s' % snapshot.snap.status)
    snap_exists = self.nova_server.get_snap(self.snap.snap)
    if snap_exists.snap.status == 'DELETED':
        log.info('VM snap deleted')
    else:
        log.error('Snap status: %s' % snap_exists.snap.status)
        raise AssertionError('VM snap still exists')
    add_test_info.sub_test_completed_info()
def createProject(self, name):
    """Create the project's files and write its settings file.

    Gathers system info, copies the base files into the new project
    directory under consts.dirProjects, and writes a human-readable
    settings file. Returns True on success, False on any failure.
    """
    dirNew = os.path.join(consts.dirProjects, name)  # Create in projects directory
    try:
        # Set information
        info = self.__setInfo()
        self.__grabFiles(name, dirNew)
        # `with` guarantees the file is closed even if a write fails
        # (the old code leaked the handle on error), and `except
        # Exception` replaces the bare except that also caught
        # SystemExit/KeyboardInterrupt.
        with open(os.path.join(dirNew, FILE), 'wb') as outfile:
            outfile.write(_('Computer Name: ') + info[0] + '\n' +
                          _('OS Name: ') + info[1] + '\n' +
                          _('OS Version: ') + info[2] + '\n' +
                          _('Architecture: ') + info[3] + '\n' +
                          _('Kernel: ') + info[4] + '\n')
        return True
    except Exception:
        log.error('Problem writing to project settings file')
        return False
def create_dict(obj, columns):
    """
    Create a dict, given a database record and an ordered list of columns.
    Mind the order of column names in the columns list.

    Returns {} when the record has fewer fields than there are columns
    (the IndexError is logged), matching the original behavior.
    """
    try:
        # enumerate + comprehension replaces the index-loop over
        # range(len(columns)); behavior is identical, including the
        # IndexError -> {} path when obj is shorter than columns.
        return {name: obj[i] for i, name in enumerate(columns)}
    except Exception as e:
        log.error('db.py::create_dict', str(e))
        return {}
def delete(table, where):
    """
    Execute a DELETE statement, based on the supplied parameters.

    *where* is a dict of column -> value, ANDed together as parameterized
    conditions; an empty/None *where* deletes every row (logged loudly).
    Re-raises any database error after logging and rolling back.
    """
    # Built before the try block so the error handler can always
    # reference `s` (previously a failure in con.cursor() raised
    # NameError inside the handler).
    s = 'DELETE FROM ' + table
    try:
        cur: object = con.cursor()
        where_values = []
        if where:
            s += ' WHERE '
            for where_col, where_value in where.items():
                if where_values:
                    s += " AND "
                s += where_col + ' = %s'
                where_values.append(where_value)
        log.debug('db.py::delete', 'sql [' + s + ']')
        if where:
            cur.execute(s, (*where_values, ))
        else:
            log.info('db.py::delete',
                     'About to delete all rows from table [' + table + '].')
            cur.execute(s)
        con.commit()
    except Exception as e:
        log.error(
            'db.py::delete',
            'Error attempting to execute [' + s + ']. Error [' + str(e) + '].')
        # Roll back the failed transaction; committing it (as before)
        # could persist a partial change.
        con.rollback()
        raise e
def OnNewProject(self, event):  # wxGlade: startDialog.<event_handler>
    """Create and open a new project from the dialog's name/plugin fields."""
    projName = self.nameTextCtrl.GetLineText(0)
    plugin = self.choice.GetSelection()
    if projName == '' or plugin == -1:
        wx.MessageBox(_("Please enter a project name and select an OS plugin."), _("Error"))
        return
    # Append new project
    # NOTE(review): the appended Project is never popped on the failure
    # paths below (compare openPath) — possible stale entry.
    lib.project.projects.append(Project())
    project = lib.project.projects[len(lib.project.projects) - 1]
    # Grab the plugin entry matching the selected name.
    plugin_index = -1
    name = self.choice.GetItems()[plugin]
    for i in range(len(plugins.OSPluginList)):
        if plugins.OSPluginList[i][0] == name:
            plugin_index = i
    if plugin_index == -1:
        wx.MessageBox(_("Unable to find plugin"), _("Error"))
        return
    if not plugins.OSPluginList[plugin_index][1].IsOS():
        wx.MessageBox(_("Please create the project on the plugin's supported OS."), _("Error"))
        return
    # Create project
    success, filename = project.CreateKeryx(projName,
                                            plugins.OSPluginList[plugin_index][0],
                                            plugins.OSPluginList[plugin_index][1])
    if not success:
        wx.MessageBox(_("Unable to create project. Make sure a project by this name does not already exist."), _("Error"))
        return
    success = project.OpenKeryx(filename)
    if success:
        self.EndModal(wx.ID_OK)
    else:
        log.error(_("Unable to open project"))
        wx.MessageBox(_("Make sure you have a plugin loaded to support this project."), _("Unable to open project."))
def calculate_fitness(self):
    """Evaluate every genome's fitness in parallel via the worker pool.

    Workers receive pickled (fitnessMachine, genome) pairs; the returned
    fitness values are written back onto the genomes in order. On a pool
    timeout the previous genome list is restored ("soft recovery") and
    the error is logged instead of raised.
    """
    log.debug('calculate_fitness')
    # The old code first built an unpickled [[machine, genome], ...] list,
    # logged it, then immediately overwrote it — dead work and a
    # misleading debug line. Build the real payload once.
    pool_data = [pickle.dumps((self.fitnessMachine, g)) for g in self.genomes]
    log.debug('pool_data: ' + str(pool_data))
    # self.genomes = [_poolFitnessCalculation(i) for i in pool_data]
    # NOTE(review): this snapshot is the SAME list object, not a copy —
    # restoring it below is effectively a no-op. A real soft recovery
    # would need copy.deepcopy(self.genomes); left unchanged to preserve
    # behavior.
    tmp = self.genomes
    soft_recovery = True
    try:
        fitness_values = self.pool.map_async(
            _pool_fitness_calculation, pool_data).get(timeout=30)
        for i, fitness in enumerate(fitness_values):
            self.genomes[i].set_fitness(fitness)
    except TimeoutError as e:
        if soft_recovery:
            self.genomes = tmp
            log.error('!!!!!!!!! ' + str(e))
        else:
            raise e
def send_mail(url, sources, title=None, dry_run=False):
    """Render the thresholder email for *url* and send it via Mandrill.

    *sources* is a list of (source_url, timestamp) pairs. With dry_run the
    rendered HTML body is returned without sending. Returns True on a
    'sent' response, False otherwise.
    """
    ctx = {
        'title': title or ellipsize(url, 50),
        'url': url,
        'start_date': rel_date(sources[0][1]),
        'count': len(sources),
        'sources': [{
            'url': source_url,
            'timestamp': rel_date(ts),
            'name': parse_source_name(source_url),
        } for source_url, ts in sources],
    }
    ctx['sources'][-1]['last'] = 1  # for prettier emails
    with open('resources/email.html.mustache') as f:
        template = f.read()
    body = pystache.render(template, ctx)
    if dry_run:
        return body
    subject = u'Thresholderbot: {}'.format(ctx['title'])
    m = mandrill.Mandrill(os.environ['MANDRILL_APIKEY'])
    resp = m.messages.send({
        'from_email': os.environ['FROM_ADDRESS'],
        'to': [{'email': os.environ['TO_ADDRESS']}],
        'subject': subject,
        'html': body,
        'inline_css': True,
        'auto_text': False,
        'track_opens': False,
        'track_clicks': False,
    })
    if resp[0]['status'] != 'sent':
        log.error('Error sending mail: %r', resp)
        return False
    return True
def handle_message_with_entities(message):
    """Canonicalize and count each URL in a tweet's entities.

    For every expanded URL: canonicalize it, record (url, source user,
    tweet url) in the db, and trigger handle_thresholded_url once a URL's
    count reaches the THRESHOLD env value (default 5). Canonicalization
    failures are logged and the URL skipped.
    """
    assert message['entities']
    for url_info in message['entities']['urls']:
        url = url_info['expanded_url']
        log.info('Found URL: %s', url)
        try:
            canonical_url = urlwork.canonicalize(url)
        except requests.TooManyRedirects:
            log.error('Too many redirects: %s', url)
        except Exception as e:
            # `as e` replaces the Python-2-only `except Exception, e` form.
            log.exception('Canonicalization error: %s', e)
            log.error('URL info: %r', url_info)
        else:
            if canonical_url != url:
                log.info('=> %s', canonical_url)
            source = message['user']['id']
            source_url = make_tweet_url(message)
            count = db.add(canonical_url, source, source_url)
            if count >= int(os.environ.get('THRESHOLD', 5)):
                log.info('URL %s seen %d times!', canonical_url, count)
                handle_thresholded_url(canonical_url)
from gevent.server import StreamServer
from lib import log
from dggs.server import tcp_rloop
import settings
import sys
import gevent


def load_config():
    """Periodically reload the configuration modules (hot config reload)."""
    for module in settings.CONFIG_MODULES:
        try:
            module = sys.modules[module]
        except:
            # Module not imported (yet); skip it.
            continue
        reload(module)
    log.debug(u"加载配置文件")


if __name__ == "__main__":
    port = settings.MAIN_PORT
    if sys.platform == "linux2":
        import signal
        # Kill all greenlets when SIGQUIT is received.
        gevent.signal(signal.SIGQUIT, gevent.killall)
        # NOTE(review): this logs immediately at startup, not when SIGQUIT
        # actually fires — confirm it isn't meant to live in a handler.
        log.error("sigquit, kill all")
    # spawn=5000 caps concurrent connection greenlets.
    server = StreamServer(("0.0.0.0", port), tcp_rloop, spawn=5000)
    log.debug("start server port:%d", port)
    server.serve_forever()
log.info(consts.parameters) sys.exit(0) if ('--version' in sys.argv) or ('-v' in sys.argv): log.info('%s %s' % (consts.appName, consts.appVersion)) sys.exit(0) if ('--create' in sys.argv): import platform from lib import plugins, project try: index = sys.argv.index('--create') name = sys.argv[index + 1] plugin_name = 'debian' except: log.error(_('Unable to create project')) sys.exit(1) plugins.load(consts.dirPlugins, '', False) # Don't load interface plugins for item in range(0,len(plugins.OSPluginList)): if plugin_name == plugins.OSPluginList[item][0].lower(): # Append new project project.projects.append(project.Project()) proj = project.projects[len(project.projects) - 1] # Create project success, filename = proj.CreateKeryx(name, plugins.OSPluginList[item][0], plugins.OSPluginList[item][1]) if success: log.info(_('Project created successfully.'))
def installCache(self, projdir, scriptname, move=False):
    """Transfers index files to the APT cache. Uses sh script called as 'root' """
    # NOTE(review): the embedded shell script's line breaks below were
    # reconstructed from its evident structure; verify against the
    # original before shipping.
    packsdir = os.path.join(projdir, 'packages')
    # Make sure there are packages to install
    if not os.path.exists(packsdir):
        log.error(_("%s does not exist: no packages to be installed") % \
            (packsdir))
        return False
    # If we're missing the partial dir, APT will throw a fit.
    partialdir = os.path.join(packsdir, 'partial')
    if not os.path.exists(partialdir):
        os.mkdir(partialdir)
    scriptpath = os.path.join(projdir, '%s.sh' % scriptname)
    bin = '#!/bin/sh -'
    # Collect the downloaded .deb files and derive their package names.
    spacks = [x for x in os.listdir(packsdir)
              if os.path.isfile(os.path.join(packsdir, x))
              and (os.path.splitext(x)[1] == '.deb')]
    packnames = ' '.join([x[:x.find('_')] for x in spacks])
    content = bin + """\n#
# Keryx 0.92.4 install script
#
#Used to transfer the necessary files back to a computer
#from a project so that the downloaded packages can
#be installed.
#
# Usage <script.sh> [<project directory>] ['-move']
#
# written by jacseen, class=amateur :)
# http://keryxproject.org mailto:[email protected]
if [ -n "$1" ] && [ -e "$1" ]
then
proj="$1"
shift
else
proj="$(pwd)"
fi
cd "$proj"
if [ -n "$1" ] && [ "$1" = "-move" ]
then
transfer="mov"
else
transfer="copy"
fi
slists=lists
tlists=/var/lib/apt/lists
#spacks=packages
#tpacks=/var/cache/apt/archives
ssources=sources
tsources=/etc/apt
#Find all index files, skipping the 'status' files and copy them
cd ./"$slists"/
if [ ! $? = 0 ] #if cannot cd into lists, not project dir
then
echo "Not project dir: $(pwd)"
exit 65
fi
filelist=`find -maxdepth 1 -iname '*_dists_*'`
#TODO:attain lock on folder $tlists to be package-manager-friendly
# will be attained directly with python in later versions
for fn in $filelist
do
cp -t "$tlists" "$fn"
if [ ! $? = 0 ]
then
echo "Failure when copying list: $fn"
exit 66
fi
done
#TODO:release lock on folder $tlists
### Debs will no longer be moved to the cache. The downloads directory will ###
### Be made Temporary cache. ###
#Find all downloaded packages and move to cache
#cd ../"$spacks"/
#filelist=`find -maxdepth 1 -name '*.deb'`
#TODO:attain lock on folder $tpacks to be package-manager-friendly
# will be attained directly with python in later versions
#for fn in $filelist
#do
# if [ $transfer = "mov" ]
# then
# mv -f -t "$tpacks" "$fn"
# else
# cp -t "$tpacks" "$fn"
# fi
# if [ ! $? = 0 ]
# then
# echo "Failure when ${transfer}ing package: $fn"
# exit 67
# fi
#done
#TODO:release lock on folder $tpacks
#Update the main sources.list file in case it was changed in keryx
cd ../"$ssources"/
cp -t "$tsources" "sources.list"
if [ ! $? = 0 ]
then
echo "Failure when copying sources.list"
exit 68
fi
# Update the APT caches with the latest lists
apt-cache gencaches
exit 0"""
    try:
        script = open(scriptpath, 'wb')
        script.write(content)
        script.close()
    except:
        log.error(_('installCache: Could not write the script %s') % scriptpath)
        return False
    if move:
        transfer = '-move'
    else:
        transfer = ''
    log.info(_('installCache: launching script as root'))
    run = self.__runRoot('sh', '%s %s %s' % (scriptpath, projdir, transfer))
    if run[0] != 0:
        log.error('exit code:%i\n%s' % (run[0], run[1]))
        log.info(_('installCache: failed'))
        return False
    log.info(_('installCache: transfer success'))
    return True
pp = pprint.PrettyPrinter(indent=4)


class UtilException(Exception):
    """Base exception for this utility module."""
    pass


def die(msg, exitcode=1):
    """Print *msg* and terminate the process with *exitcode*."""
    print(msg)
    sys.exit(exitcode)


# Load the global configuration once at import time; a parse failure is
# fatal for the application.
with open("/etc/ergotime/ergotime.yaml", "r") as f:
    try:
        # safe_load: plain yaml.load without an explicit Loader can
        # construct arbitrary Python objects from the file and has been
        # deprecated since PyYAML 5.1.
        config = yaml.safe_load(f)
        builtins.config = config
    except yaml.YAMLError as err:
        log.error("Cannot load config, err: %s" % err)
        sys.exit(1)


class AddSysPath:
    """Context manager that temporarily prepends *path* to sys.path."""

    def __init__(self, path):
        self.path = path

    def __enter__(self):
        self.savedPath = sys.path.copy()
        sys.path.insert(0, self.path)

    def __exit__(self, typ, value, tb):
        # Restore the exact search path saved on entry.
        sys.path = self.savedPath