def main(argv, is_local=True):
    # exit if no receiver
    if not config.TELEGRAM_IDS_SUBSCRIBER:
        logger.error("No Telegram IDs to notify")
        return

    # initialize
    last_prices = {c.HOURLY: all_exchanges(), c.EVENT: all_exchanges()}

    # get the last prices and notify
    while True:
        is_periodic = utils.true_every_1_hour()
        current_prices = all_exchanges()
        logger.info(current_prices)

        msg, last_prices, has_events_to_notify = voila(current_prices, last_prices, is_periodic)

        if is_periodic:
            # hourly notification
            notify.to_subscribers(msg)
        elif has_events_to_notify:
            # by prices and percent changes
            notify.to_premiums(msg)

        # check every min
        time.sleep(c.ONE_MIN_IN_SEC)

def feed(self, pool, dbi):
    from lib import db, logger

    Ctlr_Base.feed(self, pool, dbi)

    extra = {'classname': self.__class__}

    for rss in self._my_feeds:
        if ('title' not in rss):
            rss['title'] = None

        if not ('url' in rss and rss['url']):
            logger.warning('Bad rss host url for %s(%s)',
                           rss['title'], rss.get('url', None), extra=extra)
            continue

        if ('host_url' not in rss):
            rss['host_url'] = self.get_host()['url']

        db.save_feed(rss, dbi=dbi)
        db.save_ctlr_feed({
            'url': rss['url'],
            'classname': str(self.__class__)
        }, dbi=dbi)

        logger.info('%s queued', rss['url'], extra=extra)

        pool.put(rss['url'], self.dispatch_rss_2_0, category=self._parser['format'])

def _get_language(filename):
    if filename.count('.') < 1:
        logger.info('No suffix in filename. Text assumed.')
        return _default
    suffix = filename.rsplit('.', 1)[1]
    logger.info('filename suffix: ' + suffix)
    return _languages.get(suffix.lower(), _default)

def _information_content(self, all_traces: Set[Strace]) -> Dict[Syscall, float]:
    """Compute and cache normalized information content."""
    if (self._all_traces is not all_traces
            or self._syscall_information_content is None):
        logger.info('Computing document frequencies.')

        # Cache traces
        self._all_traces = all_traces

        # Count how many traces each syscall appears in
        counter = Counter(chain.from_iterable(map(
            lambda s: set(s.trace_lines), all_traces
        )))
        total = len(all_traces)

        # Compute normalized information content
        #
        # The standard definition of information content is
        # -log(P) = -log(count / total).
        #
        # We normalize to the range 0..1 by dividing by the max value,
        # which is -log(1 / total). The negatives cancel, and dividing by
        # the log is equivalent to performing a log change of base to
        # base = (1 / total).
        base = 1 / total
        self._syscall_information_content = {
            k: math.log(v / total, base) for k, v in counter.items()
        }

    return self._syscall_information_content

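# A standalone sketch of the normalization described in the comment above
# (hypothetical helper, not part of the original class): taking the log in
# base (1 / total) maps a syscall that appears in every trace to 0.0 and one
# that appears in a single trace to 1.0.
import math

def normalized_information_content(count, total):
    # Equivalent to -log(count / total) / -log(1 / total).
    return math.log(count / total, 1 / total)

assert normalized_information_content(10, 10) == 0.0  # seen in every trace: no information
assert normalized_information_content(1, 10) == 1.0   # seen in one trace: maximal information
print(normalized_information_content(3, 10))           # ~0.52, between the extremes
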
def writeToInflux(parmInfluxUrl, parmInfluxDb, parmTargetUser, parmTargetPwd, perfList, whitelistDict):
    '''
    writes the data to the influx DB using the write REST API
    '''
    l.debug("writeToInflux with the following parameters: \nparmInfluxUrl: '%s'\n parmInfluxDb: '%s'\n parmTargetUser: '%s'\n parmTargetPwd: '%s'\n len(perfList): '%s'"
            % (parmInfluxUrl, parmInfluxDb, parmTargetUser, parmTargetPwd, len(perfList)))

    try:
        (urlSchema, urlHost, urlPort) = o.splitHttpUrlString(parmInfluxUrl)
    except Exception as e:
        raise Exception(sys.exc_info()[1])

    ##
    ## influxdb write end-point with query string
    tmpUri = "/write"
    tmpUri += o.buildQueryString(db=parmInfluxDb, precision="ms", p=parmTargetPwd, u=parmTargetUser)
    l.debug("Uri to /write Influx: '%s'" % (tmpUri))

    postHeaders = {"Content-type": "text/plain; charset=utf-8", "Accept": "text/plain"}

    ##
    ## Number of rows inserted
    rowCount = 0

    ##
    ## Format the output as a string
    data = outputFormatter(perfList, outFormat="INFLUX", whitelistDict=whitelistDict)
    l.verbose("formatted influx data: \n%s", data)

    ##
    ## outputFormatter returns a string of the data separated by \n per line
    postDataDict = data.split("\n")

    ##
    ## iterate over the perflist and build the REST API string.
    ## The "tags" is a string of tags separated by NODE_SEPARATOR and the counters will be the fields
    for postData in postDataDict:
        l.debug("POST data for write end-point: '%s'" % (postData))

        try:
            ##
            ## Get the HTTP Connection
            httpConn = o.getHttpConnection(urlSchema, urlHost, urlPort)
            httpConn.request("POST", tmpUri, postData, postHeaders)
            httpResponse = httpConn.getresponse()
            responseData = httpResponse.read()
            httpConn.close()
            rowCount += 1
        except Exception as e2:
            httpConn.close()
            errorString = "Failed to write data to influx, '%s'" % (str(e2))
            raise Exception(errorString)

        ##
        ## influxDb write returns code 204
        if (httpResponse.status != httplib.NO_CONTENT):
            l.error("Error response data: '%s'" % (responseData))
            errorString = "Write to influx db failed with status code: '%d'" % httpResponse.status
            l.error(errorString)
            httpConn.close()
            raise Exception(errorString)
        else:
            l.debug("influx write returned status code: '%d'", httpResponse.status)

    ##
    ## Finished - close the connection
    httpConn.close()
    l.info("writeToInflux: Number of rows inserted: '%d'" % (rowCount))

def connectSwitch(self, host_ip, u_name, p_word, tc_name):
    """
    Connects to the switch over SSH and verifies it is reachable

    - **parameters**, **types**, **return** and **return types**::

        :param host_ip: switch host IP
        :param u_name: switch username
        :param p_word: switch password
        :param tc_name: name of the test case to skip on failure
        :type host_ip: string
        :type u_name: string
        :type p_word: string
        :type tc_name: string
    """
    port = 22
    client = paramiko.SSHClient()
    try:
        client.load_system_host_keys()
        client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
        logger.info('*** Connecting to Switch...')
        client.connect(host_ip, port, u_name, p_word)
        stdin, stdout, stderr = client.exec_command('version')
        client.close()
    except Exception:
        msg = "Unable to connect to Switch. Hardware requirement failed. " \
              "Hence skipping test case {}".format(tc_name)
        logger.error(msg)
        notify.message(msg)
        raise Exception(msg)

def load_dataset(ds_id, seed=None):
    """Load a dataset object.

    Args:
        ds_id (str): Dataset identifier.
        seed (int): Random number generator seed.

    Returns:
        datasets.Dataset: Dataset object.
    """
    if dataset_exists(ds_id):
        logger.info('Loading dataset `{0}`.'.format(ds_id))
        mod = importlib.import_module('datasets.' + ds_id)
        cls = inspect.getmembers(mod, inspect.isclass)[-1][0]
        ds = getattr(mod, cls)()
        ds.id = ds_id
        ds.basepath = path(ds_id)
        ds.rs = np.random.default_rng(seed=seed)
        ds.logger = logger
        return ds
    else:
        raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), path(ds_id))

def feed(self, pool, dbi):
    from lib import db, logger

    Ctlr_Base.feed(self, pool, dbi)

    extra = {'classname': self.__class__}

    for rss in self._my_feeds:
        if ('title' not in rss):
            rss['title'] = None

        if not ('url' in rss and rss['url']):
            logger.warning('Bad rss host url for %s(%s)',
                           rss['title'], rss.get('url', None), extra=extra)
            continue

        if ('host_url' not in rss):
            rss['host_url'] = self.get_host()['url']

        db.save_feed(rss, dbi=dbi)
        db.save_ctlr_feed({
            'url': rss['url'],
            'classname': str(self.__class__)
        }, dbi=dbi)

        logger.info('%s queued', rss['url'], extra=extra)

        pool.put(rss['url'], self.dispatch_rss_2_0, category=self._parser['format'])

def parse_executables_from_repo(repo: str):
    """Parse executables from a repository.

    Parameters
    ----------
    repo : str
        GitHub repository.
    """
    try:
        logger.info(f'Processing repo {repo}')

        # Get Dockerfile contents
        dockerfile = get_dockerfile_contents(repo)

        # Error if the Dockerfile is not for a single stage Debian build
        from_instructions = FROM_INSTRUCTIONS.findall(dockerfile)
        from_debian_instruction = FROM_DEBIAN_INSTRUCTION.match(dockerfile)
        if len(from_instructions) > 1 or not from_debian_instruction:
            raise ValueError(
                'Repo Dockerfile is not a single stage Debian build'
            )

        # Parse dockerfile contents
        executables = parse_dockerfile(dockerfile)

        # Insert executables into database
        insert_untraced_executables(executables)
    except Exception:
        logger.exception(f'Failed to parse executables from {repo}')

def parse(*args, **kwargs) -> Generator[Strace, None, None]:
    """Parse straces.

    Yields
    ------
    Strace
        Parsed strace.
    """
    logger.info(f'Parsing straces for {COLLECTOR_NAME}')

    # Parse traces
    for trace_data in TRACES:
        # Compute values
        strace_file = TRACE_DIR / trace_data['strace_file']
        collector_assigned_id = Path(trace_data['strace_file']).stem

        # Log and parse
        logger.info(f'Parsing {collector_assigned_id}')
        yield (parser.parse(
            strace_file,
            system=trace_data['system'],
            executable=trace_data['executable'],
            arguments=trace_data['arguments'],
            collector=COLLECTOR_NAME,
            collector_assigned_id=collector_assigned_id,
            strace_file=strace_file,
        ).normalize())

def on_event(self, event, extension):
    setClipboard(event.get_data())
    copyHook = extension.preferences['copy_hook']

    if copyHook:
        logger.info('Running copy hook: ' + copyHook)
        subprocess.Popen(['sh', '-c', copyHook])

def setup_module(module):
    trace = "tests#test_templates#setup_module"
    info("Enter", trace, {
        "module": module,
    })
    init_logger()
    info("Exit", trace)

def __process_media_file(self):
    self.__get_media_file()
    if self.current_processing_file is not None:
        try:
            logger.info("Processing file [{}]".format(
                self.current_processing_file.identifier))
            logger.debug(self.current_processing_file)

            self.__execute_handbreak_command()

            logger.info("File [{}] processed successfully".format(
                self.current_processing_file.identifier))
            logger.debug(self.current_processing_file)

            self.mfq[self.current_processing_file.id,
                     self.current_processing_file.file_path] = MediaFileState.PROCESSED
            self.current_processing_file = None
        except HandbreakProcessInterrupted:
            self.__return_current_processing_file(MediaFileState.WAITING)
        except Exception:
            logger.exception(
                "File [{}] returning to processing queue after processing error, status [{}]"
                .format(self.current_processing_file.identifier,
                        MediaFileState.FAILED.value))
            self.__return_current_processing_file(MediaFileState.FAILED)

def waf_ipset_ranges(_id):
    """Retrieves all IP ranges associated with an ipset id"""
    trace = "aws#waf_ipset_ranges"
    info("Enter", trace, {"id": _id})

    client = boto3.client("waf")
    ranges = []
    response = None

    try:
        response = client.get_ip_set(IPSetId=_id)
    except Exception as e:
        error("Failed to determine ipset", trace, {
            "id": _id,
            "error": e,
        })
        raise e

    info("Determined ipset", trace, {"ipset": response})

    for descriptor in response["IPSet"]["IPSetDescriptors"]:
        info("Examining descriptor", trace, {"descriptor": descriptor})
        ranges.append(descriptor["Value"])

    info("Determined ranges associated to ipset id", trace, {
        "id": _id,
        "ranges": ranges,
    })

    info("Exit", trace, {"returns": ranges})
    return ranges

def write_auth(self):
    # TODO: Implements auth
    conf = config.getConfig()
    fileconf = config.getGlobalConfig() if conf.require('global') else config.getUserConfig()
    remove = conf.require('remove')
    if remove:
        fileconf.remove('gist.auth')
        fileconf.remove('gist.token')
        print 'Authentication removed, you may delete the token from your user panel.'
        return
    if fileconf.get('gist.auth', False) and not conf.get('force', False):
        logger.info('check current token')
        try:
            token = fileconf.require('gist.token')
        except exception.NoSuchOption:
            fileconf.remove('gist.auth')
            return self.write_auth()
        result = self._do_auth(token=token)
        if result:
            print 'Current token is valid, no auth required.'
            return
        print 'Current token is invalid, requesting a new token.'
    token = self._perform_auth()
    logger.info('auth ok.')
    fileconf.set('gist.auth', True)
    fileconf.set('gist.token', token)
    logger.debug('saving to config file.')
    fileconf.save()
    print 'Done!'

def test_main(runner, main_arguments):
    trace = "tests#test_commands#test_main"
    info("Enter", trace, {
        "runner": runner,
        "main_arguments": main_arguments
    })
    result = runner.invoke(cloudformation.main, main_arguments)
    assert result.exit_code == 0
    info("Exit", trace)

def test_envs(envs):
    trace = "tests#test_commands#test_envs"
    info("Enter", trace, {
        "envs": envs,
    })
    for k in envs:
        assert k in os.environ
    info("Exit", trace)

def teardown_module(module):
    trace = "tests#test_commands#teardown_module"
    info("Enter", trace, {
        "module": module,
    })
    for f in glob.glob('./build/*'):
        os.remove(f)
    info("Exit", trace)

def __init__(self, pool):
    from lib import DB, logger

    Thread.__init__(self)
    logger.info('initiated', extra={'classname': self.__class__})
    self.pool = pool
    self.dbi = DB()

def test_loglevel_info():
    trace = "tests#test_logger#test_loglevel_info"
    info("Enter", trace)
    init_logger("INFO")
    assert logger().getEffectiveLevel() == logging.INFO
    info("Exit", trace)

def test_loglevel_error():
    trace = "tests#test_logger#test_loglevel_error"
    info("Enter", trace)
    init_logger("ERROR")
    assert logger().getEffectiveLevel() == logging.ERROR
    info("Exit", trace)

def retry_media_files(self, media_file=None):
    if not media_file:
        logger.info("Retrying all media files")
        for media_file in self.mfq:
            self.mfq[media_file.id, media_file.file_path] = MediaFileState.WAITING
    else:
        logger.info("Retrying [{}] media file".format(media_file))
        self.mfq[media_file] = MediaFileState.WAITING

def build_linux_docker_image():
    """(Re)build the collector Linux Docker image."""
    logger.info('Building the Linux Docker image.')
    subprocess.run(
        ['docker', 'build', '-t', LINUX_DOCKER_IMAGE, '.'],
        cwd=LINUX_DOCKER_CONTEXT,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )

def build_ansible_docker_image():
    """(Re)build the collector Ansible Docker image."""
    logger.info('Building the Ansible Docker image.')
    subprocess.run(
        ['docker', 'build', '-t', ANSIBLE_DOCKER_IMAGE, '.'],
        cwd=ANSIBLE_DOCKER_CONTEXT,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )

def feed_revisit(pool, dbi=None):
    """Re-download news that needs revisiting.

    Mimics the Base Ctlr :: dispatch_rss_2_0 meta and hands the result
    over to dispatch_response for processing.

    @see db.list_revisits()
    @startpoint
    """
    import json
    import importlib
    from lib import db, DB, logger
    from lib.util.dt import to_timestamp

    if dbi is None:
        _dbi = DB()
    else:
        _dbi = dbi

    ctlr_cache = {}

    i_created_on = 0
    i_last_seen_on = 1
    i_pub_ts = 2
    i_feed_url = 3
    i_canonical_url = 4
    i_title = 5
    i_meta = 6
    i_ctlr = 7

    # logger.info("Found %d articles to revisit" % len(revisit_list))

    for x in db.list_recent_fetches(revisit_max_m(), dbi=dbi):
        expired = need_revisit(x[i_created_on], x[i_last_seen_on])
        if (not expired):
            continue

        if (x[i_ctlr] not in ctlr_cache):
            (ns, cn) = x[i_ctlr].rsplit('.', 1)
            module = importlib.import_module(ns)
            ctlr_cache[x[i_ctlr]] = getattr(module, cn)()

        ctlr = ctlr_cache[x[i_ctlr]]
        meta = json.loads(x[i_meta])
        meta['feed_url'] = x[i_feed_url]
        meta['pub_date'] = to_timestamp(x[i_pub_ts])
        meta['title'] = x[i_title]

        logger.info('Revisiting %s, expired for %d min',
                    x[i_canonical_url], expired,
                    extra={'classname': feed_revisit})
        pool.log_stats('with_revisit')
        pool.put("http://" + x[i_canonical_url],
                 ctlr.dispatch_response,
                 category="revisit",
                 meta=meta)

    if dbi is None:
        _dbi.disconnect()

def detect_word():
    global previous_word
    if 'template.png' in hook.get_image_url():
        return ''
    if hook.get_image_url() not in word_dictionary:
        logger.warning('Unregistered word image: ' + hook.get_image_url())
        return ''
    logger.info('Get word ' + word_dictionary[hook.get_image_url()])
    return word_dictionary[hook.get_image_url()]

def build_docker_image():
    """Build the collector Docker image."""
    logger.info('Building Docker image.')
    subprocess.run(
        ['docker', 'build', '-t', DOCKER_IMAGE, '.'],
        cwd=DOCKER_CONTEXT,
        stdout=subprocess.DEVNULL,
        stderr=subprocess.DEVNULL,
    )

def main(event, context):
    try:
        _tickers = event.get("tickers")
        _position_size = event.get("position-size")
        _strategy = event.get("strategy")
        account_alias = event.get("account-alias")

        kms = Kms()
        session = get_session()

        if environ.get("STAGE") == "test":
            wait_for_db_init(session)

        candle_repository = CandleRepository(session)
        account_repository = AccountRepository(session, kms)
        order_repository = OrderRepository(session)

        accounts = account_repository.get_all_active_upbit_accounts(alias=account_alias)
        db_feed = DbFeed(exchange=UpbitExchange.build(accounts[0]), candle_repository=candle_repository)

        tickers = _tickers or get_trading_tickers()
        strategy = get_strategy_by_name(name=_strategy)

        feeds_by_ticker = db_feed.build_feeds_by_ticker(tickers=tickers)
        strategies_by_ticker = get_trading_strategies_by_ticker(tickers=tickers, override_strategy=strategy)

        logger.info(f"accounts = {accounts}")

        orders = []

        for account in accounts:
            send_credential_expiry_reminder(account)

            exchange = UpbitExchange.build(account)
            asset_manager = AssetManager(exchange)
            trader = Trader(asset_manager, exchange, session, account, order_repository)

            for ticker, strategies in strategies_by_ticker.items():
                for strategy in strategies:
                    logger.info(f"trading with strategy = {strategy}, ticker = {ticker}")
                    wait_order = trader.trade(ticker=ticker, strategy=strategy(feeds_by_ticker[ticker]), position_size=_position_size)
                    if wait_order is not None:
                        orders.append(wait_order)

        return {
            "statusCode": 200,
            "body": ",".join([order.get_id() for order in orders])
        }
    except Exception as e:
        stack = traceback.format_exc()
        logger.error(e)

        return {
            "statusCode": 500,
            "body": stack
        }

def setManager(name, extension):
    global manager
    logger.info('Loading ulauncher-clipboard manager: %s', name)
    manager = getManager(name)

    if not ensureStatus(manager):
        showMessage(
            'ulauncher-clipboard error',
            "Could not load {}. Make sure it's installed and enabled.".format(manager.name),
            getThemeIcon('dialog-error', 32)
        )

def __return_current_processing_file(self, media_file_state):
    if self.current_processing_file is not None:
        self.mfq[self.current_processing_file.id,
                 self.current_processing_file.file_path] = media_file_state
        logger.info(
            "File [{}] returned to processing queue, status [{}]".format(
                self.current_processing_file.identifier, media_file_state.value))
        logger.debug(self.current_processing_file)

def test_resource_path(resource_file, resource_paths):
    trace = "tests#test_util#test_resource_path"
    info("Enter", trace, {
        "resource_file": resource_file,
        "resource_paths": resource_paths,
    })
    path = resource_path(resource_file, resource_paths)
    assert os.path.isfile(path)
    info("Exit", trace)

def feed_revisit(pool, dbi=None):
    """Re-download news that needs revisiting.

    Mimics the Base Ctlr :: dispatch_rss_2_0 meta and hands the result
    over to dispatch_response for processing.

    @see db.list_revisits()
    @startpoint
    """
    import json
    import importlib
    from lib import db, DB, logger
    from lib.util.dt import to_timestamp

    if dbi is None:
        _dbi = DB()
    else:
        _dbi = dbi

    ctlr_cache = {}

    i_created_on = 0
    i_last_seen_on = 1
    i_pub_ts = 2
    i_feed_url = 3
    i_canonical_url = 4
    i_title = 5
    i_meta = 6
    i_ctlr = 7

    # logger.info("Found %d articles to revisit" % len(revisit_list))

    for x in db.list_recent_fetches(revisit_max_m(), dbi=dbi):
        expired = need_revisit(x[i_created_on], x[i_last_seen_on])
        if not expired:
            continue

        if x[i_ctlr] not in ctlr_cache:
            (ns, cn) = x[i_ctlr].rsplit(".", 1)
            module = importlib.import_module(ns)
            ctlr_cache[x[i_ctlr]] = getattr(module, cn)()

        ctlr = ctlr_cache[x[i_ctlr]]
        meta = json.loads(x[i_meta])
        meta["feed_url"] = x[i_feed_url]
        meta["pub_date"] = to_timestamp(x[i_pub_ts])
        meta["title"] = x[i_title]

        logger.info("Revisiting %s, expired for %d min",
                    x[i_canonical_url], expired,
                    extra={"classname": feed_revisit})
        pool.log_stats("with_revisit")
        pool.put("http://" + x[i_canonical_url],
                 ctlr.dispatch_response,
                 category="revisit",
                 meta=meta)

    if dbi is None:
        _dbi.disconnect()

def add_some_numbers(a, b):
    """
    Adds the passed parameters and returns the result.
    """
    logger_name = 'add_some_numbers'
    logger = logging.getLogger(__name__).getChild(logger_name)

    result = a + b
    logger.info("Result of add_some_numbers: {}".format(result))
    return result

async def add_code(pnum, device_id, code, package_name, app_version, os_type):
    m = tools.mysql_conn()
    try:
        sql = "INSERT INTO o_verify_log (pnum, device_id, status, code, package_name, app_version, os_type) \
            VALUES(%s,%s,%s,%s,%s,%s,%s)"
        m.Q(sql, (pnum, device_id, 0, code, package_name, app_version, os_type))
        r_id = int(m.cur.lastrowid)
        cache.invalidate(VerifyLib.get_code_by_pnum, pnum)
        return r_id
    except:
        info('insert error')
        traceback.print_exc()
        return False

def _do_auth(self, token=None):
    # Authenticate to github, save some login info (user/pass, or oauth token)
    conf = config.getConfig()
    auth = conf.getboolean('gist.auth', False) or token is not None
    if auth:
        # OAuth token
        logger.info('auth: oauth token')
        if token is None:
            token = conf.require('gist.token')
        logger.debug('auth: test token usability')
        # Try authenticate
        self.req.headers['Authorization'] = 'token ' + token
        # Get a time in future (1 year)
        fmt_time = (datetime.datetime.now() + datetime.timedelta(days=365)).strftime('%Y-%m-%dT%H:%M:%SZ')
        test_url = _api_base + '/gists?since=' + fmt_time
        logger.debug('test url: ' + test_url)
        try:
            resp = self.req.get(test_url)
        except exceptions.RequestException as e:
            logger.warn('http error, assume token is good.')
            logger.info('[%s] %s' % (e.__class__.__name__, e.message))
            return
        logger.debug('http ok, response: %d %s' % (resp.status_code, resp.reason))
        if resp.status_code == 401:
            # Invalid token
            logger.warn('invalid token')
            return False
        elif resp.status_code == 200:
            logger.info('token ok.')
            return True
        else:
            logger.warn('unknown response status: %d %s' % (resp.status_code, resp.reason))
            raise exception.ServerException('Server responded with unknown status: %d %s' % (resp.status_code, resp.reason))
    logger.info('auth: none')
    return None

def fetch(payload, dbi=None):
    """Fetch the file at payload['url'].

    Writes the final url actually read to payload['url_read'] and the
    response body to payload['src'].
    """
    import re
    from lxml.html import fromstring
    from lib import db, DB, logger
    from lib.util.text import to_unicode

    extra = {'classname': 'util.net.fetch()'}

    try:
        uo = urlopen(payload['url'], timeout=HTTP_TIMEOUT)
        if (uo.code != 200):
            raise IOError("HTTP response code=%d from %s" % (uo.code, uo.url))

        portal = get_portal(uo.url)
        if portal:
            break_portal(portal, payload, uo)
        else:
            payload['src'] = uo.read()
            payload['url_read'] = uo.url
    except Exception as e:
        # fetch failed; keep the error for the record (save_fetch)
        payload['src'] = 'error ' + unicode(e)
        payload['category'] = 'error'
        payload['exception'] = e

    if 'url_read' not in payload:
        payload['url_read'] = payload['url']

    if dbi is None:
        _dbi = DB()
    else:
        _dbi = dbi

    try:
        db.save_fetch(payload['url'], to_unicode(payload['src']), payload['category'], dbi=_dbi)
    except Exception as e:
        logger.warning('DB save_fetch failed for url %s' % payload['url'], extra=extra)
        logger.debug(e)

    if dbi is None:
        _dbi.disconnect()

    if 'error' == payload['category']:
        # raise the exception to skip the parsing process
        logger.info("failed fetching %s" % payload['url'], extra=extra)
        raise payload['exception']

    return payload

def push_content(self):
    logger.debug('call: ubuntupaste.push_content')
    conf = config.getConfig()
    post_target = 'http://paste.ubuntu.com/'
    logger.debug('post target: ' + post_target)
    poster = conf.get('ubuntu.user', getpass.getuser())
    logger.debug('poster: ' + poster)
    # Get Filename for highlight.
    filename = conf.require('src').name
    if filename == '-':
        print 'Type your content here, end with EOF'
        print 'Use Ctrl-C to interrupt, if you have mistyped something.'
    try:
        content = conf.require('src').read()
    except KeyboardInterrupt:
        logger.warn('Ctrl-C received, interrupted...')
        sys.exit(1)
    lines = content.count('\n')
    bytes = len(content)
    logger.info('content: %d lines, %d bytes' % (lines, bytes))
    lang = conf.get('ubuntu.lang', _get_language(filename))
    logger.debug('highlight: ' + lang)
    post_data = {
        'poster': poster,
        'syntax': lang,
        'content': content,
    }
    try:
        resp = self.req.post(post_target, data=post_data, allow_redirects=False)
    except exceptions.RequestException as e:
        logger.info('Exception: ' + e.__class__.__name__)
        logger.error('Something went wrong when communicating with paste.ubuntu.com!')
        raise exception.ServerException(e)
    logger.debug('HTTP OK')
    logger.info('HTTP Status: %d %s' % (resp.status_code, resp.reason))
    if resp.status_code == 302:
        pastepad = resp.headers['location']
        logger.debug('location: ' + pastepad)
        pattern = re.compile(r'^http:\/\/paste.ubuntu.com/(?P<paste_id>\d+)/$')
        res = pattern.match(pastepad)
        if not res:
            raise exception.ServerException('Unknown location: ' + pastepad)
        paste_id = res.group('paste_id')
        logger.info('paste_id: ' + paste_id)
        # return paste_id
        print 'Paste ID: ' + str(paste_id)
        print 'HTTP Link: ' + pastepad
        return
    if resp.status_code == 200:
        data = resp.content
        err_start_flag = '<ul class="errorlist"><li>'
        err_stop_flag = '</li></ul>'
        msg = self.html2text(self.fetch_between(resp.content, err_start_flag, err_stop_flag))
        raise exception.ServerException('Server refused our paste: ' + msg)
    raise exception.ServerException('Server responded with unknown status %d %s' % (resp.status_code, resp.reason))

def retrieve(options):
    # check if git repo was updated before retrieving files.
    pantri = Pantri(options)
    if pantri.nothing_to_retrieve():
        logger.info('it-bin repo already up-to-date. Use -f/--force to override')
        return

    # In order to selectively choose which shelves to retrieve and have
    # different options per shelf, need to call "pantri.retrieve()" for each
    # shelf.
    if 'shelf' in options:
        for shelf in options['shelf']:
            options['shelf'] = shelf
            pantri = Pantri(options)
            pantri.retrieve()
    else:
        pantri = Pantri(options)
        pantri.retrieve()

def run(self):
    print '[Monitor]sending beacon request', time.ctime()
    try:
        # simulate a beacon request to the server
        data = self.get_attrs()
        post_param = {
            "data": json.dumps(data)
        }
        req_url = self.conf['server'] + '/api/beacon?kioskId=' + data['kiosk_id']
        logger.info('[Monitor]req url: ' + req_url)
        server = self.conf['server']
        resp_body, resp_status = lib.inet.http_post(req_url, server, post_param)
        if len(resp_body) > 0:
            resp_body = json.loads(resp_body)
            print resp_body, resp_status, '\n'
    except Exception as e:
        logger.error('[Monitor] ' + str(e))

async def send_code(pnum, code, package_name, client_ip):
    # SMS body: "Verification code: %s. For your account's safety, do not forward the code to others."
    msg = '验证码:%s。为了您的帐号安全,验证码请勿转发给他人' % code
    channel = VerifyLib.verify_sms_channel.get(package_name, '100')
    sign = '【红包锁屏】'  # SMS signature
    data = json.dumps({
        'mno': str(pnum),     # target phone number
        'channel': channel,   # channel id
        'msg': msg,           # for voice: the 4-8 digit code; for SMS: the full message body
        'offer_code': '',     # force a specific gateway; empty by default
        'ip': client_ip,      # target ip
        'sign': sign,         # SMS signature
    })
    url = VerifyLib.sms_url.get(package_name, '100')
    response = await http_put(url, data=data)
    if response:
        rs = None
        try:
            rs = json.loads(response.body)
        except:
            info('JSON error: pnum: %s, channel: %s, return_body: %s' % (pnum, channel, str(response.body)))
        if not rs:
            info('Failed to send verification code. pnum: %s. rs is None', pnum)
            return False
        elif rs["res"] != 1:
            info('Failed to send verification code: pnum: %s, channel: %s, return_body: %s' % (pnum, channel, str(response.body)))
            return False
        else:
            return True

def executemany(self, sql, args):
    u'''Execute multiple SQL statements in batch.'''
    res = None
    try:
        self.get_conn(False)
        res = self.cur.executemany(sql, args)
        self.db.commit()
    # except (_mysql_exceptions.DatabaseError, _mysql_exceptions.OperationalError, _mysql_exceptions.ProgrammingError) as e:
    #     error('got Exception when do sql:%s, (total:%d)args[:5]=%s, e:\n%s', sql, len(args), args[:5], e)
    #     self.db.rollback()
    #     raise e
    except StandardError as e:
        info('got StandardError and rollback when do sql:%s, (total:%d)args[:5]=%s, e:\n%s',
             sql, len(args), args[:5], e)
        self.db.rollback()
        raise e
    except Exception as e:
        info('got Exception and rollback when do sql:%s, (total:%d)args[:5]=%s, e:\n%s',
             sql, len(args), args[:5], e)
        self.db.rollback()
        raise e
    return res

def _perform_auth(self, otp_token=None):
    if otp_token is None:
        try:
            self.user = raw_input('Username: ')
            logger.debug('user: ' + self.user)
            self.pwd = getpass.getpass('Password: ')
            logger.debug('password ok.')
        except KeyboardInterrupt:
            logger.warn('Ctrl-C detected.')
            sys.exit(1)
    user = self.user
    pwd = self.pwd
    logger.info('auth: fetch new token')
    post_json = {
        'scopes': ['gist'],
        'note': 'paste.py @ ' + str(datetime.datetime.now()),
        'note_url': 'https://github.com/jackyyf/paste.py',
    }
    post_headers = {
        'Content-Type': 'application/json',
    }
    if otp_token is not None:
        post_headers['X-GitHub-OTP'] = otp_token
    post_str = json.dumps(post_json)
    post_url = _api_base + '/authorizations'
    logger.debug('post_url: ' + post_url)
    try:
        resp = self.req.post(post_url, data=post_str, headers=post_headers, auth=(user, pwd))
    except exceptions.RequestException as e:
        raise exception.ServerException(e)
    logger.info('http ok. response: %d %s' % (resp.status_code, resp.reason))
    if resp.status_code == 201:
        logger.info('auth ok.')
        token = resp.json()[u'token']
        logger.debug(resp.content)
        self.req.headers['Authorization'] = 'token ' + token
        return token
    elif resp.status_code == 401:
        # Two factor auth?
        logger.warn('auth failed')
        if 'X-GitHub-OTP' in resp.headers:
            logger.warn('auth: two-factor required')
            try:
                token = raw_input('Two factor token from ' + resp.headers['X-Github-OTP'].replace('required; ', '') + ':')
            except KeyboardInterrupt:
                logger.warn('Ctrl-C detected')
                sys.exit(1)
            return self._perform_auth(otp_token=token)
        else:
            logger.error('username or password error.')
            return self._perform_auth()
    else:
        raise exception.ServerException('Server responded with unknown status: %d %s' % (resp.status_code, resp.reason))

def install_gist(self):
    logger.info("Trying to get Gist")
    gist = json.load(urllib2.urlopen(
        'https://api.github.com/gists/{}'.format(self.__gist_id)))
    try:
        # first json file for config
        config_file = self.get_gist_files_path(gist, '.json')[0]
        bash_files = self.get_gist_files_path(gist, '.sh')
    except Exception as e:
        logger.critical(
            "This is an invalid gist_id or something else went wrong")
        raise e

    logger.info("Trying to save recipe files")
    try:
        bash_dir = os.path.join(self.__recipes_path, os.path.splitext(
            os.path.basename(config_file))[0])
        if not os.path.exists(bash_dir):
            os.makedirs(bash_dir)

        config_file_path = os.path.join(
            self.__recipes_path, os.path.basename(config_file))
        silent_remove(config_file_path)
        with open(config_file_path, "wb") as local_file:
            local_file.write(urllib2.urlopen(config_file).read())

        for bash_file in bash_files:
            bash_file_path = os.path.join(
                bash_dir, os.path.basename(bash_file))
            silent_remove(bash_file_path)
            with open(bash_file_path, "wb") as local_file:
                local_file.write(urllib2.urlopen(bash_file).read())

        logger.info("Recipe files were saved successfully")
    except Exception as e:
        logger.critical("Something went wrong with the internet. "
                        "Internet dies[SCREAMING]. Run, quickly run away")
        raise e

    responseHeaders, returnStatus, content = module.content(
        userName,
        accessLevel,
        command=urlPath[1].lower() if len(urlPath) > 1 else '',
        queryString=queryString,
        postData=postData,
        cookies=cookies,
    )
    startResponse(returnStatus, responseHeaders)
    return [content]


application = wsgiblankApplication

if __name__ == '__main__':
    try:
        logger.info('Trying to create local webserver')
        from wsgiref import simple_server
        print 'Running wsgiblank application at http://%s:%s/ ...' % (str(WEBSERVERHOST), str(WEBSERVERPORT))
        SERVERISAPACHE = False
        HTTPD = simple_server.WSGIServer((str(WEBSERVERHOST), int(WEBSERVERPORT)), simple_server.WSGIRequestHandler)
        HTTPD.set_app(application)
        HTTPD.serve_forever()
    except ImportError:
        logger.info('Creating local webserver failed, falling back to stdout output')
        for cnt in application({}, lambda status, headers: None):
            print cnt

def run_tests_negative(self, tests):
    for test in tests:
        logger.info('Pyser Query: %s' % test)
        self.assertRaises(ParserError, parser.build_tree, lexer.parse(test))

def run_tests(self, tests):
    for test in tests:
        root = parser.build_tree(lexer.parse(test))
        logger.info('Pyser Query: %s -> %s' % (test, root))

def run_tests_negative(self, tests):
    for test in tests:
        logger.info('Pyser Query: %s' % test.strip())
        logger.indent_push()
        self.assertRaises(LexerError, lexer.parse, test)
        logger.indent_pop()

def run_tests(self, tests):
    for test in tests:
        tokens = lexer.parse(test)
        logger.info('Pyser Query: %s -> %s' % (test.replace('\n', ' '), tokens))
        for token in tokens:
            self.assert_(isinstance(token, lexer.Token))

def setUp(self):
    logger.set_verbosity(self._verbosity)
    logger.info('Running %s' % self)
    logger.indent()

def run_tests_negative(self, tests):
    for test in tests:
        logger.info('Pyser Query: %s' % test.code)
        logger.indent_push()
        self.assertRaises(InterpreterError, test.run)
        logger.indent_pop()

def run_tests(self, tests):
    for test in tests:
        logger.info('%s == %s' % (test.code, test.result))
        self.assertEqual(test.result, test.run())

def wsgiblankApplication(environ, startResponse):
    '''main function of the wsgiblank'''
    logger.info('running in wsgi mode')

    # reading configuration values
    try:
        urlPath = environ['PATH_INFO'].strip('/').split('/')
    except StandardError:
        urlPath = ['']
    try:
        queryString = urlparse.parse_qs(environ['QUERY_STRING'], keep_blank_values=1)
    except StandardError:
        queryString = []

    # Serve static for WSGI servers
    if SERVERISAPACHE is False:
        if urlPath[0].lower() in STATIC:
            path = os.path.join(INSTALLPATH, 'static', environ.get('PATH_INFO', '').lstrip('/'))  # set path
            if os.path.exists(path):  # does it exist?
                filestat = os.stat(path)  # get file stats
                fileToServe = file(path, 'r')  # read file
                mimeType = mimetypes.guess_type(path)  # guess mime-type
                # set headers
                if mimeType[0] == None:
                    headers = [(CONTENT_LENGTH, str(filestat.st_size))]
                else:
                    headers = [(CONTENT_TYPE, mimeType[0]), (CONTENT_LENGTH, str(filestat.st_size))]
                startResponse('200 OK', headers)  # set response
                return environ['wsgi.file_wrapper'](fileToServe)  # return file
            else:
                logger.debug('static content not found: %s' % str(path))
                return notFound(environ, startResponse)  # 404

    try:
        requestBodySize = int(environ.get('CONTENT_LENGTH', 0))
    except ValueError:
        requestBodySize = 0

    # Saving wsgi.input since it can be read only once
    wsgiInputStorage = StringIO(environ['wsgi.input'].read(requestBodySize))
    environ['wsgi.input'] = wsgiInputStorage

    try:
        postData = urlparse.parse_qs(environ['wsgi.input'].read(requestBodySize), True)
    except StandardError:
        postData = []

    # seek back to the first byte of wsgiInputStorage
    wsgiInputStorage.seek(0)
    environ['wsgi_input'] = wsgiInputStorage

    cookies = {}
    if 'HTTP_COOKIE' in environ:
        for i in environ['HTTP_COOKIE'].split(';'):
            cookie = i.strip().partition('=')
            try:
                cookies[cookie[0]] = cookie[2].strip()
            except StandardError:
                pass

    logger.debug(' urlPath: %s' % str(urlPath))
    logger.debug(' queryString: %s' % str(queryString))
    try:
        logger.debug(' userAgent: %s' % str(environ['HTTP_USER_AGENT'] if 'HTTP_USER_AGENT' in environ else ''))
        logger.debug(' remoteAddr: %s' % str(environ['REMOTE_ADDR'] if 'REMOTE_ADDR' in environ else ''))
    except StandardError:
        pass
    for i in cookies.keys():
        logger.debug(' cookie: %s = %s' % (i, cookies[i]))

    accessLevel = NO_ACCESS
    userName = ''

    # authentication can be disabled for debug reasons via config file
    enableAuthentication = 1
    try:
        if str(config.get('authentication', 'enableAuthentication')).lower() in ['false', '0']:
            enableAuthentication = 0
            accessLevel = int(config.get('authentication', 'accessLevel'))
            userName = str(config.get('authentication', 'userName'))
    except StandardError:
        pass

    # checking authentication
    if enableAuthentication and AUTHCOOKIENAME in cookies:
        try:
            [uName, key] = cookies[AUTHCOOKIENAME].strip().split(':')
            accessLevel = auth.querySession(uName, key)
            if accessLevel > NO_ACCESS:
                userName = uName
        except ValueError:
            pass

    logger.debug(' accessLevel: %d, username: %s' % (accessLevel, userName))

    if urlPath[0].lower() == 'debug' and accessLevel == RW_ACCESS:
        raise Exception('debug')
    else:
        # serve the requested content
        # decide which module to call
        moduleFile = ''
        moduleName = DEFAULT_MODULE

        if not SERVERISAPACHE:
            if urlPath[0] != 'wsgiblank' or urlPath == []:
                urlPath.insert(0, 'wsgiblank')
        if len(urlPath) == 1:
            # for evaluating urlPath[0 if SERVERISAPACHE else 1]
            urlPath.insert(1, '')

        if urlPath == [] or not urlPath[0 if SERVERISAPACHE else 1].lower() in MODULES:
            # if the requested module is not available or no module is specified, load the
            # default one from the config
            logger.debug(' module not found, falling back to default module: %s' % DEFAULT_MODULE)
            moduleFile = os.path.join(LIBPATH, 'module', MODULES[DEFAULT_MODULE]['dirname'], MODULES[DEFAULT_MODULE]['filename'])
        else:
            # loading the selected module
            moduleName = urlPath[0 if SERVERISAPACHE else 1].lower()
            logger.debug(' trying to load module: %s' % moduleName)
            if MODULES[moduleName]['enabled'] == 'enabled' and MODULES[moduleName]['access'] <= accessLevel:
                # load selected module
                moduleFile = os.path.join(LIBPATH, 'module', MODULES[moduleName]['dirname'], MODULES[moduleName]['filename'])
                logger.debug(' determining module file: %s' % moduleFile)
            else:
                # load default module
                moduleFile = os.path.join(LIBPATH, 'module', MODULES[DEFAULT_MODULE]['dirname'], MODULES[DEFAULT_MODULE]['filename'])
                logger.debug(' don\'t load module because status is disabled or level of access is not met (status: %s, access: %d)' % (MODULES[moduleName]['enabled'], MODULES[moduleName]['access']))

        try:
            # try to load the module lib
            module = imp.load_source(moduleName, moduleFile)
        except IOError as eIO:
            logger.error(' unable to load requested module: \'%s\'' % moduleFile)
            logger.error(' got exception: %s' % eIO)

        logger.debug(' module loaded: \'%s\'' % moduleFile)
        logger.debug(' postData: %s' % str(postData))

        responseHeaders, returnStatus, content = module.content(
            userName,
            accessLevel,
            command=urlPath[1].lower() if len(urlPath) > 1 else '',
            queryString=queryString,
            postData=postData,
            cookies=cookies,
        )
        startResponse(returnStatus, responseHeaders)
        return [content]

async def init(request, os_type: types.text, app_version: types.text,
               device_id: types.text, device_name: types.text, channel: types.text,
               package_name: types.text = 'com.test.package',
               uid: types.number = -1, ticket: types.text = ''):
    """
    summary: init
    description: |
        Application initialization endpoint
    parameters:
        - name: pnum
          in: query
          description: phone number
          required: true
          type: number
          format: int
        - name: device_id
          in: query
          description: device id
          required: true
          type: string
        - name: device_name
          in: query
          description: device model name
          required: true
          type: string
        - name: os_type
          in: query
          description: user OS type, e.g. android
          required: true
          type: string
        - name: app_version
          in: query
          description: application version
          required: true
          type: string
        - name: channel
          in: query
          description: application distribution channel
          required: true
          type: string
        - name: package_name
          in: query
          description: application package name
          required: true
          type: string
    tags:
        - user
    responses:
        200:
            description: An array of price estimates by product
            schema:
                $ref: "#/definitions/response_msg"
    """
    return tools.response()

    res = await UserLib.check_ticket(ticket, uid)
    ret = res['data']
    if uid <= 0:
        # when to update login ticket?
        pass
    elif uid and ret:
        info(ret)
        res = await UserLib.get_new_ticket(ret['uid'], ret['qid'])
        info(ticket)
        return tools.response([{'new_ticket': res}])
    return tools.response(code=err_code._ERR_TICKET_ERR, message="身份验证失败")  # "identity verification failed"

def push(self):
    # TODO: Implements push.
    conf = config.getConfig()
    res = self._do_auth()
    if res is not None:
        if not res:
            if conf.getboolean('check', True):
                print 'Token is invalid, please use paste.py gist auth to get a new token.'
                sys.exit(1)
            else:
                del self.req.headers['Authorization']
    files = conf.require('files')
    if files.count(sys.stdin) > 1:
        raise exception.InvalidValue('stdin was listed more than once!')
    logger.debug('private: ' + ('yes' if conf.require('private') else 'no'))
    logger.debug('description: ' + conf.require('description'))
    logger.debug('files: ' + str(len(files)))
    post_data = {
        'public': not conf.require('private'),
        'description': conf.require('description'),
    }
    file_data = dict()
    try:
        for file in files:
            logger.info('reading file ' + file.name)
            if file is sys.stdin:
                print 'Type your content here, end with EOF'
                print 'Use Ctrl-C to interrupt, if you have mistyped something.'
            content = file.read()
            logger.debug('file ' + file.name + ': %d lines, %d bytes' % (content.count('\n'), len(content)))
            fname = os.path.basename(file.name)
            now = 2
            if fname in file_data:
                if '.' in fname:
                    name, ext = fname.rsplit('.', 1)
                else:
                    name, ext = fname, ''
                while (name + '-' + str(now) + '.' + ext) in file_data:
                    now += 1
                fname = (name + '-' + str(now) + '.' + ext)
            logger.debug('final filename: ' + fname)
            file_data[fname] = {
                'content': content,
            }
    except KeyboardInterrupt:
        logger.warn('Ctrl-C received, exiting.')
        sys.exit(1)
    post_data['files'] = file_data
    post_str = json.dumps(post_data)
    post_url = _api_base + '/gists'
    logger.debug('post url: ' + post_url)
    try:
        resp = self.req.post(post_url, data=post_str, headers={
            'Content-Type': 'application/json',
        })
    except exceptions.RequestException as e:
        logger.error('Post error: ' + e.message)
        raise exception.ServerException(e)
    logger.debug('http ok.')
    logger.info('server response: %d %s' % (resp.status_code, resp.reason))
    if resp.status_code == 201:
        logger.info('gist created')
        url = resp.json()[u'html_url']
        gistid = url.rsplit('/', 1)[1]
        print 'HTTP Link: ' + url
        print 'Paste.py uri: gist://' + gistid
    else:
        raise exception.ServerException('Server responded with unknown status: %d %s ' % (resp.status_code, resp.reason))

def tearDown(self):
    logger.unindent()
    logger.info('')

def pull_content(self):
    logger.debug('call: ubuntupaste.pull_content')
    conf = config.getConfig()
    fn = conf.require('dest')
    if fn == '-':
        fo = sys.stdout
    else:
        if os.path.exists(fn):
            if not conf.get('overwrite', False):
                raise exception.FileExists('File %s already exists.' % fn)
        fo = open(fn, 'w')
    _uri = conf.require('src')
    res = uri.parse(_uri)
    if res is None:
        raise exception.InvalidURI('Invalid URI: ' + _uri)
    logger.debug('uri format ok.')
    logger.debug('scheme: ' + res.scheme)
    if res.scheme == 'ubuntu':
        logger.info('using ubuntu:// style uri')
        pid = res.path
        logger.debug('path: ' + pid)
        for ch in pid:
            if not ch.isdigit():
                raise exception.InvalidURI('UbuntuPaste should only contain digits!')
        if pid != str(int(pid)):
            raise exception.InvalidURI('No leading zero allowed.')
        url = 'http://paste.ubuntu.com/{pid}/'.format(pid=pid)
        logger.info('to http url: ' + url)
    elif res.scheme == 'http':
        logger.info('using http:// style uri')
        if '/' not in res.path:
            raise exception.InvalidURI('Invalid http url: ' + _uri)
        host, path = map(lambda x: x.lower(), res.path.split('/', 1))
        # NOTE: Leading / in path is removed when using split.
        logger.debug('http host: ' + host)
        logger.debug('http path: ' + path)
        if host != 'paste.ubuntu.com':
            raise exception.InvalidURI('HTTP Host should be paste.ubuntu.com!')
        pattern = re.compile(r'^[1-9](?:\d+)(?:/?)')
        if not pattern.match(path):
            raise exception.InvalidURI('Invalid path for ubuntu paste!')
        # url validated.
        url = _uri
    else:
        raise exception.InvalidURI('Unknown scheme: ' + res.scheme)

    # Check if pad exists
    try:
        res = self.req.get(url)
    except exceptions.RequestException as e:
        logger.info('Exception: ' + e.__class__.__name__)
        logger.warn('Something went wrong when communicating with paste.ubuntu.com, assume paste pad exists.')
        return url
    logger.debug('HTTP OK.')
    logger.info('Server response: %d %s' % (res.status_code, res.reason))
    if res.status_code == 200:
        # OK
        # Q = pyquery.PyQuery(res.content)
        # content = pyquery.PyQuery(Q('.code').html().replace('\n', '<br />')).text()
        start_flag = '<td class="code"><div class="paste"><pre>'
        end_flag = '</pre></div>'
        content = self.html2text(self.fetch_between(res.content, start_flag, end_flag))
        logger.debug('content: %d lines, %d bytes' % (content.count('\n') + 1, len(content)))
        # return content
        fo.write(content)
        return
    if res.status_code >= 400 and res.status_code < 500:
        raise exception.NoSuchPad('No such pad: %s. Server responded with status code %d' % (_uri, res.status_code))
    raise exception.ServerException('Server responded with status code %d' % res.status_code)

def save(self):
    if not isinstance(self._filename, (str, unicode)):
        raise ValueError('Invalid filename.')
    logger.info('saving to ' + self._filename)
    with open(self._filename, 'w') as f:
        self.saveTo(f)