def ticket_info(self, sender, message, channel, number=None):
    """Look up Redmine ticket *number* and log a one-line summary.

    Builds a reply addressed to *sender*; on lookup failure the reply is an
    error message instead.  The reply is passed to ``self.log``.

    :param sender: nick to address the reply to.
    :param message: raw message text (unused here).
    :param channel: channel the request came from (unused here).
    :param number: ticket id to look up.
    """
    self.log('looking for ticket %s' % number)
    try:
        url = '%s/issues/%s.%s' % (self.redmine_url, number, self.redmine_format)
        headers = {'X-Redmine-API-Key': self.redmine_api_key}
        content = fetch_content(url, headers=headers)
        issue = json.loads(content).get('issue', None)
        if issue is None:
            raise WebAPIError(number)
        # BUG FIX: the previous `issue.get('status', None).get('name', '')`
        # raised AttributeError when the sub-dict was absent (default None
        # has no .get).  Guard each nested lookup with an empty dict.
        status = (issue.get('status') or {}).get('name', '')
        priority = (issue.get('priority') or {}).get('name', '')
        subject = truncate(issue.get('subject', ''), 50)
        tracker = (issue.get('tracker') or {}).get('name', '')
        ticket_url = '%s/issues/%s' % (self.redmine_url, number)
        reply = "%s: %s: %s Priority: %s Status: %s Subj: '%s' %s" % (
            sender, tracker, number, priority, status, subject, ticket_url)
    # `except WebAPIError as ex` is valid on Python 2.6+ and required on 3,
    # unlike the old `except WebAPIError, ex` form.
    except WebAPIError as ex:
        reply = '%s: could not find ticket %s (also: %s)' % (sender, number, ex)
    self.log(reply)
def getPeriodInfo(self, bid, start, period, sample, c=None, total=None):
    """Fetch aggregate statistics for one sampler over [start, start + period).

    :param bid: benchmark/run id bound into the query.
    :param start: period start timestamp.
    :param period: period length; also divides the hit count for throughput.
    :param sample: sampler label, or 'all' (any case) to aggregate every sampler.
    :param c: optional open DB cursor; a fresh one is created and closed if None.
    :param total: grand total used to compute the 'percent' field.
    :return: dict of aggregates (count/avgt/maxt/.../success_rate/percent).
    """
    close_cursor = False
    if c is None:
        c = self.db.cursor()
        close_cursor = True
    query = self._get_period_info_query()
    t = [bid, start, start + period]
    if sample.lower() != 'all':
        # Restrict the query to a single sampler label.
        t.append(sample)
        query += " AND lb = ?"
    logging.debug(query + str(t))
    c.execute(query, t)
    row = c.fetchone()
    ret = {'name': sample, 'count': row[0], 'avgt': row[1], 'maxt': row[2],
           'mint': row[3], 'stddevt': row[4], 'medt': row[5], 'p10t': row[6],
           'p90t': row[7], 'p95t': row[8], 'p98t': row[9], 'total': row[10],
           'success': row[11], 'tput': row[0] / float(period),
           'filename': str2id(sample),
           # BUG FIX: was `str2id(sample | truncate(20))` — a TypeError for
           # strings (reads like Jinja filter syntax pasted into Python).
           # Intent appears to be the id of the truncated label.
           'title': str2id(truncate(sample, 20))}
    ret['error'] = int(ret['count'] - ret['success'])
    ret['success_rate'] = 100.
    if ret['count'] > 0:
        ret['success_rate'] = (100. * ret['success']) / ret['count']
    if not total:
        # None or zero total: avoid ZeroDivisionError and report 100%.
        ret['percent'] = 100.
    else:
        ret['percent'] = ret['total'] * 100. / total
    if close_cursor:
        c.close()
    return ret
def _prep_start(self):
    '''
    Populate data gathered from global config.
    '''
    logger.log_info("Preparing WSGI Application...")
    # Load modules
    self.load_drivers()
    for i in config.preprocessors:
        self.preprocessors.append(self.drivers.preprocessor[i])
        if not hasattr(self.drivers.preprocessor[i], 'priority'):
            # Drivers without an explicit priority get the default of 10.
            self.drivers.preprocessor[i].priority = 10
    for i in config.postprocessors:
        self.postprocessors.append(self.drivers.postprocessor[i])
        if not hasattr(self.drivers.postprocessor[i], 'priority'):
            self.drivers.postprocessor[i].priority = 10
    # FIX: priorities were assigned above but never honored — sort the
    # pre/postprocessors by priority (matches the sibling variant of this
    # method elsewhere in the codebase).
    self.preprocessors.sort(key=lambda x: x.priority)
    self.postprocessors.sort(key=lambda x: x.priority)
    for mapping, path in config.static_map.items():
        logger.log_info("Mapping static directory: '%s' => '%s'" % (mapping, truncate(path, 40)))
        self.static_map[mapping] = path
    # Initialize dispatcher
    self.dispatcher = self.drivers.dispatcher.current(self)
    for i, init_hook in self.drivers.init_hook.iteritems():
        init_hook(self)
    # Signal that the application has been prepped
    self._prepped = True
def prep_summary(str):
    """Return a usable project summary, substituting a placeholder for
    empty input or PyPI's default "UNKNOWN" description.

    NOTE(review): the parameter name shadows the builtin ``str``; kept
    unchanged for backward compatibility with keyword callers.
    """
    placeholder = "A nifty project."
    # Both the falsy and the "UNKNOWN" cases return the same placeholder,
    # so the original three-way branch collapses to one condition.
    if not str or str == "UNKNOWN":
        return placeholder
    return truncate(str)
def _wrap_code(self, source):
    """Sandwich the highlighted lines from *source* between the rendered
    page header and footer templates, preserving the (lineno, text) shape."""
    header = templates["top"].render(
        name=util.truncate(self.title),
        cssfile=self.cssfile,
        raw_path=self.raw_path,
        use_extensions=self.use_extensions,
    )
    yield 0, header
    for lineno, text in source:
        yield lineno, text
    footer = templates["bottom"].render()
    yield 0, footer
def fetch_normal(): response = url_fetch(url, return_response_obj=True) # print 'INFO', response.info() if response and response.info().getheader('content-type', 'text/html').lower().split(';')[0].strip() == 'text/html': markup = response.read() else: print 'BAD MIME TYPE' if response else 'NO SUCCESSFUL RESPONSE' markup = None if markup: # process markup: markup_soup = BeautifulSoup(markup, 'lxml') og_title = find_meta_value(markup_soup, 'og:title') og_image = find_meta_value(markup_soup, 'og:image') og_description = find_meta_value(markup_soup, 'og:description') title_field = find_title(markup_soup) article.site_name = find_meta_value(markup_soup, 'og:site_name') # find author: article.author = find_author(markup_soup) # parse and process article content: content.html = article_extractor.extract(markup, article.url) doc_soup = BeautifulSoup(content.html, 'lxml') article.title = first_present([og_title, title_field, article.title]) article.top_image = make_url_absolute(first_present([article.top_image, og_image])) populate_article_json(article, content) # compute description: description = None if og_description and len(og_description.strip()): description = truncate(og_description.strip(), words=40) elif content.text and len(content.text.strip()) > 0: description = truncate(content.text, words=40) article.description = re.sub(r"[\r\n\t ]+", " ", description).strip() if description else None return True else: return False
def encodeSymbols(self, dataSubcarriers, pilotPolarity):
    """Assemble frequency-domain OFDM symbols and return the time-domain
    waveform with cyclic prefix and cross-fade margin.

    :param dataSubcarriers: (Ns, n_data) complex data-subcarrier values.
    :param pilotPolarity: iterable of per-symbol pilot polarities; truncated
        to Ns entries via util.truncate.
    :return: (Ns, ...) time-domain samples per symbol, tiled so that the
        cyclic prefix plus one cross-fade sample are available.
    """
    Ns = dataSubcarriers.shape[0]
    # FIX: `np.complex` (a deprecated alias for builtin `complex`) was
    # removed in NumPy 1.24; use `complex` directly.
    symbols = np.zeros((Ns, self.format.nfft), complex)
    symbols[:, self.format.dataSubcarriers] = dataSubcarriers
    symbols[:, self.format.pilotSubcarriers] = self.format.pilotTemplate * np.array(
        list(util.truncate(pilotPolarity, Ns)))[:, np.newaxis]
    # +1 for symbol, +1 for cross-fade
    tilesNeeded = (self.format.ncp + self.format.nfft - 1) // self.format.nfft + 2
    # Starting offset so the cyclic prefix precedes each IFFT output.
    start = -self.format.ncp % self.format.nfft
    return np.tile(np.fft.ifft(symbols), (1, tilesNeeded))[:, start:-(self.format.nfft - 1)]
def commit_info(self, sender, message, channel, sha):
    """ Retrieve commit info from Github, warm up repo cache if need be.

    Returns a reply string addressed to *sender*, or None when the commit
    was not found in any known repository.
    """
    self.log('looking for commit %s' % sha)
    # check our local cache of commit info
    if sha in self.commits:
        return "%s: %s" % (sender, self.commits[sha])
    reply = None
    if not self.github_repos:
        self._populate_github_repos()
    for repo in self.github_repos:
        url = "%s/api/v2/json/commits/show/%s/%s/%s" % (self.github_url, self.github_user, repo, sha)
        try:
            content = fetch_content(url, credentials=self.github_credentials)
            commit = json.loads(content).get('commit', None)
            if commit:
                # BUG FIX: `commit.get('committer', None).get('login', '')`
                # raised AttributeError when 'committer' was absent.
                login = (commit.get('committer') or {}).get('login', '')
                date = commit.get('committed_date', '')
                commit_url = commit.get('url', None)
                if commit_url:
                    commit_url = "%s%s" % (self.github_url, commit_url)
                msg = truncate(commit.get('message', ''), 50)
                reply = self.commit_msg % (sha[0:10], login, date, msg, commit_url)
                # save it to a local cache so we don't waste API calls
                self.commits[sha] = reply
                reply = "%s: %s" % (sender, reply)
                # got our commit, don't waste API calls
                break
        except WebAPIError:
            # Commit not in this repo; try the next one.
            pass
        except ValueError as ex:
            # apparent incomplete json, go ahead and return
            reply = "Incomplete response, please try again."
            self.log("%s: %s" % (reply, ex))
            break
    # FIX: `reply` was built but never returned, making the lookup a no-op
    # for callers (the cache-hit path above does return).
    return reply
def _prep_start(self):
    '''
    Populate data gathered from global config.
    '''
    logger.log_info("Preparing WSGI Application...")
    # Bring every configured driver module into memory first.
    self.load_drivers()
    for name in config.preprocessors:
        driver = self.drivers.preprocessor[name]
        self.preprocessors.append(driver)
        if not hasattr(driver, 'priority'):
            # Default priority for drivers that declare none.
            driver.priority = 10
    for name in config.postprocessors:
        driver = self.drivers.postprocessor[name]
        self.postprocessors.append(driver)
        if not hasattr(driver, 'priority'):
            driver.priority = 10
    # Run order is determined by ascending priority.
    self.preprocessors.sort(key=lambda d: d.priority)
    self.postprocessors.sort(key=lambda d: d.priority)
    for mapping, path in config.static_map.items():
        logger.log_info(
            "Mapping static directory: '%s' => '%s'" % (mapping, truncate(path, 40)))
        self.static_map[mapping] = path
    # Hand URL routing over to the configured dispatcher driver.
    self.dispatcher = self.drivers.dispatcher.current(self)
    for name, init_hook in self.drivers.init_hook.iteritems():
        init_hook(self)
    # Mark the application as ready to serve.
    self._prepped = True
def set_summary(self):
    """Set self.summary from the API payload's description, falling back
    to a generic placeholder when the payload lacks one."""
    # Fall back first; the try below overwrites it on success.
    self.summary = "A nifty project."
    try:
        description = self.api_raw["info"]["description"]
        self.summary = truncate(description)
    except (KeyError, TypeError):
        # Missing key or non-subscriptable payload: keep the placeholder.
        pass
def confirm_input(words: List[str], pr, original_input, ignore_0match):
    """Echo recognized input to the terminal with color/beep feedback.

    Green words were successfully parsed, red words were not consumed, and
    a red ' ???' marks missing trailing input.  Relies on module globals
    (``prompt``, ``good_beep``, ``bad_beep``, ``all_good_beep``,
    ``suggestions``, ``eclc``) — presumably configured at startup; TODO
    confirm their semantics against the module top level.

    :param words: tokenized user input.
    :param pr: parse result; fields used here: longest, missing, error, retval.
    :param original_input: raw input line, shown dimmed when ignored.
    :param ignore_0match: suppress feedback for zero/trivial matches.
    :return: False when the input was ignored or errored, True otherwise.
    """
    # Number of words the parser consumed.
    n = pr.longest
    cols = shutil.get_terminal_size().columns
    prp = prompt_string()
    printed = ''
    # Trivial non-match (nothing parsed, or a single unparsed digit):
    # just redraw the prompt with the original input in yellow and bail.
    if ignore_0match and (n == 0 or (n == 1 and pr.missing and words[0].isdigit())):
        if prompt:
            util.clear_line()
            print(prp + colored(
                util.truncate(original_input, cols - len(util.strip_markup(prp))),
                'yellow'), end='\r')
            sys.stdout.flush()
        return False
    if prompt:
        util.clear_line()
        printed = prp
        print(printed, end='')
        sys.stdout.flush()
    if prompt:
        # Estimate how many columns the echo will need, to decide whether
        # to wrap onto a fresh indented line first.
        needed = 0
        i = 0
        while i < n:
            needed += len(words[i])
            if i + 1 < n:
                needed += len(' ')
            i += 1
        if n != len(words):
            if n != 0:
                needed += len(' ')
            # Reserve room for the (possibly truncated) unconsumed tail.
            needed += 5
        elif pr.missing:
            needed += len(' ???')
        avail = cols - len(util.strip_markup(printed))
        if needed > avail:
            print('\n ', end='')
            printed = ' '
    # Fully consumed, nothing missing, no error: complete success.
    all_good = (n == len(words) and not pr.missing and pr.error is None)
    if all_good:
        if all_good_beep is not None:
            all_good_beep.play()
    elif good_beep is not None and n != 0:
        good_beep.play(n - 1)
    # Echo each consumed word in green; when not all_good, pace the echo
    # to the beep length so sound and text stay in sync.
    i = 0
    while i < n:
        if prompt:
            x = colored(words[i], 'green')
            if i + 1 < n:
                x += ' '
            print(x, end='')
            sys.stdout.flush()
            printed += x
        if not all_good and good_beep is not None:
            time.sleep(good_beep.get_length())
        i += 1
    if not all_good and bad_beep is not None:
        bad_beep.play()
    if prompt:
        if n != len(words):
            if n != 0:
                print(' ', end='')
                printed += ' '
            # Unconsumed words are shown in red, truncated to what fits.
            avail = cols - len(util.strip_markup(printed))
            print(colored(util.truncate(' '.join(words[n:]), avail), 'red'), end='')
        elif pr.missing:
            # if all words were consumed but evaluation still went bad,
            # it means additional input was missing
            print(colored(' ???', 'red'), end='')
        print()
    if pr.error is not None:
        print()
        print(colored('error:', 'red'), pr.error)
        return False
    if not pr.missing:
        # Success path: print the command's return value, newline-terminated.
        if pr.retval is not None:
            s = str(pr.retval)
            if not s.endswith('\n'):
                s += '\n'
            print(s, end='')
    elif n != len(words) and pr.missing == ['<command>']:
        print(colored('error: no such command', 'red'))
    elif suggestions is not None:
        eclcompletion.print_suggestions(eclc, suggestions)
    else:
        # Describe what the parser still expected, deduplicated.
        problem = ('missing' if n == len(words) else 'expected')
        problem += ' ' + ' or '.join(
            map(eclc.render_unit, list(set(pr.missing))))
        print(colored('error: ' + problem, 'red'))
    return True
def seek(agent: Agent, frame_time, target: Vector2):
    """Seek steering behavior: the desired velocity points from the agent
    toward *target*, capped at the frame-scaled locomotion's max_speed."""
    scaled_locomotion = agent.locomotion * frame_time
    offset = target - agent.position
    return truncate(offset, scaled_locomotion.max_speed)
def compute_day(self, di, alpha):
    """Return the day's alpha after applying util.truncate with the
    configured limits.

    NOTE(review): util.truncate appears to adjust *alpha* in place (its
    return value is discarded) — confirm against util's implementation.
    """
    # Should return alpha as a list
    util.truncate(alpha, self.maxPercent, self.maxIter)
    return alpha
def test_truncate():
    """truncate honors display width (CJK chars count double) and appends
    an ellipsis when the text is cut."""
    sample = "测试内容123test"
    expectations = (
        (20, "测试内容123test"),
        (11, "测试内容..."),
        (8, "测试..."),
        (4, "..."),
    )
    for width, expected in expectations:
        assert truncate(sample, width) == expected
def encodeSymbols(self, dataSubcarriers, pilotPolarity):
    """Assemble frequency-domain OFDM symbols and return the time-domain
    waveform with cyclic prefix and cross-fade margin.

    :param dataSubcarriers: (Ns, n_data) complex data-subcarrier values.
    :param pilotPolarity: iterable of per-symbol pilot polarities; truncated
        to Ns entries via util.truncate.
    :return: (Ns, ...) time-domain samples per symbol.
    """
    Ns = dataSubcarriers.shape[0]
    # FIX: `np.complex` (a deprecated alias for builtin `complex`) was
    # removed in NumPy 1.24; use `complex` directly.
    symbols = np.zeros((Ns, self.format.nfft), complex)
    symbols[:, self.format.dataSubcarriers] = dataSubcarriers
    symbols[:, self.format.pilotSubcarriers] = self.format.pilotTemplate * np.array(
        list(util.truncate(pilotPolarity, Ns)))[:, np.newaxis]
    # +1 for symbol, +1 for cross-fade
    tilesNeeded = (self.format.ncp + self.format.nfft - 1) // self.format.nfft + 2
    # Starting offset so the cyclic prefix precedes each IFFT output.
    start = -self.format.ncp % self.format.nfft
    return np.tile(np.fft.ifft(symbols), (1, tilesNeeded))[:, start:-(self.format.nfft - 1)]
def parse_js_helper(t, type_):
    """Flatten a last.fm-style JSON entry into a display tuple.

    Albums/tracks yield (artist, name, playcount); artists yield
    (name, playcount).  Any other type falls through to None.
    """
    if type_ == 'artists':
        return (util.truncate(t['name']), t['playcount'])
    if type_ in ('albums', 'tracks'):
        artist_name = util.truncate(t['artist']['name'])
        item_name = util.truncate(t['name'])
        return (artist_name, item_name, t['playcount'])