def get_video_link(api, status_json):
    """Extract a direct video URL from a tweet's JSON dict.

    Args:
        api: authenticated Twitter API client (must provide get_status()).
        status_json: the tweet as a JSON dict.

    Returns:
        The chosen video variant URL (str) on success; otherwise an Error
        object describing the fallback outcome. When the tweet merely shares
        another tweet's media, recurses into the original tweet.
    """
    try:
        # Native video: pick the preferred variant and return its URL.
        media_variants = status_json["extended_entities"]["media"][0][
            "video_info"]["variants"]
        return filter_video(media_variants)['url']
    # BUG FIX: was a bare `except:`, which also swallowed KeyboardInterrupt
    # and SystemExit. `except Exception` keeps the original fallback
    # behavior for ordinary errors without trapping interpreter exits.
    except Exception:
        try:
            additional_media_info = status_json["extended_entities"]["media"][
                0]["additional_media_info"]
            # Probe access: raises KeyError (-> Error(3) below) when the
            # 'embeddable' key is absent.
            additional_media_info['embeddable']
            if additional_media_info and additional_media_info[
                    'embeddable'] is None:
                # look for link
                link = look_for_link(additional_media_info)
                if not link:
                    return Error(1, 'Error')
                return Error(2, link)
            elif status_json['entities']['media']:
                # tweet is share of another tweet containing media
                expanded_url = status_json['entities']['media'][0][
                    'expanded_url'].split('/')
                tweetId = expanded_url[len(expanded_url) - 3]
                if tweetId != status_json['id']:
                    ogTweet = api.get_status(tweetId)
                    ogTweet_json = get_json_dict(ogTweet)
                    return get_video_link(api, ogTweet_json)
        except Exception:
            return Error(3, "##")
def create_update_leaf(self, index, obj):
    """Create or update the Elasticsearch document for a leaf.

    Args:
        index: name of the Elasticsearch index to write to.
        obj: leaf-like object supplying the content fields.

    Returns:
        True when the document was saved and reports as published.

    Raises:
        Error: when the save fails, or the saved content is not published.
    """
    # create the mappings in elasticsearch
    Content.init(index=index, using=self.es_client)
    # create and save an article
    content1 = Content(
        meta={'id': obj.leaf_id},
        leaf_id=obj.leaf_id,
        branch_id=obj.branch_id,
        branch_name=obj.branch_name,
        content_eng=obj.content_eng,
        content_san=obj.content_san,
        content_kan=obj.content_kan,
        master_branch_name=obj.master_branch_name,
        content_type_name=obj.content_type_name,
        source_doc_name=obj.source_doc_name,
        tags=['test1', 'test2'])
    content1.published_date = datetime.now()
    try:
        content1.save()
        if content1.is_published():
            return True
        # Saved but not visible as published — surface as an error.
        # BUG FIX: the original had an unreachable `return False` after
        # this raise; removed.
        raise Error("*** search.objects.create_update_leaf: Error! Unable to write leaf/content to Elastic.")
    except Exception as Ex:
        # BUG FIX: the original had `print(Ex)` and `return False` after
        # this raise — both unreachable dead code; removed.
        raise Error("*** search.objects.create_update_leaf: Exception occured! Unable to write leaf/content to Elastic.")
def HandleTestResult(status, info, result, rebase=False):
    """Record the outcome of one finished test on the given status tracker.

    Args:
      status: collector exposing Passed/Skipped/Failed callbacks.
      info: test description; provides is_roundtrip, expected_error,
          Diff() and Rebase().
      result: either an Exception (re-raised and reported as a failure) or
          a (stdout, stderr, returncode, duration) tuple.
      rebase: when True, rewrite expected outputs instead of diffing.
    """
    try:
        if isinstance(result, Exception):
            raise result
        stdout, stderr, returncode, duration = result

        if info.is_roundtrip:
            # Roundtrip runs: 0 = pass, 2 = unparseable input, else failure.
            if returncode == 0:
                status.Passed(info, duration)
            elif returncode == 2:
                # run-roundtrip.py returns 2 if the file couldn't be parsed.
                # it's likely a "bad-*" file.
                status.Skipped(info)
            else:
                raise Error(stderr)
            return

        if returncode != info.expected_error:
            # Wrong exit code: the test already failed, but diff anyway so
            # the failure message carries more detail.
            msg = 'expected error code %d, got %d.' % (info.expected_error,
                                                       returncode)
            try:
                info.Diff(stdout, stderr)
            except Error as e:
                msg += '\n' + str(e)
            raise Error(msg)

        if rebase:
            info.Rebase(stdout, stderr)
        else:
            info.Diff(stdout, stderr)
        status.Passed(info, duration)
    except Exception as e:
        status.Failed(info, str(e))
def add_freq_plot(self, freq):
    """Append a spectrum row to the waterfall image.

    The spectrum is max-decimated when it is wider than the waterfall, and
    the oldest row is scrolled off once the image reaches full height.

    Args:
        freq: (list[float]). Spectrum to append. Its length must equal the
            waterfall width, or be an exact multiple of it.

    Raises:
        Error: when len(freq) is not a multiple of the waterfall width, or
            the decimated row still does not match the width.
    """
    if self._width < len(freq):
        if len(freq) % self._width != 0:
            raise Error('frequency vector must be a multiple of waterfall width')
        # Max-decimate: one row per output bin, keep the peak of each bin.
        # BUG FIX: this used true division (len(freq)/self._width), which
        # produces a float and makes numpy's reshape fail under Python 3;
        # integer division is required.
        freq = np.array(freq).reshape((-1, len(freq) // self._width))
        freq = freq.max(1)
    if self._width != len(freq):
        raise Error('Dimension mismatch')
    data = self.live_data['wfall']['image'][0]
    if data.shape[0] < self._height:
        # Image still growing: just append the new row.
        self.live_data['wfall']['image'] = [np.concatenate((data, [freq]))]
    else:
        # Image full: scroll by dropping the oldest row before appending.
        self.live_data['wfall']['image'] = [np.concatenate((data[1:, :], [freq]))]
def ParseDirective(self, key, value):
    """Apply a single KEY/VALUE test directive to this test's settings.

    Raises:
      Error: when the directive name or the requested TOOL is unknown.
    """
    if key == 'EXE':
        self.exe = value
    elif key == 'STDIN_FILE':
        self.input_filename = value
    elif key == 'FLAGS':
        # FLAGS may arrive pre-split (a list) or as one shell-style string.
        extra = value if isinstance(value, list) else shlex.split(value)
        self.flags += extra
    elif key == 'ERROR':
        self.expected_error = int(value)
    elif key == 'SLOW':
        self.slow = True
    elif key == 'SKIP':
        self.skip = True
    elif key == 'VERBOSE-FLAGS':
        # One flag list per verbosity level.
        self.verbose_flags = [shlex.split(level) for level in value]
    elif key in ('TODO', 'NOTE'):
        pass  # informational directives; nothing to do
    elif key == 'TOOL':
        if value not in TOOLS:
            raise Error('Unknown tool: %s' % value)
        self.tool = value
        # A tool is a bundle of directives; expand it recursively.
        for tool_key, tool_value in TOOLS[value].items():
            self.ParseDirective(tool_key, tool_value)
    elif key == 'ENV':
        # Pattern: FOO=1 BAR=stuff
        self.env = dict(pair.split('=') for pair in value.split())
    else:
        raise Error('Unknown directive: %s' % key)
def HandleTestResult(status, info, result, rebase=False):
    """Record the outcome of one finished test on the given status tracker.

    Args:
      status: collector exposing Passed/Skipped/Failed callbacks.
      info: test description (is_roundtrip, Diff(), Rebase()).
      result: a RunResult-like object (Failed(), GetLastFailure(), stdout,
          stderr, duration) or an Error/KeyboardInterrupt to re-raise.
      rebase: when True, rewrite expected outputs instead of diffing.
    """
    try:
        if isinstance(result, (Error, KeyboardInterrupt)):
            raise result

        if info.is_roundtrip:
            if not result.Failed():
                status.Passed(info, result.duration)
            elif result.GetLastFailure().returncode == 2:
                # run-roundtrip.py returns 2 if the file couldn't be parsed.
                # it's likely a "bad-*" file.
                status.Skipped(info)
            else:
                raise Error(result.stderr)
            return

        if result.Failed():
            # This test has already failed, but diff it anyway.
            last = result.GetLastFailure()
            msg = 'expected error code %d, got %d.' % (
                last.GetExpectedReturncode(), last.returncode)
            try:
                info.Diff(result.stdout, result.stderr)
            except Error as e:
                msg += '\n' + str(e)
            raise Error(msg)

        if rebase:
            info.Rebase(result.stdout, result.stderr)
        else:
            info.Diff(result.stdout, result.stderr)
        status.Passed(info, result.duration)
    except Error as e:
        status.Failed(info, str(e), result)
def ParseDirective(self, key, value):
    """Apply one KEY/VALUE directive from a test file to this test.

    Raises:
      Error: on an unknown directive or unknown TOOL name.
    """
    if key == 'EXE':
        self.exe = value
    elif key == 'STDIN_FILE':
        # The stdin file doubles as the generated-input reference.
        self.input_filename = value
        self.generated_input_filename = value
    elif key == 'FLAGS':
        self.flags += shlex.split(value)
    elif key == 'ERROR':
        self.expected_error = int(value)
    elif key == 'SLOW':
        self.slow = True
    elif key == 'SKIP':
        self.skip = True
    elif key == 'VERBOSE-FLAGS':
        # One flag list per verbosity level.
        self.verbose_flags = [shlex.split(level) for level in value]
    elif key in ('TODO', 'NOTE'):
        pass  # informational only
    elif key == 'TOOL':
        if value not in TOOLS:
            raise Error('Unknown tool: %s' % value)
        self.tool = value
        # Expand the tool's bundled directives recursively.
        for tool_key, tool_value in TOOLS[value].items():
            self.ParseDirective(tool_key, tool_value)
    else:
        raise Error('Unknown directive: %s' % key)
def put(self, nid, pass_id, orig, dest, msg):
    """Add a new item to the communications log.

    Args:
        nid (int): Norad ID of the satellite communicated with.
        pass_id (str): ID of the pass the communication belongs to.
        orig (str): Originator of the message; must be in self.ACTORS.
        dest (str): Destination of the message; must be in self.ACTORS.
        msg (str): The message itself.

    Raises:
        Error: when orig or dest is not a known actor.
    """
    if orig not in self.ACTORS:
        raise Error('Invalid origin, must be one of %s' % (self.ACTORS))
    # BUG FIX: this check previously re-tested `orig`, so an invalid
    # destination was silently accepted.
    if dest not in self.ACTORS:
        raise Error('Invalid destination, must be one of %s' % (self.ACTORS))
    # One-row frame so the record can be appended through put_df.
    df = DataFrame({
        'nid': [nid],
        'pass_id': [pass_id],
        'orig': [orig],
        'dest': [dest],
        'msg': [msg]
    })
    self.put_df(self._TABLE, df, index=False)
def _schedule_pass(self, i):
    """Arm a one-shot timer that starts pass *i* at its buffered start time.

    The timer fires self.schedule.buffertime seconds before the pass start
    timestamp (self.tstamps[i]).

    Args:
        i: index of the pass to schedule.

    Raises:
        Error: when a pass is already scheduled, or the start time is less
            than the buffer time in the future (the scheduler is stopped
            first in that case).
    """
    if self.timer is not None:
        raise Error("Pass %d has already been scheduled" % (i))
    if not self._stop:
        # BUG FIX: this used pd.Timestamp(pd.datetime.utcnow()); the
        # pd.datetime alias was removed in pandas >= 1.0, so use the
        # stdlib datetime directly (local import keeps the change local).
        from datetime import datetime
        tstart = self.tstamps[i] - pd.Timedelta(
            seconds=self.schedule.buffertime)
        now = pd.Timestamp(datetime.utcnow())
        if tstart <= now:
            # Reset timers etc. before bailing out.
            self.stop()  # <-- Added 2018-06-14. Seems like a sensible thing to ensure timers etc are reset. To be checked
            raise Error(
                "Can't schedule a pass that is less than %d seconds in the future. Scheduler stopped."
                % (self.schedule.buffertime))
        sleeptime = (tstart - now) / np.timedelta64(1, 's')
        # Tuple layout: (pass index, 'S'cheduled marker, start time, timer).
        self.timer = (i, 'S', tstart,
                      Timer(sleeptime, self._callback, args=(i, )))
        # Daemonize so a pending timer never blocks interpreter shutdown.
        self.timer[-1].daemon = True
        self.timer[-1].start()
def GetDataFromKey(self, section, ini_key):
    """Fetch the value stored for *ini_key* under *section* in the .ini file.

    Missing sections or keys are reported through Error(); every successful
    read is logged through Info(). Returns the value (None when the key is
    absent).

    NOTE(review): assumes Error() only reports and does not raise — if the
    section were truly missing, the lookup below would fail; confirm.
    """
    if section not in self._config:
        Error("Section {} does not exist.".format(section))
    if ini_key not in self._config[section]:
        Error("Key {} does not exist in section {}.".format(ini_key, section))
    data = self._config[section].get(ini_key)
    Info("Reading {}/{} from .ini file: {}".format(section, ini_key, data))
    return data
def Run(self, cwd, timeout, console_out=False, env=None):
    """Execute this command, killing it (and its children) on timeout.

    Args:
        cwd: working directory for the child process.
        timeout: seconds before the process tree is killed.
        console_out: when True, let the child inherit stdout/stderr instead
            of capturing them.
        env: optional environment dict for the child.

    Returns:
        RunResult with stdout, stderr, returncode and duration.

    Raises:
        Error: 'TIMEOUT' when the watchdog killed the process, or the OS
            error text when the process could not be started.
    """
    process = None
    # Shared flag recording whether KillProcess ran because of a timeout.
    is_timeout = Cell(False)

    def KillProcess(timeout=True):
        # Kill the whole process tree, then record why.
        if process:
            try:
                if IS_WINDOWS:
                    # http://stackoverflow.com/a/10830753: deleting child processes in
                    # Windows
                    subprocess.call(
                        ['taskkill', '/F', '/T', '/PID', str(process.pid)])
                else:
                    # The child runs in its own session (os.setsid below), so
                    # signalling the group reaches its children too.
                    os.killpg(os.getpgid(process.pid), 15)
            except OSError:
                pass
        is_timeout.Set(timeout)

    try:
        start_time = time.time()
        kwargs = {}
        if not IS_WINDOWS:
            kwargs['preexec_fn'] = os.setsid
        stdin_data = None
        if self.stdin:
            # BUG FIX: the original `open(self.stdin, 'rb').read()` leaked
            # the file handle; close it deterministically.
            with open(self.stdin, 'rb') as stdin_file:
                stdin_data = stdin_file.read()
        # http://stackoverflow.com/a/10012262: subprocess with a timeout
        # http://stackoverflow.com/a/22582602: kill subprocess and children
        process = subprocess.Popen(
            self.args,
            cwd=cwd,
            env=env,
            stdout=None if console_out else subprocess.PIPE,
            stderr=None if console_out else subprocess.PIPE,
            stdin=None if not self.stdin else subprocess.PIPE,
            **kwargs)
        # One-shot watchdog: fires KillProcess(True) after `timeout` seconds.
        timer = threading.Timer(timeout, KillProcess)
        try:
            timer.start()
            stdout, stderr = process.communicate(input=stdin_data)
        finally:
            returncode = process.returncode
            process = None
            timer.cancel()
        if is_timeout.Get():
            raise Error('TIMEOUT')
        duration = time.time() - start_time
    except OSError as e:
        raise Error(str(e))
    finally:
        # process is None by now on the normal path, so this only records
        # that no timeout occurred.
        KillProcess(False)
    return RunResult(self, stdout, stderr, returncode, duration)
def _read(self):
    """Load CONFIG_PATH into self._config and validate required sections.

    A missing file or missing section is reported through Error().
    """
    if not os.path.isfile(CONFIG_PATH):
        Error("{} file not present.".format(CONFIG_PATH))
        return
    Info("Reading {} data.".format(CONFIG_PATH))
    self._config.read(CONFIG_PATH)
    # Every section the module declares must be present in the file.
    for name in ConfigModule.SECTIONS:
        if name not in self._config:
            Error("Section {} does not exist.".format(name))
def RunCommandWithTimeout(command, cwd, timeout, console_out=False, env=None):
    """Run *command* as a subprocess, killing it after *timeout* seconds.

    Args:
        command: argv list for subprocess.Popen.
        cwd: working directory for the child process.
        timeout: seconds to wait before killing the whole process tree.
        console_out: when True, let the child inherit stdout/stderr instead
            of capturing them.
        env: optional environment dict for the child.

    Returns:
        (stdout, stderr, returncode, duration); stdout/stderr are None when
        console_out is True.

    Raises:
        Error: on timeout (message includes the captured output) or when the
            process could not be started (OSError).
    """
    process = None
    # Shared flag recording whether KillProcess ran because of a timeout.
    is_timeout = Cell(False)

    def KillProcess(timeout=True):
        # Kill the child and all of its children, then record why.
        if process:
            try:
                if IS_WINDOWS:
                    # http://stackoverflow.com/a/10830753: deleting child processes in
                    # Windows
                    subprocess.call(
                        ['taskkill', '/F', '/T', '/PID', str(process.pid)])
                else:
                    # The child runs in its own session (os.setsid below), so
                    # signalling the process group reaches its children too.
                    os.killpg(os.getpgid(process.pid), 15)
            except OSError:
                pass
        is_timeout.Set(timeout)

    try:
        start_time = time.time()
        kwargs = {}
        if not IS_WINDOWS:
            kwargs['preexec_fn'] = os.setsid
        # http://stackoverflow.com/a/10012262: subprocess with a timeout
        # http://stackoverflow.com/a/22582602: kill subprocess and children
        process = subprocess.Popen(
            command,
            cwd=cwd,
            env=env,
            stdout=None if console_out else subprocess.PIPE,
            stderr=None if console_out else subprocess.PIPE,
            universal_newlines=True,
            **kwargs)
        # One-shot watchdog: fires KillProcess(True) if communicate() takes
        # longer than the timeout.
        timer = threading.Timer(timeout, KillProcess)
        try:
            timer.start()
            stdout, stderr = process.communicate()
        finally:
            returncode = process.returncode
            process = None
            timer.cancel()
        if is_timeout.Get():
            raise Error('TIMEOUT\nSTDOUT:\n%s\nSTDERR:\n%s\n' % (stdout, stderr))
        duration = time.time() - start_time
    except OSError as e:
        raise Error(str(e))
    finally:
        # process is None by now on the normal path, so this only records
        # that no timeout occurred.
        KillProcess(False)
    return stdout, stderr, returncode, duration
def FindExeWithFallback(name, default_exe_list, override_exe=None):
    """Resolve the absolute path of the *name* executable.

    An explicit override wins; otherwise the first existing candidate from
    default_exe_list is used.

    Raises:
      Error: when the override (or, absent one, every candidate) does not
          exist on disk.
    """
    if override_exe is not None:
        if os.path.exists(override_exe):
            return os.path.abspath(override_exe)
        raise Error('%s executable not found.\nsearch path: %s\n' %
                    (name, override_exe))

    for candidate in default_exe_list:
        if os.path.exists(candidate):
            return os.path.abspath(candidate)
    raise Error('%s executable not found.\n%s\n' %
                (name, '\n'.join('search path: %s' % path
                                 for path in default_exe_list)))
def next_lexeme(self):
    '''Gets and returns the next significant lexeme.

    Skips over block comments, one-line comments and invisible characters.
    An unterminated block comment that reaches end-of-file is reported via
    self._raise_error as a lexical error.
    '''
    while True:
        self._fetch_lexeme()
        if self._token == 'comment_start':
            # Build the error up-front so it points at the position where
            # the (possibly unterminated) comment began.
            e = Error(line=self._file.index(),
                      column=len(self._file.line()) - len(self._line),
                      _type="Lexic",
                      desc="Unterminated comment")
            # Consume everything up to the comment terminator.
            while self._token != 'comment_end':
                self._fetch_lexeme()
                if self._file.eof():
                    # Comment never closed: report it and stop scanning it.
                    # NOTE(review): assumes _raise_error can return
                    # (collecting the error) since we break afterwards.
                    self._raise_error(e)
                    break
        elif self._token == 'one_line_comment':
            # Discard the rest of the current line.
            self._fetch_line()
        elif self._token == 'invisible_char':
            continue
        else:
            # A significant token: stop skipping.
            break
    return self.lexeme()
def __error(self, expected):
    """Raise a syntactic error describing what was expected vs. received."""
    description = "< {} > expected, {} < {} > received".format(
        expected, self.__lexic.token(), self.__lexeme)
    self._raise_error(Error(line=self.__lexic.line(),
                            column=self.__lexic.column(),
                            _type="Syntactic",
                            desc=description))
def in_pos(self, az=None, el=None):
    """Check whether the antenna is within half a beamwidth of a position.

    Args:
        az: target azimuth (degrees); defaults to the last commanded
            azimuth (self.cmd_az) when available.
        el: target elevation (degrees); defaults to self.cmd_el.

    Returns:
        True if both pointing errors are below BEAMWIDTH/2, False otherwise.

    Raises:
        Error: when no azimuth/elevation is supplied and none has been
            commanded yet.
    """
    # Fall back to the last commanded position when available.
    if hasattr(self, 'cmd_az') and az is None:
        az = self.cmd_az
    if hasattr(self, 'cmd_el') and el is None:
        el = self.cmd_el
    if az is None or el is None:
        raise Error("in_pos called with arguments az={} and el={}, which is invalid".format(az, el))
    # BUG FIX: the original had a second `if az is None and el is None:`
    # fallback block here — unreachable after the raise above; removed.
    azerr, elerr = self.azel_err(az, el)
    if azerr < self.BEAMWIDTH / 2.0 and elerr < self.BEAMWIDTH / 2.0:
        return True
    else:
        return False
def ParseDirective(self, key, value):
    """Apply one KEY/VALUE directive to this (multi-command) test.

    Raises:
      Error: on an unknown directive name.
    """
    if key == 'RUN':
        # Each RUN line starts a new command.
        self.cmds.append(CommandTemplate(value))
    elif key == 'STDIN_FILE':
        self.input_filename = value
    elif key == 'ARGS':
        # Bare ARGS applies to the most recent RUN command.
        self.GetLastCommand().AppendArgs(value)
    elif key.startswith('ARGS'):
        # ARGS* targets every command; ARGS<n> targets command n.
        suffix = key[len('ARGS'):]
        if suffix == '*':
            self.AppendArgsToAllCommands(value)
        elif re.match(r'^\d+$', suffix):
            self.GetCommand(int(suffix)).AppendArgs(value)
        # NOTE(review): any other suffix is silently ignored — confirm
        # whether that is intentional.
    elif key == 'ERROR':
        self.expected_error = int(value)
    elif key == 'SLOW':
        self.slow = True
    elif key == 'SKIP':
        self.skip = True
    elif key == 'VERBOSE-ARGS':
        self.GetLastCommand().AppendVerboseArgs(value)
    elif key in ('TODO', 'NOTE'):
        pass  # informational only
    elif key == 'TOOL':
        self.SetTool(value)
    elif key == 'ENV':
        # Pattern: FOO=1 BAR=stuff
        self.env = dict(pair.split('=') for pair in value.split())
    else:
        raise Error('Unknown directive: %s' % key)
def __init__(self, db, table, ncols=None, ntypes=sa_t.Text(), **kwargs):
    """Create a key/value table with optional extra ("normal") columns.

    Args:
        db: the database handle (see :class:`.Database`).
        table: name of the table to create/use.
        ncols: Normal columns; a list of column names (defaults to none).
        ntypes: SQLAlchemy type, or list of types, associated with the
            columns (defaults to Text).
        **kwargs: see :class:`.Database` for the additional arguments.

    Raises:
        Error: when 'key' or 'value' is used as a normal column name.
    """
    # BUG FIX: `ncols=[]` was a shared mutable default argument; use the
    # None sentinel idiom instead (behavior for callers is unchanged).
    if ncols is None:
        ncols = []

    # The table to use
    self._TABLE = table

    # Normal (non-kv) columns — 'key'/'value' are reserved for the KV pair.
    if 'key' in ncols or 'value' in ncols:
        raise Error("'key' or 'value' cant be used as a normal field name")
    self._NCOLS = ncols

    super(KVDb, self).__init__(db, **kwargs)

    # A single shared type is replicated across all normal columns.
    if not isinstance(ntypes, list):
        ntypes = [ntypes for _ in ncols]
    columns = [Column(n, t) for n, t in zip(ncols, ntypes)]

    self.create_table(self._TABLE,
                      Column('key', sa_t.String(_MAX_STRING_LEN)),
                      Column('value', sa_t.Text()),
                      *columns)
def azel2rect(az, el):
    """Convert az/el polar pointing coordinates to x,y for display on
    rectangular axes.

    Scalars are promoted to one-element sequences; (None, None) is returned
    when either input is None.

    Raises:
      Error: when az and el have different lengths.
    """
    if az is None or el is None:
        return None, None

    def as_sequence(v):
        # Promote a bare scalar to a one-element list.
        try:
            len(v)
        except TypeError:
            v = [v]
        return v

    az = as_sequence(az)
    el = as_sequence(el)
    if len(az) != len(el):
        raise Error('az and el must be of same lenght')

    # Mirror the azimuth, then compute polar -> cartesian with elevation 90
    # at the origin (radius grows toward the horizon).
    az = 360.0 - np.array(az)
    el = np.array(el)
    radius = 90 - el
    theta = (az + 90) / 180.0 * pi
    x = radius * cos(theta)
    y = radius * sin(theta)
    return x, y
def linktext(links):
    """Build the ' | '-separated HTML navigation links shown on top of the
    different apps.

    Args:
        links: iterable of (label, url) or (label, url, port) tuples; when a
            port is given the click handler rewrites the target port.

    Returns:
        The assembled HTML string ('' when there are no links).

    Raises:
        Error: when a tuple has neither 2 nor 3 elements.
    """
    # BUG FIX: removed the pointless `if 1:` guard that wrapped the whole
    # body in the original.
    linktxt = ''
    for link in links:
        # Normalize 2-tuples to 3-tuples with no port override.
        if len(link) == 2:
            link = (link[0], link[1], None)
        if len(link) != 3:
            raise Error("Invalid link argument")
        if linktxt != '':
            linktxt += ' | '
        if link[2] is not None:
            linktxt += '<a href="%s" onclick="javascript:event.target.port=%d">%s</a>'%(link[1], link[2], link[0])
        else:
            linktxt += '<a href="%s">%s</a>'%(link[1], link[0])
    return linktxt
def __init__(self, title, plot_name, mtype='PreText', links=[]):
    """
    Args:
        title: The title of the plot.
        plot_name: The unique name to refer to the LivePlot by (this dash
            only has a single LivePlot - the textbox).
        mtype: The type of textbox to use. Options are PreText, Div, or
            Paragraph. See :class:Markup.
        links: An array of links to display. See :function:`linktext` for
            the format.

    Raises:
        Error: when plot_name collides with the reserved 'links' widget.
    """
    if plot_name == 'links':
        raise Error("Plot name cannot be links")

    # The dash consists of exactly two widgets: the link bar and the text box.
    linksbox = Markup('links', mtype='Div', text=linktext(links))
    textbox = Markup(plot_name, mtype=mtype, text='')

    def layout_figs(figs):
        # Custom layout: link bar stacked above the text box, both scaling
        # to the available width.
        top = bl.row([figs['links']], sizing_mode='scale_width')
        bottom = bl.row([figs[plot_name]], sizing_mode='scale_width')
        return bl.column([top, bottom], sizing_mode='scale_width')

    super(TextDash, self).__init__(layout_callback=layout_figs, title=title)
    self.add_plot(linksbox)
    self.add_plot(textbox)
def ParseDirective(self, key, value):
    """Apply one KEY/VALUE directive to this test.

    ARGS and ERROR accept a suffix selecting the target command(s); the
    suffix semantics live in ApplyToCommandBySuffix.

    Raises:
      Error: on an unknown directive name.
    """
    if key == 'RUN':
        # Each RUN line starts a new command.
        self.cmds.append(CommandTemplate(value))
    elif key == 'STDIN_FILE':
        self.input_filename = value
    elif key.startswith('ARGS'):
        tail = key[len('ARGS'):]
        self.ApplyToCommandBySuffix(tail, lambda cmd: cmd.AppendArgs(value))
    elif key.startswith('ERROR'):
        tail = key[len('ERROR'):]
        self.ApplyToCommandBySuffix(
            tail, lambda cmd: cmd.SetExpectedReturncode(int(value)))
    elif key == 'SLOW':
        self.slow = True
    elif key == 'SKIP':
        self.skip = True
    elif key == 'VERBOSE-ARGS':
        self.GetLastCommand().AppendVerboseArgs(value)
    elif key in ('TODO', 'NOTE'):
        pass  # informational only
    elif key == 'TOOL':
        self.SetTool(value)
    elif key == 'STDIN':
        self.GetLastCommand().SetStdin(value)
    elif key == 'ENV':
        # Pattern: FOO=1 BAR=stuff
        self.env = dict(pair.split('=') for pair in value.split())
    else:
        raise Error('Unknown directive: %s' % key)
def _fetch_lexeme(self):
    """Scan the next lexeme from the current line into self._lexeme/_token.

    Applies a longest-match rule over all token regular expressions; a match
    whose text is in RESERVED_WORDS is tagged 'reserved_word'. When nothing
    matches, the offending character is reported as a lexical error and
    consumed so scanning can continue.
    """
    # Refill the line buffer; at end-of-file emit an 'eof' pseudo-lexeme.
    while (not self._line) or self._line == '\n':
        self._fetch_line()
        if self._file.eof():
            self._token = 'eof'
            self._lexeme = 'eof'
            return
    next_match = None
    for token in self._rexpressions.keys():
        match = self._rexpressions[token].match(self._line)
        if match:
            # Longest match wins; the first of equal-length matches is kept.
            if not next_match or len(match.group()) > len(
                    next_match.group()):
                next_match = match
                # Tuple-indexing idiom: selects "reserved_word" when the
                # matched text is reserved, otherwise the token name.
                self._token = (token, "reserved_word")[
                    match.group().strip() in self.RESERVED_WORDS]
    if not next_match:
        # No rule matched: flag the first character as an unknown lexeme.
        # NOTE(review): assumes _raise_error can return (error collection),
        # since scanner state is advanced afterwards — confirm.
        super(Lexic, self)._raise_error(
            Error(line=self._file.index(),
                  _type="Lexic",
                  column=len(self._file.line()) - len(self._line) + 1,
                  desc="Unidentified lexeme '" + self._line[0] + "'"))
        self._lexeme = self._line[0]
        self._token = "no_token"
        self._line = self._line[1:].strip()
    else:
        self._lexeme = next_match.group().strip()
        # Drop the consumed text from the line buffer.
        self._line = self._line[next_match.end():]
def _Command(self, index, command):
    """Rewrite one spec-test *command* as an exported wasm test function.

    Emits a function 'assert_<index>' into self.lines that performs the
    command's action and checks its expectation inline, then rewrites
    *command* in place so it simply invokes the new export with no
    arguments and no expected values.

    Raises:
        Error: for command types other than assert_return / assert_trap /
            assert_exhaustion.
    """
    command_type = command['type']
    new_field = 'assert_%d' % index
    if command_type == 'assert_return':
        self.lines.append('(func (export "%s")' % new_field)
        self.lines.append('block')
        self._Action(command['action'])
        for expected in command['expected']:
            # Compare bit patterns rather than values so NaNs can be tested.
            self._Reinterpret(expected['type'])
            if expected['value'] in ('nan:canonical', 'nan:arithmetic'):
                # Mask, then compare against the quiet-NaN bit pattern.
                self._NanBitmask(expected['value'] == 'nan:canonical',
                                 expected['type'])
                self._And(expected['type'])
                self._QuietNan(expected['type'])
            else:
                self._Constant(expected)
                self._Reinterpret(expected['type'])
                self._Eq(expected['type'])
            # On a failed comparison (0), branch out of the block to the
            # trailing 'unreachable'.
            self.lines.extend(['i32.eqz', 'br_if 0'])
        # All expectations held: return; falling out of the block traps.
        self.lines.extend(['return', 'end', 'unreachable', ')'])
    elif command_type in ('assert_trap', 'assert_exhaustion'):
        self.lines.append('(func (export "%s")' % new_field)
        self._Action(command['action'])
        self.lines.extend(['br 0', ')'])
    else:
        raise Error('Unexpected command: %s' % command_type)
    # Update command to point to the new exported function.
    command['action']['field'] = new_field
    command['action']['args'] = []
    command['expected'] = []
def execute(self, N=None):
    """Execute the schedule.

    Args:
        N (int, optional): Number of passes to execute. If N is omitted or
            None, execute the entire schedule.

    Raises:
        Error: when the scheduler is not in the idle state, another
            scheduler is already executing on the ground station, or no
            future passes exist in the schedule.
    """
    #
    # In order to execute, the current schedule must be in the idle state
    # (i.e. NEVER before executed) *and* there must be no schedule running
    # on the ground station.
    #
    if self.state != Scheduler.States.IDLE:
        # BUG FIX: .format() previously bound only to the second string
        # literal of a '+' concatenation (which had no placeholder), so
        # the state was never interpolated and a word-separating space
        # was missing.
        log.error(
            "An attempt was made to execute a scheduler that was in the incorrect state, '{}'. A scheduler can only "
            "be executed while in the 'idle' state. I.e. never before executed. Raising exception."
            .format(self.state))
        raise Error(
            "A scheduler can only be executed while in the 'idle' state (i.e. never before executed). The current state is '{}'"
            .format(self.state))

    if hasattr(self.gs, 'scheduler') and self.gs.scheduler is not None \
            and self.gs.scheduler.state == Scheduler.States.EXECUTING:
        raise Error(
            "Unexpected error: This error should not be possible. Indicates that a different scheduler may be running on the ground station.")

    self.gs.scheduler = self
    log.info("Executing schedule:\n%s" % (self.schedule))

    # Count down the requested number of passes (all of them by default).
    if N is None:
        self._continue = len(self.schedule.passes)
    else:
        self._continue = N

    try:
        # Renamed from `np` to avoid shadowing the numpy alias.
        next_idx = self._next_pass()
    except ValueError:
        raise Error("No future passes in schedule")
    self._schedule_pass(next_idx)
def _serial_port(self, url_or_dev, tx_eol = '\r\n', rx_eol = '\r', **kwargs):
    """
    Create a serial port generator. Send commands to it by using the
    generator send() method. It will return whatever comes back, or None
    if readline fails.

    Note:
        we do it this way to avoid conflicting writes to the serial port
        from different threads. If that's attempted, a
        "ValueError: generator is already executing" exception will happen.

    Args:
        url_or_dev: pyserial URL or device path for the port.
        tx_eol: line terminator appended to every transmitted command.
        rx_eol: single character that terminates a received line.
        **kwargs: extra arguments for the serial constructor; timeout
            defaults to 1.0 s.
    """
    if 'timeout' not in kwargs.keys():
        kwargs['timeout'] = 1.0

    # Prefer URL-style construction; fall back for serial modules without
    # serial_for_url.
    try:
        _ser = serial.serial_for_url(url_or_dev, **kwargs)
    except AttributeError:
        _ser = serial.Serial(url_or_dev, **kwargs)

    if len(rx_eol) != 1:
        raise Error("rx_eol parameter can only be a single character long")

    def readline(_ser=_ser, rx_eol=rx_eol):
        # Read one character at a time until rx_eol or an empty read
        # (timeout) ends the line.
        # NOTE(review): written against Python 2 string semantics — under
        # Python 3, _ser.read() returns bytes and `data += b` would mix
        # str and bytes; confirm the targeted interpreter.
        data = ''
        while True:
            b = _ser.read(1)
            data += b
            if b == '' or b == rx_eol:
                break
            # Make loop abortable by making the abort_all event raise an exception
            raise_if_aborted()
        return data

    outp = None
    exc = None
    # t0 = time.time()
    # t1 =t0
    while True:
        #print('BLAH. Time through loop = {:.2f}, time waiting for yield = {:.2f}'.format(time.time() - t0, t1-t0))
        # t0 = time.time()
        # Hand the previous (response, exception) pair to the caller and
        # wait for the next command string via send().
        inp = yield (outp, exc)
        # t1 = time.time()
        try:
            _ser.write((inp.strip() + tx_eol).encode())
            outp = readline()
            if outp == '':
                raise Exception('Timeout: No response')
            exc = None
        except Exception as e:
            # Report the failure on the next yield instead of killing the
            # generator.
            outp = None
            exc = e
def __init__(self, name, stream, rpcaddr, rpc_varmap=None, iqbufflen=1024, connect=True):
    """
    Args:
        name (string): A descriptive name for the radio.
        stream (string): The IP:PORT on which to listen for published IQ samples.
        rpcaddr (string): The RPC address (in format http://ip:port) to connect the XMLRPC proxy to.
        rpc_varmap: A dict mapping between freq, sample_rate and range_rate
            to whatever those variables are called in the Gnu Radio flowgraph.
        iqbufflen (int): Maximum number of IQ buffers to retain.
        connect (bool): When True, connect immediately.

    Raises:
        Error: when rpc_varmap contains an unknown key.
    """
    # BUG FIX: `rpc_varmap={}` was a shared mutable default argument; use
    # the None sentinel idiom instead.
    if rpc_varmap is None:
        rpc_varmap = {}

    self.name = name
    self.stream = stream
    self._rpcaddr = rpcaddr
    self._iqbuff = deque(maxlen=iqbufflen)
    self._fftsize = 1024
    try:
        self._iqaddr, self._iqport = stream.split(':')
    except Exception:
        log.error("Did not understand stream address {}".format(stream))
        raise
    self._iqport = int(self._iqport)
    self._last_sample_rate = 100e3
    self._last_freq = 0
    self._iqrxcnt = 0
    self._lastiqrxcnt = 0

    # Store errors (if any)
    self.err_sample_rate = None
    self.err_freq = None

    #
    # Set a default RPC map (identity mapping).
    # BUG FIX: 'range_rate' previously defaulted to the misspelled
    # 'range_rage', so the range-rate RPC targeted a nonexistent flowgraph
    # variable unless explicitly overridden.
    #
    self._callback_map = {
        'freq': 'freq',
        'sample_rate': 'sample_rate',
        'range_rate': 'range_rate'}

    for k, v in rpc_varmap.items():
        if k not in self._callback_map:
            raise Error("Unknown key %s in variable map"%(k))
        else:
            self._callback_map[k] = v

    #self._server = xmlrpclib.ServerProxy(self._rpcaddr)
    #self._server = XMLRPCTimeoutServerProxy(self._rpcaddr, timeout=.25)
    if connect:
        self.connect()
def _WriteAssertModuleCommand(self, command): if command['module_type'] == 'binary': self.out_file.write('(%s (module binary "%s") "%s")\n' % ( command['type'], self._Binary(command['filename']), command['text'])) elif command['module_type'] == 'text': self.out_file.write('(%s (module quote "%s") "%s")\n' % ( command['type'], self._Text(command['filename']), command['text'])) else: raise Error('Unknown module type: %s' % command['module_type'])
def _poll_azel(self): """ Get the current AZ,EL by querying hamlib. This function is not thread-safe and not intended to be run outside this class. Use get_azel() instead. Returns: azimuth, elevation """ self._lock() try: if self._persist is False or not hasattr(self, '_sock'): self._connect() self._sock.sendall("p") resp = self._recv_all() if resp[0:4] == 'RPRT': s = 'Got invalid az/el information from hamlib' log.error(s) self._unlock() raise Error(s) try: az, el = resp.split('\n')[0:2] self._last_az = float(az) self._last_el = float(el) except Exception, e: s= "Error converting hamlib data '%s' to az/el. Error: '%s'"%(resp, e) log.error(s) self._unlock() raise Error(s) self._last_update = time.time() if self._persist is False: self._close()