def __init__(self, words, source, **kwargs):
    """Run a search for the given words and render the results as labels."""
    super().__init__(**kwargs)
    self.words = words.split(' ')
    self.src = source
    # Create the UI components.
    self.source = Source()
    self.layout = BoxLayout(orientation='vertical')
    self.clear_widgets()
    results = self.search()
    if results != []:
        for result in results:
            title = Label(text=result['title'], font_size=30,
                          size_hint_y=None, height=40)
            time = Label(text=result['time'], font_size=20,
                         size_hint_y=None, height=40)
            phrarse = Label(text=result['phrarse'], font_size=15,
                            size_hint_y=None, height=40)
            separator = Label(
                text='-----------------------------------------------',
                font_size=60, size_hint_y=None, height=20)
            self.layout.add_widget(title)
            self.layout.add_widget(time)
            self.layout.add_widget(phrarse)
            self.layout.add_widget(separator)
    self.add_widget(self.layout)
def test_add_rsvps_to_event(self):
    """An empty RSVP list leaves the event untouched; RSVPS merges in place."""
    event = copy.deepcopy(EVENT)
    Source.add_rsvps_to_event(event, [])
    self.assert_equals(EVENT, event)
    Source.add_rsvps_to_event(event, RSVPS)
    self.assert_equals(EVENT_WITH_RSVPS, event)
class Remote:
    """State tracker for a single remote RTP-style peer."""

    def __init__(self, addr, **kwargs):
        self.ssrc = kwargs['ssrc']
        self.control_address = addr
        self.data_address = None
        self.period = kwargs['expect_period']
        self.packet_size = kwargs['expect_size']
        self.tv_recv = time_now()
        self.source = Source()

    def onPeriod(self):
        pass

    def update(self, downstream):
        pass

    def onSenderReport(self, report):
        pass

    def onReceiverReport(self, report):
        pass

    def onPacket(self, packet):
        # Packets without a sequence number are ignored entirely.
        seq = packet.get('seq')
        if seq is None:
            return
        self.tv_recv = time_now()
        self.source.update_seq(seq)
def run():
    """
    Create the objects source for Sirio, Vega and Proxima as well as the
    corresponding scanners and the satellite object of Gaia. Then scan the
    sources from Gaia and print the time.

    :return: gaia, sirio, scanSirio, vega, scanVega, proxima, scanProxima
    """
    start_time = time.time()
    sirio = Source("sirio", 101.28, -16.7161, 379.21, -546.05, -1223.14, -7.6)
    vega = Source("vega", 279.2333, 38.78, 128.91, 201.03, 286.23, -13.9)
    proxima = Source("proxima", 217.42, -62, 768.7, 3775.40, 769.33, 21.7)
    scanSirio = Scanner(np.radians(20), np.radians(2))
    scanVega = Scanner(np.radians(20), np.radians(2))
    scanProxima = Scanner(np.radians(20), np.radians(2))
    gaia = Satellite()
    print(time.time() - start_time)
    scanSirio.start(gaia, sirio)
    scanVega.start(gaia, vega)
    scanProxima.start(gaia, proxima)
    print(time.time() - start_time)
    seconds = time.time() - start_time
    print('Total seconds:', seconds)
    return gaia, sirio, scanSirio, vega, scanVega, proxima, scanProxima
def setup_source(self, x, y, source_image, source_size, to_intersection,
                 car_images, car_size, generative=True, spawn_delay=4.0):
    ''' Sets up a Source, which is an Intersection. '''
    # Sprite uses a y-axis flipped relative to the simulation grid.
    s = Sprite(source_image, source_size)
    s.move_to(x=x, y=self.height - y)
    source = Source(x, y, None, None, self, car_images, car_size,
                    spawn_delay=spawn_delay, generative=generative)
    road = self.setup_road(source, to_intersection, 'road.bmp')
    source.road = road
    source.length_along_road = road.length
    self.source_set.add(source)
    self.window.add_sprite(s)
    return source
def setUp(self):
    """Build matching real/calc sources over a short time span and an Agis solver."""
    t_init = 0  # 1/24/60
    t_end = t_init + 1 / 24 / 60  # 365*5
    my_dt = 1 / 24 / 60 / 10  # [days]
    spline_degree = 3
    gaia = Satellite(ti=t_init, tf=t_end, dt=my_dt, k=spline_degree)
    self.gaia = gaia
    my_times = np.linspace(t_init, t_end, num=100, endpoint=False)
    real_sources = []
    calc_sources = []
    for t in my_times:
        alpha, delta = af.generate_observation_wrt_attitude(
            gaia.func_attitude(t))
        real_src_tmp = Source(str(t), np.degrees(alpha), np.degrees(delta),
                              0, 0, 0, 0)
        calc_src_tmp = Calc_source('calc_' + str(t), [t],
                                   real_src_tmp.get_parameters()[0:5],
                                   real_src_tmp.get_parameters()[5])
        real_sources.append(real_src_tmp)
        calc_sources.append(calc_src_tmp)
    # test if source and calc source are equal (as they should be)
    np.testing.assert_array_almost_equal(
        np.array(real_sources[0].get_parameters()[0:5]),
        calc_sources[0].s_params)
    # create Solver
    self.Solver = Agis(
        gaia, calc_sources, real_sources,
        attitude_splines=[gaia.s_w, gaia.s_x, gaia.s_y, gaia.s_z],
        spline_degree=spline_degree,
        attitude_regularisation_factor=1e-3)
def setUp(self):
    """Model a.h <- b.h <- c.h references and check inclusion optimization."""
    super(TestOptimizeInclusions, self).setUp()
    name = type(self).__name__
    src = Source(name.lower() + ".c")
    ah = Header("a.h")
    bh = Header("b.h")
    ch = Header("c.h")
    ah.add_type(Type("a"))
    bh.add_type(Type("b")).add_reference(Type["a"])
    ch.add_type(Type("c")).add_reference(Type["b"]).add_inclusion(ah)
    src.add_type(Pointer(Type["c"], "cpointer"))
    # c.h includes a.h but inclusion of a.h cannot be substituted with c.h
    # inclusion because it creating reference loop between inclusions of
    # c.h and b.h. This test checks inclusions optimization correctness and
    # ordering of chunks.
    src_content = """\
/* {} */
#include "a.h"
#include "b.h"
#include "c.h"

typedef c *cpointer;
""".format(src.path)
    self.files = [(src, src_content)]
def setUp(self):
    """Self-referencing struct A forces a forward declaration before B."""
    super(TestForwardDeclaration, self).setUp()
    name = type(self).__name__
    src = Source(name.lower() + ".c")
    a = Structure("A")
    a.append_field(Pointer(a)("next"))
    b = Structure("B")
    b.append_field(Pointer(a)("next"))
    src.add_types([a, b])
    src_content = """\
/* {} */

typedef struct A A;

struct A {{
    A *next;
}};

typedef struct B {{
    A *next;
}} B;
""".format(src.path)
    self.files = [(src, src_content)]
def __init__(self, name, directory, nic_num=0, timer_num=0, char_num=0,
             block_num=0, **qom_kw):
    """Remember peripheral counts and attach header/source files for the device."""
    super(QOMDevice, self).__init__(name, **qom_kw)
    self.directory = directory
    self.nic_num = nic_num
    self.timer_num = timer_num
    self.char_num = char_num
    self.block_num = block_num
    # Define header file: reuse an existing registration when possible.
    header_path = join("hw", directory, self.qtn.for_header_name + ".h")
    try:
        self.header = Header.lookup(header_path)
    except Exception:
        self.header = Header(header_path)
    # Define source file
    source_path = join("hw", directory, self.qtn.for_header_name + ".c")
    self.source = Source(source_path)
def setUp(self):
    """Mutually referencing structs A and B require a cross declaration."""
    super(TestCrossDeclaration, self).setUp()
    src = Source(type(self).__name__.lower() + ".c")
    a = Structure("A")
    b = Structure("B")
    b.append_field(Pointer(a)("ref"))
    a.append_field(Pointer(b)("ref"))
    src.add_types([a, b])
    src_content = """\
/* {} */

typedef struct B B;

typedef struct A {{
    B *ref;
}} A;

struct B {{
    A *ref;
}};
""".format(src.path)
    self.files = [(src, src_content)]
def setUp(self):
    """A switch with separate_cases=True emits a break after each case."""
    super(TestSeparateCases, self).setUp()
    src = Source(type(self).__name__.lower() + ".c")
    i = Type["int"]("i")
    src.add_type(
        Function(name="func_a",
                 body=BodyTree()(Declare(OpDeclareAssign(i, 0)),
                                 BranchSwitch(i, separate_cases=True)(
                                     SwitchCase(1), SwitchCase(2)))))
    src_content = """\
/* {} */

void func_a(void)
{{
    int i = 0;

    switch (i) {{
    case 1:
        break;
    case 2:
        break;
    default:
        break;
    }}
}}
""".format(src.path)
    self.files = [(src, src_content)]
def setUp(self):
    """Label and goto nodes must render as a C label and a goto statement."""
    super(TestLabelAndGotoGeneration, self).setUp()
    name = type(self).__name__
    src = Source(name.lower() + ".c")
    lbl = Label("begin")
    i = Type["int"]("i")
    src.add_type(
        Function(name="main",
                 body=BodyTree()(Declare(i),
                                 lbl,
                                 OpAssign(i, OpAdd(i, 1)),
                                 Goto(lbl))))
    src_content = """\
/* {} */

void main(void)
{{
    int i;

begin:
    i = i + 1;
    goto begin;
}}
""".format(src.path)
    self.files = [(src, src_content)]
def __init__(self, config=None):
    """
    Initialization of NIST scraper

    :param config: configuration variables for this scraper, must contain
        'reliability' key.
    """
    Source.__init__(self, config)
    self.ignore_list = set()
def __init__(self, data_file, client_id, client_secret, from_address,
             header, footer):
    """Set up the OAuth consumer and mail framing for this source."""
    Source.__init__(self, data_file)
    self.consumer = Consumer(client_id, client_secret)
    self.from_address = from_address
    self.header = header
    self.footer = footer
    # Token is acquired lazily, later.
    self.access_token = None
def buildSourceList(self, titles, verbose=True):
    """Scrape each Wikipedia title and its linked pages into a Source table.

    :param titles: iterable of page titles to scrape.
    :param verbose: show a tqdm progress bar per page when True.
    """

    def wikiLink(link):
        # Best-effort fetch of one linked page; None on any failure.
        # FIX: was a bare `except:` which also swallowed KeyboardInterrupt
        # and SystemExit; narrowed to Exception.
        try:
            page = wiki_api.page(link)
            if page.exists():
                return {
                    'page': link,
                    'text': page.text,
                    'link': page.fullurl,
                    'categories': list(page.categories.keys())
                }
        except Exception:
            return None

    for current_title in titles:
        if not self.checkPageAdded(current_title):
            wiki_api = wikipediaapi.Wikipedia(
                language='en', extract_format=wikipediaapi.ExtractFormat.WIKI)
            page_name = wiki_api.page(current_title)
            if not page_name.exists():
                print('Page {} does not exist.'.format(page_name))
                # NOTE(review): this aborts the remaining titles on one
                # missing page — `continue` may have been intended; confirm.
                return
            page_links = list(page_name.links.keys())
            print_description = "Links Scraped for page '{}'".format(
                current_title)
            progress = tqdm(desc=print_description, unit='',
                            total=len(page_links)) if verbose else None
            current_source = Source(page_name.title, page_name.text,
                                    page_name.fullurl,
                                    list(page_name.categories.keys()),
                                    page_name)
            # Parallelize the scraping, to speed it up (?)
            with concurrent.futures.ThreadPoolExecutor(
                    max_workers=4) as executor:
                future_link = {
                    executor.submit(wikiLink, link): link
                    for link in page_links
                }
                for future in concurrent.futures.as_completed(future_link):
                    data = future.result()
                    if data:
                        current_source.append(data)
                    if verbose:
                        progress.update(1)
            if verbose:
                progress.close()
            namespaces = ('Wikipedia', 'Special', 'Talk', 'LyricWiki', 'File',
                          'MediaWiki', 'Template', 'Help', 'User',
                          'Category talk', 'Portal talk')
            # Drop stubs and pages in maintenance namespaces.
            current_source = current_source[
                (len(current_source['text']) > 20)
                & ~(current_source['page'].str.startswith(namespaces,
                                                          na=True))]
            # Strip the "Category:" prefix (9 chars) from each category name.
            current_source['categories'] = current_source.categories.apply(
                lambda x: [y[9:] for y in x])
            current_source['topic'] = page_name
            print('Wikipedia pages scraped so far:', len(current_source))
def test_handles_no_matches(self):
    """collate_words returns an empty list when no source matches the meme."""
    p = Parser()
    m = Meme("all your * -base")
    s = [
        Source("", "foo", ""),
        Source("", "bar", ""),
        Source("", "baz", ""),
    ]
    self.assertEqual(p.collate_words(m, s).get_list(), [])
def __init__(self, config):
    """Create the MQTT client, wire callbacks and connect to the broker."""
    Source.__init__(self, "mqtt", config)
    self.new_content = []
    self.mqttc = mqtt.Client()
    self.mqttc.on_connect = self.on_connect
    self.mqttc.on_message = self.on_message
    # 60 s keepalive.
    self.mqttc.connect(self.get("server"), int(self.get("port")), 60)
def test_collates_words(self):
    """Wildcard words are counted across sources, most frequent first."""
    p = Parser()
    m = Meme("all your * -base")
    s = [
        Source("", "all your cake", ""),
        Source("", "all your cake", ""),
        Source("", "all your data", ""),
    ]
    self.assertEqual(
        p.collate_words(m, s).get_list(), [("cake", 2), ("data", 1)])
def process_wiki(self, trader, amount, tag, partner, grlc, comment=None,
                 date=None):
    """Record one wiki trade for a trader and mark their wiki for update."""
    source = Source(tag, partner, grlc, comment, date)
    if source.get_id() not in trader.trade_partners:
        trader.trade_partners[source.get_id()] = source
    trader.add_record(amount)
    # '-' and 'N/A' are placeholder values, not real GRLC amounts.
    if grlc != '-' and grlc != 'N/A':
        trader.add_grlc(grlc)
    self.needs_updating_trader_wiki.add(trader.get_id())
    self.author_points[trader.get_id()] = trader
def __init__(self, source_fn: str, radar_fn: str, dipole=False):
    """Initialise both parent classes, then compute source-antenna geometry."""
    # To pass values to parent classes
    Source.__init__(self, source_fn, dipole)
    Antenna.__init__(self, radar_fn)
    # Source to antenna distances
    self.distance, self.path_vec = self.multi_dist()
    print("\x1b[1;31mDistance:\n\x1b[0m", self.distance,
          "\n\x1b[1;31mVector:\n\x1b[0m", self.path_vec)
def build(url=u'', dry=False, config=None, **kwargs):
    """Returns a constructed source object without downloading or parsing
    the articles
    """
    config = config or Configuration()
    config = extend_config(config, kwargs)
    url = url or ''
    s = Source(url, config=config)
    if not dry:
        s.build()
    return s
def transactionalCreateTree(dataForPointTree, user):
    """Create a tree of points (roots, links, sources) in three passes."""
    outlineRoot = OutlineRoot()
    outlineRoot.put()
    # ITERATE THE FIRST TIME AND CREATE ALL POINT ROOTS WITH BACKLINKS
    for p in dataForPointTree:
        pointRoot = PointRoot(parent=outlineRoot.key)
        pointRoot.url = p['url']
        pointRoot.numCopies = 0
        pointRoot.editorsPick = False
        pointRoot.viewCount = 1
        if 'parentIndex' in p:
            parentPointRoot = dataForPointTree[p['parentIndex']]['pointRoot']
            pointRoot.pointsSupportedByMe = [parentPointRoot.key]
        pointRoot.put()
        p['pointRoot'] = pointRoot
        point = Point(parent=pointRoot.key)
        point.title = p['title']
        point.url = pointRoot.url
        point.content = p['furtherInfo']
        point.current = True
        point.authorName = user.name
        point.authorURL = user.url
        point.put()
        user.addVote(point, voteValue=1, updatePoint=False)
        user.recordCreatedPoint(pointRoot.key)
        p['point'] = point
        p['pointRoot'].current = p['point'].key
        pointRoot.put()
        point.addToSearchIndexNew()
    # ITERATE THE SECOND TIME ADD SUPPORTING POINTS
    for p in dataForPointTree:
        if 'parentIndex' in p:
            linkP = dataForPointTree[p['parentIndex']]['point']
            linkP.supportingPointsRoots = \
                linkP.supportingPointsRoots + [p['pointRoot'].key] \
                if linkP.supportingPointsRoots else [p['pointRoot'].key]
            linkP.supportingPointsLastChange = \
                linkP.supportingPointsLastChange + [p['point'].key] \
                if linkP.supportingPointsRoots else [p['point'].key]
    # ITERATE THE THIRD TIME AND WRITE POINTS WITH ALL SUPPORTING POINTS AND
    # SOURCE KEYS
    for p in dataForPointTree:
        if p['sources']:
            sourceKeys = []
            for s in p['sources']:
                source = Source(parent=p['point'].key)
                source.url = s['sourceURL']
                source.name = s['sourceTitle']
                source.put()
                sourceKeys = sourceKeys + [source.key]
            p['point'].sources = sourceKeys
        # Persist link/source mutations from passes two and three.
        p['point'].put()
    return dataForPointTree[0]['point'], dataForPointTree[0]['pointRoot']
def __init__(self, path) -> None:
    """Load validator data from path and derive summary counters."""
    self.path = Source(path)
    self.resp = self.path.jsonObj()
    self.name = self.path.name
    self.waiting = len(self.resp["waiting"])
    # Validators that have produced at least one block.
    self.produced = len([
        addr['address'] for addr in self.resp['validators']
        if addr['produced_blocks_counter'] != ''
        and addr['produced_blocks_counter'] != '0'
    ])
    self.claimed = len(
        [addr['address'] for addr in self.resp['validators']])
class MainWindow(QtGui.QMainWindow, Ui_MainWindow):
    """Main application window wiring menu actions to the Source backend."""

    def __init__(self, database, admin):
        QtGui.QMainWindow.__init__(self)
        Ui_MainWindow.__init__(self)
        self.setupUi(self)
        self.show()
        # parameters
        self.admin = admin
        self.database = database
        # menu action signals
        self.actionOpen.triggered.connect(self.open_file)
        self.actionOpen_Camera.triggered.connect(self.open_camera)
        self.actionQuit.triggered.connect(self.quit)
        self.actionAbout.triggered.connect(self.about)
        # objects
        self.source = Source(self, database, admin)
        # init settings
        init_main(self)
        # keyboard shortcuts
        shortcuts(self)
        # admin
        self.admin_window = AdminWindow(self.source, self)
        # notification object here
        self.notification = Notification(self.source, self)
        # report tab
        self.report = Report(self.source, self)

    # menu actions
    def open_file(self):
        print("Clicked open file in menu")
        self.source.open("file")

    def open_camera(self):
        print("Clicked open camera in menu")
        self.source.open("camera")

    def about(self):
        print("Opening about window")
        # NOTE(review): rebinds self.about over the method; the connected
        # signal keeps the original bound method, but confirm intent.
        self.about = AboutWindow()

    def quit(self):
        print("Quit application")
        QtCore.QCoreApplication.instance().quit()
def show_fields_values(units: list, nbs: list, specs: list, starts: list):
    """Third source-definition screen: collect power values per frequency."""
    win_values = QWidget()
    win_values.setWindowTitle(
        "Définition des sources (3/3): Entrée des valeurs ")
    mwin.setCentralWidget(win_values)
    layout_values = QGridLayout()
    button_values = QPushButton("Ok")
    nbs = [int(elem) for elem in nbs]
    layout_values.addWidget(button_values, max(nbs) + 1, 0)
    sources = []
    for unit, nb, spec, start in zip(units, nbs, specs, starts):
        source = Source(unit, int(nb), spec, float(start))
        sources.append(source)
    # One column pair (label + line edit) per source.
    for index_source, source in enumerate(sources):
        source.qlineedits = {}
        label_source = QLabel(f"Source {index_source + 1}")
        label_source.setFont(BoldFont)
        label_source.setAlignment(Qt.AlignCenter)
        layout_values.addWidget(label_source, 0, index_source * 2, 1, 2)
        frequence = list(source.puissances.keys())
        for index_freq, freq in enumerate(frequence):
            label_freq = QLabel(str(freq) + " Hz")
            label_freq.setAlignment(Qt.AlignCenter)
            puissance = QLineEdit()
            source.qlineedits[freq] = puissance
            layout_values.addWidget(label_freq, index_freq + 1,
                                    index_source * 2)
            layout_values.addWidget(puissance, index_freq + 1,
                                    index_source * 2 + 1)

    def handler_values():
        # Read the entered values, harmonise frequency grids across sources,
        # then aggregate everything into one global spectral level.
        for source in sources:
            for k in source.qlineedits.keys():
                source.puissances[k] = source.qlineedits[k].text()
        puiss = sources[0].puissances
        for source in sources:
            puiss = convert_freq(puiss, source.puissances)
        for source in sources:
            source.puissances = convert_freq(source.puissances, puiss)
            source.convert_unit()
        somme = sources[0]
        for source in sources:
            del source.qlineedits
            somme = somme.niveau_global_spectral(source)
        print(somme)
        show_fields_mesure(somme)

    button_values.clicked.connect(handler_values)
    win_values.setLayout(layout_values)
    win_values.show()
def main():
    """Fetch RSS news and store previously-unseen items in the database."""
    src = Source(config['RSS'])
    src.refresh()
    news = src.news
    news.reverse()
    # Check whether each news link already exists in the database;
    # if not, store the item.
    for i in news:
        if not db.find_link(i.link):
            i.id = db.get_last_news_id() + 1
            # probably should be removed
            i.publish = int(time.mktime(time.localtime()))
            logging.info("Detect news: {}".format(i))
            db.add_news(i)
def validate_forms(self):
    """Validate the form and persist source, user, script and section data."""
    root_pass = self.ids.pr2.text
    username = self.ids.us1.text
    user_pass = self.ids.us3.text
    home = self.ids.us4.text
    shell = self.ids.us5.text
    pre = self.ids.pre.text
    pos = self.ids.pos.text
    # Fall back to the default installation source when none was selected.
    if self.source:
        s = self.source
    else:
        s = 'cdrom'
    folder = ''
    server = ''
    partition = self.ids.partition.text
    ftp_user = self.ids.ftp_user.text
    ftp_pass = self.ids.ftp_pass.text
    # FIX: was `print('SOURCE:' + self.source)`, which raises TypeError
    # when self.source is None; report the effective source instead.
    print('SOURCE:' + s)
    if s == 'Hard drive':
        folder = self.ids.hh_folder.text
    elif s == 'NFS':
        folder = self.ids.nfs_folder.text
        server = self.ids.nfs_server.text
    elif s == 'HTTP':
        folder = self.ids.http_folder.text
        server = self.ids.http_server.text
    elif s == 'FTP':
        folder = self.ids.ftp_folder.text
        server = self.ids.ftp_server.text
    source = Source()
    source.save_source(s, partition, folder, server, ftp_user, ftp_pass)
    # if self.active is True and self.ids.pr1.text
    # is not self.ids.pr2.text:
    #     print(self.ids.pr1.focus)
    #     popup = InfoPopup()
    #     popup.set_info('Root passwords do not match')
    #     popup.open()
    user = User()
    user.save_user(root_pass, username, user_pass, home, shell)
    script = Script()
    script.save_script(pre, pos)
    section = Section()
    section.create_file()
def main(args):
    """Compile the submitted source for a problem and run it."""
    problem_dir, working_dir, source_path, language = parse_options(args)
    os.chdir(working_dir)
    # Normalise the source name to source.<ext> for the compiler.
    shutil.copy(source_path, './source.' + LANG_EXT_MAP.get(language))
    output_path = RUNNABLE_PATH
    problem_obj = Problem(problem_dir)
    source_obj = Source(source_path, language)
    program_obj = source_obj.compile(output_path)
    program_obj.run(problem_obj)
    return os.EX_OK
def getSource(self): from source import Source if hasattr(self, 'source_id'): self.source = Source(_id=self.source_id) else: fingerprint = None try: fingerprint = self.j3m['intent']['pgpKeyFingerprint'].lower() except KeyError as e: print "NO FINGERPRINT???" self.source = Source(inflate={ 'invalid': { 'error_code' : invalidate['codes']['source_missing_pgp_key'], 'reason' : invalidate['reasons']['source_missing_pgp_key'] } }) if fingerprint is not None: source = self.submission.db.query( '_design/sources/_view/getSourceByFingerprint', params={ 'fingerprint' : fingerprint } )[0] if source: self.source = Source(_id=source['_id']) else: # we didn't have the pgp key. # so init a new source and set an invalid flag about that. inflate = { 'fingerprint' : fingerprint } ## TODO: ACTUALLY THIS IS CASE-SENSITIVE! MUST BE UPPERCASE! self.source = Source(inflate=inflate) self.source.invalidate( invalidate['codes']['source_missing_pgp_key'], invalidate['reasons']['source_missing_pgp_key'] ) setattr(self, 'source_id', self.source._id) self.save() if hasattr(self, 'source_id'): return True else: return False
def setUp(self):
    """A pointer to 'Private' must resolve to its forward declaration."""
    super(TestRedirectionToDeclaration, self).setUp()
    name = type(self).__name__
    private_h = Header("private.h")
    private_h.add_type(Structure("Private"))
    public_h = Header("public.h")
    public_h.add_types([
        Type["Private"].gen_forward_declaration(),
        Function("public_func")
    ])
    private_c = Source("private.c")
    public_func_impl = Type["public_func"].gen_definition()
    private_c.add_type(public_func_impl)
    src = Source(name.lower() + ".c").add_global_variable(
        # It must internally re-direct pointer from type "Private"
        # to "Private.declaration", its forward declaration.
        Pointer(Type["Private"])("handler"))
    src.add_type(Pointer(public_func_impl, name="cb_ptr"))
    src_content = """\
/* %s */
#include "public.h"

typedef void (*cb_ptr)(void);

Private *handler __attribute__((unused));
""" % (name.lower() + ".c")
    self.files = [(src, src_content)]
def __init__(self, config=None):
    """
    Initialization of ChemSpider scraper

    :param config: a dictionary of settings for this scraper, must contain
        'reliability' key
    """
    Source.__init__(self, config)
    self.ignore_list = []
    # Without a token the search/MassSpec endpoints cannot be used.
    if 'token' not in self.cfg or self.cfg['token'] == '':
        log.msg('ChemSpider token not set or empty, search/MassSpec API '
                'not available', level=log.WARNING)
        self.cfg['token'] = ''
    self.search += self.cfg['token']
    self.extendedinfo += self.cfg['token']
def find_live():
    # TODO refactor this mess
    # NOTE(review): this function references self._source/_generator1/etc.
    # but declares no `self` parameter — it looks like a method pasted out
    # of its class; confirm the intended signature.
    rm = visa.ResourceManager()
    for res in rm.list_resources():
        try:
            print(f'trying: {res}')
            inst = rm.open_resource(res)
            answer = inst.query('*IDN?')
            model = answer.split(',')[1].strip()
            print(model, self._generatorList)
            if model == 'E3648A':
                # if model == 'E3631A':
                self._source = Source(res, answer, inst)
                print('>>> source')
            # elif 'N5183A' in answer:
            #     self._generator1 = Generator(res, answer, inst)
            elif model in self._generatorList:
                if not self._generator1:
                    self._generator1 = Generator(res, answer, inst)
                    print('>>> gen1')
                else:
                    self._generator2 = Generator(res, answer, inst)
                    print('>>> gen2')
            elif model == 'N9030A':
                self._analyzer = Analyzer(res, answer, inst)
                print('>>> analyzer')
        except Exception as ex:
            print(ex)
def __init__(self, database, admin):
    """Set up the main window: UI, menu signals, and backend helper objects."""
    QtGui.QMainWindow.__init__(self)
    Ui_MainWindow.__init__(self)
    self.setupUi(self)
    self.show()
    # parameters
    self.admin = admin
    self.database = database
    # menu action signals
    self.actionOpen.triggered.connect(self.open_file)
    self.actionOpen_Camera.triggered.connect(self.open_camera)
    self.actionQuit.triggered.connect(self.quit)
    self.actionAbout.triggered.connect(self.about)
    # objects
    self.source = Source(self, database, admin)
    # init settings
    init_main(self)
    # keyboard shortcuts
    shortcuts(self)
    # admin
    self.admin_window = AdminWindow(self.source, self)
    # notification object here
    self.notification = Notification(self.source, self)
    # report tab
    self.report = Report(self.source, self)
def main():
    """Run the online-clustering pipeline over incoming buckets forever."""
    debug = Debugger()
    chrono = Chrono()
    universe = Universe(debug)
    source = Source(debug).get_source()
    bucket_chain = BucketChain(debug, chrono, universe, source)
    clusters = Clusters(debug, chrono, universe)
    algorithm = OnlineClustering(debug, universe, bucket_chain, clusters)
    while True:
        operation_time = time.time()
        # Only do work when a new bucket has arrived.
        if bucket_chain.is_updated():
            universe.compute_log_n_df()
            bucket_chain.compute_universal_counts()
            bucket_chain.compute_universal_tfidf()
            clusters.update_centroid_counts()
            clusters.update_centroid_tfidf()
            algorithm.pre_clustering_work()
            algorithm.online_clustering()
            clusters.remove_old_clusters()
            universe.prune_terms(clusters)
            debug.log("BUCKET FINISHED IN: " +
                      str(time.time() - operation_time))
            clusters.debug_active_clusters()
            clusters.save_active_clusters()
def test_should_get_row_from_cache(self):
    """Rows fetched twice become retrievable from the hot cache by key."""
    q = Query(
        lambda: [{"t": 1, "v": 55000}, {"t": 2, "v": 11000}],
        query_name="q1",
        key_column="t",
        mapping={},
        non_data_fields=[],
    )
    s = Source(source_name="src", query=q)
    s.get_records()
    # to move the values into the hot part of the cache we need to push twice
    s.get_records()
    self.assertEquals(s.cache.get_row(1), {"t": 1, "v": 55000})
    self.assertEquals(s.cache.get_row(2), {"t": 2, "v": 11000})
def setUp(self):
    """A struct field pointing at a header type must pull in its include."""
    super(TestPointerReferences, self).setUp()
    name = type(self).__name__
    # Reuse the header registration when it already exists.
    try:
        h = Header["type_a.h"]
    except:
        h = Header("type_a.h")
    h.add_type(Type("a", incomplete=False, base=False))
    src = Source(name.lower() + ".c").add_type(
        Structure("s", Pointer(Type["a"])("next")))
    src_content = """\
/* {} */
#include "type_a.h"

typedef struct s {{
    a *next;
}} s;
""".format(src.path)
    self.files = [(src, src_content)]
def addSource(self, x, y):
    """Record a new source at grid cell (x, y) and place it on the canvas."""
    self.state['sourceXIndices'].append(x)
    self.state['sourceYIndices'].append(y)
    self.state['sourcesPoints'].append((x, y))
    # Grid indices map onto an 8x8 canvas layout, offset by one cell.
    self.state['sources'].append(
        Source(self.canvas,
               (x + 1) * (self.width / 8),
               (y + 1) * (self.height / 8)))
def __init__(self, config=None):
    """
    Initialization of ChemSpider scraper

    :param config: a dictionary of settings for this scraper, must contain
        'reliability' key
    """
    Source.__init__(self, config)
    self.ignore_list = []
    # Without a token the search/MassSpec endpoints cannot be used.
    if 'token' not in self.cfg or self.cfg['token'] == '':
        log.msg(
            'ChemSpider token not set or empty, search/MassSpec API '
            'not available', level=log.WARNING)
        self.cfg['token'] = ''
    self.search += self.cfg['token']
    self.extendedinfo += self.cfg['token']
def __get_source(self):
    """Return the cached Source for the current filename, creating it on miss."""
    fname = self.__get_filename()
    # FIX: dict.has_key is Python-2-only (removed in Python 3); the `in`
    # operator is equivalent and works on both, and avoids a second lookup
    # on the hit path.
    if fname not in SourceMap.sources:
        SourceMap.sources[fname] = Source(fname)
    return SourceMap.sources[fname]
def setup(self, item=False, meshes=False, keyable=False, prefix=False,
          parentVis=False):
    """
    Sets up and initialises the source mesh
    """
    # add the parent visibility node to the core node store so it can be
    # hidden on render
    self.coreNode.addParentGroup(parentVis)
    # now make the source node
    self.source = Source(
        meshes=meshes,
        keyable=keyable,
        parentVis=parentVis,
        prefix=prefix,
        node=item,
    )
    self.melSettings()
def __init__(self, addr, **kwargs):
    """Record peer addressing and expectations; start the receive clock."""
    self.ssrc = kwargs['ssrc']
    self.control_address = addr
    self.data_address = None
    self.period = kwargs['expect_period']
    self.packet_size = kwargs['expect_size']
    self.tv_recv = time_now()
    self.source = Source()
def __init__(self, program):
    """Initialise scanner state over the program text and prime the first char."""
    super(Scanner, self).__init__()
    self.source = Source(program)
    self.char = ''
    self.atomPositionEnd = TextPos()
    self.atomPositionStart = TextPos()
    self.intConst = 0
    self.strConst = ""
    self.spell = ""
    self.__nextChar()
def sample(self):
    """
    Method to pick the sample satisfying the likelihood constraint using
    uniform sampling

    Returns
    -------
    new : object
        The evolved sample
    number : int
        Number of likelihood calculations after sampling
    """
    new = Source()
    x_l, x_u = self.getPrior_X()
    y_l, y_u = self.getPrior_Y()
    r_l, r_u = self.getPrior_R()
    a_l, a_u = self.getPrior_A()
    # Rejection-sample until the draw beats the likelihood constraint LC.
    while True:
        new.X = np.random.uniform(x_l, x_u)
        new.Y = np.random.uniform(y_l, y_u)
        new.A = np.random.uniform(a_l, a_u)
        new.R = np.random.uniform(r_l, r_u)
        new.logL = self.log_likelihood(new)
        self.number += 1
        if new.logL > self.LC:
            break
    return new, self.number
def _load_sources(self):
    """Walk source_dir, building a Source per file with its first Target."""
    # FIX: the XPath expression is loop-invariant — compile it once instead
    # of recompiling for every directory visited by os.walk.
    get_formats = etree.XPath("//meta[@name='dcterms.Format']/@content")
    for source_file_physical_path, dirnames, filenames in os.walk(
            self.source_dir):
        # get relative path starting from self.source_dir
        source_file_relative_path = os.path.relpath(
            source_file_physical_path, self.source_dir)
        for f in filenames:
            # append a Source object to sources files list
            s = Source(os.path.abspath(source_file_physical_path),
                       source_file_relative_path, f)
            xml = etree.parse(s.source)
            formats = get_formats(xml)
            for ext in formats:
                physical_path = os.path.abspath(
                    os.path.join(self.out_dir, s.relative_path))
                target = Target(physical_path, s.relative_path,
                                s.basename, ext)
                s.add_target(target)
                # XXX: only use one format for now
                break
            self.sources.append(s)
    logging.debug(self.sources)
def __init__(self, events):
    """Log into reddit and set up data access, source and comms settings."""
    config = Conf()
    self.reddit = praw.Reddit(user_agent='Switcharoo Cartographer v.0.2.1')
    self.reddit.login(config.r_username, config.r_password,
                      disable_warning=True)
    self.events = events
    self.queue = None
    self.data = Access(self.events)
    self.source = Source(self)
    self._port = config.com_port
    self._share_port = config.share_port
    self._auth = config.auth
    self._should_stop = False
    self._threshold = 10
def _parse(self):
    """Parse an lcov tracefile into per-directory Source coverage objects."""
    funclines = {}
    source = None
    with open(self.filename) as f:
        for line in f:
            line = line.strip()
            if line == 'end_of_record':
                # Flush the current source, keyed by directory and filename.
                if source is not None:
                    directory, filename = os.path.split(source.filename)
                    self.sources[directory][filename] = source
                    source = None
            else:
                key, argstr = tuple(line.split(':'))
                args = argstr.split(',')
                if key == 'SF':
                    # Only track files under basepath; skip the rest.
                    fname = args[0]
                    if fname.startswith(self.basepath):
                        source = Source(args[0])
                    else:
                        source = None
                elif source is not None:
                    if key == 'FN':
                        # Function declaration: line number, name.
                        name = args[1]
                        funclines[name] = int(args[0])
                    elif key == 'FNDA':
                        # Function hit data: hits, name.
                        hits = int(args[0])
                        name = args[1]
                        func = Function(funclines[name], name, hits)
                        source.add_function(func)
                    elif key == 'BRDA':
                        # Branch data: line, block, branch, hits ('-' = never).
                        line = int(args[0])
                        path = int(args[2])
                        hits = 0 if args[3] == '-' else int(args[3])
                        source.add_branch(Branch(line, path, hits))
                    elif key == 'DA':
                        # Line data: line, hits.
                        line = int(args[0])
                        hits = int(args[1])
                        source.add_line(Line(line, hits))
def run(config_path, days=1):
    """Fetch seminars `days` ahead from the configured source and notify."""
    logging.info('Started')
    with open(config_path, 'r') as config_file:
        import json
        from datetime import datetime, timedelta
        from notifier import Notifier
        from source import Source
        from seminarparser import Parser
        config = json.load(config_file)
        source = Source.get(config['source']['type'], config['source'])
        parser = Parser.get(config['parser']['type'], config['parser'])
        notifier = Notifier.get(config['notifier']['type'],
                                config['notifier'])
        now = datetime.now().date()
        tomorrow = now + timedelta(days=days)
        logging.info('Initialized')
        src = source.get(tomorrow)
        logging.info('Getting source is complete')
        headers = parser.find_headers(src)
        logging.info('%d headers found' % len(headers))
        defaults = config['defaults']
        seminars = [parser.parse_seminar(header, defaults)
                    for header in headers]
        logging.info('%d seminars found' % len(headers))
        for seminar in [(seminar.title,
                         datetime.combine(seminar.date,
                                          seminar.time).isoformat(' '),
                         seminar.place,
                         '/'.join(seminar.contents))
                        for seminar in seminars]:
            logging.debug('Seminar found: %s, %s, %s, %s' % seminar)
        tomorrow_seminars = [seminar for seminar in seminars
                             if seminar.date == tomorrow]
        logging.info('%d seminars found on %s'
                     % (len(tomorrow_seminars), tomorrow))
        for seminar in tomorrow_seminars:
            notifier.notify(seminar)
            logging.info('Notify sent: %s, %s, %s'
                         % (seminar.title, seminar.date, seminar.time))
    logging.info('Done')
def transactionalCreate(pointRoot, title, nodetype, content, summaryText,
                        user, imageURL=None, imageAuthor=None,
                        imageDescription=None, sourceURLs=None,
                        sourceNames=None):
    """Create and persist a new Point under pointRoot, with optional sources."""
    pointRoot.put()
    point = Point(parent=pointRoot.key)
    point.title = title
    point.nodetype = nodetype
    point.url = pointRoot.url
    point.content = content
    # A summary of exactly 250 chars was presumably truncated upstream, so
    # mark it with an ellipsis — TODO confirm.
    point.summaryText = summaryText if (len(
        summaryText) != 250) else summaryText + '...'
    point.authorName = user.name
    point.authorURL = user.url
    point.version = 1
    point.current = True
    point.upVotes = 1
    point.downVotes = 0
    point.voteTotal = 1
    point.imageURL = imageURL
    point.imageDescription = imageDescription
    point.imageAuthor = imageAuthor
    point.put()
    sources = Source.constructFromArrays(sourceURLs, sourceNames, point.key)
    if sources:
        sourceKeys = []
        for source in sources:
            source.put()
            sourceKeys = sourceKeys + [source.key]
        point.sources = sourceKeys
        point.put()
    point.addToSearchIndexNew()
    pointRoot.current = point.key
    pointRoot.put()
    user.addVote(point, voteValue=1, updatePoint=False)
    user.recordCreatedPoint(pointRoot.key)
    return point, pointRoot
def __show(self, thread, addr, sym, filename, line):
    """Show the given symbol in an existing or freshly created view."""
    assert(sym)
    view = None
    # find a view that matches the current filename
    for v in self._Composite__views:
        if v.filename() == filename:
            view = v
            book = self.__book
            book.set_current_page(book.page_num(v.widget()))
            break
    # not found, add a new view
    if not view:
        w = self.__book
        if line:
            # Line info available: show source; otherwise fall back to disasm.
            view = Source(w)
            view.read(thread, addr, sym)
        else:
            view = Disasm(w)
        self.add(view, filename)
    assert view
    view.show(thread, addr, sym)
    self.__current_view = view
class Transverse: def __init__(self, events): config = Conf() self.reddit = praw.Reddit(user_agent='Switcharoo Cartographer v.0.2.1') self.reddit.login(config.r_username, config.r_password, disable_warning=True) self.events = events self.queue = None self.data = Access(self.events) self.source = Source(self) self._port = config.com_port self._share_port = config.share_port self._auth = config.auth self._should_stop = False self._threshold = 10 def init_queue(self): if self.queue is None: self.queue = EntryQueue(self) def build_graph(self, current_entry): entry_point = True stop = False found = False found_list = [] while current_entry is not None and not stop: current_entry.set_next() if current_entry.next_entry is None and entry_point: self.source.mark_searched(current_entry) return found, found_list entry_point = False # Check if item is already in graph node, stop = self.data.is_new_node(current_entry) # New node if not stop: found = True found_list.append(node) parents = self.data.get_parents(current_entry) for parent in parents: created = self.data.add_link(parent, node) if created: found = True self.source.mark_searched(current_entry) current_entry = current_entry.next_entry return found, found_list def analyze_found(self, list): if len(list) > 0: manager = BaseManager(address=('', self._share_port), authkey=self._auth) manager.register('get_meta_data') manager.connect() distances, max_dist = manager.get_meta_data() for n in list: try: if max_dist - distances[n._id] > self._threshold: # Do something here print "*** Node " + str(n._id) + " hit the threshold" except KeyError: # Do query here to see if node exists. If it does than node # does not link to origin print "*** Node " + str(n._id) + " may not link to origin" def loop(self, limit, sleep=10): while 1: current_entry = self.source.add_to_queue(limit, sleep) found, found_list = self.build_graph(current_entry) if found: self.events.on_clearing_cache() clear_cache(self._port) self.analyze_found(found_list)
def plot_si_result(basedir, event_name, outputdir, type_tag):
    """Plot source-inversion results for one event and save them as a PDF.

    Lays out a 3x3 grid: six beachball comparison panels (one per inversion
    tag), the original CMT solution, and a world map marking the source
    location. Output file: ``<outputdir>/<event_name>_<type_tag>.pdf``.
    """
    cmtbase = os.path.join(basedir, "CMTSOLUTION_%s" % event_name)
    runfilebase = os.path.join(basedir, "xcmt3d_%s" % event_name)
    print "Plotting event:", event_name
    print "Base:", cmtbase
    fig1 = plt.figure(num=2, figsize=(7, 10), dpi=80, facecolor="w", edgecolor="k")
    G = gridspec.GridSpec(3, 3)
    # Inversion tag -> grid cell for its comparison panel.
    loc_mapping = {
        "6p_ZT": G[0, 0], "7p_ZT": G[0, 1], "9p_ZT": G[0, 2],
        "6p_ZT_DC": G[1, 0], "7p_ZT_DC": G[1, 1], "9p_ZT_DC": G[1, 2],
    }
    # Original CMT
    ax = plt.subplot(G[2, 2])
    cmtfile = cmtbase + "_init"
    print "cmtfile:", cmtfile
    cmt_init = Source.from_CMTSOLUTION_file(cmtfile)
    # Moment tensor of the initial solution (rr, tt, pp, rt, rp, tp order).
    mt_init = [cmt_init.m_rr, cmt_init.m_tt, cmt_init.m_pp,
               cmt_init.m_rt, cmt_init.m_rp, cmt_init.m_tp]
    print mt_init
    # print cmt_init.moment_magnitude
    # plot_si_bb(ax, cmt_init)
    # Source Inversion result
    for tag, position in loc_mapping.iteritems():
        ax = plt.subplot(position)
        cmtfile = cmtbase + "_" + tag
        print "cmtfile:", cmtfile
        runfile = runfilebase + "_" + tag + ".out"
        cmt = Source.from_CMTSOLUTION_file(cmtfile)
        # print cmt.moment_magnitude
        # NOTE(review): mt is computed but not used below; kept as-is.
        mt = [cmt.m_rr, cmt.m_tt, cmt.m_pp, cmt.m_rt, cmt.m_rp, cmt.m_tp]
        plot_si_bb_comp(ax, cmt, cmt_init, tag, runfile)
    # Map and source location
    ax = plt.subplot(G[2, :-1])
    m = Basemap(projection="cyl", lon_0=142.36929, lat_0=0.0, resolution="c")
    m.drawcoastlines()
    m.fillcontinents()
    m.drawparallels(np.arange(-90.0, 120.0, 30.0))
    m.drawmeridians(np.arange(0.0, 420.0, 60.0))
    m.drawmapboundary()
    # Beachball on the map
    # calibrate longitude
    # NOTE(review): uses `cmt` from the last loop iteration — dict iteration
    # order decides which solution's location is plotted; confirm intended.
    if cmt.longitude < 0:
        lon = cmt.longitude + 360
    else:
        lon = cmt.longitude
    x, y = m(lon, cmt.latitude)
    # print cmt.longitude, cmt.latitude
    # print x, y
    # b = Beach(mt_init, xy=(x,y), width=20, linewidth=1, alpha=0.85)
    # b.set_zorder(100)
    # ax.add_collection(b)
    fig_title = "%s_%s" % (event_name, type_tag)
    plt.title(fig_title)
    outputfile = "%s_%s.pdf" % (event_name, type_tag)
    path = os.path.join(outputdir, outputfile)
    print "Output file:", path
    fig1.savefig(path)
fc = opt.channel # Print option summary: print 'Parameters in experiment:' print '\tSamples per bit:', opt.spb print '\tChannel type:', ('Audio' if not opt.bypass else 'Bypass') if opt.bypass: print '\t Noise:', opt.noise, ' lag:', opt.lag, 'h: [', opt.h, ']' print '\tFrequency:', fc, 'Hz' print '\tHamming code n :', opt.cc_len ######################################################## #instantiate and run the source block src = Source(opt.monotone, opt.fname) src_payload, databits = src.process() # instantiate and run the transmitter block xmitter = Transmitter(fc, opt.samplerate, opt.one, opt.spb, opt.silence, opt.cc_len) coded_bits = xmitter.encode(databits) coded_bits_with_preamble = xmitter.add_preamble(coded_bits) samples = xmitter.bits_to_samples(coded_bits_with_preamble) mod_samples = xmitter.modulate(samples) #################################### # create channel instance if opt.bypass: h = [float(x) for x in opt.h.split(' ')] channel = bch.BypassChannel(opt.noise, opt.lag, h) else:
def test_get_rsvps_from_event(self):
    """An event without RSVPs yields []; one with RSVPs yields them all."""
    cases = (
        ([], EVENT),
        (RSVPS, EVENT_WITH_RSVPS),
    )
    for expected, event in cases:
        self.assert_equals(expected, Source.get_rsvps_from_event(event))
class Scanner(object):
    """Hand-written lexer: turns the character stream from Source into SymType atoms.

    Tracks the start/end text positions of the current atom and exposes the
    last identifier spelling (self.spell), integer constant (self.intConst)
    and string constant (self.strConst).
    """

    def __init__(self, program):
        super(Scanner, self).__init__()
        self.source = Source(program)
        self.char = ''                      # current lookahead character (-1 at EOF)
        self.atomPositionEnd = TextPos()
        self.atomPositionStart = TextPos()
        self.intConst = 0
        self.strConst = ""
        self.spell = ""
        self.__nextChar()                   # prime the lookahead

    def __nextChar(self):
        # Advance the lookahead by one character from the source.
        self.char = self.source.nextChar()

    def nextSymbol(self):
        """Scan and return the next symbol (a SymType value or keyword token)."""
        # Skip whitespace; -1 marks end of input.
        while True:
            if self.char == -1:
                return SymType.others
            while str.isspace(self.char):
                self.__nextChar()
                if self.char == -1:
                    return SymType.others
            if not str.isspace(self.char):
                break
        # Remember where this atom starts/ends for error reporting.
        self.atomPositionStart = copy(self.source.textPos)
        self.atomPositionEnd = self.source.textPos
        if str.isalpha(self.char):
            # Identifier or keyword: letters followed by alphanumerics.
            self.spell = ""
            while True:
                self.spell = self.spell + self.char
                self.__nextChar()
                if not str.isalnum(self.char):
                    break
            # KT is the keyword table; unknown spellings are plain identifiers.
            if KT.get(self.spell, -1) != -1:
                return KT[self.spell]
            else:
                return SymType.ident
        elif str.isdigit(self.char) and self.char != '0':
            # Decimal integer constant (no leading zero).
            num = 0
            while True:
                num = num*10 + int(self.char)
                self.__nextChar()
                if not str.isdigit(self.char):
                    break
            self.intConst = num
            return SymType.intconst
        elif self.char == '0':
            # A lone '0' is valid; '0' followed by another digit is rejected.
            self.intConst = 0
            self.__nextChar()
            if not str.isdigit(self.char):
                return SymType.intconst
            else:
                return SymType.others
        elif self.char == '"':
            # String constant: alphanumeric run terminated by a closing quote.
            # NOTE(review): the terminating character is appended before the
            # break and stripped via strConst[:-1]; a non-alnum, non-quote
            # character inside the string triggers scanError(1).
            strConst = ""
            while True:
                self.__nextChar()
                strConst = strConst + self.char
                if not str.isalnum(self.char):
                    break
            if self.char == '"':
                self.__nextChar()
                self.strConst = strConst[:-1]
                return SymType.charconst
            else:
                self.scanError(1)
        elif self.char == '<':
            # '<' or '<='
            self.__nextChar()
            if self.char == '=':
                self.__nextChar()
                return SymType.leop
            else:
                return SymType.ltop
        elif self.char == '>':
            # '>' or '>='
            self.__nextChar()
            if self.char == '=':
                self.__nextChar()
                return SymType.geop
            else:
                return SymType.gtop
        elif self.char == '(':
            self.__nextChar()
            return SymType.lparent
        elif self.char == ')':
            self.__nextChar()
            return SymType.rparent
        elif self.char == ',':
            self.__nextChar()
            return SymType.comma
        elif self.char == '=':
            self.__nextChar()
            return SymType.becomes
        else:
            # Unrecognized character: consume it and report "others".
            self.__nextChar()
            return SymType.others

    def scanError(self, ecode, mtxt="", atxt=""):
        """Report a scan error spanning the current atom's start/end positions."""
        self.source.error(ecode, self.atomPositionStart, self.atomPositionEnd, mtxt, atxt)
def test_get_rsvps_from_event_bad_id(self):
    """Events whose id is missing or not a tag URI yield no RSVPs."""
    event = copy.deepcopy(EVENT)
    # `bad_id` instead of `id`: don't shadow the builtin id().
    for bad_id in None, 'not_a_tag_uri':
        event['id'] = bad_id
        self.assert_equals([], Source.get_rsvps_from_event(event))
def test_original_post_discovery(self):
    """Exercise Source.original_post_discovery over tag/attachment/content URLs."""
    activity = {'object': {
        'objectType': 'article',
        'displayName': 'article abc',
        'url': 'http://example.com/article-abc',
        'tags': [],
    }}
    # A fully-specified article passes through unchanged.
    self.assert_equals(activity, Source.original_post_discovery(
        copy.deepcopy(activity)))

    # missing objectType
    activity['object']['attachments'] = [{'url': 'http://x.com/y'}]
    Source.original_post_discovery(activity)
    self.assert_equals([], activity['object']['tags'])

    # A bare domain at the end of content becomes an upstream duplicate,
    # not a tag; one mid-sentence does not.
    activity['object']['content'] = 'x (not.at end) y (at.the end)'
    Source.original_post_discovery(activity)
    self.assert_equals(['http://at.the/end'],
                       activity['object']['upstreamDuplicates'])
    self.assert_equals([], activity['object']['tags'])

    # Article URLs from attachments, existing tags, and content all merge
    # into tags, in that order.
    activity['object'].update({
        'content': 'x http://baz/3 y',
        'attachments': [{'objectType': 'article', 'url': 'http://foo/1'}],
        'tags': [{'objectType': 'article', 'url': 'http://bar/2'}],
    })
    Source.original_post_discovery(activity)
    self.assert_equals([
        {'objectType': 'article', 'url': 'http://foo/1'},
        {'objectType': 'article', 'url': 'http://bar/2'},
        {'objectType': 'article', 'url': 'http://baz/3'},
    ], activity['object']['tags'])

    # leading parens used to cause us trouble
    activity = {'object': {'content': 'Foo (http://snarfed.org/xyz)'}}
    Source.original_post_discovery(activity)
    self.assert_equals(
        [{'objectType': 'article', 'url': 'http://snarfed.org/xyz'}],
        activity['object']['tags'])

    # don't duplicate PSCs and PSLs with http and https
    for field in 'tags', 'attachments':
        for scheme in 'http', 'https':
            url = scheme + '://foo.com/1'
            activity = {'object': {
                'content': 'x (foo.com/1)',
                field: [{'objectType': 'article', 'url': url}],
            }}
            Source.original_post_discovery(activity)
            self.assert_equals([{'objectType': 'article', 'url': url}],
                               activity['object']['tags'])

    # exclude ellipsized URLs
    for ellipsis in '...', u'…':
        url = 'foo.com/1' + ellipsis
        activity = {'object': {
            'content': 'x (%s)' % url,
            'attachments': [{'objectType': 'article', 'url': 'http://' + url}],
        }}
        Source.original_post_discovery(activity)
        self.assert_equals([], activity['object']['tags'])
from sink import Sink
from source import Source
import numpy as np
from common_srcsink import hamming

# Round-trip check: push an image through the Source block, then feed the
# resulting bits back through the Sink block.
use_compression = True
src_block = Source(1, "testfiles/columns.png", use_compression)
sink_block = Sink(use_compression)
out_a, out_b, out_c = src_block.process()
recovered_bits = sink_block.process(out_c)