def import_job():
    base_dir = os.path.dirname(os.path.realpath(__file__)) + '/'
    data_dir = base_dir + 'data/'
    dal = Dal(data_dir)
    importer = Importer(data_dir, dal)
    print('importing ' + str(datetime.now()))
    importer.import_date(datetime.now())
def dispatch(request, *args, **kw):
    """ Perform some checks, call the importer, and return.

    Arguments of the method MUST be passed as pickle in POST data.
    This method is not meant to be called directly with a web browser.
    """
    base = kw.pop('base')
    modules = kw.pop('modules')

    # Create a fresh Importer() for this request.
    imp = Importer()

    full_path = base.replace('/', '.') + '.' + modules.replace('/', '.')
    mod, met = full_path.rsplit('.', 1)

    module = __import__(mod, {}, {}, [''])
    callee = getattr(module, met)

    if not hasattr(callee, '__exportable__'):
        logger.debug("Exporter: method not exportable: " + full_path)
        return (200, Exception("Method not exportable"))

    # GET request?
    # Only method call without args or attributes (from a web browser).
    if request.method == 'GET':
        try:
            return (200, imp.get(mod, met))
        except ImporterError as e:
            logger.debug("Exporter: Caught: " + e.traceback)
            return (500, {'msg': e.msg, 'traceback': e.traceback})
def __init__(self, root):
    self.importer = Importer(appConfig)
    self.generator = Generator(appConfig)
    self.translate_progress = StringVar()
    self.translate_started = False

    Frame.__init__(self, root)
    frame = Frame(root)
    frame.pack()

    # Update entries
    # Label(frame, text="", justify=LEFT).grid(row=1, column=1)
    Label(frame, text="1.更新词条", justify=LEFT).grid(row=2, column=1)
    Button(frame, text="更新 Android 多语言词条", command=self.update_android_resource).grid(row=2, column=2)
    Button(frame, text="更新 iOS 多语言词条", command=self.update_ios_resource).grid(row=2, column=3)

    # Automatic translation
    # Label(frame, text="", justify=LEFT).grid(row=3, column=1)
    Label(frame, text="2.自助翻译", justify=LEFT).grid(row=4, column=1)
    Button(frame, text="自动翻译", command=self.auto_translate).grid(row=4, column=2)
    Label(frame, textvariable=self.translate_progress).grid(row=4, column=3)

    # Import / export translation resources
    # Label(frame, text="", justify=LEFT).grid(row=5, column=1)
    Label(frame, text="3.人工翻译", justify=LEFT).grid(row=6, column=1)
    Button(frame, text="导出翻译资源(Excel)", command=self.generate_translate_resources).grid(row=6, column=2)
    Button(frame, text="导入翻译资源(Excel)", command=self.import_translated_excel).grid(row=6, column=3)

    # Generate localized resources
    # Label(frame, text="", justify=LEFT).grid(row=7, column=1)
    Label(frame, text="4.生成资源", justify=LEFT).grid(row=8, column=1)
    Button(frame, text="生成 Android 多语言资源", command=self.generate_android_resources).grid(row=8, column=2)
    Button(frame, text="生成 iOS 多语言资源", command=self.generate_ios_resources).grid(row=8, column=3)

    # Validate localized resources
    # Label(frame, text="", justify=LEFT).grid(row=9, column=1)
    Label(frame, text="5.校验资源", justify=LEFT).grid(row=10, column=1)
    Button(frame, text="将 Android 多语言资源修改结果同步到仓库", command=self.import_modified_android_resource).grid(row=10, column=2)
    Button(frame, text="将 iOS 多语言资源修改结果同步到仓库", command=self.import_modified_ios_resource).grid(row=10, column=3)
def main():
    path = "data/yellow-small.csv"
    dry_run = False

    # parse flags
    if len(sys.argv) == 2:
        if sys.argv[1] == DRY_RUN:
            dry_run = True
        else:
            path = sys.argv[1]
    if len(sys.argv) == 3 and sys.argv[2] == DRY_RUN:
        dry_run = True
        path = sys.argv[1]

    # parse input
    im = Importer(path)
    im.parse_data()

    # build tree
    tree = DecisionTree(im)
    tree.build_tree(THRESH)
    print(tree.tree_to_json())

    # classify data
    classify = Classify(path, tree.tree)
    classify.check_data(dry_run)
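# The hand-rolled sys.argv handling above accepts "[path] [DRY_RUN]" in one- or
# two-argument form. Below is a minimal sketch of the same interface using
# argparse; the default path is taken from main(), while the "--dry-run" flag
# name is an assumption (the original DRY_RUN constant is not shown here).
import argparse

def parse_cli():
    parser = argparse.ArgumentParser()
    parser.add_argument("path", nargs="?", default="data/yellow-small.csv",
                        help="CSV file to import")
    parser.add_argument("--dry-run", action="store_true",
                        help="classify without writing results")
    args = parser.parse_args()
    return args.path, args.dry_run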
def __init__(self, maf_file, annotations_file, regions_names, coding=True):
    Seq.__init__(self)
    self.coding = coding
    annotations = Importer.get_lines_from_file(annotations_file)
    maf_file = Importer.get_lines_from_file(maf_file)
    for region_name in regions_names:
        print(region_name)
        self.tri_num = 0
        self.mutation_number = 0
        self.trinucleotides_occurences = self.generate_empty_params()
        self.neutral_matrix = self.generate_empty_params()
        print('extracting annotations')
        regions = self.extract_regions_from_annotations(
            annotations, region_name)
        print('analysing sequence')
        self.analyse_maf_file(maf_file, regions)
        self.export_tri_occ_and_mutations(region_name)
        self.normalize_neutral_matrix()
        self.export_results(region_name)
        print()
def __init__(self, verbose=True):
    print("Starting runner")
    self.imp = Importer()
    self.X_train, self.y_H, self.y_I, self.y_M = self.imp.get_X_y()
    if verbose:
        print("Printing imported shapes", self.X_train.shape,
              self.y_H.shape, self.y_I.shape, self.y_M.shape)
        print("Printing imported samples", self.X_train[:3],
              self.y_H[:3], self.y_I[:3], self.y_M[:3])
def import_data(request, key):
    """Import the contents of the import source."""
    importer = Importer()
    source = Source.get(db.Key(key))
    result = importer.import_data(source)
    return HttpResponseRedirect(reverse('admin.views.index'))
def get_discharge_page(uuid):
    data_importer = Importer()
    snapshot_data = data_importer.retrieve_snapshot_data(uuid)
    if snapshot_data is None:
        return render_template('patient_not_found.html')
    else:
        logging.debug(str(snapshot_data))
        return render_template('discharge_page.html', discharge_data=snapshot_data)
def test_import_orphans(self, local_db):
    # assert that we import all 16 orphans (batch name = "Leeg") in the test csv
    importer = Importer()
    importer.import_orphans()
    with local_db.session() as session:
        num = session.query(NPRTable).count()
        assert num == 16
def parse_logs():
    # json field names and processing options for each software's logs
    conf = {
        'bro': {
            'source_ip': 'origin_ip',
            'destionation_ip': 'dest_ip',
            'date': 'timestamp',
            'date_return': (lambda x: datetime.datetime.utcfromtimestamp(
                float(x)).replace(tzinfo=pytz.utc))
        },
        'pan': {
            'source_ip': 'src',
            'destionation_ip': 'dst',
            'date': 'datetime',
            'date_return': (lambda x: dateutil.parser.parse(x).replace(tzinfo=pytz.utc))
        },
        'ciscoasa': {
            'source_ip': 'src_ip',
            'destionation_ip': 'dst_ip',
            'date': '@timestamp',
            'date_return': (lambda x: dateutil.parser.parse(x).replace(tzinfo=pytz.utc))
        },
        'ciscovpn': {
            'source_ip': 'ip',
            'destionation_ip': '',
            'date': '@timestamp',
            'date_return': (lambda x: dateutil.parser.parse(x).replace(tzinfo=pytz.utc))
        },
        'suricata': {
            'source_ip': 'src_ip',
            'destionation_ip': 'dest_ip',
            'date': 'timestamp',
            'date_return': (lambda x: dateutil.parser.parse(x).replace(tzinfo=pytz.utc))
        }
    }
    importer = Importer(conf)
    importer.log_import()
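# For clarity, a hypothetical sketch of how a consumer such as
# Importer.log_import might apply one of the per-source mappings above to a raw
# JSON record. The normalize() helper and the record fields are invented for
# illustration; only the mapping keys come from conf.
import datetime
import pytz

def normalize(record, mapping):
    # Pull the configured field names out of a raw log record and convert the
    # timestamp with the source-specific 'date_return' hook.
    return {
        'source_ip': record.get(mapping['source_ip']),
        'destination_ip': record.get(mapping['destionation_ip']),  # key spelling as in conf
        'timestamp': mapping['date_return'](record[mapping['date']]),
    }

bro_mapping = {
    'source_ip': 'origin_ip',
    'destionation_ip': 'dest_ip',
    'date': 'timestamp',
    'date_return': lambda x: datetime.datetime.utcfromtimestamp(
        float(x)).replace(tzinfo=pytz.utc),
}
print(normalize({'origin_ip': '10.0.0.1', 'dest_ip': '10.0.0.2',
                 'timestamp': '1492214403'}, bro_mapping))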
def export_results(self, region_name):
    matrix_file_name = "neutral_matrices/" + region_name + "_normalized_matrix.txt"
    Importer.export_dict_to_tsv(matrix_file_name, self.neutral_matrix)
    logs_file_name = "neutral_matrices/" + region_name + "_logs.txt"
    logs = 'mutability:' + '\t' + str(
        self.mutation_number / float(self.tri_num)) + '\n'
    logs += 'mutations number:' + '\t' + str(self.mutation_number) + '\n'
    logs += 'tri number:' + '\t' + str(self.tri_num) + '\n'
    Importer.export_text(logs_file_name, logs)
def start_import(data_dir, dry_run=False, tabular=True, variable=True,
                 excel=True, delete=True):
    """ Start the import process proper. Everything up to this point was
    preparation so the data would be organized and available. """
    from importer import Importer
    importer = Importer(data_dir)
    if tabular:
        print u"Running the tabular import"
        if not dry_run:
            importer.import_tabular_data()
    if variable:
        print u"Running the variable import"
        if not dry_run:
            importer.import_variable_data()
    if excel:
        print u"Running the datos_extra import"
        if not dry_run:
            importer.import_excel(variable)
    print u"Deleting unmarked data"
    # TODO: Create gestionar_jornadas_continuas to enrol the students of the
    # groups in the T.[INF|PRI] subjects automatically. Right now this is done
    # when importing from the excel, and if the excel has not changed it is
    # not imported and the enrolments are deleted.
    # importer.gestionar_jornadas_continuas()
    importer.delete_non_used_data(not dry_run and delete)
    return importer.log_result
class MyFrame(wx.Frame):
    """ We simply derive a new class of Frame. """

    def __init__(self, parent, title, opts, sources):
        wx.Frame.__init__(self, parent, title=title,
                          size=((800 if opts.verbosity < 2 else 1100), 400))
        self.lock = Lock()
        self.opts = opts
        self.control = wx.TextCtrl(self, style=wx.TE_MULTILINE)
        self.control.SetEditable(False)
        self.Show(True)
        self.twiddle_mode = 2
        self.twiddle_next = 0
        self.twiddle_me = '|/-\\'
        self.twiddle_size = len(self.twiddle_me)
        self.importer = Importer(self, sources, self.opts)
        self.Bind(wx.EVT_CLOSE, self.onClose)

    def onClose(self, event):
        self.importer.interrupt.set()
        while self.importer.isAlive():
            self.importer.join(1)
            wx.Yield()
        self.Destroy()

    def __append_text(self, s):
        with self.lock:
            self.control.AppendText(s)

    def logger(self, s):
        # print s
        if self.twiddle_mode != 2:
            re_twiddle = True
            self.twiddle(2)
        else:
            re_twiddle = False
        self.__append_text("%s\n" % (s,))
        if re_twiddle:
            self.twiddle(0)

    def twiddle(self, mode):
        # Mode (0, 1, 2) == (start (add first twiddle), advance, erase)
        self.twiddle_mode = mode
        if mode == 0:  # append
            self.twiddle_next = 0
            self.__append_text(self.twiddle_me[self.twiddle_next])
        elif mode == 1:  # replace
            lastPos = self.control.GetLastPosition()
            self.control.Replace(lastPos - 1, lastPos,
                                 self.twiddle_me[self.twiddle_next])
        elif mode == 2:  # erase
            lastPos = self.control.GetLastPosition()
            self.control.Remove(lastPos - 1, lastPos)
        # Advance
        self.twiddle_next = (self.twiddle_next + 1) % self.twiddle_size
def start(path, username, password):
    rest = RestClient(username)
    webdav = WebDav(username, password)

    # Retry the login (with the same credentials) up to five times before
    # giving up; the original recursive retry never advanced its counter.
    login_attempts = 0
    while not rest.login(username, password):
        login_attempts += 1
        print("Invalid username or password: " + username)
        if login_attempts >= 5:
            exit(1)

    log = logging.getLogger("lims_import")
    log.setLevel(logging.DEBUG)
    sh = logging.StreamHandler()
    sh.setLevel(logging.DEBUG)
    fh = logging.FileHandler("lims-import.log")
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(logging.Formatter(
        "%(asctime)s [%(levelname)s] [%(filename)s:%(lineno)d] \t %(message)s"))
    log.addHandler(sh)
    log.addHandler(fh)

    if not webdav.is_available():
        log.error("Unable to connect to Owncloud.")
        exit(1)

    for project in path:
        if not os.path.exists(project):
            log.error("{} not found.".format(project))
            exit(1)

    total = 0
    for project in path:
        total += 1
        for root, dirs, files in os.walk(project):
            total += len(files)
            total += len(dirs)

    bar = progressbar.ProgressBar(max_value=total, redirect_stdout=True)

    def update_progress():
        bar.update(bar.value + 1)

    importer = Importer(username, password)
    importer.on_update += update_progress
    for project in path:
        importer.start(project)
def __init__(self, options):
    self.importer = Importer()
    # try to read all the files in the dir
    if options.dir_name is not None:
        self._files = [f for f in self.get_files(options.dir_name)]
    else:
        self._files = [options.file_name]  # only one file
    self._algo_name = options.algorithm if (options.algorithm is not None) else 'bfs'
    self.options = options
def run(args):
    if hasattr(args, 'importer'):
        importer = Importer(csv=args.csv, database=args.database, verbose=args.verbose)
        importer.run()
    elif hasattr(args, 'service'):
        service = Service(host=args.host, port=args.port, database=args.database)
        service.run()
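# The hasattr() checks suggest the CLI marks the chosen subcommand by setting a
# sentinel attribute on the parsed namespace. A hypothetical argparse setup
# that would drive run() this way; every name beyond csv, database, verbose,
# host and port (which run() reads above) is an assumption.
import argparse

def build_parser():
    parser = argparse.ArgumentParser()
    sub = parser.add_subparsers()

    imp = sub.add_parser('import')
    imp.add_argument('--csv', required=True)
    imp.add_argument('--database', default='app.db')
    imp.add_argument('--verbose', action='store_true')
    imp.set_defaults(importer=True)  # presence of args.importer selects the branch

    srv = sub.add_parser('serve')
    srv.add_argument('--host', default='127.0.0.1')
    srv.add_argument('--port', type=int, default=8000)
    srv.add_argument('--database', default='app.db')
    srv.set_defaults(service=True)  # presence of args.service selects the branch

    return parser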
def __init__(self, file, tree):
    self.data = Importer(file).parse_data()
    self.training = True
    self.tree = tree
    self.records = {
        "num_records": 0,
        "correct": 0,
        "incorrect": 0,
        "accuracy": 0,
        "error_rate": 0
    }
def test_import_range(self, mocked_objectstore, local_db, start_date_str,
                      end_date_str, num_expected):
    mocked_objectstore.get_existing_batch_names = []
    importer = Importer()
    start_date = datetime.strptime(start_date_str, "%Y%m%d").date()
    end_date = datetime.strptime(end_date_str, "%Y%m%d").date()
    importer.import_range(start_date, end_date)
    with local_db.session() as session:
        num = session.query(NPRTable).count()
        assert num == num_expected
def __init__(self, fileName, redisHost, redisPort, redisDB=0):
    """
    Init a geonames cities importer
    @param fileName path to the geonames datafile, optionally followed by
           a comma and the admin codes file
    @param redisHost redis host
    @param redisPort redis port
    @param redisDB redis database number
    """
    Importer.__init__(self, fileName, redisHost, redisPort, redisDB)
    fileNames = fileName.split(',')
    self.fileName = fileNames[0]
    self.adminCodesFileName = fileNames[1] if len(fileNames) > 1 else None
    self._adminCodes = {}
def run(self):
    args = self.parser.parse_args()
    server = args.server
    token = args.token
    dir_path = args.dir
    if server.endswith("/"):
        server = server[:-1]
    importer = Importer(
        api_server=server,
        token=token,
        dir_path=dir_path,
    )
    importer.run()
def ImportDatabase(self):
    """Import an old-style .db qwitter database, or .session file into the
    current session's storage."""

    def import_buffer(tablename, buffername):
        table = new.load_table(tablename)
        newtable = new.convert_table(table)
        buf = self.session.get_buffer_by_name(buffername)
        buf.storage.extend(newtable)
        buf.storage.sort(key=lambda a: calendar.timegm(
            rfc822.parsedate(a['created_at'])))
        buf.storage = misc.RemoveDuplicates(buf.storage, lambda x: x['id'])

    f = wx.Frame(None, wx.ID_ANY, "FileDialog")
    dlg = wx.FileDialog(
        f, message="Select Database", defaultDir=paths.data_path(),
        wildcard="Session files (*.session)|*.session|Qwitter Databases (*.DB)|*.db")
    f.Raise()
    if dlg.ShowModal() == wx.ID_OK:
        filename = dlg.GetPath()
        dlg.Destroy()
        f.Destroy()
    else:
        output.speak(_("Canceled"), True)
        dlg.Destroy()
        f.Destroy()
        return
    if filename.lower().endswith('.db'):
        new = Importer(filename)
        home = new.load_table("tweets")
        directs = new.load_table("directs")
        mentions = new.load_table("replies")
        sent = new.load_table("sent")
        total = len(home) + len(directs) + len(mentions) + len(sent)
        yesno = wx.MessageBox(
            _("Are you sure you want to import %d items from old database?") % total,
            _("Are you sure?"), style=wx.YES | wx.NO)
        if yesno == wx.YES:
            output.speak(_("Importing, please wait."), True)
        else:
            return output.speak(_("Canceled."), True)
        for i in [("tweets", "Home"), ("replies", "Mentions"),
                  ("directs", "Direct Messages"), ("sent", "Sent")]:
            import_buffer(*i)
    elif filename.lower().endswith('.session'):
        try:
            new = SessionImporter(filename)
        except TypeError:
            return output.speak(_("Session type mismatch."))
        new.do_import()
    wx.MessageBox(_("Import completed successfully."), _("Import complete."))
def get_importer(config):
    importer = Importer()
    importer.load(
        dict(key='service_classes', module_names=config.SERVICES,
             class_name='Service'),
    )  # load all modules here

    services = []
    for service_class in importer.service_classes:
        srv = service_class()
        srv.name = service_class.__module__
        services.append(srv)

    importer.services = services
    return importer
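# A hypothetical sketch of the dynamic loading Importer.load appears to
# perform: import each named module, grab the class with the given name, and
# expose the collected classes under the spec's 'key'. Illustration only, not
# the project's implementation.
import importlib

class DynamicImporter(object):
    def load(self, *specs):
        for spec in specs:
            classes = []
            for module_name in spec['module_names']:
                module = importlib.import_module(module_name)
                classes.append(getattr(module, spec['class_name']))
            # e.g. key='service_classes' becomes self.service_classes
            setattr(self, spec['key'], classes)

# Usage with a stdlib module standing in for a service module:
imp = DynamicImporter()
imp.load(dict(key='service_classes', module_names=['json'], class_name='JSONDecoder'))
print(imp.service_classes)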
def __init__(self, fileName, redisHost, redisPort, redisDB,
             locationFileName=None, regionsFileName=None, countriesFileName=None):
    Importer.__init__(self, fileName, redisHost, redisPort, redisDB)
    if not locationFileName:
        dirname = os.path.dirname(fileName)
        locationFileName = os.path.join(dirname, 'GeoLiteCity-Location.csv')
    if not regionsFileName:
        dirname = os.path.dirname(fileName)
        regionsFileName = os.path.join(dirname, 'regions.csv')
    if not countriesFileName:
        dirname = os.path.dirname(fileName)
        countriesFileName = os.path.join(dirname, 'countries.csv')
    self.locationFileName = locationFileName
    self.regionsFileName = regionsFileName
    self.countriesFileName = countriesFileName
def main():
    try:
        url, key = sys.argv[1:]
    except (IndexError, ValueError):
        sys.stderr.write('Usage: %s <redmine_url> <auth_key>\n' % sys.argv[0])
        sys.exit(1)
    transport = RedmineHttpTransport(url, key)
    api = Redmine(transport)
    importer = Importer(api)
    source = CsvSource.from_buffer(sys.stdin.readlines(), IssueModel)
    results = importer.import_from_source(source)
    print 'Created issue ids:', ', '.join(results)
def process_file(self, wav_file_path, attack=False, peak=False):
    wav_file, json_file = Importer.load(wav_file_path)
    if not wav_file:
        print("Error for path " + wav_file_path)
        return
    if not json_file and not attack:
        print("Error for path " + wav_file_path)
        return
    # if attack and not json_file:
    if attack and peak:
        X = Dispatcher.peak_extract(wav_file, json_file)
        if json_file:
            y = [json_file[i] for i in sorted(json_file.keys())]
            y.pop(0)
        else:
            y = None
    else:
        X, y = Dispatcher.timestamped_no_peak_check(wav_file, json_file)
    if X is None:
        print("X not present, exiting...")
        return
    X = Extractor.transform_mfcc(X, self.winlen, self.winstep, self.numcep,
                                 self.nfilt, self.nfft)
    self.total_X.extend(X)
    if y:
        self.total_y.extend(y)
def __load_descriptors(self, offset):
    """ Loads the descriptors. Private method. """
    self.learn = [[], []]
    self.validate = [[], []]
    self.test = [[], []]

    # Load images for learn and validate sets
    # TODO: Load and use test set
    for dir in range(self.__range):
        importer = Importer('images/' + str(dir), self.__crossValidate, offset)
        # print("Importing learn from " + str(dir) + " using " + str(self.extractor))
        for image in importer.learn:
            self.learn[0].append(self.extractor.run(image).data)
            self.learn[1].append(dir)
        # print("Importing validate from " + str(dir) + " using " + str(self.extractor))
        for image in importer.validate:
            self.validate[0].append(self.extractor.run(image).data)
            self.validate[1].append(dir)
        for image in importer.test:
            self.test[0].append(self.extractor.run(image).data)
            self.test[1].append(dir)
def importData(self, data, arguments):
    usage = "Usage: import Google/Garmin/GoogleForm/GoogleWeb/Races/csv <filename>"
    if " " in arguments:
        (type, filename) = arguments.split(' ', 1)
    else:
        self.error(usage)
        return
    if type == "Google":
        importer = GoogleImporter(filename)
    elif type == "Garmin":
        importer = GarminImporter(filename)
    elif type == "GoogleForm":
        importer = Importer(filename)
    elif type == "GoogleWeb":
        importer = GoogleWebImporter(filename)
    elif type == "Races":
        importer = RacesImporter(filename)
    elif type == "csv":
        importer = CsvImporter(filename)
    else:
        self.error("Type '%s' not recognized" % type)
        self.error(usage)
        return
    if not importer.fileExists():
        self.error("File '%s' not found" % filename)
        self.error(usage)
        return
    # if importer.clashingData(data) and \
    #         not self.continuePrompt("Data clash processing %s. Overwrite existing data with newly imported data?" % filename):
    #     return
    importer.loadData(data)
def test_parse_line_version_2(client):
    line = ("Elektra-Dach-Neu;1;1530364575;ATmega8_A_1;711;0;18.614;18.614;"
            "14.073;100;100;28;11.67;14.086;7;20;52.507309;13.458635;300")
    measurement = Importer.parse_line(line)
    # We save here to make sure that sqlalchemy converts the strings to ints/floats
    m = measurement.save()
    assert isinstance(measurement, Measurement)
    assert measurement.nodeId == "Elektra-Dach-Neu"
    assert measurement.isemsRevision == "1"
    assert measurement.timestamp == datetime.datetime(2018, 6, 30, 15, 16, 15)
    assert measurement.timeToShutdown == 711.0
    assert measurement.isPowerSaveMode is False
    assert measurement.openCircuitVoltage == 18.614
    assert measurement.mppVoltage == 18.614
    assert measurement.batteryVoltage == 14.073
    assert measurement.batteryChargeEstimate == 100
    assert measurement.batteryHealthEstimate == 100
    assert measurement.batteryTemperature == 28
    assert measurement.lowVoltageDisconnectVoltage == 11.67
    assert measurement.temperatureCorrectedVoltage == 14.086
    assert measurement.rateBatteryCapacity == 7
    assert measurement.ratedSolarModuleCapacity == 20
    assert measurement.latitude == 52.507309
    assert measurement.longitude == 13.458635
    assert measurement.status == int("300", 16)
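# The field layout implied by these assertions can be made explicit. Below is a
# hypothetical reconstruction of the parsing, derived only from the test above:
# the real Importer.parse_line apparently keeps numeric fields as strings until
# save() converts them, while this sketch converts eagerly for readability, and
# the unasserted fourth field ("ATmega8_A_1") is skipped. Note the expected
# timestamp implies a local-time (UTC+2) interpretation of the epoch value.
import datetime

def parse_line_sketch(line):
    fields = line.split(";")
    return {
        "nodeId": fields[0],
        "isemsRevision": fields[1],
        "timestamp": datetime.datetime.fromtimestamp(int(fields[2])),
        "timeToShutdown": float(fields[4]),
        "isPowerSaveMode": fields[5] == "1",
        "openCircuitVoltage": float(fields[6]),
        "mppVoltage": float(fields[7]),
        "batteryVoltage": float(fields[8]),
        "batteryChargeEstimate": int(fields[9]),
        "batteryHealthEstimate": int(fields[10]),
        "batteryTemperature": int(fields[11]),
        "lowVoltageDisconnectVoltage": float(fields[12]),
        "temperatureCorrectedVoltage": float(fields[13]),
        "rateBatteryCapacity": int(fields[14]),
        "ratedSolarModuleCapacity": int(fields[15]),
        "latitude": float(fields[16]),
        "longitude": float(fields[17]),
        "status": int(fields[18], 16),  # status word is hex-encoded
    }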
def dispatch(request, *args, **kw):
    """ Perform some checks, call the importer, and return.

    Arguments of the method MUST be passed as pickle in POST data.
    This method is not meant to be called directly with a web browser.
    The Importer() instance is stored in session.
    """
    base = kw.pop('base')
    modules = kw.pop('modules')

    # Create Importer() if not already present in session.
    if '__importer__' not in request.session:
        request.session['__importer__'] = Importer()

    full_path = base.replace('/', '.') + '.' + modules.replace('/', '.')
    mod, met = full_path.rsplit('.', 1)

    module = __import__(mod, {}, {}, [''])
    callee = getattr(module, met)

    if not hasattr(callee, '__exportable__'):
        logger.debug("Exporter: method not exportable: " + full_path)
        return (200, Exception("Method not exportable"))

    # GET request?
    # Only method call without args or attributes (from a web browser).
    if request.method == 'GET':
        try:
            return (200, request.session['__importer__'].get(mod, met))
        except ImporterError as e:
            logger.debug("Exporter: Caught: " + e.traceback)
            return (500, {'msg': e.msg, 'traceback': e.traceback})
def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
    super().__init__(scope, id, **kwargs)

    ticker = dynamodb.Attribute(
        name='Ticker',
        type=dynamodb.AttributeType.STRING,
    )
    date = dynamodb.Attribute(
        name='Date',
        type=dynamodb.AttributeType.STRING,
    )
    table = dynamodb.Table(
        self, 'StockHistory',
        partition_key=ticker,
        sort_key=date,
        billing_mode=dynamodb.BillingMode.PAY_PER_REQUEST,
        removal_policy=core.RemovalPolicy.DESTROY,
        point_in_time_recovery=True,
    )
    index_name = 'Date-index'
    table.add_global_secondary_index(
        index_name=index_name,
        partition_key=date,
        sort_key=ticker,
        projection_type=dynamodb.ProjectionType.INCLUDE,
        non_key_attributes=['Name'])

    Importer(self, 'Importer', table=table)
    restapi = RestApi(self, 'Api', table=table, index_name=index_name)
    Website(self, 'Website', api=restapi.api)
def compile_dp_files(mvid):
    """ Compile all data prediction files for the given model version and
    return them as one DataFrame """
    mvm = Importer.get_model_version(mvid)

    # Identify root directories
    cv_run = mvm.cross_validate_id.values[0] == 1
    rootdirs = ['full']
    if cv_run:
        rootdirs.extend(['cv%s' % cvn for cvn in range(1, 11)])

    # Identify location leaves
    lsvid = mvm['location_set_version_id'].values[0]
    loctree = lt.construct(lsvid)
    leaf_ids = [l.id for l in loctree.leaves()]

    # Construct filepaths to read
    fdefs = []
    for cv in rootdirs:
        for l in leaf_ids:
            for s in ['male', 'female']:
                for y in range(1990, 2016, 5):
                    fdef = (mvid, cv, l, s, y)
                    fdefs.append(fdef)

    pool = Pool(40)
    df = pool.map(read_data_pred_file, fdefs)
    pool.close()
    pool.join()
    df = pd.concat(df)
    return df
def configured_importer():
    importer = Importer()
    importer.loxone_miniserver_host = '192.168.1.2'
    importer.loxone_miniserver_port = '80'
    importer.loxone_username = '******'
    importer.loxone_password = '******'
    importer.ha_bridge_host = '192.168.1.3'
    importer.ha_bridge_port = '8080'
    return importer
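# Presumably this factory backs a pytest fixture. A minimal sketch of wiring it
# up that way; the fixture name and the example test body are assumptions.
import pytest

@pytest.fixture
def importer_fixture():
    # Each test gets a fresh, fully configured instance.
    return configured_importer()

def test_importer_is_configured(importer_fixture):
    assert importer_fixture.loxone_miniserver_host == '192.168.1.2'
    assert importer_fixture.ha_bridge_port == '8080'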
class ImportFiles(SetupFormBase):
    '''
    Load one or more CSV files into the database from disk.
    '''

    def __init__(self, master, *args, **kargs):
        super().__init__(master, 'RawImport', *args, **kargs)
        self.form_contents = []
        master.grid(padx=10, pady=10)
        self.importer = Importer()

        row = 0
        col = 0
        header = Header(master, "Import Files")
        header.grid(row=row, column=col, columnspan=4)

        row += 1
        col = 0
        tk.Label(master, text='Import a CSV file from disk. All of the records').grid(
            row=row, column=col)
        row += 1
        tk.Label(
            master,
            text='are placed in the database, the customers, vendors,').grid(
                row=row, column=col)
        row += 1
        tk.Label(
            master,
            text='sales, and purchases are updated into the system.').grid(
                row=row, column=col)

        row += 1
        col = 0
        buttons = SingleButtonBox(master, 'import_file_form', 'Import')
        buttons.grid(row=row, column=col, columnspan=4)
        buttons.register_events(self.import_file)

        self.row = row
        # self.set_form()

    def import_file(self):
        '''
        Bring a file into the database.
        '''
        self.importer.import_all()
def import_dict(self, path):
    """ Given a path, load the dictionary sheet into memory so it can be
    worked on, and return its keys. """
    self.__imported_dict = Importer.import_data_dictionary(
        path, config=self.main_config)
    return list(self.__imported_dict.keys())
def activate(self):
    self.setWindowTitle('Companies House Web Scraping Tool')
    if not (len(self.lineImport.text()) > 1):
        self.lineImport.setText(QDir.fromNativeSeparators(self.dirBase + '\\input\\import.csv'))
    if os.path.isfile(self.lineImport.text()):
        self.progressBar.setValue(0)
        self.imp = Importer(self, self.apikey, self.lineImport.text(),
                            self.dirBase + '\\output\\output.csv')
        self.imp.updateSignal.connect(self.progressBar.setValue)
        self.imp.maxSignal.connect(self.setRange)
        self.imp.closeSignal.connect(self.finished)
        self.imp.start()
        self.btnStart.setEnabled(False)
        self.btnStop.setEnabled(True)
        self.btnPause.setEnabled(True)
    else:
        QMessageBox.warning(self, 'Error', self.lineImport.text() + " doesn't exist")
def __init__(self, options=None):
    if options is None:
        options = 1
    # Get all customers and depots from selected file
    self.customers, self.depots = Importer.get_data(options)
    self.vehicle = self.depots[0].vehicle
    self.computer = Computer(self.customers, self.depots, self.vehicle)
    self.iteration = 0
def parse_dir(directory: str) -> None:
    """Takes a directory path, parses all Documents inside this path, and
    writes the results to the database."""
    docs = Importer.import_dir(directory)
    with open_db() as connection:
        insert_documents(connection, docs)
        insert_tfs(connection, docs)
        insert_boost(connection, docs)
        compute_statistics(connection)
def test__one_by_one_import_all():
    mock_measurement = Measurement()
    mock_measurement.save = MagicMock(return_value=1)
    measurements = [mock_measurement, mock_measurement]
    inserted_count = Importer._one_by_one_import_all(measurements)
    assert inserted_count == 2
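# Judging from the mock (save() returns 1 and two measurements yield a count of
# 2), the method under test presumably saves each measurement and sums the
# results. A hypothetical sketch, not the project's code:
def one_by_one_import_all(measurements):
    # Save each measurement individually and count successful inserts;
    # save() is assumed to return 1 on success, as the mock above suggests.
    return sum(m.save() for m in measurements)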
def create(chapter_id, kctype, file_kcids=None, debug=False, train_pct=0.6,
           dev_pct=0.2, index=""):
    file_name = (IntermediateData.input_path + "homework_xref_" +
                 str(chapter_id) + "_decompressed.csv")

    print "CLEANING DATA..."
    imp = Importer()
    df_imp = imp.get_data(file_name, kctype, debug)

    kc_col = "kc"
    if kctype == "tom":
        kc_definition = [kc_col]
    else:
        # TODO: We should be extracting features from exercises, but feature
        # extraction does not support this yet
        kc_definition = [kc_col]  # ",exercise_id]"

    # Sort data
    df_imp = FeatureExtractor.sort(df_imp)

    for kc in df_imp[kc_col].unique():
        print kc_col, kc
        # GET SUBSET of data
        df = df_imp[df_imp[kc_col] == kc]
        # "EXTRACTING FEATURES..."
        fe = FeatureExtractor(default_kc=kc_definition)
        df_features = fe.df_to_features(df, sort_data=False)  # No need to sort data
        # "SUBSET OF DATA..."
        df_train, df_dev, df_test = split(df_features, train_pct, dev_pct)
        # "STORING"
        chapter_data = IntermediateData(chapter_id, kc, df_features,
                                        train_rows=df_train, dev_rows=df_dev,
                                        test_rows=df_test)
        filename = os.path.join(IntermediateData.output_path, index,
                                IntermediateData.get_filename(kctype, kc))
        f_output = open(filename, "w")
        pickle.dump(chapter_data, f_output)
        f_output.close()
        # hy commented
        # df_features.to_csv(filename + ".csv")
        if file_kcids is not None:
            file_kcids.write(str(chapter_id) + "," + str(kc) + "," + index + "\n")
            file_kcids.flush()
def post(self):
    """ Does the import, and shows any errors. """
    self.msg = ""
    error = True
    importer = Importer(DataAccessor(self.addErrorMessage))
    try:
        target = self.request.POST.get("newFile").file.read()
        importer.parse(StringIO(target))
    except IOError:
        self.msg = "Please select a valid file to import"
    except Usage as err:
        self.msg = err.msg
def main():
    opts = args.parse_args()
    db = get_db(opts)
    importer = Importer(db, opts)
    if (not opts.skip_rankings and not opts.only_uservisits):
        importer.import_data_set('rankings')
    if (not opts.skip_uservisits or opts.only_uservisits):
        importer.import_data_set('uservisits')
    if (not opts.skip_crawls and not opts.only_uservisits):
        importer.import_data_set('crawl')
def run(self):
    self.logger.info("NoaaImportController.run started")
    print("NoaaImportController.run started")

    start_after_date = self.uow.weather_stations.get_last_record_time(self.ws.id, self.year)
    if start_after_date:
        next_date = start_after_date + timedelta(hours=1)
        if self.year < next_date.year:
            self.logger.info("Skipping year " + str(self.year))
            print("Skipping year " + str(self.year))
            return

    self.logger.debug("Downloader started")
    downloader = Downloader(self.ws.usaf, self.ws.wban, self.year)
    downloader.run()
    self.logger.debug("Downloader ended")

    self.logger.debug("Unzipper started")
    unzipper = Unzipper(downloader.downloaded_file_path)
    unzipper.run()
    os.unlink(downloader.downloaded_file_path)
    self.logger.debug("Unzipper ended")

    self.logger.debug("Converter started")
    converter = Converter(unzipper.unzipped_file_path)
    converter.run()
    os.unlink(unzipper.unzipped_file_path)
    self.logger.debug("Converter ended")

    self.logger.debug("Importer started")
    importer = Importer(self.ws.id, self.year, start_after_date, converter.unencoded_file_path)
    importer.run()
    os.unlink(converter.unencoded_file_path)
    self.logger.debug("Importer ended")

    self.logger.info("NoaaImportController.run ended")
    print("NoaaImportController.run ended")
def setup(self):
    self.engine = create_engine('sqlite://')
    self.Session = sessionmaker()
    self.Session.configure(bind=self.engine)
    Base.metadata.create_all(self.engine)

    self.importer = Importer()
    self.importer.dataAccess.Session = self.Session

    self.entry1 = Expando()
    self.entry1.title = u'entry 1 title é'
    self.entry1.description = u'entry 1 description é'
    self.entry1.link = u'http://entry1.linké'

    self.entry2 = Expando()
    self.entry2.title = u'entry 2 title é'
    self.entry2.description = u'entry 2 description é'
    self.entry2.link = self.entry1.link

    self.entry3 = Expando()
    self.entry3.title = u'entry 3 title é'
    self.entry3.description = u'entry 3 description é'
    self.entry3.link = u'http://entry3.linké'

    self.entry4 = Expando()
    self.entry4.title = u'entry 4 title é'
    self.entry4.description = u'entry 4 description é'
    self.entry4.link = u'http://entry4.linké'

    self.entry5 = Expando()
    self.entry5.title = u'entry 4 title é'
    self.entry5.link = u'http://entry4.linké'

    self.feed1 = u'http://feed1url.urlé'
    self.feed2 = u'http://feed2url.urlé'

    self.feed1contents = Expando()
    self.feed1contents.entries = [self.entry1, self.entry2]
    self.feed2contents = Expando()
    self.feed2contents.entries = [self.entry3, self.entry4]
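# Expando here appears to be a plain attribute bag used to fake feed entries
# and feed contents. A minimal stand-in, assuming the real class does nothing
# more than accept arbitrary attributes:
class Expando(object):
    """Attribute bag: instances accept arbitrary attributes."""
    pass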
def setUp(self):
    super(AppTestCase, self).setUp()
    self.server_parameters = ServerParameters(
        io_loop=self.io_loop,
        host='localhost',
        port=8888,
        config_path='./tests/fixtures/test-valid.conf',
        log_level='INFO',
        debug=True,
    )
    self.config = Config()
    self.importer = Importer()
    self.importer.load(
        dict(key='service_classes', module_names=self.config.SERVICES,
             class_name='Service'),
    )  # load all modules here

    services = []
    for service_class in self.importer.service_classes:
        srv = service_class()
        srv.name = service_class.__module__
        services.append(srv)

    self.importer.services = services
    self.context = Context(self.server_parameters, self.config, self.importer)
class ServiceTest(unittest.TestCase):

    def setUp(self):
        """ Initialize test environment. """
        # TODO: clean up spvd database beforehand
        self.imp = Importer()
        self.imp['distant_url'] = 'https://192.168.2.81/exporter/'
        self.groups = []
        self.checks = []
        self.objects = []

    def tearDown(self):
        """ Clean up test environment. """
        groups = [grp_id for list_id in self.groups for grp_id in list_id]
        checks = [chk_id for list_id in self.checks for chk_id in list_id]
        objects = [obj_id for list_id in self.objects for obj_id in list_id]
        if groups:
            self.imp.call('spv.services', 'delete_groups', groups)
        if checks:
            self.imp.call('spv.services', 'delete_checks', checks)
        if objects:
            self.imp.call('spv.services', 'delete_objects', objects)

    #
    # TEST FOR GET_STATUS
    #
    def _get_status_tests_create_data(self):
        """ Create object, group and check for the get_status tests. """
        groups = self.imp.call('spv.services', 'create_groups', ['test_groups'])
        self.groups.append(groups.keys())
        objects = self.imp.call('spv.services', 'create_objects',
                                [{'address': 'test_address',
                                  'infos': {'key_toto': 'value_toto'},
                                  'group_id': groups.keys()[0]}])
        self.objects.append(objects.keys())
        checks = self.imp.call('spv.services', 'create_checks',
                               [{'plugin': 'hey', 'plugin_check': 'salut',
                                 'name': 'toi', 'repeat': 100,
                                 'repeat_on_error': 100,
                                 'infos': {'key_toto': 'value_toto'},
                                 'group_id': groups.keys()[0]}])
        self.checks.append(checks.keys())
        return [groups, objects, checks]

    def _get_status_test_value(self, data, groups, objects, checks):
        keys = data.keys()
        keys.sort()
        self.assertTrue(keys == ['checks', 'groups', 'objects', 'status'])
        self.assertTrue(data['checks'][checks.keys()[0]]['name'] == 'toi')
        self.assertTrue(data['groups'][groups.keys()[0]]['name'] == 'test_groups')
        self.assertTrue(data['objects'][objects.keys()[0]]['address'] == 'test_address')
        self.assertTrue(data['status'][data['status'].keys()[0]]['grp_id'] == groups.keys()[0])
        self.assertTrue(data['status'][data['status'].keys()[0]]['chk_id'] == checks.keys()[0])
        self.assertTrue(data['status'][data['status'].keys()[0]]['obj_id'] == objects.keys()[0])

    def test_get_status_check_status(self):
        """ ['get_status'] Get every status with one specific status """
        self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status',
                            {'status': ['FINISHED', 'ERROR']})
        for status in ret['status']:
            self.assertTrue(ret['status'][status]['check_status'] in ['FINISHED', 'ERROR'])
        ret_finish = self.imp.call('spv.services', 'get_status', {'status': 'FINISHED'})
        self.assertNotEqual(len(ret), 0)
        for status in ret_finish['status']:
            self.assertEqual(ret_finish['status'][status]['check_status'], 'FINISHED')
        len_error = len(self.imp.call('spv.services', 'get_status',
                                      {'status': 'ERROR'})['status'])
        self.assertEqual(len(ret['status']), len(ret_finish['status']) + len_error)

    def test_get_status_group_name(self):
        """ ['get_status'] Get every status with one specific group_name """
        groups, objects, checks = self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status',
                            {'group_name': [groups.values()[0]['name']]})
        self._get_status_test_value(ret, groups, objects, checks)

    def test_get_status_object_address(self):
        """ ['get_status'] Get every status with one specific object address """
        groups, objects, checks = self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status',
                            {'object_address': objects.values()[0]['address']})
        self._get_status_test_value(ret, groups, objects, checks)

    def test_get_status_plugin_name(self):
        """ ['get_status'] Get every status with one specific plugin name """
        groups, objects, checks = self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status',
                            {'plugin_name': checks.values()[0]['plugin']})
        self._get_status_test_value(ret, groups, objects, checks)

    def test_get_status_plugin_check(self):
        """ ['get_status'] Get every status with one specific plugin check """
        groups, objects, checks = self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status',
                            {'plugin_check': checks.values()[0]['plugin_check']})
        self._get_status_test_value(ret, groups, objects, checks)

    def test_get_status_group_id(self):
        """ ['get_status'] Get every status with one specific group_id """
        groups, objects, checks = self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status', {'group_id': groups.keys()[0]})
        self._get_status_test_value(ret, groups, objects, checks)

    def test_get_status_check_id(self):
        """ ['get_status'] Get every status with one specific check_id """
        groups, objects, checks = self._get_status_tests_create_data()
        checks_second = self.imp.call('spv.services', 'create_checks',
                                      [{'plugin': '1', 'plugin_check': '2',
                                        'name': '3', 'repeat': 100,
                                        'repeat_on_error': 100,
                                        'group_id': groups.keys()[0]}])
        self.checks.append(checks_second.keys())
        ret = self.imp.call('spv.services', 'get_status', {'check_id': checks.keys()[0]})
        self._get_status_test_value(ret, groups, objects, checks)

    def test_get_status_status_id(self):
        """ ['get_status'] Get status with additional info on status """
        groups, objects, checks = self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status',
                            {'group_id': groups.keys()[0], 'get_status_infos': True})
        ret_status = self.imp.call('spv.services', 'set_checks_status',
                                   [{'status_id': ret['status'][ret['status'].keys()[0]]['status_id'],
                                     'sequence_id': 0, 'status': 'coucou !',
                                     'message': 'salut',
                                     'status_infos': {'key_status': 'value_status'}}])
        ret = self.imp.call('spv.services', 'get_status',
                            {'group_id': groups.keys()[0], 'get_status_infos': True})
        self.assertEqual(ret['status'][ret['status'].keys()[0]]['status_infos'].keys(),
                         ['key_status'])
        self.assertEqual(ret['status'][ret['status'].keys()[0]]['status_infos']['key_status']['value'],
                         'value_status')
        self.assertEqual(ret['status'][ret['status'].keys()[0]]['status_infos']['key_status']['key'],
                         'key_status')
        self.assertEqual(ret['status'][ret['status'].keys()[0]]['status_infos']['key_status']['status_id'],
                         ret['status'].keys()[0])

    def test_get_status_limit(self):
        """ ['get_status'] Get objects/checks/groups by group_id with a limit """
        groups, objects, checks = self._get_status_tests_create_data()
        checks_second = self.imp.call('spv.services', 'create_checks',
                                      [{'plugin': '1', 'plugin_check': '2',
                                        'name': '3', 'repeat': 100,
                                        'repeat_on_error': 100,
                                        'group_id': groups.keys()[0]}])
        self.checks.append(checks_second.keys())
        ret = self.imp.call('spv.services', 'get_status',
                            {'group_id': groups.keys()[0], 'limit': 1})
        self.assertEqual(len(ret['checks']), 1)
        ret = self.imp.call('spv.services', 'get_status', {'group_id': groups.keys()[0]})
        self.assertEqual(len(ret['checks']), 2)

    def _check_get_status_info(self, data, type, info, id_name):
        """ Generic function to test get_status with additional info. """
        id = data[type].keys()[0]
        self.assertTrue(info in data[type][id].keys())
        infos = data[type][id][info]
        self.assertTrue('key_toto' in infos.keys())
        self.assertEqual(infos['key_toto'][id_name], id)
        self.assertEqual(infos['key_toto']['key'], 'key_toto')
        self.assertEqual(infos['key_toto']['value'], 'value_toto')

    def test_get_status_object_info(self):
        """ ['get_status'] Get object with additional info """
        groups, objects, checks = self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status',
                            {'group_id': groups.keys()[0], 'get_object_infos': True})
        self._check_get_status_info(ret, 'objects', 'object_infos', 'obj_id')

    def test_get_status_check_info(self):
        """ ['get_status'] Get check with additional info """
        groups, objects, checks = self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status',
                            {'group_id': groups.keys()[0], 'get_check_infos': True})
        self._check_get_status_info(ret, 'checks', 'check_infos', 'chk_id')

    def test_get_status_detailed_infos(self):
        """ ['get_status'] Test get_status with 'get_check_infos' set to True """
        groups, objects, checks = self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status',
                            {'group_id': groups.keys()[0], 'get_check_infos': True})
        self.assertEqual(ret['checks'][ret['checks'].keys()[0]]['check_infos'].keys(),
                         ['key_toto'])

    def test_create_status(self):
        """ ['get_status'] Test status creation and retrieval from various
        search criteria. Create a group, then a check and an object, and
        assign them to the previously created group in order to trigger
        status creation. """
        groups = self.imp.call('spv.services', 'create_groups', ['test_group'])
        self.groups.append(groups.keys())
        objects = self.imp.call('spv.services', 'create_objects',
                                [{'address': 'test_address', 'group_id': groups.keys()[0]}])
        self.objects.append(objects.keys())
        checks = self.imp.call('spv.services', 'create_checks',
                               [{'plugin': 'test_plugin', 'plugin_check': 'check1',
                                 'name': 'Name of test check', 'repeat': 10,
                                 'repeat_on_error': 10, 'group_id': groups.keys()[0]}])
        self.checks.append(checks.keys())
        gets = {}
        gets[0] = self.imp.call('spv.services', 'get_status', {'group_id': groups.keys()[0]})
        gets[1] = self.imp.call('spv.services', 'get_status', {'check_id': checks.keys()[0]})
        gets[2] = self.imp.call('spv.services', 'get_status',
                                {'object_address': objects.values()[0]['address']})
        self.assertEqual(gets[0], gets[1])
        self.assertEqual(gets[1], gets[2])

    def test_update_check(self):
        """ ['get_status'] Reschedule a batch of/one status_id and check them/it. """
        self._get_status_tests_create_data()
        ret = self.imp.call('spv.services', 'get_status', {'status': ['FINISHED', 'ERROR']})
        id1 = ret['status'][ret['status'].keys()[0]]['status_id']
        id2 = ret['status'][ret['status'].keys()[1]]['status_id']
        id3 = ret['status'][ret['status'].keys()[2]]['status_id']
        ret = self.imp.call('spv.services', 'get_status', {'status_id': id1})
        id_first_check = ret['status'][id1]['next_check']
        self.imp.call('spv.services', 'reschedule_check', [id1, id2, id3])
        ret = self.imp.call('spv.services', 'get_status', {'status_id': id1})
        id_second_check = ret['status'][id1]['next_check']
        self.imp.call('spv.services', 'reschedule_check', (id3, id2, id1))
        ret = self.imp.call('spv.services', 'get_status', {'status_id': id1})
        id_third_check = ret['status'][id1]['next_check']
        self.assertTrue(id_first_check < id_second_check and id_second_check < id_third_check)
        ret = self.imp.call('spv.services', 'get_status', {'status_id': id1})
        id_first_check = ret['status'][id1]['next_check']
        self.imp.call('spv.services', 'reschedule_check', id1)
        ret = self.imp.call('spv.services', 'get_status', {'status_id': id1})
        id_second_check = ret['status'][id1]['next_check']
        self.imp.call('spv.services', 'reschedule_check', id1)
        ret = self.imp.call('spv.services', 'get_status', {'status_id': id1})
        id_third_check = ret['status'][id1]['next_check']
        self.assertTrue(id_first_check < id_second_check and id_second_check < id_third_check)
class ImportChooser(QtGui.QDialog, Ui_ImportChooser):

    def __init__(self, parent=None, updater=None, availableStudies=0):
        logging.debug("In ImportChooser::__init__()")
        super(ImportChooser, self).__init__(parent)
        self.setupUi(self)
        self.stopButton.setDisabled(True)
        self.importButton.setDisabled(True)
        self.availableStudiesLabel.setText(self.availableStudiesLabel.text().format(availableStudies))
        self._availableStudies = availableStudies
        if availableStudies == -1:
            self.availableStudiesLabel.setVisible(False)
        self.createActions()
        self.createSignals()
        self._singleShotTime = 500
        self.updateWidgets()
        self._importer = Importer()
        self._updater = updater
        self._warning = False
        self._msg = ""
        self._firstImport = False
        self.rename = Rename(self)
        self.rename.label.setText(
            QtGui.QApplication.translate("ImportChooser", "Serie's Description:", None,
                                         QtGui.QApplication.UnicodeUTF8)
        )

    def createSignals(self):
        logging.debug("In ImportChooser::createSignals()")

    def createActions(self):
        logging.debug("In ImportChooser::createActions()")
        self.connect(self.cancelButton, QtCore.SIGNAL("clicked()"), self.slotActionCancel)
        self.connect(self.stopButton, QtCore.SIGNAL("clicked()"), self.slotActionStop)
        self.connect(self.importButton, QtCore.SIGNAL("clicked()"), self.slotActionImport)
        self.connect(self.addButton, QtCore.SIGNAL("clicked()"), self.slotActionAdd)
        self.connect(self.tableView, QtCore.SIGNAL("clicked(QModelIndex)"),
                     self.slotActionItemClicked)
        self.connect(self.tableView, QtCore.SIGNAL("doubleClicked(QModelIndex)"),
                     self.slotActionItemDoubleClicked)

    def slotActionStop(self):
        if self._importer.finished == 0:
            self._msg = QtGui.QApplication.translate("ImportChooser", "Stopping", None,
                                                     QtGui.QApplication.UnicodeUTF8)
            self.stopButton.setDisabled(True)
            self._importer.stop()

    def slotActionItemClicked(self, modelIndex):
        if modelIndex.column() == 1:
            uid = self.itemModel.data(self.itemModel.index(modelIndex.row(), 6))
            if modelIndex.data(QtCore.Qt.CheckStateRole) == QtCore.Qt.Checked:
                self._importer.series[uid]["checked"] = False
                self.itemModel.setData(modelIndex, QtCore.Qt.Unchecked, QtCore.Qt.CheckStateRole)
            else:
                self.itemModel.setData(modelIndex, QtCore.Qt.Checked, QtCore.Qt.CheckStateRole)
                self._importer.series[uid]["checked"] = True

    def slotActionItemDoubleClicked(self, modelIndex):
        self.rename.renameSerie(
            self._importer.series[self.itemModel.data(self.itemModel.index(modelIndex.row(), 6))],
            self.fillTable
        )

    def slotActionCancel(self):
        self._msg = QtGui.QApplication.translate("ImportChooser", "Cancelling", None,
                                                 QtGui.QApplication.UnicodeUTF8)
        if self._importer.finished == 0 and self._firstImport:
            self.cancelButton.setDisabled(True)
            self._importer.cancel()
        else:
            self.close()

    def slotActionAdd(self):
        settings = QtCore.QSettings("Moonstone", "Medical")
        lastdir = settings.value("ImportChooser-last-dir")
        caption = QtGui.QApplication.translate(
            "ImportChooser", "Select the directory of DICOMs", None,
            QtGui.QApplication.UnicodeUTF8
        )
        options = QtGui.QFileDialog.ShowDirsOnly
        dialog = QtGui.QFileDialog.getExistingDirectory(self, caption, lastdir, options)
        if dialog:
            # while QtCore.QCoreApplication.hasPendingEvents():
            #     QtCore.QCoreApplication.processEvents()
            self.loadDirectory(dialog)
            settings.setValue("ImportChooser-last-dir", dialog)

    def slotActionImport(self):
        logging.debug("In ImportChooser::slotActionImport()")
        self._firstImport = True
        self._msg = QtGui.QApplication.translate(
            "ImportChooser", "Importing series...", None, QtGui.QApplication.UnicodeUTF8
        )
        series = []
        for i in range(self.itemModel.rowCount()):
            if self.itemModel.data(self.itemModel.index(i, 1),
                                   QtCore.Qt.CheckStateRole) == QtCore.Qt.Checked:
                series.append(self.itemModel.data(self.itemModel.index(i, 6)))
                self._importer.series[self.itemModel.data(self.itemModel.index(i, 6))]["error"] = False
        if len(series) == 0:
            QtGui.QMessageBox.information(
                self,
                QtGui.QApplication.translate("ImportChooser", "Warning", None,
                                             QtGui.QApplication.UnicodeUTF8),
                QtGui.QApplication.translate(
                    "ImportChooser", "There is no series selected!", None,
                    QtGui.QApplication.UnicodeUTF8
                ),
            )
        elif len(series) <= self._availableStudies or self._availableStudies == -1:
            self.cancelButton.setDisabled(False)
            self._importer.makeImport(series)
            self.addButton.setDisabled(True)
            self.importButton.setDisabled(True)
            self.stopButton.setEnabled(True)
            self.recursiveCheck.setDisabled(True)
            self.tableView.setEnabled(False)
            self.progressLabel.show()
            self.updateProgress()
        else:
            QtGui.QMessageBox.critical(
                self,
                QtGui.QApplication.translate("ImportChooser", "Error", None,
                                             QtGui.QApplication.UnicodeUTF8),
                QtGui.QApplication.translate(
                    "ImportChooser",
                    "You do not have sufficient series available. Please uncheck the excess!",
                    None,
                    QtGui.QApplication.UnicodeUTF8,
                ),
            )

    def updateWidgets(self):
        logging.debug("In ImportChooser::updateWidgets()")
        self.itemModel = QtGui.QStandardItemModel()
        self.itemModel.setHorizontalHeaderLabels(
            [
                " ",
                " ",
                QtGui.QApplication.translate("ImportChooser", "Progress", None,
                                             QtGui.QApplication.UnicodeUTF8),
                QtGui.QApplication.translate("ImportChooser", "Patient", None,
                                             QtGui.QApplication.UnicodeUTF8),
                QtGui.QApplication.translate("ImportChooser", "Description", None,
                                             QtGui.QApplication.UnicodeUTF8),
                QtGui.QApplication.translate("ImportChooser", "Images", None,
                                             QtGui.QApplication.UnicodeUTF8),
                QtGui.QApplication.translate("ImportChooser", "UID", None,
                                             QtGui.QApplication.UnicodeUTF8),
            ]
        )
        self.tableView.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.tableView.setModel(self.itemModel)
        self.tableView.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tableView.setItemDelegate(ImportItemDelegate())
        self.tableView.setSortingEnabled(True)
        self.tableView.setColumnWidth(0, 20)
        self.tableView.setColumnWidth(1, 30)
        self.tableView.setColumnWidth(2, 80)
        self.tableView.setColumnWidth(3, 260)
        self.tableView.setColumnWidth(4, 190)
        self.tableView.setColumnWidth(5, 60)
        self.tableView.setColumnHidden(6, True)

    def updateProgress(self):
        logging.debug("In ImportChooser::updateProgress()")
        self._importer.updateSeries()
        if not self._importer.finished:
            self.statusLabel.setText(self._msg)
            QtCore.QTimer.singleShot(self._singleShotTime, self.updateProgress)
        else:
            self.stopButton.setDisabled(True)
            self.cancelButton.setDisabled(False)
            self.statusLabel.setText(
                QtGui.QApplication.translate("ImportChooser", "Complete", None,
                                             QtGui.QApplication.UnicodeUTF8)
            )
            self.progressLabel.hide()
            self.addButton.setDisabled(False)
            self.importButton.setDisabled(False)
            self.recursiveCheck.setDisabled(False)
            self.tableView.setEnabled(True)
            if self._importer.finished == 1 and self._warning:
                QtGui.QMessageBox.warning(
                    self,
                    QtGui.QApplication.translate("ImportChooser", "Warning", None,
                                                 QtGui.QApplication.UnicodeUTF8),
                    QtGui.QApplication.translate(
                        "ImportChooser",
                        "There are series you want to import that already are"
                        " in the database. These series will automatically be"
                        " unchecked; if you want to import any of them, check"
                        " it manually.",
                        None,
                        QtGui.QApplication.UnicodeUTF8,
                    ),
                )
            elif self._importer.finished == 2:
                values = self._importer.series.values()
                error = False
                seriesImported = 0
                for serie in values:
                    if serie["error"]:
                        error = True
                    else:
                        if serie["checked"]:
                            seriesImported = seriesImported + 1
                if not error:
                    QtGui.QMessageBox.information(
                        self,
                        QtGui.QApplication.translate("ImportChooser", "Success", None,
                                                     QtGui.QApplication.UnicodeUTF8),
                        QtGui.QApplication.translate(
                            "ImportChooser",
                            "All series were imported with success!",
                            None,
                            QtGui.QApplication.UnicodeUTF8,
                        ),
                    )
                    self.close()
                else:
                    QtGui.QMessageBox.critical(
                        self,
                        QtGui.QApplication.translate("ImportChooser", "Error", None,
                                                     QtGui.QApplication.UnicodeUTF8),
                        QtGui.QApplication.translate(
                            "ImportChooser", "Error found during import!", None,
                            QtGui.QApplication.UnicodeUTF8
                        ),
                    )
                if self._updater:
                    self._updater(seriesImported)
                self.fillTable(self._importer.series)

    def loadDirectory(self, directory):
        logging.debug("In ImportChooser::loadDirectory()")
        self._importer.loadDirectory(directory, self.recursiveCheck.isChecked())
        self._msg = QtGui.QApplication.translate(
            "ImportChooser", "Loading directory...", None, QtGui.QApplication.UnicodeUTF8
        )
        if not self.progressLabel.movie():
            movie = QtGui.QMovie()
            movie.setFileName(":/static/default/ajax-loader.gif")
            self.progressLabel.setMovie(movie)
            QtCore.QObject.connect(movie, QtCore.SIGNAL("frameChanged(int)"),
                                   lambda i: movie.jumpToFrame(i))
            self.progressLabel.movie().start()
        self.addButton.setDisabled(True)
        self.importButton.setDisabled(True)
        if self.recursiveCheck.isChecked():
            self.stopButton.setDisabled(False)
        else:
            self.stopButton.setDisabled(True)
        self.cancelButton.setDisabled(True)
        self.recursiveCheck.setDisabled(True)
        self.tableView.setEnabled(False)
        self.progressLabel.show()
        self.updateProgress()

    def fillTable(self, series=None):
        logging.debug("In ImportChooser::fillTable()")
        if not series:
            series = self._importer.series
        self.itemModel.setRowCount(len(series))
        values = series.values()
        self._warning = False
        for i, serie in enumerate(values):
            if (self._importer.finished == 2 and serie["error"]) or self._importer.finished != 2:
                if serie["checked"]:
                    checked = QtCore.Qt.Checked
                else:
                    checked = QtCore.Qt.Unchecked
                icon = QtGui.QIcon()
                if serie["error"]:
                    icon.addPixmap(
                        QtGui.QPixmap(":/static/default/icon/22x22/dialog-error.png"),
                        QtGui.QIcon.Normal,
                        QtGui.QIcon.Off,
                    )
                    self.itemModel.setData(
                        self.itemModel.index(i, 0),
                        QtGui.QApplication.translate(
                            "ImportChooser",
                            "There was an error importing this series",
                            None,
                            QtGui.QApplication.UnicodeUTF8,
                        ),
                        QtCore.Qt.ToolTipRole,
                    )
                    self.itemModel.setData(self.itemModel.index(i, 0), icon, QtCore.Qt.DecorationRole)
                elif serie["exists"]:
                    self._warning = True
                    icon.addPixmap(
                        QtGui.QPixmap(":/static/default/icon/22x22/dialog-warning.png"),
                        QtGui.QIcon.Normal,
                        QtGui.QIcon.Off,
                    )
                    self.itemModel.setData(
                        self.itemModel.index(i, 0),
                        QtGui.QApplication.translate(
                            "ImportChooser",
                            "This series already exists in the database.\nIf you import it, all data will be lost.\nRename the series to import it separately",
                            None,
                            QtGui.QApplication.UnicodeUTF8,
                        ),
                        QtCore.Qt.ToolTipRole,
                    )
                    self.itemModel.setData(self.itemModel.index(i, 0), icon, QtCore.Qt.DecorationRole)
                else:
                    icon.addPixmap(
                        QtGui.QPixmap(":/static/default/icon/22x22/dialog-ok-apply.png"),
                        QtGui.QIcon.Normal,
                        QtGui.QIcon.Off,
                    )
                    self.itemModel.setData(
                        self.itemModel.index(i, 0),
                        QtGui.QApplication.translate(
                            "ImportChooser", "This series is ready to be imported", None,
                            QtGui.QApplication.UnicodeUTF8
                        ),
                        QtCore.Qt.ToolTipRole,
                    )
                    self.itemModel.setData(self.itemModel.index(i, 0), icon, QtCore.Qt.DecorationRole)
                self.itemModel.setData(self.itemModel.index(i, 1), checked, QtCore.Qt.CheckStateRole)
                self.itemModel.setData(self.itemModel.index(i, 2), serie["progress"])
                self.itemModel.setData(self.itemModel.index(i, 3), serie["patientName"])
                self.itemModel.setData(self.itemModel.index(i, 4), serie["serieDescription"])
                self.itemModel.setData(self.itemModel.index(i, 5), len(serie["files"]))
                self.itemModel.setData(self.itemModel.index(i, 6), serie["uid"])
        self.tableView.repaint()
class ServiceTest(unittest.TestCase):

    def setUp(self):
        """ Initialize test environment. """
        # TODO: clean up spvd database beforehand
        self.imp = Importer()
        self.imp["distant_url"] = "https://192.168.2.81/exporter/"
        self.groups = []
        self.checks = []
        self.objects = []

    def tearDown(self):
        """ Clean up test environment. """
        groups = [grp_id for list_id in self.groups for grp_id in list_id]
        checks = [chk_id for list_id in self.checks for chk_id in list_id]
        objects = [obj_id for list_id in self.objects for obj_id in list_id]
        if groups:
            self.imp.call("spv.services", "delete_groups", groups)
        if checks:
            self.imp.call("spv.services", "delete_checks", checks)
        if objects:
            self.imp.call("spv.services", "delete_objects", objects)

    #
    # TEST FOR GET_STATUS
    #
    def _get_status_tests_create_data(self):
        """ Create an object, a group and a check for the get_status tests. """
        groups = self.imp.call("spv.services", "create_groups", ["test_groups"])
        self.groups.append(groups.keys())
        objects = self.imp.call(
            "spv.services",
            "create_objects",
            [{"address": "test_address", "infos": {"key_toto": "value_toto"}, "group_id": groups.keys()[0]}],
        )
        self.objects.append(objects.keys())
        checks = self.imp.call(
            "spv.services",
            "create_checks",
            [
                {
                    "plugin": "hey",
                    "plugin_check": "salut",
                    "name": "toi",
                    "repeat": 100,
                    "repeat_on_error": 100,
                    "infos": {"key_toto": "value_toto"},
                    "group_id": groups.keys()[0],
                }
            ],
        )
        self.checks.append(checks.keys())
        return [groups, objects, checks]

    def _get_status_test_value(self, data, groups, objects, checks):
        keys = data.keys()
        keys.sort()
        self.assertEqual(keys, ["checks", "groups", "objects", "status"])
        self.assertEqual(data["checks"][checks.keys()[0]]["name"], "toi")
        self.assertEqual(data["groups"][groups.keys()[0]]["name"], "test_groups")
        self.assertEqual(data["objects"][objects.keys()[0]]["address"], "test_address")
        self.assertEqual(data["status"][0]["grp_id"], groups.keys()[0])
        self.assertEqual(data["status"][0]["chk_id"], checks.keys()[0])
        self.assertEqual(data["status"][0]["obj_id"], objects.keys()[0])

    def _check_get_status_info(self, data, type, info, id_name):
        """ Generic function to test get_status with additional info. """
        id = data[type].keys()[0]
        self.assertTrue(info in data[type][id])
        infos = data[type][id][info]
        self.assertTrue("key_toto" in infos)
        self.assertEqual(infos["key_toto"][id_name], id)
        self.assertEqual(infos["key_toto"]["key"], "key_toto")
        self.assertEqual(infos["key_toto"]["value"], "value_toto")

    #
    # DELETION TEST
    #
    def test_delete_groups(self):
        """ Create a group, check it exists, then delete it. """
        groups = self.imp.call("spv.services", "create_groups", ["toto"])
        self.groups.append(groups.keys())
        self.assertEqual(len(self.imp.call("spv.services", "get_groups", {"group_id": groups.keys()[0]})), 1)
        self.imp.call("spv.services", "delete_groups", groups.keys())
        self.assertEqual(len(self.imp.call("spv.services", "get_groups", {"group_id": groups.keys()[0]})), 0)

    def test_delete_checks(self):
        """ Create a check, check it exists, then delete it. """
        checks = self.imp.call(
            "spv.services",
            "create_checks",
            [{"plugin": "a", "plugin_check": "b", "name": "c", "repeat": 100, "repeat_on_error": 100}],
        )
        self.checks.append(checks.keys())
        self.assertEqual(len(self.imp.call("spv.services", "get_plugin_checks", {"check_id": checks.keys()[0]})), 1)
        self.imp.call("spv.services", "delete_checks", checks.keys())
        self.assertEqual(len(self.imp.call("spv.services", "get_plugin_checks", {"check_id": checks.keys()[0]})), 0)

    def test_delete_objects(self):
        """ Create an object, check it exists, then delete it. """
        objects = self.imp.call("spv.services", "create_objects", [{"address": "toto"}])
        self.objects.append(objects.keys())
        self.assertEqual(len(self.imp.call("spv.services", "get_objects", {"obj_id": objects.keys()[0]})), 1)
        self.imp.call("spv.services", "delete_objects", objects.keys())
        self.assertEqual(len(self.imp.call("spv.services", "get_objects", {"obj_id": objects.keys()[0]})), 0)

    #
    # CREATE TEST
    #
    def test_create_group(self):
        """ Test multiple group creation and returned values. """
        # FIXME: group names are currently not unique but should probably be
        # create
        groups = self.imp.call("spv.services", "create_groups", ["toto", "tata"])
        self.groups.append(groups.keys())
        # test returned dict
        self.assertEqual(len(groups), 2)
        for group_id in groups.keys():
            self.assertTrue(isinstance(group_id, int))
            self.assertEqual(len(groups[group_id]), 2)
            for valid_key in ("grp_id", "name"):
                self.assertTrue(valid_key in groups[group_id])
        self.assertFalse("errors" in groups)

    def test_create_checks(self):
        """ Test multiple check creation and returned values. """
        # create
        checks = self.imp.call(
            "spv.services",
            "create_checks",
            [
                {
                    "plugin": "toto",
                    "plugin_check": "test1",
                    "name": "This is test 1",
                    "repeat": 10,
                    "repeat_on_error": 10,
                },
                {
                    "plugin": "toto",
                    "plugin_check": "test2",
                    "name": "This is test 2",
                    "repeat": 10,
                    "repeat_on_error": 10,
                    "infos": {"key1": "Value1", "key2": "Value2"},
                },
            ],
        )
        self.checks.append(checks.keys())
        # test returned dict
        self.assertEqual(len(checks), 2)
        for check_id in checks.keys():
            self.assertTrue(isinstance(check_id, int))
            self.assertEqual(len(checks[check_id]), 4)
            for valid_key in ("chk_id", "plugin", "plugin_check"):
                self.assertTrue(valid_key in checks[check_id])
        self.assertFalse("errors" in checks)

    def test_error_create_checks(self):
        """ Create conflicting checks and test that the failure is reported. """
        # create
        checks = self.imp.call(
            "spv.services",
            "create_checks",
            [
                {"plugin": "toto", "plugin_check": "toto", "name": "toto", "repeat": 10, "repeat_on_error": 10},
                {"plugin": "toto", "plugin_check": "toto", "name": "toto", "repeat": 10, "repeat_on_error": 10},
            ],
        )
        key = [k for k in checks.keys() if isinstance(k, int)]
        self.checks.append(key)
        self.assertTrue("errors" in checks)

    def test_create_objects(self):
        """ Test multiple object creation and returned values. """
        # create
        objects = self.imp.call(
            "spv.services",
            "create_objects",
            [{"address": "toto"}, {"address": "tata", "infos": {"key1": "Value1", "key2": "Value2"}}],
        )
        self.objects.append(objects.keys())
        # test returned dict
        self.assertEqual(len(objects), 2)
        for object_id in objects.keys():
            self.assertTrue(isinstance(object_id, int))
            self.assertEqual(len(objects[object_id]), 2)
            for valid_key in ("obj_id", "address"):
                self.assertTrue(valid_key in objects[object_id])
        self.assertFalse("errors" in objects)

    def test_error_create_objects(self):
        """ Create conflicting objects and test that the failure is reported. """
        # create
        objects = self.imp.call("spv.services", "create_objects", [{"address": "toto"}, {"address": "toto"}])
        key = [k for k in objects.keys() if isinstance(k, int)]
        self.objects.append(key)
        self.assertTrue("errors" in objects)

    # TODO: Set status infos

    #
    # GET TEST
    #
    def _get_test(self, params, data, api_func, test_func):
        """ Generic test used by the test_get_* functions.

        @params: list of [key, param_key] pairs to filter on in the spv API
        @data: dict returned when creating groups/objects/checks
        @api_func: function of the spv API to call
        @test_func: function called to validate the result of api_func
        """
        for param, param_key in params:
            for key in data.keys():
                ret = self.imp.call("spv.services", api_func, {param: data[key][param_key]})
                test_func(ret, data, key)
        # check that the entries we created appear in the global list
        all_entries = self.imp.call("spv.services", api_func)
        result_keys = [key for key in all_entries.keys() if key in data.keys()]
        self.assertEqual(len(result_keys), len(data.keys()))

    def test_get_groups(self):
        """ Create groups and fetch them from spv with different filters. """
        groups = self.imp.call(
            "spv.services", "create_groups", ["test_list_group1", "test_list_group2", "test_list_group3"]
        )
        self.groups.append(groups.keys())

        def test_result(ret, groups, key):
            self.assertEqual(ret[key]["id"], groups[key]["grp_id"])
            self.assertEqual(ret[key]["name"], groups[key]["name"])

        self._get_test([["group_id", "grp_id"], ["group_name", "name"]], groups, "get_groups", test_result)

    def test_get_objects(self):
        """ Create objects and fetch them from spv with different filters. """
        objects = self.imp.call(
            "spv.services",
            "create_objects",
            [{"address": "test/get_objects/1"}, {"address": "test/get_objects/2", "infos": {"key_toto": "toto"}}],
        )
        self.objects.append(objects.keys())

        def test_result(ret, objects, key):
            self.assertEqual(ret[key]["obj_id"], objects[key]["obj_id"])
            self.assertEqual(ret[key]["address"], objects[key]["address"])
            self.assertEqual(ret[key]["creation_date"], objects[key]["creation_date"])

        self._get_test(
            [["obj_id", "obj_id"], ["address", "address"], ["creation_date", "creation_date"]],
            objects,
            "get_objects",
            test_result,
        )
        keys = objects.keys()
        keys.sort()
        ret = self.imp.call("spv.services", "get_objects", {"info_key": "key_toto"})
        self.assertEqual(len(ret), 1)
        test_result(ret, objects, keys[1])
        ret = self.imp.call("spv.services", "get_objects", {"info_value": "toto"})
        self.assertEqual(len(ret), 1)
        test_result(ret, objects, keys[1])
        # fetch multiple objects with one filter
        ret = self.imp.call("spv.services", "get_objects", {"creation_date": objects[keys[0]]["creation_date"]})
        result_keys = [key for key in ret.keys() if key in keys]
        self.assertEqual(len(result_keys), 2)

    def test_get_checks(self):
        """ Create checks and fetch them from spv with different filters. """
        checks = self.imp.call(
            "spv.services",
            "create_checks",
            [
                {
                    "plugin": "toto",
                    "plugin_check": "test1",
                    "name": "This is test 1",
                    "repeat": 10,
                    "repeat_on_error": 10,
                },
                {
                    "plugin": "toto",
                    "plugin_check": "test2",
                    "name": "This is test 2",
                    "repeat": 10,
                    "repeat_on_error": 10,
                    "infos": {"test_key_": "test_val_"},
                },
            ],
        )
        self.checks.append(checks.keys())

        def test_result(ret, checks, key):
            self.assertEqual(ret[key]["chk_id"], checks[key]["chk_id"])
            self.assertEqual(ret[key]["plugin"], checks[key]["plugin"])
            self.assertEqual(ret[key]["plugin_check"], checks[key]["plugin_check"])

        self._get_test(
            [["chk_id", "chk_id"], ["plugin", "plugin"], ["plugin_check", "plugin_check"]],
            checks,
            "get_plugin_checks",
            test_result,
        )
        keys = checks.keys()
        keys.sort()
        ret = self.imp.call("spv.services", "get_plugin_checks", {"info_key": "test_key_"})
        self.assertEqual(len(ret), 1)
        test_result(ret, checks, keys[1])
        ret = self.imp.call("spv.services", "get_plugin_checks", {"info_value": "test_val_"})
        self.assertEqual(len(ret), 1)
        test_result(ret, checks, keys[1])
        # fetch multiple checks with one filter
        ret = self.imp.call("spv.services", "get_plugin_checks", {"plugin_name": "toto"})
        self.assertEqual(len(ret), 2)

    #
    # UPDATE TEST
    #
    def test_update_groups(self):
        """ Create a group and update it. """
        groups = self.imp.call("spv.services", "create_groups", ["test_list_group1"])
        self.groups.append(groups.keys())
        id = groups.keys()[0]
        ret = self.imp.call("spv.services", "update", {"groups": {id: {"grp_id": id, "name": "name_update"}}})
        ret = self.imp.call("spv.services", "get_groups", {"group_id": id})
        self.assertEqual(ret[id]["name"], "name_update")

    def test_update_checks(self):
        """ Create a check and update it. """
        checks = self.imp.call(
            "spv.services",
            "create_checks",
            [{"plugin": "toto", "plugin_check": "test1", "name": "test name", "repeat": 10, "repeat_on_error": 10}],
        )
        self.checks.append(checks.keys())
        id = checks.keys()[0]
        ret = self.imp.call(
            "spv.services",
            "update",
            {
                "checks": {
                    id: {
                        "chk_id": id,
                        "name": "salut",
                        "plugin": "salut",
                        "plugin_check": "salut",
                        "repeat": 21,
                        "repeat_on_error": 2,
                    }
                }
            },
        )
        ret = self.imp.call("spv.services", "get_plugin_checks", {"check_id": id})
        self.assertEqual(ret[id]["name"], "salut")
        self.assertEqual(ret[id]["plugin"], "salut")
        self.assertEqual(ret[id]["plugin_check"], "salut")
        self.assertEqual(ret[id]["repeat"], 21)
        self.assertEqual(ret[id]["repeat_on_error"], 2)

    def test_update_objects(self):
        """ Create an object and update it. """
        objects = self.imp.call("spv.services", "create_objects", [{"address": "test"}])
        self.objects.append(objects.keys())
        id = objects.keys()[0]
        ret = self.imp.call("spv.services", "update", {"objects": {id: {"obj_id": id, "address": "test_update"}}})
        ret = self.imp.call("spv.services", "get_objects", {"obj_id": id})
        self.assertEqual(ret[id]["address"], "test_update")
class Solver:
    def __init__(self, options):
        self.importer = Importer()
        # try to read all the files in the dir
        if options.dir_name is not None:
            self._files = list(self.get_files(options.dir_name))
        else:
            self._files = [options.file_name]  # only one file
        self._algo_name = options.algorithm if (options.algorithm is not None) else 'bfs'
        self.options = options

    def get_algo(self, name):
        """Return the algorithm class matching the given name."""
        name = name.lower()
        if name == 'bfs':
            return BestFirstSearch
        elif name in ['hillclimbing', 'hillclimb']:
            return HillClimbing
        elif name == 'hillclimb-restart':
            return RestartingHillClimb
        elif name in ['simanneal', 'simulated-annealing', 'annealing']:
            return SimulatedAnnealing

    def get_options(self):
        """Pull out the relevant (valid) options."""
        return {
            'alpha': float(self.options.alpha),
            'iterations': int(self.options.iterations),
            'temp': float(self.options.temperature),
            'restarts': int(self.options.restarts),
            'operator': str(self.options.operator),
        }

    def get_files(self, path):
        """Find all solvable files in a dir."""
        # walk the directory we were given rather than a hardcoded one
        for top, dirs, files in os.walk(path):
            for nm in files:
                file_name = os.path.join(top, nm)
                if file_name.endswith('.txt'):
                    yield file_name

    def solve_file(self, file_name):
        """Attempt to solve the file with the given algorithm."""
        tour = self.importer.load(file_name)
        try:
            algo = self.get_algo(self._algo_name)
            algo = algo(tour, options=self.get_options())
        except Exception as e:
            logger.exception('No such algorithm %s' % self._algo_name)
            raise e
        logger.info("Loaded problem '%s' and algo '%s'" % (file_name, algo.NAME))
        path = algo.solve()
        length = tour.get_length(path)
        logger.info('Path found for %s. Cost: %r' % (tour.name, length))
        # print path
        return tour.name, path, length

    def run(self):
        """Run the solver over all provided files."""
        try:
            logger.info('Travelling Salesman Problem')
            results = []
            for file_name in self._files:
                results.append(self.solve_file(file_name))
            for name, path, length in results:
                print 'FILE=%s,' % name
                print 'SIZE=%d,' % length
                print 'PATH=%s' % ','.join(map(str, path))
                # logger.info('%s\t%s' % (file_name.split('/')[1], length))
        except Exception as e:
            logger.info('------------------------------')
            logger.info('Encountered an error. Halting.')
            logger.exception(e)
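
# Solver reads dir_name, file_name, algorithm, alpha, iterations, temperature,
# restarts and operator off its options object. A minimal optparse wiring
# sketch; every flag name and default below is illustrative, not taken from
# the original program.
from optparse import OptionParser

parser = OptionParser()
parser.add_option('--dir', dest='dir_name', default=None)
parser.add_option('--file', dest='file_name', default='problems/example.txt')
parser.add_option('--algo', dest='algorithm', default='bfs')
parser.add_option('--alpha', dest='alpha', default=0.99)
parser.add_option('--iterations', dest='iterations', default=10000)
parser.add_option('--temp', dest='temperature', default=100.0)
parser.add_option('--restarts', dest='restarts', default=10)
parser.add_option('--operator', dest='operator', default='swap')
options, _args = parser.parse_args()
Solver(options).run()
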
            m.update(buf)
            if len(buf) == 0:
                break
        fd.close()
        return m.hexdigest()
    except Exception as inst:
        exception_info("Failed getting MD5 of %s" % path, inst)
        return None


if __name__ == "__main__":
    usage = """usage: %prog PATH [--host DB_HOST]"""
    desc = """Import a directory into the filesystem database."""
    parser = OptionParser(usage=usage, description=desc)
    parser.add_option("--host", metavar="DB_HOST", action="store", dest="db_host",
                      help="specify the host of the filesys database")
    options, args = parser.parse_args()
    if len(args) == 0:
        path = ''
    else:
        path = args[0]
    os.stat_float_times(False)
    model = Model()
    imp = Importer(model)
    imp.run(path, options.db_host)
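
# Only the tail of the checksum helper survives above. A plausible
# reconstruction of the whole function under the conventional chunked-read
# pattern; the name get_md5 and the chunk size are guesses, while
# exception_info comes from the original module.
import hashlib

def get_md5(path, chunk_size=65536):
    # Read the file in fixed-size chunks so large files need not fit in memory.
    try:
        m = hashlib.md5()
        fd = open(path, 'rb')
        while True:
            buf = fd.read(chunk_size)
            m.update(buf)
            if len(buf) == 0:
                break
        fd.close()
        return m.hexdigest()
    except Exception as inst:
        exception_info("Failed getting MD5 of %s" % path, inst)
        return None
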
import json
from time import strptime, mktime
from datetime import datetime
import dateutil.parser
from numpy import mean, sqrt, square, arange
from importer import Importer
from csvimporter import CSVImporter
from model import RMSDelay, RMSDelayEncoder

# time_format = "%Y-%m-%dT%H:%M:%S.%f+0100"

i = Importer()
i.loadRBLList()

print "Will now call Importer.request()..."
try:
    monitors = i.request()
    request_time = datetime.now()
except:
    print "ERROR: Could not issue request to WL!"
    raise

# print json.dumps(message)
# message = json.loads("{\"message\": {\"messageCode\": 1, \"serverTime\": \"2014-11-19T11:34:41.870+0100\", \"value\": \"OK\"}, \"data\": {\"monitors\": [{\"locationStop\": {\"geometry\": {\"type\": \"Point\", \"coordinates\": [16.349198741357, 48.2249564556775]}, \"type\": \"Feature\", \"properties\": {\"name\": \"60201510\", \"title\": \"W\u00e4hringer Stra\u00dfe-Volksoper\", \"municipality\": \"Wien\", \"municipalityId\": 90000, \"coordName\": \"WGS84\", \"attributes\": {\"rbl\": 147}, \"type\": \"stop\"}}, \"lines\": [{\"direction\": \"R\", \"towards\": \"Schottentor U\", \"name\": \"40\", \"richtungsId\": \"1\", \"trafficjam\": false, \"departures\": {\"departure\": [{\"departureTime\": {\"timePlanned\": \"2014-11-19T11:41:00.000+0100\", \"countdown\": 7, \"timeReal\": \"2014-11-19T11:42:21.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T11:48:00.000+0100\", \"countdown\": 13, \"timeReal\": \"2014-11-19T11:48:30.000+0100\"}, \"vehicle\": {\"direction\": \"R\", \"towards\": \"Schottentor U\", \"name\": \"40\", \"linienId\": 140, \"richtungsId\": \"1\", \"trafficjam\": false, \"barrierFree\": true, \"realtimeSupported\": true, \"type\": \"ptTram\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T11:56:00.000+0100\", \"countdown\": 21, \"timeReal\": \"2014-11-19T11:56:00.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:03:00.000+0100\", \"countdown\": 28, \"timeReal\": \"2014-11-19T12:03:30.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:11:00.000+0100\", \"countdown\": 36, \"timeReal\": \"2014-11-19T12:11:00.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:18:00.000+0100\", \"countdown\": 43, \"timeReal\": \"2014-11-19T12:18:30.000+0100\"}, \"vehicle\": {\"direction\": \"R\", \"towards\": \"Schottentor U\", \"name\": \"40\", \"linienId\": 140, \"richtungsId\": \"1\", \"trafficjam\": false, \"barrierFree\": true, \"realtimeSupported\": true, \"type\": \"ptTram\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:26:00.000+0100\", \"countdown\": 51, \"timeReal\": \"2014-11-19T12:26:00.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:33:00.000+0100\", \"countdown\": 58, \"timeReal\": \"2014-11-19T12:33:30.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:41:00.000+0100\", \"countdown\": 66}}]}, \"barrierFree\": false, \"realtimeSupported\": true, \"lineId\": 140, \"type\": \"ptTram\"}]}, {\"locationStop\": {\"geometry\": {\"type\": \"Point\", \"coordinates\": [16.349198741357, 48.2249564556775]}, \"type\": \"Feature\", \"properties\": {\"name\": \"60201510\", \"title\": \"W\u00e4hringer Stra\u00dfe-Volksoper\", \"municipality\": \"Wien\", \"municipalityId\": 90000, \"coordName\": \"WGS84\", \"attributes\": {\"rbl\": 147}, \"type\": \"stop\"}}, \"lines\": [{\"direction\": \"R\", \"towards\": \"Schottentor U\", \"name\": \"41\", \"richtungsId\": \"1\", \"trafficjam\": false, \"departures\": {\"departure\": [{\"departureTime\": {\"timePlanned\": \"2014-11-19T11:37:00.000+0100\", \"countdown\": 2, \"timeReal\": \"2014-11-19T11:37:30.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T11:45:00.000+0100\", \"countdown\": 9, \"timeReal\": \"2014-11-19T11:44:26.000+0100\"}, \"vehicle\": {\"direction\": \"R\", \"towards\": \"Schottentor U\", \"name\": \"41\", \"linienId\": 141, \"richtungsId\": \"1\", \"trafficjam\": false, \"barrierFree\": true, \"realtimeSupported\": true, \"type\": \"ptTram\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T11:52:00.000+0100\", \"countdown\": 17, \"timeReal\": \"2014-11-19T11:52:30.000+0100\"}, \"vehicle\": {\"direction\": \"R\", \"towards\": \"Schottentor U\", \"name\": \"41\", \"linienId\": 141, \"richtungsId\": \"1\", \"trafficjam\": false, \"barrierFree\": true, \"realtimeSupported\": true, \"type\": \"ptTram\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:00:00.000+0100\", \"countdown\": 25, \"timeReal\": \"2014-11-19T12:00:00.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:07:00.000+0100\", \"countdown\": 32, \"timeReal\": \"2014-11-19T12:07:30.000+0100\"}, \"vehicle\": {\"direction\": \"R\", \"towards\": \"Schottentor U\", \"name\": \"41\", \"linienId\": 141, \"richtungsId\": \"1\", \"trafficjam\": false, \"barrierFree\": true, \"realtimeSupported\": true, \"type\": \"ptTram\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:15:00.000+0100\", \"countdown\": 40, \"timeReal\": \"2014-11-19T12:15:00.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:22:00.000+0100\", \"countdown\": 47, \"timeReal\": \"2014-11-19T12:22:30.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:30:00.000+0100\", \"countdown\": 55, \"timeReal\": \"2014-11-19T12:30:00.000+0100\"}}, {\"departureTime\": {\"timePlanned\": \"2014-11-19T12:37:00.000+0100\", \"countdown\": 62}}]}, \"barrierFree\": false, \"realtimeSupported\": true, \"lineId\": 141, \"type\": \"ptTram\"}]}]}}")

print "Numbers of monitors received: ", len(monitors)
delays = list()
def import_popular_movies(*args, **kwargs):
    i = Importer(db, im)
    i.get_popular_movies(schedule_movie, *args, **kwargs)
class TestImporter:
    def setup(self):
        self.engine = create_engine('sqlite://')
        self.Session = sessionmaker()
        self.Session.configure(bind=self.engine)
        Base.metadata.create_all(self.engine)
        self.importer = Importer()
        self.importer.dataAccess.Session = self.Session

        self.entry1 = Expando()
        self.entry1.title = u'entry 1 title é'
        self.entry1.description = u'entry 1 description é'
        self.entry1.link = u'http://entry1.linké'

        self.entry2 = Expando()
        self.entry2.title = u'entry 2 title é'
        self.entry2.description = u'entry 2 description é'
        self.entry2.link = self.entry1.link

        self.entry3 = Expando()
        self.entry3.title = u'entry 3 title é'
        self.entry3.description = u'entry 3 description é'
        self.entry3.link = u'http://entry3.linké'

        self.entry4 = Expando()
        self.entry4.title = u'entry 4 title é'
        self.entry4.description = u'entry 4 description é'
        self.entry4.link = u'http://entry4.linké'

        self.entry5 = Expando()
        self.entry5.title = u'entry 4 title é'
        self.entry5.link = u'http://entry4.linké'

        self.feed1 = u'http://feed1url.urlé'
        self.feed2 = u'http://feed2url.urlé'
        self.feed1contents = Expando()
        self.feed1contents.entries = [self.entry1, self.entry2]
        self.feed2contents = Expando()
        self.feed2contents.entries = [self.entry3, self.entry4]

    def teardown(self):
        Base.metadata.drop_all(self.engine)

    def test_process_newSubmission_isSaved(self):
        self.importer.process(self.entry1, self.feed1)
        saved = self.Session().query(Submission).first()
        assert saved.title == self.entry1.title
        assert saved.description == self.entry1.description
        assert saved.url == self.entry1.link
        assert saved.feed == self.feed1
        assert saved.created is not None

    def test_process_submissionWithExistingLink_isNotSaved(self):
        self.importer.process(self.entry1, self.feed1)
        self.importer.process(self.entry2, self.feed2)
        assert self.Session().query(Submission).count() == 1

    def test_process_submissionWithNoDescription_isSaved(self):
        self.importer.process(self.entry5, self.feed1)
        assert self.Session().query(Submission).count() == 1

    def test_importFeeds_entriesAreSaved(self):
        self.importer.parse_feed = Mock(
            side_effect=[self.feed1contents, self.feed2contents])
        self.importer.process = MagicMock()
        self.importer.import_feeds([self.feed1, self.feed2])
        self.importer.process.assert_has_calls(
            [call(self.entry1, self.feed1), call(self.entry2, self.feed1),
             call(self.entry3, self.feed2), call(self.entry4, self.feed2)])
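
# Expando is used above as a plain attribute bag standing in for parsed feed
# entries; if no library provides it, a minimal stand-in like this is enough
# for these tests.
class Expando(object):
    """Empty object that accepts arbitrary attribute assignment."""
    pass
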
class AppTestCase(TestCase):
    def setUp(self):
        super(AppTestCase, self).setUp()
        self.server_parameters = ServerParameters(
            io_loop=self.io_loop,
            host='localhost',
            port=8888,
            config_path='./tests/fixtures/test-valid.conf',
            log_level='INFO',
            debug=True,
        )
        self.config = Config()
        self.importer = Importer()
        self.importer.load(
            dict(key='service_classes', module_names=self.config.SERVICES, class_name='Service'),
        )
        # load all modules here
        services = []
        for service_class in self.importer.service_classes:
            srv = service_class()
            srv.name = service_class.__module__
            services.append(srv)
        self.importer.services = services
        self.context = Context(self.server_parameters, self.config, self.importer)

    @gen_test
    async def test_can_create_app(self):
        app = await LevelApp.create(self.context)
        expect(app).not_to_be_null()
        expect(app.context).to_equal(self.context)

    @gen_test
    async def test_can_initialize_services(self):
        class TestService(BaseService):
            def __init__(self, *args, **kw):
                super(TestService, self).__init__(*args, **kw)
                self.initialized = False
                self.name = 'TestService'
                self.app = None

            async def initialize_service(self, app):
                await super(TestService, self).initialize_service(app)
                self.initialized = True

        s = TestService()
        self.context.importer.services = [s]
        app = LevelApp(self.context, [])
        expect(app).not_to_be_null()
        await app.initialize()
        expect(s.initialized).to_be_true()
        expect(s.app).to_equal(app)

    @gen_test
    async def test_can_get_handlers_from_services(self):
        class TestService(BaseService):
            def __init__(self):
                self.initialized = False
                self.name = 'TestService'
                self.app = None

            async def initialize_service(self, app):
                await super(TestService, self).initialize_service(app)
                self.initialized = True

            async def get_handlers(self):
                return (
                    ('/test', None),
                )

        s = TestService()
        self.context.importer.services = [s]
        app = LevelApp(self.context, [])
        expect(app).not_to_be_null()
        handlers = await app.get_handlers()
        expect(handlers).to_length(2)
        expect(handlers[1]).to_be_like(
            ('/test', None),
        )