def test_basic(self, mock_publish):
    """End-to-end run: OSV commit pushed, update task published, ownership moved."""
    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir).run()

    # The importer should have pushed a commit authored by OSV.
    repo = pygit2.Repository(self.remote_source_repo_path)
    head_commit = repo.head.peel()
    self.assertEqual('*****@*****.**', head_commit.author.email)
    self.assertEqual('OSV', head_commit.author.name)
    self.assertEqual('Import from OSS-Fuzz', head_commit.message)
    patch = repo.diff(head_commit.parents[0], head_commit)
    self.assertEqual(
        self._load_test_data('expected_patch_basic.diff'), patch.patch)

    # An update task is published for the new (empty) file.
    mock_publish.assert_has_calls([
        mock.call(
            'projects/oss-vdb/topics/tasks',
            data=b'',
            original_sha256=('e3b0c44298fc1c149afbf4c8996fb924'
                             '27ae41e4649b934ca495991b7852b855'),
            path='2021-111.yaml',
            source='oss-fuzz',
            type='update')
    ])

    # Source of truth for the bug moves to the source repository.
    bug = osv.Bug.get_by_id('2017-134')
    self.assertEqual(osv.SourceOfTruth.SOURCE_REPO, bug.source_of_truth)
def test_delete(self, mock_publish):
    """Deleting a vulnerability file publishes a deletion task."""
    # Commit the file first so there is a synced baseline to delete from.
    self.mock_repo.add_file('2021-111.yaml', '')
    self.mock_repo.commit('User', 'user@email')
    repo = pygit2.Repository(self.remote_source_repo_path)
    baseline_commit = repo.head.peel()
    self.source_repo.last_synced_hash = str(baseline_commit.id)
    self.source_repo.put()

    # Now remove the file and run the importer.
    self.mock_repo.delete_file('2021-111.yaml')
    self.mock_repo.commit('User', 'user@email')
    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()

    mock_publish.assert_has_calls([
        mock.call(
            'projects/oss-vdb/topics/tasks',
            data=b'',
            deleted='true',
            original_sha256='',
            path='2021-111.yaml',
            source='oss-fuzz',
            type='update')
    ])
def __init__(self):
    """Wire up the shared session plus importer, controller and CLI parser."""
    self.date_format = '%Y-%m-%d'
    session = Session()
    self.curr_session = session
    self.curr_importer = importer.Importer(session)
    # Make sure the backing database exists before anything queries it.
    self.curr_importer.db_init()
    self.curr_controller = controller.Controller(session)
    self.parser = argparse.ArgumentParser()
def test_ecosystem_bridge(self, mock_publish):
    """A PyPI source publishes both a task and an ecosystem bridge message."""
    # Replace the default source repository with a PyPI one.
    self.source_repo.key.delete()
    self.source_repo = osv.SourceRepository(
        type=osv.SourceRepositoryType.GIT,
        id='PyPI',
        name='PyPI',
        repo_url='file://' + self.remote_source_repo_path,
        repo_username='')
    self.source_repo.put()

    self.mock_repo.add_file(
        'PYSEC-2021-1.yaml', 'id: PYSEC-2021-1\n'
        'package:\n'
        ' name: pkg\n'
        ' ecosystem: PyPI\n')
    self.mock_repo.commit('User', 'user@email')

    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()

    # One update task, plus a JSON message on the pypi bridge topic.
    mock_publish.assert_has_calls([
        mock.call(
            'projects/oss-vdb/topics/tasks',
            data=b'',
            type='update',
            source='PyPI',
            path='PYSEC-2021-1.yaml',
            original_sha256=('cb700a08fa26d2e494670b9edd49d66e'
                             '957ef4c9a1f7a4c4975c888e6d9da4f7'),
            deleted='false'),
        mock.call(
            'projects/oss-vdb/topics/pypi-bridge',
            data=b'{"id": "PYSEC-2021-1", "package": '
            b'{"name": "pkg", "ecosystem": "PyPI"}}')
    ])
def __init__(self, parent):
    """Build the main stacked layout and register all child pages.

    Args:
        parent: owning widget, kept so child pages can reach back to it.
    """
    Gtk.Box.__init__(self, orientation=Gtk.Orientation.VERTICAL, spacing=6)
    self.parent = parent
    # Load a translation catalog for the detected locale; fall back to the
    # identity function when no catalog file is found.
    # NOTE(review): only FileNotFoundError is caught here;
    # locale.getdefaultlocale() returning None would surface differently —
    # confirm against the supported platforms.
    try:
        current_locale, encoding = locale.getdefaultlocale()
        locale_path = os.path.join(
            os.path.abspath(os.path.dirname(__file__)), 'locale')
        translate = gettext.translation(cn.App.application_shortname,
                                        locale_path, [current_locale])
        _ = translate.gettext
    except FileNotFoundError:
        _ = str
    # Stack of pages with an animated slide transition (1 second).
    self.stack = Gtk.Stack()
    self.stack.set_transition_type(
        Gtk.StackTransitionType.SLIDE_LEFT_RIGHT)
    self.stack.set_transition_duration(1000)
    # Child pages, each given a reference to this container.
    self.welcome = wl.Welcome(self)
    self.create = cr.Create(self)
    self.importer = im.Importer(self)
    self.detail = dt.Detail(self)
    self.list_all = ls.List(self)
    # Register each page under a stable name with a translated title.
    self.stack.add_titled(self.welcome, "welcome", _('Welcome'))
    self.stack.add_titled(self.create, "create", _('Create'))
    self.stack.add_titled(self.importer, "importer", _('Importer'))
    self.stack.add_titled(self.detail, "detail", _('Detail'))
    self.stack.add_titled(self.list_all, "list", _('List'))
    self.pack_start(self.stack, True, True, 0)
def test_ignore(self, mock_publish):
    """Files matching the ignore pattern must not trigger any tasks."""
    self.mock_repo.add_file('2021-111IGNORE.yaml', '')
    self.mock_repo.commit('User', 'user@email', 'message.')
    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()
    mock_publish.assert_not_called()
def test_no_updates(self, mock_publish):
    """A commit carrying the OSV-NO-UPDATE marker must publish nothing."""
    self.mock_repo.add_file('2021-111.yaml', '')
    self.mock_repo.commit('User', 'user@email', 'message. OSV-NO-UPDATE')
    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()
    mock_publish.assert_not_called()
def __init__(self, rows, cols):
    """Set up an empty rows x cols grid backed by the vocabulary dictionary."""
    self.rows = rows
    self.cols = cols
    # Searchable dictionary loaded from the bundled vocabulary file.
    self.my_dict = importer.Importer(
        './dictionary_files/vocab.json').export_searchable_dict()
    # Per-cell fill status and the matrix of placed letters/words.
    self.status = [['empty'] * cols for _ in range(rows)]
    self.words_matrix = [[None] * cols for _ in range(rows)]
    self.horizontal_words = []
    self.vertical_words = []
def __call__(self, request, *args, **kw):
    """Dispatch the wrapped plugin method locally or through a remote proxy.

    If the request settings define a ``proxy-<plugin>-distant_url`` entry,
    the call is forwarded to that remote endpoint (optionally with the
    configured SSL client cert/key); otherwise the wrapped function runs
    locally with the same arguments.
    """
    self.__exportable__ = True
    # No settings on the request -> nothing to proxy; run locally.
    if not hasattr(request, 'settings'):
        return self.__func__(request, *args, **kw)
    settings = request.settings
    distant_url = settings.get('proxy-%s-distant_url' % self.__plugin__, None)
    # Bug fix: was `distant_url == None`; `is None` is the correct identity
    # test and is robust against objects overriding __eq__.
    if distant_url is None:
        return self.__func__(request, *args, **kw)
    # Forward the call to the configured remote endpoint.
    imp = importer.Importer()
    imp['distant_url'] = distant_url
    imp['ssl_cert'] = settings.get('proxy-%s-ssl_cert' % self.__plugin__, None)
    imp['ssl_key'] = settings.get('proxy-%s-ssl_key' % self.__plugin__, None)
    return imp.call(self.__plugin__, self.__func__.__name__, *args, **kw)
def Init(self):
    """Create and lay out all widgets and wire their callbacks."""
    # Selected source/destination directories (set via the buttons below).
    self.srcdir = ''
    self.destdir = ''
    self.layout = QtGui.QVBoxLayout()
    # Directory picker buttons.
    self.src = QtGui.QPushButton('Source: ')
    self.src.clicked.connect(self.SrcCallback)
    self.layout.addWidget(self.src)
    self.dest = QtGui.QPushButton('Destination: ')
    self.dest.clicked.connect(self.DestCallback)
    self.layout.addWidget(self.dest)
    # Kicks off the import.
    self.process = QtGui.QPushButton('Start Import')
    self.process.clicked.connect(self.ImportCallback)
    self.layout.addWidget(self.process)
    # Dry-run toggle; NOTE(review): how Importer honors this flag is not
    # visible here — confirm in the import callback.
    self.dryrun = QtGui.QCheckBox('Dry Run')
    self.dryrun.setChecked(False)
    self.layout.addWidget(self.dryrun)
    # TODO(Gabe) - Need to add way of stopping processing midway through
    # self.stop = QtGui.QPushButton('Stop Import')
    # self.stop.clicked.connect(self.StopImportCallback)
    # self.layout.addWidget(self.stop)
    # Read-only, non-wrapping log view.
    self.textbox = QtGui.QTextEdit()
    self.textbox.setReadOnly(True)
    self.textbox.setLineWrapMode(QtGui.QTextEdit.NoWrap)
    self.layout.addWidget(self.textbox)
    self.status_label = QtGui.QLabel('')
    self.layout.addWidget(self.status_label)
    # Percentage progress bar (0-100).
    self.progress_bar = QtGui.QProgressBar()
    self.progress_bar.setMaximum(100)
    self.progress_bar.setValue(0)
    self.layout.addWidget(self.progress_bar)
    self.setLayout(self.layout)
    # TODO(Gabe) - Add combobox for selecting between move files and copy files
    # The importer emits 'log_entry' signals which are routed to self.Log.
    self.importer = importer.Importer()
    self.connect(self.importer, QtCore.SIGNAL('log_entry'), self.Log)
    # Worker thread and file list are populated later by the callbacks.
    self.procthread = None
    self.files = []
def test_bucket(self, mock_publish):
    """A bucket-backed source publishes update tasks for its JSON records."""
    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()
    expected_sha = ('b2b37bde8f39256239419078de672ce7'
                    'a408735f1c2502ee8fa08745096e1971')
    mock_publish.assert_has_calls([
        mock.call(
            'projects/oss-vdb/topics/tasks',
            data=b'',
            type='update',
            source='bucket',
            path='a/b/test.json',
            original_sha256=expected_sha,
            deleted='false'),
    ])
def main():
    """Compile the module at argv[1] and write LLVM IR to argv[2]."""
    # Front end: load, then run the checking passes.
    modules = importer.Importer(parse).load_all(sys.argv[1])
    modules = map_modules(sort_checker.sort_check, modules)
    modules = apply_module_pass(kind_checker.kind_check, modules)
    modules = apply_module_pass(type_checker.type_check, modules)
    # Middle end: restructure, lower control flow, monomorphize.
    things = reorganize.reorganize(modules)
    things = lower_control_flow.lower_control_flow(things)
    instances = monomorphize.monomorphize(things)
    cg_decls = code_generator.generate(things, instances)
    # Back end: emit the prelude followed by all declarations.
    with open(sys.argv[2], 'w') as fd:
        writer = output.LLVMWriter(fd)
        writer.writeout_prelude()
        writer.writeout_decls(cg_decls)
def test_basic(self):
    """The importer pushes an OSV-authored commit matching the expected patch."""
    importer.Importer('fake_public_key', 'fake_private_key').run()

    repo = pygit2.Repository(self.remote_source_repo_path)
    head_commit = repo.head.peel()
    self.assertEqual('*****@*****.**', head_commit.author.email)
    self.assertEqual('OSV', head_commit.author.name)
    self.assertEqual('Import from OSS-Fuzz', head_commit.message)
    patch = repo.diff(head_commit.parents[0], head_commit)
    self.assertEqual(
        self._load_test_data('expected_patch_basic.diff'), patch.patch)
def main():
    """Build the lookup database from a Wiktionary dump.

    Pipeline: import MediaWiki templates, filter and clean the dump entries,
    expand templates via the PHP expander service, import the result into
    the lookup DB, then build the soundex index and canonical redirects.
    """
    logging.basicConfig(level=logging.INFO)
    with MySQLdb.connect(**LOOKUP_DB) as lookup_cursor:
        with MySQLdb.connect(**WIKI_DB) as wiki_cursor:
            # Import templates from the dump to the MediaWiki DB.
            importtemplates.ImportTemplates(DUMP_PATH, wiki_cursor)
            # Extract entries with English definitions and simplify their
            # format. (Renamed from `filter`, which shadowed the builtin.)
            entry_filter = filters.english_entry.EnglishEntryFilter()
            total, accepted = entry_filter.parseXmlDump(SRC_DUMP_PATH, TEMP1)
            # Bug fix: the ratio was computed as total/accepted — inverted,
            # not scaled to a percentage, and crashing when accepted == 0.
            # Report accepted/total * 100 and guard the empty-dump case.
            percent = 100.0 * accepted / total if total else 0.0
            logging.info('Retained %d entries from %d (%.2f%%).', accepted,
                         total, percent)
            # Discard non-English definitions from the entries.
            definition_filter = filters.english_def.EnglishDefinitionFilter()
            definition_filter.parseXmlDump(TEMP1, TEMP2)
            logging.info('Definition filter successful.')
            # Discard translation blocks from the entries.
            translation_filter = filters.strip_translations.TraslationStrippingFilter()
            translation_filter.parseXmlDump(TEMP2, TEMP3)
            logging.info('Translation stripping filter successful.')
            # Evaluate the MediaWiki templates in the entries.
            # This step takes approximately 4.2 eternities. See template_expander.php.
            src = urllib2.quote(os.path.abspath(TEMP3), safe='')
            dest = urllib2.quote(os.path.abspath(TEMP4), safe='')
            expander_url = TEMPLATE_EXPANDER_URL % (src, dest)
            expander_connection = urllib2.urlopen(expander_url)
            for line in expander_connection:
                if line:
                    logging.info(line)
            # Import pages from the MediaWiki dump to the lookup database.
            # The pages are cleaned up and converted to JSON during the import.
            importer.Importer().run(TEMP4, lookup_cursor)
            logging.info('Import successful.')
            # Create the soundex index for spelling corrections/suggestions.
            lookup_cursor.execute('UPDATE lookup SET sdx = SOUNDEX(name)')
            logging.info('Soundex generation successful.')
            # Redirect conjugated terms back to their parent canonical lemmas.
            # E.g. enjoyed -> enjoy, cats -> cat, better -> good.
            canonizer.Canonize(lookup_cursor)
            logging.info('Canonization successful.')
def test_basic(self, mock_publish):
    """Full import run: commit, task publish, datastore and bucket uploads."""
    self.mock_repo.add_file('2021-111.yaml', '')
    self.mock_repo.commit('User', 'user@email')

    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()

    # The importer pushes a commit authored by OSV to the source repo.
    repo = pygit2.Repository(self.remote_source_repo_path)
    head_commit = repo.head.peel()
    self.assertEqual('*****@*****.**', head_commit.author.email)
    self.assertEqual('OSV', head_commit.author.name)
    self.assertEqual('Import from OSS-Fuzz', head_commit.message)
    patch = repo.diff(head_commit.parents[0], head_commit)
    self.assertEqual(
        self._load_test_data('expected_patch_basic.diff'), patch.patch)

    # Update task published for the new (empty) file.
    mock_publish.assert_has_calls([
        mock.call(
            'projects/oss-vdb/topics/tasks',
            data=b'',
            deleted='false',
            original_sha256=('e3b0c44298fc1c149afbf4c8996fb924'
                             '27ae41e4649b934ca495991b7852b855'),
            path='2021-111.yaml',
            source='oss-fuzz',
            type='update')
    ])

    # Source of truth moves to the repo and the sync marker advances.
    bug = osv.Bug.get_by_id('2017-134')
    self.assertEqual(osv.SourceOfTruth.SOURCE_REPO, bug.source_of_truth)
    source_repo = osv.SourceRepository.get_by_id('oss-fuzz')
    self.assertEqual(str(head_commit.id), source_repo.last_synced_hash)

    # Testcase and issue blobs are mirrored into the bucket.
    self.mock_storage_client().get_bucket.assert_called_with('bucket')
    bucket = self.mock_storage_client().get_bucket('bucket')
    expected_upload_contents = self._load_test_data('expected.json')
    bucket.blob.assert_has_calls([
        mock.call('testcase/5417710252982272.json'),
        mock.call().upload_from_string(expected_upload_contents),
        mock.call('issue/1064.json'),
        mock.call().upload_from_string(expected_upload_contents),
    ])
def test_basic(self, mock_request_analysis):
    """An import run requests analysis for the newly imported file."""
    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir).run()

    repo = pygit2.Repository(self.remote_source_repo_path)
    head_commit = repo.head.peel()
    self.assertEqual('*****@*****.**', head_commit.author.email)
    self.assertEqual('OSV', head_commit.author.name)
    self.assertEqual('Import from OSS-Fuzz', head_commit.message)
    patch = repo.diff(head_commit.parents[0], head_commit)
    self.assertEqual(
        self._load_test_data('expected_patch_basic.diff'), patch.patch)

    mock_request_analysis.assert_has_calls(
        [mock.call(mock.ANY, '2021-111.yaml')])
def test_scheduled_updates_already_done(self, mock_publish):
    """No tasks are published when today's scheduled update already ran."""
    # Mark the source repository as already updated today.
    source_repo = osv.SourceRepository.get_by_id('oss-fuzz')
    source_repo.last_update_date = importer.utcnow().date()
    source_repo.put()

    self.mock_repo.add_file('proj/OSV-2021-1337.yaml', '')
    self.mock_repo.commit('OSV', '*****@*****.**')
    osv.Bug(
        id='2021-1337',
        project='proj',
        fixed='',
        status=1,
        source_id='oss-fuzz:123',
        source_of_truth=osv.SourceOfTruth.SOURCE_REPO,
        timestamp=datetime.datetime(2020, 1, 1, 0, 0, 0, 0)).put()

    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()
    self.assertEqual(0, mock_publish.call_count)
def test_basic(self, mock_publish):
    """Full import run against a pre-seeded internally-owned bug."""
    # Seed the datastore with an internally-owned bug the import takes over.
    osv.Bug(
        db_id='OSV-2017-134',
        affected=['FILE5_29', 'FILE5_30'],
        affected_fuzzy=['5-29', '5-30'],
        affected_ranges=[{
            'type': 'GIT',
            'repo_url': 'https://github.com/file/file.git',
            'introduced': '17ee4cf670c363de8d2ea4a4897d7a699837873f',
            'fixed': '19ccebafb7663c422c714e0c67fa4775abf91c43',
        }],
        details=(
            'OSS-Fuzz report: '
            'https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=1064\n\n'
            'Crash type: Heap-buffer-overflow READ 1\n'
            'Crash state:\ncdf_file_property_info\ncdf_file_summary_info\n'
            'cdf_check_summary_info\n'),
        ecosystem='OSS-Fuzz',
        ecosystem_specific={
            'severity': 'MEDIUM',
        },
        fixed='19ccebafb7663c422c714e0c67fa4775abf91c43',
        has_affected=True,
        issue_id='1064',
        project='file',
        public=True,
        reference_url_types={
            'https://bugs.chromium.org/p/oss-fuzz/issues/detail?id=1064':
                'REPORT'
        },
        regressed='17ee4cf670c363de8d2ea4a4897d7a699837873f',
        search_indices=['file', '2017-134', '2017', '134'],
        source_id='oss-fuzz:5417710252982272',
        source_of_truth=osv.SourceOfTruth.INTERNAL,
        status=1,
        summary='Heap-buffer-overflow in cdf_file_property_info',
        timestamp=datetime.datetime(2021, 1, 15, 0, 0, 24, 559102)).put()

    self.mock_repo.add_file('2021-111.yaml', '')
    self.mock_repo.commit('User', 'user@email')

    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()

    # Commit pushed by the importer, authored by OSV.
    repo = pygit2.Repository(self.remote_source_repo_path)
    head_commit = repo.head.peel()
    self.assertEqual('*****@*****.**', head_commit.author.email)
    self.assertEqual('OSV', head_commit.author.name)
    self.assertEqual('Import from OSS-Fuzz', head_commit.message)
    patch = repo.diff(head_commit.parents[0], head_commit)
    self.assertEqual(
        self._load_test_data('expected_patch_basic.diff'), patch.patch)

    # Update task published for the new (empty) file.
    mock_publish.assert_has_calls([
        mock.call(
            'projects/oss-vdb/topics/tasks',
            data=b'',
            deleted='false',
            original_sha256=('e3b0c44298fc1c149afbf4c8996fb924'
                             '27ae41e4649b934ca495991b7852b855'),
            path='2021-111.yaml',
            source='oss-fuzz',
            type='update')
    ])

    # Source of truth flips to the repo and the sync marker advances.
    bug = osv.Bug.get_by_id('OSV-2017-134')
    self.assertEqual(osv.SourceOfTruth.SOURCE_REPO, bug.source_of_truth)
    source_repo = osv.SourceRepository.get_by_id('oss-fuzz')
    self.assertEqual(str(head_commit.id), source_repo.last_synced_hash)

    # Testcase and issue blobs are uploaded to the bucket.
    self.mock_storage_client().get_bucket.assert_called_with('bucket')
    bucket = self.mock_storage_client().get_bucket('bucket')
    expected_upload_contents = self._load_test_data('expected.json')
    bucket.blob.assert_has_calls([
        mock.call('testcase/5417710252982272.json'),
        mock.call().upload_from_string(expected_upload_contents),
        mock.call('issue/1064.json'),
        mock.call().upload_from_string(expected_upload_contents),
    ])
# draw_shape_test.py
import rhinoscriptsyntax as rs
import importer
import shape

importer_drone = importer.Importer()


def make_sierpinski_init():
    """Build the seed shape (line segments + labeled points) for the test."""
    name = 'sierpinski-init'
    # Endpoint pairs of the initial frame's line segments.
    line_specs = [
        ((0, 0, 0), (0, 0, 24)), ((0, 0, 0), (0, 18, 0)),
        ((0, 0, 0), (18, 0, 0)), ((0, 0, 12), (0, 9, 0)),
        ((0, 0, 12), (0, 9, 12)), ((0, 0, 12), (9, 0, 0)),
        ((0, 0, 12), (9, 0, 12)), ((0, 0, 24), (0, 18, 0)),
        ((0, 0, 24), (18, 0, 0)), ((0, 9, 0), (0, 9, 12)),
        ((0, 9, 0), (9, 0, 0)), ((0, 9, 0), (9, 9, 0)),
        ((0, 9, 12), (9, 0, 12)), ((0, 9, 12), (9, 9, 0)),
        ((0, 18, 0), (18, 0, 0)), ((9, 0, 0), (9, 0, 12)),
        ((9, 0, 0), (9, 9, 0)), ((9, 0, 12), (9, 9, 0)),
    ]
    # Labeled points, all tagged 'a'.
    lpoint_specs = [
        ((3, 12, 4), 'a'), ((3, 3, 16), 'a'), ((3, 3, 4), 'a'),
        ((6, 6, 8), 'a'), ((12, 3, 4), 'a'),
    ]
    return shape.Shape(name, line_specs, lpoint_specs)


sierpinski_init = make_sierpinski_init()
importer_drone._draw_shape(sierpinski_init)
def setUp(self):
    """Create a fresh Importer instance for each test case."""
    self.i = importer.Importer()
fileKdeMorfo = "./" + outputDir + "/" + timestamp + "-kdeMorfo.pdf" fileFreq = "./" + outputDir + "/" + timestamp + "-freqFeatures.pdf" fileLog = "./" + outputDir + "/" + timestamp + "-executionLog.txt" fileStatistics = "./" + outputDir + "/" + timestamp + "-statistics.txt" fileParameters = "./" + outputDir + "/" + timestamp + "-parameters.txt" sys.stderr = open(fileErr, 'w') randomState = np.random.RandomState(0) statisticLogger = statisticLogger.StatisticLogger() parameterLogger = parameterLogger.ParameterLogger() oldMetricsLogger = metricsLogger.MetricsLogger() metricsLogger = metricsLogger.MetricsLogger() plotter = plotter.Plotter() importer = importer.Importer(statisticLogger, parameterLogger, plotter) svmSede = svm.Svm(randomState) svmMorfo = svm.Svm(randomState) crossValidatorSede = crossValidator.CrossValidator(randomState, numFolds) crossValidatorMorfo = crossValidator.CrossValidator(randomState, numFolds) parameterLogger.numFolds = numFolds parameterLogger.printParameters(fileParameters) with logger.Logger(fileLog) as log: log.write("Start execution " + timestamp) log.write("Importing csv") importer.importCsv(fileIsto, fileNeop) #importer.cutDataset(500) log.write("Filter classes")
help='your .inp file', default=settings.s.start_model)
# NOTE(review): the line above is the tail of a parser.add_argument(...)
# call whose opening lines are earlier in this file.
args = parser.parse_args()

# Configure global logging level
logging.basicConfig(level=settings.s.logging_level)

# Show main window with text logging handler
f = gui.window.Factory()
f.run_master(path.p.main_xml)

# Main block: build the model, tree, job and importer, then wire actions.
m = model.Model()  # generate FEM model
t = tree.Tree(f, m)  # create treeView items based on KOM
j = model.job.Job(f, m)  # create job object with file logging handler
i = importer.Importer(f, m, t, j)  # prepare to import model
actions.actions(f, m, t, j, i)  # window actions

# Import default model
if len(args.inp):
    start_model = os.path.join(path.p.app_home_dir, args.inp)
    i.import_file(start_model)
# Or start empty
else:
    logging.warning('No default start model specified.')
    m.KOM = model.kom.KOM()
    t.generateTreeView(m)

# start_time is set at the top of this file.
logging.info('Started in {:.1f} seconds.\n'.format(time.perf_counter() - start_time))
def test_scheduled_updates(self, mock_publish):
    """Stale repo-owned and internal bugs get tasks; fresh bugs are skipped."""
    self.mock_repo.add_file('proj/OSV-2021-1337.yaml', _EMPTY_VULNERABILITY)
    self.mock_repo.add_file('proj/OSV-2021-1339.yaml', _EMPTY_VULNERABILITY)
    self.mock_repo.add_file('OSV-2021-1338.yaml', _EMPTY_VULNERABILITY)
    self.mock_repo.commit('OSV', '*****@*****.**')

    # A second source repository that owns OSV-2021-1338.
    osv.SourceRepository(
        type=osv.SourceRepositoryType.GIT,
        id='source',
        name='source',
        repo_url='file://' + self.remote_source_repo_path,
        repo_username='').put()

    # Stale repo-owned bug: expect an update task.
    osv.Bug(
        db_id='OSV-2021-1337',
        affected_packages=[
            osv.AffectedPackage(
                package=osv.Package(ecosystem='OSS-Fuzz', name='proj'))
        ],
        status=1,
        source_id='oss-fuzz:123',
        source_of_truth=osv.SourceOfTruth.SOURCE_REPO,
        timestamp=datetime.datetime(2020, 1, 1, 0, 0, 0, 0)).put()
    # Fresh bug (timestamp is now): expect no task.
    osv.Bug(
        db_id='OSV-2021-1338',
        affected_packages=[
            osv.AffectedPackage(
                package=osv.Package(ecosystem='ecosystem', name='proj'),
                ranges=[
                    osv.AffectedRange2(
                        type='GIT',
                        events=[
                            osv.AffectedEvent(type='introduced', value='0'),
                            osv.AffectedEvent(type='fixed', value='fix'),
                        ])
                ])
        ],
        source_id='source:OSV-2021-1338.yaml',
        status=1,
        source_of_truth=osv.SourceOfTruth.SOURCE_REPO,
        timestamp=importer.utcnow()).put()
    # Stale internally-owned bug: expect an impact task.
    osv.Bug(
        db_id='OSV-2021-1339',
        affected_packages=[
            osv.AffectedPackage(
                package=osv.Package(ecosystem='OSS-Fuzz', name='proj'))
        ],
        status=1,
        source_id='oss-fuzz:124',
        source_of_truth=osv.SourceOfTruth.INTERNAL,
        timestamp=datetime.datetime(2020, 1, 1, 0, 0, 0, 0)).put()

    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()

    mock_publish.assert_has_calls([
        mock.call(
            'projects/oss-vdb/topics/tasks',
            data=b'',
            deleted='false',
            original_sha256=('bd3cc48676794308a58a19c97972a5e5'
                             '42abcc9eb948db5701421616432cc0b9'),
            path='proj/OSV-2021-1337.yaml',
            source='oss-fuzz',
            type='update'),
        mock.call(
            'projects/oss-vdb/topics/tasks',
            allocated_id='OSV-2021-1339',
            data=b'',
            source_id='oss-fuzz:124',
            type='impact'),
    ])

    # The scheduled-update date marker advances.
    source_repo = osv.SourceRepository.get_by_id('oss-fuzz')
    self.assertEqual(datetime.date(2021, 1, 1), source_repo.last_update_date)
def test_scheduled_updates(self, mock_publish):
    """Stale bugs get update/impact tasks during the scheduled pass."""
    self.mock_repo.add_file('proj/OSV-2021-1337.yaml', '')
    self.mock_repo.add_file('proj/OSV-2021-1339.yaml', '')
    self.mock_repo.add_file('OSV-2021-1338.yaml', '')
    self.mock_repo.commit('OSV', '*****@*****.**')

    # Stale repo-owned bug -> update task.
    osv.Bug(
        id='2021-1337',
        project='proj',
        ecosystem='OSS-Fuzz',
        fixed='',
        status=1,
        source_id='oss-fuzz:123',
        source_of_truth=osv.SourceOfTruth.SOURCE_REPO,
        timestamp=datetime.datetime(2020, 1, 1, 0, 0, 0, 0)).put()
    # Repo-owned bug from the 'source' repository -> update task.
    osv.Bug(
        id='2021-1338',
        project='proj',
        fixed='fix',
        source_id='source:OSV-2021-1338.yaml',
        status=1,
        source_of_truth=osv.SourceOfTruth.SOURCE_REPO,
        timestamp=importer.utcnow()).put()
    # Stale internally-owned bug -> impact task.
    osv.Bug(
        id='2021-1339',
        project='proj',
        ecosystem='OSS-Fuzz',
        fixed='',
        status=1,
        source_id='oss-fuzz:124',
        source_of_truth=osv.SourceOfTruth.INTERNAL,
        timestamp=datetime.datetime(2020, 1, 1, 0, 0, 0, 0)).put()

    importer.Importer('fake_public_key', 'fake_private_key', self.tmp_dir,
                      'bucket').run()

    # SHA-256 of empty content; both imported files are empty.
    empty_sha = ('e3b0c44298fc1c149afbf4c8996fb924'
                 '27ae41e4649b934ca495991b7852b855')
    mock_publish.assert_has_calls([
        mock.call(
            'projects/oss-vdb/topics/tasks',
            data=b'',
            deleted='false',
            original_sha256=empty_sha,
            path='proj/OSV-2021-1337.yaml',
            source='oss-fuzz',
            type='update'),
        mock.call(
            'projects/oss-vdb/topics/tasks',
            allocated_id='2021-1339',
            data=b'',
            source_id='oss-fuzz:124',
            type='impact'),
        mock.call(
            'projects/oss-vdb/topics/tasks',
            data=b'',
            deleted='false',
            original_sha256=empty_sha,
            path='OSV-2021-1338.yaml',
            source='oss-fuzz',
            type='update'),
    ])

    source_repo = osv.SourceRepository.get_by_id('oss-fuzz')
    self.assertEqual(datetime.date(2021, 1, 1), source_repo.last_update_date)
import pusher
import scraper
import parameters
import time
import importer
import splitter

MESSAGES = parameters.MESSAGES()
SOURCE_STATUS = parameters.SOURCE_STATUS()

if __name__ == "__main__":
    # Run the batch: import and split the source file, then push all docs.
    importer.Importer().importFile()
    splitter.Splitter().splitFile()

    status = pusher.Status()
    scraper = scraper.Scraper()  # rebinds the module name to an instance

    batch_start = time.time()
    print(MESSAGES.START_BATCH)
    status.changeSourceStatus(SOURCE_STATUS.REBUILD)
    scraper.pushAllDocs()
    status.changeSourceStatus(SOURCE_STATUS.IDLE)
    elapsed = round(time.time() - batch_start, 2)
    print(MESSAGES.END_BATCH, elapsed, MESSAGES.BATCH_TIME_UNIT)
# Show CAE window and get window ID # A new logger's handler is created here if os.name == 'nt': w = gui.window.Windows_window(p, s) else: w = gui.window.Linux_window(p, s) w.show() w.wid1 = w.get_wid('CalculiX Advanced Environment') if s.align_windows: w.align() # Main block m = model.Model() # generate FEM model t = tree.Tree(p, s, w, m) # create treeView items based on KOM j = model.job.Job(p, s, w, m) # create job object i = importer.Importer(p, s, w, m, t, j) actions.actions(p, s, w, m, t, j, i) # window actions # Import default model if len(args.inp): start_model = os.path.join(p.app_home_dir, args.inp) i.import_file(start_model) # Or start empty else: logging.warning('No default start model specified.') m.KOM = model.kom.KOM(p, s) t.generateTreeView(m) logging.info('Started in {:.1f} seconds.\n'.format(time.perf_counter() - start_time))
#!/usr/bin/env python3
import asyncio

import importer

print("""
Welcome to the Saleor Importer!

To import data into Saleor, I will need three pieces of information from you:

1. The URL of your Saleor API
2. An API token (see https://docs.saleor.io/docs/dashboard/configuration/service-accounts)
3. The path to a file containing the data to import (link to format To Be Provided)
""")

# Collect connection details and the data file interactively.
url = input('URL: ')
token = input('API Token: ')
filepath = input('Path to File: ')

saleor_importer = importer.Importer(importer.Api(url, token), filepath)
output_file = asyncio.run(saleor_importer.process())
print("Results are in: {}".format(output_file.name))
import importer

# Build an Importer and run the rule import.
rule_importer = importer.Importer()
rule_importer.import_rule()