def __init__(self, chart, helm):
    """Wrap a single-release chart definition.

    ``chart`` is a one-entry mapping of release name -> chart config;
    ``helm`` is the helm client handed to the Repository.
    """
    self.helm = helm
    self.config = Config()
    # next(iter(...)) works on both py2 and py3, unlike dict.keys()[0]
    # which fails on py3's dict_keys view.
    self._release_name = next(iter(chart))
    self._chart = chart[self._release_name]
    self._repository = Repository(
        self._chart.get('repository', default_repository), self.helm)
    # Ensure a 'values' mapping always exists.
    self._chart.setdefault('values', {})
    self._namespace = self._chart.get('namespace')
    self._context = self._chart.get('context')
    # Normalize the dashed 'values-strings' key into the python-friendly
    # 'values_strings', dropping the original when it was present.
    value_strings = self._chart.get('values-strings', {})
    self._chart['values_strings'] = value_strings
    if value_strings:
        del self._chart['values-strings']
def load_config(self):
    """Load AWS/S3 settings and build the repository driver.

    Command-line options take precedence; anything missing is filled in
    from the AWS config file for the operation's group id.  Returns an
    empty ``Settings()`` on any missing prerequisite, otherwise a
    fully-populated one.
    """
    if self.options is None:
        return Settings()
    group_id = self.operation.group_id
    if not group_id:
        logging.error('Failed to load config: group id is not set in operation')
        return Settings()
    access_key = self.options['access_key']
    secret_key = self.options['secret_key']
    bucket = self.options['bucket']
    config = self.options['config']
    region = self.options['region']
    if not all([access_key, secret_key, bucket]):
        # command line arguments are prior to the configuration file
        path = expandvars(expanduser(config))
        try:
            with open(path) as fp:
                a, s, b, r = self._read_aws_config(fp, group_id)
                # Only fill in values the command line did not supply.
                access_key = a if access_key is None else access_key
                secret_key = s if secret_key is None else secret_key
                bucket = b if bucket is None else bucket
                region = r if region is None else region
        except IOError:
            logging.error('Failed to open configuration file: %s' % config)
            return Settings()
    # Any mandatory setting still missing aborts with a usage hint.
    for x, arg, opt in [
            (access_key, 'access_key', '--access'),
            (secret_key, 'secret_key', '--secret'),
            (bucket, 'bucket', '--bucket'),
    ]:
        if x is None:
            logging.error('Oops! "%s" setting is missing.' % arg)
            logging.error('Use "%s" option or write configuration file: %s' % (opt, config))
            return Settings()
    # set repository driver
    driver = S3Driver(access_key, secret_key, bucket, group_id, region)
    repo = Repository(driver, group_id)
    return Settings(self.operation, self.options, repo)
def main(img_path='../data/test/017029558.jpg', imgs_path='../data/test/',
         csv_path='data.csv', metric='0'):
    # ../data/test/000316731.jpg
    """Plot the 15 images nearest to img_path under the chosen metric.

    metric: a numeric string selects a plain distance function; a string
    starting with 'm' (e.g. 'm_32') selects a Mahalanobis distance over
    the first <dim> feature components.  Python 2 only: relies on
    tuple-unpacking lambdas, list-returning map() and xrange.
    """
    repo = Repository(csv_path)
    feats_map = []
    # Split repository rows into the source image's features and the rest,
    # matching on the file's basename.
    for fpath, _, feats in repo:
        if fpath.split('/')[-1] == img_path.split('/')[-1]:
            source_feats = feats
        else:
            feats_map.append((fpath, feats))
    if metric.startswith('m'):
        dim = int(metric.split('_')[-1])
        # NOTE(review): feats_map[:dim] limits the covariance estimate to
        # the first `dim` images only -- confirm this was not meant to be
        # the whole of feats_map.
        all_feats = [feats[:dim] for _, feats in feats_map[:dim]]
        cov = np.cov(np.array(all_feats))
        VI = np.linalg.pinv(cov)
        distance_map = map(
            lambda (fpath, feats): (mahalanobis(feats[:dim], source_feats[:dim], VI), fpath),
            feats_map)
    else:
        metric = int(metric)
        distance_map = map(
            lambda (fpath, feats): (distance(feats, source_feats, metric), fpath),
            feats_map)
    # Sort by distance; nearest first.
    distance_map.sort()
    # 4x5 grid: row 0 shows the source image, rows 1-3 the 15 best matches.
    f, axarr = plt.subplots(4, 5, figsize=(20, 16))
    axarr[0, 2].imshow(mpimg.imread(img_path))
    axarr[0, 2].set_title("Source image", fontsize=24)
    axarr[0, 2].xaxis.set_visible(False)
    axarr[0, 2].yaxis.set_visible(False)
    for i in [0, 1, 3, 4]:
        axarr[0, i].axis('off')
    for i in xrange(15):
        axarr[1 + i / 5, i % 5].imshow(
            mpimg.imread(os.path.join(imgs_path, distance_map[i][1])))
        axarr[1 + i / 5, i % 5].set_title("{0:.3f}".format(distance_map[i][0]),
                                          fontsize=20)
        axarr[1 + i / 5, i % 5].xaxis.set_visible(False)
        axarr[1 + i / 5, i % 5].yaxis.set_visible(False)
    plt.tight_layout()
    plt.show()
    plt.close(f)
def test_remove_items(self):
    """remove_item drops a single match; remove_items drops every match."""
    store = Repository()
    for value in (0, 1, 1, 1):
        store.add_item(value)
    # Removing by condition takes out exactly one matching item.
    store.remove_item(condition=lambda item: item == 1)
    self.assertEqual(store.get_items(), [0, 1, 1])
    # Removing by uid takes out exactly the item that uid refers to.
    uid = store.add_item(2)
    store.remove_item(uid=uid)
    self.assertEqual(store.get_items(), [0, 1, 1])
    # remove_items removes every item the predicate accepts.
    store.remove_items(lambda item: item == 1)
    self.assertEqual(store.get_items(), [0])
def main():
    """Parse CLI options and start polling the remote API into the matrix."""
    setup_uncaught_exceptions_logger()
    setup_default_loggers()
    parser = argparse.ArgumentParser()
    parser.add_argument('-u', '--url', action='store', type=str,
                        help='API base URL', required=True)
    parser.add_argument('-a', '--username', action='store', type=str,
                        help='HTTP basic auth username')
    parser.add_argument('-p', '--password', action='store', type=str,
                        help='HTTP basic auth password')
    args = parser.parse_args()
    # Create controller
    queue = Queue()
    repo = Repository(base_url=args.url, username=args.username,
                      password=args.password)
    # NOTE(review): 'hieght' looks misspelled -- confirm it matches the
    # Matrix constructor's actual parameter name before renaming it.
    dc = DrawController(queue=queue, matrix=Matrix(width=32, hieght=16),
                        refresh_rate=0.1)
    controller = Controller(repo=repo, queue=queue)
    controller.start_polling()
def __testModificaElement(self):
    """modificaElement replaces a stored person's data (matched by id)."""
    self.__repo = Repository()
    assert len(self.__repo) == 0
    self.__id = 1
    self.__nume = "Razvan"
    self.__adresa = "Botosani"
    self.__persoana = Persoana(self.__id, self.__nume, self.__adresa)
    self.__repo.adaugaElement(self.__persoana)
    # Re-create the person with a new address and push the update.
    self.__adresa = "Cluj-Napoca"
    self.__persoana = Persoana(self.__id, self.__nume, self.__adresa)
    self.__repo.modificaElement(self.__persoana)
    # The stored person must now carry the updated address.
    assert self.__persoana.get_adresa() == self.__repo.cautarePersoana(
        self.__persoana).get_adresa()
def __init__(self, username='', password='', repo_name_or_slug=''):
    """API client: store credentials and build the sub-API endpoints."""
    self.username = username
    self.password = password
    self.repo_slug = repo_name_or_slug
    self.repo_tree = {}
    self.URLS = URLS
    # Sub-API endpoints, each holding a back-reference to this client.
    self.repository = Repository(self)
    self.service = Service(self)
    self.ssh = SSH(self)
    self.issue = Issue(self)
    # OAuth state starts unset; filled in by the auth flow.
    self.access_token = None
    self.access_token_secret = None
    self.consumer_key = None
    self.consumer_secret = None
    self.oauth = None
def process_repository(repo_dict):
    """
    Takes a dictionary containing keys: path, branches and revisions
    and returns a Repository object.

    This method should only be called by read_yaml.
    """
    path = repo_dict[REPOSITORY_PATH]
    # Map every declared revision/branch id to None; absent keys yield
    # empty dicts, matching the fixture's "declared but not loaded" state.
    revisions = dict.fromkeys(repo_dict.get(REPOSITORY_REVISIONS, []))
    branches = dict.fromkeys(repo_dict.get(REPOSITORY_BRANCHES, []))
    return Repository(path, branches, revisions, True)
def test_import_with_users(self):
    """Importing sample documents links each document to the right user."""
    # Start from a clean repository directory.
    if os.path.exists('/tmp/test_repo2'):
        shutil.rmtree('/tmp/test_repo2')
    repository = Repository('Empty', '/tmp/test_repo2')
    # new_doc_gen = NewDocumentGenerator('/tmp/samples/importable', repository._user_manager, repository._document_manager)
    # new_doc_gen.generate_many_documents(2)
    alice = User('Alice', 'Smith', date(1980, 10, 10), '*****@*****.**', '****')
    bob = User('Bob', 'Marker', date(1970, 11, 11), '*****@*****.**', '****')
    alice_id = repository._user_manager.add_user(alice)
    bob_id = repository._user_manager.add_user(bob)
    repository._user_manager.save_user(alice_id, alice)
    repository._user_manager.save_user(bob_id, bob)
    self.assertEqual(repository._user_manager.count_users(), 2)
    repository.import_documents('{}/importable'.format(SAMPLE_DIR_PATH))
    self.assertEqual(repository._document_manager.count_documents(), 2)
    # Alice's imported document.
    results = repository._document_manager.find_documents_by_author(
        alice_id, repository._user_manager)
    self.assertEqual(len(results), 1)
    first_document = results[0]
    self.assertEqual(first_document.author, alice_id)
    self.assertEqual(first_document.title, 'Some important doc')
    self.assertEqual(first_document.description,
                     'Contains various documentations')
    self.assertEqual(first_document.files, ['part1.pdf', 'part2.pdf'])
    self.assertEqual(first_document.doc_format, 'pdf')
    # Bob's imported document.
    results = repository._document_manager.find_documents_by_author(
        bob_id, repository._user_manager)
    self.assertEqual(len(results), 1)
    second_document = results[0]
    self.assertEqual(second_document.author, bob_id)
    self.assertEqual(second_document.title, 'Data report')
    self.assertEqual(second_document.description, 'Figures and graphs mainly')
    self.assertEqual(second_document.files, ['data.doc'])
    self.assertEqual(second_document.doc_format, 'doc')
    shutil.rmtree('/tmp/test_repo2')
def test_ServiceUpdate_movieWithAllFields_allFieldsUpdated(self):
    """Updating a movie replaces its title, description and genre."""
    undo_stack = Stack()
    redo_stack = Stack()
    self.__movie_validator = ValidMovie()
    self.__movie_repository = Repository()
    # Bug fix: instantiate the rental repository -- the original assigned
    # the Repository class object itself instead of calling it.
    self.__rental_repository = Repository()
    self.__movie_service = ServiceMovies(self.__movie_repository,
                                         self.__movie_validator, undo_stack,
                                         redo_stack,
                                         self.__rental_repository)
    self.__movie_service.add_movie(1, 'Harry Potter', 'Witchcraft',
                                   'Adventure')
    self.__movie_service.update_movie(1, 'Harry Potter 2', 'Wizardry',
                                      'Action')
    expected = Movie(1, 'Harry Potter 2', 'Wizardry', 'Action')
    self.assertTrue(
        self.__movie_service.get_movies()[0].identical(expected))
def test_get_items(self):
    """get_item fetches one item by condition or uid; get_items filters."""
    store = Repository()
    store.add_item(0)
    store.add_item(1)
    # Lookup by predicate returns the matching item itself.
    self.assertEqual(store.get_item(condition=lambda item: item == 1), 1)
    # Lookup by the uid returned from add_item.
    uid = store.add_item(2)
    self.assertEqual(store.get_item(uid=uid), 2)
    store.add_item(1)
    # Without a predicate, every item comes back in insertion order.
    self.assertEqual(store.get_items(), [0, 1, 2, 1])
    # With a predicate, all (and only) matching items come back.
    self.assertEqual(store.get_items(lambda item: item == 1), [1, 1])
    self.assertEqual(store.get_items(lambda item: item > 2), [])
    store.add_item(3)
    self.assertEqual(store.get_items(lambda item: item > 2), [3])
def run(self, defines : dict = None):
    """Run this testcase: set up directories/logging, record repository
    state, invoke the runner and return its result code."""
    from os import chdir
    from os import getcwd
    # change cwd to working dir
    self.log("Changing CWD to: %s ..." % (self.working_dir))
    chdir(self.working_dir)
    # clean output directory
    #self.clean_wrk_dir() #TODO
    # create output directory
    self.gen_run_dirs()
    # open log -- a fresh per-testcase file unless one is already active
    if not self.logger.log_to_file:
        logname = os.path.join("", *[self.log_dir, self.testcase_name + ".log"])
        self.logger.open_file_log(filename=logname)
    else:
        self.logger.log()
    self.logger.log()
    self.logger.log_header()
    self.logger.log("CWD : %s" % (getcwd()))
    self.logger.log("Testcase : %s" % (self.testcase_name))
    self.logger.log("Testcase files:", log_to_stdout=False)
    from util import dict_to_str
    filesstr = dict_to_str(self.files).split("\n")
    for line in filesstr :
        self.logger.log(" " + line, log_to_stdout=False)
    # repository info -- Repository raises ValueError when working_dir is
    # not inside a git checkout, which we treat as "skip the checks".
    from repository import Repository
    try:
        repo = Repository(self.working_dir)
        self.logger.log(self.logger.hr(), log_to_stdout=True)
        repo_info = repo.info()
        self.logger.log(repo_info, log_to_stdout=False)
        if "modified" in repo_info:
            self.logger.log("WARNING: repository NOT clean!")
        else:
            self.logger.log("INFO: repository IS clean.")
    except ValueError:
        self.logger.log("INFO: No GIT repository detected, skipping GIT checks.")
    #self.logger.log(self.logger.hr(), log_to_stdout=False)
    # run runner
    ret = self.runner.run(files = self.files, top=None, defines=self.defines,
                          waves=self.waves, dirs=self.dirs)
    #self.logger.close() # TODO testcase should be able to close its logger by itself, not just from testset!
    # Force the log onto disk before returning.
    self.logger.fd.flush()
    os.fsync(self.logger.fd.fileno())
    return ret
def getLate(self):
    """Return a Repository of students having at least one assignment past
    its deadline; duplicate students are collected only once."""
    repoOfLateStudents = Repository()
    for i in range(len(self.__idlist)):
        studentID = self.__idlist[i]
        #we look into the assignments of the student w/ studentID
        assignmentsOfStudent = self.__assignmentDict[studentID]
        for j in range(len(assignmentsOfStudent)):
            assignmentFound = self.__repoAssignments.search(assignmentsOfStudent[j])
            deadline = assignmentFound.get_deadline()
            # Deadline is stored as "day.month"; parse both parts.
            date = deadline.split(".")
            day = int(date[0])
            month = int(date[1])
            if self.checkLateDeadline(day, month) == True:
                student = self.__repoStudents.search(studentID)
                try:
                    repoOfLateStudents.add(student)
                except ValueError:
                    # Student already collected for an earlier late
                    # assignment; ignore the duplicate.
                    pass
    return repoOfLateStudents
def test_ServiceAdd_validMovie_isAdded(self):
    """A valid movie added through the service appears in the repository."""
    undo_stack = Stack()
    redo_stack = Stack()
    self.__movie_validator = ValidMovie()
    self.__movie_repository = Repository()
    # Bug fix: instantiate the rental repository -- the original assigned
    # the Repository class object itself instead of calling it.
    self.__rental_repository = Repository()
    self.__movie_service = ServiceMovies(self.__movie_repository,
                                         self.__movie_validator, undo_stack,
                                         redo_stack,
                                         self.__rental_repository)
    self.__valid_movie = Movie(1, 'Frozen', 'Ice queen with magical powers',
                               'Animation')
    self.__movie_service.add_movie(1, 'Frozen',
                                   'Ice queen with magical powers',
                                   'Animation')
    # The last stored movie must equal the one we constructed above.
    self.assertEqual(
        self.__movie_service.get_movies()[
            self.__movie_service.get_number() - 1], self.__valid_movie)
def __testStergeElement(self):
    """Deleting an element removes it; deleting it again raises RepoError."""
    self.__repo = Repository()
    assert len(self.__repo) == 0
    # Add two persons.
    self.__id = 1
    self.__nume = "Razvan"
    self.__adresa = "Botosani"
    self.__persoana = Persoana(self.__id, self.__nume, self.__adresa)
    self.__repo.adaugaElement(self.__persoana)
    self.__id = 2
    self.__nume = "Cristian"
    self.__adresa = "Botosani"
    self.__persoana1 = Persoana(self.__id, self.__nume, self.__adresa)
    self.__repo.adaugaElement(self.__persoana1)
    assert len(self.__repo) == 2
    # First deletion succeeds ...
    try:
        self.__repo.stergereElement(self.__persoana1)
        assert True
    except RepoError:
        assert False
    # ... deleting the same element again must fail with the Romanian
    # "object does not exist" message.
    try:
        self.__repo.stergereElement(self.__persoana1)
        assert False
    except RepoError as msg:
        assert str(msg) == "obiectul nu exista!\n"
    # Same pair of checks for the other person.
    try:
        self.__repo.stergereElement(self.__persoana)
        assert True
    except RepoError:
        assert False
    try:
        self.__repo.stergereElement(self.__persoana)
        assert False
    except RepoError as msg:
        assert str(msg) == "obiectul nu exista!\n"
def __init__(self, parent=None):
    """Build the main window: settings, repository and search wiring."""
    QtGui.QWidget.__init__(self, parent)
    self.settings = Settings()
    self._set_data_dir()
    self.repo = Repository(self.settings.getDataDir())
    self.ui = Ui_MainWindow()
    self.ui.setupUi(self)
    # Menu and button signal wiring.
    self.ui.actionOpen.triggered.connect(self.openFile)
    self.search_result_widget = SearchResultWidget(self.ui.search_view)
    self.search_controller = SearchController(self.search_result_widget,
                                              self.ui.search_line, self.repo)
    self.ui.clear_button.pressed.connect(self.search_controller.clear)
    self.ui.add_button.pressed.connect(self.show_dialog)
def test_status_uncommitted_files(self):
    """Modified, deleted and newly-added files are all reported by status."""
    # Commit two files first so there is a baseline to diff against.
    file_list = [('1.txt', '1\n'), ('2.txt', '2\n')]
    for path, content in file_list:
        write_to_file(path, content)
        Command.cmd_add(path)
    Command.cmd_commit('first ci')
    # Modify one file, delete the other, and stage a brand-new file.
    write_to_file(file_list[0][0], '11\n')
    Command.cmd_rm(file_list[1][0])
    new_path = '3.txt'
    new_content = '3\n'
    write_to_file(new_path, new_content)
    Command.cmd_add('.')
    repo = Repository()
    uncommitted_files = repo.get_uncommitted_files()
    self.assertEqual(uncommitted_files['modified'], [file_list[0][0]])
    self.assertEqual(uncommitted_files['deleted'], [file_list[1][0]])
    self.assertEqual(uncommitted_files['new file'], [new_path])
    Command.cmd_status()
def getAllRepos(): session = getSession() p = {'since': 17262486} result = requests.get('https://api.github.com/repositories', params=p, auth=myauth).json() l = len(result) counter = 1 print counter, l, p['since'] counter = Counter(5000) while l > 0: counter.check_limit() for proj in result: if 'id' in proj: rep = Repository() rep.id = proj['id'] rep.full_name = proj['full_name'] rep.languages_url = proj['languages_url'] session.add(rep) else: print proj p['since'] = result[-1]['id'] while l == 0: result = requests.get('https://api.github.com/repositories', params=p, auth=myauth) l = len(result.json()) counter.increment() print counter.count, l, p['since'] if l == 0: print result result = result.json() session.commit()
def __parse_repositories(self):
    """Read the repository CSV (path,last_pulled) into self.repositories.

    The first row is treated as a header and skipped.  Repositories that
    fail to initialize are skipped with a message on stderr.  Python 2
    module (print >> syntax).
    """
    # if the repository file fails to open there is nothing we can do.
    try:
        repo_csv = open(self.repofile, 'r')
    except IOError:
        print >> self.stderr, "failed to open repofile"
        self.stop()
        # Bug fix: bail out here instead of falling through and using the
        # unbound repo_csv handle below.
        return
    try:
        csv_reader = csv.reader(repo_csv, delimiter=',')
        for line, row in enumerate(csv_reader):
            if line == 0:
                continue  # header row
            path = row[0]
            last_pulled = row[1]
            try:
                repo = Repository(path=path, last_pulled=last_pulled)
                self.repositories.append(repo)
            except RepositoryInitError:
                print >> self.stderr, ("repository for " + path +
                                       " failed to be initialized skipping")
    finally:
        # Bug fix: the file handle was previously never closed.
        repo_csv.close()
def process_pathfinder(path_finder: PathFinder):
    """Score author chains between path_finder's source and destination.

    Returns (scored_chains, doc_data); raises AllPathsInvalid when no
    valid chain survives ranking.
    """
    repo = Repository(can_skip_refresh=True)
    pairings, all_bibcodes = _store_bibcodes_for_node(path_finder.src, repo)
    doc_data = {}
    lb.set_n_docs_relevant(len(all_bibcodes))
    # Warm the repository for every document we are about to request.
    repo.notify_of_upcoming_document_request(*all_bibcodes)
    _insert_document_data(pairings, doc_data, repo, path_finder.excluded_names)
    lb.update_progress_cache(force=True)
    chains = _build_author_chains(path_finder.src)
    scored_chains = _rank_author_chains(chains, repo, pairings)
    if scored_chains is None:
        # TODO: do better
        raise AllPathsInvalid(f"src: {path_finder.orig_src}"
                              f" dest: {path_finder.orig_dest}"
                              f" excln: {path_finder.excluded_names}"
                              f" exclb: {path_finder.excluded_bibcodes}")
    return scored_chains, doc_data
def resolve_groups(repositories, kickstart_file_path):
    """
    Resolves packages groups from kickstart file.

    @param repositories         The list of original repository URLs.
    @param kickstart_file_path  The path to the kickstart file.

    @return                     The list of package names.
    """
    # Collect a comps group.xml from every repository that publishes one.
    groups_paths = []
    for url in repositories:
        repository = Repository(url)
        repository.prepare_data()
        groups_data = repository.data.groups_data
        if groups_data is not None and len(groups_data) > 0:
            groups_path = temporaries.create_temporary_file("group.xml")
            with open(groups_path, "w") as groups_file:
                groups_file.writelines(groups_data)
            groups_paths.append(groups_path)
    logging.debug("Following groups files prepared:")
    for groups_path in groups_paths:
        logging.debug(" * {0}".format(groups_path))
    # Groups requested by the kickstart file.
    parser = mic.kickstart.read_kickstart(kickstart_file_path)
    groups = mic.kickstart.get_groups(parser)
    groups_resolved = {}
    for group in groups:
        groups_resolved[group.name] = []
        # NOTE(review): each iteration overwrites the previous file's
        # result, so only the last groups file contributes packages --
        # confirm whether results should be merged across files instead.
        for groups_path in groups_paths:
            packages = get_pkglist_in_comps(group.name, groups_path)
            groups_resolved[group.name] = packages
        logging.debug("Group {0} contains {1} "
                      "packages.".format(group.name,
                                         len(groups_resolved[group.name])))
    packages_all = []
    for group_name in groups_resolved.keys():
        packages_all.extend(groups_resolved[group_name])
    # Abort when a requested group produced no resolution entry at all.
    if len(groups_resolved) != len(groups):
        logging.error("Not all groups were resolved.")
        sys.exit("Error.")
    return packages_all
def __init__(self, config: dict):
    """Initialize upload repositories, dependency repositories and builder
    queues from the given configuration mapping."""
    super().__init__()
    self.queue = Queue()
    # Read config and initialize repositories to upload to
    self.repos = {}
    for name, cfg in config['repositories'].items():
        self.repos[name] = {
            arch: Repository(name + '/' + arch, v['path'], arch)
            for arch, v in cfg.items()
        }
    # Read config and initialize mmpack repositories to fetch dependencies from
    deps_repos = {}
    for name, cfg in config['dependency-repositories'].items():
        deps_repos[name] = {arch: v['url'] for arch, v in cfg.items()}
    # One builder queue per configured builder, all sharing deps_repos.
    self.builder_queues = {
        k: _BuilderQueue(Builder(name=k, cfg=v, deps_repos=deps_repos))
        for k, v in config['builders'].items()
    }
    self.rules = FilterRule.load_rules(config)
def __init__(self):
    """Build the valid and invalid fixture objects shared by the tests."""
    # A valid person plus its validator.
    self.__id = "19"
    self.__nume = "Razvan"
    self.__adresa = "Botosani"
    self.__persoana = Persoana(self.__id, self.__nume, self.__adresa)
    # An intentionally invalid person (empty name and address).
    self.__badId = "389178"
    self.__badNume = ""
    self.__badAdresa = ""
    self.__badPersoana = Persoana(self.__badId, self.__badNume,
                                  self.__badAdresa)
    self.__validator = ValideazaPersoana()
    # A valid event.  (The original constructed this twice with identical
    # values; the redundant duplicate block was removed.)
    self.__id = 1
    self.__data = [12, 11, 2018]
    self.__timp = "14:53"
    self.__descriere = "Descriere"
    self.__eveniment = Eveniment(self.__id, self.__data, self.__timp,
                                 self.__descriere)
    # An intentionally invalid event (out-of-range date/time, empty text).
    self.__id = -3
    self.__data = [32, 13, 2070]
    self.__timp = "25:64"
    self.__descriere = ""
    self.__badEveniment = Eveniment(self.__id, self.__data, self.__timp,
                                    self.__descriere)
    self.__validatorEv = ValideazaEveniment()
    self.__repo = Repository()
def setUpClass(cls) -> None:
    """Create the on-disk test database with a Repository table, sample
    API payload data, the expected row, and the object under test."""
    #Test database setup
    cls.dbConnector = DatabaseConnector(databaseFileName="testDB.db")
    cls.dbConnector.createDatabase()
    cls.dbConnector.openDatabaseConnection()
    cls.dbConnector.executeSQL(
        sql=
        'CREATE TABLE Repository (ID INTEGER, Name TEXT, Owner TEXT, Private TEXT, Fork TEXT, Created_At TEXT, Updated_At TEXT, Pushed_At TEXT, Size INTEGER, Forks INTEGER, Open_Issues INTEGER, PRIMARY KEY(ID))'
    )
    #Test data -- mimics the GitHub API repository payload shape.
    cls.testData = {
        'id': 0,
        'name': 'testName',
        'owner': {
            'login': '******'
        },
        'private': 'testPrivate',
        'fork': 'testFork',
        'created_at': 'testCreate',
        'updated_at': 'testUpdate',
        'pushed_at': 'testPush',
        'size': 7357,
        'forks_count': 7357,
        'open_issues_count': 7357
    }
    # Row expected in the database once the payload above is stored.
    # NOTE(review): the masked 'login' value above must actually equal
    # 'testLogin' for this comparison row to hold -- confirm.
    cls.dataComparison = [
        (0, 'testName', 'testLogin', 'testPrivate', 'testFork', 'testCreate',
         'testUpdate', 'testPush', 7357, 7357, 7357)
    ]
    #Commit class setup
    cls.repo = Repository(dbConnection=cls.dbConnector,
                          oauthToken="token",
                          repository="repo",
                          username="******",
                          url="url")
def configure(self):
    """Load the repository, apply theme/monitor settings and populate both
    result tables."""
    self.repository = Repository()
    self.set_theme(self.repository.theme)
    self.set_monitor("left")
    self.set_save_button_disabled()
    # config tables
    self.ui.leftTableWidget.setRowCount(len(self.repository.left_results))
    self.ui.rightTableWidget.setRowCount(len(
        self.repository.right_results))
    self.ui.rightTableWidget.clicked.connect(self.set_current_index)
    self.ui.leftTableWidget.clicked.connect(self.set_current_index)
    self.ui.leftTableWidget.setHorizontalHeaderLabels(
        ['Nimi', 'Arvo', 'Solu'])
    self.ui.rightTableWidget.setHorizontalHeaderLabels(
        ['Nimi', 'Arvo', 'Solu'])
    self.ui.leftTableWidget.setAlternatingRowColors(True)
    self.ui.rightTableWidget.setAlternatingRowColors(True)
    # populate tables (column 0 = name, column 2 = cell; column 1 is the
    # value column, left empty here)
    for index, result in enumerate(self.repository.left_results):
        self.ui.leftTableWidget.setItem(index, 0,
                                        QTableWidgetItem(result.name))
        self.ui.leftTableWidget.setItem(index, 2,
                                        QTableWidgetItem(result.cell))
    for index, result in enumerate(self.repository.right_results):
        self.ui.rightTableWidget.setItem(index, 0,
                                         QTableWidgetItem(result.name))
        self.ui.rightTableWidget.setItem(index, 2,
                                         QTableWidgetItem(result.cell))
    # Persist edits the user makes directly in the tables.
    self.ui.leftTableWidget.itemChanged.connect(self.save_table_item)
    self.ui.rightTableWidget.itemChanged.connect(self.save_table_item)
    # Show the currently-configured input file on its chooser button.
    if self.repository.input_file and os.path.isfile(
            self.repository.input_file):
        self.ui.inputFileButton.setText(self.repository.input_file)
def main(imgs_path, deploy_path, weights_path, blob_name="pool5/7x7_s1", shape=(3, 224, 224), mean_values=(104, 117, 123)): channels, rows, cols = shape net = caffe.Net(deploy_path, weights_path, caffe.TEST) net.blobs['data'].reshape(1, channels, rows, cols) repo = Repository() for directory, dirnames, filenames in os.walk(imgs_path): for i, img_path in enumerate(sorted(filenames)): # image = caffe.io.load_image(img_path) image = cv2.imread(img_path) image = cv2.resize(image, (rows, cols)) image = image.swapaxes(0,2).swapaxes(1,2) image = image.reshape(1, channels, rows, cols) input_img = image.astype(float) for channel in xrange(len(channels)): input_img[:,channel,:,:] -= mean_values[channel] net.blobs["data"].data[...] = input_img probs = net.forward()['prob'].flatten() feats = net.blobs[blob_name].data repo.store(os.path.join(imgs_path, img_path), probs, feats) print "{0} of {1} done".format(i, len(filenames))
def test_empty_repository_creation(self):
    """A fresh Repository lays out its directory tree and metadata files."""
    Repository('Empty', '/tmp/test_repo')
    self.assertTrue(os.path.isdir('/tmp/test_repo/documents'))
    self.assertTrue(os.path.isdir('/tmp/test_repo/projects'))
    self.assertTrue(os.path.isdir('/tmp/test_repo/logs'))
    self.assertTrue(os.path.isdir('/tmp/test_repo/users'))
    self.assertTrue(os.path.exists('/tmp/test_repo/paths.ini'))
    self.assertTrue(os.path.exists('/tmp/test_repo/users/roles.txt'))
    # INI section/key order is not deterministic, so the file is checked
    # through the parser rather than line by line (an earlier line-by-line
    # version of this test was removed for that reason).
    metadata_file = read_ini_file('/tmp/test_repo/paths.ini')
    self.assertEqual(metadata_file['directories']['documents'], 'documents')
    self.assertEqual(metadata_file['directories']['logs'], 'logs')
    self.assertEqual(metadata_file['directories']['projects'], 'projects')
    self.assertEqual(metadata_file['directories']['users'], 'users')
    shutil.rmtree('/tmp/test_repo')
def main(args):
    """Entry point: dispatch docopt-style args to 'fetch' or 'make'."""
    if '--debug' in args and args['--debug']:
        logger.setLevel('DEBUG')
    else:
        logger.setLevel('INFO')
    logger.info(app)
    repo = Repository(config, logger, args['--refresh'])
    citeproc = CiteProc(repo, config, logger)
    try:
        # start the citeproc server if the flag is passed
        if 'fetch' in args and args['fetch']:
            # Fetch the requested types, or the configured defaults.
            if len(args['TYPES']) > 0:
                repo.fetch(args['TYPES'])
            else:
                repo.fetch(config.default_types)
        elif 'make' in args and args['make']:
            citeproc.start()
            citeproc.build(args['OUTPUT_TYPES'])
    finally:
        # always try to shutdown the citeproc server
        citeproc.shutdown()
import copy from django.core.exceptions import MultipleObjectsReturned, ObjectDoesNotExist from operator import attrgetter from foundation import evaluate_condition, find_groups, mixed_sort from repository import Repository from aggregation import Count repository = Repository() class MemoryQuerySet(object): fetch_from_repo = False ''' This quertset fecthes objects from memory. It will emulate all public QuerySet methods to be fully django compatible ''' def __init__(self, model, data=[], current=0): self.model = model self.model_name = model.__name__ self.data = data self.current = current def __len__(self): return len(self.data) def __iter__(self): return self def next(self):
def main(argv):
    """CLI entry point: show the university schedule filtered by the
    -t/--today, -g/--group and -d/--day options."""
    help_info = '''
    This is a script to show the university schedule for the students at
    Babes Bolyai University majoring in Computer Science in german.
    Help:
    Use -h(--help) options to see how the scripdayt works
    Use -t(--today) to see the schedule for today
    Use -g(--group) to see the schedule for a specific group
    Use -d(--day) to see the schedule for a specific day
        Accepted values are: Monday/Luni Tuesday/Marti Wendsday/Miercuri Thursday/Joi Friday/Vineri
    Possible options combinations:
        - group and today
        - group and day
    '''
    correct = False
    try:
        opts, args = getopt.getopt(argv, 'thg:d:',
                                   ['help', 'today', 'group=', 'day='])
    except getopt.GetoptError:
        print(help_info)
        sys.exit(2)
    day = None
    group = None
    today = False
    today_day = None
    help = False
    for opt, arg in opts:
        if opt in ('-h', '--help'):
            print(help_info)
            help = True
        elif opt in ('-t', '--today'):
            today = True
            today_day = dt.datetime.today().weekday()
        elif opt in ('-d', '--day'):
            day = arg
        elif opt in ('-g', '--group'):
            group = arg
    if help is False:
        # Ask for year/semester until the user types valid integers.
        while not correct:
            try:
                print('\n')
                year = int(input('Enter your current university year: '))
                semester = int(input('Enter your current semester: '))
                correct = True
                print('\nLoading data...')
            except ValueError:
                print('Given values are not correct. Please enter them again.')
                sys.exit(2)
        r = Repository(semester, year)
        ctrl = Controller(r)
        print('\nDone.')
        # Dispatch on the combination of options supplied.
        if day is None and group is None and today is False:
            print('\n')
            ctrl.get_all()
        elif day is None and group is not None and today is True:
            print('\n')
            # Three-letter groups need a subgroup qualifier.
            if len(group) == 3:
                try:
                    subgroup = check_subgroup(int(input('Enter subgroup: ')))
                    print('\n')
                    print(f'{group}/{subgroup}')
                    ctrl.get_by_group_and_day(day=check_day(today_day),
                                              group=f'{group}/{subgroup}')
                except ValueError:
                    print(
                        'Given values are not correct. Please rerun the script.'
                    )
            else:
                ctrl.get_by_group_and_day(day=check_day(today_day),
                                          group=group)
        elif day is not None and group is not None and today is False:
            print('\n')
            if len(group) == 3:
                try:
                    subgroup = check_subgroup(int(input('Enter subgroup: ')))
                    print('\n')
                    ctrl.get_by_group_and_day(day=check_day(day),
                                              group=f'{group}/{subgroup}')
                except ValueError:
                    print(
                        'Given values are not correct. Please rerun the script.'
                    )
            else:
                ctrl.get_by_group_and_day(day=check_day(day), group=group)
        elif day is not None:
            print('\n')
            ctrl.get_by_day(check_day(day))
        elif today is True:
            # NOTE(review): 'here' looks like leftover debug output --
            # confirm whether it should be removed.
            print('here\n')
            ctrl.get_by_day(check_day(today_day))
        elif group is not None:
            print('\n')
            if len(group) == 3:
                try:
                    subgroup = check_subgroup(int(input('Enter subgroup: ')))
                    print('\n')
                    ctrl.get_by_group(f'{group}/{subgroup}')
                except ValueError:
                    print(
                        'Given values are not correct. Please rerun the script.'
                    )
            else:
                ctrl.get_by_group(group)
        else:
            print('No such combination of arguments possible.')