def delete_section_helper(section_id):
    student_section_manager = StudentSectionRequestManager(Repository(sqldb()))
    student_section_manager.delete(data={"section_id": section_id})
    # Delete section
    r_manager = SectionRequestManager(Repository(sqldb()))
    section_success = r_manager.delete(data={"section_id": section_id})
    return section_success
class Game(Resource):
    def __init__(self):
        self._repository = Repository()
        self._validator = MovePieceValidator()

    def post(self):
        new_game = GameModel()
        uid = self._repository.insert(new_game)
        return {'uid': uid, 'game': new_game.to_dict()}, HTTPStatus.CREATED

    def get(self, uid):
        game = self._repository.fetch(uid)
        if not game:
            return {'error': 'game not found'}, HTTPStatus.NOT_FOUND
        return game.to_dict(), HTTPStatus.OK

    def patch(self, uid):
        req = reqparse.request
        body = req.get_json()
        success, errors = self._validator.validate(body)
        if not success:
            return {"errors": errors}, HTTPStatus.BAD_REQUEST
        game = self._repository.fetch(uid)
        if body['player'] != game.current_player_turn:
            return {'error': "not this player's turn"}, HTTPStatus.NOT_FOUND
        piece = game.find_piece_at(*body['currentCoordinate'])
        if body['piece'] != piece.kind:
            return {'error': 'no such piece at that coordinate'}, HTTPStatus.NOT_FOUND
def test_repository_add_remove_has_not(self):
    repo = Repository(repo_name)
    doc = test_doc(repo)
    repo.remove_document(doc)
    self.assertFalse(repo.has_document_uuid(doc.uuid))
def test_repository_add_search_keyword_success(self):
    repo = Repository(repo_name)
    doc = test_doc(repo)
    result = repo.search_keywords('jungle')
    self.assertEqual(len(result), 1)
def main():
    # type: () -> None
    listener = Listener(WORKER_SOCKET_NAME, 'AF_UNIX')
    try:
        Door.initialize()
        name_to_add = None
        print('Worker started')
        while True:
            conn = listener.accept()
            print('connection accepted from', listener.last_accepted)
            msg = conn.recv()
            print('Message: ', msg)
            command = msg['type']
            value = msg['value']
            if command == 'read':
                process_read_command(value, name_to_add)
                name_to_add = None
            elif command == 'add':
                name_to_add = process_add_command(value)
            else:
                Repository.log_message(
                    'error', 'Unknown command: {}'.format(command))
    finally:
        Door.cleanup()
        listener.close()
class TestRepositoryJSON(unittest.TestCase):
    def setUp(self):
        self.temp_file = tempfile.NamedTemporaryFile()
        self.repo = Repository(JSONAdapter(self.temp_file.name))

    def tearDown(self):
        self.temp_file.close()

    def test_set_get(self):
        self.repo["id"] = "value"
        self.repo["id1"] = "value1"
        self.assertEqual("value", self.repo["id"])
        self.assertEqual("value1", self.repo["id1"])

    def test_items(self):
        self.repo["id"] = "value"
        self.repo["id1"] = "value1"
        self.assertEqual([('id', 'value'), ('id1', 'value1')],
                         list(self.repo.items()))

    def test_persistence(self):
        self.repo["id"] = "value"
        self.repo["id1"] = "value1"
        self.repo.store()
        new_repo = Repository(JSONAdapter(self.temp_file.name))
        self.assertEqual([('id', 'value'), ('id1', 'value1')],
                         list(new_repo.items()))
        self.assertEqual("value", new_repo["id"])
        self.assertEqual("value1", new_repo["id1"])
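# The tests above pin down the adapter contract: construct with a path,
# load on demand, persist on store(). A minimal sketch of an adapter
# satisfying that contract might look like this -- hypothetical, inferred
# only from how the tests call it, not the project's actual JSONAdapter:
import json

class MinimalJSONAdapter(object):
    def __init__(self, path):
        self.path = path

    def load(self):
        # Missing or empty file -> start with an empty mapping
        try:
            with open(self.path) as f:
                return json.load(f)
        except (IOError, ValueError):
            return {}

    def store(self, data):
        with open(self.path, 'w') as f:
            json.dump(data, f)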
def get_lead_time(release: release.Release, repo: repository.Repository) -> timedelta:
    if len(repo.get_releases()) == 1:
        # Only one release exists: measure every commit against the
        # repository's creation time.
        commit_times = [
            datetime.timestamp(c.get_date())
            - datetime.timestamp(repo.get_creation_time())
            for c in repo.get_commits()
        ]
    else:
        releases = repo.get_releases()
        release_index = None
        for index, r in enumerate(releases):
            if r.get_id() == release.get_id():
                release_index = index
                break
        if release_index is not None:
            if release_index < len(releases) - 1:
                prev_release = releases[release_index + 1]
            else:
                return timedelta(seconds=0)
        else:
            return timedelta(seconds=0)
        commits = get_commits_between_releases(release, prev_release, repo)
        commit_times = [
            datetime.timestamp(c.get_date())
            - datetime.timestamp(prev_release.get_creation_time())
            for c in commits
        ]
    # Stop division by zero when there are no commits
    if len(commit_times) == 0:
        return timedelta(seconds=0)
    return timedelta(seconds=sum(commit_times) / len(commit_times))
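# Self-contained illustration of the averaging above: three commits landing
# one, two, and three hours after the previous release give a mean lead
# time of two hours.
from datetime import timedelta

commit_times = [3600.0, 7200.0, 10800.0]  # seconds since the previous release
assert timedelta(seconds=sum(commit_times) / len(commit_times)) == timedelta(hours=2)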
class TestRepository(unittest.TestCase):
    def setUp(self):
        self.repo = Repository()

    def test_default_adapter(self):
        self.assertTrue(self.repo.adapter)

    def test_load(self):
        self.assertTrue(self.repo.load)

    def test_set_get(self):
        self.repo["id"] = "value"
        self.repo["id1"] = "value1"
        self.assertEqual("value", self.repo["id"])
        self.assertEqual("value1", self.repo["id1"])

    def test_items(self):
        self.repo["id"] = "value"
        self.repo["id1"] = "value1"
        self.assertEqual([('id', 'value'), ('id1', 'value1')],
                         list(self.repo.items()))

    def test_store(self):
        self.repo.store()
def get_class_repositories(self, class_dir):
    repos = []
    class_path = os.path.join(self.home_dir, class_dir)
    if not directory_exists(class_path, ssh=self.ssh):
        return repos
    for name in list_directory(class_path, ssh=self.ssh):
        path = os.path.join(class_path, name)
        if directory_exists(path, ssh=self.ssh) and name.endswith('.git'):
            assignment, _ = os.path.splitext(name)
            if self.ssh is not None:
                repos.append(
                    Repository(path, assignment, is_bare=True, is_local=False,
                               ssh=self.ssh, student_username=self.username))
            else:
                repos.append(
                    Repository(path, assignment, is_bare=True,
                               student_username=self.username))
    return repos
def setUp(self):
    self.__repo = Repository()
    self.__groups = {}
    self.__idlist = []
    self.__assignmentDict = {}
    self.__undoRedo = UndoRedo()
    self.__control = controllerStudent(self.__repo, self.__undoRedo)
def create_course():
    success = "Course Creation Failed"
    if request.method == 'POST':
        cleaned_data = clean_data(request)
        r_manager = CourseRequestManager(Repository(sqldb()))
        # Separate the data into course and section parts
        course_data = {}
        section_data = {}
        for key, value in cleaned_data.items():
            if key.startswith("courses"):
                course_data[key] = value
            else:
                section_data[key] = value
        # Add the course first
        course_success = r_manager.add(course_data)
        if course_success:
            # Get the course_id
            course_results, course_field_names = r_manager.read(
                criteria=course_data)
            course_info = convert_sql(course_results, course_field_names)
            course_id = course_info[0]["course_id"]
            section_data["course_id"] = course_id
            # Add the section to the course
            section_manager = SectionRequestManager(Repository(sqldb()))
            section_success = section_manager.add(section_data)
            if section_success:
                success = "HOORAY! Course and Section Successfully Created"
            else:
                r_manager.delete(course_data)
                success = "Section Creation Failed, Course Deleted"
    return {'success': success}
class BaseHaskiAction(object):
    """This class is the abstract base class for Haski tasks."""
    __metaclass__ = ABCMeta

    def __init__(self):
        self._repository = Repository()

    @abstractmethod
    def __call__(self, namespace):
        """This method is invoked by argparse and should do the actual work.

        By default, it is abstract and does nothing, so it forces you to
        implement it.
        """
        pass

    def get_commit(self, namespace):
        """This function gets the commit on which an action is to be
        performed.
        """
        commit = self._repository.get_commit(namespace.revision)
        return commit

    def get_repository_location(self):
        """Returns the directory where this repository is located."""
        return self._repository.get_path()
def test_status_untracked_files(self):
    path, content = ('1.txt', '1\n')
    write_to_file(path, content)
    repo = Repository()
    untracked_files = repo.get_untracked_files()
    self.assertEqual(untracked_files, ['1.txt'])
    Command.cmd_status()
def add_endpoint(my_request):
    if not my_request.pmh_url:
        return None

    endpoint_with_this_id = Endpoint.query.filter(
        Endpoint.repo_request_id == my_request.id).first()
    if endpoint_with_this_id:
        print u"one already matches {}".format(my_request.id)
        return None

    raw_endpoint = my_request.pmh_url
    clean_endpoint = raw_endpoint.strip()
    clean_endpoint = clean_endpoint.strip("?")
    # note: flags must be passed by keyword; as a positional argument it
    # would be interpreted as re.sub's count parameter
    clean_endpoint = re.sub(ur"\?verb=.*$", u"", clean_endpoint,
                            flags=re.IGNORECASE)
    print u"raw endpoint is {}, clean endpoint is {}".format(
        raw_endpoint, clean_endpoint)

    matching_endpoint = Endpoint()
    matching_endpoint.pmh_url = clean_endpoint

    repo_matches = my_request.matching_repositories()
    if repo_matches:
        matching_repo = repo_matches[0]
        print u"yay! for {} {} matches repository {}".format(
            my_request.institution_name, my_request.repo_name, matching_repo)
    else:
        print u"no matching repository for {}: {}".format(
            my_request.institution_name, my_request.repo_name)
        matching_repo = Repository()

    # overwrite stuff with request
    matching_repo.institution_name = my_request.institution_name
    matching_repo.repository_name = my_request.repo_name
    matching_repo.home_page = my_request.repo_home_page
    matching_endpoint.repo_unique_id = matching_repo.id
    matching_endpoint.email = my_request.email
    matching_endpoint.repo_request_id = my_request.id
    matching_endpoint.ready_to_run = True
    matching_endpoint.set_identify_and_initial_query()

    db.session.merge(matching_endpoint)
    db.session.merge(matching_repo)
    print u"added {} {}".format(matching_endpoint, matching_repo)
    print u"see at url http://unpaywall.org/sources/repository/{}".format(
        matching_endpoint.id)
    safe_commit(db)
    print "saved"

    print "now sending email"
    # get the endpoint again, so it comes with all the meta info etc
    matching_endpoint = Endpoint.query.get(matching_endpoint.id)
    matching_endpoint.contacted_text = "automated welcome email"
    matching_endpoint.contacted = datetime.datetime.utcnow().isoformat()
    safe_commit(db)
    send_announcement_email(matching_endpoint)
    print "email sent"

    return matching_endpoint
def setUp(self):
    self.__undoRedo = UndoRedo()
    self.__students = Repository()
    self.__assignments = Repository()
    self.__grades = Repository()
    self.__ui = UI(self.__students, self.__assignments,
                   self.__grades, self.__undoRedo)
def test_rm_no_cached(self):
    entries = Repository().index.entries
    self.assertIn(self.path, entries)
    Command.cmd_rm(self.path, False)
    entries = Repository().index.entries
    self.assertNotIn(self.path, entries)
    self.assertFalse(os.path.exists(self.path))
def query_china_user(first, after):
    r1 = Repository.query_pinned()
    r2 = Repository.query_popular()
    q = """
    {{
      search(first: {},{} query: "{}", type: USER) {{
        pageInfo {{
          endCursor
          hasNextPage
          hasPreviousPage
          startCursor
        }}
        edges {{
          node {{
            ... on User {{
              login
              name
              url
              {}
              {}
            }}
          }}
        }}
      }}
    }}
    """.format(first, after, config.user_query, r1, r2)
    return q
def __init__(self, filename=None):
    if not filename:
        raise FileRepositoryException("Please set a filename")
    self._filename = filename
    Repository.__init__(self)
    self._load()
class Token:
    def __init__(self):
        self.repo = Repository(METRICS_NETWORK_ID)

    def process_token(self, daemon_id):
        print("process_token::daemon_id: ", daemon_id)
        result = {}
        token = secrets.token_urlsafe(64)
        exist_info = self.daemon_id_exist(daemon_id=daemon_id)
        if exist_info.get('status', False):
            # Rotate the existing token
            qry = "UPDATE daemon_token SET token = %s WHERE daemon_id = %s"
            updt_info = self.repo.execute(qry, [token, daemon_id])
            print(updt_info)
            if updt_info[0] > 0:
                return {"token": token}
            else:
                return {"error": "unable to generate token"}
        # No token yet: insert one that expires in six hours
        ctime_epoch = calendar.timegm(time.gmtime())
        expiration = ctime_epoch + (6 * 60 * 60)
        qry = "INSERT INTO daemon_token (daemon_id, token, expiration, row_updated, row_created) " \
              "VALUES(%s, %s, %s, %s, %s)"
        res = self.repo.execute(qry, [daemon_id, token, expiration,
                                      dt.utcnow(), dt.utcnow()])
        if len(res) > 0 and res[0] > 0:
            result["token"] = token
        return result

    def daemon_id_exist(self, daemon_id):
        print("daemon_id_exist::daemon_id: ", daemon_id)
        qry = "SELECT * FROM daemon_token WHERE daemon_id = %s"
        res = self.repo.execute(qry, [daemon_id])
        if len(res) > 0:
            return {'status': True, "token": res[0]['token']}
        return {'status': False}
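# Hedged usage sketch for Token above; Repository, METRICS_NETWORK_ID and
# the daemon_token table come from the surrounding project and are assumed
# here, not defined.
token_service = Token()
result = token_service.process_token("daemon-42")
if "token" in result:
    print("issued token:", result["token"])
else:
    print("failed:", result.get("error", "no row inserted"))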
def initialize(self):
    undoStack = Stack()
    redoStack = Stack()
    self.__valid_movie = ValidMovie()
    self.__valid_client = ValidClient()
    self.__valid_rental = ValidRental()
    self.__movies_repository = Repository()
    self.__clients_repository = Repository()
    self.__rentals_repository = Repository()
    self.__service_movies = ServiceMovies(self.__movies_repository,
                                          self.__valid_movie, undoStack,
                                          redoStack, self.__rentals_repository)
    self.__service_clients = ServiceClients(self.__clients_repository,
                                            self.__valid_client, undoStack,
                                            redoStack, self.__rentals_repository)
    self.__service_rentals = ServiceRentals(self.__movies_repository,
                                            self.__clients_repository,
                                            self.__rentals_repository,
                                            self.__valid_rental, undoStack,
                                            redoStack)
    self.__service_movies.generate_movies()
    self.__service_clients.generate_client()
    self.__service_rentals.rent_movie(1, 1, 1, '1/1/1', '2/1/1')
    self.__service_rentals.return_movie(1, '3/1/1')
    self.__service_rentals.rent_movie(3, 1, 1, '5/1/1', '10/1/1')
    self.__service_rentals.return_movie(3, '15/1/1')
    self.__service_rentals.rent_movie(2, 2, 2, '2/2/2', '3/3/3')
    self.__service_rentals.return_movie(2, '7/2/2')
    self.__service_rentals.rent_movie(4, 3, 4, '1/1/1', '2/1/1')
    self.__service_rentals.return_movie(4, '2/1/1')
def cmd_checkout(branch):
    b = Branch()
    b.switch_branch(branch)
    repo = Repository()
    pre_entries = dict(repo.index.entries)
    repo.rebuild_index_from_commit(repo.branch.head_commit)
    repo.rebuild_working_tree(pre_entries)
def edit_item_page(itemid):
    webPage = SetWebPageVM()
    if webPage.isLoggedIn:
        repo = Repository()
        item = repo.getItemById(itemid)
        return render_template('edititem.html', edititem=item, WebPage=webPage)
    return redirect('/')
def app_setup(app):
    app.config['CELERY_ACCEPT_CONTENT'] = ['json']
    app.config['CELERY_TASK_SERIALIZER'] = 'json'
    app.config['CELERY_RESULT_SERIALIZER'] = 'json'
    app.config['CELERY_BROKER_URL'] = 'redis://localhost:6379/2'
    app.config['CELERY_BACKEND'] = 'redis://localhost:6379/3'
    app.config['CELERY_QUEUES'] = (
        Queue('transplant', Exchange('transplant'), routing_key='transplant'),
    )
    app.src_dir = tempfile.mkdtemp(dir=test_temp_dir)
    app.dst_dir = tempfile.mkdtemp(dir=test_temp_dir)
    app.config['TRANSPLANT_WORKDIR'] = tempfile.mkdtemp(dir=test_temp_dir)
    app.config['TRANSPLANT_REPOSITORIES'] = [{
        'name': 'test-src',
        'path': app.src_dir
    }, {
        'name': 'test-dst',
        'path': app.dst_dir
    }]
    app.src = Repository.init(app.src_dir)
    app.dst = Repository.init(app.dst_dir)
    _set_test_file_content(app.src_dir, "Hello World!\n")
    app.src.commit("Initial commit", addremove=True, user="******")
    app.dst.pull(app.src_dir, update=True)
def validate_name(self, min_length, model, parameter):
    if len(parameter) < min_length:
        message = "The name should be at least {} symbols"
        raise Exception(message.format(min_length))
    repository = Repository(self.__db_name)
    if repository.is_name_used(model, parameter):
        raise Exception("This name is already taken")
def test_generate_id(self):
    repo = Repository()
    uids = []
    for _ in range(1000):
        uid = repo.add_item(1)
        self.assertNotIn(uid, uids)
        uids.append(uid)
def test_reset_default(self):
    Command.cmd_reset(self.first_commit, is_soft=False, is_hard=False)
    self.assertEqual(Branch().head_commit, self.first_commit)
    repo = Repository()
    uncommitted_files = repo.get_uncommitted_files()
    unstaged_files = repo.get_unstaged_files()
    self.assertFalse(uncommitted_files['modified'])
    self.assertIn(self.path, unstaged_files['modified'])
def delete_item_page(itemid):
    webPage = SetWebPageVM()
    repo = Repository()
    item = repo.getItemById(itemid)
    if webPage.isLoggedIn:
        if item.user == login_session['username']:
            repo.deleteFromDatabase(item)
    return redirect('/')
def setup_hacker_scrape(db_name):
    content_provider = lambda url: requests.get(url).text
    repository = Repository(db_name)
    repository.create_table()
    scrape_manager = ScrapeManager(content_provider)
    ranker = HeaderRanker()
    return HackerScrape(repository, scrape_manager, ranker)
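# Hypothetical one-shot run of the factory above; the database filename is
# a placeholder and the entry-point method on HackerScrape is an
# assumption, not confirmed API.
scraper = setup_hacker_scrape('hacker_news.db')
# scraper.run()  # assumed HackerScrape entry point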
def main():
    repository = Repository()
    repository.mapp.randomize()
    repository.random_sensors(SENSORS_COUNT, SENSORS_MAX_POWER)
    controller = Controller(repository)
    ui = UI(controller)
    ui.run()
class TwitterDataProcess():
    def __init__(self, topics, file_name):
        self.file_name = file_name
        self.topics = topics
        self.tweets = []
        self.repository = Repository()
        self.process_tweets()

    @staticmethod
    def compact_tweet_text(tweet_text):
        return tweet_text.replace('\n', ' ').replace('\r', '').lower()

    @staticmethod
    def to_dictionary(coordinate):
        keys = ["Lat", "Lng"]
        return dict(zip(keys, coordinate))

    def check_key_words(self, topic_key, tweet_text):
        if re.search(topic_key, tweet_text):
            for word in self.topics.get(topic_key):
                if re.search(word, tweet_text):
                    return True
        return False

    def process_tweets(self):
        self.tweets = []
        with open(self.file_name, "r") as tweets_file:
            for line in tweets_file:
                tweet = dict()
                tweet_valid = False
                try:
                    if line.strip() != '':
                        raw_tweet = json.loads(line)
                        text = TwitterDataProcess.compact_tweet_text(raw_tweet['text'])
                        for topic_key in self.topics:
                            if self.check_key_words(topic_key, text):
                                tweet_valid = True
                                tweet[topic_key] = True
                            else:
                                tweet[topic_key] = False
                        if tweet_valid:
                            tweet['text'] = text
                            tweet['lang'] = raw_tweet['lang']
                            tweet['city'] = raw_tweet['place']['name'] if raw_tweet['place'] is not None else None
                            if raw_tweet['geo'] is None:
                                tweet['coordinates'] = False
                            else:
                                tweet['coordinates'] = True
                                tweet.update(TwitterDataProcess.to_dictionary(raw_tweet['geo']['coordinates']))
                            self.tweets.append(tweet)
                            # Flush to the repository in batches
                            if len(self.tweets) > 1000:
                                self.repository.save_many(self.tweets)
                                self.tweets = []
                except Exception as e:
                    print(str(e))
                    continue
        self.repository.save_many(self.tweets)
def cmd_diff(cached=False, use_less=True):
    if cached:
        res = Repository().diff_between_index_and_head_tree()
    else:
        res = Repository().diff_between_working_tree_and_index()
    if use_less:
        less_str(res)
    else:
        print res
def get_name_as_in_ADS(target_name, names_in_result: list):
    """For presentation in the UI, figures out how to capitalize a name.

    The user may have typed in the query names in all lowercase. For the
    large banner at the top of the page, it would be nice to format the
    names more properly. Rather than just defaulting to
    first-letter-uppercase, we can use our ADS data to present the name in
    a form (or one of the forms) ADS has for the name. This means we may
    also pick up diacritics.

    Looks through all the publications belonging to the name and how the
    author's name appears in those publications. Grabs (one of) the
    most-detailed forms. If it contains more given names than the target
    name, truncates the list. Shortens given names to initials if the
    target name has an initial at that position."""
    # Unique-ify names_in_result
    names_in_result = list(set(names_in_result))
    repo = Repository(can_skip_refresh=True)
    names_in_result = [ADSName.parse(name) for name in names_in_result]
    orcid = is_orcid_id(target_name)
    if orcid:
        record = repo.get_author_record_by_orcid_id(target_name)
    else:
        target_name = ADSName.parse(target_name)
        record = repo.get_author_record(target_name)
    aliases = record.appears_as.keys()
    aliases = [ADSName.parse(alias) for alias in aliases]
    # Remove all aliases that aren't consistent with any of the name forms
    # used in the set of possible chains. E.g. if the user searched for
    # "Last" and all chains terminate at "Last, B.", then we shouldn't view
    # "Last, I." as a viable alias.
    aliases = [alias for alias in aliases if alias in names_in_result]
    # Grab the most-detailed alias. As tie-breaker, choose the form with the
    # most publications.
    alias = sorted([(a.level_of_detail,
                     len(record.appears_as[a.original_name]),
                     a.original_name)
                    for a in aliases])[-1][-1]
    alias = ADSName.parse(alias, preserve=True)
    if orcid:
        gns = alias.given_names
    else:
        # Trim it down to size
        gns = alias.given_names
        if len(gns) > len(target_name.given_names):
            gns = gns[:len(target_name.given_names)]
        # Ensure we have initials where we need them
        gns = [
            gn if len(tgn) > 1 else gn[0]
            for gn, tgn in zip(gns, target_name.given_names)
        ]
    final_name = ADSName.parse(alias.last_name, *gns, preserve=True)
    return final_name.full_name
class Weather:
    def __init__(self, apikey, cities):
        self.cities = cities
        self.api = OpenWeatherAPI(apikey)
        self.repo = Repository()

    def forecast(self):
        for city in self.cities:
            self.repo.push("weather|" + city,
                           self.api.three_hour_forecast(city))
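# Minimal usage sketch for Weather above, assuming the OpenWeatherAPI and
# Repository classes from the surrounding project; the key and city names
# are placeholders.
weather = Weather(apikey="<api-key>", cities=["London", "Oslo"])
weather.forecast()  # pushes one "weather|<city>" entry per city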
def __init__(self, repoid, repoConfig, maxFileLength=1024 * 1024 * 1024 * 2,
             retryTime=3, socketTimeout=5):
    Repository.__init__(self, repoid)
    self.repoConfig = repoConfig
    self.cachedir = None
    self.expireTime = None
    self.enabled = bool(repoConfig.enabled)
    self.maxFileLength = maxFileLength  # default: 2 GiB
    self.retryTime = retryTime
    self.socketTimeout = socketTimeout
def post_add_item():
    webPage = SetWebPageVM()
    if webPage.isLoggedIn:
        repo = Repository()
        repo.addItemToDatabase(request.form['name'],
                               request.form['description'],
                               request.form['category'],
                               login_session['username'])
        return redirect('/')
    return render_template('additem.html', WebPage=webPage)
def run_tests(student_repo: Repository, test_repo: Repository,
              report_repo: Repository, call_action_path, student: Student,
              email_queue: Queue):
    starting_dir = os.getcwd()

    code_tempdir = TemporaryDirectory()
    test_tempdir = TemporaryDirectory()
    report_tempdir = TemporaryDirectory()

    code_path = code_tempdir.name
    test_path = test_tempdir.name
    report_path = report_tempdir.name

    report_file_path = ''
    tmp_report_repo = None

    try:
        tmp_report_repo = report_repo.clone_to(report_path)
        student_repo.clone_to(code_path)
        test_repo.clone_to(test_path)
    except CommandError as e:
        error = 'Failed to clone:\n{0}'.format(e)
        report_failure(error, student.email_address, student_repo.assignment,
                       email_queue, report_file_path, tmp_report_repo)
        os.chdir(starting_dir)
        return

    report_filename = 'report-{0}.txt'.format(strftime('%Y-%m-%d-%H:%M:%S-%Z'))
    for item in os.listdir(report_path):
        item_path = os.path.join(report_path, item)
        if os.path.isdir(item_path) and student.username in item:
            report_file_path = os.path.join(item_path, report_filename)
            break

    os.chdir(test_path)
    try:
        output = call_action(call_action_path, code_path, student.first_name,
                             student.last_name, student.username,
                             student.email_address)
    except CommandError as e:
        error = '!!! ERROR: SCRIPT RETURNED NON-ZERO EXIT CODE !!!\n\n'
        error += str(e)
        report_failure(error, student.email_address, student_repo.assignment,
                       email_queue, report_file_path, tmp_report_repo)
        os.chdir(starting_dir)
        return

    write_report(report_file_path, output, tmp_report_repo)
    email_subject = student_repo.assignment + ' submission test results'
    email_queue.put(Email(student.email_address, email_subject, output))
    os.chdir(starting_dir)
def main(argv):
    config_file = None
    read_only = False
    verbose = False
    try:
        # -h (optional), -c (mandatory, hence 'c:'), -r (optional)
        opts, args = getopt.getopt(argv, "hc:rv", [])
    except getopt.GetoptError:
        print 'repository.py [-h] [-r] [-v] -c <cfgfile>'
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print 'repository.py [-h] [-r] [-v] -c <cfgfile>'
            sys.exit(0)
        elif opt in ("-c", "--cfgfile"):
            config_file = arg
        elif opt == '-r':
            read_only = True
        elif opt == '-v':
            verbose = True
    if config_file is None:
        print 'repository.py [-h] [-r] [-v] -c <cfgfile>'
        sys.exit(2)
    if verbose:
        print 'Using cfg file:', config_file

    repository = Repository(config_file, verbose)

    config = ConfigParser.RawConfigParser()
    config.read(config_file)
    sensor_names = config.get("sensors", "names").strip().split(',')
    if verbose:
        print 'Sensor names:', sensor_names

    sensors = []
    for name in sensor_names:
        if verbose:
            print 'Adding sensor:', name
        sensors.append(Sensor(name, config_file))

    for sensor in sensors:
        if verbose:
            print 'Reading sensor', sensor.sensor_id
        readings = sensor.get_readings()
        if readings is not None:
            if read_only:
                s = json.dumps(readings, sort_keys=True, indent=4,
                               separators=(',', ': '))
                print s
            else:
                repository.save_readings(readings)
    print 'Done'
def cmd_add(workspace, file):
    if file == '.':
        file_list = []
        for root, dirs, files in os.walk('.'):
            if '.git' in dirs:
                dirs.remove('.git')
            # use a distinct loop variable so the 'file' parameter
            # is not shadowed
            for name in files:
                file_list.append(os.path.join(root[2:], name))
        Repository(workspace).stage(file_list)
    else:
        Repository(workspace).stage([file])
def get_repo(self, name):
    repo = self.db.get("SELECT id FROM repositories WHERE distro_id = %s AND name = %s",
                       self.id, name)
    if not repo:
        return
    repo = Repository(self.pakfire, repo.id)
    repo._distro = self
    return repo
def repositories(self):
    _repos = self.db.query("SELECT id FROM repositories WHERE distro_id = %s",
                           self.id)
    repos = []
    for row in _repos:
        repo = Repository(self.pakfire, row.id)
        repo._distro = self
        repos.append(repo)
    return sorted(repos)
def prepare_mock_repositories(self):
    self.src_dir = tempfile.mkdtemp()
    self.dst_dir = tempfile.mkdtemp()
    self.workdir = tempfile.mkdtemp()
    self.src = Repository.init(self.src_dir)
    self.dst = Repository.init(self.dst_dir)
    self._set_test_file_content(self.src_dir, "Hello World!\n")
    self.src.commit("Initial commit", addremove=True)
    self.dst.pull(self.src_dir, update=True)
def transplant(self, src_dir, dst_dir, items):
    src_repo = Repository(src_dir)
    dst_repo = Repository(dst_dir)
    try:
        for item in items:
            self._transplant_item(src_repo, dst_repo, item)
        tip = dst_repo.id(id=True)
        logger.info('tip: %s', tip)
        return {'tip': tip}
    finally:
        self._cleanup(dst_repo)
def test_status_unstaged_files(self):
    file_list = [('1.txt', '1\n'), ('2.txt', '2\n')]
    for path, content in file_list:
        write_to_file(path, content)
        Command.cmd_add(path)
    write_to_file(file_list[0][0], '11\n')
    os.remove(file_list[1][0])
    repo = Repository()
    unstaged_files = repo.get_unstaged_files()
    self.assertEqual(unstaged_files['modified'], [file_list[0][0]])
    self.assertEqual(unstaged_files['deleted'], [file_list[1][0]])
    Command.cmd_status()
def getLocally():
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.WARNING)
    file_handler = logging.FileHandler('mergehistory.log')
    logger.addHandler(file_handler)
    session = getSession()
    line_number = 0
    with gzip.open(zipfile, "rb") as input_file:
        for line in input_file:
            line_number += 1
            try:
                user_repositories = json.loads(line)
                for repo in user_repositories:
                    if "created_at" in repo and repo["created_at"]:
                        year = repo["created_at"][:4]
                        is_success = Repository.update(session, repo['id'],
                                                       creation_date=year,
                                                       main_lang=repo['language'])
                        if is_success:
                            logger.info('%d %s %s' % (repo['id'], year, repo['language']))
                        else:
                            logger.warning(str(line_number) + ':' + str(repo['id']) + ' update was unsuccessful')
                    else:
                        print repo
                logger.info('user repos updated')
                session.commit()
                file_handler.flush()
            except Exception:
                logger.error(str(line_number) + ':' + line)
    logging.shutdown()
def print_repos(prefix):
    paths = os.listdir(os.path.join(config.REPO_ROOT, prefix))
    print "\n[ Repositories in %s ]" % os.path.join(config.REPO_ROOT, prefix)
    paths.sort()
    for path in paths:
        if Repository.exists(os.path.join(config.REPO_ROOT, prefix, path)):
            print path
def predict(self, x_test):
    desktops = Repository.get_data('desktops')
    mobiles_sample = x_test
    merge_key = 'anonymous_c2'
    mobiles_sample = mobiles_sample[['device_id', merge_key]].set_index(
        [merge_key, 'device_id'])
    desktops_sample = desktops[['cookie_id', merge_key]].set_index([merge_key])
    merge_data = mobiles_sample.join(desktops_sample).reset_index(1)
    return merge_data
def cmd_reset(commit_sha1, is_soft=False, is_hard=False):
    repo = Repository()
    pre_entries = dict(repo.index.entries)
    repo.update_head_commit(commit_sha1)
    if not is_soft:
        repo.rebuild_index_from_commit(commit_sha1)
    if is_hard:
        repo.rebuild_working_tree(pre_entries)
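# Usage sketch for cmd_reset above, mirroring git's reset modes
# ('abc123' is a placeholder commit sha1):
cmd_reset('abc123', is_soft=True)   # move HEAD only
cmd_reset('abc123')                 # default: also rebuild the index
cmd_reset('abc123', is_hard=True)   # additionally rebuild the working tree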
def createRepository(self):
    self.repository = Repository(self.appdir)
    self.repository.set_minecraft_target(self.minecraft_target)
    # Check if local packages repository exists and load it too
    if os.path.isfile(self.appdir + os.sep + 'local-packages.json'):
        self.repository.populate('', False)
        self.repository.populate(self.appdir + os.sep + 'local-packages.json')
    else:
        self.repository.populate('', True)
def test_status_uncommitted_files(self):
    file_list = [('1.txt', '1\n'), ('2.txt', '2\n')]
    for path, content in file_list:
        write_to_file(path, content)
        Command.cmd_add(path)
    Command.cmd_commit('first ci')
    write_to_file(file_list[0][0], '11\n')
    Command.cmd_rm(file_list[1][0])
    new_path = '3.txt'
    new_content = '3\n'
    write_to_file(new_path, new_content)
    Command.cmd_add('.')
    repo = Repository()
    uncommitted_files = repo.get_uncommitted_files()
    self.assertEqual(uncommitted_files['modified'], [file_list[0][0]])
    self.assertEqual(uncommitted_files['deleted'], [file_list[1][0]])
    self.assertEqual(uncommitted_files['new file'], [new_path])
    Command.cmd_status()
def backup():
    parser = argparse.ArgumentParser(description="Backup utility")
    parser.add_argument("repos", nargs='+')
    parser.add_argument('--pendrive', action='store_const',
                        const=PendriveStorage(), default=DropboxStorage(),
                        help="use pendrive as storage", dest="storage")
    args = parser.parse_args()
    args.storage.start()
    for location, repo in Repository.read_repos_from_file("/etc/gitbackup.conf").items():
        if any(os.path.samefile(location, rep_dir) for rep_dir in args.repos):
            repo.send_data_to(args.storage.name)
    args.storage.stop()
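# Example invocation (the script name is an assumption; only repositories
# that appear both in /etc/gitbackup.conf and on the command line are sent):
#   python backup.py /srv/repos/project-a --pendrive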
def clone(name):
    repo_url = get_repo_url(name)
    repo_dir = get_repo_dir(name)
    repo_base_url = get_repo_base_url(name)
    if not os.path.exists(repo_dir):
        logger.info('cloning repository "%s"', name)
        repository = Repository.clone(repo_base_url, repo_dir)
    else:
        logger.info('repository "%s" is already cloned', name)
        repository = Repository(repo_dir)
    repository.set_config({
        "paths": {
            "default": repo_url,
            "base": repo_base_url
        }
    })
    return repository
def __init__(self, path):
    """To instantiate a stash, provide a path that points to a location
    somewhere in a repository.
    """
    # Check if the patches path exists, and in case it does not, create it.
    if not os.path.exists(self.STASH_PATH):
        os.mkdir(self.STASH_PATH)
    self.repository = Repository(path)
    super(Stash, self).__init__()
def get(self, training=None):
    training = training or Training()
    parts = Participants()
    org = Organization()
    sched = Schedules()
    when = When()
    costs = Costs()
    list_catg = [(item.value, item.name) for item in CategoryType]
    with Session(database='training') as session:
        repo = Repository(session)
        operator = repo.get_all(coll_name='organization', conditions={},
                                projection={'org_identity': 1, 'org_name': 1,
                                            '_id': 0})
        list_op = [(item['org_identity'], item['org_name'])
                   for item in operator]
    return self.render_response("training/add.html", training=training,
                                list_catg=list_catg, list_op=list_op,
                                parts=parts, org=org, sched=sched, when=when,
                                costs=costs)
def _run_create(self, options, args):
    """Create the repository. Save the yaml descriptor."""
    prefix, name = __parse_prefix_name(args[0])
    print "Name = %s" % name
    if prefix:
        print "Prefix = %s" % prefix
    print "Fisheye = %s" % options.fisheye
    repo = Repository(prefix, name, options.fisheye)
    if Repository.exists(repo.path_to_repo):
        raise CommandError('A repository named "%s" already exists.' % repo.path)
    try:
        __check_path_dir(repo)
        print 'Creating the repository ...'
        repo.create()
        print 'Created the repository'
        print repo.apache_conf
        apache_conf.process_to_file(repo.apache_conf,
                                    {'repopath': repo.path,
                                     'users': ', '.join(repo.users()),
                                     'apache_authz_path': repo.apache_authz})
        print "Created apache conf"
        __write_repository_yaml(repo)
        print "CREATED repository at %s." % repo.path
        if options.fisheye:
            __write_repository_fisheyeauth(repo)
            fisheye_admin = FisheyeAdmin(password=config.FISHEYE_ADMIN_PW)
            if fisheye_admin.create_repository(repo, __get_description()):
                print "Successfully created a fisheye instance for %s" % repo.name
    except Exception, e:
        raise CommandError("Failed to create the repository at %s\n" % repo.path_to_repo,
                           "The original error was: %s: %s" % (type(e), e))
def _run_commit(self, options, args):
    """Commit the repository yaml descriptor file."""
    prefix, name = __parse_prefix_name(args[0])
    try:
        repo = __load_repository_from_yaml(prefix, name)
    except CommandError:
        repo = Repository(prefix, name)
        if not Repository.exists(repo.path_to_repo):
            raise CommandError('Cannot commit a descriptor for a repo that doesn\'t exist.')
    if options.add:
        __add_yaml(repo)
    __checkin_yaml(repo, "Committing yaml descriptor for %s." % repo.path)