def commit(self, msg, ref="HEAD"): cur_tree = self.index.do_commit(self.workspace) branch_name = read_file(os.path.join(self.workspace, ".git", "HEAD")).strip("\n").rsplit("/", 1)[-1] ref_path = os.path.join(self.workspace, ".git", "refs", "heads", branch_name) parent_sha1 = None if os.path.exists(ref_path): parent_sha1 = read_file(ref_path) committer_name = self.config.config_dict["user"]["name"] committer_email = "<%s>" % (self.config.config_dict["user"]["email"]) commit_time = int(time.time()) # TO FIX commit_timezone = time.strftime("%z", time.gmtime()) commit = Commit( self.workspace, tree_sha1=cur_tree.sha1, parent_sha1=parent_sha1, name=committer_name, email=committer_email, timestamp=commit_time, timezone=commit_timezone, msg=msg, ) write_object_to_file(commit.path, commit.content) write_to_file(ref_path, commit.sha1)
def stage(self, files):
    try:
        for file in files:
            content = read_file(file)
            blob = Blob(self.workspace, content)
            if not os.path.exists(blob.path):
                write_object_to_file(blob.path, blob.content)
            stat = os.stat(os.path.join(self.workspace, file))
            self.index.add_entry(
                file,
                ctime=stat.st_ctime,
                mtime=stat.st_mtime,
                dev=stat.st_dev,
                ino=stat.st_ino,
                mode=cal_mode(stat.st_mode),
                uid=stat.st_uid,
                gid=stat.st_gid,
                size=stat.st_size,
                sha1=blob.sha1,
                flags=0,
            )
        self.index.write_to_file()
    except Exception as e:
        print("stage file %s error: %s" % (file, e))
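
# A self-contained sketch of what Blob presumably computes: git stores a file
# as "blob <len>\0<content>", names it by the SHA-1 of that store, and writes
# it under .git/objects/<first two hex chars>/<remaining 38>. The project's
# Blob class may differ in detail.
import hashlib
import os


def sketch_blob_sha1_and_path(workspace, content):
    data = content if isinstance(content, bytes) else content.encode("utf-8")
    store = ("blob %d\x00" % len(data)).encode("utf-8") + data
    sha1 = hashlib.sha1(store).hexdigest()
    path = os.path.join(workspace, ".git", "objects", sha1[:2], sha1[2:])
    return sha1, path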
def enter_directory_and_initilize_empty_meta_data():
    os.chdir('.pygit')
    try:
        utils.write_object_to_file(globalVars.index_file_name, set())
        os.mkdir(globalVars.blob_object_location)
    except OSError as e:
        if e.errno == errno.ENOENT:
            sys.stderr.write('Failed to create repo.\n')
    os.chdir('../')
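
# A plausible pickle-based sketch of the utils helpers the pygit snippets in
# this file rely on; read_object_from_file / write_object_to_file are not
# shown here, so the real implementations may differ.
import pickle


def write_object_to_file(path, obj):
    """Serialize obj to path."""
    with open(path, 'wb') as f:
        pickle.dump(obj, f)


def read_object_from_file(path):
    """Deserialize and return the object stored at path."""
    with open(path, 'rb') as f:
        return pickle.load(f)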
def commit(self, msg):
    new_tree = self.index.do_commit(self.workspace)
    committer_name = self.config.config_dict['user']['name']
    committer_email = '<%s>' % (self.config.config_dict['user']['email'])
    commit_time = int(time.time())
    # Use local time for the offset; strftime("%z", time.gmtime()) always yields "+0000".
    commit_timezone = time.strftime("%z", time.localtime())
    commit = Commit(self.workspace, sha1=None, tree_sha1=new_tree.sha1,
                    parent_sha1=self.head_tree, name=committer_name,
                    email=committer_email, timestamp=commit_time,
                    timezone=commit_timezone, msg=msg)
    write_object_to_file(commit.path, commit.content)
    write_to_file(self.head_path, commit.sha1)
def _build_tree(path):
    dir_arr = []
    file_arr = []
    for name, entry in path.items():
        if isinstance(entry, dict):
            mode = stat.S_IFDIR
            sha1 = _build_tree(entry).sha1
            dir_arr.append({'name': name, 'mode': mode, 'sha1': sha1})
        else:
            (mode, sha1) = entry
            file_arr.append({'name': name, 'mode': mode, 'sha1': sha1})
    newtree = Tree(workspace,
                   sorted(dir_arr, key=lambda x: x['name']) +
                   sorted(file_arr, key=lambda x: x['name']))
    write_object_to_file(newtree.path, newtree.content)
    return newtree
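
# A sketch of the nested dict _build_tree() expects: directory names map to
# sub-dicts, file names map to (mode, sha1) tuples. The names and hashes
# below are made up for illustration.
example_path = {
    "src": {
        "main.py": (0o100644, "a94a8fe5ccb19ba61c4c0873d391e987982fbbd3"),
    },
    "README.md": (0o100644, "da39a3ee5e6b4b0d3255bfef95601890afd80709"),
}
# _build_tree(example_path) would recurse into "src", write a Tree object for
# it, then write and return the root Tree referencing both entries.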
def solve_import(root, dst_path=None, add_path=None, extra_run_lines=None,
                 cmd_lines=None, select_pkgs="select-one",
                 optim_target="approximate", explicit_installed="direct",
                 output_type="Dockerfile"):
    while root.endswith(os.sep):
        root = root[:-1]
    pkgver_dict, msg = parse_pyvers_and_pkgvers(root, select_pkgs=select_pkgs)
    if pkgver_dict is None:
        return None
    pkgvers, sys_pkgs, pyver = solve_package(
        pkgver_dict, optim_target=optim_target,
        explicit_installed=explicit_installed)
    if pkgvers is None:
        return None
    if output_type == "Dockerfile":
        if add_path is None:
            add_path = root
        output = generate_dockerfile(pkgvers, sys_pkgs, add_path, msg,
                                     pyver=pyver,
                                     extra_run_lines=extra_run_lines,
                                     cmd_lines=cmd_lines)
        if not dst_path:
            folder_path = get_folder_path(root)
            dst_path = os.path.join(folder_path, "Dockerfile")
        if (not dst_path.endswith("Dockerfile") and os.path.exists(dst_path)
                and os.path.isdir(dst_path)):
            dst_path = os.path.join(dst_path, "Dockerfile")
        with open(dst_path, "w") as dst_file:
            dst_file.writelines(output)
    else:  # json
        output = generate_json(pkgvers, sys_pkgs, pyver, msg)
        if not dst_path:
            folder_path = get_folder_path(root)
            dst_path = os.path.join(folder_path, "dependency.json")
        if (not dst_path.endswith("dependency.json") and os.path.exists(dst_path)
                and os.path.isdir(dst_path)):
            dst_path = os.path.join(dst_path, "dependency.json")
        write_object_to_file(dst_path, output)
    return dst_path, msg
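
# A usage sketch for solve_import() above; the project path is made up. Both
# the parse and solve failure cases return a bare None, so guard before
# unpacking the (dst_path, msg) tuple.
result = solve_import("/path/to/project", output_type="Dockerfile")
if result is not None:
    dst_path, msg = result
    print("Dockerfile written to %s" % dst_path)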
def add(relative_file_path):
    try:
        pygit_repo_path = utils.find_pygit_repo()
    except utils.RepoNotFoundException:
        sys.stderr.write('Could not find pygit repo.\n')
        sys.exit(41)
    # Check if file path exists
    if not os.path.exists(relative_file_path):
        sys.stderr.write('File to add does not exist.\n')
        sys.exit(43)
    # Check if previously tracked and so forth for other status information.
    start_cwd = os.getcwd()
    os.chdir(pygit_repo_path + '/.pygit')
    index_set = utils.read_object_from_file(globalVars.index_file_name)
    index_set.add(relative_file_path)
    utils.write_object_to_file(globalVars.index_file_name, index_set)
    os.chdir(start_cwd)
def commit(commit_message):
    '''
    Commit current index to disk.
    '''
    repo_path = find_pygit_repo()
    current_index_path = repo_path + '/' + globalVars.current_index_file_name
    commit_log_path = repo_path + '/' + globalVars.commit_log
    if not os.path.exists(current_index_path):
        sys.stdout.write('Nothing to commit.\n')
    else:
        # This should be a more interesting data structure.
        if os.path.exists(commit_log_path):
            commit_log_list = utils.read_object_from_file(commit_log_path)
        else:
            commit_log_list = []
        # do stuff with commit_log_list
        current_index_dict = utils.read_object_from_file(current_index_path)
        for filename, file_contents in current_index_dict.items():
            # do nothing for now. Compute hash later
            pass
        # Use the first file name as the representative hash string.
        representative_hash_string = utils.compute_string_hash(next(iter(current_index_dict)))
        if not os.path.exists(globalVars.blob_object_location):
            utils.write_error_message_and_exit("Broken pygit repo. Cannot find blob objects location")
        else:
            current_commit_file_name = globalVars.blob_object_location + '/' + representative_hash_string
            utils.write_object_to_file(current_commit_file_name, current_index_dict)
            commit_log_list.append((representative_hash_string, current_commit_file_name))
            utils.write_object_to_file(commit_log_path, commit_log_list)
            os.remove(current_index_path)
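
# utils.compute_string_hash is not shown in this file; a plausible SHA-1
# based sketch is below, though the real helper may differ.
import hashlib


def compute_string_hash(s):
    """Return the SHA-1 hex digest of a string."""
    return hashlib.sha1(s.encode("utf-8")).hexdigest()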
import os
import re

import config
from ModuleParser.file_parser import parse_import_modules
from ModuleParser.folder_parser import extract_all_py_filepath, parse_custom_top_levels
from ModuleParser.module_filter import filter_custom_modules, apart_standard_modules
from utils import read_object_from_file, write_object_to_file
from ModuleParser.neo4j_reader import (get_pyvers_by_module, get_all_pyvers,
                                       get_python_features,
                                       get_pkgvers_by_module_pyvers,
                                       get_os_by_pkg, get_std_top_modules,
                                       get_rank_by_pkg)

std_top_levels = read_object_from_file(config.STD_TOP_CACHE_PATH)
if not std_top_levels:
    std_top_levels = get_std_top_modules()
    write_object_to_file(config.STD_TOP_CACHE_PATH, std_top_levels)


def parse_modules(root):  # both file and folder are ok
    # extract all custom top level modules and python files
    custom_top_levels = list()
    all_py_filepath = list()
    if os.path.isdir(root):
        custom_top_levels.extend(parse_custom_top_levels(root, need_init=False))
        all_py_filepath.extend(extract_all_py_filepath(root))
    elif root.endswith(".py"):
        all_py_filepath.append(root)
    else:
        return None, None, None, None
    # extract top and second level modules used
        pkg, method = pkgmth.split("#")
        if method == "pip" or pkg == "python":
            continue
        dep_dict[pkgmth] = dict()
        ver_dict = pkgver_dict[pkgmth]
        for ver in ver_dict:
            dep_dict[pkgmth][ver] = list()
    return dep_dict


def convert_pkgvers_to_constrain_dict(pkgvers):
    constrain_dict = dict()
    for pkgmth in pkgvers:
        pkg, method = pkgmth.split("#")
        if method == "pip" or pkg == "python":
            constrain_dict[pkg] = pkgvers[pkgmth]
        else:
            constrain_dict[pkgmth] = pkgvers[pkgmth]
    return constrain_dict


pip_deps_dict = read_object_from_file(config.PIP_DEPS_CACHE_PATH)
if not pip_deps_dict:
    print("Caching pip dependencies in file...")
    pip_deps_dict = get_pip_deps()
    write_object_to_file(config.PIP_DEPS_CACHE_PATH, pip_deps_dict)
    print("Dependencies cached.")
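
# The read-through cache pattern used twice above (std_top_levels and
# pip_deps_dict), factored into a helper; a sketch, not part of the
# original module.
def cached(cache_path, compute):
    """Return the object at cache_path, computing and caching it on a miss."""
    obj = read_object_from_file(cache_path)
    if not obj:
        obj = compute()
        write_object_to_file(cache_path, obj)
    return obj


# e.g. pip_deps_dict = cached(config.PIP_DEPS_CACHE_PATH, get_pip_deps)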