def clone_checkout_git_repository(url, repository):
    """
    Creates a new git repository record in the DB and clones the
    given 'url' by running a separate process.
    """
    url_id_known = redis.get(url)
    if url_id_known:
        # repo with this url was already cloned:
        git_repo_id = url_id_known  # reuse the old id, do not create a new record
    else:
        # generate the next repository ID:
        git_repo_id = ''.join(('git_repo_id:', str(redis.incr('git_repo_id:id'))))
        # remember which id belongs to this url:
        redis.set(url, git_repo_id)
    # store (or refresh) the repository record as a Redis hash:
    redis.hmset(git_repo_id, repository)
    # clone/checkout in a separate, non-blocking process:
    git_clone_proc = Process(target=run_git_clone_or_checkout, args=(git_repo_id,))
    git_clone_proc.start()
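
# --- Illustrative sketch only ----------------------------------------------
# The worker used above, run_git_clone_or_checkout, is defined elsewhere in the
# module and is not shown in this section. The sketch below is one possible
# shape for it, assuming the repository hash stores hypothetical 'url' and
# 'path' fields and that the Redis client was created with decode_responses=True.
import os
import subprocess

def run_git_clone_or_checkout_sketch(git_repo_id):
    """Clone the repository on first sight, otherwise refresh the working copy."""
    repo = redis.hgetall(git_repo_id)
    repo_url, repo_path = repo.get('url'), repo.get('path')
    if not os.path.isdir(repo_path):
        # the repository has not been cloned yet:
        subprocess.run(['git', 'clone', repo_url, repo_path], check=True)
    else:
        # already on disk: fetch and check out 'master' (assumed default branch)
        subprocess.run(['git', '-C', repo_path, 'fetch', '--all'], check=True)
        subprocess.run(['git', '-C', repo_path, 'checkout', 'master'], check=True)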
def get_git_repositories():
    """ HTTP GET /resources handler """
    get_req_json = request.get_json(force=True, silent=True)
    if get_req_json:
        try:
            # the request body is expected to look like {'id': [<repo id>, ...]}:
            repo_ids = get_req_json['id']
        except (KeyError, TypeError) as exc:
            msg = 'Error while decoding request JSON: %s' % exc
            app.logger.warning(msg)
            raise BadRequest('Expected a JSON serialized list of '
                             'repository id (-s) to retrieve, '
                             'or no data to get all.')
    else:
        # no filter supplied: find all repository records:
        repo_ids = redis.keys(pattern='git_repo_id:*')
        if 'git_repo_id:id' in repo_ids:
            # drop the id counter key, it is not a repository record:
            repo_ids.remove('git_repo_id:id')
    # create a pipeline to fetch all requested repos in one round trip:
    pipe = redis.pipeline()
    for repo_id in repo_ids:
        pipe.hgetall(repo_id)
    # execute the pipeline, collect repos data:
    git_repositories = []
    for idx, repo in enumerate(pipe.execute()):
        git_repositories.append({repo_ids[idx]: repo})
    return make_response(str(git_repositories), 200)
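
# --- Illustrative sketch only ----------------------------------------------
# The route registration itself is not shown in this section. One plausible
# wiring, assuming `app` is the Flask application object used above and that
# POST /resources carries a JSON body with at least a 'url' field:
@app.route('/resources', methods=['GET'])
def resources_get():
    return get_git_repositories()

@app.route('/resources', methods=['POST'])
def resources_post():
    payload = request.get_json(force=True)
    clone_checkout_git_repository(payload['url'], payload)
    return make_response('Accepted', 202)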