def on_commit(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Handle a commit command Interest.

    The application parameters carry four NUL-separated fields:
    repo, branch, dest_branch and commit_msg.  Commits the working tree
    for (repo, branch), pushes HEAD to dest_branch on the NDN remote in
    the background, and answers with a success Data packet.
    """
    param = interest.applicationParameters.toBytes()
    if not isinstance(param, bytes):
        print("Malformed request")
        return
    param = param.split(b'\x00')
    if len(param) != 4:
        print("Malformed request")
        return
    repo, branch, dest_branch, commit_msg = map(bytes.decode, param)
    # BUGFIX: use a copy -- assigning through os.environ directly would
    # mutate this process's own environment (GIT_DIR etc. would leak
    # into every later subprocess and os call).
    env = os.environ.copy()
    env['GIT_COMMITTER_NAME'] = 'GitSync'
    env['GIT_WORK_TREE'] = os.path.join(os.path.expanduser(MOUNT_PATH), repo, branch)
    env['GIT_DIR'] = os.path.join(env['GIT_WORK_TREE'], '.git')
    repo_uri = "ndn::" + Name(GIT_PREFIX).append(repo).toUri()
    # Commit (blocking)
    subprocess.call(['git', 'commit', '-a', '-m', commit_msg], env=env)
    # Push (non-blocking)
    os.spawnlpe(os.P_NOWAIT, 'git', 'git', 'push', repo_uri,
                'HEAD:refs/heads/' + dest_branch, env)
    # Respond with Data
    data = Data(interest.name)
    data.content = struct.pack("i", PUSH_RESPONSE_SUCCESS)
    data.metaInfo.freshnessPeriod = 1000
    face.putData(data)
def onInterest(self, prefix, interest, face, interestFilterId, filter):
    """
    Publish the next numbered segment under the incoming prefix and
    invoke self._onFinished() once the final segment has been sent.
    """
    lastSegment = 2
    if self._segment >= lastSegment:
        # Everything was already sent and _onFinished() already ran.
        return
    dump("Got interest", interest.toUri())

    # Build and sign the Data packet for the next segment.
    self._segment += 1
    segmentName = Name(prefix).appendSegment(self._segment)
    data = Data(segmentName)
    data.content = "Segment number " + repr(self._segment)
    self._keyChain.sign(data, self._certificateName)
    face.putData(data)
    dump("Sent data packet", data.name.toUri())

    if self._segment >= lastSegment:
        # That was the final data packet.
        self._onFinished()
def onInterest(self, prefix, interest, transport, registeredPrefixId):
    """
    Publish the next numbered segment under the incoming prefix; once
    the final segment is sent, set self._enabled[0] = False to stop.
    """
    lastSegment = 2
    if self._segment >= lastSegment:
        return
    dump("Got interest", interest.toUri())

    # Build and sign the Data packet for the next segment.
    self._segment += 1
    data = Data(Name(prefix).appendSegment(self._segment))
    data.content = "Segment number " + repr(self._segment)
    self._keyChain.sign(data, self._certificateName)
    # Send the wire-encoded packet straight through the transport.
    transport.send(data.wireEncode().toBuffer())
    dump("Sent data packet", data.name.toUri())

    if self._segment >= lastSegment:
        # Final data packet sent: signal the event loop to stop.
        self._enabled[0] = False
def on_branchinfo_interest(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Answer a branch-info Interest with the pickled record for that branch."""
    name = interest.name
    print("ON BRANCH INFO INTEREST", name.toUri())
    branch = name[-1].toEscapedString()
    if branch not in self.branches:
        # Unknown branch: stay silent and let the Interest time out.
        return
    reply = Data(interest.name)
    reply.content = pickle.dumps(self.branches[branch])
    reply.metaInfo.freshnessPeriod = 1000
    face.putData(reply)
def on_reflist_interest(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Answer a ref-list Interest: one "<head> refs/heads/<branch>" line per branch."""
    lines = ["{} refs/heads/{}".format(info.head, name)
             for name, info in self.branches.items()]
    result = '\n'.join(lines) + '\n'
    print("On reflist -> return:", result)
    reply = Data(interest.name)
    reply.content = result.encode("utf-8")
    reply.metaInfo.freshnessPeriod = 1000
    face.putData(reply)
def on_update(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Clone <repo>/<branch> into a fresh temporary mount directory.

    Replies with PUSH_RESPONSE_FAILURE when the branch head is unknown;
    otherwise starts a background `git clone` and replies with
    PUSH_RESPONSE_SUCCESS followed by the path being populated.
    """
    # Decode Interest
    logging.info("OnClone: %s", interest.name.toUri())
    repo = interest.name[-2].toEscapedString()
    branch = interest.name[-1].toEscapedString()

    # Look up the branch head commit, if we track this repo/branch.
    commit = None
    repo_obj = self.repos.get(repo)
    if repo_obj is not None and branch in repo_obj.branches:
        commit = repo_obj.branches[branch].head
    if not commit:
        # We should fetch
        failure = Data(interest.name)
        failure.content = struct.pack("i", PUSH_RESPONSE_FAILURE)
        failure.metaInfo.freshnessPeriod = 1000
        face.putData(failure)
        return

    # Call git to mount (not efficient); clone into a random tmp-XXX dir.
    alphabet = "0123456789ABCDEF"
    base_path = os.path.join(os.path.expanduser(MOUNT_PATH), repo)
    os.makedirs(base_path, exist_ok=True)
    suffix = "tmp-" + "".join(random.choices(alphabet, k=25))
    mount_path = os.path.join(base_path, suffix)
    repo_uri = "ndn::" + Name(GIT_PREFIX).append(repo).toUri()
    os.spawnlp(os.P_NOWAIT, 'git', 'git', 'clone', '--depth', '1',
               '--branch', branch, repo_uri, mount_path)

    # Respond with Data
    reply = Data(interest.name)
    reply.content = struct.pack("i", PUSH_RESPONSE_SUCCESS) + mount_path.encode()
    reply.metaInfo.freshnessPeriod = 1000
    face.putData(reply)
async def push(self, branch, commit, timeout, face, name):
    """Handle a push of `commit` onto `branch`.

    Starts fetching the commit's objects; once they arrive, the inner
    `checkout` coroutine publishes and persists the new branch head.
    Replies on `face` with a Data named `name` carrying one of
    PUSH_RESPONSE_SUCCESS / PUSH_RESPONSE_PENDING / PUSH_RESPONSE_FAILURE
    (PENDING when the fetch did not finish within `timeout` seconds and
    was left to complete in the background).
    """
    # TODO Check if new head is legal
    fetcher = self.fetch(commit)
    result = False

    async def checkout():
        # Runs after the fetch finishes; publishes the new head and
        # persists it.  Sets `result` to True only on full success.
        nonlocal fetcher, result
        await fetcher.wait_until_finish()
        if not fetcher.success:
            return
        # TODO W-A-W conflict
        timestamp = await self.sync.publish_data(branch)
        self.branches[branch].timestamp = timestamp
        self.branches[branch].head = commit
        # Fix the database
        head_data_name = Name(self.repo_prefix).append("refs")
        head_data_name = head_data_name.append(branch).appendTimestamp(timestamp)
        head_data = Data(head_data_name)
        head_data.content = commit.encode("utf-8")
        # TODO Sign data
        self.branches[branch].head_data = head_data.wireEncode().toBytes()
        self.repo_db.put(branch, pickle.dumps(self.branches[branch]))
        # Drop the encoded packet from memory once it is persisted.
        self.branches[branch].head_data = b""
        result = True

    event_loop = asyncio.get_event_loop()
    response = None
    if branch not in self.branches:
        # Unknown branch: fail without touching the fetcher result.
        response = PUSH_RESPONSE_FAILURE
    if response is None:
        try:
            # Wait up to `timeout` seconds for the fetch to complete.
            await asyncio.wait_for(fetcher.wait_until_finish(), timeout)
        except asyncio.TimeoutError:
            # Fetch is slow: finish checkout in the background and
            # answer PENDING so the client can retry/poll later.
            event_loop.create_task(checkout())
            response = PUSH_RESPONSE_PENDING
    if response is None:
        # Fetch finished in time; run checkout to completion (no timeout).
        await asyncio.wait_for(checkout(), None)
        if result:
            response = PUSH_RESPONSE_SUCCESS
        else:
            response = PUSH_RESPONSE_FAILURE
    logging.info("Push Result: %s", response)
    data = Data(name)
    data.content = struct.pack("i", response)
    data.metaInfo.freshnessPeriod = 1000
    face.putData(data)
def on_unmount(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Remove the local working tree for <repo>/<branch> and confirm."""
    logging.info("OnUnmount: %s", interest.name.toUri())
    repo = interest.name[-2].toEscapedString()
    branch = interest.name[-1].toEscapedString()
    tree_path = os.path.join(os.path.expanduser(MOUNT_PATH), repo, branch)
    # Best-effort delete: a missing tree is not an error.
    shutil.rmtree(tree_path, ignore_errors=True)

    # Respond with Data
    reply = Data(interest.name)
    reply.content = struct.pack("i", PUSH_RESPONSE_SUCCESS)
    reply.metaInfo.freshnessPeriod = 1000
    face.putData(reply)
def on_mount(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Clone <repo>/<branch> at its current head commit into the mount area."""
    # TODO: Do a better job (mount, unmount, commit); support fetching and commit
    # Recover from malformed request
    # Decode Interest
    logging.info("OnMount: %s", interest.name.toUri())
    repo = interest.name[-2].toEscapedString()
    branch = interest.name[-1].toEscapedString()

    # Look up the branch head commit, if we track this repo/branch.
    commit = None
    repo_obj = self.repos.get(repo)
    if repo_obj is not None and branch in repo_obj.branches:
        commit = repo_obj.branches[branch].head
    if not commit:
        # We should fetch
        failure = Data(interest.name)
        failure.content = struct.pack("i", PUSH_RESPONSE_FAILURE)
        failure.metaInfo.freshnessPeriod = 1000
        face.putData(failure)
        return

    # Call git to mount (not efficient): clone into a dir named by commit.
    base_path = os.path.join(os.path.expanduser(MOUNT_PATH), repo)
    os.makedirs(base_path, exist_ok=True)
    mount_path = os.path.join(base_path, commit)
    repo_uri = "ndn::" + Name(GIT_PREFIX).append(repo).toUri()
    os.spawnlp(os.P_NOWAIT, 'git', 'git', 'clone', '--depth', '1',
               '--branch', branch, repo_uri, mount_path)

    # Respond with Data
    reply = Data(interest.name)
    reply.content = struct.pack("i", PUSH_RESPONSE_SUCCESS) + mount_path.encode()
    reply.metaInfo.freshnessPeriod = 1000
    face.putData(reply)
def on_track_repo(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Start tracking a repo named by the Interest's last component."""
    logging.info("OnTrackRepo: %s", interest.name.toUri())
    if len(interest.name) < 2:
        return
    repo = interest.name[-1].toEscapedString()
    if repo not in self.repos:
        # New repo: register it in memory and in the persistent index.
        self.repos[repo] = Repo(self.objects_db, repo, self.face)
        self.repos_db.put(repo, b"")
        response = PUSH_RESPONSE_SUCCESS
    else:
        # Already tracked.
        response = PUSH_RESPONSE_FAILURE
    reply = Data(interest.name)
    reply.content = struct.pack("i", response)
    reply.metaInfo.freshnessPeriod = 1000
    face.putData(reply)
def on_create_branch(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Create <branch> in <repo> (taken from the Interest name) and confirm."""
    logging.info("OnCreateBranch: %s", interest.name.toUri())
    if len(interest.name) < 3:
        return
    repo = interest.name[-2].toEscapedString()
    branch = interest.name[-1].toEscapedString()
    if repo in self.repos:
        created = self.repos[repo].create_branch(branch, self.cmd_prefix.toUri())
        response = PUSH_RESPONSE_SUCCESS if created else PUSH_RESPONSE_FAILURE
    else:
        # Unknown repo.
        response = PUSH_RESPONSE_FAILURE
    reply = Data(interest.name)
    reply.content = struct.pack("i", response)
    reply.metaInfo.freshnessPeriod = 1000
    face.putData(reply)
def on_mount(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Clone a single branch of <repo> into MOUNT_PATH/<repo>/<branch>."""
    logging.info("OnMount: %s", interest.name.toUri())
    repo = interest.name[-2].toEscapedString()
    branch = interest.name[-1].toEscapedString()
    base_path = os.path.join(os.path.expanduser(MOUNT_PATH), repo)
    os.makedirs(base_path, exist_ok=True)
    repo_uri = "ndn::" + Name(GIT_PREFIX).append(repo).toUri()
    # Fire-and-forget clone of just the requested branch.
    os.spawnlp(os.P_NOWAIT, 'git', 'git', 'clone', '--single-branch',
               '--branch', branch, repo_uri, os.path.join(base_path, branch))

    # Respond with Data
    reply = Data(interest.name)
    reply.content = struct.pack("i", PUSH_RESPONSE_SUCCESS)
    reply.metaInfo.freshnessPeriod = 1000
    face.putData(reply)
async def checkout(): nonlocal fetcher, result await fetcher.wait_until_finish() if not fetcher.success: return # TODO W-A-W conflict timestamp = await self.sync.publish_data(branch) head_data_name = Name(self.repo_prefix).append("refs") head_data_name = head_data_name.append(branch).appendTimestamp(timestamp) head_data = Data(head_data_name) head_data.content = commit.encode("utf-8") # TODO Sign data self.update_branch(branch, timestamp, commit, head_data.wireEncode().toBytes()) result = True
def on_interest(self, _prefix, interest, face, _filter_id, _filter):
    """Serve a stored object by hash name, tolerating a trailing sequence number."""
    if len(interest.name) < 4:
        return
    last = interest.name[-1]
    if last.isSequenceNumber():
        # Consume (and ignore) the sequence number; the hash precedes it.
        _ = last.toSequenceNumber()
        hash_name = interest.name[-2].toEscapedString()
    else:
        hash_name = last.toEscapedString()
    # TODO: Segmentation
    logging.info("OnInterest: %s", interest.name.toUri())
    if not self.storage.exists(hash_name):
        logging.info("Not exist: %s", hash_name)
        return
    reply = Data(interest.name)
    reply.content = self.storage.get(hash_name)
    reply.metaInfo.freshnessPeriod = 60000
    face.putData(reply)
def update_branch(self, branch: str, timestamp: int, commit: str, head_data: bytes):
    """Persist the new branch head and broadcast a notification Data."""
    info = self.branches[branch]
    # Update database
    info.timestamp = timestamp
    info.head = commit
    info.head_data = head_data
    self.repo_db.put(branch, pickle.dumps(info))
    # Release Data in memory
    info.head_data = b""
    # Send notification Data
    notif_name = (Name(LOCAL_CMD_PREFIX)
                  .append("notif")
                  .append(self.repo_name)
                  .append(branch)
                  .appendTimestamp(Sync.timestamp()))
    notif = Data(notif_name)
    notif.metaInfo.freshnessPeriod = 10
    notif.content = commit.encode()
    self.face.putData(notif)
def onInterest(self, prefix, interest, face, interestFilterId, filter):
    """
    Reply with a signed Data packet named after the Interest; once the
    expected number of segments has gone out, call self._onFinished().
    """
    dump("Got interest in produce segments object:", interest.toUri())

    # Make and sign a Data packet with the interest name.
    data = Data(interest.name)
    data.content = "Data packet " + interest.name.toUri()
    self._keyChain.sign(data, self._certificateName)
    face.putData(data)
    dump("Sent data packet", data.name.toUri())

    self._nSegmentsSent += 1
    totalSegments = (self._endBlockId - self._startBlockId) + 1
    if self._nSegmentsSent >= totalSegments:
        # We sent the final segment.
        self._onFinished()
def onInterest(self, prefix, interest, face, interestFilterId, filter):
    """
    Reply with a signed Data packet named after the Interest; once the
    expected number of segments has gone out, call self._onFinished().
    """
    dump("Got interest", interest.toUri())

    # Make and sign a Data packet with the interest name.
    data = Data(interest.name)
    data.content = "Data packet " + interest.name.toUri()
    self._keyChain.sign(data, self._certificateName)
    face.putData(data)
    dump("Sent data packet", data.name.toUri())

    self._nSegmentsSent += 1
    totalSegments = (self._endBlockId - self._startBlockId) + 1
    if self._nSegmentsSent >= totalSegments:
        # We sent the final segment.
        self._onFinished()
def on_push(self, _prefix, interest: Interest, face, _filter_id, _filter):
    """Handle a push command Interest.

    The repo and branch come from name components [-3] and [-2]; the
    commit hash travels in the application parameters.  Unknown repos
    get an immediate failure Data; otherwise the async push is scheduled
    and Repo.push() sends the eventual response itself.
    """
    logging.info("OnPush: %s", interest.name.toUri())
    if len(interest.name) < 3:
        return
    repo = interest.name[-3].toEscapedString()
    branch = interest.name[-2].toEscapedString()
    if repo not in self.repos:
        logging.info("Repo %s doesn't exist", repo)
        data = Data(interest.name)
        data.content = struct.pack("i", PUSH_RESPONSE_FAILURE)
        # BUGFIX: set freshness like every other responder so forwarders
        # do not cache this failure reply indefinitely.
        data.metaInfo.freshnessPeriod = 1000
        face.putData(data)
    else:
        commit = interest.applicationParameters.toBytes().decode("utf-8")
        # Use half the Interest lifetime as the synchronous-wait budget.
        timeout = interest.interestLifetimeMilliseconds / 1000.0 / 2.0
        logging.info("Arguments %s %s %s %d", repo, branch, commit, timeout)
        event_loop = asyncio.get_event_loop()
        event_loop.create_task(
            self.repos[repo].push(branch, commit, timeout, face, interest.name))
async def checkout(): nonlocal fetcher, result await fetcher.wait_until_finish() if not fetcher.success: return # TODO W-A-W conflict timestamp = await self.sync.publish_data(branch) self.branches[branch].timestamp = timestamp self.branches[branch].head = commit # Fix the database head_data_name = Name(self.repo_prefix).append("refs") head_data_name = head_data_name.append(branch).appendTimestamp( timestamp) head_data = Data(head_data_name) head_data.content = commit.encode("utf-8") # TODO Sign data self.branches[branch].head_data = head_data.wireEncode().toBytes() self.repo_db.put(branch, pickle.dumps(self.branches[branch])) self.branches[branch].head_data = b"" result = True
def on_result_interest(self, _prefix, interest, face, _interest_filter_id, _filter_obj):
    # type: (Name, Interest, Face, int, InterestFilter) -> bool
    """Serve a (possibly segmented) computation result for an Interest.

    Returns False when the Interest is not under SERVER_PREFIX/RESULT_PREFIX,
    True otherwise.  The suffix of the request name selects what is sent:
    a segment component fetches one segment, a "_meta" component fetches
    the stored meta entry, and no suffix fetches the whole result (or its
    first segment when it is large enough to be segmented).
    """
    prefix = Name(SERVER_PREFIX).append(RESULT_PREFIX)
    if not prefix.isPrefixOf(interest.name):
        # Wrong prefix
        return False
    # The request-specific part of the name, with the server prefix stripped.
    data_name = interest.name[prefix.size():]
    logging.info("On result interest: %s", data_name.toUri())
    # key, stat = self._result_set_prefix_match(data_name)
    status = self.load_status(data_name)
    if status is None:
        # No such request
        self.nodata_reply(interest.name, RET_NO_REQUEST)
        return True
    if data_name[-1].isSegment():
        # Segment no suffix
        seg_no = data_name[-1].toSegment()
        result = self.storage.get(data_name.getPrefix(-1))
    elif data_name[-1] == Name("_meta")[0]:
        # MetaInfo suffix; seg_no == -1 marks the meta branch below.
        seg_no = -1
        result = self.storage.get(data_name.getPrefix(-1))
    else:
        # No suffix
        seg_no = None
        result = self.storage.get(data_name)
    if result is not None:
        # There are data
        segment_cnt = (len(result) + self.segment_size - 1) // self.segment_size
        # Note: I don't understand why namespace keep all data in memory
        metainfo = MetaInfo()
        # metainfo.setFinalBlockId(segment_cnt - 1)  # WHY this doesn't work?
        metainfo.setFinalBlockId(Name().appendSegment(segment_cnt - 1)[0])
        if segment_cnt > 1 and seg_no is None:
            # Fetch segmented data with no suffix will get only first segment
            seg_no = 0
            # NOTE(review): this appends a *sequence number* component while
            # the lookup branch above checks isSegment() -- confirm the
            # intended component type matches on the consumer side.
            data_name.appendSequenceNumber(seg_no)
        data = Data(Name(prefix).append(data_name))
        data.setMetaInfo(metainfo)
        if seg_no == -1:
            # _meta
            data.content = self.storage.get(data_name)
        else:
            # data
            if segment_cnt > 1:
                # Segmented
                if seg_no < segment_cnt:
                    start_offset = seg_no * self.segment_size
                    end_offset = start_offset + self.segment_size
                    data.content = Blob(bytearray(result[start_offset:end_offset]))
                else:
                    # Requested segment past the end: send empty content.
                    data.content = None
            else:
                # No segmentation
                data.content = Blob(bytearray(result))
        self.keychain.sign(data)
        face.putData(data)
        return True
    else:
        # Data are not ready
        if status.status == STATUS_NO_INPUT:
            self.nodata_reply(interest.name, RET_NO_INPUT)
        elif status.status == STATUS_FAILED:
            self.nodata_reply(interest.name, RET_EXECUTION_FAILED)
        else:
            # Still running: tell the client how long to wait before retrying.
            self.nodata_reply(interest.name, RET_RETRY_AFTER,
                              status.estimated_time - Common.getNowMilliseconds())
        return True