def m_create_task(self):
    """Create a new task entry and launch a background worker to expand its files.

    Registers the task under its string ID in dict_Jobs, starts a
    c_PreTaskWorker with an "expand_files" command, acknowledges the client
    with the task ID over the socket, then blocks until the worker finishes.
    """
    task_id = str(self.ID)
    logging.debug("Creating task : %s", task_id)
    self.dict_Jobs[task_id] = dataclasses.c_Task(self.ID)
    # Brief pause before handing the request data to the background thread.
    time.sleep(0.1)
    self.Data.update({
        "ID": task_id,
        "command": "expand_files",
        "payload": self.Payload,
    })
    self.FileExpander = c_PreTaskWorker(self.Data)
    self.FileExpander.start()
    # Acknowledge the client with the new task's ID while the worker runs.
    self.Output["status"] = task_id
    self.client.send(bytes(json.dumps(self.Output), 'utf-8'))
    self.FileExpander.join()
def m_create_task(self, ID, Payload):
    """Register task *ID* in the global job table, expand its file list,
    mark it ready, and persist it via WriteJob.

    The task is appended to Tasks.Order and carries a 1-based queue position.
    """
    logging.debug("Creating task : %s", ID)
    job = dataclasses.c_Task(ID)
    Tasks.Jobs[ID] = job
    self.Data["ID"] = ID
    self.Data["command"] = "expand_files"
    self.Data["payload"] = Payload
    self.Output["status"] = ID
    # Mark busy while the (potentially slow) file expansion runs.
    job.state = "busy"
    Tasks.Order.append(ID)
    job.order = len(Tasks.Order)  # 1-based position in the queue
    job.progress = -1             # sentinel: no progress reported yet
    job.filelist = self.FileExpand(ID, Payload)
    logging.debug("Number of files:%s", len(job.filelist))
    job.state = "ready"
    self.WriteJob(Tasks, ID)
def m_ReadJobList(self):
    """Load persisted jobs from their XML files into the global Tasks state.

    Rebuilds Tasks.Jobs and Tasks.Order from the job files returned by
    get_xmljobs, restoring each task's state, per-file flags, and overall
    progress, then logs a per-task summary in queue order.
    """
    logging.debug("Init TCP server: Reading existing jobs from %s", Tasks.WorkData["sTargetDir"])
    self.aFiles = self.get_xmljobs(Tasks)
    self.filecounter = 0
    # Pre-size the order list; slots are overwritten below using each
    # job's persisted "order" attribute (1-based).
    for i in range(len(self.aFiles)):
        Tasks.Order.append(i)
    for f in self.aFiles:
        self.tree = ET.ElementTree(file=f)
        self.root = self.tree.getroot()
        ID = self.root.attrib["ID"]
        task = dataclasses.c_Task(ID)
        Tasks.Jobs[ID] = task
        task.state = self.root.attrib["state"]
        task.active = self.StringToBool(self.root.attrib["active"])
        task.order = int(self.root.attrib["order"])
        Tasks.Order[task.order - 1] = ID
        self.bIsActive = True
        self.CopiedFiles = 0
        for file in self.root.find("FileList").findall("File"):
            name = file.attrib["file"]
            # BUG FIX: use the size recorded in the XML instead of
            # os.path.getsize() — the source file may no longer exist on
            # disk, and .size was overwritten from the XML anyway.
            entry = dataclasses.c_file(int(file.attrib["size"]))
            task.filelist[name] = entry
            entry.copied = self.StringToBool(file.attrib["copied"])
            # BUG FIX: the XML attribute is a string, so `== True` never
            # matched; test the converted boolean instead.
            if entry.copied:
                entry.progress = 100.0
                self.CopiedFiles += 1
            entry.delete = self.StringToBool(file.attrib["delete"])
            entry.uploaded = self.StringToBool(file.attrib["uploaded"])
            entry.size = int(file.attrib["size"])
        # Guard against a job persisted with an empty file list.
        if task.filelist:
            task.progress = (self.CopiedFiles / len(task.filelist)) * 100
        else:
            task.progress = 0.0
    for ID in Tasks.Order:
        copied_count = 0  # renamed from aIncompleteFiles: it counts *copied* files
        for name in Tasks.Jobs[ID].filelist:
            if Tasks.Jobs[ID].filelist[name].copied == True:
                copied_count += 1
        total = len(Tasks.Jobs[ID].filelist)
        logging.debug("Loading task:[%s] %s : %s files. %s percent complete",
                      Tasks.Jobs[ID].order, ID, total,
                      (copied_count / total * 100) if total else 0.0)
def m_ReadJobList(self):
    """Scan the target directory for .xml job files and rebuild dict_Jobs.

    For each job file, restores the task's state and per-file
    copied/delete/uploaded flags, recomputes whether the task is fully
    copied (active) and its overall progress, then logs a per-task summary.
    """
    logging.debug("Init TCP server: Reading existing jobs from %s", self.dict_WorkData["sTargetDir"])
    self.aFiles = self.get_filepaths(self.dict_WorkData["sTargetDir"])
    for f in self.aFiles:
        self.head, self.tail = os.path.split(f)
        self.head, self.tail = os.path.splitext(self.tail)
        if self.tail.lower() != ".xml":
            continue  # skip non-job files
        self.tree = ET.ElementTree(file=f)
        self.task = self.tree.getroot()
        ID = self.task.attrib["ID"]
        job = dataclasses.c_Task(ID)
        self.dict_Jobs[ID] = job
        job.state = self.task.attrib["state"]
        # BUG FIX: convert the string attribute to a bool (was stored raw).
        # This value is recomputed from the file list after the loop.
        job.active = self.StringToBool(self.task.attrib["active"])
        self.bIsActive = True
        self.CopiedFiles = 0
        for file in self.task.find("FileList").findall("File"):
            name = file.attrib["file"]
            entry = dataclasses.c_file(int(file.attrib["size"]))
            job.filelist[name] = entry
            entry.copied = self.StringToBool(file.attrib["copied"])
            # BUG FIX: attrib values are strings, so `== False` never
            # matched — every file was treated as copied and progress was
            # always 100%. Test the converted boolean instead.
            if not entry.copied:
                self.bIsActive = False
            else:
                entry.progress = 100.0
                self.CopiedFiles += 1
            entry.delete = self.StringToBool(file.attrib["delete"])
            entry.uploaded = self.StringToBool(file.attrib["uploaded"])
        job.active = self.bIsActive
        # Guard against a job persisted with an empty file list.
        job.progress = (self.CopiedFiles / len(job.filelist)) * 100 if job.filelist else 0.0
    for key in self.dict_Jobs:
        self.aIncompleteFiles = 0
        for name in self.dict_Jobs[key].filelist:
            if self.dict_Jobs[key].filelist[name].copied == False:
                self.aIncompleteFiles += 1
        logging.debug("Loading task:[%s] %s : %s files. %s Incomplete",
                      self.dict_Jobs[key].active, key,
                      len(self.dict_Jobs[key].filelist), self.aIncompleteFiles)
def handle(self):
    """Handle one TCP request: decode a JSON {command, payload} message and
    dispatch on the command path.

    Side effects vary per command: mutating the global Tasks state,
    notifying registered clients via m_NotifyClients, or replying on the
    socket. Only the "isconnected" branch sends a response back; most
    branches reply nothing.
    """
    # Single recv of up to 5 MiB; assumes the whole JSON message arrives
    # in one read — TODO confirm for large payloads.
    self.buffersize = 1024 * 1024 * 5
    self.data = self.request.recv(self.buffersize).decode("utf-8")
    self.data = json.loads(self.data)
    self.Command = self.data["command"]
    self.Payload = self.data["payload"]
    self.Output = {}
    if self.Command == "/syncserver/v1/global/queue/task/put":
        # --- PUT ON TASKLIST ---
        # When a new task is put into the global task list, notify all
        # registered clients (except the one that sent the request).
        logging.debug("number of global tasks:%s", len(Tasks.Jobs))
        # Payload is itself a JSON-encoded list of task records.
        self.Payload = json.loads(self.Payload)
        if len(self.Payload) > 0:
            for data in self.Payload:
                if not self.m_Is_ID_In_List(Tasks.Order, data["ID"]):
                    # Unknown ID: append to the queue and create the task.
                    Tasks.Order.append(data["ID"])
                    Tasks.Jobs[data["ID"]] = dataclasses.c_Task()
                    Tasks.Jobs[data["ID"]].progress = data["Data"]["progress"]
                    Tasks.Jobs[data["ID"]].metadata = data["Data"]["metadata"]
                    logging.debug("Adding a task to the global list from:%s", self.client_address[0])
        if len(Tasks.Jobs) > 0:
            # Broadcast the full serialized task list to registered clients.
            self.m_NotifyClients("/webimporter/v1/global/queue/put", self.m_SerialiseSyncTasks(Tasks))
    elif self.Command == "/syncserver/v1/global/queue/task/get":
        # --- GET TASK ---
        # NOTE(review): this branch only logs; no tasks are actually sent —
        # looks unfinished, confirm intended behavior.
        logging.debug("Sending tasks to client:%s", self.client_address[0])
    elif self.Command == "/syncserver/v1/global/queue/task/set_progress":
        # --- SET PROGRESS ---
        self.ID = self.Payload["ID"]
        self.progress = self.Payload["progress"]
        self.m_setprogress(self.ID, self.progress)
    elif self.Command == "/syncserver/v1/global/queue/set_priority":
        # --- SET PRIORITY ---
        # Replace the global ordering with the list supplied by the client,
        # then push the new order out to all registered clients.
        logging.debug("Set priority list")
        self.data = self.Payload
        Tasks.Order = []
        for Data in self.data:
            Tasks.Order.append(Data)
        self.m_NotifyClients("/webimporter/v1/local/queue/set_priority", Tasks.Order)
    elif self.Command == "/syncserver/v1/global/queue/get_priority":
        # --- GET PRIORITY ---
        self.m_getpriority()
    elif self.Command == "/syncserver/v1/server/register":
        # --- REGISTER ---
        # Payload is the client's listening port; dedupe on (ip, port).
        self.client = {}
        self.client["ip"] = self.client_address[0]
        self.client["port"] = int(self.Payload)
        self.bFoundClient = False
        for cl in Tasks.clientlist:
            if cl["ip"] == self.client_address[0] and cl["port"] == int(self.Payload):
                self.bFoundClient = True
        if self.bFoundClient == False:
            logging.debug("Registering client:%s:%s", self.client_address[0], self.Payload)
            Tasks.clientlist.append(self.client)
        else:
            logging.debug("[%s:%s]Client already exists", self.client_address[0], self.Payload)
    elif self.Command == "/syncserver/v1/server/isconnected":
        # --- IS CONNECTED ---
        # The only branch that replies on the socket.
        self.Output = {}
        self.Output["status"] = "OK"
        self.request.sendall(bytes(json.dumps(self.Output), "utf-8"))
    elif self.Command == "/syncserver/v1/server/shutdown":
        # --- SHUTDOWN ---
        logging.debug("Shutting down server")
        # Go to each line manager and ask it to shut down.
        Tasks.shutdown = True