def getScanImages(scanName):
    """Render the web-screenshot gallery page for a single inga scan.

    Collects screenshot storage IDs from each live host's TCP ports and
    from each discovered domain (http/https), resolves them against the
    storage collection and renders scanImages.html.
    """
    scans = inga._inga().query(api.g.sessionData,
                               query={"scanName": scanName, "up": True},
                               fields=["ports", "ip", "domains"])["results"]
    images = []
    storageIDs = []
    for scanItem in scans:
        try:
            # Port-level screenshots (entries without one raise KeyError and are skipped)
            for tcpPort in scanItem["ports"]["tcp"]:
                try:
                    storageIDs.append(db.ObjectId(tcpPort["data"]["webScreenShot"]["storageID"]))
                    images.append({
                        "url": "{0}://{1}:{2}".format(tcpPort["data"]["webServerDetect"]["protocol"], scanItem["ip"], tcpPort["port"]),
                        "fileData": tcpPort["data"]["webScreenShot"]["storageID"]
                    })
                except KeyError:
                    pass
            # Domain-level screenshots, one attempt per protocol
            for domainItem in scanItem["domains"]:
                for proto in ["http", "https"]:
                    try:
                        storageIDs.append(db.ObjectId(domainItem["data"]["webScreenShot"][proto]["storageID"]))
                        images.append({
                            "url": "{0}://{1}".format(proto, domainItem["domain"]),
                            "fileData": domainItem["data"]["webScreenShot"][proto]["storageID"]
                        })
                    except KeyError:
                        pass
        except KeyError:
            # Host without ports/domains sections
            pass
    # Swap each storageID string for the stored file contents
    storedFiles = storage._storage().query(api.g.sessionData,
                                           query={"_id": {"$in": storageIDs}})["results"]
    for image in images:
        for storedFile in storedFiles:
            if image["fileData"] == str(storedFile["_id"]):
                image["fileData"] = storedFile["fileData"]
    return render_template("scanImages.html", result=images)
def loadAsClass(self, jsonList, sessionData=None): result = [] # Ininilize global cache cache.globalCache.newCache("modelCache", sessionData=sessionData) # Loading json data into class for jsonItem in jsonList: _class = cache.globalCache.get("modelCache", jsonItem["classID"], getClassObject, sessionData=sessionData) if _class is not None: if len(_class) == 1: _class = _class[0].classObject() if _class: result.append(helpers.jsonToClass(_class(), jsonItem)) else: logging.debug( "Error unable to locate class, disabling trigger: triggerID={0} classID={1}, models={2}" .format(jsonItem["_id"], jsonItem["classID"], [_trigger, db._document])) _trigger().api_update( query={"_id": db.ObjectId(jsonItem["_id"])}, update={"$set": { "enabled": False }}) systemTrigger.failedTrigger(None, "noTriggerClass") return result
def setAttribute(self, attr, value, sessionData=None):
    """Assign *attr* = *value* on this object, rejecting duplicate names.

    When attr is "name", the assignment is refused (returns False) if any
    other document already uses the requested name. Returns True once the
    attribute has been set.
    """
    if attr == "name":
        clashes = self.query(query={"name": value, "_id": {"$ne": db.ObjectId(self._id)}})["results"]
        if clashes:
            return False
    setattr(self, attr, value)
    return True
def getGraph(self):
    """Return this document's stored xy series as two parallel lists.

    Reads the "xy" list of [x, y] pairs from the backing collection and
    splits it into (x_values, y_values).
    """
    record = self._dbCollection.find_one({"_id": db.ObjectId(self._id)})
    xValues = [pair[0] for pair in record["xy"]]
    yValues = [pair[1] for pair in record["xy"]]
    return (xValues, yValues)
def setAttribute(self, attr, value, sessionData=None):
    """Assign *attr* = *value* with two trigger-specific special cases.

    "name": refused (returns False) when another document already holds the
    requested name. "enabled": transitioning from disabled to enabled resets
    startCheck so the trigger is not treated as mid-run. Returns True once
    the attribute has been set.
    """
    if attr == "name":
        duplicates = self.query(query={"name": value, "_id": {"$ne": db.ObjectId(self._id)}})["results"]
        if duplicates:
            return False
    # Resets startCheck to 0 each time a trigger is enabled
    elif attr == "enabled" and value == True and self.enabled == False:
        self.startCheck = 0
        self.update(["startCheck"])
    setattr(self, attr, value)
    return True
def run(self,data,persistentData,actionResult):
    """Correlate open events into eventCorrelation documents.

    Pass 1 attaches each candidate event to an existing correlation (matched
    on any shared value in self.correlationFields) or creates a new one.
    Pass 2 ("reduction") repeatedly merges correlations that share a value
    until no merge occurs, capped at 5 sweeps. Changes are persisted through
    self.bulkClass and summarised in actionResult["correlatedEvents"].

    NOTE(review): structure reconstructed from a collapsed source line -
    nesting of some statements (e.g. the score increment) was inferred;
    verify against version history before relying on edge-case behaviour.
    """
    correlationName = helpers.evalString(self.correlationName,{"data" : data})
    excludeCorrelationValues = helpers.evalDict(self.excludeCorrelationValues,{"data" : data})
    expiryTime = time.time() + self.expiryTime
    # Only still-live correlations are merge candidates
    correlatedRelationships = event._eventCorrelation().getAsClass(query={ "correlationName" : correlationName, "expiryTime" : { "$gt" : int(time.time()) } })
    eventsAfterTime = int(time.time()) - self.oldestEvent
    if not self.alwaysProcessEvents:
        # Exclude events already attached to a live or recently-updated correlation
        ids = event._eventCorrelation()._dbCollection.distinct("ids",{ "$or" : [ { "expiryTime" : { "$gt" : int(time.time()) } }, { "lastUpdateTime" : { "$gt" : eventsAfterTime } } ] })
        objectIds = []
        for idItem in ids:
            objectIds.append(db.ObjectId(idItem))
        events = event._event().getAsClass(query={ "_id" : { "$nin" : objectIds }, "expiryTime" : { "$gt" : eventsAfterTime }, "eventFields" : { "$in" : self.correlationFields } })
    else:
        events = event._event().getAsClass(query={ "expiryTime" : { "$gt" : eventsAfterTime }, "eventFields" : { "$in" : self.correlationFields } })
    # Build correlation field hash table: field -> value -> owning correlation
    fields = {}
    for field in self.correlationFields:
        fields[field] = {}
        if field not in excludeCorrelationValues:
            excludeCorrelationValues[field] = []
    for correlatedRelationshipItem in correlatedRelationships:
        for field in self.correlationFields:
            try:
                if field not in excludeCorrelationValues:
                    excludeCorrelationValues[field] = []
                for value in correlatedRelationshipItem.correlations[field]:
                    fields[field][value] = correlatedRelationshipItem
            except KeyError:
                # Correlation holds no values for this field
                pass
    correlatedRelationshipsCreated = []
    correlatedRelationshipsUpdated = []
    correlatedRelationshipsDeleted = []
    # Initial Pass Loop
    for eventItem in events:
        foundCorrelatedRelationship = None
        correlations = {}
        processNew = False
        for eventField in eventItem.eventValues:
            try:
                if type(eventItem.eventValues[eventField]) is list:
                    correlations[eventField] = [ x for x in eventItem.eventValues[eventField] if eventField in self.correlationFields ]
                    # Values already claimed by an existing correlation
                    matchFound = [ fields[eventField][x] for x in eventItem.eventValues[eventField] if eventField in self.correlationFields and x in fields[eventField] and x not in excludeCorrelationValues[eventField] ]
                    if len(matchFound) > 0:
                        foundCorrelatedRelationship = matchFound[0]
                    else:
                        # NOTE(review): fields[eventField][x] can raise KeyError for
                        # unseen values here (silenced by the outer handler);
                        # presumably only the eligibility test was intended - confirm
                        matchFound = [ fields[eventField][x] for x in eventItem.eventValues[eventField] if eventField in self.correlationFields and x and x not in excludeCorrelationValues[eventField] ]
                        if len(matchFound) > 0:
                            processNew = True
                else:
                    correlations[eventField] = [eventItem.eventValues[eventField]]
                    if eventField in self.correlationFields and eventItem.eventValues[eventField] in fields[eventField] and eventItem.eventValues[eventField] not in excludeCorrelationValues[eventField]:
                        foundCorrelatedRelationship = fields[eventField][eventItem.eventValues[eventField]]
                    else:
                        if eventField in self.correlationFields and eventItem.eventValues[eventField] and eventItem.eventValues[eventField] not in excludeCorrelationValues[eventField]:
                            processNew = True
            except KeyError:
                pass
        # Create new
        if processNew == True:
            newEventCorrelation = event._eventCorrelation()
            newEventCorrelation.bulkNew(self.bulkClass, self.acl, correlationName,expiryTime,[eventItem._id],[eventItem.eventType],[eventItem.eventSubType],correlations,eventItem.score)
            correlatedRelationshipsCreated.append(newEventCorrelation)
            correlatedRelationships.append(newEventCorrelation)
            # Register the new correlation in the value hash table
            for eventField in eventItem.eventValues:
                try:
                    for eventValue in correlations[eventField]:
                        try:
                            fields[eventField][eventValue] = newEventCorrelation
                        except KeyError:
                            fields[eventField] = { eventValue : newEventCorrelation }
                except KeyError:
                    pass
        # Merge existing
        elif foundCorrelatedRelationship != None:
            for eventField in correlations:
                try:
                    foundCorrelatedRelationship.correlations[eventField] += correlations[eventField]
                    foundCorrelatedRelationship.correlations[eventField] = list(set(foundCorrelatedRelationship.correlations[eventField]))
                except KeyError:
                    foundCorrelatedRelationship.correlations[eventField] = correlations[eventField]
            if eventItem._id not in foundCorrelatedRelationship.ids:
                foundCorrelatedRelationship.ids.append(eventItem._id)
                # Score counted only the first time this event is attached
                foundCorrelatedRelationship.score += eventItem.score
            if eventItem.eventType not in foundCorrelatedRelationship.types:
                foundCorrelatedRelationship.types.append(eventItem.eventType)
            if eventItem.eventSubType not in foundCorrelatedRelationship.subTypes:
                foundCorrelatedRelationship.subTypes.append(eventItem.eventSubType)
            foundCorrelatedRelationship.correlationLastUpdate = int(time.time())
            foundCorrelatedRelationship.expiryTime = expiryTime
            if foundCorrelatedRelationship not in correlatedRelationshipsCreated and foundCorrelatedRelationship not in correlatedRelationshipsUpdated:
                correlatedRelationshipsUpdated.append(foundCorrelatedRelationship)
    # Process bulk creation if needed before merging
    self.bulkClass.bulkOperatonProcessing()
    # Reduction Loop - sweep until no merge occurred (loop not incremented), max 5 sweeps
    loop = 1
    maxLoops = 5
    while loop > 0 and maxLoops > 0:
        correlatedFieldsHash = {}
        for correlatedRelationship in correlatedRelationships:
            for eventField, eventValue in ((eventField, eventValue) for eventField in correlatedRelationship.correlations for eventValue in correlatedRelationship.correlations[eventField] ):
                if eventField in self.correlationFields and eventValue not in excludeCorrelationValues[eventField]:
                    try:
                        if eventValue not in correlatedFieldsHash[eventField]:
                            correlatedFieldsHash[eventField][eventValue] = correlatedRelationship
                        else:
                            # Another correlation already claimed this value - merge into it
                            currentCorrelation = correlatedFieldsHash[eventField][eventValue]
                            for eventField in correlatedRelationship.correlations:
                                try:
                                    currentCorrelation.correlations[eventField] += correlatedRelationship.correlations[eventField]
                                    currentCorrelation.correlations[eventField] = list(set(currentCorrelation.correlations[eventField]))
                                except KeyError:
                                    currentCorrelation.correlations[eventField] = correlatedRelationship.correlations[eventField]
                            for mergeKey in ["ids","types","subTypes"]:
                                for value in getattr(correlatedRelationship,mergeKey):
                                    if value not in getattr(currentCorrelation,mergeKey):
                                        getattr(currentCorrelation,mergeKey).append(value)
                            currentCorrelation.score += correlatedRelationship.score
                            currentCorrelation.correlationLastUpdate = int(time.time())
                            currentCorrelation.expiryTime = expiryTime
                            if currentCorrelation not in correlatedRelationshipsCreated and correlatedRelationship not in correlatedRelationshipsUpdated:
                                correlatedRelationshipsUpdated.append(currentCorrelation)
                            # Deleting the eventCorrelation it was merged with
                            if correlatedRelationship not in correlatedRelationshipsDeleted:
                                correlatedRelationshipsDeleted.append(correlatedRelationship)
                            if correlatedRelationship in correlatedRelationshipsUpdated:
                                correlatedRelationshipsUpdated.remove(correlatedRelationship)
                            if correlatedRelationship in correlatedRelationshipsCreated:
                                correlatedRelationshipsCreated.remove(correlatedRelationship)
                            correlatedRelationship.bulkMerge(currentCorrelation._id,self.bulkClass)
                            # NOTE(review): removing from the list being iterated -
                            # relies on the break below to restart cleanly
                            correlatedRelationships.remove(correlatedRelationship)
                            loop+=1
                            break
                    except KeyError:
                        # First value seen for this field
                        correlatedFieldsHash[eventField] = { eventValue : correlatedRelationship }
        maxLoops -= 1
        loop -= 1
    created = [ helpers.classToJson(x,hidden=True) for x in correlatedRelationshipsCreated ]
    updated = [ helpers.classToJson(x,hidden=True) for x in correlatedRelationshipsUpdated ]
    deleted = [ helpers.classToJson(x,hidden=True) for x in correlatedRelationshipsDeleted ]
    # NOTE(review): updated entries are serialised again below, so each updated
    # correlation appears twice in the "updated" list - confirm intended
    for correlatedRelationshipUpdated in correlatedRelationshipsUpdated:
        correlatedRelationshipUpdated.bulkUpdate(["expiryTime","ids","types","subTypes","correlations","score"],self.bulkClass)
        updated.append(helpers.classToJson(correlatedRelationshipUpdated,hidden=True))
    self.bulkClass.bulkOperatonProcessing()
    actionResult["result"] = True
    actionResult["rc"] = 0
    actionResult["correlatedEvents"] = { "created" : created, "updated" : updated, "deleted" : deleted }
    return actionResult
def getScan():
    """Render the inga scan detail page (ingaScan.html).

    For every live host in the requested scan, builds:
      * barChartData / barChartColours - open-port count per IP
      * scanData - per-host port listing for the results table
      * networkChartPorts - [ip, port] pairs for the network chart
    """
    scanName = urllib.parse.unquote_plus(request.args.get("scanName"))
    results = inga._inga().query(
        api.g.sessionData,
        query={
            "scanName": scanName,
            "up": True
        },
        fields=["scanName", "ip", "up", "lastScan", "ports"])["results"]
    barChartData = {}
    scanData = []
    networkChartPorts = []
    for scan in results:
        openPortCount = 0
        portValues = []
        try:
            # Hosts without a ports section are skipped via the KeyError handler
            combinedPorts = scan["ports"]["tcp"] + scan["ports"]["udp"]
            for portValue in combinedPorts:
                if portValue["state"] == "open":
                    openPortCount += 1
                    portValues.append({
                        "port": portValue["port"],
                        "service": portValue["service"],
                        "state": portValue["state"]
                    })
                    networkChartPorts.append(
                        [scan["ip"], str(portValue["port"])])
            if openPortCount > 0:
                barChartData[scan["ip"]] = openPortCount
        except KeyError:
            pass
        # Appended even when the host had no ports (portValues stays empty)
        scanData.append({
            "scanName": scan["scanName"],
            "ip": scan["ip"],
            "up": scan["up"],
            "lastScan": scan["lastScan"],
            "portData": portValues
        })
    barChartColours = genRandomColours(len(barChartData))
    # NOTE(review): removed a second inga/storage query that rebuilt screenshot
    # data and printed debug output - its results were never passed to the
    # template below; getScanImages() provides that view instead.
    return render_template("ingaScan.html",
                           barChartData=barChartData,
                           barChartColours=barChartColours,
                           scanData=scanData,
                           networkChartPorts=networkChartPorts)
def run(self, data, persistentData, actionResult):
    """Clear occurrences whose lull time has expired and age active ones.

    Two passes over the occurrence collection:
      1. Occurrences past lullTime with lullTimeExpired < 1 are "cleared":
         the enabled conducts attached to the originating occurrence action
         are re-fired with flowData["clearOccurrence"] = True, then the
         occurrence documents are deleted.
      2. Occurrences still past lullTime whose trigger has checked in since
         lastLullCheck get lullTimeExpired decremented and lastLullCheck
         refreshed.
    """
    now = int(time.time())
    conductsCache = {}          # classID -> loaded conduct instance
    foundOccurrenceCache = {}   # occurrenceFlowID -> cached trigger/conduct lookups
    # Finding occurrences that have expired their lull time
    foundOccurrences = occurrence._occurrence().query(query={
        "lullTime": {
            "$lt": now
        },
        "lullTimeExpired": {
            "$lt": 1
        }
    })["results"]
    foundOccurrencesIDs = []
    for foundOccurrence in foundOccurrences:
        # Adding IDs of found occurrences to the delete list as they are now cleared
        foundOccurrencesIDs.append(db.ObjectId(foundOccurrence["_id"]))
        # Notifying clears
        if foundOccurrence["occurrenceFlowID"] not in foundOccurrenceCache:
            # NOTE(review): bare _occurrence() here (elsewhere this module uses
            # occurrence._occurrence()) - confirm the name is in scope
            tempOccurrence = _occurrence().load(
                foundOccurrence["occurrenceActionID"])
            foundOccurrenceCache[foundOccurrence["occurrenceFlowID"]] = {
                "triggerID": None,
                "conducts": []
            }
            if tempOccurrence.enabled:
                conducts = conduct._conduct().query(
                    query={
                        "flow.actionID": tempOccurrence._id,
                        "flow.flowID": foundOccurrence["occurrenceFlowID"],
                        "enabled": True
                    })["results"]
                foundOccurrenceCache[foundOccurrence["occurrenceFlowID"]][
                    "exitCodeMode"] = {
                        "actionID": tempOccurrence._id,
                        "conducts": conducts
                    }
        # NOTE(review): raises KeyError when the cached occurrence action was
        # disabled ("exitCodeMode" never set) - confirm intended
        conducts = foundOccurrenceCache[foundOccurrence[
            "occurrenceFlowID"]]["exitCodeMode"]["conducts"]
        data = conduct.dataTemplate()
        data["flowData"]["trigger_id"] = tempOccurrence._id
        data["flowData"]["clearOccurrence"] = True
        # If occurrence contains the original data var and event then apply it to the data passed to clear
        if "data" in foundOccurrence:
            data["flowData"]["event"] = foundOccurrence["data"]["event"]
            data["flowData"]["var"] = foundOccurrence["data"]["var"]
        for conduct_ in conducts:
            loadedConduct = None
            if conduct_["classID"] not in conductsCache:
                # Dynamic loading for classType model
                _class = model._model().get(
                    conduct_["classID"]).classObject()
                if _class:
                    loadedConduct = _class().load(conduct_["_id"])
                    conductsCache[conduct_["classID"]] = loadedConduct
                else:
                    logging.debug(
                        "Cannot locate occurrence by ID, occurrenceID='{0}'"
                        .format(foundOccurrence["occurrenceFlowID"]), 6)
            else:
                loadedConduct = conductsCache[conduct_["classID"]]
            if loadedConduct:
                try:
                    # Drop the cached match so a fresh occurrence can form later
                    cache.globalCache.delete("occurrenceCacheMatch",
                                             foundOccurrence["match"])
                    eventStat = {
                        "first": True,
                        "current": 0,
                        "total": 1,
                        "last": True
                    }
                    tempData = conduct.copyData(data)
                    tempData["flowData"]["eventStats"] = eventStat
                    loadedConduct.triggerHandler(
                        foundOccurrence["occurrenceFlowID"],
                        tempData,
                        flowIDType=True)
                except Exception as e:
                    pass  # Error handling is needed here
    # Deleting expired occurrences
    if len(foundOccurrencesIDs) > 0:
        foundOccurrences = occurrence._occurrence().api_delete(
            query={"_id": {
                "$in": foundOccurrencesIDs
            }})
        logging.debug(
            "Occurrences cleared, result='{0}'".format(foundOccurrences), 7)
    # Join occurrences to their triggers so we only age ones whose trigger
    # has run since the last lull check
    activeOccurrences = occurrence._occurrence()._dbCollection.aggregate([{
        "$project": {
            "triggerID": {
                "$toObjectId": '$triggerID'
            },
            "lastLullCheck": 1,
            "lullTime": 1
        }
    }, {
        "$lookup": {
            "from": "triggers",
            "localField": "triggerID",
            "foreignField": "_id",
            "as": "triggers"
        }
    }, {
        "$unwind": "$triggers"
    }, {
        "$match": {
            "lullTime": {
                "$lt": now
            },
            "$expr": {
                "$gt": ["$triggers.lastCheck", "$lastLullCheck"]
            }
        }
    }])
    updateOccurrenceIDs = []
    for activeOccurrence in activeOccurrences:
        updateOccurrenceIDs.append(activeOccurrence["_id"])
    # Increment all with expired lullTime
    if len(updateOccurrenceIDs) > 0:
        incrementedOccurrences = occurrence._occurrence().api_update(
            query={"_id": {
                "$in": updateOccurrenceIDs
            }},
            update={
                "$inc": {
                    "lullTimeExpired": -1
                },
                "$set": {
                    "lastLullCheck": int(time.time())
                }
            })
        logging.debug(
            "Occurrences incremented, result='{0}'".format(
                incrementedOccurrences), 7)
    actionResult["result"] = True
    actionResult["rc"] = 0
    return actionResult
def getConductFlowCodify(conductID):
    """Render a conduct's flow graph as codify text (blank.html).

    Walks the flow depth-first from each trigger node, emitting one line per
    step, indented by depth, in the form name(param=value,...) with actions
    prefixed by their link logic.

    Returns ({}, 404) when the conduct cannot be found.
    """

    def generateFlow(currentFlow, flowDict, triggers, actions):
        # Iterative depth-first walk; processQueue is the explicit stack.
        flowCode = ""
        processQueue = []
        indentLevel = 0
        logic = None
        while True:
            if currentFlow:
                obj = None
                if currentFlow["type"] == "trigger":
                    # BUGFIX: previously compared flow["triggerID"] (a leaked
                    # outer loop variable) instead of currentFlow, so nested
                    # trigger nodes resolved against the wrong flow item.
                    for t in triggers:
                        if currentFlow["triggerID"] == t._id:
                            obj = t
                            break
                    for nextFlow in currentFlow["next"]:
                        processQueue.append({
                            "flowID": nextFlow["flowID"],
                            "indentLevel": indentLevel + 1,
                            "logic": nextFlow["logic"]
                        })
                elif currentFlow["type"] == "action":
                    for a in actions:
                        if currentFlow["actionID"] == a._id:
                            obj = a
                            break
                    for nextFlow in currentFlow["next"]:
                        processQueue.append({
                            "flowID": nextFlow["flowID"],
                            "indentLevel": indentLevel + 1,
                            "logic": nextFlow["logic"]
                        })
                if obj:
                    classObj = model._model().getAsClass(
                        api.g["sessionData"], id=obj.classID)
                    classObj = classObj[0]
                    # Internal bookkeeping attributes never shown as params
                    blacklist = [
                        "_id", "acl", "classID", "workerID", "startCheck",
                        "nextCheck", "lastUpdateTime", "lastCheck"
                    ]
                    members = [
                        attr for attr in dir(obj)
                        if not callable(getattr(obj, attr)) and "__" not in attr
                    ]
                    params = []
                    for member in members:
                        if member not in blacklist:
                            value = getattr(obj, member)
                            if type(value) == str:
                                value = "\"{0}\"".format(value)
                            params.append("{0}={1}".format(member, value))
                    if currentFlow["type"] == "action":
                        flowCode += "\r\n{0}logic({1})->{2}({3})".format(
                            ("\t" * indentLevel), logic, classObj.name,
                            ",".join(params))
                    else:
                        if len(flowCode) > 0:
                            flowCode += "\r\n{0}{1}({2})".format(
                                ("\t" * indentLevel), classObj.name,
                                ",".join(params))
                        else:
                            flowCode = "{0}({1})".format(
                                classObj.name, ",".join(params))
            if len(processQueue) == 0:
                break
            else:
                # Pop the next node (LIFO) together with its depth and link logic
                nextFlowID = processQueue[-1]["flowID"]
                indentLevel = processQueue[-1]["indentLevel"]
                logic = processQueue[-1]["logic"]
                processQueue.pop()
                if nextFlowID in flowDict:
                    currentFlow = flowDict[nextFlowID]
                else:
                    currentFlow = None
        return flowCode

    conductObj = conduct._conduct().getAsClass(api.g["sessionData"],
                                               id=conductID)
    if len(conductObj) == 1:
        conductObj = conductObj[0]
    else:
        return {}, 404
    # Getting all UI flow details for flows in this conduct
    flows = [x for x in conductObj.flow]
    flowDict = {}
    for flow in flows:
        flowDict[flow["flowID"]] = flow
    flowTriggers = [
        db.ObjectId(x["triggerID"]) for x in flows if x["type"] == "trigger"
    ]
    flowActions = [
        db.ObjectId(x["actionID"]) for x in flows if x["type"] == "action"
    ]
    actions = action._action().getAsClass(api.g["sessionData"],
                                          query={"_id": {
                                              "$in": flowActions
                                          }})
    triggers = trigger._trigger().getAsClass(
        api.g["sessionData"], query={"_id": {
            "$in": flowTriggers
        }})
    # Each trigger node starts its own walk; output is concatenated
    flowCode = ""
    for flow in flows:
        if flow["type"] == "trigger":
            flowCode += generateFlow(flow, flowDict, triggers, actions)
    return render_template("blank.html",
                           content=flowCode,
                           CSRF=api.g["sessionData"]["CSRF"]), 200
def conductFlowchartPoll(conductID):
    """Diff the stored conduct flow against the client's flowchart state.

    The client POSTs its current operators and links; the response lists the
    create/update/delete changes needed to bring the client's vis-network
    chart in sync with the database (positions, labels and status colours).

    Returns (flowchartResponse, 200), or ({}, 404) if the conduct is unknown.

    NOTE(review): structure reconstructed from a collapsed source line -
    verify branch nesting against version history.
    """
    conductObj = conduct._conduct().query(api.g["sessionData"],
                                          id=conductID)["results"]
    if len(conductObj) == 1:
        conductObj = conductObj[0]
    else:
        return {}, 404
    data = json.loads(api.request.data)
    flowchartOperators = data["operators"]
    flowchartLinks = data["links"]
    flowchartResponse = {
        "operators": {
            "delete": {},
            "create": {},
            "update": {}
        },
        "links": {
            "delete": {},
            "create": {},
            "update": {}
        }
    }
    # Getting all UI flow details for flows in this conduct
    flows = [x for x in conductObj["flow"]]
    flowTriggers = [
        db.ObjectId(x["triggerID"]) for x in flows if x["type"] == "trigger"
    ]
    flowActions = [
        db.ObjectId(x["actionID"]) for x in flows if x["type"] == "action"
    ]
    flowsList = [x["flowID"] for x in flows]
    linksList = []
    # For every refresh the entire flow object and UI is loaded from the database - this may need improvement for speed in future
    flowsUI = webui._modelUI().getAsClass(api.g["sessionData"],
                                          query={
                                              "flowID": {
                                                  "$in": flowsList
                                              },
                                              "conductID": conductID
                                          })
    actions = action._action().getAsClass(api.g["sessionData"],
                                          query={"_id": {
                                              "$in": flowActions
                                          }})
    triggers = trigger._trigger().getAsClass(
        api.g["sessionData"], query={"_id": {
            "$in": flowTriggers
        }})
    cache.globalCache.newCache("modelCache", sessionData=api.g["sessionData"])
    for flow in flows:
        if "type" in flow:
            flowType = flow["type"]
            if "subtype" in flow:
                flowSubtype = flow["subtype"]
            else:
                flowSubtype = ""
            # e.g. "triggerID"/"actionID" - only process well-formed flow items
            if "{0}{1}".format(flowType, "ID") in flow:
                objectID = "{0}{1}".format(flowType, "ID")
                flowID = flow["flowID"]
                # Default to create
                flowchartResponseType = "create"
                if flowID in flowchartOperators:
                    # If it already exists then it's an update
                    flowchartResponseType = "update"
                # Setting position if it has changed since last pollTime
                foundFlowUI = False
                foundObject = False
                name = flow["flowID"]
                node = {}
                for flowUI in flowsUI:
                    if flow["flowID"] == flowUI.flowID:
                        foundFlowUI = True
                        if flowchartResponseType == "create":
                            # Full node definition for newly drawn operators
                            node["x"] = flowUI.x
                            node["y"] = flowUI.y
                            node["shape"] = "box"
                            node["widthConstraint"] = {
                                "minimum": 125,
                                "maximum": 125
                            }
                            node["heightConstraint"] = {
                                "minimum": 35,
                                "maximum": 35
                            }
                            node["borderWidth"] = 1.5
                            node["font"] = {"multi": True}
                        elif flowUI.x != flowchartOperators[flowID]["node"][
                                "x"] or flowUI.y != flowchartOperators[flowID][
                                    "node"]["y"]:
                            # Only ship a position delta on update
                            node["x"] = flowUI.x
                            node["y"] = flowUI.y
                        if flow["type"] == "trigger":
                            for t in triggers:
                                if flow["triggerID"] == t._id:
                                    name = t.name
                                    modeClass = cache.globalCache.get(
                                        "modelCache",
                                        t.classID,
                                        model.getClassObject,
                                        sessionData=api.g["sessionData"])[0]
                                    # Colour reflects run state: blue idle,
                                    # green running within maxDuration, red
                                    # overdue, gray disabled
                                    color = None
                                    if t.enabled:
                                        color = "#7cbeeb"
                                    duration = t.maxDuration
                                    if duration == 0:
                                        duration = 60
                                    if ((t.startCheck != 0)
                                            and (t.startCheck + duration >
                                                 time.time())):
                                        color = "green"
                                    if ((t.startCheck != 0)
                                            and (t.startCheck + duration <
                                                 time.time())):
                                        color = "red"
                                    if not t.enabled:
                                        color = "gray"
                                    label = "<b>{0}</b>\n{1}".format(
                                        t.name, modeClass.name)
                                    if flowchartResponseType == "create":
                                        node["label"] = label
                                        node["color"] = {"background": color}
                                    else:
                                        if color != flowchartOperators[flowID][
                                                "node"]["color"]:
                                            node["color"] = {
                                                "background": color
                                            }
                                        if label != flowchartOperators[flowID][
                                                "node"]["label"]:
                                            node["label"] = label
                                    foundObject = True
                                    break
                        elif flow["type"] == "action":
                            for a in actions:
                                if flow["actionID"] == a._id:
                                    name = a.name
                                    modeClass = cache.globalCache.get(
                                        "modelCache",
                                        a.classID,
                                        model.getClassObject,
                                        sessionData=api.g["sessionData"])[0]
                                    color = None
                                    if a.enabled:
                                        color = "#7cbeeb"
                                    if not a.enabled:
                                        color = "gray"
                                    label = "<b>{0}</b>\n{1}".format(
                                        a.name, modeClass.name)
                                    if flowchartResponseType == "create":
                                        node["label"] = label
                                        node["color"] = {"background": color}
                                    else:
                                        if color != flowchartOperators[flowID][
                                                "node"]["color"]:
                                            node["color"] = {
                                                "background": color
                                            }
                                        if label != flowchartOperators[flowID][
                                                "node"]["label"]:
                                            node["label"] = label
                                    foundObject = True
                                    break
                        if node:
                            if not foundObject:
                                # UI entry exists but backing object is gone
                                node["label"] = "Unknown Object"
                                node["color"] = {"background": "black"}
                            flowchartResponse["operators"][
                                flowchartResponseType][flowID] = {
                                    "_id": flow[objectID],
                                    "flowID": flowID,
                                    "flowType": flowType,
                                    "flowSubtype": flowSubtype,
                                    "name": name,
                                    "node": node
                                }
                        # Matching flowUI found - stop scanning
                        break
                if not foundFlowUI:
                    # No stored UI position - render a placeholder at origin
                    node["x"] = 0
                    node["y"] = 0
                    node["shape"] = "box"
                    node["widthConstraint"] = {"minimum": 125, "maximum": 125}
                    node["heightConstraint"] = {"minimum": 35, "maximum": 35}
                    node["borderWidth"] = 1.5
                    node["label"] = "Unknown Object"
                    node["color"] = {"background": "black"}
                    flowchartResponse["operators"][flowchartResponseType][
                        flowID] = {
                            "_id": flow[objectID],
                            "flowID": flowID,
                            "flowType": flowType,
                            "flowSubtype": flowSubtype,
                            "node": node
                        }
                # Do any links need to be created
                for nextFlow in flow["next"]:
                    linkName = "{0}->{1}".format(flowID, nextFlow["flowID"])
                    linksList.append(linkName)
                    if linkName not in flowchartLinks:
                        flowchartResponse["links"]["create"][linkName] = {
                            "from": flowID,
                            "to": nextFlow["flowID"],
                            "logic": nextFlow["logic"]
                        }
                    #Updates (for logic for now) << needs to be readded but using same as node with color value set from server
                    #flowchartResponse["links"]["update"][linkName] = { "from" : flowID, "to" : nextFlow["flowID"], "logic" : nextFlow["logic"] }
    # Checking for deleted operators
    for flowchartOperator in flowchartOperators:
        if flowchartOperator not in flowsList:
            flowchartResponse["operators"]["delete"][flowchartOperator] = {
                "flowID": flowchartOperator
            }
    # Checking for deleted links
    for flowchartLink in flowchartLinks:
        if flowchartLink not in linksList:
            flowchartResponse["links"]["delete"][flowchartLink] = {
                "linkName": flowchartLink
            }
    return flowchartResponse, 200
def getModel(self):
    """Load and unpickle the persisted model blob for this document.

    NOTE(review): pickle.loads executes arbitrary code if the stored blob
    is attacker-controlled - this is safe only while the database contents
    are trusted.
    """
    storedDoc = self._dbCollection.find_one({"_id": db.ObjectId(self._id)})
    return pickle.loads(storedDoc["model"])
def getStatistics(self, xyStatisticsDict):
    """Return this document's statistics dict with unicode escapes resolved.

    xyStatisticsDict is currently unused; kept for interface compatibility
    with existing callers.
    """
    storedDoc = self._dbCollection.find_one({"_id": db.ObjectId(self._id)})
    return helpers.unicodeEscapeDict(storedDoc["statistics"])