def startWorkflow(self, session, logfilename, address, workflowname):
    from Workflow import Workflow

    if not self.isActive():
        # create and persist the new workflow
        wf = Workflow(workflowname, self, self.user, logfilename, address)
        session.add(wf)
        session.commit()

        # start the workflow, then persist its updated state
        print("starting workflow", workflowname)
        wf.start(session)
        session.add(wf)
        session.commit()
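
A minimal calling sketch, assuming a SQLAlchemy-style session factory and an object exposing isActive() and user (all names below are hypothetical):

# Hypothetical wiring: `Session` is a configured SQLAlchemy sessionmaker and
# `manager` is whatever object defines startWorkflow() above.
session = Session()
manager.startWorkflow(session,
                      logfilename="workflow.log",
                      address="127.0.0.1",
                      workflowname="nightly")
session.close()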
Example #2
def __init__(self, taskCount=10, save=False):
    self.taskCount = taskCount
    self.workflow = Workflow(taskCount)
    self.workflowbak = self.workflow
    self.runningTasks = []
    self.finishedTasks = []
    self.currentTime = 0
    self.resourcePool = []
    self.initVM()
    self.finishedSize = 0
    self.totalSize = sum(self.workflow.taskSize)
    if save:
        with open('env-' + str(self.taskCount), 'wb') as dbfile:
            pickle.dump(self, dbfile)
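
To restore a saved environment later, a matching load sketch (file name follows the 'env-<taskCount>' convention above; the environment's classes must be importable when unpickling):

import pickle

with open('env-10', 'rb') as dbfile:   # 'env-10' for taskCount=10
    env = pickle.load(dbfile)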
Example #3
def main():
    """ Defines the type of job (preprocessing, training, generation, or testing), 
    runs it, and writes the job parameters used.
    """
    # fix date/time
    _ = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    workflow = Workflow(constants=C)

    job_type = C.job_type
    print(f"* Run mode: '{job_type}'", flush=True)

    if job_type == "preprocess":
        # write preprocessing parameters
        util.write_preprocessing_parameters(params=C)

        # preprocess all datasets
        workflow.preprocess_phase()

    elif job_type == "train":
        # write training parameters
        util.write_job_parameters(params=C)

        # train model and generate graphs
        workflow.training_phase()

    elif job_type == "generate":
        # write generation parameters
        util.write_job_parameters(params=C)

        # generate molecules only
        workflow.generation_phase()

    elif job_type == "benchmark":
        # TODO not integrated with MOSES, at the moment benchmarking is done by
        # generating N structures, copying the generated SMILES to the MOSES
        # dir, and running the benchmarking job according to MOSES instructions
        raise NotImplementedError

    elif job_type == "test":
        # write testing parameters
        util.write_job_parameters(params=C)

        # evaluate best model using the test set data
        workflow.testing_phase()

    else:
        raise NotImplementedError("Not a valid `job_type`.")
Example #4
def find_matches(workflow_dir, keywords):
    workflows = Workflow.workflows_for_filestrings(Seeker.file_strings(workflow_dir))
    keyword_set = KeywordSet(keywords)
    if not keyword_set.is_valid():
        print("   > Invalid keywords")
        sys.exit()
    return Analyzer.workflows_for_keywords(keyword_set, workflows)
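
A hypothetical call, assuming a directory of workflow files and a couple of search keywords (both placeholders):

matches = find_matches("workflows/", ["backup", "upload"])
for workflow in matches:
    print(workflow)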
Example #5
def main():
    """
    Defines the type of job (preprocessing, training, generation, or testing),
    writes the job parameters (for future reference), and runs the job.
    """
    # fix date/time
    _ = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

    workflow = Workflow(constants=constants)

    job_type = constants.job_type
    print(f"* Run mode: '{job_type}'", flush=True)

    if job_type == "preprocess":
        # write preprocessing parameters
        util.write_preprocessing_parameters(params=constants)

        # preprocess all datasets
        workflow.preprocess_phase()

    elif job_type == "train":
        # write training parameters
        util.write_job_parameters(params=constants)

        # train model and generate graphs
        workflow.training_phase()

    elif job_type == "generate":
        # write generation parameters
        util.write_job_parameters(params=constants)

        # generate molecules only
        workflow.generation_phase()

    elif job_type == "test":
        # write testing parameters
        util.write_job_parameters(params=constants)

        # evaluate best model using the test set data
        workflow.testing_phase()

    else:
        raise NotImplementedError("Not a valid `job_type`.")
Example #6
def test_DeleteRecordStep(src_evtx_001, filter_single_record_without_res_template):
    wf = Workflow()
    step = DeleteRecordStep(filter_single_record_without_res_template)
    wf.add_step(step)
    wf.run(src_evtx_001[0], src_evtx_001[1])

    with evtx.Evtx(src_evtx_001[1]) as output:
        fh = output.get_file_header()

        # check header
        assert fh.verify() is True
Example #7
def main(src, dest, username):
    # initialize Workflow
    workflow = Workflow()

    # create target username and eventid filter
    filter_subj = WorkflowStepFilter()
    filter_subj.add_eventdata_filter("TargetUserName", username)
    filter_subj.add_system_filter("EventID", "4624")
    # create and add step to workflow
    step = DeleteRecordStep(filter_subj)
    workflow.add_step(step)

    # start workflow
    workflow.run(src, dest)
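
A hypothetical invocation that strips all 4624 logon events for one account from a copy of the log (paths and username are placeholders):

main(src="Security.evtx", dest="Security_clean.evtx", username="Administrator")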
Example #8
def test_ModifyTimestampStep(src_evtx_001, filter_single_record):
    wf = Workflow()
    new_value = datetime.datetime(1900, 11, 11, 11, 11, 11, 11, tzinfo=datetime.timezone.utc)
    step = ModifyTimestampStep(filter_single_record, new_value)
    wf.add_step(step)
    wf.run(src_evtx_001[0], src_evtx_001[1])

    with evtx.Evtx(src_evtx_001[1]) as output:
        fh = output.get_file_header()

        # check header
        assert fh.verify() is True

        # check new value
        assert helper_get_value(output, filter_single_record.element_filter, filter_single_record.eventdata_filter,
                                element_name="TimeCreated", attribute_name="SystemTime", attribute=True) == "1900-11-11 11:11:11.000011"
Example #9
def test_ModifySystemdataStep(src_evtx_001, filter_single_record):
    wf = Workflow()
    systemdata_name = "EventID"
    new_value = "9999"
    step = ModifySystemdataStep(filter_single_record, new_value, systemdata_name=systemdata_name)
    wf.add_step(step)
    wf.run(src_evtx_001[0], src_evtx_001[1])

    with evtx.Evtx(src_evtx_001[1]) as output:
        fh = output.get_file_header()

        # check header
        assert fh.verify() is True

        # check new value
        assert helper_get_value(output, filter_single_record.element_filter, filter_single_record.eventdata_filter,
                                element_name=systemdata_name) == new_value
Example #10
def test_ModifyEventdataStep(src_evtx_001, filter_single_record):
    wf = Workflow()
    eventdata_name = "SubjectDomainName"
    new_value = "EvilDomain"
    step = ModifyEventdataStep(filter_single_record, new_value, eventdata_name)
    wf.add_step(step)
    wf.run(src_evtx_001[0], src_evtx_001[1])

    with evtx.Evtx(src_evtx_001[1]) as output:
        fh = output.get_file_header()

        # check header
        assert fh.verify() is True

        # check new value
        assert helper_get_value(output, filter_single_record.element_filter, filter_single_record.eventdata_filter,
                                element_name="Data", attribute_value=eventdata_name) == new_value
Example #11
def test_IncrementElementValueStep(src_evtx_001, filter_single_record):
    wf = Workflow()
    element_name = "EventID"
    increment = -1  # decrement the EventID by one
    step = IncrementElementValueStep(filter_single_record, increment, element_name=element_name)
    wf.add_step(step)
    wf.run(src_evtx_001[0], src_evtx_001[1])

    with evtx.Evtx(src_evtx_001[1]) as output:
        fh = output.get_file_header()

        # check header
        assert fh.verify() is True

        # check new value: EventID 4688 decremented by 1
        assert helper_get_value(output, filter_single_record.element_filter, filter_single_record.eventdata_filter,
                                element_name="EventID") == "4687"
Example #12
def test_ModifyAttributeValueStep(src_evtx_001, filter_single_record):
    wf = Workflow()
    element_name = "Execution"
    attribute_name = "ProcessID"
    new_value = "666"
    step = ModifyAttributeValueStep(filter_single_record, new_value,
                                    element_name=element_name,
                                    attribute_name=attribute_name)
    wf.add_step(step)
    wf.run(src_evtx_001[0], src_evtx_001[1])

    with evtx.Evtx(src_evtx_001[1]) as output:
        fh = output.get_file_header()

        # check header
        assert fh.verify() is True

        # check new value
        assert helper_get_value(output, filter_single_record.element_filter, filter_single_record.eventdata_filter,
                                element_name=element_name, attribute_name=attribute_name, attribute=True) == new_value
Example #13
def main():
    # initialize Workflow
    workflow = Workflow()

    # create filter (renamed to avoid shadowing the built-in `filter`)
    event_filter = WorkflowStepFilter()
    event_filter.add_system_filter("EventID", "4688")

    # create and add step to workflow
    step = ModifyElementValueStep(event_filter,
                                  new_value="Evil",
                                  element_name="Data",
                                  attribute_name="Name",
                                  attribute_value="SubjectUserName")
    workflow.add_step(step)

    # start workflow
    workflow.run("../tests/data/ex_001.evtx", "../tests/data/output.evtx")
Example #14
def main(src, dest, eventrecordid, days, hours, minutes, seconds,
         microseconds):
    # initialize Workflow
    workflow = Workflow()

    # create eventrecordid filter
    filter_subj = WorkflowStepFilter()
    filter_subj.add_system_filter("EventRecordID", eventrecordid)
    # create and add step to workflow
    step = IncrementTimestampStep(filter_subj,
                                  days=days,
                                  hours=hours,
                                  minutes=minutes,
                                  seconds=seconds,
                                  microseconds=microseconds)
    workflow.add_step(step)

    # start workflow
    workflow.run(src, dest)
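
A hypothetical invocation shifting one record's timestamp forward by a day and a half (record id and paths are placeholders):

main(src="Security.evtx", dest="Security_shifted.evtx", eventrecordid="42",
     days=1, hours=12, minutes=0, seconds=0, microseconds=0)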
Example #15
    def __init__(self, config):
        with open(config, 'r') as f:
            json_data = json.load(f)

        configItems = json_data['Config']

        powerConsumptionItem = configItems['PowerConsumption']

        HWName = powerConsumptionItem['HWName']
        CPUIdle = float(powerConsumptionItem['CPUIdle'])
        CPUActive = float(powerConsumptionItem['CPUActive'])
        Sleep = float(powerConsumptionItem['Sleep'])

        self.PowerState = PowerState(HWName, CPUIdle, CPUActive, Sleep)

        sensingItem = configItems['Sensing']

        self.Sensors = {}

        for sensorItem in sensingItem['Sensor']:
            SensorId = int(sensorItem['Id'])
            SensorName = sensorItem['Name']
            SensingPeriod = float(sensorItem['SensingPeriod'])
            DataRate = float(sensorItem['DataRate'])
            AcquireTime = float(sensorItem['AcquireTime'])
            StaticPower = float(sensorItem['StaticPower'])
            DynamicPower = float(sensorItem['DynamicPower'])
            Criticality = float(sensorItem['Criticality'])

            SensorObj = Sensor(SensorName, SensingPeriod, DataRate,
                               AcquireTime, StaticPower, DynamicPower,
                               Criticality)
            self.Sensors[SensorId] = SensorObj

        processingItem = configItems['Processing']

        self.ProcAlgos = {}

        for procAlgoItem in processingItem['ProcAlgo']:
            ProcAlgoId = int(procAlgoItem['Id'])
            ProcAlgoName = procAlgoItem['Name']
            ProcTimePerBit = float(procAlgoItem['ProcTimePerBit'])
            CompressionRatio = float(procAlgoItem['CompressionRatio'])
            Accuracy = float(procAlgoItem['Accuracy'])
            ProcAlgoObj = ProcAlgo(ProcAlgoName, ProcTimePerBit,
                                   CompressionRatio, Accuracy)
            self.ProcAlgos[ProcAlgoId] = ProcAlgoObj

        networkingItem = configItems['Networking']

        self.Protocols = {}

        for protocolItem in networkingItem['Protocol']:
            ProtocolId = int(protocolItem['Id'])
            ProtocolName = protocolItem['Name']
            Rx = float(protocolItem['Rx'])
            Tx = float(protocolItem['Tx'])
            MaxPacketSize = float(protocolItem['MaxPacketSize'])
            PHYRate = float(protocolItem['PHYRate'])
            PHYOverhead = float(protocolItem['PHYOverhead'])
            MACOverhead = float(protocolItem['MACOverhead'])
            IPv6Overhead = float(protocolItem['IPv6Overhead'])
            SynchroPeriod = float(protocolItem['SynchroPeriod'])
            PacketDeliveryRatio = float(protocolItem['PacketDeliveryRatio'])
            ProtocolSpecificParam = protocolItem['ProtocolSpecificParam']

            NetProtocolObj = NetProtocolFactory.getNetProtocol(
                ProtocolName,
                TechnoName=ProtocolName,
                Rx=Rx,
                Tx=Tx,
                MaxPacketSize=MaxPacketSize,
                PHYRate=PHYRate,
                PHYOverhead=PHYOverhead,
                MACOverhead=MACOverhead,
                IPv6Overhead=IPv6Overhead,
                SynchroPeriod=SynchroPeriod,
                PacketDeliveryRatio=PacketDeliveryRatio,
                ProtocolSpecificParam=ProtocolSpecificParam)
            self.Protocols[ProtocolId] = NetProtocolObj

        contextItem = configItems['Context']

        self.Workflows = {}

        for workflowItem in contextItem['Workflow']:
            WorkflowId = int(workflowItem['Id'])

            if workflowItem['SensorId'] == 'None':
                SensorId = 0
            elif workflowItem['SensorId'] == 'Any':
                SensorId = -1
            else:
                SensorId = int(workflowItem['SensorId'])

            if workflowItem['ProcAlgoId'] == 'None':
                ProcAlgoId = 0
            elif workflowItem['ProcAlgoId'] == 'Any':
                ProcAlgoId = -1
            else:
                ProcAlgoId = int(workflowItem['ProcAlgoId'])

            if workflowItem['ProtocolId'] == 'None':
                ProtocolId = 0
            elif workflowItem['ProtocolId'] == 'Any':
                ProtocolId = -1
            else:
                ProtocolId = int(workflowItem['ProtocolId'])

            WorkflowObj = Workflow(SensorId, ProcAlgoId, ProtocolId)
            self.Workflows[WorkflowId] = WorkflowObj

        Schemes = configItems['Schemes']
        self.Schemes = {}

        for scheme in Schemes['Scheme']:
            SchemeId = scheme['Id']
            rules = []
            for ruleItem in scheme['Rule']:
                RuleId = int(ruleItem['Id'])

                ifItem = ruleItem['If']
                EventType = ifItem['EventType']
                CurId = list(map(int, ifItem['CurId'].split(',')))
                Incident = ifItem['Incident']

                thenItem = ruleItem['Then']
                Action = thenItem['Action']
                NewId = list(map(int, thenItem['NewId'].split(',')))

                RuleObj = Rule(EventType, CurId, Incident, Action, NewId)
                rules.append(RuleObj)
            SchemeObj = Scheme(scheme['Name'], rules,
                               int(scheme['DefaultWorkFlowId']))
            self.Schemes[SchemeId] = SchemeObj
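
For orientation, a minimal sketch of the JSON shape this constructor parses, with one hypothetical entry per section. All names and numbers below are placeholders, and whether a protocol name such as "BLE" is accepted depends on what NetProtocolFactory supports:

# Hypothetical minimal config mirroring the keys read above.
minimal_config = {"Config": {
    "PowerConsumption": {"HWName": "node", "CPUIdle": "0.01",
                         "CPUActive": "0.4", "Sleep": "0.001"},
    "Sensing": {"Sensor": [{"Id": "1", "Name": "temp", "SensingPeriod": "60",
                            "DataRate": "8", "AcquireTime": "0.1",
                            "StaticPower": "0.0", "DynamicPower": "0.05",
                            "Criticality": "1.0"}]},
    "Processing": {"ProcAlgo": [{"Id": "1", "Name": "compress",
                                 "ProcTimePerBit": "1e-06",
                                 "CompressionRatio": "0.5", "Accuracy": "0.9"}]},
    "Networking": {"Protocol": [{"Id": "1", "Name": "BLE", "Rx": "0.03",
                                 "Tx": "0.03", "MaxPacketSize": "251",
                                 "PHYRate": "1000000", "PHYOverhead": "14",
                                 "MACOverhead": "10", "IPv6Overhead": "40",
                                 "SynchroPeriod": "1.0",
                                 "PacketDeliveryRatio": "0.95",
                                 "ProtocolSpecificParam": {}}]},
    "Context": {"Workflow": [{"Id": "1", "SensorId": "1",
                              "ProcAlgoId": "Any", "ProtocolId": "None"}]},
    "Schemes": {"Scheme": [{"Id": "s1", "Name": "default",
                            "DefaultWorkFlowId": "1",
                            "Rule": [{"Id": "1",
                                      "If": {"EventType": "sensor",
                                             "CurId": "1", "Incident": "fail"},
                                      "Then": {"Action": "switch",
                                               "NewId": "1"}}]}]}
}}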
Example #16
def returnWorkflow(self):
    return Workflow(self.workflow)
Example #17
from Inject import Inject
from AttackVectorSimulation import AttackVectorSimulation
from Workflow import Workflow
import argparse

# Sets up parser
parser = argparse.ArgumentParser()
parser.add_argument("attack_type", help="put attack vector type")
parser.add_argument("number_of_threats",
                    help="put number of threats wanted",
                    type=int)

args = parser.parse_args()

# Call the pipeline methods in order
i = Inject()
i.read()
a = AttackVectorSimulation(i.get_obj(), args.attack_type,
                           args.number_of_threats)
a.insert_threats()
w = Workflow()
# append the generated workflow emails to the injected objects
emails = i.get_obj()
emails.extend(w.create_workflow_emails())
i.set_obj(emails)
i.write()
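
For reference, a sketch of how the parser above maps a command line onto the two arguments (values are placeholders):

# Equivalent of running the script as: <script> phishing 5
args = parser.parse_args(["phishing", "5"])
print(args.attack_type, args.number_of_threats)   # -> phishing 5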
Example #18
class Environment:
    def __init__(self, taskCount=10, save=False):
        self.taskCount = taskCount
        self.workflow = Workflow(taskCount)
        self.workflowbak = copy.deepcopy(self.workflow)  # pristine copy for reset() (needs `import copy`)
        self.runningTasks = []
        self.finishedTasks = []
        self.currentTime = 0
        self.resourcePool = []
        self.initVM()
        self.finishedSize = 0
        self.totalSize = sum(self.workflow.taskSize)
        if save:
            with open('env-' + str(self.taskCount), 'wb') as dbfile:
                pickle.dump(self, dbfile)

    def saveWorkflow(self):
        with open('env-' + str(self.taskCount), 'wb') as dbfile:
            pickle.dump(self, dbfile)

    def getCurrentCost(self):
        cost = 0
        for vm in self.resourcePool:
            cost += vm.totalCost
        return cost

    def initVM(self):
        vm_large_1 = VM(speed=1.8, cost=2.3, type='large')
        vm_large_2 = VM(speed=1.8, cost=2.3, type='large')
        vm_large_3 = VM(speed=1.8, cost=2.3, type='large')

        vm_medium_1 = VM(speed=1.4, cost=1.7, type='medium')
        vm_medium_2 = VM(speed=1.4, cost=1.7, type='medium')
        vm_medium_3 = VM(speed=1.4, cost=1.7, type='medium')

        vm_small_1 = VM(speed=1, cost=1, type='small')
        vm_small_2 = VM(speed=1, cost=1, type='small')
        vm_small_3 = VM(speed=1, cost=1, type='small')

        self.resourcePool.append(vm_large_1)
        self.resourcePool.append(vm_large_2)
        self.resourcePool.append(vm_large_3)

        self.resourcePool.append(vm_medium_1)
        self.resourcePool.append(vm_medium_2)
        self.resourcePool.append(vm_medium_3)

        self.resourcePool.append(vm_small_1)
        self.resourcePool.append(vm_small_2)
        self.resourcePool.append(vm_small_3)

    def getFinishRate(self):
        return self.finishedSize / self.totalSize

    def timeProcess(self):
        self.currentTime += 0.1
        for i in range(len(self.resourcePool)):
            finishSig = self.resourcePool[i].timeProcess()
            if finishSig:
                self.setTaskFinished(self.resourcePool[i].taskNo)
                self.resourcePool[i].reset()

    def step(self, taskNo, vmNo):

        # vmNo == -1 means hold (schedule nothing this step)
        if vmNo != -1:
            # schedule only if the chosen VM is idle (taskNo == -1, as in scheduleTask)
            if self.resourcePool[vmNo].taskNo == -1:
                # print("Scheduling task", taskNo, "to VM", vmNo)
                self.scheduleTask(taskNo, vmNo)
            else:
                pass
                # print("=== chosen VM is busy")
        else:
            pass
            # print("=== hold")

        ob = self.getObservation()
        self.timeProcess()
        reward = self.getFinishRate()
        return self.isDone(), ob, reward

    def getNewTasks(self):
        pre_tasks = self.workflow.getNewTask()
        tasksToSchedule = list(
            set(pre_tasks) - set(self.runningTasks) - set(self.finishedTasks))
        if len(tasksToSchedule) > 0:
            tasksToSchedule.sort()
        return tasksToSchedule

    def scheduleTask(self, taskNo, vmNo):
        if self.resourcePool[vmNo].taskNo != -1:
            return False, 'vm_' + str(vmNo) + ' currently unavailable'
        self.runningTasks.append(taskNo)
        self.resourcePool[vmNo].assignTask(taskNo,
                                           self.workflow.taskSize[taskNo])
        return True, 'schedule task t_' + str(taskNo) + ' to vm_' + str(vmNo)

    def setTaskFinished(self, taskNo):
        self.workflow.markAsFinished(taskNo)
        self.runningTasks.remove(taskNo)
        self.finishedTasks.append(taskNo)
        self.finishedSize += self.workflow.taskSize[taskNo]  # track completed work for getFinishRate()

    def isDone(self):
        return len(self.finishedTasks) == self.workflow.taskCount

    def reset(self):
        self.workflow = copy.deepcopy(self.workflowbak)  # restore a fresh copy of the original workflow
        self.runningTasks = []
        self.finishedTasks = []
        self.currentTime = 0
        self.resourcePool = []
        self.initVM()
        self.finishedSize = 0
        self.totalSize = sum(self.workflow.taskSize)
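
A minimal interaction sketch with this environment, assuming getObservation() (called by step but not shown here) and the Workflow and VM classes are defined elsewhere in the project. The policy is deliberately trivial: try VM 0 if it is idle, otherwise hold:

env = Environment(taskCount=10)
done = False
while not done:
    tasks = env.getNewTasks()
    if tasks and env.resourcePool[0].taskNo == -1:    # VM 0 idle
        done, ob, reward = env.step(tasks[0], 0)
    else:
        done, ob, reward = env.step(-1, -1)           # hold this tick
print("finish rate:", env.getFinishRate(), "cost:", env.getCurrentCost())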
Example #19
def main(src, dest, old_username, new_username, new_sid):
    # initialize Workflow
    workflow = Workflow()

    # create subject username filter
    filter_subj = WorkflowStepFilter()
    filter_subj.add_eventdata_filter("SubjectUserName", old_username)
    # create and add step to workflow
    step_subj_sid = ModifyEventdataStep(filter_subj,
                                        new_value=new_sid,
                                        eventdata_name="SubjectUserSid")
    step_subj = ModifyEventdataStep(filter_subj,
                                    new_value=new_username,
                                    eventdata_name="SubjectUserName")
    workflow.add_step(step_subj_sid)
    workflow.add_step(step_subj)

    # create target username filter
    filter_target = WorkflowStepFilter()
    filter_target.add_eventdata_filter("TargetUserName", old_username)
    # create and add step to workflow
    step_target = ModifyEventdataStep(filter_target,
                                      new_value=new_username,
                                      eventdata_name="TargetUserName")
    step_target_sid = ModifyEventdataStep(filter_target,
                                          new_value=new_sid,
                                          eventdata_name="TargetUserSid")
    workflow.add_step(step_target_sid)
    workflow.add_step(step_target)

    # start workflow
    workflow.run(src, dest)
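
A hypothetical invocation that rewrites every event referencing one account (username, SID, and paths are placeholders):

main(src="Security.evtx", dest="Security_renamed.evtx",
     old_username="jdoe", new_username="mallory",
     new_sid="S-1-5-21-1111111111-2222222222-3333333333-1001")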
Example #20
    ui_input = {
        'task_relationship': {
            1: [{
                'con': None,
                'out': 2
            }],
            2: [{
                'con': None,
                'out': 3
            }, {
                'con': None,
                'out': 4
            }],
            3: [{
                'con': None,
                'out': 5
            }],
            4: [{
                'con': None,
                'out': 5
            }],
            5: [{
                'con': '<',
                'out': 6
            }]
        },
        # 'task_relationship': {
        #     1: [{'con': None, 'out': 2}],
        #     2: [{'con': None, 'out': 3}, {'con': None, 'out': 4}],
        #     3: [{'con': None, 'out': 5}],
        #     4: [{'con': None, 'out': 5}],
        #     5: [{'con': '>', 'out': 6}]
        # },
        'start_task': 1
    }

    workflow = Workflow(name='test')
    # Create workflow from provided input
    workflow.create_workflow(user_input=ui_input)

    # Start workflow tasks as separate processes
    start_tasks(workflow)