Example #1
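# Builds a social-graph profile of one target (assumes project-local modules
# auth, cache, graph_algos, stats, and report from the surrounding project):
# create the target's ego graph, split it into communities, score each
# community per attribute, and render a report.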
def main():
    session = auth()
    cache.init()
    target = int(input("[?] Enter target's ID: "))  # int() requires a numeric ID; a text handle would raise ValueError
    g = graph_algos.create_ego_graph(target, session)
    comm_list = graph_algos.get_communities(g, target, session=session)
    comm_dict = graph_algos.separate_communities(comm_list)
    # For each attribute, score every community, then convert the scores to probabilities.
    sims = {"city": [], "country": [], "school": [], "university": []}
    for t in sims:
        for comm in comm_dict.values():
            sims[t].append(stats.find_similar(comm, session, t))
        sims[t] = stats.calc_prob(sims[t])

    report.gen_report(str(target), sims)
    return 0
Example #2
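# Driver for a Pin-based tracing pipeline (Python 2, as in the original
# project; assumes project-local config, bbl_manager, and report modules):
# run the PinTool, load the recorded instructions and trace, consolidate
# basic blocks, detect handlers, and generate a report.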
def main():
    # Change work folder.
    os.chdir(config.WORK_PATH)

    print '[*] Target: %s' % config.EXE_PATH
    print '----- STEP 1 -----'
    print '[*] Running PinTool ...'
    print '#' * 80

    os.system(config.PIN_CMD)

    print '#' * 80

    print '[*] Pin instrumentation finished.'

    global bm
    bm = bbl_manager.BBLManager()

    print '----- STEP 2 -----'
    # Load instructions.
    bm.load_ins_info(config.INS_PATH)

    print '----- STEP 3 -----'
    # Load trace.
    bm.load_trace(config.TRACE_PATH, config.START_ADDR, config.END_ADDR)

    print '----- STEP 4 -----'
    # Generate execution graph.
    bm.consolidate_blocks()

    print '----- STEP 5 -----'
    # Detect handlers in the consolidated block graph.
    bm.detect_handlers()

    print '----- STEP 6 -----'
    print '[*] Generating report ...'
    report.gen_report(bm)
    print '[*] Report generated.'
Example #3
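# Tail of a survey-analysis script (assumes DataFrames new_df and new_features
# prepared earlier, plus project-local modules results, report, and
# data_upload): run a pickled random-forest model on the new survey results,
# build the frontend reports, and store them in MongoDB.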
# Note: i_3 and o_3 are dropped for model improvements
new_features = new_features.drop(["i_3", "o_3"], axis=1)

# Load Prediction Model
import pickle
with open("rf.sav", "rb") as f:
    rf = pickle.load(f)

# Execute Prediction Model on New Survey Results
import results
new_results_individual, new_results_department, new_results_job_level, new_results_age, new_results_organisation = results.get_results(
    rf, new_df, new_features)

# Generate Report for Frontend & Storing to Database
import report
report_type_3_age, report_type_3_job_level, report_type_3_department, report_type_4_wellbeing, report_type_4_opinions, report_type_4_personality, report_type_4_core_values, report_type_5 = report.gen_report(
    new_results_individual, new_results_age, new_results_job_level,
    new_results_department)

# Convert Index to Column for Storage in MongoDB as unique identifier
new_results_age['Age Category'] = new_results_age.index
new_results_department['Department'] = new_results_department.index
new_results_job_level['Job Level'] = new_results_job_level.index

import data_upload
data_upload.upload(report_type_4_wellbeing, report_type_4_opinions,
                   report_type_4_personality, report_type_4_core_values,
                   new_results_individual, new_results_department,
                   new_results_job_level, new_results_age)
""" Following Reports are Deprecated """
# =============================================================================
# # Collection 1 - Age
Example #4
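    # Same BBLManager workflow as Example #2, driven with hard-coded paths
    # (assumes `from bbl_manager import BBLManager` plus the project's report
    # module): load instruction info and the base64 sample's trace, consolidate
    # blocks, detect and dump handlers, then generate and open the report.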
    bm = BBLManager()
    bm.load_ins_info(r'D:\papers\pin\pin-3.2-81205-msvc-windows\source\tools\MyPinTool\bin.ins')
    bm.load_trace(r'D:\papers\pin\pin-3.2-81205-msvc-windows\source\tools\MyPinTool\bin.trace',
        # start_addr=0x401000, end_addr=0x40127C) # allop
        start_addr=0x401000, end_addr=0x00401169) # base64
    # bm.load_trace('../bin.block')      
    bm.consolidate_blocks()
    # cPickle.dump(bm, open('test.dump','wb')) 
    # bm.display_bbl_graph()
    # bm.display_bbl_graph_ida()

    bm.detect_handlers() 
    bm.dump_handlers()


    report.gen_report(bm)
    report.open_report()


    # for h in bm.handlers.values():
    #     print '*'*20
    #     print h
    #     print h.ins_str
    #     print h.to_expr('cv')


    # h = bm.handlers[0x405853]

    # s = h.to_sym_state()

    # s.emul_ir_block(0, True)
Example #5
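# Wires a report into an APScheduler BackgroundScheduler (assumes project-local
# storage and gen_report helpers): look up the report's config, sensors, and
# pages, build a CronTrigger from its send date, and add/modify/remove the
# scheduled gen_report job keyed by the report's UUID.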
def cook_report_uuid(scheduler: BackgroundScheduler,
                     report_uuid: str,
                     action="add") -> None:
    """
    :param scheduler:
    :param report_uuid:
    :param action: add/mod/del
    :return:
    """
    print(
        f"cook report uuid, scheduler: {scheduler}, report_uuid: {report_uuid}, "
        f"action: {action}")
    if action == "del":
        scheduler.remove_job(job_id=report_uuid)
        return

    ok, report_info = storage.get_report_info(report_uuid)
    print(f"report_info: {report_info}")
    if not ok:
        print(f"Fail to get report info for uuid: {report_uuid}"
              f"errmsg: {report_info}")
        return

    ok, page_info = storage.get_report_detail_info_by_id(report_uuid)
    if not ok:
        print(f"Fail to get report detail info for uuid: {report_uuid}, "
              f"errmsg: {page_info}")
        return

    # week/month
    sensor_group_ids = report_info["sensor_group_ids"]

    # Fetch sensor and sensor-group information.
    ok, sensor_id_group_mapping = storage.get_group_sensor_map(
        sensor_group_ids)
    if not ok:
        print(f"fail to get sensor_groups by id: {sensor_group_ids}")
        return

    sensors = list(sensor_id_group_mapping.keys())

    cron = CronTrigger.from_crontab(report_info["send_date"])
    _report_params = {
        "report_name": report_info["report_name"],
        "report_format": report_info["report_format"],
        "timed": report_info["timed"],
        "data_scope": report_info["data_scope"],
        "sensor_ids": sensors,
        "sensor_id_group_mapping": sensor_id_group_mapping,
        "page_info": page_info,
        "send_mail": report_info["send_email"],
    }

    if action == "mod":
        scheduler.modify_job(job_id=report_uuid,
                             trigger=cron,
                             kwargs=_report_params)

    elif action == "add":
        gen_report(**_report_params)
        scheduler.add_job(func=gen_report,
                          trigger=cron,
                          kwargs=_report_params,
                          id=report_uuid)

    else:
        raise ValueError("action must be in ('add', 'del', 'mod')")
Example #6
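    # Constructor of a backup-orchestration class (assumes project-local Mount,
    # Backup, and Mail helpers plus an INI-style config): mount a network
    # drive, run the backup command, unmount, then e-mail the gen_report
    # summary of the run.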
    def __init__(self):
        super(Main, self).__init__(load_config, NAME_FILE_CONFIG_PATH,
                                   NAME_FILE_LOG_PATH)

        self.loading_args()
        self.log_configuration()

        # General
        self.config = self.loading_file_config()
        self.errors = []

        self.fecha = str(datetime.date.today())
        self.hora_inicio = str(time.strftime("%H-%M-%S"))
        self.numbers_backup = self.config.get("GENERAL", "numbers_backup")
        self.ruta_destino = Path(self.config.get("GENERAL", "destiny_path"))

        # Mount and unmount
        self.mount_is_enable = self.config.get("MOUNT", "enable") == "yes"
        self.umount_is_enable = self.config.get("MOUNT", "umount") == "yes"
        self.username_und = self.config.get("MOUNT", "username_und")
        self.password_und = self.config.get("MOUNT", "password_und")
        self.path_network = self.config.get("MOUNT", "path_network")
        self.letter_und = self.config.get("MOUNT", "letter_und")

        self.mount = Mount(self.errors, self.mount_is_enable,
                           self.umount_is_enable, self.username_und,
                           self.password_und, self.path_network,
                           self.letter_und, self.ruta_destino)

        # Run the command that mounts the network drive.
        errors, salida_error_cmd, salida_cmd = self.mount.fun_mount_und()
        self.errors = errors

        # Backups
        self.backup = Backup(self.config, self.errors)
        self.command_backup = self.backup.gen_command_backup(
            self.config, self.fecha, self.hora_inicio)
        self.name_file_backup_latest = self.backup.gen_name_file_backup_latest(
            self.config, self.fecha, self.hora_inicio)

        # Generate the temp backup path.
        self.path_name_temp = self.ruta_destino.child(
            self.name_file_backup_latest)

        size_dump_file = 0

        # If there were no errors while mounting.
        if not self.errors:
            result = self.backup.run_command_backup(
                self.config,
                self.command_backup,
                self.numbers_backup,
                self.path_name_temp,
                self.ruta_destino,
            )

            size_dump_file = result.size
            self.errors = result.err

        full_path_dump = self.ruta_destino + "/sql-0/" + self.name_file_backup_latest

        # Unmount if it was mounted.
        self.mount.fun_umount()

        hora_final = time.strftime("%H-%M-%S")

        report = gen_report(self.fecha, self.hora_inicio, hora_final,
                            full_path_dump, size_dump_file, self.errors)

        # Send the notification mail.
        mail = Mail(self.config, NAME_FILE_LOG_PATH)
        mail.fun_send_mail(report)

        logging.info("Script completed.")