Example #1
def main():
    start = time.time()
    # Sample data:
    # in real use, args = command_parse() reads the arguments from the command line
    args = command_parse()
    # temp = [
    #     # "-n",
    #     # "16",
    #     "-f",
    #     "tcp",
    #     "-ip",
    #     "127.0.0.1-127.0.0.5",
    #     "-v",
    #     "-m",
    #     "proc",
    #     "-w",
    #     "port_by_proc.json",
    #     "-p",
    #     "100-150"
    # ]
    # args = command_parse(temp)
    host_ports = ip_port_parse(args.ip,
                               args.p,
                               is_port=(args.f == PORT_SCANNER))
    print('Program starting...')
    available_ip_ports = run_scanner(concurrent_mode=args.m,
                                     concurrent_num=args.n,
                                     scanner_type=args.f,
                                     host_ports=host_ports)
    if args.w:
        save_as_json(available_ip_ports, args.w)
    if args.v:
        print(f'Total elapsed time: {time.time() - start}s')
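The command_parse, ip_port_parse, run_scanner and save_as_json helpers above come from the scanner project itself and are not shown in this example. As a rough, hypothetical sketch only, an argparse-based command_parse consistent with the flags in the commented-out temp list could look like the following (option names are taken from the example; defaults and help text are assumptions, not the project's actual parser):

import argparse

# Hypothetical parser sketch; the real command_parse may use different
# defaults, help text, and validation.
def command_parse(argv=None):
    parser = argparse.ArgumentParser(description="host/port scanner")
    parser.add_argument("-n", type=int, default=16, help="number of concurrent workers")
    parser.add_argument("-f", default="tcp", help="scanner type, e.g. tcp")
    parser.add_argument("-ip", default="127.0.0.1", help="IP or IP range, e.g. 127.0.0.1-127.0.0.5")
    parser.add_argument("-p", default="1-1024", help="port or port range, e.g. 100-150")
    parser.add_argument("-m", default="proc", help="concurrency mode, e.g. proc")
    parser.add_argument("-w", default="", help="write results to this JSON file")
    parser.add_argument("-v", action="store_true", help="print timing information")
    return parser.parse_args(argv)  # argv=None falls back to sys.argv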
Example #2
def get_acc(vel):
    sl = ft.Scene("/home/ron/Desktop/Alexey/the_dataset/traj_" + vel +
                  "_low.h5")
    sh = ft.Scene("/home/ron/Desktop/Alexey/the_dataset/traj_" + vel +
                  "_high.h5")

    acc_low = group_avarage_acc(sl, group_by_x_n_z)
    # Convert the numpy mean vectors to plain lists so they are JSON-serializable.
    acc_to_save = {}
    for key in acc_low.keys():
        acc_to_save[key] = [acc_low[key][0].tolist(), acc_low[key][1]]
    tls.save_as_json(acc_to_save, "accel_by_x_and_z_" + vel + "_lower")
    print("Lower done")

    acc_high = group_avarage_acc(sh, group_by_x_n_z)
    acc_to_save = {}
    for key in acc_high.keys():
        acc_to_save[key] = [acc_high[key][0].tolist(), acc_high[key][1]]
    tls.save_as_json(acc_to_save, "accel_by_x_and_z_" + vel + "_higher")
    print("Higher done")

    # Merge the two halves with a count-weighted average of the acceleration vectors.
    m = tls.merge_dict(
        tls.read_json("accel_by_x_and_z_" + vel + "_higher"),
        tls.read_json("accel_by_x_and_z_" + vel + "_lower"),
        lambda a, b: [((np.array(a[0]) * a[1] + np.array(b[0]) * b[1]) /
                       (a[1] + b[1])).tolist(), a[1] + b[1]])
    tls.save_as_json(m, "accel_by_x_and_z_" + vel)
    print("DONE")
Example #3
def get_velocity_by_loc_w(speed, groups=1, prefix=""):
    higher_avg_vel = group_avarage_velocity_w(
        ft.Scene("C:/Users/theem/Desktop/Projects/alpha offline/Data/traj_" +
                 speed + "_high.h5"),
        tls.group_by_location,
        groups=groups)
    print("Higher vel calculated")

    higer_dic = {}
    for key in higher_avg_vel.keys():
        higer_dic[str(key).replace("-0.0", "0.0")] = higher_avg_vel[key]

    tls.save_as_json(
        higer_dic, "raupach_data/" + prefix + "avg_vel_by_loc_higher_" + speed)

    lower_avg_vel = group_avarage_velocity_w(
        ft.Scene("C:/Users/theem/Desktop/Projects/alpha offline/Data/traj_" +
                 speed + "_low.h5"),
        tls.group_by_location,
        groups=groups)
    print("Lower vel calculated")

    lower_dic = {}
    for key in lower_avg_vel.keys():
        lower_dic[str(key).replace("-0.0", "0.0")] = lower_avg_vel[key]

    tls.save_as_json(
        lower_dic, "raupach_data/" + prefix + "avg_vel_by_loc_lower_" + speed)

    merged = tls.merge_dict(lower_dic, higer_dic, merge_long_dict)

    tls.save_as_json(merged,
                     "raupach_data/" + prefix + "avg_vel_by_loc_" + speed)
Example #4
def get_average_velocity(speed):
    low_speed = group_avarage_velocity(
        ft.Scene("/home/ron/Desktop/Alexey/the_dataset/traj_" + speed +
                 "_low.h5"),
        lambda t, i: tls.group_by_height(t, i, 0, 0.18, 0.01))
    high_speed = group_avarage_velocity(
        ft.Scene("/home/ron/Desktop/Alexey/the_dataset/traj_" + speed +
                 "_high.h5"),
        lambda t, i: tls.group_by_height(t, i, 0, 0.18, 0.01))

    tls.save_as_json(low_speed,
                     "cd_data/avg_vel_by_height_" + speed + "_lower")
    tls.save_as_json(high_speed,
                     "cd_data/avg_vel_by_height_" + speed + "_higher")

    mrg = tls.merge_dict(
        low_speed, high_speed,
        lambda a, b: [((np.array(a[0]) * a[1] + np.array(b[0]) * b[1]) /
                       (a[1] + b[1])).tolist(), a[1] + b[1]])

    tls.save_as_json(mrg, "cd_data/avg_vel_by_height_" + speed)
Example #5
def auto_disp_stress_calculator(speed):
    high_stress = get_dispersive_stress(
        ft.Scene("/home/ron/Desktop/Alexey/the_dataset/traj_" + speed +
                 "_high.h5"), "raupach_data/avg_vel_by_loc_" + speed,
        "cd_data/avg_vel_by_height_" + speed)
    print("Stress higher calculated")
    print(high_stress)

    low_stress = get_dispersive_stress(
        ft.Scene("/home/ron/Desktop/Alexey/the_dataset/traj_" + speed +
                 "_low.h5"), "raupach_data/avg_vel_by_loc_" + speed,
        "cd_data/avg_vel_by_height_" + speed)
    print("Stress lower calculeted")
    print(low_stress)

    tls.save_as_json(high_stress, "raupach_data/disp_stress_higher_" + speed)
    tls.save_as_json(low_stress, "raupach_data/disp_stress_lower_" + speed)

    merged_stress = tls.merge_dict(
        high_stress, low_stress, lambda a, b: [(a[0] * a[1] + b[0] * b[1]) /
                                               (a[1] + b[1]), a[1] + b[1]])

    tls.save_as_json(merged_stress, "raupach_data/disp_stress_" + speed)
Example #6
import time

from tools import save_as_json

start = time.time()
# node_list, edge_dict = analyse("zdemo/20210319112759.json")
node_list, edge_dict = analyse("zdemo/20210525170638.json")

# print(edge_dict)
attr_dict, attr_list = attribute(node_list)

forward_list, output_id = forward_analyse(attr_dict, edge_dict)

template = {
    "MyModel": {
        "Path": "./demo/model.py",
        "Name": ["Model"],
        "Init": [" "],
        "Super": ["Model"],
        "Attribute": attr_list,
        "Input": ["x"],
        "Forward": forward_list,
        "Output": [output_id],
        "Function": ["Model"]
    }
}

end = time.time()

json_data = save_as_json(template, "./templates/pytorch/demo/config.json")
# print(json_data)
print(end - start)
Example #7
def auto_rey_stress_err_calculator(speed, skip_vel=True):
    if not skip_vel:
        get_velocity_by_loc(speed)

    high_stress = get_reynolds_stress_errors(
        ft.Scene("C:/Users/theem/Desktop/Projects/alpha offline/Data/traj_" +
                 speed + "_high.h5"),
        "raupach_data/goruped_u_avg_vel_by_loc_" + speed,
        "raupach_data/goruped_w_avg_vel_by_loc_" + speed, True)
    print("Stress higher calculated")
    print(high_stress)

    low_stress = get_reynolds_stress_errors(
        ft.Scene("C:/Users/theem/Desktop/Projects/alpha offline/Data/traj_" +
                 speed + "_low.h5"),
        "raupach_data/goruped_u_avg_vel_by_loc_" + speed,
        "raupach_data/goruped_w_avg_vel_by_loc_" + speed, True)
    print("Stress lower calculeted")
    print(low_stress)

    tls.save_as_json(high_stress,
                     "raupach_data/rey_stress_higher_lerr_" + speed)
    tls.save_as_json(low_stress, "raupach_data/rey_stress_lower_lerr_" + speed)

    merged_stress = tls.merge_dict(
        high_stress, low_stress, lambda a, b: [(a[0] * a[1] + b[0] * b[1]) /
                                               (a[1] + b[1]), a[1] + b[1]])

    tls.save_as_json(merged_stress, "raupach_data/rey_stress_lerr_" + speed)

    high_stress = get_reynolds_stress_errors(
        ft.Scene("C:/Users/theem/Desktop/Projects/alpha offline/Data/traj_" +
                 speed + "_high.h5"),
        "raupach_data/goruped_u_avg_vel_by_loc_" + speed,
        "raupach_data/goruped_w_avg_vel_by_loc_" + speed, False)
    print("Stress higher calculated")
    print(high_stress)

    low_stress = get_reynolds_stress_errors(
        ft.Scene("C:/Users/theem/Desktop/Projects/alpha offline/Data/traj_" +
                 speed + "_low.h5"),
        "raupach_data/goruped_u_avg_vel_by_loc_" + speed,
        "raupach_data/goruped_w_avg_vel_by_loc_" + speed, False)
    print("Stress lower calculeted")
    print(low_stress)

    tls.save_as_json(high_stress,
                     "raupach_data/rey_stress_higher_herr_" + speed)
    tls.save_as_json(low_stress, "raupach_data/rey_stress_lower_herr_" + speed)

    merged_stress = tls.merge_dict(
        high_stress, low_stress, lambda a, b: [(a[0] * a[1] + b[0] * b[1]) /
                                               (a[1] + b[1]), a[1] + b[1]])

    tls.save_as_json(merged_stress, "raupach_data/rey_stress_herr_" + speed)
Example #8
import torch
import time
import neural_genesis
from framework_analyse.class_info_analyse import get_attr_init_dict
from tools import save_as_json

if __name__ == '__main__':
    # start = time.time_ns()
    start = time.time()
    info_dict = get_attr_init_dict(neural_genesis.nn)
    # end = time.time_ns()
    end = time.time()
    save_as_json(info_dict, './torchModel.json')
    print(end - start)
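Every example above ends by writing its result with save_as_json (or tls.save_as_json). A minimal sketch, assuming the helper serializes the data to a JSON file and returns what it wrote (Example #6 assigns its return value to json_data); the real helpers in these projects may append a default extension, create directories, or pass different options to json.dump:

import json

def save_as_json(data, path):
    # Serialize `data` to `path` as UTF-8 JSON and return the data written.
    with open(path, "w", encoding="utf-8") as f:
        json.dump(data, f, ensure_ascii=False, indent=4)
    return data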