Code Example #1
File: load_data.py  Project: kerrm/godot
def load_ft1file(source,ft1file,weightcol,clobber=False,do_pickle=True,
        ft2file=None,**data_kwargs):

    if not clobber:
        try:
            data = pickle.load(open('%s_data.pickle'%source,'rb'))
            print('returning cached version of Data object')
            return data
        except Exception:
            # cache miss or unreadable pickle: fall through and rebuild
            pass

    ra,dec,rad = get_position_from_ft1(ft1file)
    print('Using ra = %s, dec = %s, with extraction radius %s'%(ra,dec,rad))

    if ('max_radius' in data_kwargs) and (float(rad) < data_kwargs['max_radius']):
        print('Warning, specified max_radius=%s but data cuts imply %s.'%(data_kwargs['max_radius'],rad))

    if ft2file is None:
        ft2files = ['%s/ft2.fits'%data_path]
    else:
        ft2files = [ft2file]

    spectrum = lambda E: (E/1000)**-2.1
    data = core.Data([ft1file],ft2files,ra,dec,weightcol,
            base_spectrum=spectrum,zenith_cut=100,**data_kwargs)
    if do_pickle:
        pickle.dump(data,open('%s_data.pickle'%source,'wb'),protocol=2)
    return data
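
A hedged call sketch for the loader above; the source alias, FT1 file name, and weights column are placeholders, not values taken from the godot project:

# Hypothetical invocation; the pickle cache is bypassed with clobber=True and
# max_radius is forwarded to core.Data via **data_kwargs.
data = load_ft1file('j1231', 'J1231-1411_topo.fits', 'WEIGHT',
                    clobber=True, max_radius=10)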
Code Example #2
def train(model, model_save_path, dat_type):

    if dat_type == 'multif0_complete':
        data_path = core.data_path_multif0_complete()
    elif dat_type == 'multif0_incomplete':
        data_path = core.data_path_multif0_incomplete()
    elif dat_type == 'melody3':
        data_path = core.data_path_melody3()
    elif dat_type == 'melody2':
        data_path = core.data_path_melody2()
    elif dat_type == 'melody1':
        data_path = core.data_path_melody1()
    elif dat_type == 'pitch':
        data_path = core.data_path_pitch()
    else:
        raise ValueError("Invalid value for dat_type")

    mtrack_list = core.track_id_list()
    input_patch_size = core.patch_size()

    ### DATA SETUP ###
    dat = core.Data(mtrack_list, data_path, input_patch_size=input_patch_size)
    train_generator = dat.get_train_generator()
    validation_generator = dat.get_validation_generator()

    model.compile(loss=core.bkld,
                  metrics=['mse', core.soft_binary_accuracy],
                  optimizer='adam')

    print(model.summary(line_length=80))

    ### FIT MODEL ###
    history = model.fit_generator(
        train_generator,
        SAMPLES_PER_EPOCH,
        epochs=NB_EPOCHS,
        verbose=1,
        validation_data=validation_generator,
        validation_steps=NB_VAL_SAMPLES,
        callbacks=[
            keras.callbacks.ModelCheckpoint(model_save_path,
                                            save_best_only=True,
                                            verbose=1),
            keras.callbacks.ReduceLROnPlateau(patience=5, verbose=1),
            keras.callbacks.EarlyStopping(patience=6, verbose=0)
        ])

    ### load best weights ###
    model.load_weights(model_save_path)

    return model, history, dat
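
A minimal driver sketch for train(); build_model here is a hypothetical helper standing in for one of the project's model definitions:

# Hypothetical driver: train() compiles the model itself, so build_model only
# needs to return an uncompiled Keras model with compatible shapes.
model = build_model()
model, history, dat = train(model, 'multif0_best_model.h5', 'multif0_complete')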
Code Example #3
def main():
    all_models = glob.glob(
        os.path.join("../experiment_output/multif0*/", "*.pkl"))
    for mpath in sorted(all_models):
        print("Running evaluation for {}".format(os.path.basename(mpath)))
        try:
            model_key = os.path.basename(mpath).split('.')[0]
            exper_module = importlib.import_module(model_key)
            save_path = os.path.dirname(mpath)

            # if the last scores file already exists, go to next model
            if os.path.exists(os.path.join(save_path, 'su_all_scores.csv')):
                print("Already done with model {}".format(mpath))
                continue

            # define model from module's model_def
            model = exper_module.model_def()
            # load the pretrained model
            model.load_weights(mpath)

            # load common variables
            data_path = core.data_path_multif0_complete()
            mtrack_list = core.track_id_list()
            input_patch_size = core.patch_size()
            dat = core.Data(mtrack_list,
                            data_path,
                            input_patch_size=input_patch_size)

            print("getting best threshold...")
            thresh = evaluate.get_best_thresh(dat, model)

            print("scoring multif0 metrics on test sets...")
            print("    > bach10...")
            evaluate.score_on_test_set('bach10', model, save_path, thresh)
            print("    > medleydb test...")
            evaluate.score_on_test_set('mdb_test', model, save_path, thresh)
            print("    > su...")
            evaluate.score_on_test_set('su', model, save_path, thresh)

        except Exception:
            # report the failure for this model and move on to the next
            traceback.print_exc()
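
The loop above imports each experiment module by the basename of its .pkl weights file and calls its model_def(); a sketch of that assumed contract, mirroring the network defined in Code Example #5 below:

# Hypothetical experiment module (e.g. multif0_exp1.py, paired with
# multif0_exp1.pkl). model_def() must rebuild the exact saved architecture.
from keras.models import Model
from keras.layers import Input, Conv2D, Lambda
import keras.backend as K

def model_def():
    inputs = Input(shape=(None, None, 6))
    y = Conv2D(64, (5, 5), padding='same', activation='relu')(inputs)
    y = Conv2D(64, (5, 5), padding='same', activation='relu')(y)
    y = Conv2D(64, (3, 3), padding='same', activation='relu')(y)
    y = Conv2D(64, (3, 3), padding='same', activation='relu')(y)
    y = Conv2D(1, (1, 1), padding='same', activation='sigmoid')(y)
    outputs = Lambda(lambda x: K.squeeze(x, axis=3))(y)
    return Model(inputs=inputs, outputs=outputs)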
Code Example #4
def main():
    # Terminal Colors
    t = Terminal()

    def banner():
        print(
            t.green("""

    # Authored: Benjamin Watson


    .____    .__  _____  _____
    |    |   |__|/ ____\/ ____\__.__.
    |    |   |  \   __\   __<   |  |
    |    |___|  ||  |   |  |  \___  |
    |_______ \__||__|   |__|  / ____|
        \/                \/

"""))

    def progressbar():

        bar_width = 70
        sys.stdout.write(t.green(" [*]  ") + " " * bar_width)
        sys.stdout.flush()
        sys.stdout.write("\b" * (bar_width + 1))

        for i in xrange(bar_width):
            time.sleep(0.01)
            sys.stdout.write(".")
            sys.stdout.flush()

        sys.stdout.write("\n")

    banner()  # Run the banner!

    if len(sys.argv) != 4:
        print((t.red(" [*] ") + "Not Enough Arguments!"))
        print(
            (t.red(" [*] ") +
             "Example: ./liffy.py --url http://target/files.php?file= --data\n"
             ))
        sys.exit(0)

    # Setup arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("--url", help="target url")
    parser.add_argument("--data", help="data technique", action="store_true")
    parser.add_argument("--input", help="input technique", action="store_true")
    parser.add_argument("--expect",
                        help="expect technique",
                        action="store_true")
    #parser.add_argument("--cookies", help="session cookies")
    args = parser.parse_args()

    # Assign argument values
    url = args.url
    #cookies = args.cookies

    print(t.green(" [*] ") + "Checking Target: " + url)
    parsed = urlparse.urlsplit(url)
    domain = parsed.scheme + "://" + parsed.netloc
    progressbar()

    try:
        r = requests.get(domain)
        if r.status_code != 200:
            print(
                t.red(" [!] ") +
                "Did Not Receive Correct Response From Target URL!")
        else:
            print(t.red(" [!] ") + "Target URL Looks Good!")
            if args.data:
                print(t.red(" [!] ") + "Data Technique Selected!")

                d = core.Data(url)
                d.execute_data()

            elif args.input:
                print(t.red(" [!] ") + "Input Technique Selected!")

                i = core.Input(url)
                i.execute_input()

            elif args.expect:
                print(t.red(" [!] ") + "Expect Technique Selected!")

                e = core.Expect(url)
                e.execute_expect()

            else:
                print(t.red(" [!] ") + "Technique Not Selected!")
                sys.exit(0)

    except requests.exceptions.RequestException as e:
        print(t.red(" [*] HTTP Error ") + str(e))
Code Example #5
def main():

    SAVE_KEY = os.path.basename(__file__).split('.')[0]
    (SAVE_PATH, MODEL_SAVE_PATH, PLOT_SAVE_PATH, MODEL_SCORES_PATH,
     SCORES_PATH, SCORE_SUMMARY_PATH) = core.get_paths(
         "/home/rmb456/repos/multif0/experiment_output", SAVE_KEY)

    ### DATA SETUP ###
    dat = core.Data(MTRACK_LIST,
                    DATA_PATH,
                    input_patch_size=INPUT_PATCH_SIZE,
                    output_patch_size=OUTPUT_PATH_SIZE,
                    batch_size=10)
    train_generator = dat.get_train_generator()
    validation_generator = dat.get_validation_generator()

    ### DEFINE MODEL ###
    input_shape = (None, None, 6)
    inputs = Input(shape=input_shape)

    y1 = Conv2D(64, (5, 5), padding='same', activation='relu',
                name='bendy1')(inputs)
    y2 = Conv2D(64, (5, 5), padding='same', activation='relu',
                name='bendy2')(y1)
    y3 = Conv2D(64, (3, 3), padding='same', activation='relu',
                name='smoothy1')(y2)
    y4 = Conv2D(64, (3, 3), padding='same', activation='relu',
                name='smoothy2')(y3)
    y5 = Conv2D(1, (1, 1),
                padding='same',
                activation='sigmoid',
                name='squishy')(y4)
    predictions = Lambda(lambda x: K.squeeze(x, axis=3))(y5)

    model = Model(inputs=inputs, outputs=predictions)
    model.compile(loss=core.bkld,
                  metrics=['mse', core.soft_binary_accuracy],
                  optimizer='adam')

    print(model.summary(line_length=80))

    ### FIT MODEL ###
    history = model.fit_generator(
        train_generator,
        SAMPLES_PER_EPOCH,
        epochs=NB_EPOCHS,
        verbose=1,
        validation_data=validation_generator,
        validation_steps=NB_VAL_SAMPLES,
        callbacks=[
            keras.callbacks.ModelCheckpoint(MODEL_SAVE_PATH,
                                            save_best_only=True,
                                            verbose=1),
            keras.callbacks.ReduceLROnPlateau(patience=5, verbose=1),
            keras.callbacks.EarlyStopping(patience=15, verbose=0)
        ])

    ### load best weights ###
    model.load_weights(MODEL_SAVE_PATH)

    ### Results plots ###
    print("plotting results...")
    core.plot_metrics_epochs(history, PLOT_SAVE_PATH)

    ### Evaluate ###
    print("getting model metrics...")
    core.get_model_metrics(dat, model, MODEL_SCORES_PATH)

    print("getting multif0 metrics...")
    core.get_all_multif0_metrics(dat.test_files, model, SAVE_PATH, SCORES_PATH,
                                 SCORE_SUMMARY_PATH)

    bach10_files = core.get_file_paths(mdb.TRACK_LIST_BACH10, dat.data_path)
    core.get_all_multif0_metrics(bach10_files,
                                 model,
                                 SAVE_PATH,
                                 os.path.join(SAVE_PATH, "bach10_scores.csv"),
                                 os.path.join(SAVE_PATH,
                                              "bach10_score_summary.csv"),
                                 create_pred=True)

    print("done!")
    print("Results saved to {}".format(SAVE_PATH))
Code Example #6
File: liffy.py  Project: youdinforsec/liffy
def main():
    # Terminal Colors
    t = Terminal()

    def banner():
        print(t.cyan("""

    .____    .__  _____  _____
    |    |   |__|/ ____\/ ____\__.__.
    |    |   |  \   __\   __<   |  |
    |    |___|  ||  |   |  |  \___  |
    |_______ \__||__|   |__|  / ____| v1.2
        \/                \/

"""))

    def progressbar():

        bar_width = 70
        sys.stdout.write(t.cyan("[{0}]  ".format(datetime.datetime.now())) + " " * bar_width)
        sys.stdout.flush()
        sys.stdout.write("\b" * (bar_width + 1))

        for w in xrange(bar_width):
            time.sleep(0.01)
            sys.stdout.write(".")
            sys.stdout.flush()

        sys.stdout.write("\n")

    #---------------------------------------------------------------------------------------------------

    banner()

    if len(sys.argv) == 1:  # only the script name was given
        print(t.red("[{0}] ".format(datetime.datetime.now())) + "Not Enough Arguments!")
        print(t.red("[{0}] ".format(datetime.datetime.now())) + "Example: ./liffy.py --url \
        http://target/files.php?file= --data\n")
        sys.exit(0)

    #---------------------------------------------------------------------------------------------------

    """ Command Line Arguments """

    parser = argparse.ArgumentParser()
    parser.add_argument("--url", help="target url")
    parser.add_argument("--data", help="data technique", action="store_true")
    parser.add_argument("--input", help="input technique", action="store_true")
    parser.add_argument("--expect", help="expect technique", action="store_true")
    parser.add_argument("--environ", help="/proc/self/environ technique", action="store_true")
    parser.add_argument("--access", help="access logs technique", action="store_true")
    parser.add_argument("--ssh", help="auth logs technique", action="store_true")
    parser.add_argument("--filter", help="filter technique", action="store_true")
    parser.add_argument("--location", help="path to target file (access log, auth log, etc.)")
    parser.add_argument("--nostager", help="execute payload directly, do not use stager", action="store_true")
    parser.add_argument("--relative", help="use path traversal sequences for attack", action="store_true")
    parser.add_argument("--cookies", help="session cookies")
    args = parser.parse_args()

    #---------------------------------------------------------------------------------------------------

    """ Assign argument values """

    url = args.url
    nostager = args.nostager
    relative = args.relative
    c = args.cookies

    #---------------------------------------------------------------------------------------------------

    """ Check to make sure target is actually up """

    print(t.cyan("[{0}] ".format(datetime.datetime.now())) + "Checking Target: {0}".format(url))
    parsed = urlparse.urlsplit(url)
    domain = parsed.scheme + "://" + parsed.netloc
    progressbar()

    try:
        r = requests.get(domain)
        if r.status_code != 200:
            print(t.red("[{0}] ".format(datetime.datetime.now())) + "Did Not Receive Correct Response From Target URL!")
        else:
            print(t.red("[{0}] ".format(datetime.datetime.now())) + "Target URL Looks Good!")
            if args.data:
                print(t.red("[{0}] ".format(datetime.datetime.now())) + "Data Technique Selected!")
                d = core.Data(url, nostager, c)
                d.execute_data()
            elif args.input:
                print(t.red("[{0}] ".format(datetime.datetime.now())) + "Input Technique Selected!")
                i = core.Input(url, nostager, c)
                i.execute_input()
            elif args.expect:
                print(t.red("[{0}] ".format(datetime.datetime.now())) + "Expect Technique Selected!")
                e = core.Expect(url, nostager, c)
                e.execute_expect()
            elif args.environ:
                print(t.red("[{0}] ".format(datetime.datetime.now())) + "/proc/self/environ Technique Selected!")
                i = core.Environ(url, nostager, relative, c)
                i.execute_environ()
            elif args.access:
                if not args.location:
                    print(t.red("[{0}] ".format(datetime.datetime.now())) + "Log Location Not Provided! Using Default")
                    l = '/var/log/apache2/access.log'
                else:
                    l = args.location
                a = core.Logs(url, l, nostager, relative, c)
                a.execute_logs()
            elif args.ssh:
                if not args.location:
                    print(t.red("[{0}] ".format(datetime.datetime.now())) + "Log Location Not Provided! Using Default")
                    l = '/var/log/auth.log'
                else:
                    l = args.location
                a = core.SSHLogs(url, l, relative, c)
                a.execute_ssh()
            elif args.filter:
                print(t.red("[{0}] ".format(datetime.datetime.now())) + "Filter Technique Selected!")
                f = core.Filter(url, c)
                f.execute_filter()
            else:
                print(t.red("[{0}] ".format(datetime.datetime.now())) + "Technique Not Selected!")
                sys.exit(0)
    except requests.HTTPError as e:
        print(t.red("[{0}] HTTP Error!".format(datetime.datetime.now())) + str(e))
Code Example #7
File: liffy.py  Project: rollys/liffy
def main():
    # Terminal Colors
    t = Terminal()

    def banner():
        print(
            t.green("""

    # Authored: Benjamin Watson


    .____    .__  _____  _____
    |    |   |__|/ ____\/ ____\__.__.
    |    |   |  \   __\   __<   |  |
    |    |___|  ||  |   |  |  \___  |
    |_______ \__||__|   |__|  / ____|
        \/                \/

"""))

    def progressbar():

        bar_width = 70
        sys.stdout.write(t.green(" [*]  ") + " " * bar_width)
        sys.stdout.flush()
        sys.stdout.write("\b" * (bar_width + 1))

        for i in xrange(bar_width):
            time.sleep(0.01)
            sys.stdout.write(".")
            sys.stdout.flush()

        sys.stdout.write("\n")

    banner()  # Run the banner!

    if len(sys.argv) == 1:  # only the script name was given
        print((t.red(" [*] ") + "Not Enough Arguments!"))
        print(
            (t.red(" [*] ") +
             "Example: ./liffy.py --url http://target/files.php?file= --data\n"
             ))
        sys.exit(0)

    # Setup arguments
    parser = argparse.ArgumentParser()
    parser.add_argument("--url", help="target url")
    parser.add_argument("--data", help="data technique", action="store_true")
    parser.add_argument("--input", help="input technique", action="store_true")
    parser.add_argument("--expect",
                        help="expect technique",
                        action="store_true")
    parser.add_argument("--access",
                        help="access logs technique",
                        action="store_true")
    parser.add_argument("--ssh",
                        help="ssh logs technique",
                        action="store_true")
    parser.add_argument("--filter",
                        help="filter technique",
                        action="store_true")
    parser.add_argument("--location", help="access logs location")
    parser.add_argument("--nostager",
                        help="execute payload directly, do not use stager",
                        action="store_true")
    #parser.add_argument("--cookies", help="session cookies")
    args = parser.parse_args()

    # Assign argument values
    url = args.url
    nostager = args.nostager
    #cookies = args.cookies

    print(t.green(" [*] ") + "Checking Target: " + url)
    parsed = urlparse.urlsplit(url)
    domain = parsed.scheme + "://" + parsed.netloc
    progressbar()

    try:
        r = requests.get(domain)
        if r.status_code != 200:
            print(
                t.red(" [!] ") +
                "Did Not Receive Correct Response From Target URL!")
        else:
            print(t.red(" [!] ") + "Target URL Looks Good!")
            if args.data:
                print(t.red(" [!] ") + "Data Technique Selected!")
                d = core.Data(url, nostager)
                d.execute_data()
            elif args.input:
                print(t.red(" [!] ") + "Input Technique Selected!")
                i = core.Input(url, nostager)
                i.execute_input()
            elif args.expect:
                print(t.red(" [!] ") + "Expect Technique Selected!")
                e = core.Expect(url, nostager)
                e.execute_expect()
            elif args.access:
                if not args.location:
                    print(
                        t.red(" [!] ") +
                        "Log Location Not Provided! Using default.")
                    l = '/var/log/apache2/access.log'
                else:
                    l = args.location
                a = core.Logs(url, l, nostager)
                a.execute_logs()
            elif args.ssh:
                if not args.location:
                    print(
                        t.red(" [!] ") +
                        "Log Location Not Provided! Using default.")
                    l = '/var/log/auth.log'
                else:
                    l = args.location
                a = core.SSHLogs(url, l)
                a.execute_ssh()
            elif args.filter:
                print(t.red(" [!] ") + "Filter Technique Selected!")
                f = core.Filter(url)
                f.execute_filter()
            else:
                print(t.red(" [!] ") + "Technique Not Selected!")
                sys.exit(0)

    except requests.exceptions.RequestException as e:
        print(t.red(" [*] HTTP Error ") + str(e))
Code Example #8
File: migrate.py  Project: mutabot/magenta
import os
import logging
import argparse

import core
from utils import config
from utils.data_grep import DataGrep
from utils.data_migrate import DataMigrate
# DataCopy is used below; its module path is an assumption, by analogy with
# the other utils imports.
from utils.data_copy import DataCopy


if __name__ == '__main__':
    parser = argparse.ArgumentParser(prog='G+RSS.Data.Migration')
    parser.add_argument('--src_port', default=6379, type=int)
    parser.add_argument('--src_host', default='127.0.0.1')
    parser.add_argument('--src_db', required=True, type=int)
    parser.add_argument('--dst_port', default=6379, type=int)
    parser.add_argument('--dst_host', default='127.0.0.1')
    parser.add_argument('--dst_db', required=True, type=int)
    parser.add_argument('--log_path', required=True)
    parser.add_argument('--gid', required=False)
    args = parser.parse_args()

    logging.basicConfig(format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %H:%M:%S')
    logger = logging.getLogger('migrateLogger')
    logger.addHandler(config.getLogHandler(os.path.join(args.log_path, 'migrate.log')))
    logger.level = logging.DEBUG

    src_data = core.Data(logger, args.src_host, args.src_port, args.src_db)
    dst_data = core.Data(logger, args.dst_host, args.dst_port, args.dst_db)

    cp = DataCopy(logger, src_data, dst_data)
    cp.run(args.gid)

    #migrate = DataMigrate(logger, src_data, dst_data)
    #migrate.migrate()
    #grep = DataGrep(logger, dst_data)
    #grep.multiple_parents()
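
Here core.Data wraps a Redis connection (logger, host, port, db) rather than event files; a hedged sketch of the copy step, reusing the imports above, with placeholder database numbers:

# Hypothetical standalone copy between two local Redis databases.
logger = logging.getLogger('migrateLogger')
src = core.Data(logger, '127.0.0.1', 6379, 0)
dst = core.Data(logger, '127.0.0.1', 6379, 1)
DataCopy(logger, src, dst).run(None)  # gid is optional in the CLI; None assumed to mean all gids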
Code Example #9
File: dump.py  Project: mutabot/magenta
import os
import logging

import argparse

import core
from utils import config
from utils.data_dump import DataDump

if __name__ == '__main__':
    parser = argparse.ArgumentParser(prog='G+RSS.Data.Migration')
    parser.add_argument('--redis_port', default=6379, type=int)
    parser.add_argument('--redis_host', default='127.0.0.1')
    parser.add_argument('--redis_db', required=True, type=int)
    parser.add_argument('--log_path', required=True)
    args = parser.parse_args()

    logging.basicConfig(format='%(asctime)s %(message)s',
                        datefmt='%m/%d/%Y %H:%M:%S')
    logger = logging.getLogger('migrateLogger')
    logger.addHandler(
        config.getLogHandler(os.path.join(args.log_path, 'migrate.log')))
    logger.level = logging.DEBUG
    logger.propagate = 0

    data = core.Data(logger, args.redis_host, args.redis_port, args.redis_db)

    dump = DataDump(logger, data)
    dump.dump()
Code Example #10
File: load_data.py  Project: kerrm/godot
def get_data(source,clobber=False,do_pickle=True,**data_kwargs):

    if 'use_phi' not in data_kwargs:
        data_kwargs['use_phi'] = True

    if not clobber:
        try:
            data = pickle.load(open('%s_data.pickle'%source,'rb'))
            print('returning cached version of Data object')
            return data
        except Exception:
            # cache miss or unreadable pickle: fall through and rebuild
            pass

    data = None

    if source == 'j1018':
        jname = 'J1018-5856'

    elif source.startswith('j1231'):
        jname = 'J1231-1411'

    elif source.startswith('j2021'):
        jname = 'J2021+4026'

    elif source.startswith('j1311'):
        jname = 'J1311-3430'

    elif source.startswith('j2241'):
        jname = 'J2241-5236'

    elif source.startswith('j2032'):
        jname = 'J2032+4127'

    elif source.startswith('cygx3'):
        jname = 'J2032+4057_fake'

        #spectrum = lambda E: (E/1000)**-2.1

    elif source.startswith('j0633'):
        jname = 'J0633+1746'

    elif source.startswith('j0823'):
        jname = 'J0823.3-4205c_fake'

    elif source.startswith('j0534'):
        jname = 'J0534+2200'

    elif source == 'eridani':
        ra = 53.2327
        dec = -9.45826
        ft1files = sorted(glob.glob('/data/kerrm/eps_eridani/gtsrcprob*.fits'))
        ft2files = ['data/tyrel_ft2.fits']
        spectrum = lambda E: (E/1000)**-3

        data = core.Data(ft1files,ft2files,ra,dec,'Eridani',
                base_spectrum=spectrum,zenith_cut=90,
                use_weights_for_exposure=exposure_weights,use_phi=use_phi,
                max_radius=15)

    elif source == 'j0835_topo':
        jname = 'J0835-4510'

    elif source == 'lsi':
        jname = 'J0240+6113_fake'

    elif source == 'ls5039':
        jname = 'J1826-1450_fake'

        spectrum = lambda E: (E/1000)**-2.1

    elif source == '3c279':
        jname = 'J1256-0547_fake'

    else:
        print('Did not recognize a source alias.  Assuming source==jname.')
        jname = source
        #raise NotImplementedError('did not recognize %s'%source)

    if (data is not None):
        if do_pickle:
            pickle.dump(data,open('%s_data.pickle'%source,'wb'),protocol=2)
        return data

    spectrum = lambda E: (E/1000)**-2.1
    ft1files = ['%s/%s_%s.fits'%(data_path,jname,'bary' if 'bary' in source else 'topo')]

    ra,dec,rad = get_position_from_ft1(ft1files[0])
    print('Using ra = %s, dec = %s, with extraction radius %s'%(ra,dec,rad))

    if ('max_radius' in data_kwargs) and (float(rad) < data_kwargs['max_radius']):
        print('Warning, specified max_radius=%s but data cuts imply %s.'%(data_kwargs['max_radius'],rad))

    ft2files = ['%s/ft2.fits'%data_path]

    data = core.Data(ft1files,ft2files,ra,dec,'PSR%s'%jname,
            base_spectrum=spectrum,zenith_cut=100,**data_kwargs)
    if do_pickle:
        pickle.dump(data,open('%s_data.pickle'%source,'wb'),protocol=2)
    return data
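
A hedged call sketch for get_data; 'j1231' is one of the aliases handled above, and keyword arguments flow through **data_kwargs to core.Data:

# Hypothetical call: resolve the 'j1231' alias, rebuild instead of using the
# pickle cache, and forward max_radius to core.Data.
data = get_data('j1231', clobber=True, max_radius=10)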