Example #1
def csv2npy():
    # Process each data split separately
    for n, f, fs in zip(names_lists, feats_paths, folders_save):
        print "Preparing features %s" % f
        feats_dict = dict()
        # Get file names
        names = []
        with open(base_path + '/' + n, 'r') as file:
            for line in file:
                line = line.rstrip('\n')
                line = line.split('.')[0]
                names.append(line)
        # Get features
        with open(base_path + '/' + f, 'r') as file:
            for i, line in enumerate(file):
                feats = np.fromstring(line.rstrip('\n'), sep=',')
                if (apply_L2):
                    feats = feats / np.linalg.norm(feats, ord=2)
                # Insert in dictionary
                feats_dict[names[i]] = feats[:n_feats]

        # Store dict
        print "Saving features in %s" % (base_path_save + '/' + fs + '/' +
                                         file_save + '.npy')
        create_dir_if_not_exists(base_path_save + '/' + fs)
        np.save(base_path_save + '/' + fs + '/' + file_save + '.npy',
                feats_dict)
        print()
def main(src_dir, dst_dir, start, end):
    create_dir_if_not_exists(dst_dir)
    for subdir in ["%02d" % num for num in range(start, end + 1)]:
        from_dir = os.path.join(src_dir, subdir)
        to_dir = os.path.join(dst_dir, subdir)
        logging.info("copy %s to %s", from_dir, to_dir)
        copy_dir_by_sys(from_dir, to_dir)
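
Every example in this listing calls the same create_dir_if_not_exists helper without showing its definition. The sketch below is only an assumption about what such a helper typically looks like, not the actual implementation from any of these codebases: it wraps os.makedirs behind an existence check so callers can invoke it unconditionally before writing files.

import os


def create_dir_if_not_exists(directory):
    # Create the directory (including any missing parents) only if it is
    # absent, so repeated calls and pre-existing directories are harmless.
    if not os.path.exists(directory):
        os.makedirs(directory)
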
Example #3
def csv2npy():

    # Process each data split separately
    for n, f, fs in zip(names_lists, feats_paths, folders_save):
        print "Preparing features %s" % f
        feats_dict = dict()
        # Get file names
        names = []
        with open(base_path + '/' + n, 'r') as file:
            for line in file:
                line = line.rstrip('\n')
                #line = line.split('.')[0]
                names.append(line)

        # Get features
        with open(base_path + '/' + f, 'r') as file:
            for i, line in enumerate(file):
                feats = np.fromstring(line.rstrip('\n'), sep=',')
                if apply_L2:
                    feats = feats / np.linalg.norm(feats, ord=2)
                # Insert in dictionary
                feats_dict[names[i]] = feats[:n_feats]

        # Store dict
        print "Saving features in %s" % (base_path_save +'/'+ fs +'/'+ file_save)
        create_dir_if_not_exists(base_path_save +'/'+ fs)
        np.save(base_path_save + '/' + fs + '/' + file_save + '.npy', feats_dict)
        print()
def copy_dir(src_dir, dst_dir):
    create_dir_if_not_exists(dst_dir)
    csv_files = glob.glob(os.path.join(src_dir, '*.csv'))
    for idx, csv_file in enumerate(csv_files):
        if idx % 50 == 0 and idx != 0:
            logging.info('Already %d files from %s => %s',
                         idx, src_dir, dst_dir)
        shutil.copy(csv_file, dst_dir)
def main(src_dir, dst_dir, speed_thresh):
    files = glob.glob(os.path.join(src_dir, '*', '*.csv'))
    total_file_cnt = len(files)

    for cnt, csvfile in enumerate(files):
        logging.info('[%d/%d] processing: %s',
                     cnt + 1, total_file_cnt, csvfile)
        uid = os.path.splitext(os.path.basename(csvfile))[0]
        dstdir = os.path.join(dst_dir, uid[-2:])
        create_dir_if_not_exists(dstdir)
        dstfile = os.path.join(dstdir, uid + '.csv')
        if os.path.isfile(dstfile):
            continue
        per_file(csvfile, dstfile, dstdir, uid, speed_thresh)
Example #6
    def configure_environment(self):
        log('Configure environment')
        delete(self.yaml_config_dir)
        create_dir_if_not_exists(self.yaml_config_dir)
        env_name = self.dea.get_env_name()
        env_net_segment_type = self.dea.get_env_net_segment_type()
        log('Creating environment %s release %s net-segment-type %s'
            % (env_name, self.release_id, env_net_segment_type))
        exec_cmd('fuel env create --name "%s" --release %s --net-segment-type %s'
                 % (env_name, self.release_id, env_net_segment_type))

        if not self.env_exists(env_name):
            err('Failed to create environment %s' % env_name)
        self.config_settings()
        self.config_network()
        self.config_nodes()
Example #7
    def configure_environment(self):
        log('Configure environment')
        delete(self.yaml_config_dir)
        create_dir_if_not_exists(self.yaml_config_dir)
        env_name = self.dea.get_env_name()
        env_net_segment_type = self.dea.get_env_net_segment_type()
        log('Creating environment %s release %s net-segment-type %s' %
            (env_name, self.release_id, env_net_segment_type))
        exec_cmd(
            'fuel env create --name "%s" --release %s --net-segment-type %s' %
            (env_name, self.release_id, env_net_segment_type))

        if not self.env_exists(env_name):
            err('Failed to create environment %s' % env_name)
        self.config_settings()
        self.config_network()
        self.config_nodes()
def main(src_dir, dst_dir, valid_user_file):
    create_dir_if_not_exists(dst_dir)
    csv_files = glob.glob(os.path.join(src_dir, '*', '*.csv'))
    total_file_cnt = len(csv_files)
    valid_users = set(map(lambda x: x.strip(),
                          open(valid_user_file).readlines()))
    for cnt, csv_file in enumerate(csv_files):
        logging.info('[%d/%d] processing: %s costs: %s',
                     cnt + 1, total_file_cnt, csv_file, costs())
        filename = os.path.basename(csv_file)
        dirname = os.path.splitext(filename)[0][-2:]
        uid = os.path.splitext(filename)[0]
        if uid not in valid_users:
            continue
        create_dir_if_not_exists(os.path.join(dst_dir, dirname))
        dst_csv_file = os.path.join(dst_dir, dirname, filename)
        clean_data(csv_file, dst_csv_file, raw_log_header)
def main(src_dir, dst_dir, valid_user_file, speed_thresh):
    files = glob.glob(os.path.join(src_dir, '*', '*.csv'))
    total_file_cnt = len(files)
    valid_users = set(map(lambda x: x.strip(),
                          open(valid_user_file).readlines()))

    for cnt, csvfile in enumerate(files):
        logging.info('[%d/%d] processing: %s costs: %s',
                     cnt + 1, total_file_cnt, csvfile, costs())
        uid = os.path.splitext(os.path.basename(csvfile))[0]
        if uid not in valid_users:
            continue
        dstdir = os.path.join(dst_dir, uid[-2:])
        create_dir_if_not_exists(dstdir)
        dstfile = os.path.join(dstdir, uid + '.csv')
        if os.path.isfile(dstfile):
            continue
        per_file(csvfile, dstfile, dstdir, uid, speed_thresh)
Example #10
def train(run_name: str, epochs: int, validation_split: float):
    from cc_model import create_model
    from keras.callbacks import ModelCheckpoint, TensorBoard

    db = get_database()
    color_records = db.child('colors').get()

    colors = []
    labels = []

    for c in color_records.each():
        c_val = c.val()

        colors.append(color_record_to_array(c_val))
        labels.append(color_record_to_label_index(c_val))

    colors_np = np.array(colors)
    labels_np = one_hot_encode_labels(labels)

    CHECKPOINT_PATH = os.path.join(RESULT_PATH, run_name)
    create_dir_if_not_exists(CHECKPOINT_PATH)

    # Callbacks
    tensorboard = TensorBoard(log_dir=os.path.join(LOGS_PATH, run_name))
    checkpoint = ModelCheckpoint(os.path.join(CHECKPOINT_PATH,
                                              "weights{epoch:03d}.hdf5"),
                                 monitor='val_loss',
                                 save_weights_only=True,
                                 mode='auto',
                                 period=1,
                                 verbose=1,
                                 save_best_only=True)

    model = create_model()

    # Compiling and running training
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam',
                  metrics=['accuracy'])
    model.fit(x=colors_np,
              y=labels_np,
              epochs=epochs,
              validation_split=validation_split,
              callbacks=[tensorboard, checkpoint])
Example #11
def parse_arguments():
    parser = ArgParser(prog='python %s' % __file__)
    parser.add_argument('-nf',
                        dest='no_fuel',
                        action='store_true',
                        default=False,
                        help='Do not install Fuel Master (and Node VMs when '
                        'using libvirt)')
    parser.add_argument('-nh',
                        dest='no_health_check',
                        action='store_true',
                        default=False,
                        help='Don\'t run health check after deployment')
    parser.add_argument('-fo',
                        dest='fuel_only',
                        action='store_true',
                        default=False,
                        help='Install Fuel Master only (and Node VMs when '
                        'using libvirt)')
    parser.add_argument('-co',
                        dest='cleanup_only',
                        action='store_true',
                        default=False,
                        help='Cleanup VMs and Virtual Networks according to '
                        'what is defined in DHA')
    parser.add_argument('-c',
                        dest='cleanup',
                        action='store_true',
                        default=False,
                        help='Cleanup after deploy')
    if {'-iso', '-dea', '-dha', '-h'}.intersection(sys.argv):
        parser.add_argument('-iso',
                            dest='iso_file',
                            action='store',
                            nargs='?',
                            default='%s/OPNFV.iso' % CWD,
                            help='ISO File [default: OPNFV.iso]')
        parser.add_argument('-dea',
                            dest='dea_file',
                            action='store',
                            nargs='?',
                            default='%s/dea.yaml' % CWD,
                            help='Deployment Environment Adapter: dea.yaml')
        parser.add_argument('-dha',
                            dest='dha_file',
                            action='store',
                            nargs='?',
                            default='%s/dha.yaml' % CWD,
                            help='Deployment Hardware Adapter: dha.yaml')
    else:
        parser.add_argument('iso_file',
                            action='store',
                            nargs='?',
                            default='%s/OPNFV.iso' % CWD,
                            help='ISO File [default: OPNFV.iso]')
        parser.add_argument('dea_file',
                            action='store',
                            nargs='?',
                            default='%s/dea.yaml' % CWD,
                            help='Deployment Environment Adapter: dea.yaml')
        parser.add_argument('dha_file',
                            action='store',
                            nargs='?',
                            default='%s/dha.yaml' % CWD,
                            help='Deployment Hardware Adapter: dha.yaml')
    parser.add_argument('-s',
                        dest='storage_dir',
                        action='store',
                        default='%s/images' % CWD,
                        help='Storage Directory [default: images]')
    parser.add_argument('-b',
                        dest='pxe_bridge',
                        action='append',
                        default=[],
                        help='Linux Bridge for booting up the Fuel Master VM '
                        '[default: pxebr]')
    parser.add_argument('-p',
                        dest='fuel_plugins_dir',
                        action='store',
                        help='Fuel Plugins directory')
    parser.add_argument('-pc',
                        dest='fuel_plugins_conf_dir',
                        action='store',
                        help='Fuel Plugins Configuration directory')
    parser.add_argument('-np',
                        dest='no_plugins',
                        action='store_true',
                        default=False,
                        help='Do not install Fuel Plugins')
    parser.add_argument('-dt',
                        dest='deploy_timeout',
                        action='store',
                        default=240,
                        help='Deployment timeout (in minutes) '
                        '[default: 240]')
    parser.add_argument('-nde',
                        dest='no_deploy_environment',
                        action='store_true',
                        default=False,
                        help=('Do not launch environment deployment'))
    parser.add_argument('-log',
                        dest='deploy_log',
                        action='store',
                        default='../ci/.',
                        help=('Path and name of the deployment log archive'))

    args = parser.parse_args()
    log(args)

    if not args.pxe_bridge:
        args.pxe_bridge = ['pxebr']

    check_file_exists(args.dha_file)

    check_dir_exists(os.path.dirname(args.deploy_log))

    if not args.cleanup_only:
        check_file_exists(args.dea_file)
        check_fuel_plugins_dir(args.fuel_plugins_dir)

    iso_abs_path = os.path.abspath(args.iso_file)
    if not args.no_fuel and not args.cleanup_only:
        log('Using OPNFV ISO file: %s' % iso_abs_path)
        check_file_exists(iso_abs_path)
        log('Using image directory: %s' % args.storage_dir)
        create_dir_if_not_exists(args.storage_dir)
        for bridge in args.pxe_bridge:
            check_bridge(bridge, args.dha_file)

    kwargs = {
        'no_fuel': args.no_fuel,
        'fuel_only': args.fuel_only,
        'no_health_check': args.no_health_check,
        'cleanup_only': args.cleanup_only,
        'cleanup': args.cleanup,
        'storage_dir': args.storage_dir,
        'pxe_bridge': args.pxe_bridge,
        'iso_file': iso_abs_path,
        'dea_file': args.dea_file,
        'dha_file': args.dha_file,
        'fuel_plugins_dir': args.fuel_plugins_dir,
        'fuel_plugins_conf_dir': args.fuel_plugins_conf_dir,
        'no_plugins': args.no_plugins,
        'deploy_timeout': args.deploy_timeout,
        'no_deploy_environment': args.no_deploy_environment,
        'deploy_log': args.deploy_log
    }
    return kwargs
def main(src_dir, dst_dir, valid_user_file, speed_thresh):
    files = glob.glob(os.path.join(src_dir, '*', '*.csv'))
    total_file_cnt = len(files)
    valid_users = set(map(lambda x: x.strip(),
                          open(valid_user_file).readlines()))

    for cnt, csvfile in enumerate(files):
        logging.info('[%d/%d] processing: %s costs: %s',
                     cnt + 1, total_file_cnt, csvfile, costs())
        uid = os.path.splitext(os.path.basename(csvfile))[0]
        if uid not in valid_users:
            continue
        dstdir = os.path.join(dst_dir, uid[-2:])
        create_dir_if_not_exists(dstdir)
        dstfile = os.path.join(dstdir, uid + '.csv')
        if os.path.isfile(dstfile):
            continue
        per_file(csvfile, dstfile, dstdir, uid, speed_thresh)


if __name__ == '__main__':
    import sys
    src_dir = sys.argv[1]
    dst_dir = "%s-no-invalidpoint" % src_dir
    speed_thresh = int(sys.argv[2]) if len(sys.argv) >= 3 else 300
    create_dir_if_not_exists(dst_dir)
    main(src_dir, dst_dir, 'valid_users.csv', speed_thresh)
    end_at = datetime.now()
    delta = end_at - start_at
Example #13
#!/usr/bin/python3

import sys, os

import numpy as np

import common

# Load data
prefix = "t10k"
images = common.read_images(prefix)
labels = common.read_labels(prefix)

# Load result
try:
    result_filepath = sys.argv[1]
    result = np.loadtxt(result_filepath).astype("float32")
    result = result.reshape([result.shape[0], 1])
except IndexError:
    print("Usage: " + os.path.basename(__file__) + " <result_filepath>")
    sys.exit(1)

# Get false indices
false_indices = np.argwhere(labels != result)[:, 0]

# Debugging
debug_dir = "debugging"
common.create_dir_if_not_exists(debug_dir)
for i in false_indices:
    common.debug(debug_dir, i, images[i], labels[i][0], result[i][0])
Example #14
import sys, os

import numpy as np

import common

# Load data
prefix = "t10k"
images = common.read_images(prefix)
ori_labels = common.read_labels(prefix)
new_labels = common.category2binary(ori_labels)

# Create model
model = common.create_model()

# Load weights
try:
    checkpoint_filepath = sys.argv[1]
    model.load_weights(checkpoint_filepath)
except IndexError:
    print("Usage: " + os.path.basename(__file__) + " <checkpoint_filepath>")
    sys.exit(1)

# Testing
score = model.evaluate(images, new_labels)
common.print_score(score)

# Get and save result
result = common.binary2category(model.predict(images))
testing_dir = "testing"
filepath = "result.txt"
common.create_dir_if_not_exists(testing_dir)
np.savetxt(testing_dir + "/" + filepath, result, fmt="%d")
Example #15
    def create_tmp_dir(self):
        self.tmp_dir = '%s/fueltmp' % CWD
        delete(self.tmp_dir)
        create_dir_if_not_exists(self.tmp_dir)
Example #16
#!/usr/bin/python3

import common

# Metadata
batch_size = 128
epochs = 150
training_dir = "training"
checkpoint_format = "weights.{epoch:04d}-{val_loss:.2f}.h5"
period = 5

# Create checkpoint directory if not exists
common.create_dir_if_not_exists(training_dir)

# Load data
prefix = "train"
images = common.read_images(prefix)
ori_labels = common.read_labels(prefix)
new_labels = common.category2binary(ori_labels)

# Create model
model = common.create_model()

# Summary model
print("=" * 80)
model.summary()
input("Press Enter to continue...")
print("=" * 80)

# Train model (the arguments after `images` are an assumed completion; only
# batch_size and epochs are defined in the metadata above)
history = model.fit(images,
                    new_labels,
                    batch_size=batch_size,
                    epochs=epochs)
Example #17
def parse_arguments():
    parser = ArgParser(prog='python %s' % __file__)
    parser.add_argument('-nf', dest='no_fuel', action='store_true',
                        default=False,
                        help='Do not install Fuel Master (and Node VMs when '
                             'using libvirt)')
    parser.add_argument('-nh', dest='no_health_check', action='store_true',
                        default=False,
                        help='Don\'t run health check after deployment')
    parser.add_argument('-fo', dest='fuel_only', action='store_true',
                        default=False,
                        help='Install Fuel Master only (and Node VMs when '
                             'using libvirt)')
    parser.add_argument('-co', dest='cleanup_only', action='store_true',
                        default=False,
                        help='Cleanup VMs and Virtual Networks according to '
                             'what is defined in DHA')
    parser.add_argument('-c', dest='cleanup', action='store_true',
                        default=False,
                        help='Cleanup after deploy')
    if {'-iso', '-dea', '-dha', '-h'}.intersection(sys.argv):
        parser.add_argument('-iso', dest='iso_file', action='store', nargs='?',
                            default='%s/OPNFV.iso' % CWD,
                            help='ISO File [default: OPNFV.iso]')
        parser.add_argument('-dea', dest='dea_file', action='store', nargs='?',
                            default='%s/dea.yaml' % CWD,
                            help='Deployment Environment Adapter: dea.yaml')
        parser.add_argument('-dha', dest='dha_file', action='store', nargs='?',
                            default='%s/dha.yaml' % CWD,
                            help='Deployment Hardware Adapter: dha.yaml')
    else:
        parser.add_argument('iso_file', action='store', nargs='?',
                            default='%s/OPNFV.iso' % CWD,
                            help='ISO File [default: OPNFV.iso]')
        parser.add_argument('dea_file', action='store', nargs='?',
                            default='%s/dea.yaml' % CWD,
                            help='Deployment Environment Adapter: dea.yaml')
        parser.add_argument('dha_file', action='store', nargs='?',
                            default='%s/dha.yaml' % CWD,
                            help='Deployment Hardware Adapter: dha.yaml')
    parser.add_argument('-s', dest='storage_dir', action='store',
                        default='%s/images' % CWD,
                        help='Storage Directory [default: images]')
    parser.add_argument('-b', dest='pxe_bridge', action='store',
                        default='pxebr',
                        help='Linux Bridge for booting up the Fuel Master VM '
                             '[default: pxebr]')
    parser.add_argument('-p', dest='fuel_plugins_dir', action='store',
                        help='Fuel Plugins directory')
    parser.add_argument('-pc', dest='fuel_plugins_conf_dir', action='store',
                        help='Fuel Plugins Configuration directory')
    parser.add_argument('-np', dest='no_plugins', action='store_true',
                        default=False, help='Do not install Fuel Plugins')

    args = parser.parse_args()
    log(args)

    check_file_exists(args.dha_file)

    if not args.cleanup_only:
        check_file_exists(args.dea_file)
        check_fuel_plugins_dir(args.fuel_plugins_dir)

    if not args.no_fuel and not args.cleanup_only:
        log('Using OPNFV ISO file: %s' % args.iso_file)
        check_file_exists(args.iso_file)
        log('Using image directory: %s' % args.storage_dir)
        create_dir_if_not_exists(args.storage_dir)
        check_bridge(args.pxe_bridge, args.dha_file)

    kwargs = {'no_fuel': args.no_fuel, 'fuel_only': args.fuel_only,
              'no_health_check': args.no_health_check,
              'cleanup_only': args.cleanup_only, 'cleanup': args.cleanup,
              'storage_dir': args.storage_dir, 'pxe_bridge': args.pxe_bridge,
              'iso_file': args.iso_file, 'dea_file': args.dea_file,
              'dha_file': args.dha_file,
              'fuel_plugins_dir': args.fuel_plugins_dir,
              'fuel_plugins_conf_dir': args.fuel_plugins_conf_dir,
              'no_plugins': args.no_plugins}
    return kwargs
Example #18
    def create_tmp_dir(self):
        self.tmp_dir = '%s/fueltmp' % CWD
        delete(self.tmp_dir)
        create_dir_if_not_exists(self.tmp_dir)
Example #19
def parse_arguments():
    parser = ArgParser(prog='python %s' % __file__)
    parser.add_argument('-nf', dest='no_fuel', action='store_true',
                        default=False,
                        help='Do not install Fuel Master (and Node VMs when '
                             'using libvirt)')
    parser.add_argument('-nh', dest='no_health_check', action='store_true',
                        default=False,
                        help='Don\'t run health check after deployment')
    parser.add_argument('-fo', dest='fuel_only', action='store_true',
                        default=False,
                        help='Install Fuel Master only (and Node VMs when '
                             'using libvirt)')
    parser.add_argument('-co', dest='cleanup_only', action='store_true',
                        default=False,
                        help='Cleanup VMs and Virtual Networks according to '
                             'what is defined in DHA')
    parser.add_argument('-c', dest='cleanup', action='store_true',
                        default=False,
                        help='Cleanup after deploy')
    if {'-iso', '-dea', '-dha', '-h'}.intersection(sys.argv):
        parser.add_argument('-iso', dest='iso_file', action='store', nargs='?',
                            default='%s/OPNFV.iso' % CWD,
                            help='ISO File [default: OPNFV.iso]')
        parser.add_argument('-dea', dest='dea_file', action='store', nargs='?',
                            default='%s/dea.yaml' % CWD,
                            help='Deployment Environment Adapter: dea.yaml')
        parser.add_argument('-dha', dest='dha_file', action='store', nargs='?',
                            default='%s/dha.yaml' % CWD,
                            help='Deployment Hardware Adapter: dha.yaml')
    else:
        parser.add_argument('iso_file', action='store', nargs='?',
                            default='%s/OPNFV.iso' % CWD,
                            help='ISO File [default: OPNFV.iso]')
        parser.add_argument('dea_file', action='store', nargs='?',
                            default='%s/dea.yaml' % CWD,
                            help='Deployment Environment Adapter: dea.yaml')
        parser.add_argument('dha_file', action='store', nargs='?',
                            default='%s/dha.yaml' % CWD,
                            help='Deployment Hardware Adapter: dha.yaml')
    parser.add_argument('-s', dest='storage_dir', action='store',
                        default='%s/images' % CWD,
                        help='Storage Directory [default: images]')
    parser.add_argument('-b', dest='pxe_bridge', action='store',
                        default='pxebr',
                        help='Linux Bridge for booting up the Fuel Master VM '
                             '[default: pxebr]')
    parser.add_argument('-p', dest='fuel_plugins_dir', action='store',
                        help='Fuel Plugins directory')
    parser.add_argument('-pc', dest='fuel_plugins_conf_dir', action='store',
                        help='Fuel Plugins Configuration directory')
    parser.add_argument('-np', dest='no_plugins', action='store_true',
                        default=False, help='Do not install Fuel Plugins')
    parser.add_argument('-dt', dest='deploy_timeout', action='store',
                        default=240, help='Deployment timeout (in minutes) '
                        '[default: 240]')
    parser.add_argument('-nde', dest='no_deploy_environment',
                        action='store_true', default=False,
                        help=('Do not launch environment deployment'))

    args = parser.parse_args()
    log(args)

    check_file_exists(args.dha_file)

    if not args.cleanup_only:
        check_file_exists(args.dea_file)
        check_fuel_plugins_dir(args.fuel_plugins_dir)

    iso_abs_path = os.path.abspath(args.iso_file)
    if not args.no_fuel and not args.cleanup_only:
        log('Using OPNFV ISO file: %s' % iso_abs_path)
        check_file_exists(iso_abs_path)
        log('Using image directory: %s' % args.storage_dir)
        create_dir_if_not_exists(args.storage_dir)
        check_bridge(args.pxe_bridge, args.dha_file)

    kwargs = {'no_fuel': args.no_fuel, 'fuel_only': args.fuel_only,
              'no_health_check': args.no_health_check,
              'cleanup_only': args.cleanup_only, 'cleanup': args.cleanup,
              'storage_dir': args.storage_dir, 'pxe_bridge': args.pxe_bridge,
              'iso_file': iso_abs_path, 'dea_file': args.dea_file,
              'dha_file': args.dha_file,
              'fuel_plugins_dir': args.fuel_plugins_dir,
              'fuel_plugins_conf_dir': args.fuel_plugins_conf_dir,
              'no_plugins': args.no_plugins,
              'deploy_timeout': args.deploy_timeout,
              'no_deploy_environment': args.no_deploy_environment}
    return kwargs
Example #20
# Retrieves the images of a given split and sorts them according to that split
import shutil

from common import create_dir_if_not_exists

image_dir = '/data/DATASETS/Flickr8k/Images'
annotations_dir = '/data/DATASETS/Flickr8k/Annotations'
split_name = 'val'
dest_dir = image_dir + '/' + split_name + '_images'
ext = '.jpg'

with open(annotations_dir + '/' + split_name + '_list_ids.txt') as f:
    lines = f.readlines()

create_dir_if_not_exists(dest_dir)
n_items = len(str(len(lines))) + 1
for i, filename in enumerate(lines, start=1):
    shutil.copyfile(image_dir + '/' + filename[:-1] + ext,
                    dest_dir + '/' + str(i).zfill(n_items) + ext)
Example #21
import datetime
import os
import pyrebase
import sys
import numpy as np

from common import (create_dir_if_not_exists, color_record_to_array,
                    color_record_to_label_index, one_hot_encode_labels)

ROOT_PATH = os.path.dirname(os.path.realpath(__file__))
OUTPUT_PATH = os.path.realpath(os.path.join(ROOT_PATH, 'data'))

RESULT_PATH = os.path.realpath(os.path.join(OUTPUT_PATH, 'results'))
LOGS_PATH = os.path.realpath(os.path.join(OUTPUT_PATH, 'logs'))

create_dir_if_not_exists(RESULT_PATH)
create_dir_if_not_exists(LOGS_PATH)


def get_database():
    firebase_config = {
        'apiKey': 'AIzaSyDPekCKX4ee6h9NVR2lEITGAM0XIHn-c7c',
        'authDomain': 'color-classification.firebaseapp.com',
        'databaseURL': 'https://color-classification.firebaseio.com',
        'projectId': 'color-classification',
        'storageBucket': '',
        'messagingSenderId': '590040209608'
    }

    firebase = pyrebase.initialize_app(firebase_config)

    return firebase.database()