Example 1
import os
import sys

import nvgpu
import pyexcel


def process(t):
    command, queue = t
    # Check a GPU id out of the shared queue for the duration of this task.
    gpu_id = queue.get()
    try:
        # Note the leading space so the flag is not glued to the previous argument.
        command += " --device_id " + str(gpu_id)
    finally:
        # Always hand the GPU id back so other workers can reuse it.
        queue.put(gpu_id)


if __name__ == "__main__":
    # On Windows, make sure nvidia-smi (which nvgpu shells out to) is on the PATH.
    if 'PROGRAMFILES' in os.environ:
        nvidia_smi_path = os.path.join(os.environ['PROGRAMFILES'],
                                       'NVIDIA Corporation', 'NVSMI')
        if nvidia_smi_path not in os.environ['PATH']:
            os.environ['PATH'] = os.environ['PATH'] + ";" + nvidia_smi_path
    gpus = len(nvgpu.available_gpus(max_used_percent=30.0))
    args, unknown = parse_arguments(sys.argv)
    buildings_root = os.path.join(args.m3d, "v1", "scans")
    print("Working on M3D buildings @ %s" % buildings_root)
    rendered_trajectories = []
    if args.rendered_trajectories is not None and os.path.exists(
            args.rendered_trajectories):
        data = pyexcel.get_sheet(file_name=args.rendered_trajectories)
        rendered_trajectories.extend(data.column_at(0))
    commands = []
    building_hashes = {}
    for building_folder in os.listdir(buildings_root):
        building_hash = os.path.join(buildings_root, building_folder,
                                     building_folder, "matterport_mesh")
        for root, dirs, files in os.walk(building_hash, topdown=False):
            obj = next((s for s in files if 'obj' in s), None)
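
A minimal sketch, not part of the original source, of how a worker like process above is typically driven: a multiprocessing.Manager queue is seeded with the indices of the free GPUs reported by nvgpu, and each rendering command checks a GPU id out for the duration of its run. The run_all helper name and the pool wiring are assumptions for illustration.

import multiprocessing as mp

import nvgpu


def run_all(commands):
    manager = mp.Manager()
    gpu_queue = manager.Queue()  # Manager queue proxies can be passed to Pool workers
    free_gpus = nvgpu.available_gpus(max_used_percent=30.0)
    for gpu_id in free_gpus:
        gpu_queue.put(gpu_id)
    # One worker per free GPU; each task is a (command, queue) tuple,
    # matching the argument unpacked by process().
    with mp.Pool(processes=len(free_gpus)) as pool:
        pool.map(process, [(cmd, gpu_queue) for cmd in commands])
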
Example 2
import os
import sys

CLASSES = ['bed', 'chair', 'table', 'sofa', 'bookcase']

COMBINED_CLASSES = {'desk': 'table'}

IGNORE_IMAGES_PATH = os.path.abspath('./skip_image_paths.txt')

# Root directory of the project
ROOT_DIR = os.path.abspath('./')

# Import Mask RCNN
sys.path.append(ROOT_DIR)  # To find local version of the library
from mrcnn.config import Config  # base class for SunConfig below (module path may differ across Mask R-CNN versions)

try:
    print('Trying to set GPU resources ...')
    import nvgpu
    available_gpus = nvgpu.available_gpus()

    if isinstance(available_gpus, list) and len(available_gpus) > 0:
        # Pin this process to the first free GPU before any CUDA context exists.
        os.environ["CUDA_VISIBLE_DEVICES"] = available_gpus[0]
        print('Using GPU', available_gpus[0])

    else:
        print('No free GPU found, try again later.')
        sys.exit()

except Exception as e:
    print(e)


class SunConfig(Config):
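
The assignment to CUDA_VISIBLE_DEVICES above only takes effect if it happens before the framework behind Mask R-CNN (Keras/TensorFlow) creates its CUDA context, i.e. before the model is built. A minimal sketch of the same selection as a reusable helper; the claim_free_gpu name and its default threshold are illustrative, not from the original source.

import os
import sys

import nvgpu


def claim_free_gpu(max_used_percent=70.0):
    """Pin this process to the first sufficiently free GPU, or exit."""
    free = nvgpu.available_gpus(max_used_percent=max_used_percent)
    if not free:
        print('No free GPU found, try again later.')
        sys.exit(1)
    # available_gpus() returns GPU indices as strings, ready for the env var.
    os.environ['CUDA_VISIBLE_DEVICES'] = free[0]
    return free[0]
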
Example 3
import logging
import sys
import traceback

import nvgpu

logging.basicConfig(level=logging.DEBUG,
                    format='%(levelname)s: %(asctime)s %(filename)s'
                    ' [%(funcName)s:%(lineno)d][%(process)d] %(message)s',
                    datefmt='%m-%d %H:%M:%S',
                    filename=None,
                    filemode='a')

if __name__ == "__main__":
    from argparse import ArgumentParser
    try:
        arg_parser = ArgumentParser(
            description="print available_gpu id, using nvgpu")
        arg_parser.add_argument("-b",
                                "--best",
                                default=None,
                                type=int,
                                help="output best N")
        args = arg_parser.parse_args()

        if args.best is not None:
            gpus = sorted(nvgpu.gpu_info(),
                          key=lambda x: (x['mem_used'], x['index']))
            ids = [x['index'] for x in gpus]
            print(','.join(ids[:args.best]))
        else:
            print(','.join(nvgpu.available_gpus()))

    except Exception:
        traceback.print_exc()
        sys.exit(-1)
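
The same "best N" ranking can also be used in-process rather than on the command line; a minimal sketch under the assumption that nvgpu.gpu_info() entries carry 'index' and 'mem_used' keys, as the snippet above relies on (the pick_best_gpus name is illustrative).

import os

import nvgpu


def pick_best_gpus(n=1):
    """Return the indices of the n GPUs with the least memory currently in use."""
    gpus = sorted(nvgpu.gpu_info(), key=lambda g: (g['mem_used'], g['index']))
    return [g['index'] for g in gpus[:n]]


if __name__ == '__main__':
    # e.g. expose the two least-loaded GPUs to this process and its children
    os.environ['CUDA_VISIBLE_DEVICES'] = ','.join(pick_best_gpus(2))
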
Example 4
import logging
import multiprocessing as mp
import os

import nvgpu
from pandarallel import pandarallel


def _initialize_logger():
    # NOTE: LOGGER, LOGGER_OUTPUT_FILE, LOGGER_STREAM_HANDLER and eager_map are
    # defined earlier in the original module and are not part of this excerpt.
    logging_formatter = logging.Formatter(
        '{asctime} - pid: {process} - threadid: {thread} - func: {funcName} - {levelname}: {message}',
        style='{')
    logging_file_handler = logging.FileHandler(LOGGER_OUTPUT_FILE)
    logging_file_handler.setFormatter(logging_formatter)
    LOGGER.addHandler(logging_file_handler)
    LOGGER.addHandler(LOGGER_STREAM_HANDLER)


_initialize_logger()

###########
# Globals #
###########

HYPERPARAMETER_SEARCH_IS_DISTRIBUTED = True

CPU_COUNT = mp.cpu_count()
pandarallel.initialize(nb_workers=CPU_COUNT, progress_bar=False, verbose=0)

GPU_IDS = eager_map(int, nvgpu.available_gpus())
DEFAULT_GPU = GPU_IDS[0]

NUM_WORKERS = 0 if HYPERPARAMETER_SEARCH_IS_DISTRIBUTED else 2

os.makedirs('./checkpoints', exist_ok=True)

if __name__ == '__main__':
    print('This module contains global initializations and global values.')
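
A minimal sketch, not part of the original module, of how GPU_IDS and DEFAULT_GPU above might be consumed by the distributed hyperparameter search: each trial is pinned to one visible device, round-robin over the free GPUs. The device_for_trial and launch_trial names are illustrative.

def device_for_trial(trial_index):
    """Round-robin a search trial over the free GPUs when the search is distributed."""
    if HYPERPARAMETER_SEARCH_IS_DISTRIBUTED and GPU_IDS:
        return GPU_IDS[trial_index % len(GPU_IDS)]
    return DEFAULT_GPU


def launch_trial(trial_index):
    # Restrict the trial to a single device before any CUDA work starts.
    gpu_id = device_for_trial(trial_index)
    os.environ['CUDA_VISIBLE_DEVICES'] = str(gpu_id)
    return gpu_id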