Example #1
    def extract(self, ex_path, version, location=None, sdk_build=False):
        if os.path.exists(ex_path):
            utils.rmtree(ex_path, ignore_errors=True)

        path = location or self.save_file_path(version, sdk_build=sdk_build)

        file = self.extract_class(path, *self.extract_args)
        # Currently, Python's zipfile extraction mechanism doesn't
        # copy file permissions, resulting in a binary that
        # doesn't work. Copied from a patch here:
        # http://bugs.python.org/file34873/issue15795_cleaned.patch
        if path.endswith('.zip'):
            members = file.namelist()
            for zipinfo in members:
                minfo = file.getinfo(zipinfo)
                target = file.extract(zipinfo, ex_path)
                mode = minfo.external_attr >> 16 & 0x1FF
                os.chmod(target, mode)
        else:
            file.extractall(ex_path)

        if path.endswith('.tar.gz'):
            dir_name = utils.path_join(
                ex_path,
                os.path.basename(path).replace('.tar.gz', ''))
        else:
            dir_name = utils.path_join(
                ex_path,
                os.path.basename(path).replace('.zip', ''))

        if os.path.exists(dir_name):
            for p in os.listdir(dir_name):
                abs_file = utils.path_join(dir_name, p)
                utils.move(abs_file, ex_path)
            utils.rmtree(dir_name, ignore_errors=True)
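The comment above refers to the zipfile permission issue; the same workaround can be written as a small standalone helper. The sketch below is illustrative only (the helper name and arguments are not part of the original code) and uses just the standard library.

import os
import zipfile


def extract_zip_with_permissions(zip_path, out_dir):
    # ZipFile.extractall() does not restore Unix permission bits, so extract
    # each member and re-apply the mode stored in the upper 16 bits of
    # external_attr (see bugs.python.org/issue15795).
    with zipfile.ZipFile(zip_path) as archive:
        for info in archive.infolist():
            target = archive.extract(info, out_dir)
            mode = (info.external_attr >> 16) & 0o777
            if mode:
                os.chmod(target, mode)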
Example #2
    def make_desktop_file(self, nw_path, export_dest):
        icon_set = self.get_setting('icon')
        icon_path = utils.path_join(self.project_dir(), icon_set.value)
        if os.path.exists(icon_path) and icon_set.value:
            utils.copy(icon_path, export_dest)
            icon_path = utils.path_join(export_dest, os.path.basename(icon_path))
        else:
            icon_path = ''
        name = self.project_name()
        pdir = self.project_dir()
        version = self.get_setting('version')
        desc = self.get_setting('description')
        dfile_path = utils.path_join(export_dest, u'{}.desktop'.format(name))
        file_str = (
                    u'[Desktop Entry]\n'
                    u'Version={}\n'
                    u'Name={}\n'
                    u'Comment={}\n'
                    u'Exec={}\n'
                    u'Icon={}\n'
                    u'Terminal=false\n'
                    u'Type=Application\n'
                    u'Categories=Utility;Application;\n'
                    )
        file_str = file_str.format(version.value,
                                   name,
                                   desc.value,
                                   nw_path,
                                   icon_path)
        with codecs.open(dfile_path, 'w+', encoding='utf-8') as f:
            f.write(file_str)

        os.chmod(dfile_path, 0755)
Example #3
def compute_metrics_for_single_image(truth_path,
                                     color_img_dir,
                                     prediction_dirs,
                                     output_dir, n_preds):
    truth_img = cv2.imread(truth_path, cv2.IMREAD_GRAYSCALE)
    color_img_path = path_join(color_img_dir, Path(truth_path).name)
    print(truth_path)

    # Load all predictions pertaining to the same image and stack them,
    # taking each prediction as a channel
    pred_imgs = load_prediction_imgs_from_tuple(truth_path, prediction_dirs)

    maj_vote_result = np.apply_along_axis(
        majority_vote_pixelwise, -1, pred_imgs)

    # Compare majority vote classification with ground truth
    equality_img = np.equal(truth_img, maj_vote_result[:, :, 0])

    # Compute strength of disagreement between
    # majority vote classification and ground truth
    strength_of_disagreement_img = np.multiply(np.invert(equality_img),
                                               maj_vote_result[:, :, 1])

    visual_path = path_join(output_dir, 'visual', Path(truth_path).name)
    write_images(output_dir, truth_path, color_img_path, visual_path,
                 maj_vote_result, strength_of_disagreement_img, equality_img)

    return compute_metrics(
        truth_path,
        visual_path,
        color_img_path,
        truth_img,
        equality_img,
        strength_of_disagreement_img, maj_vote_result,
        n_preds)
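majority_vote_pixelwise is applied along the prediction axis above and is indexed later as [:, :, 0] (winning class) and [:, :, 1] (vote strength). A possible shape for it, stated as an assumption rather than the original implementation:

import numpy as np


def majority_vote_pixelwise(pixel_preds):
    # pixel_preds holds one predicted label per model for a single pixel;
    # return the most frequent label and the number of models that chose it.
    values, counts = np.unique(pixel_preds, return_counts=True)
    winner = int(np.argmax(counts))
    return np.array([values[winner], counts[winner]])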
Example #4
def _remove_acl():
    """Ansi Common Lisp 1~2 is a lisp book."""
    path = path_join(TEXT_DIR, 'acl1.txt.txt')
    os.remove(path)

    path = path_join(TEXT_DIR, 'acl2.txt.txt')
    os.remove(path)
Example #5
    def extract(self, ex_path, version):
        if os.path.exists(ex_path):
            utils.rmtree(ex_path, ignore_errors=True)

        path = self.save_file_path(version)

        file = self.extract_class(path,
                                  *self.extract_args)
        # Currently, Python's zipfile extraction mechanism doesn't
        # copy file permissions, resulting in a binary that
        # doesn't work. Copied from a patch here:
        # http://bugs.python.org/file34873/issue15795_cleaned.patch
        if path.endswith('.zip'):
            members = file.namelist()
            for zipinfo in members:
                minfo = file.getinfo(zipinfo)
                target = file.extract(zipinfo, ex_path)
                mode = minfo.external_attr >> 16 & 0x1FF
                os.chmod(target, mode)
        else:
            file.extractall(ex_path)

        if path.endswith('.tar.gz'):
            dir_name = utils.path_join(ex_path, os.path.basename(path).replace('.tar.gz',''))
        else:
            dir_name = utils.path_join(ex_path, os.path.basename(path).replace('.zip',''))

        if os.path.exists(dir_name):
            for p in os.listdir(dir_name):
                abs_file = utils.path_join(dir_name, p)
                utils.move(abs_file, ex_path)
            utils.rmtree(dir_name, ignore_errors=True)
Example #6
def test__get_dst():
    url = 'http://paulgraham.com/vb.html'
    dirname = os.path.dirname(os.path.realpath(__file__))
    dirname = os.path.dirname(dirname)
    DATA_DIR = path_join(dirname, 'data')
    RAW_HTML_DIR = path_join(DATA_DIR, 'raw_html')
    assert download._get_dst(url) == path_join(RAW_HTML_DIR, get_basename(url))
Example #7
def mean_per_class_iou(pred_dir, truth_dir, n_classes):
    pred_paths = sorted(
        glob.glob(path_join(pred_dir, '*.png')) +
        glob.glob(path_join(pred_dir, '*.jpg')))
    truth_paths = sorted(
        glob.glob(path_join(truth_dir, '*.png')) +
        glob.glob(path_join(truth_dir, '*.jpg')))

    assert len(pred_paths) == len(truth_paths), \
        'Different number of images in prediction and truth directories'
    pd_paths = list(zip(pred_paths, truth_paths))
    assert np.all(
        [Path(x_p).stem == Path(y_p).stem for (x_p, y_p) in pd_paths]), \
        'Not all prediction and truth images are named correspondingly'

    iou_class_names = [f'iou_class_{i}' for i in range(n_classes)]
    iou_df = pd.DataFrame(columns=['truth_path', 'pred_path'] +
                          iou_class_names)

    for pred_path, truth_path in pd_paths:
        pred_img = cv2.imread(pred_path, cv2.IMREAD_GRAYSCALE)
        truth_img = cv2.imread(truth_path, cv2.IMREAD_GRAYSCALE)

        resized_dim = (truth_img.shape[1], truth_img.shape[0])
        pred_img = cv2.resize(pred_img,
                              resized_dim,
                              interpolation=cv2.INTER_NEAREST)
        pc_iou = per_class_iou(pred_img, truth_img, n_classes)
        iou_df = iou_df.append(pd.Series([truth_path, pred_path] + pc_iou,
                                         index=iou_df.columns),
                               ignore_index=True)

        print(f'Per class IOU for {Path(pred_path).name}: {pc_iou}')

    return iou_df[iou_class_names].mean(axis=0, skipna=True), iou_df
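per_class_iou above must return a plain list so that [truth_path, pred_path] + pc_iou concatenates into one row. A hedged sketch of such a function (an assumption, not the original implementation):

import numpy as np


def per_class_iou(pred_img, truth_img, n_classes):
    # Intersection-over-union per class id; NaN when a class appears in
    # neither the prediction nor the ground truth.
    ious = []
    for c in range(n_classes):
        pred_c = pred_img == c
        truth_c = truth_img == c
        union = np.logical_or(pred_c, truth_c).sum()
        inter = np.logical_and(pred_c, truth_c).sum()
        ious.append(float(inter) / union if union else float('nan'))
    return ious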
Example #8
def main():
    global TRUTH_PATHS
    args = setup_args_parser().parse_args()

    if len(args.prediction_dirs) < 2:
        raise ValueError(
            'Please specify at least two directories containing predictions.')

    n_preds = len(args.prediction_dirs)
    TRUTH_PATHS = sorted(glob.glob(path_join(args.truth_dir, '*.png')))

    (Path(args.output_dir) / 'majority_voted').mkdir(
        parents=True, exist_ok=True)
    (Path(args.output_dir) / 'votes').mkdir(
        parents=True, exist_ok=True)
    (Path(args.output_dir) / 'visual').mkdir(
        parents=True, exist_ok=True)
    (Path(args.output_dir) / 'disagreement_strength').mkdir(
        parents=True, exist_ok=True)

    if args.threads < 1:
        raise ValueError('Number of threads must be at least 1.')
    if args.threads == 1:
        metrics = [compute_metrics_for_single_image(truth_path,
                                                    args.color_img_dir,
                                                    args.prediction_dirs,
                                                    args.output_dir,
                                                    n_preds)
                   for truth_path in TRUTH_PATHS]
    else:
        metrics = compute_metrics_distr(args, n_preds)

    pd.DataFrame(metrics).to_csv(path_join(args.output_dir, 'metrics.csv'),
                                 index=None)
Example #9
    def make_desktop_file(self, nw_path, export_dest):
        icon_set = self.get_setting('icon')
        icon_path = utils.path_join(self.project_dir(), icon_set.value)
        if os.path.exists(icon_path) and icon_set.value:
            utils.copy(icon_path, export_dest)
            icon_path = utils.path_join(export_dest, os.path.basename(icon_path))
        else:
            icon_path = ''
        name = self.project_name()
        pdir = self.project_dir()
        version = self.get_setting('version')
        desc = self.get_setting('description')
        dfile_path = utils.path_join(export_dest, u'{}.desktop'.format(name))
        file_str = (
                    u'[Desktop Entry]\n'
                    u'Version={}\n'
                    u'Name={}\n'
                    u'Comment={}\n'
                    u'Exec={}\n'
                    u'Icon={}\n'
                    u'Terminal=false\n'
                    u'Type=Application\n'
                    u'Categories=Utility;Application;\n'
                    )
        file_str = file_str.format(version.value,
                                   name,
                                   desc.value,
                                   nw_path,
                                   icon_path)
        with codecs.open(dfile_path, 'w+', encoding='utf-8') as f:
            f.write(file_str)

        os.chmod(dfile_path, 0755)
Example #10
	def compile_post(self):
		if self.host_os == utils.WIN:
			runtimeDir = utils.path_join(self.dir_source, "vb25-patch", "non-gpl", self.build_arch)
			files = []
			if self.vc2013:
				files.extend([
					"msvcp120.dll",
					"msvcr120.dll",
					"vcomp120.dll",
				])
			else:
				files.append("vcomp90.dll")
			for f in files:
				shutil.copy(utils.path_join(runtimeDir, f), self.dir_install_path)
Example #11
def main():
    dst = path_join(DATA_DIR, 'PG')
    shutil.make_archive(dst, 'zip', HTML_DIR)

    base_cmd = '/usr/bin/ebook-convert {src}.zip {dst} --max-toc-links 999999 --toc-threshold 1 --title "Paul Graham Essay" --author-sort "Paul Graham" --authors "Paul Graham"'
    s = Shell(base_cmd.format(src=dst, dst=dst + '.mobi'))
    s.communicate()
Example #12
 def _seperate_one(self, _file):
     time = self._get_time(_file)
     folder = self._get_time_folder(time)
     fd_path = path_join([self.mv_path, folder, ''])
     if not path_exists(fd_path):
         create_folder(fd_path)
     print _file, fd_path
     print copy(_file, fd_path)
Example #13
	def compile(self):
		compileCmd = [sys.executable]
		compileCmd.append("scons/scons.py")

		if not self.build_clean:
			compileCmd.append("--implicit-deps-unchanged")
			compileCmd.append("--max-drift=1")

		if self.use_env_msvc:
			compileCmd.append(r'env="PATH:%PATH%,INCLUDE:%INCLUDE%,LIB:%LIB%"')

		if self.vc2013:
			compileCmd.append(r'MSVS_VERSION=12.0')

		cleanCmd = [sys.executable]
		cleanCmd.append("scons/scons.py")
		cleanCmd.append("clean")

		if not self.mode_test:
			os.chdir(self.dir_blender)

			if self.build_clean:
				sys.stdout.write("Calling: %s\n" % (" ".join(cleanCmd)))
				subprocess.call(cleanCmd)

			sys.stdout.write("Calling: %s\n" % (" ".join(compileCmd)))
			res = subprocess.call(compileCmd)
			if not res == 0:
				sys.stderr.write("There was an error during the compilation!\n")
				sys.exit(1)

			if self.host_os == utils.WIN:
				runtimeDir = utils.path_join(self.dir_source, "vb25-patch", "non-gpl", self.build_arch)
				files = []
				if self.vc2013:
					files.extend([
						"msvcp120.dll",
						"msvcr120.dll",
						"vcomp120.dll",
					])
				else:
					files.append("vcomp90.dll")
				for f in files:
					shutil.copy(utils.path_join(runtimeDir, f), self.dir_install_path)
Example #14
	def init_paths(self):
		if self.generate_package:
			if not self.mode_test:
				utils.path_create(self.dir_release)

		self.dir_build        = utils.path_slashify(self.dir_build)
		self.dir_source       = utils.path_slashify(self.dir_source)
		self.dir_install_path = utils.path_slashify(self.dir_install_path)

		self.dir_blender      = utils.path_join(self.dir_source, "blender")
		self.dir_blender_svn  = utils.path_join(self.dir_source, "blender-git")
		self.user_config      = utils.path_join(self.dir_blender, "user-config.py")

		if self.user_user_config:
			self.user_user_config = utils.pathExpand(self.user_user_config)

		if self.build_clean:
			if os.path.exists(self.dir_build):
				shutil.rmtree(self.dir_build)
Example #15
	def init_paths(self):
		if self.generate_package:
			if not self.mode_test:
				utils.path_create(self.dir_release)

		self.dir_build        = utils.path_slashify(self.dir_build)
		self.dir_source       = utils.path_slashify(self.dir_source)
		self.dir_install_path = utils.path_slashify(self.dir_install_path)

		self.dir_blender      = utils.path_join(self.dir_source, "blender")
		self.dir_blender_svn  = utils.path_join(self.dir_source, "blender-git")
		self.user_config      = utils.path_join(self.dir_blender, "user-config.py")

		if self.user_user_config:
			self.user_user_config = utils.pathExpand(self.user_user_config)

		if self.build_clean:
			if os.path.exists(self.dir_build):
				shutil.rmtree(self.dir_build)
Example #16
	def update(self):
		self.revision, self.commits = utils.get_svn_revision(self.dir_blender)
		self.version                = utils.get_blender_version(self.dir_blender)

		if self.build_release:
			self.dir_install_name = utils.GetInstallDirName(self)
		else:
			self.dir_install_name = self.project

		self.dir_install_path = utils.path_join(self.dir_install, self.dir_install_name)
Example #17
	def update(self):
		self.revision, self.brev, self.commits = utils.get_svn_revision(self.dir_blender)
		self.version = utils.get_blender_version(self.dir_blender)[0]
		self.versionArr = utils.get_blender_version(self.dir_blender)

		if self.build_release:
			self.dir_install_name = utils.GetInstallDirName(self)
		else:
			self.dir_install_name = self.project

		self.dir_install_path = utils.path_join(self.dir_install, self.dir_install_name)
Example #18
	def docs(self):
		if self.generate_docs:
			api_dir = utils.path_join(self.dir_install_path, "api")

			sys.stdout.write("Generating API documentation: %s\n" % (api_dir))

			if self.host_os != utils.LNX:
				sys.stdout.write("API documentation generation is not supported on this platform.\n")

			else:
				if not self.mode_test:
					sphinx_doc_gen = "doc/python_api/sphinx_doc_gen.py"

					# Create API directory
					os.system("mkdir -p %s" % api_dir)

					# Generate API docs
					os.chdir(self.dir_blender)
					os.system("%s -b -P %s" % (utils.path_join(self.dir_install_path, "blender"), sphinx_doc_gen))
					os.system("sphinx-build doc/python_api/sphinx-in %s" % api_dir)
Example #19
 def replace_icon_in_exe(self, exe_path):
     icon_setting = self.get_setting('icon')
     exe_icon_setting = self.get_setting('exe_icon')
     icon_path = (exe_icon_setting.value
                  if exe_icon_setting.value
                  else icon_setting.value)
     if icon_path:
         p = PEFile(exe_path)
         p.replace_icon(utils.path_join(self.project_dir(), icon_path))
         p.write(exe_path)
         p = None
Example #20
 def get_file_information_from_url(self):
     if hasattr(self, 'url'):
         self.file_name = self.url.split(u'/')[-1]
         self.full_file_path = utils.path_join(self.save_path, self.file_name)
         self.file_ext = os.path.splitext(self.file_name)[1]
         if self.file_ext == '.zip':
             self.extract_class = ZipFile
             self.extract_args = ()
         elif self.file_ext == '.gz':
             self.extract_class = TarFile.open
             self.extract_args = ('r:gz',)
Example #21
 def get_file_information_from_url(self):
     if hasattr(self, 'url'):
         self.file_name = self.url.split(u'/')[-1]
         self.full_file_path = utils.path_join(self.save_path, self.file_name)
         self.file_ext = os.path.splitext(self.file_name)[1]
         if self.file_ext == '.zip':
             self.extract_class = ZipFile
             self.extract_args = ()
         elif self.file_ext == '.gz':
             self.extract_class = TarFile.open
             self.extract_args = ('r:gz',)
Example #22
	def docs(self):
		if self.generate_docs:
			api_dir = utils.path_join(self.dir_install_path, "api")

			sys.stdout.write("Generating API documentation: %s\n" % (api_dir))

			if self.host_os != utils.LNX:
				sys.stdout.write("API documentation generation is not supported on this platform.\n")

			else:
				if not self.mode_test:
					sphinx_doc_gen = "doc/python_api/sphinx_doc_gen.py"

					# Create API directory
					os.system("mkdir -p %s" % api_dir)

					# Generate API docs
					os.chdir(self.dir_blender)
					os.system("%s -b -P %s" % (utils.path_join(self.dir_install_path, "blender"), sphinx_doc_gen))
					os.system("sphinx-build doc/python_api/sphinx-in %s" % api_dir)
Example #23
 def replace_icon_in_exe(self, exe_path):
     icon_setting = self.get_setting('icon')
     exe_icon_setting = self.get_setting('exe_icon')
     icon_path = (exe_icon_setting.value
                  if exe_icon_setting.value
                  else icon_setting.value)
     if icon_path:
         p = PEFile(exe_path)
         p.replace_icon(utils.path_join(self.project_dir(), icon_path))
         p.write(exe_path)
         p = None
Example #24
 def get_file_information_from_url(self):
     """Extract the file information from the setting url"""
     if hasattr(self, 'url'):
         self.file_name = self.url.split('/')[-1]
         self.full_file_path = utils.path_join(self.save_path, self.file_name)
         self.file_ext = os.path.splitext(self.file_name)[1]
         if self.file_ext == '.zip':
             self.extract_class = zipfile.ZipFile
             self.extract_args = ()
         elif self.file_ext == '.gz':
             self.extract_class = tarfile.TarFile.open
             self.extract_args = ('r:gz',)
Example #25
def test_download_nwjs(command_base):
    command_base.get_setting('nw_version').value = '0.19.0'
    command_base.get_setting('windows-x64').value = True
    command_base.init()
    command_base.get_files_to_download()

    command_base.download_file_with_error_handling()

    base, _ = os.path.split(__file__)

    assert os.path.exists(utils.path_join(base, 'test_data', 'files',
                                          'downloads',
                                          'nwjs-v0.19.0-win-x64.zip'))
Example #26
    def create_icns_for_app(self, icns_path):
        icon_setting = self.get_setting('icon')
        mac_app_icon_setting = self.get_setting('mac_icon')
        icon_path = (mac_app_icon_setting.value
                     if mac_app_icon_setting.value
                     else icon_setting.value)

        if icon_path:
            icon_path = utils.path_join(self.project_dir(), icon_path)
            if not icon_path.endswith('.icns'):
                save_icns(icon_path, icns_path)
            else:
                utils.copy(icon_path, icns_path)
Example #27
 def get_file_information_from_url(self):
     """Extract the file information from the setting url"""
     if hasattr(self, 'url'):
         self.file_name = self.url.split('/')[-1]
         self.full_file_path = utils.path_join(self.save_path,
                                               self.file_name)
         self.file_ext = os.path.splitext(self.file_name)[1]
         if self.file_ext == '.zip':
             self.extract_class = zipfile.ZipFile
             self.extract_args = ()
         elif self.file_ext == '.gz':
             self.extract_class = tarfile.TarFile.open
             self.extract_args = ('r:gz', )
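Together with the extract() methods shown in Examples #1 and #5, the two attributes set here are consumed as a constructor call, extract_class(path, *extract_args). A hedged restatement of that dispatch as a single helper (the helper name is illustrative):

import tarfile
import zipfile


def open_archive(path, file_ext):
    # Mirrors the branches above: ZipFile for .zip, TarFile.open('r:gz') for .gz.
    if file_ext == '.zip':
        return zipfile.ZipFile(path)
    elif file_ext == '.gz':
        return tarfile.TarFile.open(path, 'r:gz')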
Example #28
    def create_icns_for_app(self, icns_path):
        icon_setting = self.get_setting('icon')
        mac_app_icon_setting = self.get_setting('mac_icon')
        icon_path = (mac_app_icon_setting.value
                     if mac_app_icon_setting.value
                     else icon_setting.value)

        if icon_path:
            icon_path = utils.path_join(self.project_dir(), icon_path)
            if not icon_path.endswith('.icns'):
                save_icns(icon_path, icns_path)
            else:
                utils.copy(icon_path, icns_path)
Example #29
def mean_per_class_iou_parallel(pred_dir, truth_dir, n_threads, n_classes):
    pool = Pool(n_threads)
    pred_paths = sorted(
        glob.glob(path_join(pred_dir, '*.png')) +
        glob.glob(path_join(pred_dir, '*.jpg')))
    truth_paths = sorted(
        glob.glob(path_join(truth_dir, '*.png')) +
        glob.glob(path_join(truth_dir, '*.jpg')))

    assert len(pred_paths) == len(truth_paths), \
        'Different number of images in prediction and truth directories'
    pd_paths = list(
        zip(pred_paths, truth_paths, [n_classes] * len(truth_paths)))

    assert np.all(
        [Path(x_p).stem == Path(y_p).stem for (x_p, y_p, _) in pd_paths]), \
        'Not all prediction and truth images are named correspondingly'

    result_series = pool.map(mean_per_class_iou_single_image, pd_paths)
    iou_df = pd.DataFrame(result_series)
    iou_class_names = [f'iou_class_{i}' for i in range(n_classes)]
    mean_iou_for_dataset = iou_df[iou_class_names].mean(axis=0, skipna=True)
    return mean_iou_for_dataset, iou_df
Example #30
def get_logger(name: str, log_path: str = os.path.join(os.path.dirname(__file__), "main.log"),
               console: bool = False) -> logging.Logger:
    """
    Simple logging wrapper that returns logger
    configured to log into file and console.

    Args:
        name (str): name of logger
        log_path (str): path of log file
        console (bool): whether to log on console

    Returns:
        logging.Logger: configured logger
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")

    # ensure that logging handlers are not duplicated
    for handler in list(logger.handlers):
        logger.removeHandler(handler)

    # rotating file handler
    if log_path:
        fh = RotatingFileHandler(path_join(log_path),
                                 maxBytes=10 * 2 ** 20,  # 10 MB
                                 backupCount=1)  # 1 backup
        fh.setLevel(logging.DEBUG)
        fh.setFormatter(formatter)
        logger.addHandler(fh)

    # console handler
    if console:
        ch = logging.StreamHandler()
        ch.setLevel(logging.INFO)
        ch.setFormatter(formatter)
        logger.addHandler(ch)

    # null handler
    if not (log_path or console):
        logger.addHandler(logging.NullHandler())

    return logger
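A minimal usage sketch for get_logger; the logger name and file path below are placeholders.

logger = get_logger("evaluation", log_path="/tmp/evaluation.log", console=True)
logger.info("visible in the file and on the console")
logger.debug("visible in the rotating file only (console handler is INFO)")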
Example #31
 def put_client_binary_files(self, filenames, total_size):   
     # Filenames are relative to the data_dir.
     self.ui.set_progress_range(total_size)
     self.ui.set_progress_update_interval(total_size/50)
     for filename in filenames:
         self.request_connection()
         self.con.putrequest("PUT",
             self.url("/client_binary_file?session_token=%s&filename=%s" \
             % (self.server_info["session_token"],
             urllib.quote(filename.encode("utf-8"), ""))))
         full_path = path_join(self.database.data_dir(), filename)
         file_size = path_getsize(full_path)
         self.con.putheader("content-length", file_size)
         self.con.endheaders()
         for buffer in self.stream_binary_file(full_path, progress_bar=False):
             self.con.send(buffer)
             self.ui.increase_progress(len(buffer))
         self._check_response_for_errors(self.con.getresponse())
     self.ui.set_progress_value(total_size)
Example #32
 def put_client_binary_files(self, filenames, total_size):
     # Filenames are relative to the data_dir.
     self.ui.set_progress_range(total_size)
     self.ui.set_progress_update_interval(total_size / 50)
     for filename in filenames:
         self.request_connection()
         self.con.putrequest("PUT",
             self.url("/client_binary_file?session_token=%s&filename=%s" \
             % (self.server_info["session_token"],
             urllib.quote(filename.encode("utf-8"), ""))))
         full_path = path_join(self.database.data_dir(), filename)
         file_size = path_getsize(full_path)
         self.con.putheader("content-length", file_size)
         self.con.endheaders()
         for buffer in self.stream_binary_file(full_path,
                                               progress_bar=False):
             self.con.send(buffer)
             self.ui.increase_progress(len(buffer))
         self._check_response_for_errors(self.con.getresponse())
     self.ui.set_progress_value(total_size)
Example #33
 def load_package_json(self, json_path=None):
     self.logger.info('Loading package.json')
     if json_path is not None:
         p_json = [json_path]
     else:
         p_json = glob.glob(utils.path_join(self.project_dir(),
                                         'package.json'))
     setting_list = []
     if p_json:
         json_str = ''
         try:
             with codecs.open(p_json[0], 'r', encoding='utf-8') as f:
                 json_str = f.read()
         except IOError:
             return setting_list
         try:
             setting_list = self.load_from_json(json_str)
         except ValueError as e:  # Json file is invalid
             self.logger.warning('Warning: Json file invalid.')
             self.progress_text = u'{}\n'.format(e)
     return setting_list
Example #34
 def load_package_json(self, json_path=None):
     self.logger.info('Loading package.json')
     if json_path is not None:
         p_json = [json_path]
     else:
         p_json = glob.glob(utils.path_join(self.project_dir(),
                                         'package.json'))
     setting_list = []
     if p_json:
         json_str = ''
         try:
             with codecs.open(p_json[0], 'r', encoding='utf-8') as f:
                 json_str = f.read()
         except IOError:
             return setting_list
         try:
             setting_list = self.load_from_json(json_str)
         except ValueError as e:  # Json file is invalid
             self.logger.warning('Warning: Json file invalid.')
             self.progress_text = u'{}\n'.format(e)
     return setting_list
Example #35
	def exporter(self):
		"""
		  Add script and modules
		"""
		scriptsPath = utils.path_join(self.dir_install, self.dir_install_name, self.version, "scripts")
		if self.host_os == utils.MAC:
			scriptsPath = utils.path_join(self.dir_install, self.dir_install_name, "blender.app", "Contents", "Resources", self.version, "scripts")

		addonsPath  = utils.path_join(scriptsPath, "addons")
		startupPath = utils.path_join(scriptsPath, "startup")

		clonePath = addonsPath if self.vb30 else startupPath

		sys.stdout.write("Adding exporter...\n")
		sys.stdout.write("  in: %s\n" % clonePath)

		if not self.mode_test:
			if not os.path.exists(clonePath):
				sys.stderr.write("Something went wrong! Can't add Python modules and exporter!\n")
				sys.exit(3)

			if self.vb30:
				os.chdir(clonePath)
				exporterPath = utils.path_join(clonePath, "vb30")
				if os.path.exists(exporterPath):
					utils.remove_directory(exporterPath)
				os.system("git clone --recursive https://github.com/bdancer/vb30.git")

			else:
				os.chdir(clonePath)
				exporterPath = utils.path_join(clonePath, "vb25")
				if os.path.exists(exporterPath):
					utils.remove_directory(exporterPath)

				os.system("git clone --recursive https://github.com/bdancer/vb25.git")

			if self.use_exp_branch not in {'master'}:
				os.chdir(exporterPath)
				os.system("git remote update")
				os.system("git checkout -b {branch} origin/{branch}".format(branch=self.use_exp_branch))

			os.chdir(exporterPath)
			os.system("git submodule update --init --recursive")
			os.system("git submodule foreach git checkout master")
			os.system("git submodule foreach git pull --rebase origin master")
Example #36
	def exporter(self):
		"""
		  Add script and modules
		"""
		scriptsPath = utils.path_join(self.dir_install, self.dir_install_name, self.version, "scripts")
		if self.host_os == utils.MAC:
			scriptsPath = utils.path_join(self.dir_install, self.dir_install_name, "blender.app", "Contents", "Resources", self.version, "scripts")

		addonsPath  = utils.path_join(scriptsPath, "addons")
		startupPath = utils.path_join(scriptsPath, "startup")

		clonePath = addonsPath if self.vb30 else startupPath

		sys.stdout.write("Adding exporter...\n")
		sys.stdout.write("  in: %s\n" % clonePath)

		if not self.mode_test:
			if not os.path.exists(clonePath):
				sys.stderr.write("Something went wrong! Can't add Python modules and exporter!\n")
				sys.exit(3)

			if self.vb30:
				os.chdir(clonePath)
				exporterPath = utils.path_join(clonePath, "vb30")
				if os.path.exists(exporterPath):
					utils.remove_directory(exporterPath)
				os.system("git clone --recursive https://github.com/bdancer/vb30.git")

			else:
				os.chdir(clonePath)
				exporterPath = utils.path_join(clonePath, "vb25")
				if os.path.exists(exporterPath):
					utils.remove_directory(exporterPath)

				os.system("git clone --recursive https://github.com/bdancer/vb25.git")

			if self.use_exp_branch not in {'master'}:
				os.chdir(exporterPath)
				os.system("git remote update")
				os.system("git checkout -b {branch} origin/{branch}".format(branch=self.use_exp_branch))

			os.chdir(exporterPath)
			os.system("git submodule update --init --recursive")
			os.system("git submodule foreach git checkout master")
			os.system("git submodule foreach git pull --rebase origin master")
Example #37
	def patch(self):
		patch_dir = utils.path_join(self.dir_source, "vb25-patch")

		if self.use_blender_hash:
			patchBin      = utils.find_patch()
			patchFilepath = os.path.join(tempfile.gettempdir(), "vray_for_blender.patch")

			os.chdir(self.dir_blender)

			os.system("git checkout %s" % self.use_github_branch) # Checkout exporter branch
			os.system("git diff master > %s" % patchFilepath)     # Generate diff with master
			os.system("git fetch --tags")                         # Hash could be tag also
			os.system("git checkout %s" % self.use_blender_hash)  # Checkout needed revision
			os.system("git checkout -b vray_for_blender")         # Create some branch for patching
			os.system("patch -Np1 -i %s" % patchFilepath)         # Apply patch

			os.remove(patchFilepath)

		# Add datafiles: splash, default scene etc
		if self.add_datafiles:
			sys.stdout.write("Adding datafiles...\n")

			datafiles_path = utils.path_join(self.dir_blender, "release", "datafiles")

			if not self.mode_test:
				# Change splash
				for splash_filename in ["splash.png", "splash_2x.png"]:
					splash_path_src = utils.path_join(patch_dir, "datafiles", splash_filename)
					splash_path_dst = utils.path_join(datafiles_path, splash_filename)

					shutil.copyfile(splash_path_src, splash_path_dst)

				# Change icons
				for subdir in ["blender_icons16", "blender_icons32"]:
					icons_path_src = utils.path_join(patch_dir, "datafiles", subdir)
					icons_path_dst = utils.path_join(datafiles_path, subdir)

					shutil.rmtree(icons_path_dst)
					shutil.copytree(icons_path_src, icons_path_dst)
Example #38
	def patch(self):
		patch_dir = utils.path_join(self.dir_source, "vb25-patch")

		if self.use_blender_hash:
			patchBin      = utils.find_patch()
			patchFilepath = os.path.join(tempfile.gettempdir(), "vray_for_blender.patch")

			os.chdir(self.dir_blender)

			os.system("git checkout %s" % self.use_github_branch) # Checkout exporter branch
			os.system("git diff master > %s" % patchFilepath)     # Generate diff with master
			os.system("git fetch --tags")                         # Hash could be tag also
			os.system("git checkout %s" % self.use_blender_hash)  # Checkout needed revision
			os.system("git checkout -b vray_for_blender")         # Create some branch for patching
			os.system("patch -Np1 -i %s" % patchFilepath)         # Apply patch

			os.remove(patchFilepath)

		# Add datafiles: splash, default scene etc
		if self.add_datafiles:
			sys.stdout.write("Adding datafiles...\n")

			datafiles_path = utils.path_join(self.dir_blender, "release", "datafiles")

			if not self.mode_test:
				# Change splash
				for splash_filename in ["splash.png", "splash_2x.png"]:
					splash_path_src = utils.path_join(patch_dir, "datafiles", splash_filename)
					splash_path_dst = utils.path_join(datafiles_path, splash_filename)

					shutil.copyfile(splash_path_src, splash_path_dst)

				# Change icons
				for subdir in ["blender_icons16", "blender_icons32"]:
					icons_path_src = utils.path_join(patch_dir, "datafiles", subdir)
					icons_path_dst = utils.path_join(datafiles_path, subdir)

					shutil.rmtree(icons_path_dst)
					shutil.copytree(icons_path_src, icons_path_dst)
Example #39
parser = argparse.ArgumentParser(
    description='This script reads and analyzes the validation IoU files of '
                'either Deeplab or FCN networks, '
                'and orders the respective network checkpoints by mean IoU.')
parser.add_argument('val_results_root',
                    help='Root directory containing all checkpoint evaluations')
parser.add_argument('--output_file',
                    help='If set, this will contain the mIoU for each epoch.')
parser.add_argument('--epochs', type=int, default=100,
                    help='Number of epochs used in training of the model')
args = parser.parse_args()

mIOU = []

for i in list(range(0, args.epochs, 5)) + [args.epochs - 1]:
    iou_df_i_path = path_join(args.val_results_root, f'ep{i}/iou.csv')
    iou_df_i = pd.read_csv(iou_df_i_path)
    mIOU_i = iou_df_i.drop(['truth_path', 'pred_path'], axis=1).mean(
        axis=0, skipna=True)
    mIOU_i['mIoU'] = mIOU_i.mean()
    mIOU_i['Epoch'] = i
    mIOU.append(mIOU_i)

mIOU = pd.DataFrame(mIOU)
mIOU.sort_values('mIoU', ascending=False, inplace=True)

if args.output_file is not None:
    mIOU.to_csv(args.output_file)

print(mIOU.head())
Example #40
    # Run with multiple threads.
    multi_thread.run_with_multi_thread(download_caida_restricted_wrapper, argv,
                                       resources, mt_num)

    # Assemble segments.
    assemble_segements(file_path)


def download_directory(url, directory, mt_num=-1):
    # Get the file list.
    is_succeeded = False
    round_cnt = 1
    while (not is_succeeded):
        try:
            file_list = get_file_list_from_directory(url)
            is_succeeded = True
        except Exception, e:
            utils.log(str(e))
            is_succeeded = False
            round_cnt = round_cnt + 1
            time.sleep(1 * round_cnt)

    utils.touch(directory + '/')

    # Resource list.
    resources = ['']

    for f in file_list:
        download_file(utils.url_join([url, f]),
                      utils.path_join([directory, f]), resources, mt_num)
Example #41
def get_file(path):
    parts = path.split('/')
    independent_path = utils.path_join(CWD, *parts)
    return independent_path
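A small usage note for get_file: it splits the slash-separated relative path and rejoins it against CWD, so the same call works with either path separator. The argument below is taken from the run_script example later in this listing.

env_template_path = get_file('files/env_vars.py')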
Example #42
def main():
    parser = ArgParser(description=('Command line interface '
                                    'to web2exe. {}'.format(__version__)),
                                     prog='web2execmd')
    command_base = CommandBase()
    command_base.init()
    parser.add_argument('project_dir', metavar='project_dir',
                        help='The project directory.', type=unicode_arg)
    parser.add_argument('--output-dir', dest='output_dir',
                        help='The output directory for exports.',
                        type=unicode_arg)
    parser.add_argument('--quiet', dest='quiet', action='store_true',
                        default=False,
                        help='Silences output messages')
    parser.add_argument('--verbose', dest='verbose', action='store_true',
                        default=False,
                        help=('Prints debug errors and messages instead '
                              'of logging to files/errors.log'))
    parser.add_argument('--package-json',
                        dest='load_json',
                        nargs='?',
                        default='',
                        const=True,
                        help=('Loads the package.json '
                              'file in the project directory. '
                              'Ignores other command line arguments.'))
    parser.add_argument('--cmd-version', action='version', version='%(prog)s {}'.format(__version__))

    for setting_group_dict in command_base.settings['setting_groups']+[command_base.settings['compression']]:
        for setting_name, setting in setting_group_dict.items():
            kwargs = {}
            if setting_name == 'name':
                kwargs.update({'default': command_base.project_name})
            else:
                kwargs.update({'required': setting.required,
                               'default': setting.default_value})
            action = 'store'
            option_name = setting_name.replace('_', '-')

            if setting.type in ['file', 'string', 'strings']:
                kwargs.update({'type': unicode_arg})

            if isinstance(setting.default_value, bool):
                action = ('store_true' if setting.default_value is False
                          else 'store_false')
                kwargs.update({'action': action})
                if setting.default_value is True:
                    option_name = u'disable-{}'.format(option_name)
            else:
                if setting.values:
                    kwargs.update({'choices': setting.values})
                    setting.description += u' Possible values: {{{}}}'.format(', '.join([unicode(x) for x in setting.values]))
                    kwargs.update({'metavar': ''})
                else:
                    kwargs.update({'metavar': '<{}>'.format(setting.display_name)})

            parser.add_argument(u'--{}'.format(option_name),
                                dest=setting_name,
                                help=setting.description,
                                **kwargs
                                )

    export_args = [arg for arg in command_base.settings['export_settings']]
    parser.add_argument('--export-to', dest='export_options',
                        nargs='+', required=True,
                        choices=export_args,
                        help=('Choose at least one system '
                              'to export to.'))

    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(
            stream=sys.stdout,
            format=("%(levelname) -10s %(module)s.py: "
                    "%(lineno)s %(funcName)s - %(message)s"),
            level=logging.DEBUG
        )
    else:
        logging.basicConfig(
            filename=LOG_FILENAME,
            format=("%(levelname) -10s %(asctime)s %(module)s.py: "
                    "%(lineno)s %(funcName)s - %(message)s"),
            level=logging.DEBUG
        )

    global logger
    global handler

    logger = logging.getLogger('CMD Logger')
    handler = lh.RotatingFileHandler(LOG_FILENAME, maxBytes=100000, backupCount=2)
    logger.addHandler(handler)

    def my_excepthook(type_, value, tback):
        output_err = u''.join([unicode(x) for x in traceback.format_exception(type_, value, tback)])
        logger.error(u'{}'.format(output_err))
        sys.__excepthook__(type_, value, tback)

    sys.excepthook = my_excepthook

    command_base.logger = logger

    if args.quiet:
        command_base.quiet = True

    command_base._project_dir = args.project_dir

    command_base._output_dir = (args.output_dir or
                                utils.path_join(command_base._project_dir, 'output'))

    if args.app_name is None:
        args.app_name = command_base.project_name()

    if args.name is not None:
        setting = command_base.get_setting('name')
        args.name = setting.filter_name(args.name if not callable(args.name) else args.name())

    command_base._project_name = args.app_name if not callable(args.app_name) else args.app_name()

    if not args.title:
        args.title = command_base.project_name()

    for name, val in args._get_kwargs():
        if callable(val):
            val = val()
        if name == 'export_options':
            for opt in val:
                setting = command_base.get_setting(opt)
                if setting is not None:
                    setting.value = True
        else:
            setting = command_base.get_setting(name)
            if setting is not None:
                setting.value = val

    if args.load_json is True:
        command_base.load_package_json()
    elif args.load_json:
        command_base.load_package_json(args.load_json)

    command_base.export()
Example #43
 def _load_essay_orders():
     fp = path_join(DATA_DIR, 'essay_orders.json')
     with open(fp, 'r') as f:
         return simplejson.load(f)
Example #44
	def update_sources(self):
		"""
		  Getting/updating sources
		"""

		def exportSources():
			sys.stdout.write("Exporting sources...\n")
			if self.mode_test:
				return

			if os.path.exists(self.dir_blender):
				utils.remove_directory(self.dir_blender)

			# Copy full tree to have proper build info.
			shutil.copytree(self.dir_blender_svn, self.dir_blender)

			os.chdir(self.dir_blender)
			os.system("git remote update github")
			os.system("git checkout -b {branch} github/{branch}".format(branch=self.use_github_branch))

			if self.checkout_revision is not None:
				os.chdir(self.dir_blender)
				os.system("git checkout %s" % self.checkout_revision)

		# Update Blender sources
		if self.update_blender:
			if os.path.exists(self.dir_blender):
				sys.stdout.write("Removing exported sources...\n")
				if not self.mode_test:
					utils.remove_directory(self.dir_blender)

			if not os.path.exists(self.dir_blender_svn):
				sys.stdout.write("Obtaining Blender sources...\n")
				if not self.mode_test:
					os.chdir(self.dir_source)

					# Obtain sources
					os.system("git clone %s blender" % GITHUB_REPO)

					# Now set origin to Blender's git and additional github remote
					# This is needed for proper submodules init
					os.chdir(self.dir_blender)
					os.system("git remote set-url origin %s" % OFFICIAL_REPO)
					os.system("git remote add github %s" % GITHUB_REPO)
					os.system("git remote update")
					os.system("git pull --rebase")

					os.chdir(self.dir_blender)
					os.system("git submodule update --init --recursive")
					os.system("git submodule foreach git checkout master")
					os.system("git submodule foreach git pull --rebase origin master")

					os.chdir(self.dir_source)
					# Move "blender" to "blender-git"
					utils.move_directory(self.dir_blender, self.dir_blender_svn)

			else:
				sys.stdout.write("Updating Blender sources...\n")
				if not self.mode_test:
					os.chdir(self.dir_blender_svn)

					# Update sources
					os.system("git pull --rebase")
					os.system("git submodule foreach git pull --rebase origin master")

			exportSources()

			# Update Blender libs
			lib_dir = None
			svn_cmd = None
			if self.host_os != utils.LNX:
				if self.host_os == utils.WIN:
					lib_dir = utils.path_join(self.dir_source, "lib", "windows")
					svn_cmd = "svn checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/windows lib/windows"
					if self.host_arch == "x86_64":
						if self.vc2013:
							lib_dir = utils.path_join(self.dir_source, "lib", "win64_vc12")
							svn_cmd = "svn checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/win64_vc12 lib/win64_vc12"
						else:
							lib_dir = utils.path_join(self.dir_source, "lib", "win64")
							svn_cmd = "svn checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/win64 lib/win64"
				elif self.host_os == utils.MAC:
					lib_dir = utils.path_join(self.dir_source, "lib", "darwin-9.x.universal")
					svn_cmd = "svn checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/darwin-9.x.universal lib/darwin-9.x.universal"

				if not os.path.exists(lib_dir):
					sys.stdout.write("Getting \"lib\" data...\n")
					if not self.mode_test:
						os.chdir(self.dir_source)
						os.system(svn_cmd)
				else:
					sys.stdout.write("Updating \"lib\" data...\n")
					if not self.mode_test:
						os.chdir(lib_dir)
						os.system("svn update")

		# Update V-Ray/Blender patchset
		if self.update_patch and not self.mode_developer:
			vb25_patch = utils.path_join(self.dir_source, "vb25-patch")

			if os.path.exists(vb25_patch):
				sys.stdout.write("Updating V-Ray/Blender patches...\n")
				if not self.mode_test:
					os.chdir(vb25_patch)
					os.system("git pull")
			else:
				sys.stdout.write("Getting V-Ray/Blender patches...\n")
				if not self.mode_test:
					os.chdir(self.dir_source)
					os.system("git clone git://github.com/bdancer/vb25-patch.git")
Example #45
def get_file(path):
    parts = path.split('/')
    independent_path = utils.path_join(CWD, *parts)
    return independent_path
Example #46
    def run_script(self, script):
        if not script:
            return

        if os.path.exists(script):
            self.progress_text = 'Executing script {}...'.format(script)
            contents = ''
            with codecs.open(script, 'r', encoding='utf-8') as f:
                contents = f.read()

            _, ext = os.path.splitext(script)

            export_opts = self.get_export_options()
            export_dir = '{}{}{}'.format(self.output_dir(),
                                         os.path.sep,
                                         self.project_name())
            export_dirs = []
            for opt in export_opts:
                export_dirs.append('{}{}{}'.format(export_dir, os.path.sep, opt))

            command = None
            bat_file = None

            export_dict = {'mac-x64_dir': '',
                           'mac-x32_dir': '',
                           'windows-x64_dir': '',
                           'windows-x32_dir': '',
                           'linux-x64_dir': '',
                           'linux-x32_dir': ''}

            if ext == '.py':
                env_file = get_file('files/env_vars.py')
                env_contents = codecs.open(env_file, 'r', encoding='utf-8').read()

                for i, ex_dir in enumerate(export_dirs):
                    opt = export_opts[i]
                    export_dict[opt+'_dir'] = ex_dir

                env_vars = env_contents.format(proj_dir=self.project_dir(),
                                               proj_name=self.project_name(),
                                               export_dir=export_dir,
                                               export_dirs=str(export_dirs),
                                               num_dirs=len(export_dirs),
                                               **export_dict)
                pycontents = '{}\n{}'.format(env_vars, contents)

                command = ['python', '-c', pycontents]


            elif ext == '.bash':
                env_file = get_file('files/env_vars.bash')
                env_contents = codecs.open(env_file, 'r', encoding='utf-8').read()
                ex_dir_vars = ''

                for i, ex_dir in enumerate(export_dirs):
                    opt = export_opts[i]
                    export_dict[opt+'_dir'] = ex_dir

                for ex_dir in export_dirs:
                    ex_dir_vars += "'{}' ".format(ex_dir)

                env_vars = env_contents.format(proj_dir=self.project_dir(),
                                               proj_name=self.project_name(),
                                               export_dir=export_dir,
                                               num_dirs=len(export_dirs),
                                               export_dirs=ex_dir_vars,
                                               **export_dict)
                shcontents = '{}\n{}'.format(env_vars, contents)

                command = ['bash', '-c', shcontents]

            elif ext == '.bat':
                env_file = get_file('files/env_vars.bat')
                env_contents = codecs.open(env_file, 'r', encoding='utf-8').read()
                ex_dir_vars = ''

                for i, ex_dir in enumerate(export_dirs):
                    opt = export_opts[i]
                    export_dict[opt+'_dir'] = ex_dir
                    ex_dir_vars += 'set "EXPORT_DIRS[{}]={}"\n'.format(i, ex_dir)

                env_vars = env_contents.format(proj_dir=self.project_dir(),
                                               proj_name=self.project_name(),
                                               export_dir=export_dir,
                                               num_dirs=len(export_dirs),
                                               export_dirs=ex_dir_vars,
                                               **export_dict)
                batcontents = '{}\n{}'.format(env_vars, contents)

                bat_file = utils.path_join(TEMP_DIR, '{}.bat'.format(self.project_name()))

                self.logger.debug(batcontents)

                with open(bat_file, 'w+') as f:
                    f.write(batcontents)

                command = [bat_file]

            proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            output, error = proc.communicate()
            output = output.strip()
            error = error.strip()

            if bat_file:
                os.remove(bat_file)

            with open(get_file('script-output.txt'), 'w+') as f:
                f.write('Output:\n{}'.format(output))
                if error:
                    f.write('\n\nErrors:\n{}\n'.format(error))

            self.progress_text = 'Done executing script.'
        else:
            self.progress_text = '\nThe script {} does not exist. Not running.'.format(script)
Example #47
0
    def make_output_dirs(self):
        self.output_err = ''
        try:
            self.progress_text = 'Removing old output directory...\n'

            output_dir = utils.path_join(self.output_dir(), self.project_name())
            if os.path.exists(output_dir):
                utils.rmtree(output_dir, ignore_errors=True)

            temp_dir = utils.path_join(TEMP_DIR, 'webexectemp')
            if os.path.exists(temp_dir):
                utils.rmtree(temp_dir, ignore_errors=True)

            self.progress_text = 'Making new directories...\n'

            if not os.path.exists(output_dir):
                os.makedirs(output_dir)

            os.makedirs(temp_dir)

            self.copy_files_to_project_folder()

            json_file = utils.path_join(self.project_dir(), 'package.json')

            global_json = utils.get_data_file_path('files/global.json')

            if self.output_package_json:
                with codecs.open(json_file, 'w+', encoding='utf-8') as f:
                    f.write(self.generate_json())


            with codecs.open(global_json, 'w+', encoding='utf-8') as f:
                f.write(self.generate_json(global_json=True))

            zip_file = utils.path_join(temp_dir, self.project_name()+'.nw')

            app_nw_folder = utils.path_join(temp_dir, self.project_name()+'.nwf')

            utils.copytree(self.project_dir(), app_nw_folder,
                           ignore=shutil.ignore_patterns(output_dir))

            zip_files(zip_file, self.project_dir(), exclude_paths=[output_dir])
            for ex_setting in self.settings['export_settings'].values():
                if ex_setting.value:
                    self.progress_text = '\n'
                    name = ex_setting.display_name
                    self.progress_text = u'Making files for {}...'.format(name)
                    export_dest = utils.path_join(output_dir, ex_setting.name)
                    versions = re.findall('(\d+)\.(\d+)\.(\d+)', self.selected_version())[0]

                    minor = int(versions[1])
                    if minor >= 12:
                        export_dest = export_dest.replace('node-webkit', 'nwjs')

                    if os.path.exists(export_dest):
                        utils.rmtree(export_dest, ignore_errors=True)

                    # shutil will make the directory for us
                    utils.copytree(get_data_path('files/'+ex_setting.name),
                                   export_dest,
                                    ignore=shutil.ignore_patterns('place_holder.txt'))
                    utils.rmtree(get_data_path('files/'+ex_setting.name), ignore_errors=True)
                    self.progress_text += '.'

                    if 'mac' in ex_setting.name:
                        uncomp_setting = self.get_setting('uncompressed_folder')
                        uncompressed = uncomp_setting.value
                        app_path = utils.path_join(export_dest,
                                                self.project_name()+'.app')

                        try:
                            utils.move(utils.path_join(export_dest,
                                                     'nwjs.app'),
                                       app_path)
                        except IOError:
                            utils.move(utils.path_join(export_dest,
                                                     'node-webkit.app'),
                                       app_path)

                        plist_path = utils.path_join(app_path, 'Contents', 'Info.plist')

                        plist_dict = plistlib.readPlist(plist_path)

                        plist_dict['CFBundleDisplayName'] = self.project_name()
                        plist_dict['CFBundleName'] = self.project_name()
                        version_setting = self.get_setting('version')
                        plist_dict['CFBundleShortVersionString'] = version_setting.value
                        plist_dict['CFBundleVersion'] = version_setting.value

                        plistlib.writePlist(plist_dict, plist_path)


                        self.progress_text += '.'

                        app_nw_res = utils.path_join(app_path,
                                                  'Contents',
                                                  'Resources',
                                                  'app.nw')

                        if uncompressed:
                            utils.copytree(app_nw_folder, app_nw_res)
                        else:
                            utils.copy(zip_file, app_nw_res)
                        self.create_icns_for_app(utils.path_join(app_path,
                                                              'Contents',
                                                              'Resources',
                                                              'nw.icns'))

                        self.progress_text += '.'
                    else:
                        ext = ''
                        windows = False
                        if 'windows' in ex_setting.name:
                            ext = '.exe'
                            windows = True

                        nw_path = utils.path_join(export_dest,
                                               ex_setting.dest_files[0])

                        if windows:
                            self.replace_icon_in_exe(nw_path)

                        self.compress_nw(nw_path)

                        dest_binary_path = utils.path_join(export_dest,
                                                        self.project_name() +
                                                        ext)
                        if 'linux' in ex_setting.name:
                            self.make_desktop_file(dest_binary_path, export_dest)

                        join_files(dest_binary_path, nw_path, zip_file)

                        sevenfivefive = (stat.S_IRWXU |
                                         stat.S_IRGRP |
                                         stat.S_IXGRP |
                                         stat.S_IROTH |
                                         stat.S_IXOTH)
                        os.chmod(dest_binary_path, sevenfivefive)

                        self.progress_text += '.'

                        if os.path.exists(nw_path):
                            os.remove(nw_path)

        except Exception:
            error = u''.join([unicode(x) for x in traceback.format_exception(sys.exc_info()[0],
                                                                             sys.exc_info()[1],
                                                                             sys.exc_info()[2])])
            self.logger.error(error)
            self.output_err += error
        finally:
            utils.rmtree(temp_dir, ignore_errors=True)
Example #48
0
def main():
    parser = ArgParser(description=('Command line interface '
                                    'to web2exe. {}'.format(__version__)),
                                     prog='web2execmd')
    command_base = CommandBase()
    command_base.init()
    parser.add_argument('project_dir', metavar='project_dir',
                        help='The project directory.', type=unicode_arg)
    parser.add_argument('--output-dir', dest='output_dir',
                        help='The output directory for exports.',
                        type=unicode_arg)
    parser.add_argument('--quiet', dest='quiet', action='store_true',
                        default=False,
                        help='Silences output messages')
    parser.add_argument('--verbose', dest='verbose', action='store_true',
                        default=False,
                        help=('Prints debug errors and messages instead '
                              'of logging to files/errors.log'))
    parser.add_argument('--package-json',
                        dest='load_json',
                        nargs='?',
                        default='',
                        const=True,
                        help=('Loads the package.json '
                              'file in the project directory. '
                              'Ignores other command line arguments.'))
    parser.add_argument('--cmd-version', action='version', version='%(prog)s {}'.format(__version__))

    for setting_group_dict in command_base.settings['setting_groups']+[command_base.settings['compression']]:
        for setting_name, setting in setting_group_dict.items():
            kwargs = {}
            if setting_name == 'name':
                kwargs.update({'default': command_base.project_name})
            else:
                kwargs.update({'required': setting.required,
                               'default': setting.default_value})
            action = 'store'
            option_name = setting_name.replace('_', '-')

            if setting.type in ['file', 'string', 'strings']:
                kwargs.update({'type': unicode_arg})

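            # Boolean settings map to flags: a setting that defaults to True is exposed as a --disable-<name> switch (store_false).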
            if isinstance(setting.default_value, bool):
                action = ('store_true' if setting.default_value is False
                          else 'store_false')
                kwargs.update({'action': action})
                if setting.default_value is True:
                    option_name = u'disable-{}'.format(option_name)
            else:
                if setting.values:
                    kwargs.update({'choices': setting.values})
                    setting.description += u' Possible values: {{{}}}'.format(', '.join([unicode(x) for x in setting.values]))
                    kwargs.update({'metavar': ''})
                else:
                    kwargs.update({'metavar': '<{}>'.format(setting.display_name)})

            parser.add_argument(u'--{}'.format(option_name),
                                dest=setting_name,
                                help=setting.description,
                                **kwargs
                                )

    export_args = [arg for arg in command_base.settings['export_settings']]
    parser.add_argument('--export-to', dest='export_options',
                        nargs='+', required=True,
                        choices=export_args,
                        help=('Choose at least one system '
                              'to export to.'))

    args = parser.parse_args()

    if args.verbose:
        logging.basicConfig(
            stream=sys.stdout,
            format=("%(levelname) -10s %(module)s.py: "
                    "%(lineno)s %(funcName)s - %(message)s"),
            level=logging.DEBUG
        )
    else:
        logging.basicConfig(
            filename=LOG_FILENAME,
            format=("%(levelname) -10s %(asctime)s %(module)s.py: "
                    "%(lineno)s %(funcName)s - %(message)s"),
            level=logging.DEBUG
        )

    global logger
    global handler

    logger = logging.getLogger('CMD Logger')
    handler = lh.RotatingFileHandler(LOG_FILENAME, maxBytes=100000, backupCount=2)
    logger.addHandler(handler)

    def my_excepthook(type_, value, tback):
        output_err = u''.join([unicode(x) for x in traceback.format_exception(type_, value, tback)])
        logger.error(u'{}'.format(output_err))
        sys.__excepthook__(type_, value, tback)

    sys.excepthook = my_excepthook

    command_base.logger = logger

    if args.quiet:
        command_base.quiet = True

    command_base._project_dir = args.project_dir

    command_base._output_dir = (args.output_dir or
                                utils.path_join(command_base._project_dir, 'output'))

    if args.app_name is None:
        args.app_name = command_base.project_name()

    if args.name is not None:
        setting = command_base.get_setting('name')
        args.name = setting.filter_name(args.name if not callable(args.name) else args.name())

    command_base._project_name = args.app_name if not callable(args.app_name) else args.app_name()

    if not args.title:
        args.title = command_base.project_name()

    for name, val in args._get_kwargs():
        if callable(val):
            val = val()
        if name == 'export_options':
            for opt in val:
                setting = command_base.get_setting(opt)
                if setting is not None:
                    setting.value = True
        else:
            setting = command_base.get_setting(name)
            if setting is not None:
                setting.value = val

    if args.load_json is True:
        command_base.load_package_json()
    elif args.load_json:
        command_base.load_package_json(args.load_json)

    command_base.export()
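A rough usage sketch for the command line interface above. The entry-point name comes from prog='web2execmd', the export choices (for example linux-x64, windows-x64, mac-x64) from the export settings used elsewhere in this code, and the project path is a hypothetical placeholder:

    # Hypothetical invocations (not from the original source):
    #   web2execmd my_project --export-to linux-x64
    #   web2execmd my_project --output-dir /tmp/builds --export-to windows-x64 mac-x64
    #   web2execmd my_project --package-json --export-to linux-x64 linux-x32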
Example #49
0
class Builder:
	"""
	  A generic build class.
	"""

	project        = "vrayblender"
	version        = utils.VERSION
	revision       = utils.REVISION # Patches revision
	brev           = None           # Blender master revision
	commits        = '0'

	# Directories
	dir_build      = utils.path_join(os.getcwd(), "build")
	dir_install    = utils.path_join(os.getcwd(), "install")
	dir_release    = utils.path_join(os.getcwd(), "release")
	dir_source     = ""

	dir_blender     = ""
	dir_blender_svn = ""

	# Installation directory name
	dir_install_name = "vrayblender"
	dir_install_path = utils.path_join(dir_install, dir_install_name)

	# Build archive for Mac and Linux
	# or NSIS installer for Windows
	generate_package = False
	generate_desktop = False
	generate_docs    = False
	with_installer   = 'NSIS'

	# Test mode - just prints messages, does nothing
	mode_test      = True

	# Special mode used only by me =)
	mode_developer = False

	# Debug output of the script
	mode_debug     = False

	# Add V-Ray/Blender patches
	add_patches    = True

	# Add V-Ray/Blender datafiles
	add_datafiles  = True

	# Add patches from "extra" directory
	add_extra      = False

	# Add themes from "themes" directory
	add_themes     = False

	# Host info
	host_os        = utils.get_host_os()
	host_arch      = utils.get_host_architecture()
	host_name      = utils.get_hostname()
	host_username  = utils.get_username()
	host_linux     = utils.get_linux_distribution()

	# Install dependencies
	install_deps   = False
	build_deps     = False
	use_build_deps = False

	# Update sources
	update_blender = True
	update_patch   = True

	# Blender option
	use_debug      = False
	use_openmp     = True
	use_collada    = False
	use_sys_python = True
	use_sys_ffmpeg = True

	# Build settings
	build_arch          = host_arch
	build_threads       = 4
	build_optimize      = False
	build_optimize_type = "INTEL"
	build_clean         = False
	build_release       = False
	build_upload        = False
	checkout_revision   = None
	use_env_msvc        = False

	# user-config.py file path
	user_config         = ""

	# Use user defined user-config.py
	user_user_config    = ""

	# Mac OS X specific
	osx_sdk             = "10.6"

	with_cycles         = False
	with_tracker        = False
	with_cuda           = False
	cuda_gpu            = "sm_21"
	with_osl            = False
	with_player         = False
	with_ge             = False

	use_proxy           = None

	use_github_branch   = None
	use_exp_branch      = None
	use_blender_hash    = None
	add_branch_name     = None

	vb30   = None
	vc2013 = None

	# Only prepare sources
	export_only = None

	def __init__(self, params):
		if not params:
			sys.stdout.write("Params are empty - using defaults...\n")

		for param in params:
			setattr(self, param, params[param])

		if self.mode_debug:
			for param in params:
				print("%s => %s" % (param, params[param]))
			print("")

		if not self.dir_source:
			sys.stderr.write("Fatal error!\n")
			sys.stderr.write("Source directory not specified!\n")
			sys.exit(2)

		if self.vb30:
			self.project += "3"
		elif self.use_github_branch == "dev/vray_for_blender/stable":
			self.project += "1"
		else:
			self.project += "2"


	def info(self):
		sys.stdout.write("\n")
		sys.stdout.write("Build information:\n")

		sys.stdout.write("OS: %s\n" % (self.host_os.title()))

		if self.host_os == utils.LNX:
			sys.stdout.write("Distribution: %s %s\n" % (self.host_linux["long_name"], self.host_linux["version"]))

		sys.stdout.write("Architecture: %s\n" % (self.host_arch))
		sys.stdout.write("Build architecture: %s\n" % (self.build_arch))
		sys.stdout.write("Target: %s %s (%s)\n" % (self.project, self.version, self.revision))
		sys.stdout.write("Source directory:  %s\n" % (self.dir_source))
		sys.stdout.write("Build directory:   %s\n" % (self.dir_build))
		sys.stdout.write("Install directory: %s\n" % (self.dir_install_path))
		sys.stdout.write("Release directory: %s\n" % (self.dir_release))
		sys.stdout.write("\n")


	def update_sources(self):
		"""
		  Getting/updating sources
		"""

		def exportSources():
			sys.stdout.write("Exporting sources...\n")
			if self.mode_test:
				return

			if os.path.exists(self.dir_blender):
				utils.remove_directory(self.dir_blender)

			# Copy full tree to have proper build info.
			shutil.copytree(self.dir_blender_svn, self.dir_blender)

			os.chdir(self.dir_blender)
			os.system("git remote update github")
			os.system("git checkout -b {branch} github/{branch}".format(branch=self.use_github_branch))

			if self.checkout_revision is not None:
				os.chdir(self.dir_blender)
				os.system("git checkout %s" % self.checkout_revision)

		# Update Blender sources
		if self.update_blender:
			if os.path.exists(self.dir_blender):
				sys.stdout.write("Removing exported sources...\n")
				if not self.mode_test:
					utils.remove_directory(self.dir_blender)

			if not os.path.exists(self.dir_blender_svn):
				sys.stdout.write("Obtaining Blender sources...\n")
				if not self.mode_test:
					os.chdir(self.dir_source)

					# Obtain sources
					os.system("git clone %s blender" % GITHUB_REPO)

					# Now set origin to Blender's git and additional github remote
					# This is needed for proper submodules init
					os.chdir(self.dir_blender)
					os.system("git remote set-url origin %s" % OFFICIAL_REPO)
					os.system("git remote add github %s" % GITHUB_REPO)
					os.system("git remote update")
					os.system("git pull --rebase")

					os.chdir(self.dir_blender)
					os.system("git submodule update --init --recursive")
					os.system("git submodule foreach git checkout master")
					os.system("git submodule foreach git pull --rebase origin master")

					os.chdir(self.dir_source)
					# Move "blender" to "blender-git"
					utils.move_directory(self.dir_blender, self.dir_blender_svn)

			else:
				sys.stdout.write("Updating Blender sources...\n")
				if not self.mode_test:
					os.chdir(self.dir_blender_svn)

					# Update sources
					os.system("git pull --rebase")
					os.system("git submodule foreach git pull --rebase origin master")

			exportSources()

			# Update Blender libs
			lib_dir = None
			svn_cmd = None
			if self.host_os != utils.LNX:
				if self.host_os == utils.WIN:
					lib_dir = utils.path_join(self.dir_source, "lib", "windows")
					svn_cmd = "svn checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/windows lib/windows"
					if self.host_arch == "x86_64":
						if self.vc2013:
							lib_dir = utils.path_join(self.dir_source, "lib", "win64_vc12")
							svn_cmd = "svn checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/win64_vc12 lib/win64_vc12"
						else:
							lib_dir = utils.path_join(self.dir_source, "lib", "win64")
							svn_cmd = "svn checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/win64 lib/win64"
				elif self.host_os == utils.MAC:
					lib_dir = utils.path_join(self.dir_source, "lib", "darwin-9.x.universal")
					svn_cmd = "svn checkout https://svn.blender.org/svnroot/bf-blender/trunk/lib/darwin-9.x.universal lib/darwin-9.x.universal"

				if not os.path.exists(lib_dir):
					sys.stdout.write("Getting \"lib\" data...\n")
					if not self.mode_test:
						os.chdir(self.dir_source)
						os.system(svn_cmd)
				else:
					sys.stdout.write("Updating \"lib\" data...\n")
					if not self.mode_test:
						os.chdir(lib_dir)
						os.system("svn update")

		# Update V-Ray/Blender patchset
		if self.update_patch and not self.mode_developer:
			vb25_patch = utils.path_join(self.dir_source, "vb25-patch")

			if os.path.exists(vb25_patch):
				sys.stdout.write("Updating V-Ray/Blender patches...\n")
				if not self.mode_test:
					os.chdir(vb25_patch)
					os.system("git pull")
			else:
				sys.stdout.write("Getting V-Ray/Blender patches...\n")
				if not self.mode_test:
					os.chdir(self.dir_source)
					os.system("git clone git://github.com/bdancer/vb25-patch.git")


	def update(self):
		self.revision, self.brev, self.commits = utils.get_svn_revision(self.dir_blender)
		self.version = utils.get_blender_version(self.dir_blender)[0]
		self.versionArr = utils.get_blender_version(self.dir_blender)

		if self.build_release:
			self.dir_install_name = utils.GetInstallDirName(self)
		else:
			self.dir_install_name = self.project

		self.dir_install_path = utils.path_join(self.dir_install, self.dir_install_name)


	def patch(self):
		patch_dir = utils.path_join(self.dir_source, "vb25-patch")

		if self.use_blender_hash:
			patchBin      = utils.find_patch()
			patchFilepath = os.path.join(tempfile.gettempdir(), "vray_for_blender.patch")

			os.chdir(self.dir_blender)

			os.system("git checkout %s" % self.use_github_branch) # Checkout exporter branch
			os.system("git diff master > %s" % patchFilepath)     # Generate diff with master
			os.system("git fetch --tags")                         # Hash could be tag also
			os.system("git checkout %s" % self.use_blender_hash)  # Checkout needed revision
			os.system("git checkout -b vray_for_blender")         # Create some branch for patching
			os.system("patch -Np1 -i %s" % patchFilepath)         # Apply patch

			os.remove(patchFilepath)

		# Add datafiles: splash, default scene etc
		if self.add_datafiles:
			sys.stdout.write("Adding datafiles...\n")

			datafiles_path = utils.path_join(self.dir_blender, "release", "datafiles")

			if not self.mode_test:
				# Change splash
				for splash_filename in ["splash.png", "splash_2x.png"]:
					splash_path_src = utils.path_join(patch_dir, "datafiles", splash_filename)
					splash_path_dst = utils.path_join(datafiles_path, splash_filename)

					shutil.copyfile(splash_path_src, splash_path_dst)

				# Change icons
				for subdir in ["blender_icons16", "blender_icons32"]:
					icons_path_src = utils.path_join(patch_dir, "datafiles", subdir)
					icons_path_dst = utils.path_join(datafiles_path, subdir)

					shutil.rmtree(icons_path_dst)
					shutil.copytree(icons_path_src, icons_path_dst)


	def docs(self):
		if self.generate_docs:
			api_dir = utils.path_join(self.dir_install_path, "api")

			sys.stdout.write("Generating API documentation: %s\n" % (api_dir))

			if self.host_os != utils.LNX:
				sys.stdout.write("API documentation generation is not supported on this platform.\n")

			else:
				if not self.mode_test:
					sphinx_doc_gen = "doc/python_api/sphinx_doc_gen.py"

					# Create API directory
					os.system("mkdir -p %s" % api_dir)

					# Generate API docs
					os.chdir(self.dir_blender)
					os.system("%s -b -P %s" % (utils.path_join(self.dir_install_path, "blender"), sphinx_doc_gen))
					os.system("sphinx-build doc/python_api/sphinx-in %s" % api_dir)


	def post_init(self):
		"""
		  Override this method in subclass.
		"""
		pass


	def init_paths(self):
		if self.generate_package:
			if not self.mode_test:
				utils.path_create(self.dir_release)

		self.dir_build        = utils.path_slashify(self.dir_build)
		self.dir_source       = utils.path_slashify(self.dir_source)
		self.dir_install_path = utils.path_slashify(self.dir_install_path)

		self.dir_blender      = utils.path_join(self.dir_source, "blender")
		self.dir_blender_svn  = utils.path_join(self.dir_source, "blender-git")
		self.user_config      = utils.path_join(self.dir_blender, "user-config.py")

		if self.user_user_config:
			self.user_user_config = utils.pathExpand(self.user_user_config)

		if self.build_clean:
			if os.path.exists(self.dir_build):
				shutil.rmtree(self.dir_build)


	def config(self):
		"""
		  Override this method in subclass.
		"""
		sys.stderr.write("Base class method called: config() This souldn't happen.\n")


	def compile(self):
		if self.host_os == utils.LNX and hasattr(self, 'compile_linux'):
			self.compile_linux()
		elif self.host_os == utils.MAC and hasattr(self, 'compile_osx'):
			self.compile_osx()
		else:
			compileCmd = [sys.executable]
			compileCmd.append("scons/scons.py")

			if not self.build_clean:
				compileCmd.append("--implicit-deps-unchanged")
				compileCmd.append("--max-drift=1")

			if self.host_os != utils.WIN:
				compileCmd.append('CXXFLAGS="-w"')
				compileCmd.append('CCFLAGS="-w"')

			if self.use_env_msvc:
				compileCmd.append(r'env="PATH:%PATH%,INCLUDE:%INCLUDE%,LIB:%LIB%"')

			if self.vc2013:
				compileCmd.append(r'MSVS_VERSION=12.0')

			cleanCmd = [sys.executable]
			cleanCmd.append("scons/scons.py")
			cleanCmd.append("clean")

			if not self.mode_test:
				os.chdir(self.dir_blender)

				if self.build_clean:
					sys.stdout.write("Calling: %s\n" % (" ".join(cleanCmd)))
					subprocess.call(cleanCmd)

				sys.stdout.write("Calling: %s\n" % (" ".join(compileCmd)))
				res = subprocess.call(compileCmd)
				if not res == 0:
					sys.stderr.write("There was an error during the compilation!\n")
					sys.exit(1)


	def compile_post(self):
		if self.host_os == utils.WIN:
			runtimeDir = utils.path_join(self.dir_source, "vb25-patch", "non-gpl", self.build_arch)
			files = []
			if self.vc2013:
				files.extend([
					"msvcp120.dll",
					"msvcr120.dll",
					"vcomp120.dll",
				])
			else:
				files.append("vcomp90.dll")
			for f in files:
				shutil.copy(utils.path_join(runtimeDir, f), self.dir_install_path)


	def exporter(self):
		"""
		  Add script and modules
		"""
		scriptsPath = utils.path_join(self.dir_install, self.dir_install_name, self.version, "scripts")
		if self.host_os == utils.MAC:
			scriptsPath = utils.path_join(self.dir_install, self.dir_install_name, "blender.app", "Contents", "Resources", self.version, "scripts")

		addonsPath  = utils.path_join(scriptsPath, "addons")
		startupPath = utils.path_join(scriptsPath, "startup")

		clonePath = addonsPath if self.vb30 else startupPath

		sys.stdout.write("Adding exporter...\n")
		sys.stdout.write("  in: %s\n" % clonePath)

		if not self.mode_test:
			if not os.path.exists(clonePath):
				sys.stderr.write("Something went wrong! Can't add Python modules and exporter!\n")
				sys.exit(3)

			if self.vb30:
				os.chdir(clonePath)
				exporterPath = utils.path_join(clonePath, "vb30")
				if os.path.exists(exporterPath):
					utils.remove_directory(exporterPath)
				os.system("git clone --recursive https://github.com/bdancer/vb30.git")

			else:
				os.chdir(clonePath)
				exporterPath = utils.path_join(clonePath, "vb25")
				if os.path.exists(exporterPath):
					utils.remove_directory(exporterPath)

				os.system("git clone --recursive https://github.com/bdancer/vb25.git")

			if self.use_exp_branch not in {'master'}:
				os.chdir(exporterPath)
				os.system("git remote update")
				os.system("git checkout -b {branch} origin/{branch}".format(branch=self.use_exp_branch))

			os.chdir(exporterPath)
			os.system("git submodule update --init --recursive")
			os.system("git submodule foreach git checkout master")
			os.system("git submodule foreach git pull --rebase origin master")


	def package(self):
		"""
		  Override this method in subclass.
		"""
		sys.stderr.write("Base class method called: package() This souldn't happen.\n")


	def build(self):
		self.init_paths()
		self.post_init()

		self.update_sources()
		self.update()

		self.info()

		self.patch()

		if not self.export_only:
			self.config()
			self.compile()
			self.compile_post()

			if not self.mode_developer:
				self.exporter()

			self.docs()

			if self.generate_package:
				if self.mode_developer:
					sys.stdout.write("Package generation is disabled in 'Developer' mode.\n")
				else:
					if self.build_release:
						releaseSubdir, releasePackage = self.package()
						if self.build_upload != 'off':
							self.upload(releaseSubdir, releasePackage)
					else:
						sys.stdout.write("Package generation is disabled in non-release mode.\n")


	def upload(self, subdir, filepath):
		if self.build_upload == 'http':
			import requests

			from ConfigParser import RawConfigParser

			config = RawConfigParser()
			config.read(os.path.expanduser("~/.passwd"))

			data = {
				"password" : config.get('cgdo.ru', 'upload_password'),
				"subdir"   : subdir,
			}

			files = {
				"file" : open(filepath, "rb"),
			}

			proxies = {}
			if self.use_proxy:
				proxies = {
					"http"  : self.use_proxy,
					"https" : self.use_proxy,
				}

			sys.stdout.write("Uploading package '%s' to '%s'...\n" % (filepath, subdir))
			requests.post("http://cgdo.ru/upload", files=files, data=data, proxies=proxies)

		elif self.build_upload == 'ftp':
			from ConfigParser import RawConfigParser

			config = RawConfigParser()
			config.read(os.path.expanduser("~/.passwd"))

			now = datetime.datetime.now()
			subdir = now.strftime("%Y%m%d")

			cmd = None

			if sys.platform == 'win32':
				ftpScriptFilepath = os.path.join(tempfile.gettempdir(), "blender_for_vray_upload.txt")

				with open(ftpScriptFilepath, 'w') as f:
					f.write('option batch abort\n')
					f.write('option confirm off\n')
					f.write('open ftp://%s:%s@%s -rawsettings ProxyMethod=%s ProxyHost=%s ProxyPort=%s\n' % (
						config.get('nightlies.ftp', 'user'),
						config.get('nightlies.ftp', 'pass'),
						config.get('nightlies.ftp', 'host'),
						config.get('nightlies.ftp', 'proxy_type'),
						config.get('nightlies.ftp', 'proxy_host'),
						config.get('nightlies.ftp', 'proxy_port'),
					))
					f.write('option transfer binary\n')
					f.write('put %s /%s/\n' % (filepath, subdir))
					f.write('exit\n')
					f.write('\n')

				cmd = ['winscp']
				cmd.append('/passive')
				cmd.append('/script="%s"' % ftpScriptFilepath)

				if not self.mode_test:
					os.system(' '.join(cmd))

			else:
				cmd = ['curl']
				cmd.append('--no-epsv')
				if self.use_proxy:
					cmd.append('--proxy')
					cmd.append(self.use_proxy)
				cmd.append('--user')
				cmd.append('%s:%s' % (
					config.get('nightlies.ftp', 'user'),
					config.get('nightlies.ftp', 'pass'),
				))
				cmd.append('--upload-file')
				cmd.append(filepath)
				cmd.append('ftp://%s/%s/' % (
					config.get('nightlies.ftp', 'host'),
					subdir,
				))

				if not self.mode_test:
					subprocess.call(cmd)

			if self.mode_test:
				print(' '.join(cmd))
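A minimal driving sketch for the Builder class above, assuming a hypothetical platform-specific subclass; the paths and return values below are placeholders, not from the original source:

# Hypothetical subclass: config() and package() are the hooks Builder expects subclasses to override.
class LinuxBuilder(Builder):
	def config(self):
		pass  # would write user-config.py / build options here

	def package(self):
		# build() expects (release_subdir, package_filepath) when generate_package is enabled
		return "nightly", "/tmp/release/vrayblender.tar.bz2"

LinuxBuilder({
	'dir_source': '/home/user/src',  # required: __init__ exits when this is empty
	'mode_test':  True,              # test mode: just prints messages, does nothing
}).build()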
def _save_articles_html():
    """Save http://paulgraham.com/articles.html to disk"""
    url = path_join(ROOT_URL, 'articles.html')
    dst = path_join(DATA_DIR, get_basename(url))
    return download_to_dst_and_get_html(url, dst, wait_sec=0, rewrite=True)
def _get_dst(url):
    """Get essay download file path"""
    return path_join(RAW_HTML_DIR, get_basename(url))
def _get_essays_urls(anchors):
    """Append root url to hrefs"""
    return [path_join(ROOT_URL, anchor.get('href')) for anchor in anchors]
def _write_index(html):
    dst = path_join(HTML_DIR, 'index.html')
    with open(dst, 'wb') as o:
        o.write(html.encode('utf-8'))
Example #54
0
    def make_output_dirs(self):
        self.output_err = ''
        try:
            self.progress_text = 'Removing old output directory...\n'

            output_dir = utils.path_join(self.output_dir(), self.project_name())
            if os.path.exists(output_dir):
                utils.rmtree(output_dir, ignore_errors=True)

            temp_dir = utils.path_join(TEMP_DIR, 'webexectemp')
            if os.path.exists(temp_dir):
                utils.rmtree(temp_dir, ignore_errors=True)

            self.progress_text = 'Making new directories...\n'

            if not os.path.exists(output_dir):
                os.makedirs(output_dir)

            os.makedirs(temp_dir)

            self.copy_files_to_project_folder()

            json_file = utils.path_join(self.project_dir(), 'package.json')

            global_json = utils.get_data_file_path('files/global.json')

            if self.output_package_json:
                with codecs.open(json_file, 'w+', encoding='utf-8') as f:
                    f.write(self.generate_json())


            with codecs.open(global_json, 'w+', encoding='utf-8') as f:
                f.write(self.generate_json(global_json=True))

            zip_file = utils.path_join(temp_dir, self.project_name()+'.nw')

            app_nw_folder = utils.path_join(temp_dir, self.project_name()+'.nwf')

            utils.copytree(self.project_dir(), app_nw_folder,
                           ignore=shutil.ignore_patterns(output_dir))

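            # Zip the project sources into the .nw archive that NW.js loads, excluding the output directory.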
            zip_files(zip_file, self.project_dir(), exclude_paths=[output_dir])
            for ex_setting in self.settings['export_settings'].values():
                if ex_setting.value:
                    self.progress_text = '\n'
                    name = ex_setting.display_name
                    self.progress_text = u'Making files for {}...'.format(name)
                    export_dest = utils.path_join(output_dir, ex_setting.name)
                    versions = re.findall(r'(\d+)\.(\d+)\.(\d+)', self.selected_version())[0]

                    minor = int(versions[1])
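                    # node-webkit was renamed to NW.js at version 0.12, so newer exports use the 'nwjs' folder name.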
                    if minor >= 12:
                        export_dest = export_dest.replace('node-webkit', 'nwjs')

                    if os.path.exists(export_dest):
                        utils.rmtree(export_dest, ignore_errors=True)

                    # shutil will make the directory for us
                    utils.copytree(get_data_path('files/'+ex_setting.name),
                                   export_dest,
                                   ignore=shutil.ignore_patterns('place_holder.txt'))
                    utils.rmtree(get_data_path('files/'+ex_setting.name), ignore_errors=True)
                    self.progress_text += '.'

                    if 'mac' in ex_setting.name:
                        uncomp_setting = self.get_setting('uncompressed_folder')
                        uncompressed = uncomp_setting.value
                        app_path = utils.path_join(export_dest,
                                                self.project_name()+'.app')

                        try:
                            utils.move(utils.path_join(export_dest,
                                                     'nwjs.app'),
                                       app_path)
                        except IOError:
                            utils.move(utils.path_join(export_dest,
                                                     'node-webkit.app'),
                                       app_path)

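                        # Rewrite the app bundle's Info.plist with the project's display name and version.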
                        plist_path = utils.path_join(app_path, 'Contents', 'Info.plist')

                        plist_dict = plistlib.readPlist(plist_path)

                        plist_dict['CFBundleDisplayName'] = self.project_name()
                        plist_dict['CFBundleName'] = self.project_name()
                        version_setting = self.get_setting('version')
                        plist_dict['CFBundleShortVersionString'] = version_setting.value
                        plist_dict['CFBundleVersion'] = version_setting.value

                        plistlib.writePlist(plist_dict, plist_path)


                        self.progress_text += '.'

                        app_nw_res = utils.path_join(app_path,
                                                  'Contents',
                                                  'Resources',
                                                  'app.nw')

                        if uncompressed:
                            utils.copytree(app_nw_folder, app_nw_res)
                        else:
                            utils.copy(zip_file, app_nw_res)
                        self.create_icns_for_app(utils.path_join(app_path,
                                                              'Contents',
                                                              'Resources',
                                                              'nw.icns'))

                        self.progress_text += '.'
                    else:
                        ext = ''
                        windows = False
                        if 'windows' in ex_setting.name:
                            ext = '.exe'
                            windows = True

                        nw_path = utils.path_join(export_dest,
                                               ex_setting.dest_files[0])

                        if windows:
                            self.replace_icon_in_exe(nw_path)

                        self.compress_nw(nw_path)

                        dest_binary_path = utils.path_join(export_dest,
                                                        self.project_name() +
                                                        ext)
                        if 'linux' in ex_setting.name:
                            self.make_desktop_file(dest_binary_path, export_dest)

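                        # Concatenate the NW.js binary and the zipped app into a single self-contained executable.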
                        join_files(dest_binary_path, nw_path, zip_file)

                        sevenfivefive = (stat.S_IRWXU |
                                         stat.S_IRGRP |
                                         stat.S_IXGRP |
                                         stat.S_IROTH |
                                         stat.S_IXOTH)
                        os.chmod(dest_binary_path, sevenfivefive)

                        self.progress_text += '.'

                        if os.path.exists(nw_path):
                            os.remove(nw_path)

        except Exception:
            error = u''.join([unicode(x) for x in traceback.format_exception(sys.exc_info()[0],
                                                                             sys.exc_info()[1],
                                                                             sys.exc_info()[2])])
            self.logger.error(error)
            self.output_err += error
        finally:
            utils.rmtree(temp_dir, ignore_errors=True)
def _write_essay_orders(essay_orders):
    """Write essay orders"""
    dst = path_join(DATA_DIR, 'essay_orders.json')
    with open(dst, 'w') as o:
        simplejson.dump(essay_orders, o)
Example #56
0
    def run_script(self, script):
        if not script:
            return

        if os.path.exists(script):
            self.progress_text = 'Executing script {}...'.format(script)
            contents = ''
            with codecs.open(script, 'r', encoding='utf-8') as f:
                contents = f.read()

            _, ext = os.path.splitext(script)

            export_opts = self.get_export_options()
            export_dir = '{}{}{}'.format(self.output_dir(),
                                         os.path.sep,
                                         self.project_name())
            export_dirs = []
            for opt in export_opts:
                export_dirs.append('{}{}{}'.format(export_dir, os.path.sep, opt))

            command = None
            bat_file = None

            export_dict = {'mac-x64_dir': '',
                           'mac-x32_dir': '',
                           'windows-x64_dir': '',
                           'windows-x32_dir': '',
                           'linux-x64_dir': '',
                           'linux-x32_dir': ''}

            if ext == '.py':
                env_file = get_file('files/env_vars.py')
                env_contents = codecs.open(env_file, 'r', encoding='utf-8').read()

                for i, ex_dir in enumerate(export_dirs):
                    opt = export_opts[i]
                    export_dict[opt+'_dir'] = ex_dir

                env_vars = env_contents.format(proj_dir=self.project_dir(),
                                               proj_name=self.project_name(),
                                               export_dir=export_dir,
                                               export_dirs=str(export_dirs),
                                               num_dirs=len(export_dirs),
                                               **export_dict)
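                # Prepend the generated environment variables to the user script and run it inline with python -c.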
                pycontents = '{}\n{}'.format(env_vars, contents)

                command = ['python', '-c', pycontents]


            elif ext == '.bash':
                env_file = get_file('files/env_vars.bash')
                env_contents = codecs.open(env_file, 'r', encoding='utf-8').read()
                ex_dir_vars = ''

                for i, ex_dir in enumerate(export_dirs):
                    opt = export_opts[i]
                    export_dict[opt+'_dir'] = ex_dir

                for ex_dir in export_dirs:
                    ex_dir_vars += "'{}' ".format(ex_dir)

                env_vars = env_contents.format(proj_dir=self.project_dir(),
                                               proj_name=self.project_name(),
                                               export_dir=export_dir,
                                               num_dirs=len(export_dirs),
                                               export_dirs=ex_dir_vars,
                                               **export_dict)
                shcontents = '{}\n{}'.format(env_vars, contents)

                command = ['bash', '-c', shcontents]

            elif ext == '.bat':
                env_file = get_file('files/env_vars.bat')
                env_contents = codecs.open(env_file, 'r', encoding='utf-8').read()
                ex_dir_vars = ''

                for i, ex_dir in enumerate(export_dirs):
                    opt = export_opts[i]
                    export_dict[opt+'_dir'] = ex_dir
                    ex_dir_vars += 'set "EXPORT_DIRS[{}]={}"\n'.format(i, ex_dir)

                env_vars = env_contents.format(proj_dir=self.project_dir(),
                                               proj_name=self.project_name(),
                                               export_dir=export_dir,
                                               num_dirs=len(export_dirs),
                                               export_dirs=ex_dir_vars,
                                               **export_dict)
                batcontents = '{}\n{}'.format(env_vars, contents)

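                # Unlike the python/bash cases, the batch contents are written to a temporary .bat file and executed from there.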
                bat_file = utils.path_join(TEMP_DIR, '{}.bat'.format(self.project_name()))

                self.logger.debug(batcontents)

                with open(bat_file, 'w+') as f:
                    f.write(batcontents)

                command = [bat_file]

            proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            output, error = proc.communicate()
            output = output.strip()
            error = error.strip()

            if bat_file:
                os.remove(bat_file)

            with open(get_file('script-output.txt'), 'w+') as f:
                f.write('Output:\n{}'.format(output))
                if error:
                    f.write('\n\nErrors:\n{}\n'.format(error))

            self.progress_text = 'Done executing script.'
        else:
            self.progress_text = '\nThe script {} does not exist. Not running.'.format(script)
        loss.backward()
        optimizer.step()

        loss_val = loss.item()
        running_loss_ep += loss_val

        # print statistics
        data_generator.set_description('Epoch {}/{}'.format(
            epoch + 1, args.epochs))
        data_generator.set_postfix(loss=loss_val)

    loss_log.append([epoch, (running_loss_ep / len(data_loader))])

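    # Save an intermediate checkpoint every 5 epochs.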
    if epoch % 5 == 0:
        torch.save(model.state_dict(),
                   path_join(args.output_dir, f'model_ep{epoch}.dict'))

final_model_path = path_join(args.output_dir,
                             f'model_ep{args.epochs - 1}.dict')
torch.save(model.state_dict(), final_model_path)
loss_df = pd.DataFrame(data=loss_log, columns=['epoch', 'cross_entropy_loss'])
loss_df.to_csv(path_join(args.output_dir, f'train_loss.csv'), index=None)
print('Finished Training')

print('Starting Evaluation')
predict(args.config,
        final_model_path,
        args.output_dir,
        args.classes,
        args.architecture,
        aux_loss=aux_loss)