Example #1
def createJiraExe(ico: str = None):
    call(
        'C:\\Users\\Administrator\\AppData\\Local\\Programs\\Python\\Python36-32\\Scripts\\pyinstaller -w -F -i {}  JiraTool.py -p AnrTool.py -p Tool --hidden-import Tool'
        .format(ico))
    dist = sep.join(['dist', 'JiraTool.exe'])
    if isfile(dist):

        EXE_PATH = sep.join([SHARE_PATH, 'JiraTool'])
        print('{} isdir {}'.format(EXE_PATH, isdir(EXE_PATH)))
        JIRA_TOOL_PATH = sep.join([EXE_PATH, 'JiraTool'])
        EXE_FILE_PATH = sep.join([JIRA_TOOL_PATH, 'JiraTool.exe'])
        ZIP_FILE_PATH = sep.join([EXE_PATH, 'JiraTool.zip'])
        print('exe={} zip={}'.format(EXE_FILE_PATH, ZIP_FILE_PATH))

        copyfile(dist, EXE_FILE_PATH)
        zip_single(JIRA_TOOL_PATH, ZIP_FILE_PATH)
        customerConf = ConfigParser()
        customerConf.read(JiraTool.VERSION_INI_FILE)
        defaultConf = customerConf.defaults()
        defaultConf['update_time'] = datetime.now().strftime(
            '%Y-%m-%d %H:%M:%S')
        defaultConf['version'] = getVersion('JiraTool.py')
        defaultConf['v{}'.format(
            defaultConf['version'])] = getUpdateContent('JiraTool.py')
        defaultConf['content'] = defaultConf['v{}'.format(
            defaultConf['version'])]
        customerConf.write(open(JiraTool.VERSION_INI_FILE, mode='w'))
        if isdir('dist'):
            rmtree('dist')
        if isdir('build'):
            rmtree('build')
Example #2
def _set_up_config_file(cfg_name: str):
    local_cfg_file = sep.join([
        CURRENT_EXPERIMENT["mainfolder"], CURRENT_EXPERIMENT["sample_name"],
        CURRENT_EXPERIMENT['local_scripts_subfolder'],
        "{}.config".format(cfg_name)
    ])
    if os.path.isfile(local_cfg_file):
        CURRENT_EXPERIMENT["{}_config".format(cfg_name)] = 'local'
        log.info('set up config file at {}'.format(local_cfg_file))
    else:
        log.warning('no config file found at {}, will try general'
                    ''.format(local_cfg_file))
        try:
            script_folder = CURRENT_EXPERIMENT["scriptfolder"]
        except KeyError:
            raise KeyError('scriptfolder not found in CURRENT_EXPERIMENT, '
                           'check that init function calls '
                           '_set_up_script_folder before _set_up_config_file')
        general_cfg_file = sep.join(
            [script_folder, "{}.config".format(cfg_name)])
        if os.path.isfile(general_cfg_file):
            CURRENT_EXPERIMENT["{}_config".format(cfg_name)] = 'general'
            log.info('set up config file at {}'.format(general_cfg_file))
        else:
            log.error('no config file found at {}'.format(general_cfg_file))
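Note: the lookup order above (local first, then general) can be isolated into a small self-contained sketch; the function name, the folder arguments and the logger below are illustrative assumptions, not part of the original example.

import logging
from os.path import isfile, sep

log = logging.getLogger(__name__)

def resolve_config(local_dir, general_dir, cfg_name):
    # prefer a local config file, fall back to a general one (same order as above)
    for scope, folder in (('local', local_dir), ('general', general_dir)):
        candidate = sep.join([folder, '{}.config'.format(cfg_name)])
        if isfile(candidate):
            log.info('set up config file at %s', candidate)
            return scope, candidate
    log.error('no config file found for %s', cfg_name)
    return None, None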
Example #3
def find_config_folder():
    """searches path from cwd to $HOME, searching for .pyabolism directory
        if cwd not within home directory, searches cwd only
        if finds nothing, returns None"""

    from os import getcwd
    from os.path import sep, expanduser, isdir

    folder_name = '.pyabolism'

    home_path = expanduser('~')
    cwd       = getcwd()

    if cwd[0:len(home_path)] != home_path:

        test_path = sep.join([cwd, folder_name])
        if isdir(test_path):
            return test_path

    else:

        split_home_path = home_path.split(sep)
        split_cwd       = cwd.split(sep)

        for i in range(len(split_cwd) - len(split_home_path) + 1):

            test_path = sep.join(split_cwd[:len(split_cwd) - i] + [folder_name])

            if isdir(test_path):
                return test_path

    return None
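A possible caller for the function above, assuming find_config_folder is in scope; the settings file name is a made-up placeholder.

from os.path import sep

config_folder = find_config_folder()
if config_folder is None:
    print('no .pyabolism folder found between cwd and $HOME')
else:
    settings_path = sep.join([config_folder, 'settings.cfg'])  # hypothetical file name
    print('would read settings from', settings_path)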
Example #4
def create_app(config_file):
    app = Flask('blaze')
    app.config.from_pyfile(config_file)

    # build the path for the theme directory, which contains the templates
    world_blueprint.template_folder = sep.join([app.root_path, "world",
                                                'templates'])
    world_blueprint.static_folder = sep.join(["themes",
                                              app.config['SITE_THEME'], 'static'])
    app.register_blueprint(admin_blueprint)
    app.register_blueprint(world_blueprint)
    app.register_blueprint(root_blueprint)

    @app.errorhandler(404)
    def file_not_found_error(e):
        r = request.url
        now = datetime.datetime.now()
        with open('error_log.txt', 'a' ) as f:
            current_timestamp = now.strftime("%d-%m-%Y %H:%M:%S")
            f.write("\n 404 %s  %s " %(current_timestamp,r))
        return "page not found", 404


    @app.errorhandler(500)
    def internal_server_error(e):
        r = request.url
        now = datetime.datetime.now()
        with open('error_log.txt', 'a' ) as f:
            current_timestamp = now.strftime("%d-%m-%Y %H:%M:%S")
            f.write("\n 500 %s  %s " %(current_timestamp,r))
        return "page not found", 500
    return app
Example #5
 def _get_repo_hash(self):
     files = reduce(lambda a, b: a + b,
                    [[(sep.join([d, f]), stat(sep.join([d, f])).st_mtime) for f in fn] for d, dn, fn in
                     walk(self._path)])
     hash = md5()
     hash.update(json.dumps(files))
     return hash.hexdigest()
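The snippet above passes the str returned by json.dumps straight to md5.update, which only works on Python 2. A Python 3-safe sketch of the same idea (hash the (path, mtime) pairs of every file under a directory, sorted here for a stable digest) could look like the following; the helper name is an assumption.

import json
from hashlib import md5
from os import stat, walk
from os.path import sep

def repo_hash(path):
    # collect (file path, mtime) pairs for every file under `path`
    files = []
    for d, _, fn in walk(path):
        files.extend((sep.join([d, f]), stat(sep.join([d, f])).st_mtime) for f in fn)
    digest = md5()
    digest.update(json.dumps(sorted(files)).encode('utf-8'))  # bytes, not str
    return digest.hexdigest()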
Example #6
def cache_model(model, bug_name, overwrite=False):
    """cache a model as pickle inside the pyabolism config folder

        NB Pyabolism bugs are intended to speed up loading models, not for storage
            of edits made. These should be written back to external SBML files.

            Bugs will not in general survive upgrades to Pyabolism code base!
            Use with care!!"""

    raise Exception("Apologies, in it's current state this functionality is best avoided!")

    from .tools import find_config_folder
    config_folder = find_config_folder()

    if not config_folder:
        raise Exception("Unable to save bug, can't find a config folder!")

    from os.path import isdir, sep, isfile

    if not isdir(sep.join([config_folder, 'bugs'])):
        os.mkdir(sep.join([config_folder, 'bugs']))

    pickle_name = sep.join([config_folder, 'bugs', '%s.pickle' % bug_name])

    if isfile(pickle_name) and not overwrite:
        raise Exception('Bug already exists! Pass overwrite=True to replace existing pickle.')

    save_pickle(model, open(pickle_name, 'w'))

    return
Example #7
def homework():
    opencv_dir = getenv("OPENCV_DIR")
    cascades_path = path.join(sep.join(opencv_dir.split(sep)[0:-3]), "sources",
                              "data", "haarcascades")
    cascades_path_1 = path.join(sep.join(opencv_dir.split(sep)[0:-3]),
                                "sources", "data", "haarcascades", "cuda")
    face_cascade = cv2.CascadeClassifier(
        path.join(cascades_path, "haarcascade_frontalface_default.xml"))
    eyes_cascade = cv2.CascadeClassifier(
        path.join(cascades_path, "haarcascade_eye.xml"))
    img = cv2.imread("people.jpeg")
    img_grayscale = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)

    faces = face_cascade.detectMultiScale(img_grayscale, 1.7, 5)
    for (x, y, w, h) in faces:
        print x, y, w, h
        face_grayscale = np.array(img_grayscale[y:(y + h), x:(x + w)])
        print face_grayscale.shape
        face_color = img[y:(y + h), x:(x + w)]
        eyes = eyes_cascade.detectMultiScale2(face_grayscale)
        print "%s eyes have been found" % len(eyes)
        for (ex, ey, ew, eh) in eyes:
            cv2.rectangle(face_color, (ex, ey), (ex + ew, ey + eh),
                          (0, 255, 0), 2)
        cv2.rectangle(img, (x, y), (x + w, y + h), (255, 0, 0), 2)
    cv2.imshow("Faces", img)
    cv2.waitKey(0)
    cv2.destroyAllWindows()
Example #8
def find_settings_file(filename='settings.json'):
    # traverse the directories upwards to find and load the project's settings file
    cur_dirs = getcwd().split(sep)
#    settings = None

    b_settings_found = False
    while len(cur_dirs) > 1:
        filepath = join(sep.join(cur_dirs), filename)

        if exists(filepath):
            with open(filepath) as file:
#                settings = load_settings(file)
                logging.info("settings file found: {}".format(filepath))
                b_settings_found = True

#                settings['project_base_dir'] = join(sep.join(cur_dirs))
                break
        else:
            # one directory up in the tree
            cur_dirs = cur_dirs[:-1]
#    if not b_settings_found:
#        logging.info("no settings file found.")
    if b_settings_found:
        return sep.join(cur_dirs), filename
    else:
        return None
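The same upward search can be written as a self-contained helper (hypothetical name, no settings parsing), which may make the loop above easier to follow:

from os import getcwd
from os.path import exists, join, sep

def locate_upwards(filename='settings.json'):
    # walk from the current directory towards the filesystem root
    parts = getcwd().split(sep)
    while len(parts) > 1:
        candidate = join(sep.join(parts), filename)
        if exists(candidate):
            return candidate
        parts = parts[:-1]  # one directory up
    return None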
Example #9
def character_list(game, file_match='characters_base-stats'):
    data_dir = '.', 'stat_data', 'fe' + game
    data_dir = sep.join(data_dir)
    compiled_names = list()
    for root, folders, files in walk(data_dir):
        if root != data_dir:
            continue
        for file in files:
            if file_match not in file:
                continue
            filename = data_dir, file
            filename = sep.join(filename)
            table = pd.read_csv(filename, index_col=0)
            name_list = table.index
            for name in name_list:
                if name in compiled_names:
                    continue
                if 'HM' in name:
                    continue
                if name == 'General':
                    continue
                if name == 'Nils':
                    continue
                if '(' in name:
                    continue
                if name not in compiled_names:
                    compiled_names.append(name)
    return compiled_names
Example #10
def _do_MatPlot(data,meas_params):
    plt.ioff()
    plot, num_subplots = _plot_setup(data, meas_params, useQT=False)
    # pad a bit more to prevent overlap between
    # suptitle and title
    plot.rescale_axis()
    plot.fig.tight_layout(pad=3)

    if 'pdf_subfolder' in CURRENT_EXPERIMENT:
        title_list = plot.get_default_title().split(sep)
        title_list.insert(-1, CURRENT_EXPERIMENT['pdf_subfolder'])
        title = sep.join(title_list)
        plot.save("{}.pdf".format(title))

    if 'png_subfolder' in CURRENT_EXPERIMENT:
        title_list = plot.get_default_title().split(sep)
        title_list.insert(-1, CURRENT_EXPERIMENT['png_subfolder'])
        title = sep.join(title_list)
        plot.fig.savefig("{}.png".format(title),dpi=500)

    if (pdfdisplay['combined'] or
            (num_subplots == 1 and pdfdisplay['individual'])):
        plot.fig.canvas.draw()
        plt.show()
    else:
        plt.close(plot.fig)
    if num_subplots > 1:
        _save_individual_plots(data, meas_params,
                               pdfdisplay['individual'])
    plt.ion()
Example #11
                def downloadAction():
                    logs = logDict[__model__][__version__]
                    path = None
                    for log in logs:
                        willDown = False
                        if not productVersions or len(productVersions) == 0:
                            willDown = True
                        elif log.productVersion in productVersions:
                            willDown = True
                        print('willDown={}, productVersions={}'.format(
                            willDown, productVersions))
                        if willDown:
                            if GLOBAL_VALUES.packageNameDown:
                                path = sep.join([
                                    outPath, log.logType, log.packageName,
                                    log.jiraId, __version__
                                ])
                            else:
                                path = sep.join([
                                    outPath, log.logType, log.jiraId,
                                    __version__
                                ])
                            if callbackMsg:
                                callbackMsg('Downloading {}'.format(log.logId))
                                log.download(path)
                    if path and isdir(path) and len(listdir(path)) == 0:
                        rmtree(path)

                    workThread.LockUtil.acquire()
                    GLOBAL_VALUES.downOkCount = GLOBAL_VALUES.downOkCount + 1
                    workThread.LockUtil.release()
                    print('downOkCount={},downNumber={}'.format(
                        GLOBAL_VALUES.downOkCount, GLOBAL_VALUES.downNumber))
                    if async_ and GLOBAL_VALUES.downOkCount >= GLOBAL_VALUES.downNumber:
                        queue.put('{} download complete'.format(outPath.replace('\\', '/')))
Example #12
def getLastTwoPartsOfFilePath(absFilePath):
    from os.path import sep
    if absFilePath:
        pathSplitted = absFilePath.split(sep)
        return sep.join(pathSplitted[-2:]
                        ) if len(pathSplitted) > 1 else sep.join(pathSplitted)
    else:
        return ""
Example #13
def set_log_locations():
    """
    Sets location for qcodes to save log files based on
    the qcodes.config.user.log_location value. Within this folder
    creates python_logs file and (if in notebook) creates ipython_logs
    file, starts ipython log. Sets python_log_loc and ipython_log_loc
    in global_settings.files_setup dictionary to True.
    """
    warnings.simplefilter('error', UserWarning)

    sample_name = get_sample_name()
    try:
        log_location = abspath(
            qc.config.user.log_location.format(sample_name=sample_name))
        python_log_location = sep.join([log_location, 'python_logs', ""])
        ipython_log_location = sep.join([log_location, 'ipython_logs', ""])
    except KeyError:
        raise KeyError('log_location not set in config, see '
                       '"https://github.com/QCoDeS/Qcodes/blob/master'
                       '/docs/examples/Configuring_QCoDeS.ipynb"')
    if exp.files_setup['python_log_loc']:
        print('Python log already started at {}.'.format(python_log_location))
        print('-------------------------')
    else:
        if not os.path.exists(python_log_location):
            os.makedirs(python_log_location)
        python_logfile_name = "{}{}{}".format(
            python_log_location, strftime('%Y-%m-%d_%H-%M-%S', localtime()),
            '_pythonlogfile.log')
        logging.basicConfig(filename=python_logfile_name,
                            level=logging.INFO,
                            format='%(asctime)s %(levelname)s %(message)s',
                            datefmt='%Y-%m-%d_%H-%M-%S')
        exp.files_setup['python_log_loc'] = True
        print('Set up python log location: {}'.format(python_log_location))
        print('-------------------------')
    if exp.files_setup['ipython_log_loc']:
        print(
            'ipython log already started at {}.'.format(ipython_log_location))
        print('-------------------------')
    else:
        if in_ipynb():
            if not os.path.exists(ipython_log_location):
                os.makedirs(ipython_log_location)
            ipython_logfile_name = "{}{}{}".format(
                ipython_log_location, strftime('%Y-%m-%d_%H-%M-%S',
                                               localtime()),
                '_ipythonlogfile.txt')
            try:
                get_ipython().magic(
                    "logstart -t {} append".format(ipython_logfile_name))
                exp.files_setup['ipython_log_loc'] = True
                print('Set up ipython log location: {}'.format(
                    ipython_log_location))
                print('-------------------------')
            except Warning as w:
                print('Could not set up ipython log: ', w)
                print('-------------------------')
Example #14
 def relative_to(self, frame):
     assert isinstance(frame, Path)
     # Fast mode check for normalized path descendants.
     if len(self._path) >= len(frame._path) + 2 and \
             self._path.startswith(frame._path) and \
             self._path[len(frame._path)+1] == sep:
         return self._path[len(frame._path):]
     # Get the segment sequences from root to self and frame.
     self_family = iter(self.parents())
     frame_family = iter(frame.parents())
     # Find the common ancestor of self and frame.
     s = None
     f = None
     common = None
     while True:
         s = next(self_family, None)
         f = next(frame_family, None)
         if s is None and f is None:
             if common is None:
                 # common should have at least advanced to root!
                 raise ValueError(
                     "Failed to find common decendent of %s and %s" %
                     (self, frame))
             else:
                  # self and frame exhausted at the same time. Must be the same path.
                 return SELF_STR
         elif s is None:
             # frame is a descendant of self; self is an ancestor of frame.
             # We return one ".." per remaining segment of frame.
             # Self is "/a", frame is "/a/b/c", common is "/a", result is "../.."
             backtracks = len(list(frame_family)) + 1
             backtrack = [PARENT_STR] * backtracks
             backtrack = sep.join([PARENT_STR] * backtracks)
             # raise NotImplementedError("self %s frame %s common %s backtracks %s backtrack %s" % (
             #    self, frame, common, backtracks, backtrack))
             return backtrack
         elif f is None:
             # self is a descendant of frame; frame is an ancestor of self.
             # We can return the remaining segments of self.
             if common == ROOT:
                 return self._path[len(common._path):]
             else:
                 return self._path[len(common._path) + 1:]
         elif s == f:
             # the current self and frame segments are the same, so advance.
             common = s
             pass
         else:
             # we need to backtrack from frame to common.
             backtracks = len(list(frame_family)) + 1
             backtrack = [PARENT_STR] * backtracks
             backtrack = sep.join([PARENT_STR] * backtracks)
             if common == ROOT:
                 forward = self._path[len(common._path):]
             else:
                 forward = self._path[len(common._path) + 1:]
             # print("backtracks", backtracks, "backtrack", backtrack, "forward", forward, "common", common)
             return backtrack + sep + forward
Example #15
 def test_commands(self):
     call_command('run_resume_compilation')
     self.assertTrue(
         isfile(sep.join([settings.MEDIA_ROOT,
                          'TBP_resumes_by_major.zip'])))
     self.assertTrue(
         isfile(sep.join([settings.MEDIA_ROOT, 'TBP_resumes_by_year.zip'])))
     self.assertTrue(
         isfile(sep.join([settings.MEDIA_ROOT, 'TBP_electee_resumes.zip'])))
Example #16
 def returnDirectoryList(self, backup_dir, local_dir):
     main_folder = abspath(sep.join([backup_dir, local_dir]))
     backup_folders = {
         'main'    : main_folder
       , 'archives': abspath(sep.join([main_folder, 'monthly-archives']))  # Contain monthly archives
       , 'diff'    : abspath(sep.join([main_folder, 'rdiff-repository']))  # Contain current month diferential backup
       , 'mirror'  : abspath(sep.join([main_folder, 'mirror']))            # Contain a mirror of the remote folder
       }
     return backup_folders
Example #17
 def __init__(self):
     # Configure translation API keys
     credentials_file = sep.join([dirname(realpath(__file__)), pardir,
             pardir, pardir, 'config', 'Team-BIASES-3948f0cc1da3.json'])
     os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = credentials_file
     
     self.client = translate.Client()
     
     self.shelf = shelve.open(sep.join([dirname(realpath(__file__)), pardir, pardir, pardir,'/data/cache/translate.google.shelf']))
Example #18
def save_raw_data():
    data_dir = '.', 'stat_data'
    data_dir = sep.join(data_dir)
    for k in range(4, 10):
        game = str(k)
        game_dir = sep.join([data_dir, 'fe' + game])
        if not exists(game_dir):
            mkdir(game_dir)
        save_stats(game)
Example #19
 def __init__(self, path):
     tmp = path.split("/")
     self.urban = sep.join(tmp)
     self.path = sep.join(tmp[0:len(tmp) - 1])
     self.pathQ3UT4 = self.path + sep + "q3ut4"
     self.pathDemos = self.pathQ3UT4 + sep + "demos"
     self.demosList = list()
     self.format = ".dm_68"
     self.initDemosList()
Example #20
 def test_update_zips(self):
     current_dir =getcwd()
     update_resume_zips()
     self.assertEqual(getcwd(),current_dir)
     self.assertTrue(isfile(sep.join([settings.MEDIA_ROOT, 'TBP_resumes_by_major.zip'])))
     self.assertTrue(isfile(sep.join([settings.MEDIA_ROOT, 'TBP_resumes_by_year.zip'])))
     self.assertTrue(zipfile.is_zipfile(sep.join([settings.MEDIA_ROOT, 'TBP_resumes_by_major.zip'])))
     self.assertTrue(zipfile.is_zipfile(sep.join([settings.MEDIA_ROOT, 'TBP_resumes_by_year.zip'])))
     self.assertTrue(exists(RESUMES_BY_MAJOR_LOCATION()))
     self.assertTrue(exists(RESUMES_BY_YEAR_LOCATION()))
Example #21
def validate_classfile(class_info):
	if len(class_info)==3:
		file_path = sep.join([item for item in class_info[2].split('_') if len(item)>0])  + '.php'
		file_path = sep.join([package_dir,module_dir,file_path])
		if exists(file_path):
			return True
		else:
			return ['%s does not exist' % file_path]
	elif len(class_info)==2:
		return ['%s/%s is not a valid file' % class_info]
Example #22
 def __init__(self):
     self.stat_dir=sep.join(['.','stat_data','fe5'])
     filePointer=lambda x: sep.join([self.stat_dir,x+'.csv'])
     self.promo_file=filePointer('classes_promotion-gains')
     self.bases_file=filePointer('characters_base-stats')
     self.dismount_file=sep.join(['.','metadata','fe5-dismount.csv'])
     self.file_dict={
         'promo':self.promo_file,\
         'bases':self.bases_file,\
         'dismount':self.dismount_file
         }
Example #23
    def __init__(self, work_root):
        super().__init__()
        # common variable definition
        self.path_root = work_root

        self.path_model = sep.join([self.path_root, "Steps"])
        self.path_workspace = sep.join([self.path_root, "build"])
        self.path_pics = sep.join([self.path_workspace, "pics"])
        self.path_out = sep.join([self.path_workspace, "out"])

        self.prepare_workspace()
Example #24
 def _read_dir(self, path, extensions, basedir=None):
     if basedir is None:
         basedir = path
     items = []
     for d in filter(lambda f: isdir(sep.join([path, f])), listdir(path)):
         items = self._read_dir(sep.join([path, d]), extensions, basedir) + items
     for file in filter(lambda f: not isdir(sep.join([path, f])), listdir(path)):
         name, ext = splitext(file)
         if ext.lower() in extensions:
             location = sep.join([path, file]).replace(basedir + sep, '')
             items.append(location)
     return items
Example #25
def test_more_args(fake_path, case):
    with tempdir() as chroot:
        create_files(["a/b/c/__init__.py", "a/d/__init__.py", "a/e/f.py"])
        expected = [
            join(chroot, suffix)
            for suffix in (sep.join(("a", "b")), "a", sep.join(("a", "e")))
        ] + fake_path

        assert sys.path == fake_path
        with lint.fix_import_path(case):
            assert sys.path == expected
        assert sys.path == fake_path
Example #26
def test_more_args(fake_path, case):
    with tempdir() as chroot:
        create_files(['a/b/c/__init__.py', 'a/d/__init__.py', 'a/e/f.py'])
        expected = [
            join(chroot, suffix)
            for suffix in [sep.join(('a', 'b')), 'a', sep.join(('a', 'e'))]
        ] + ["."] + fake_path

        assert sys.path == fake_path
        with lint.fix_import_path(case):
            assert sys.path == expected
        assert sys.path == fake_path
Example #27
 def downCallback():
     time.sleep(1)
     bar.quit()
     start_file = ''
     if value == 0 :
         start_file = sep.join([foldPath, basename(file_path).replace('.zip','.txt') if '.zip' in basename(file_path) else 'reason.txt'])
     elif value == 1 :
         start_file = sep.join([file_path, '{}.txt'.format(basename(file_path))])
     elif value == 2 :
         start_file = file_path
     if exists(start_file):
         startfile(start_file)
Example #28
    def run(self,  simulate = True):
        
        pk = TestConstants

        if simulate:
            # run simulation before loading results
            cfg_ref = TestConfig()

            ds_test = TestDataSet(cfg=cfg_ref)

            g = ParamGroup("zigzag_HT_High_RE")
            g.add_test_param("Re_des = 20000", Re_des=20000)
            # ds_test.add_para_group(g)

            g = ParamGroup("zigzag_HT_diff_Re")
            g.add_test_param("Re_des=5000", Re_des=5000)
            g.add_test_param("Re_des=14500(design)", Re_des=14500)
            g.add_test_param("Re_des=20000", Re_des=20000)
            ds_test.add_para_group(g)

            # g = ds_test.new_para_group("zigzag_HT_diff_mdot", [r"$\dot{m}_{off}$ = 10"])
            g = ParamGroup("zigzag_HT_mdot_50")
            g.add_test_param("MDot_50", mdot_hot_odes=MDot(50), mdot_cold_odes=MDot(50))
            # ds_test.add_para_group(g)    

            g = ParamGroup("zigzag_HT_diff_mdot")
            g.add_test_param("MDot_50(-50%)", title=r"$\dot{m}_{off}$=50(-50%)", mdot_hot_odes=MDot(50), mdot_cold_odes=MDot(50))
            g.add_test_param("MDot_100(100%)", title=r"$\dot{m}_{off}$=100(100%)", mdot_hot_odes=MDot(100), mdot_cold_odes=MDot(100))
            g.add_test_param("MDot_150(+50%)", title=r"$\dot{m}_{off}$=150(+50%)", mdot_hot_odes=MDot(150), mdot_cold_odes=MDot(150))
            ds_test.add_para_group(g)

            g = ParamGroup("zigzag_HT_diff_pT_in")
            g.add_test_param("pT=10_450", title=r"$(p,T)_{hi}$ = 10 MPa, 450°", p_hot_in=Pressure.MPa(10), T_hot_in=Temperature.degC(730))
            g.add_test_param("pT=12_300", title=r"$(p,T)_{hi}$ = 12 MPa, 300°", p_hot_in=Pressure.MPa(12), T_hot_in=Temperature.degC(300))
            # ds_test.add_para_group(g)

            self.simulate(ds_test)  
            print('Test data saved, ready to plot figures')

        # loading the newest result
        test_name = self.find_latest_test(self.path_out)                 
        # load the simulation result from latest, pre-saved file
        ds_test = self.load_result(test_name)

        zigzag = 1
        imgfile = ["Meshram_Fig_04.png", "Meshram_Fig_05.png"][zigzag]    
        imgfile = sep.join([self.path_pics, imgfile])

        for test in ds_test:
            destfile = sep.join([self.path_out, ds_test.name, "Meshram_Fig{0}b_compare_{1}.png".format(4 + zigzag, test.name)])
            self.gen_plot_manager(test).draw(img_file=imgfile, dest_file=destfile)

        print('all done!') 
Example #29
def test_more_args(fake_path, case):
    with tempdir() as chroot:
        create_files(['a/b/c/__init__.py', 'a/d/__init__.py', 'a/e/f.py'])
        expected = [
            join(chroot, suffix)
            for suffix in [sep.join(('a', 'b')), 'a', sep.join(('a', 'e'))]
        ] + ["."] + fake_path

        assert sys.path == fake_path
        with lint.fix_import_path(case):
            assert sys.path == expected
        assert sys.path == fake_path
Example #30
    def __init__(self, work_root):
        super().__init__()
        # common variable definition
        self.path_root = work_root
        from pathlib import Path
        parent_dir = Path(self.path_root).parent
        self.path_model = sep.join([parent_dir.__str__(), "Modelica", "Steps"])
        self.path_workspace = sep.join([self.path_root, "build"])
        self.path_pics = sep.join([self.path_workspace, "pics"])
        self.path_out = sep.join([self.path_workspace, "out"])

        self.prepare_workspace()        
Example #31
def rows_and_headers_for(game):
    rows={}
    raw_data_path=('.','stat_data','fe'+game)
    raw_data_path=sep.join(raw_data_path)
    headers=[]
    for root,folders,files in walk(raw_data_path):
        if root != raw_data_path:
            continue
        for file in files:
            table=(raw_data_path,file)
            table=sep.join(table)
            collect_table(table,headers,rows,files)
    return rows,headers
Example #32
 def __init__(self, game, folder=None):
     if folder is not None:
         assert type(folder) in (tuple, str)
     self.game = game
     self.stat_dict = read_stat_names(game)
     if folder is None:
         self.folder = '.'
     elif type(folder) == tuple:
         self.folder = sep.join(folder)
     elif folder == 'metadata':
         self.folder = folder
     else:
         self.folder = sep.join(['.', folder, 'fe%s' % game])
Example #33
    def setUp(self):

        from pinfer.io import load_notung_nhx

        self.gTree = load_notung_nhx(sep.join(tests_folder + ['data', 'tree.newick']))

        gTree = load_notung_nhx(sep.join(tests_folder + ['data', 'tree.newick']))

        from pinfer.itree.label import label_birth_death

        label_birth_death(gTree)

        self.gTree_processed = gTree
Example #34
    def prepare_workspace(self):
        '''
        prepare the workspace for simulation; the directory structure is
        - Steps (current work directory) -> path_root
        |--.vscode               - directory for vscode's configurations
        |--docs                  - documents
        |--lib                   - compiled libraries
        |--build                 - -> path_workspace, workspace for modelica simulation;
        |   |                        all relative paths in this program are relative to this point
        |   |--out               - final output of the simulation
        |   |  |--Test_Batch 1   - simulation output data for test 1
        |   |  ...               - simulation output data for test ...
        |   |--pics              - temp storage dir for referenced pics
        |--src
        |   |--c                 - c++ sources
        |   |--Modelica          - Modelica models
        |   |   |--Steps         - directory for the Modelica model code
        |   |--scripts           - shell scripts for compiling and tasks
        |   |--Python            - Python scripts for parameter sweeps
        |--.env                  - configuration file for vscode python extension
        |--.gitignore            - git ignore file
        '''

        import os
        import shutil      

        # setup working directory
        if not os.path.exists(self.path_workspace):    
            os.mkdir(self.path_workspace)

        os.chdir(self.path_workspace)

        if os.path.exists(self.path_pics):
            shutil.rmtree(self.path_pics)     

        # copy referenced pics for output figure 
        shutil.copytree(sep.join([self.path_root, 'pics']), self.path_pics)

        # directory for output        
        if not os.path.exists(self.path_out):
            os.mkdir(self.path_out)        

        # copy cool prop lib
        libs = ["libCoolProp.a", 'libCoolProp.dll', 'MyProps.dll']
        for lib in libs: 
            lib_path = sep.join([self.path_model, "Resources", "Library", lib])           
            if not os.path.exists(lib_path):
                try:
                    shutil.copyfile(lib_path, sep.join([".", lib]))  # complete target file name needed
                except:
                    pass
Example #35
def load_unit_info(game,unit,lyn_mode=False,father='Arden'):
    data_dir='.','stat_data','fe'+game
    data_dir=sep.join(data_dir)
    unit_info={}

    unit_info['Game']=game
    unit_info['Name']=unit

    if unit in fe4_child_list():
        unit_info['Father']=father

    file_substr='characters_base-stats'

    if game == '7':
        if lyn_mode:
            suffix='1'
        else:
            suffix='2'
        if unit == 'Nils':
            unit='Ninian'
        file_substr+=suffix
        lyndis_league=character_list(
            game,\
            file_match='characters_base-stats1.csv'
            )
        if unit in lyndis_league:
            unit_info['Lyn Mode']=lyn_mode
    if game == '4':
        if unit in fe4_child_list(get_father=True):
            file_substr+='1'
    for root,folders,files in walk(data_dir):
        if root != data_dir:
            continue
        for file in files:
            if file_substr not in file:
                continue
            data_file=data_dir,file
            data_file=sep.join(data_file)
            data=pd.read_csv(data_file,index_col=0)
            if unit in data.index:
                if 'Lv' in data.columns:
                    col='Lv'
                elif 'Level' in data.columns:
                    col='Level'
                unit_info['Class']=data.at[unit,'Class']
                level=data.at[unit,col]
                if type(level) != int:
                    unit_info['Level']=int(level)
                break
    return unit_info
Example #36
    def test_root_subdir_access(self):
        root = FSRoot(self.base.name)
        target = ['safe', 'readme.txt']

        # joined path
        self.assertEqual(b'safe', root.read(sep.join(target)))
        self.assertEqual('safe', root.text(sep.join(target)))

        # raw list access
        self.assertEqual(b'safe', root.read(target))
        self.assertEqual('safe', root.text(target))

        with self.assertRaises(TypeError):
            root.read(object)
Example #37
def copy_file(original, target_directory):
    logger = logging.getLogger('root')
    assert (isinstance(original, FilePath))
    assert (os.path.exists(target_directory))
    splited_dirs = collections.deque(original.directory.split(sep))
    splited_dirs.popleft()
    copy_directory = sep.join([target_directory, sep.join(splited_dirs)])
    target_file = FilePath(copy_directory, original.filename)
    try:
        os.makedirs(target_file.directory)
        shutil.copy2(original.as_path(), target_file.as_path())
        logger.info('%s -> %s', original.as_path(), target_file.as_path())
    except FileExistsError as err:
        logger.warning('File exists: %s', err.filename)
    return target_file
Example #38
 def refresh(self, contents=None):
     if contents == None:
         contents = self._read_dir(sep.join([self._path, 'contents']), ['.json', '.yml', '.yaml'])
     self._contents = contents
     self._rules = self._generate_rules()
     self._remove_rules()
     self._add_rules()
Example #39
    def check_context(self):
        self.split_path = (getcwd() + sep).replace(self.root_dir, '').split(sep)[:-1]

        context = {
            0: self.root,
            1: self.subject,
            2: self.period,
            3: self.assignment,
            4: self.assignmentgroup,
            5: self.deadline,
            6: self.delivery,
            }.get(len(self.split_path), self.delivery)

        # if we go any deeper than to the delivery folder, show info
        # about the delivery, instead of failing and show nothing
        print (getcwd() + sep).replace(self.root_dir, '')
        if context == self.delivery:
            self.split_path = self.split_path[0:6]
        print self.split_path

        if len(self.split_path) != 0:
            self.subtree = self.metadata[sep.join(self.split_path)]

        context()
        print "#"
def test_more_args(fake_path, case):
    with tempdir() as chroot:
        create_files(["a/b/c/__init__.py", "a/d/__init__.py", "a/e/f.py"])
        expected = (
            [
                join(chroot, suffix)
                for suffix in [sep.join(("a", "b")), "a", sep.join(("a", "e"))]
            ]
            + ["."]
            + fake_path
        )

        assert sys.path == fake_path
        with lint.fix_import_path(case):
            assert sys.path == expected
        assert sys.path == fake_path
Example #41
 def tearDown(self):
     shutil.rmtree(RESUMES_BY_MAJOR_LOCATION(),ignore_errors=True)
     shutil.rmtree(RESUMES_BY_YEAR_LOCATION(),ignore_errors=True)
     try:
         remove(sep.join([settings.MEDIA_ROOT, 'TBP_resumes_by_year.zip']))
     except OSError:
         pass
     try:
         remove(sep.join([settings.MEDIA_ROOT, 'TBP_resumes_by_major.zip']))
     except OSError:
         pass
     try:
         remove(sep.join([settings.MEDIA_ROOT, 'TBP_electee_resumes.zip']))
     except OSError:
         pass
     super(CorporateAuxiliaryTestCase,self).tearDown()
Example #42
def save_csv(filename, data, keys):
    # Flatten the data into lists and dicts (only one sub-level)
    flat = []
    for res in data:
        for k in res.keys():
            if isinstance(res[k], dict):
                d = res.pop(k)
                for k2, v in d.items():
                    k3 = "%s - %s" % (k, k2)
                    if k3 not in keys:
                        keys.append(k3)
                    if isinstance(v, list):
                        res[k3] = " - ".join([unicode(va) for va in v])
                    else:
                        res[k3] = v
            elif k not in keys:
                keys.append(k)
            if k in res and isinstance(res[k], list):
                res[k] = " - ".join([unicode(va) for va in res[k]])
        flat.append(res)
    dictk = {}
    for k in keys:
        dictk[k] = k
    flat.insert(0, dictk)

    with open(sep.join(['data', '%s.csv' % filename]), 'w') as f:
        for res in flat:
            f.write(",".join(["" if k not in res else unicode(res[k]).encode('utf-8') if re_n.match(unicode(res[k])) else "\"%s\"" % res[k].encode('utf-8').replace('"', '""') for k in keys]) + "\n")
Example #43
 def cd(self, p):
     d = self.dir
     directory = d.split(sep)[:-1]
     if not p.dir or p.dir[0] == sep:
         self.reader.cd(self.reader.root)
         directory = [""]
         if not p.dir:
             self.dir = sep
             return
     folders = p.dir.split(sep)
     for folder in folders:
         try:
             if folder not in [".", ""]:
                 if folder in "..":
                     directory.pop()
                 else:
                     directory.append(folder)
                 folder = self._find_file(folder)
                 self.reader.cd(folder)
         except Exception as e:
             self.dir = d
             self.cd(self.parsers["cd"].parse_args([d]))
             raise
     directory.append("")
     self.dir = sep.join(directory)
Example #44
def hdfs_to_local(hdfs_path, columns):
    '''
    Download an HDFS directory to the local filesystem.
    :param hdfs_path:  HDFS file path
    :param columns:  column header information
    :return: local file path
    '''
    file_name = os.path.basename(hdfs_path)
    file_path = sep.join([local_base_path, file_name])
    local_file = "{}.csv".format(file_path)

    if not os.path.exists(local_base_path):
        os.makedirs(local_base_path)

    if os.path.exists(file_path):
        shutil.rmtree(file_path)

    if os.path.exists(local_file):
        os.remove(local_file)

    client.download(hdfs_path, file_path)
    # merge the downloaded part files into a single CSV
    os.system("cat {0}/* >> {1}".format(file_path, local_file))

    # prepend the column header
    if columns:
        os.system("sed -i '1 i {0}' {1}".format(columns, local_file))

    return local_file
Example #45
def parse_load_file(file_name='test', entry_type='post'):
    """
        parse_load_file(file_name='test', page_type='post',\
                       template_name='page_content.html')

        :param file_name: name of the file to be rendered
        :param entry_type: post or page.
                          post refers to blog post
                          page refers to static page
    """
    page_content = {}
    try:
        """
        >>> "test".split('html')[0]
            'test'
        >>> "test-html.html".split('.html')[0]
            'test-html'
        """
        #complete path for yaml file is built
        full_file_name = sep.join([build_contents_path(entry_type),\
                                   file_name.split('.html')[0]]) +'.yaml'
        with open(full_file_name, 'r') as f:
        #yaml file contents are loaded
            page_content = load(f)
        page_content['content'] = parse_content(page_content['content'])
        return page_content

    except IOError:
        flash({'message': 'Entry Content Not Found', 'type': 'error'})
    except Exception, e:
        flash({'message': e, 'type': 'error'})
        return page_content
Example #46
    def matching_files(self):
        """
        Find files.

        Returns:
            list: the list of matching files.
        """
        matching = []
        matcher = self.file_path_regex
        pieces = self.file_path_regex.pattern.split(sep)
        partial_matchers = list(map(re.compile, (
            sep.join(pieces[:i + 1]) for i in range(len(pieces)))))

        for root, dirs, files in walk(self.top_dir, topdown=True):
            for i in reversed(range(len(dirs))):
                dirname = relpath(join(root, dirs[i]), self.top_dir)
                dirlevel = dirname.count(sep)
                if not partial_matchers[dirlevel].match(dirname):
                    del dirs[i]

            for filename in files:
                if matcher.match(filename):
                    matching.append(abspath(join(root, filename)))

        return matching
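The pruning in matching_files above relies on one compiled regex per directory depth; a self-contained illustration of that idea follows (the pattern pieces and sample paths are made up, and re.escape(sep) is used so the sketch also behaves on Windows, where the separator is a regex metacharacter):

import re
from os.path import sep

pieces = ['data', r'run_\d+', 'logs']  # hypothetical per-level patterns
partial_matchers = [re.compile(re.escape(sep).join(pieces[:i + 1]))
                    for i in range(len(pieces))]

for dirname in (sep.join(['data']), sep.join(['data', 'run_3']), sep.join(['tmp', 'x'])):
    level = dirname.count(sep)
    keep = bool(partial_matchers[level].match(dirname))
    print(dirname, '->', 'descend' if keep else 'prune')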
Example #47
def uncache_model(bug_name):
    """Searches .pyabolism file for matching bug name
        if available will load pickle file
        otherwise, will load from SBML and store pickle for future use

        NB Pyabolism bugs are intended to speed up loading models, not for storage
            of edits made. These should be written back to external SBML files for
            safekeeping.

            Bugs will not in general survive upgrades to Pyabolism code base!
            Use with care!
        """
    raise Exception("Apologies, in it's current state this functionality is best avoided!")

    from .tools import find_config_folder
    config_folder = find_config_folder()

    if not config_folder:
        raise Exception("Unable to load bug, can't find a config folder!")

    import pickle
    from os.path import sep

    try:
        return pickle.load(open(sep.join([config_folder, 'bugs', '%s.pickle' % bug_name]), 'r'))
    except IOError:
        raise IOError('Sorry, unable to find a bug of that name...')
Example #48
 def _set_extract_path(self):
     from os.path import sep
     self.buildout_directory = self.buildout.get('buildout').get(
         'directory')
     DEFAULT_EXTRACT_PATH = sep.join([self.buildout_directory, 'parts'])
     self.extract_path = self.options.get('extract_path',
                                          DEFAULT_EXTRACT_PATH)
Example #49
 def __init__(self, root, path):
     splitedPath = string.split(path,"/")
     self.path = sep.join(splitedPath)
     self.root = root
     self.size = size_conversion(getsize(os.path.join(root, path)))
     self.date = time.ctime(getmtime(os.path.join(root, path)))
     self._basename = basename(self.path)
Example #50
def call_modules(auto_discover=()):
    """
    this is called in project urls.py
    for registering desired modules (e.g. admin.py)
    """
    log = logging.getLogger('ella.utils.installedapps.call_modules')

    for app in settings.INSTALLED_APPS:
        modules = set(auto_discover)
        if app in INSTALLED_APPS_REGISTER:
            modules.update(INSTALLED_APPS_REGISTER[app])
        for module in modules:
            try:
                imp = '%s.%s' % (app, module)
                mod = __import__(imp, {}, {}, [''])
                inst = getattr(mod, '__install__', lambda:None)
                inst()
            except ImportError, e:
                msg = 'problem during discovering %s - %s\n%s' % (imp, e, traceback.format_exc())
                # check whether the problem is inside the autodiscovered file (i.e. a misspelled module name) OR the autodiscovered file does not exist
                mod = __import__(app, {}, {}, [''])
                app_path = mod.__file__.split(path_separator)[:-1]
                app_path.append('%s.py' % module)
                mod_path = path_separator.join(app_path)
                if not exists(mod_path):
                    # autodiscovered file does not exist
                    log.debug(msg)
                else:
                    # ImportError inside autodiscovered file
                    log.error(msg)
Example #51
def read_file():
    ALL_PROBLEMS = [] # Store all the SAT problems in format Problems->[Problem->[Clause->[variables]]]
    ACTUAL_DIRECTORY = getcwd() # Get the current directory path (../SAT/Reductor)
    PARENT_DIRECTORY = sep.join(ACTUAL_DIRECTORY.split(sep)[1:-1]) # Get the parent directory (../SAT)
    PARENT_DIRECTORY = join(sep, PARENT_DIRECTORY) # Append the OS separator so the folder can be accessed
    SAT_instances_directory = join(PARENT_DIRECTORY, "InstanciasSAT") # Joins the parent directory with InstanciasSAT to get into (../SAT/instanciasSAT)
    _, _, SAT_instances = next(walk(SAT_instances_directory))
    for SAT_instance in SAT_instances:
        SAT_file = open(join(SAT_instances_directory, SAT_instance), 'r')
        problem = []
        for line in SAT_file:
            if line[0] == "c":
                continue
            if line[0] == "p":
                # a "p" header starts a new problem; register it right away so the
                # clause lines that follow land in the list already stored above
                problem = []
                ALL_PROBLEMS.append(problem)
                problem.append(line[:-1].split(" "))
                continue
            if problem:
                problem.append(line[:-3].split(" "))
    return ALL_PROBLEMS
Example #52
 def _generate_rules(self):
     rules = []
     for c in self._contents:
         name, ext = splitext(c)
         url = self._prefix + c.replace(ext, '').replace('_index', '')
         rule_name = self._name + '_' + c.replace(sep, '_').replace(ext, '').replace('_index', 'index')
         rules.append((url, rule_name, sep.join([self._path, 'contents', c])))
     return rules
Example #53
 def encode(self, message, cover_file, data_file, new_file, password):
     self.resources.update_status_box(message)
     output_dir = sep.join([expanduser('~'), "Downloads"])
     try:
         image_file = create_stegano_image(original_image=cover_file, data_to_hide=data_file, cipher_key=password)
         image_file.save("{0}{1}{2}.png".format(output_dir, sep, new_file.replace(".png", "")))
         self.resources.update_status_box('{0}.png successfully saved in Downloads folder'.format(new_file.replace(".png", "")))
     except Exception as e:
         self.resources.update_status_box(e)
Example #54
    def test_more_args(self):
        with tempdir() as chroot:
            create_files(["a/b/c/__init__.py", "a/d/__init__.py", "a/e/f.py"])
            expected = [
                join(chroot, suffix) for suffix in [sep.join(("a", "b")), "a", sep.join(("a", "e"))]
            ] + self.fake

            cases = (
                ["a/b/c/__init__.py", "a/d/__init__.py", "a/e/f.py"],
                ["a/b/c", "a", "a/e"],
                ["a/b/c", "a", "a/b/c", "a/e", "a"],
            )

            self.assertEqual(sys.path, self.fake)
            for case in cases:
                with lint.fix_import_path(case):
                    self.assertEqual(sys.path, expected)
                self.assertEqual(sys.path, self.fake)
Example #55
 def extract(self, message, image_with_data, new_file, password):
     self.resources.update_status_box(message)
     output_dir = sep.join([expanduser('~'), "Downloads"])
     try:
         extracted_content = extract_data_from_stegano_image(image=image_with_data, cipher_key=password)
         self.write_extracted_content_to_file(extracted_content, "{0}{1}{2}".format(output_dir, sep, new_file))
         self.resources.update_status_box('Extraction successful {0} saved in Downloads folder'.format(new_file))
     except Exception as e:
         self.resources.update_status_box(e)
Example #56
 def walk(node, parents):
     for key, node2 in node.items():
         if len(node2) > 0:
             config_fn = sep.join([self.src] + parents + [key, "config.yaml"])
             if exists(config_fn):
                 node2.update(get_yaml(config_fn))
         p = parents[:]
         p.append(key)
         walk(node2, p)
Example #57
    def _worker_run_once(self, conn, c_in, q_in, q_out):
        '''Internal. Load one image, process, and push.
        '''
        # get one tile to process
        try:
            nx, ny, zoom, maptype, format = q_in.pop()
        except:
            c_in.acquire()
            c_in.wait()
            c_in.release()
            return

        # check if the tile has already been downloaded
        filename = self.to_filename(nx, ny, zoom, maptype, format)
        if not exists(filename):

            # calculate the good tile index
            tz = pow(2, zoom)
            lx, ly = unproject(2.0 * (nx + 0.5) / tz - 1, 1 - 2.0 * (ny + 0.5) / tz)
            lx, ly = map(fix180, (lx, ly))

            # get url for this specific tile
            url = self.geturl(
                nx=nx, ny=ny,
                lx=lx, ly=ly,
                tilew=256, tileh=256,
                zoom=zoom,
                format=format,
                maptype=maptype
            )

            # load url content
            try:
                conn.request('GET', url)
                res = conn.getresponse()
                data = res.read()
                if res.status < 200 or res.status >= 300:
                    raise Exception('Invalid HTTP Code %d:%s' % (
                        res.status, res.reason))
            except Exception, e:
                pymt_logger.error('TileServer: %s: %s' % (str(e), filename))
                pymt_logger.error('TileServer: URL=%s' % url)
                return

            # write data on disk
            try:
                directory = sep.join(filename.split(sep)[:-1])
                if not exists(directory):
                    mkdir(directory)
                with open(filename, 'wb') as fd:
                    fd.write(data)
            except:
                pymt_logger.exception('Tileserver: Unable to write %s' % filename)
                return

            # post processing
            self.post_download(filename)
Example #58
    def check_context(self):

        split_path = getcwd().replace(self.root_dir, '').split(sep)

        # 7 is the depth for deliveries:
        # <root_dir>/subject/period/assignment/assignmentgroup/deadline/delivery/
        # There should be a 'feedback.rst' at this depth
        if len(split_path) < 6:
            print "This directory is not a delivery directory.", "'" + sep.join(split_path) + "'"
            return

        feedback = join(self.root_dir, sep.join(split_path[0:6]), 'feedback')
        print feedback
        if exists(feedback):
            self.set_and_save(feedback)
            print "Added"
        else:
            print "feedback not found in:", dirname(feedback)
Example #59
	def __call__(self, oldPath):
		head, tail = splitpath(oldPath)
		headParts = head.split(sep)
		if not headParts:
			raise ValueError(
				'Directory part is empty for entry "%s"' % oldPath
				)
		headParts[0] = self.newName
		return sep.join(headParts + [ tail ])
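Stripped of the class context, the renaming in Example #59 boils down to replacing the first directory component; a standalone sketch with hypothetical values:

from os.path import sep, split as splitpath

new_name = 'renamed-project'                            # hypothetical replacement
old_path = sep.join(['old-project', 'src', 'main.py'])  # hypothetical archive entry
head, tail = splitpath(old_path)
head_parts = head.split(sep)
head_parts[0] = new_name
print(sep.join(head_parts + [tail]))  # -> 'renamed-project/src/main.py' on POSIX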
Example #60
 def tearDown(self):
     del(self.client)
     CorporateResourceGuide.objects.all().delete()
     shutil.rmtree(RESUMES_BY_MAJOR_LOCATION(),ignore_errors=True)
     shutil.rmtree(RESUMES_BY_YEAR_LOCATION(),ignore_errors=True)
     try:
         remove(sep.join([settings.MEDIA_ROOT, 'TBP_resumes_by_year.zip']))
     except OSError:
         pass
     try:
         remove(sep.join([settings.MEDIA_ROOT, 'TBP_resumes_by_major.zip']))
     except OSError:
         pass
     try:
         remove(sep.join([settings.MEDIA_ROOT, 'TBP_electee_resumes.zip']))
     except OSError:
         pass
     super(CorporateViewsTestCase,self).tearDown()
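The three try/except OSError blocks above can be expressed more compactly with contextlib.suppress; a sketch under the assumption of a plain string MEDIA_ROOT (the path below is made up):

from contextlib import suppress
from os import remove
from os.path import sep

MEDIA_ROOT = '/tmp/media'  # hypothetical stand-in for settings.MEDIA_ROOT
for name in ('TBP_resumes_by_year.zip',
             'TBP_resumes_by_major.zip',
             'TBP_electee_resumes.zip'):
    with suppress(OSError):
        remove(sep.join([MEDIA_ROOT, name]))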