Example #1
    def ensure_metadata_preparation(self):
        """
        prepare the NERDm metadata.  

        This uses the MIDASMetadataBagger class to convert the MIDAS POD data 
        into NERDm and to extract metadata from the uploaded files.  
        """

        # start by bagging up the metadata.  If this was done before (prior to
        # final preservation time), the previous metadata bag will be updated.
        mdbagger = MIDASMetadataBagger(self.name,
                                       self.mddir,
                                       self.reviewdir,
                                       config=self.cfg,
                                       minter=self.minter)
        mdbagger.prepare(nodata=True)
        self.datafiles = mdbagger.datafiles

        # copy the contents of the metadata bag into the final preservation bag
        if os.path.exists(self.bagdir):
            # note: caller should be responsible for locking the preservation
            # of the SIP and cleaning up afterward.  Thus, this case should
            # not really occur
            log.warn("Removing previous version of preservation bag, %s",
                     self.bagbldr.bagname)
            if os.path.isdir(self.bagdir):
                shutil.rmtree(self.bagdir)
            else:
                os.remove(self.bagdir)  # shutil has no remove(); os.remove handles the plain-file case
        shutil.copytree(mdbagger.bagdir, self.bagdir)

        # by ensuring the output preservation bag directory, we set up logging
        self.bagbldr.ensure_bagdir()
        self.bagbldr.log.info("Preparing final bag for preservation as %s",
                              os.path.basename(self.bagdir))
Example #2
 def update(self, local, current, os):
     if local is None:  # no local install
         location = input("Where would you like your TBB install located?\n"
                 + "Simplest method for this is to drag and drop a folder\n-> ").rstrip()
         if location.startswith(("'", '"')) and location.endswith(("'", '"')):
             location = location[1:-1]  # cut the surrounding quotes off
         if not location.endswith('/'):
             location += '/'
         print("Moving current version to \"" + location + "\"")
         if '/'.join(current.split('/')[:-1]) + '/' != location:
             #if it doesn't already happen to be in place
             if os == "mac":
                 copytree(current, location + "TorBrowser.app/")#move it
             else:
                 move(current, location)#move it
         path = current.split('/')
         return location + path[-1]
     else:
         print("Deleting local install found @ \"" + local + "\"")
         remove(local)
         print("Moving current version to same location as local install was located")
         if os == 'mac':
             copytree(current, local)
         else:
             move(current, local)#move current to location of where local was
         return local
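An aside on the quote-trimming in Example #2: str.strip with a character set removes any mix of surrounding quotes in one call, which avoids the startswith/endswith bookkeeping. A one-line sketch with an illustrative path:

location = '"/Users/me/Apps/"'.strip('\'"')  # -> /Users/me/Apps/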
Example #3
def reset_directory(directory):
    # Loop over directory
    no_original_but_prj_updated = []
    original_copied_to_prj = []
    kept_prj_deleted_original = []
    no_original_did_nothing = []

    for dirname, dirnames, filenames in os.walk(directory):

        for fn1 in filenames:
            if '.original.txt' in fn1:
                prj_file = str.replace(fn1, 'original.txt', 'project.txt')
                if os.path.exists(os.path.join(dirname, prj_file)):
                    if os.path.getmtime(os.path.join(
                            dirname, fn1)) < os.path.getmtime(
                                os.path.join(dirname, prj_file)):
                        UPDATED = False
                        with open(os.path.join(dirname, prj_file), 'r') as fid:
                            for line in fid:
                                if 'Gravity value adjusted by' in line:
                                    UPDATED = True
                        if UPDATED:
                            shutil.move(os.path.join(dirname, fn1),
                                        os.path.join(dirname, prj_file))
                            original_copied_to_prj.append(prj_file)
                        else:
                            os.remove(os.path.join(dirname, fn1))  # drop the stale original
                            kept_prj_deleted_original.append(prj_file)
                    else:
                        # original is not older than the project file; leave both in place
                        pass
Example #4
def copyFromDir(path, saveLoc=None, start=0, delete_file=False):
    # path strings must end with a trailing '/'
    if path[-1:] != '/': raise ValueError('path must end with "/"')
    if saveLoc is not None:
        if saveLoc[-1:] != '/': raise ValueError('saveLoc must end with "/"')

    sequencal_fileName = start

    # copy the images under this path somewhere else
    for imageName in os.listdir(path):
        if imageName == '.DS_Store': continue

        # if the entry is a file
        if os.path.isfile(path + imageName):
            sequencal = __copyDirImage(path, imageName, saveLoc,
                                       sequencal_fileName)
            sequencal_fileName = sequencal
            if delete_file: os.remove(path + imageName)  # delete the file (optional)

        # if the entry is a directory
        elif os.path.isdir(path + imageName):
            imageDirectory, sequencal = __copyInnerDirImage(
                path, imageName, saveLoc, sequencal_fileName)
            sequencal_fileName = sequencal
            if delete_file: shutil.rmtree(imageDirectory)
    print('done!')
Example #5
def urlToImage(url, filename):
    downloadFile(url, filename)
    extractImage(filename)

    for file in os.listdir():
        if file.endswith("mwa"):
            os.remove(file)
Example #6
   def _mv_input(self):
       if len(self._sys_args) != 4:
          return "Error.. invalid syntax!\preserve.py cp <source> <target>"

       _source=self._sys_args[2]
       _target=self._sys_args[3]

       if _source.startswith("grid:/") and _target.startswith("grid:/"):
          #move file within grid
          _result=self._xx_grid2grid("mv", _source, _target)
          if _result is None:  #the move worked!
             _result=self._rm_file()
       elif _source.startswith("grid:/"):
          #move file/dir out of grid
           _id=self._lookup_id(_source)
          import AssembleFile
          _as=AssembleFile.AssembleFile(self._mdn_handler)
           _result=_as.process(_id, _target)
           if _result is None:
              #remove the now-copied dir/file from the grid (no-arg call, as above)
              return self._rm_file()
          return "Error! copying dir/file out of grid failed"
       elif _target.startswith("grid:/"):
          #move file/dir into grid
          _target_dir, _target_name=os.path.split(_target[5:])
          _parentID=self._lookup_id(_target_dir)
          _df=DisassembleFile.DisassembleFile(_source, _parentID, _target_name, 
                                              self._mdn_handler)
          _result=_df.process()
          if _result is None:
              import os
              os.remove(_source)
             return None
          return "Error! Trouble copying file into grid"
       return "Error! source and/or target must start with grid:/"
Example #7
def main():
    import sys
    import shutil

    try:

        video_src = sys.argv[1]

        cap = cv2.VideoCapture(video_src)

        if cap is None or not cap.isOpened():
            print('Warning: unable to open video source: ', video_src)
            return
        else:
            print('Info: successfully opened video source:', video_src)
            cam = cap

    except IndexError:
        video_src = 0  # assumed default: fall back to the first camera

        cap = cv2.VideoCapture(video_src)

        if cap is None or not cap.isOpened():
            print('Warning: unable to open video source: ', video_src)
            return
        else:
            print('Info: successfully opened video source:', video_src)
            cam = cap

    Tracker(cam).__process_frame__()
    remove("./__pycache__")
    print('Done')
Example #8
def destroy_handle(log, handle):
    conf = models.Conf.objects.get(handle=handle)

    cfg = rpki.config.parser(confpath(conf.host.handle, 'rpki.conf'), 'myrpki')
    call_rpkid = build_rpkid_caller(cfg)
    call_pubd = build_pubd_caller(cfg)

    # destroy the <self/> object and the <child/> object from the host/parent.
    rpkid_reply = call_rpkid(
        rpki.left_right.self_elt.make_pdu(action="destroy",
                                          self_handle=handle),
        rpki.left_right.child_elt.make_pdu(action="destroy",
                                           self_handle=conf.host.handle,
                                           child_handle=handle))
    if isinstance(rpkid_reply[0], rpki.left_right.report_error_elt):
        print >> log, "Error while calling pubd to delete client %s:" % handle
        print >> log, rpkid_reply[0]

    pubd_reply = call_pubd(
        rpki.publication.client_elt.make_pdu(action="destroy",
                                             client_handle=handle))
    if isinstance(pubd_reply[0], rpki.publication.report_error_elt):
        print >> log, "Error while calling pubd to delete client %s:" % handle
        print >> log, pubd_reply[0]

    conf.delete()

    os.remove(confpath(handle))
Example #9
 def cleanUp(self, sig, currentTBB, os):
     print("Deleting extra files no longer needed (downloaded installers, sig, etc.)")
     OS.remove(currentTBB)
     OS.remove(sig)
     if os == 'mac':
         if self.mount is not None:
             call("hdiutil detach " + self.mount, shell=True)  # unmount the dmg
             OS.rmdir(self.mount)  # the mount point is left as an empty directory
Example #10
def removeFile(src):
    try:
        os.remove(src)  # shutil has no remove(); os.remove deletes a single file
    # src is a directory, not a file
    except IsADirectoryError as e:
        print('File not deleted. Error: %s' % e)
    # any other failure, e.g. the file doesn't exist
    except OSError as e:
        print('File not deleted. Error: %s' % e)
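A quick usage sketch (the filename is illustrative): removeFile reports failures instead of raising, so callers can fire and forget.

removeFile('no_such_file.txt')
# prints something like:
# File not deleted. Error: [Errno 2] No such file or directory: 'no_such_file.txt'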
Example #11
from os import fdopen, remove
from tempfile import mkstemp
import shutil

def replace(file_path, pattern, subst):
    # Create temp file
    fh, abs_path = mkstemp()
    with fdopen(fh, 'w') as new_file:
        with open(file_path) as old_file:
            for line in old_file:
                new_file.write(line.replace(pattern, subst))
    shutil.copymode(file_path, abs_path)
    remove(file_path)  # os.remove; shutil has no remove()
    shutil.move(abs_path, file_path)
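A short, self-contained usage sketch of replace(), reusing the imports above (the file contents and strings are illustrative):

fd, demo = mkstemp()
with fdopen(fd, 'w') as f:
    f.write('host = localhost\n')
replace(demo, 'localhost', '127.0.0.1')
with open(demo) as f:
    print(f.read())  # host = 127.0.0.1
remove(demo)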
Example #12
def verify_file(file):
    try:
        image = cv2.imread(file)  # returns None for unreadable files

        if image.shape[:2] != (256, 256):  # a color image's shape is (256, 256, 3)
            raise AssertionError("Image {} was invalid".format(file))

    except Exception:
        debug_print("Removing {}".format(file))
        os.remove(file)
Example #13
def main():
    import sys
    import shutil
    try:
        video_src = sys.argv[1]
    except IndexError:
        video_src = 0

    App(video_src).run()
    remove("./__pycache__")
    print('Done')
Example #14
def safe_remove(_dir):
    if os.path.isdir(_dir):
        if os.path.islink(_dir):
            os.unlink(_dir)
        else:
            shutil.rmtree(_dir)
    else:
        if os.path.islink(_dir):
            os.unlink(_dir)
        else:
            os.remove(_dir)  # plain file; shutil has no remove()
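A minimal exercise of both branches of safe_remove, using throwaway paths created on the spot:

import os
import tempfile

d = tempfile.mkdtemp()
safe_remove(d)                 # directory -> shutil.rmtree branch

fd, f = tempfile.mkstemp()
os.close(fd)
safe_remove(f)                 # plain file -> os.remove branch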
Example #16
def copy_std_to_dir(path):
    try:
        rmtree(path)  # path is a directory tree (assumes: from shutil import rmtree)
    except OSError:
        pass
    try:
        mkdir(path)
    except OSError:
        pass

    for name in glob(RESOURCES_DIR + "/*"):
        copy(name, path + "/" + basename(name))
Example #17
def rename_copy_remove():
	shutil.move(file_name, new_name)
	shutil.copy(source, destine)
	os.remove(file_name)  # shutil has no remove(); os.remove deletes a file

	os.mkdir('dir_name')
	#create several nested directories at once
	os.makedirs('dir_parent/another_dir/more_one/dir_name')
	#the dir must be empty first
	os.rmdir('dir_name')

	#danger: removes everything under some_dir
	shutil.rmtree('some_dir')
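One addition worth noting next to the cheat sheet above: os.makedirs can tolerate directories that already exist, which saves the usual try/except around mkdir.

os.makedirs('dir_parent/another_dir', exist_ok=True)  # no error if it already exists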
Example #18
def delete(tempdir, resource_name, **kwargs):
    """
    Delete resource
    :param tempdir: directory to resource name
    :param resource_name: file to be removed
    :param kwargs:
    :return:
    """
    ctx.logger.info('Deleting filename {0}'.format(resource_name))
    temp_path = os.path.join(tempdir, resource_name)
    if os.path.exists(temp_path):
        os.remove(temp_path)
        ctx.logger.info('Filename removed: [{0}]'.format(resource_name))
Example #19
    def unzipAssignmentDirectory(self, path):
        """Recursively unzips all zipped files in the students'
            assignments directory.
        """
        zipRegex = r".*\.zip"
        zipFiles = [f for f in os.listdir(path) if re.match(zipRegex, f)]
        for zipFile in zipFiles:
            zipPath = os.path.join(path, zipFile)
            with zipfile.ZipFile(zipPath) as zf:  # was ZipFile(zipFiles), a list
                zf.extractall(path)
            os.remove(zipPath)

        directories = [f for f in os.listdir(path)
                       if os.path.isdir(os.path.join(path, f))]
        for directory in directories:
            self.unzipAssignmentDirectory(os.path.join(path, directory))
Example #21
def create_archive(git_path, module, dest, archive, version, repo, result):
    """ Helper function for creating archive using git_archive """
    all_archive_fmt = {
        '.zip': 'zip',
        '.gz': 'tar.gz',
        '.tar': 'tar',
        '.tgz': 'tgz'
    }
    _, archive_ext = os.path.splitext(archive)
    archive_fmt = all_archive_fmt.get(archive_ext, None)
    if archive_fmt is None:
        module.fail_json(msg="Unable to get file extension from "
                         "archive file name : %s" % archive,
                         details="Please specify archive as filename with "
                         "extension. File extension can be one "
                         "of ['tar', 'tar.gz', 'zip', 'tgz']")

    repo_name = repo.split("/")[-1].replace(".git", "")

    if os.path.exists(archive):
        # If git archive file exists, then compare it with new git archive file.
        # if match, do nothing
        # if does not match, then replace existing with temp archive file.
        tempdir = tempfile.mkdtemp()
        new_archive_dest = os.path.join(tempdir, repo_name)
        new_archive = new_archive_dest + '.' + archive_fmt
        git_archive(git_path, module, dest, new_archive, archive_fmt, version)

        # filecmp should be more efficient than an md5sum checksum
        if filecmp.cmp(new_archive, archive):
            result.update(changed=False)
            # Cleanup before exiting
            try:
                shutil.rmtree(tempdir)  # tempdir is a directory; shutil has no remove()
            except OSError:
                pass
        else:
            try:
                shutil.move(new_archive, archive)
                shutil.rmtree(tempdir)
                result.update(changed=True)
            except OSError:
                exception = get_exception()
                module.fail_json(
                    msg="Failed to move %s to %s" % (new_archive, archive),
                    details="Error occured while moving : %s" % exception)
    else:
        # Perform archive from local directory
        git_archive(git_path, module, dest, archive, archive_fmt, version)
        result.update(changed=True)
Example #22
def run_sample_job():
    job = SampleCDXJob(args=sys.argv[1:])

    with job.make_runner() as runner:
        runner.run()

        if os.path.isfile(SEQ_FILE):
            os.remove(SEQ_FILE)

        # convert streaming output to sequence file
        count = make_text_null_seq(SEQ_FILE, runner.stream_output())

    if job.options.splitfile and hasattr(runner.fs, "make_s3_key"):
        key = runner.fs.make_s3_key(job.options.splitfile)
        key.set_contents_from_filename(SEQ_FILE)
Example #24
def buildInstance():  # Copy Instances.
    global path
    print("Building instances...")
    controls = os.listdir(path + "\\Controls\\")

    for i in range(len(controls)):
        instance_path = path + "\\Instance" + str(i + 1) + "\\"
        if os.path.exists(instance_path):
            shutil.rmtree(instance_path)
        shutil.copytree(path + "\\Model\\", instance_path)
        if os.path.exists(instance_path + "Control.dat"):
            os.remove(instance_path + "Control.dat")
        shutil.copyfile(path + "\\Controls\\" + controls[i],
                        instance_path + "Control.dat")
    print("Instance are OK.")
Example #26
def should_update_common_wheels():
    # If the cache hasn't been created, create it.
    if not os.path.exists(LOCATIONS["common-wheels"]):
        return True

    # If the requirements file was updated after the cache, we'll repopulate it.
    cache_last_populated_at = os.path.getmtime(LOCATIONS["common-wheels"])
    requirements_updated_at = os.path.getmtime(REQUIREMENTS["common-wheels"])
    need_to_repopulate = requirements_updated_at > cache_last_populated_at

    # Clear the stale cache.
    if need_to_repopulate:
        shutil.rmtree(LOCATIONS["common-wheels"], ignore_errors=True)  # the cache is a directory

    return need_to_repopulate
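os.remove() has no ignore_errors parameter; shutil.rmtree() does, which is why the directory variant suits a cache path that may not exist yet. A one-line sketch with an illustrative path:

shutil.rmtree('wheels-cache', ignore_errors=True)  # silent no-op if absent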
Example #27
    def fetch_job_results(cls, job_res_url, username_fetch, password_fetch,
                          opt_res_dir, wf_id):
        """
        Fetch job output files from NSG.
        """
        # read/set NSG connection parameters
        KEY = cls.key
        URL = cls.url
        CRA_USER = username_fetch
        PASSWORD = password_fetch

        # request all output file urls
        r_all = requests.get(job_res_url,
                             auth=(CRA_USER, PASSWORD),
                             headers=cls.headers)
        root = xml.etree.ElementTree.fromstring(r_all.text)
        all_down_uri = root.find('jobfiles').findall('jobfile')

        # create destination dir if not existing
        if not os.path.exists(opt_res_dir):
            os.mkdir(opt_res_dir)

        # for every file download it to the destination dir
        for i in all_down_uri:
            crr_down_uri = i.find('downloadUri').find('url').text
            r = requests.get(crr_down_uri,
                             auth=(CRA_USER, PASSWORD),
                             headers=cls.headers)
            d = r.headers['content-disposition']
            filename_list = re.findall('filename=(.+)', d)
            for filename in filename_list:
                with open(os.path.join(opt_res_dir, filename), 'wb') as fd:
                    for chunk in r.iter_content():
                        fd.write(chunk)

        fname = opt_res_dir + '_' + wf_id

        if os.path.isfile(fname + '.zip'):  # make_archive will append '.zip'
            os.remove(fname + '.zip')

        shutil.make_archive(fname, 'zip', opt_res_dir)

        OptFolderManager.create_opt_res_zip(fin_folder=opt_res_dir,
                                            filetype="",
                                            wf_id=wf_id)

        return ""
Example #28
def create_archive(git_path, module, dest, archive, version, repo, result):
    """ Helper function for creating archive using git_archive """
    all_archive_fmt = {'.zip': 'zip', '.gz': 'tar.gz', '.tar': 'tar',
                       '.tgz': 'tgz'}
    _, archive_ext = os.path.splitext(archive)
    archive_fmt = all_archive_fmt.get(archive_ext, None)
    if archive_fmt is None:
        module.fail_json(msg="Unable to get file extension from "
                             "archive file name : %s" % archive,
                         details="Please specify archive as filename with "
                                 "extension. File extension can be one "
                                 "of ['tar', 'tar.gz', 'zip', 'tgz']")

    repo_name = repo.split("/")[-1].replace(".git", "")

    if os.path.exists(archive):
        # If git archive file exists, then compare it with new git archive file.
        # if match, do nothing
        # if does not match, then replace existing with temp archive file.
        tempdir = tempfile.mkdtemp()
        new_archive_dest = os.path.join(tempdir, repo_name)
        new_archive = new_archive_dest + '.' + archive_fmt
        git_archive(git_path, module, dest, new_archive, archive_fmt, version)

        # filecmp should be more efficient than an md5sum checksum
        if filecmp.cmp(new_archive, archive):
            result.update(changed=False)
            # Cleanup before exiting
            try:
                shutil.rmtree(tempdir)  # tempdir is a directory; shutil has no remove()
            except OSError:
                pass
        else:
            try:
                shutil.move(new_archive, archive)
                shutil.rmtree(tempdir)
                result.update(changed=True)
            except OSError as e:
                module.fail_json(msg="Failed to move %s to %s" %
                                     (new_archive, archive),
                                 details="Error occured while moving : %s"
                                         % to_native(e))
    else:
        # Perform archive from local directory
        git_archive(git_path, module, dest, archive, archive_fmt, version)
        result.update(changed=True)
Example #29
def init(ctx, generate):
    """
    Initialise databases.
    """
    if not is_imp_container_installed(ctx.obj['image-name'], ctx.obj['image-tag']):
        click.secho('IMP image not installed. Please run `impy install_imp_container` first.', bold=True)

    if generate:
        container_name = generate_container_name(ctx.obj['database-path'])

        init_cmd = "snakemake -s {container_source_code_dir}/rules/ini/init".format(
            container_source_code_dir=CONTAINER_CODE_DIR
        )

        docker_cmd = generate_docker_cmd(
            container_name,
            ctx.obj['database-path'],
            ctx.obj['config-file-path'],
            image_name=ctx.obj['image-name'],
            image_tag=ctx.obj['image-tag'],
            command=init_cmd,
            interactive=ctx.obj['enter'],
            source_code=ctx.obj['source-code'],
            )

        call(docker_cmd, container_name)
    else:
        if ctx.obj['config-file-path'].exists():
            with open(ctx.obj['config-file-path']) as cfile:
                config = json.load(cfile)
                if 'filtering' in config and config['filtering'] != 'hg38':
                    click.secho('No databases already generated for "%s". Please run `impy init` with the `--generate` flag.' % config['filtering'], bold=True)
                    ctx.abort()

        url = "https://webdav-r3lab.uni.lu/public/R3lab/IMP/db/hg38.tgz"
        tmp = ctx.obj['database-path'].parent / 'db.tgz'
        click.secho("[x] Downloading IMP databases '%s' to '%s'" % (url, tmp), fg='green')
        with urllib.request.urlopen(url) as response, open(tmp, 'wb') as out_file:
            copyfileobj(response, out_file, reporthook)  # the duplicate shutil.copyfileobj call was a no-op on the exhausted stream
        click.secho("[x] Extracting to %s" % ctx.obj['database-path'], fg='green')
        with tarfile.open(tmp, 'r') as tarball:
            tarball.extractall(ctx.obj['database-path'], get_members(tarball, 'db'))
        click.secho("[x] Removing tmp file:  %s" % tmp, fg='green')
        os.remove(tmp)
Example #30
    def checkUnimacroGrammars(self):
        """see if there are any changed grammar files with respect to original file in release
        
        sync with ...
        """
        join, isdir, isfile, listdir = os.path.join, os.path.isdir, os.path.isfile, os.listdir
        u_dir = status.getUnimacroDirectory()
        # u_user_dir = status.getUnimacroUserDirectory()
        u_grammars_dir = status.getUnimacroGrammarsDirectory()
        u_original_grammars_dir = join(u_dir, "UnimacroGrammars")
        assert isdir(u_original_grammars_dir)
        originalPyFiles = [
            f for f in listdir(u_original_grammars_dir) if f.endswith('.py')
        ]
        txtFiles = [f for f in listdir(u_grammars_dir) if f.endswith('.txt')]
        activePyFiles = [
            f for f in listdir(u_grammars_dir) if f.endswith('.py')
        ]

        for f in originalPyFiles:
            org_path = join(u_original_grammars_dir, f)
            txt_file = f.replace('.py', '.txt')
            txt_path = join(u_grammars_dir, txt_file)
            py_path = join(u_grammars_dir, f)
            nice_name = f[:-3]  # strip off .py
            check_unimacro_grammars.checkOriginalFileWithActualTxtPy(
                nice_name, org_path, txt_path, py_path)

        for f in txtFiles:
            f_py = f.replace('.txt', '.py')
            if f_py not in originalPyFiles:
                print(
                    f'txt file "{f}" in ActiveGrammars, but py file {f_py} not in UnimacroGrammars'
                )
                os.remove(join(u_grammars_dir, f))  # f is a bare filename inside the grammars dir

        for f in activePyFiles:
            f_txt = f.replace('.py', '.txt')
            if not isfile(f_txt):
                print(
                    f'py file "{f}" in ActiveGrammars, but not txt file {f_txt}, so grammar is not in UnimacroGrammars'
                )
Example #31
def get_test_data(configfile, home):
    config = jloadf(configfile)
    os.chdir('./v2/ihec/test_data')
    oks = dict()
    for k in config['data']:
        oks[k] = False
        for url in config['data'][k]:
            if wget(url) == 0:
                oks[k] = True
                break
            else:
                logerr('# failed downloading:' + url)
                incomplete = glob.glob('./' + os.path.basename(url))
                if len(incomplete) > 0:
                    assert len(incomplete) == 1, incomplete
                    os.remove(incomplete[0])
                    logerr('# removed failed download.. ' + incomplete[0])
    os.chdir(home)    
    for k in oks:
        assert oks[k], ['could not download all test data', k]
Example #33
def make_freeture_config(dfn_config_dic, freeture_cfg_template,
                         destination_folder):

    freeture_session_file = os.path.join(
        destination_folder,
        (dfn_config_dic['station']['hostname'] + '_' +
         datetime.datetime.utcnow().strftime('%Y-%m-%d_%H%M%S') + '_' +
         'freeture.cfg'))

    if os.path.isfile(freeture_session_file):
        os.remove(freeture_session_file)
    shutil.copyfile(freeture_cfg_template, freeture_session_file)

    for freet_k, dfn_k in DFN_FREETURE_CONFIG_TRANSLATOR.items():

        comm = ("sed -i " + "\'s/^" + str(freet_k) + " *= *.*/" +
                str(freet_k) + " = " + str(dfn_config_dic['station'][dfn_k]) +
                "/\' " + freeture_session_file)

        os.system(comm)

    return freeture_session_file
Example #34
def find_hedges(in_path, outpath, move=None, erase_thres=200):
    """
    Finds all mask images where appropriately sized hedges are present. Deletes
    mask files without hedges and moves those with hedges to a new folder.
    
    Parameters
    ----------
    in_path: str
        Path to mask files
    outpath: str
        Path where masks containing hedges should be moved to.
    move: str (optional)
        String giving the folder path of where to move masks without hedges.
        If left as None then masks without hedges will simply be deleted.
        When first testing different erase thresholds this is not recommended.
        A more CAUTIOUS approach is to first move the masks, inspect that
        only unwanted masks have been moved, and then delete them manually.
    
    """
    #get all mask image tiles
    files = getFiles(in_path, ending='.png')

    for file in files:
        name = file.rsplit('\\', 1)[1]
        #load hedge mask
        mask = cv2.imread(file, cv2.IMREAD_GRAYSCALE)
        #erase any hedge mask segments that are too small
        mask = erase(mask, erase_thres)
        #check if any hedges are present within the mask image
        if np.max(mask) < 1:
            print('removing {}'.format(file))
            if move:
                shutil.move(file, move)
            else:
                os.remove(file)

        else:
            cv2.imwrite(os.path.join(outpath, name), mask)
Example #36
 def modify(self, file_server):
     """This modifies the file server inside the manager blueprint.
     """
     file_server = self._fix_file_server(file_server)
     serve_under = tempfile.mkdtemp(prefix='cloudify-offline-')
     lgr.info('Running on {0}'.format(serve_under))
     try:
         utils.untar(self.source, serve_under)
         path = os.path.join(serve_under, os.listdir(serve_under)[0])
         metadata = self._get_meta(path)
         lgr.info(metadata)
         # from here on, this can definitely be consolidated as it is also
         # done under `create`
         manager_blueprints = os.path.join(
             path, 'cloudify-manager-blueprints-{0}'.format(
                 metadata['tag']))
         simple_manager_blueprint_path = os.path.join(
             manager_blueprints, 'simple-manager-blueprint.yaml')
         lgr.info('Editing {0}'.format(simple_manager_blueprint_path))
         with open(simple_manager_blueprint_path) as f:
             # read yaml also
             content = f.read()
             lgr.info('Replacing {0} with {1}.'.format(
                 metadata['file_server'], file_server))
             content = content.replace(metadata['file_server'], file_server)
             yaml_content = yaml.safe_load(content)
         with open(simple_manager_blueprint_path, 'w') as f:
             f.write(yaml.dump(yaml_content, default_flow_style=False))
         try:
             os.remove(self.source)
         except OSError:
             lgr.error('Could not remove original source {0}'.format(
                 self.source))
         utils.tar(path, 'cloudify-offline.tar.gz')
     finally:
         shutil.rmtree(serve_under)
Example #37
def makemovie(plotfn,args_fixed,args,name='movie'):
    """ makes a movie out of plotfn, called with series of args (a dictionary of lists)
    """
    moviefile = '%s.gif' % name
    if os.path.exists(moviefile):
        os.remove(moviefile)
    if os.path.exists(name):
        shutil.rmtree(name)  # shutil has no rmdir(); rmtree removes the frame dir

    os.mkdir(name)

    #convert dictionary of lists into list of dictionaries
    sizes = {}
    maxsize = 0
    for arg, val in args.items():
        sizes[arg] = np.size(val)
        if sizes[arg] > maxsize:
            maxsize = sizes[arg]
    N = maxsize
                
    arglist = []
    for i in range(N):
        d = {}
        for k in args.keys():
            d[k] = args[k][i]
        for k in args_fixed.keys():
            d[k] = args_fixed[k]
            
        arglist.append(d)

    plt.ioff()
    for i in range(N):
        plt.figure()
        plotfn(**arglist[i])
        plt.savefig('%s/frame%i.png' % (name,i))
        plt.close()
Example #38
def uninstall_parts(package):
    import os
    import shutil
    #sys.prefix
    loc = os.sep.join([
        sys.prefix, 'lib', 'python' + sys.version[:3], 'site-packages', package
    ])  #try sys.prefix
    if os.path.exists(loc):
        print('Removing files from ' + loc)
        shutil.rmtree(loc, ignore_errors=False)
    loc = os.sep.join([
        sys.prefix, 'lib', 'python' + sys.version[:3], 'dist-packages', package
    ])  #try dist-packages
    if os.path.exists(loc):
        print('Removing files from ' + loc)
        shutil.rmtree(loc, ignore_errors=False)

    #/usr/local
    loc = os.sep.join([
        '/usr/local', 'lib', 'python' + sys.version[:3], 'site-packages',
        package
    ])  #try sys.prefix
    if os.path.exists(loc):
        print('Removing files from ' + loc)
        shutil.rmtree(loc, ignore_errors=False)
    loc = os.sep.join([
        '/usr/local', 'lib', 'python' + sys.version[:3], 'dist-packages',
        package
    ])  #try dist-packages
    if os.path.exists(loc):
        print('Removing files from ' + loc)
        shutil.rmtree(loc, ignore_errors=False)

    if os.path.exists('/usr/local/bin/' + package):
        print('Removing file: /usr/local/bin/' + package)
        try:
            os.remove('/usr/local/bin/' + package)
        except OSError:
            pass
    if os.path.exists('/usr/bin/' + package):
        print('Removing file: /usr/bin/' + package)
        try:
            os.remove('/usr/bin/' + package)
        except OSError:
            pass
    if os.path.islink('/usr/bin/' + package):
        print('Removing link: /usr/bin/' + package)
        try:
            os.unlink('/usr/bin/' + package)
        except OSError:
            pass
Example #39
# (snippet begins mid-script: a metadata file f has already been opened and its
#  lines parsed into `data`; `nb_sample` is the number of images requested)
        data.append(json.loads(line))
f.close()
shuf = list(range(len(data)))  # index order; shuffle here if random sampling is wanted
count = 0
index = 0
while count < nb_sample and index < len(data):
    try:
        if data[shuf[index]]['uri'][-3:] != 'jpg':
            index += 1
            continue
        if count % 500 == 0:
            print('%d p**n images have been downloaded... %d remaining...' %
                  (count, nb_sample - count))
        if not os.path.exists('./p**n'):
            os.mkdir('./p**n')
        p = os.path.join('./p**n/', os.path.split(data[shuf[index]]['uri'])[1])
        urllib.request.urlretrieve(data[shuf[index]]['uri'], p)  # py3 location of urlretrieve
        if imghdr.what(p) == 'jpeg':
            count += 1
        else:
            os.remove(p)
        index += 1
    except Exception as e:
        index += 1
        continue

if count == nb_sample:
    print('downloaded all images needed \n')
else:
    print('%d images not downloaded' % (nb_sample - count))
Example #40
 def remove_page(self, page_filename):
     os.remove(BanjaxBehaviorTest.http_doc_root + "/" + page_filename)
Example #41
 def __exit__(self, type, value, traceback):
     latest = os.path.join(self.top_level, "latest")
     if os.path.lexists(latest):
         os.remove(latest)  # "latest" is a symlink; os.remove/unlink deletes it
     os.symlink(self.path, latest)
Example #42
def createRunScript(case_path, init_potential, run_parallel, solver_name,
                    num_proc):
    print(
        "Create Allrun script; assumes it will be run with pwd = case folder"
    )
    print(
        " run this script with makeRunCommand(), which will source the environment (without a login shell) and cd to the case folder"
    )

    fname = case_path + os.path.sep + "Allrun"
    meshOrg_dir = "constant/polyMesh.org"
    mesh_dir = "constant/polyMesh"

    solver_log_file = case_path + os.path.sep + 'log.' + solver_name
    if os.path.exists(solver_log_file):
        if _debug:
            print(
                "Warning: a solver log already exists; deleting it to avoid errors"
            )
        os.remove(solver_log_file)
    if os.path.exists(fname):
        if _debug: print("Warning: Overwrite existing Allrun script ")
    with open(fname, 'w+') as f:
        f.write("#!/bin/bash \n\n")
        # NOTE: Although RunFunctions seem to be sourced, the functions `getApplication`
        # and `getNumberOfProcessors` are not available. solver_name and num_proc do not have
        # to be passed if they can be read using these bash functions
        #f.write("# Source tutorial run functions \n")
        #f.write(". $WM_PROJECT_DIR/bin/tools/RunFunctions \n\n")

        if getFoamRuntime() != 'BlueCFD':
            f.write("source {}/etc/bashrc\n".format(
                getFoamDir()))  # WSL, not working for blueCFD,
        #QProcess has trouble to run, "source {}/etc/bashrc"
        #source command is only supported by bash
        '''
        #WSL has trouble in ln -s
        f.write("# Create symbolic links to polyMesh.org \n")
        f.write("mkdir {} \n".format(mesh_dir))
        f.write("ln -s {}/boundary {}/boundary \n".format(meshOrg_dir, mesh_dir))
        f.write("ln -s {}/faces {}/faces \n".format(meshOrg_dir, mesh_dir))
        f.write("ln -s {}/neighbour {}/neighbour \n".format(meshOrg_dir, mesh_dir))
        f.write("ln -s {}/owner {}/owner \n".format(meshOrg_dir, mesh_dir))
        f.write("ln -s {}/points {}/points \n".format(meshOrg_dir, mesh_dir))
        f.write("\n")
        '''
        # BashWSL, cygwin, docker, run this script in case folder, if this script is run in case folder, no need to provide case path
        case = '.'
        if (init_potential):
            f.write("# Initialise flow \n")
            f.write("potentialFoam -case " + case + " 2>&1 | tee " + case +
                    "/log.potentialFoam \n\n")

        if (run_parallel):
            f.write("# Run application in parallel \n")
            f.write("decomposePar 2>&1 | tee log.decomposePar \n")
            f.write(
                "mpirun -np {} {} -parallel -case {} 2>&1 | tee {}/log.{} \n\n"
                .format(str(num_proc), solver_name, case, case, solver_name))
        else:
            f.write("# Run application \n")
            f.write("{} -case {} 2>&1 | tee {}/log.{} \n\n".format(
                solver_name, case, case, solver_name))

    # on windows linux subsystem, script must use unix line ending:  dos2unix
    if getFoamRuntime() == 'BashWSL':
        out = runFoamCommand("dos2unix Allrun", case_path)
    try:  # Update Allrun permission, it will fail on windows
        out = runFoamCommand("chmod a+x Allrun", case_path)
    except:
        pass  # on windows file system it is default executable to WSL user by default
Example #43
 def clean(self):
     exe_name = self.instance.get_exe_name()
     os.remove(self.dir_name + exe_name)  # was shutil.remove with an undefined exe_basename
Example #44
    def dbas_to_nc(self, dba_files, output_path, clobber=False, mode=None):
        
        #if not self._output_path:
        #    self._logger.warning('No NetCDF output_path specified')
        #    return
        if not os.path.isdir(output_path):
            self._logger.error('Invalid output_path specified: {:s}'.format(output_path))
            return
        
        # Create the deployment/trajectory name
        try:
            trajectory_dt = parser.parse(self._attributes['deployment']['trajectory_date'])
        except ValueError as e:
            logging.error('Error parsing deployment trajectory_date: {:s} ({:s})'.format(self._attributes['deployment']['trajectory_date'], e))
            return
            
        # Create a temporary directory for creating/writing NetCDF prior to 
        # moving them to output_path
        tmp_dir = tempfile.mkdtemp()
        self._logger.debug('Temporary NetCDF directory: {:s}'.format(tmp_dir))
            
        # Write one NetCDF file for each input file
        output_nc_files = []
        for dba_file in dba_files:
        
            if not os.path.isfile(dba_file):
                logging.error('Invalid dba file specified: {:s}'.format(dba_file))
                continue
                
            self._logger.info('Processing dba file: {:s}'.format(dba_file))
            
            # Parse the dba file
            dba = create_llat_dba_reader(dba_file)
            if len(dba['data']) == 0:
                logging.warning('Skipping empty dba file: {:s}'.format(dba_file))
                continue
                
            # Split the filename and extension
            dba_filename, dba_ext = os.path.splitext(os.path.basename(dba_file))
            
            # Guess at the realtime/delayed status, based on file type, if not specified
            # by the user
            if mode:
                file_mode = mode
            else:
                try:
                    # mode is falsy here, so derive it from the file extension
                    if dba['dbd_meta']['filename_extension'] in DELAYED_MODE_EXTENSIONS:
                        file_mode = 'delayed'
                    elif dba['dbd_meta']['filename_extension'] in REALTIME_MODE_EXTENSIONS:
                        file_mode = 'rt'
                    else:
                        logging.error('No realtime/delayed mode specified and unable to guess: {:s}'.format(dba_file))
                        continue
                except KeyError as e:
                    logging.error(e)
                    continue
            
            # Create the output NetCDF path

            out_nc_file = os.path.join(output_path, '{:s}-{:s}.nc'.format(dba_filename, file_mode))
                
            # Clobber existing files as long as self._clobber == True.  If not, skip
            # this file
            if os.path.isfile(out_nc_file):
                if self._clobber:
                    self._logger.info('Clobbering existing file: {:s}'.format(out_nc_file))
                else:
                    self._logger.warning('Skipping existing NetCDF file: {:s}'.format(out_nc_file))
                    continue
                    
            # Path to hold file while we create it
            _, tmp_nc = tempfile.mkstemp(dir=tmp_dir, suffix='.nc', prefix=os.path.basename(__file__))
            
            try:
                self.init_nc(tmp_nc)
            except (GliderNetCDFWriterException, IOError) as e:
                logging.error('Error initializing {:s}: {:s}'.format(tmp_nc, e))
                continue
            
            try:
                self.open_nc()
                # Add command line call used to create the file
                self.update_history('{:s} {:s}'.format(sys.argv[0], dba_file))
            except (GliderNetCDFWriterException, IOError) as e:
                logging.error('Error opening {:s}: {:s}'.format(tmp_nc, e))
                os.remove(tmp_nc)
                continue
            
            # Create and set the trajectory
            trajectory_string = '{:s}-{:s}-{:s}'.format(self.attributes['deployment']['glider'],
                trajectory_dt.strftime('%Y%m%dT%H%M'),
                file_mode)
            self.set_trajectory_id(trajectory_string)
            # Update the global title attribute
            self._nc.title = 'Slocum Glider dba file: {:s}'.format(trajectory_string)
            
            # Create the source file scalar variable
            self.set_source_file_var(dba['dbd_meta']['filename_label'], dba['dbd_meta'])
            
            # Add the derived sensor definitions
            dba_sensors = [s['sensor'] for s in dba['sensors']]
            if 'drv_timestamp' in dba_sensors and 'drv_timestamp' in self.nc_sensor_defs:
                self.update_sensor_def('drv_timestamp', dba['sensors'][dba_sensors.index('drv_timestamp')])
            if 'drv_m_gps_lat' in dba_sensors and 'drv_m_gps_lat' in self.nc_sensor_defs:
                self.update_sensor_def('drv_m_gps_lat', dba['sensors'][dba_sensors.index('drv_m_gps_lat')])
            if 'drv_m_gps_lon' in dba_sensors and 'drv_m_gps_lon' in self.nc_sensor_defs:
                self.update_sensor_def('drv_m_gps_lon', dba['sensors'][dba_sensors.index('drv_m_gps_lon')])
            if 'drv_pressure' in dba_sensors and 'drv_pressure' in self.nc_sensor_defs:
                self.update_sensor_def('drv_pressure', dba['sensors'][dba_sensors.index('drv_pressure')])
            if 'drv_depth' in dba_sensors and 'drv_depth' in self.nc_sensor_defs:
                self.update_sensor_def('drv_depth', dba['sensors'][dba_sensors.index('drv_depth')])
            if 'drv_interp_m_gps_lat' in dba_sensors and 'drv_interp_m_gps_lat' in self.nc_sensor_defs:
                self.update_sensor_def('drv_interp_m_gps_lat', dba['sensors'][dba_sensors.index('drv_interp_m_gps_lat')])
            if 'drv_interp_m_gps_lon' in dba_sensors and 'drv_interp_m_gps_lon' in self.nc_sensor_defs:
                self.update_sensor_def('drv_interp_m_gps_lon', dba['sensors'][dba_sensors.index('drv_interp_m_gps_lon')])
            
            # Write the data to the NetCDF file
            for r in dba['data']:
                self.stream_dict_insert(r)
            
            # Permanently close the NetCDF file after writing it  
            nc_file = self.finish_nc()
            
            # Add the output NetCDF file name to the list of those to be moved to args.output_dir
            if nc_file:
                shutil.move(tmp_nc, out_nc_file)
                
            output_nc_files.append(out_nc_file)
            
                
        #        self._move_pairs.append([tmp_nc, out_nc_file])
        #        self._has_tmp_nc = True
        #        
        ## Check for tmp NetCDF files. If none, delete the temporary directory
        #if not self._has_tmp_nc:
        #    shutil.rmtree(self._tmp_dir)
        #    self._tmp_dir = None
        #    return
        #        
        ## Move all created NetCDF files to args.output_dir
        #self._output_nc_files = []
        #for tmp_nc,out_nc in self._move_pairs:
        #    if os.path.isfile(out_nc):
        #        if not self.clobber:
        #            self._logger.info('Skipping existing NetCDF file: {:s}'.format(out_nc))
        #            continue
        #        else:
        #            self._logger.info('Clobbering existing NetCDF file: {:s}'.format(out_nc))
        #            try:
        #                os.remove(out_nc)
        #            except OSError as e:
        #                self._logger.error(e)
        #                continue
        #            
        #    # Move the tmp_nc to out_nc
        #    try:
        #        shutil.move(tmp_nc, out_nc)
        #        self._output_nc_files.append(out_nc)
        #    except:
        #        self._logger.error('Error moving {:s}: {:s}'.format(tmp_nc, e))
        #        continue
                
        # Delete the temporary directory once files have been moved
        try:
            self._logger.debug('Removing temporary directory: {:s}'.format(tmp_dir))
            shutil.rmtree(tmp_dir)
        except OSError as e:
            logging.error(e)
        
        return output_nc_files
Example #45
def create_package(path, pkg_name, args):
    """Creates an Arch Linux package from the files in directory path.

    The files in path should include a .PKGINFO at the top-level; all
    standard Arch Linux packages should have one of these anyway. This
    method will create a .MTREE file in path (overwriting any existing
    .MTREE).

    Returns:
        the path to the new package, which will have extension
        .pkg.tar.xz.
    """
    if not os.path.lexists(os.path.join(path, ".PKGINFO")):
        raise RuntimeError("No .PKGINFO at " + path)

    owd = os.getcwd()
    os.chdir(path)

    log("info", "Generating .MTREE")
    try: os.unlink(".MTREE")
    except FileNotFoundError: pass
    files = " ".join(os.listdir("."))
    cmd = ("bsdtar -czf .MTREE --format=mtree"
           " --options=!all,use-set,type,uid,mode"
           ",time,size,md5,sha256,link " + files)
    time = timestamp()
    cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
             universal_newlines=True)
    log("command", cmd, cp.stdout.splitlines(), time)
    if cp.returncode:
        exit(1)

    log("info", "Tar-ing up files")
    pkg_name = pkg_name + ".pkg.tar.xz"
    files = " ".join(os.listdir("."))

    tar_cmd = "bsdtar -cf - " + files
    time = timestamp()
    tar_proc = subprocess.Popen(tar_cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE,
                     universal_newlines=False)

    tar_data, tar_error = tar_proc.communicate()

    if tar_proc.returncode:
        log("command", tar_cmd, tar_error.decode("utf-8").splitlines(),
            time)
        exit(1)
    log("command", tar_cmd, [], time)

    xz_cmd = "xz -c -z"
    time = timestamp()
    xz_proc = subprocess.Popen(xz_cmd.split(), stdin=subprocess.PIPE, stdout=subprocess.PIPE,
                    stderr=subprocess.PIPE, universal_newlines=False)

    xz_data, xz_error = xz_proc.communicate(input=tar_data)

    if xz_proc.returncode:
        log("command", xz_cmd, xz_error.decode("utf-8").splitlines(),
            time)
        exit(1)
    log("command", xz_cmd, [], time)

    log("info", "Successfully ran " + tar_cmd + " | " + xz_cmd)
    with open(pkg_name, "bw") as f:
        f.write(xz_data)

    cmd = "bsdtar -tqf " + pkg_name + " .PKGINFO"
    time = timestamp()
    cp = subprocess.run(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
             universal_newlines=True)
    log("command", cmd, cp.stdout.splitlines(), time)
    if cp.returncode:
        exit(1)

    pkg_path = os.path.join(args.toolchain_directory, pkg_name)
    try: os.remove(pkg_path)
    except OSError: pass
    shutil.move(pkg_name, args.toolchain_directory)

    log("info", "Created package at path %s" % pkg_path)

    os.chdir(owd)
    return pkg_path
Example #46
def main(skyFilePath, imageSize):
    if sc.sticky.has_key('honeybee_release'):

        try:
            if not sc.sticky['honeybee_release'].isCompatible(ghenv.Component):
                return -1
            if sc.sticky['honeybee_release'].isInputMissing(ghenv.Component):
                return -1
        except:
            warning = "You need a newer version of Honeybee to use this compoent." + \
            "Use updateHoneybee component to update userObjects.\n" + \
            "If you have already updated userObjects drag Honeybee_Honeybee component " + \
            "into canvas and try again."
            w = gh.GH_RuntimeMessageLevel.Warning
            ghenv.Component.AddRuntimeMessage(w, warning)
            return -1

        hb_radParDict = sc.sticky["honeybee_RADParameters"]().radParDict
        hb_folders = sc.sticky["honeybee_folders"]
        hb_RADPath = hb_folders["RADPath"]
        hb_RADLibPath = hb_folders["RADLibPath"]

    else:
        print "You should first let Honeybee to fly..."
        w = gh.GH_RuntimeMessageLevel.Warning
        ghenv.Component.AddRuntimeMessage(
            w, "You should first let Honeybee to fly...")
        return None, None

    fileNames = ["oconv.exe", "rpict.exe", "pcond.exe", "pflip.exe"]
    # check for files
    for fileName in fileNames:
        if not os.path.isfile(hb_RADPath + "\\" + fileName):
            msg = "Cannot find " + fileName + " at " + hb_RADPath + \
                  "Make sure that RADIANCE is installed on your system and try again."
            ghenv.Component.AddRuntimeMessage(
                gh.GH_RuntimeMessageLevel.Warning, msg)
            return None, None

    # change the sky in case it is for gendaylit
    skyFilePath = checkSky(skyFilePath)

    projectName = skyFilePath.split(".")[0]
    radFile = skyFilePath.split(".")[0] + "_geometry.RAD"
    viewName = "skyView"

    #return the file path
    skyImageFile = projectName + "_" + viewName + ".HDR"
    hSkyImageFile = projectName + "_" + viewName + "_h.HDR"  # human eye
    flippedSkyImageFile = projectName + "_" + viewName + "_f.HDR"  # flipped image so it looks the same as ladybug sky view

    # remove the old file
    if os.path.isfile(skyImageFile):
        try:
            os.remove(skyImageFile)
        except:
            print "Failed to remove the old sky image. The result might be wrong"

    # write the path string (RADIANCE should be verified as installed on the system)
    pathStr = "SET RAYPATH=.;" + hb_RADLibPath + "\nPATH=" + hb_RADPath + ";$PATH\n"

    # generate the rad file
    oconvL = oconvLine(projectName, viewName, [skyFilePath])

    rpictL = rpictRenderSkyLine(imageSize, projectName, viewName)

    rtraceL, resultFile = rtraceLine(projectName, viewName)

    # run the study
    command = "@echo off\n" + \
              "echo Generating the sky view\n" + \
              pathStr + "\n" + \
              oconvL + "\n" + \
              rpictL + "\n" + \
              rtraceL + "\n" + \
              "pcond -h+ " + skyImageFile + " > " + hSkyImageFile + "\n" + \
              "pflip -h " + hSkyImageFile + " > " + flippedSkyImageFile + "\n" + \
              "exit"

    with open(projectName + ".bat", "w") as outf:
        outf.write(command)

    #os.system(projectName + ".bat")
    runCmdAndGetTheResults("/c " + projectName + ".bat")

    # read the result of the global horizontal irradiance
    with open(resultFile, "r") as inf:
        try:
            gHorIrr = inf.readlines()[0].strip()
        except:
            gHorIrr = "Failed to calculate!"

    return flippedSkyImageFile, gHorIrr
Example #47
    def plot(self,value,name,epoch_in):
        #self.plot_epoch.append(epoch_in)
        
        if('loss' in name):
            if('train' in name):
                self.plot_epoch_loss_train.append(epoch_in)
                self.train_loss.append(value)
                
                ax1 = self.train_loss_figure.add_subplot(111)
                
                ax1.scatter(np.array(self.plot_epoch_loss_train),np.array(self.train_loss))
                #print(np.array(self.train_loss))
                #print(np.array(self.plot_epoch_train))
                
                try:
                    os.remove(self.data_path+'/'+'train/'+name)  # savefig would overwrite anyway
                    self.train_loss_figure.savefig(self.data_path+'/'+'train/'+name)
                except:
                    self.train_loss_figure.savefig(self.data_path+'/'+'train/'+name)
            
            elif('test' in name):
                self.plot_epoch_loss_test.append(epoch_in)
                self.test_loss.append(value)
                
                ax2 = self.test_loss_figure.add_subplot(111)
                
                ax2.scatter(np.array(self.plot_epoch_loss_test),np.array(self.test_loss))

                try:
                    # shutil has no remove(); os.remove deletes the stale plot first
                    os.remove(self.data_path + '/' + 'test/' + name)
                except OSError:
                    pass
                self.test_loss_figure.savefig(self.data_path + '/' + 'test/' + name)

            elif 'val' in name:
                self.plot_epoch_loss_val.append(epoch_in)
                self.val_loss.append(value)
                
                ax3 = self.val_loss_figure.add_subplot(111)
                
                ax3.scatter(np.array(self.plot_epoch_loss_val),np.array(self.val_loss))
                
                try:
                    # shutil has no remove(); os.remove deletes the stale plot first
                    os.remove(self.data_path + '/' + 'val/' + name)
                except OSError:
                    pass
                self.val_loss_figure.savefig(self.data_path + '/' + 'val/' + name)
        
        elif 'acc' in name:
            if 'train' in name:
                self.plot_epoch_acc_train.append(epoch_in)
                self.train_acc.append(value)
                
                ax4 = self.train_acc_figure.add_subplot(111)
                
                ax4.scatter(np.array(self.plot_epoch_acc_train),np.array(self.train_acc))
                #print(np.array(self.train_loss))
                #print(np.array(self.plot_epoch_train))

                try:
                    # shutil has no remove(); os.remove deletes the stale plot first
                    os.remove(self.data_path + '/' + 'train/' + name)
                except OSError:
                    pass
                self.train_acc_figure.savefig(self.data_path + '/' + 'train/' + name)
                    
            elif 'test' in name:
                self.plot_epoch_acc_test.append(epoch_in)
                self.test_acc.append(value)
                
                ax5 = self.test_acc_figure.add_subplot(111)
                
                ax5.scatter(np.array(self.plot_epoch_acc_test),np.array(self.test_acc))
                
                
                try:
                    # shutil has no remove(); os.remove deletes the stale plot first
                    os.remove(self.data_path + '/' + 'test/' + name)
                except OSError:
                    pass
                self.test_acc_figure.savefig(self.data_path + '/' + 'test/' + name)

            elif 'val' in name:
                self.plot_epoch_acc_val.append(epoch_in)
                self.val_acc.append(value)
                
                ax6 = self.val_acc_figure.add_subplot(111)
                
                ax6.scatter(np.array(self.plot_epoch_acc_val),np.array(self.val_acc))
                try:
                    # shutil has no remove(); os.remove deletes the stale plot first
                    os.remove(self.data_path + '/' + 'val/' + name)
                except OSError:
                    pass
                self.val_acc_figure.savefig(self.data_path + '/' + 'val/' + name)
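
The six branches of plot() differ only in which attribute triple they touch.
Since the names follow a fixed pattern (plot_epoch_<metric>_<split>,
<split>_<metric> and <split>_<metric>_figure), the method can be collapsed
with getattr; a sketch under that naming assumption:

import os
import numpy as np

def plot(self, value, name, epoch_in):
    # Derive the metric and data split from the plot name, mirroring the
    # original 'loss'/'acc' and 'train'/'test'/'val' checks.
    metric = 'loss' if 'loss' in name else 'acc' if 'acc' in name else None
    split = next((s for s in ('train', 'test', 'val') if s in name), None)
    if metric is None or split is None:
        return
    epochs = getattr(self, 'plot_epoch_%s_%s' % (metric, split))
    values = getattr(self, '%s_%s' % (split, metric))
    figure = getattr(self, '%s_%s_figure' % (split, metric))
    epochs.append(epoch_in)
    values.append(value)
    figure.add_subplot(111).scatter(np.array(epochs), np.array(values))
    out_path = os.path.join(self.data_path, split, name)
    if os.path.isfile(out_path):
        os.remove(out_path)  # drop the stale plot before saving the new one
    figure.savefig(out_path)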
def main(args):
    """Write one or more Slocum glider ascii dba files to a CF-compliant Profile
    NetCDF file.
    """
    
    status = 0
    
    # Set up the logger
    log_level = getattr(logging, args.loglevel.upper())
    log_format = '%(module)s:%(levelname)s:%(message)s [line %(lineno)d]'
    logging.basicConfig(format=log_format, level=log_level)
    
    if not os.path.isdir(args.config_dir):
        logging.error('Invalid configuration directory: {:s}'.format(args.config_dir))
        return 1
        
    if not args.output_dir:
        args.output_dir = os.path.realpath(os.curdir)
        
    if not os.path.isdir(args.output_dir):
        logging.error('Invalid output_dir: {:s}'.format(args.output_dir))
        return 1
        
    # Temp directory
    tmpdir = tempfile.mkdtemp()
    logging.debug('Temporary NetCDF directory: {:s}'.format(tmpdir))
    
    move_pairs = []

    # Initialize the NetCDF writer   
    ncw = ProfileNetCDFWriter(args.config_dir, profile_id=args.profilestart)
    ncw.clobber = True
    
    # Create the deployment/trajectory name
    try:
        trajectory_dt = parser.parse(ncw.attributes['deployment']['trajectory_date'])
    except ValueError as e:
        logging.error('Error parsing deployment trajectory_date: {:s} ({})'.format(
            ncw.attributes['deployment']['trajectory_date'], e))
        return 1
        
    #profile_count = args.profile_start
    
    for dba_file in args.dba_files:
        
        if not os.path.isfile(dba_file):
            logging.error('Invalid dba file specified: {:s}'.format(dba_file))
            continue
            
        logging.debug('Processing dba file: {:s}'.format(dba_file))
        
        # Parse the dba file
        dba = create_llat_dba_reader(dba_file)
        if len(dba['data']) == 0:
            logging.warning('Skipping empty dba file: {:s}'.format(dba_file))
            continue
            
        # Create the yo for profile indexing and find the profile minima/maxima
        try:
            profile_times = find_profiles(dba['data'])
        except ValueError as e:
            logging.error('{:s}: {}'.format(dba_file, e))
            continue
        if len(profile_times) == 0:
            logging.debug('No profiles indexed: {:s}'.format(dba_file))
            continue
            
        # All timestamps from the stream, as a NumPy array so they can be
        # compared elementwise against the profile bounds below
        ts = np.array([r['drv_timestamp'] for r in dba['data'] if 'drv_timestamp' in r])
        
        for profile_interval in profile_times:
            
            # Profile start time
            p0 = profile_interval[0]
            # Profile end time
            p1 = profile_interval[-1]
            # Find all rows in ts that are between p0 & p1 (inclusive)
            p_inds = np.flatnonzero(np.logical_and(ts >= p0, ts <= p1))
            profile_stream = dba['data'][p_inds[0]:p_inds[-1] + 1]
            
            if args.mode:
                file_mode = args.mode
            else:
                try:
                    if dba['dbd_meta']['filename_extension'] in DELAYED_MODE_EXTENSIONS:
                        file_mode = 'delayed'
                    elif dba['dbd_meta']['filename_extension'] in REALTIME_MODE_EXTENSIONS:
                        file_mode = 'rt'
                    else:
                        logging.warning('Skipping {:s}: Unknown mode filetype: {:s}'.format(dba_file, dba['dbd_meta']['filename_extension']))
                        continue
                except KeyError as e:
                    logging.error(e)
                    status = 1
                    continue
                    
            # Calculate and convert profile mean time to a datetime
            pro_mean_dt = datetime.datetime.utcfromtimestamp(np.mean(profile_interval))
            
            # Create the output NetCDF path
            pro_mean_ts = pro_mean_dt.strftime('%Y%m%dT%H%M%SZ')
            profile_filename = '{:s}-{:s}-{:s}'.format(ncw.attributes['deployment']['glider'], pro_mean_ts, file_mode)
            # Path to temporarily hold file while we create it
            fd, tmp_nc = tempfile.mkstemp(dir=tmpdir, suffix='.nc', prefix=os.path.basename(profile_filename))
            # mkstemp returns an open file descriptor; close it so the writer can reopen the path
            os.close(fd)
        
            out_nc_file = os.path.join(args.output_dir, '{:s}.nc'.format(profile_filename))
        
            try:
                ncw.init_nc(tmp_nc)
            except (GliderNetCDFWriterException, IOError) as e:
                logging.error(e)
                status = 1
                continue
            
            try:
                ncw.open_nc()
                # Add command line call used to create the file
                ncw.update_history('{:s} {:s}'.format(sys.argv[0], dba_file))
            except (GliderNetCDFWriterException, IOError) as e:
                logging.error(e)
                status = 1
                # shutil has no remove(); clean up the partially initialized temp
                # file (the destination file has not been created at this point)
                os.remove(tmp_nc)
                continue
            
            # Create the trajectory string and set the trajectory variable
            trajectory_string = '{:s}-{:s}-{:s}'.format(ncw.attributes['deployment']['glider'],
                trajectory_dt.strftime('%Y%m%dT%H%M'),
                file_mode)
            ncw.set_trajectory_id(trajectory_string)
            # Update the global title attribute
            ncw._nc.title = 'Slocum Glider Profile: {:s}-{:s}-{:s}'.format(
                ncw.attributes['deployment']['glider'], pro_mean_ts, file_mode)
            
            # Create the source file scalar variable
            ncw.set_source_file_var(dba['dbd_meta']['filename_label'], dba['dbd_meta'])
            
            # Add the derived sensor definitions
            dba_sensors = [s['sensor'] for s in dba['sensors']]
            for sensor in ('drv_timestamp', 'drv_m_gps_lat', 'drv_m_gps_lon',
                           'drv_pressure', 'drv_depth',
                           'drv_interp_m_gps_lat', 'drv_interp_m_gps_lon'):
                if sensor in dba_sensors and sensor in ncw.nc_sensor_defs:
                    ncw.update_sensor_def(sensor, dba['sensors'][dba_sensors.index(sensor)])
            
            # Write the data to the NetCDF file
            for r in profile_stream:
                ncw.stream_dict_insert(r)
            
            # Write scalar profile variable and permanently close the NetCDF file 
            nc_file = ncw.finish_nc()
            
            # Add the output NetCDF file name to the list of those to be moved to args.output_dir
            if nc_file:
                move_pairs.append([tmp_nc, out_nc_file])
            
    # Move all created NetCDF files to args.output_dir
    destination_nc_files = []
    for tmp_nc, out_nc in move_pairs:
        if os.path.isfile(out_nc):
            if not args.clobber:
                logging.info('Skipping existing NetCDF file: {:s}'.format(out_nc))
                continue
            else:
                logging.info('Clobbering existing NetCDF file: {:s}'.format(out_nc))
                try:
                    os.remove(out_nc)
                except OSError as e:
                    logging.error(e)
                    continue
                
        # Move the tmp_nc to out_nc
        try:
            shutil.move(tmp_nc, out_nc)
            destination_nc_files.append(out_nc)
        except (OSError, IOError, shutil.Error) as e:
            logging.error('Error moving {:s}: {}'.format(tmp_nc, e))
            status = 1
            
    # Delete the temporary directory once files have been moved
    try:
        logging.debug('Removing temporary directory: {:s}'.format(tmpdir))
        shutil.rmtree(tmpdir)
    except OSError as e:
        logging.error(e)
            
    # Print the list of files created
    for dest_nc_file in destination_nc_files:
        sys.stdout.write('{:s}\n'.format(dest_nc_file))
        
    return status
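
main() expects an argparse namespace carrying the attributes it reads above
(dba_files, config_dir, output_dir, mode, clobber, profilestart, loglevel). A
hypothetical parser matching those names (flags and defaults are illustrative,
not the original project's CLI):

import argparse
import sys

if __name__ == '__main__':
    arg_parser = argparse.ArgumentParser(description=main.__doc__)
    arg_parser.add_argument('config_dir', help='Deployment configuration directory')
    arg_parser.add_argument('dba_files', nargs='+', help='dba file(s) to process')
    arg_parser.add_argument('-o', '--output_dir', help='NetCDF destination directory')
    arg_parser.add_argument('-m', '--mode', choices=['rt', 'delayed'],
                            help='Force the file mode instead of inferring it')
    arg_parser.add_argument('-c', '--clobber', action='store_true',
                            help='Overwrite existing NetCDF files')
    arg_parser.add_argument('-p', '--profilestart', type=int, default=1,
                            help='Starting profile id')
    arg_parser.add_argument('-l', '--loglevel', default='info',
                            choices=['debug', 'info', 'warning', 'error', 'critical'])
    sys.exit(main(arg_parser.parse_args()))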
def main(skyFilePath, imageSize):
    if sc.sticky.has_key('honeybee_release'):
        
        try:
            if not sc.sticky['honeybee_release'].isCompatible(ghenv.Component): return -1
            if sc.sticky['honeybee_release'].isInputMissing(ghenv.Component): return -1
        except:
            warning = "You need a newer version of Honeybee to use this compoent." + \
            "Use updateHoneybee component to update userObjects.\n" + \
            "If you have already updated userObjects drag Honeybee_Honeybee component " + \
            "into canvas and try again."
            w = gh.GH_RuntimeMessageLevel.Warning
            ghenv.Component.AddRuntimeMessage(w, warning)
            return -1
        
        hb_radParDict = sc.sticky["honeybee_RADParameters"]().radParDict
        hb_folders = sc.sticky["honeybee_folders"]
        hb_RADPath = hb_folders["RADPath"]
        hb_RADLibPath = hb_folders["RADLibPath"]
        
    else:
        print "You should first let Honeybee to fly..."
        w = gh.GH_RuntimeMessageLevel.Warning
        ghenv.Component.AddRuntimeMessage(w, "You should first let Honeybee to fly...")
        return None, None
    
    fileNames = ["oconv.exe", "rpict.exe", "pcond.exe", "pflip.exe"]
    # check for files
    for fileName in fileNames:
        if not os.path.isfile(hb_RADPath + "\\" + fileName):
            msg = "Cannot find " + fileName + " at " + hb_RADPath + \
                  "Make sure that RADIANCE is installed on your system and try again."
            ghenv.Component.AddRuntimeMessage(gh.GH_RuntimeMessageLevel.Warning, msg)
            return None, None
    
    
    # modify the sky file if it was generated for gendaylit
    skyFilePath = checkSky(skyFilePath)
    
    projectName = skyFilePath.split(".")[0]
    radFile = skyFilePath.split(".")[0] + "_geometry.RAD"
    viewName = "skyView"
    
    # build the output image file paths
    skyImageFile = projectName + "_" + viewName + ".HDR"
    hSkyImageFile = projectName + "_" + viewName + "_h.HDR" # human eye
    flippedSkyImageFile = projectName + "_" + viewName + "_f.HDR" # flipped image so it looks the same as ladybug sky view
    
    # remove the old sky image if one exists (note: shutil has no remove();
    # os.remove is the correct call for deleting a single file)
    if os.path.isfile(skyImageFile):
        try:
            os.remove(skyImageFile)
        except OSError:
            print "Failed to remove the old sky image. The result might be wrong."
    
    # write the search-path string (this assumes RADIANCE is installed on the system)
    pathStr = "SET RAYPATH=.;" + hb_RADLibPath + "\nPATH=" + hb_RADPath + ";%PATH%\n"
    
    # compose the Radiance command lines
    oconvL = oconvLine(projectName, viewName, [skyFilePath])
    
    rpictL = rpictRenderSkyLine(imageSize, projectName, viewName)
    
    rtraceL, resultFile = rtraceLine(projectName, viewName)
    
    # run the study
    command = "@echo off\n" + \
              "echo Generating the sky view\n" + \
              pathStr + "\n" + \
              oconvL + "\n" + \
              rpictL + "\n" + \
              rtraceL + "\n" + \
              "pcond -h+ " + skyImageFile + " > " + hSkyImageFile + "\n" + \
              "pflip -h " + hSkyImageFile + " > " + flippedSkyImageFile + "\n" + \
              "exit"

    with open(projectName + ".bat", "w") as outf:
        outf.write(command)
        
    # run the batch file via the Windows command interpreter
    runCmdAndGetTheResults("/c " + projectName + ".bat")
    
    # read the result of the global horizontal irradiance
    with open(resultFile, "r") as inf:
        try:
            gHorIrr = inf.readlines()[0].strip()
        except IndexError:
            gHorIrr = "Failed to calculate!"
    
    return flippedSkyImageFile, gHorIrr
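
The generated batch file chains the Radiance tools: oconv builds the octree,
rpict renders the sky view to an HDR image, rtrace samples the global
horizontal irradiance, pcond -h+ conditions the HDR for human vision, and
pflip -h mirrors it to match the Ladybug sky-view orientation.
runCmdAndGetTheResults is a Honeybee helper; a minimal stand-in, assuming it
simply hands its argument string to the Windows command interpreter:

import subprocess

def runCmdAndGetTheResults(arg_string):
    # Hypothetical stand-in: runs "cmd /c <batch file>" and waits for it
    # to finish, mirroring how the component shells out to Radiance.
    return subprocess.call("cmd " + arg_string, shell=True)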