Example #1
 def cleanup(self):
     if self._finalizer.detach():
         try:
             _rmtree(self.name)
         # On Windows only one process can open a file at a time
         # (TODO: catch PermissionError instead of OSError on Python > 3.5)
         except OSError:
             _warnings.warn("Couldn't remove temp directory %s" % self.name)
Example #2
 def create_git_repository(self):
     if _os.system('command -v gbp') == 0:
         dsc_name = None
         _os.chdir('..')
         for file in _os.listdir(_os.getcwd()):
             if file.endswith('.dsc'):
                 dsc_name = file
                 break
         if dsc_name is None:
             print('No dsc file found! Aborting git repository creation')
             return
         if self.debian_name in _os.listdir(_os.getcwd()):
             _rmtree(self.debian_name)
         repo_create_status = _call('gbp import-dsc --pristine-tar %s' %
                                    dsc_name,
                                    shell=True)
         if repo_create_status == 0:
             _rmtree('{name}-{version}'.format(
                 name=self.debian_name, version=self.upstream_version))
         else:
             print('gbp import-dsc exited with status %s' %
                   repo_create_status)
     else:
         print(
             'gbp not found; please run: sudo apt-get install git-buildpackage')
Example #3
 def _cleanup(cls, name, warn_message):
     try:
         _rmtree(name)
     # On Windows only one process can open a file at a time
     # (TODO: catch PermissionError instead of OSError on Python > 3.5)
     except OSError:
         _warnings.warn("Couldn't remove temp directory %s" % name)
     _warnings.warn(warn_message, _ResourceWarning)
Example #4
    def stop(self, info_dict=None, full_stop=True):
        """
        Stop accelerator.

        Args:
            full_stop (bool): If True, send stop request to accelerator
                application. If False only clean up accelerator client
                environment.
            info_dict (dict or None): If a dict passed, this dict is updated
                with extra information from current operation.
        """
        self._stopped = True

        # Stops
        if full_stop:
            response = self._stop()
            if info_dict is not None and response:
                _utl.recursive_update(info_dict, response)

        # Clears temporary directory
        try:
            _rmtree(self._cache['tmp_dir'])
        except (OSError, KeyError):
            pass

        # Clears cache
        self._cache.clear()
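A hypothetical call site for the stop() method above; the accelerator object and its construction are assumed:

    info = {}
    accelerator.stop(info_dict=info, full_stop=True)  # stop and collect the response
    print(info)  # updated in place with extra information, if any was returned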
Example #5
 def cleanup(self):
     try:
         from tempfile import _exists
         if _exists(self.name):
             _rmtree(self.name)
     except ImportError:
         pass
Example #6
def _2dr(rnx2dr_path):
    '''Runs rnxEditGde.py in a subprocess to convert the specified rnx file to a
    dr file for GipsyX; a subprocess is used so that multiple instances can run at once.
    If the converted file is already present, nothing happens.
    We might want to dump and kill service tree files and stats.'''
    in_file_path = rnx2dr_path[0]
    out_file_path = rnx2dr_path[1]
    cache_path = rnx2dr_path[2]
    staDb_path = rnx2dr_path[3]

    out_dir = _os.path.dirname(out_file_path)

    cache_dir = _os.path.join(cache_path, _os.path.basename(
        out_file_path))  # e.g. /cache/anau2350.10d.dr.gz/
    if not _os.path.exists(cache_dir):
        _os.makedirs(cache_dir)
    _copy(src=in_file_path, dst=cache_dir)  #copy
    in_file_cache_path = _os.path.join(cache_dir,
                                       _os.path.basename(in_file_path))
    out_file_cache_path = _os.path.join(cache_dir,
                                        _os.path.basename(out_file_path))

    process = _Popen([
        'rnxEditGde.py', '-dataFile', in_file_cache_path, '-staDb', staDb_path,
        '-o', out_file_cache_path
    ],
                     cwd=cache_dir)
    process.wait()
    _copy(src=out_file_cache_path, dst=out_dir)  # copy result to destination
    # clear the per-file cache folder in RAM
    _rmtree(cache_dir)
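A sketch of how _2dr above is presumably driven in parallel, as its docstring suggests; all paths here are made-up placeholders, and each job is a (rnx input, dr output, cache root, staDb) tuple:

    from multiprocessing import Pool

    jobs = [
        ('/data/rnx/anau2350.10d', '/data/dr/anau2350.10d.dr.gz',
         '/run/user/1017/cache', '/data/staDb/all.staDb'),
        # ... one tuple per file to convert
    ]
    with Pool(processes=4) as p:
        p.map(_2dr, jobs)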
Example #7
 def delete(self):
     utils.debug(1, "deleting %s" % self.release)
     for directory in [self.config_path,
                       self.cache_path, self.data_path]:
         if _os.path.isdir(directory):
             utils.debug(2, "deleting dir %s" % directory)
             _rmtree(directory)
Example #8
 def clear(self, pool: str):
     """Clear entire pool
     """
     try:
         _rmtree(_path.join(self._path, self._server_name, pool))
     except FileNotFoundError:
         pass
Example #9
 def cleanup(self):
     try:
         from tempfile import _exists
         if _exists(self.name):
             _rmtree(self.name)
     except ImportError:
         pass
Example #10
 def _cleanup(cls, name, warn_message, missing_ok_on_cleanup):
     try:
         _rmtree(name)
     # On Windows only one process can open a file at a time
     except OSError:
         if not missing_ok_on_cleanup:
             _warnings.warn("Couldn't remove temp directory %s" % name)
Example #11
 def delete(self):
     utils.debug(1, "deleting %s" % self.release)
     for directory in [self.config_path,
                       self.cache_path, self.data_path]:
         if _os.path.isdir(directory):
             utils.debug(2, "deleting dir %s" % directory)
             _rmtree(directory)
Example #12
def gd2e(gd2e_table, project_name, num_cores, tqdm, cache_path):
    '''We should ignore stations_list as we already selected stations within merge_table'''
    if gd2e_table[gd2e_table['file_exists'] == 0].shape[0] == 0:
        print('{} already processed'.format(project_name))
    else:
        # converting to records so multiprocessing works properly; it doesn't work with a pandas DataFrame
        gd2e_table = gd2e_table[gd2e_table['file_exists'] == 0].to_records()
        num_cores = min(num_cores, gd2e_table.shape[0])
        print(
            'Processing {} |  # files left: {} | Adj. # of threads: {}'.format(
                project_name, gd2e_table.shape[0], num_cores))

        with _Pool(processes=num_cores) as p:
            if tqdm:
                list(
                    _tqdm.tqdm_notebook(p.imap(_gd2e, gd2e_table),
                                        total=gd2e_table.shape[0]))
            else:
                p.map(_gd2e, gd2e_table)  #investigate why list is needed.

    # cleaning after execution
    print('cleaning IONEX from RAM as exiting')
    IONEX_cached_path = _os.path.join(cache_path, 'IONEX_merged')
    _rmtree(IONEX_cached_path)
Example #13
 def cleanup(self):
     if self._finalizer.detach():
         try:
             _rmtree(self.name)
         # On Windows only one process can open a file at a time
         except OSError:
             if not self._missing_ok_on_cleanup:
                 _warnings.warn("Couldn't remove temp directory %s" % self.name)
Example #14
 def clean(self):
     utils.debug(1, "cleaning directory")
     for filename in _os.listdir('.'):
         if filename != 'debian':
             if _os.path.isdir(filename):
                 _rmtree(filename)
             else:
                 _os.remove(filename)
Example #15
 def clean(self):
     utils.debug(1, "cleaning directory")
     for filename in _os.listdir('.'):
         if filename != 'debian':
             if _os.path.isdir(filename):
                 _rmtree(filename)
             else:
                 _os.remove(filename)
Example #16
def remove_dir(path, parent_folder='results/'):
    """

    :param path:
    :param parent_folder:
    :return:
    """
    if path.startswith(parent_folder) and dir_exists(path):
        _rmtree(path)
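Hypothetical calls illustrating the guard above (dir_exists is assumed to come from the same module): only paths under parent_folder are ever deleted.

    remove_dir('results/run_01')                   # removed, if it exists
    remove_dir('/etc', parent_folder='results/')   # ignored: outside the guard prefix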
Example #17
 def delete(self, recursive=False):
     if self.is_file():
         if isinstance(self._os_level, int):
             _close(self._os_level)
         _remove(str(self))
     elif self.is_dir():
         if recursive:
             _rmtree(str(self))
         else:
             self.rmdir()
Example #18
def _gd2e(gd2e_set):

    out_dir = _os.path.dirname(gd2e_set['output'])
    if not _os.path.exists(out_dir):
        _os.makedirs(out_dir)  # creating out dir

    if not _os.path.exists(gd2e_set['cache']):
        _os.makedirs(gd2e_set['cache'])  # creating cache dir
    runAgain = 'gd2e.py -drEditedFile {0} -recList {1} -runType PPP -GNSSproducts {2} -treeSequenceDir {3} -tdpInput {4} -staDb {5} -selectGnss \'{6}\' -gdCov'.format(
        gd2e_set['filename'], gd2e_set['station_name'],
        gd2e_set['gnss_products_dir'], gd2e_set['tree_path'], gd2e_set['tdp'],
        gd2e_set['staDb_path'], gd2e_set['selectGnss'])
    if not gd2e_set['tqdm']:
        print(runAgain)
    process = _Popen(
        [
            'gd2e.py',
            '-drEditedFile',
            gd2e_set['filename'],
            '-recList',
            gd2e_set['station_name'],
            '-runType',
            'PPP',
            '-GNSSproducts',
            gd2e_set['gnss_products_dir'],
            '-treeSequenceDir',
            gd2e_set['tree_path'],
            '-tdpInput',
            gd2e_set['tdp'],
            '-staDb',
            gd2e_set['staDb_path'],
            '-selectGnss',
            gd2e_set['selectGnss']
        ],
        cwd=gd2e_set['cache'],
        stdout=_PIPE)
    # Do we really need a -gdCov option?
    out, err = process.communicate()

    solutions = _get_tdps_pn(gd2e_set['cache'])
    residuals = _get_residuals(gd2e_set['cache'])
    debug_tree = _get_debug_tree(gd2e_set['cache'])

    rtgx_log = _get_rtgx_log(gd2e_set['cache'])
    rtgx_err = _get_rtgx_err(gd2e_set['cache'])
    summary = _get_summary(gd2e_set['cache'])
    _rmtree(path=gd2e_set['cache'])  #clearing cache after run

    _dump_write(data=[
        solutions, residuals, debug_tree, runAgain, rtgx_log, rtgx_err, out,
        err, summary
    ],
                filename=gd2e_set['output'],
                cname='zstd')
Example #19
def change_location(src, tgt, move=False, verbose=True):
    '''
    Copies/moves/deletes locations

    :param src:
        Source location where to copy from
    :param tgt:
        Target location where to copy to

        * To backup `src`, set `tgt` explicitly to ``True``. \
        `tgt` will be set to `src` + '_backup_' + \
        :func:`util.system.get_timestamp` then

    :param move:
        Deletes original location after copy (a.k.a. move)

        * To delete `src` , set `tgt` explicitly to ``False`` \
        and `move` to ``True`` (be careful!!1!)

    :param verbose:
        Show warnings
    '''

    from photon.util.system import shell_notify

    if _path.exists(src):
        if tgt:
            if _path.isfile(src):
                _copy2(src, search_location(
                    tgt, create_in=_path.dirname(tgt), verbose=verbose)
                )
            else:
                for l in _listdir(src):
                    change_location(
                        _path.abspath(_path.join(src, l)),
                        _path.abspath(_path.join(tgt, l))
                    )
        if move:
            if _path.isdir(src) and not _path.islink(src):
                _rmtree(src)
            else:
                _remove(src)
        if verbose:
            shell_notify(
                '%s location' % (
                    'deleted'
                    if not tgt and move else
                    'moved'
                    if move else
                    'copied'
                ),
                more=dict(src=src, tgt=tgt)
            )
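A few hypothetical call sites for change_location, following its docstring (the paths are made up):

    change_location('/tmp/a', '/tmp/b')              # copy /tmp/a to /tmp/b
    change_location('/tmp/a', '/tmp/b', move=True)   # move instead of copy
    change_location('/tmp/a', True)                  # back up /tmp/a with a timestamp
    change_location('/tmp/a', False, move=True)      # delete /tmp/a (be careful!)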
Example #20
 def run(self):
     try:
         print("removing tosdb/_tosdb.py ...")
         _remove(_OUTPUT_PATH)
     except OSError:
         pass
     try:
         print("removing ./build ...")
         _rmtree(_path_join(_OUR_PATH, 'build'))
     except OSError:
         pass
     super().run()
Example #21
 def run(self):
     try:
         print("removing tosdb/_tosdb.py ...")
         _remove(_OUTPUT_PATH)
     except OSError:
         pass
     try:
         print("removing ./build ...")
         _rmtree(_path_join(_OUR_PATH, 'build'))
     except OSError:
         pass
     super().run()
Example #22
def igs2jpl(begin, end, products_type, products_dir, tqdm, num_cores=None,
            run_dir='/run/user/1017/'):
    # products_dir = '/mnt/data/bogdanm/Products/CODE/source/MGEX/'
    sets = _gen_sets(begin, end, products_type, products_dir, run_dir=run_dir)
    sets = sets.to_records()

    with _Pool(num_cores) as p:
        if tqdm:
            list(_tqdm.tqdm_notebook(p.imap(_sp3ToPosTdp, sets),
                                     total=sets.shape[0]))
        else:
            p.map(_sp3ToPosTdp, sets)

    tmp_dir = _os.path.join(run_dir, 'tmp_igs2jpl')  # tmp directory the worker processes worked in
    try:
        _rmtree(tmp_dir)  # clearing the tmp directory after processing
    except OSError:
        print('Could not remove tmp')
Example #23
def ce2cm(init_ce_path,num_cores = 10,tqdm=True):
    cache='/run/user/1017/'
    cache_path = _os.path.join(cache,'ce2cm_cache')
    if not _os.path.exists(cache_path):
        _os.makedirs(cache_path)
    
    init_ce_path = _os.path.abspath(init_ce_path) 
    cm_dirname = _os.path.basename(init_ce_path)+'_cm'
    init_cm_path = _os.path.join(_os.path.dirname(init_ce_path),cm_dirname)
    if _os.path.exists(init_cm_path):
        print('CM folder exists. Removing.')
        _rmtree(init_cm_path)
    print('Copying {} to {}'.format(_os.path.basename(init_ce_path),cm_dirname))
    
    
#     dst = _copytree(src=init_ce_path,dst=init_cm_path)
    print('Finished copying to {}'.format(init_cm_path))
#     pos_files = _glob.glob(init_cm_path+'/*/*pos.gz')
#     print('Found {} pos files. Running'.format(len(pos_files)))
    
    #files to make symlinks
    product_files = _pd.Series(_glob.glob(init_ce_path+'/*/*.gz'))
    product_file_names_df = product_files.str.split('/',expand=True).iloc[:,-1].str.split('.',expand=True)
    symlink_files = product_files[product_file_names_df[1] != 'pos'].to_list()
    # files to copy (.pos)
    pos_files = product_files[product_file_names_df[1] == 'pos'].to_list()
    
    basedir = _os.path.abspath(_os.path.join(symlink_files[0],_os.pardir,_os.pardir,_os.pardir))
    files_symlinks = _pd.Series(symlink_files).str.split('/',expand=True).iloc[:,-3:]
    symlink_src = (basedir + '/' + files_symlinks.iloc[:,0]+'/'+files_symlinks.iloc[:,1]+'/'+files_symlinks.iloc[:,2])
    symlink_dst = (basedir + '/' + files_symlinks.iloc[:,0]+'_cm/'+files_symlinks.iloc[:,1]+'/'+files_symlinks.iloc[:,2])

    year_dirs = basedir + '/' + files_symlinks.iloc[:,0][0]+'_cm/' + files_symlinks.iloc[:,1].unique()
    for dir_path in year_dirs:
        if not _os.path.exists(dir_path):
            _os.makedirs(dir_path)
    print('creating symlinks for products files (except for *.pos.gz)')
    for i in range(len(symlink_src)):
        _os.symlink(src=_os.path.relpath(path=symlink_src[i],start=_os.path.dirname(symlink_dst[i])),dst=symlink_dst[i])
    
    files_pos = _pd.Series(pos_files).str.split('/',expand=True).iloc[:,-3:]
    pos_src = (basedir + '/' + files_pos.iloc[:,0]+'/'+files_pos.iloc[:,1]+'/'+files_pos.iloc[:,2])
    pos_dst = (basedir + '/' + files_pos.iloc[:,0]+'_cm/'+files_pos.iloc[:,1]+'/'+files_pos.iloc[:,2])
    cache_path_series = _np.ndarray(pos_src.shape,dtype=object)
    cache_path_series.fill(cache_path)
    pos_path_series = _pd.concat([pos_src,pos_dst,_pd.Series(cache_path_series)],axis=1).values
#     return pos_path_series
    with _Pool(processes=num_cores) as p:
        if tqdm:
            list(_tqdm.tqdm_notebook(p.imap(_ce2cm_single_thread, pos_path_series),
                                     total=len(pos_path_series)))
        else:
            p.map(_ce2cm_single_thread, pos_path_series)
    _rmtree(path=cache_path)
Example #24
    def initiate_build(self, saved_path):
        """
        Try building deb package after creating required files using start().
        'uscan', 'uupdate' and 'dpkg-buildpackage' are run if debian/watch is OK.
        """
        uscan_info = self.test_uscan()
        if uscan_info[0] == 0:
            self.run_uscan()
            self.run_uupdate()

            new_dir = '%s-%s' % (self.debian_name, self.upstream_version)
            utils.change_dir('../%s' % new_dir)
            # copy over non-duplicated changelog
            _os.rename('../%s/debian/changelog' % self.debian_name,
                       'debian/changelog')
            _rmtree('../%s' % self.debian_name)
            self.run_buildpackage()
            self.edit_changelog()
            self.create_git_repository()

            debian_path = "%s/%s/debian" % (self.name, new_dir)
            print('\nRemember, your new source directory is %s/%s' %
                  (self.name, new_dir))

        else:
            debian_path = "%s/%s/debian" % (self.name, self.debian_name)

        print("""
This is not a crystal ball, so please take a look at auto-generated files.\n
You may want fix first these issues:\n""")

        utils.change_dir(saved_path)
        _call('/bin/grep --color=auto FIX_ME -r %s/*' % debian_path,
              shell=True)
        _call('/bin/grep --color=auto FIX_ME -r -H %s/*_itp.mail' % self.name,
              shell=True)

        if uscan_info[0] != 0:
            print(
                "\nUse uscan to get orig source files. Fix debian/watch and then run\
                    \n$ uscan --download-current-version\n")

        if self.upstream_watch:
            print("""
*** Warning ***\nUsing npmregistry to download npm dist tarballs, because upstream
git repo is missing tags. Its better to ask upstream to tag their releases
instead of using npm dist tarballs as dist tarballs may contain pre built files
and may not include tests.\n""")
Example #25
    def _GIM_gen_header(self, in_set, data_GIM_final):
        file_paths = in_set[:, 3]
        cache_paths = in_set[:, 4]

        '''
        No AUX section in the header is needed, so it is ignored.
        The line with the # of MAPS is modified.
        '''
        num_maps = '{:6d}{:<54s}{}{:<3s}\n'.format(len(data_GIM_final), ' ', '# OF MAPS IN FILE', ' ')
        regex_first_epoch = (rb"EPOCH\sOF\sFIRST\sMAP\s*\n")


        regex_num_maps_b = (rb"INTERVAL\s{12}\n")
        regex_num_maps_e = (rb"#\sOF\sMAPS\sIN\sFILE\s{3}\n")


        regex_aux_start =   (rb"DIFFERENTIAL\sCODE\sBIASES\s+START\sOF\sAUX\sDATA")
        regex_aux_end   =   (rb"DIFFERENTIAL CODE BIASES\s+END OF AUX DATA\s+\n")

        regex_end_header =  (rb"END\sOF\sHEADER\s+\n")

        # extracting the header part from the first file
        first_file_path = prep_ionex_file(file_paths[0], cache_paths[0])
        
        with open(first_file_path, 'rb') as ionex_first:
            ionex_data_first = ionex_first.read()
            match_first_epoch = _re.search(regex_first_epoch, ionex_data_first)
        _rmtree(_os.path.dirname(first_file_path))
        # extracting the header part from the last file, ignoring aux data
        last_file_path = prep_ionex_file(file_paths[-1], cache_paths[-1])
        with open(last_file_path, 'rb') as ionex_last:
            ionex_data_last = ionex_last.read()
            # EPOCH OF LAST MAP is taken from the last file, as headers can differ in line count
            match_last_epoch = _re.search(regex_first_epoch, ionex_data_last)
            
            match_num_maps_b = _re.search(regex_num_maps_b, ionex_data_last)
            match_num_maps_e = _re.search(regex_num_maps_e, ionex_data_last)

            match_aux_begin  = _re.search(regex_aux_start, ionex_data_last)
            match_aux_end    = _re.search(regex_aux_end, ionex_data_last)
            match_end_header = _re.search(regex_end_header, ionex_data_last)
        _rmtree(_os.path.dirname(last_file_path)) 
        return ((ionex_data_first[:match_first_epoch.end()]\
                + ionex_data_last[match_last_epoch.end():match_num_maps_b.end()]).decode('ascii')\
                + num_maps\
                + (ionex_data_last[match_num_maps_e.end():match_aux_begin.start()]\
                + ionex_data_last[match_aux_end.end():match_end_header.end()]).decode('ascii'))
Example #26
def _gen_orbclk(input_set):
    startTime = input_set[0]
    endTime = input_set[1]
    GNSSproducts = input_set[2]
    targetDir = input_set[3]
    h24 = input_set[4]
    makeShadow = input_set[5]
    products_day = input_set[6]
    run_dir = input_set[7]
    # recreate the run directory, clearing leftovers from any failed previous run
    if _os.path.exists(run_dir):
        _rmtree(run_dir)
    _os.makedirs(run_dir)
    
    
    args = ['/home/bogdanm/Desktop/GipsyX_Wrapper/fetchGNSSproducts_J2000.py',
                      '-startTime',str(startTime),
                      '-endTime', str(endTime),
                      '-GNSSproducts', GNSSproducts,
                      '-targetDir', run_dir]
    if h24:
        args.append('-hr24')
    if makeShadow:
        args.append('-makeShadowFile')


    process = _Popen(args,stdout=_PIPE)
    out, err = process.communicate()

    #rename
    files_ori = _glob.glob('{}/GNSS.*'.format(run_dir))

    try:
        files_ori_df = _pd.Series(files_ori).str.split('.', expand=True)
    except Exception:
        print(str(products_day), 'problem found')
        raise  # files_ori_df would be undefined below, so re-raise

    files_renamed = files_ori_df[0].str.slice(0, -4) + str(products_day) + '.' + files_ori_df[1]
    for i in range(files_renamed.shape[0]):
        _os.rename(files_ori[i],files_renamed[i])
    #gzip
    _Popen(['gzip *'],cwd=run_dir,shell=True).communicate()
    #move one level up
    if not _os.path.exists(targetDir):
        _os.makedirs(targetDir)
    for i in range(files_renamed.shape[0]):
        _move(src=files_renamed[i]+'.gz',dst=targetDir)
    _rmtree(run_dir)
    return out,err
Example #27
def clear(dataset, folderpath=default_folder):
    """Removes a dataset folder from the specified path
    Args:
        dataset (str): the dataset id to delete
        folderpath (str, optional): Defaults to $HOME/handshape_datasets
    Raises:
        FileNotFoundError: the dataset doesn't exist, at least not in folderpath
    """
    logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
    try:
        logging.info(f"Removing the dataset {dataset}")
        # removes the directory recursively
        _rmtree(folderpath / dataset)
        logging.info("Success \(•◡•)/")
    except FileNotFoundError:
        warning("""The dataset {} doesn't exist (ಥ﹏ಥ). The options available
                are: \n {}""".format(dataset, "\n".join(listdir(folderpath))))
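A hypothetical call matching the docstring above; the dataset id is made up:

    clear('some_dataset')  # removes <folderpath>/some_dataset, warns if it does not exist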
Example #28
def cache_ionex_files(cache_path, IONEX_products_dir, ionex_type, years_list):
    # Copying IONEX maps to cache before execution
    products_dir = _os.path.join(IONEX_products_dir, _os.pardir)
    ionex_files = _pd.Series(
        sorted(_glob.glob(products_dir + '/IONEX_merged/' + ionex_type + '*')))
    ionex_basenames = ionex_files.str.split('/', expand=True).iloc[:, -1]
    ionex_years = ionex_basenames.str.slice(-4).astype(int)
    # selecting only those ionex files that are needed according to the years list
    ionex_files = ionex_files[ionex_years.isin(years_list)]

    IONEX_cached_path = _os.path.join(cache_path, 'IONEX_merged')
    if _os.path.exists(IONEX_cached_path):  # remove leftovers from a previous run
        _rmtree(IONEX_cached_path)
    _os.makedirs(IONEX_cached_path)
    for ionex_file in ionex_files:
        _copy(src=ionex_file, dst=IONEX_cached_path)
Example #29
 def download(self):
     utils.debug(1, "downloading %s via npm" % self.name)
     info = _getstatusoutput('npm install "%s"' % self.name)
     if info[0] != 0:
         exception = "Error downloading package %s\n" % self.name
         exception += info[1]
         raise ValueError(exception)
     # move dir from npm root
     root = _getstatusoutput('npm root')[1].strip('\n')
     _os.rename(_os.path.join(root, self.name), self.name)
     try:
         _os.rmdir(root)  # remove only if empty
     except OSError:
         pass
     # remove any dependency downloaded via npm
     if _os.path.isdir("%s/node_modules" % self.name):
         _rmtree("%s/node_modules" % self.name)
     if self.name != self.debian_name:
         utils.debug(2, "renaming %s to %s" % (self.name, self.debian_name))
         _os.rename(self.name, self.debian_name)
Example #30
 def download(self):
     utils.debug(1, "downloading %s via npm" % self.name)
     info = _getstatusoutput('npm install "%s"' % self.name)
     if info[0] != 0:
         exception = "Error downloading package %s\n" % self.name
         exception += info[1]
         raise ValueError(exception)
     # move dir from npm root
     root = _getstatusoutput('npm root')[1].strip('\n')
     _os.rename(_os.path.join(root, self.name), self.name)
     try:
         _os.rmdir(root)  # remove only if empty
     except OSError:
         pass
     # remove any dependency downloaded via npm
     if _os.path.isdir("%s/node_modules" % self.name):
         _rmtree("%s/node_modules" % self.name)
     if self.name != self.debian_name:
         utils.debug(2, "renaming %s to %s" % (self.name, self.debian_name))
         _os.rename(self.name, self.debian_name)
Example #31
def update_solutions_dict(examples_dir: _Path,
                          dir: str,
                          ex_glob_dict: dict,
                          tag: str = ''):
    """ """
    if is_example_name(dir):  # room for five-symbol name - ex22g
        example_dir = examples_dir / dir
        ref_sol_dir = examples_dir / "solutions" / tag / dir
        if example_dir.exists() and ref_sol_dir.exists():
            _rmtree(ref_sol_dir)
            _logging.info(
                f"removing {ref_sol_dir} and its content"
            )  # if actual solution exists -> clean respective reference solution before copying
        l = len(example_dir.as_posix())
        for expr in ex_glob_dict[dir]:
            expr_counter = 0
            for path in example_dir.glob(expr):
                dst = ref_sol_dir / (path.as_posix()[l + 1:])
                if expr_counter == 0:
                    dst.parent.mkdir(parents=True, exist_ok=True)
                _logging.info(f"Copying {path} -> {_copy(src=path,dst=dst)}")
Example #32
def GIM_data_extraction(in_set):
    # Extracting ionex data from file
    file_path = in_set[0]
    cache_path = in_set[1]

    file = prep_ionex_file(file_path, cache_path)

    with open(file, 'rb') as ionex:
        ionex_data = ionex.read()
        GIM_match_begin = []
        GIM_match_end = []
        
        matches_begin = _re.finditer(re_begin, ionex_data)
        matches_end   = _re.finditer(re_end, ionex_data)

        for matchNum, match in enumerate(matches_begin):
            GIM_match_begin.append(match.end())

        for matchNum, match in enumerate(matches_end):
            GIM_match_end.append(match.start()-60) #60 is the number of symbols from the end of the map to marker
            
        frame_n = len(GIM_match_begin)//2
        GIM_boundaries_b = _np.asarray(GIM_match_begin).reshape((2,frame_n))
        GIM_boundaries_e = _np.asarray(GIM_match_end).reshape((2,frame_n))
        
        TEC = _np.ndarray((frame_n,), dtype=object)
        RMS = _np.ndarray((frame_n,), dtype=object)  # separate arrays; chained assignment would alias them

        for i in range(frame_n):
            TEC[i] = ionex_data[GIM_boundaries_b[0,i]:GIM_boundaries_e[0,i]].decode('ascii')
            RMS[i] = ionex_data[GIM_boundaries_b[1,i]:GIM_boundaries_e[1,i]].decode('ascii')
            
        Datetime = _pd.to_datetime(_pd.Series(TEC).str.slice(2,36),format= '%Y %m %d %H %M %S')
    _rmtree(_os.path.dirname(file)) #clean tmp
    return _pd.concat((Datetime,_pd.Series(TEC),_pd.Series(RMS)),axis=1)
Example #33
def jpl2merged_orbclk(begin,end,GNSSproducts_dir,num_cores=None,h24_bool=True,makeShadow_bool=True,tqdm=True,run_dir = '/run/user/1017/'):
    '''GipsyX can only merge daily products so effectively we end up having 3 days merged (72 hours, centre 24 +- 24) and not 30 hours'''
    begin64 = _np.datetime64(begin).astype('datetime64[D]')
    end64 = _np.datetime64(end).astype('datetime64[D]')
    products_day = _np.arange(begin64,end64)
    products_begin = ((products_day - _np.timedelta64(3,'h')) - _J2000origin).astype(int)
    products_end = (products_day + _np.timedelta64(27,'h') - _J2000origin).astype(int)
    #rewriting 1st and last values. These are 27 hour products precisely according to boundaries specified
    products_begin[0] = (products_day[0] - _J2000origin).astype(int)
    products_end[-1] = (products_day[-1] + _np.timedelta64(24,'h') - _np.timedelta64(5,'m')- _J2000origin).astype(int)

    year_str = _pd.Series(products_day).dt.year.astype(str)
    
    output_merged_dir = _os.path.abspath(GNSSproducts_dir)
    target_path = _os.path.abspath(_os.path.join(output_merged_dir,_os.pardir,_os.pardir,'init',_os.path.basename(output_merged_dir)))
    if _os.path.exists(target_path):
        _rmtree(target_path)
        
    target_dir = target_path + '/' + year_str
    for dir in target_dir.unique():  # creating folder structure before conversion
        _os.makedirs(dir)
    
    repository = _np.ndarray((products_day.shape),object)
    h24 = _np.ndarray((products_day.shape),bool)
    makeShadow = _np.ndarray((products_day.shape),bool)
    
    tmp_merge_path = _os.path.abspath(run_dir)+ '/tmp_merge/'
    run = tmp_merge_path +_pd.Series(products_day).astype(str)
    # Need to clear the run dir before a new execution, just in case
    if _os.path.exists(tmp_merge_path):
        _rmtree(tmp_merge_path)
  
    repository.fill(GNSSproducts_dir)
    h24.fill(h24_bool)
    makeShadow.fill(makeShadow_bool)
    
    input_sets = _np.column_stack([products_begin,products_end,repository,target_dir,h24,makeShadow,products_day,run])

    with _Pool(processes=num_cores) as p:
        if tqdm:
            list(_tqdm.tqdm_notebook(p.imap(_gen_orbclk, input_sets),
                                     total=input_sets.shape[0]))
        else:
            p.map(_gen_orbclk, input_sets)
    _rmtree(tmp_merge_path)  # cleaning
Example #34
 def _cleanup(cls, name, warn_message):
     _rmtree(name)
     _warnings.warn(warn_message, _ResourceWarning)
Example #35
def _remove_tempdir():
    global tempdir
    try:
        _rmtree(tempdir)
    except Exception:  # tempdir may be unset or already gone; ignore
        pass
Example #36
 def cleanup(self):
     if self._finalizer.detach():
         _rmtree(self.name)
Example #37
 def cleanup(self):
     if _exists(self.name):
         _rmtree(self.name)
Example #38
def cleanup(dirname):
    """
    Delete the temp directory so that it does not overlap with other builds.
    """
    if _exists(dirname):
        _rmtree(dirname)
Example #39
def rmtree(path):
    _rmtree(path, onerror=_remove_readonly)
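The onerror handler itself is not shown above; a common sketch of such a handler, assuming the usual Windows read-only workaround, looks like this:

    import os
    import stat

    def _remove_readonly(func, path, excinfo):
        # Clear the read-only bit and retry the failed operation
        # (typically os.unlink or os.rmdir on Windows).
        os.chmod(path, stat.S_IWRITE)
        func(path)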
Example #40
 def cleanup(self):
     if self._finalizer.detach():
         _rmtree(self.name)
Example #41
def rmtree(dir):
    if path.isdir(dir):
        _rmtree(dir)
Example #42
def _remove_tempdir():
    global tempdir
    try:
        _rmtree(tempdir)
    except Exception:  # tempdir may be unset or already gone; ignore
        pass
Example #43
 def shutil_rmtree(path, ignore_errors=False, onerror=None):
     # shutil._rmtree is assumed to hold the saved original shutil.rmtree
     # (a common monkeypatch pattern adding long-path support on Windows)
     return shutil._rmtree(longpathify(uni(path)), ignore_errors, onerror)
Example #44
 def _cleanup(cls, name, warn_message):
     _rmtree(name)
     _warnings.warn(warn_message, _ResourceWarning)
Example #45
def rmtree(dir):
  if path.isdir(dir):
    _rmtree(dir)
Example #46
def rmtree(dir):
  if path.isdir(dir):
    _rmtree(dir)