Code example #1
File: locking.py Project: dune-mirrors/dune-common
    def __init__(self, path, flags, *args, **kwargs):
        _Lock.__init__(self,
                       path,
                       *args,
                       flags=flags,
                       timeout=None,
                       **kwargs)
Code example #2
File: __init__.py Project: ytfqj/deriva-py
def lock_file(file, mode, exclusive=True):
    if PORTALOCKER:
        if parse_version(PORTALOCKER.version) > parse_version("0.6.1"):
            return Lock(file, mode, timeout=60, flags=LOCK_EX if exclusive else LOCK_SH)
        else:
            return Lock(file, mode, timeout=60, flags=LOCK_EX if exclusive else LOCK_SH, truncate=None)
    else:
        return io.open(file, mode)
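A minimal usage sketch of the helper above (hedged: the file name and row are
hypothetical). Whichever branch runs, the returned object works as a context
manager that yields a file handle:

# Hypothetical call site for lock_file(); "report.csv" is an example path.
with lock_file("report.csv", "a", exclusive=True) as f:
    f.write("row,1\n")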
Code example #3
    def __append_features(self):
        tool.write_to_log("Appending features to %s" % self.profile['SQL_TBL'])

        if self.val_sql_col('Edit_DT'):
            query = '''
                UPDATE {0}
                    SET
                        Edit_DT = GETDATE()
                WHERE
                    Source_File = 'Updated Records {1}'
            '''.format(self.profile['SQL_TBL'], self.batch)
            sql.sql_execute(query_str=query, execute=True)
        elif self.val_sql_col('Edit_Date'):
            query = '''
                UPDATE {0}
                    SET
                        Edit_Date = GETDATE()
                WHERE
                    Source_File = 'Updated Records {1}'
            '''.format(self.profile['SQL_TBL'], self.batch)
            sql.sql_execute(query_str=query, execute=True)

        path = join(sql_dir, self.profile['Acc_TBL'])

        if exists(path):
            for file in list(Path(path).glob('*.sql')):
                with Lock(str(file), 'r') as f:
                    query = f.read()

                sql.sql_execute(query_str=query, execute=True)
Code example #4
        def function(*args, **kwargs):
            # atomic operation
            with Lock(".backup/jobs.json", access) as f:
                if reads:
                    # read, or create an empty one
                    try:
                        jobs = json.load(f)
                    except ValueError:  # file empty or not valid JSON
                        jobs = []
                else:
                    jobs = []

                # pass the jobs and an arg and delegate
                kwargs.update({"jobs": jobs})
                ret = fn(*args, **kwargs)

                # check whether to write
                if writes:
                    # write out and truncate to new size
                    f.seek(0)
                    json.dump(jobs, f, indent=2)
                    f.truncate()

                    # update git, for backup
                    repo = get_repo()
                    repo.index.add(["jobs.json"])
                    repo.index.commit("Changed due to '{}' from {}".format(
                        fn.__name__, request.remote_addr))

                # propagate back result
                return ret
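The snippet above is only the inner closure of a decorator; the enclosing
factory is elided. A plausible reconstruction under stated assumptions (the
names jobs_file, reads, writes, and access are guesses inferred from the
closure, not the original project's code):

import functools

def jobs_file(reads=True, writes=False):
    # Hypothetical factory for the closure above; 'access' feeds the Lock()
    # call ("r+" lets the same handle read, rewrite, and truncate).
    access = "r+" if writes else "r"
    def decorator(fn):
        @functools.wraps(fn)
        def function(*args, **kwargs):
            ...  # body as shown in the example above
        return function
    return decorator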
Code example #5
    def _save_current_data(self):
        """Save examples to data file."""
        assert "timestamp" not in self._current_example, "multiple _save_current_data calls on _current_example = {_coconut_format_0}".format(_coconut_format_0=(self._current_example))
        with Lock(self.data_file, "rb+", timeout=lock_timeout) as df:
            # we create the timestamp while we have the lock to ensure its uniqueness
            self._current_example["timestamp"] = time.time()
            self._add_examples([self._current_example])
            self._save_to(df)
Code example #6
    def __task_execute(self, task_profile):
        try:
            shell_line = None
            shell_comm = task_profile['Task_SComm']
            ext = splitext(task_profile['Task'])[1].lower()

            if shell_comm:
                shell_comm = ". '%s'" % shell_comm
            elif ext == '.py':
                shell_comm = 'python'
            elif ext == '.ps1':
                shell_comm = '.'
            elif ext == '.vbs':
                shell_comm = 'cscript'

            if task_profile['Params']:
                params = task_profile['Params']
            else:
                params = ''

            if ext == '.sql':
                with Lock(task_profile['Task'], 'r') as f:
                    query = f.read()

                self.__sql_execute(task_profile, query)
            elif ext == '.exe':
                shell_line = "'{0}' {1}".format(str(task_profile['Task']),
                                                params)
            elif shell_comm:
                shell_line = [
                    'powershell.exe',
                    "{0} '{1}' {2}".format(shell_comm, task_profile['Task'],
                                           params)
                ]

            if shell_line:
                self.__sub_proc = Popen(shell_line,
                                        startupinfo=startupinfo,
                                        stdin=PIPE,
                                        stdout=PIPE,
                                        stderr=PIPE)
                stdout, stderr = self.__sub_proc.communicate()
                self.__process_std_output(task_profile['Task_Name'], stdout)
                self.__process_std_output(task_profile['Task_Name'],
                                          stderr,
                                          is_error=True)

                if self.__sub_proc.returncode:
                    task_profile['Task_Error'] = [
                        self.__sub_proc.returncode,
                        'Ran into error while executing program'
                    ]
        except Exception as e:
            self.write_job_log(format_exc())
            task_profile['Task_Error'] = [type(e).__name__, str(e)]
            pass
Code example #7
File: golemapp.py Project: U0001F3A2/golem
    def wrapper(*args, **kwargs):
        datadir = set_active_environment(**kwargs)
        kwargs['datadir'] = datadir

        try:
            with Lock(os.path.join(datadir, 'LOCK'), timeout=1):
                func(*args, **kwargs)
        except LockException:
            logger.error(f'directory {datadir} is locked, possibly used by '
                         'another Golem instance')
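This example and #23 share one pattern: hold a lock file for the lifetime of
the process so that a second instance fails fast. A standalone sketch of the
idea (the function name and message are illustrative, not from the project):

import os
from portalocker import Lock
from portalocker.exceptions import LockException

def run_single_instance(datadir, main):
    # The lock is held for as long as main() runs; a second process hits
    # the 1-second timeout and lands in the except branch instead.
    try:
        with Lock(os.path.join(datadir, 'LOCK'), timeout=1):
            main()
    except LockException:
        raise SystemExit(f'directory {datadir} is locked, possibly used by '
                         'another instance')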
Code example #8
@contextmanager  # from contextlib; required so the yield below forms a context manager
def open_with_lock(fpath, mode="rb+", timeout=lock_timeout, **kwargs):
    with Lock(fpath, mode, timeout=timeout, **kwargs) as file_handle:
        try:
            yield file_handle
        finally:
            file_handle.flush()
            if "w" in mode or "+" in mode or "a" in mode:
                try:
                    os.fsync(file_handle.fileno())
                except OSError:
                    pass
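A usage sketch for the generator above (assuming the @contextmanager decorator
noted in the snippet; the file name is hypothetical and assumed to exist,
since the default mode is "rb+"):

# Read-modify-write under the lock; the finally block above flushes and
# fsyncs before the lock is released.
with open_with_lock("state.bin") as fh:
    payload = fh.read()
    fh.seek(0)
    fh.write(payload)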
Code example #9
    def store(self):
        """Store parameters using the filename specified in self.file_path.
        """

        # Store settings in file
        with Lock(self.file_path, 'w', self['file_timeout']) as fout:
            json.dump(self.data, fout)

            # Ensure write is fully complete even for network file systems
            fout.flush()
            os.fsync(fout.fileno())

        return
Code example #10
    def write(self, file_or_path, append=False, timeout=10):
        """
        Write Smother results to a file.

        Parameters
        ----------
        file_or_path : str
            Path to write report to
        append : bool
            If True, read an existing smother report from `file_or_path`
            and combine it with this file before writing.
        timeout : int
            Time in seconds to wait to acquire a file lock, before
            raising an error.

        Note
        ----
        Append mode is atomic when file_or_path is a path,
        and can be safely run in a multithreaded or
        multiprocess test environment.

        When using `parallel_mode`, file_or_path is given a unique
        suffix based on the machine name and process id.
        """
        if isinstance(file_or_path, six.string_types):
            if self.coverage:
                file_or_path = get_smother_filename(
                    file_or_path, self.coverage.config.parallel)

            outfile = Lock(file_or_path,
                           mode='a+',
                           timeout=timeout,
                           fail_when_locked=False)
        else:
            outfile = noclose(file_or_path)

        with outfile as fh:

            if append:
                fh.seek(0)
                try:
                    other = Smother.load(fh)
                except ValueError:  # no smother data
                    pass
                else:
                    self |= other

            fh.seek(0)
            fh.truncate()  # required to overwrite data in a+ mode
            json.dump(self.data, fh)
Code example #11
@contextmanager  # from contextlib; required so the yield below forms a context manager
def open_with_lock(fpath, mode="rb+", timeout=None, **kwargs):
    """Open file with lock."""
    if timeout is None:
        timeout = constants.lock_timeout
    with Lock(fpath, mode, timeout=timeout, **kwargs) as file_handle:
        try:
            yield file_handle
        finally:
            file_handle.flush()
            if "w" in mode or "+" in mode or "a" in mode:
                try:
                    os.fsync(file_handle.fileno())
                except OSError:
                    pass
Code example #12
class _RLock:
    def __init__(self, outer_lock_path, inner_lock_path):
        self.outer_lock = FileLock(outer_lock_path, timeout=120, flags=LOCK_SH)
        self.inner_lock = FileLock(inner_lock_path, timeout=120, flags=LOCK_SH)

    def acquire(self):
        self.outer_lock.acquire()
        self.inner_lock.acquire()
        self.outer_lock.release()

    def release(self):
        self.inner_lock.release()

    def __enter__(self):
        self.acquire()

    def __exit__(self, type_, value, tb):
        self.release()
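A usage sketch (the lock paths are hypothetical). Note the design: the outer
lock only serializes acquisition and is released immediately, while the inner
lock stays held for the whole critical section:

lock = _RLock("/tmp/demo.outer.lock", "/tmp/demo.inner.lock")
with lock:
    pass  # critical section, guarded by the inner lock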
Code example #13
File: cache.py Project: HCVE/echo-clustering
def append_data(identifier, data):
    identifier = list(map(str, identifier))
    directory = "data/" + "/".join(identifier[:-1])
    filename = identifier[-1] + ".pickle"
    try:
        os.makedirs(directory)
    except FileExistsError:
        pass

    path = "%s/%s" % (directory, filename)

    try:
        with open(path, "rb") as f:
            file_content = pickle.load(f)
    except (FileNotFoundError, EOFError):
        file_content = []

    with Lock(path, "wb") as f:
        file_content.append(data)
        pickle.dump(file_content, f)
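One caveat: the read above happens before the lock is taken, so two concurrent
writers can both load the old contents and one append can be lost. A minimal
variant (a sketch, not the original project's code) that holds one exclusive
lock across the whole read-modify-write cycle:

import pickle
from portalocker import Lock

def append_data_locked(path, data):
    open(path, "ab").close()  # ensure the file exists before locking in r+
    with Lock(path, "rb+", timeout=10) as f:
        try:
            content = pickle.load(f)
        except EOFError:  # brand-new, empty file
            content = []
        content.append(data)
        f.seek(0)
        f.truncate()
        pickle.dump(content, f)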
Code example #14
    def should_refresh_token(self, con=None):
        """
        Method for refreshing the token when there are concurrently running instances.
        """
        for _ in range(TOKEN_REFRESH_MAX_TRIES):
            if self.token.is_access_expired:
                try:
                    with Lock(self.token_path,
                              'r+',
                              fail_when_locked=True,
                              timeout=0):
                        if con.refresh_token() is False:
                            raise RuntimeError('Error refreshing token')
                    return None
                except LockException:
                    self.fs_wait = True
                    time.sleep(1)
                    self.token = self.load_token()
            else:
                self.fs_wait = False
                return False
        raise RuntimeError('Could not access locked token file')
Code example #15
    def write(self, file_or_path, timeout=10):
        """
        Write Smother results to a file.

        Parameters
        ----------
        file_or_path : str
            Path to write report to
        timeout : int
            Time in seconds to wait to acquire a file lock, before
            raising an error.

        Note
        ----
        Append mode is atomic when file_or_path is a path,
        and can be safely run in a multithreaded or
        multiprocess test environment.

        When using `parallel_mode`, file_or_path is given a unique
        suffix based on the machine name and process id.
        """
        if isinstance(file_or_path, six.string_types):
            if self.coverage:
                file_or_path = get_smother_filename(
                    file_or_path, self.coverage.config.parallel)

            outfile = Lock(file_or_path,
                           mode='a+',
                           timeout=timeout,
                           fail_when_locked=False)
        else:
            outfile = noclose(file_or_path)

        with outfile as fh:
            fh.seek(0)
            fh.truncate()  # required to overwrite data in a+ mode
            json.dump(self.data, fh)
Code example #16
    def should_refresh_token(self, con=None):
        """
        Method for refreshing the token when there are concurrently running
        O365 instances. Determines if we need to call the MS server and refresh
        the token and its file, or if another Connection instance has already
        updated it and we should just load that updated token from the file.

        It will always return False or None, or raise an error if the token
        file couldn't be accessed after X tries. That is because this method
        completely handles token refreshing via the passed Connection object
        argument. If it determines that the token should be refreshed, it locks
        the token file, calls the Connection's 'refresh_token' method (which
        loads the fresh token from the server into memory and the file), then
        unlocks the file. Since refreshing has been taken care of, the calling
        method does not need to refresh and we return None.

        If we are blocked because the file is locked, that means another
        instance is using it. We'll change the backend's state to waiting,
        sleep for 2 seconds, reload a token into memory from the file (since
        another process is using it, we can assume it's being updated), and
        loop again.

        If this newly loaded token is not expired, the other instance loaded
        a new token to file, and we can happily move on and return False
        (since we don't need to refresh the token anymore). If the same token
        was loaded into memory again and is still expired, that means it wasn't
        updated by the other instance yet. Try accessing the file again for X
        more times. If we don't succeed after the loop has terminated, raise a
        runtime exception.
        """

        for _ in range(self.max_tries, 0, -1):
            if self.token.is_access_expired:
                try:
                    with Lock(self.token_path,
                              'r+',
                              fail_when_locked=True,
                              timeout=0):
                        log.debug('Locked oauth token file')
                        if con.refresh_token() is False:
                            raise RuntimeError('Token Refresh Operation not '
                                               'working')
                        log.info('New oauth token fetched')
                    log.debug('Unlocked oauth token file')
                    return None
                except LockException:
                    self.fs_wait = True
                    log.warning('Oauth file locked. Sleeping for 2 seconds... '
                                f'retrying {_ - 1} more times.')
                    time.sleep(2)
                    log.debug('Waking up and rechecking token file for update'
                              ' from other instance...')
                    self.token = self.load_token()
            else:
                log.info('Token was refreshed by another instance...')
                self.fs_wait = False
                return False

        # If we exit the loop, we were locked out of the file after multiple
        # retries. Give up and throw an error - something isn't right.
        raise RuntimeError('Could not access locked token file after '
                           f'{self.max_tries} tries')
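The pattern shared by examples #14 and #16 can be distilled: attempt a
non-blocking exclusive lock (timeout=0 with fail_when_locked=True), and on
failure sleep, reload shared state, and retry a bounded number of times. A
generic sketch (all names here are illustrative):

import time
from portalocker import Lock
from portalocker.exceptions import LockException

def refresh_under_lock(path, refresh, reload_state, max_tries=3):
    for _ in range(max_tries):
        try:
            # timeout=0 plus fail_when_locked=True raises immediately
            # instead of waiting for the current holder.
            with Lock(path, 'r+', fail_when_locked=True, timeout=0):
                refresh()
            return
        except LockException:
            time.sleep(2)    # another process holds the file
            reload_state()   # it may have refreshed for us already
    raise RuntimeError('could not acquire the lock after retries')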
Code example #17
    def __package_attach(self):
        if self.__attachments:
            from zipfile import ZipFile
            from exchangelib import FileAttachment
            from portalocker import Lock

            file_dir = join(attach_dir, datetime.today().__format__("%Y%m%d"))
            file_path = join(
                file_dir, '{0}_{1}.zip'.format(
                    hash(datetime.now().__format__("%I:%M:%S %p")),
                    self.__job_profile['Job_Name']))

            if not exists(file_dir):
                makedirs(file_dir)

            if exists(file_path):
                try:
                    remove(file_path)
                except Exception as e:
                    tool.write_to_log(
                        "Attach File Rem - ECode '{0}', {1}".format(
                            type(e).__name__, str(e)))
                    return

            zip_file = ZipFile(file_path, mode='w')

            try:
                for attachment in self.__attachments:
                    try:
                        if isfile(attachment) and exists(
                                attachment) and stat(attachment).st_size > 0:
                            zip_file.write(attachment, basename(attachment))
                    except Exception as e:
                        self.write_job_log(format_exc())
                        tool.write_to_log(
                            "Job '{0}' failed to zip file due to ECode '{1}', {2}"
                            .format(self.__job_profile['Job_Name'],
                                    type(e).__name__, str(e)))
                        pass
                    finally:
                        if isfile(attachment) and exists(
                                attachment) and stat(attachment).st_size > 0:
                            remove(attachment)
            except Exception as e:
                self.write_job_log(format_exc())
                tool.write_to_log(
                    "Job '{0}' failed to zip file in mainloop due to ECode '{1}', {2}"
                    .format(self.__job_profile['Job_Name'],
                            type(e).__name__, str(e)))
                pass
            finally:
                zip_file.close()

            if exists(file_path) and stat(file_path).st_size > 0:
                with Lock(file_path, 'rb') as f:
                    self.__email.attach(
                        FileAttachment(name=basename(file_path),
                                       content_type='zip',
                                       content=f.read(),
                                       is_inline=False))
            elif exists(file_path):
                remove(file_path)

                try:
                    unlink(file_dir)
                except Exception as e:
                    tool.write_to_log("Dir Unlink - ECode '{0}', {1}".format(
                        type(e).__name__, str(e)))
                    pass
Code example #18
    def save_data(self):
        """Forcibly saves data."""
        with Lock(self.data_file, "rb+", timeout=lock_timeout) as df:
            self._save_to(df)
Code example #19
    def __init__(self, outer_lock_path, inner_lock_path):
        self.outer_lock = FileLock(outer_lock_path, timeout=120, flags=LOCK_EX)
        self.inner_lock = FileLock(inner_lock_path, timeout=120, flags=LOCK_EX)
Code example #20
    def _load_data(self):
        """Load examples from data file."""
        ensure_file(self.data_file)
        with Lock(self.data_file, "rb", timeout=lock_timeout) as df:
            self._load_from(df)
Code example #21
        data_dir = '/home/travis/build/pysatData'
        params['data_dirs'] = [data_dir]

    print(''.join(
        ("\nHi there!  pysat will nominally store data in a ",
         "'pysatData' directory which needs to be assigned. ",
         "Please run `pysat.params['data_dirs'] = path` where path ",
         "specifies one or more existing top-level directories that ",
         "may be used to store science data. `path` may either be ",
         "a single string or a list of strings.")))
else:
    # Load up existing parameters file
    params = _params.Parameters()

# Load up version information
with Lock(version_filename, 'r', params['file_timeout']) as version_file:
    __version__ = version_file.read().strip()

from pysat import utils
from pysat._constellation import Constellation
from pysat._instrument import Instrument
from pysat._meta import Meta, MetaLabels
from pysat._files import Files
from pysat._orbits import Orbits
from pysat import instruments

__all__ = ['instruments', 'utils']

# Cleanup
del here
Code example #22
    def __init__(self, path=None, create_new=False):
        sfname = 'pysat_settings.json'
        self.data = {}
        self.file_path = None

        # Define default parameters and values
        dir_format = os.path.join('{platform}', '{name}', '{tag}', '{inst_id}')
        defaults = {
            'clean_level': 'clean',
            'directory_format': dir_format,
            'ignore_empty_files': False,
            'file_timeout': 10,
            'update_files': True,
            'user_modules': {},
            'warn_empty_file_list': False
        }

        # Attach default parameters and values to object
        self.defaults = defaults

        # Define stored pysat parameters without a default setting
        non_defaults = ['data_dirs']
        self.non_defaults = non_defaults

        # If path provided, use it. Otherwise, iterate through potential
        # locations until file is found.
        if path is not None:
            # Confirm path is valid
            if not os.path.exists(path):
                estr = ''.join(('Supplied path does not exist on the local ',
                                'system. Please create it and try again.'))
                raise OSError(estr)

            # Store full file path including fixed settings file name
            self.file_path = os.path.join(path, sfname)

        else:
            # Cycle through locations and load any pysat parameter files found.
            # First, check current working directory, then pysat user directory.
            # Stop looking once an existing file is found.
            file_locs = [
                os.path.join('.', sfname),
                os.path.join(os.path.expanduser('~'), '.pysat', sfname)
            ]
            for fileloc in file_locs:
                if os.path.isfile(fileloc):
                    self.file_path = fileloc
                    break

            # Ensure we have a valid file if the user isn't creating a new one.
            if self.file_path is None and (not create_new):
                estr = ''.join(
                    ('pysat is unable to locate a user settings ',
                     'file. Please check the locations, "./" or ',
                     '"~/.pysat" for the file "pysat_settings.json"', '.'))
                raise OSError(estr)

        if create_new:
            # Initialize new settings file. Method below includes a .store call
            self.clear_and_restart()

        # Load parameters in thread-safe manner.
        # Can't use user set file_timeout since we don't know what it is yet.
        # Can't use NetworkLock in pysat.utils either since this object (params)
        # doesn't exist yet.
        with Lock(self.file_path, 'r', timeout=10) as fout:
            self.data = json.load(fout)
            # In case of network file system
            fout.flush()
            os.fsync(fout.fileno())

        return
Code example #23
File: everaapp.py Project: davincci199412/Evera
def start(  # pylint: disable=too-many-arguments, too-many-locals
        monitor, concent, datadir, node_address, rpc_address, peer, mainnet,
        net, geth_address, password, accept_terms, accept_concent_terms,
        accept_all_terms, version, log_level, enable_talkback, m):

    freeze_support()
    delete_reactor()

    # Crossbar
    if m == 'crossbar.worker.process':
        start_crossbar_worker(m)
        return 0

    if version:
        print("EVERA version: {}".format(evera.__version__))
        return 0

    set_environment('mainnet' if mainnet else net, concent)
    # These are done locally since they rely on evera.config.active to be set
    from evera.config.active import CONCENT_VARIANT
    from evera.appconfig import AppConfig
    from evera.node import Node

    # We should use different directories for different chains
    datadir = get_local_datadir('default', root_dir=datadir)
    os.makedirs(datadir, exist_ok=True)

    def _start():
        generate_rpc_certificate(datadir)

        # Workarounds for pyinstaller executable
        sys.modules['win32com.gen_py.os'] = None
        sys.modules['win32com.gen_py.pywintypes'] = None
        sys.modules['win32com.gen_py.pythoncom'] = None

        app_config = AppConfig.load_config(datadir)
        config_desc = ClientConfigDescriptor()
        config_desc.init_from_app_config(app_config)
        config_desc = ConfigApprover(config_desc).approve()

        if rpc_address:
            config_desc.rpc_address = rpc_address.address
            config_desc.rpc_port = rpc_address.port
        if node_address:
            config_desc.node_address = node_address

        # Evera headless
        install_reactor()

        from evera.core.common import config_logging
        config_logging(
            datadir=datadir,
            loglevel=log_level,
            config_desc=config_desc)

        log_evera_version()
        log_platform_info()
        log_ethereum_chain()
        log_concent_choice(CONCENT_VARIANT)

        node = Node(
            datadir=datadir,
            app_config=app_config,
            config_desc=config_desc,
            peers=peer,
            use_monitor=monitor,
            use_talkback=enable_talkback,
            concent_variant=CONCENT_VARIANT,
            geth_address=geth_address,
            password=password,
        )

        if accept_terms:
            node.accept_terms()

        if accept_concent_terms:
            node.accept_concent_terms()

        if accept_all_terms:
            node.accept_terms()
            node.accept_concent_terms()

        node.start()

    try:
        with Lock(os.path.join(datadir, 'LOCK'), timeout=1):
            _start()

    except LockException:
        logger.error(f'directory {datadir} is locked, possibly used by '
                     'another Evera instance')
        return 1
    return 0