Example #1
 def regenerate(self, directory=None):
     """See `IMailTransportAgentLifecycle`."""
     # Acquire a lock file to prevent other processes from racing us here.
     if directory is None:
         directory = config.DATA_DIR
     lock_file = os.path.join(config.LOCK_DIR, 'mta')
     with Lock(lock_file):
         lmtp_path = os.path.join(directory, 'postfix_lmtp')
         lmtp_path_new = lmtp_path + '.new'
         with open(lmtp_path_new, 'w') as fp:
             self._generate_lmtp_file(fp)
         # Atomically rename to the intended path.
         os.rename(lmtp_path_new, lmtp_path)
         domains_path = os.path.join(directory, 'postfix_domains')
         domains_path_new = domains_path + '.new'
         with open(domains_path_new, 'w') as fp:
             self._generate_domains_file(fp)
         # Atomically rename to the intended path.
         os.rename(domains_path_new, domains_path)
         # If the transport_file_type is 'hash', run the postmap command
         # on the newly generated files to convert them into the hash
         # tables that Postfix wants.
         if self.transport_file_type == 'hash':
             errors = []
             for path in (lmtp_path, domains_path):
                 command = self.postmap_command + ' ' + path
                 status = (os.system(command) >> 8) & 0xff
                 if status:
                     msg = 'command failure: %s, %s, %s'
                     errstr = os.strerror(status)
                     log.error(msg, command, status, errstr)
                     errors.append(msg % (command, status, errstr))
             if errors:
                 raise RuntimeError(NL.join(errors))
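The (os.system(command) >> 8) & 0xff expression extracts the exit code from the 16-bit wait status that os.system returns on POSIX. A minimal sketch of the same check using subprocess.run, which exposes the exit code directly (the command and path are placeholders, not this module's actual configuration):

    import subprocess

    # subprocess.run returns a CompletedProcess whose returncode is the
    # exit code itself, so no wait-status bit shifting is needed.
    result = subprocess.run(['postmap', '/path/to/postfix_lmtp'])
    if result.returncode != 0:
        raise RuntimeError(
            'command failure: postmap, status {}'.format(result.returncode))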
Example #2
def get_next_apk(apks_dir):
    """
    Gets the first available (not locked) apk file and locks it

    :param apks_dir: directory to scan
    :return: a tuple containing the apk's path and the locked lock
    :rtype: (str, lockfile.LockFile)
    """
    try:
        files = os.listdir(apks_dir)
    except FileNotFoundError:
        # folder doesn't exist
        return None, None
    for f in files:
        if not f.endswith(".apk"):
            continue
        f = os.path.join(apks_dir, f)
        try:
            # lock file should not exist
            filename = f + LOCK_PREFIX

            lock = Lock(filename, lifetime=datetime.timedelta(
                seconds=6000))  # expires in 6000 seconds (100 minutes)
            if not lock.is_locked:
                lock.lock(timeout=datetime.timedelta(milliseconds=350))
                if os.path.isfile(
                        f
                ):  # the original file could be deleted in the meantime
                    return f, lock
                if lock.is_locked:
                    lock.unlock()
        except (AlreadyLockedError, TimeOutError):
            # some other process is analyzing the file; go ahead and look for another file
            pass
    return None, None
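A caller of get_next_apk is responsible for releasing the returned lock once it is done with the file. A minimal usage sketch, assuming a hypothetical analyze() step and directory:

    apk, lock = get_next_apk('/tmp/apks')  # hypothetical directory
    if apk is not None:
        try:
            analyze(apk)  # hypothetical processing step
        finally:
            lock.unlock()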
Example #3
def dump_to_file(entries, package, version_code, version_name):
    lock_acquired = False
    while not lock_acquired:
        try:
            filename = dest + LOCK_PREFIX
            lock = Lock(filename, lifetime=datetime.timedelta(seconds=6000))  # expires in 6000 seconds (100 minutes)
            if not lock.is_locked:
                lock.lock(timeout=datetime.timedelta(milliseconds=350))
                lock_acquired = True
                with open(dest, 'a') as f:
                    first = True
                    if os.path.exists(dest) and os.path.getsize(dest) > 0:
                        first = False
                    for entry in entries:
                        entry_dict = entry.__dict__
                        entry_dict['package'] = package
                        entry_dict['versionCode'] = version_code
                        entry_dict['versionName'] = version_name
                        if first:
                            first = False
                        else:
                            f.write(",\n")
                        json.dump(entry_dict, f, indent=4)
                if lock.is_locked:
                    lock.unlock()
        except (AlreadyLockedError, TimeOutError):
            # some other process holds the lock; retry until it is released
            pass
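Note that the file written here is a comma-separated sequence of JSON objects, not a single valid JSON document. A minimal sketch of reading it back under that assumption (the path is hypothetical):

    import json

    # Wrap the comma-joined objects in brackets to form a JSON array.
    with open('/tmp/entries.json') as f:
        entries = json.loads('[' + f.read() + ']')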
Example #4
    def run_capsule(self, run_info, gpu_idx):
        lock_path = os.path.join(run_info['trial_dir'], 'lock')
        lock = Lock(lock_path, datetime.timedelta(days=365))
        try:
            lock.lock(timeout=datetime.timedelta(seconds=1))
        except TimeOutError:
            # flufl.lock raises TimeOutError on timeout rather than
            # returning with the lock unacquired.
            self.print_log('locking failed for', run_info['trial_id'])
            return None

        # run capsule

        env = {'CUDA_VISIBLE_DEVICES': str(gpu_idx),
               'INFR_TRIAL': run_info['trial_id'],
               'INFR_EXP_PATH': self.experiment_dir,
               'INFR_MODE': self.mode,
               'INFR_REDIRECT_IO': '1',
               'INFR_START_STATE': os.path.join(run_info['trial_dir'], 'start_state.json')}
        if self.cuda_sync:
            env['CUDA_LAUNCH_BLOCKING'] = '1'

        proc = subprocess.Popen([sys.executable, '-m', run_info['start_state']['module_name']],
                                env=env)

        self.print_log('started worker', proc.pid, 'for', run_info['trial_id'], self.mode)

        return {'trial_dir': run_info['trial_dir'],
                'trial_id': run_info['trial_id'],
                'start_at': time.time(),
                'lock': lock,
                'gpu_idx': gpu_idx,
                'proc': proc,
                'pid': proc.pid,
                'ret_code': None}
Example #5
 def dump_strings(self, entries):
     lock_acquired = False
     while not lock_acquired:
         try:
             filename = self.strings_dest + LOCK_PREFIX
             lock = Lock(filename,
                         lifetime=datetime.timedelta(
                             seconds=6000))  # expires in 6000 seconds (100 minutes)
             if not lock.is_locked:
                 lock.lock(timeout=datetime.timedelta(milliseconds=350))
                 lock_acquired = True
                 with open(self.strings_dest, 'a') as f:
                     first = True
                     if os.path.exists(
                             self.strings_dest) and os.path.getsize(
                                 self.strings_dest) > 0:
                         first = False
                     for entry in entries:
                         entry_dict = entry.__dict__
                         if first:
                             first = False
                         else:
                             f.write("\n")
                         json.dump(entry_dict, f)
                 if lock.is_locked:
                     lock.unlock()
         except (AlreadyLockedError, TimeOutError):
             # some other process holds the lock; retry until it is released
             pass
Example #6
def acquire_lock_1(force, lock_file=None):
    """Try to acquire the master lock.

    :param force: Flag that controls whether to force acquisition of the lock.
    :type force: bool
    :param lock_file: Path to the lock file, otherwise `config.LOCK_FILE`.
    :type lock_file: str
    :return: The master lock.
    :raises: `TimeOutError` if the lock could not be acquired.
    """
    if lock_file is None:
        lock_file = config.LOCK_FILE
    lock = Lock(lock_file, LOCK_LIFETIME)
    try:
        lock.lock(timedelta(seconds=0.1))
        return lock
    except TimeOutError:
        if not force:
            raise
        # Force removal of lock first.
        hostname, pid, tempfile = lock.details
        os.unlink(lock_file)
        # Also remove any stale claim files.
        dname = os.path.dirname(lock_file)
        for fname in os.listdir(dname):
            fpath = os.path.join(dname, fname)
            if fpath.startswith(lock_file):
                os.unlink(fpath)
        return acquire_lock_1(force=False, lock_file=lock_file)
Example #7
def load_json(path, require_exclusive=True, lock_path=None):
    """
    Shortcut for loading json from a file path.

    :param path: The full path to the file
    :type path: str
    :param require_exclusive: lock file for exclusive read
    :type require_exclusive: bool
    :param lock_path: path for the lock file to use
    :type lock_path: string
    :returns: loaded json
    :rtype: dict
    :raises: IOError, ValueError
    """
    lock = None
    if require_exclusive:
        if not lock_path:
            lock_path = get_lock_path(path)
        lock = Lock(lock_path)
        lock.lock()
    try:
        with open(path) as f:
            return json.load(f)
    finally:
        if lock:
            lock.unlock(unconditionally=True)
Example #8
def master_state(lock_file=None):
    """Get the state of the master watcher.

    :param lock_file: Path to the lock file, otherwise `config.LOCK_FILE`.
    :type lock_file: str
    :return: 2-tuple of the WatcherState describing the state of the lock
        file, and the lock object.
    """
    if lock_file is None:
        lock_file = config.LOCK_FILE
    # We'll never acquire the lock, so the lifetime doesn't matter.
    lock = Lock(lock_file)
    try:
        hostname, pid, tempfile = lock.details
    except NotLockedError:
        return WatcherState.none, lock
    if hostname != socket.getfqdn():
        return WatcherState.host_mismatch, lock
    # Find out if the process exists by calling kill with a signal 0.
    try:
        os.kill(pid, 0)
        return WatcherState.conflict, lock
    except ProcessLookupError:
        # No matching process id.
        return WatcherState.stale_lock, lock
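A caller typically branches on the returned WatcherState; a conflict, for instance, means another master already holds the lock. A minimal sketch of that check (the handling shown is an assumption, not Mailman's actual policy):

    state, lock = master_state()
    if state is WatcherState.conflict:
        # lock.details is the (hostname, pid, tempfile) triple used above.
        print('master already running under pid', lock.details[1])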
Example #9
 def test_archive_lock_used(self):
     # Test that the maildir gets locked when a message is added; a
     # failure here could mean we lose mail.
     lock_file = os.path.join(
         config.LOCK_DIR, '{0}-maildir.lock'.format(
             self._mlist.fqdn_listname))
     with Lock(lock_file):
         # Acquire the archiver lock, then make sure the archiver logs the
         # fact that it could not acquire the lock.
         archive_thread = threading.Thread(
             target=Prototype.archive_message,
             args=(self._mlist, self._msg))
         mark = LogFileMark('mailman.error')
         archive_thread.run()
         # Test that the archiver output the correct error.
         line = mark.readline()
         # XXX 2012-03-15 BAW: we really should remove timestamp prefixes
         # from the loggers when under test.
         self.assertTrue(line.endswith(
             'Unable to acquire prototype archiver lock for {0}, '
             'discarding: {1}\n'.format(
                 self._mlist.fqdn_listname,
                 self._msg.get('message-id'))))
     # Check that the message didn't get archived.
     created_files = self._find(config.ARCHIVE_DIR)
     self.assertEqual(self._expected_dir_structure, created_files)
Example #10
    def load_store_matrix(self, metadata, as_of_dates, return_matrix=True):
        """
        Calls get_dataset to return a pandas DataFrame for the as_of_dates selected
        Args:
           list as_of_dates: as_of_dates to use
        Returns:
           matrix: dataframe with the features and the last column as the label (called: outcome)
        """
        uuid = metta.metta_io.generate_uuid(metadata)
        matrix_filename = self.matrices_path + '/' + uuid

        with Lock(matrix_filename + '.lock',
                  lifetime=datetime.timedelta(minutes=20)):
            if os.path.isfile(matrix_filename + '.h5'):
                log.debug('Matrix {} already stored'.format(uuid))
                if return_matrix:
                    df = metta.metta_io.recover_matrix(metadata,
                                                       self.matrices_path)
                    return df, uuid
            else:
                df = self.feature_loader.get_dataset(as_of_dates)
                log.debug('storing matrix {}'.format(uuid))
                metta.metta_io.archive_matrix(matrix_config=metadata,
                                              df_matrix=df,
                                              directory=self.matrices_path,
                                              format='hd5')
                if return_matrix:
                    return df, uuid
Example #11
def write_json(path, data, lock_path=None, merge_func=None):
    """
    Shortcut for writing a structure as json to the file system.

    merge_func is a callable that takes two dict and merges them
    together.

    :param path: The full path to the file to write
    :type path: str
    :param data:  structure to write out as json
    :type data: dict or list
    :param lock_path: path for the lock file to use
    :type lock_path: string
    :raises: ValueError, OSError
    """
    # lock before moving
    if not lock_path:
        lock_path = get_lock_path(path)

    with Lock(lock_path):
        if callable(merge_func):
            try:
                disk_data = load_json(path, require_exclusive=False)
            except FileNotFoundError:
                disk_data = {}
            mem_data = data.copy()
            data = merge_func(disk_data, mem_data)

        # We could write directly to the target file, but writing to a
        # temporary file and renaming it into place keeps the update atomic;
        # close the file so buffered data is flushed before the move.
        dn = os.path.dirname(path)
        f = tempfile.NamedTemporaryFile(mode='w', dir=dn, delete=False)
        json.dump(data, f, indent=4)
        os.fchmod(f.file.fileno(), 0o644)
        f.close()
        shutil.move(f.name, path)
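The merge_func hook lets the caller reconcile whatever is already on disk with the new data while the lock is held. A minimal sketch with a shallow dict merge (the path and data are hypothetical):

    def shallow_merge(disk_data, mem_data):
        # Keys in the in-memory data win over what is on disk.
        merged = dict(disk_data)
        merged.update(mem_data)
        return merged

    write_json('/tmp/state.json', {'updated': True}, merge_func=shallow_merge)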
Example #12
def child_locker(filename, lifetime, queue):
    # First, acquire the file lock.
    with Lock(filename, lifetime):
        # Now inform the parent that we've acquired the lock.
        queue.put(True)
        # Keep the file lock for a while.
        time.sleep(lifetime.seconds - 1)
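The queue is the handshake: the parent blocks on queue.get() until the child reports that it holds the lock. A minimal parent-side sketch under that assumption (the path and lifetime are placeholders):

    import datetime
    import multiprocessing

    queue = multiprocessing.Queue()
    proc = multiprocessing.Process(
        target=child_locker,
        args=('/tmp/test.lck', datetime.timedelta(seconds=15), queue))
    proc.start()
    queue.get()  # returns only once the child has acquired the lock
    # ... the lock is now known to be held by the child process ...
    proc.join()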
Example #13
 def __init__(self, lifetime=LOCK_LIFETIME, timeout=LOCK_TIMEOUT):
     """
     Create a SafeUpdater with the given lock lifetime and timeout
     (see flufl.lock documentation). The defaults are the lifetime
     and timeout found in the config.
     """
     self.lock = Lock(LOCK_FILE, lifetime=lifetime)
     self.timeout = timeout
Example #14
 def test_acquire_lock_1_force(self):
     # Create the lock and lock it.
     my_lock = Lock(self.lock_file)
     my_lock.lock(timedelta(seconds=60))
     # Try to acquire it again with force.
     lock = master.acquire_lock_1(True, self.lock_file)
     self.assertTrue(lock.is_locked)
     lock.unlock()
Example #15
def write_job(job_fullpath, job_spec):
    """ TODO: Docstring
    """

    # Write the contents to a job
    with Lock(lock_file):
        with open(job_fullpath, "w") as ofp:
            json.dump(job_spec, ofp, sort_keys=True)
Example #16
def write_env(job_fullpath, env):
    """TODO: write"""

    env_fullpath = job_fullpath.replace(".job", ".env")

    # write env to env_fullpath
    with Lock(lock_file):
        with open(env_fullpath, "w") as ofp:
            json.dump(env, ofp, sort_keys=True)
Example #17
def remove_job(job_fullpath):
    """ TODO: writeme
    """

    with Lock(lock_file):
        if os.path.exists(job_fullpath):
            os.remove(job_fullpath)
        if os.path.exists(job_fullpath.replace(".job", ".env")):
            os.remove(job_fullpath.replace(".job", ".env"))
Example #18
 def _lock(self):
     if self._lockobj is None:
         # These will get automatically cleaned up by the test
         # infrastructure.
         self._uid_file = os.path.join(config.VAR_DIR, '.uid')
         if self._context is not None:
             self._uid_file += '.' + self._context
         self._lock_file = self._uid_file + '.lock'
         self._lockobj = Lock(self._lock_file)
     return self._lockobj
Example #19
 def create():
     """See `IDatabaseFactory`."""
     with Lock(os.path.join(config.LOCK_DIR, 'dbcreate.lck')):
         database_class = config.database['class']
         database = call_name(database_class)
         verifyObject(IDatabase, database)
         database.initialize()
         SchemaManager(database).setup_database()
         database.commit()
         return database
Example #20
def read_job(job_fullpath):
    """ TODO: Docstring
    """

    # Parse the contents of the job
    with Lock(lock_file):
        with open(job_fullpath, "r") as ifp:
            job_spec = json.load(ifp)

    return job_spec
Example #21
def read_env(job_fullpath):
    """TODO: write"""

    env_fullpath = job_fullpath.replace(".job", ".env")

    # read env from env_fullpath
    with Lock(lock_file):
        with open(env_fullpath, "r") as ifp:
            env = json.load(ifp)

    return env
Example #22
 def ensure_directories_exist(self):
     """Create all the paths if the directories do not exist."""
     if self.create_paths:
         for path_name, path in self.paths.items():
             makedirs(path)
         # Create a paper-git.cfg file if it doesn't already exist.
         lock_file = os.path.join(self.VAR_DIR, 'paper-git-cfg.lck')
         paper_git_cfg = os.path.join(self.ETC_DIR, 'paper-git.cfg')
         with Lock(lock_file):
             if not os.path.exists(paper_git_cfg):
                 with open(paper_git_cfg, 'w') as fp:
                     print(CFG_TEMPLATE, file=fp)
Example #23
    def setupTest(self, param, testDataName):

        # Lock to prevent race condition
        lock_file = "lift/lib/.locks/setup.lock"
        lock = Lock(lock_file)
        lock.lifetime = timedelta(days=2)
        lock.lock()

        # ---------------------------------------------------------------------
        # Base path
        # for dataset
        self.dataset = os.getenv('PROJ_DATA_DIR', '')
        if self.dataset == '':
            self.dataset = os.path.expanduser("~/Datasets")
        self.dataset += "/" + testDataName
        # for temp
        self.temp = os.getenv('PROJ_TEMP_DIR', '')
        if self.temp == '':
            self.temp = os.path.expanduser("~/Temp")
        self.temp += "/" + testDataName
        # for volatile temp
        self.volatile_temp = os.getenv('PROJ_VOLTEMP_DIR', '')
        if self.volatile_temp == '':
            self.volatile_temp = "/scratch/" + os.getenv('USER') + "/Temp"
        self.volatile_temp += "/" + testDataName

        # ---------------------------------------------------------------------
        # Path for data loading
        self.train_data = None  # block these as they should not be used
        self.train_mask = None  # block these as they should not be used
        self.debug = self.dataset + "/debug/" + self.prefix_dataset(param)

        # ---------------------------------------------------------------------
        # Path for the model learning
        resdir = os.getenv('PROJ_RES_DIR', '')
        if resdir == '':
            resdir = os.path.expanduser("~/Results")
        self.result = (resdir + "/" + self.getResPrefix(param) +
                       self.prefix_dataset(param) + self.prefix_patch(param) +
                       self.prefix_learning(param))
        # Check if the un-sorted prefix exists
        unsorted_hash_path = (resdir + "/" +
                              self.getResPrefix(param, do_sort=False) +
                              self.prefix_dataset(param, do_sort=False) +
                              self.prefix_patch(param, do_sort=False) +
                              self.prefix_learning(param, do_sort=False))

        if os.path.exists(unsorted_hash_path):
            shutil.copytree(unsorted_hash_path, self.result)
            shutil.rmtree(unsorted_hash_path)

        lock.unlock()
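The three environment lookups above all follow the same env-var-or-default shape; a small helper could make that pattern explicit. This is a hypothetical refactoring sketch, not part of the original class:

    import os

    def getenv_or(name, default_path):
        # Return the environment variable if set and non-empty,
        # otherwise fall back to the expanded default path.
        value = os.getenv(name, '')
        return value if value else os.path.expanduser(default_path)

    # e.g.: self.dataset = getenv_or('PROJ_DATA_DIR', '~/Datasets')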
Example #24
 def test_master_state(self):
     my_lock = Lock(self.lock_file)
     # Mailman is not running.
     state, lock = master.master_state(self.lock_file)
     self.assertEqual(state, master.WatcherState.none)
     # Acquire the lock as if another process had already started the
     # master.
     my_lock.lock()
     try:
         state, lock = master.master_state(self.lock_file)
     finally:
         my_lock.unlock()
     self.assertEqual(state, master.WatcherState.conflict)
Example #25
 def test_master_state(self):
     my_lock = Lock(self.lock_file)
     # Mailman is not running.
     state, lock = master.master_state(self.lock_file)
     self.assertEqual(state, master.WatcherState.none)
     # Acquire the lock as if another process had already started the
     # master.  Use a timeout to avoid this test deadlocking.
     my_lock.lock(timedelta(seconds=60))
     try:
         state, lock = master.master_state(self.lock_file)
     finally:
         my_lock.unlock()
     self.assertEqual(state, master.WatcherState.conflict)
Example #26
def run_with_lock(fn, *args, **kwargs):
    lock = Lock(
        getattr(
            settings, "HYPERKITTY_JOBS_UPDATE_INDEX_LOCKFILE",
            os.path.join(gettempdir(), "hyperkitty-jobs-update-index.lock")))
    if lock.is_locked:
        log.warning(
            "Update index lock is acquired by: {}".format(*lock.details))
        return
    with lock:
        try:
            fn(*args, **kwargs)
        except Exception as e:
            log.exception("Failed to update the fulltext index: %s", e)
Example #27
    def append_to_csv(self, dataframe, file_name):
        '''Append a pandas dataframe to a csv file. This function is thread safe.'''

        # Make a lock

        lock_name = file_name + '.lock'
        lock = Lock(lock_name)
        lock.lifetime = timedelta(minutes=10)

        # Write to the result file with a lock

        with lock:
            with open(file_name, 'a+') as f:
                dataframe.to_csv(f, header=False, index=False)
Example #28
    def __init__(self, path):
        # Specify the path to a file that will be used to synchronize the lock.
        # Per the flufl.lock documentation, use a file that does not exist.
        self._lock = Lock(path)

        # Locks have a lifetime (default 15 seconds) which is the period of time that the process expects
        # to keep the lock once it has been acquired. We set the lifetime to be 5 minutes as we expect
        # all operations that require locks to be completed within that time.
        self._lock.lifetime = timedelta(minutes=5)

        # Ensure multiple threads within a process run NFSLock operations one at a time.
        # We must acquire the reentrant lock before acquiring the flufl lock and only release after
        # the flufl lock is released.
        self._r_lock = threading.RLock()
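Given the ordering rule in the comments (take the process-local RLock first, release it last), hypothetical acquire and release methods for this class might look like the following sketch:

    def acquire(self):
        # Take the in-process reentrant lock first, then the NFS-safe
        # flufl lock, per the ordering described in __init__.
        self._r_lock.acquire()
        try:
            self._lock.lock()
        except Exception:
            self._r_lock.release()
            raise

    def release(self):
        # Release in the reverse order of acquisition.
        try:
            self._lock.unlock()
        finally:
            self._r_lock.release()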
Example #29
def append_to_csv(data_list, file_name):
    '''Append a list of tuples to a csv file. This function is thread safe.'''
    
    # Make a lock

    lock_name = file_name + '.lock'
    lock = Lock(lock_name)
    lock.lifetime = timedelta(minutes=10)

    # Write to the result file with a lock
    
    with lock:
        with open(file_name, 'a+') as f:
            for t in data_list:
                f.write(','.join([str(x) for x in t]) + '\n')
Example #30
def safe_append(file_name, string):
    '''Append a string to a file. This function
    is thread safe.
    '''
    # Make a lock

    lock_name = file_name + '.lock'
    lock = Lock(lock_name)
    lock.lifetime = timedelta(minutes=10)

    # Write to the result file with a lock

    with lock:
        with open(file_name, 'a+') as f:
            f.write(string)