Example #1
def acquire_domain_object_lock(lock_domain, object_path_hash):
  """Try to get a lock on the object in the domain that manages locks for the
  object.

  Returns:
    (bool success, lock)
    success: Indicates whether we won the bid to write the object.
    lock: boto Lock object we either found for this object or that we created.
  """
  # Get a valid new lock name
  lock_id = _lock_name(object_path_hash)

  # Check if we already have a lock for the object.
  query = _select_object_locks_query(lock_domain.name, object_path_hash)
  matching_locks = lock_domain.select(query, consistent_read=True)
  try:
    existing_lock = matching_locks.next()
    # Found an existing lock for the same object, so we lose the bid.
    return False, existing_lock
  except StopIteration:
    # Did not find any existing locks; proceed to claim one.
    logging.info('No matching lock for the object %s.' % object_path_hash)

  # Set attributes for a new, unconfirmed lock.
  unconfirmed_lock = lock_domain.new_item(unicode(lock_id))
  unconfirmed_lock[unicode('lock_name')] = unicode(lock_id)
  unconfirmed_lock[unicode('user')] = unicode('tierney')
  unconfirmed_lock[unicode('epoch_time')] = unicode(epoch_time())
  # Keep retrying the save until it succeeds (save() raises SDBResponseError
  # while the lock domain does not exist yet).
  while True:
    try:
      unconfirmed_lock.save()
      break
    except boto.exception.SDBResponseError:
      logging.warning('Domain does not exist yet; retrying save.')

  # Check if another lock was taken at this time. If so, we release our lock.
  query = _select_object_locks_query(lock_domain.name, object_path_hash)
  matching_locks = lock_domain.select(query, consistent_read=True)
  for _lock in matching_locks:
    # Finding a matching lock verifies that we can read the lock that we wrote.
    if _lock == unconfirmed_lock:
      continue

    # Check if we should ignore the lock due to a timeout.
    if (epoch_time() - float(_lock.epoch_time) > _TIMEOUT_SECONDS):
      logging.info('Encountered (and ignoring) an old lock: %s.' % _lock)
      continue

    # Concede. Delete our lock and return failure.
    logging.info('Found a conflicting lock so we must release.\n' \
                 ' found lock: %s.\n' \
                 ' unconfirmed lock: %s.' %
                 (_lock, unconfirmed_lock))
    # Another lock found. Need to release ours.
    lock_domain.delete_item(unconfirmed_lock)
    return False, _lock

  # No conflicting lock survived the check; return success and the (now confirmed) lock.
  return True, unconfirmed_lock
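
A minimal usage sketch for the example above, assuming boto 2's SimpleDB API. The domain name and the _lock_name, _select_object_locks_query, epoch_time, and _TIMEOUT_SECONDS helpers shown here are hypothetical stand-ins for the module's real definitions.

import hashlib
import logging
import time
import uuid

import boto
import boto.exception

_TIMEOUT_SECONDS = 60.0
epoch_time = time.time

def _lock_name(object_path_hash):
  # A fresh name per attempt so competing writers create distinct items.
  return 'lock-%s-%s' % (object_path_hash, uuid.uuid4().hex)

def _select_object_locks_query(domain_name, object_path_hash):
  # Matches every lock item for this object, ours included.
  # SimpleDB select syntax; the real query helper may differ.
  return ("select * from `%s` where lock_name like 'lock-%s%%'"
          % (domain_name, object_path_hash))

conn = boto.connect_sdb()  # credentials come from the environment
lock_domain = conn.create_domain('object-locks')  # no-op if it already exists

object_path_hash = hashlib.sha1('path/to/object').hexdigest()
success, lock = acquire_domain_object_lock(lock_domain, object_path_hash)
if success:
  # ... write the object ...
  lock_domain.delete_item(lock)  # release the lock when done
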
Example #2
    def exec(self):
        if self.is_windows:
            # TODO: Call scripts one after another
            terminal_cmd = """matlab -nodesktop -nosplash -r " """
            q_cmd = """ cd('{}'),disp(pwd),input('press any key to run...','s'),{},"""
            # TODO: this loop still needs rework; the code after it is OK.
            for script_file in self.script_files:
                # Strip the trailing '.m'; MATLAB's -r switch expects the bare
                # script name.
                terminal_cmd += q_cmd.format(
                    os.path.dirname(script_file),
                    os.path.basename(script_file[:-2]))
                self.log_trigger.emit("Exec: {}".format(
                    os.path.relpath(script_file, self._root)))
            # str.rstrip returns a new string, so keep the result.
            terminal_cmd = terminal_cmd.rstrip(',')  # remove trailing ','
            terminal_cmd += "\""
            t = epoch_time()
            p = Popen(shlex.split(terminal_cmd))
            # On some machines p.wait()/p.terminate() has no effect; on others
            # it blocks the program, so both calls are left disabled.
            # p.wait()
            # p.terminate()  # note that this does not close MATLAB
            # Use psutil to retrieve all the processes with MATLAB
            matlab_proc = list(
                filter(lambda p: "matlab" in p.name().lower(),
                       psutil.process_iter()))
            # Sometimes more than one MATLAB process exists.
            if len(matlab_proc) > 1:
                diff_time = [abs(p.create_time() - t) for p in matlab_proc]
                # Pick the process whose creation time is closest to t;
                # hopefully that is always the one we just launched.
                ind_min = min(enumerate(diff_time), key=itemgetter(1))[0]
                self._processes.append(matlab_proc[ind_min])
            elif len(matlab_proc) > 0:  # there should be exactly one
                self._processes.append(matlab_proc[0])

        elif self.is_linux:  # run each script in its own gnome-terminal tab
            terminal_cmd = "gnome-terminal --disable-factory "
            tab_cmd = "--tab --working-directory='{}' -e " + \
                      """'bash -c "matlab -nodesktop -nosplash -r "{}";""" + \
                      """exec bash" ' """
            cmd = terminal_cmd
            for script_file in self.script_files:  # script_file is absolute path
                # Strip the trailing '.m' before passing the script name to MATLAB.
                matlab_cmd = script_file[:-2]
                cmd += tab_cmd.format(os.path.dirname(script_file),
                                      os.path.basename(matlab_cmd))
                rel_script_path = os.path.relpath(script_file, self._root)
                self.log_trigger.emit("Exec: {}".format(rel_script_path))
            p = Popen(shlex.split(cmd), shell=False, start_new_session=True)
            self._processes.append(p)
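
The Windows branch above has to recover the real MATLAB process by name and creation time, because the Popen handle may not correspond to the process that keeps running. A standalone sketch of that technique, using only psutil; the process name and command here are illustrative, not taken from the original:

import time
from subprocess import Popen

import psutil


def find_spawned_process(name_fragment, launch_time, tolerance=10.0):
    """Return the process whose name contains name_fragment and whose
    creation time is closest to launch_time, or None if nothing matches."""
    candidates = []
    for proc in psutil.process_iter():
        try:
            if name_fragment in proc.name().lower():
                candidates.append(proc)
        except (psutil.NoSuchProcess, psutil.AccessDenied):
            continue  # process vanished or is off-limits; skip it
    if not candidates:
        return None
    best = min(candidates, key=lambda p: abs(p.create_time() - launch_time))
    if abs(best.create_time() - launch_time) > tolerance:
        return None  # nothing was started close enough to our launch time
    return best


# Illustrative usage: note the launch time, start the program, then look it up.
t = time.time()
Popen(["notepad.exe"])   # hypothetical command; MATLAB would work the same way
time.sleep(1.0)          # give the real process a moment to appear
spawned = find_spawned_process("notepad", t)
if spawned is not None:
    print("Tracking pid", spawned.pid)
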
Example #3
def importICalendarFile(
    fullpath,
    view,
    targetCollection=None,
    filterAttributes=None,
    activity=None,
    tzinfo=None,
    logger=None,
    selectedCollection=False,
):
    """Import ics file at fullpath into targetCollection.
    
    If selectedCollection is True, ignored targetCollection and import into
    the currently selected sidebar collection.
    If Trash is chosen as the target collection, a new collection will be 
    created instead.

    """
    from osaf.framework.blocks.Block import Block
    import translator, ics
    from osaf import sharing

    if selectedCollection:
        targetCollection = Block.findBlockByName("MainView").getSidebarSelectedCollection()

    trash = schema.ns("osaf.pim", view).trashCollection
    if targetCollection == trash:
        targetCollection = None

    view.commit(sharing.mergeFunction)  # to make target collection available
    # Note: if everyone follows the commit rule that a commit happens after
    # every user action, this commit is not required.

    rv = sharing.getView(view.repository)
    if targetCollection is not None:
        targetCollection = rv.findUUID(targetCollection.itsUUID)

    if not os.path.isfile(fullpath):
        raise ICalendarImportError(_(u"File does not exist, import cancelled."))

    before = epoch_time()

    (dir, filename) = os.path.split(fullpath)

    try:
        # TODO: coerceTzinfo?

        import stateless

        try:
            collection = stateless.importFile(
                rv, fullpath, collection=targetCollection, filters=filterAttributes, activity=activity
            )
        except:
            if logger:
                logger.exception("Failed importFile %s" % fullpath)
            raise ICalendarImportError(_(u"Problem with the file, import cancelled."))

        if targetCollection is None:
            collectionName = getattr(collection, "displayName", "Untitled")
            if collectionName == "Untitled":
                name = "".join(filename.split(".")[0:-1]) or filename
                collection.displayName = name

        rv.commit(sharing.mergeFunction)  # makes new collection available
        view.refresh(sharing.mergeFunction)  # main ui repo view

    finally:
        sharing.releaseView(rv)

    collection = view.findUUID(collection.itsUUID)

    if targetCollection is None:
        schema.ns("osaf.app", view).sidebarCollection.add(collection)
        sideBarBlock = Block.findBlockByName("Sidebar")
        sideBarBlock.postEventByName("SelectItemsBroadcast", {"items": [collection]})
    if logger:
        logger.info("Imported collection in %s seconds" % (epoch_time() - before))

    return collection
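
A hedged calling sketch for the function above; the view object, the file path, and the error handling are illustrative only, since the real Chandler repository view is created elsewhere in the application.

import os

path = os.path.expanduser("~/calendars/holidays.ics")
try:
    # `view` is assumed to be an existing Chandler repository view.
    collection = importICalendarFile(path, view, selectedCollection=True)
except ICalendarImportError as err:
    print("Import failed: %s" % err)
else:
    print("Imported collection: %s" % getattr(collection, "displayName", ""))
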
Example #4
from time import (
    sleep,
    time        as epoch_time,
    localtime,
    gmtime,
    tzname,
    asctime,
    strftime,
)

# Sleep for 500 ms.
sleep( 0.5 )

# Number of seconds since the Unix epoch (00:00:00 UTC, 1 Jan 1970).
# Can be used as a random number generator seed; combining it with
# os.getpid() gives a seed that is more likely to be unique (see the
# sketch below).
print( epoch_time( ) )
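
# A minimal sketch (not part of the original example) of the seeding idea
# mentioned above: mix the epoch time with the process id so that two
# processes started in the same second still get different seeds.
import os
import random

random.seed( int( epoch_time( ) ) ^ os.getpid( ) )
print( random.random( ) )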

# Local time zone names (if the computer is configured correctly).
print( tzname )

# Local Time
print( localtime( ) )
# Time on the Greenwich Meridian
print( gmtime( ) )
# Local time as a string.
print( asctime( ) )
# GMT as a string.
print( asctime( gmtime( ) ) )
# Just the current year as a string.
print( strftime( "%Y", gmtime( ) ) )
Example #5
def importICalendarFile(fullpath, view, targetCollection=None,
                        filterAttributes=None, activity=None,
                        tzinfo=None, logger=None, selectedCollection=False):
    """Import ics file at fullpath into targetCollection.
    
    If selectedCollection is True, ignored targetCollection and import into
    the currently selected sidebar collection.
    If Trash is chosen as the target collection, a new collection will be 
    created instead.

    """
    from osaf.framework.blocks.Block import Block
    import translator, ics
    from osaf import sharing

    if selectedCollection:
        targetCollection = Block.findBlockByName("MainView").getSidebarSelectedCollection()

    trash = schema.ns("osaf.pim", view).trashCollection
    if targetCollection == trash:
        targetCollection = None


    view.commit(sharing.mergeFunction) # to make target collection available
    # Note: if everyone follows the commit rule that a commit happens after
    # every user action, this commit is not required.


    rv = sharing.getView(view.repository)
    if targetCollection is not None:
        targetCollection = rv.findUUID(targetCollection.itsUUID)

    if not os.path.isfile(fullpath):
        raise ICalendarImportError(_(u"File does not exist, import cancelled."))

    before = epoch_time()

    (dir, filename) = os.path.split(fullpath)

    try:
        # TODO: coerceTzinfo?

        import stateless
        try:
            collection = stateless.importFile(rv , fullpath,
                collection=targetCollection, filters=filterAttributes,
                activity=activity)
        except:
            if logger:
                logger.exception("Failed importFile %s" % fullpath)
            raise ICalendarImportError(_(u"Problem with the file, import cancelled."))

        if targetCollection is None:
            collectionName = getattr(collection, 'displayName', 'Untitled')
            if collectionName == 'Untitled':
                name = "".join(filename.split('.')[0:-1]) or filename
                collection.displayName = name

        rv.commit(sharing.mergeFunction) # makes new collection available
        view.refresh(sharing.mergeFunction) # main ui repo view

    finally:
        sharing.releaseView(rv)

    collection = view.findUUID(collection.itsUUID)

    if targetCollection is None:
        schema.ns("osaf.app", view).sidebarCollection.add(collection)
        sideBarBlock = Block.findBlockByName('Sidebar')
        sideBarBlock.postEventByName("SelectItemsBroadcast",
                                     {'items': [collection]})
    if logger:
        logger.info("Imported collection in %s seconds" % (epoch_time()-before))
        
    return collection