Beispiel #1
0
def download_document(filename, email, db):
    """Record a download of *filename* by the user *email*.

    After the file is downloaded to the local server, the
    ``downloaded_on`` field of the Collaborators row is updated so later
    version checks can compare timestamps.

    Returns True on success, False when the DB update fails, and None
    when the file is locked by someone other than its owner.
    """
    email_id = get_email_id(email, db).id
    file_id = get_file_id_by_filename(filename, db)
    print(email_id, file_id)

    # Lock object for the file's on-disk path; constructing it does not
    # acquire anything, it only lets us inspect/modify the lock state.
    file_lock_check = LockFile(file_dir + '/' + filename)
    print("the file status   ", file_lock_check.is_locked())
    if file_lock_check.is_locked():
        if is_owner(email_id, file_id, db):
            # The owner may clear the lock and proceed.
            # BUG FIX: was `lock.release()` -- `lock` is undefined here.
            file_lock_check.release()
            print("owner file status  ", file_lock_check.is_locked())
        else:
            # Non-owners cannot download a locked file.
            print("tje file status   ", file_lock_check.is_locked())
            return None

    try:
        print("downloading from here")
        query = db.query(Collaborators).filter(
            and_(Collaborators.collaborator_id == email_id,
                 Collaborators.file_id == file_id))
        update_status = query.update(
            {'downloaded_on': dt.datetime.now()})
        db.commit()
        print(update_status)
        return True
    except Exception:
        # Best-effort semantics kept, but no longer swallows SystemExit /
        # KeyboardInterrupt like the previous bare `except:` did.
        return False
Beispiel #2
0
def GetPassword( resourcefile , list ) :
	# Look up the password(s) for the IP addresses in *list* from the
	# Excel resource sheet.  NOTE(review): `list` shadows the builtin.
	# Python 2 code (print statements, xrange).
	count = 0
	pwd = []
	rb = open_workbook(resourcefile)
	r_sheet = rb.sheet_by_index(0)
	from lockfile import LockFile
	lock = LockFile(resourcefile)
	lockid = lock.is_locked()
	print lockid
	# Poll up to 9 times (10 s apart) for the resource file's lock to be
	# freed, then take it ourselves.
	for a in xrange(1, 10):
		if lockid == False:
			lock.acquire()
			print "I have locked Resource File"
			break
		else:
			time.sleep (10)
		lockid = lock.is_locked()
	wb = copy(rb)
	w_sheet = wb.get_sheet(0)
	# First spreadsheet row holds the column headers.
	keys = [r_sheet.cell(0, col_index).value for col_index in xrange(r_sheet.ncols)]
	for row_index in xrange(1, r_sheet.nrows):
		# Map header -> cell value for this data row.
		d = {keys[col_index]: r_sheet.cell(row_index, col_index).value 
			for col_index in xrange(r_sheet.ncols)} 
		if ( d['IP'] in list) :
			count = count + 1
			# NOTE(review): pwd is overwritten, not appended, so only the
			# last matching row's password is returned -- confirm intent.
			pwd = d['Password']
			wb.save(resourcefile)
			lock.release()
		# NOTE(review): count can never exceed len(list), so this
		# early-exit condition (len(list)+1) looks unreachable.
		if(count == len(list)+1) :
			break
	return pwd
Beispiel #3
0
def ReadTestCase( resourcefile ) :
	# Collect the names of test cases whose "Execution" column is "yes"
	# in the resource spreadsheet.  Python 2 code (print stmt, xrange).
	# NOTE(review): `list` shadows the builtin list type.
	list = []
	row_index = 0
	from lockfile import LockFile
	lock = LockFile(resourcefile)
	lockid = lock.is_locked()
	print lockid
	# Wait (a single retry, 10 s) for the resource file lock, then take it.
	for a in xrange(1, 2):
		if lockid == False:
			lock.acquire()
			print "I have locked Resource File"
			break
		else:
			time.sleep (10)
		lockid = lock.is_locked()
	rb = open_workbook(resourcefile)
	r_sheet = rb.sheet_by_index(0)
	wb = copy(rb)
	w_sheet = wb.get_sheet(0)
	# Header row -> column names.
	keys = [r_sheet.cell(0, col_index).value for col_index in xrange(r_sheet.ncols)]
	j = r_sheet.nrows
	q = r_sheet.ncols
	# NOTE(review): col_index only exists here because Python 2 list
	# comprehensions leak their loop variable; this is a NameError on Py3.
	print col_index
	while row_index < j: 
		# Map header -> cell value (row 0, the header row itself, is also
		# scanned -- presumably harmless; verify).
		d = {keys[col_index]: r_sheet.cell(row_index, col_index).value for col_index in xrange(r_sheet.ncols)}
		temp = ""
		if ( d['Execution'] == "yes") :
			temp = d['TC Name']
			print temp
			list.append(temp)
			# NOTE(review): nothing was written to wb, so this per-row
			# save looks redundant -- confirm.
			wb.save(resourcefile)
		row_index = row_index + 1
	lock.release()
	return list
Beispiel #4
0
 def sync_folder(self):
     """Synchronize the encrypted and plaintext folders.

     Acquires file locks on both folder paths -- encrypted first, then
     plaintext -- and delegates the actual work to ``_do_sync_folder``.
     The ``with`` blocks guarantee both locks are released afterwards.
     """
     enc_lock = LockFile(self.encrypted_folder)
     if enc_lock.is_locked():
         self.info("Acquiring the lock of encrypted folder...")
     with enc_lock:
         plain_lock = LockFile(self.plain_folder)
         if plain_lock.is_locked():
             self.info("Acquiring the lock of plaintext folder...")
         with plain_lock:
             self._do_sync_folder()
Beispiel #5
0
def run_each_job(conf_dict, uploadurl):
    # Split the configured cyclic jobs into unit job files and run each
    # one, optionally serialized through file locks under the sync dir.
    # Python 2 code (legacy octal literal 02775, `print e`).
    if not os.path.exists(CYCLIC_PATH):
        os.makedirs(CYCLIC_PATH, 02775)

    # split each cyclic jobs
    cyclic_jobs_path = get_cyclic_jobs(conf_dict)
    split_job_params = ' '.join(cyclic_jobs_path)
    split_cmd = "%s split -j %s -o %s" % (SRC+'/bin/pst', split_job_params, CYCLIC_PATH)
    split_output = subprocess.check_output(split_cmd, shell=True)
    # Each output line looks like "<src> => <dst>"; keep the dst paths.
    split_jobs_path = [split_file.split(' => ')[1] for split_file in split_output.split('\n') if split_file]

    # run each splited jobs
    sync_dir = conf_dict.get('sync', {}).get('dir')
    sync_list = conf_dict.get('sync', {}).get('jobs')
    wait_timeout = conf_dict.get('sync', {}).get('timeout')

    if sync_dir and sync_list and 'all' in sync_list:
        # Global lock: serialize the whole batch under <sync_dir>/all.
        try:
            lock = LockFile(sync_dir + os.sep + 'all')
            lock.acquire()
            for unit_jobfile in split_jobs_path:
                run_cmd = "%s run -j %s -u %s" % (SRC + '/bin/pst', unit_jobfile, uploadurl)
                run_shell_cmd(run_cmd)
                print("Remove: %s" % unit_jobfile)
                os.remove(unit_jobfile)
            lock.release()
        except KeyboardInterrupt:
            # Release the lock on Ctrl-C so other runners are not blocked.
            # NOTE(review): `raise` is inside the `if`, so an interrupt
            # while not holding the lock is swallowed -- confirm intent.
            if lock.is_locked():
                lock.release()
                raise
    else:
        for unit_jobfile in split_jobs_path:
            run_cmd = "%s run -j %s -u %s" % (SRC + '/bin/pst', unit_jobfile, uploadurl)
            # add lock to sync there, e.g. run doker and host test same time
            testcase_name = common.load_conf(unit_jobfile).get('testcase')

            if sync_dir and sync_list and testcase_name in sync_list:
                # Per-testcase lock so concurrent runs of the same test
                # (e.g. docker vs. host) do not overlap.
                lock = LockFile(sync_dir + os.sep + testcase_name)
                try:
                    lock.acquire(timeout=wait_timeout)
                    run_shell_cmd(run_cmd)
                    lock.release()
                except LockTimeout as e:
                    print e
                except KeyboardInterrupt:
                    if lock.is_locked():
                        lock.release()
                        raise
            else:
                run_shell_cmd(run_cmd)
            print("Remove: %s" % unit_jobfile)
            os.remove(unit_jobfile)
Beispiel #6
0
    def handle(self, *args, **options):
        """Django management command for download-request maintenance.

        ``task`` selects one of: 'count' (report pending requests),
        'delete' (purge day-old zip files) or 'process' (build zips and
        notify requesters).  'delete' and 'process' are guarded by
        lockfiles and silently no-op when already running elsewhere.

        Raises CommandError for an unknown task.
        """
        task = options["task"]

        if task not in ("count", "delete", "process"):
            raise CommandError(
                "task must be either 'count', 'delete' or 'process'")

        if task == "count":
            count = DownloadRequest.objects.filter(active=True).count()
            self.stdout.write("%d requests pending" % (count))

        if task == "delete":
            lock = LockFile("/tmp/place.downloads.delete")
            if lock.is_locked():
                # Another delete run is in progress.
                return
            lock.acquire()
            # BUG FIX: the lock used to stay held forever if anything
            # below raised; release it in a finally block.
            try:
                for file_name in glob("%s/media/zip/*.zip" % BASE_DIR):
                    created = datetime.datetime.fromtimestamp(
                        os.path.getctime(file_name))
                    # Remove zips older than one day.
                    if (datetime.datetime.now() - created).days > 0:
                        os.remove(file_name)
            finally:
                lock.release()

        if task == "process":
            lock = LockFile("/tmp/place.downloads.process")
            if lock.is_locked():
                # Another processing run is in progress.
                return
            lock.acquire()
            try:
                count = DownloadRequest.objects.filter(active=True).count()
                for dr in DownloadRequest.objects.filter(active=True):
                    dr.create_zip_file_and_notify()
            finally:
                lock.release()

            if count > 0:
                self.stdout.write("%d requests processed" % (count))
Beispiel #7
0
def share_post(sharing, db: Session):
    """Share a file with another user by adding them as a collaborator.

    Once added, the collaborator can download, edit and upload the file.

    Returns True on success, None when the file is locked by a non-owner,
    and False when any lookup or DB operation fails.
    """
    user_email = sharing.collaborator_id
    filename = sharing.filename
    access_level = sharing.access_level
    try:
        file_id = get_file_id_by_filename(filename, db)
        collaborator_id = get_email_id(user_email, db).id

        file_lock_check = LockFile(file_dir + '/' + filename)

        if file_lock_check.is_locked():
            if is_owner(collaborator_id, file_id, db):
                # The owner may clear the lock before sharing.
                # BUG FIX: was `lock.release()` -- `lock` is undefined here.
                file_lock_check.release()
            else:
                return None

        shared_status = Collaborators(collaborator_id=collaborator_id,
                                      file_id=file_id,
                                      access_level=access_level,
                                      access_date=dt.datetime.now())
        db.add(shared_status)
        db.commit()
        db.refresh(shared_status)
        return True
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are no longer swallowed.
        return False
Beispiel #8
0
    def write(self, path):
        """Writes the PlPlugMapM contents to a file.

        When the optional ``lockfile`` module is importable, the write is
        guarded by a lock on *path* and a PlPlugMapMFileException is
        raised if the path is already locked by another writer.  Without
        ``lockfile``, the write happens unguarded.
        """
        try:
            from lockfile import LockFile
        except ImportError:
            # Narrowed from a bare `except:`; lockfile support is optional.
            LockFile = None

        if LockFile is not None:
            lock = LockFile(path)
            if lock.is_locked():
                raise PlPlugMapMFileException(
                    'path {0} is locked'.format(path))
            lock.acquire()
            # BUG FIX: release the lock even if the write raises, and use
            # a context manager so the file is always closed.
            try:
                with open(path, 'w') as ff:
                    ff.write(self.data._contents)
            finally:
                lock.release()
        else:
            with open(path, 'w') as ff:
                ff.write(self.data._contents)
Beispiel #9
0
    def run_function_with_lock(self, function, lock_file, timeout=30, args=[], kwargs={}):
        """Run *function* while holding the file lock at *lock_file*.

        Blocks until this instance owns the lock -- breaking and retaking
        locks that time out or belong to someone else -- then calls
        ``function(*args, **kwargs)`` and returns its result.  The lock is
        always released (or its file removed) on the way out.

        NOTE(review): ``args=[]`` / ``kwargs={}`` are mutable defaults;
        safe only as long as callers never mutate them.
        """
        self.logger.debug('starting function with lock: %s' % lock_file)
        lock = LockFile(lock_file)
        try:
            # Loop until the lock is provably ours.
            while not lock.i_am_locking():
                try:
                    lock.acquire(timeout=timeout)
                except (LockTimeout, NotMyLock) as e:
                    # Stale or foreign lock: break it and take it over.
                    self.logger.debug('breaking lock')
                    lock.break_lock()
                    lock.acquire()
                    self.logger.exception(e)

            self.logger.debug('lock acquired: starting function')
            return function(*args, **kwargs)
        finally:
            self.logger.debug('function done, releasing lock')

            # Best-effort cleanup: release normally, or delete the lock
            # file outright when it is not ours to release.
            if lock.is_locked():
                try:
                    lock.release()
                except NotMyLock:
                    try:
                        os.remove(lock_file)
                    except Exception as e:
                        self.logger.exception(e)
            self.logger.debug('lock released')
Beispiel #10
0
class Repo:
    """
    Class to deal with the metadata surrounding a Git repository
    """
    def __init__(self, parent, repo_url):
        # `parent` supplies repo_folder and the is_updater flag read in git().
        self.parent = parent
        self.url = repo_url
        # e.g. "https://host/foo/bar.git" -> "bar"
        self.folder_name = os.path.splitext(os.path.basename(repo_url))[0]
        self.containing_folder = os.path.join(parent.repo_folder, self.folder_name)
        if not os.path.exists(self.containing_folder):
            os.makedirs(self.containing_folder)
        # On-disk layout: <containing_folder>/{repo, lock, metadata.json}
        self.path = os.path.join(self.containing_folder, 'repo')
        self.lockfile_path = os.path.join(self.containing_folder, 'lock')
        self.lock = LockFile(self.lockfile_path)
        self.json_path = os.path.join(self.containing_folder, 'metadata.json')
        self.data = {}
        if os.path.exists(self.json_path):
            with open(self.json_path) as json_file:
                self.data = json.load(json_file)
        self.__git = None

    def __enter__(self):
        """
        Update context: lock the repo and bring the clone up to date.
        """
        # timeout=0: fail immediately if another updater holds the lock.
        self.lock.acquire(timeout=0)
        logger.info('Git: Updating %s', self.url)
        if not os.path.exists(self.path):
            logger.debug('Cloning %s', self.url)
            git.Git(self.containing_folder).clone(self.url, self.path)
        else:
            try:
                repo = self.git(is_updater=True)
                logger.debug('Pulling %s', self.url)
                repo.git.pull()
            except Exception as e:
                # Any pull failure falls back to wiping and re-cloning.
                logger.debug('Re-Cloning %s because %s', self.url, str(e))
                shutil.rmtree(self.path)
                git.Git(self.containing_folder).clone(self.url, self.path)
        return self

    def __exit__(self, type, value, traceback):
        # Save the updated time
        self.data['last_updated'] = str(datetime.datetime.utcnow())
        self.save()
        logger.info('Git: Update completed for %s', self.url)
        # NOTE(review): break_lock() (not release()) is used here --
        # presumably to clear the lock even if ownership changed; confirm.
        self.lock.break_lock()

    def save(self):
        # Persist the metadata dict next to the clone.
        with open(self.json_path, 'w') as f:
            json.dump(self.data, f)

    def git(self, is_updater=False):
        """Return a (cached) git.Repo handle, refusing access while an
        update holds the lock unless the caller is the updater itself."""
        if self.lock.is_locked() and (not self.parent.is_updater and not is_updater):
            raise AlreadyLocked('This repository is being updated, if you can see this message delete {}'.format(self.lockfile_path))
        else:
            if self.__git is None or is_updater:
                self.__git = git.Repo(self.path)
            return self.__git
Beispiel #11
0
def PythonAllocate ( num , resourcefile , list ) :
	# Allocate the first resource row whose STATUS is "free", mark it
	# "allocated" in the spreadsheet, and return *list* extended with the
	# allocated IP.  Python 2 code (print statements, xrange).
	# NOTE(review): `num` is unused and `list` shadows the builtin.
	IPList1 = ""
	count = 0
	row_index = 0
	from lockfile import LockFile
	lock = LockFile(resourcefile)
	lockid = lock.is_locked()
	print lockid
	# Wait (a single retry, 10 s) for the resource file lock, then take it.
	for a in xrange(1, 2):
		if lockid == False:
			lock.acquire()
			print "I have locked Resource File"
			break
		else:
			time.sleep (10)
		lockid = lock.is_locked()
	rb = open_workbook(resourcefile)
	r_sheet = rb.sheet_by_index(0)
	wb = copy(rb)
	w_sheet = wb.get_sheet(0)
	# Header row -> column names.
	keys = [r_sheet.cell(0, col_index).value for col_index in xrange(r_sheet.ncols)]
	j = r_sheet.nrows
	while row_index < j: 
		# Map header -> cell value for the current row.
		d = {keys[col_index]: r_sheet.cell(row_index, col_index).value for col_index in xrange(r_sheet.ncols)}
		length = len(list)
		if ( d['STATUS'] == "free") :
			a = " "
			# NOTE(review): col_index here is the loop variable leaked
			# from the comprehension (last column index, Python 2 only);
			# the -2 offset presumably targets the STATUS column -- verify.
			w_sheet.write(row_index,col_index-2,"allocated")
			wb.save(resourcefile)
			lock.release()
			count = count + 1
			resourcename = d['IP']
			# Append the allocated IP to the space-separated list.
			string1 = str(list)+ a + str(resourcename)
			string2 = string1.strip()
			print string2
			list = string2.split( )
			length = len(list)
			print list
			# Force the while loop to stop after this allocation.
			j = 1
		row_index = row_index + 1
	print "list",list
	# NOTE(review): after an allocation `list` is a non-empty list, never
	# "", so this branch looks unreachable (and would double-release).
	if ( list == "" ) :
		lock.release()
	return list
    def _download_rpm(self, nvr, arch):
        """Download ``<nvr>.<arch>.rpm`` from the Koji package URL into
        the local RPM cache and return the cached file's path.

        A lockfile serializes concurrent downloads of the same RPM: if
        another thread holds the lock, wait for it and assume it
        populated the cache.

        Raises ValueError when nvr/arch are missing or contain path
        components (guards against path traversal).  Returns an error
        dict when the build does not exist.
        """
        if nvr is None or arch is None:
            raise ValueError("Invalid option passed to connector")

        filename = '%s.%s.rpm' % (nvr, arch)
        file_path = os.path.split(filename)
        if file_path[0] != '':
            raise ValueError("Nvr can not contain path elements")
        if len(arch.split('/')) != 1 or os.path.split(arch)[0] != '':
            raise ValueError("Arch can not contain path elements")

        rpm_file_path = os.path.join(self._rpm_cache, filename)
        if os.path.exists(rpm_file_path):
            return rpm_file_path

        # BUG FIX: lock on the cache file's path; the original passed the
        # (head, tail) tuple returned by os.path.split().
        lockfile = LockFile(rpm_file_path)
        if lockfile.is_locked():
            # Block until the other downloader finishes, then assume it
            # was successful and the cache is populated.
            lockfile.acquire()
            lockfile.release()
            return rpm_file_path

        # Acquire the lock and always release it when done.
        lockfile.acquire()
        try:
            info = self.call('getBuild', {'buildInfo': nvr})
            if info is None:
                return {'error': 'No such build (%s)' % filename}

            if not os.path.exists(self._rpm_cache):
                os.mkdir(self._rpm_cache)

            url = '%s/%s/%s/%s/%s/%s' % (self._koji_pkg_url, info['name'],
                                         info['version'], info['release'],
                                         arch, filename)

            url_file = grabber.urlopen(url, text=filename)
            # 0o666 (was the legacy literal 0666): rw for all, minus umask.
            out = os.open(rpm_file_path,
                          os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o666)
            try:
                # Stream the payload to disk in 4 KiB chunks.
                while 1:
                    buf = url_file.read(4096)
                    if not buf:
                        break
                    os.write(out, buf)
            finally:
                os.close(out)
                url_file.close()
        finally:
            lockfile.release()

        return rpm_file_path
Beispiel #13
0
    def _download_rpm(self, nvr, arch):
        """Download ``<nvr>.<arch>.rpm`` from the Koji package URL into
        the local RPM cache and return the cached file's path.

        A lockfile serializes concurrent downloads of the same RPM: if
        another thread holds the lock, wait for it and assume it
        populated the cache.

        Raises ValueError when nvr/arch are missing or contain path
        components (guards against path traversal).  Returns an error
        dict when the build does not exist.
        """
        if nvr is None or arch is None:
            raise ValueError("Invalid option passed to connector")

        filename = '%s.%s.rpm' % (nvr, arch)
        file_path = os.path.split(filename)
        if file_path[0] != '':
            raise ValueError("Nvr can not contain path elements")
        if len(arch.split('/')) != 1 or os.path.split(arch)[0] != '':
            raise ValueError("Arch can not contain path elements")

        rpm_file_path = os.path.join(self._rpm_cache, filename)
        if os.path.exists(rpm_file_path):
            return rpm_file_path

        # BUG FIX: lock on the cache file's path; the original passed the
        # (head, tail) tuple returned by os.path.split().
        lockfile = LockFile(rpm_file_path)
        if lockfile.is_locked():
            # Block until the other downloader finishes, then assume it
            # was successful and the cache is populated.
            lockfile.acquire()
            lockfile.release()
            return rpm_file_path

        # Acquire the lock and always release it when done.
        lockfile.acquire()
        try:
            info = self.call('getBuild', {'buildInfo': nvr})
            if info is None:
                return {'error': 'No such build (%s)' % filename}

            if not os.path.exists(self._rpm_cache):
                os.mkdir(self._rpm_cache)

            url = '%s/%s/%s/%s/%s/%s' % (
                self._koji_pkg_url, info['name'], info['version'],
                info['release'], arch, filename)

            url_file = grabber.urlopen(url, text=filename)
            # 0o666 (was the legacy literal 0666): rw for all, minus umask.
            out = os.open(
                rpm_file_path, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0o666)
            try:
                # Stream the payload to disk in 4 KiB chunks.
                while 1:
                    buf = url_file.read(4096)
                    if not buf:
                        break
                    os.write(out, buf)
            finally:
                os.close(out)
                url_file.close()
        finally:
            lockfile.release()

        return rpm_file_path
Beispiel #14
0
class State(object):
    """Context manager exposing persisted application state as a dict.

    Entering reads the JSON state file (optionally under a lockfile);
    exiting writes it back only when it was modified inside the block.
    """

    def __init__(self, path=None, lock=False):
        # Default to the per-user appstate file when no path is given.
        self.path = path or join(util.get_home_dir(), "appstate.json")
        self.lock = lock
        self._state = {}
        self._prev_state = {}
        self._lockfile = None

    def __enter__(self):
        try:
            self._lock_state_file()
            if isfile(self.path):
                self._state = util.load_json(self.path)
        except exception.PlatformioException:
            # Unreadable state: start over with an empty dict.
            self._state = {}
        # Snapshot so __exit__ can detect modifications.
        self._prev_state = deepcopy(self._state)
        return self._state

    def __exit__(self, type_, value, traceback):
        if self._state != self._prev_state:
            # Pretty-print only for development builds.
            with open(self.path, "w") as fp:
                json.dump(self._state, fp,
                          indent=4 if "dev" in __version__ else None)
        self._unlock_state_file()

    def _lock_state_file(self):
        if not self.lock:
            return
        self._lockfile = LockFile(self.path)

        # A held lock older than ten seconds is treated as stale.
        if self._lockfile.is_locked():
            if time() - getmtime(self._lockfile.lock_file) > 10:
                self._lockfile.break_lock()

        try:
            self._lockfile.acquire()
        except LockFailed:
            raise exception.PlatformioException(
                "The directory `{0}` or its parent directory is not owned by "
                "the current user and PlatformIO can not store configuration "
                "data. \nPlease check the permissions and owner of that "
                "directory. Otherwise, please remove manually `{0}` "
                "directory and PlatformIO will create new from the current "
                "user.".format(dirname(self.path)))

    def _unlock_state_file(self):
        if self._lockfile is not None:
            self._lockfile.release()
Beispiel #15
0
class State(object):
    """Context manager exposing persisted application state as a dict.

    Entering reads the JSON state file (optionally under a lockfile);
    exiting writes it back only when it was modified inside the block.
    """

    def __init__(self, path=None, lock=False):
        # Default to the per-user appstate file when no path is given.
        self.path = path or join(util.get_home_dir(), "appstate.json")
        self.lock = lock
        self._state = {}
        self._prev_state = {}
        self._lockfile = None

    def __enter__(self):
        try:
            self._lock_state_file()
            if isfile(self.path):
                self._state = util.load_json(self.path)
        except ValueError:
            # Corrupt JSON: start over with an empty dict.
            self._state = {}
        # Snapshot so __exit__ can detect modifications.
        self._prev_state = deepcopy(self._state)
        return self._state

    def __exit__(self, type_, value, traceback):
        if self._state != self._prev_state:
            # Pretty-print only for development builds.
            with open(self.path, "w") as fp:
                json.dump(self._state, fp,
                          indent=4 if "dev" in __version__ else None)
        self._unlock_state_file()

    def _lock_state_file(self):
        if not self.lock:
            return
        self._lockfile = LockFile(self.path)

        # A held lock older than ten seconds is treated as stale.
        if self._lockfile.is_locked():
            if time() - getmtime(self._lockfile.lock_file) > 10:
                self._lockfile.break_lock()

        try:
            self._lockfile.acquire()
        except LockFailed:
            raise exception.PlatformioException(
                "The directory `{0}` or its parent directory is not owned by "
                "the current user and PlatformIO can not store configuration "
                "data. \nPlease check the permissions and owner of that "
                "directory. Otherwise, please remove manually `{0}` "
                "directory and PlatformIO will create new from the current "
                "user.".format(dirname(self.path)))

    def _unlock_state_file(self):
        if self._lockfile is not None:
            self._lockfile.release()
Beispiel #16
0
    def list_stale_environment_clones(self):
        """Return environment clone directories left behind on disk.

        A clone is stale when it matches the naming of a managed
        environment (plus suffix) but no symlink in the environment dir
        points at it -- typically residue from an interrupted run.
        Locked environments are skipped since they may be in use.

        :return: list(str)
        """
        symlink_targets = {}
        clone_dirs = []
        stale = []

        for entry in os.listdir(self.environment_dir):
            # Skip hidden files, the master repo, and blacklisted names.
            if entry.startswith('.'):
                continue
            if entry == self.master_repo_name:
                continue
            if self.blacklist.match(entry):
                self.logger.debug(
                    "Ignoring blacklisted environment {0}".format(entry))
                continue

            full_path = os.path.join(self.environment_dir, entry)
            if os.path.islink(full_path):
                symlink_targets[os.readlink(full_path)] = full_path
            elif os.path.isdir(full_path):
                clone_dirs.append(full_path)

        # A clone directory nobody links to is a stale candidate.
        for clone in clone_dirs:
            if clone in symlink_targets:
                continue

            env_path = self.environment_repo_path(
                self.identify_environment_name_from_clone_name(
                    self.identify_environment_name_from_path(clone)))
            if LockFile(env_path).is_locked():
                # Ignore locked environments, might be in use.
                continue

            self.logger.debug(
                "Stale environment detected: {0}".format(clone))
            stale.append(clone)

        return stale
class State(object):
    """Context manager exposing persisted application state as a dict.

    Entering reads the JSON state file (optionally under a lockfile);
    exiting writes it back only when it was modified inside the block.
    """

    def __init__(self, path=None, lock=False):
        # Default to the per-user appstate file when no path is given.
        self.path = path or join(util.get_home_dir(), "appstate.json")
        self.lock = lock
        self._state = {}
        self._prev_state = {}
        self._lockfile = None

    def __enter__(self):
        try:
            self._lock_state_file()
            if isfile(self.path):
                self._state = util.load_json(self.path)
        except exception.PlatformioException:
            # Unreadable state: start over with an empty dict.
            self._state = {}
        # Snapshot so __exit__ can detect modifications.
        self._prev_state = deepcopy(self._state)
        return self._state

    def __exit__(self, type_, value, traceback):
        if self._state != self._prev_state:
            try:
                # Pretty-print only for development builds.
                with open(self.path, "w") as fp:
                    json.dump(self._state, fp,
                              indent=4 if "dev" in __version__ else None)
            except IOError:
                raise exception.HomeDirPermissionsError(util.get_home_dir())
        self._unlock_state_file()

    def _lock_state_file(self):
        if not self.lock:
            return
        self._lockfile = LockFile(self.path)

        # A held lock older than ten seconds is treated as stale.
        if self._lockfile.is_locked():
            if time() - getmtime(self._lockfile.lock_file) > 10:
                self._lockfile.break_lock()

        try:
            self._lockfile.acquire()
        except LockFailed:
            raise exception.HomeDirPermissionsError(dirname(self.path))

    def _unlock_state_file(self):
        if self._lockfile is not None:
            self._lockfile.release()
Beispiel #18
0
 def lock(self):
     """
     Method used for acquiring a lock using the lockfile module.
     Returns False when the lockfile is already held, True after a
     successful acquisition.
     """
     candidate = LockFile(self.lockfile)
     # Any existing lock means failure -- even one held by this very
     # thread (lock.i_am_locking() could be True), which is inefficient
     # from a threading point of view but deliberately mirrors
     # MemcachedCacheQueue's behavior.
     if candidate.is_locked():
         return False
     # timeout=0 so a failed acquisition raises immediately rather than
     # failing silently.
     candidate.acquire(timeout=0)
     return True
Beispiel #19
0
class State(object):
    """Context manager exposing persisted application state as a dict.

    Entering reads the JSON state file (optionally under a lockfile);
    exiting writes it back only when it was modified inside the block.
    """

    def __init__(self, path=None, lock=False):
        # Default to the per-user appstate file when no path is given.
        self.path = path or join(get_home_dir(), "appstate.json")
        self.lock = lock
        self._state = {}
        self._prev_state = {}
        self._lockfile = None

    def __enter__(self):
        try:
            self._lock_state_file()
            if isfile(self.path):
                with open(self.path, "r") as fp:
                    self._state = json.load(fp)
        except ValueError:
            # Corrupt JSON: start over with an empty dict.
            self._state = {}
        # Snapshot so __exit__ can detect modifications.
        self._prev_state = deepcopy(self._state)
        return self._state

    def __exit__(self, type_, value, traceback):
        if self._state != self._prev_state:
            # Pretty-print only for development builds.
            with open(self.path, "w") as fp:
                json.dump(self._state, fp,
                          indent=4 if "dev" in __version__ else None)
        self._unlock_state_file()

    def _lock_state_file(self):
        if not self.lock:
            return
        self._lockfile = LockFile(self.path)

        # A held lock older than ten seconds is treated as stale.
        if self._lockfile.is_locked():
            if time() - getmtime(self._lockfile.lock_file) > 10:
                self._lockfile.break_lock()

        self._lockfile.acquire()

    def _unlock_state_file(self):
        if self._lockfile is not None:
            self._lockfile.release()
Beispiel #20
0
class State(object):
    """Context manager exposing persisted application state as a dict.

    Entering reads the JSON state file (optionally under a lockfile);
    exiting writes it back only when it was modified inside the block.
    """

    def __init__(self, path=None, lock=False):
        # Default to the per-user appstate file when no path is given.
        self.path = path or join(get_home_dir(), "appstate.json")
        self.lock = lock
        self._state = {}
        self._prev_state = {}
        self._lockfile = None

    def __enter__(self):
        try:
            self._lock_state_file()
            if isfile(self.path):
                with open(self.path, "r") as fp:
                    self._state = json.load(fp)
        except ValueError:
            # Corrupt JSON: start over with an empty dict.
            self._state = {}
        # Snapshot so __exit__ can detect modifications.
        self._prev_state = deepcopy(self._state)
        return self._state

    def __exit__(self, type_, value, traceback):
        if self._state != self._prev_state:
            # Pretty-print only for development builds.
            with open(self.path, "w") as fp:
                json.dump(self._state, fp,
                          indent=4 if "dev" in __version__ else None)
        self._unlock_state_file()

    def _lock_state_file(self):
        if not self.lock:
            return
        self._lockfile = LockFile(self.path)

        # A held lock older than ten seconds is treated as stale.
        if self._lockfile.is_locked():
            if time() - getmtime(self._lockfile.lock_file) > 10:
                self._lockfile.break_lock()

        self._lockfile.acquire()

    def _unlock_state_file(self):
        if self._lockfile is not None:
            self._lockfile.release()
Beispiel #21
0
def handle_selected_files(files):
    """Import the selected SQLite traffic dumps into the database.

    For each file: take a lockfile, validate the SQLite dump, create the
    Uid/Transfer rows, bulk-create Traffic rows and delete the source
    file.  Per-file errors are collected (in Russian, as displayed to the
    user) instead of aborting the whole batch.

    Returns a dict: file name -> {'name', 'loaded', 'errors'}.
    """
    status = {}
    for file in files:
        status[file] = {'name': file, 'loaded': False, 'errors': []}
        file_path = file_dir + file

        lock = LockFile(file_path)
        if lock.is_locked():
            status[file]['errors'].append(
                'Файл заблокирован (загружается другим процессом).')
            continue
        lock.acquire()

        if not isSQLite3(file_path):
            status[file]['errors'].append('Не корректный формат файла.')
            lock.release()
            continue

        conn = sqlite3.connect(file_path)
        # None signals "validation failed" after the try/finally below.
        packets_all = None
        # BUG FIX: the original closed the connection only on the success
        # path, leaking it on every error `continue`; the finally block
        # now closes the connection and releases the lock on all paths.
        try:
            cursor = conn.cursor()
            packets_file = exec_query(conn, cursor, query_packets_file)
            transfer_file = exec_query(conn, cursor, query_transfer_file)
            if isinstance(transfer_file, str):
                status[file]['errors'].append(transfer_file)
                continue
            if isinstance(packets_file, str):
                status[file]['errors'].append(packets_file)
                continue

            pc_name = transfer_file[0][0]
            mac_addr = transfer_file[0][1]
            date_db = transfer_file[0][2]
            uid_id = transfer_file[0][3]
            if not uid_id:
                status[file]['errors'].append('Не указан Uid')
                continue

            if not mac_addr:
                status[file]['errors'].append('Не указан mac-адрес')
                continue

            if not date_db:
                status[file]['errors'].append('Не указана дата выгрузки')
                continue

            if Transfer.objects.filter(mac_addr=mac_addr, date_db=date_db):
                status[file]['errors'].append('Данный файл уже загружен -' +
                                              mac_addr + ' - ' + date_db)
                continue

            if not Uid.objects.filter(uid=uid_id):
                uid_obj = Uid(name='Unknown', uid=uid_id)
            else:
                uid_obj = Uid.objects.get(uid=uid_id)
            transfer_obj = Transfer(pc_name=pc_name,
                                    mac_addr=mac_addr,
                                    date_db=date_db,
                                    uid=uid_obj)

            uid_obj.save()
            transfer_obj.save()
            packets_all = [Traffic(datetime=packet[0],
                                   src=packet[1],
                                   dst=packet[2],
                                   pkt_size=packet[3],
                                   transfer=transfer_obj)
                           for packet in packets_file]
        finally:
            conn.close()
            lock.release()

        if packets_all is None:
            # A validation error above already recorded its message.
            continue

        try:
            os.remove(file_path)
        except Exception as err:
            status[file]['errors'].append('Ошибка удаления файла: ' + str(err))
        else:
            # Only count the file as loaded once its source was removed.
            Traffic.objects.bulk_create(packets_all)
            status[file]['loaded'] = True

    return status
Beispiel #22
0
class BatchTrigger():
    """Orchestrate the parallel execution of test 'kits' in a batch.

    Reads batch/kit configuration from JSON files under
    02_Manager_Tier/EnviromentFile, hands out Android devices to kits via a
    lockfile-protected device-list JSON, runs each kit's behave suite in its
    own worker thread, and writes an HTML summary report for the whole batch.
    """
    def __init__(self):
        # Path of the batch-level HTML summary report; filled in by
        # Batch_Execution_Summary_Initialize().
        self.strExecSummaryHTMLFilePath = ""
        # Pass/fail counters feeding the pie chart in the report footer.
        self.intPassKITCount = 0
        self.intFailKITCount = 0
        # LockFile guarding android_device_list.json; created lazily in
        # build_execute_kitlist().
        self.lock = None

    #Function to execute the batch kits for the Build Pipeline
    def build_execute_kitlist(self):
        """Execute every kit of the current batch and build the summary report.

        Kits are sorted by their 'priority' field and each is started in a
        daemon thread (staggered by 50 s); the method joins all threads and
        then writes the report footer.  All exceptions are caught, logged,
        and the batch status flag is reset to 'NO'.
        """
        try:
            strEnvironmentFolderPAth = os.path.abspath(
                __file__ + "/../../../") + "/02_Manager_Tier/EnviromentFile/"
            self.strAndroidDeviceJsonFilePath = strEnvironmentFolderPAth + '/android_device_list.json'
            self.lock = LockFile(self.strAndroidDeviceJsonFilePath)
            deviceUtils.create_android_device_json()
            time.sleep(10)
            deviceUtils.install_app_android_device(strAndroidAppFilePath)
            utils.setAttribute_KitBatch('batch_execution', 'status', 'YES')
            oBKitJsonDict = self.get_kit_batch_json()
            self.current_batch_id = oBKitJsonDict["kit_batch"][
                'current_batch_id']
            oKitList = oBKitJsonDict["kit_batch"]['list_of_batches'][
                self.current_batch_id]['list_of_kits']
            if len(oKitList.keys()) > 0:
                utils.setAttribute_KitBatch(
                    'batch_execution', 'current_batch_result_folder',
                    self.current_batch_id + '_' + self.getTimeStamp(True))
                #Main Summary result
                self.Batch_Execution_Summary_Initialize()
                oKitPriority = {}
                #List Kits based on priority
                # NOTE(review): kits sharing the same numeric priority
                # overwrite each other in this dict — confirm priorities
                # are unique per batch.
                for current_kit_id in oKitList:
                    oKitPriority[int(
                        oKitList[current_kit_id]['priority'])] = current_kit_id
                #Trigger the Execution for the kits
                for oPKey in sorted(oKitPriority.keys()):
                    #self.trigger_parallel_kit_execution(oKitPriority[oPKey])
                    # Start the Individual Kit parallel execution

                    parallelExecThread = threading.Thread(
                        target=self.trigger_parallel_kit_execution,
                        args=(oKitPriority[oPKey], ))
                    parallelExecThread.daemon = True  # This kills the thread when main program exits
                    parallelExecThread.start()
                    parallelExecThread.name = oKitPriority[oPKey]
                    # NOTE(review): 'jobs' appears to be a module-level list —
                    # confirm it is reset between batches.
                    jobs.append(parallelExecThread)
                    time.sleep(50)
                for oJob in jobs:
                    oJob.join()
                #Footer for Batch summary report
                self.Batch_Execution_Summary_Footer()

            else:
                print('No kits in the Selected batch')
            utils.setAttribute_KitBatch('batch_execution', 'status', 'NO')
        except:
            print(
                'Batch Execution: Exception in build_execute_kitlist Method\n {0}'
                .format(traceback.format_exc().replace('File', '$~File')))
            utils.setAttribute_KitBatch('batch_execution', 'status', 'NO')

    def trigger_parallel_kit_execution(self, oKitPkey):
        """Worker-thread entry point for one kit.

        Polls every 30 s until a free device can be claimed, configures
        GlobalVar.json for this kit, runs the behave suite, adds the kit's
        result link to the batch summary, and finally marks the device
        COMPLETED so other kits may reuse it.
        """
        print("Hello")
        while True:
            boolGotDevice, strDevice, strPort, strDeviceID = self.get_device_from_json(
            )
            print('boolGotDevice, strDevice, strPort ', boolGotDevice,
                  strDevice, strPort)
            if boolGotDevice: break
            else: time.sleep(30)

        strResultFolder, User_ID, Test_Suite, Main_Client, Second_Client, strCurrentEnvironment = self.update_gloabalVar_json(
            self.current_batch_id, oKitPkey, strPort, strDeviceID)
        self.update_main_result_json(oKitPkey, strResultFolder, User_ID,
                                     Test_Suite, Main_Client, Second_Client,
                                     strCurrentEnvironment)

        strHTMLSummaryPath = strResultFolder + "/Hive-" + strCurrentEnvironment + "_Execution_Summary.HTML"
        #Trigger the test execution for the Kit
        #utils.setAttribute('common', 'appium_port', strPort)
        self.trigger_test_run(oKitPkey)
        #Add Hyper link for the Kit execution
        '''Main_Client = utils.getAttribute('common', 'mainClient')
        Second_Client = utils.getAttribute('common', 'secondClient')
        User_ID = utils.getAttribute('common', 'userName')
        Test_Suite = utils.getAttribute('common', 'test_suite')'''
        Kit_Setup = ""
        oDeviceVersionDict = self.get_device_details()
        print(oDeviceVersionDict)
        # Build an HTML snippet listing each device model and version.
        for oKey in oDeviceVersionDict.keys():
            Kit_Setup = Kit_Setup + oDeviceVersionDict[oKey][
                'model'] + ' ==> ' + oDeviceVersionDict[oKey][
                    'version'] + '<br>'
        self.Batch_Execution_Summary_KitAddLink(oKitPkey, strHTMLSummaryPath,
                                                Main_Client, Second_Client,
                                                User_ID, Kit_Setup, Test_Suite)
        self.set_device_exec_status(strDevice, 'COMPLETED')

    def get_device_from_json(self):
        """Try to claim a free Android device from the device-list JSON.

        Acquires self.lock, marks the first device whose status is not
        'IN PROGRESS' as taken, writes the JSON back, and releases the lock.
        If the lock is already held by another thread/process the method
        returns immediately with boolGotDevice == False (caller retries).

        Returns the tuple (boolGotDevice, strDevice, strPort, strDeviceID).
        """
        strPort = ""
        strDevice = ""
        boolGotDevice = False
        strDeviceID = ""
        try:
            if not self.lock.is_locked():
                self.lock.acquire()
                print(self.lock.path, 'is locked.')

                strJson = open(self.strAndroidDeviceJsonFilePath, mode='r')
                oADLJsonDict = json.loads(strJson.read())
                strJson.close()
                oANDLIST = oADLJsonDict['android_devicelist']
                for oDevice in oANDLIST:
                    strDeviceExecStatus = oANDLIST[oDevice]['status']
                    if not strDeviceExecStatus.upper() == 'IN PROGRESS':
                        strPort = oANDLIST[oDevice]['port']
                        strDeviceID = oANDLIST[oDevice]['device_id']
                        strDevice = oDevice
                        boolGotDevice = True
                        oANDLIST[oDevice]['status'] = 'IN PROGRESS'
                        oADLJsonDict['android_devicelist'] = oANDLIST
                        #Write back the JSON to the GlobalVar.JSON
                        oJson = open(self.strAndroidDeviceJsonFilePath,
                                     mode='w+')
                        oJson.write(
                            json.dumps(oADLJsonDict, indent=4, sort_keys=True))
                        oJson.close()
                        break
                self.lock.release()
                print(self.lock.path, 'is unlocked.')
            return boolGotDevice, strDevice, strPort, strDeviceID
        except:
            print(traceback.format_exc())
            return boolGotDevice, strDevice, strPort, strDeviceID

    def set_device_exec_status(self, strDevice, strStatus):
        """Write *strStatus* for *strDevice* into the device-list JSON.

        NOTE(review): unlike get_device_from_json, the release() call below
        is OUTSIDE the `if not is_locked()` guard — if the lock was already
        held elsewhere this releases a lock we never acquired, which the
        lockfile library may reject.  Confirm whether this is intentional.
        """
        if not self.lock.is_locked():
            self.lock.acquire()
            print(self.lock.path, 'is locked.')
            strJson = open(self.strAndroidDeviceJsonFilePath, mode='r')
            oADLJsonDict = json.loads(strJson.read())
            strJson.close()
            oANDLIST = oADLJsonDict['android_devicelist']
            for oDevice in oANDLIST:
                if oDevice == strDevice:
                    oANDLIST[oDevice]['status'] = strStatus
                    oADLJsonDict['android_devicelist'] = oANDLIST
                    #Write back the JSON to the GlobalVar.JSON
                    oJson = open(self.strAndroidDeviceJsonFilePath, mode='w+')
                    oJson.write(
                        json.dumps(oADLJsonDict, indent=4, sort_keys=True))
                    oJson.close()
        self.lock.release()
        print(self.lock.path, 'is unlocked.')

    #Updated the GlobalVarJson According to the Selected Kit
    def update_gloabalVar_json(self,
                               current_batch_id=None,
                               current_kit_id=None,
                               appium_node='4723',
                               strDeviceID=""):
        """Rewrite GlobalVar.json with the selected kit's configuration.

        Copies environment, client, credential and device settings for the
        kit into the global-variables JSON and computes the kit's result
        folder name.  Returns (strResultFolder, userName, test_suite,
        mainClient, secondClient, currentEnvironment), or None when either
        id argument is missing.
        """
        strEnvironmentFolderPAth = os.path.abspath(
            __file__ + "/../../../") + "/02_Manager_Tier/EnviromentFile/"
        strGlobVarFilePath = strEnvironmentFolderPAth + '/GlobalVar.json'
        strJson = open(strGlobVarFilePath, mode='r')
        oGBVJsonDict = json.loads(strJson.read())
        strJson.close()

        oGlobalDict = oGBVJsonDict['globalVariables']
        if current_batch_id == None or current_kit_id == None: return

        if current_batch_id != "" and current_kit_id != "":
            oJsonDict = self.get_kit_batch_json()
            oKitDetails = oJsonDict["kit_batch"]['list_of_batches'][
                current_batch_id]['list_of_kits'][current_kit_id]
            oGlobalDict['currentEnvironment'] = oKitDetails[
                'currentEnvironment']
            oGlobalDict['apiValidationType'] = oKitDetails['apiValidationType']
            oGlobalDict['mainClient'] = oKitDetails['mainClient']['name']
            oGlobalDict['secondClient'] = oKitDetails['secondClient']['name']
            strAppVer = oKitDetails['currentAppVersion']
            oGlobalDict['currentAppVersion'] = strAppVer
            oGlobalDict['userName'] = oKitDetails['userName']
            oGlobalDict['password'] = oKitDetails['password']
            oGlobalDict['appium_port'] = appium_node
            oGlobalDict['appium_udid'] = strDeviceID
            #Write result folder name
            oGlobalDict['batch_execution'] = {}
            oGlobalDict['batch_execution']['status'] = "YES"
            strResultFolder = current_kit_id + '_' + oGlobalDict[
                'test_suite'] + '_' + self.getTimeStamp(True)
            oGlobalDict['batch_execution'][
                'result_folder_name'] = strResultFolder
            print(strResultFolder)

            #oKitDetails['resultFolderLabel'] = "" # Environment.get()
            oCurrentEnvDict = oGlobalDict['listOfEnvironments'][
                oKitDetails['currentEnvironment']]

            # Copy main-client device settings into the environment entry
            # keyed by platform prefix + app version.
            if 'IOS' in oKitDetails['mainClient']['name'].upper():
                oCurrentEnvDict['iOS' + strAppVer][
                    'appFileName'] = oKitDetails['mainClient']['appFileName']
                oCurrentEnvDict['iOS' + strAppVer]['deviceName'] = oKitDetails[
                    'mainClient']['deviceName']
                oCurrentEnvDict[
                    'iOS' +
                    strAppVer]['udid'] = oKitDetails['mainClient']['udid']
                oCurrentEnvDict['iOS' +
                                strAppVer]['platformVersion'] = oKitDetails[
                                    'mainClient']['platformVersion']
            elif 'ANDROID' in oKitDetails['mainClient']['name'].upper():
                oCurrentEnvDict['android' + strAppVer][
                    'appFileName'] = oKitDetails['mainClient']['appFileName']
                oCurrentEnvDict['android' + strAppVer][
                    'deviceName'] = oKitDetails['mainClient']['deviceName']
                oCurrentEnvDict['android' +
                                strAppVer]['platformVersion'] = oKitDetails[
                                    'mainClient']['platformVersion']
            elif 'WEB' in oKitDetails['mainClient']['name'].upper():
                oCurrentEnvDict['web' + strAppVer][
                    'browserName'] = oKitDetails['mainClient']['browserName']
                oCurrentEnvDict['web' + strAppVer]['loginURL'] = oKitDetails[
                    'mainClient']['loginURL']

            oGlobalDict['secondClientValidateFlag'] = oKitDetails[
                'secondClientValidateFlag']
            # Same copy for the second client.
            # NOTE(review): the WEB branch below tests mainClient's name but
            # copies secondClient values — looks like a copy/paste slip;
            # confirm before relying on web second-client runs.
            if 'IOS' in oKitDetails['secondClient']['name'].upper():
                oCurrentEnvDict['iOS' + strAppVer][
                    'appFileName'] = oKitDetails['secondClient']['appFileName']
                oCurrentEnvDict['iOS' + strAppVer]['deviceName'] = oKitDetails[
                    'secondClient']['deviceName']
                oCurrentEnvDict[
                    'iOS' +
                    strAppVer]['udid'] = oKitDetails['secondClient']['udid']
                oCurrentEnvDict['iOS' +
                                strAppVer]['platformVersion'] = oKitDetails[
                                    'secondClient']['platformVersion']
            elif 'ANDROID' in oKitDetails['secondClient']['name'].upper():
                oCurrentEnvDict['android' + strAppVer][
                    'appFileName'] = oKitDetails['secondClient']['appFileName']
                oCurrentEnvDict['android' + strAppVer][
                    'deviceName'] = oKitDetails['secondClient']['deviceName']
                oCurrentEnvDict['android' +
                                strAppVer]['platformVersion'] = oKitDetails[
                                    'secondClient']['platformVersion']
            elif 'WEB' in oKitDetails['mainClient']['name'].upper():
                oCurrentEnvDict['web' + strAppVer][
                    'browserName'] = oKitDetails['secondClient']['browserName']
                oCurrentEnvDict['web' + strAppVer]['loginURL'] = oKitDetails[
                    'secondClient']['loginURL']

            oGlobalDict['listOfEnvironments'][
                oKitDetails['currentEnvironment']] = oCurrentEnvDict
            oGBVJsonDict['globalVariables'] = oGlobalDict

            oGlobalDict['test_suite'] = oKitDetails['test_suite']
            #Write back the JSON to the GlobalVar.JSON
            oJson = open(strGlobVarFilePath, mode='w+')
            oJson.write(json.dumps(oGBVJsonDict, indent=4, sort_keys=True))
            oJson.close()

            return strResultFolder, oGlobalDict['userName'], oGlobalDict[
                'test_suite'], oGlobalDict['mainClient'], oGlobalDict[
                    'secondClient'], oGlobalDict['currentEnvironment']

    def update_main_result_json(self, oKitPkey, strResultFolder, User_ID,
                                Test_Suite, Main_Client, Second_Client,
                                strCurrentEnvironment):
        """Record the kit's run metadata under 'list_of_kits' in main_result.json."""

        oMRJson = open(self.MainResultJsonPath, mode='r')
        oMRJsonDict = json.loads(oMRJson.read())
        oMRJson.close()

        oMRJsonDict["list_of_kits"][oKitPkey] = {
            "result_folder": strResultFolder,
            "username": User_ID,
            "test_suite": Test_Suite,
            "mainClient": Main_Client,
            "secondClient": Second_Client,
            "currentEnvironment": strCurrentEnvironment
        }

        #Write back the JSON to the GlobalVar.JSON
        oMRJson = open(self.MainResultJsonPath, mode='w+')
        oMRJson.write(json.dumps(oMRJsonDict, indent=4, sort_keys=True))
        oMRJson.close()

    #Create the Main Results Json
    def create_main_result_json(self):
        """Create an empty main_result.json skeleton in the batch result folder."""
        self.MainResultJsonPath = self.BatchResultPath + '/main_result.json'
        oMRJson = open(self.MainResultJsonPath, mode='w+')

        oJsonDict = {
            "main_result_folder": self.BatchResultPath,
            "current_batch_id": self.current_batch_id,
            "list_of_kits": {}
        }

        oMRJson.write(json.dumps(oJsonDict, indent=4, sort_keys=False))
        oMRJson.close()

    def trigger_test_run(self, strKitID=""):
        """Run the behave suite configured for the current kit, streaming output.

        NOTE(review): if 'test_suite' matches none of the branches below,
        oProcess is never bound and the readline loop raises NameError —
        confirm the allowed suite names are validated upstream.
        """

        self.set_global_var()
        '''if not ('ZIGBEE' in utils.getAttribute("common", 'apiValidationType').upper() or 'WEB' in utils.getAttribute("common", 'mainClient').upper()):
            subprocess.call('killall node', shell=True)               
            subprocess.Popen(striOSAppiumConnectionString, shell=True)'''
        '''if 'ANDROID' in utils.getAttribute("common", 'mainClient').upper():
            subprocess.call('adb kill-server', shell=True)
            subprocess.call('adb start-server', shell=True) 
            print()'''
        time.sleep(5)
        strTestSuite = utils.getAttribute("common", 'test_suite')
        if strTestSuite == 'BasicSmokeTest_Dual':
            oProcess = subprocess.Popen("behave --tags=BasicSmokeTest",
                                        stdout=subprocess.PIPE,
                                        shell=True)
        elif strTestSuite == 'BasicSmokeTest_Heating':
            oProcess = subprocess.Popen(
                "behave --tags=BasicSmokeTest --tags=Heating",
                stdout=subprocess.PIPE,
                shell=True)
        elif strTestSuite == 'BasicSmokeTest_HotWater':
            oProcess = subprocess.Popen(
                "behave --tags=BasicSmokeTest --tags=HotWater",
                stdout=subprocess.PIPE,
                shell=True)
        elif strTestSuite == 'ScheduleTest_Dual':
            oProcess = subprocess.Popen(
                "behave --tags=ScheduleTest --tags=Verify",
                stdout=subprocess.PIPE,
                shell=True)
        elif strTestSuite == 'ScheduleTest_Heating':
            oProcess = subprocess.Popen(
                "behave --tags=ScheduleTest --tags=Verify --tags=Heating",
                stdout=subprocess.PIPE,
                shell=True)
        elif strTestSuite == 'ScheduleTest_HotWater':
            oProcess = subprocess.Popen(
                "behave --tags=ScheduleTest --tags=Verify --tags=HotWater",
                stdout=subprocess.PIPE,
                shell=True)
        elif strTestSuite == 'Test_Batch':
            oProcess = subprocess.Popen("behave --tags=Test_Batch",
                                        stdout=subprocess.PIPE,
                                        shell=True)
        print('Test suite Triggered')
        # Echo the child's stdout line by line until it exits.
        while True:
            output = oProcess.stdout.readline()
            if oProcess.poll() is not None:
                break
            if output:
                print(strKitID, output)
        print("@@@@@@@@@@@Successfully completed", strKitID, output)

    def get_kit_batch_json(self):
        """Load and return kit_batch.json as a dict."""
        strEnvironmentFolderPAth = os.path.abspath(
            __file__ + "/../../../") + "/02_Manager_Tier/EnviromentFile/"
        strGlobVarFilePath = strEnvironmentFolderPAth + '/kit_batch.json'
        strJson = open(strGlobVarFilePath, mode='r')
        oJsonDict = json.loads(strJson.read())
        strJson.close()
        return oJsonDict

    def set_global_var(self):
        """Load GlobalVar.json and publish it on utils.oJsonDict for the run."""
        strEnvironmentFolderPAth = os.path.abspath(
            __file__ + "/../../../") + "/02_Manager_Tier/EnviromentFile"
        strGlobVarFilePath = strEnvironmentFolderPAth + '/GlobalVar.json'
        strJson = open(strGlobVarFilePath, mode='r')
        oJsonDict = json.loads(strJson.read())
        strJson.close()
        utils.oJsonDict = oJsonDict

    #Gets the Time stamp for creating the folder set or for reporting time stamp based on boolFolderCreate
    def getTimeStamp(self, boolFolderCreate):
        """Return the current time, filesystem-safe when boolFolderCreate is True."""
        if boolFolderCreate:
            str_format = "%d-%b-%Y_%H-%M-%S"
        else:
            str_format = "%d-%b-%Y %H:%M:%S"
        today = datetime.today()
        return today.strftime(str_format)

    #Batch Execution Summary
    def Batch_Execution_Summary_Initialize(self):
        """Choose the batch result folder and write the HTML report header.

        Picks a network share mount when present (macOS or Linux),
        otherwise a local 03_Results_Tier folder, creates main_result.json,
        and emits the report's <head> plus the results table header.
        """
        strAPI = utils.getAttribute('common', 'apiValidationType')
        if 'ZIGBEE' in strAPI.upper():
            strAPIFolder = 'Device_Test_Automation/'
        else:
            strAPIFolder = 'Web-Mobile_Test_Automation'
        strSystemResultFolderName = ''
        if 'DARWIN' in platform.system().upper():
            if os.path.exists("/volumes/hardware"):
                strSystemResultFolderName = getpass.getuser(
                ) + "_" + socket.gethostname().split(".")[0]
                strTestResultFolder = "/volumes/hardware/" + strAPIFolder + '/Test_Results/'
                self.ensure_dir(strTestResultFolder +
                                strSystemResultFolderName)
        elif 'LINUX' in platform.system().upper():
            if os.path.exists("/home/pi/hardware"):
                strSystemResultFolderName = socket.gethostname().split(
                    ".")[0].split("-")[1]
                strTestResultFolder = "/home/pi/hardware/" + strAPIFolder + '/Test_Results/'
                self.ensure_dir(strTestResultFolder +
                                strSystemResultFolderName)

        if not strSystemResultFolderName == "":
            self.strResultsPath = strTestResultFolder + strSystemResultFolderName + '/'
        else:
            self.strResultsPath = os.path.abspath(
                __file__ + "/../../../") + '/03_Results_Tier/'
        strResultsPath = self.strResultsPath + utils.getAttribute_KitBatch(
            'batch_execution', 'current_batch_result_folder') + '/'
        if not os.path.exists(strResultsPath): os.makedirs(strResultsPath)
        self.BatchResultPath = strResultsPath
        self.strExecSummaryHTMLFilePath = strResultsPath + "HIVE_BATCH_Execution_Summary.HTML"
        print(self.strExecSummaryHTMLFilePath)
        self.create_main_result_json()
        try:
            strEnvironmentFilePath = os.path.abspath(
                __file__ + "/../../../") + "/02_Manager_Tier/EnviromentFile"
            oFileW = open(strEnvironmentFilePath + '/scripts/Temp.txt', 'w')
            oFileW.write(self.strExecSummaryHTMLFilePath)
            oFileW.close()
            strCSSFilePath = strEnvironmentFilePath + "/Style.CSS"
            oFileReader = open(strCSSFilePath, 'r')
            strData = oFileReader.read()
            oFileReader.close()
            oFileWriter = open(self.strExecSummaryHTMLFilePath, 'w')
            oFileWriter.write("<!DOCTYPE html>\n")
            oFileWriter.write("<html>\n")
            oFileWriter.write("<head>\n")
            oFileWriter.write("         <meta charset='UTF-8'>\n")
            oFileWriter.write(
                "         <title>Hive - Automation Execution Results Batch-Summary</title>\n"
            )
            oFileWriter.write(strData + '\n')
            oFileWriter.write("</head>\n")
            oFileWriter.write("<body>\n")
            oFileWriter.write("<table id='header'>\n")
            oFileWriter.write("<colgroup>\n")
            oFileWriter.write("<col style='width: 25%' />\n")
            oFileWriter.write("<col style='width: 25%' />\n")
            oFileWriter.write("<col style='width: 25%' />\n")
            oFileWriter.write("<col style='width: 25%' />\n")
            oFileWriter.write("</colgroup>\n")
            oFileWriter.write("<thead>\n")
            oFileWriter.write("<tr class='heading'>\n")
            oFileWriter.write(
                "<th colspan='4' style='font-family:Copperplate Gothic Bold; font-size:1.4em;'>\n"
            )
            oFileWriter.write(
                "Hive - Automation Execution Results Batch-Summary\n")
            oFileWriter.write("</th>\n")
            oFileWriter.write("</tr>\n")
            oFileWriter.write("<tr class='subheading'>\n")
            oFileWriter.write("<th>&nbsp;Date&nbsp;&&nbsp;Time</th>\n")
            #oFileWriter.write("<th>&nbsp;:&nbsp;25-Jul-2014&nbsp;05:02:20&nbsp;PM</th>\n")
            oFileWriter.write("<th>&nbsp;:&nbsp;" + self.getTimeStamp(False) +
                              "</th>\n")
            # Start the wall-clock timer that the footer uses for total duration.
            self.intExecStartTime = time.monotonic()
            oFileWriter.write("<th>&nbsp;Batch Name</th>\n")
            oFileWriter.write("<th>&nbsp;:&nbsp;" + self.current_batch_id +
                              "</th>\n")
            oFileWriter.write("</tr>\n")
            oFileWriter.write("</thead>\n")
            oFileWriter.write("</table>\n")
            oFileWriter.write("<table id='main'>\n")
            oFileWriter.write("<colgroup>\n")
            oFileWriter.write("<col style='width: 10%' />\n")
            oFileWriter.write("<col style='width: 10%' />\n")
            oFileWriter.write("<col style='width: 10%' />\n")
            oFileWriter.write("<col style='width: 15%' />\n")
            oFileWriter.write("<col style='width: 15%' />\n")
            oFileWriter.write("<col style='width: 15%' />\n")
            oFileWriter.write("<col style='width: 15%' />\n")
            oFileWriter.write("<col style='width: 10%' />\n")
            oFileWriter.write("</colgroup>\n")

            oFileWriter.write("<thead>\n")
            oFileWriter.write("<tr class='heading'>\n")
            oFileWriter.write("<th>Kit_ID</th>\n")
            oFileWriter.write("<th>Main_Client</th>\n")
            oFileWriter.write("<th>Second_Client</th>\n")
            oFileWriter.write("<th>User_ID</th>\n")
            oFileWriter.write("<th>Kit_Setup</th>\n")
            oFileWriter.write("<th>Test_Suite</th>\n")
            oFileWriter.write("<th>Execution_Time</th>\n")
            oFileWriter.write("<th>Status</th>\n")
            oFileWriter.write("</tr>\n")
            oFileWriter.write("</thead>\n")

            #Always close files.
            oFileWriter.close()
        except:
            print(
                'Reporter Exception in BATCH_HTML_Execution_Summary_Initialize\n {0}'
                .format(traceback.format_exc().replace('File', '$~File')))

    def Batch_Execution_Summary_KitAddLink(self, Kit_ID, strHTMLSummaryPath,
                                           Main_Client, Second_Client, User_ID,
                                           Kit_Setup, Test_Suite):
        """Append one result row (with a link to the kit's own report) to the summary."""
        try:
            oFileWriter = open(self.strExecSummaryHTMLFilePath, 'a')
            oFileWriter.write("<tr class='content' >\n")
            #oFileDet = os.path.split(utils.getAttribute_KitBatch('batch_execution', 'current_kit_result_summary_path'))
            #strFilePath = utils.getAttribute_KitBatch('batch_execution', 'current_kit_result_summary_path')
            oFileWriter.write("<td class='justified'><a href='" +
                              './Kit_Results/' + strHTMLSummaryPath +
                              "' target='about_blank'>" + str(Kit_ID) +
                              "</a></td>\n")
            oFileWriter.write("<td class='justified'>" + Main_Client +
                              "</td>\n")
            oFileWriter.write("<td class='justified'>" + Second_Client +
                              "</td>\n")
            oFileWriter.write("<td class='justified'>" + User_ID + "</td>\n")
            oFileWriter.write("<td class='justified'>" + Kit_Setup + "</td>\n")
            oFileWriter.write("<td class='justified'>" + Test_Suite +
                              "</td>\n")
            oFileWriter.write(
                "<td class='justified'>" + utils.getAttribute_KitBatch(
                    'batch_execution', 'current_kit_execution_time') +
                "</td>\n")
            self.strKitStatus = utils.getAttribute_KitBatch(
                'batch_execution', 'current_kit_status')
            # CSS class is the first four letters of the status, lowercased
            # ('pass' / 'fail').
            strStatusClass = self.strKitStatus[0:4].lower()
            oFileWriter.write("<td class='" + strStatusClass + "'>" +
                              self.strKitStatus + "</td>\n")
            oFileWriter.write("</tr>\n")
            if (self.strKitStatus == "PASSED"):
                self.intPassKITCount = self.intPassKITCount + 1
            if (self.strKitStatus == "FAILED"):
                self.intFailKITCount = self.intFailKITCount + 1

            #Always close files.
            oFileWriter.close()
        except IOError as e:
            print("I/O error({0}): {1}".format(e.errno, e.strerror))

    def Batch_Execution_Summary_Footer(self):
        """Append the pass/fail pie chart, totals and duration to the summary report."""

        try:
            oFileWriter = open(self.strExecSummaryHTMLFilePath, 'a')
            oFileWriter.write("</tbody>\n")
            oFileWriter.write("</table>\n")
            oFileWriter.write("<table>\n")
            oFileWriter.write("<script type='text/javascript'>\n")
            oFileWriter.write("window.onload = function () {\n")
            oFileWriter.write("CanvasJS.addColorSet('chartshades',\n")
            oFileWriter.write("[//colorSet Array\n")
            oFileWriter.write("'lightgreen',\n")
            oFileWriter.write("'red'           \n")
            oFileWriter.write("]);\n")
            oFileWriter.write(
                "var chart = new CanvasJS.Chart('chartContainer',\n")
            oFileWriter.write("{\n")
            oFileWriter.write("colorSet: 'chartshades',\n")
            oFileWriter.write("zoomEnabled: true,\n")
            oFileWriter.write("title:{\n")
            oFileWriter.write("fontColor: '#C6FFEC',\n")
            oFileWriter.write("text: 'Execution Status'\n")
            oFileWriter.write("},\n")
            oFileWriter.write("animationEnabled: true,\n")
            oFileWriter.write("backgroundColor: 'black',\n")
            oFileWriter.write("legend:{\n")
            oFileWriter.write("fontColor: '#C6FFEC',\n")
            oFileWriter.write("verticalAlign: 'bottom',\n")
            oFileWriter.write("horizontalAlign: 'center'\n")
            oFileWriter.write("},data: [{        \n")
            oFileWriter.write("indexLabelFontSize: 20,\n")
            oFileWriter.write("indexLabelFontFamily: 'Monospace',     \n")
            oFileWriter.write("indexLabelFontColor: '#C6FFEC', \n")
            oFileWriter.write("indexLabelLineColor: '#C6FFEC',     \n")
            oFileWriter.write("indexLabelPlacement: 'auto',\n")
            oFileWriter.write("type: 'pie',       \n")
            oFileWriter.write("showInLegend: true,\n")
            oFileWriter.write(
                "toolTipContent: '{y} - <strong>#percent%</strong>',\n")
            oFileWriter.write("dataPoints: [\n")
            if not self.intPassKITCount == 0:
                oFileWriter.write(
                    "{  y: " + str(self.intPassKITCount) +
                    ", legendText:'PASS', indexLabel: '{y}' },\n")
            else:
                oFileWriter.write("{  y: " + str(self.intPassKITCount) +
                                  ", legendText:'PASS'},\n")
            if not self.intFailKITCount == 0:
                oFileWriter.write("{  y: " + str(self.intFailKITCount) +
                                  ", legendText:'FAIL' , indexLabel: '{y}'}\n")
            else:
                oFileWriter.write("{  y: " + str(self.intFailKITCount) +
                                  ", legendText:'FAIL'}\n")
            oFileWriter.write("]}]});chart.render();}\n")
            oFileWriter.write("</script>\n")
            oFileWriter.write(
                "<script type='text/javascript' src='./Kit_Results/Temp/canvasjs.min.js'></script>\n"
            )
            oFileWriter.write(
                "<tr  class='content' ><td><div id='chartContainer' style='height: 300px; width: 100%;'></div></td></tr></table>\n"
            )

            oFileWriter.write("<table id='footer'>\n")
            oFileWriter.write("<colgroup>\n")
            oFileWriter.write("<col style='width: 25%' />\n")
            oFileWriter.write("<col style='width: 25%' />\n")
            oFileWriter.write("<col style='width: 25%' />\n")
            oFileWriter.write("<col style='width: 25%' />\n")
            oFileWriter.write("</colgroup>\n")

            oFileWriter.write("<tfoot>\n")
            oFileWriter.write("<tr class='heading'>\n")

            intExecEndTime = time.monotonic()
            strDuration = str(
                timedelta(seconds=intExecEndTime - self.intExecStartTime))
            strDuration = self.getDuration(strDuration)
            oFileWriter.write("<th colspan='4'>Total Duration: " +
                              strDuration + "</th>\n")
            oFileWriter.write("</tr>\n")
            oFileWriter.write("<tr class='subheading'>\n")
            oFileWriter.write("<td class='pass'>&nbsp;Tests passed</td>\n")
            oFileWriter.write("<td class='pass'>&nbsp;: {}</td>\n".format(
                self.intPassKITCount))
            oFileWriter.write("<td class='fail'>&nbsp;Tests failed</td>\n")
            oFileWriter.write("<td class='fail'>&nbsp;: {}</td>\n".format(
                self.intFailKITCount))
            oFileWriter.write("</tr>\n")
            oFileWriter.write("</tfoot>\n")
            oFileWriter.write("</table>\n")
            oFileWriter.write("</body>\n")
            oFileWriter.write("</html>\n")

            #Always close files.
            oFileWriter.close()
            '''
            if os.path.exists(self.strCurrentTXTFolder + 'ExecutionInProgress.txt'): 
                os.remove(self.strCurrentTXTFolder + 'ExecutionInProgress.txt')'''
        except IOError as e:
            print("I/O error({0}): {1}".format(e.errno, e.strerror))

    def get_connectedAndroid_device(self):
        """Placeholder — not implemented yet."""
        print()

    def get_device_details(self):
        """Query the Hive API for connected hardware and return a model/version map.

        Returns a dict keyed by device role ('HUB', 'Thermostat',
        'Boiler Module') with model, version and (for the latter two) the
        zigbee MAC id discovered from the hub's neighbour table.
        """
        env = utils.getAttribute('common', 'currentEnvironment')
        username = utils.getAttribute('common', 'userName')
        password = utils.getAttribute('common', 'password')
        ALAPI.createCredentials(env, username=username, password=password)
        self.session = ALAPI.sessionObject()
        resp = ALAPI.getNodesV6(self.session)
        oDeviceVersionDict = {}
        strSLTMacID = ""
        strSLRMacID = ""
        # First pass: find the hub and collect neighbour MAC addresses.
        for oNode in resp['nodes']:
            if not 'supportsHotWater' in oNode['attributes']:
                if 'hardwareVersion' in oNode['attributes']:
                    intHardwareVersion = oNode['attributes'][
                        'hardwareVersion']['reportedValue']
                    intSoftwareVersion = oNode['attributes'][
                        'softwareVersion']['reportedValue']
                    if 'NANO' in intHardwareVersion:
                        oDeviceVersionDict['HUB'] = {
                            "model": intHardwareVersion,
                            "version": intSoftwareVersion
                        }
                if 'zigBeeNeighbourTable' in oNode['attributes']:
                    for oDevice in oNode['attributes']['zigBeeNeighbourTable'][
                            'reportedValue']:
                        if 'relationship' in oDevice:
                            if oDevice['relationship'] == 'CHILD':
                                strSLTMacID = oDevice['neighbourAddress']
                            elif oDevice['relationship'] == 'NONE':
                                strSLRMacID = oDevice['neighbourAddress']

        # Second pass: match SLT/SLR models to the MAC ids found above.
        for oNode in resp['nodes']:
            if not 'supportsHotWater' in oNode['attributes']:
                if 'model' in oNode['attributes']:
                    strModel = oNode['attributes']['model']['reportedValue']
                    intSoftwareVersion = oNode['attributes'][
                        'softwareVersion']['reportedValue']
                    if 'SLT' in strModel:
                        oDeviceVersionDict['Thermostat'] = {
                            "model": strModel,
                            "version": intSoftwareVersion,
                            'mac_id': strSLTMacID
                        }
                    elif 'SLR' in strModel:
                        oDeviceVersionDict['Boiler Module'] = {
                            "model": strModel,
                            "version": intSoftwareVersion,
                            'mac_id': strSLRMacID
                        }
        return oDeviceVersionDict

    def getDuration(self, strDuration):
        """Convert an 'H:MM:SS[.ffffff]' string into a human-readable duration."""
        arrDuration = strDuration.split(':')
        intHour = int(arrDuration[0])
        intMin = int(arrDuration[1])
        intSec = int(float(arrDuration[2]))

        if (intHour > 0):
            strDuration = str(intHour) + " hour(s), " + str(
                intMin) + " minute(s), " + str(intSec) + " seconds"
            if (intHour > 23):
                intDay = intHour // 24
                intHour = intHour % 24
                strDuration = str(intDay) + " day(s), " + str(
                    intHour) + " hour(s), " + str(
                        intMin) + " minute(s), " + str(intSec) + " seconds"
        else:
            strDuration = str(intMin) + " minute(s), " + str(
                intSec) + " seconds"
        return strDuration

    #Ensures the dirPath exists, if not creates the same
    def ensure_dir(self, dirpath):
        """Create *dirpath* if missing and return it."""
        if not os.path.exists(dirpath):
            os.makedirs(dirpath)
        return dirpath
Beispiel #23
0
def is_locked(datadir):
    """
    Return True if this datadir is locked for migrations
    """
    migration_lock = LockFile(datadir + '/.migration_lock')
    return migration_lock.is_locked()
Beispiel #24
0
#                 newghaccount['ghsearch'][keyword] = github.githubApiSearchCode( t_tokens, login, keyword )

#         employee['ghaccount'][login] = newghaccount
#     else:
#         sys.stdout.write( colored('%s\n' %  url, 'white') )

# employee['tested'] = 1
#####

#
# Display results
#
sys.stdout.write(
    colored('[+] %d profiles found.\n' % t_stats['n_ghaccount'], 'green'))

functions.displayResults(t_results, t_keywords)
#####

#
# Save
#
# Release the search lock now that scanning is finished.
if lock.is_locked():
    lock.release()

if f_progress:
    with open(f_progress, 'w') as json_file:
        json.dump(t_results, json_file)
    # BUGFIX: the "saved" message used to print unconditionally, claiming
    # a save even when no progress file was configured; report it only
    # after an actual write.
    sys.stdout.write(colored('[+] datas saved: %s\n\n' % f_progress, 'green'))
#####
Beispiel #25
0
# Demonstration script: read and print the head of a shared password file
# while holding a lockfile-based inter-process lock.
from lockfile import LockFile, LockTimeout
import CONSTANTS
filename= CONSTANTS.FILE_DIR+"/password.txt"
lock = LockFile(filename)
#lock.break_lock()
print(lock.i_am_locking())
# lock.break_lock()
print(lock.is_locked())
# lock.release()
lock.acquire()
while lock.i_am_locking():
	try:
	    
	    print(lock.is_locked())    # wait up to 60 seconds
	    print(lock.i_am_locking())
	    # Dump (at most) the first million lines of the guarded file.
	    fb = open(filename, 'r')
	    da = fb.readlines()
	    for d in da[:1000000]:
	    	print(d)
	    fb.close()
	    # Releasing here ends i_am_locking() and therefore the loop.
	    lock.release()
	except LockTimeout:
	    # NOTE(review): acquire() above is called without a timeout, so a
	    # LockTimeout from this body is unlikely -- confirm the intent
	    # before relying on this recovery path.
	    print("enterd into exception")
	    lock.break_lock()
	    lock.acquire()
	print(lock.is_locked())
	print(lock.i_am_locking())
	print("I locked", lock.path)
if lock.is_locked():
	lock.release()
Beispiel #26
0
class ContentCache(object):
    """Filesystem content cache with per-item expiry.

    Items are stored as files under ``cache_dir``; an index file
    (``db.data``) records one ``<expire-epoch>=<path>`` line per item.
    Index writes are serialised across processes with a
    ``lockfile.LockFile`` on the cache directory.

    NOTE(review): payloads are written with ``fp.write(str(data))`` to a
    binary handle and compared against str markers in ``get()`` -- this
    looks like Python 2 era code; confirm before running under Python 3.
    """

    def __init__(self, cache_dir=None):
        self.cache_dir = None
        self._db_path = None
        self._lockfile = None

        # When caching is disabled, leave all paths unset so the public
        # methods degrade to no-ops / cache misses.
        if not get_setting("enable_cache"):
            return

        self.cache_dir = cache_dir or join(util.get_home_dir(), ".cache")
        self._db_path = join(self.cache_dir, "db.data")

    def __enter__(self):
        if not self._db_path or not isfile(self._db_path):
            return self

        # Purge expired entries when entering the context.
        self.delete()
        return self

    def __exit__(self, type_, value, traceback):
        pass

    def _lock_dbindex(self):
        """Ensure the cache dir exists and acquire the index lock.

        Returns True on success, False when the lock could not be taken.
        """
        # BUGFIX: was ``if not self.cache_dir: os.makedirs(self.cache_dir)``,
        # which could only ever call makedirs(None).  The intent is to
        # create the cache directory when it is missing.
        if self.cache_dir and not isdir(self.cache_dir):
            os.makedirs(self.cache_dir)
        self._lockfile = LockFile(self.cache_dir)
        # Break locks that look stale (no mtime update for >10 seconds).
        if self._lockfile.is_locked() and \
                (time() - getmtime(self._lockfile.lock_file)) > 10:
            self._lockfile.break_lock()

        try:
            self._lockfile.acquire()
        except LockFailed:
            return False

        return True

    def _unlock_dbindex(self):
        if self._lockfile:
            self._lockfile.release()
        return True

    def get_cache_path(self, key):
        # Shard payload files by the last two key characters to keep
        # directories small.
        assert len(key) > 3
        return join(self.cache_dir, key[-2:], key)

    @staticmethod
    def key_from_args(*args):
        """Build a stable cache key (md5 hex digest) from arguments."""
        h = hashlib.md5()
        for data in args:
            h.update(str(data))
        return h.hexdigest()

    def get(self, key):
        """Return the cached payload for *key* (JSON-decoded when it looks
        like a JSON document) or None on a miss."""
        cache_path = self.get_cache_path(key)
        if not isfile(cache_path):
            return None
        with open(cache_path, "rb") as fp:
            data = fp.read()
            if data and data[0] in ("{", "["):
                return json.loads(data)
            return data

    def set(self, key, data, valid):
        """Store *data* under *key* for the duration given by *valid*,
        e.g. "10s", "5m", "2h", "1d"."""
        cache_path = self.get_cache_path(key)
        if isfile(cache_path):
            self.delete(key)
        if not data:
            return
        if not isdir(self.cache_dir):
            os.makedirs(self.cache_dir)
        tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        assert valid.endswith(tuple(tdmap.keys()))
        expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))

        if not self._lock_dbindex():
            return False

        if not isdir(dirname(cache_path)):
            os.makedirs(dirname(cache_path))
        with open(cache_path, "wb") as fp:
            if isinstance(data, (dict, list)):
                json.dump(data, fp)
            else:
                fp.write(str(data))
        with open(self._db_path, "a") as fp:
            fp.write("%s=%s\n" % (str(expire_time), cache_path))

        return self._unlock_dbindex()

    def delete(self, keys=None):
        """ Keys=None, delete expired items """
        if not keys:
            keys = []
        if not isinstance(keys, list):
            keys = [keys]
        paths_for_delete = [self.get_cache_path(k) for k in keys]
        found = False
        newlines = []
        # Rewrite the index keeping only live entries not being deleted;
        # remove payload files (and their emptied shard dirs) otherwise.
        with open(self._db_path) as fp:
            for line in fp.readlines():
                if "=" not in line:
                    continue
                line = line.strip()
                # BUGFIX: split on the first '=' only, so cache paths that
                # themselves contain '=' do not break the unpacking.
                expire, path = line.split("=", 1)
                if time() < int(expire) and isfile(path) and \
                        path not in paths_for_delete:
                    newlines.append(line)
                    continue
                found = True
                if isfile(path):
                    try:
                        remove(path)
                        if not listdir(dirname(path)):
                            util.rmtree_(dirname(path))
                    except OSError:
                        pass

        if found and self._lock_dbindex():
            with open(self._db_path, "w") as fp:
                fp.write("\n".join(newlines) + "\n")
            self._unlock_dbindex()

        return True

    def clean(self):
        """Remove the entire cache directory."""
        if not self.cache_dir or not isdir(self.cache_dir):
            return
        util.rmtree_(self.cache_dir)
Beispiel #27
0
from lockfile import LockFile, LockTimeout
import CONSTANTS

# Report whether any process currently holds the lock that guards the
# shared password file.
pwd_file = CONSTANTS.FILE_DIR + "/password.txt"
pwd_lock = LockFile(pwd_file)
print(pwd_lock.is_locked())
Beispiel #28
0
class ContentCache(object):
    """Filesystem content cache with per-item expiry.

    Items are stored as files under ``cache_dir``; an index file
    (``db.data``) records one ``<expire-epoch>=<path>`` line per item.
    Index writes are serialised across processes with a
    ``lockfile.LockFile`` on the cache directory.

    NOTE(review): payloads are written with ``fp.write(str(data))`` to a
    binary handle and compared against str markers in ``get()`` -- this
    looks like Python 2 era code; confirm before running under Python 3.
    """

    def __init__(self, cache_dir=None):
        self.cache_dir = None
        self._db_path = None
        self._lockfile = None

        # When caching is disabled, leave all paths unset so the public
        # methods degrade to no-ops / cache misses.
        if not get_setting("enable_cache"):
            return

        self.cache_dir = cache_dir or join(util.get_home_dir(), ".cache")
        self._db_path = join(self.cache_dir, "db.data")

    def __enter__(self):
        if not self._db_path or not isfile(self._db_path):
            return self

        # Purge expired entries when entering the context.
        self.delete()
        return self

    def __exit__(self, type_, value, traceback):
        pass

    def _lock_dbindex(self):
        """Ensure the cache dir exists and acquire the index lock.

        Returns True on success, False when the lock could not be taken.
        """
        # BUGFIX: was ``if not self.cache_dir: os.makedirs(self.cache_dir)``,
        # which could only ever call makedirs(None).  The intent is to
        # create the cache directory when it is missing.
        if self.cache_dir and not isdir(self.cache_dir):
            os.makedirs(self.cache_dir)
        self._lockfile = LockFile(self.cache_dir)
        # Break locks that look stale (no mtime update for >10 seconds).
        if self._lockfile.is_locked() and \
                (time() - getmtime(self._lockfile.lock_file)) > 10:
            self._lockfile.break_lock()

        try:
            self._lockfile.acquire()
        except LockFailed:
            return False

        return True

    def _unlock_dbindex(self):
        if self._lockfile:
            self._lockfile.release()
        return True

    def get_cache_path(self, key):
        # Shard payload files by the last two key characters to keep
        # directories small.
        assert len(key) > 3
        return join(self.cache_dir, key[-2:], key)

    @staticmethod
    def key_from_args(*args):
        """Build a stable cache key (md5 hex digest) from arguments."""
        h = hashlib.md5()
        for data in args:
            h.update(str(data))
        return h.hexdigest()

    def get(self, key):
        """Return the cached payload for *key* (JSON-decoded when it looks
        like a JSON document) or None on a miss."""
        cache_path = self.get_cache_path(key)
        if not isfile(cache_path):
            return None
        with open(cache_path, "rb") as fp:
            data = fp.read()
            if data and data[0] in ("{", "["):
                return json.loads(data)
            return data

    def set(self, key, data, valid):
        """Store *data* under *key* for the duration given by *valid*,
        e.g. "10s", "5m", "2h", "1d"."""
        cache_path = self.get_cache_path(key)
        if isfile(cache_path):
            self.delete(key)
        if not data:
            return
        if not isdir(self.cache_dir):
            os.makedirs(self.cache_dir)
        tdmap = {"s": 1, "m": 60, "h": 3600, "d": 86400}
        assert valid.endswith(tuple(tdmap.keys()))
        expire_time = int(time() + tdmap[valid[-1]] * int(valid[:-1]))

        if not self._lock_dbindex():
            return False

        if not isdir(dirname(cache_path)):
            os.makedirs(dirname(cache_path))
        with open(cache_path, "wb") as fp:
            if isinstance(data, (dict, list)):
                json.dump(data, fp)
            else:
                fp.write(str(data))
        with open(self._db_path, "a") as fp:
            fp.write("%s=%s\n" % (str(expire_time), cache_path))

        return self._unlock_dbindex()

    def delete(self, keys=None):
        """ Keys=None, delete expired items """
        if not keys:
            keys = []
        if not isinstance(keys, list):
            keys = [keys]
        paths_for_delete = [self.get_cache_path(k) for k in keys]
        found = False
        newlines = []
        # Rewrite the index keeping only live entries not being deleted;
        # remove payload files (and their emptied shard dirs) otherwise.
        with open(self._db_path) as fp:
            for line in fp.readlines():
                if "=" not in line:
                    continue
                line = line.strip()
                # BUGFIX: split on the first '=' only, so cache paths that
                # themselves contain '=' do not break the unpacking.
                expire, path = line.split("=", 1)
                if time() < int(expire) and isfile(path) and \
                        path not in paths_for_delete:
                    newlines.append(line)
                    continue
                found = True
                if isfile(path):
                    try:
                        remove(path)
                        if not listdir(dirname(path)):
                            util.rmtree_(dirname(path))
                    except OSError:
                        pass

        if found and self._lock_dbindex():
            with open(self._db_path, "w") as fp:
                fp.write("\n".join(newlines) + "\n")
            self._unlock_dbindex()

        return True

    def clean(self):
        """Remove the entire cache directory."""
        if not self.cache_dir or not isdir(self.cache_dir):
            return
        util.rmtree_(self.cache_dir)
Beispiel #29
0
    def on_post(self, req, resp, id):
        """
        Deploy the task with the given id from its git branch/path.
        The deploy itself runs asynchronously in a worker thread.

        Returns JSON:
        {
            "result": "ok" | "error",
            "error": String
        }

        """

        try:
            user = req.context['user']

            # Permission check: only logged-in organisers may deploy.
            if (not user.is_logged_in()) or (not user.is_org()):
                req.context['result'] = 'Nedostatecna opravneni'
                resp.status = falcon.HTTP_400
                return

            # The task must exist.
            task = session.query(model.Task).get(id)
            if task is None:
                req.context['result'] = 'Neexistujici uloha'
                resp.status = falcon.HTTP_404
                return

            # Published tasks may only be deployed by admins or by the
            # guarantor of the task's wave.
            wave = session.query(model.Wave).get(task.wave)
            if (datetime.datetime.utcnow() > wave.time_published and
                    not user.is_admin() and user.id != wave.garant):
                req.context['result'] = ('Po zverejneni ulohy muze deploy '
                                         'provest pouze administrator nebo '
                                         'garant vlny.')
                resp.status = falcon.HTTP_404
                return

            # The task must have a git branch and directory recorded in
            # the database.
            if (task.git_branch is None) or (task.git_path is None):
                req.context['result'] = ('Uloha nema zadanou gitovskou vetev '
                                         'nebo adresar')
                resp.status = falcon.HTTP_400
                return

            # Refuse to start while another git operation holds the
            # repository lock.
            lock = util.lock.git_locked()
            if lock:
                req.context['result'] = ('GIT uzamcen zamkem ' + lock +
                                         '\nNekdo momentalne provadi akci s '
                                         'gitem, opakujte prosim akci za 20 '
                                         'sekund.')
                resp.status = falcon.HTTP_409
                return

            # The 'deploying' status must be set in this thread, before
            # the worker thread is started.
            task.deploy_status = 'deploying'
            session.commit()

            try:
                deployLock = LockFile(util.admin.taskDeploy.LOCKFILE)
                deployLock.acquire(60)  # Lock timeout is 1 minute
                deployThread = threading.Thread(
                    target=util.admin.taskDeploy.deploy,
                    args=(task.id, deployLock, scoped_session(_session)),
                    kwargs={}
                )
                deployThread.start()
            finally:
                # NOTE(review): if LockFile() itself raised, `deployLock`
                # is unbound here and this raises NameError.  Also,
                # is_locked() is True while the freshly started worker
                # thread holds the lock, so this may release the worker's
                # lock out from under it -- confirm the intended handoff.
                if deployLock.is_locked():
                    deployLock.release()

            req.context['result'] = {}
            resp.status = falcon.HTTP_200
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
Beispiel #30
0
def mirror_main():
    """Entry point.

    Mirrors completed torrents from a remote seedbox over SFTP, verifies
    the downloaded data against the torrents' piece hashes, then moves the
    finished items server-side and applies a label.  A lock file keyed on
    the argument list prevents concurrent runs with the same parameters.
    """
    # Make Ctrl+C release the lock before exiting.
    signal.signal(signal.SIGINT, lock_ctrl_c_handler)

    parser = argparse.ArgumentParser()

    parser.add_argument('-H', '--host', required=True)
    parser.add_argument('-P', '--port', type=int, default=22)
    parser.add_argument('-c', '--netrc-path', default=expanduser('~/.netrc'))
    parser.add_argument('-r', '--resume', action='store_true',
                        help='Resume incomplete files (experimental)')
    parser.add_argument('-T', '--move-to', required=True)
    parser.add_argument('-L', '--label', default='Seeding')
    parser.add_argument('-d', '--debug', action='store_true')
    parser.add_argument('-v', '--verbose', action='store_true')
    parser.add_argument('-s', '--syslog', action='store_true')
    parser.add_argument('--no-preserve-permissions', action='store_false')
    parser.add_argument('--no-preserve-times', action='store_false')
    parser.add_argument('--max-retries', type=int, default=10)
    parser.add_argument('remote_dir', metavar='REMOTEDIR', nargs=1)
    parser.add_argument('local_dir', metavar='LOCALDIR', nargs=1)

    args = parser.parse_args()
    log = get_logger('xirvik',
                     verbose=args.verbose,
                     debug=args.debug,
                     syslog=args.syslog)
    # In debug mode, surface the underlying HTTP library's logging too.
    if args.debug:
        logs_to_follow = (
            'requests',
        )
        for name in logs_to_follow:
            _log = logging.getLogger(name)
            formatter = logging.Formatter('%(asctime)s - %(name)s - '
                                          '%(levelname)s - %(message)s')
            channel = logging.StreamHandler(sys.stderr)

            _log.setLevel(logging.DEBUG)
            channel.setLevel(logging.DEBUG)
            channel.setFormatter(formatter)
            _log.addHandler(channel)

    local_dir = realpath(args.local_dir[0])
    # NOTE(review): netrc(...).authenticators() returns None when the host
    # has no entry, which would make this unpacking raise TypeError --
    # confirm whether a friendlier error is wanted here.
    user, _, password = netrc(args.netrc_path).authenticators(args.host)
    sftp_host = 'sftp://{user:s}@{host:s}'.format(
        user=user,
        host=args.host,
    )

    # Lock file name is derived from the CLI arguments so that runs with
    # different parameters can proceed in parallel.
    lf_hash = hashlib.sha256(json.dumps(
        args._get_kwargs()).encode('utf-8')).hexdigest()
    lf_path = path_join(gettempdir(), 'xirvik-mirror-{}'.format(lf_hash))
    log.debug('Acquiring lock at {}.lock'.format(lf_path))
    _lock = LockFile(lf_path)
    # If the lock is held but no other copy of this script shows up in
    # `ps ax`, assume a stale lock from a dead process and break it.
    if _lock.is_locked():
        psax = [x for x in
                sp.check_output(['ps', 'ax']).decode('utf-8').split('\n')
                if sys.argv[0] in x]
        if len(psax) == 1:
            log.info('Breaking lock')
            _lock.break_lock()
    _lock.acquire()
    log.info('Lock acquired')

    log.debug('Local directory to sync to: {}'.format(local_dir))
    log.debug('Read user and password from netrc file')
    log.debug('SFTP URI: {}'.format(sftp_host))

    client = ruTorrentClient(args.host,
                             user,
                             password,
                             max_retries=args.max_retries)

    # Server-side path conventions: completed items live under
    # /torrents/<user>/<remote_dir>.
    assumed_path_prefix = '/torrents/{}'.format(user)
    look_for = '{}/{}/'.format(assumed_path_prefix, args.remote_dir[0])
    move_to = '{}/{}'.format(assumed_path_prefix, args.move_to)
    names = {}

    log.debug('Full completed directory path name: {}'.format(look_for))
    log.debug('Moving finished torrents to: {}'.format(move_to))

    log.info('Getting current torrent information (ruTorrent)')
    try:
        torrents = client.list_torrents()
    except requests.exceptions.ConnectionError as e:
        # Assume no Internet connection at this point
        log.error('Failed to connect: {}'.format(e))
        try:
            _lock.release()
        except NotLocked:
            pass
        cleanup_and_exit(1)

    # Collect torrents whose data path is under the watched directory.
    # NOTE(review): the loop variable `hash` shadows the builtin of the
    # same name throughout the rest of this function.
    for hash, v in torrents.items():
        if not v[TORRENT_PATH_INDEX].startswith(look_for):
            continue
        bn = basename(v[TORRENT_PATH_INDEX])
        names[bn] = (hash, v[TORRENT_PATH_INDEX],)

        log.info('Completed torrent "{}" found with hash {}'.format(bn, hash,))

    sftp_client_args = dict(
        hostname=args.host,
        username=user,
        password=password,
        port=args.port,
    )

    try:
        with SFTPClient(**sftp_client_args) as sftp_client:
            log.info('Verifying contents of {} with previous '
                     'response'.format(look_for))

            # Cross-check the ruTorrent listing against the actual remote
            # directory contents before mirroring.
            sftp_client.chdir(args.remote_dir[0])
            for item in sftp_client.listdir_iter(read_aheads=10):
                if item.filename not in names:
                    log.error('File or directory "{}" not found in previous '
                              'response body'.format(item.filename))
                    continue

                log.debug('Found matching torrent "{}" from ls output'.format(
                    item.filename))

            if not len(names.items()):
                log.info('Nothing found to mirror')
                _lock.release()
                cleanup_and_exit()

            mirror(sftp_client,
                   client,
                   destroot=local_dir,
                   keep_modes=not args.no_preserve_permissions,
                   keep_times=not args.no_preserve_times)
    except Exception as e:
        # In debug mode re-raise for a traceback; otherwise log and exit
        # cleanly, releasing the lock either way.
        if args.debug:
            _lock.release()
            cleanup()
            raise e
        else:
            log.error(str(e))
        _lock.release()
        cleanup_and_exit()

    # Verify every mirrored item against its torrent's piece hashes.
    _all = names.items()
    exit_status = 0
    bad = []
    for bn, (hash, fullpath) in _all:
        # There is a warning that can get raised here by urllib3 if
        # Content-Disposition header's filename field has any
        # non-ASCII characters. It is ignorable as the content still gets
        # downloaded correctly
        log.info('Verifying "{}"'.format(bn))
        r, _ = client.get_torrent(hash)
        try:
            verify_torrent_contents(r.content, local_dir)
        except VerificationError as e:
            log.error('Could not verify "{}" contents against piece hashes '
                      'in torrent file'.format(bn))
            exit_status = 1
            bad.append(hash)

    # Move to _seeding directory and set label
    # Unfortunately, there is no method, via the API, to do this one HTTP
    #   request
    for bn, (hash, fullpath) in _all:
        if hash in bad:
            continue
        log.info('Moving "{}" to "{}" directory'.format(bn, move_to))
        try:
            client.move_torrent(hash, move_to)
        except UnexpectedruTorrentError as e:
            log.error(str(e))

    log.info('Setting label to "{}" for downloaded items'.format(args.label))

    client.set_label_to_hashes(hashes=[hash for bn, (hash, fullpath)
                                       in names.items() if hash not in bad],
                               label=args.label)

    if exit_status != 0:
        log.error('Could not verify torrent checksums')

    _lock.release()
    cleanup_and_exit(exit_status)
Beispiel #31
0
def update_document(destinaton, file, email, db: Session):
    """Upload a new version of *file* for *email*.

    The physical file is guarded with a lockfile; the database record is
    guarded with a row-level ``SELECT ... FOR UPDATE`` lock while the new
    contents are written and timestamps updated.

    Returns:
        True on success, None when the version check fails, "locked" when
        a non-owner tries to update a locked file, False on lock timeout.
    """
    email_id = get_email_id(email, db)
    file_id = get_file_id_by_filename(file.filename, db)
    print(email)

    # Version check based on modified time vs. download time: refuse the
    # update when the user would overwrite newer content with a stale copy.
    version_status = version_check(email_id, file_id, db)
    print("version check is ", version_status)

    if version_status == False:
        return None

    # Create a lock object for the target file path.
    file_lock_check = LockFile(destinaton + '/' + file.filename)
    if file_lock_check.is_locked():
        if is_owner(email_id, file_id, db) == True:
            # The owner may take over an existing lock.
            # BUGFIX: was ``lock.release()`` -- ``lock`` is undefined in
            # this function (NameError); the lock object here is
            # ``file_lock_check``.
            file_lock_check.release()
        else:
            # Locked by someone else and this user is not the owner.
            print("file is locked for ", email)
            return "locked"

    # Take the file lock for ourselves.
    file_lock_check.acquire()

    while file_lock_check.i_am_locking():
        # Row-level locking: with_for_update() locks the file's database
        # record until this transaction commits, so nobody can modify it
        # through the database while the new contents are written.  The
        # physical file itself is protected by the lockfile above.
        try:
            file_status = db.query(Files).filter(
                Files.id == file_id).with_for_update().first()

            file_path = destinaton + "/" + file.filename

            with open(file_path, "wb+") as file_object:
                shutil.copyfileobj(file.file, file_object)

            file_status.modified_on = dt.datetime.now()
            db.commit()
            update_status = db.query(Collaborators).filter(
                and_(Collaborators.collaborator_id == email_id.id,
                     Collaborators.file_id == file_id))
            update_status = update_status.update(
                {'downloaded_on': dt.datetime.now()})
            db.commit()
            # Releasing ends i_am_locking() and therefore the loop.
            file_lock_check.release()
        except LockTimeout:
            print("enterd into exception")
            # BUGFIX: the original broke the lock, re-acquired it, and then
            # returned while still holding it -- leaking a held lock.
            # Break the lock and leave it released instead.
            file_lock_check.break_lock()
            return False

    if file_lock_check.is_locked():
        file_lock_check.release()

    return True