def _post_exitcode(self):
    """Postprocess the exitcode stored in self._process_exitcode.

    On a non-zero exit code, report the command, shell command and collected
    output through _post_exitcode_log_failure; otherwise log success at
    debug level.
    """
    cmd_ascii = ensure_ascii_string(self.cmd)
    if self._process_exitcode != 0:
        shell_cmd_ascii = ensure_ascii_string(self._shellcmd)
        self._post_exitcode_log_failure(
            "_post_exitcode: problem occurred with cmd %s: (shellcmd %s) output %s" %
            (cmd_ascii, shell_cmd_ascii, self._process_output))
    else:
        self.log.debug("_post_exitcode: success cmd %s: output %s" % (cmd_ascii, self._process_output))
def populate_home_dir(self):
    """Store the required files in the user's home directory.

    Does not overwrite files that may contain user defined content.
    """
    home = self._home_path()
    uid = int(self.account.vsc_id_number)
    gid = int(self.usergroup.vsc_id_number)
    keys = [ensure_ascii_string(pk.pubkey) for pk in self.pubkeys]
    self.gpfs.populate_home_dir(uid, gid, home, keys)
def _read_process(self, readsize=None):
    """Read up to readsize bytes from the process stdout and return them.

    Falls back to self.readsize when readsize is None; if that is also
    None, everything is read (-1). The result is an ASCII-safe string.
    """
    size = self.readsize if readsize is None else readsize
    if size is None:
        # no size configured anywhere: read all available output
        size = -1
    self.log.debug("_read_process: going to read with readsize %s" % size)
    return ensure_ascii_string(self._process.stdout.read(size))
def _wait_for_process(self): """Loop through the process in timesteps collected output is run through _loop_process_output """ # these are initialised outside the function (cannot be forgotten, but can be overwritten) self._loop_count = 0 # internal counter self._loop_continue = True self._process_output = '' # further initialisation self._loop_initialise() time.sleep(self.LOOP_TIMEOUT_INIT) ec = self._process.poll() try: while self._loop_continue and (ec is None or ec < 0): output = self._read_process() self._process_output += output # process after updating the self._process_ vars self._loop_process_output(output) if len(output) == 0: time.sleep(self.LOOP_TIMEOUT_MAIN) ec = self._process.poll() self._loop_count += 1 self.log.debug( "_wait_for_process: loop stopped after %s iterations (ec %s loop_continue %s)" % (self._loop_count, ec, self._loop_continue)) # read remaining data (all of it) output = self._read_process(-1) self._process_output += output self._process_exitcode = ec # process after updating the self._process_ vars self._loop_process_output_final(output) except RunLoopException as err: self.log.debug('RunLoopException %s' % err) self._process_output = ensure_ascii_string(err.output) self._process_exitcode = err.code
def test_ensure_ascii_string(self):
    """Tests for ensure_ascii_string function."""
    unicode_txt = 'this -> ¢ <- is unicode'

    # (input, expected output) pairs that hold on both Python 2 and 3
    test_cases = [
        ('', ''),
        ('foo', 'foo'),
        ([1, 2, 3], "[1, 2, 3]"),
        (['1', '2', '3'], "['1', '2', '3']"),
        ({'one': 1}, "{'one': 1}"),
        # in both Python 2 & 3, Unicode characters that are part of a non-string value get escaped
        ([unicode_txt], "['this -> \\xc2\\xa2 <- is unicode']"),
        ({'foo': unicode_txt}, "{'foo': 'this -> \\xc2\\xa2 <- is unicode'}"),
    ]

    if is_py2():
        # Unicode characters from regular strings are stripped out in Python 2
        test_cases.append((unicode_txt, 'this -> <- is unicode'))
        # also test with unicode-type values (only exists in Python 2)
        test_cases.append((unicode('foo'), 'foo'))
        test_cases.append((unicode(unicode_txt, encoding='utf-8'), 'this -> \\xa2 <- is unicode'))
    else:
        # in Python 3, Unicode characters are replaced by backslashed escape sequences in string values
        expected_unicode_out = 'this -> \\xc2\\xa2 <- is unicode'
        test_cases.append((unicode_txt, expected_unicode_out))
        # also test with bytestring-type values (only exists in Python 3)
        test_cases.append((bytes('foo', encoding='utf-8'), 'foo'))
        test_cases.append((bytes(unicode_txt, encoding='utf-8'), expected_unicode_out))

    for inp, expected in test_cases:
        res = ensure_ascii_string(inp)
        self.assertTrue(is_string(res))
        self.assertEqual(res, expected)
def _read_process(self, readsize=None):
    """Read from async process, return out.

    Uses a non-blocking recv_some from the process module, except when an
    explicit negative readsize asks for a (blocking) read of everything.
    Returns '' when there is no stdout pipe or the read fails.
    """
    if readsize is None:
        readsize = self.readsize

    if self._process.stdout is None:
        # Nothing yet/anymore
        return ''

    try:
        if readsize is not None and readsize < 0:
            # read all, blocking (which defeats the purpose of async usage)
            out = self._process.stdout.read()
        else:
            # non-blocking read (readsize is a maximum to return!)
            out = self._process_module.recv_some(self._process, maxread=readsize)
        return ensure_ascii_string(out)
    except Exception:
        # recv_some may raise a plain Exception; since IOError is a subclass
        # of Exception, a single clause covers both (the original tuple
        # (IOError, Exception) was redundant)
        self.log.exception("_read_process: read failed")
        return ''
def sync_altered_accounts(self, last, dry_run=True):
    """
    Add new users to the LDAP and update altered users.

    This does not include usergroups; it does include pubkeys.

    @type last: datetime
    @return: tuple (new, updated, error) that indicates what accounts were
             new, changed or could not be altered.
    """
    sync_accounts = [mkVscAccount(a) for a in self.client.account.modified[last].get()[1]]
    accounts = {
        NEW: set(),
        UPDATED: set(),
        ERROR: set(),
    }

    # lazy %-args (consistent with the logging.debug calls below)
    logging.info(
        "Found %d modified accounts in the range %s until %s",
        len(sync_accounts),
        datetime.fromtimestamp(last).strftime("%Y%m%d%H%M%SZ"),
        self.now.strftime("%Y%m%d%H%M%SZ"))
    logging.debug("Modified accounts: %s", [a.vsc_id for a in sync_accounts])

    # constant mapping from API status to LDAP status; hoisted out of the loop
    LDAP_STATE_MAPPER = {'forceinactive': 'inactive'}

    for account in sync_accounts:
        try:
            usergroup = mkUserGroup(self.client.account[account.vsc_id].usergroup.get()[1])
        except HTTPError:
            logging.error("No corresponding UserGroup for user %s", account.vsc_id)
            continue

        gecos = ensure_ascii_string(account.person.gecos)

        logging.debug('fetching public key')
        public_keys = [ensure_ascii_string(x.pubkey) for x in self.client.get_public_keys(account.vsc_id)]
        if not public_keys:
            # accounts without keys get a magic placeholder entry
            public_keys = [ACCOUNT_WITHOUT_PUBLIC_KEYS_MAGIC_STRING]

        ldap_attributes = {
            'cn': str(account.vsc_id),
            'uidNumber': ["%s" % (account.vsc_id_number,)],
            'gecos': [gecos],
            'mail': [str(account.email)],
            'institute': [str(account.person.institute['name'])],
            'instituteLogin': [str(account.person.institute_login)],
            'uid': [str(account.vsc_id)],
            'homeDirectory': [str(account.home_directory)],
            'dataDirectory': [str(account.data_directory)],
            'scratchDirectory': [str(account.scratch_directory)],
            'pubkey': public_keys,
            'gidNumber': [str(usergroup.vsc_id_number)],
            'loginShell': [str(account.login_shell)],
            'researchField': [str(account.research_field[0])],
            'status': [LDAP_STATE_MAPPER.get(str(account.status), str(account.status))],
            # quota defaults; overwritten below when actual quota is found
            'homeQuota': ["1"],
            'dataQuota': ["1"],
            'scratchQuota': ["1"],
        }

        logging.debug('fetching quota')
        quotas = self.client.account[account.vsc_id].quota.get()[1]
        for quota in quotas:
            for stype in ('home', 'data', 'scratch'):
                # only gent sets filesets for vo's, so not gvo is user
                # (other institutes is empty or "None")
                if quota['storage']['storage_type'] == stype and not quota['fileset'].startswith('gvo'):
                    ldap_attributes['%sQuota' % stype] = ["%d" % quota["hard"]]

        result = self.add_or_update(VscLdapUser, account.vsc_id, ldap_attributes, dry_run)
        accounts[result].add(account.vsc_id)

    return accounts