def can_authenticate_to_old(old):
    """Return True if we can log in to the local OLD using the default
    credentials, False otherwise.
    """
    old_client = OLDClient(old['url'])
    try:
        return old_client.login(
            DEFAULT_LOCAL_OLD_USERNAME, DEFAULT_LOCAL_OLD_PASSWORD)
    except Exception as e:
        logger.warning(
            f'Exception of type {type(e)} when attempting to'
            f' authenticate to the OLD')
        return False

def authenticate_to_leader(old):
    """Attempt to log in to the leader OLD; return the authenticated client
    on success, False otherwise.
    """
    logger.debug(
        f'checking if we can login to the leader OLD at {old["leader"]}'
        f' using username {old["username"]} and password'
        f' {old["password"]}')
    old_client = OLDClient(old['leader'])
    try:
        logged_in = old_client.login(old['username'], old['password'])
        if logged_in:
            return old_client
        return False
    except Exception:
        logger.exception(f'Failed to login to the leader OLD {old["leader"]}')
        return False

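# Both authentication helpers (and process_command below) assume an OLDClient
# exposing login(), get(), and post() against the OLD's JSON API. The real
# client is defined elsewhere in the codebase; what follows is only a minimal
# sketch of the assumed interface. The class name, the login/authenticate
# route, and the use of requests are assumptions, not confirmed details of
# the actual OLDClient.
import requests


class OLDClientSketch:

    def __init__(self, url, port=None):
        # Callers construct clients both with and without an explicit port.
        self.base_url = f'{url}:{port}' if port else url
        self.session = requests.Session()

    def login(self, username, password):
        # Hypothetical authentication route; returns truthy on success,
        # matching how can_authenticate_to_old uses the return value.
        resp = self.session.post(
            f'{self.base_url}/login/authenticate',
            json={'username': username, 'password': password})
        return resp.ok

    def get(self, path):
        # Used by process_command below as client.get('sync/last_modified').
        resp = self.session.get(f'{self.base_url}/{path}')
        resp.raise_for_status()
        return resp.json()

    def post(self, path, payload):
        # Used below as client.post('sync/tables', {'tables': batch}).
        resp = self.session.post(f'{self.base_url}/{path}', json=payload)
        resp.raise_for_status()
        return resp.json()
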
def process_command(dtserver, old_service, command):
    """Process a sync-OLD! command."""
    # Get the OLD metadata from DTServer
    old = fetch_old(dtserver, command['old_id'])
    old['url'] = f'{old_service.url}/{old["slug"]}'
    # Determine whether the OLD already exists and create it if necessary
    old_exists = does_old_exist(old)
    if not old_exists:
        old_exists = create_local_old(old)
    if not old_exists:
        msg = f'Failed to create the OLD {old["slug"]} locally'
        logger.warning(msg)
        raise SyncOLDError(msg)
    # Abort if we are not set to sync or if there is nothing to sync with
    if not old['is_auto_syncing']:
        logger.debug(f'OLD {old["slug"]} is not set to auto-sync')
        return
    if not old['leader']:
        logger.debug(f'OLD {old["slug"]} has no remote leader OLD')
        return
    leader_client = authenticate_to_leader(old)
    if not leader_client:
        logger.warning(f'Unable to login to leader OLD {old["leader"]}')
        return
    # Fetch the last modified values for each resource in the local OLD and
    # in the leader OLD and construct a diff.
    local_client = OLDClient(old['url'])
    local_client.login(
        DEFAULT_LOCAL_OLD_USERNAME, DEFAULT_LOCAL_OLD_PASSWORD)
    local_last_mod = local_client.get('sync/last_modified')
    leader_last_mod = leader_client.get('sync/last_modified')
    diff = get_diff(local_last_mod, leader_last_mod)
    # Perform the local updates by modifying the SQLite db of the OLD
    # directly.
    meta = sqla.MetaData()
    db_path = os.path.join(c.OLD_DIR, f"{old['slug']}.sqlite")
    engine = sqla.create_engine(f'sqlite:///{db_path}')
    with engine.connect() as conn:
        # Perform any deletions
        delete_state = diff['delete']
        if delete_state:
            for table_name, rows in delete_state.items():
                if not rows:
                    continue
                table = sqla.Table(table_name, meta, autoload_with=engine)
                conn.execute(
                    table.delete().where(
                        table.c.id.in_(rows)))
        # Perform any additions
        add_params = diff['add']
        if add_params:
            for batch in batch_tables(add_params):
                add_state = leader_client.post(
                    'sync/tables', {'tables': batch})
                for table_name, rows in add_state.items():
                    if not rows:
                        continue
                    table = sqla.Table(table_name, meta, autoload_with=engine)
                    conn.execute(
                        table.insert(),
                        [prepare_row_for_upsert(table_name, row)
                         for row in rows.values()])
        # Perform any updates
        update_params = diff['update']
        if update_params:
            for batch in batch_tables(update_params):
                update_state = leader_client.post(
                    'sync/tables', {'tables': batch})
                for table_name, rows in update_state.items():
                    if not rows:
                        continue
                    table = sqla.Table(table_name, meta, autoload_with=engine)
                    for row in rows.values():
                        row_id = row['id']
                        updated_row = prepare_row_for_upsert(table_name, row)
                        conn.execute(
                            table.update().where(
                                table.c.id == row_id).values(**updated_row))

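# get_diff is defined elsewhere; from how its result is consumed above, it
# appears to compare the local and leader 'sync/last_modified' maps and
# bucket row ids into 'add', 'update', and 'delete' per table. The sketch
# below is a hypothetical reconstruction, assuming both maps have the shape
# {table_name: {row_id: last_modified_timestamp}}; the actual payload shape
# is not confirmed by this module.
def get_diff_sketch(local_last_mod, leader_last_mod):
    diff = {'add': {}, 'update': {}, 'delete': {}}
    table_names = set(local_last_mod) | set(leader_last_mod)
    for table_name in table_names:
        local_rows = local_last_mod.get(table_name, {})
        leader_rows = leader_last_mod.get(table_name, {})
        # Rows the leader has that we lack must be added locally.
        diff['add'][table_name] = [
            row_id for row_id in leader_rows if row_id not in local_rows]
        # Rows present in both but with differing timestamps must be updated.
        diff['update'][table_name] = [
            row_id for row_id, stamp in leader_rows.items()
            if row_id in local_rows and local_rows[row_id] != stamp]
        # Rows we have that the leader lacks must be deleted locally.
        diff['delete'][table_name] = [
            row_id for row_id in local_rows if row_id not in leader_rows]
    return diff
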
class ParserResearcher(object):
    """Functionality for performing parser-related research on a live OLD web
    service.

    Provides general-purpose methods for creating searches, corpora,
    morphologies, phonologies, morpheme language models and morphological
    parsers. Also provides convenience methods for creating specific
    instances of the above-mentioned parser-related resources.

    TODO:

    1. unknown_category and morpheme_splitter need to be property attributes
       of the researcher's old_client instance. See the
       Researcher.well_analyzed method.

    """

    def __init__(self, username, password, host, **kwargs):
        """Connect and authenticate to a live OLD web application.
        """
        port = kwargs.get('port', '80')
        self.my_dir = os.path.abspath(os.path.dirname(__file__))
        self.set_record_path(**kwargs)
        self.setup_localstore(**kwargs)
        self.old = OLDClient(host, port)
        try:
            assert self.old.login(username, password)
        except AssertionError:
            print('Unable to login to the OLD application at %s:%s using '
                  'username %s and password %s' % (host, port, username,
                                                   password))
            sys.exit()

    # This is what is stored in record.pickle
    default_record = {
        'searches': {},
        'corpora': {},
        'morphologies': {},
        'phonologies': {},
        'language models': {},
        'parsers': {},
        'parse_summaries': {},
        'users': {}
    }

    def set_record_path(self, **kwargs):
        record_file = kwargs.get('record_file')
        if record_file:
            self.record_path = os.path.join(self.my_dir, record_file)

    def setup_localstore(self, **kwargs):
        localstore = kwargs.get('localstore', 'localstore')
        self.localstore = os.path.join(self.my_dir, localstore)
        self.make_directory_safely(self.localstore)
        for object_type in self.default_record.keys():
            subdir = os.path.join(self.localstore, object_type)
            self.make_directory_safely(subdir)

    @property
    def record(self):
        """The ``record`` of a researcher is a pickled dict for persisting
        researcher state. It is useful for avoiding repetition of costly
        computations.
        """
        try:
            return self._record
        except AttributeError:
            try:
                self._record = pickle.load(open(self.record_path, 'rb'))
            except Exception:
                self._record = self.default_record
            return self._record

    @record.setter
    def record(self, value):
        self._record = value

    def dump_record(self):
        """Try to pickle the researcher's record.
        """
        try:
            pickle.dump(self.record, open(self.record_path, 'wb'))
        except Exception:
            log.warning('Attempt to pickle-dump to %s failed.' %
                        self.record_path)

    def clear_record(self, clear_corpora=False):
        """Reset the record to the default record and persist it, keeping
        existing corpora unless clear_corpora is True.
        """
        log.info("Clearing the researcher's record.")
        corpora = self.record['corpora']
        self.record = self.default_record
        if not clear_corpora:
            self.record['corpora'] = corpora
        self.dump_record()

    def make_directory_safely(self, path):
        """Create a directory and avoid race conditions.

        Taken from
        http://stackoverflow.com/questions/273192/python-best-way-to-create-directory-if-it-doesnt-exist-for-file-write.
        Listed as ``make_sure_path_exists``.
        """
        try:
            os.makedirs(path)
        except OSError as exception:
            if exception.errno != errno.EEXIST:
                raise
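
# Illustrative usage only: the host, port, credentials, and record_file value
# below are placeholders, not values taken from this codebase.
if __name__ == '__main__':
    researcher = ParserResearcher(
        'admin_username', 'admin_password', 'http://127.0.0.1',
        port='5000', record_file='record.pickle')
    # Cached state persists across runs via the pickled record; clear_record
    # resets it while preserving previously built corpora by default.
    researcher.clear_record()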