def run(self):
    """Execute one pull, recording success or failure on the Config row.

    Wraps self._run() in a catch-all so any unexpected exception becomes a
    completion_code instead of killing the worker thread.  On failure the
    transaction is aborted, a timestamped failure message is prepended to
    cfg.update_log, and the consecutive-failure counter is bumped; after
    more than 3 consecutive failures all synced data tables are wiped so
    the next successful pull rebuilds from scratch.  The sqlite file is
    then vacuumed to reclaim space.
    """
    self.dbsession = models.DBSession()
    try:
        self._run()
    except Exception as e:
        log.exception('Caught Failure in pull:')
        self.completion_code = 'Unknown failure: %s' % e

    if self.completion_code is not None:
        # failure machinery
        transaction.abort()
        cfg = models.get_config(self.dbsession)
        timestamp = datetime.now().isoformat()
        if not cfg.update_log:
            update_log = []
            log_msg = '%s: Initial Update Failed: %s' % (timestamp, self.completion_code)
        else:
            update_log = [cfg.update_log]
            log_msg = '%s: Update Failed: %s' % (timestamp, self.completion_code)
        # Newest failure message goes on top of the accumulated log.
        cfg.update_log = '\n'.join([log_msg] + update_log)

        session = self.dbsession
        failure_count = cfg.update_failure_count = (cfg.update_failure_count or 0) + 1
        self.dbsession.flush()

        if failure_count > 3:
            # Too many consecutive failures: clear out the synced data.
            # NOTE(fix): the original issued the KeywordCache delete twice;
            # executing it once is sufficient.
            for table in (models.KeywordCache.__table__,
                          models.Users.__table__,
                          models.Record.__table__,
                          models.Record_Data.__table__):
                session.execute(delete(table))

        transaction.commit()

        # Vacuum outside the data transaction to reclaim the deleted pages.
        session = self.dbsession
        connection = session.connection()
        connection.execute('vacuum')
        transaction.commit()

    log.critical('Done Sync')
    if self.completion_code is None:
        # completion code will be non-none on error
        self.status = 100
        self.completion_code = 'ok'
def get(self):
    """Render the registration form, or redirect to search when the site
    is already registered (a machine name or update URL is configured)."""
    req = self.request
    translate = req.translate
    cfg = models.get_config(req)

    if not (cfg.machine_name or cfg.update_url):
        # Not registered yet: show the (empty-context) registration form.
        return {}

    # maybe allow updating info later; for now just bounce to search
    req.session.flash(translate('Site Already Registered.'))
    return HTTPFound(location=req.route_url('search'))
def __call__(self):
    """Start a background pull thread unless one is already running or the
    site is not yet configured with a machine name and update URL."""
    global initial_pull, initial_pull_thread
    req = self.request
    cfg = models.get_config(req)
    translate = req.translate

    # XXX handled by other tools
    if not (cfg.machine_name and cfg.update_url):
        return translate("Not properly configured")

    if initial_pull:
        return translate("Pull already in progress.")

    # Launch the pull on a worker thread so the request returns immediately.
    initial_pull = scheduler.PullObject(force=req.params.get("force"))
    initial_pull_thread = Thread(target=initial_pull.run)
    initial_pull_thread.start()
    return {}
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application. """
    global sched

    # Store the sqlite database under the machine-wide application-data
    # folder (Windows-only: uses the win32 shell API).
    common_appdata_path = shell.SHGetFolderPath(0, shellcon.CSIDL_COMMON_APPDATA, 0, 0)
    app_data_dir = os.path.join(common_appdata_path, 'CIOC', 'OfflineTools')
    try:
        os.makedirs(app_data_dir)
    except os.error as e:
        # Directory probably already exists; best-effort, just log it.
        log.debug('os.error: %s', e)

    engine = create_engine('sqlite:///%s\\OfflineTools.db' % app_data_dir, isolation_level='READ UNCOMMITTED')
    initialize_sql(engine)

    cfg = get_config()

    # Background scheduler: periodic pulls, cron schedule derived from the
    # site's public key (spreads load across installations).
    sched = Scheduler()
    sched.start()
    sched.add_cron_job(scheduled_pull, **key_to_schedule(cfg.public_key))

    # Beaker session lock files live next to the database.
    session_lock_dir = os.path.join(app_data_dir, 'session')
    try:
        os.makedirs(session_lock_dir)
    except os.error as e:
        # Already exists; ignore.
        pass
    settings['beaker.session.lock_dir'] = session_lock_dir
    session_factory = session_factory_from_settings(settings)

    authn_policy = SessionAuthenticationPolicy(callback=groupfinder, debug=True)
    authz_policy = ACLAuthorizationPolicy()

    config = Configurator(settings=settings,
                          session_factory=session_factory,
                          root_factory=RootFactory,
                          request_factory='offlinetools.request.OfflineToolsRequest',
                          authentication_policy=authn_policy,
                          authorization_policy=authz_policy)
    config.add_translation_dirs('offlinetools:locale')
    config.add_static_view('static', 'offlinetools:static', cache_max_age=3600, permission=NO_PERMISSION_REQUIRED)

    # --- search/results/record views (require 'view' permission) ---
    config.add_route('search', '/', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.search.Search', route_name='search', attr='search', permission='view', renderer='search.mak')

    config.add_route('results', '/results', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.search.Search', route_name='results', attr='results', permission='view', renderer='results.mak')

    config.add_route('record', '/record/{num}', factory='offlinetools.views.record.RecordRootFactory', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.record.Record', route_name='record', permission='view', renderer='record.mak')

    # --- JSON autocomplete endpoints ---
    config.add_route('comgen', '/comgen', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.comgen.ComGen', renderer='json', route_name='comgen', permission='view')

    config.add_route('keywordgen', '/keywordgen', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.comgen.KeywordGen', renderer='json', route_name='keywordgen')

    # --- login/logout (open to anonymous users) ---
    config.add_route('login', '/login', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.login.Login', renderer='login.mak', route_name='login', request_method='POST', attr='post', permission=NO_PERMISSION_REQUIRED)
    config.add_view('offlinetools.views.login.Login', renderer='login.mak', route_name='login', attr='get', permission=NO_PERMISSION_REQUIRED)
    # Forbidden responses also render the login form.
    config.add_view('offlinetools.views.login.Login', renderer='login.mak', context='pyramid.httpexceptions.HTTPForbidden', attr='get', permission=NO_PERMISSION_REQUIRED)

    config.add_route('logout', '/logout', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.login.logout', route_name='logout', permission=NO_PERMISSION_REQUIRED)

    # --- site registration / configuration update (anonymous) ---
    config.add_route('register', '/register', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.register.Register', route_name='register', request_method='POST', attr='post', renderer='register.mak', permission=NO_PERMISSION_REQUIRED)
    config.add_view('offlinetools.views.register.Register', route_name='register', attr='get', renderer='register.mak', permission=NO_PERMISSION_REQUIRED)

    config.add_route('updateconfig', '/config', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.register.UpdateUrl', route_name='updateconfig', request_method='POST', attr='post', renderer='updateurl.mak', permission=NO_PERMISSION_REQUIRED)
    config.add_view('offlinetools.views.register.UpdateUrl', route_name='updateconfig', attr='get', renderer='updateurl.mak', permission=NO_PERMISSION_REQUIRED)

    # --- manual pull trigger and pull status polling ---
    config.add_route('pull', '/pull', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.pull.Pull',
                    route_name='pull', renderer='pull.mak')

    config.add_route('pull_status', '/pullstatus', pregenerator=passvars_pregen, factory='pyramid.traversal.DefaultRootFactory')
    config.add_view('offlinetools.views.pull.PullStatus', route_name='pull_status', renderer='json', permission=NO_PERMISSION_REQUIRED)

    config.add_route('status', '/status', factory='offlinetools.views.status.StatusRootFactory', pregenerator=passvars_pregen)
    config.add_view('offlinetools.views.status.Status', route_name='status', renderer='status.mak', permission='view')

    config.add_subscriber('offlinetools.subscribers.add_renderer_globals', 'pyramid.events.BeforeRender')
    config.scan()
    return config.make_wsgi_app()
def _run(self):
    """Perform the actual pull against the configured update server.

    Flow: authenticate with a signed challenge, POST to /offline/pull
    (incremental when cfg.last_update is set and force is off), unzip the
    JSON export and apply it via self._update(); if the incremental pass
    reported new fields/records, make a second full request to
    /offline/pull2 and apply via self._update2().  On success, updates
    cfg.last_update, the update log, and resets the failure counter.

    Errors set self.completion_code and return early; run() turns a
    non-None completion_code into the failure path.
    """
    dbsession = self.dbsession
    cfg = models.get_config(session=dbsession)
    if not cfg.machine_name or not cfg.update_url:
        self.completion_code = 'Not properly configured'
        return

    url_base = posixpath.join(cfg.update_url, 'offline')
    self.status = 5

    # auth request
    auth_data = {'MachineName': cfg.machine_name}
    r = requests.post(posixpath.join(url_base, 'auth'), auth_data,
                      headers=const.DEFAULT_HEADERS, verify=certstore.certfile.name)
    try:
        r.raise_for_status()
    except Exception as e:
        self.completion_code = 'Request failed: %s' % e
        return

    auth = r.json()
    if auth['fail']:
        # Log error
        self.completion_code = 'Request failed: ' + auth['reason']
        return

    # Sign server challenge + machine name to prove key ownership.
    tosign = b''.join([
        standard_b64decode(auth['challenge']),
        cfg.machine_name.encode('utf-8')])
    signature = get_signature(cfg.private_key, tosign)
    auth_data['ChallengeSig'] = json.dumps(signature)

    # Incremental pull unless forced or this is the first pull.
    if cfg.last_update and not self.force:
        auth_data['FromDate'] = cfg.last_update.isoformat()

    r = requests.post(posixpath.join(url_base, 'pull'), auth_data,
                      headers=const.DEFAULT_HEADERS, verify=certstore.certfile.name)
    try:
        r.raise_for_status()
    except Exception as e:
        self.completion_code = 'Request failed: %s' % e
        return

    log.debug('Response Size: %d', len(r.content))
    with tempfile.TemporaryFile() as fd:
        # Spool the zip to disk and drop the response to free memory early.
        fd.write(r.content)
        fd.seek(0)
        del r
        with zipfile.ZipFile(fd, 'r') as zip:
            log.debug('Full Size: %d', zip.getinfo('export.json').file_size)
            with TextIOWrapper(zip.open('export.json'), 'utf-8') as zfd:
                data = json.load(zfd)
            if data['fail']:
                self.completion_code = 'failed: %s' % data['reason']
                return
            self.zipfile = zip
            self._update(data['data'], (cfg.last_update and not self.force))
    data = None
    log.debug('gc collect')
    gc.collect()
    log.debug('after gc collect')

    if (cfg.last_update and not self.force) and (self._new_fields or self._new_records):
        # The incremental pass discovered fields/records we have never seen;
        # fetch their full data with a second, non-incremental request.
        log.debug('********** Second Update')
        try:
            del auth_data['FromDate']
        except KeyError:
            pass
        auth_data['NewFields'] = list(sorted(set(six.text_type(x['FieldID']) for x in self._new_fields)))
        auth_data['NewRecords'] = list(sorted(set(x[0] for x in self._new_records)))

        r = requests.post(posixpath.join(url_base, 'pull2'), auth_data,
                          headers=const.DEFAULT_HEADERS, verify=certstore.certfile.name)
        try:
            r.raise_for_status()
        except Exception as e:
            self.completion_code = '*************************************** Request failed: %s' % e
            return

        log.debug('***************************************************** Response Size: %d', len(r.content))
        with tempfile.TemporaryFile() as fd:
            fd.write(r.content)
            fd.seek(0)
            del r
            with zipfile.ZipFile(fd, 'r') as zip:
                log.debug('Full Size: %d', zip.getinfo('export.json').file_size)
                # NOTE(fix): pass 'utf-8' explicitly like the first pass above;
                # the original relied on the locale default encoding here, which
                # breaks on non-ASCII export data on Windows (cp1252).
                with TextIOWrapper(zip.open('export.json'), 'utf-8') as zfd:
                    data = json.load(zfd)
                if data['fail']:
                    self.completion_code = 'failed: %s' % data['reason']
                    return
                self.zipfile = zip
                self._update2(data['data'])
        data = None
        log.debug('gc collect')
        gc.collect()
        log.debug('after gc collect')

    dbsession.flush()
    self.status = 90

    # Record success: timestamp, prepend to the update log, reset failures.
    cfg.last_update = datetime.now()
    update_log = [cfg.update_log] if cfg.update_log else []
    cfg.update_log = '\n'.join(['%s: Pull Success' % datetime.now().isoformat()] + update_log)
    cfg.update_failure_count = 0
    dbsession.flush()

    log.debug('Before UPdate Caches')
    self._update_caches()
    log.debug('After Update Caches')
    self.status = 95

    log.debug('********************************************* before commit')
    transaction.commit()
    log.debug('********************************************* after commit')
    return
def config(self):
    """Fetch the Config row for this request via the module-level helper."""
    cfg = get_config(self)
    return cfg