def flush_post(request):
    """Flush every backend (storage, permission, cache), broadcast a
    ``ServerFlushed`` event, and answer with ``202 Accepted``."""
    registry = request.registry
    # Flush order preserved: storage first, then permission, then cache.
    for backend in (registry.storage, registry.permission, registry.cache):
        backend.flush()
    registry.notify(ServerFlushed(request))
    return httpexceptions.HTTPAccepted()
def flush_post(request):
    """Flush the storage backend, but only when the flush endpoint is
    explicitly enabled in settings; otherwise answer 405.

    Returns ``202 Accepted`` after a successful flush.
    """
    settings = request.registry.settings
    enabled = settings.get('kinto.flush_endpoint_enabled', False)
    # Guard clause: the endpoint is opt-in via configuration.
    if not asbool(enabled):
        raise httpexceptions.HTTPMethodNotAllowed()
    request.registry.storage.flush()
    return httpexceptions.HTTPAccepted()
def command_pending(request):
    """Report on running job status.

    Looks up the command configuration named in the URL, decodes the
    batch of job ids, and polls Scrapyd for each job's status.  The
    outcome is communicated by *raising* HTTP exceptions:

    * 400 — the encoded job id list is malformed.
    * 404 — any job has an unknown status (aborts the run).
    * 202 — at least one crawler is still pending/running.
    * 302 — all crawlers finished; redirects to the results route.
    """
    name = request.matchdict['name']
    encoded_job_ids = request.matchdict['jobid']
    try:
        job_ids = decode_ids(encoded_job_ids)
    except TypeError:
        # Malformed Job ID.
        raise exc.HTTPBadRequest("The job ID is invalid.")

    settings = request.registry.settings
    cfg_template = find_command_config_from_name(settings, name)
    # Lazily render one spider config per configured spider, merging the
    # request's query params into each (same params repeated for all).
    spider_cfgs = starmap(
        render_spider_config,
        zip(
            cfg_template.spider_configs,
            cfg_template.spider_params,
            repeat(request.params),
        ))

    scrapyd = Scrapyd(settings[SCRAPYD_BASE_URL_KEY])

    # Count jobs that have not finished yet; bail out on any unknown job.
    running = 0
    for job_id, spider_cfg in zip(job_ids, spider_cfgs):
        scrapyd_helper = ScrapydJobHelper(settings, spider_cfg, scrapyd)
        status = scrapyd_helper.report_on_job(job_id)
        if status is ScrapydJobHelper.JobStatus.unknown:
            msg = "Job for spider '{}' with id '{}' has an unknown status." \
                " Aborting command run.".format(spider_cfg.spider_name, job_id)
            LOG.error(msg)
            raise exc.HTTPNotFound(msg)
        if status is not ScrapydJobHelper.JobStatus.finished:
            running += 1

    # Storing the request in the internal DB
    dbinterf = web_runner.db.DbInterface(settings['db_filename'],
                                         recreate=False)
    dbinterf.new_request_event(web_runner.db.COMMAND_STATUS, job_ids,
                               request.remote_addr)
    dbinterf.close()

    if running:
        raise exc.HTTPAccepted(detail="Crawlers still running: %d" % running)
    else:
        # All done — redirect the client to the aggregated results.
        raise exc.HTTPFound(location=request.route_path(
            "command job results",
            name=name,
            jobid=encoded_job_ids,
            _query=request.params,
        ), detail="Crawlers finished.")
def spider_pending_view(request):
    """Report the status of a single spider job.

    Raises an HTTP response describing the job: 302 redirect to the
    results route when finished, 404 when the status is unknown, and
    202 with a human-readable detail otherwise.
    """
    project_name = request.matchdict['project']
    spider_name = request.matchdict['spider']
    job_id = request.matchdict['jobid']
    settings = request.registry.settings

    scrapyd = Scrapyd(settings[SCRAPYD_BASE_URL_KEY])
    helper = ScrapydJobHelper(
        settings, SpiderConfig(spider_name, project_name), scrapyd)
    status = helper.report_on_job(job_id)

    # Record this status query in the internal DB before responding.
    dbinterf = web_runner.db.DbInterface(
        settings['db_filename'], recreate=False)
    dbinterf.new_request_event(
        web_runner.db.SPIDER_STATUS, (job_id, ), request.remote_addr)
    dbinterf.close()

    if status is ScrapydJobHelper.JobStatus.finished:
        raise exc.HTTPFound(
            location=request.route_path(
                "spider job results",
                project=project_name,
                spider=spider_name,
                jobid=job_id,
            ),
            detail="Job finished.")

    if status is ScrapydJobHelper.JobStatus.unknown:
        msg = "Job for spider '{}/{}' with id '{}' has an unknown status." \
            " Aborting command run.".format(project_name, spider_name, job_id)
        LOG.error(msg)
        raise exc.HTTPNotFound(msg)

    # Still in flight: describe how far along the job is.
    if status is ScrapydJobHelper.JobStatus.pending:
        state = "Job still waiting to run"
    elif status is ScrapydJobHelper.JobStatus.running:
        state = "Job running."
    else:
        state = 'Job state unknown.'
    raise exc.HTTPAccepted(detail=state)
def post_moderation(request):
    """Accept or reject a publication that is waiting for moderation.

    Expects a JSON body containing a boolean ``is_accepted``.  On
    acceptance the publisher is marked moderated and the publication is
    poked into its next state; on rejection the publisher is marked
    unmoderated and the publication's epub is dropped and its state set
    to 'Rejected'.

    :raises HTTPBadRequest: when ``is_accepted`` is missing or not a bool.
    :returns: ``202 Accepted`` on success.
    """
    settings = request.registry.settings
    db_conn_str = settings[config.CONNECTION_STRING]
    publication_id = request.matchdict['id']
    posted = request.json
    if 'is_accepted' not in posted \
            or not isinstance(posted.get('is_accepted'), bool):
        raise httpexceptions.HTTPBadRequest(
            "Missing or invalid 'is_accepted' value.")
    is_accepted = posted['is_accepted']

    with psycopg2.connect(db_conn_str) as db_conn:
        with db_conn.cursor() as cursor:
            if is_accepted:
                # Give the publisher moderation approval.
                cursor.execute("""\
UPDATE users SET (is_moderated) = ('t')
WHERE username = (SELECT publisher FROM publications
                  WHERE id = %s and state = 'Waiting for moderation')""",
                               (publication_id,))
                # Poke the publication into a state change.
                poke_publication_state(publication_id, cursor)
            else:
                # Reject! And Vacuum properties of the publication
                # record to /dev/null.
                # BUG FIX: the original query read "id = %sand state",
                # which produced invalid SQL ("%sand" is not a
                # placeholder followed by AND) and made rejection fail
                # at execute time.  Now matches the accepted branch.
                cursor.execute("""\
UPDATE users SET (is_moderated) = ('f')
WHERE username = (SELECT publisher FROM publications
                  WHERE id = %s and state = 'Waiting for moderation')""",
                               (publication_id,))
                cursor.execute("""\
UPDATE publications SET (epub, state) = (null, 'Rejected')
WHERE id = %s""",
                               (publication_id,))
    return httpexceptions.HTTPAccepted()
def flush_post(request):
    """Flush the storage, permission, and cache backends and reply 202."""
    registry = request.registry
    # Same fixed order as before: storage, permission, cache.
    registry.storage.flush()
    registry.permission.flush()
    registry.cache.flush()
    return httpexceptions.HTTPAccepted()