def create_root(config):
    """Assemble and return the Twisted resource tree for the slyd server.

    The tree layout is:
        /static                      -- files served from config['docroot']
        /projects                    -- project management
        /projects/PROJECT_ID/bot     -- slybot crawler
        /projects/PROJECT_ID/spec    -- crawler spec editor

    :param config: mapping with at least a 'docroot' entry pointing at the
        directory of static assets to serve.
    :returns: the root ``Resource`` ready to be wrapped in a Site.
    """
    from scrapy import log
    from scrapy.settings import CrawlerSettings
    from slyd.crawlerspec import (CrawlerSpecManager,
                                  create_crawler_spec_resource)
    from slyd.bot import create_bot_resource
    import slyd.settings
    from slyd.projects import ProjectsResource

    settings = CrawlerSettings(settings_module=slyd.settings)
    manager = CrawlerSpecManager(settings)

    site_root = Resource()
    site_root.putChild("static", File(config['docroot']))

    # Project management lives at /projects; everything project-scoped
    # hangs off that node.
    projects_node = ProjectsResource(settings)
    site_root.putChild('projects', projects_node)

    log.msg("Slybot specs loading from %s/[PROJECT]" % manager.basedir,
            level=log.DEBUG)
    # Crawler endpoint: /projects/PROJECT_ID/bot
    projects_node.putChild("bot", create_bot_resource(manager))

    # Spec endpoint: /projects/PROJECT_ID/spec
    spec_node = create_crawler_spec_resource(manager)
    projects_node.putChild("spec", spec_node)

    return site_root
def test_projects_resource(temp_projects_dir):
    """Create a ProjectsResource configured to use test settings"""
    settings = CrawlerSettings(settings_module=test_settings)
    resource = ProjectsResource(settings)
    # Point the resource at the throwaway directory so tests never touch
    # real project data.
    resource.projectsdir = temp_projects_dir
    return resource