Example #1
File: code.py Project: wgfi110/crawley
    def run(self, run_command):
        """
            Run the crawler of a code project
        """

        # Importing the user's "crawlers" module makes its crawler classes
        # available in the user_crawlers list passed below (the classes
        # presumably self-register at import time).
        import_user_module("crawlers")
        BaseProject.run(self, run_command, user_crawlers)
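For context, a minimal sketch of what import_user_module could look like, assuming it simply imports a module from the user's project directory (already on sys.path) and returns it; this is an illustration, not the actual crawley implementation:

import importlib

def import_user_module(module_name):
    # Hypothetical helper: import a module (e.g. "crawlers") from the
    # user's project and return it. Importing alone is enough for the
    # crawler classes to run their class statements and register.
    return importlib.import_module(module_name)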
Example #2
File: template.py Project: 4iji/crawley
    def set_up(self, project_name, **kwargs):
        """
            Sets up a crawley template project
        """

        BaseProject.set_up(self, project_name, **kwargs)

        self._generate_templates(project_name)
Example #3
File: template.py Project: aparo/crawley
    def set_up(self, project_name):
        """
            Sets up a crawley template project
        """

        BaseProject.set_up(self, project_name)

        generate_template("template", project_name, self.project_dir, new_extension=".crw")
        generate_template("config", project_name, self.project_dir, new_extension=".ini")
Example #4
File: code.py Project: hammadk373/crawley
    def syncdb(self, syncb_command):
        """
            Builds the database and finds the document storages.
            For each storage it adds a session to commit the results.
        """

        BaseProject.syncdb(self, syncb_command)

        if self.connector is not None:
            self._setup_entities(elixir.entities, syncb_command.settings)
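elixir here is the declarative layer over SQLAlchemy, and elixir.entities is its module-level collection of declared entity classes. A minimal sketch, under assumptions, of the kind of work _setup_entities would do with them:

import elixir

def setup_entities(entities, settings):
    # Sketch only, not the crawley source: bind elixir's metadata to the
    # configured database, map each entity class to a table, and create
    # the schema.
    elixir.metadata.bind = settings.DATABASE_ENGINE  # setting name assumed
    elixir.setup_all()
    elixir.create_all()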
Example #5
File: code.py Project: hammadk373/crawley
    def set_up(self, project_name):
        """
            Sets up a code project.
            Generates the crawlers and models files based on a template.
        """

        BaseProject.set_up(self, project_name)

        generate_template("models", project_name, self.project_dir)
        generate_template("crawlers", project_name, self.project_dir)
Example #6
File: code.py Project: wgfi110/crawley
    def syncdb(self, syncb_command):
        """
            Builds the database and finds the document storages.
            For each storage it adds a session to commit the results.
        """

        BaseProject.syncdb(self, syncb_command)

        if self.connector is not None:
            self._setup_entities(elixir.entities, syncb_command.settings)
Example #7
File: code.py Project: wgfi110/crawley
    def set_up(self, project_name, **kwargs):
        """
            Sets up a code project.
            Generates the crawlers and models files based on a template.
        """

        BaseProject.set_up(self, project_name, **kwargs)

        generate_template("models", project_name, self.project_dir)
        generate_template("crawlers", project_name, self.project_dir)
Example #8
File: template.py Project: aparo/crawley
    def run(self, run_command):
        """
            Runs the crawler.

            For this kind of project, the crawler and scraper
            classes need to be generated at runtime first.
        """

        scraper_classes = run_command.syncdb.generator.gen_scrapers()

        compiler = CrawlerCompiler(scraper_classes, run_command.settings)
        crawler_class = compiler.compile()

        BaseProject.run(self, run_command, [crawler_class])
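CrawlerCompiler itself is not shown in this listing. A plausible sketch is a class that assembles a crawler type at runtime from the generated scrapers; BaseCrawler, the attribute names, and the settings fields are all assumptions:

class CrawlerCompiler(object):

    def __init__(self, scraper_classes, settings):
        self.scraper_classes = scraper_classes
        self.settings = settings

    def compile(self):
        # Build a crawler class dynamically: type() returns a new class
        # whose scrapers come from the template project's DSL.
        attrs = {"scrapers": self.scraper_classes,
                 "start_urls": self.settings.START_URLS}  # field assumed
        return type("GeneratedCrawler", (BaseCrawler,), attrs)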
Example #9
File: template.py Project: 4iji/crawley
    def syncdb(self, syncb_command):
        """
            Builds the database
        """

        BaseProject.syncdb(self, syncb_command)

        if self.connector is None:
            return

        template = self._get_template(syncb_command)

        syncb_command.generator = Generator(template, syncb_command.settings)
        entities = syncb_command.generator.gen_entities()

        self._setup_entities(entities, syncb_command.settings)
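_get_template is not shown here, but Example #11 below inlines what it presumably does: read the template.crw DSL file from the project root. A sketch of the helper along those lines:

import os

def get_template(syncb_command):
    # Sketch of the _get_template helper, mirroring the inline read in
    # Example #11: the DSL template lives as "template.crw" in the
    # project root.
    path = os.path.join(syncb_command.settings.PROJECT_ROOT, "template.crw")
    with open(path, "r") as f:
        return f.read()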
Example #10
    def run(self, run_command):
        """
            Runs the crawler.

            For this kind of project, the crawler and scraper
            classes need to be generated at runtime first.
        """

        scraper_classes = run_command.syncdb.generator.gen_scrapers()

        config = self._get_config(run_command)

        compiler = CrawlerCompiler(scraper_classes, config)
        crawler_class = compiler.compile()

        BaseProject.run(self, run_command, [crawler_class])
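Unlike Example #8, this variant feeds the compiler a config object rather than the raw settings. A hypothetical sketch of the _get_config helper, assuming it parses the config.ini generated from the "config" template in Example #3 (ConfigParser is the Python 2 module name, contemporary with elixir):

import os
import ConfigParser  # Python 2 module; crawley predates Python 3

def get_config(run_command):
    # Hypothetical: parse the .ini file that set_up generated from the
    # "config" template (see Example #3). The file name is assumed.
    config = ConfigParser.ConfigParser()
    config.read(os.path.join(run_command.settings.PROJECT_ROOT, "config.ini"))
    return config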
Example #11
File: template.py Project: aparo/crawley
    def syncdb(self, syncb_command):
        """
            Builds the database
        """

        BaseProject.syncdb(self, syncb_command)

        if self.connector is None:
            return

        with open(os.path.join(syncb_command.settings.PROJECT_ROOT, "template.crw"), "r") as f:
            template = f.read()

        syncb_command.generator = Generator(template, syncb_command.settings)
        entities = syncb_command.generator.gen_entities()

        self._setup_entities(entities, syncb_command.settings)