def __init__(self):
    """Configure a signed BSD (Blue State Digital) event-search request.

    Builds the API call URL from the configured endpoint, a timestamped
    parameter set, and an HMAC-style signature derived from the secret.
    """
    Scraper.__init__(self)
    cfg = self.config["bsd"]
    self.html = HTMLParser()
    self.call_path = "/page/api/event/search_events"
    # api_ts carries the current epoch second; the signature is computed
    # over these params, so they must be fixed before sign_params runs.
    self.params = {
        "api_ver": "2",
        "api_id": cfg["api_id"],
        "api_ts": str(int(time.time())),
    }
    self.signed_params = self.sign_params(cfg["api_secret"])
    self.url = "".join([cfg["endpoint"], self.call_path, "?", self.signed_params])
    # Rename BSD response fields to our canonical record keys.
    self.map = {"event_id": "original_id", "start_dt": "start_time"}
    self.event_provider = EventProvider()
    self.push_provider = PushProvider()
def __init__(self):
    """Set up a YouTube Data API v3 search for the channel's latest uploads."""
    Scraper.__init__(self)
    key = self.config["youtube"]["api_key"]
    self.url = "https://www.googleapis.com/youtube/v3/search"
    # Newest-first, capped at 10 results, restricted to uploads on the
    # hard-coded campaign channel.
    self.params = {
        "order": "date",
        "maxResults": 10,
        "channelId": "UCH1dpzjCEiGAt8CXkryhkZg",
        "key": key,
        "type": "upload",
        "part": "snippet",
    }
    self.details = Bernie2016VideoDetailScraper()
    self.video_provider = VideoProvider()
    self.push_provider = PushProvider()
def __init__(self):
    """Prepare the YouTube v3 search request used to list recent uploads."""
    Scraper.__init__(self)
    youtube_cfg = self.config["youtube"]
    self.url = "https://www.googleapis.com/youtube/v3/search"
    # Query the campaign channel for its 10 most recent uploads,
    # returning snippet metadata only.
    self.params = {
        "order": "date",
        "maxResults": 10,
        "channelId": "UCH1dpzjCEiGAt8CXkryhkZg",
        "key": youtube_cfg["api_key"],
        "type": "upload",
        "part": "snippet",
    }
    self.details = Bernie2016VideoDetailScraper()
    self.video_provider = VideoProvider()
    self.push_provider = PushProvider()
def __init__(self):
    """Build the signed BSD event-search URL from configuration.

    The request parameters include a fresh epoch timestamp and are signed
    with the configured API secret before being appended to the endpoint.
    """
    Scraper.__init__(self)
    c = self.config["bsd"]
    self.html = HTMLParser()
    self.call_path = "/page/api/event/search_events"
    timestamp = str(int(time.time()))
    self.params = {"api_ver": "2", "api_id": c["api_id"], "api_ts": timestamp}
    # sign_params reads self.params, so it must run after they are set.
    self.signed_params = self.sign_params(c["api_secret"])
    self.url = c["endpoint"] + self.call_path + "?" + self.signed_params
    # Map BSD field names onto our own record schema.
    self.map = {"event_id": "original_id", "start_dt": "start_time"}
    self.event_provider = EventProvider()
    self.push_provider = PushProvider()
from algorithms.Model1 import Model1 from scrapers.scraper import Scraper import pickle import os import numpy as np from settings import settings if __name__ == "__main__": print("Starting altomfotball.no scraper!") data = None if not settings["use_pickle_model"]: scraper = Scraper.scrape() pickle.dump(scraper, open(os.path.join(os.getcwd(), "model.p"), "wb")) data = scraper else: data = pickle.load(open(os.path.join(os.getcwd(), "model.p"), "rb")) model = Model1() X_train = [] Y_train = [] X_test = [] Y_test = [] train_years = [x for x in range(2016, 2017)] test_years = [x for x in range(2017, 2018)] for year, season in data.items(): if year not in train_years and year not in test_years:
def __init__(self):
    """Point the scraper at the berniesanders.com issues RSS feed."""
    Scraper.__init__(self)
    self.url = "https://berniesanders.com/issues/feed/"
    self.html = HTMLParser()
    self.issue_provider = IssueProvider()
    self.push_provider = PushProvider()
def __init__(self, url):
    """Scrape a single issue page.

    Args:
        url: Address of the issue page to fetch.
    """
    Scraper.__init__(self)
    self.url = url
    self.html = HTMLParser()
    self.issue_provider = IssueProvider()
    self.push_provider = PushProvider()
def gather_process():
    """Run one scrape pass, persisting results into the scraped-data file."""
    logger.info("gather")
    storage = FileStorage(SCRAPED_FILE)
    Scraper().scrape_process(storage)
def __init__(self):
    """Target the berniesanders.com news listing page."""
    Scraper.__init__(self)
    self.url = "https://berniesanders.com/news/"
    self.html = HTMLParser()
    self.news_provider = NewsProvider()
def __init__(self):
    """Prepare a YouTube Data API v3 videos lookup."""
    Scraper.__init__(self)
    # NOTE(review): api_key is read but not used in the visible body —
    # possibly the rest of this initializer was truncated; verify upstream.
    api_key = self.config["youtube"]["api_key"]
    self.url = "https://www.googleapis.com/youtube/v3/videos"
def __init__(self, url):
    """Scrape a single news article.

    Args:
        url: Address of the news page to fetch.
    """
    Scraper.__init__(self)
    self.url = url
    self.html = HTMLParser()
    self.news_provider = NewsProvider()
    self.push_provider = PushProvider()
def __init__(self):
    """Target the berniesanders.com daily articles listing."""
    Scraper.__init__(self)
    self.url = "https://berniesanders.com/daily/"
    self.html = HTMLParser()
    self.article_provider = ArticleProvider()
def __init__(self):
    """Read the issues RSS feed from berniesanders.com."""
    Scraper.__init__(self)
    self.url = "https://berniesanders.com/issues/feed/"
    self.html = HTMLParser()
    self.issue_provider = IssueProvider()