import sys, signal, logging

from TimelineScraper import TimelineScraper
from resultstore.FileSystemResultsStore import FileSystemResultsStore
from resultstore.S3ResultsStore import S3ResultsStore
from engines.TwitterTsEngine import TwitterTsEngine
from engines.TradingPlatformsTsEngine import TradingPlatformsTradesTsEngine


def create_scraper_from_config(config):
    """Build a TimelineScraper from a config dict and register it in scrapers."""
    # Look up the configured engine class and build its constructor arguments
    # in the order declared by get_config_params().
    EngineClass = engines[config["engine"]["name"]]
    engine_param_names = [param["name"] for param in EngineClass.get_config_params()]
    engine_param_values = [config["engine"][param_name] for param_name in engine_param_names]
    engine_param_values.insert(0, config["name"]) # prepend the scraper name so the engine receives (name, ...)
    engine = EngineClass(*engine_param_values)

    # Same for the results store; "Integer" parameters are coerced through
    # float() first so values written as "1e6" are accepted.
    ResultsStoreClass = results_stores[config["results_store"]["name"]]
    results_store_param_values = [
        int(float(config["results_store"][param["name"]])) if param["type"] == "Integer"
        else config["results_store"][param["name"]]
        for param in ResultsStoreClass.get_config_params()]
    # Prepend workspace then name so the store receives (name, workspace, ...).
    results_store_param_values.insert(0, workspace)
    results_store_param_values.insert(0, config["name"])
    results_store = ResultsStoreClass(*results_store_param_values)

    # Assemble the scraper, attach engine and results store, and register it by name.
    scraper = TimelineScraper(name = config["name"], workspace = workspace)
    scraper.logger.setLevel(logging.DEBUG)
    scraper.engine = engine
    scraper.results_store = results_store

    scrapers[scraper.name] = scraper
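
# Hypothetical usage sketch (not in the original script): the config layout below
# is inferred from how create_scraper_from_config reads it; the registry keys
# ("twitter", "s3") and the parameter names shown are assumptions for illustration.
#
# example_config = {
#     "name": "test_twitter_engine",
#     "engine": {"name": "twitter", "query": "bitcoin"},
#     "results_store": {"name": "s3", "rollover_enabled": True, "rollover_trigger_size": "1e6"},
# }
# create_scraper_from_config(example_config)
# scraper = scrapers["test_twitter_engine"]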

workspace = "data"
name = "test_twitter_engine"

scraper = TimelineScraper(name = name, workspace = workspace)
scraper.logger.setLevel(logging.DEBUG)
scraper.engine = TwitterTsEngine(
	name = name, 
	query = "bitcoin", 
	app_key = "<TWITTER_APP_KEY>",        # placeholder: supply your own API key (do not commit real credentials)
	access_token = "<TWITTER_ACCESS_TOKEN>" # placeholder: e.g. read from an environment variable
)
# scraper.engine = TradingPlatformsTradesTsEngine(name = name)
# scraper.results_store = FileSystemResultsStore(
# 	name = name,
# 	workspace = workspace, 
# 	rollover_enabled = True, 
# 	rollover_trigger_size = 1e6
# )

scraper.results_store = S3ResultsStore(
	name = name, 
	workspace = workspace, 
	rollover_enabled = True, 
	rollover_trigger_size = 1e6