def majorshareholders_task():
    print("=== MAJOR SHAREHOLDERS SPIDER CRAWLING ===")
    setup()
    configure_logging()
    runner = CrawlerRunner(settings=get_project_settings())
    runner.crawl(majorShareHoldersHandler)
    d = runner.join()
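
# Usage sketch (an addition, not from the original source): crochet's
# wait_for turns the Deferred from runner.join() into a blocking call, so a
# synchronous caller knows when the scheduled spiders have finished. The
# 600-second timeout is an arbitrary illustrative choice.
from crochet import wait_for

@wait_for(timeout=600.0)
def majorshareholders_task_blocking():
    runner = CrawlerRunner(settings=get_project_settings())
    runner.crawl(majorShareHoldersHandler)
    return runner.join()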
Example #2
    def configure(self, options, conf):
        """Configure, based on the parsed options.

        :attention: This is part of the Nose plugin contract.
        """
        super().configure(options, conf)
        if self.enabled:
            import crochet

            # Remove deprecated crochet APIs.
            if hasattr(crochet, "wait_for_reactor"):
                del crochet.wait_for_reactor
            if hasattr(crochet.EventLoop, "wait_for_reactor"):
                del crochet.EventLoop.wait_for_reactor
            if hasattr(crochet, "DeferredResult"):
                del crochet.DeferredResult

            # Make a default timeout forbidden.
            class EventualResult(crochet.EventualResult):
                def _result(self, timeout=None):
                    if timeout is None:
                        raise AssertionError("A time-out must be specified.")
                    else:
                        return super()._result(timeout)

            # Patch it back into crochet.
            crochet._eventloop.EventualResult = EventualResult
            crochet.EventualResult = EventualResult

            if getattr(options, self.option_no_setup):
                crochet.no_setup()
            else:
                crochet.setup()
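
# A minimal sketch (an assumption, not part of the plugin) of the behaviour
# the patched EventualResult enforces: every wait() needs an explicit timeout.
import crochet
crochet.setup()

@crochet.run_in_reactor
def answer():
    return 42

print(answer().wait(timeout=5.0))  # fine: an explicit time-out was given
# answer().wait() would raise AssertionError under the patched class above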
Example #3
    def start_request(self, request_params):
        """Sets up the request params as per Twisted Agent needs.
        Sets up crochet and triggers the API request in background

        :param request_params: request parameters for API call
        :type request_params: dict

        :return: crochet EventualResult
        """
        # request_params has mandatory: method, url, params, headers
        request_params = {
            'method': str(request_params.get('method', 'GET')),
            'bodyProducer': stringify_body(request_params),
            'headers': listify_headers(request_params.get('headers', {})),
            'uri': '%s?%s' % (
                request_params['url'],
                urllib_utf8.urlencode(request_params.get('params', []), True)),
        }

        # Twisted's Agent requires a byte string for the url
        if isinstance(request_params['uri'], unicode):
            request_params['uri'] = request_params['uri'].encode('utf-8')

        crochet.setup()
        return self.fetch_deferred(request_params)
def counterparts_task():
    print("=== COUNTERPARTS SPIDER CRAWLING ===")
    setup()
    configure_logging()
    runner = CrawlerRunner(settings=get_project_settings())
    runner.crawl(counterPartsHandler)
    d = runner.join()
def ctkhdetails_task():
    print("=== CTKH DETAILS SPIDER CRAWLING ===")
    setup()
    configure_logging()
    runner = CrawlerRunner(settings=get_project_settings())
    runner.crawl(ctkhDetailsHandler)
    d = runner.join()
Example #6
def bootstrap_game(data_path=None, params=(), settings_text=''):
    """
    Bootstrap a test version of the game.

    :param data_path: The working directory for the test game.
    :type data_path: str

    :param params: Additional option parameters passed to get_options().
    :type params: tuple

    :param settings_text: An optional string to treat as the settings file.
    :type settings_text: str

    :return: A game instance.
    :rtype: mudsling.core.MUDSling
    """
    logging.debug("Bootstrapping game...")
    if data_path is None:
        data_path = os.path.join(os.getcwd(), '.testruns',
                                 str_utils.random_string(length=8))
    if not os.path.exists(data_path):
        os.makedirs(data_path)
    options = get_options(params)
    gamedir = mudsling.runner.init_game_dir(data_path)
    options['gamedir'] = gamedir
    with open(os.path.join(gamedir, 'settings.cfg'), 'w') as f:
        f.write(settings_text)
    game = MUDSling(options)
    mudsling.game = game
    set_game(game)
    game.startService()
    setup()
    return game
def finance_task():
    print("=== FINANCE SPIDER CRAWLING ===")
    setup()
    configure_logging()
    runner = CrawlerRunner(settings=get_project_settings())
    runner.crawl(financeInfoHandler)
    d = runner.join()
def associates_task():
    print("=== ASSOCIATES SPIDER CRAWLING ===")
    setup()
    configure_logging()
    runner = CrawlerRunner(settings=get_project_settings())
    runner.crawl(associatesHandler)
    d = runner.join()
def corporateAZExpress_task():
    print("=== CORPORATEAZ-Express SPIDER CRAWLING ===")
    setup()
    configure_logging()
    runner = CrawlerRunner(settings=get_project_settings())
    runner.crawl(corporateazExpressHandler)
    d = runner.join()
def viewprofile_task():
    print("=== VIEW PROFILE SPIDER CRAWLING ===")
    setup()
    configure_logging()
    runner = CrawlerRunner(settings=get_project_settings())
    runner.crawl(viewProfileHandlder)
    d = runner.join()
Example #11
def setup_server():
    """Start the websocket server."""
    crochet.setup()

    config = read_config()

    resource_models = [User, Group]
    for application in SHELL_APPS:
        resource_models.extend(
            resource.DATA_CLASS
            for resource in get_resources(application).values()
            if resource.DATA_CLASS not in (None, NotImplemented)
            and issubclass(resource.DATA_CLASS, ResourceData))

    default_resource = config.get("default_resource", None)
    if default_resource is None:
        default_resource = resource_models[-1].__name__

    if os.environ.get("RUN_MAIN") == "true":
        backend = WebsocketService(
            settings={"default_resource": default_resource})
        backend.create_server(resource_models)
        post_save.connect(backend.send_to_client,
                          sender='management.ResourceData',
                          weak=False)

        for resource_data in resource_models:
            post_save.connect(backend.send_to_client,
                              sender=resource_data,
                              weak=False)
Example #12
def init_crawler_runner():
    crochet.setup()
    init_scrapy_env()
    settings = get_project_settings()
    global CRAWLER_RUNNER
    CRAWLER_RUNNER = CrawlerRunner(settings)
    logger.info('Initialized crawler runner: %s', CRAWLER_RUNNER)
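
# Usage sketch (an assumption, not from the original module): once the global
# runner exists, crawls can be scheduled from ordinary threads by bouncing
# into the reactor with crochet; callers can block on the EventualResult.
@crochet.run_in_reactor
def schedule_crawl(spider_cls, **kwargs):
    return CRAWLER_RUNNER.crawl(spider_cls, **kwargs)

# e.g. schedule_crawl(SomeSpider).wait(timeout=300)  # SomeSpider is hypothetical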
def ownerstructure_task():
    print("=== OWNER STRUCTURE SPIDER CRAWLING ===")
    setup()
    configure_logging()
    runner = CrawlerRunner(settings=get_project_settings())
    runner.crawl(ownerStructureHandler)
    d = runner.join()
Example #14
def publish(message, exchange=None, timeout=30):
    """
    Publish a message to an exchange.

    This is a synchronous call, meaning that when this function returns, an
    acknowledgment has been received from the message broker and you can be
    certain the message was published successfully.

    There are some cases where an error occurs despite your message being
    successfully published. For example, if a network partition occurs after
    the message is received by the broker. Therefore, you may publish duplicate
    messages. For complete details, see the :ref:`publishing` documentation.

    >>> from fedora_messaging import api
    >>> message = api.Message(body={'Hello': 'world'}, topic='Hi')
    >>> api.publish(message)

    If an attempt to publish fails because the broker rejects the message, it
    is not retried. Connection attempts to the broker can be configured using
    the "connection_attempts" and "retry_delay" options in the broker URL. See
    :class:`pika.connection.URLParameters` for details.

    Args:
        message (message.Message): The message to publish.
        exchange (str): The name of the AMQP exchange to publish to; defaults to
            :ref:`conf-publish-exchange`
        timeout (int): The maximum time in seconds to wait before giving up attempting
            to publish the message. If the timeout is reached, a PublishTimeout exception
            is raised.

    Raises:
        fedora_messaging.exceptions.PublishReturned: Raised if the broker rejects the
            message.
        fedora_messaging.exceptions.PublishTimeout: Raised if the broker could not be
            contacted in the given timeout time.
        fedora_messaging.exceptions.ValidationError: Raised if the message
            fails validation with its JSON schema. This only depends on the
            message you are trying to send, the AMQP server is not involved.
    """
    crochet.setup()
    pre_publish_signal.send(publish, message=message)

    if exchange is None:
        exchange = config.conf["publish_exchange"]

    eventual_result = _twisted_publish(message, exchange)
    try:
        eventual_result.wait(timeout=timeout)
        publish_signal.send(publish, message=message)
    except crochet.TimeoutError:
        eventual_result.cancel()
        wrapper = exceptions.PublishTimeout(
            "Publishing timed out after waiting {} seconds.".format(timeout))
        publish_failed_signal.send(publish, message=message, reason=wrapper)
        raise wrapper
    except Exception as e:
        _log.error(eventual_result.original_failure().getTraceback())
        publish_failed_signal.send(publish, message=message, reason=e)
        raise
Example #15
def scrape_maradmins():
    from crochet import setup, wait_for

    setup()

    # crochet already runs the reactor in its own thread, so the crawl is
    # scheduled there and waited on, instead of calling reactor.run() (which
    # would fail once crochet has started the reactor). The timeout value
    # here is an arbitrary choice.
    @wait_for(timeout=600.0)
    def crawl():
        process = CrawlerRunner(get_project_settings())
        return process.crawl(MaradminSpider)

    crawl()
Example #16
def start_spiders():
    """
    Run spiders
    """
    setup()
    runner = CrawlerRunner()
    runner.crawl(djinni_spider.DjinniSpider)
    runner.crawl(workua_spider.WorkSpider)
Example #17
def _init_crochet(in_twisted=False):
    global crochet_initialized
    if crochet_initialized:
        return
    if in_twisted:
        crochet.no_setup()
    else:
        crochet.setup()
    crochet_initialized = True
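
# Usage sketch (an assumption): the module-level flag makes initialization
# idempotent, so repeated calls are safe whichever code path runs first.
crochet_initialized = False

_init_crochet()                 # first call: starts the reactor thread
_init_crochet(in_twisted=True)  # later calls return immediately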
Example #19
def fetch(
    url,
    method='GET',
    headers=None,
    body='',
    timeout=DEFAULT_TIMEOUT,
    connect_timeout=DEFAULT_CONNECT_TIMEOUT,
    tcp_nodelay=False,
):
    """
    Make an HTTP request.

    :param url: the URL to fetch.
    :param method: the HTTP method.
    :param headers: a dictionary mapping from string keys to lists of string
        values.  For example::

            {
                'X-Foo': ['Bar'],
                'X-Baz': ['Quux'],
            }
    :param body: the request body (must be of bytes type).
    :param timeout: maximum allowed request time in seconds.
    :param connect_timeout: maximum time allowed to establish a connection
        in seconds.
    :param tcp_nodelay: flag to enable tcp_nodelay for request

    :returns: a crochet EventualResult object which behaves as a future.
        .wait() can be called on it to retrieve the fido.fido.Response
        object; .wait() raises any exception that occurred while performing
        the request. Any additional failure information is stored on the
        crochet EventualResult object, as described in the official
        documentation.

    """

    # Twisted requires the method, url, headers to be bytes
    url = to_bytes(url)
    method = to_bytes(method)

    # Make a copy to avoid mutating the original value
    headers = dict(headers or {})

    if not any(header.lower() == 'user-agent' for header in headers):
        headers['User-Agent'] = [DEFAULT_USER_AGENT]

    # initializes twisted reactor in a different thread
    crochet.setup()
    return fetch_inner(
        url,
        method,
        headers,
        body,
        timeout,
        connect_timeout,
        tcp_nodelay,
    )
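
# Usage sketch (an assumption, not from the fido source): fetch() returns
# immediately; wait() blocks the calling thread until the response arrives
# or the timeout expires, raising on failure.
eventual = fetch('http://example.com', timeout=10)
response = eventual.wait(timeout=10)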
def setup_crochet():
    """Setup and return crochet for testing Twisted code."""
    try:
        import crochet
    except ImportError as e:
        print "\n\nERROR: Unable to import crochet: {}".format(e)
        print "  You must install it: pip install --no-deps crochet\n"
        raise

    crochet.setup()
    return crochet
Example #21
    def __start_reactor(self):
        """
        Starts the reactor in a thread and waits for it to start.
        """
        # <HACK>: Override threadpool's threads to be daemons
        from twisted.python import threadpool
        threadpool.ThreadPool.threadFactory = self.__create_daemon_thread
        crochet.setup()
        logging.getLogger('twisted').setLevel(logging.ERROR)
        self.__reactor.callFromThread(self.__event.set)
        self.__event.wait()
Example #23
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    config = Configurator(root_factory=root_factory, settings=settings)
    config.include('pyramid_chameleon')
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.scan()
    setup()
    app = config.make_wsgi_app()
    start_ssh_server(5022, "admin", "secret", {"app": app, 'config': config})
    return app
Example #24
def ulsan_course_task(self):
    task_id = current_task.request.id
    if task_id is None:
        task_id = uuid.uuid1()
    print(f'############# task started: task_id = {task_id}')
    keystring = "ulsan"

    from crawler.models import Course_info, Con_log

    #task_log = Task_log(task_id = task_id, name = 'ulsan_course')
    #task_log.save()
    #maxid = Con_log.objects.aggregate(Max('con_log_id'))
    #con_log_id = str(int('9' + maxid['con_log_id__max']) + 1)[1:]
    #maxid = Con_log.objects.aggregate(Max('con_log_id'))['con_log_id__max']
    #con_log_id = ('{:0'+f'{len(maxid)}'+'d}').format(int(maxid)+1)
    con_log_id = get_next_con_log_id()
    con_log = Con_log(con_log_id=con_log_id)
    con_log.con_id = conids_course[keystring]
    con_log.con_tm = datetime.now().strftime('%H:%M')
    con_log.con_kind_cd = 'COURSE_CRL'
    con_log.save()
    print(f'##### max_log_id = {con_log_id}')

    settings = ulsan_settings
    settings.mode = 'sigungu'
    settings.ITEM_PIPELINES = {
        'crawler.pipelines.course_pipeline': 300,
    }
    #settings.DOWNLOAD_DELAY = 1.0  # download delay (for debugging)

    #settings.LOG_FILE = 'ulsan_course_logfile.log'
    #settings.LOG_LEVEL = logging.ERROR

    setup()
    #sighandler = SigHandler()
    #d = run_spider(settings, sighandler=sighandler, keyheader=keyheaders[keystring], conid=conids[keystring])
    itemcount = ItemCount()
    d = run_spider(settings,
                   itemcount=itemcount,
                   keyheader=keyheaders[keystring],
                   conid=conids_course[keystring])
    d = run_spider(settings,
                   itemcount=itemcount,
                   keyheader=keyheaders[keystring],
                   conid=conids_course[keystring],
                   spider_id=1)
    con_log.reg_dt = timezone.now()
    #con_log.log_desc = f'total count = {sighandler.item_scraped_count}'
    con_log.log_desc = f'total count = {itemcount.item_scraped_count}'
    con_log.con_status_cd = 'SUCCESS'
    con_log.save()

    print('############## task ended')
Example #25
def scrape1(request):
    EHeadline.objects.all().delete()
    crawler_settings = Settings()

    setup()
    configure_logging()
    crawler_settings.setmodule(economy_settings)
    runner = CrawlerRunner(settings=crawler_settings)
    d = runner.crawl(economy_spider.EconomySpider)
    time.sleep(3)
    d = runner.crawl(economy_spider.ExpressSpider)
    time.sleep(3)
    return redirect("../geteconomynews/")
Example #26
def scrape2(request):
    SHeadline.objects.all().delete()
    crawler_settings = Settings()

    setup()
    configure_logging()
    crawler_settings.setmodule(sports_settings)
    runner = CrawlerRunner(settings=crawler_settings)
    d = runner.crawl(sports_spider.SportsSpider)
    time.sleep(3)
    d = runner.crawl(sports_spider.HtimesSpider)
    time.sleep(3)
    return redirect("../getsportsnews/")
Example #27
def scrape3(request):
    PHeadline.objects.all().delete()
    crawler_settings = Settings()

    setup()
    configure_logging()
    crawler_settings.setmodule(politics_settings)
    runner = CrawlerRunner(settings=crawler_settings)
    d = runner.crawl(politics_spider.PoliticsSpider)
    time.sleep(3)
    d = runner.crawl(politics_spider.EconomicSpider)
    time.sleep(3)
    return redirect("../getpoliticsnews/")
Example #28
def run_sync(f, *args, **kwargs):
    import crochet
    from functools import wraps
    timeout = kwargs.pop('timeout', None)

    @wraps(f)
    def wrapped():
        return f(*args, **kwargs)

    # The flag does not exist on the first call, so read it with getattr
    # instead of attribute access (which would raise AttributeError).
    if not getattr(run_sync, '_is_setup', False):
        crochet.setup()
        run_sync._is_setup = True

    return crochet.run_in_reactor(wrapped)().wait(timeout)
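
# Usage sketch (an assumption, not from the original source): run a
# Deferred-returning function synchronously from ordinary blocking code.
from twisted.internet import reactor
from twisted.internet.task import deferLater

def delayed_value():
    # deferLater is safe here because run_sync executes this function in
    # the reactor thread via crochet.run_in_reactor.
    return deferLater(reactor, 0.1, lambda: 'done')

print(run_sync(delayed_value, timeout=5.0))  # prints 'done'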
Example #29
def scrape4(request):
    LHeadline.objects.all().delete()
    crawler_settings = Settings()

    setup()
    configure_logging()
    crawler_settings.setmodule(lifestyle_settings)
    runner = CrawlerRunner(settings=crawler_settings)
    d = runner.crawl(lifestyle_spider.LifestyleSpider)
    time.sleep(3)
    d = runner.crawl(lifestyle_spider.HealthSpider)
    time.sleep(3)
    return redirect("../getlifestylenews/")
Example #30
    def __start_reactor(self):
        """
        The reactor is thread-blocking, so we start it in the context of
        another thread rather than the main one.
        Never leave home without crochet!
        :return:
        """
        from twisted.python import threadpool
        threadpool.ThreadPool.threadFactory = self.__create_daemon_thread
        crochet.setup()
        logging.getLogger('twisted').setLevel(logging.ERROR)
        self.__reactor.callFromThread(self.__event.set)
        self.__event.wait()
Example #31
def scrape5(request):
    ENHeadline.objects.all().delete()
    crawler_settings = Settings()

    setup()
    configure_logging()
    crawler_settings.setmodule(entertainment_settings)
    runner = CrawlerRunner(settings=crawler_settings)
    d = runner.crawl(entertainment_spider.EntertainmentSpider)
    time.sleep(3)
    d = runner.crawl(entertainment_spider.EntrtnmentSpider)
    time.sleep(3)
    return redirect("../getentertainmentnews/")
Example #32
def scrape(request):
    Headline.objects.all().delete()
    crawler_settings = Settings()

    setup()
    configure_logging()
    crawler_settings.setmodule(my_settings)
    runner = CrawlerRunner(settings=crawler_settings)
    d = runner.crawl(news_spider.NewsSpider)
    time.sleep(3)
    d = runner.crawl(news_spider.TechSpider)
    time.sleep(3)
    return redirect("../getnews/")
    def __init__(self,
                 spider,
                 allowed_domains,
                 start_urls,
                 follow_links=False):
        self.spider = spider
        self.allowed_domains = allowed_domains
        self.start_urls = start_urls
        self.follow_links = follow_links

        self.__configure_logging()
        crochet.setup()
Example #34
def fetch(url, timeout=None, connect_timeout=None, method='GET',
          content_type=DEFAULT_CONTENT_TYPE, user_agent=DEFAULT_USER_AGENT,
          headers=None, body=''):
    """Make an HTTP request.

    :param url: the URL to fetch.
    :param timeout: maximum allowed request time, in seconds. Defaults to
        None which means to wait indefinitely.
    :param connect_timeout: maximum time allowed to establish a connection,
        in seconds.
    :param method: the HTTP method.
    :param headers: a dictionary mapping from string keys to lists of string
        values.  For example::

            {
                'X-Foo': ['Bar'],
                'X-Baz': ['Quux'],
            }

    :param content_type: the content type.
    :param user_agent: the user agent.
    :param body: the body of the request.

    :returns: a :py:class:`concurrent.futures.Future` that returns a
        :py:class:`Response` if the request is successful.
    """
    if isinstance(url, unicode):
        url = url.encode('utf-8')

    # Make a copy to avoid mutating the original value
    headers = dict(headers or {})

    # Add basic header values if absent
    if 'User-Agent' not in headers:
        headers['User-Agent'] = [user_agent]
    if 'Content-Type' not in headers:
        headers['Content-Type'] = [content_type]
    if 'Content-Length' not in headers and body:
        headers['Content-Length'] = [str(len(body))]

    crochet.setup()
    future = concurrent.futures.Future()
    if future.set_running_or_notify_cancel():
        fetch_inner(url, method, headers, body, future, timeout,
                    connect_timeout)
    return future
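
# Usage sketch (an assumption, not from the original source): this variant
# returns a standard concurrent.futures.Future, so the response is
# retrieved with result() rather than crochet's wait().
response = fetch('http://example.com').result(timeout=10)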
Example #35
def fetch(
    url,
    method='GET',
    headers=None,
    body='',
    timeout=DEFAULT_TIMEOUT,
    connect_timeout=DEFAULT_CONNECT_TIMEOUT,
):
    """
    Make an HTTP request.

    :param url: the URL to fetch.
    :param method: the HTTP method.
    :param headers: a dictionary mapping from string keys to lists of string
        values.  For example::

            {
                'X-Foo': ['Bar'],
                'X-Baz': ['Quux'],
            }
    :param body: the request body (must be of bytes type).
    :param timeout: maximum allowed request time in seconds.
    :param connect_timeout: maximum time allowed to establish a connection
        in seconds.

    :returns: a crochet EventualResult object which behaves as a future.
        .wait() can be called on it to retrieve the fido.fido.Response
        object; .wait() raises any exception that occurred while performing
        the request. Any additional failure information is stored on the
        crochet EventualResult object, as described in the official
        documentation.

    """

    # Twisted requires the method, url, headers to be bytes
    url = to_bytes(url)
    method = to_bytes(method)

    # Make a copy to avoid mutating the original value
    headers = dict(headers or {})

    if not any(header.lower() == 'user-agent' for header in headers):
        headers['User-Agent'] = [DEFAULT_USER_AGENT]

    # initializes twisted reactor in a different thread
    crochet.setup()
    return fetch_inner(url, method, headers, body, timeout, connect_timeout)
def setup():
    try:
        args = input_args.parse()
        debug_enabled = args.debug or os.environ.get('DEBUG', False)
        reactor_manager.start_reactor(logging=debug_enabled)
        crochet.setup()

        events_server.ensure_server(port=8090)

        app.config.from_pyfile(args.config)

        if args.register:
            server_name = app.config['LEAP_SERVER_NAME']
            leap_register.register_new_user(args.register, server_name)
        else:
            app_factory.create_app(debug_enabled, app)
    finally:
        reactor_manager.stop_reactor_on_exit()
    def setup(self, request_params):
        """Sets up the request params as per Twisted Agent needs.
        Sets up crochet and triggers the API request in background

        :param request_params: request parameters for API call
        :type request_params: dict
        """
        # request_params has mandatory: method, url, params
        if not request_params.get('headers'):
            request_params['headers'] = self._headers
        self.request_params = {
            'method': str(request_params['method']),
            'bodyProducer': stringify_body(request_params),
            'headers': listify_headers(request_params.get('headers')),
            'uri': str(request_params['url'] + '?' + urllib.urlencode(
                request_params['params'], True))
        }

        crochet.setup()
        self.eventual = self.fetch_deferred()
    def start_request(self, request_params):
        """Sets up the request params as per Twisted Agent needs.
        Sets up crochet and triggers the API request in background

        :param request_params: request parameters for API call
        :type request_params: dict

        :return: crochet EventualResult
        """
        # request_params has mandatory: method, url, params, headers
        request_params = {
            'method': str(request_params['method']),
            'bodyProducer': stringify_body(request_params),
            'headers': listify_headers(request_params['headers']),
            'uri': str(request_params['url'] + '?' + urllib.urlencode(
                request_params['params'], True))
        }

        crochet.setup()
        return self.fetch_deferred(request_params)
Example #39
    def __init__(self, port):
        setup()
        self._target = WSF()
        self._port = port
        self.setup()
Example #40
import hashlib
import time
import psutil
import storjnode
from datetime import datetime
from datetime import timedelta
from btctxstore import BtcTxStore
from dataserv_client import common
from dataserv_client import builder
from dataserv_client import exceptions
from dataserv_client import messaging
from dataserv_client import deserialize
from dataserv_client.bandwidth_test import speedtest
from dataserv_client import __version__
from crochet import setup
setup()  # start twisted via crochet


logger = common.logging.getLogger(__name__)


SHOW_CONFIG_TEMPLATE = """Current configuration.

    Authentication address: {0}
    Payout address: {1}
"""


# TODO move all logic to control, api should only deserialize/validate input

Example #41
#!/usr/bin/python
"""
A command-line application that uses Twisted to do an MX DNS query.
"""

from __future__ import print_function

from crochet import setup, run_in_reactor
setup()


@run_in_reactor
def mx(domain):
    """
    Return list of MX domains for a given domain.
    """
    from twisted.names.client import lookupMailExchange
    def got_records(result):
        hosts, authorities, additional = result
        return [str(record.name) for record in additional]
    d = lookupMailExchange(domain)
    d.addCallback(got_records)
    return d


def main(domain):
    print("Mail servers for %s:" % (domain,))
    for mailserver in mx(domain).wait():
        print(mailserver)
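
# Entry point sketch (an assumption; the listing cuts off here): invoke
# main() with a domain given on the command line.
if __name__ == '__main__':
    import sys
    main(sys.argv[1])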

#!/usr/bin/env python
from __future__ import print_function

import base64
import functools
import random
import os

import flask
import crochet; crochet.setup()
import twisted.internet
from autobahn import util
from autobahn.wamp import message
from autobahn.twisted import wamp
from autobahn.wamp.types import ComponentConfig
from autobahn.twisted.util import sleep
from twisted.internet.defer import inlineCallbacks
from autobahn.websocket.protocol import parseWsUrl
from autobahn.twisted.websocket import WampWebSocketClientFactory, \
                                       WampWebSocketServerFactory


g = {
    "zone": 0,
    "mode": "dev",
    "battery": {"level": 1},
    "logs": [
        {"type": "current", "name": "current", "title": "Current", "contents": []},
        {"type": "old", "name": "old_1", "title": "#1", "contents": ["This is an old log."]},
        {"type": "old", "name": "old_2", "title": "#2", "contents": ["This is another", "old log."]},
        {"type": "old", "name": "old_3", "title": "#3", "contents": ["Cats."]},
def test_queued():
    from crochet import setup
    setup()

    # Alice sample node.
    alice_wallet = BtcTxStore(testnet=False, dryrun=True)
    alice_wif = alice_wallet.create_key()
    alice_node_id = address_to_node_id(alice_wallet.get_address(alice_wif))
    alice_dht = pyp2p.dht_msg.DHT(
        node_id=alice_node_id,
        networking=0
    )
    alice = FileTransfer(
        pyp2p.net.Net(
            net_type="direct",
            node_type="passive",
            nat_type="preserving",
            passive_port=63400,
            dht_node=alice_dht,
            wan_ip="8.8.8.8",
            debug=1
        ),
        BandwidthLimit(),
        wif=alice_wif,
        store_config={tempfile.mkdtemp(): None},
    )

    # Bob sample node.
    bob_wallet = BtcTxStore(testnet=False, dryrun=True)
    bob_wif = bob_wallet.create_key()
    bob_node_id = address_to_node_id(bob_wallet.get_address(bob_wif))
    bob_dht = pyp2p.dht_msg.DHT(
        node_id=bob_node_id,
        networking=0
    )
    bob = FileTransfer(
        pyp2p.net.Net(
            net_type="direct",
            node_type="passive",
            nat_type="preserving",
            passive_port=63401,
            dht_node=bob_dht,
            wan_ip="8.8.8.8",
            debug=1
        ),
        BandwidthLimit(),
        wif=bob_wif,
        store_config={tempfile.mkdtemp(): None}
    )

    # Simulate Alice + Bob "connecting"
    alice_dht.add_relay_link(bob_dht)
    bob_dht.add_relay_link(alice_dht)

    # Accept all transfers.
    def accept_handler(contract_id, src_unl, data_id, file_size):
        return 1

    # Add accept handler.
    alice.handlers["accept"].add(accept_handler)
    bob.handlers["accept"].add(accept_handler)

    # Create the file we're supposed to be uploading.
    data_id = ("5feceb66ffc86f38d952786c6d696c"
               "79c2dbc239dd4e91b46729d73a27fb57e9")
    path = os.path.join(list(alice.store_config)[0], data_id)
    if not os.path.exists(path):
        with open(path, "w") as fp:
            fp.write("0")

    # Alice wants to upload data to Bob.
    upload_contract_id = alice.data_request(
        "download",
        data_id,
        0,
        bob.net.unl.value
    )

    # Delete source file.
    def callback_builder(path, alice, bob, data_id):
        def callback(client, contract_id, con):
            print("Upload succeeded")
            print("Removing content and downloading back")
            os.remove(path)

            # Fix transfers.
            bob.handlers["complete"] = []

            # Synchronize cons and check con.unl.
            time.sleep(1)
            clients = {"alice": alice, "bob": bob}
            for client in clients:
                print()
                print(client)
                clients[client].net.synchronize()
                nodes_out = clients[client].net.outbound
                nodes_in = clients[client].net.inbound
                for node in nodes_out + nodes_in:
                    print(node["con"].unl)
                print(clients[client].cons)

            # Queued transfer:
            download_contract_id = alice.data_request(
                "upload",
                data_id,
                0,
                bob.net.unl.value
            )

            print("Download contract ID =")
            print(download_contract_id)

            # Indicate Bob's download succeeded.
            def alice_callback(val):
                print("Download succeeded")
                global queue_succeeded
                queue_succeeded = 1

            def alice_errback(val):
                print("Download failed! Error:")
                print(val)

            # Hook upload from bob.
            d = alice.defers[download_contract_id]
            d.addCallback(alice_callback)
            d.addErrback(alice_errback)

        return callback

    # Register a callback for bob (fires when he has downloaded the data).
    bob.handlers["complete"] = [
        callback_builder(path, alice, bob, data_id)
    ]

    # d = alice.defers[upload_contract_id]
    # d.addCallback(callback_builder(path, alice, bob, data_id))

    # Main event loop.
    timeout = time.time() + 40
    while not queue_succeeded and time.time() < timeout:
        for client in [alice, bob]:
            if client == alice:
                _log.debug("Alice")
            else:
                _log.debug("Bob")
            process_transfers(client)

        time.sleep(1)

    if not queue_succeeded:
        print("\a")

    for client in [alice, bob]:
        client.net.stop()

    assert queue_succeeded == 1
from mock import Mock, patch, MagicMock, sentinel

from twisted.internet.defer import maybeDeferred, inlineCallbacks

from ZenPacks.zenoss.AWS.utils import addLocalLibPath

addLocalLibPath()

# Zenoss's test base class; the import path is assumed here since the
# original listing omits it.
from Products.ZenTestCase.BaseTestCase import BaseTestCase

try:
    import crochet
except ImportError as e:
    print("\n\n\nERROR:  Unable to load crochet (%s)" % e)
    print("   you must install it: pip install --no-deps crochet\n\n\n")
    raise

crochet.setup()


class TestAWSBasePlugin(BaseTestCase):
    def afterSetUp(self):
        from ZenPacks.zenoss.AWS.dsplugins import AWSBasePlugin

        self.plugin = AWSBasePlugin()

    def test_params_empty(self):
        ds = Mock()
        # method will raise TypeError when called with two args
        ds.talesEval = lambda x: None

        self.assertEqual(self.plugin.params(ds, Mock()), {})