Example #1
def board_increase(screen_size, max_size, size):
    global board
    global screen

    # bump the configured screen size (bounded by max_size) and recompute derived settings
    gf.increase_screen(settings.screen_size, max_size)
    settings.update()
    # recreate the display surface at the new size and reload the board sprite
    screen = pygame.display.set_mode(settings.size, settings.full_screen)
    init_board()
    board = pygame.image.load("media/sprites/board.png").convert()
    update_dialog_box()
Example #2
def board_increase(screen_size, max_size, size):
    global board
    global screen
    
    gf.increase_screen(settings.screen_size, max_size)
    settings.update()
    screen = pygame.display.set_mode(settings.size, settings.full_screen)
    init_board()
    board = pygame.image.load("media/sprites/board.png").convert()
    update_dialog_box()
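
Both snippets above assume a settings object whose update() recomputes the pixel dimensions after screen_size changes. A hypothetical sketch of such an object, purely to illustrate the call sequence (the class name, tile math, and attribute defaults are assumptions, not the game's actual code):

class Settings:
    """Hypothetical settings holder with the attributes used by board_increase()."""
    def __init__(self, screen_size=1, tile=32, cols=20, rows=15):
        self.screen_size = screen_size      # integer zoom level
        self.tile, self.cols, self.rows = tile, cols, rows
        self.full_screen = 0                # pygame display flags
        self.update()

    def update(self):
        # recompute the window size in pixels from the current zoom level
        self.size = (self.cols * self.tile * self.screen_size,
                     self.rows * self.tile * self.screen_size)

settings = Settings()
settings.screen_size += 1
settings.update()
print(settings.size)  # the new size passed to pygame.display.set_mode()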
Example #3
    def getpath(force_select=False):
        filename = settings.load().get("tzpath", "")
        if not filename or force_select:
            # build the tkFileDialog options; ___ is just a short alias for FILE_OPTS
            FILE_OPTS = ___ = dict()
            ___["title"] = u"Locate the 桶裝出貨表 file."
            ___["defaultextension"] = ".xls"
            ___["filetypes"] = [("Excel files", ".xls"), ("all files", ".*")]
            ___["initialdir"] = u"T:\\Users\\chairman\\Documents\\"
            ___["initialfile"] = u"桶裝出貨表.xls"
            filename = os.path.normpath(tkFileDialog.askopenfilename(**FILE_OPTS))

            settings.update(tzpath=filename)
        return filename
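
The settings.load() and settings.update(tzpath=...) helpers used by getpath() are external to this snippet. A minimal sketch of what such a persistence module might look like, assuming a JSON file store (the module layout and the settings.json file name are assumptions):

# settings.py -- hypothetical JSON-backed store assumed by getpath() above
import json
import os

_PATH = os.path.join(os.path.dirname(os.path.abspath(__file__)), "settings.json")

def load():
    # return the saved settings dict, or {} if nothing has been saved yet
    try:
        with open(_PATH) as f:
            return json.load(f)
    except (IOError, ValueError):
        return {}

def update(**kwargs):
    # merge kwargs into the saved settings and write them back to disk
    data = load()
    data.update(kwargs)
    with open(_PATH, "w") as f:
        json.dump(data, f, indent=2)
    return data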
Example #4
    def update(self, *args):
        """update(*args)
           call update for all member sprites

           calls the update method for all sprites in the group.
           Passes all arguments on to the Sprite update function.
           Also leaves guest sprites to be updated at the end."""
        
        guests = []
        for s in self.sprites():
            if not s.guest:
                s.update(*args)
            else:
                guests.append(s)
        for s in guests:
            s.update(*args)
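
The group method above relies on each member sprite exposing a guest attribute, which is not part of pygame.sprite.Sprite itself. A minimal usage sketch under that assumption (the Piece and GuestAwareGroup names are illustrative; only the update() logic comes from the snippet):

import pygame

class GuestAwareGroup(pygame.sprite.Group):
    # stand-in for the group class that defines the update() method shown above
    def update(self, *args):
        guests = []
        for s in self.sprites():
            if not s.guest:
                s.update(*args)
            else:
                guests.append(s)
        for s in guests:
            s.update(*args)

class Piece(pygame.sprite.Sprite):
    def __init__(self, guest=False):
        pygame.sprite.Sprite.__init__(self)
        self.guest = guest
        self.image = pygame.Surface((10, 10))
        self.rect = self.image.get_rect()

    def update(self, dt):
        self.rect.x += 1  # guests move only after every regular sprite has moved

group = GuestAwareGroup(Piece(), Piece(guest=True))
group.update(16)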
Example #5
    def get(self, url, **kwargs):
        """Download this URL and return the HTML. 
        By default HTML is cached so only have to download once.

        url:
            what to download
        kwargs:
            override any of the arguments passed to constructor
        """
        self.reload_proxies()
        self.proxy = None # the current proxy
        self.final_url = None # for tracking redirects
        self.response_code = '' # keep response code
        self.response_headers = {} # keep response headers
        self.downloading_error = None # keep downloading error
        self.num_downloads = self.num_errors = 0 # track the number of downloads and errors made
                
        # update settings with any local overrides
        settings = adt.Bag(self.settings)
        settings.update(kwargs)
        # check cache for whether this content is already downloaded
        key = self.get_key(url, settings.data)
        if self.cache and settings.read_cache:
            try:
                html = self.cache[key]
                if html and settings.pattern and not re.compile(settings.pattern, re.DOTALL | re.IGNORECASE).search(html):
                    # invalid result from download
                    html = None
            except KeyError:
                pass # have not downloaded yet
            else:
                if not html and settings.num_retries > 0:
                    # try downloading again
                    common.logger.debug('Redownloading')
                    settings.num_retries -= 1
                else:
                    # return previously downloaded content
                    return html or settings.default 
        if not settings.use_network:
            # only want previously cached content
            return settings.default 

        html = None
        failed_proxies = set() # record which proxies failed to download for this URL
        # attempt downloading content at URL
        while settings.num_retries >= 0 and html is None:
            settings.num_retries -= 1
            self.proxy = self.get_proxy(settings.proxies)
            # crawl slowly for each domain to reduce risk of being blocked
            self.throttle(url, delay=settings.delay, proxy=self.proxy) 
            html = self.fetch(url, headers=settings.headers, data=settings.data, proxy=self.proxy, user_agent=settings.user_agent, opener=settings.opener, pattern=settings.pattern)

            if html:
                # successfully downloaded
                self.num_downloads += 1
                if settings.max_proxy_errors is not None:
                    Download.proxy_performance.success(self.proxy)
                    # record which proxies failed for this download
                    for proxy in failed_proxies:
                        if Download.proxy_performance.error(proxy) > settings.max_proxy_errors:
                            # this proxy has had too many errors so remove
                            common.logger.warning('Removing unstable proxy from list after %d consecutive errors: %s' % (settings.max_proxy_errors, proxy))
                            settings.proxies.remove(proxy)
            else:
                # download failed - try again
                self.num_errors += 1
                failed_proxies.add(self.proxy)


        if html:
            if settings.num_redirects > 0:
                # allowed to redirect
                redirect_url = self.get_redirect(url=url, html=html)
                if redirect_url:
                    # found a redirection
                    common.logger.debug('%s redirecting to %s' % (url, redirect_url))
                    settings.num_redirects -= 1
                    html = self.get(redirect_url, **settings) or ''
                    # make relative links absolute so will still work after redirect
                    relative_re = re.compile(r'(<\s*a[^>]+href\s*=\s*["\']?)(?!http)([^"\'>]+)', re.IGNORECASE)
                    html = relative_re.sub(lambda m: m.group(1) + urlparse.urljoin(url, m.group(2)), html)
            html = self._clean_content(html=html, max_size=settings.max_size, force_html=settings.force_html, force_ascii=settings.force_ascii)

        if self.cache and settings.write_cache:
            # cache results
            self.cache[key] = html
            if url != self.final_url:
                # cache what URL was redirected to
                self.cache.meta(key, dict(url=self.final_url))
        
        # return default if no content
        return html or settings.default 
Example #6
    def get(self, url, **kwargs):
        """Download this URL and return the HTML. 
        By default HTML is cached so only have to download once.

        url:
            what to download
        kwargs:
            override any of the arguments passed to constructor
        """
        self.reload_proxies()
        self.proxy = None  # the current proxy
        self.final_url = None  # for tracking redirects
        self.response_code = ''  # keep response code
        self.response_headers = {}  # keep response headers
        self.downloading_error = None  # keep downloading error
        self.num_downloads = self.num_errors = 0  # track the number of downloads and errors made

        # update settings with any local overrides
        settings = adt.Bag(self.settings)
        settings.update(kwargs)
        # check cache for whether this content is already downloaded
        key = self.get_key(url, settings.data)
        if self.cache and settings.read_cache:
            try:
                html = self.cache[key]
                if self.invalid_response(html, settings.pattern):
                    # invalid result from download
                    html = None
            except KeyError:
                pass  # have not downloaded yet
            else:
                if not html and settings.num_retries > 0:
                    # try downloading again
                    common.logger.debug('Redownloading')
                    settings.num_retries -= 1
                else:
                    # return previously downloaded content
                    return html or settings.default
        if not settings.use_network:
            # only want previously cached content
            return settings.default

        html = None
        failed_proxies = set()  # record which proxies failed to download for this URL
        # attempt downloading content at URL
        while settings.num_retries >= 0 and html is None:
            settings.num_retries -= 1
            if settings.proxy:
                self.proxy = settings.proxy
            else:
                self.proxy = self.get_proxy(settings.proxies)
            # crawl slowly for each domain to reduce risk of being blocked
            self.throttle(url, delay=settings.delay, proxy=self.proxy)
            html = self.fetch(url,
                              headers=settings.headers,
                              data=settings.data,
                              proxy=self.proxy,
                              user_agent=settings.user_agent,
                              opener=settings.opener,
                              pattern=settings.pattern,
                              max_size=settings.max_size)

            if html:
                # successfully downloaded
                self.num_downloads += 1
                if settings.max_proxy_errors is not None:
                    Download.proxy_performance.success(self.proxy)
                    # record which proxies failed for this download
                    for proxy in failed_proxies:
                        if Download.proxy_performance.error(
                                proxy) > settings.max_proxy_errors:
                            # this proxy has had too many errors so remove
                            common.logger.warning(
                                'Removing unstable proxy from list after %d consecutive errors: %s'
                                % (settings.max_proxy_errors, proxy))
                            settings.proxies.remove(proxy)
            else:
                # download failed - try again
                self.num_errors += 1
                failed_proxies.add(self.proxy)

        if html:
            if settings.num_redirects > 0:
                # allowed to redirect
                redirect_url = get_redirect(url=url, html=html)
                if redirect_url:
                    # found a redirection
                    common.logger.debug('%s redirecting to %s' %
                                        (url, redirect_url))
                    settings.num_redirects -= 1
                    html = self.get(redirect_url, **settings) or ''
                    # make relative links absolute so will still work after redirect
                    relative_re = re.compile(
                        r'(<\s*a[^>]+href\s*=\s*["\']?)(?!http)([^"\'>]+)',
                        re.IGNORECASE)
                    try:
                        html = relative_re.sub(
                            lambda m: m.group(1) + urlparse.urljoin(
                                url, m.group(2)), html)
                    except UnicodeDecodeError:
                        pass
            html = self._clean_content(html=html,
                                       max_size=settings.max_size,
                                       force_html=settings.force_html,
                                       force_ascii=settings.force_ascii)

        if self.cache and settings.write_cache:
            # cache results
            self.cache[key] = html
            if url != self.final_url:
                # cache what URL was redirected to
                self.cache.meta(key, dict(url=self.final_url))

        # return default if no content
        return html or settings.default
Example #7
    def get(self, url, **kwargs):
        """Download this URL and return the HTML. Data is cached so only have to download once.

        `url' is what to download
        `kwargs' can override any of the arguments passed to constructor
        """
        self.reload_proxies()
        self.final_url = None  # for tracking redirects

        # update settings with any local overrides
        settings = adt.Bag(self.settings)
        settings.update(kwargs)

        # check cache for whether this content is already downloaded
        key = self.get_key(url, settings.data)
        if self.cache and settings.read_cache:
            try:
                html = self.cache[key]
                if (
                    html
                    and settings.pattern
                    and not re.compile(settings.pattern, re.DOTALL | re.IGNORECASE).search(html)
                ):
                    # invalid result from download
                    html = None
            except KeyError:
                pass  # have not downloaded yet
            else:
                if not html and settings.num_retries > 0:
                    # try downloading again
                    common.logger.debug("Redownloading")
                else:
                    # return previously downloaded content
                    return html or settings.default
        if not settings.use_network:
            # only want previously cached content
            return settings.default

        html = None
        # attempt downloading content at URL
        while html is None:
            # crawl slowly for each domain to reduce risk of being blocked
            settings.proxy = random.choice(settings.proxies) if settings.proxies else None
            self.throttle(url, delay=settings.delay, proxy=settings.proxy)
            html = self.fetch(
                url,
                headers=settings.headers,
                data=settings.data,
                proxy=settings.proxy,
                user_agent=settings.user_agent,
                opener=settings.opener,
                pattern=settings.pattern,
            )
            if settings.num_retries == 0:
                break  # don't try downloading again
            else:
                settings.num_retries -= 1

        if html:
            if settings.num_redirects > 0:
                # allowed to redirect
                redirect_url = self.get_redirect(url=url, html=html)
                if redirect_url:
                    # found a redirection
                    common.logger.info("%s redirecting to %s" % (url, redirect_url))
                    settings.num_redirects -= 1
                    html = self.get(redirect_url, **settings) or ""
                    # make relative links absolute so will still work after redirect
                    relative_re = re.compile("(<\s*a[^>]+href\s*=\s*[\"']?)(?!http)([^\"'>]+)", re.IGNORECASE)
                    html = relative_re.sub(lambda m: m.group(1) + urljoin(url, m.group(2)), html)
            html = self.clean_content(
                html=html, max_size=settings.max_size, force_html=settings.force_html, force_ascii=settings.force_ascii
            )

        if self.cache and settings.write_cache:
            # cache results
            self.cache[key] = html
            if url != self.final_url:
                # cache what URL was redirected to
                self.cache.meta(key, dict(url=self.final_url))

        # return default if no content
        return html or settings.default
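
All three variants of get() above rely on the same pattern: copy the constructor-level settings into a bag-like object and overlay the per-call kwargs, so overrides apply to a single request only. A minimal sketch of that merge using a plain dict instead of the library's adt.Bag (the Downloader class and its defaults here are illustrative):

class Downloader(object):
    def __init__(self, **kwargs):
        # constructor-level defaults, overridable per call to get()
        self.settings = dict(num_retries=1, delay=5, user_agent='Mozilla/5.0', default='')
        self.settings.update(kwargs)

    def get(self, url, **kwargs):
        # copy the defaults so per-call overrides never mutate the constructor settings
        settings = dict(self.settings)
        settings.update(kwargs)
        return '<html>stub for %s (retries=%d)</html>' % (url, settings['num_retries'])

d = Downloader(delay=2)
print(d.get('http://example.com', num_retries=3))  # per-call override wins
print(d.settings['num_retries'])                   # constructor default is untouched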
Example #8
        # Try/except just keeps ctrl-c from printing an ugly stacktrace
        try:
            self.bitmex.ws.recent_trades()
            order_manager.run_loop()
        except (KeyboardInterrupt, SystemExit):
            sys.exit()


userSettings = import_path(os.path.join('.', 'settings'))
symbolSettings = None
symbol = sys.argv[1] if len(sys.argv) > 1 else None
if symbol:
    print("Importing symbol settings for %s..." % symbol)
    try:
        symbolSettings = import_path(os.path.join('..',
                                                  'settings-%s' % symbol))
    except Exception:
        print("Unable to find settings-%s.py." % symbol)

settings = {}
settings.update(vars(baseSettings))
settings.update(vars(userSettings))
if symbolSettings:
    settings.update(vars(symbolSettings))

# Main export
settings = dotdict(settings)

e = CustomOrderManager()
e.run()
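
The dotdict wrapper used for the final export is not defined in this snippet; a minimal sketch of such a dict-with-attribute-access class, assuming the common recipe rather than the project's exact implementation:

class dotdict(dict):
    # dict subclass that also exposes its keys as attributes
    __getattr__ = dict.get
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__

settings = dotdict({'SYMBOL': 'XBTUSD'})
print(settings.SYMBOL)      # attribute-style access
print(settings['SYMBOL'])   # normal dict access still works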
Example #9
    if i.endswith('.py') and i != '__init__.py':
        module = import_module('.%s' % i[:-3], 'web.uiwidget')
        uimodules.append(module)

settings.update({
        'template_path': os.path.join(os.path.dirname(__file__), 'web/template'),
        'static_path': os.path.join(os.path.dirname(__file__), 'static'),
        'upload_path': os.path.join(os.path.dirname(__file__), 'static/upload'),
        'cookie_secret': "x1g5zGibQISFc0+t2G2qcwraupWIKEt2ibwCmQgSfcU=",
        'login_url': '/signin',
        "xsrf_cookies": True,
        'ui_modules': uimodules,
        'autoescape': None,
        'log_function': log.log_info,

        # Enable debug mode (defaults to False):
        # 'debug': True,
        # Enable gzip compression:
        # 'gzip': True,
        # Set the static file handler class (defaults to tornado.web.StaticFileHandler):
        # 'static_handler_class': MyStaticFileHandler,
        # Set extra arguments for the static file handler (defaults to an empty dict):
        # 'static_handler_args': {"key1": "value1", "key2": "value2"},
    })
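
The dict built above is a standard Tornado application settings mapping; a minimal sketch of how it is typically consumed, with the handler and port being illustrative:

import tornado.ioloop
import tornado.web

class MainHandler(tornado.web.RequestHandler):
    def get(self):
        self.write("ok")

# unpack the merged settings into the Application constructor
application = tornado.web.Application([(r"/", MainHandler)], **settings)

if __name__ == "__main__":
    application.listen(8888)
    tornado.ioloop.IOLoop.current().start()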


Example #10
def postSettings():
    # merge the posted JSON payload into the stored user settings
    settingPayload = request.get_json()
    userSettings = settings.update(settingPayload)
    # serialize the updated settings back to the client
    jsonString = json.dumps(userSettings.toJSON())
    return jsonString
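
The route above assumes a settings object whose update() returns an object with a toJSON() method, rather than a plain dict (whose update() returns None). A hypothetical sketch of such a wrapper, with every name other than update assumed:

import json

class UserSettings(object):
    def __init__(self, **defaults):
        self._data = dict(defaults)

    def update(self, payload):
        # merge the posted JSON payload and return self so the result can be serialized
        self._data.update(payload or {})
        return self

    def toJSON(self):
        return self._data

settings = UserSettings(theme="dark")
print(json.dumps(settings.update({"volume": 5}).toJSON()))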