Example #1
    def get(self):
        self.response.headers['Content-Type'] = 'application/pdf'

        config = Configuration()

        address = 'Ola Nordmann\nNorskeveien 1\n9876 Olabyen'
        member_no = '9876'
        access_code = 'BBQLOL'
        fee = 400
        profile_url = constants.PROFILE_URL
        account_no = config.get('GIRO_ACCOUNT_NO')

        body_template = Template(config.get('GIRO_TEXT'))
        message_template = Template(config.get('GIRO_MESSAGE'))

        data = {
            'member_no': member_no,
            'account_no': account_no,
            'access_code': access_code,
            'profile_url': profile_url
        }

        pdf = PdfGenerator(member_address=address,
                           club_address=config.get('GIRO_ADDRESS'),
                           account_no=account_no,
                           member_no=member_no,
                           access_code=access_code,
                           profile_url=profile_url,
                           heading=config.get('GIRO_SUBJECT'),
                           body=body_template.render(data),
                           fee=fee,
                           due_date='12.12.2012',
                           payment_message=message_template.render(data))

        pdf.generate_pdf(self.response.out)
Example #2
 def run(self):
     config = Configuration()
     jid = config.get('connection', 'jid')
     password = config.get('connection', 'password')
     resource = config.get('connection', 'resource')
     debug = config.getboolean('connection', 'debug')
     bot = UpwalkJabberBot(jid, password, resource, debug)
     bot.serve_forever()
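
Note: the Configuration class itself does not appear in any of these snippets. As a rough sketch only, a minimal ConfigParser-backed wrapper that would satisfy the section/option calls in Example #2 (and Examples #10 and #26) might look like the following; the class body and the 'bot.cfg' default filename are assumptions, not the implementation actually used by these projects.

# Minimal sketch of a ConfigParser-backed Configuration wrapper (assumption).
try:
    from configparser import ConfigParser                       # Python 3
except ImportError:
    from ConfigParser import SafeConfigParser as ConfigParser   # Python 2

class Configuration(object):
    def __init__(self, filename='bot.cfg'):   # placeholder filename
        self._parser = ConfigParser()
        self._parser.read(filename)

    def get(self, section, option):
        # Mirrors config.get('connection', 'jid') in Example #2
        return self._parser.get(section, option)

    def getboolean(self, section, option):
        # Mirrors config.getboolean('connection', 'debug') in Example #2
        return self._parser.getboolean(section, option)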
Example #3
    def load_preferences(self):
        def set_selection_in_view(widget: Gtk.Widget,
                                  selections: list) -> None:
            tree_selection = widget.get_selection()
            for i, source in enumerate(widget.get_model()):
                if source[1] in selections:
                    tree_selection.select_path(Gtk.TreePath(i))

        config = Configuration()
        self.switch_random.set_active(config.get('random'))
        self.set_selection_mode(self.switch_random.get_active())
        set_selection_in_view(self.treeview_source, config.get('source'))
Example #4
    def send_notification_mails(self, member):
        """Send the notification mail"""
        config = Configuration()
        sender_address = config.get('WELCOME_MAIL_SENDER')
        subject = config.get('NOTIFICATION_MAIL_SUBJECT')
        recipients = config.get('NOTIFICATION_MAIL_RECIPIENTS')

        mail_template = JINJA_ENVIRONMENT.get_template(
            'templates/emails/notification_signup.txt')
        data = {'member': member, 'server_url': SERVER_URL}
        body = mail_template.render(data)

        mail.send_mail(sender_address, recipients, subject, body)
Example #5
    def __init__(self, mq_server=None, mq_name=None, logger=None):
        """__init__

        :param mq_server:
        :param mq_name:
        """
        self.mq_server = mq_server if mq_server else Configuration.get("mq_server")
        self.mq_name = mq_name if mq_name else Configuration.get("mq_name")
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.mq_server))
        self.mq_channel = connection.channel()
        self.mq_channel.queue_declare(self.mq_name, durable=True)
        self.logger = logger if logger else Logger.get(self.__class__.__name__)
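
Examples #5 and #7 stop after declaring the durable queue. As a hedged usage sketch, a message could then be published on the same channel roughly as follows; the publish helper below is an assumption and not part of the original code.

import pika

def publish(mq_channel, mq_name, message):
    # Hypothetical helper: publish a persistent message to the queue declared
    # in Example #5 (delivery_mode=2 marks the message persistent, matching
    # the durable queue).
    mq_channel.basic_publish(exchange='',
                             routing_key=mq_name,
                             body=message,
                             properties=pika.BasicProperties(delivery_mode=2))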
Example #6
    def send_welcome_mail(self, member):
        """Send welcom email with attachments"""
        config = Configuration()
        sender_address = config.get('WELCOME_MAIL_SENDER')
        subject = config.get('WELCOME_MAIL_SUBJECT')
        account_no = config.get('GIRO_ACCOUNT_NO')

        mail_template = Template(config.get('WELCOME_MAIL_TEXT'))

        data = {
            'member': member,
            'year': datetime.date.today().year,
            'accountno': account_no,
            'profile_url': constants.PROFILE_URL
        }
        body = mail_template.render(data)

        buf = cStringIO.StringIO()
        address = member.name + '\n' + member.address + '\n' + member.zipcode + ' ' + member.city
        if member.country.name != 'Norge':
            address = address + '\n' + member.country.name

        body_template = Template(config.get('GIRO_TEXT'))
        message_template = Template(config.get('GIRO_MESSAGE'))

        data = {
            'member_no': member.number,
            'account_no': account_no,
            'access_code': member.edit_access_code,
            'profile_url': constants.PROFILE_URL
        }

        due_date = datetime.datetime.now() + datetime.timedelta(days=14)
        due_date_str = due_date.strftime('%d.%m.%Y')

        current_date = datetime.datetime.now()
        if current_date.month >= 7:
            fee = member.member_type.fee / 2
        else:
            fee = member.member_type.fee

        pdf = PdfGenerator(member_address=address,
                           club_address=config.get('GIRO_ADDRESS'),
                           account_no=account_no,
                           member_no=member.number,
                           access_code=member.edit_access_code,
                           profile_url=constants.PROFILE_URL,
                           heading=config.get('GIRO_SUBJECT'),
                           body=body_template.render(data),
                           fee=fee,
                           due_date=due_date_str,
                           payment_message=message_template.render(data))

        pdf.generate_pdf(buf)

        mail.send_mail(sender_address,
                       member.email,
                       subject,
                       body,
                       attachments=[('kontingent.pdf', buf.getvalue())])
Example #7
    def __init__(self, mq_server=None, mq_name=None, logger=None):
        """__init__

        :param mq_server:
        :param mq_name:
        """
        self.mq_server = mq_server if mq_server else Configuration.get(
            "mq_server")
        self.mq_name = mq_name if mq_name else Configuration.get("mq_name")
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.mq_server))
        self.mq_channel = connection.channel()
        self.mq_channel.queue_declare(self.mq_name, durable=True)
        self.logger = logger if logger else Logger.get(self.__class__.__name__)
Example #8
    def send_notification_mails(self, member):
        """Send the notification mail"""
        config = Configuration()
        sender_address = config.get('WELCOME_MAIL_SENDER')
        subject = config.get('NOTIFICATION_MAIL_SUBJECT')
        recipients = config.get('NOTIFICATION_MAIL_RECIPIENTS')

        mail_template = JINJA_ENVIRONMENT.get_template('templates/emails/notification_signup.txt')
        data = {
            'member': member,
            'server_url': SERVER_URL
        }
        body = mail_template.render(data)

        mail.send_mail(sender_address, recipients, subject, body)
Example #9
    def __init__(self, mq_server=None, mq_name=None, logger=None):
        """__init__

        :param mq_server:
        :param mq_name:
        :param logger:
        """
        self.mq_server = mq_server if mq_server else Configuration.get("mq_server")
        self.mq_name = mq_name if mq_name else Configuration.get("mq_name")

        self.mq_connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.mq_server))
        self.mq_channel = self.mq_connection.channel()

        tmp_queue = self.mq_channel.queue_declare(exclusive=True)
        self.callback_queue = tmp_queue.method.queue
        self.mq_channel.basic_consume(self.on_resonse, no_ack=True, queue=self.callback_queue)

        self.logger = logger if logger else Logger.get(self.__class__.__name__)
Example #10
def setup_logging():
    """
    Set up logging module according to options in application's configuration
    file.
    """
    import logging
    import os.path
    from twisted.python import log
    from config import Configuration

    config = Configuration()

    levels_map = {'CRITICAL': logging.CRITICAL, 'ERROR': logging.ERROR,
                  'WARNING': logging.WARNING, 'INFO': logging.INFO,
                  'DEBUG': logging.DEBUG}

    level_str = config.get('logging', 'level')
    filename = config.get('logging', 'filename')

    try:
        level = levels_map[level_str]
    except KeyError:
        default = logging.INFO
        print ('Unknown logging level %s, using default %s'
               % (level_str, logging.getLevelName(default)))
        level = default

    if filename is None or filename == '':
        filename = 'stdout'

    if filename == 'stdout':
        filepath = None
    else:
        filepath = os.path.join(get_app_dir(), filename)

    # http://twistedmatrix.com/documents/current/core/howto/logging.html#auto3
    observer = log.PythonLoggingObserver()
    observer.start()

    print ("Openning log '%s' with level %s"
           % (filepath if filepath else filename, logging.getLevelName(level)))

    logging.basicConfig(level=level, filename=filepath)
Example #11
def change_wallpaper():
    config = Configuration()
    if config.get('random'):
        sources = config.get('source')
        source = sources[randrange(len(sources))]
    else:
        source = config.get('source')[0]
    module = importlib.import_module(source)
    daily = module.get_daily()
    if daily.resolve_url():
        if download(daily.get_url()):
            if daily.get_title():
                title = '{}: {}'.format(daily.get_name(), daily.get_title())
            else:
                title = daily.get_name()
            caption = daily.get_caption()
            credit = daily.get_credit()
            notify_photo_caption(title, caption, credit)
            set_background(comun.POTD)
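
Examples #3, #11 and #19 all read the same two options: 'random' (a boolean) and 'source' (a list of importable provider module names, given that Example #11 passes an entry straight to importlib.import_module). A sketch of the shape those values are assumed to take, with placeholder module names:

# Assumed shape of the options read via config.get('random') / config.get('source');
# the module names below are placeholders, not the project's actual providers.
example_options = {
    'random': True,                        # pick a random source each run
    'source': ['nasa', 'bing', 'natgeo'],  # importable wallpaper-provider modules
}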
Example #12
    def setup(self, log_folder=None, file_name="stock_tracer_{0}.log"):
        """setup

        :param log_folder:
        :param file_name:
        """
        log_config = Configuration.get("logging")

        logger = logging.getLogger('stock_tracer')
        logger.setLevel(logging.DEBUG)
        formatter = logging.Formatter('[%(asctime)s] - [%(name)s] - [%(levelname)s] - %(message)s')

        # create file log
        if log_config["file"]:
            log_folder = log_folder if log_folder else DEFAULT_LOGFOLDER
            if not path.exists(log_folder):
                makedirs(log_folder)

            log_file_name = file_name.format(datetime.now().strftime("%Y-%m-%d"))
            fh = logging.FileHandler(path.join(log_folder, log_file_name))
            fh.setLevel(logging.DEBUG)
            fh.setFormatter(formatter)
            logger.addHandler(fh)

        # create console log
        if log_config["console"]:
            ch = logging.StreamHandler()
            ch.setLevel(logging.DEBUG)
            ch.setFormatter(formatter)
            logger.addHandler(ch)

        # create es log
        if log_config["es"]:
            es_logging_handler = ESLoggingHandler([{'host': Configuration.get('es_host'), 'port': 9200}],
                                                  es_index_name="stock_log")
            es_logging_handler.setLevel(logging.DEBUG)
            es_logging_handler.setFormatter(formatter)
            logger.addHandler(es_logging_handler)

        self.is_initialized = True
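
Example #12 expects Configuration.get("logging") to return a mapping with 'file', 'console' and 'es' switches (the Elasticsearch host is read separately via Configuration.get('es_host')). A sketch of that assumed shape, with placeholder values:

# Assumed shape of the value returned by Configuration.get("logging") in Example #12.
example_logging_config = {
    "file": True,      # write stock_tracer_<date>.log under log_folder
    "console": True,   # also attach a StreamHandler
    "es": False,       # ship records to Elasticsearch via ESLoggingHandler
}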
Example #13
    def __init__(self, mq_server=None, mq_name=None, logger=None):
        """__init__

        :param mq_server:
        :param mq_name:
        :param logger:
        """
        self.mq_server = mq_server if mq_server else Configuration.get(
            "mq_server")
        self.mq_name = mq_name if mq_name else Configuration.get("mq_name")

        self.mq_connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.mq_server))
        self.mq_channel = self.mq_connection.channel()

        tmp_queue = self.mq_channel.queue_declare(exclusive=True)
        self.callback_queue = tmp_queue.method.queue
        self.mq_channel.basic_consume(self.on_resonse,
                                      no_ack=True,
                                      queue=self.callback_queue)

        self.logger = logger if logger else Logger.get(self.__class__.__name__)
Example #14
    def get(self):
        self.response.headers['Content-Type'] = 'application/pdf'

        config = Configuration()

        address = 'Ola Nordmann\nNorskeveien 1\n9876 Olabyen'
        member_no = '9876'
        access_code = 'BBQLOL'
        fee = 400
        profile_url = constants.PROFILE_URL
        account_no = config.get('GIRO_ACCOUNT_NO')


        body_template = Template(config.get('GIRO_TEXT'))
        message_template = Template(config.get('GIRO_MESSAGE'))

        data = { 'member_no': member_no, 'account_no': account_no, 'access_code': access_code, 'profile_url': profile_url }

        pdf = PdfGenerator(member_address=address, club_address=config.get('GIRO_ADDRESS'), account_no=account_no,
            member_no=member_no, access_code=access_code, profile_url=profile_url,
            heading=config.get('GIRO_SUBJECT'), body=body_template.render(data), fee=fee, due_date='12.12.2012', payment_message=message_template.render(data))

        pdf.generate_pdf(self.response.out)
Example #15
    def get(self, logger_name):
        """get logging instance

        :param logger_name:
        """
        if not self.is_initialized:
            self.setup(Configuration.get("log_folder"))

        if "stock_tracer" not in logger_name:
            logger_name = "stock_tracer." + logger_name

        logger = logging.getLogger(logger_name)
        logger.disabled = False
        return logger
Example #16
    def send_welcome_mail(self, member):
        """Send welcom email with attachments"""
        config = Configuration()
        sender_address = config.get('WELCOME_MAIL_SENDER')
        subject = config.get('WELCOME_MAIL_SUBJECT')
        account_no = config.get('GIRO_ACCOUNT_NO')

        mail_template = Template(config.get('WELCOME_MAIL_TEXT'))

        data = {
            'member': member,
            'year': datetime.date.today().year,
            'accountno': account_no,
            'profile_url': constants.PROFILE_URL
        }
        body = mail_template.render(data)

        buf = cStringIO.StringIO()
        address = member.name + '\n' + member.address + \
            '\n' + member.zipcode + ' ' + member.city
        if member.country.name != 'Norge':
            address = address + '\n' + member.country.name

        body_template = Template(config.get('GIRO_TEXT'))
        message_template = Template(config.get('GIRO_MESSAGE'))

        data = {'member_no': member.number, 'account_no': account_no,
                'access_code': member.edit_access_code, 'profile_url': constants.PROFILE_URL}

        due_date = datetime.datetime.now() + datetime.timedelta(days=14)
        due_date_str = due_date.strftime('%d.%m.%Y')

        current_date = datetime.datetime.now()
        if current_date.month >= 7:
            fee = member.member_type.fee / 2
        else:
            fee = member.member_type.fee

        pdf = PdfGenerator(member_address=address, club_address=config.get('GIRO_ADDRESS'), account_no=account_no,
                           member_no=member.number, access_code=member.edit_access_code, profile_url=constants.PROFILE_URL,
                           heading=config.get('GIRO_SUBJECT'), body=body_template.render(data), fee=fee,
                           due_date=due_date_str, payment_message=message_template.render(data))

        pdf.generate_pdf(buf)

        mail.send_mail(sender_address, member.email, subject,
                       body, attachments=[('kontingent.pdf', buf.getvalue())])
Example #17
    def get(self):
        config = Configuration()
        ruler = """
--------------------------------------------------------------------------------
0________1_________2_________3_________4_________5_________6_________7_________8
1        0         0         0         0         0         0         0         0
--------------------------------------------------------------------------------
"""
        template = Template(config.get('WELCOME_MAIL_TEXT'))

        member = Member()
        member.name = 'Ola Normann'
        member.address = 'Norskeveien 1'
        member.zipcode = '9876'
        member.city = 'Olabyen'
        member.country = Country().all().order('order').fetch(1)[0]
        member.email = '*****@*****.**'
        member.phone = '916 75 105'
        member.phone_home = '939 90 115'
        member.phone_work = '101 33 116'
        member.number = '9669'
        member.access_code = 'BBQWTF'
        member.member_type = MemberType.all().order('order').fetch(1)[0]
        sample_data = {
            'year': 2014,
            'fee': 400,
            'account_no': config.get('GIRO_ACCOUNT_NO'),
            'member': member,
            'profile_url': constants.PROFILE_URL
        }
        sample_text = template.render(sample_data)
        # Merge template before submitting text
        data = {'text': ruler + sample_text + ruler}

        template = JINJA_ENVIRONMENT.get_template(
            'templates/settings/email_preview.html')
        self.response.write(template.render(data))
Example #18
    def get(self):
        config = Configuration()
        ruler = """
--------------------------------------------------------------------------------
0________1_________2_________3_________4_________5_________6_________7_________8
1        0         0         0         0         0         0         0         0
--------------------------------------------------------------------------------
"""
        template = Template(config.get('WELCOME_MAIL_TEXT'))

        member = Member()
        member.name = 'Ola Normann'
        member.address = 'Norskeveien 1'
        member.zipcode = '9876'
        member.city = 'Olabyen'
        member.country = Country().all().order('order').fetch(1)[0]
        member.email = '*****@*****.**'
        member.phone = '916 75 105'
        member.phone_home = '939 90 115'
        member.phone_work = '101 33 116'
        member.number = '9669'
        member.access_code = 'BBQWTF'
        member.member_type = MemberType.all().order('order').fetch(1)[0]
        sample_data = {
            'year': 2014,
            'fee': 400,
            'account_no': config.get('GIRO_ACCOUNT_NO'),
            'member': member,
            'profile_url': constants.PROFILE_URL
        }
        sample_text = template.render(sample_data)
        # Merge template before submitting text
        data = {'text': ruler + sample_text + ruler}

        template = JINJA_ENVIRONMENT.get_template('templates/settings/email_preview.html')
        self.response.write(template.render(data))
Example #19
 def load_preferences(self):
     config = Configuration()
     select_value_in_combo(self.combobox_source, config.get('source'))
     self.switch_random.set_active(config.get('random'))
     self.set_source_state(not config.get('random'))
Example #20
import os
from functools import wraps

from flask import Response

from api.app import app
from config import Configuration

from core.util.problem_detail import ProblemDetail
from core.app_server import returns_problem_detail

from controller import setup_admin_controllers
from templates import (
    admin as admin_template,
    admin_sign_in_again as sign_in_again_template,
)

# The secret key is used for signing cookies for admin login
app.secret_key = Configuration.get(Configuration.SECRET_KEY)

@app.before_first_request
def setup_admin():
    if getattr(app, 'manager', None) is not None:
        setup_admin_controllers(app.manager)

def requires_admin(f):
    @wraps(f)
    def decorated(*args, **kwargs):
        admin = app.manager.admin_sign_in_controller.authenticated_admin_from_request()
        if isinstance(admin, ProblemDetail):
            return app.manager.admin_sign_in_controller.error_response(admin)
        elif isinstance(admin, Response):
            return admin
        return f(*args, **kwargs)
    return decorated
Example #21
class AcquisitionFeed(OPDSFeed):

    FACET_REL = "http://opds-spec.org/facet"
    FEED_CACHE_TIME = int(Configuration.get('default_feed_cache_time', 600))

    @classmethod
    def groups(cls, _db, title, url, lane, annotator,
               force_refresh=False, use_materialized_works=True):
        """The acquisition feed for 'featured' items from a given lane's
        sublanes, organized into per-lane groups.
        """
        # Find or create a CachedFeed.
        cached, usable = CachedFeed.fetch(
            _db,
            lane=lane,
            type=CachedFeed.GROUPS_TYPE,
            facets=None,
            pagination=None,
            annotator=annotator,
            force_refresh=force_refresh
        )
        if usable:
            return cached

        works_and_lanes = lane.sublane_samples(
            use_materialized_works=use_materialized_works
        )
        if not works_and_lanes:
            # We did not find enough works for a groups feed.
            # Instead we need to display a flat feed--the
            # contents of what would have been the 'all' feed.
            if not isinstance(lane, Lane):
                # This is probably a top-level controller or
                # application object.  Create a dummy lane that
                # contains everything.
                lane = Lane(_db, "Everything")
            # Generate a page-type feed that is filed as a
            # groups-type feed so it will show up when the client
            # asks for it.
            cached = cls.page(
                _db, title, url, lane, annotator,
                cache_type=CachedFeed.GROUPS_TYPE,
                force_refresh=force_refresh,
                use_materialized_works=use_materialized_works
            )
            return cached

        if lane.include_all_feed:
            # Create an 'all' group so that patrons can browse every
            # book in this lane.
            works = lane.featured_works(
                use_materialized_works=use_materialized_works
            )
            for work in works:
                works_and_lanes.append((work, None))

        all_works = []
        for work, sublane in works_and_lanes:
            if sublane is None:
                # This work is in the (e.g.) 'All Science Fiction'
                # group. Whether or not this lane has sublanes,
                # the group URI will point to a linear feed, not a
                # groups feed.
                v = dict(
                    lane=lane,
                    label='All ' + lane.display_name,
                    link_to_list_feed=True,
                )
            else:
                v = dict(
                    lane=sublane
                )
            annotator.lanes_by_work[work].append(v)
            all_works.append(work)

        feed = AcquisitionFeed(
            _db, title, url, all_works, annotator,
        )

        # Render a 'start' link and an 'up' link.
        top_level_title = annotator.top_level_title() or "Collection Home"
        AcquisitionFeed.add_link_to_feed(feed=feed.feed, href=annotator.default_lane_url(), rel="start", title=top_level_title)

        if isinstance(lane, Lane):
            visible_parent = lane.visible_parent()
            if isinstance(visible_parent, Lane):
                title = visible_parent.display_name
            else:
                title = top_level_title
            up_uri = annotator.groups_url(visible_parent)
            AcquisitionFeed.add_link_to_feed(feed=feed.feed, href=up_uri, rel="up", title=title)
            feed.add_breadcrumbs(lane, annotator)
        
        annotator.annotate_feed(feed, lane)

        content = unicode(feed)
        cached.update(content)
        return cached

    @classmethod
    def page(cls, _db, title, url, lane, annotator=None,
             facets=None, pagination=None,
             cache_type=None, force_refresh=False,
             use_materialized_works=True
    ):
        """Create a feed representing one page of works from a given lane."""
        facets = facets or Facets.default()
        pagination = pagination or Pagination.default()
        cache_type = cache_type or CachedFeed.PAGE_TYPE

        # Find or create a CachedFeed.
        cached, usable = CachedFeed.fetch(
            _db,
            lane=lane,
            type=cache_type,
            facets=facets,
            pagination=pagination,
            annotator=annotator,
            force_refresh=force_refresh
        )
        if usable:
            return cached

        if use_materialized_works:
            works_q = lane.materialized_works(facets, pagination)
        else:
            works_q = lane.works(facets, pagination)

        if not works_q:
            works = []
        else:
            works = works_q.all()
        feed = cls(_db, title, url, works, annotator)

        # Add URLs to change faceted views of the collection.
        for args in cls.facet_links(annotator, facets):
            OPDSFeed.add_link_to_feed(feed=feed.feed, **args)

        if len(works) > 0:
            # There are works in this list. Add a 'next' link.
            OPDSFeed.add_link_to_feed(feed=feed.feed, rel="next", href=annotator.feed_url(lane, facets, pagination.next_page))

        if pagination.offset > 0:
            OPDSFeed.add_link_to_feed(feed=feed.feed, rel="first", href=annotator.feed_url(lane, facets, pagination.first_page))

        previous_page = pagination.previous_page
        if previous_page:
            OPDSFeed.add_link_to_feed(feed=feed.feed, rel="previous", href=annotator.feed_url(lane, facets, previous_page))

        # Add "up" link and breadcrumbs
        top_level_title = annotator.top_level_title() or "Collection Home"
        visible_parent = lane.visible_parent()
        if isinstance(visible_parent, Lane):
            title = visible_parent.display_name
        else:
            title = top_level_title
        if visible_parent:
            up_uri = annotator.lane_url(visible_parent)
            OPDSFeed.add_link_to_feed(feed=feed.feed, href=up_uri, rel="up", title=title)
            feed.add_breadcrumbs(lane, annotator)

        OPDSFeed.add_link_to_feed(feed=feed.feed, rel='start', href=annotator.default_lane_url(), title=top_level_title)
        
        annotator.annotate_feed(feed, lane)

        content = unicode(feed)
        cached.update(content)
        return cached

    @classmethod
    def search(cls, _db, title, url, lane, search_engine, query, pagination=None,
               annotator=None
    ):
        if not isinstance(lane, Lane):
            search_lane = Lane(
                _db, "Everything", searchable=True, fiction=Lane.BOTH_FICTION_AND_NONFICTION)
        else:
            search_lane = lane

        results = search_lane.search(query, search_engine, pagination=pagination)
        opds_feed = AcquisitionFeed(_db, title, url, results, annotator=annotator)
        AcquisitionFeed.add_link_to_feed(feed=opds_feed.feed, rel='start', href=annotator.default_lane_url(), title=annotator.top_level_title())

        if len(results) > 0:
            # There are works in this list. Add a 'next' link.
            AcquisitionFeed.add_link_to_feed(feed=opds_feed.feed, rel="next", href=annotator.search_url(lane, query, pagination.next_page))

        if pagination.offset > 0:
            AcquisitionFeed.add_link_to_feed(feed=opds_feed.feed, rel="first", href=annotator.search_url(lane, query, pagination.first_page))

        previous_page = pagination.previous_page
        if previous_page:
            AcquisitionFeed.add_link_to_feed(feed=opds_feed.feed, rel="previous", href=annotator.search_url(lane, query, previous_page))

        # Add "up" link and breadcrumbs
        AcquisitionFeed.add_link_to_feed(feed=opds_feed.feed, rel="up", href=annotator.lane_url(search_lane), title=lane.display_name)
        opds_feed.add_breadcrumbs(search_lane, annotator, include_lane=True)

        annotator.annotate_feed(opds_feed, lane)
        return unicode(opds_feed)

    @classmethod
    def single_entry(cls, _db, work, annotator, force_create=False):
        """Create a single-entry feed for one specific work."""
        feed = cls(_db, '', '', [], annotator=annotator)
        if not isinstance(work, Edition) and not work.presentation_edition:
            return None
        return feed.create_entry(work, None, even_if_no_license_pool=True,
                                 force_create=force_create)

    @classmethod
    def error_message(cls, identifier, error_status, error_message):
        """Turn an error result into an OPDSMessage suitable for
        adding to a feed.
        """
        return OPDSMessage(identifier.urn, error_status, error_message)

    @classmethod
    def facet_links(self, annotator, facets):
        for group, value, new_facets, selected, in facets.facet_groups:
            url = annotator.facet_url(new_facets)
            if not url:
                continue
            group_title = Facets.GROUP_DISPLAY_TITLES[group]
            facet_title = Facets.FACET_DISPLAY_TITLES[value]
            link = dict(href=url, title=facet_title)
            link['rel'] = self.FACET_REL
            link['{%s}facetGroup' % AtomFeed.OPDS_NS] = group_title
            if selected:
                link['{%s}activeFacet' % AtomFeed.OPDS_NS] = "true"
            yield link


    def __init__(self, _db, title, url, works, annotator=None,
                 precomposed_entries=[]):
        """Turn a list of works, messages, and precomposed <opds> entries
        into a feed.
        """
        if not annotator:
            annotator = Annotator()
        self.annotator = annotator

        super(AcquisitionFeed, self).__init__(title, url)

        lane_link = dict(rel="collection", href=url)
        for work in works:
            self.add_entry(work, lane_link)

        # Add the precomposed entries and the messages.
        for entry in precomposed_entries:
            if isinstance(entry, OPDSMessage):
                entry = entry.tag
            self.feed.append(entry)

    def add_entry(self, work, lane_link):
        """Attempt to create an OPDS <entry>. If successful, append it to
        the feed.
        """
        entry = self.create_entry(work, lane_link)
        if entry is not None:
            if isinstance(entry, OPDSMessage):
                entry = entry.tag
            self.feed.append(entry)
        return entry

    def create_entry(self, work, lane_link, even_if_no_license_pool=False,
                     force_create=False, use_cache=True):
        """Turn a work into an entry for an acquisition feed."""
        identifier = None
        if isinstance(work, Edition):
            active_edition = work
            identifier = active_edition.primary_identifier
            active_license_pool = None
            work = None
        else:
            active_license_pool = self.annotator.active_licensepool_for(work)
            if not work:
                # We have a license pool but no work. Most likely we don't have
                # metadata for this work yet.
                return None

            if isinstance(work, BaseMaterializedWork):
                identifier = work.license_pool.identifier
                active_edition = None
            elif active_license_pool:
                identifier = active_license_pool.identifier
                active_edition = active_license_pool.presentation_edition
            elif work.presentation_edition:
                active_edition = work.presentation_edition
                identifier = active_edition.primary_identifier

        # There's no reason to present a book that has no active license pool.
        if not identifier:
            logging.warn("%r HAS NO IDENTIFIER", work)
            return None

        if not active_license_pool and not even_if_no_license_pool:
            logging.warn("NO ACTIVE LICENSE POOL FOR %r", work)
            return self.error_message(
                identifier,
                403,
                "I've heard about this work but have no active licenses for it."
            )

        if not active_edition and not isinstance(work, BaseMaterializedWork):
            logging.warn("NO ACTIVE EDITION FOR %r", active_license_pool)
            return self.error_message(
                identifier,
                403,
                "I've heard about this work but have no metadata for it."
            )

        try:
            return self._create_entry(work, active_license_pool, active_edition,
                                      identifier, lane_link, force_create, 
                                      use_cache)
        except UnfulfillableWork, e:
            logging.info(
                "Work %r is not fulfillable, refusing to create an <entry>.",
                work,
            )
            return self.error_message(
                identifier, 
                403,
                "I know about this work but can offer no way of fulfilling it."
            )
        except Exception, e:
            logging.error(
                "Exception generating OPDS entry for %r", work,
                exc_info = e
            )
            return None
Example #22
 def instance():
     return Configuration.get()
Example #23
import requests
from requests.exceptions import ConnectionError
import sys
sys.path.append("../")
from config import Configuration
import json

conf = Configuration("../cat.conf")
headers = {
    'Content-Type': 'application/json',
    'Authorization': 'Bearer ' + conf.get("DO_AUTHKEY")
}
response = requests.get(conf.get("DO_APIHOST") + "droplets", headers=headers)
print response.text
response = json.loads(response.text)

for droplets in response['droplets']:
    res = requests.delete(
        conf.get("DO_APIHOST") + 'droplets/' + str(droplets['id']))
    print res
Example #24
(The opening lines of this snippet are missing from the source listing; it begins partway through an argparse argument definition.)
                        action='store',
                        default='',
                        type=str,
                        help='label for output')

    parser.add_argument('-f',
                        dest='fname',
                        action='store',
                        default='config.cfg',
                        type=str,
                        help='config filename')

    args = parser.parse_args()

    config = Configuration(args.configuration, args.fname)
    x, p, dm, kernels, priors = config.get()

    if args.label != "" and not args.label in os.listdir(args.configuration):
        os.mkdir(os.path.join(args.configuration, args.label))

    if 'data.csv' in os.path.join(args.configuration, args.label):
        raise ValueError("dataset already exists at %s!" %
                         os.path.join(args.configuration, args.label))

    betaTrue = np.zeros((x.shape[0], dm.shape[0]))

    for f in range(config.f):
        ind = min(np.where(f < config.cumnf)[0]) + 1
        # print f,ind

        kernel = kernels[ind]
Example #25
app = Flask(__name__)

testing = 'TESTING' in os.environ
db_url = Configuration.database_url(testing)
SessionManager.initialize(db_url)
session_factory = SessionManager.sessionmaker(db_url)
_db = flask_scoped_session(session_factory, app)
SessionManager.initialize_data(_db)

app.config['BABEL_DEFAULT_LOCALE'] = LanguageCodes.three_to_two[Configuration.localization_languages()[0]]
app.config['BABEL_TRANSLATION_DIRECTORIES'] = "../translations"
babel = Babel(app)

import routes
if Configuration.get(Configuration.INCLUDE_ADMIN_INTERFACE):
    import admin.routes

debug = Configuration.logging_policy().get("level") == 'DEBUG'
logging.getLogger().info("Application debug mode==%r" % debug)
app.config['DEBUG'] = debug
app.debug = debug

def run():
    debug = True
    url = Configuration.integration_url(
        Configuration.CIRCULATION_MANAGER_INTEGRATION, required=True)
    scheme, netloc, path, parameters, query, fragment = urlparse.urlparse(url)
    if ':' in netloc:
        host, port = netloc.split(':')
        port = int(port)
Example #26
class ConversionProcess():
    """
    Class impolementing conversion process. Run command defined in application's
    configuration file and control it. When process is finished unexpectedly,
    log its returncode and stderr and stdout.

    State of process coluld be checked by this properties: started, finished,
    paused, cancelled, pid. When process is finished its status is in additional
    properties: returncode, stderr, stdout.

    Process support this operations: run, terminate, pause, resume.
    """
    def __init__(self, input_file, sub_file, output_file, log_stdout=False):
        """
        Store information about input and output files and subtitles. Store if
        log stdout and set object's attributes.
        @param input_file str, Path to input file
        @param sub_file str, Path to subtitles file
        @param output_file str, Path to output file
        @param log_stdout bool, Store stdout after process finish
        """
        self.input_file = input_file
        self.sub_file = sub_file
        self.output_file = output_file
        self.log_stdout = log_stdout

        self.config = Configuration()
        self.logger = logging.getLogger(self.__class__.__name__)

        self.process_transport = None
        self.process_protocol = None

        self.started = False
        self.finished = False
        self.paused = False
        self.cancelled = False
        self.deferred = defer.Deferred()
        self.deferred.addErrback(self.process_exited)

        self.pid = None
        self.returncode = None
        self.stderr = None
        self.stdout = None

    def run(self):
        """
        Start the conversion process.
        @return t.i.d.Deferred
        """
        assert not self.started

        conversion_command = self.get_conversion_command()
        conversion_command = encode(conversion_command)

        args = shlex.split(conversion_command)
        executable = args[0]

        self.open_stdout_log()
        self.open_stderr_log()

        proto = WatchingProcessProtocol(self.deferred)

        proto.outReceived = lambda data: self.stdout_log.write(data)
        proto.errReceived = lambda data: self.stderr_log.write(data)

        self.logger.info('Starting conversion process of %s', self.input_file)

        kwargs = {}

        if sys.platform == 'win32':
            import win32process
            kwargs['win32flags'] = win32process.CREATE_NO_WINDOW

        self.process_transport = reactor.spawnProcess(proto, executable, args, **kwargs)

        self.process_protocol = proto
        self.pid = self.process_transport.pid
        self.started = True

        return self.deferred

    def terminate(self):
        """
        Terminate the running process: terminate the OS process and cancel the
        deferred.
        """
        if self.finished:
            return

        self.logger.info('Terminating conversion process of %s', self.input_file)

        try:
            self.process_transport.signalProcess('TERM')
        except error.ProcessExitedAlready:
            return  # process already exited, so it was callbacked

        self.deferred.cancel()

    def pause(self):
        """
        Pause the running process.
        """
        assert self.started
        assert not self.finished
        assert not self.paused

        p = self._get_psutil_process()
        if p:
            p.suspend()
            self.paused = True
        else:
            self.logger.debug('psutil process is None')

    def resume(self):
        """
        Resume paused process.
        """
        assert self.started
        assert not self.finished
        assert self.paused

        self.paused = False

        p = self._get_psutil_process()
        if p:
            p.resume()
        else:
            self.logger.debug('psutil process is None')

    def get_conversion_command(self):
        """
        Build the conversion command from patterns in the application's config
        file and the process's attributes.
        @return str, Conversion command to execute
        """
        if sys.platform in ('win32', 'cygwin'):
            convertor_exe = self.config.get('command', 'convertor_exe_win')
        else:
            convertor_exe = self.config.get('command', 'convertor_exe_unix')

        convertor_exe = '"' + convertor_exe + '"'  # spaces in path

        input_file_alias = self.config.get('command', 'input_file_alias')
        output_file_alias = self.config.get('command', 'output_file_alias')
        convertor_args = self.config.get('command', 'convertor_args')

        convertor_args = convertor_args.replace(input_file_alias,
                                                self.input_file)
        convertor_args = convertor_args.replace(output_file_alias,
                                                self.output_file)

        convertor_args = self.extend_command_by_sub(self.sub_file,
                                                    convertor_args)

        conversion_command = convertor_exe + ' ' + convertor_args

        self.logger.debug('Convert command: ' + conversion_command)

        return conversion_command

    def extend_command_by_sub(self, sub_file, conversion_command):
        """
        Expand the command's subtitles alias with subtitle params, or with
        nothing if subtitles are not requested.
        @param sub_file str, Path of subtitles file
        @param conversion_command str, Command without expanded alias for
            subtitles
        @return str, Expanded command
        """
        sub_params = ''
        if sub_file:
            sub_file_alias = self.config.get('command', 'subtitle_file_alias')
            sub_params = self.config.get('command', 'subtitle_params')
            sub_params = sub_params.replace(sub_file_alias, sub_file)

        sub_params_alias = self.config.get('command', 'subtitle_params_alias')
        conversion_command = conversion_command.replace(sub_params_alias,
                                                        sub_params)
        return conversion_command

    def open_stderr_log(self):
        """
        Open temporary file to write stderr of process.
        @return int, FD of file
        """
        self.stderr_log = tempfile.TemporaryFile()
        return self.stderr_log.fileno()

    def open_stdout_log(self):
        """
        Open temporary file to write stdout of process.
        @return int, FD of file
        """
        self.stdout_log = tempfile.TemporaryFile()
        return self.stdout_log.fileno()

    @defer.inlineCallbacks
    def process_exited(self, failure):
        """
        Called back when the OS process finishes. Sets the finished status and
        stores the return code and the process's logs.
        @param failure t.p.f.Failure, t.i.e.ProcessDone or
            t.i.e.ProcessTerminated
        @return t.i.d.Deferred
        """
        self.finished = True

        status_type = failure.trap(error.ProcessDone, error.ProcessTerminated)

        try:
            status = failure.value
            if status_type is error.ProcessDone:
                self.returncode = 0
            elif status_type is error.ProcessTerminated:
                if status.exitCode is not None:
                    self.returncode = status.exitCode
                elif status.signal is not None:
                    self.returncode = -1 * status.signal
                else:
                    raise ValueError('Unknown exit status')

            self.logger.info('Conversion process of %s exited with status %s',
                             self.input_file, self.returncode)

            self.stderr = yield self.read_from_temp_file(self.stderr_log)

            if self.log_stdout:
                self.stdout = yield self.read_from_temp_file(self.stdout_log)
        finally:
            self.stdout_log.close()
            self.stderr_log.close()

    @async_function
    def read_from_temp_file(self, log_file):
        """
        Read data from log of process.
        @param log_file str, Path to file with log
        @return t.i.d.Deferred, str
        """
        log_file.seek(0)
        buff = log_file.read()
        data = decode(buff)
        return data

    def _get_psutil_process(self):
        # psutil provides cross-platform process stop & cont orders
        import psutil
        p = psutil.Process(self.pid)
        return (None if p.status in (psutil.STATUS_DEAD, psutil.STATUS_ZOMBIE) else p)
Example #27
 def __init__(self, basic_auth_provider=None, oauth_providers=None):
     self.basic_auth_provider = basic_auth_provider
     self.oauth_providers = oauth_providers or []
     self.secret_key = Configuration.get(Configuration.SECRET_KEY)
Example #28
class Sample(object):

    def __init__(self,config,ds,run,nsample,thin,burnin,levels=-1,randomize=True,randomSamplerOrder=True):

        self.config = Configuration(config,randomizePriors=randomSamplerOrder)

        if randomize:
            self.config.randomize()

        self.run = run
        self.nsample = nsample
        self.thin = thin
        self.burnin = burnin
        self.levels = levels
        self.randomSamplerOrder = randomSamplerOrder

        # default to using all levels
        if self.levels == -1:
            self.levels = self.config.levels

        # how many functions to use
        self.f = sum(self.config.nf[:self.levels+1])

        self.x, self.p, self.dm, self.kernels, self.priors = self.config.get()
        self.yKernel = self.kernels[0]
        for i in range(self.levels+1):
            k = 'k%d'%(i+1)
            self.__dict__[k] = self.kernels[i+1]
        self.kernels = self.kernels[1:self.levels+2]

        if self.config.hierarchy and self.levels < self.config.levels:
            # self.yKernel = self.yKernel.kernels[-1]
            args = self.yKernel.kernels[:self.levels]+[self.yKernel.kernels[-1]]
            self.yKernel = Hierarchical(*args)

        self.y = pd.read_csv(os.path.join(config,ds,'data.csv')).values

        self.dm = self.dm[:self.f,:]

        # self.model = self.config.model
        self.model = Model(self.x,self.y,self.dm)

        if randomize:
            self.model.beta = self.config.model.beta[:,:self.f]

        self._buildSamplers()

        kwargs = {}
        kwargs['yKernel'] = self.yKernel
        for i,k in enumerate(self.kernels):
            kwargs['k%d'%(i+1)] = k

        self.freeze = Freezer(model=self.model,**kwargs)
        self.startSample = self.freeze.freeze()
        self.samples = []

    def load(self,f):
        samples = Analysis.loadSamples(f)
        self.startSample = samples[0]
        self.samples = samples
        self.freeze.push(**self.samples[-1])

    def _buildSamplers(self):
        self.samplers = []

        if self.config.hierarchy:

            self.samplers.append((self.yKernel.__dict__['k%d'%(self.levels+1)],'sigma',
                                    Slice('ySigma',
                                        lambda x: self.model.dataLikelihood(self.yKernel,**{'k%d_sigma'%(self.config.levels+1):x}),
                                        lambda x: self.priors['yKernel']['sigma'].logpdf(x),
                                        self.config.config.getfloat('yKernel','slice-w'),self.config.config.getfloat('yKernel','slice-m'),logspace=True)
                                ))

            for i in range(self.levels):
                self.samplers.append((self.yKernel.__dict__['k%d'%(i+1)],'sigma',
                                        Slice('sigma',
                                            lambda x,i=i: self.model.dataLikelihood(self.yKernel,**{'k%d_sigma'%i:x}),
                                            lambda x: self.priors['k%d'%(i+1)]['sigma'].logpdf(x),
                                            self.config.config.getfloat('k%d'%(i+1),'slice-w'),self.config.config.getfloat('k%d'%(i+1),'slice-m'),logspace=True)
                                    ))

                self.samplers.append((self.yKernel.__dict__['k%d'%(i+1)],'lengthscale',
                                        Slice('lengthscale',
                                            lambda x,i=i: self.model.dataLikelihood(self.yKernel,**{'k%d_lengthscale'%i:x}),
                                            lambda x: self.priors['k%d'%(i+1)]['lengthscale'].logpdf(x),
                                            self.config.config.getfloat('k%d'%(i+1),'slice-w'),self.config.config.getfloat('k%d'%(i+1),'slice-m'),logspace=True)
                                    ))

            self.samplers.append((self.k1,'sigma',
                                    PriorSlice('k1-sigma', self.model,
                                        self.priors['functions'][0], self.priors['k1']['sigma'],
                                        'sigma',
                                        self.config.config.getfloat('k1','slice-w'),
                                        self.config.config.getfloat('k1','slice-m'),
                                        logspace=True)
                                ))

            self.samplers.append((self.k1,'lengthscale',
                                    PriorSlice('k1-lengthscale', self.model,
                                        self.priors['functions'][0], self.priors['k1']['lengthscale'],
                                        'lengthscale',
                                        self.config.config.getfloat('k1','slice-w'),
                                        self.config.config.getfloat('k1','slice-m'),
                                        logspace=True)
                                ))

        else:

            self.samplers.append((self.yKernel,'sigma',
                                    Slice('ySigma',
                                        lambda x: self.model.dataLikelihood(self.yKernel,sigma=x),
                                        lambda x: self.priors['yKernel']['sigma'].logpdf(x),
                                        self.config.config.getfloat('yKernel','slice-w'),self.config.config.getfloat('yKernel','slice-m'),logspace=True)
                                ))

            for i in range(self.levels+1):
                k = 'k%d'%(i+1)

                self.samplers.append((self.__dict__[k],'sigma',
                                        PriorSlice('%s-sigma'%k, self.model,
                                            self.priors['functions'][i], self.priors[k]['sigma'],
                                            'sigma',
                                            self.config.config.getfloat(k,'slice-w'),
                                            self.config.config.getfloat(k,'slice-m'),
                                            logspace=True)
                                    ))

                self.samplers.append((self.__dict__[k],'lengthscale',
                                        PriorSlice('%s-lengthscale'%k, self.model,
                                            self.priors['functions'][i], self.priors[k]['lengthscale'],
                                            'lengthscale',
                                            self.config.config.getfloat(k,'slice-w'),
                                            self.config.config.getfloat(k,'slice-m'),
                                            logspace=True)
                                    ))

    def sample(self):

        for i in range(len(self.samples),self.nsample):
            self._sampleIteration()

            if i % self.thin == 0 and i > self.burnin:
                self.samples.append(self.freeze.freeze())

    def _sampleIteration(self):
        order = range(self.levels+1+len(self.samplers))

        if self.randomSamplerOrder:
            order = np.random.choice(order,len(order),replace=False)

        for o in order:
            if o < self.config.f:
                prior = self.priors['functions'][o]
                prior.sample(self.model,self.yKernel)
            else:
                obj, param, sampler = self.samplers[o-self.levels-1]
                obj.__dict__[param] = sampler.sample(obj.__dict__[param])

        # print

        # for f,prior in self.priors['functions'].iteritems():
        #     if f > self.levels:
        #         continue
        #     prior.sample(self.model,self.yKernel)
        #
        # for obj, param, sampler in self.samplers:
        #     obj.__dict__[param] = sampler.sample(obj.__dict__[param])

    def save(self,dir):
        self.freeze.save(self.samples,os.path.join(dir,'samples.json'))
        self.freeze.save([self.startSample],os.path.join(dir,'startSample.json'))
Example #29
        log_file_name = file_name.format(datetime.now().strftime("%Y-%m-%d"))
        fh = logging.FileHandler(path.join(log_folder, log_file_name))
        fh.setLevel(logging.DEBUG)

        # create console log
        ch = logging.StreamHandler()
        ch.setLevel(logging.DEBUG)

        # create formatter and add it to the handlers
        formatter = logging.Formatter('[%(asctime)s] - [%(name)s] - [%(levelname)s] - %(message)s')
        fh.setFormatter(formatter)
        ch.setFormatter(formatter)
        # add the handlers to the logger
        logger.addHandler(fh)
        logger.addHandler(ch)

    def get(self, logger_name):
        """get logging instance

        :param logger_name:
        """
        if "stock_tracer" not in logger_name:
            logger_name = "stock_tracer." + logger_name

        logger = logging.getLogger(logger_name)
        logger.disabled = False
        return logger

Logger = Logger.getInstance(Configuration.get("log_folder"))
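
The final line rebinds the module-level name Logger to whatever Logger.getInstance(...) returns. That accessor is not shown in any of these snippets; a common singleton-style implementation, sketched here purely as an assumption, caches one instance per process:

# Sketch of a getInstance()-style singleton accessor (assumption; the real
# Logger class behind Examples #12, #15 and #29 is not shown in this listing).
class Logger(object):
    _instance = None

    def __init__(self, log_folder=None):
        self.log_folder = log_folder
        self.is_initialized = False

    @classmethod
    def getInstance(cls, log_folder=None):
        # Create the instance on first use, then reuse it for later calls.
        if cls._instance is None:
            cls._instance = cls(log_folder)
        return cls._instance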