def main():
    config = Configuration()
    credentials = config.get_credentials()

    # Create an httplib2.Http object to handle our HTTP requests and
    # authorize it with our credentials.
    http = httplib2.Http()
    http = credentials.authorize(http)

    # Construct the service object for interacting with the Admin Reports API.
    service = discovery.build('admin', 'reports_v1', http=http)
    activities = service.activities()

    settings = config.settings
    engine = engine_from_config(settings, 'sqlalchemy.')
    DBSession.configure(bind=engine)

    try:
        # Exercise both the Reports API and the database to verify that the
        # credentials and the schema are usable.
        login_list = activities.list(userKey='all', applicationName='login', maxResults=1000).execute()
        DBSession.query(LoginItem).first()

        print("Success!")
    except client.AccessTokenRefreshError:
        print("Failure. Access token is invalid. Please re-run the tool to get a new access token.")
    except OperationalError:
        print("Database has not been initialised. Please run acctwatch_initdb.")
    except Exception:
        print("Some other unknown error occurred")
    def setup(self):
        Configuration.load()
        self.url = Configuration.integration_url(Configuration.CIRCULATION_MANAGER_INTEGRATION)

        millenium = Configuration.integration(Configuration.MILLENIUM_INTEGRATION)
        self.test_username = millenium.get(Configuration.AUTHENTICATION_TEST_USERNAME)
        self.test_password = millenium.get(Configuration.AUTHENTICATION_TEST_PASSWORD)
Example #3
 def run(self):
     config = Configuration()
     jid = config.get('connection', 'jid')
     password = config.get('connection', 'password')
     resource = config.get('connection', 'resource')
     debug = config.getboolean('connection', 'debug')
     bot = UpwalkJabberBot(jid, password, resource, debug)
     bot.serve_forever()
Example #4
    def test_create_config(self):
        """
        Tests creation of a blank configuration ensuring that the file is not created on the file system until after the
        save() method is called on the configuration object. Also implicitly tests writing blank config files.

        """
        conf = Configuration(testconfig, create=True)
        if os.path.exists(testconfig):
            self.fail("File should not be written until save() is executed")
        conf.save()
        self.assertTrue(os.path.isfile(testconfig), "File should exist after having been written")
Example #5
	def doStuff():
		global Config
		global yourThread
		with dataLock:
			# Do your stuff with commonDataStruct Here
			logger.info('rereading config file looking for changes - thread {}'.format(threading.current_thread().name))
			Config = Configuration(Default_Config_FilePath)
			logger.debug("Loading system config file from file: " + ConfigFilePath)
			Config.load(ConfigFilePath)

		# Set the next thread to happen
		yourThread = threading.Timer(CONFIG_INTERVAL, doStuff, ())
		yourThread.start()
Example #6
    def __init__(self, mq_server=None, mq_name=None, logger=None):
        """__init__

        :param mq_server:
        :param mq_name:
        """
        self.mq_server = mq_server if mq_server else Configuration.get("mq_server")
        self.mq_name = mq_name if mq_name else Configuration.get("mq_name")
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.mq_server))
        self.mq_channel = connection.channel()
        self.mq_channel.queue_declare(self.mq_name, durable=True)
        self.logger = logger if logger else Logger.get(self.__class__.__name__)
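
A minimal companion sketch (not part of the original example): publishing a persistent message to the kind of durable queue the constructor declares. The host and queue name here are placeholders.

import pika

# Hypothetical producer; delivery_mode=2 marks messages persistent, which
# pairs with the durable queue declared in __init__ above.
connection = pika.BlockingConnection(pika.ConnectionParameters(host="localhost"))
channel = connection.channel()
channel.queue_declare("tasks", durable=True)
channel.basic_publish(
    exchange="",
    routing_key="tasks",
    body=b"hello",
    properties=pika.BasicProperties(delivery_mode=2))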
Example #7
    def __init__(self, model):
        Logger.info("Created communication")

        self.host = Configuration.get_hostname()
        self.port = Configuration.get_port()
        self.socket = None
        self.model = model
        self.communication_tries = 20
        self.time = 10

        self.role = Configuration.get_role()
        self.json_builder = JsonBuilder(self.role)

        self.prepare_connection()
Example #8
    def load(self):
        """
        Loads a config.json file and run a validation process.
        If the configurations seem valid, returns Configuration object.

        """
        util.set_working_directory()
        # paths are relative to this script's location
        schema_json = util.json_decode(self.schema_path)
        if schema_json is None:
            msg = "Problem has occurred during the decoding procedure" + \
                  " with the following file: " + self.schema_path + "."
            logging.error(msg)
            raise IOError(msg)
        tools_json = util.json_decode(self.tools_path)
        if tools_json is None:
            msg = "Problem has occurred during the decoding procedure" + \
                  " with the following file: " + self.tools_path + "."
            logging.error(msg)
            raise IOError(msg)

        try:
            with open(self.config_path, mode="r") as file:
                config_string = file.read()
            decoder = json.JSONDecoder(object_pairs_hook=checking_hook)
            config_json = decoder.decode(config_string)
        except IOError:
            msg = "The file does not exist or cannot be read:" + \
                  (os.path.split(self.config_path))[1]
            logging.error(msg)
            raise IOError(msg)
        except ValueError as value_error:
            msg = (os.path.split(self.config_path))[1] + " file is not valid"
            logging.error(msg)
            print(value_error)
            raise ValidationError(msg)
        except KeyError as k_error:
            msg = "Duplicate key specified."
            logging.error(msg)
            print(k_error)
            print("Modify: " + (os.path.split(self.config_path))[1])
            raise ValidationError(msg)

        valid = validation.is_valid_json(config_json, schema_json)
        if not valid:
            msg = "Validation failed for " + \
                  (os.path.split(self.config_path))[1] + "."
            logging.error(msg)
            raise ValidationError(msg)

        config = Configuration()
        config.iterations = config_json["IterationCount"]
        config.runs = config_json["Runs"]
        config.queries = config_json["Queries"]
        config.scenarios = config_json["Scenarios"]
        config.sizes = util.get_power_of_two(config_json["MinSize"], config_json["MaxSize"])
        config.tools = config_json["Tools"]
        config.optional_arguments = config_json["OptionalArguments"]

        return config
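
The decoder above passes a checking_hook as object_pairs_hook, but the snippet does not define it. Judging from the except KeyError branch, it is presumably something like this sketch, which rejects duplicate keys:

# Assumed duplicate-key guard: json hands each object's (key, value) pairs
# to this hook, and a repeated key raises KeyError, which load() reports
# as "Duplicate key specified."
def checking_hook(pairs):
    result = {}
    for key, value in pairs:
        if key in result:
            raise KeyError("Duplicate key found: %s" % key)
        result[key] = value
    return result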
Example #9
 def add_configuration_links(cls, feed):
     for rel, value in (
             ("terms-of-service", Configuration.terms_of_service_url()),
             ("privacy-policy", Configuration.privacy_policy_url()),
             ("copyright", Configuration.acknowledgements_url()),
             ("about", Configuration.about_url()),
     ):
         if value:
             d = dict(href=value, type="text/html", rel=rel)
             if isinstance(feed, OPDSFeed):
                 feed.add_link(**d)
             else:
                 # This is an ElementTree object.
                 link = E.link(**d)
                 feed.append(link)
Example #10
    def send_notification_mails(self, member):
        """Send the notification mail"""
        config = Configuration()
        sender_address = config.get('WELCOME_MAIL_SENDER')
        subject = config.get('NOTIFICATION_MAIL_SUBJECT')
        recipients = config.get('NOTIFICATION_MAIL_RECIPIENTS')

        mail_template = JINJA_ENVIRONMENT.get_template('templates/emails/notification_signup.txt')
        data = {
            'member': member,
            'server_url': SERVER_URL
        }
        body = mail_template.render(data)

        mail.send_mail(sender_address, recipients, subject, body)
Example #11
    def do_run(self):
        args = self.parse_command_line()
        if args.blocking_refresh:
            concurrently = ''
        else:
            concurrently = 'CONCURRENTLY'
        # Initialize database
        from model import (
            MaterializedWork,
            MaterializedWorkWithGenre,
        )
        db = self._db
        for i in (MaterializedWork, MaterializedWorkWithGenre):
            view_name = i.__table__.name
            a = time.time()
            db.execute("REFRESH MATERIALIZED VIEW %s %s" % (concurrently, view_name))
            b = time.time()
            print "%s refreshed in %.2f sec." % (view_name, b-a)

        # Close out this session because we're about to create another one.
        db.commit()
        db.close()

        # The normal database connection (which we want almost all the
        # time) wraps everything in a big transaction, but VACUUM
        # can't be executed within a transaction block. So create a
        # separate connection that uses autocommit.
        url = Configuration.database_url()
        engine = create_engine(url, isolation_level="AUTOCOMMIT")
        engine.autocommit = True
        a = time.time()
        engine.execute("VACUUM (VERBOSE, ANALYZE)")
        b = time.time()
        print "Vacuumed in %.2f sec." % (b-a)
Example #12
def run():
    args = parse_args()
    logger = logging.getLogger("BugLoc")
    logger.setLevel(logging.INFO)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')

    config = Configuration(args.project)

    if config.log_path:
        file_handler = logging.FileHandler(config.log_path)
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    else:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler)
    logger.info('---------------------------------------')
    logger.info('Current project name : {}'.format(args.project))
    logger.info('Running with args : {}'.format(args))

    if args.prepare:
        prepare(args, config)
    if args.train:
        train(args, config)
    if args.evaluate:
        evaluate(config)
    if args.test:
        test(args, config)
Example #13
def main():
    config = Configuration()

    engine = create_engine(config.db_connection_url, echo=True)
    if not database_exists(engine.url):
        create_database(engine.url)

    Base.metadata.create_all(engine)  # Create all tables if necessary
    Session = sessionmaker(bind=engine)
    session = Session()

    mumble_client = pymumble.Mumble(
        host=config.mumble_server_host,
        user=config.mumble_username,
        password=config.mumble_server_password,
        certfile=config.certfile_path,
        keyfile=config.keyfile_path,
        reconnect=config.reconnect)
    mumble_client.set_application_string(config.mumble_application_string)
    mumble_client.set_loop_rate(config.loop_rate)

    mumble_client.start()
    mumble_client.is_ready()

    mumble_client.callbacks.add_callback(
        pymumble.constants.PYMUMBLE_CLBK_USERCREATED,
        create_on_user_join_callback(mumble_client, session, config)
    )
    mumble_client.callbacks.add_callback(
        pymumble.constants.PYMUMBLE_CLBK_TEXTMESSAGERECEIVED,
        create_on_message_callback(mumble_client, session)
    )

    while True:
        time.sleep(1)
Example #14
 def restoreRegionBackup(self, region, fileName):
     regionName = self.session.api.Region.get_sim_name(region)
     regionOwner = self.session.api.Region.get_master_avatar_uuid(region)
     
     backupStoragePath = os.path.join(Configuration.instance().generateFullOarBackupPath(regionOwner, region), fileName)
     
     #try to copy the OAR to the transfer location
     transferFileName = fileName + "_" + inworldz.util.general.id_generator()
     transferPath = os.path.join(Configuration.instance().transferSharePath, transferFileName)
     
     shutil.copy(backupStoragePath, transferPath)
     
     self.session.api.Region.CopyFileFromTransferLocation(region, transferFileName, True)
     
     if not self.session.api.Region.Restore(region, regionName, fileName, False, False):
         raise Exception("Restore failed for region {0} '{1}'".format(region, regionName))
Example #15
 def from_environment(cls, redirect_uri, test_mode=False):
     if test_mode:
         return cls('/path', '/callback', test_mode)
     config = dict(Configuration.integration(
         Configuration.GOOGLE_OAUTH_INTEGRATION
     )['web'])
     return cls(config, redirect_uri, test_mode)
Example #16
    def __init__(self, config, burnin=0, long=False, label=None):
        self.configDir = config
        self.config = Configuration(config)
        self.burnin = burnin
        self.long = long
        self.label = label

        # filter() returns an iterator on Python 3, so build lists explicitly
        # before sorting.
        self.datasets = sorted(
            d for d in os.listdir(self.configDir) if d.startswith('ds'))

        self.runs = {}
        self.data = {}
        for ds in self.datasets:
            runs = os.listdir(os.path.join(self.configDir, ds))
            runs = [r for r in runs if r.startswith('run')]

            if label is not None:
                runs = [r for r in runs if r.endswith(label)]

            runs.sort()

            self.runs[ds] = runs
            self.data[ds] = pd.read_csv(
                os.path.join(self.configDir, ds, 'data.csv')).values
Example #17
    def __init__(self, input_file, sub_file, output_file, log_stdout=False):
        """
        Store information about input and output files and subtitles. Store if
        log stdout and set object's attributes.
        @param input_file str, Path to input file
        @param sub_file str, Path to subtitles file
        @param output_file str, Path to output file
        @param log_stdout bool, Store stdout after process finish
        """
        self.input_file = input_file
        self.sub_file = sub_file
        self.output_file = output_file
        self.log_stdout = log_stdout

        self.config = Configuration()
        self.logger = logging.getLogger(self.__class__.__name__)

        self.process_transport = None
        self.process_protocol = None

        self.started = False
        self.finished = False
        self.paused = False
        self.cancelled = False
        self.deferred = defer.Deferred()
        self.deferred.addErrback(self.process_exited)

        self.pid = None
        self.returncode = None
        self.stderr = None
        self.stdout = None
Example #18
    def annotate_catalog(self, catalog, live=True):
        """Add links and metadata to every catalog."""
        if live:
            search_controller = "search"
        else:
            search_controller = "search_qa"
        search_url = self.app.url_for(search_controller)
        catalog.add_link_to_catalog(catalog.catalog,
                                    href=search_url,
                                    rel="search",
                                    type=OPENSEARCH_MEDIA_TYPE)
        register_url = self.app.url_for("register")
        catalog.add_link_to_catalog(catalog.catalog,
                                    href=register_url,
                                    rel="register",
                                    type=OPDS_CATALOG_REGISTRATION_MEDIA_TYPE)

        # Add a templated link for getting a single library's entry.
        library_url = unquote(self.app.url_for("library", uuid="{uuid}"))
        catalog.add_link_to_catalog(
            catalog.catalog,
            href=library_url,
            rel="http://librarysimplified.org/rel/registry/library",
            type=OPDSCatalog.OPDS_TYPE,
            templated=True)

        vendor_id, ignore, ignore = Configuration.vendor_id(self.app._db)
        catalog.catalog["metadata"]["adobe_vendor_id"] = vendor_id
Example #19
    def __init__(self):
        """Instantiate and run a script."""
        parser = self.parser()
        self.args = parser.parse_args()
        if self.args.config:
            config_directory = self.args.config
        else:
            config_directory = Configuration.default_directory()

        if not os.path.exists(config_directory):
            self.log.warn("%s does not exist, creating it.", config_directory)
            os.makedirs(config_directory)

        self.log.debug("Using config directory %s", config_directory)
        self.config = Configuration.from_directory(config_directory,
                                                   self.args.bots)
Example #20
 def __init__(self):
     self.config = Configuration("config.json").config["database"]
     self.connection = psycopg2.connect(
         "host={0} user={1} dbname={2} password={3} sslmode={4}".format(
             self.config["host"], self.config["user"],
             self.config["dbname"], self.config["password"],
             self.config["ssl"]))
Example #21
    def save_network_diagnostics(self, epoch, l2, R):
        """
        save diagnostics of the network
        """

        self.diag['network']['l2_penalty'][epoch] = l2
        self.log['l2_penalty'].append(Cfg.floatX(l2))

        i = 0
        j = 0
        for layer in self.trainable_layers:
            if layer.isdense:
                self.diag['network']['W_norms'][i][:, epoch] = np.sum(layer.W.get_value() ** 2, axis=0)
                if layer.b is not None:
                    self.diag['network']['b_norms'][i][:, epoch] = layer.b.get_value() ** 2
                i += 1

            if layer.isdense or layer.isconv:
                dW = np.sqrt(np.sum((layer.W.get_value() - self.diag['network']['W_copy'][j]) ** 2))
                self.diag['network']['dW_norms'][j][epoch] = dW
                if layer.b is not None:
                    db = np.sqrt(np.sum((layer.b.get_value() - self.diag['network']['b_copy'][j]) ** 2))
                    self.diag['network']['db_norms'][j][epoch] = db
                j += 1

        # diagnostics only relevant for the SVDD loss
        if Cfg.svdd_loss:
            self.diag['network']['R'][epoch] = R
            self.diag['network']['c_norm'][epoch] = np.sqrt(np.sum(self.cvar.get_value() ** 2))
Example #22
    def update_R_c(self):
        """
        method to update R and c while leaving the network parameters fixed in a block coordinate optimization
        """

        print("Updating radius R and center c...")

        # Get updates
        R, c = update_R_c(self.diag['train']['rep'], np.sum(self.diag['train']['rep'] ** 2, axis=1),
                          solver=Cfg.QP_solver)

        # Update values
        self.Rvar.set_value(Cfg.floatX(R))
        self.cvar.set_value(Cfg.floatX(c))

        print("Radius R and center c updated.")
    def fulfill_open_access(self, licensepool, delivery_mechanism):
        # Keep track of a default way to fulfill this loan in case the
        # patron's desired delivery mechanism isn't available.
        fulfillment = None
        for lpdm in licensepool.delivery_mechanisms:
            if not (lpdm.resource and lpdm.resource.representation
                    and lpdm.resource.representation.url):
                # We don't actually know how to deliver this
                # allegedly open-access book.
                continue
            if lpdm.delivery_mechanism == delivery_mechanism:
                # We found it! This is how the patron wants
                # the book to be delivered.
                fulfillment = lpdm
                break
            elif not fulfillment:
                # This will do in a pinch.
                fulfillment = lpdm

        if not fulfillment:
            # There is just no way to fulfill this loan.
            raise NoOpenAccessDownload()

        rep = fulfillment.resource.representation
        cdn_host = Configuration.cdn_host(Configuration.CDN_OPEN_ACCESS_CONTENT)
        content_link = cdnify(rep.url, cdn_host)
        media_type = rep.media_type
        return FulfillmentInfo(
            identifier_type=licensepool.identifier.type,
            identifier=licensepool.identifier.identifier,
            content_link=content_link, content_type=media_type, content=None, 
            content_expires=None
        )
Example #24
    def __init__(self):
        
        self._config = Configuration.getInstance().getConfig()
        
        self._driver = Driver()
        self._createSensor(self._config[Configuration.KEY_IMU_CLASS])
        
        #PID constants must have the same length
        self._pidAnglesSpeedKP = self._config[Configuration.PID_ANGLES_SPEED_KP] 
        self._pidAnglesSpeedKI = self._config[Configuration.PID_ANGLES_SPEED_KI]
        self._pidAnglesSpeedKD = self._config[Configuration.PID_ANGLES_SPEED_KD]
        
        #PID constants must have the same length
        self._pidAnglesKP = self._config[Configuration.PID_ANGLES_KP] 
        self._pidAnglesKI = self._config[Configuration.PID_ANGLES_KI]
        self._pidAnglesKD = self._config[Configuration.PID_ANGLES_KD]
        
        self._pidAccelKP = self._config[Configuration.PID_ACCEL_KP]
        self._pidAccelKI = self._config[Configuration.PID_ACCEL_KI]
        self._pidAccelKD = self._config[Configuration.PID_ACCEL_KD]
        
        #PID
        self._pidKP = self._pidAnglesSpeedKP + self._pidAnglesKP + self._pidAccelKP
        self._pidKI = self._pidAnglesSpeedKI + self._pidAnglesKI + self._pidAccelKI
        self._pidKD = self._pidAnglesSpeedKD + self._pidAnglesKD + self._pidAccelKD
        
        self._pid = PID(FlightController.PID_PERIOD, \
                        self._pidKP, self._pidKI, self._pidKD, \
                        self._readPIDInput, self._setPIDOutput, \
                        "stabilization-pid")
        self._pid.setTargets([0.0]*len(self._pidKP))
        

        self._isRunning = False
Example #25
    def __init__(self, data_source_name, list_name, metadata_client=None,
                 overwrite_old_data=False,
                 annotation_field='text',
                 annotation_author_name_field='name',
                 annotation_author_affiliation_field='location',
                 first_appearance_field='timestamp',
                 **kwargs
             ):
        super(CustomListFromCSV, self).__init__(data_source_name, **kwargs)
        self.foreign_identifier = list_name
        self.list_name = list_name
        self.overwrite_old_data=overwrite_old_data

        if not metadata_client:
            metadata_url = Configuration.integration_url(
                Configuration.METADATA_WRANGLER_INTEGRATION,
                required=True
            )
            metadata_client = SimplifiedOPDSLookup(metadata_url)
        self.metadata_client = metadata_client

        self.annotation_field = annotation_field
        self.annotation_author_name_field = annotation_author_name_field
        self.annotation_author_affiliation_field = annotation_author_affiliation_field
        self.first_appearance_field = first_appearance_field
Example #26
def structure_weighted_binary_cross_entropy_with_logits(
        input, target: torch.Tensor):
    target_pad = F.pad(target, [10, 10, 10, 10], mode='circular')
    weit = torch.abs(
        F.avg_pool2d(target_pad, kernel_size=21, stride=1, padding=0) - target)
    b, c, h, w = weit.shape
    weit = (
        weit - weit.view(b, c, -1).min(dim=-1, keepdim=True)[0].unsqueeze(-1)
    ) / (1e-6 + weit.view(b, c, -1).max(dim=-1, keepdim=True)[0].unsqueeze(-1)
         - weit.view(b, c, -1).min(dim=-1, keepdim=True)[0].unsqueeze(-1))
    dx = F.conv2d(F.pad(target, [1, 1, 0, 0], mode='reflect'),
                  torch.FloatTensor([-0.5, 0, 0.5]).view(1, 1, 1,
                                                         3).to(target.device),
                  stride=1,
                  padding=0)
    dy = F.conv2d(F.pad(target, [0, 0, 1, 1], mode='reflect'),
                  torch.FloatTensor([-0.5, 0, 0.5]).view(1, 1, 3,
                                                         1).to(target.device),
                  stride=1,
                  padding=0)
    torch.abs_(dx)
    torch.abs_(dy)
    edge_info = (dx + dy) > 0.4
    weit[edge_info] = 0.0
    weit = 1 + Configuration.instance().S_LOSS_GAMA * weit
    wbce = F.binary_cross_entropy_with_logits(input, target, reduction='none')
    wbce = (weit * wbce)
    return wbce.sum()
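
A quick smoke test (not from the original source), assuming Configuration.instance().S_LOSS_GAMA is set by the surrounding project; the tensor shapes are arbitrary.

import torch

# Hypothetical check: random logits against a random binary mask.
logits = torch.randn(2, 1, 64, 64, requires_grad=True)
target = (torch.rand(2, 1, 64, 64) > 0.5).float()
loss = structure_weighted_binary_cross_entropy_with_logits(logits, target)
loss.backward()  # gradients flow through the weighted BCE term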
Example #27
    def __init__(self, tasks_queue):
        """
        Store queue of tasks and set object's attributes.
        @param tasks_queue Queue, Queue of tasks
        """
        self.tasks_queue = tasks_queue

        self.logger = logging.getLogger(self.__class__.__name__)

        self._running = False
        self._paused = False
        self._cancelled = False
        self.processes = set()
        self.tasks_done = []
        self.tasks_incomplete = []
        self.tasks_failed = []

        self.scheduler = tx_task.LoopingCall(self.schedule_tasks)
        self.deferred = defer.Deferred()

        self.config = Configuration()

        self.processes_count = self.config.getint('scheduler',
                                                  'processes_count')
        self.logger.debug('Count of processes to run: %s', self.processes_count)

        self.scheduler_timeout = self.config.getint('scheduler',
                                                    'scheduler_timeout')
        self.logger.debug('Scheduler timeout: %s', self.scheduler_timeout)
Example #28
 def run_program(cls, args):
     r'''Run a game server.
         Takes options from the command line, including number of games and the
         default map.
     '''#'''
     opts = {}
     if args:
         if args[0] in variants:
             Configuration.set_globally('variant', args[0])
         else: cls.usage('Unknown variant %r', args[0])
     
     manager = ThreadManager()
     server = cls(manager)
     if manager.add_server(server):
         manager.run()
     else: manager.log.critical("Failed to open the socket.")
Example #29
def start_readers():

    logger = Logger().getLogger()
    config = Configuration()

    readers = [
        var
        for var in config.iniFile.get('mpd_box', 'mpd_box.readers').split('\n')
        if var
    ]
    logger.info(readers)

    # Dynamically load and start each reader in a thread
    # Eg : mpd_box.readers.nfc.raspberry_explorer_board
    for reader in readers:
        logger.info(reader)
        # Separate path and file name
        path, obj = reader.rsplit('.', 1)
        # Load path mpd_box.readers.nfc
        p = __import__(path, globals(), locals(), [obj])
        # Load py file raspberry_explorer_board
        object1 = getattr(p, obj)

        logger.info('Starting %s reader thread' % obj)

        # Get Object raspberry_explorer_board from py file and execute.
        thread.start_new_thread(getattr(object1, obj), ())

        logger.info('Thread is running')
Example #30
 def __init__(self, collection, lookup_client, *args, **kwargs):
     if not lookup_client:
         content_server_url = (Configuration.integration_url(
             Configuration.CONTENT_SERVER_INTEGRATION))
         lookup_client = SimplifiedOPDSLookup(content_server_url)
     super(ContentServerBibliographicCoverageProvider,
           self).__init__(collection, lookup_client, *args, **kwargs)
Example #31
    def from_config(cls, _db):
        """Initialize an AuthdataUtility from site configuration.

        :return: An AuthdataUtility if one is configured; otherwise
        None.

        :raise CannotLoadConfiguration: If an AuthdataUtility is
        incompletely configured.
        """
        integration = Configuration.integration(
            Configuration.ADOBE_VENDOR_ID_INTEGRATION)
        if not integration:
            return None
        vendor_id = integration.get(Configuration.ADOBE_VENDOR_ID)
        library_uri = integration.get(cls.LIBRARY_URI_KEY)
        library = Library.instance(_db)
        library_short_name = library.library_registry_short_name
        secret = library.library_registry_shared_secret
        other_libraries = integration.get(cls.OTHER_LIBRARIES_KEY, {})
        if (not vendor_id or not library_uri or not library_short_name
                or not secret):
            raise CannotLoadConfiguration(
                "Adobe Vendor ID configuration is incomplete. %s, %s, library.library_registry_short_name and library.library_registry_shared_secret must all be defined."
                % (cls.LIBRARY_URI_KEY, Configuration.ADOBE_VENDOR_ID))
        if '|' in library_short_name:
            raise CannotLoadConfiguration(
                "Library short name cannot contain the pipe character.")
        return cls(vendor_id, library_uri, library_short_name, secret,
                   other_libraries)
Example #32
    def page(cls, _db, title, url, annotator=None,
             use_materialized_works=True):

        """Create a feed of content to preload on devices."""
        configured_content = Configuration.policy(Configuration.PRELOADED_CONTENT)

        identifiers = [Identifier.parse_urn(_db, urn)[0] for urn in configured_content]
        identifier_ids = [identifier.id for identifier in identifiers]

        if use_materialized_works:
            from core.model import MaterializedWork
            q = _db.query(MaterializedWork)
            q = q.filter(MaterializedWork.primary_identifier_id.in_(identifier_ids))

            # Avoid eager loading of objects that are contained in the 
            # materialized view.
            q = q.options(
                lazyload(MaterializedWork.license_pool, LicensePool.data_source),
                lazyload(MaterializedWork.license_pool, LicensePool.identifier),
                lazyload(MaterializedWork.license_pool, LicensePool.edition),
            )
        else:
            q = _db.query(Work).join(Work.primary_edition)
            q = q.filter(Edition.primary_identifier_id.in_(identifier_ids))

        works = q.all()
        feed = cls(_db, title, url, works, annotator)

        annotator.annotate_feed(feed, None)
        content = unicode(feed)
        return content
Example #33
    def __init__(self,
                 data_source_name,
                 list_name,
                 metadata_client=None,
                 overwrite_old_data=False,
                 annotation_field='text',
                 annotation_author_name_field='name',
                 annotation_author_affiliation_field='location',
                 first_appearance_field='timestamp',
                 **kwargs):
        super(CustomListFromCSV, self).__init__(data_source_name, **kwargs)
        self.foreign_identifier = list_name
        self.list_name = list_name
        self.overwrite_old_data = overwrite_old_data

        if not metadata_client:
            metadata_url = Configuration.integration_url(
                Configuration.METADATA_WRANGLER_INTEGRATION, required=True)
            metadata_client = SimplifiedOPDSLookup(metadata_url)
        self.metadata_client = metadata_client

        self.annotation_field = annotation_field
        self.annotation_author_name_field = annotation_author_name_field
        self.annotation_author_affiliation_field = annotation_author_affiliation_field
        self.first_appearance_field = first_appearance_field
Example #34
    def forward_prop(self, X_ccv, X_cvx, **kwargs):

        Z_ccv = Cfg.floatX(self.pool_size[0] * self.pool_size[1]) * \
            pool_2d(X_ccv, mode='average_exc_pad', **self.pool_opts)
        Z_cvx = my_pool_2d(X_cvx - X_ccv, mode='max', **self.pool_opts) + Z_ccv

        return Z_ccv, Z_cvx
Example #35
def lane_for_other_languages(_db, library, exclude_languages):
    """Make a lane for all books not in one of the given languages."""

    language_lanes = []
    other_languages = Configuration.tiny_collection_languages(library)

    if not other_languages:
        return None

    for language_set in other_languages:
        name = LanguageCodes.name_for_languageset(language_set)
        language_lane = Lane(
            _db,
            library,
            full_name=name,
            genres=None,
            fiction=Lane.BOTH_FICTION_AND_NONFICTION,
            searchable=True,
            languages=language_set,
        )
        language_lanes.append(language_lane)

    lane = Lane(
        _db,
        library,
        full_name="Other Languages",
        sublanes=language_lanes,
        exclude_languages=exclude_languages,
        searchable=True,
        genres=None,
    )
    lane.default_for_language = True
    return lane
Example #36
def main():
    #Configure logging options
    logging.basicConfig(format='%(levelname)s:%(message)s',
                        level=logging.DEBUG)
    logging.info("Starting server")

    #Create config object
    config = Configuration("/home/orikeidar01/config.json", "anylink")
    config.database.set_default_table("anylink")

    #Initiate SFTP server
    AnylinkServer.allow_reuse_address = True
    server = AnylinkServer(config.bind_addr, config=config)
    logging.info("Server is now serving")

    try:
        #Start SFTP server thread
        sftp_thread = threading.Thread(target=server.serve_forever)
        sftp_thread.start()

        #Create managers
        requests_manager = RequestsManager(SFTPHandler)
        account_manager = AccountManager(config.database)
        requests_manager.start_scanning()

        #Inject functions
        setattr(anylink, "get_account_manager", lambda: account_manager)
        setattr(anylink, "get_requests_manager", lambda: requests_manager)

        #Start web server
        anylink.start_website()

    except KeyboardInterrupt:
        server.shutdown()
Example #37
    def server_side_validation(self, identifier, password):
        if not hasattr(self, 'identifier_re'):
            self.identifier_re = Configuration.policy(
                Configuration.IDENTIFIER_REGULAR_EXPRESSION,
                default=Configuration.DEFAULT_IDENTIFIER_REGULAR_EXPRESSION)
        if not hasattr(self, 'password_re'):
            self.password_re = Configuration.policy(
                Configuration.PASSWORD_REGULAR_EXPRESSION,
                default=Configuration.DEFAULT_PASSWORD_REGULAR_EXPRESSION)

        valid = True
        if self.identifier_re:
            valid = valid and (self.identifier_re.match(identifier) is not None)
        if self.password_re:
            valid = valid and (self.password_re.match(password) is not None)
        return valid
Example #38
def _relight_flow(config: Configuration) -> None:
    '''
    The "relight" flow renders a picture of a texture using a Light, Viewer, and Camera from a (trained) SVBRDF autoencoder.

    Args:
        config: Configuration specifying the parameters of the flow.
    '''
    with torch.no_grad():
        autoencoder, svbrdf, lights, viewer, camera, input_path, output_path = config.load_relight_flow()
        autoencoder.eval()
        # It is assumed that the dimensions of the input image will be accepted by the network.
        input_image = image.load(path=input_path, encoding='sRGB')
        num_texture_rows = input_image.size(0)
        num_texture_cols = input_image.size(1)
        input_distance = utils.create_radial_distance_field(
            num_rows=num_texture_rows, num_cols=num_texture_cols)
        # By convention, PyTorch expects Tensors to be in [B, D, R, C] format.
        input_batch = torch.cat([input_image, input_distance],
                                dim=2).unsqueeze(0).permute(0, 3, 1, 2)
        normals, svbrdf.parameters = SVBRDFAutoencoder.interpret(
            autoencoder.forward(input_batch))
        _shade_render_save(normals=normals,
                           svbrdf=svbrdf,
                           lights=lights,
                           viewer=viewer,
                           camera=camera,
                           path=output_path)
Example #39
 def add_configuration_links(cls, feed):
     for rel, value in (
         ("terms-of-service", Configuration.terms_of_service_url()),
         ("privacy-policy", Configuration.privacy_policy_url()),
         ("copyright", Configuration.acknowledgements_url()),
         ("about", Configuration.about_url()),
         ("license", Configuration.license_url()),
     ):
         if value:
             d = dict(href=value, type="text/html", rel=rel)
             if isinstance(feed, OPDSFeed):
                 feed.add_link_to_feed(feed.feed, **d)
             else:
                 # This is an ElementTree object.
                 link = OPDSFeed.link(**d)
                 feed.append(link)
Example #40
    def __init__(self, template, debug=False, dry_run=False):
        self.config = Configuration()
        self.debug = debug
        self.dry_run = dry_run

        # The work queue will figure out a valid combination of MongoDB access
        # parameters, e.g., host/port, URI, or replica set discovery via DNS
        self.wq = WorkQueue(host=self.config.mongodb_host,
                            port=self.config.mongodb_port,
                            uri=self.config.mongodb_uri,
                            srv_name=self.config.mongodb_rs_srv,
                            database=self.config.mongodb_queue_db,
                            replicaset=self.config.mongodb_rs,
                            collection=self.config.mongodb_queue_col)

        if not os.path.exists(template):
            raise Exception("Template file does not exist")
        self.template_dir = os.path.dirname(template)
        self.template_file = os.path.basename(template)
        if self.template_file == "":
            raise Exception("Template must be a file, not a directory")

        self.jinja = jinja2.Environment(loader=jinja2.FileSystemLoader(
            self.template_dir),
                                        autoescape=False)
Example #41
    def apply(self, parent, sent, vMWEId=None, parse=False, vMWEType=None, mwtMerge=False):
        Counters.blackMergeNum += 1
        if sent and not parse:
            sent.blackMergeNum += 1
        config = parent.configuration
        newBuffer = list(config.buffer)
        if mwtMerge:
            newStack = list(config.stack)[:-1]
            newStack.append([config.stack[-1]])
        else:
            newStack = list(config.stack)[:-2]
            newStack.append([config.stack[-2], config.stack[-1]])
        newTokens = list(config.tokens)
        vMWETokens = Sentence.getTokens(newStack[-1])
        if len(vMWETokens) > 1 or (len(vMWETokens) == 1 and mwtMerge):
            if vMWEId is None:
                vMWEId = VMWE.getVMWENumber(newTokens) + 1
            vMWE = VMWE(vMWEId, vMWETokens[0])
            if parse:
                sent.identifiedVMWEs.append(vMWE)
            vMWE.tokens = vMWETokens
            if vMWEType is not None:
                vMWE.type = vMWEType
            newTokens.append(vMWE)
        elif len(vMWETokens) == 1:
            newTokens.append(vMWETokens[0])

        newConfig = Configuration(stack=newStack, buffer=newBuffer, tokens=newTokens, sent=sent, transition=self)

        super(BlackMerge, self).__init__(config=newConfig, previous=parent, sent=sent)
Example #42
def reinitialize_primal_variables(layer):

    gpu_vars = (layer.l, layer.k)
    cpu_vars = ()

    heavy_vars = (layer.W_i, layer.b_i, layer.l_i)

    if Cfg.store_on_gpu:
        gpu_vars += heavy_vars
    else:
        cpu_vars += heavy_vars

    zero = Cfg.floatX(0)

    gpu_updates = []
    for var in gpu_vars:
        gpu_updates.append((var, var.fill(zero)))
    gpu_fun = theano.function([], updates=gpu_updates)

    def update_all_fun():
        gpu_fun()
        for var in cpu_vars:
            var.fill(zero)

    return update_all_fun
Example #43
def main():
	logger = logging.getLogger(__name__)
	logger.setLevel(logging.DEBUG)
	handler = logging.StreamHandler()
	handler.setLevel(logging.DEBUG)
	fmt = logging.Formatter("%(asctime)s - %(levelname)s - %(message)s")
	handler.setFormatter(fmt)
	logger.addHandler(handler)

	usage='Usage: %s [options] subcommand [args]' % commands.progname
	
	parser = OptionParser(usage=usage, add_help_option=False)
	parser.add_option("--debug", dest="debug", action="store_true", help="Enable debug output")
	parser.add_option("-c", "--config-path", dest="base_path", default=None, help="Path to configuration files")
	parser.add_option("-i", "--key-id", dest="key_id", default=None, help="Scalr API key ID")
	parser.add_option("-a", "--access-key", dest="key", default=None, help="Scalr API access key")
	parser.add_option("-l", "--login", dest="ldap_login", default=None, help="Login for LDAP authentication")
	parser.add_option("-p", "--password", dest="ldap_password", default=None, help="Password for LDAP authentication")
	parser.add_option("-u", "--api-url", dest="api_url", default=None, help="Scalr API URL (IF you use open source Scalr installation)")
	parser.add_option("-e", "--env-id", dest="env_id", default=None, help="Scalr Environment ID")
	parser.add_option("-h", "--help", dest="help", action="store_true", help="Help")
	
	args, cmd, subargs = split_options(sys.argv)

	subcommands = sorted([command.name for command in get_commands() if not command.name.startswith('_')])
	help = parser.format_help() + \
			'\nAvailable subcommands:\n\n' + '\n'.join(subcommands) + \
			"\n\nFor more information try '%s help <subcommand>'" % commands.progname
			
	options = parser.parse_args(args)[0]		
			
	if not cmd or options.help:
		print(help)
		sys.exit(1)

	try:
		c = Configuration(options.base_path)
		c.set_environment(options.key, options.key_id, options.api_url, options.env_id, options.ldap_login, options.ldap_password)
		if options.debug:
			c.set_logger(logger)
			
	except ScalrEnvError as e:
		if not cmd.startswith('configure') and cmd != 'help':
			print("\nNo login information found.")
			print("Please specify options -i -a and -u, or run '%s help configure' to find out how to set login information permanently.\n" % commands.progname)
			#print(help)
			sys.exit(1)
Example #44
    def do_work():
        # Something just finished!
        if Cracker.crt_process is not None and Cracker.crt_process.isdead():
            Cracker.process_result()

        # Process is still running - update eta
        if Cracker.crt_process is not None:
            Cracker.update_eta()
            return

        if slow_stop_flag:
            Comunicator.info_logger(
                "Slow shutdown signal received - shutting down!")
            sys.exit(0)

        # Before getting more work make sure we are up to date
        Cracker.complete_missing()

        # Test capabilities once
        if not Cracker.capabilities_tested:
            Configuration.test_capabilities()
            Cracker.capabilities_tested = True

        # Nothing is running - getting more work
        try:
            work = Cracker.req.getwork()
        except Cracker.req.ServerDown:
            Comunicator.printer(Requester.DownMessage)
            return

        die(work is True, "A server-side error occurred while getting work!")

        # No work to be done right now
        if work is None:
            Comunicator.printer(
                "No work to be done, checking in 10 seconds again.")
            return

        # Redundant check
        if work is False:
            Comunicator.warning_logger("Capabilities out of date!")
            return

        # Make status seem a bit more responsive
        Cracker.old_eta = "Cracking process starting"

        Cracker.start_cracking(work)
Example #45
def create_default_lanes(_db, library):
    """Reset the lanes for the given library to the default.

    The database will have the following top-level lanes for
    each large-collection:
    'Adult Fiction', 'Adult Nonfiction', 'Young Adult Fiction',
    'Young Adult Nonfiction', and 'Children'.
    Each lane contains additional sublanes.
    If an NYT integration is configured, there will also be a
    'Best Sellers' top-level lane.

    If there are any small- or tiny-collection languages, the database
    will also have a top-level lane called 'World Languages'. The
    'World Languages' lane will have a sublane for every small- and
    tiny-collection language. The small-collection languages will
    have "Adult Fiction", "Adult Nonfiction", and "Children/YA"
    sublanes; the tiny-collection languages will not have any sublanes.

    If run on a Library that already has Lane configuration, this can
    be an extremely destructive method. All new Lanes will be visible
    and all Lanes based on CustomLists (but not the CustomLists
    themselves) will be destroyed.

    """
    # Delete existing lanes.
    for lane in _db.query(Lane).filter(Lane.library_id==library.id):
        _db.delete(lane)

    top_level_lanes = []

    # Hopefully this library is configured with explicit guidance as
    # to how the languages should be set up.
    large = Configuration.large_collection_languages(library) or []
    small = Configuration.small_collection_languages(library) or []
    tiny = Configuration.tiny_collection_languages(library) or []

    # If there are no language configuration settings, we can estimate
    # the current collection size to determine the lanes.
    if not large and not small and not tiny:
        estimates = library.estimated_holdings_by_language()
        large, small, tiny = _lane_configuration_from_collection_sizes(estimates)
    priority = 0
    for language in large:
        priority = create_lanes_for_large_collection(_db, library, language, priority=priority)

    create_world_languages_lane(_db, library, small, tiny, priority)
Example #46
async def main():
    parser = ArgumentParser()
    add_parsing_arguments(parser)
    args = parser.parse_args()
    config = Configuration.from_args(args)
    client = TelegramClient('scraper', config.api_id, config.api_hash)
    await client.start()
    await scrap_group(client, config)
Example #47
 def apply(self, parent, sent, vMWEId=None, parse=False):
     Counters.mergeNum += 1
     config = parent.configuration
     newStack = list(config.stack)[:-2]
     newStack.append([config.stack[-2], config.stack[-1]])
     newConfig = Configuration(stack=newStack, buffer=list(config.buffer), tokens=list(config.tokens), sent=sent,
                               transition=self)
     super(Merge, self).__init__(config=newConfig, previous=parent, sent=sent)
Example #49
 def __init__(self):
     # fetch config data
     conf = Configuration()
     # create Azure File share service
     self.file_service = FileService(
         account_name=conf.account_name, account_key=conf.account_key)
     # set azure share file name (container)
     self.file_share = conf.file_share
Example #50
 def from_environment(cls):
     config = Configuration.integration(
         Configuration.MILLENIUM_INTEGRATION, required=True)
     host = config.get(Configuration.URL)
     if not host:
         cls.log.info("No Millenium Patron client configured.")
         return None
     return cls(host)
Example #51
    def _set_auth(self):
        """Sets client authentication details for the Metadata Wrangler"""

        metadata_wrangler_url = Configuration.integration_url(
            Configuration.METADATA_WRANGLER_INTEGRATION
        )
        self.client_id = self.client_secret = None
        if (metadata_wrangler_url
            and self.base_url.startswith(metadata_wrangler_url)):
            values = Configuration.integration(Configuration.METADATA_WRANGLER_INTEGRATION)
            self.client_id = values.get(Configuration.METADATA_WRANGLER_CLIENT_ID)
            self.client_secret = values.get(Configuration.METADATA_WRANGLER_CLIENT_SECRET)

            details = [self.client_id, self.client_secret]
            if len([d for d in details if not d]) == 1:
                # Raise an error if one is set, but not the other.
                raise CannotLoadConfiguration("Metadata Wrangler improperly configured.")
Example #52
def user_menu():
    operation_menu_txt = Configuration.tabulate_output("OPMENU")
    try:
        op_mode = int(input(operation_menu_txt + "\nSelect: "))
        print("\n" + Errors.INVALID_CHOICE) if (
            op_mode != 1 and op_mode != 2) else run_app(op_mode)
    except ValueError:
        print("\n" + Errors.ONLY_NUMBERS)
Example #53
    def setup(self):
        super(TestVendorIDModel, self).setup()
        self._integration()
        vendor_id, node_value, delegates = Configuration.vendor_id(self._db)
        self.model = AdobeVendorIDModel(self._db, node_value, delegates)

        # Here's a library that participates in the registry.
        self.library = self._library()
Example #54
 def from_config(cls):
     config = Configuration.integration(cls.FIRSTBOOK, required=True)
     host = config.get(Configuration.URL)
     key = config.get(cls.SECRET_KEY)
     if not host:
         cls.log.warning("No First Book client configured.")
         return None
     return cls(host, key)
Example #55
 def from_environment(cls, redirect_uri, test_mode=False):
     if test_mode:
         return cls('/path', '/callback', test_mode)
     config = Configuration.integration(
         Configuration.GOOGLE_OAUTH_INTEGRATION
     )
     client_json_file = config[Configuration.GOOGLE_OAUTH_CLIENT_JSON]
     return cls(client_json_file, redirect_uri, test_mode)
Example #56
    def __init__(self, mq_server=None, mq_name=None, logger=None):
        """__init__

        :param mq_server:
        :param mq_name:
        :param logger:
        """
        self.mq_server = mq_server if mq_server else Configuration.get("mq_server")
        self.mq_name = mq_name if mq_name else Configuration.get("mq_name")

        self.mq_connection = pika.BlockingConnection(
            pika.ConnectionParameters(host=self.mq_server))
        self.mq_channel = self.mq_connection.channel()

        tmp_queue = self.mq_channel.queue_declare(exclusive=True)
        self.callback_queue = tmp_queue.method.queue
        self.mq_channel.basic_consume(self.on_resonse, no_ack=True, queue=self.callback_queue)

        self.logger = logger if logger else Logger.get(self.__class__.__name__)
Example #57
def setup_logging():
    """
    Set up logging module according to options in application's configuration
    file.
    """
    import logging
    import os.path
    from twisted.python import log
    from config import Configuration

    config = Configuration()

    levels_map = {'CRITICAL': logging.CRITICAL, 'ERROR': logging.ERROR,
                  'WARNING': logging.WARNING, 'INFO': logging.INFO,
                  'DEBUG': logging.DEBUG}

    level_str = config.get('logging', 'level')
    filename = config.get('logging', 'filename')

    try:
        level = levels_map[level_str]
    except KeyError:
        default = logging.INFO
        print ('Unknown logging level %s, using default %s'
               % (level_str, logging.getLevelName(default)))
        level = default

    if filename is None or filename == '':
        filename = 'stdout'

    if filename == 'stdout':
        filepath = None
    else:
        filepath = os.path.join(get_app_dir(), filename)

    # http://twistedmatrix.com/documents/current/core/howto/logging.html#auto3
    observer = log.PythonLoggingObserver()
    observer.start()

    print ("Openning log '%s' with level %s"
           % (filepath if filepath else filename, logging.getLevelName(level)))

    logging.basicConfig(level=level, filename=filepath)
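
For reference, a [logging] section matching the two config.get() calls above could be generated like this; the section and option names come from the snippet, while the file name is assumed.

import configparser

# Hypothetical config file providing the options read by setup_logging().
cfg = configparser.ConfigParser()
cfg["logging"] = {"level": "INFO", "filename": "stdout"}
with open("app.cfg", "w") as fh:
    cfg.write(fh)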
Example #58
    def send_welcome_mail(self, member):
        """Send welcom email with attachments"""
        config = Configuration()
        sender_address = config.get('WELCOME_MAIL_SENDER')
        subject = config.get('WELCOME_MAIL_SUBJECT')
        account_no = config.get('GIRO_ACCOUNT_NO')

        mail_template = Template(config.get('WELCOME_MAIL_TEXT'))

        data = {
            'member': member,
            'year': datetime.date.today().year,
            'accountno': account_no,
            'profile_url': constants.PROFILE_URL
        }
        body = mail_template.render(data)

        buf = cStringIO.StringIO()
        address = member.name + '\n' + member.address + \
            '\n' + member.zipcode + ' ' + member.city
        if member.country.name != 'Norge':
            address = address + '\n' + member.country.name

        body_template = Template(config.get('GIRO_TEXT'))
        message_template = Template(config.get('GIRO_MESSAGE'))

        data = {'member_no': member.number, 'account_no': account_no,
                'access_code': member.edit_access_code, 'profile_url': constants.PROFILE_URL}

        due_date = datetime.datetime.now() + datetime.timedelta(days=14)
        due_date_str = due_date.strftime('%d.%m.%Y')

        current_date = datetime.datetime.now()
        if current_date.month >= 7:
            fee = member.member_type.fee / 2
        else:
            fee = member.member_type.fee

        pdf = PdfGenerator(member_address=address, club_address=config.get('GIRO_ADDRESS'), account_no=account_no,
                           member_no=member.number, access_code=member.edit_access_code, profile_url=constants.PROFILE_URL,
                           heading=config.get('GIRO_SUBJECT'), body=body_template.render(data), fee=fee,
                           due_date=due_date_str, payment_message=message_template.render(data))

        pdf.generate_pdf(buf)

        mail.send_mail(sender_address, member.email, subject,
                       body, attachments=[('kontingent.pdf', buf.getvalue())])
Example #59
def make_lanes(_db, definitions=None):

    definitions = definitions or Configuration.policy(
        Configuration.LANES_POLICY
    )

    if not definitions:
        lanes = make_lanes_default(_db)
    else:
        lanes = [Lane(_db=_db, **definition) for definition in definitions]

    return LaneList.from_description(_db, None, lanes)
Example #60
    def run(self):
        while True:
            # Fallback in case configuration loading fails before sleep_time
            # is assigned; otherwise the finally block would raise NameError.
            sleep_time = NO_NETWORK_EXCEPTION_SLEEP
            try:
                config = Configuration(CONFIG_PATH)
                client = Client(config)
                fs = FileSystemImage(config.get_option("daemon", "home-dir"),
                                     config.get_option("daemon", "app-dir"),
                                     client)

                rm_exists = (config.get_option("sync", "rm-exists")
                             .upper() == "TRUE")
                save_orig = (config.get_option("sync", "save-orig")
                             .upper() == "TRUE")
                sleep_time = int(
                    float(config.get_option("daemon", "sleep-time")))

                if self.first_run:
                    fs.sync_disk_priority(rm_exists, save_orig)
                    self.first_run = False
                else:
                    fs.sync_local_priority(rm_exists, save_orig)
            except Exception as e:
                print(str(e))
                time.sleep(NO_NETWORK_EXCEPTION_SLEEP)
            finally:
                time.sleep(sleep_time)