Example #1
File: api.py Project: t00m/Vazaar
 def __init__(self, app):
     self.app = app
     self.log = get_logger('V-API', self.app.get_logging_level())
     self.bus = dbus.SessionBus() # Attach the object to D-Bus
     bus_name = dbus.service.BusName(BUSNAME, bus=self.bus)
     dbus.service.Object.__init__(self, bus_name, "/org/Vazaar")
     self.log.debug("Vazaar D-Bus\tinitialized")
Example #2
    def __init__(self, app, vuri=None, create=False):
        """Returns a new Virtual Resource"""
        self.app = app
        self.loglevel = self.app.get_logging_level()
        self.log = get_logger('VResource', self.loglevel)

        try:
            self.created = None
            self.rlabel = None # anything (title, plaintext, url, file, ...)
            self.rtype = None # a Nepomuk class
            self.rmetadata = [] # tuples of (predicate, object)
            self.rcontent = None # plaintext content
            self.props = None
            self.prop = None

            if create:
                self.__created_timestamp()

            if not vuri:
                vuri = 'vazaar://' + str(uuid.uuid4())
                self.__created_timestamp()

            self.rid = URIRef(vuri) # vazaar:// + uuid4 uri
            self.log.debug("New virtual resource created: %s" % self.rid)
        except Exception as error:
            self.log.error(error)
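The vuri fallback above mints a synthetic vazaar:// identifier from a random UUID and wraps it in an rdflib URIRef. That step in isolation (assuming rdflib is installed):

    import uuid
    from rdflib import URIRef

    vuri = 'vazaar://' + str(uuid.uuid4())
    rid = URIRef(vuri)  # a new random URI on every call, e.g. vazaar://1b4e28ba-...
    print(rid)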
Example #3
 def __init__(self, app):
     self.app = app
     self.ask = self.app.ask
     self.loglevel = self.app.get_logging_level()
     self.log = get_logger('VStore', self.loglevel)
     self.store = self.app.store
     self.lastmodified = None
     self.cache = {}
     self.qgraph = 0
     self.qcache = 0
     self.log.debug("VirtualStore\tinitialized")
Example #4
File: store.py Project: t00m/Vazaar
    def __init__(self, app):
        """This class manages real operations against a graph. This graph
        is defined by one connection (at time).

        By default StoreManager loads the default connection (if any). If
        no connection is defined the backend is not operative.

        Connections can be created at runtime by some frontend (GUI/CLI)
        """
        self.app = app
        self.app.settings['store'] = {}
        self.loglevel = self.app.get_logging_level()
        self.log = get_logger('Store', self.loglevel)
        self.conn = self.app.conn.get_default_connection()  # Default connection
        self.store = None   # Physical store (sqlite, mysql, ...)
        self.graph = None   # RDF Graph (a ConjunctiveGraph)
        self.log.debug( _("StoreManager\tinitialized using '%s' connection" % self.conn) )
Example #5
File: query.py Project: t00m/Vazaar
 def __init__(self, app):
     self.app = app
     self.graph = self.app.store.get_graph()
     self.loglevel = self.app.get_logging_level()
     self.log = get_logger('Ask', self.loglevel)
     self.log.debug( _("QueryManager\tinitialized") )
     self.total = 0
     self.cols = 0
     self.clipboard = 0
     self.notes = 0
     self.images = 0
     self.audio = 0
     self.video = 0
     self.text = 0
     self.apps = 0
     self.website = 0
     self.remote = 0
     self.feed = 0
     self.folder = 0
Example #6
    def __init__(self, app):
        self.log = get_logger('Hooks')
        self.app = app
        self.rebuild_plugins()
        self.plugins = self.init_plugins()
        self.blacklist = []

        for plugin in self.get_all_plugins():
            try: # read status from config file
                activate = self.app.cfgmgr.get_value('Plugins', plugin.key)
                if activate == '0':
                    self.blacklist_plugin(plugin)
            except Exception:  # activate plugin by default and save config
                self.activate_plugin(plugin)

        totplugins = len(self.get_all_plugins())
        blacklisted = len(self.blacklist)

        self.log.info("Plugins: %d - In use: %d" % (totplugins, totplugins - blacklisted))
Example #7
# -*- coding: utf-8 -*-
#
from django import forms
from django.utils.translation import gettext_lazy as _

from common.utils import validate_ssh_private_key, ssh_pubkey_gen, get_logger
from orgs.mixins.forms import OrgModelForm
from ..models import AdminUser, SystemUser
from ..const import GENERAL_LIMIT_SPECIAL_CHARACTERS_HELP_TEXT

logger = get_logger(__file__)
__all__ = [
    'FileForm',
    'SystemUserForm',
    'AdminUserForm',
    'PasswordAndKeyAuthForm',
]


class FileForm(forms.Form):
    file = forms.FileField()


class PasswordAndKeyAuthForm(forms.ModelForm):
    # A form field name cannot start with `_`, so redefine it.
    password = forms.CharField(
        widget=forms.PasswordInput,
        max_length=128,
        strip=True,
        required=False,
        help_text=_('Password or private key passphrase'),
Example #8
from django.contrib.auth import logout as auth_logout

from common.utils import get_logger
from common.permissions import (PermissionsMixin, IsValidUser,
                                UserCanUpdatePassword)
from ... import forms
from ...models import User
from ...utils import (
    get_user_or_pre_auth_user,
    check_password_rules,
    get_password_check_rules,
)

__all__ = ['UserPasswordUpdateView', 'UserVerifyPasswordView']

logger = get_logger(__name__)


class UserPasswordUpdateView(PermissionsMixin, UpdateView):
    template_name = 'users/user_password_update.html'
    model = User
    form_class = forms.UserPasswordForm
    success_url = reverse_lazy('users:user-profile')
    permission_classes = [IsValidUser, UserCanUpdatePassword]

    def get_object(self, queryset=None):
        return self.request.user

    def get_context_data(self, **kwargs):
        check_rules = get_password_check_rules()
        context = {
Example #9
# -*- coding: utf-8 -*-
#
import uuid
import os
import shutil
from django.conf import settings
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from ..pjenkins.exec_jenkins import JenkinsWork
from common.utils import get_logger
from datetime import datetime

logger = get_logger('jumpserver')


class DeployList(models.Model):
    SUCCESS = "SUCCESS"
    RUNNING = "RUNNING"
    FAILED = "FAILURE"
    LOG_DIR = os.path.join(settings.PROJECT_DIR, 'logs', 'deploy')
    BUILD_FILE_DIR = '/deploy/'
    DEPLOY_FILE_DIR = '/deploy/'
    DEST_FILE_DIR = '/data/'
    BACKUP_DIR = '/deploy/{0}/bak/'
    BACKUP_FILE_DIR = '{APP_NAME}_backup_{VERSION}/{APP_NAME}_full_backup_{VERSION}.tar.gz'
    BACKUP_DIRECTORY_DIR = '/deploy/{APP_NAME}/bak/{APP_NAME}_backup_{VERSION}/'

    STATUS_CHOICES = (
        (SUCCESS, SUCCESS),
        (RUNNING, RUNNING),
Example #10
 def __init__(self, db_uri, logger=None):
     self.db_uri = db_uri
     if logger is None:
         self.log = get_logger(NAME)
     else:
         self.log = logger
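Examples 10, 15, and 16 all use the same injection idiom: accept an optional logger and fall back to a default. The if/else can be compressed to one conditional expression with identical behavior; a sketch with a hypothetical class name, using stdlib logging where the snippet's get_logger(NAME) would go:

    import logging

    class Store:
        # Hypothetical name; only the fallback idiom matters here.
        def __init__(self, db_uri, logger=None):
            self.db_uri = db_uri
            # Use the injected logger when given, otherwise create a default one.
            self.log = logger if logger is not None else logging.getLogger(__name__)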
Example #11
    def setUp(self):
        uri = 'bolt://localhost:7687'
        user = '******'
        password = os.environ['NEO_PASSWORD']

        self.driver = GraphDatabase.driver(uri, auth=(user, password))
        self.data_folder = 'data/COTC007B'
        self.schema = ICDC_Schema(['data/icdc-model.yml', 'data/icdc-model-props.yml'])
        self.log = get_logger('Test Loader')
        self.loader = DataLoader(self.driver, self.schema)
        self.file_list = [
            "data/Dataset/COP-program.txt",
            "data/Dataset/COTC007B-case.txt",
            "data/Dataset/COTC007B-cohort.txt",
            "data/Dataset/COTC007B-cycle.txt",
            "data/Dataset/COTC007B-demographic.txt",
            "data/Dataset/COTC007B-diagnostic.txt",
            "data/Dataset/COTC007B-enrollment.txt",
            "data/Dataset/COTC007B-extent_of_disease.txt",
            "data/Dataset/COTC007B-physical_exam.txt",
            "data/Dataset/COTC007B-principal_investigator.txt",
            "data/Dataset/COTC007B-prior_surgery.txt",
            "data/Dataset/COTC007B-study.txt",
            "data/Dataset/COTC007B-study_arm.txt",
            "data/Dataset/COTC007B-vital_signs.txt",
            "data/Dataset/NCATS-COP01-blood_samples.txt",
            "data/Dataset/NCATS-COP01-case.txt",
            "data/Dataset/NCATS-COP01-demographic.txt",
            "data/Dataset/NCATS-COP01-diagnosis.txt",
            "data/Dataset/NCATS-COP01-enrollment.txt",
            "data/Dataset/NCATS-COP01-normal_samples.txt",
            "data/Dataset/NCATS-COP01-tumor_samples.txt",
            "data/Dataset/NCATS-COP01_20170228-GSL-079A-PE-Breen-NCATS-MEL-Rep1-Lane3.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-076A-Breen-NCATS-MEL-Rep1-Lane1.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-076A-Breen-NCATS-MEL-Rep1-Lane2.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-076A-Breen-NCATS-MEL-Rep2-Lane1.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-076A-Breen-NCATS-MEL-Rep3-Lane1.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-079A-Breen-NCATS-MEL-Rep2-Lane2.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-079A-Breen-NCATS-MEL-Rep2-Lane3.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-079A-Breen-NCATS-MEL-Rep3-Lane2.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-079A-Breen-NCATS-MEL-Rep3-Lane3.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_cohort_file.txt",
            "data/Dataset/NCATS-COP01_path_report_file_neo4j.txt",
            "data/Dataset/NCATS-COP01_study_file.txt"
        ]
        self.file_list_unique = [
            "data/Dataset/COP-program.txt",
            "data/Dataset/COTC007B-case.txt",
            "data/Dataset/COTC007B-cohort.txt",
            "data/Dataset/COTC007B-cycle.txt",
            "data/Dataset/COTC007B-demographic.txt",
            "data/Dataset/COTC007B-diagnostic.txt",
            "data/Dataset/COTC007B-enrollment.txt",
            "data/Dataset/COTC007B-extent_of_disease.txt",
            "data/Dataset/COTC007B-physical_exam.txt",
            "data/Dataset/COTC007B-principal_investigator.txt",
            "data/Dataset/COTC007B-prior_surgery.txt",
            "data/Dataset/COTC007B-study.txt",
            "data/Dataset/COTC007B-study_arm.txt",
            "data/Dataset/COTC007B-vital_signs_unique.txt",
            "data/Dataset/NCATS-COP01-blood_samples.txt",
            "data/Dataset/NCATS-COP01-case.txt",
            "data/Dataset/NCATS-COP01-demographic.txt",
            "data/Dataset/NCATS-COP01-diagnosis.txt",
            "data/Dataset/NCATS-COP01-enrollment.txt",
            "data/Dataset/NCATS-COP01-normal_samples.txt",
            "data/Dataset/NCATS-COP01-tumor_samples.txt",
            "data/Dataset/NCATS-COP01_20170228-GSL-079A-PE-Breen-NCATS-MEL-Rep1-Lane3.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-076A-Breen-NCATS-MEL-Rep1-Lane1.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-076A-Breen-NCATS-MEL-Rep1-Lane2.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-076A-Breen-NCATS-MEL-Rep2-Lane1.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-076A-Breen-NCATS-MEL-Rep3-Lane1.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-079A-Breen-NCATS-MEL-Rep2-Lane2.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-079A-Breen-NCATS-MEL-Rep2-Lane3.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-079A-Breen-NCATS-MEL-Rep3-Lane2.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_GSL-079A-Breen-NCATS-MEL-Rep3-Lane3.tar-file_neo4j.txt",
            "data/Dataset/NCATS-COP01_cohort_file.txt",
            "data/Dataset/NCATS-COP01_path_report_file_neo4j.txt",
            "data/Dataset/NCATS-COP01_study_file.txt"
        ]
Example #12
try:
    from urllib.parse import urlparse
except ImportError:
    # py2
    from urlparse import urlparse

from api import models
from api.models import Url, Host, Purge
from common import utils

app = Flask(__name__)
app.config.from_object('settings')

db = SQLAlchemy(app)

logger = utils.get_logger(__name__)

def add_host(hostname):
    ''' Add host to DB '''
    host = Host(hostname=hostname)
    db.session.add(host)

    try:
        db.session.commit()
    except sqlalchemy.exc.IntegrityError:
        return 'Duplicated host', 500

    return '', 201
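add_host returns Flask-style (body, status) pairs, so the duplicate branch can be exercised directly; a sketch assuming the tables exist and the models behave as imported above:

    with app.app_context():
        body, status = add_host('cache-01.example.org')  # first insert: 201
        body, status = add_host('cache-01.example.org')  # IntegrityError path: 500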

@app.route('/healthcheck', methods=['GET'])
def healthcheck():
Example #13
File: job.py Project: t00m/Vazaar
 def __init__(self, lvres):
     # Accepts a list of resources and builds a job
     self.log = get_logger("Job")
     self.lvres = [] # a list of resources
     self.dstart = datetime.datetime.now()
     self.lvres += lvres
Example #14
 def __init__(self):
     self.log = get_logger("Base")
     self.timer = Timer(self.log)
Example #15
 def __init__(self, logger=None):
     if logger is None:
         self.log = get_logger(NAME)
     else:
         self.log = logger
Example #16
    def __init__(
        self,
        metric_with_modes,
        dataset_name,
        directory,
        logger=None,
        epsilon=0.00005,
        score_file="scores.tsv",
        metric_best: Dict = None,  # avoid a shared mutable default
    ):
        """Keep best model's checkpoint by each datasets & metrics

        Args:
            metric_with_modes: Dict, metric_name: mode
                if mode is 'min', then it means that minimum value is best, for example loss(MSE, MAE)
                if mode is 'max', then it means that maximum value is best, for example Accuracy, Precision, Recall
            dataset_name: str, dataset name on which metric be will be calculated
            directory: directory path for saving best model
            epsilon: float, threshold for measuring the new optimum, to only focus on significant changes.
                Because sometimes early-stopping gives better generalization results
        """
        if logger is not None:
            self.log = logger
        else:
            self.log = get_logger("BestKeeper")

        self.score_file = score_file
        self.metric_best = metric_best if metric_best is not None else {}

        self.log.info(
            colored(
                f"Initialize BestKeeper: Monitor {dataset_name} & Save to {directory}",
                "yellow",
                attrs=["underline"]))
        self.log.info(f"{metric_with_modes}")

        self.x_better_than_y = {}
        self.directory = Path(directory)
        self.output_temp_dir = self.directory / f"{dataset_name}_best_keeper_temp"

        for metric_name, mode in metric_with_modes.items():
            if mode == "min":
                self.metric_best[
                    metric_name] = self.load_metric_from_scores_tsv(
                        directory / dataset_name / metric_name / score_file,
                        metric_name,
                        np.inf,
                    )
                self.x_better_than_y[metric_name] = lambda x, y: np.less(
                    x, y - epsilon)
            elif mode == "max":
                self.metric_best[
                    metric_name] = self.load_metric_from_scores_tsv(
                        directory / dataset_name / metric_name / score_file,
                        metric_name,
                        -np.inf,
                    )
                self.x_better_than_y[metric_name] = lambda x, y: np.greater(
                    x, y + epsilon)
            else:
                raise ValueError(f"Unsupported mode : {mode}")
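The epsilon guard means a candidate score only counts as a new best when it beats the incumbent by more than the threshold, filtering out noise-level fluctuations. A small worked check of the two comparators defined above:

    import numpy as np

    epsilon = 0.00005
    min_better = lambda x, y: np.less(x, y - epsilon)     # mode == "min"
    max_better = lambda x, y: np.greater(x, y + epsilon)  # mode == "max"

    print(min_better(0.10000, 0.10003))  # False: improved by only 0.00003 < epsilon
    print(min_better(0.09990, 0.10000))  # True: loss dropped by 0.00010 > epsilon
    print(max_better(0.90004, 0.90000))  # False: gained only 0.00004 < epsilon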
Example #17
def run(spark):
    """Combines GTFS schedule feed with vehicle positions Parquet files
  and updates the VPDelays and HlyDelays tables

  Args:
    spark: Spark Session object
  """

    log = utils.get_logger()

    with DBConnCommonQueries() as conn:
        dbtables.create_if_not_exists(conn, dbtables.HlyDelays)

    feedDescs = GTFSFetcher.read_feed_descs()
    curFeedDesc = None
    dfStopTimes = None
    feedRequiredFiles = ["stops.txt", "stop_times.txt", "trips.txt"]

    gtfsFetcher = GTFSFetcher(spark)
    with DBConn() as conn:
        entriesToProcess = dbtables.PqDates \
          .select_pqdates_not_in_delays(conn, 'NOT IsInHlyDelaysS3')
    for targetDate in entriesToProcess:
        if dfStopTimes is None or not curFeedDesc.includes_date(targetDate):
            curFeedDesc = None
            dfStopTimes = None
            for fd in feedDescs:
                if fd.includes_date(targetDate) and fd.includes_files(
                        feedRequiredFiles):
                    curFeedDesc = fd
                    dfStopTimes = gtfsFetcher.read_stop_times(curFeedDesc)
                    log.info('USING FEED "%s" for %s', curFeedDesc.version,
                             targetDate.strftime("%Y-%m-%d"))
                    break
        else:
            log.info('RE-USING FEED "%s" for %s', curFeedDesc.version,
                     targetDate.strftime("%Y-%m-%d"))

        if dfStopTimes is not None:
            dfVehPos = read_vp_parquet(spark, targetDate)

            calcVPDelays = \
              VPDelaysCalculator(spark, targetDate, dfStopTimes, dfVehPos)
            dfVPDelays = calcVPDelays.create_result_df()

            cols_order = [
                'RouteId', 'StopName', 'DateEST', 'HourEST', 'AvgDelay',
                'AvgDist', 'Cnt'
            ]
            calcHlyDelays = HlyDelaysCalculator(spark, dfVPDelays)
            dfHlyDelays = calcHlyDelays.create_result_df().persist()
            dfGrpRoutes = calcHlyDelays.group_routes(dfHlyDelays) \
              .withColumn('StopName', F.lit('ALLSTOPS'))
            dfGrpStops = calcHlyDelays.group_stops(dfHlyDelays) \
              .withColumn('RouteId', F.lit('ALLROUTES'))
            dfGrpAll = calcHlyDelays.group_all(dfHlyDelays) \
              .withColumn('RouteId', F.lit('ALLROUTES')) \
              .withColumn('StopName', F.lit('ALLSTOPS'))
            dfHlyDelaysBus = dfHlyDelays.filter(
                dfHlyDelays.RouteId.rlike("^[0-9]"))
            dfHlyDelaysTrain = dfHlyDelays.filter(
                ~dfHlyDelays.RouteId.rlike("^[0-9]"))
            dfGrpStopsBus = calcHlyDelays.group_stops(dfHlyDelaysBus) \
              .withColumn('RouteId', F.lit('ALLBUSES'))
            dfGrpAllBus = calcHlyDelays.group_all(dfHlyDelaysBus) \
              .withColumn('RouteId', F.lit('ALLBUSES')) \
              .withColumn('StopName', F.lit('ALLSTOPS'))
            dfGrpStopsTrain = calcHlyDelays.group_stops(dfHlyDelaysTrain) \
              .withColumn('RouteId', F.lit('ALLTRAINS'))
            dfGrpAllTrain = calcHlyDelays.group_all(dfHlyDelaysTrain) \
              .withColumn('RouteId', F.lit('ALLTRAINS')) \
              .withColumn('StopName', F.lit('ALLSTOPS'))

            dfAllHly = dfHlyDelays[cols_order] \
              .union(dfGrpRoutes[cols_order]) \
              .union(dfGrpStops[cols_order]) \
              .union(dfGrpAll[cols_order]) \
              .union(dfGrpStopsBus[cols_order]) \
              .union(dfGrpAllBus[cols_order]) \
              .union(dfGrpStopsTrain[cols_order]) \
              .union(dfGrpAllTrain[cols_order])

            with DBConnCommonQueries() as conn:
                dbtables.create_if_not_exists(conn, dbtables.RouteStops)
                data = dfAllHly[['RouteId', 'StopName']] \
                  .distinct() \
                  .collect()
                dbtables.RouteStops.insert_values(conn, data)
                conn.commit()

            calcHlyDelays.update_s3(dfAllHly, targetDate)

            with DBConn() as conn:
                dbtables.PqDates.update_in_delays(conn, targetDate,
                                                  "IsInHlyDelaysS3")
                conn.commit()
Example #18
File: conn.py Project: t00m/Vazaar
 def __init__(self, app):
     self.app = app
     self.loglevel = self.app.get_logging_level()
     self.log = get_logger('Conn', self.loglevel)
     self.log.debug( _("ConnManager\tinitialized") )
Example #19
 def __init__(self, app):
     self.log = get_logger('Metadata')
     self.xtract = extractor.Extractor()
     self.app = app
     self.magic = magic.open(magic.MAGIC_MIME_TYPE)
     self.magic.load()
Example #20
async def create_app() -> PodcastWebApp:
    """ Prepare application """
    redis_pool = await aioredis.create_pool(settings.REDIS_CON)
    session_engine = (SimpleCookieStorage() if settings.TEST_MODE
                      else RedisStorage(redis_pool))
    middlewares = [
        session_middleware(session_engine),
        request_user_middleware,
        aiohttp_i18n.babel_middleware(),
    ]

    if settings.DEBUG:
        middlewares.append(aiohttp_debugtoolbar.middleware)

    app = PodcastWebApp(middlewares=middlewares,
                        logger=logger,
                        debug=settings.DEBUG)
    app.redis_pool = redis_pool
    app.gettext_translation = app_i18n.aiohttp_translations
    app.on_shutdown.append(shutdown_app)

    # db conn
    app.database = database_init(database)
    app.database.set_allow_sync(False)
    app.objects = peewee_async.Manager(app.database)

    app["static_root_url"] = settings.STATIC_URL
    jinja_env = aiohttp_jinja2.setup(
        app,
        loader=jinja2.FileSystemLoader(settings.TEMPLATE_PATH),
        context_processors=[
            aiohttp_jinja2.request_processor,
            context_processors.podcast_items,
            context_processors.mobile_app_web_view,
        ],
        filters={
            "datetime_format": jinja_filters.datetime_format,
            "human_length": jinja_filters.human_length,
        },
        extensions=["jinja2.ext.i18n"],
    )
    jinja_env.globals.update(tags)
    jinja_env.install_gettext_translations(app.gettext_translation)
    if settings.DEBUG:
        aiohttp_debugtoolbar.setup(app, intercept_redirects=False)

    # make routes
    from urls import urls as app_routes

    for route in app_routes:
        app.router.add_route(**route.as_dict)

    app.router.add_static("/static", settings.STATIC_PATH, name="static")

    app.logger = get_logger()
    app.rq_queue = rq.Queue(
        name="youtube_downloads",
        connection=Redis(*settings.REDIS_CON),
        default_timeout=settings.RQ_DEFAULT_TIMEOUT,
    )

    if settings.SENTRY_DSN:
        sentry_logging = LoggingIntegration(level=logging.INFO,
                                            event_level=logging.ERROR)
        sentry_sdk.init(settings.SENTRY_DSN,
                        integrations=[AioHttpIntegration(), sentry_logging])

    return app
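create_app is an async factory, and aiohttp's runner accepts a coroutine that resolves to an Application, so serving it needs no manual event-loop code. A usage sketch (host and port are arbitrary choices here):

    from aiohttp import web

    if __name__ == '__main__':
        # web.run_app accepts a coroutine returning an Application.
        web.run_app(create_app(), host='0.0.0.0', port=8080)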
Example #21
# coding:utf-8
#

from django.contrib.auth import get_user_model
from django.conf import settings

from . import client
from common.utils import get_logger
from authentication.openid.models import OIDT_ACCESS_TOKEN

UserModel = get_user_model()

logger = get_logger(__file__)

BACKEND_OPENID_AUTH_CODE = \
    'authentication.openid.backends.OpenIDAuthorizationCodeBackend'


class BaseOpenIDAuthorizationBackend(object):

    @staticmethod
    def user_can_authenticate(user):
        """
        Reject users with is_active=False. Custom user models that don't have
        that attribute are allowed.
        """
        is_active = getattr(user, 'is_active', None)
        return is_active or is_active is None

    def get_user(self, user_id):
        try:
Example #22
from aiohttp_session import session_middleware, SimpleCookieStorage
from aiohttp_session.redis_storage import RedisStorage
from redis import Redis
from sentry_sdk.integrations.aiohttp import AioHttpIntegration
from sentry_sdk.integrations.logging import LoggingIntegration

import settings
import app_i18n
from common import context_processors
from common import jinja_filters
from common.middlewares import request_user_middleware
from common.jinja_template_tags import tags
from common.models import database
from common.utils import get_logger, database_init

logger = get_logger()
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())


class PodcastWebApp(web.Application):
    """ Extended web Application for podcast-specific logic """

    rq_queue: rq.Queue = None
    objects: peewee_async.Manager = None
    redis_pool: aioredis.ConnectionsPool = None
    gettext_translation: app_i18n.AioHttpGettextTranslations = None


async def shutdown_app(app):
    """ Safe close server """
    app.redis_pool.close()
Example #23
 def __init__(self, app):
     self.app = app
     self.loglevel = self.app.get_logging_level()
     self.log = get_logger('Provider', self.loglevel)
     self.log.debug( _("DataProvider\tinitialized") )
     self.running = False # Data Provider status
Example #24
 def __init__(self, gui):
     """Resource Factory creates new VirtualResources and transport
     them to In queue as a job"""
     self.log = get_logger('Factory')
     self.gui = gui
     self.log.info("ResourceFactory initialized")
Example #25
from flask import Flask, request, render_template

from common import utils
from common.exceptions import ShotgunBatchError, ShotgunUploadError
import version_copy

LOGGER = utils.get_logger("version_copy_app")
app = Flask(__name__)


@app.route("/version/copy", methods=["GET", "POST"])
def version_copy_app():
    """ Shotgun Version Copy AMI main entrypoint. Renders version copy html template """
    # Getting logger
    LOGGER.info("Started version copy app.")

    # getting post data
    post_data = request.form

    # Checking version ids. Tool only supports copying a single version at a time.
    version_ids = post_data["selected_ids"].split(",")
    if len(version_ids) > 1:
        LOGGER.info("More than one ID selected. Exiting.")
        return "<strong>Version copy only supports one version at a time. Please select a single version and try again."

    # Getting shotgun version dict.
    sg = utils.get_sg_connection()
    sg_version = utils.get_version_from_id(sg, int(version_ids[0]))

    # Formatting data to send to template
    copy_data = version_copy.format_version_display_info(post_data, sg_version)
Example #26
#!/usr/bin/env python3
# encoding: utf-8

from dht import DHTServer
from common.database import RedisClients
from common.utils import get_logger
from config import Config

logger = get_logger("logger_dht_main")

if __name__ == "__main__":
    logger.info("dht running successful ! >>>> {0}:{1}".format(Config.BIND_IP,Config.BIND_PORT))
    dht = DHTServer()
    dht.start()
    dht.auto_send_find_node()
Example #27
 def __init__(self):
     self.log = get_logger(__name__)
Example #28
File: vdbus.py Project: t00m/Vazaar
 def __init__(self):
     """Initalize VazaarDBus class"""
     self.loglevel = 'ERROR'
     self.log = get_logger('D-Bus', self.loglevel)
Example #29
    def __init__(self,
                 input_src,
                 batch_size,
                 args,
                 pretrain,
                 vocab=None,
                 evaluation=False):
        """
            e.g.:
            train_batch = DataLoader(args['train_file'], args['batch_size'], args, pretrain, evaluation=False)
        :param input_src:
        :param batch_size:
        :param args:
        :param pretrain:
        :param vocab:
        :param evaluation:
        """
        self.batch_size = batch_size
        self.args = args
        self.eval = evaluation
        self.shuffled = not self.eval
        self.logger = get_logger(args['logger_name'])

        # check if input source is a file or a Document object
        if isinstance(input_src, str):
            filename = input_src
            assert filename.endswith(
                'conllu'), "Loaded file must be conllu file."
            # Load all sentences: ['word', 'upos', 'deps']
            # 4	总统	总统	NN	NN	_	6	Agt	6:Agt|12:Agt	_
            # [总统, NN, 6:Agt|12:Agt]
            self.conll, data = self.load_file(filename, evaluation=self.eval)
            # data= [
            #             [ #sent1
            #                   [word1,pos1,deps1],
            #                   [word2,pos2,deps2],
            #             ],
            #             [ #sent2
            #             ]
            #       ]
        elif isinstance(input_src, Document):
            filename = None
            doc = input_src
            self.conll, data = self.load_doc(doc)

        # handle vocab
        if vocab is None:
            self.vocab = self.init_vocab(data)
        else:
            self.vocab = vocab
        # token2id:
        self.pretrain_vocab = pretrain.vocab

        # filter and sample data
        if args.get('sample_train', 1.0) < 1.0 and not self.eval:
            keep = int(args['sample_train'] * len(data))
            data = random.sample(data, keep)
            self.logger.info("Subsample training set with rate {}".format(
                args['sample_train']))

        data = self.preprocess(data, self.vocab, self.pretrain_vocab, args)
        # data=[
        #        [ # sent1
        #           word_list,
        #           char_list,
        #           pos_list,
        #           pre-train_list,
        #           graph_list
        #        ]
        #      ]

        if self.shuffled:
            random.shuffle(data)
        self.num_examples = len(data)

        # chunk into batches
        # Sort by sentence length first, then split into batches
        self.data = self.chunk_batches(data)
        if filename is not None:
            self.logger.info("{} batches created for {}.".format(
                len(self.data), filename))
Example #30
File: user.py Project: jcops/diting
from django.contrib import messages
from django.shortcuts import render, HttpResponse


__all__ = [
    'UserListView', 'UserCreateView', 'UserDetailView',
    'UserUpdateView',
    'UserGrantedAssetView',
    'UserExportView',  'UserBulkImportView', 'UserProfileView',
    'UserProfileUpdateView', 'UserPasswordUpdateView',
    'UserPublicKeyUpdateView', 'UserBulkUpdateView',
    'UserPublicKeyGenerateView', 'LDAPUserListView',
    'LdapUserDetailView', 'LdapUserUpdateView',
]

logger = get_logger(__name__)


class LdapUserUpdateView(AdminUserRequiredMixin, SuccessMessageMixin, FormView):
    form_class = forms.LDAPUserUpdateForm
    template_name = 'users/ldap_user_update.html'
    success_url = reverse_lazy('users:ldap-user-list')
    success_message = update_success_msg

    def get(self, request, *args, **kwargs):
        # Get the pk parameter passed by the client
        pk = kwargs.get('pk')
        if settings.AUTH_LDAP:
            try:
                ldap_tool = LDAPTool()
                status = ldap_tool.ldap_get_user(pk, isdict=True)
Example #31
 def __init__(self, args, dataset=None):
     self.log = utils.get_logger("MattingNetModel")
     self.args = args
     self.dataset = dataset  # used to access data created in DataWrapper