Example #1
def login(server: str, username: str, password: str):
    url = server + "?method=EduLink.Login"
    payload = {
        "id": "1",
        "jsonrpc": "2.0",
        "method": "EduLink.Login",
        "params": {
            "establishment_id": "2",
            "fcm_token_old": "none",
            "from_app": False,
            "password": password,
            "ui_info": {
                "format": "2",
                "git_sha": utils.uuid(
                ),  # In official requests this is a SHA-1 however in practice it does not matter.
                "version": "0.5.181"
            },
            "username": username
        },
        "uuid": utils.uuid()
    }
    headers = {
        "Content-Type": "application/json;charset=UTF-8",
        "X-API-Method": "EduLink.Login"
    }

    response = requests.post(url, json=payload, headers=headers)
    content = response.json()

    if content["result"]["success"] is True:
        content = content["result"]  # To decrease code verbosity.

        authtoken = content["authtoken"]

        school_name = content["establishment"]["name"]
        school_logo_base64 = content["establishment"]["logo"]

        pupil_forename = content["user"]["forename"]
        pupil_surname = content["user"]["surname"]
        pupil_username = content["user"]["username"]

        avatar_base64 = content["user"]["avatar"]["photo"]
        avatar_width = content["user"]["avatar"]["width"]
        avatar_height = content["user"]["avatar"]["height"]

        groups_year_id = content["user"]["year_group_id"]
        groups_community_id = content["user"]["community_group_id"]
        groups_form_id = content["user"]["form_group_id"]

        user = Profile(authtoken, school_name, school_logo_base64,
                       pupil_forename, pupil_surname, pupil_username,
                       avatar_base64, avatar_width, avatar_height,
                       groups_year_id, groups_community_id, groups_form_id)

        return user

    else:
        msg = content["result"]["error"]
        raise errors.SchoolDetailsError(msg)
Example #2
 def __init__(self, name, title, files, stype, cfg, trees,
              cuts = DEFAULT_TRUE, weights = '1.0', scale = 1.0, lumi = 1.0,
              channel = '', roottree = 'HWWTree'):
     assert(type(name) is str)
     assert(type(title) is str)
     assert(type(cuts) is str)
     assert(type(weights) is str)
     assert(type(scale) is float)
     self.__root = roottree # << the root tree (all other trees will be friends)
     self.__trees = None # << list of TChain's controlled by this sample 
     self.__tree_names = utils.uniqify(utils.listify(trees) + [self.__root])
     self.__name = name # << name of the sample 
     self.__title = title # << title (used in legends, &c.)
     self.__cuts = cuts.replace(' ','') # << cuts that define sample selection
     self.__cuts_base = self.__cuts
     self.__applied_cuts = None
     self.__weights = weights.replace(' ','') # << event weights 
     self.__weights_base = self.__weights
     self.__scale = scale # << product of scale factors
     self.__counters = {} # << counters maintained by this sample
     self.__histograms = {} # << histograms produced by this sample
     self.__internals = [] # << other internal objects to keep 
     self.__locked = False # << whether the sample is mutable (ie., can produce histograms)
     self.__selections = {} # << named selections of this sample
     self.__uid = utils.uuid()
     samplebase.__init__(self, files, lumi, stype, cfg, channel, title)
Example #3
def launch_job(worker, epoch, partitions, ckpt_path, data_partition_names,
               input_fn_string, model_fn_string, train_fn_string,
               exec_fn_string, mst, train):
    """

    :param worker:
    :param epoch:
    :param partitions:
    :param ckpt_path:
    :param data_partition_names:
    :param input_fn_string:
    :param model_fn_string:
    :param train_fn_string:
    :param exec_fn_string:
    :param mst:
    :param train:
    :return:
    """
    exec_id = uuid()
    params = [
        epoch, partitions, ckpt_path,
        [data_partition_name for data_partition_name in data_partition_names],
        input_fn_string, model_fn_string, train_fn_string, mst, train
    ]

    result = worker.execute(exec_id, exec_fn_string, params)
    status = dill.loads(base64.b64decode(result.data))
    if status != "LAUNCHED":
        raise Exception("Remote job launch failed. Reason: " + status)

    return exec_id
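
launch_job only confirms that the remote call was accepted; the caller still has to poll the worker for completion. A minimal polling helper, a sketch only, assuming the status protocol visible in Examples #5 and #25 (workers return base64-encoded, dill-pickled dicts with a "status" key) and a worker proxy exposing status(exec_id):

import base64
import time

import dill


def wait_for_job(worker, exec_id, poll_interval=1):
    """Poll a worker until the job identified by exec_id finishes.

    Sketch under the assumption that worker.status(exec_id).data holds a
    base64-encoded, dill-pickled dict with a "status" key, as in the other
    examples; the real scheduler may handle failures differently.
    """
    while True:
        status = dill.loads(base64.b64decode(worker.status(exec_id).data))
        if status["status"] == "FAILED":
            raise Exception("Remote job execution failed")
        if status["status"] == "INVALID ID":
            raise Exception("Invalid Id")
        if status["status"] == "COMPLETED":
            return status
        time.sleep(poll_interval)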
Example #4
 def get_range(self, expression, quantile=1.0):
     '''
     Calculate the range spanned by a given expression (any valid formula)
     that contains at least quantile fraction of events
     '''
     tform = ROOT.TTreeFormula(utils.uuid(), expression, self.get_tree())
     tform.SetQuickLoad(True)
     xmin =  float('inf')
     xmax = -float('inf')
     itree = self.get_tree().GetTreeNumber()
     xvals = []
     for ientry in xrange(self.get_tree().GetEntries()):
         if self.__trees[self.__root].GetEntryList() != None:
             if not self.__trees[self.__root].GetEntryList().Contains(ientry):
                 continue
         self.get_tree().LoadTree(ientry)
         if itree != self.get_tree().GetTreeNumber():
             itree = self.get_tree().GetTreeNumber()
             tform.UpdateFormulaLeaves()
         xvals.append(tform.EvalInstance(0))
         if xvals[-1] < xmin: xmin = xvals[-1]
         if xvals[-1] > xmax: xmax = xvals[-1]
     if quantile < 1.0:
         xvals = sorted(xvals)
         index = int((len(xvals) * (1 - quantile)) / 2)
         return (xvals[index], xvals[-index - 1])  # trim the same number of entries from each end
     else:
         return (xmin, xmax)          
Example #5
def preload_data(workers, input_fn_string, preload_fn_string, train_partitions,
                 valid_partitions, train_availability, valid_availability,
                 scheduler_log_file, begin_time):
    """

    :param workers:
    :param input_fn_string:
    :param preload_fn_string:
    :param train_partitions:
    :param valid_partitions:
    :param train_availability:
    :param valid_availability:
    :param scheduler_log_file:
    :param begin_time:
    """
    for i, worker in workers.items():
        worker.initialize_worker()

    exec_ids = []
    for worker_id, worker in workers.items():

        data_partitions = []
        for availability, partitions in zip(
            [train_availability, valid_availability],
            [train_partitions, valid_partitions]):
            for i, available in enumerate(availability[worker_id]):
                if available:
                    data_partitions.append((partitions[i]))

        exec_id = uuid()
        params = [input_fn_string, data_partitions]

        result = worker.execute(exec_id, preload_fn_string, params)
        status = dill.loads(base64.b64decode(result.data))

        if status != "LAUNCHED":
            raise Exception("Remote job launch failed. Reason: " + status)

        exec_ids.append((exec_id, worker_id))

    # wait for everything to finish
    while len(exec_ids) > 0:
        for exec_id, worker_id in exec_ids:
            worker = workers[worker_id]
            status = dill.loads(base64.b64decode(worker.status(exec_id).data))

            if status["status"] == "FAILED":
                print(status)
                raise Exception("Remote job execution failed")
            elif status["status"] == "INVALID ID":
                raise Exception("Invalid Id")
            elif status["status"] == "COMPLETED":
                exec_ids.remove((exec_id, worker_id))
                message = "TIME: %d, EVENT: PRELOAD_COMPLETED, WORKER: %d\n" % (
                    time.time() - begin_time, worker_id)
                scheduler_log_file.write(message)
                print(message[:-1])
                scheduler_log_file.flush()
        time.sleep(1)
Example #6
    def __init__(self, **kwargs):
        """ Override default __init__, if the mapper has an id
            column and it isn't set, set it to a new uuid."""
        for k, v in kwargs.items():
            setattr(self, k, v)

        if hasattr(self, 'id') and not self.id:
            self.id = uuid()
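
This __init__ override only needs a uuid() helper that is importable at runtime. A self-contained sketch of the same pattern (the Record class and the uuid() stand-in below are illustrative, not the original project's mapper):

import uuid as _uuid


def uuid():
    # stand-in for the project's uuid() helper: a random 32-char hex string
    return _uuid.uuid4().hex


class Record(object):
    id = None  # plays the role of the mapper's id column

    def __init__(self, **kwargs):
        # same pattern as above: copy keyword arguments onto the instance,
        # then generate an id only if none was supplied
        for k, v in kwargs.items():
            setattr(self, k, v)
        if hasattr(self, 'id') and not self.id:
            self.id = uuid()


r = Record(name='example')
print(r.id)  # a freshly generated hex id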
Example #7
def _get_user_profile(user):
    try:
        profile = user.get_profile()
    except UserInfo.DoesNotExist:
        return UserInfo.objects.create(
            user=user,
            public_key=utils.uuid(), # FIXME: Temporarily using UUID
            location='',
            comment='')
    else:
        return profile
Example #8
    def pp(self, filename):
        cw = self.cw
        print_ = get_print(cw)
        ui_setting = utils.ui_setting
        ext = os.path.splitext(filename)[1].lower()
        if not os.path.isfile(filename):
            print('no file: {}'.format(filename))
            return
        
        filename_new = None
        if self.type == 'video' and (self.audio is not None or ext != '.mp4') and not self.stream.live: # UHD or non-mp4
            if self.audio is not None: # merge
                print_('Download audio: {}'.format(self.audio))
                hash = uuid()
                path = os.path.join(os.path.dirname(filename), '{}_a.tmp'.format(hash))
                if cw is not None:
                    cw.trash_can.append(path)
                if constants.FAST:
                    downloader_v3.download(self.audio, chunk=1024*1024, n_threads=2, outdir=os.path.dirname(path), fileName=os.path.basename(path), customWidget=cw, overwrite=True)
                else:
                    downloader.download(self.audio, outdir=os.path.dirname(path), fileName=os.path.basename(path), customWidget=cw, overwrite=True)
                ext, out = ffmpeg.merge(filename, path, cw=cw, vcodec=self.vcodec)
                #print(out)
                name, ext_old = os.path.splitext(filename)
                if ext_old.lower() != ext.lower():
                    print_('rename ext {} --> {}'.format(ext_old, ext))
                    filename_new = '{}{}'.format(name, ext)
                    if os.path.isfile(filename_new):
                        os.remove(filename_new)
                    os.rename(filename, filename_new)
            else: # convert non-mp4 video -> mp4
                name, ext_old = os.path.splitext(filename)
                filename_new = '{}.mp4'.format(name)
                print_('Convert video: {} -> {}'.format(filename, filename_new))
                ffmpeg.convert(filename, filename_new, cw=cw)
        elif self.type == 'audio' and ext != '.mp3': # convert non-mp3 audio -> mp3
            name, ext_old = os.path.splitext(filename)
            filename_new = '{}.mp3'.format(name)
            ffmpeg.convert(filename, filename_new, '-shortest -preset ultrafast -b:a {}k'.format(get_abr()), cw=cw)

        if self.type == 'audio' and ui_setting.albumArt.isChecked():
            try:
                self.thumb.seek(0)#
                ffmpeg.add_cover(filename_new, self.thumb, {'artist':self.username, 'title':self.title}, cw=cw)
            except Exception as e:
                s = print_error(e)[-1]
                print_(s)

        utils.pp_subtitle(self, filename, cw)

        return filename_new
Example #9
    def issueToken(self, login, max_seats, request, response):
        """ Creates a uid and stores in a cookie browser-side
        """
        # When no cookie is present, we generate one, store it and
        # set it in the response:

        alsoProvides(request, IDisableCSRFProtection)

        cookie_val = uuid()

        if self.DEBUG:
            print "issueToken::" + cookie_val

        self.setCookie(cookie_val)
Example #10
    def issueToken(self, login, max_seats, request, response):
        """ Creates a uid and stores in a cookie browser-side
        """
        # When no cookie is present, we generate one, store it and
        # set it in the response:

        alsoProvides(request, IDisableCSRFProtection)

        cookie_val = uuid()

        if self.DEBUG:
            print "issueToken::" + cookie_val

        self.setCookie(cookie_val)
Example #11
def _send_message_real(cur_user, server_port, reply_for_uuid, content, recipients):
    uuid = utils.uuid()
    print str(type(uuid))
    is_public = (recipients is None)
    sent_message = cur_user.sentmessage_set.create(
        uuid=uuid,
        content_type='placeholder',
        content=content,
        time_sent=datetime.utcnow(),
        reply_for='',
        is_public=is_public)
    
    if not is_public:
        server_map = {}
        
        # Each recipient is in server:port/username format.
        for recipient in recipients:
            print 'Recipient: ' + recipient
            (server, username) = recipient.split('/')
            if not server_map.has_key(server):
                server_map[server] = []
            server_map[server].append(username)
            
        for server in server_map.keys():
            recip_list = ';'.join(server_map[server])
            http = httplib2.Http()
            url = 'http://' + server + '/post-office/';
            
            sender_uri = 'http://' + server_port + \
                    '/' + cur_user.username
            print 'Send to server %s:' % server
            print ' UUID: ' + uuid
            print ' Sender URI: ' + sender_uri
            print ' Recipients: ' + recip_list
            print ' Content: "%s"' % content
            
            headers = {
                'Content-type': 'application/xml',
                'X-Jelato-UUID': uuid,
                'X-Jelato-Sender': sender_uri,
                'X-Jelato-Reply-For': reply_for_uuid,
                'X-Jelato-Recipients': recip_list
            }
            (response, content) = http.request(
                url,
                'POST',
                content,
                headers=headers)
Example #12
 def get_profile(self, hname, axis='x'):
     '''
     Return profile of 2D histogram
     '''
     assert self.__histograms[hname].Class().InheritsFrom('TH2')
     prof = getattr(self.__histograms[hname], 'Profile%s'%(axis.upper()))(utils.uuid())
     self.__histograms[hname+'_prof'] = prof
     if self.get_stype()!='bkg':
         prof.SetMarkerColor(self.__style_def['color'])
         prof.SetLineColor(self.__style_def['color'])
     if self.get_stype()=='bkg':
         prof.SetLineColor(self.__style_def['fillcolor'])
         prof.SetMarkerColor(self.__style_def['fillcolor'])
     prof.GetYaxis().SetTitle(self.__histograms[hname].ytitle)
     prof.GetYaxis().SetTitleOffset(1.25)
     prof.GetXaxis().SetTitle(self.__histograms[hname].xtitle)
     return prof
Example #13
    def get_combined(self, name, dtype=None):
        ''' Retrieve set of histograms / counters by name (and optionally by data type) from all samples in collection
            and combine into a single object.

            Returns combined object.
        '''
        objs = []
        for sample in self.__samples.values():
            if dtype != None and dtype in samplebase.alltypes:
                if dtype != sample.get_stype():
                    continue
            try:
                objs += [ sample[name] ]
            except:
                self.log.warning('object [%s] not found in [%s]'%(name, sample.get_name()))
        h = objs[0](utils.uuid())
        return sum(objs[1:], h)        
Example #14
 def select(self, cuts=None, ignore_base=False, update=False, elist=None, name=None):
     ''' Apply selection to root TTree such that only selected events are used thereafter
         Note: if "cuts" argument is not set, use the base cuts for this sample; if "ignore_base"
               is set, then ignore the sample base cuts and use "cuts" argument only
               (otherwise logical && of sample cuts and "cuts" argument is implied)
               If update flag is set, then append cuts to those already applied (if any), in order to chain selections. '''
     def parenthesis(x):
         return '(' + str(x) + ')'
     if self.__locked:
         return -1
     if elist != None:
         self.__trees[self.__root].SetEntryList(elist)
         if cuts == None: # << no further selection to add
             if name != None:
                 self[name] = elist
             return elist.GetN()
     thecuts = ''
     if cuts == None:
         if ignore_base:
             raise ValueError
         thecuts = self.__cuts_base
         self.__cuts = DEFAULT_TRUE # NB: unset cuts because entry list will be used (avoid unnecessary CPU cycles)
                                    #     (since __cuts attribute is used by default in histogramming)
     else:
         if ignore_base: thecuts = cuts
         else:           thecuts = '*'.join([parenthesis(self.__cuts_base), parenthesis(cuts)])
         self.__cuts = DEFAULT_TRUE # NB: unset cuts because entry list will be used (avoid unnecessary CPU cycles)
     nbase = self.__trees[self.__root].GetEntries() 
     if update and self.__trees[self.__root].GetEntryList() != None:
         nbase = self.__trees[self.__root].GetEntryList().GetN()    
         nselected = ROOT.filters.chain(self.__trees[self.__root], thecuts, True, self.__trees[self.__root].GetEntryList())
     else:
         self.__trees[self.__root].SetEntryList(None)
         key = utils.uuid(l=5)
         nselected = self.__trees[self.__root].Draw('>>%s'%(self.__obj_name('entries_'+key)), thecuts, 'entrylist')
         elist = ROOT.gROOT.FindObject(self.__obj_name('entries_'+key))
         if elist != None:
             self.__trees[self.__root].SetEntryList(elist)
         else:
             self.__trees[self.__root].SetEntryList(None)
     self.log.debug('applying selection [%s], keeping %d of %d entries'%(thecuts[0:145] + '...', nselected, nbase))
     self.__applied_cuts = thecuts
     if name != None:
         self[name] = self.__trees[self.__root].GetEntryList()
     return self.__trees[self.__root].GetEntryList().GetN()
Example #15
def get_imgs_page(page, session, cw=None, try_=1):
    print('##### get_imgs_page', try_)
    print_ = get_print(cw)
    if cw is not None and not cw.alive:
        return

    if True:
        try:
            imgs = get_imgs_page_legacy(page, session, cw)
            if imgs == 'seed':
                raise SeedError()
            return imgs
        except Exception as e:
            if not isinstance(e, SeedError):
                raise

    jpgs = capture(page, session, cw, ignore_error=try_ > 1)
    imgs = []
    rand = Random()
    rand.seed((u'{}{}').format(page.title, clock()))
    hash = uuid()
    DIR = get_outdir('manamoa')
    for p, jpg in enumerate(jpgs):
        if isinstance(jpg, Empty):
            img = Image(jpg.url, page, p)
            imgs.append(img)
            continue

        img_tmp = os.path.join(DIR, ('tmp{}_{:04}.jpg').format(hash, p))
        if cw is not None:
            cw.trash_can.append(img_tmp)
        if not os.path.isdir(DIR):
            try:
                os.makedirs(DIR)
            except:
                pass

        with open(img_tmp, 'wb') as (f):
            f.write(jpg)
        img = Image(u'tmp://' + img_tmp, page, p)
        imgs.append(img)

    return imgs
Example #16
def server(code: str):
    url = "https://provisioning.edulinkone.com/?method=School.FromCode"
    payload = {
        "id": "1",
        "jsonrpc": "2.0",
        "method": "School.FromCode",
        "params": {
            "code": code
        },
        "uuid": utils.uuid()
    }

    response = requests.post(url, json=payload)
    content = response.json()

    if content["result"]["success"] is True:
        return content["result"]["school"]["server"]
    else:
        msg = content["result"]["error"]
        raise errors.ProvisioningError(msg)
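
Examples #1 and #16 fit together: School.FromCode resolves a school code to a server URL, and that URL is then fed to EduLink.Login. A sketch of the combined flow, using placeholder credentials (a real school code, username and password are required):

# hypothetical values for illustration only
server_url = server("DEMOCODE")
profile = login(server_url, "some.pupil", "not-a-real-password")
# profile is the Profile object built in Example #1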
Example #17
def saveplots(merge = False, outfn = 'myplots.pdf', suffix = ''):
    '''
    Save all open canvases to disk
    '''
    pdfnames = []
    epsnames = []
    for i, c in enumerate(ROOT.gROOT.GetListOfCanvases()):
        suf = suffix
        if merge:
            suf = suf + utils.uuid(8)
        c.SetName(c.GetName().replace('_Clone', '') + '_CAN')
        fname = utils.normstring(c.GetName()) + suf
        c.Print('plots/c-%s.eps'%(fname))
        c.Print('plots/c-%s.pdf'%(fname))
        pdfnames.append('plots/c-%s.pdf'%(fname))
        epsnames.append('plots/c-%s.eps'%(fname))
    if merge:
        stat, out = shell('rm -rf plots/%s'%(outfn),echo=False)
        stat, out = shell('pdfmerge %s plots/%s'%(' '.join(pdfnames), outfn), False, echo=True)
        stat, out = shell('rm -rf %s'%(' '.join(pdfnames)), echo=False)
        stat, out = shell('rm -rf %s'%(' '.join(epsnames)), echo=False)
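
Several examples call the helper with a length argument (utils.uuid(l=5) in Example #14, utils.uuid(8) above), which suggests it returns a random identifier string, optionally truncated. A minimal sketch of such a helper, assuming it simply wraps the standard library's uuid4 (the project's actual implementation may differ):

import uuid as _uuid


def uuid(l=32):
    """Return a random hex identifier truncated to l characters.

    Assumption: the real utils.uuid() may use a different alphabet or
    length, but any unique string works for the uses shown above.
    """
    return _uuid.uuid4().hex[:l]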
Example #18
    def __init__(self, host='127.0.0.1', port='8021', auth='ClueCon',
                 listener=None,
                 logger=None):

        self.host = self.server = host
        self.port = port
        self.auth = auth
        self._id = utils.uuid()
        self._orig_cmd = None
        self.log = logger or utils.get_logger(utils.pstr(self))
        # generally speaking clients should only host one call app
        self._apps = {}
        self.apps = type('apps', (), {})()
        self.apps.__dict__ = self._apps  # dot-access to apps from 'apps' attr
        self.client = self  # for app funcarg insertion

        # WARNING: order of these next steps matters!
        # create a local connection for sending commands
        self._con = Connection(self.server, self.port, self.auth)
        # if the listener is provided it is expected that the
        # user will run the set up methods (i.e. connect, start, etc..)
        self.listener = listener
Example #19
    def __init__(self, host='127.0.0.1', port='8021', auth='ClueCon',
                 listener=None,
                 logger=None):

        self.host = self.server = host
        self.port = port
        self.auth = auth
        self._id = utils.uuid()
        self._orig_cmd = None
        self.log = logger or utils.get_logger(utils.pstr(self))
        # generally speaking clients should only host one call app
        self._apps = {}
        self.apps = type('apps', (), {})()
        self.apps.__dict__ = self._apps  # dot-access to `_apps` from `apps`
        self.client = self  # for app funcarg insertion

        # WARNING: order of these next steps matters!
        # create a local connection for sending commands
        self._con = Connection(self.server, self.port, self.auth)
        # if the listener is provided it is expected that the
        # user will run the set up methods (i.e. connect, start, etc..)
        self.listener = listener
Example #20
    def __init__(self, host=HOST, port=PORT, auth=AUTH,
                 session_map=None,
                 bg_jobs=None,
                 rx_con=None,
                 call_corr_var_name='variable_call_uuid',
                 call_corr_xheader_name='originating_session_uuid',
                 autorecon=30,
                 max_limit=float('inf'),
                 # proxy_mng=None,
                 _tx_lock=None):
        '''
        Parameters
        ----------
        host : string
            Hostname or ip of the FS engine server to listen to
        port : string
            Port on which the FS server is offering an esl connection
        auth : string
            Authentication password for connecting via esl
        call_corr_var_name : string
            Name of the freeswitch variable (without the 'variable_' prefix)
            to use for associating sessions into calls (see _handle_create).
        call_corr_xheader_name : string
            Name of an Xheader which can be used to associate sessions into
            calls.
            This is useful if an intermediary device such as a B2BUA is being
            tested and is the first hop receiving requests. Note in
            order for this association mechanism to work the intermediary
            device must be configured to forward the Xheaders it receives.
            (see `self._handle_create` for more details)
        autorecon : int, bool
            Enable reconnection attempts on server disconnect. An integer
            value specifies the number of seconds to spend retrying the
            connection before bailing. A bool of 'True' will poll
            indefinitely and 'False' will not poll at all.
        '''
        self.server = host
        self.port = port
        self.auth = auth
        self._sessions = session_map or OrderedDict()
        self._bg_jobs = bg_jobs or OrderedDict()
        self._calls = OrderedDict()  # maps aleg uuids to Sessions instances
        self.hangup_causes = Counter()  # record of causes by category
        self.failed_sessions = OrderedDict()
        self._handlers = self.default_handlers  # active handler set
        self._unsub = ()
        self.consumers = {}  # callback chains, one for each event type
        self._waiters = {}  # holds events being waited on
        self._blockers = []  # holds cached events for reuse
        # store up to the last 1k of each event type
        self.events = defaultdict(functools.partial(deque, maxlen=1000))

        # constants
        self.autorecon = autorecon
        self._call_var = None
        self.call_corr_var = call_corr_var_name
        self.set_xheader_var(call_corr_xheader_name)
        self.max_limit = max_limit
        self._id = utils.uuid()

        # if a mng is provided then assume this listener will
        # be instantiated as a shared object and thus will require
        # at least one shared lock for the tx connection (since
        # it is used internally by Session instances)
        # if proxy_mng:
        # self._tx_lock = proxy_mng.Lock() if proxy_mng else None
        # self._tx_lock = _tx_lock
        self._shared = False

        # sync
        self._exit = mp.Event()  # indicate when event loop should terminate
        self._lookup_blocker = mp.Event()  # used to block the event loop temporarily
        self._lookup_blocker.set()
        self.log = utils.get_logger(utils.pstr(self))
        self._epoch = self._fs_time = 0.0

        # set up contained connections
        self._rx_con = rx_con or Connection(self.server, self.port, self.auth)
        self._tx_con = Connection(self.server, self.port, self.auth)

        # mockup thread
        self._thread = None
        self.reset()
Example #21
    def load_app(self, ns, on_value=None, **prepost_kwargs):
        """Load annotated callbacks and from a namespace and add them
        to this client's listener's callback chain.

        :param ns: A namespace-like object containing functions marked with
            @event_callback (can be a module, class or instance).
        :param str on_value: id key to be used for registering app callbacks
            with `EventListener`
        """
        listener = self.listener
        name = utils.get_name(ns)
        app = self._apps.get(name, None)
        if not app:
            # if handed a class, instantiate appropriately
            app = ns() if isinstance(ns, type) else ns
            prepost = getattr(app, 'prepost', False)
            if prepost:
                args, kwargs = utils.get_args(app.prepost)
                funcargs = tuple(weakref.proxy(getattr(self, argname))
                                 for argname in args if argname != 'self')
                ret = prepost(*funcargs, **prepost_kwargs)
                if inspect.isgenerator(ret):
                    # run init step
                    next(ret)
                    app._finalize = ret

            # assign a 'consumer id'
            cid = on_value if on_value else utils.uuid()
            self.log.info("Loading call app '{}' for listener '{}'"
                          .format(name, listener))
            icb, failed = 1, False
            # insert handlers and callbacks
            for ev_type, cb_type, obj in marks.get_callbacks(app):
                if cb_type == 'handler':
                    # TODO: similar unloading on failure here as above?
                    listener.add_handler(ev_type, obj)

                elif cb_type == 'callback':
                    # add default handler if none exists
                    if ev_type not in listener._handlers:
                        self.log.info(
                            "adding default session lookup handler for event"
                            " type '{}'".format(ev_type)
                        )
                        listener.add_handler(
                            ev_type,
                            listener.lookup_sess
                        )
                    added = listener.add_callback(ev_type, cid, obj)
                    if not added:
                        failed = obj
                        listener.remove_callbacks(cid, last=icb)
                        break
                    icb += 1
                    self.log.debug("'{}' event callback '{}' added for id '{}'"
                                   .format(ev_type, obj.__name__, cid))

            if failed:
                raise TypeError("app load failed since '{}' is not a valid"
                                "callback type".format(failed))
            # register locally
            self._apps[name] = app
            app.cid, app.name = cid, name

        return app.cid
Example #22
    def _pp(self, filename):
        cw = self.cw
        print_ = get_print(cw)
        ui_setting = utils.ui_setting
        ext = os.path.splitext(filename)[1].lower()
        if not os.path.isfile(filename):
            print(u'no file: {}'.format(filename))
            return

        filename_new = None
        if self.type == 'video' and (self.audio is not None
                                     or ext != '.mp4'):  # UHD or non-mp4
            if self.audio is not None:  # merge
                print_(u'Download audio: {}'.format(self.audio))
                hash = uuid()
                path = os.path.join(os.path.dirname(filename),
                                    '{}_a.tmp'.format(hash))
                if cw is not None:
                    cw.trash_can.append(path)
                if constants.FAST:
                    downloader_v3.download(self.audio,
                                           chunk=1024 * 1024,
                                           n_threads=2,
                                           outdir=os.path.dirname(path),
                                           fileName=os.path.basename(path),
                                           customWidget=cw,
                                           overwrite=True)
                else:
                    downloader.download(self.audio,
                                        outdir=os.path.dirname(path),
                                        fileName=os.path.basename(path),
                                        customWidget=cw,
                                        overwrite=True)
                ext, out = ffmpeg.merge(filename,
                                        path,
                                        cw=cw,
                                        vcodec=self.vcodec)
                #print(out)
                name, ext_old = os.path.splitext(filename)
                if ext_old.lower() != ext.lower():
                    print_(u'rename ext {} --> {}'.format(ext_old, ext))
                    filename_new = u'{}{}'.format(name, ext)
                    if os.path.isfile(filename_new):
                        os.remove(filename_new)
                    os.rename(filename, filename_new)
            else:  # convert non-mp4 video -> mp4
                name, ext_old = os.path.splitext(filename)
                filename_new = u'{}.mp4'.format(name)
                print_(u'Convert video: {} -> {}'.format(
                    filename, filename_new))
                ffmpeg.convert(filename, filename_new, cw=cw)
        elif self.type == 'audio' and ext != '.mp3':  # convert non-mp3 audio -> mp3
            name, ext_old = os.path.splitext(filename)
            filename_new = u'{}.mp3'.format(name)
            ffmpeg.convert(filename,
                           filename_new,
                           '-shortest -preset ultrafast -b:a {}k'.format(
                               get_abr()),
                           cw=cw)

        if self.type == 'audio' and ui_setting.albumArt.isChecked():
            try:
                self.thumb.seek(0)  #
                ffmpeg.add_cover(filename_new,
                                 self.thumb, {
                                     'artist': self.username,
                                     'title': self.title
                                 },
                                 cw=cw)
            except Exception as e:
                s = print_error(e)[-1]
                print_(s)

        if ui_setting and ui_setting.subtitle.isChecked():
            lang = {
                'korean': 'ko',
                'english': 'en',
                'japanese': 'ja'
            }[compatstr(ui_setting.subtitleCombo.currentText()).lower()]
            if lang in self.subtitles:
                try:
                    subtitle = self.subtitles[lang]
                    filename_sub = u'{}.vtt'.format(
                        os.path.splitext(filename)[0])
                    downloader.download(
                        subtitle,
                        os.path.dirname(filename_sub),
                        fileName=os.path.basename(filename_sub),
                        overwrite=True)
                    filename_sub_new = u'{}.srt'.format(
                        os.path.splitext(filename_sub)[0])
                    cw.imgs.append(filename_sub_new)
                    cw.dones.add(
                        os.path.realpath(filename_sub_new).replace(
                            '\\\\?\\', ''))
                    srt_converter.convert(filename_sub, filename_sub_new)
                    cw.setSubtitle(True)
                finally:
                    try:
                        os.remove(filename_sub)
                    except:
                        pass

        return filename_new
Example #23
    def __init__(self, host=HOST, port=PORT, auth=AUTH,
                 session_map=None,
                 bg_jobs=None,
                 rx_con=None,
                 call_corr_var_name='variable_call_uuid',
                 call_corr_xheader_name='originating_session_uuid',
                 autorecon=30,
                 max_limit=float('inf'),
                 # proxy_mng=None,
                 _tx_lock=None):
        '''
        Parameters
        ----------
        host : string
            Hostname or ip of the FS engine server to listen to
        port : string
            Port on which the FS server is offering an esl connection
        auth : string
            Authentication password for connecting via esl
        call_corr_var_name : string
            Name of the freeswitch variable (without the 'variable_' prefix)
            to use for associating sessions into calls (see _handle_create).
        call_corr_xheader_name : string
            Name of an Xheader which can be used to associate sessions into
            calls.
            This is useful if an intermediary device such as a B2BUA is being
            tested and is the first hop receiving requests. Note in
            order for this association mechanism to work the intermediary
            device must be configured to forward the Xheaders it receives.
            (see `self._handle_create` for more details)
        autorecon : int, bool
            Enable reconnection attempts on server disconnect. An integer
            value specifies the number of seconds to spend retrying the
            connection before bailing. A bool of 'True' will poll
            indefinitely and 'False' will not poll at all.
        '''
        self.server = host
        self.port = port
        self.auth = auth
        self._sessions = session_map or OrderedDict()
        self._bg_jobs = bg_jobs or OrderedDict()
        self._calls = OrderedDict()  # maps aleg uuids to Sessions instances
        self.hangup_causes = Counter()  # record of causes by category
        self.failed_sessions = OrderedDict()
        self._handlers = self.default_handlers  # active handler set
        self._unsub = ()
        self.consumers = {}  # callback chains, one for each event type
        self._waiters = {}  # holds events being waited on
        self._blockers = []  # holds cached events for reuse
        # store up to the last 1k of each event type
        self.events = defaultdict(functools.partial(deque, maxlen=1000))

        # constants
        self.autorecon = autorecon
        self._call_var = None
        self.call_corr_var = call_corr_var_name
        self.set_xheader_var(call_corr_xheader_name)
        self.max_limit = max_limit
        self._id = utils.uuid()

        # if a mng is provided then assume this listener will
        # be instantiated as a shared object and thus will require
        # at least one shared lock for the tx connection (since
        # it is used internally by Session instances)
        # if proxy_mng:
        # self._tx_lock = proxy_mng.Lock() if proxy_mng else None
        # self._tx_lock = _tx_lock
        self._shared = False

        # sync
        self._exit = mp.Event()  # indicate when event loop should terminate
        self._lookup_blocker = mp.Event()  # used to block the event loop temporarily
        self._lookup_blocker.set()
        self.log = utils.get_logger(utils.pstr(self))
        self._epoch = self._fs_time = 0.0

        # set up contained connections
        self._rx_con = rx_con or Connection(self.server, self.port, self.auth)
        self._tx_con = Connection(self.server, self.port, self.auth)

        # mockup thread
        self._thread = None
        self.reset()
Example #24
def evaluate_msts(mst_eval_fn, mst_evaluation_state, current_msts, ckpt_root):
    """

    :param mst_eval_fn:
    :param mst_evaluation_state:
    :param current_msts:
    :param ckpt_root:
    :return:
    """
    stop_mst_ids, new_msts = mst_eval_fn(mst_evaluation_state)
    for mst_id in stop_mst_ids:
        mst_evaluation_state[mst_id]["state"] = "COMPLETED"

    current_msts = [(mst_id, mst) for mst_id, mst in current_msts
                    if mst_id not in stop_mst_ids]

    id_max = max(mst_evaluation_state.keys())
    for mst_id, new_mst in zip(range(id_max + 1, id_max + 1 + len(new_msts)),
                               new_msts):

        ckpt_path = ckpt_root + "/" + str(mst_id) + "_" + uuid()
        if not os.path.exists(ckpt_path):
            os.makedirs(ckpt_path)

        ckpt_path = ckpt_path + "/model"

        if 'init_ckpt_path' in new_mst:
            copy_tree(new_mst['init_ckpt_path'], ckpt_path)

        mst_evaluation_state[mst_id] = {
            "state": "RUNNING",
            "epoch": -1,
            "train_error": [],
            "train_loss": [],
            "valid_error": [],
            "valid_loss": [],
            "ckpt_path": ckpt_path,
            "mst": new_mst
        }
        log_file = open("./logs/" + str(mst_id) + ".log", 'a')
        log_message(log_file, "Checkpoint Path: " + ckpt_path + "\n")
        log_message(log_file, "MST: " + mst_identifier(new_mst) + "\n")
        if 'init_mst' in new_mst:
            log_message(log_file,
                        "Init MST ID: " + str(new_mst['init_mst_id']) + "\n")
            log_message(
                log_file,
                "Init MST: " + mst_identifier(new_mst['init_mst']) + "\n")

            mst_evaluation_state[mst_id]['valid_error'] = [
                x for x in mst_evaluation_state[new_mst['init_mst_id']]
                ['valid_error']
            ]
            mst_evaluation_state[mst_id]['train_error'] = [
                x for x in mst_evaluation_state[new_mst['init_mst_id']]
                ['train_error']
            ]
            mst_evaluation_state[mst_id]['valid_loss'] = [
                x for x in mst_evaluation_state[new_mst['init_mst_id']]
                ['valid_loss']
            ]
            mst_evaluation_state[mst_id]['train_loss'] = [
                x for x in mst_evaluation_state[new_mst['init_mst_id']]
                ['train_loss']
            ]

            mst_evaluation_state[mst_id]['epoch'] = mst_evaluation_state[
                new_mst['init_mst_id']]['epoch']

        current_msts.append((mst_id, new_mst))

    return current_msts, mst_evaluation_state
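
evaluate_msts delegates the stop/spawn decision to mst_eval_fn, which receives the full mst_evaluation_state and returns the ids to stop plus any new configurations to try. A sketch of such a function, assuming the state layout used above (per-mst dicts with "state" and "epoch" fields) and a simple fixed-epoch stopping rule; a real mst_eval_fn could also return new msts, for example for successive halving:

def simple_mst_eval_fn(mst_evaluation_state, max_epochs=10):
    """Stop every running configuration once it has trained max_epochs epochs.

    Sketch only; the surrounding scheduler calls this with a single
    positional argument, so max_epochs stays at its default there.
    """
    stop_mst_ids = []
    for mst_id, state in mst_evaluation_state.items():
        if state["state"] == "RUNNING" and state["epoch"] + 1 >= max_epochs:
            stop_mst_ids.append(mst_id)
    new_msts = []  # no new configurations are spawned in this sketch
    return stop_mst_ids, new_msts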
Example #25
def schedule(worker_ips,
             train_partitions,
             valid_partitions,
             train_availability,
             valid_availability,
             input_fn,
             model_fn,
             train_fn,
             initial_msts,
             mst_eval_fn,
             ckpt_root='/tmp',
             preload_data_to_mem=True,
             backend='tf'):
    """
    :param worker_ips:
    :param train_partitions:
    :param valid_partitions:    
    :param train_availability:
    :param valid_availability:    
    :param input_fn:
    :param model_fn:
    :param train_fn:
    :param initial_msts:
    :param mst_eval_fn:
    :param ckpt_root:
    :param preload_data_to_mem:
    """
    begin_time = time.time()

    print('Starting HT job: ' +
          datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"))

    if os.path.exists("./logs"):
        shutil.rmtree("./logs")
    os.makedirs("./logs")
    scheduler_log_file = open("./logs/scheduler.log", "w")

    workers = {i: xc.ServerProxy(ip) for i, ip in enumerate(worker_ips)}

    current_msts = [(mst_id, mst) for mst_id, mst in enumerate(initial_msts)]
    mst_evaluation_state = {}

    if os.path.exists(ckpt_root):
        shutil.rmtree(ckpt_root)

    for mst_id, mst in current_msts:
        ckpt_path = ckpt_root + "/" + str(mst_id) + "_" + uuid()
        if not os.path.exists(ckpt_path):
            os.makedirs(ckpt_path)
        ckpt_path = ckpt_path + "/model"
        mst_evaluation_state[mst_id] = {
            "state": "RUNNING",
            "epoch": -1,
            "train_error": [],
            "train_loss": [],
            "valid_error": [],
            "valid_loss": [],
            "ckpt_path": ckpt_path,
            "mst": mst
        }
        log_file = open("./logs/" + str(mst_id) + ".log", 'a')
        log_message(log_file, "Checkpoint Path: " + ckpt_path + "\n")
        log_message(log_file, "MST: " + mst_identifier(mst) + "\n")

    if backend == 'tf':
        exec_fn_string = base64.b64encode(
            dill.dumps(tf_execute_helper, byref=False)).decode("ascii")
    elif backend == 'pytorch':
        exec_fn_string = base64.b64encode(
            dill.dumps(pytorch_execute_helper, byref=False)).decode("ascii")

    preload_fn_string = base64.b64encode(
        dill.dumps(preload_data_helper, byref=False)).decode("ascii")

    input_fn_string = base64.b64encode(dill.dumps(input_fn,
                                                  byref=False)).decode("ascii")
    model_fn_string = base64.b64encode(dill.dumps(model_fn,
                                                  byref=False)).decode("ascii")
    train_fn_string = base64.b64encode(dill.dumps(train_fn,
                                                  byref=False)).decode("ascii")

    if preload_data_to_mem:
        # preload data into the worker memory
        preload_data(workers, input_fn_string, preload_fn_string,
                     train_partitions, valid_partitions, train_availability,
                     valid_availability, scheduler_log_file, begin_time)

    # assume # train partitions = # valid. partitions
    P = len(train_partitions)
    W = len(workers)

    random.seed(2019)

    def _get_runnable_unit(epoch_units, w, availability,
                           epoch_mst_execution_state):
        random.shuffle(epoch_units)
        for idx, (mst_id, mst, partition) in enumerate(epoch_units):
            if availability[w][partition] == 1 and (
                    epoch_mst_execution_state[mst_id] == False):
                del epoch_units[idx]
                return mst_id, mst, partition
        return -1, -1, -1

    iteration = 0
    while len(current_msts) > 0:

        epoch_mst_evaluation_state = {
            mst_id: {
                "train_error": [],
                "train_loss": [],
                "valid_error": [],
                "valid_loss": []
            }
            for mst_id, mst in current_msts
        }

        for mode, availability, partitions in zip(
            ["TRAIN", "VALID"], [train_availability, valid_availability],
            [train_partitions, valid_partitions]):

            epoch_units = [(mst_id, mst, partition) for partition in range(P)
                           for (mst_id, mst) in current_msts]
            epoch_mst_execution_state = {
                mst_id: False
                for mst_id, _ in current_msts
            }
            epoch_machine_state = [None for _ in range(W)]

            epoch_begin_time = time.time()
            while len(epoch_units) > 0 or sum(
                [1 for x in epoch_machine_state if x is not None]) > 0:
                for w in [w for w in range(W) if w in workers]:

                    try:
                        if epoch_machine_state[w] is None:
                            mst_id, mst, p = _get_runnable_unit(
                                epoch_units, w, availability,
                                epoch_mst_execution_state)
                            if mst_id != -1:
                                exec_id = launch_job(
                                    workers[w],
                                    mst_evaluation_state[mst_id]['epoch'] + 1,
                                    [p],
                                    mst_evaluation_state[mst_id]['ckpt_path'],
                                    [partitions[p]], input_fn_string,
                                    model_fn_string, train_fn_string,
                                    exec_fn_string, mst, mode == "TRAIN")
                                epoch_mst_execution_state[mst_id] = True
                                epoch_machine_state[w] = (mst_id, mst, p,
                                                          exec_id)

                                message = "TIME: %d, EVENT: %s_LAUNCHED, ITERATION: %d, WORKER: %d, MST: %d, PARTITIONS: %s, EPOCH: %d, %s\n" % (
                                    time.time() - begin_time, mode, iteration,
                                    w, mst_id, "/".join([str(x) for x in [p]]),
                                    mst_evaluation_state[mst_id]['epoch'] + 1,
                                    mst_identifier(mst))
                                log_message(scheduler_log_file,
                                            message,
                                            print_message=True)
                        elif epoch_machine_state[w] is not None:
                            mst_id, mst, p, exec_id = epoch_machine_state[w]
                            completed, status = check_finished(
                                workers[w], exec_id)
                            if completed:
                                epoch_mst_execution_state[mst_id] = False
                                epoch_machine_state[w] = None

                                log_file = open(
                                    "./logs/" + str(mst_id) + ".log", 'a')
                                log_message(log_file,
                                            status["result"]["message"])

                                loss = status["result"]["loss"]
                                error = status["result"]["error"]

                                if mode == "TRAIN":
                                    epoch_mst_evaluation_state[mst_id][
                                        'train_loss'].extend(loss)
                                    epoch_mst_evaluation_state[mst_id][
                                        'train_error'].extend(error)
                                else:
                                    epoch_mst_evaluation_state[mst_id][
                                        'valid_loss'].extend(loss)
                                    epoch_mst_evaluation_state[mst_id][
                                        'valid_error'].extend(error)

                                message = "TIME: %d, EVENT: %s_COMPLETED, ITERATION: %d, WORKER: %d, MST: %d, PARTITIONS: %s, EPOCH: %d, %s\n" % (
                                    time.time() - begin_time, mode, iteration,
                                    w, mst_id, "/".join([str(x) for x in [p]]),
                                    mst_evaluation_state[mst_id]['epoch'] + 1,
                                    mst_identifier(mst))
                                log_message(scheduler_log_file,
                                            message,
                                            print_message=True)
                    except Exception as e:
                        print(e)
                        print('Worker {0} failure detected....'.format(str(w)))
                        # removing w from available workers
                        workers.pop(w, None)

                        # if there was any mst unit running, remove it back to the queue
                        if epoch_machine_state[w] is not None:
                            mst_id, mst, p, exec_id = epoch_machine_state[w]
                            print(
                                'MST {0} partition {1} moved back to queue....'
                                .format(str(mst_id), str(p)))
                            epoch_units.append((mst_id, mst, p))
                            epoch_machine_state[w] = None
                            epoch_mst_execution_state[mst_id] = False

                        # starting from beginning
                        break

                # check failed workers are up again
                for w in range(W):
                    if w not in workers:
                        try:
                            #print('Checking worker {0}....'.format(str(w)))
                            con = xc.ServerProxy(worker_ips[w])
                            con.is_live()
                            workers[w] = con
                            epoch_machine_state[w] = None
                            print('Worker {0} back online....'.format(str(w)))

                            if preload_data_to_mem:
                                # preload data into the worker memory
                                preload_data([workers[w]], input_fn_string,
                                             preload_fn_string,
                                             train_partitions,
                                             valid_partitions,
                                             [train_availability[w]],
                                             [valid_availability[w]],
                                             scheduler_log_file, begin_time)

                        except Exception as e:
                            #print(e)
                            continue

                sys.stdout.flush()
                time.sleep(config.POLLING_PERIOD)

            message = 'Iteration: {}, {} Elapsed Time: {}\n'.format(
                iteration, mode,
                time.time() - epoch_begin_time)
            log_message(scheduler_log_file, message, print_message=True)

        # update mst evaluation state
        mst_evaluation_state = update_mst_evaluation_state(
            epoch_mst_evaluation_state, mst_evaluation_state)

        # mst evaluation
        current_msts, mst_evaluation_state = evaluate_msts(
            mst_eval_fn, mst_evaluation_state, current_msts, ckpt_root)
        iteration += 1

    print('Total HT job time: ' + str(time.time() - begin_time))
Example #26
def doublepad(canvas = None, xmin = 1, xmax = 10, 
               ymina = 1, ymaxa = 10,
               yminb = 1, ymaxb = 10, 
               setLogX = False, setLogY = False,
               gridX = False, gridY = False,
               xtitle = '',
               ytitlea = '', ytitleb = '', fontsize=0.1):
    ''' Set up a canvas with a main pad and a sub (ratio) pad '''
    if canvas == None:
        canvas = plotter()
    canvas.Divide(1, 2)
    scale = 0.6/0.4
    pada = canvas.cd(1)
    pada.Clear()
    pada.SetPad(0.0, 0.425, 1.0, 1.0)
    pada.GetFrame().SetBorderMode(0)
    pada.SetBorderSize(5)
    pada.SetTopMargin(0.075)
    pada.SetRightMargin(0.05)
    pada.SetLeftMargin(0.15)
    pada.SetBottomMargin(0.0)
    pada.SetGridx(gridX)
    pada.SetGridy(gridY)
    if (setLogX):
      if (xmin == 0.0): xmin = 1.0
      pada.SetLogx()
    if (setLogY):
      if (ymina == 0.0): ymina = 1.0 
      pada.SetLogy()
    canvas += pada      
    hbacka = ROOT.TH1F(utils.uuid(), '', 2, xmin, xmax)
    hbacka.Fill(xmin, ymina)
    hbacka.Fill(xmin + (xmax - xmin)/2, ymaxa)
    hbacka.SetMinimum(ymina)  
    hbacka.SetMaximum(ymaxa)
    xa = hbacka.GetXaxis() 
    xa.SetMoreLogLabels()
    xa.SetRangeUser(xmin, xmax)
    xa.SetLabelColor(ROOT.kWhite)
    xa.SetLabelSize(0.0)
    ya = hbacka.GetYaxis()
    ya.SetTitle(ytitlea)
    ya.SetTitleFont(42)
    ya.SetTitleSize( fontsize )
    ya.SetTitleOffset(0.80)
    ya.SetLabelSize( fontsize )
    ya.SetNdivisions(306)
    hbacka.SetStats(ROOT.kFALSE)
    hbacka.Draw('AXIS')
    canvas += hbacka
    padb = canvas.cd(2)
    padb.SetPad(0.0, 0.0, 1.0, 0.425)
    padb.GetFrame().SetBorderMode(0)
    padb.SetTopMargin(0.0)
    padb.SetRightMargin(0.05)
    padb.SetLeftMargin(0.15)
    padb.SetBottomMargin(0.35)
    if (setLogX):
      if (xmin == 0.0): xmin = 1.0
      padb.SetLogx()
    canvas += padb
    hbackb = ROOT.TH1F(utils.uuid(), '', 2, xmin, xmax)
    hbackb.Fill(xmin, yminb)
    hbackb.Fill(xmin + (xmax - xmin)/2, ymaxb)
    hbackb.SetMinimum(yminb)
    hbackb.SetMaximum(ymaxb)
    xa = hbackb.GetXaxis()
    xa.SetMoreLogLabels()
    xa.SetTitle(xtitle)
    xa.SetTitleSize( fontsize * scale )
    xa.SetTitleOffset(1.1)
    xa.SetLabelSize( fontsize * scale )
    xa.SetTickLength(0.050)
    xa.SetLabelOffset(0.020)
    xa.SetRangeUser(xmin, xmax)
    xa.SetNoExponent()
    ya = hbackb.GetYaxis()
    ya.SetTitle(ytitleb)
    ya.SetTitleFont(42)
    ya.SetTitleSize( fontsize * scale )
    ya.SetTitleOffset(0.8/scale)
    ya.CenterTitle()
    ya.SetLabelSize( fontsize * scale )
    ya.SetNdivisions(306)
    hbackb.SetStats(ROOT.kFALSE)
    hbackb.Draw('AXIS')
    canvas += hbackb
    return canvas
Example #27
    def authenticateCredentials(self, credentials):
        """See IAuthenticationPlugin.

        This plugin will actually never authenticate.

        o We expect the credentials to be those returned by
          ILoginPasswordExtractionPlugin.
        """
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)

        response = request['RESPONSE']
        pas_instance = self._getPAS()

        login = credentials.get('login')
        password = credentials.get('password')

        if None in (login, password, pas_instance) and (
            credentials.get('source') != 'plone.session'):
            return None
        else:
            session_source = self.session

            ticket = credentials.get('cookie')

            if session_source._shared_secret is not None:
                ticket_data = tktauth.validateTicket(
                    session_source._shared_secret, ticket,
                    timeout=session_source.timeout,
                    mod_auth_tkt=session_source.mod_auth_tkt)
            else:
                ticket_data = None
                manager = queryUtility(IKeyManager)
                if manager is None:
                    return None
                for secret in manager[u"_system"]:
                    if secret is None:
                        continue

                    ticket_data = tktauth.validateTicket(secret, ticket,
                        timeout=session_source.timeout,
                        mod_auth_tkt=session_source.mod_auth_tkt)

                    if ticket_data is not None:
                        break

            if ticket_data is None:
                return None

            (digest, userid, tokens, user_data, timestamp) = ticket_data
            pas = self._getPAS()
            info = pas._verifyUser(pas.plugins, user_id=userid)

            if info is None:
                return None

            login = info['login']

        cookie_val = self.getCookie()
        
        # get max seats from member data property or cache and default to 1 if not set
        try:
            max_seats = self.getMaxSeatsForLogin(login)
        except:
            traceback.print_exc()
            max_seats = 1  # default to 1 if the lookup fails

        # When debugging, print the maxSeats value that was resolved
        if self.DEBUG:
            print "authenticateCredentials():: Max Seats is " + str( max_seats )

        if max_seats == 1:
            if cookie_val:
                # A cookie value is there.  If it's the same as the value
                # in our mapping, it's fine.  Otherwise we'll force a
                # logout.
                existing = self.mapping1.get(login, None)
                
                if self.DEBUG:
                    if existing:
                        print "authenticateCredentials():: cookie_val is " + cookie_val + ", and active tokens are: " + ', '.join( existing['tokens'] )
                
                if existing and cookie_val not in existing['tokens']:
                    # The cookie values differ, so we want to log out the
                    # user by calling resetCredentials.  Note that this
                    # will eventually call our own resetCredentials which
                    # will clean up our own cookie.
                    try:
                        self.resetAllCredentials(request, response)
                        pas_instance.plone_utils.addPortalMessage(_(
                            u"Someone else logged in under your name.  "
                            u"You have been logged out"), "error")
                    except Exception:
                        traceback.print_exc()
                elif existing is None:
                    # The browser has the cookie but we don't know about
                    # it.  Let's reset our own cookie:
                    self.setCookie('')
    
            else:
                # When no cookie is present, we generate one, store it and
                # set it in the response:
                cookie_val = uuid()
                # do some cleanup in our mappings
                existing = self.mapping1.get(login)
                
                if existing and 'tokens' in existing:
                    try:
                        if existing['tokens'][0] in self.mapping2:
                            del self.mapping2[existing['tokens'][0]]
                    except:
                        pass
    
                try:
                    from_ip = self.get_ip( request )
                except Exception:
                    from_ip = None
                    traceback.print_exc()

                now = DateTime()
                self.mapping1[login] = { 'tokens':[] }
                self.mapping1[login]['tokens'].append( cookie_val )
                self.mapping2[cookie_val] = {
                    'userid': login,
                    'ip': from_ip,
                    'startTime': now,
                    'expireTime': DateTime(now.asdatetime() + self.time_to_persist_cookies),
                }
                self.setCookie(cookie_val)
        else:
            # Max seats is not 1. Treat this as a floating licenses scenario.
            # Nobody is logged out, but once the max seats threshold is reached,
            # active tokens must expire before new users may log in.
            if cookie_val:
                # When the cookie value is there, try to verify it, or activate it if it is not added yet
                self.verifyToken( cookie_val, login, max_seats, request, response )
            else:
                if self.DEBUG:
                    print "authenticateCredentials:: Try to issue a token because there is no cookie value."
                    
                # When no cookie is present, attempt to issue a token and use the cookie to store it
                self.issueToken(login, max_seats, request, response)
                # if all seats are filled, force a logout
                if self.isLoginAtCapacity(login, max_seats):
                    self.forceLogoutForUser(login, request, response)
    
        return None  # Note that we never return anything useful
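The max_seats == 1 branch above enforces a single-seat rule: the browser cookie must match the token recorded for that login, otherwise credentials are reset; when no cookie exists a fresh uuid() token is minted and stored in the two mappings. Here is a stripped-down sketch of that rule using plain dicts and the standard uuid module in place of the plugin's helpers; all names are illustrative, not the plugin's API.

import uuid

tokens_by_login = {}  # login -> active token (one seat per login)

def check_single_seat(login, cookie_val):
    """Return (ok, new_cookie); ok is False when another session holds the seat."""
    existing = tokens_by_login.get(login)
    if cookie_val:
        if existing and cookie_val != existing:
            return False, None      # another session owns the seat: force a logout
        if existing is None:
            return True, ''         # stale browser cookie: clear it
        return True, cookie_val     # cookie matches the recorded token
    token = uuid.uuid4().hex        # no cookie yet: mint and record a token
    tokens_by_login[login] = token
    return True, token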
Example #28
0
 def iid(self):
     if constants.FT_IID not in self.attrs:
         self.attrs[constants.FT_IID] = [utils.uuid()]
     return self.attrs[constants.FT_IID][0]
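Example #28 lazily mints an identifier the first time iid is read and reuses it afterwards. The same pattern in plain Python, with the standard uuid module standing in for utils.uuid() (illustrative only, not the original class):

import uuid

class Feature(object):
    def __init__(self):
        self.attrs = {}

    @property
    def iid(self):
        # Mint an id on first access and cache it so later reads are stable.
        return self.attrs.setdefault('iid', uuid.uuid4().hex)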
Example #29
0
    def load_app(self, ns, on_value=None, **prepost_kwargs):
        """Load annotated callbacks and from a namespace and add them
        to this client's listener's callback chain.

        :param ns: A namespace-like object containing functions marked with
            @event_callback (can be a module, class or instance).
        :param str on_value: id key to be used for registering app callbacks
            with `EventListener`
        """
        listener = self.listener
        name = utils.get_name(ns)
        if name not in self._apps:
            # if handed a class, instantiate appropriately
            app = ns() if isinstance(ns, type) else ns
            prepost = getattr(app, 'prepost', False)
            if prepost:
                args, kwargs = utils.get_args(app.prepost)
                funcargs = tuple(weakref.proxy(getattr(self, argname))
                                 for argname in args if argname != 'self')
                ret = prepost(*funcargs, **prepost_kwargs)
                if inspect.isgenerator(ret):
                    # run init step
                    next(ret)
                    app._finalize = ret

            # assign a 'consumer id'
            cid = on_value if on_value else utils.uuid()
            self.log.info("Loading call app '{}' for listener '{}'"
                          .format(name, listener))
            icb, failed = 1, False
            # insert handlers and callbacks
            for ev_type, cb_type, obj in marks.get_callbacks(app):
                if cb_type == 'handler':
                    # TODO: similar unloading on failure here as above?
                    listener.add_handler(ev_type, obj)

                elif cb_type == 'callback':
                    # add default handler if none exists
                    if ev_type not in listener._handlers:
                        self.log.info(
                            "adding default session lookup handler for event"
                            " type '{}'".format(ev_type)
                        )
                        listener.add_handler(
                            ev_type,
                            listener.lookup_sess
                        )
                    added = listener.add_callback(ev_type, cid, obj)
                    if not added:
                        failed = obj
                        listener.remove_callbacks(cid, last=icb)
                        break
                    icb += 1
                    self.log.debug("'{}' event callback '{}' added for id '{}'"
                                   .format(ev_type, obj.__name__, cid))

            if failed:
                raise TypeError("app load failed since '{}' is not a valid"
                                "callback type".format(failed))
            # register locally
            self._apps[name] = app
            app.cid, app.name = cid, name
            return cid
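load_app discovers an app's handlers and callbacks through marks.get_callbacks and functions tagged with @event_callback, as the docstring notes. Below is a self-contained sketch of that marking-and-collection pattern; the decorator and collector are stand-ins written for illustration, not the library's own marks module, and the event name is made up.

import inspect

def event_callback(event_type):
    # Stand-in decorator: tag a function with the event type it handles.
    def marker(func):
        func._event_type = event_type
        return func
    return marker

def get_callbacks(app):
    # Stand-in for marks.get_callbacks: yield (event_type, kind, bound method).
    for _, method in inspect.getmembers(app, inspect.ismethod):
        ev = getattr(method, '_event_type', None)
        if ev is not None:
            yield ev, 'callback', method

class CallLogger(object):
    @event_callback('CHANNEL_ANSWER')
    def on_answer(self, sess):
        print("answered:", sess)

for ev_type, cb_type, cb in get_callbacks(CallLogger()):
    print(ev_type, cb_type, cb.__name__)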
Example #30
0
    def authenticateCredentials(self, credentials):
        """See IAuthenticationPlugin.

        This plugin will actually never authenticate.

        o We expect the credentials to be those returned by
          ILoginPasswordExtractionPlugin.
        """
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)

        response = request['RESPONSE']
        pas_instance = self._getPAS()

        login = credentials.get('login')
        password = credentials.get('password')

        if None in (login, password, pas_instance) and (
                credentials.get('source') != 'plone.session'):
            return None
        else:
            session_source = self.session

            ticket = credentials.get('cookie')

            if session_source._shared_secret is not None:
                ticket_data = tktauth.validateTicket(
                    session_source._shared_secret,
                    ticket,
                    timeout=session_source.timeout,
                    mod_auth_tkt=session_source.mod_auth_tkt)
            else:
                ticket_data = None
                manager = queryUtility(IKeyManager)
                if manager is None:
                    return None
                for secret in manager[u"_system"]:
                    if secret is None:
                        continue

                    ticket_data = tktauth.validateTicket(
                        secret,
                        ticket,
                        timeout=session_source.timeout,
                        mod_auth_tkt=session_source.mod_auth_tkt)

                    if ticket_data is not None:
                        break

            if ticket_data is None:
                return None

            (digest, userid, tokens, user_data, timestamp) = ticket_data
            pas = self._getPAS()
            info = pas._verifyUser(pas.plugins, user_id=userid)

            if info is None:
                return None

            login = info['login']

        cookie_val = self.getCookie()

        # Get max seats from member data property or cache; default to 1 if not set.
        max_seats = 1
        try:
            max_seats = self.getMaxSeatsForLogin(login)
        except Exception:
            traceback.print_exc()

        # When debugging, print the maxSeats value that was resolved
        if self.DEBUG:
            print "authenticateCredentials():: Max Seats is " + str(max_seats)

        if max_seats == 1:
            if cookie_val:
                # A cookie value is there.  If it's the same as the value
                # in our mapping, it's fine.  Otherwise we'll force a
                # logout.
                existing = self.mapping1.get(login, None)

                if self.DEBUG:
                    if existing:
                        print "authenticateCredentials():: cookie_val is " + cookie_val + ", and active tokens are: " + ', '.join(
                            existing['tokens'])

                if existing and cookie_val not in existing['tokens']:
                    # The cookie values differ, so we want to log out the
                    # user by calling resetCredentials.  Note that this
                    # will eventually call our own resetCredentials which
                    # will clean up our own cookie.
                    try:
                        self.resetAllCredentials(request, response)
                        pas_instance.plone_utils.addPortalMessage(
                            _(u"Someone else logged in under your name.  "
                              u"You have been logged out"), "error")
                    except Exception:
                        traceback.print_exc()
                elif existing is None:
                    # The browser has the cookie but we don't know about
                    # it.  Let's reset our own cookie:
                    self.setCookie('')

            else:
                # When no cookie is present, we generate one, store it and
                # set it in the response:
                cookie_val = uuid()
                # do some cleanup in our mappings
                existing = self.mapping1.get(login)

                if existing and 'tokens' in existing:
                    try:
                        if existing['tokens'][0] in self.mapping2:
                            del self.mapping2[existing['tokens'][0]]
                    except:
                        pass

                try:
                    from_ip = self.get_ip(request)
                except Exception:
                    from_ip = None
                    traceback.print_exc()

                now = DateTime()
                self.mapping1[login] = {'tokens': []}
                self.mapping1[login]['tokens'].append(cookie_val)
                self.mapping2[cookie_val] = {
                    'userid': login,
                    'ip': from_ip,
                    'startTime': now,
                    'expireTime': DateTime(now.asdatetime() + self.time_to_persist_cookies),
                }
                self.setCookie(cookie_val)
        else:
            # Max seats is not 1. Treat this as a floating licenses scenario.
            # Nobody is logged out, but once the max seats threshold is reached,
            # active tokens must expire before new users may log in.
            if cookie_val:
                # When the cookie value is there, try to verify it, or activate it if it is not added yet
                self.verifyToken(cookie_val, login, max_seats, request,
                                 response)
            else:
                if self.DEBUG:
                    print "authenticateCredentials:: Try to issue a token because there is no cookie value."

                # When no cookie is present, attempt to issue a token and use the cookie to store it
                self.issueToken(login, max_seats, request, response)
                # if all seats are filled, force a logout
                if self.isLoginAtCapacity(login, max_seats):
                    self.forceLogoutForUser(login, request, response)

        return None  # Note that we never return anything useful
Example #31
0
 def produce_element_with_delay(self, delay_ms: int):
     key = uuid()
     log.info(
         f"Producing element with key [ {key} ] and delay [ {delay_ms} ]")
     self._produce_record_sync(key, str(delay_ms))
Example #32
0
 def __init__(self, name=None):
     self.__pid = name or uuid()
     self.__coords = Coordinates()
     self.__labyrinth = Labyrinth()
     self.__moves_counter = 0
     self.__finish_state = None