Example No. 1
    def getEntries(self, dayRange=8, dayOffset=0, **kwargs):
        """Return (error, entries) for all booking occurrences within the given day range, using the Redis cache."""
        key = get_cache_key(self.login_data, self.cache_key, dayRange=dayRange, dayOffset=dayOffset, useDate=True, **kwargs)
        logger.debug(key)
        entries = self._loadCache(key)
        error = None
        if entries is None:
            (error, bookings) = self.getAllBookings()
            if not bookings:
                return error, []
            Range = namedtuple('Range', ['start', 'end'])
            entries = []
            for booking, rules, start, duration in bookings:
                # expand the recurrence rules into concrete occurrences within the requested window
                for rule in rules.between(
                        datetime.datetime.combine(datetime.datetime.now().date() + datetime.timedelta(days=dayOffset), datetime.time(0, 0)),
                        datetime.datetime.combine(datetime.datetime.now().date() + datetime.timedelta(days=dayOffset + dayRange),
                                                  datetime.time(23, 59)), inc=True):
                    rule_start = datetime.datetime.combine(rule, start.time())
                    r = Range(start=rule_start, end=rule_start + duration)
                    entries.append(self._make_entry(r, booking))
            entries = self.sortBookings(entries, **kwargs)
            if not error:
                redis.set(key, pickle.dumps(entries), ex=24 * 3600)
        return error, entries
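
Each of these examples follows the same cache-aside pattern: build a key with get_cache_key, try Redis, and on a miss compute the value and store it with a TTL. A minimal sketch of that pattern is below; the cached_call helper is hypothetical and only assumes that get_cache_key returns a stable string key and that redis is the module-level client used throughout these snippets.

import pickle

def cached_call(login_data, compute, *key_parts, ttl=12 * 3600):
    # hypothetical helper distilling the pattern shared by the examples
    key = get_cache_key(login_data, *key_parts)      # assumed: builds a stable string key
    raw = redis.get(key)
    if raw is not None:
        return None, pickle.loads(raw)               # cache hit: no error, cached value
    error, value = compute()                         # cache miss: compute the fresh value
    if not error:
        redis.set(key, pickle.dumps(value), ex=ttl)  # cache only successful results, with a TTL
    return error, value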
Example No. 2
    def searchEntries(self, text):
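        """Search the next year's bookings for the given text; return (error, up to 10 matching bookings), cached in Redis."""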
        text = text.lower()
        key = get_cache_key(self.login_data, self.cache_key + ':search', text)
        entry_data = self._loadCache(key)
        if entry_data is None:
            entries = []
            error, bookings = self.getEntries(dayRange=365)
            toomany = False
            for booking in bookings:
                if 'status_id' in booking and int(booking['status_id']) == 99:
                    continue

                if any(field in booking and booking[field] and text in booking[field].lower()
                       for field in ['descr', 'room', 'place', 'note']):  # ['text', 'bezeichnung', 'ort', 'notizen']
                    entries.append(booking)
                    if len(entries) >= 10:
                        toomany = True
                        break
            # entries = self.sortBookings(entries)
            toomanymsg = "Zu viele Ergebnisse, zeige die ersten 10."
            if not error:
                redis.set(key, pickle.dumps((toomanymsg if toomany else None, entries)), ex=12*3600)
            if toomany:
                error = error + toomanymsg if error else toomanymsg
        else:
            error, entries = entry_data
        return error, entries
Example No. 3
def getAjaxResponse(*args,
                    login_data,
                    isAjax=True,
                    timeout=3600,
                    additionalCacheKey=None,
                    **params):
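    """Perform a Redis-cached AJAX (cc_func) or REST (cc_api) call, re-logging in once on failure; return (error, data)."""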
    key = get_cache_key(login_data,
                        *args,
                        additionalCacheKey=additionalCacheKey,
                        **params)
    resp_str = redis.get(key)
    resp = json.loads(resp_str.decode('utf-8')) if resp_str else None
    if not resp or not timeout:
        relogin = False
        while True:

            (success, cookies) = login(login_data, updateCache=relogin)
            if not success:
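                # login() returns (False, error_message) on failure, so cookies holds the message here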
                return cookies, None
            try:
                if isAjax:
                    resp = cc_func(*args,
                                   cookies=cookies,
                                   login_data=login_data,
                                   params=params)
                else:
                    resp = cc_api(*args,
                                  cookies=cookies,
                                  login_data=login_data,
                                  params=params)
            except Exception as e:
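                # request failed: fall back to the most recent successful response, if any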
                resp_str = redis.get(key + "_latest")
                if resp_str:
                    resp_time = float(redis.get(key + "_latest:time"))
                    resp = json.loads(resp_str.decode('utf-8'))
                    msg = f'Server unavailable. Data is from {datetime.fromtimestamp(resp_time)}'
                    return msg, resp['data']
                else:
                    return "Error: Server unavailable!", None
            if resp['status'] == 'success':
                break
            elif relogin:
                break
            else:  # retry
                relogin = True
        if resp['status'] != 'success' or 'data' not in resp:
            if 'message' in resp:
                return resp['message'], None
            else:
                return str(resp), None
        else:
            resp_str = json.dumps(resp)
            redis.set(key, resp_str, ex=timeout)
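            # also keep an un-expiring copy (and its timestamp) as a stale fallback for outages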
            redis.set(key + "_latest", resp_str)
            redis.set(key + "_latest:time", datetime.now().timestamp())
    return None, resp['data']
Example No. 4
def parseGeburtstage(login_data):
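    """Build a text message from the birthday block of the 'home' screen data and cache it for one day."""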
    key = get_cache_key(login_data, 'birthdays', useDate=True)
    msg = loadCache(key)
    if not msg:
        (error, data) = getAjaxResponse('home',
                                        'getBlockData',
                                        login_data=login_data,
                                        timeout=1800)

        if not data:
            print(error)
            return error
        else:
            try:
                html = data['blocks']['birthday']['html']
                # parse the birthday rows straight out of the HTML with a regex (no BeautifulSoup needed)
                split = re.split(
                    "<td><a (data-person-id='[^']+')[^>]+><img[^>]*></a><td[^>]*><a class='tooltip-person'[^>]*>([^<]+)</a><td[^>]*>([0-9]+)</?[^>]+>",
                    html)
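                # the split alternates header rows, data-person-id attributes, person names and day numbers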
                msg = ""
                p_id = None
                for line in split:
                    if not line:
                        continue
                    m = re.search('<th colspan="3">([^<]+)<tr>', line)
                    m2 = re.match('data-person-id=\'([^\']+)\'', line)
                    if m:
                        msg += "<i>%s</i>\n" % m.group(1)
                    elif m2:
                        p_id = m2.group(1)
                        msg += getPersonLink(login_data, p_id)
                    elif re.match('[0-9]+', line):
                        if p_id:
                            msg += f"{line} /P{p_id}\n"
                        else:
                            msg += f"{line}\n"
                        p_id = None
                    elif re.match('[^<>]+', line):
                        msg += "%s</a>: " % line
                if error:
                    msg += f"\n<i>{error}</i>"
                else:
                    redis.set(key, pickle.dumps(msg), ex=3600 * 24)
            except Exception as e:
                return "Error while parsing: %s" % e
    return msg
Example No. 5
    def _getCategories(self):
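        """Return (categories, cat_params) from the cached master data; on failure return the (error, data) result of the AJAX call."""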
        key = get_cache_key(self.login_data, self.cache_key + 'master_data')
        cat_data = self._loadCache(key)
        if not cat_data:
            (error, data) = super()._ajaxResponse(func='getMasterData',
                                                  timeout=None)
            if not data:
                return error, data
            categories = data['category']
            cat_params = {}
            for ctr, c in enumerate(categories):
                cat_params[f'category_ids[{ctr}]'] = c
            cat_data = categories, cat_params
            redis.set(key, pickle.dumps(cat_data), ex=7 * 24 * 3600)

        return cat_data
Example No. 6
    def getAllBookings(self):
        """Return (error, entries) with each booking (except status_id 99) as (booking, rules, start, duration), cached per day."""
        key = get_cache_key(self.login_data, self.cache_key, useDate=True)
        entries = self._loadCache(key)
        if not entries:
            entries = []
            (error, data) = self._ajaxResponse()
            if not data:
                return error, entries
            for b in data:
                booking = data[b]
                if int(booking['status_id']) == 99:
                    continue
                rules, start, duration = self._parseBooking(booking)
                entries.append((booking, rules, start, duration))
            if not error:
                redis.set(key, pickle.dumps(entries), ex=12 * 3600)
            return error, entries
        return None, entries
Example No. 7
    def getAllBookings(self):
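        """Return (error, entries) with every booking of every category as (booking, rules, start, duration), cached per day."""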
        self.categories, cat_params = self._getCategories()

        key = get_cache_key(self.login_data, self.cache_key, useDate=True)
        entries = self._loadCache(key)
        if not entries:
            (error, data) = self._ajaxResponse(**cat_params)

            if not data:
                return error, data
            entries = []
            for c in data:
                category = data[c]
                for b in category:
                    booking = category[b]
                    rules, start, duration = self._parseBooking(booking)
                    entries.append((booking, rules, start, duration))
            if not error:
                redis.set(key, pickle.dumps(entries), ex=3600 * 12)
            return error, entries
        return None, entries
Example No. 8
def download_file(login_data, url):
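    """Download url with the user's session cookies; return (success, res) where res carries either text ('msg') or a BytesIO ('file')."""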
    key = get_cache_key(login_data, 'song:download', url)
    res = loadCache(key)
    if not res:
        (success, res) = login(login_data)
        if not success:
            return False, res
        try:
            # path = 'temp_file'
            logger.info(f"Donwloading {url}")
            r = requests.get(url, cookies=res, stream=True, timeout=20)
            if r.status_code == 200:
                res = {}

                if url.endswith('.txt') or url.endswith('.sng'):
                    # .txt and .sng files are both returned as plain text
                    msg = [r.text]

                    res.update({
                        'type': 'msg',
                        'msg': msg,
                    })
                else:
                    bio = BytesIO(r.content)
                    res.update({
                        'type': 'file',
                        'file': bio,
                    })
            else:
                logger.warning(r)
                # do not reuse the login cookies dict for the error result
                res = {'msg': r.text[:50]}
                return False, res
        except Exception as e:
            logger.warning(e)
            res = {'msg': str(e)}
            return False, res
        redis.set(key, pickle.dumps(res))
    return True, res
Example No. 9
def findGroup(login_data, name):
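    """Find groups matching name (or a /G<id> reference); return a dict with 'success', a 'msg' list and optionally a 'photo' URL."""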
    key = get_cache_key(login_data, 'group:find', name)
    res = loadCache(key)
    error = None
    if not res:
        (error, data) = getAjaxResponse("db",
                                        "getMasterData",
                                        login_data=login_data,
                                        timeout=None)
        if not data:  # or 'groups':
            return {
                'success': False,
                'msg': error,
            }
        res = {
            'success': False,
            'msg': [f"No group found with the name {name} :("],
        }
        matches = []
        groups = data['groups']
        if re.match('/G([0-9]+)', name):
            g_id = name[2:]
            if g_id in groups:
                matches.append(groups[g_id])
        else:
            name = name.lower()
            for g in data['groups']:
                group = groups[g]
                bez = group['bezeichnung']
                if name in bez.lower():
                    matches.append(group)
        t = []
        if len(matches) == 0:
            pass
        elif len(matches) < 10:
            (error, persons) = getAjaxResponse("db",
                                               "getAllPersonData",
                                               login_data=login_data,
                                               timeout=24 * 3600)

            if not persons:
                return {'success': False, 'msg': error}
            for g in matches:
                g_id = g['id']
                if t:
                    t[-1] += '\n\n'
                url = urljoin(login_data['url'],
                              f'?q=churchdb#GroupView/searchEntry:#{g_id}')
                if len(matches) == 1:
                    #t.append(f'<a href="{url}">{g["bezeichnung"]}</a>\n')
                    t += printGroup(login_data=login_data,
                                    group=g,
                                    persons=persons,
                                    masterData=data,
                                    list=False,
                                    onlyName=False)
                    img_id = g['groupimage_id']
                    if img_id:
                        try:
                            img_data = getAjaxResponse(
                                f'files/{img_id}/metadata',
                                login_data=login_data,
                                isAjax=False,
                                timeout=24 * 3600)
                            res['photo'] = urljoin(login_data['url'],
                                                   img_data[1]['url'])
                        except Exception:
                            pass
                else:
                    t.append(
                        f'<a href="{url}">{g["bezeichnung"]}</a> /G{g_id}\n')

            res.update({'msg': t, 'success': True})

        elif len(matches) <= 50:
            for g in matches:
                g_id = g['id']
                url = urljoin(login_data['url'],
                              f'?q=churchdb#GroupView/searchEntry:#{g_id}')
                t.append(f'<a href="{url}">{g["bezeichnung"]}</a> /G{g_id}\n')
            res.update({'msg': t, 'success': True})
        else:
            res.update({
                'msg': ['Zu viele Gruppen gefunden! Bitte Suche verfeinern'],
                'success':
                False
            })

    if error:
        res['msg'].append(f'\n<i>{error}</i>')
    else:
        redis.set(key, pickle.dumps(res), ex=7 * 24 * 3600)
    return res
Example No. 10
def login(login_data=None, updateCache=False, login_token=False):
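    """Return (True, cookies) for a valid session, re-authenticating with the cached login key or the login token when the cookies have expired; on failure return (False, error_message)."""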
    key = get_cache_key(login_data, 'login_cookies', usePerson=True)
    cookies_pickle = redis.get(key)
    cookies = pickle.loads(cookies_pickle) if cookies_pickle else None

    # Check if session cookie still valid
    if cookies and not updateCache:
        data = cc_func('resource',
                       'pollForNews',
                       cookies,
                       login_data=login_data)
        if not data or 'data' not in data:
            cookies = None
        else:
            data = data['data']
            userid = data['userid']
            if not userid or userid == -1:
                cookies = None

    if not cookies or updateCache:  # need to login using permanent login key
        logger.info(f"Cookie is invalid for {login_data['personid']}")
        key_token = get_cache_key(login_data, 'login_token', usePerson=True)
        login_key_pickle = redis.get(key_token)
        login_key = pickle.loads(
            login_key_pickle) if login_key_pickle else None
        resp1 = requests.head(login_data['url'])
        cookies = resp1.cookies
        if not login_key or login_token:  # login key not valid, try login token
            logger.info(
                f"Getting new login token for {login_data['personid']}")
            login_url = urljoin(
                login_data['url'],
                f"?q=profile&loginstr={login_data['token']}&id={login_data['personid']}"
            )
            resp = requests.get(login_url, cookies=cookies)

            if 'Der verwendete Login-Link ist nicht mehr aktuell und kann deshalb nicht mehr verwendet werden.' in resp.text:
                redis.delete(get_user_login_key(login_data['telegramid']))
                return False, 'Login fehlgeschlagen, versuchs es mit einem neuen Link.'
            else:  # get new login key & cookies using login token
                data = cc_api(f'persons/{login_data["personid"]}/logintoken',
                              cookies=cookies,
                              login_data=login_data,
                              returnJson=True)
                if data['status'] == 'success':
                    inner_data = data['data']
                    # cookies = resp.cookies.get_dict()
                    redis.set(key_token, pickle.dumps(inner_data['data']))
                    redis.set(key, pickle.dumps(cookies.get_dict()))
                else:
                    return False, 'Login fehlgeschlagen, bitte log dich neu ein.'
        else:  # get new cookies using login key
            try:
                token_url = f'whoami?login_token={login_key}&user_id={login_data["personid"]}'
                data = cc_api(token_url,
                              cookies,
                              login_data=login_data,
                              returnJson=True)
                if data['status'] == 'success':
                    logger.info(data)
                    redis.set(key, pickle.dumps(cookies.get_dict()))
                else:
                    logger.warning(data)
                    return False, 'Login fehlgeschlagen, bitte log dich neu ein.'
            except Exception as e:
                return False, f'Could not renew token:\n{str(e)}'
            # redis.delete(get_user_login_key(login_data['telegramid']))
            # return False, 'Login fehlgeschlagen, versuchs es mit einem neuen Link.'

    return True, cookies