Example #1
def main():
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    yield from _db.execute('delete from old_rda;')
    yield from _db.execute('delete from rda;')

    rda_rus = requests.get('http://rdaward.org/rda_rus.txt').text

    params_old = []
    params = []
    lines = rda_rus.split('\r\n')
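    # matches lines like "XX-NN  <district name>  XX-NN" or "XX-NN  <name>  ***";
    # a second group of "***" apparently marks a district deleted without replacement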
    re_rda_line = re.compile(
        r'(^[A-Z][A-Z]-\d\d)\s+[^\t]+\t*([A-Z][A-Z]-\d\d|\*\*\*)?')
    for line in lines:
        match_rda_line = re_rda_line.match(line)
        if match_rda_line:
            if match_rda_line.group(2):
                old = match_rda_line.group(1)
                new = (None if match_rda_line.group(2) == '***'
                       else match_rda_line.group(2))
                params_old.append({'old': old, 'new': new})
            else:
                params.append({'rda': match_rda_line.group(1)})

    yield from _db.execute("""insert into old_rda
        values (%(old)s, %(new)s)""",\
        params_old, progress=True)

    yield from _db.execute("""insert into rda
        values (%(rda)s)""",\
        params, progress=True)
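
These main() functions are pre-async/await generator coroutines (yield from), so they do nothing until an event loop drives them. A minimal runner sketch, assuming only the standard library (asyncio.coroutine is the legacy decorator this style relies on; DBConn and site_conf come from the project's own db/conf modules):

import asyncio

# Hypothetical driver: wrap the generator-style main() as a coroutine
# and run it to completion on the default event loop.
loop = asyncio.get_event_loop()
loop.run_until_complete(asyncio.coroutine(main)())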
Example #2
def main():
    logger = logging.getLogger('')
    handler = logging.handlers.WatchedFileHandler('/var/log/cfmrda.qrz.log')
    handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    handler.setLevel(logging.DEBUG)

    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    qrzru = QRZRuLink(asyncio.get_event_loop())
    pfx = Pfx('/usr/local/webcluster/cty.dat')

    @asyncio.coroutine
    def db_write(data):
        yield from _db.execute(
            """
            insert into callsigns_rda (callsign, source, rda)
            values (%(callsign)s, 'QRZ.ru', %(rda)s)""", data)


    callsigns = yield from _db.execute(
        """select distinct hunter from rda_hunter""")
    logging.debug('callsigns list received -- ' + str(len(callsigns)))
    params = []
    cnt = 0
    ru_cnt = 0
    fnd_cnt = 0
    for _cs in callsigns:
        cnt += 1
        cs_pfx = pfx.get(_cs)
        if cs_pfx in ['R', 'R2F', 'R9']:
            m_special = RE_SPECIAL.search(_cs)
            if m_special:
                continue
            ru_cnt += 1
            data = yield from qrzru.query(_cs)
            if data and 'state' in data and data['state']:
                fnd_cnt += 1
                params.append({'callsign': _cs, 'rda': data['state']})
                logging.debug(_cs + ' found')
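                # write the accumulated rows in batches of 100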
                if len(params) >= 100:
                    yield from db_write(params)
                    params = []
                    logging.debug('Processed ' + str(cnt) + '/' + str(ru_cnt) + '/'
                        + str(fnd_cnt) + ' of ' + str(len(callsigns)))
    if params:
        # flush rows left over after the last full batch of 100
        yield from db_write(params)
    logging.debug('qrz query complete')
Example #3
def export_rankings(conf):
    """rebuild rankings table in db and export top100 to json file for web"""
    logging.debug('export rankings')

    _db = DBConn(conf.items('db'))
    yield from _db.connect()

    yield from _db.execute("select from build_rankings()")
    logging.debug('rankings table rebuilt')

    rankings = yield from _db.execute(
        """
        select rankings_json('_rank < 104') as data
        """, None, False)
    save_json(rankings, conf.get('web', 'root') + '/json/rankings.json')
    logging.debug('export rankings finished')
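
Several of these exporters hand their result to save_json. A minimal sketch of such a helper, purely an assumption about the project's own function (name and signature taken from the calls above, body hypothetical):

import json

def save_json(data, path):
    # dump the query result as UTF-8 JSON for the web frontend
    with open(path, 'w') as f_json:
        json.dump(data, f_json, ensure_ascii=False)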
Example #4
def main():
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()

    with open('/var/www/adxc.test/csv/rda_old_new.csv', 'r') as f_data:
        params = []
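        # each line is expected to look like "<old RDA>;<new RDA>"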
        for line in f_data.readlines():
            fields = {}
            fields['old'], fields['new'] = line.strip().split(';')
            if fields['old'] and fields['new']:
                params.append(fields)

        yield from _db.execute("""insert into old_rda
            values (%(old)s, %(new)s)""",\
            params, progress=True)
Example #5
def export_msc(conf):
    """export misc db data to json file for web"""
    logging.debug('export misc')

    _db = DBConn(conf.items('db'))
    yield from _db.connect()

    data = {}
    data['qsoCount'] = (yield from _db.execute(
        """
        select count(*) as qso_count
        from qso;    
    """, None, False))

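    # per-user counters: QSL cards awaiting checking (state is null), already
    # checked ones, and unsent e-mail requests, full-joined into one row per callsign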
    data['userActivity'] = (yield from _db.execute(
        """
        select json_agg(data) from
            (select json_build_object('callsign', 
                coalesce(qsl_wait.callsign, qsl_today.callsign, email.callsign), 
                'qslWait', qsl_wait, 'qslToday', qsl_today, 
                'email', email) as data 
            from
                (select user_cs as callsign, count(*) as qsl_wait 
                from cfm_qsl_qso 
                where state is null 
                group by user_cs) as qsl_wait 
                full join
                (select user_cs as callsign, count(*) as qsl_today 
                from cfm_qsl_qso 
                where state
                group by user_cs) as qsl_today 
                on qsl_wait.callsign = qsl_today.callsign 
                full join
                (select user_cs as callsign, count(*) as email 
                from cfm_request_qso 
                where not sent and user_cs is not null  
                group by user_cs) as email 
                on coalesce(qsl_wait.callsign, qsl_today.callsign) = email.callsign
                order by coalesce(qsl_wait.callsign, qsl_today.callsign, 
                    email.callsign)
            )  as data""", None, False))

    save_json(data, conf.get('web', 'root') + '/json/msc.json')
    logging.debug('export misc finished')
Example #6
def export_recent_uploads(conf):
    """export 20 recent uploaded file batches to json file for web"""
    logging.debug('export recent uploads')

    _db = DBConn(conf.items('db'))
    yield from _db.connect()

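    # 20 most recent upload batches (grouped per day, user and upload type), joined
    # laterally with their activator callsigns and per-upload RDA lists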
    data = yield from _db.execute(
        """
        select json_agg(json_build_object(
            'activators', activators,
            'rda', rda,
            'uploadDate', to_char(max_tstamp, 'DD mon YYYY'),
            'uploadTime', to_char(max_tstamp, 'HH24:MI'),
            'uploadType', upload_type,
            'uploader', uploader)) as data
        from
        (select  user_cs as uploader, upload_type,
                max(tstamp) as max_tstamp,
                array_agg(id) as ids
            from uploads 
            where ext_logger_id is null
            group by date(tstamp), user_cs, upload_type
            order by max_tstamp desc
            limit 20) as ru,
        lateral 
        (select array_agg(distinct station_callsign) as activators 
            from qso 
            where upload_id = any(ids)) as acts,
        lateral 
        (select array_agg(rda) as rda   
            from 
                (select json_build_object('rda', array_agg(distinct rda), 
                        'id', upload_id) as rda 
                from qso 
                where upload_id = any(ids) 
                group by upload_id) as rdas0) as rdas 
        """, None, False)
    save_json(data, conf.get('web', 'root') + '/json/recent_uploads.json')
    logging.debug('export recent uploads finished')
Example #7
def export_stat(conf):
    """export statistic data to json file for web"""
    logging.debug('export stats')

    _db = DBConn(conf.items('db'))
    yield from _db.connect()

    data = {}
    data['qso by rda'] = (yield from _db.execute(
        """
        select json_object_agg(rda, data) as data from
            (select rda, json_object_agg(band, data) as data from
                (select rda, band, json_object_agg(mode, qso_count) as data from
                    (select count(*) as qso_count, rda, band, mode 
                    from qso 
                    where upload_id is null or 
                        (select enabled from uploads 
                        where id=upload_id) 
                    group by mode, band, rda
                    ) as q0
                group by rda, band) as q1
            group by rda) as q2
    """, None, False))
    for rda_data in data['qso by rda'].values():
        rda_total = {'total': 0}
        for band_data in rda_data.values():
            band_total = 0
            for mode, qso_count in band_data.items():
                band_total += qso_count
                if mode not in rda_total:
                    rda_total[mode] = 0
                rda_total[mode] += qso_count
            band_data['total'] = band_total
            rda_total['total'] += band_total
        rda_data['total'] = rda_total
    save_json(data, conf.get('web', 'root') + '/json/stat.json')
    logging.debug('export stats finished')
Example #8
#!/usr/bin/python3
#coding=utf-8

import json

from db import DBConn
from conf import CONF

DB = DBConn(CONF.items('db'))
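# note: unlike the coroutine examples above, this script appears to drive
# DBConn synchronously (no event loop, no yield from)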
DB.connect()

feeders_data = DB.execute("""
    select id, props 
    from devices
    where device_type_id = 3""")

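# props[2] apparently holds the feeder's timer list; give every timer entry
# a trailing 0 before writing props back as JSON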
for feeder_data in feeders_data:
    for timer in feeder_data['props'][2]:
        timer.append(0)

    feeder_data['props'] = json.dumps(feeder_data['props'])

    DB.execute(
        """
        update devices 
        set props = %(props)s
        where id = %(id)s""", feeder_data)
Example #9
def main():
    """sends cfm requests"""
    start_logging('send_cfm_requests')
    logging.debug('start send cfm requests')
    conf = site_conf()
    secret = get_secret(conf.get('files', 'secret'))
    db_params = conf.items('db')

    _db = DBConn(db_params)
    yield from _db.connect()
    data = yield from _db.execute(
        """
        select correspondent, correspondent_email,
            json_agg(json_build_object('callsign', callsign, 
            'stationCallsign', station_callsign, 'rda', rda, 'band', band, 
            'mode', mode, 'tstamp', to_char(tstamp, 'DD mon YYYY HH24:MI'), 
            'rcvRST', rec_rst, 'sntRST', sent_rst)) as qso
        from
            (select * 
            from cfm_request_qso 
            where not sent and correspondent not in  
            (select callsign from cfm_request_blacklist)) as data
        group by correspondent, correspondent_email""", None, True)
    if not data:
        return
    sent_to = []
    for row in data:
        token = create_token(secret, {'callsign': row['correspondent']})
        link_cfm = conf.get('web', 'address') + '/#/cfm_qso/?token=' + token + \
            '&callsign=' + row['correspondent']
        link_blacklist = conf.get('web', 'address') +\
            '/#/cfm_blacklist/?token=' + token
        qso_txt = format_qsos(row['qso'])
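        # Russian-language request body: asks the correspondent to confirm the
        # listed QSOs via link_cfm, or to opt out of future mails via link_blacklist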
        text = ("""
Здравствуйте, {correspondent}.
Просим Вас поддержать проект CFMRDA для создания единой базы по программе диплома RDA.

Вы можете подтвердить конкретные связи, которые очень важны Вашим корреспондентам, приславшим запросы или залить полностью свой лог.

""" + qso_txt + """
Для подтверждения QSO зайдите на эту страницу - {link_cfm}
Если указанные данные верны, поставьте отметки "Подтвердить" в каждом QSO и нажмите кнопку "OK"

Было бы удобнее, если бы вы зарегистрировались на CFMRDA.ru и загрузили бы свои логи в базу данных сайта.
Если Вы не хотите регистрироваться или у Вас возникли какие-то трудности при загрузке, пришлите свой лог, желательно в формате ADIF на адрес техподдержки [email protected] 

Спасибо. 73!
Команда CFMRDA.ru


Если вы не хотите в дальнейшем получать подобные запросы на подтверждение QSO, пройдите по этой ссылке - {link_blacklist}  
И нажмите кнопку "Не присылать мне больше запросов от CFMRDA.ru"
        """).format_map({'correspondent': row['correspondent'],\
            'link_cfm': link_cfm, 'link_blacklist': link_blacklist})
        retries = 0
        while retries < 3:
            if send_email(text=text,
                fr=conf.get('email', 'address'),
                to=row['correspondent_email'],
                subject="Запрос на подтверждение QSO от CFMRDA.ru"):
                logging.info('cfm request email sent to ' +
                             row['correspondent'])
                sent_to.append(row)
                break
            else:
                retries += 1
                yield from asyncio.sleep(10)
        if retries == 3:
            logging.error('Email delivery failed. Correspondent: ' + row['correspondent']
                + ', address: ' + row['correspondent_email'])
        yield from asyncio.sleep(10)
    logging.info('all requests were sent')
    if sent_to:
        yield from _db.execute("""
            update cfm_request_qso
            set sent = true, status_tstamp = now()
            where correspondent = %(correspondent)s and not sent""",
            sent_to)
        logging.info('cfm_request_qso table updated')
        yield from _db.execute(
            """
            update cfm_requests 
            set tstamp = now()
            where callsign = %(correspondent)s;
            insert into cfm_requests
            select %(correspondent)s, now()
            where not exists
                (select 1 
                from cfm_requests 
                where callsign = %(correspondent)s)
            """, sent_to)
        logging.info('cfm_requests table updated')
Example #10
def main():
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    re_split = re.compile(r"\t+")
    re_split_date = re.compile(r"\D+")
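    # strict date layouts, tried in order: "YYYY", "M<sep>YYYY" and
    # "D<sep>M<sep>YYYY" (non-digit separators, 2-digit years allowed)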
    re_date = [re.compile(x) for x in [r'(\d\d\d\d)$', r'(\d?\d)\W(\d?\d?\d\d)$',
            r'(\d?\d)\D(\d?\d)\D(\d?\d?\d\d)$']]
    re_date_l = re.compile(r'(\d?\d)\D(\d?\d)\D(\d?\d?\d\d)')
    re_date_bw = re.compile(r'(\d\d\d\d)\D(\d?\d)\D(\d?\d)')

    def parse_date(str_val, strict=True):
        str_val = str_val.strip()
        parsed = []
        if strict:
            for re_x in re_date:
                m_date = re_x.match(str_val)
                if m_date:
                    grp = 1
                    while grp <= re_x.groups:
                        parsed.append(m_date.group(grp))
                        grp += 1
                    # stop at the first matching layout so a second match
                    # cannot append extra groups
                    break
        else:
            m_date = re_date_l.search(str_val)
            if m_date:
                grp = 1
                while grp < 4:
                    parsed.append(m_date.group(grp))
                    grp += 1
            else:
                m_date = re_date_bw.search(str_val)
                if m_date:
                    parsed = [
                        m_date.group(3),
                        m_date.group(2),
                        m_date.group(1)
                    ]
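        # normalize 2-digit years (00-29 -> 20xx, otherwise 19xx);
        # a 3-digit year invalidates the whole parse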
        if parsed:
            if len(parsed[-1]) < 4:
                if len(parsed[-1]) == 3:
                    parsed = None
                else:
                    if int(parsed[-1]) < 30:
                        parsed[-1] = '20' + parsed[-1]
                    else:
                        parsed[-1] = '19' + parsed[-1]
        return parsed if parsed else None

    def compose_date(parsed_dt, end=False):
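        # parsed_dt is [day, month, year] with leading parts possibly missing;
        # pad them (start of a range -> earliest, end -> latest) and
        # return 'MM-DD-YYYY'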
        pdt = []
        for xdt in parsed_dt:
            pdt.append(xdt)
        if len(pdt) < 2:
            pdt.insert(0, '12' if end else '01')
        if len(pdt) < 3:
            pdt.insert(0,
                str(calendar.monthrange(int(pdt[1]), int(pdt[0]))[1]) if end
                    else '01')
        return pdt[1] + '-' + pdt[0] + '-' + pdt[2]

    with open('/var/www/cfmrda-dev/DL6KVA.txt', 'r',
              encoding='cp437') as f_data:
        params = []
        for line in f_data.readlines():
            fields = [x.strip() for x in line.split('\t')]
            if fields[3] == 'DELETED':
                del fields[2]
            parsed_dt_start, parsed_dt_stop = None, None
            date = parse_date(fields[2])
            if date:
                parsed_dt_start = date
                parsed_dt_stop = date
            else:
                if '-' in fields[2]:
                    str_dates = fields[2].split('-')
                    parsed_dt_stop = parse_date(str_dates[1])
                    if parsed_dt_stop:
                        parsed_dt_start = re_split_date.split(str_dates[0])
                        if not parsed_dt_start[-1]:
                            del parsed_dt_start[-1]
                        while len(parsed_dt_start) < len(parsed_dt_stop):
                            parsed_dt_start.append(
                                parsed_dt_stop[len(parsed_dt_start)])
                elif ('from' in fields[2] or 'SINCE' in fields[2]
                        or 'FROM' in fields[2]):
                    str_dt_start = fields[2].replace('from ', '').replace(
                        'SINCE ', '').replace('FROM ', '')
                    parsed_dt_start = parse_date(str_dt_start)
                elif 'till' in fields[2]:
                    str_dt_stop = fields[2].replace('till ', '')
                    parsed_dt_stop = parse_date(str_dt_stop)
                if not (parsed_dt_start or parsed_dt_stop):
                    date = parse_date(fields[2], False)
                    if date:
                        parsed_dt_start = date
                        parsed_dt_stop = date
            try:
                dt_start = (compose_date(parsed_dt_start)
                            if parsed_dt_start else None)
                dt_stop = (compose_date(parsed_dt_stop, True)
                           if parsed_dt_stop else None)
            except Exception:
                logging.exception(fields[2])
                # dt_start/dt_stop would be unbound or stale here; skip the row
                continue

            if len(fields[1]) != 5:
                print(fields[1])
                continue

            params.append({'callsign': fields[0], 'rda': fields[1],
                    'dt_start': dt_start, 'dt_stop': dt_stop,
                    'source': fields[4] if fields[4] else 'RDAWARD.org',
                    'ts': fields[5] if fields[5] else '2019-06-17'})

        yield from _db.execute("""insert into callsigns_rda
            (callsign, rda, dt_start, dt_stop, source, ts)
            values
            (%(callsign)s, %(rda)s, %(dt_start)s, %(dt_stop)s, %(source)s, %(ts)s)""",\
            params, progress=True)
Example #11
def main(conf):
    """does the job"""
    db_params = conf.items('db')

    _db = DBConn(db_params)
    yield from _db.connect()

    loggers = yield from _db.execute(
        """
        select id, callsign, logger, login_data, qso_count, 
            to_char(last_updated, 'YYYY-MM-DD') as last_updated
        from ext_loggers
        where state = 0 and 
            (last_updated is null or last_updated < now() - interval '14 days')
        """, None, True)
    if not loggers:
        logging.debug('No updates are due today.')
        return
    for row in loggers.values():
        logger = ExtLogger(row['logger'])
        update_params = {}
        adifs = None
        try:
            adifs = logger.load(row['login_data'])
            logging.debug(row['callsign'] + ' ' + row['logger'] +
                          ' data was downloaded.')
        except Exception:
            logging.exception(row['callsign'] + ' ' + row['logger'] +
                              ' error occurred')
            update_params['state'] = 1

        if adifs:

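            # drop uploads previously imported from this logger before
            # storing the fresh download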
            prev_uploads = yield from _db.execute(
                """
                select id from uploads where ext_logger_id = %(id)s""", row,
                True)
            if prev_uploads:
                for upload_id in prev_uploads:
                    yield from _db.remove_upload(upload_id)

            qso_count = 0

            for adif in adifs:
                adif = adif.upper()
                qso_count += adif.count('<EOR>')
                parsed = load_adif(adif, 'STATION_CALLSIGN', ignore_activator=True,
                    strip_callsign_flag=False)
                date_start, date_end = None, None
                sql_rda = """
                    select distinct rda 
                    from callsigns_rda
                    where callsign = %(callsign)s and rda <> '***' and 
                        (dt_start is null or dt_start <= %(tstamp)s) and
                        (dt_stop is null or dt_stop >= %(tstamp)s)
                """
                qsos = []

                with (yield from _db.pool.cursor()) as cur:
                    for qso in parsed['qso']:
                        yield from exec_cur(cur, sql_rda, qso)
                        if cur.rowcount == 1:
                            qso['rda'] = (yield from cur.fetchone())[0]
                            qso['callsign'], qso['station_callsign'] = \
                                qso['station_callsign'], qso['callsign']
                            if not date_start or date_start > qso['tstamp']:
                                date_start = qso['tstamp']
                            if not date_end or date_end < qso['tstamp']:
                                date_end = qso['tstamp']
                            qsos.append(qso)

                if qsos:
                    logging.debug(str(len(qsos)) + ' rda qso found.')
                    file_hash = yield from _db.check_upload_hash(
                        adif.encode('utf-8'))

                    db_res = yield from _db.create_upload(
                        callsign=row['callsign'],
                        upload_type=row['logger'],
                        date_start=date_start,
                        date_end=date_end,
                        file_hash=file_hash,
                        activators=set([]),
                        ext_logger_id=row['id'],
                        qsos=qsos)

                    logging.debug(
                        str(db_res['qso']['ok']) + ' qso were stored in db.')

            update_params = {
                'qso_count': qso_count,
                'state': 0,
                'last_updated': datetime.now().strftime("%Y-%m-%d")}

        yield from _db.param_update('ext_loggers', splice_params(row, ('id',)),
            update_params)
        logging.debug('logger data updated')