Ejemplo n.º 1
0
def main():
    """Rebuild the rda and old_rda tables from the rdaward.org master list.

    Downloads the plain-text RDA reference file, wipes both tables and
    re-inserts current districts (rda) and renamed/removed ones (old_rda).
    """
    root_logger = logging.getLogger('')
    root_logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    yield from _db.execute('delete from old_rda;')
    yield from _db.execute('delete from rda;')

    rda_rus = requests.get('http://rdaward.org/rda_rus.txt').text

    params_old = []
    params = []
    re_rda_line = re.compile(
        r'(^[A-Z][A-Z]-\d\d)\s+[^\t]+\t*([A-Z][A-Z]-\d\d|\*\*\*)?')
    for line in rda_rus.split('\r\n'):
        match_rda_line = re_rda_line.match(line)
        if not match_rda_line:
            continue
        replacement = match_rda_line.group(2)
        if replacement:
            # Second group present: district was renamed or removed;
            # '***' marks a removal with no successor.
            params_old.append({
                'old': match_rda_line.group(1),
                'new': None if replacement == '***' else replacement})
        else:
            params.append({'rda': match_rda_line.group(1)})

    yield from _db.execute(
        """insert into old_rda
        values (%(old)s, %(new)s)""",
        params_old, progress=True)

    yield from _db.execute(
        """insert into rda
        values (%(rda)s)""",
        params, progress=True)
Ejemplo n.º 2
0
def send_email(**email):
    """Send an email via the configured SMTP-over-SSL server.

    Keyword args:
        text: message body (str).
        fr: From header value.
        to: To header value / recipient address.
        subject: Subject header value.
        attachments: optional list of {'name': ..., 'data': ...} dicts.

    Returns True on success, False on any failure (failure is logged).
    """
    conf = site_conf()
    my_address = conf.get('email', 'address')
    msg = MIMEMultipart()
    msg.attach(MIMEText(email['text'].encode('utf-8'), 'plain', 'UTF-8'))
    msg['from'] = email['fr']
    msg['to'] = email['to']
    msg['MIME-Version'] = "1.0"
    msg['Subject'] = email['subject']
    msg['Content-Type'] = "text/plain; charset=utf-8"
    msg['Content-Transfer-Encoding'] = "quoted-printable"

    if 'attachments' in email and email['attachments']:
        for item in email['attachments']:
            part = MIMEApplication(item['data'], Name=item['name'])
            part['Content-Disposition'] = 'attachment; filename="%s"' % item[
                'name']
            msg.attach(part)
    try:
        # Context manager guarantees the SMTP connection is closed; the
        # original leaked the connection on every call (no quit/close).
        with smtplib.SMTP_SSL(conf.get('email', 'smtp')) as server:
            server.login(conf.get('email', 'login'),
                         conf.get('email', 'password'))
            server.sendmail(my_address, msg['to'], str(msg))
        return True
    except Exception:
        logging.exception('error sending email')
        return False
Ejemplo n.º 3
0
    def get_session_id(self):
        """Obtain a QRZ.com XML API session id.

        Adopts the id cached in the session file when it differs from the
        one currently held; otherwise performs an HTTP login and persists
        the new id. On failure, schedules a retry in 10 minutes on the
        event loop.
        """
        conf = site_conf()
        fp_session = conf.get('QRZCom', 'session_file')
        if os.path.isfile(fp_session):
            with open(fp_session, 'r') as f_session:
                session_id = f_session.read()
                if self.session_id != session_id:
                    # Cached id is new to us -- adopt it and skip the login.
                    self.session_id = session_id
                    return

        req, r_body = None, None
        try:
            # NOTE(review): the line below is garbled -- the credentials and
            # part of the request/parse logic were censored out of the source
            # ('******'), so it is not valid Python as-is. Presumably it
            # issued the login request and parsed the XML response into
            # r_dict before the 'Key' check.
            req = requests.get('http://xmldata.qrz.com/xml/current/?username='******';password='******'Key' in r_dict['QRZDatabase']['Session']:
                self.session_id = r_dict['QRZDatabase']['Session']['Key']
                with open(fp_session, 'w') as f_session:
                    f_session.write(self.session_id)
            else:
                raise Exception('Wrong QRZ response')
        except Exception:
            logging.exception('Error logging into QRZ.com')
            if req:
                # NOTE(review): requests' status_code is an attribute, not a
                # callable -- req.status_code() would raise TypeError here.
                logging.error('Http result code: ' + str(req.status_code()))
                logging.error('Http response body: ' + req.text)
            self.loop.call_later(60 * 10, self.get_session_id)
Ejemplo n.º 4
0
 def __init__(self, loop):
     """Cache QRZ.com credentials from site config and start a session.

     loop: asyncio event loop used for retry scheduling.
     """
     self.loop = loop
     conf = site_conf()
     self.login = conf.get('QRZCom', 'login')
     self.password = conf.get('QRZCom', 'password')
     self.session_id = None
     # Obtain (or load the cached) session id right away.
     self.get_session_id()
Ejemplo n.º 5
0
def main():
    """When called from shell, exports rankings/uploads/misc/stat JSON.

    Command-line flags select individual exports (-r rankings, -u recent
    uploads, -m misc, -s stat); with no flags everything is exported.
    """
    start_logging('export')
    logging.debug('start export')
    conf = site_conf()

    def set_local_owner(file):
        """Change exported file ownership when running as root."""
        if not os.getuid():
            shutil.chown(conf.get('web', 'root') + file,
                user=conf.get('web', 'user'), group=conf.get('web', 'group'))

    parser = argparse.ArgumentParser()
    parser.add_argument('-r', action="store_true")
    parser.add_argument('-u', action="store_true")
    parser.add_argument('-m', action="store_true")
    parser.add_argument('-s', action="store_true")
    args = parser.parse_args()
    export_all = not args.r and not args.u and not args.m and not args.s
    # Hoisted: the original fetched the event loop once per export.
    loop = asyncio.get_event_loop()
    if args.r or export_all:
        loop.run_until_complete(export_rankings(conf))
        set_local_owner('/json/rankings.json')
    if args.u or export_all:
        loop.run_until_complete(export_recent_uploads(conf))
        set_local_owner('/json/recent_uploads.json')
    if args.m or export_all:
        loop.run_until_complete(export_msc(conf))
        set_local_owner('/json/msc.json')
    if args.s or export_all:
        loop.run_until_complete(export_stat(conf))
        set_local_owner('/json/stat.json')
Ejemplo n.º 6
0
def cfm_rda_server():
    """Pytest fixture-style generator: yields a CfmRdaServer with clean test
    DB state, cleaning up again after the tests complete."""
    loop = asyncio.get_event_loop()
    srv = CfmRdaServer(loop)

    logging.basicConfig(level=logging.DEBUG,
                        format='%(asctime)s %(message)s',
                        datefmt='%Y-%m-%d %H:%M:%S')
    logging.info('starting tests')

    global TEST_USER
    TEST_USER = pytest.config.getoption('--test_user')
    global TEST_USER_EMAIL
    TEST_USER_EMAIL = pytest.config.getoption('--test_user_email')
    global TEST_HUNTER
    # Bug fix: the original re-assigned TEST_USER from '--test_user' here,
    # leaving TEST_HUNTER undefined and breaking the hunter_file path below.
    TEST_HUNTER = pytest.config.getoption('--test_hunter')
    hunter_file = site_conf().get('web', 'root') +\
            '/json/hunters/' + TEST_HUNTER + '.json'

    @asyncio.coroutine
    def clear_callsign(callsign):
        """Remove all DB traces of a callsign: qso, activators, uploads, users."""
        yield from srv._db.execute(
            """delete from qso 
                    where exists 
                        (select user_cs from uploads
                        where uploads.id = qso.upload_id and 
                            user_cs = %(callsign)s)""", {'callsign': callsign})
        yield from srv._db.execute(
            """delete from activators 
                    where exists 
                        (select user_cs from uploads
                        where uploads.id = activators.upload_id and 
                            user_cs = %(callsign)s)""", {'callsign': callsign})
        yield from srv._db.param_delete('uploads', {'user_cs': callsign})
        yield from srv._db.param_delete('users', {'callsign': callsign})

    @asyncio.coroutine
    def teardown():
        """Delete test artifacts: exported hunter file and fixed test QSO rows."""
        if os.path.isfile(hunter_file):
            os.remove(hunter_file)
        yield from asyncio.sleep(0.1)
        logging.debug('cleaning user ' + TEST_USER)
        yield from srv._db.execute("""delete from qso
                    where callsign = 'TE1ST' and station_callsign = 'R7CL/M'"""
                                   )
        yield from srv._db.execute("""delete from cfm_qsl_qso
                    where callsign = 'TE1ST' and station_callsign = 'R7CL/M'"""
                                   )
        yield from clear_callsign('RN6BN')

    loop.run_until_complete(teardown())
    yield srv
    loop.run_until_complete(teardown())
Ejemplo n.º 7
0
def main():
    """Query QRZ.ru for Russian hunters' RDA districts and store matches
    in callsigns_rda, writing batches of 100 rows at a time."""
    logger = logging.getLogger('')
    handler = logging.handlers.WatchedFileHandler('/var/log/cfmrda.qrz.log')
    handler.setFormatter(logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'))
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)
    handler.setLevel(logging.DEBUG)

    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    qrzru = QRZRuLink(asyncio.get_event_loop())
    pfx = Pfx('/usr/local/webcluster/cty.dat')

    @asyncio.coroutine
    def db_write(data):
        """Insert a batch of callsign/rda pairs attributed to QRZ.ru."""
        yield from _db.execute(
            """
            insert into callsigns_rda (callsign, source, rda)
            values (%(callsign)s, 'QRZ.ru', %(rda)s)""", data)

    callsigns = yield from _db.execute(
        """select distinct hunter from rda_hunter""")
    logging.debug('callsigns list received -- ' + str(len(callsigns)))
    params = []
    cnt = 0      # callsigns processed
    ru_cnt = 0   # Russian-prefix callsigns queried
    fnd_cnt = 0  # callsigns with a state/RDA found
    for _cs in callsigns:
        # Bug fix: the original incremented cnt twice per callsign (once
        # here and once at the loop bottom), doubling the progress counter.
        cnt += 1
        cs_pfx = pfx.get(_cs)
        if cs_pfx in ['R', 'R2F', 'R9']:
            if RE_SPECIAL.search(_cs):
                # Skip special-event callsigns.
                continue
            ru_cnt += 1
            data = yield from qrzru.query(_cs)
            if data and 'state' in data and data['state']:
                fnd_cnt += 1
                params.append({'callsign': _cs, 'rda': data['state']})
                logging.debug(_cs + ' found')
                if len(params) >= 100:
                    yield from db_write(params)
                    params = []
                    logging.debug('Processed ' + str(cnt) + '/' + str(ru_cnt) + '/'
                        + str(fnd_cnt) + ' of ' + str(len(callsigns)))
    # Bug fix: flush the final partial batch; the original silently dropped
    # up to 99 trailing records.
    if params:
        yield from db_write(params)
    logging.debug('qrz query complete')
Ejemplo n.º 8
0
    def __init__(self, loop):
        """Read QRZ.ru credentials and timing intervals from site config,
        set up the query queue, and start session acquisition.

        loop: asyncio event loop used by the session/query tasks.
        """
        conf = site_conf()
        self.loop = loop
        self.login = conf.get('QRZRu', 'login')
        self.password = conf.get('QRZRu', 'password')
        self._query_interval = conf.getfloat('QRZRu', 'query_interval')
        self._session_interval_success = conf.getint(
            'QRZRu', 'session_interval_success')
        self.session_interval_failure = conf.getint(
            'QRZRu', 'session_interval_failure')
        self.cs_queue = asyncio.Queue()
        self.session_task = None
        self.queue_task = None
        # Kick off session acquisition immediately.
        self.get_session_id()
Ejemplo n.º 9
0
def main():
    """Load old->new RDA mappings from a semicolon-separated CSV file into
    the old_rda table."""
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()

    with open('/var/www/adxc.test/csv/rda_old_new.csv', 'r') as f_data:
        params = []
        # Iterate the file lazily instead of materializing readlines().
        for line in f_data:
            old, new = line.strip().split(';')
            # Skip rows with an empty side of the mapping.
            if old and new:
                params.append({'old': old, 'new': new})

        yield from _db.execute("""insert into old_rda
            values (%(old)s, %(new)s)""",
            params, progress=True)
Ejemplo n.º 10
0
def setup_module():
    """Initialize module-wide test fixtures from site configuration and
    pytest command-line options."""
    global CONF, SECRET, WEB_ADDRESS, WEB_ROOT, API_URI
    global TEST_USER, TEST_USER_EMAIL, TEST_HUNTER, user_data, loop
    CONF = site_conf()
    SECRET = secret.get_secret(CONF.get('files', 'secret'))
    WEB_ADDRESS = CONF.get('web', 'address')
    WEB_ROOT = CONF.get('web', 'root')
    API_URI = WEB_ADDRESS + '/aiohttp'
    TEST_USER = pytest.config.getoption('--test_user')
    TEST_USER_EMAIL = pytest.config.getoption('--test_user_email')
    logging.debug('using test user ' + TEST_USER + ' email ' + TEST_USER_EMAIL)
    TEST_HUNTER = pytest.config.getoption('--test_hunter')
    user_data = None
    loop = asyncio.get_event_loop()
Ejemplo n.º 11
0
#!/usr/bin/python3
#coding=utf-8

import asyncio

import pytest

from export import export_rankings, export_recent_uploads, export_msc
from common import site_conf
from json_utils import load_json

# Module-level fixtures shared by the export tests below.
conf = site_conf()
WEB_ROOT = conf.get('web', 'root')  # filesystem root of exported JSON files
loop = asyncio.get_event_loop()


def test_export_rankings():
    """Exported rankings JSON has a rank-1 entry with a callsign for both
    the activator and hunter totals."""
    loop.run_until_complete(export_rankings(conf))

    rankings = load_json(WEB_ROOT + '/json/rankings.json')
    for role in ('activator', 'hunter'):
        top = rankings[role]['total']['total'][0]
        assert top['rank'] == 1
        assert top['callsign']


def test_export_recent_uploads():
Ejemplo n.º 12
0
def main():
    """Send QSO-confirmation request emails.

    Selects unsent cfm_request_qso rows (skipping blacklisted
    correspondents), groups them per correspondent, mails each one a
    tokenized confirmation link with up to 3 delivery attempts, then
    marks delivered rows as sent and upserts cfm_requests timestamps.
    """
    start_logging('send_cfm_requests')
    logging.debug('start send cfm requests')
    conf = site_conf()
    secret = get_secret(conf.get('files', 'secret'))
    db_params = conf.items('db')

    _db = DBConn(db_params)
    yield from _db.connect()
    # One row per correspondent with their pending QSOs aggregated as JSON.
    data = yield from _db.execute(
        """
        select correspondent, correspondent_email,
            json_agg(json_build_object('callsign', callsign, 
            'stationCallsign', station_callsign, 'rda', rda, 'band', band, 
            'mode', mode, 'tstamp', to_char(tstamp, 'DD mon YYYY HH24:MI'), 
            'rcvRST', rec_rst, 'sntRST', sent_rst)) as qso
        from
            (select * 
            from cfm_request_qso 
            where not sent and correspondent not in  
            (select callsign from cfm_request_blacklist)) as data
        group by correspondent, correspondent_email""", None, True)
    if not data:
        return
    sent_to = []
    for row in data:
        # Token authorizes the correspondent's confirm/blacklist pages.
        token = create_token(secret, {'callsign': row['correspondent']})
        link_cfm = conf.get('web', 'address') + '/#/cfm_qso/?token=' + token + \
            '&callsign=' + row['correspondent']
        link_blacklist = conf.get('web', 'address') +\
            '/#/cfm_blacklist/?token=' + token
        qso_txt = format_qsos(row['qso'])
        # Russian-language email body -- a runtime string, deliberately
        # left untranslated and unmodified.
        text = ("""
Здравствуйте, {correspondent}.
Просим Вас поддержать проект CFMRDA для создания единой базы по программе диплома RDA.

Вы можете подтвердить конкретные связи, которые очень важны Вашим корреспондентам, приславшим запросы или залить полностью свой лог.

""" + qso_txt + """
Для подтверждения QSO зайдите на эту страницу - {link_cfm}
Если указанные данные верны, поставьте отметки "Подтвердить" в каждом QSO и нажмите кнопку "OK"

Было бы удобнее, если бы вы зарегистрировались на CFMRDA.ru и загрузили бы свои логи в базу данных сайта.
Если Вы не хотите регистрироваться или у Вас возникли какие-то трудности при загрузке, пришлите свой лог, желательно в формате ADIF на адрес техподдержки [email protected] 

Спасибо. 73!
Команда CFMRDA.ru


Если вы не хотите в дальнейшем получать подобные запросы на подтверждение QSO, пройдите по этой ссылке - {link_blacklist}  
И нажмите кнопку "Не присылать мне больше запросов от CFMRDA.ru"
        """).format_map({'correspondent': row['correspondent'],\
            'link_cfm': link_cfm, 'link_blacklist': link_blacklist})
        retries = 0
        # Up to 3 delivery attempts, 10 s apart.
        while retries < 3:
            if send_email(text=text,\
                fr=conf.get('email', 'address'),\
                to=row['correspondent_email'],\
                subject="Запрос на подтверждение QSO от CFMRDA.ru"):
                # NOTE(review): logging.error is used for informational
                # progress messages throughout this function -- presumably
                # to force them into the error log; confirm intent.
                logging.error('cfm request email sent to ' +
                              row['correspondent'])
                sent_to.append(row)
                break
            else:
                retries += 1
                yield from asyncio.sleep(10)
        if retries == 3:
            logging.error('Email delivery failed. Correspondent: ' + row['correspondent']\
                + ', address: ' + row['correspondent_email'])
        # Throttle between correspondents.
        yield from asyncio.sleep(10)
    logging.error('all requests were sent')
    if sent_to:
        # Mark only the delivered correspondents' pending QSOs as sent.
        yield from _db.execute("""
            update cfm_request_qso 
            set sent = true, status_tstamp = now()
            where correspondent = %(correspondent)s and not sent""",\
            sent_to)
        logging.error('cfm_request_qso table updated')
        # Update-or-insert the per-correspondent request timestamp.
        yield from _db.execute(
            """
            update cfm_requests 
            set tstamp = now()
            where callsign = %(correspondent)s;
            insert into cfm_requests
            select %(correspondent)s, now()
            where not exists
                (select 1 
                from cfm_requests 
                where callsign = %(correspondent)s)
            """, sent_to)
        logging.error('cfm_requests table updated')
Ejemplo n.º 13
0
def main():
    """Import callsign/RDA records from the DL6KVA text dump into callsigns_rda.

    Each tab-separated input line holds: callsign, RDA code, a free-form
    validity date or date range, source, timestamp. Dates appear in many
    layouts (year only, month.year, day.month.year, explicit ranges,
    'from'/'SINCE'/'FROM'/'till' prefixes) and are normalized to
    'MM-DD-YYYY' strings before insertion.
    """
    logger = logging.getLogger('')
    logger.setLevel(logging.DEBUG)
    conf = site_conf()
    _db = DBConn(conf.items('db'))
    yield from _db.connect()
    re_split_date = re.compile(r"\D+")
    # Strict patterns, end-anchored: YYYY / M.YYYY / D.M.YYYY.
    re_date = [re.compile(x) for x in [r'(\d\d\d\d)$', r'(\d?\d)\W(\d?\d?\d\d)$',
            r'(\d?\d)\D(\d?\d)\D(\d?\d?\d\d)$']]
    # Loose fallbacks: D.M.Y anywhere, then year-first Y.M.D.
    re_date_l = re.compile(r'(\d?\d)\D(\d?\d)\D(\d?\d?\d\d)')
    re_date_bw = re.compile(r'(\d\d\d\d)\D(\d?\d)\D(\d?\d)')

    def parse_date(str_val, strict=True):
        """Split a date string into [day,][month,] year component strings.

        Returns None when nothing parses. Two-digit years are expanded
        (<30 -> 20xx, otherwise 19xx); three-digit years are rejected.
        """
        str_val = str_val.strip()
        parsed = []
        if strict:
            for re_x in re_date:
                m_date = re_x.match(str_val)
                if m_date:
                    grp = 1
                    while grp <= re_x.groups:
                        parsed.append(m_date.group(grp))
                        grp += 1
        else:
            m_date = re_date_l.search(str_val)
            if m_date:
                grp = 1
                while grp < 4:
                    parsed.append(m_date.group(grp))
                    grp += 1
            else:
                m_date = re_date_bw.search(str_val)
                if m_date:
                    # Year-first layout: reorder into day, month, year.
                    parsed = [
                        m_date.group(3),
                        m_date.group(2),
                        m_date.group(1)
                    ]
        if parsed:
            if len(parsed[-1]) < 4:
                if len(parsed[-1]) == 3:
                    parsed = None
                else:
                    if int(parsed[-1]) < 30:
                        parsed[-1] = '20' + parsed[-1]
                    else:
                        parsed[-1] = '19' + parsed[-1]
        return parsed if parsed else None

    def compose_date(parsed_dt, end=False):
        """Build an 'MM-DD-YYYY' string from parsed parts, filling missing
        month/day with the period start (or its end when end=True)."""
        pdt = list(parsed_dt)
        if len(pdt) < 2:
            # No month: January for a start date, December for an end date.
            pdt.insert(0, '12' if end else '01')
        if len(pdt) < 3:
            # No day: 1st for a start date, last day of month for an end date.
            pdt.insert(0,
                str(calendar.monthrange(int(pdt[1]), int(pdt[0]))[1]) if end
                    else '01')
        return pdt[1] + '-' + pdt[0] + '-' + pdt[2]

    with open('/var/www/cfmrda-dev/DL6KVA.txt', 'r',
              encoding='cp437') as f_data:
        params = []
        for line in f_data.readlines():
            fields = [x.strip() for x in line.split('\t')]
            if fields[3] == 'DELETED':
                del fields[2]
            parsed_dt_start, parsed_dt_stop = None, None
            date = parse_date(fields[2])
            if date:
                parsed_dt_start = date
                parsed_dt_stop = date
            else:
                if '-' in fields[2]:
                    # Explicit 'start-stop' range: missing leading components
                    # of the start date are borrowed from the stop date.
                    str_dates = fields[2].split('-')
                    parsed_dt_stop = parse_date(str_dates[1])
                    if parsed_dt_stop:
                        parsed_dt_start = re_split_date.split(str_dates[0])
                        if not parsed_dt_start[-1]:
                            del parsed_dt_start[-1]
                        while len(parsed_dt_start) < len(parsed_dt_stop):
                            parsed_dt_start.append(
                                parsed_dt_stop[len(parsed_dt_start)])
                elif 'from' in fields[2] or 'SINCE' in fields[
                        2] or 'FROM' in fields[2]:
                    str_dt_start = fields[2].replace('from ', '').replace('SINCE ',
                        '').replace('FROM ', '')
                    parsed_dt_start = parse_date(str_dt_start)
                elif 'till' in fields[2]:
                    str_dt_stop = fields[2].replace('till ', '')
                    parsed_dt_stop = parse_date(str_dt_stop)
                if not (parsed_dt_start or parsed_dt_stop):
                    # Last resort: loose (non-anchored) date search.
                    date = parse_date(fields[2], False)
                    if date:
                        parsed_dt_start = date
                        parsed_dt_stop = date
            try:
                dt_start = compose_date(
                    parsed_dt_start) if parsed_dt_start else None
                dt_stop = compose_date(parsed_dt_stop,
                                       True) if parsed_dt_stop else None
            except Exception:
                # Bug fix: skip lines whose dates cannot be composed; the
                # original fell through with dt_start/dt_stop unbound (a
                # NameError on the first failure) or stale from the
                # previous line.
                logging.exception(fields[2])
                continue

            if len(fields[1]) != 5:
                # Malformed RDA code -- report and skip.
                print(fields[1])
                continue

            params.append({'callsign': fields[0], 'rda': fields[1],
                    'dt_start': dt_start,
                    'dt_stop': dt_stop,
                    'source': fields[4] if fields[4] else 'RDAWARD.org',
                    'ts': fields[5] if fields[5] else '2019-06-17'})

        yield from _db.execute("""insert into callsigns_rda
            (callsign, rda, dt_start, dt_stop, source, ts)
            values
            (%(callsign)s, %(rda)s, %(dt_start)s, %(dt_stop)s, %(source)s, %(ts)s)""",
            params, progress=True)
Ejemplo n.º 14
0
                        ext_logger_id=row['id'],
                        qsos=qsos)

                    logging.debug(
                        str(db_res['qso']['ok']) + ' qso were stored in db.')

            update_params = {\
                'qso_count': qso_count,\
                'state': 0,\
                'last_updated': datetime.now().strftime("%Y-%m-%d")}

        yield from _db.param_update('ext_loggers', splice_params(row, ('id',)),\
            update_params)
        logging.debug('logger data updated')


if __name__ == "__main__":
    # Script entry point: configure logging and enforce single-instance
    # execution via an exclusive lock on the pid file.
    start_logging('loggers')
    logging.debug('start loading loggers')
    CONF = site_conf()

    PID_FILENAME = CONF.get('files', 'loggers_pid')
    # The handle is deliberately never closed: the lock must be held for
    # the lifetime of the process.
    PID_FILE = open(PID_FILENAME, 'w')
    try:
        fcntl.lockf(PID_FILE, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        # Lock already held -> another instance is running; exit quietly.
        logging.error('another instance is running')
        sys.exit(0)

    asyncio.get_event_loop().run_until_complete(main(CONF))