Beispiel #1
0
def getfolders(request):
    """Return the list of BTSync folders with per-folder user info as JSON."""
    try:
        response = requests.get(
            "http://" + config['btsync_server_address'] + "/api",
            params={'method': 'get_folders'},
            auth=('team', 'sync'))
        folders = response.json()

        for folder in folders:
            # Display name is the last path component of the sync dir.
            folder['name'] = folder['dir'].split('/')[-1]

            users_dir = folder['dir'] + '/.Users'
            if not os.path.isdir(users_dir):
                continue

            folder['uid'] = config['uid']
            folder['identity'] = loadJSON(
                users_dir + '/' + config['uid'] + '.json')['identity']

            members = []
            for entry in os.listdir(users_dir):
                member = loadJSON(users_dir + '/' + entry)
                if member['identity'] == folder['identity']:
                    # Mark the current user's own entry.
                    member['identity'] += ' (Ty)'
                members.append(member)
            folder['users'] = members

        return JsonResponse(folders, safe=False)
    except Exception:
        return HttpResponseServerError('Wystąpił błąd podczas pobierania listy folderów.')
Beispiel #2
0
def activeUsersHandler(request):
    """Record the requesting user's activity and prune stale entries."""
    data = yield from request.json()
    if 'user' not in data:
        return web.Response(text='OK')
    station = data.get('station')
    if station:
        stationPath = getStationPath(data['station'])
        stationSettings = loadJSON(stationPath + '/settings.json')
        if not stationSettings:
            return web.HTTPBadRequest(text='This station was deleted or moved')
    auPath = webRoot + '/js/activeUsers.json'
    nowTs = int(datetime.now().timestamp())
    activeUsers = loadJSON(auPath) or {}
    # Drop users that have been inactive for two minutes or more.
    activeUsers = {user: entry for user, entry in activeUsers.items()
                   if nowTs - entry['ts'] < 120}
    try:
        activeUsers[data['user']] = {
            'chat': data['chat'],
            'ts': nowTs,
            'station': station,
            'typing': data['typing']
        }
        with open(auPath, 'w') as f:
            json.dump(activeUsers, f, ensure_ascii=False)
    except Exception:
        logging.exception('Exception in activeUserHandler')
        logging.error(data)
    return web.Response(text='OK')
Beispiel #3
0
def getallcomments(request):
    """Collect every comment under <path>/.Comments and return them as JSON."""
    comments = []

    try:
        path = json.loads(request.body)['path'] + '/.Comments'

        for curdir, dirnames, _ in os.walk(path):
            for dirname in dirnames:
                if not pattern.match(dirname):
                    continue

                threadpath = os.path.join(curdir, dirname)
                meta = loadJSON(os.path.join(threadpath, 'meta'))
                # Thread descriptor shared by all of its comments.
                thread = {
                    'fullpath': threadpath,
                    'timestamp': meta['timestamp'],
                    'name': meta['topic'],
                    'type': 'thread',
                    'path': threadpath.replace(path, ''),
                    'numberofcomments': len(os.listdir(threadpath)) - 1,
                    'unreadcomment': False,
                }

                for entry in os.listdir(threadpath):
                    if entry == 'meta':
                        continue
                    comment = loadJSON(os.path.join(threadpath, entry))
                    comment['topic'] = thread
                    comments.append(comment)

        return JsonResponse({'comments': comments, 'stats': getstats(comments)}, safe=False)
    except Exception:
        return HttpResponseServerError('Wystąpił nieznany błąd podczas pobierania komentarzy.')
Beispiel #4
0
def updateLocation(newData):
    """Merge a new position report into location.json.

    Updates timestamps, remembers the previous fix, and derives the
    great-circle distance (km) and speed (km/h) between the last two
    fixes.
    """
    fp = webRoot + '/location.json'
    data = loadJSON(fp)
    if not data:
        data = {}
    # Older files only carried 'ts'; backfill 'locTs' from it once.
    # ('in' replaces dict.has_key, which no longer exists in Python 3.)
    if 'locTs' not in data and 'ts' in data:
        data['locTs'] = data['ts']
    data['ts'] = int(datetime.now().strftime("%s"))
    data['date'], data['time'] = dtFmt(datetime.utcnow())
    if newData['location']:
        if data.get('location'):
            # Remember the previous fix so distance/speed can be derived.
            data['prev'] = {'location': data['location'][:],
                            'ts': data['locTs']}
        data['locTs'] = data['ts']
        data['location'] = newData['location']
        data['loc'] = newData['loc']
        data['rafa'] = newData['rafa']
        if 'prev' in data:
            # Haversine distance between current and previous fix;
            # locations appear to be stored as [lon, lat] -- TODO confirm.
            lat = [data['location'][1], data['prev']['location'][1]]
            lon = [data['location'][0], data['prev']['location'][0]]
            dlon = lon[0] - lon[1]
            dlat = lat[0] - lat[1]
            a = (sind(dlat / 2))**2 + cosd(lat[0]) * cosd(lat[1]) * (sind(
                dlon / 2))**2
            c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
            d = c * 6373
            data['d'] = d
            dt = data['locTs'] - data['prev']['ts']
            data['dt'] = dt
            # Guard against two fixes with identical timestamps
            # (the original divided by zero in that case).
            data['speed'] = d / (float(dt) / 3600) if dt else 0
    with open(fp, 'w') as f:
        # NOTE(review): .encode() implies this runs under Python 2;
        # under Python 3 a text-mode file would reject bytes.
        f.write(json.dumps(data, ensure_ascii=False).encode('utf-8'))
Beispiel #5
0
def startbtsync(request):
    if isbtsyncactive():
        return HttpResponseRedirect('/')

    # If wrong config file structure raise an exception
    if 'btsync_conf_file' not in config.keys():
        return HttpResponse('Klucz btsync_conf_file nie istnieje w pliku konfiguracyjnym')

    # If btsync-folder doesn't exist, create it
    btsyncconf = loadJSON(config['btsync_conf_file'])
    if not os.path.exists(btsyncconf['storage_path']):
        os.makedirs(btsyncconf['storage_path'])

    # If BTSync config file doesn't exist, create a new one
    if not path.isfile(config['btsync_conf_file']):
        createemptybtsyncconfigfile(config)

    # Start BTSync process
    if platform.system() == 'Windows':
        pass
    elif platform.system() == 'Linux':
        pid = subprocess.Popen([config['btsync_exe_file'], '--config', config['btsync_conf_file']])
        while not isbtsyncactive():     pass
        print 'BTSync started PID = ' + str(pid)

        if 'uid' not in config.keys():
            print 'taking UID'
            config['uid'] = getUID(config['btsync_server_address'])
            print config['uid']
            saveJSON(os.path.join(config['application_path'], 'config.json'), config)

    return HttpResponseRedirect('/')
Beispiel #6
0
def getallthreads(folderpath):
    """Scan <folderpath>/.Comments and return metadata for every thread.

    Each entry describes one thread directory: its meta info, comment
    count, freshest-comment timestamp and whether it contains comments
    the current user has not read yet.
    """
    result = []

    for tmpdir, subdirs, subfiles in os.walk(folderpath + '/.Comments'):
        # Skip hidden directories, except the .Comments root itself.
        if '.' in tmpdir and '.Comments' not in tmpdir:
            continue

        slashornot = tmpdir == folderpath + '/.Comments' and '/' or ''

        for subdir in subdirs:
            if '.' not in subdir and pattern.match(subdir):
                threadpath = os.path.join(tmpdir, subdir)
                entries = sorted(os.listdir(threadpath), reverse=True)

                # if the thread doesn't contain a meta file - ignore it
                # (checked *before* loading any file; the original loaded
                # entries[1] first and could IndexError on a thread that
                # held only the meta file)
                if 'meta' not in entries:
                    continue
                commentfiles = [e for e in entries if e != 'meta']
                if not commentfiles:
                    continue

                # getting timestamp info about freshest comment
                lastcomment = loadJSON(os.path.join(threadpath, commentfiles[0]))

                metadata = loadJSON(os.path.join(threadpath, 'meta'))
                data = {
                    'fullpath': threadpath,
                    'timestamp': metadata['timestamp'],
                    'name': metadata['topic'],
                    'type': 'thread',
                    'path': tmpdir.replace(folderpath + '/.Comments', slashornot),
                    'numberofcomments': len(os.listdir(threadpath)) - 1,
                    'unreadcomment': False,
                    'lastcomment': lastcomment['timestamp']
                }

                # searching for unread comments
                for comment in commentfiles:
                    comm = loadJSON(os.path.join(threadpath, comment))
                    if config['uid'] not in comm['readby'].keys():
                        data['unreadcomment'] = True
                        break

                result.append(data)

    return result
Beispiel #7
0
def chatHandler(request):
    """Handle chat posts plus admin 'clear'/'delete' and '@' IM commands."""
    data = yield from request.json()
    admin = False
    callsign = decodeToken(data)
    chatPath = ''
    station = data['station'] if 'station' in data else None
    chat = []
    if station:
        # Station chat: admins are the station admin plus its chat admins.
        stationPath = getStationPath(data['station'])
        stationSettings = loadJSON(stationPath + '/settings.json')
        admins = [x.lower() for x in\
            stationSettings['chatAdmins'] + [ stationSettings['admin'], ]]
        admin = 'from' in data and data['from'].lower() in admins
        chatPath = stationPath + '/chat.json'
    else:
        # Site-wide chat ("talks"): only site admins have admin rights.
        chatPath = webRoot + '/js/talks.json'
        admin = isinstance(callsign, str) and callsign in siteAdmins
    # Destructive ops and instant messages ('@...') require a valid login.
    if ('clear' in data or 'delete' in data
            or ('text' in data and data['text'][0] == '@')):
        callsign = decodeToken(data)
        # decodeToken returns an error-response object on failure;
        # pass it straight back to the client.
        if not isinstance(callsign, str):
            return callsign
        if 'clear' in data or 'delete' in data:
            admins = siteAdmins + [
                stationSettings['admin'],
            ] if station else siteAdmins
            if not callsign in admins:
                return web.HTTPUnauthorized( \
                    text = 'You must be logged in as station or site admin' )
        else:
            if not callsign in imUsers:
                return web.HTTPUnauthorized( \
                    text = 'You must be logged in as im user' )
    if not 'clear' in data and not 'delete' in data:
        insertChatMessage(path=chatPath, msg_data=data, admin=admin)
    else:
        if 'delete' in data:
            # Delete: drop the message with the matching timestamp.
            chat = loadJSON(chatPath)
            if not chat:
                chat = []
            chat = [x for x in chat if x['ts'] != data['delete']]
        # 'clear' leaves chat == [] so the file is rewritten empty.
        with open(chatPath, 'w') as f:
            json.dump(chat, f, ensure_ascii=False)
    return web.Response(text='OK')
Beispiel #8
0
 def __init__(self, file=None, disable_qrz_ru=False):
     """Optionally restore previously saved DX items from *file*.

     file -- path to a JSON dump of DX items, or None to start empty.
     disable_qrz_ru -- when False, create a QRZLink lookup helper.
     """
     self.data = []
     self.file = file
     if not disable_qrz_ru:
         self.qrzLink = QRZLink()
     if file:
         prevDX = loadJSON(file)
         if prevDX:
             for item in prevDX:
                 # Rebuild DX objects from the stored dicts; the False
                 # flag presumably suppresses per-item persistence --
                 # TODO confirm against self.append.
                 self.append(DX(**item), False)
Beispiel #9
0
def getcommentsfrompath(request):
    """Return the comments of all threads directly under one comments subpath."""
    comments = []

    try:
        payload = json.loads(request.body)

        fullpath = os.path.join(payload['folderpath'], '.Comments', payload['insidepath'][1:])

        if not os.path.isdir(fullpath):
            return JsonResponse([], safe=False)

        for entry in os.listdir(fullpath):
            if not pattern.match(entry):
                continue

            threadpath = os.path.join(fullpath, entry)
            meta = loadJSON(os.path.join(threadpath, 'meta'))
            # Thread descriptor attached to each of its comments.
            thread = {
                'fullpath': threadpath,
                'timestamp': meta['timestamp'],
                'name': meta['topic'],
                'type': 'thread',
                'path': threadpath.replace(payload['folderpath'], ''),
                'numberofcomments': len(os.listdir(threadpath)) - 1,
                'unreadcomment': False,
            }

            for filename in os.listdir(threadpath):
                if filename == 'meta':
                    continue
                comment = loadJSON(os.path.join(threadpath, filename))
                comment['topic'] = thread
                comments.append(comment)

        return JsonResponse({'comments': comments}, safe=False)
    except Exception:
        return HttpResponseServerError('Wystąpił nieznany błąd podczas pobierania komentarzy.')
Beispiel #10
0
def publishHandler(request):
    """Site-admin endpoint: update a station's publish flags."""
    data = yield from request.json()
    callsign = decodeToken(data)
    if not isinstance(callsign, str):
        # decodeToken returned an error response; forward it.
        return callsign
    if not callsign in siteAdmins:
        return web.HTTPUnauthorized( \
            text = 'You must be logged in as site admin' )
    publishPath = webRoot + '/js/publish.json'
    publish = loadJSON(publishPath)
    if not publish:
        publish = {}
    # Overwrite the station's entry wholesale. (The original first set
    # publish[station] = {} and immediately overwrote it -- dead code.)
    publish[data['station']] = data['publish']
    with open(publishPath, 'w') as f:
        json.dump(publish, f, ensure_ascii=False)
    # Mirror the user-publish flag into the station's own settings.
    stationPath = getStationPath(data['station'])
    stationSettings = loadJSON(stationPath + '/settings.json')
    stationSettings['publish'] = data['publish']['user']
    yield from saveStationSettings(data['station'], stationSettings['admin'],
                                   stationSettings)
    return web.Response(text='OK')
Beispiel #11
0
def newsHandler(request):
    """Station-admin endpoint: add, delete or clear station news items."""
    data = yield from request.json()
    callsign = decodeToken(data)
    if not isinstance(callsign, str):
        return callsign
    stationPath = getStationPath(data['station'])
    stationSettings = loadJSON(stationPath + '/settings.json')
    if callsign != stationSettings['admin'] and not callsign in siteAdmins:
        return web.HTTPUnauthorized( \
            text = 'You must be logged in as station admin' )
    newsPath = stationPath + '/news.json'
    news = loadJSON(newsPath) or []
    if 'add' in data:
        # Newest items go to the front of the list.
        item = {
            'ts': time.time(),
            'text': data['add'],
            'time': datetime.now().strftime( '%d %b %H:%M' ).lower()
        }
        news.insert(0, item)
    if 'clear' in data:
        news = []
    if 'delete' in data:
        news = [entry for entry in news if entry['ts'] != data['delete']]
    with open(newsPath, 'w') as f:
        json.dump(news, f, ensure_ascii=False)
    return web.Response(text='OK')
Beispiel #12
0
def main():
    """Sync station status files from uploaded GPSLogger CSV tracks.

    For every user whose status source is 'gpslogger', read the last
    line of today's CSV in their FTP directory and, if it is newer than
    the stored status, rewrite status.json with the new fix.
    """
    yield from db.connect()
    users = yield from db.execute( "select callsign, settings from users" )
    # Today's GPSLogger file name, e.g. 20240131.csv
    fname = datetime.now().strftime( "%Y%m%d" ) + ".csv"
    for user in users:
        if user['settings'] and 'station' in user['settings'] and \
            'callsign' in user['settings']['station'] and \
            user['settings']['station']['callsign'] and \
            user['settings']['status']['get'] == 'gpslogger':
            stationCs = user['settings']['station']['callsign']
            stationPath = getStationPath( stationCs )
            statusPath = stationPath + '/status.json'
            status = loadJSON( statusPath  )
            if not status:
                status = {}
            ftpPath = webRoot + '/ftp/' + user['callsign'] + '/GPSLogger'
            if os.path.isdir( ftpPath ):
                ftpFilePath = ( ftpPath + '/' + fname )
                if os.path.isfile( ftpFilePath ):
                    data = None
                    # Only the newest fix (last CSV line) matters.
                    with open( ftpPath + '/' + fname, 'r' ) as f:
                        data = f.readlines()[-1].split( ',' )
                    dt = datetime.strptime( data[0], '%Y-%m-%dT%H:%M:%S.%fZ' )
                    ts = int( dt.timestamp() + tzOffset() )
                    # Update only if the CSV fix is newer than what we have.
                    if not 'ts' in status or status['ts'] < ts:
                        status['date'], status['time'] = dtFmt( dt )
                        status['year'] = dt.year
                        status['ts'] = ts
                        # CSV columns: timestamp, lat, lon, ..., speed at
                        # index 6 -- assumed GPSLogger format, TODO confirm.
                        status['location'] = [ float(data[1]), float(data[2] ) ]
                        status['loc'] = qth( data[1], data[2] )
                        if status['loc'] in rafa:
                            status['rafa'] = rafa[status['loc']]
                        else:
                            status['rafa'] = None
                        status['speed'] = float( data[6] )
                        #dt_ = datetime.utcnow()
                        #dt_.replace( tzinfo = timezone.utc )
                        #status['ts_'] = dt_.timestamp()
                        #status['date_'], status['time_'] = dtFmt( dt_ )
                        #status['year_'] = dt_.year
                        with open( statusPath, 'w' ) as f:
                            json.dump( status, f, ensure_ascii = False )
Beispiel #13
0
def upload():
    """Export each user's log, settings and status to their station web dir."""
    yield from db.connect()
    users_data = yield from db.execute("select * from users")
    for user_row in users_data:
        if not user_row['settings']:
            continue
        # Guard against settings without a 'station' section (the
        # original raised KeyError and aborted the whole export).
        station_callsign = (user_row['settings'].get('station') or {}).get('callsign')
        if station_callsign:
            station_path = webRoot + '/stations/' +\
                station_callsign.lower().replace( '/', '-' )
            if os.path.isdir(station_path):
                log = []
                log_data = yield from db.execute("""
                    select id, qso 
                    from log where callsign = %(callsign)s order by id desc""",\
                    user_row)
                if log_data:
                    # A single-row result comes back as a dict, multiple
                    # rows as a list of dicts.
                    # NOTE(review): the single-row branch keeps the whole
                    # {'id','qso'} wrapper while the list branch extracts
                    # 'qso' -- looks inconsistent, verify intent.
                    if isinstance(log_data, dict):
                        log.append(log_data)
                    else:
                        # 'log_row' renamed from 'row', which shadowed the
                        # outer loop variable in the original.
                        log = [log_row['qso'] for log_row in log_data]
                    with open(station_path + '/log.json', 'w') as f:
                        json.dump(log, f)
                with open(station_path + '/settings.json', 'w') as f:
                    json.dump(user_row['settings'], f)
                status_path = station_path + '/status.json'
                status = loadJSON(status_path)
                if status:
                    # Combined QTH info block consumed by the web UI.
                    status['qth'] = {
                        'fields': {
                            'titles': ["RDA", "RAFA", "QTH field"],
                            'values': [
                                status['rda'] if 'rda' in status else None,
                                status['rafa'] if 'rafa' in status else None,
                                status['userFields'][0] if 'userFields' in status
                                    else None
                            ]
                        },
                        'loc': status['loc'] if 'loc' in status else None}
                    with open(status_path, 'w') as f:
                        json.dump(status, f)
Beispiel #14
0
def editcomment(request):
    """Edit an existing comment, keeping the new text in its history."""
    try:
        if not checkConnection():
            return HttpResponseServerError('Brak połączenia z Internetem.')

        # Parse the body once (the original parsed it twice).
        payload = json.loads(request.body)
        editedcomment = payload['comment']
        threadfullpath = payload['threadfullpath']

        # Comment files are named <timestamp><sep><uid>.
        commentpath = os.path.join(threadfullpath, editedcomment['timestamp'] + fileseparator + editedcomment['uid'])

        comment = loadJSON(commentpath)
        comment['history'].append({'timestamp': gettimestamp(), 'comment': editedcomment['comment']})
        comment['comment'] = editedcomment['comment']

        saveJSON(commentpath, comment)

        return JsonResponse({}, safe=False)
    except Exception:
        # Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        # are not swallowed; matches the sibling views' style.
        return HttpResponseServerError('Wystąpił nieznany błąd podczas edycji komentarza.')
Beispiel #15
0
def save_qth_now_location(cs, location, path):
    """Append a 'QTH now' location report and prune the stored list.

    Keeps only entries younger than 10 minutes that are not at the same
    spot, and drops the caller's own previous entry before appending
    the new one.
    """
    qth_now_locations = loadJSON(path)
    if not qth_now_locations:
        qth_now_locations = []
    ts = int(time.time())
    dtUTC = datetime.utcnow()
    dt, tm = dtFmt(dtUTC)
    # 'cs is None' replaces the original 'cs == None' identity check.
    qth_now_locations = [item for item in qth_now_locations
                         if ts - item['ts'] < 600 and
                         (item['location'][0] != location[0] or
                          item['location'][1] != location[1])
                         and (cs is None or item['callsign'] != cs)]
    qth_now_locations.append({
        'location': location,
        'ts': ts,
        'date': dt,
        'time': tm,
        'callsign': cs
    })
    with open(path, 'w') as f:
        json.dump(qth_now_locations, f, ensure_ascii=False)
Beispiel #16
0
def getdominatedthreads(request):
    """Return threads whose share of comments by selected users passes a threshold.

    The request body carries the folder path, a user list (each with an
    'isDominating' flag) and a threshold string whose first character is
    the comparison ('>' or '<').
    """
    try:
        data = json.loads(request.body)
        # print json.dumps(data, indent=4)

        files = getallthreads(data['path'])
        # Threshold string layout: comparison char at [0], percent digits
        # at [2:4] -- assumes a fixed '<op> NN%' format, TODO confirm
        # against the client that builds this string.
        threshold = float(data['threshold'][2:4]) / 100     # threshold as fraction
        threads = []
        dominatingusersuid = []

        # Collect UIDs of the users marked as dominating in the request.
        for user in data['users']:
            if user['isDominating']:
                dominatingusersuid.append(user['uid'])

        for fil in files:
            if fil['type'] in ['folder', 'file']:
                continue

            # main loop for increasing counters of users
            comments = os.listdir(fil['fullpath'])
            userscomments = 0
            # Fractional limit of the thread's total comment count.
            limit = fil['numberofcomments'] * threshold

            for commentpath in comments:
                if commentpath == 'meta':
                    continue

                comment = loadJSON(os.path.join(fil['fullpath'], commentpath))
                if comment['uid'] in dominatingusersuid:
                    userscomments += 1

            # Keep the thread when the dominating users' comment count is
            # above/below the limit, per the threshold's comparison char.
            if (data['threshold'][0] == '>' and float(userscomments) > limit) or \
                    (data['threshold'][0] == '<' and float(userscomments) < limit):
                threads.append(fil)

        return JsonResponse(threads, safe=False)
    except Exception:
        return HttpResponseServerError('Wystąpił nieznany błąd podczas filtrowania wątków.')
Beispiel #17
0
def insertChatMessage(path, msg_data, admin):
    """Append a chat message to the chat file or queue an instant message.

    Messages starting with '@' are private IMs routed through IM_QUEUE;
    everything else is prepended to the chat file, which is truncated to
    the configured maximum length ('***' admin messages are always kept).
    """
    CHAT_MAX_LENGTH = int(conf['chat']['max_length'])
    chat = loadJSON(path)
    if not chat:
        chat = []
    msg = { 'user': msg_data['from'], \
            'text': msg_data['text'], \
            'admin': admin, 'ts': time.time() }
    msg['date'], msg['time'] = dtFmt(datetime.utcnow())
    # startswith avoids the original's IndexError on an empty text.
    if msg['text'].startswith('@'):
        to, txt = msg['text'][1:].split(' ', maxsplit=1)
        txt = txt.strip()
        if not txt and to in IM_QUEUE:
            # An empty text cancels a pending IM to that user.
            del IM_QUEUE[to]
        else:
            IM_QUEUE[to] = {
                'user': msg['user'],
                'text': txt,
                'ts': msg['ts'],
                'date': msg['date'],
                'time': msg['time']
            }
            logging.debug('------- IM_QUEUE -------')
            logging.debug(IM_QUEUE)
    else:
        if 'name' in msg_data:
            msg['name'] = msg_data['name']
        chat.insert(0, msg)
        chat_trunc = []
        chat_adm = []
        # Loop variable renamed -- the original reused 'msg' here.
        for entry in chat:
            if entry['text'].startswith('***') and entry['admin']:
                chat_adm.append(entry)
            elif len(chat_trunc) < CHAT_MAX_LENGTH:
                chat_trunc.append(entry)
        chat = chat_adm + chat_trunc
        with open(path, 'w') as f:
            json.dump(chat, f, ensure_ascii=False)
Beispiel #18
0
def getcomments(request):
    """Return a thread's comments, marking them read for the current user.

    When 'sortinguid' is given, only comments that user has read are
    returned, timestamped by when that user read them.
    """
    result = []

    try:
        data = json.loads(request.body)
        fullthreadpath = data['fullthreadpath']
        sortinguid = data['sortinguid']

        for commentfile in os.listdir(fullthreadpath):
            if commentfile == 'meta':
                continue

            comment = loadJSON(os.path.join(fullthreadpath, commentfile))

            # Mark as read by the current user on first sight.
            if config['uid'] not in comment['readby'].keys():
                comment['readby'][config['uid']] = gettimestamp()

            saveJSON(os.path.join(fullthreadpath, commentfile), comment)

            comment['editing'] = False                                  # need for UI purposes
            comment['historing'] = False                                # need for UI purposes

            result.append(comment)

        if sortinguid != '':
            # Rebuild the list instead of calling result.remove() while
            # iterating result -- the original skipped elements that
            # followed a removed one.
            filtered = []
            for res in result:
                if sortinguid in res['readby'].keys():
                    res['timestamp'] = res['readby'][sortinguid]
                    filtered.append(res)
            result = filtered

        result = sorted(result, key=lambda comm: comm['timestamp'])

        return JsonResponse({'comments': result, 'stats': getstats(result)}, safe=False)
    except Exception:
        return HttpResponseServerError('Wystąpił nieznany błąd podczas pobierania komentarzy.')
Beispiel #19
0
import sys, decimal, re, datetime, os, logging, time, json, urllib2, xmltodict,\
        argparse

from common import appRoot, readConf, siteConf, loadJSON
from dxdb import dxdb, cursor2dicts
from dx import DX, DXData

# Command line: -t switches the script to the test web root.
argparser = argparse.ArgumentParser()
argparser.add_argument( '-t', action = 'store_true' )
args = vars( argparser.parse_args() )
testMode = args['t']

conf = siteConf()
webRoot = conf.get( 'web', ( 'test_' if testMode else '' ) + 'root' )
# Awards definitions drive the rest of the script (Python 2 dialect).
awards = loadJSON( webRoot + '/awards.json' )
if not awards:
    print 'No awards data!'
else:
    print 'Processing ' + webRoot + '/awards.json'
# Disabled user-metadata version bump, kept for reference.
#udv = loadJSON( webRoot + '/userMetadata.json' )
#if udv:
#    udv = int( udv ) + 1
#else:
#    udv = '0'

def makeStr( list ):
    """Render the items as a quoted, comma-separated SQL literal list."""
    quoted = ("'{}'".format(item) for item in list)
    return ', '.join(quoted)

cfmTypesDef = [ 'cfm_lotw', 'cfm_paper', 'cfm_eqsl' ]
sql = """select * from user_awards
Beispiel #20
0
        def process_qso(qso):
            """Merge one incoming QSO into the in-memory log and the DB.

            Returns {'ts': <final timestamp>} on success, or
            {'ts': None} if the QSO timestamp could not be parsed.
            """
            try:
                dt = datetime.strptime(qso['ts'], "%Y-%m-%d %H:%M:%S")
                qso['date'], qso['time'] = dtFmt(dt)
                # Epoch seconds of the QSO's own (station-local) timestamp.
                qso['qso_ts'] = (dt -
                                 datetime(1970, 1, 1)) / timedelta(seconds=1)
            except (ValueError, TypeError) as exc:
                logging.error("Error parsing qso timestamp" + qso['ts'])
                logging.exception(exc)
                return {'ts': None}

            # serverTs is present when the client re-sends a QSO that the
            # server already assigned a timestamp to.
            serverTs = qso.pop('serverTs') if 'serverTs' in qso else None

            if serverTs:
                # Known QSO: replace it in the log (or append) and
                # update/insert the DB row keyed by that timestamp.
                qso['ts'] = serverTs
                qsoIdx = [
                    i[0] for i in enumerate(log) if i[1]['ts'] == qso['ts']
                ]
                if qsoIdx:
                    log[qsoIdx[0]] = qso
                else:
                    log.append(qso)
                dbUpdate = yield from db.execute(
                    """
                    update log set qso = %(qso)s
                    where callsign = %(callsign)s and (qso->>'ts')::float = %(ts)s""",
                    {
                        'callsign': callsign,
                        'ts': qso['ts'],
                        'qso': json.dumps(qso)
                    })
                if not dbUpdate:
                    yield from dbInsertQso(callsign, qso)

            else:
                # New submission: detect a duplicate by comparing all
                # fields except the volatile/derived ones listed below.
                new_qso = True
                if log:
                    for log_qso in log:
                        sameFl = True
                        for key in qso:
                            if key not in ('ts', 'rda', 'wff', 'comments', 'serverTs', 'qso_ts', 'qth', 'no')\
                                and (key not in log_qso or qso[key] != log_qso[key]):
                                sameFl = False
                                break
                        if sameFl:
                            logging.debug('prev qso found:')
                            new_qso = False
                            qso['ts'] = log_qso['ts']
                            log_qso['qso_ts'] = qso['qso_ts']

                if new_qso:
                    # Keep the station's current frequency up to date.
                    statusPath = stationPath + '/status.json'
                    statusData = loadJSON(statusPath)
                    ts = dt.timestamp() + tzOffset()
                    if ('freq' not in statusData
                            or statusData['freq']['ts'] < ts):
                        statusData['freq'] = {'value': qso['freq'], 'ts': ts}
                        with open(statusPath, 'w') as f:
                            json.dump(statusData, f, ensure_ascii=False)

                    # Server-side timestamp; nudge until unique in the log.
                    qso['ts'] = time.time()
                    while [x for x in log if x['ts'] == qso['ts']]:
                        qso['ts'] += 0.00000001
                    log.insert(0, qso)
                    yield from dbInsertQso(callsign, qso)

            return {'ts': qso['ts']}
Beispiel #21
0
def galleryHandler(request):
    """Station gallery endpoint: upload, delete or clear gallery items.

    Uploads may be images or videos; a 200x200 jpeg thumbnail is created
    for each (first frame for videos) and oversized images are scaled
    down according to the site gallery settings.
    """
    data = None
    if 'multipart/form-data;' in request.headers[aiohttp.hdrs.CONTENT_TYPE]:
        data = yield from read_multipart(request)
    else:
        data = yield from request.json()
    callsign = decodeToken(data)
    if not isinstance(callsign, str):
        # decodeToken returned an error-response object; forward it.
        return callsign
    stationPath = yield from getStationPathByAdminCS(callsign)
    galleryPath = stationPath + '/gallery'
    galleryDataPath = stationPath + '/gallery.json'
    galleryData = loadJSON(galleryDataPath)
    site_gallery_params = conf['gallery']
    if not galleryData:
        galleryData = []
    if 'file' in data:
        if not os.path.isdir(galleryPath):
            os.mkdir(galleryPath)
        file = data['file']['contents']
        # Random, collision-free base name for the stored file.
        fileNameBase = uuid.uuid4().hex
        fileExt = data['file']['name'].rpartition('.')[2]
        fileName = fileNameBase + '.' + fileExt
        fileType = 'image' if 'image'\
            in data['file']['type'] else 'video'
        filePath = galleryPath + '/' + fileName
        with open(filePath, 'wb') as fimg:
            fimg.write(file)
        tnSrc = filePath
        width = None
        if fileType == 'video':
            # Thumbnail source for videos: extract the first frame.
            tnSrc = galleryPath + '/' + fileNameBase + '.jpeg'
            (ffmpeg.input(filePath).output(tnSrc, vframes=1).run())
            probe = ffmpeg.probe(filePath)
            video_stream = next((stream for stream in probe['streams']\
                if stream['codec_type'] == 'video'), None)
            width = int(video_stream['width'])
        with Image(filename=tnSrc) as img:
            with Image(width=img.width,
                       height=img.height,
                       background=Color("#EEEEEE")) as bg:

                bg.composite(img, 0, 0)

                # Honor EXIF orientation when generating the thumbnail.
                exif = {}
                exif.update((k[5:], v) for k, v in img.metadata.items()
                            if k.startswith('exif:'))
                if 'Orientation' in exif:
                    if exif['Orientation'] == '3':
                        bg.rotate(180)
                    elif exif['Orientation'] == '6':
                        bg.rotate(90)
                    elif exif['Orientation'] == '8':
                        bg.rotate(270)

                # Square-crop and scale down to the 200x200 thumbnail.
                size = img.width if img.width < img.height else img.height
                bg.crop(width=size, height=size, gravity='north')
                bg.resize(200, 200)
                bg.format = 'jpeg'
                bg.save(filename=galleryPath + '/' + fileNameBase +\
                    '_thumb.jpeg')
                # Downscale the full image when it exceeds the site limits.
                max_height, max_width = int(
                    site_gallery_params['max_height']), int(
                        site_gallery_params['max_width'])
                if img.width > max_width or img.height > max_height:
                    coeff = max_width / img.width
                    if max_height / img.height < coeff:
                        coeff = max_height / img.height
                    img.resize(width=int(coeff * img.width),
                               height=int(coeff * img.height))
                    img.compression_quality = int(
                        site_gallery_params['quality'])
                    img.save(filename=filePath)
        if fileType == 'video':
            # The extracted frame was only needed for the thumbnail.
            os.unlink(tnSrc)
        galleryData.insert(0, {\
            'file': 'gallery/' + fileName,
            'thumb': 'gallery/' + fileNameBase + '_thumb.jpeg',
            'caption': data['caption'],
            'type': fileType,
            'ts': time.time(),
            'datetime': datetime.utcnow().strftime('%d %b %Y %H:%M').lower(),
            'id': fileNameBase})
        # Enforce the gallery size cap, keeping the newest items.
        max_count = int(site_gallery_params['max_count'])
        if len(galleryData) > max_count:
            galleryData = sorted(galleryData,
                                 key=lambda item: item['ts'],
                                 reverse=True)[:max_count]

    if 'delete' in data:
        items = [x for x in galleryData if x['id'] == data['delete']]
        if items:
            item = items[0]
            galleryData = [x for x in galleryData if x != item]
            deleteGalleryItem(stationPath, item)
    if 'clear' in data:
        for item in galleryData:
            deleteGalleryItem(stationPath, item)
        galleryData = []
    with open(galleryDataPath, 'w') as fg:
        json.dump(galleryData, fg, ensure_ascii=False)
    return web.Response(text='OK')
Beispiel #22
0
#!/usr/bin/python3
#coding=utf-8
# tnxqso adxcluster filter: builds per-station callsign regexps from each
# station's settings to filter/highlight DX cluster spots.
import argparse, logging, logging.handlers, os, json, pathlib, re
from common import siteConf, loadJSON, appRoot, startLogging

# NOTE(review): 'parser' is created but never used here, and sys.exit()
# below needs 'import sys', which is missing -- verify against the full
# script (this view may be truncated).
parser = argparse.ArgumentParser(description="tnxqso adxcluster filter")
startLogging('clusterFilter')

conf = siteConf()
# Both the production and test web roots are processed.
roots = [conf.get('web', x) for x in ['root', 'root_test']]
dx = loadJSON(conf.get('files', 'adxcluster'))
if not dx:
    logging.error('no dx data')
    sys.exit()

for root in roots:
    stationsPath = root + '/stations'


    for station in [str(x) for x in pathlib.Path( stationsPath ).iterdir() \
            if x.is_dir() ]:
        settings = loadJSON(station + '/settings.json')
        if settings and 'clusterCallsigns' in settings and \
                settings['enable']['cluster'] and settings['clusterCallsigns']:
            # '*' in a configured callsign pattern acts as a wildcard.
            reCS = []
            for cs in settings['clusterCallsigns']:
                reCS.append(re.compile('^' + cs.replace('*', '.*') + '$'))
            reHl = []
            if 'clusterHighlight' in settings and settings['clusterHighlight']:
                for cs in settings['clusterHighlight']:
                    reHl.append(re.compile('^' + cs.replace('*', '.*') + '$'))
# Python 2 script fragment (print statements, urllib2): loads the awards
# definitions and bumps the user-metadata version counter.
import sys, decimal, re, datetime, os, logging, time, json, urllib2, xmltodict,\
        argparse

from common import appRoot, readConf, siteConf, loadJSON
from dxdb import dxdb
from dx import DX, DXData

argparser = argparse.ArgumentParser()
argparser.add_argument('-t', action='store_true')
args = vars(argparser.parse_args())
testMode = args['t']

conf = siteConf()
# '-t' selects the test web root (web.test_root) instead of web.root.
webRoot = conf.get('web', ('test_' if testMode else '') + 'root')
awards = loadJSON(webRoot + '/awards.json')
if not awards:
    print 'No awards data!'
else:
    print 'Processing ' + webRoot + '/awards.json'
udv = loadJSON(webRoot + '/userMetadata.json')
if udv:
    udv = int(udv) + 1
else:
    # NOTE(review): initialized to the *string* '0' while the branch above
    # yields an int; int(udv) on the next run tolerates both, but the types
    # are inconsistent — confirm before changing.
    udv = '0'


def makeStr(values):
    """Return the items single-quoted and comma-joined.

    E.g. ['a', 'b'] -> "'a', 'b'"; an empty iterable yields ''.
    """
    # Parameter renamed from `list`, which shadowed the builtin.
    return ', '.join(("'{}'".format(x) for x in values))

Beispiel #24
0
#!/usr/bin/python
#coding=utf-8

# Python 2 maintenance script: finds callsigns that own more than one
# 'DX'/'DXpedition'/'Special' list; the excerpt is truncated before the
# merge logic that re-points users_lists_items completes.

import sys, decimal, re, datetime, os, logging, time, json, urllib2, xmltodict,\
        argparse

from common import appRoot, readConf, siteConf, loadJSON
from dxdb import dxdb, cursor2dicts
from dx import DX, DXData


conf = siteConf()
webRoot = conf.get( 'web', 'root' ) 
udv = loadJSON( webRoot + '/userMetadata.json' )
if not udv:
    udv = {}

# Callsign/title pairs that occur more than once among the special lists.
dblSql = """
    select callsign, title, sum( 1 )
    from users_lists
    where title in ( 'DX', 'DXpedition', 'Special' )
    group by callsign, title
    having sum( 1 ) > 1
"""
dbls = cursor2dicts( dxdb.execute( dblSql ) ) 
idSql = """
    select id from users_lists 
    where callsign = %(callsign)s and title = %(title)s
    """
updateItemsSql = """
    update users_lists_items as items0 set list_id = %(id0)s
Beispiel #25
0
def readWebFile(fName, env):
    """Load JSON file *fName* from the site's web root.

    Uses the test root when the request's SERVER_NAME contains 'test',
    the production root otherwise. Returns whatever loadJSON yields.
    """
    root_option = 'root'
    if 'test' in env['SERVER_NAME']:
        root_option = 'test_root'
    path = conf.get('web', root_option) + '/' + fName
    return loadJSON(path)
Beispiel #26
0
        '33CM': 'UHF', '23CM':'UHF', '13CM': 'UHF' }

# Build prefix -> country and DXCC -> prefix lookup tables.
countries = {}
for cty, cl in conf.items('countries'):
    for code in cl.split(','):
        countries[code] = cty.title()

countryCodes = {}
with open(appRoot + '/CountryCode.txt', 'r') as fcc:
    for line in fcc.readlines():
        dxcc, pfx = line.split(',', 3)[0:2]
        # Fix: Python-2-only dict.has_key() replaced with the `in` operator
        # (equivalent on Python 2, required on Python 3).
        if pfx in countries:
            countryCodes[dxcc] = pfx
countryStateAward = {'Russia': 'RDA', 'Ukraine': 'URDA'}

# Index award definitions by name for direct lookup.
awardsDataJS = loadJSON(conf.get('web', 'root') + '/awards.json')
awardsData = {}
for entry in awardsDataJS:
    awardsData[entry['name']] = entry


def loadQueue():
    """Load the pending ADIF import queue from disk (falsy when absent)."""
    return loadJSON(adifQueueDir + 'queue.json')


def getAdifField(line, field):
    iHead = line.find('<' + field + ':')
    if iHead < 0:
        return ''
    iBeg = line.find(">", iHead) + 1
    ends = [ x for x in [ line.find( x, iBeg ) for x in ( ' ', '<' ) ] \
Beispiel #27
0
logging.debug("restart")

db = DBConn(conf.items('db_test' if args.test else 'db'))
db.connect()

# Load the signing secret, creating it on first start.
secret = None
fpSecret = conf.get('files', 'secret')
if (os.path.isfile(fpSecret)):
    with open(fpSecret, 'rb') as fSecret:
        secret = fSecret.read()
if not secret:
    secret = base64.b64encode(os.urandom(64))
    with open(fpSecret, 'wb') as fSecret:
        # Fix: write the raw bytes. The old str() wrapper raises TypeError on
        # a binary-mode file under Python 3 and would have stored the repr,
        # so the value read back on restart would not match this one.
        fSecret.write(secret)

defUserSettings = loadJSON(webRoot + '/js/defaultUserSettings.json')
if not defUserSettings:
    defUserSettings = {}

# Default JSON payloads keyed by station file name.
jsonTemplates = { 'settings': defUserSettings, \
    'log': [], 'chat': [], 'news': [], 'cluster': [], 'status': {}, \
    'chatUsers': {} }

# Locator square -> space-joined RAFA reference(s), read from the bundled CSV
# (excerpt is truncated inside this loop).
RAFA_LOCS = {}
with open(appRoot + '/rafa.csv', 'r') as f_rafa:
    for line in f_rafa.readlines():
        data = line.strip('\r\n ').split(';')
        locators = data[3].split(',')
        for locator in locators:
            if locator in RAFA_LOCS:
                RAFA_LOCS[locator] += ' ' + data[1]
Beispiel #28
0
# Python 2 script fragment (urllib2): loads awardsData.json for processing.
import sys, decimal, re, datetime, os, logging, time, json, urllib2, xmltodict,\
        argparse

from common import appRoot, readConf, siteConf, loadJSON
from dxdb import dxdb
from dx import DX, DXData

argparser = argparse.ArgumentParser()
argparser.add_argument( '-t', action = 'store_true' )
args = vars( argparser.parse_args() )


conf = siteConf()
# '-t' selects the test web root instead of the production one.
webRoot = conf.get( 'web', ( 'test_' if args['t'] else '' ) + 'root' ) 
awardsData = loadJSON( webRoot + '/awardsData.json' )

def getAward( name ):
    """Return the award entry whose 'name' equals *name*.

    Raises IndexError when no entry matches (same as the original).
    """
    matches = [award for award in awardsData if award['name'] == name]
    return matches[0]

jcc = getAward( 'JCC' )
waku = getAward( 'WAKU' )
waja = getAward( 'WAJA' )

def getSplitLine( file, fr = 0, to = None ):
    line = file.readline()
    if line:
        data = [item.strip( '"\r\n ' ) for item in line.split( ';' )]
        if to:
            return data[fr:to]
Beispiel #29
0
#coding=utf-8

import json, logging

from common import siteConf, loadJSON

# Cron-style job: merges fresh adxcluster spots into the site's spots.json.
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(message)s',
                    filename='/var/log/dxped_getSpots.log',
                    datefmt='%Y-%m-%d %H:%M:%S')
conf = siteConf()
webRoot = conf.get('web', 'root')
spotsSrcF = conf.get('adxcluster', 'spots')

spotsF = webRoot + '/spots.json'
newData = loadJSON(spotsSrcF)
if not newData:
    logging.error('Spots data is empty')
    raise SystemExit
# Keep only Russian mobile (/M), portable (/P) and maritime-mobile (/MM)
# callsigns.
newData = [ x for x in newData \
        if x['country'] == 'Russia' and \
        ( x['cs'].endswith( '/M' ) or x['cs'].endswith('/P') \
        or x['cs'].endswith('/MM') ) ]
data = loadJSON(spotsF)
if not data:
    data = []
# prev is the newest stored spot; walk new spots oldest-first and stop at the
# first one not newer than it (loop body truncated in this excerpt).
prev = data[0] if data else None
idx = 0
for item in reversed(newData):
    if prev and item['ts'] <= prev['ts']:
        break
Beispiel #30
0
        '33CM': 'UHF', '23CM':'UHF', '13CM': 'UHF' }

# Build prefix -> country and DXCC -> prefix lookup tables.
countries = {}
for cty, cl in conf.items( 'countries' ):
    for code in cl.split( ',' ):
        countries[code] = cty.title()

countryCodes = {}
with open( appRoot + '/CountryCode.txt', 'r' ) as fcc:
    for line in fcc.readlines():
        dxcc, pfx = line.split( ',', 3 )[0:2]
        # Fix: Python-2-only dict.has_key() replaced with the `in` operator
        # (equivalent on Python 2, required on Python 3).
        if pfx in countries:
            countryCodes[dxcc] = pfx
countryStateAward = { 'Russia': 'RDA', 'Ukraine': 'URDA' }

# Index award definitions by name for direct lookup.
awardsDataJS = loadJSON( conf.get( 'web', 'root' ) + '/awards.json' )
awardsData = {}
for entry in awardsDataJS:
    awardsData[entry['name']] = entry


def loadQueue():
    """Load the pending ADIF import queue from disk (falsy when absent)."""
    return loadJSON( adifQueueDir + 'queue.json' )

def getAdifField( line, field ):
    iHead = line.find( '<' + field + ':' )
    if iHead < 0:
        return ''
    iBeg = line.find( ">", iHead ) + 1
    ends = [ x for x in [ line.find( x, iBeg ) for x in ( ' ', '<' ) ] \
                if x > -1 ]
Beispiel #31
0
def loadQueue():
    """Load the pending ADIF import queue from disk (falsy when absent)."""
    return loadJSON( adifQueueDir + 'queue.json' )
Beispiel #32
0
#coding=utf-8

# Python 2 script fragment (print statements, urllib2): award-list
# preprocessing driver.
import sys, decimal, re, datetime, os, logging, time, json, urllib2, xmltodict,\
        argparse

from common import appRoot, readConf, siteConf, loadJSON
from dxdb import dxdb
from dx import DX, DXData

argparser = argparse.ArgumentParser()
argparser.add_argument('-t', action='store_true')
args = vars(argparser.parse_args())

conf = siteConf()
# '-t' switches processing to the test web root.
webRoot = conf.get('web', ('test_' if args['t'] else '') + 'root')
awardsData = loadJSON(webRoot + '/awards.json')
if not awardsData:
    print 'No awards data!'
else:
    print 'Processing ' + webRoot + '/awards.json'
awards = []
webAwards = []


def getSplitLine(file, fr=0, to=None):
    line = file.readline()
    if line:
        data = [item.strip('"\r\n ') for item in line.split(';')]
        if to:
            return data[fr:to]
        else:
Beispiel #33
0
def application(env, start_response):
    """WSGI entry point dispatching on PATH_INFO.

    Routes: /uwsgi/contact (feedback mail via recaptcha), /uwsgi/login
    (login or registration), /uwsgi/userSettings (all JWT-authenticated
    user-data updates against the DB).

    NOTE: Python 2 code — has_key/iteritems and the `except X, e` syntax.
    Responses are plain text or JSON with 200/400/403/500 statuses.
    """
    global dxdb
    # Malformed or missing CONTENT_LENGTH is treated as an empty body.
    try:
        reqSize = int(env.get('CONTENT_LENGTH', 0))
    except:
        reqSize = 0

    if env['PATH_INFO'] == '/uwsgi/contact':
        start_response('200 OK', [('Content-Type', 'text/plain')])
        email = json.loads(env['wsgi.input'].read(reqSize))
        #        with open( '/var/www/adxc.73/debugEmail.json', 'w' ) as fD:
        #            fD.write( json.dumps( email ) )
        if checkRecaptcha(email['recaptcha']):
            sendEmail( text = email['text'], fr = email['from'], \
                to = conf.get( 'email', 'address' ), \
                subject = "Message from ADXcluster.com" )
            return 'OK'
        else:
            return 'recaptcha failed'
    elif env['PATH_INFO'] == '/uwsgi/login':
        dxdb = dbConn()
        data = json.loads(env['wsgi.input'].read(reqSize))
        error = ''
        if not data.has_key('callsign') or len(data['callsign']) < 2:
            error = 'Invalid callsign'
        elif not data.has_key('password') or len(data['password']) < 6:
            error = 'Invalid password'
        data['callsign'] = data['callsign'].upper()
        if not error:

            # Registration path: validate email + recaptcha, reject dupes.
            if data.has_key('register') and data['register']:
                if not data.has_key('email') or not validEmail(data['email']):
                    error = 'Invalid email'
                elif not data.has_key( 'recaptcha' ) or \
                        not checkRecaptcha( data['recaptcha'] ):
                    error = 'Recaptcha error'
                else:
                    csLookup = dxdb.getObject( 'users', \
                            { 'callsign': data['callsign'] },\
                            False )
                    if csLookup:
                        error = 'callsign is already registered'

                if error == '':

                    userData = dxdb.getObject(
                        'users', {
                            'callsign': data['callsign'],
                            'password': data['password'],
                            'email': data['email']
                        }, True)
                    if userData:
                        return sendUserData(userData, start_response)
                    else:
                        start_response('500 Server Error', \
                                [('Content-Type','text/plain')])
                        return 'Could not create user'
            else:
                # Plain login: compare stored password verbatim.
                userData = dxdb.getObject( 'users', \
                        { 'callsign': data['callsign'] },\
                        False, True )
                if userData and userData['password'] == data['password']:
                    return sendUserData(userData, start_response)
                else:
                    error = 'Wrong callsign or password'
        start_response('400 Bad Request', [('Content-Type', 'text/plain')])
        return 'Invalid login/register request: ' + error
    elif env['PATH_INFO'] == '/uwsgi/userSettings':
        dxdb = dbConn()
        data = json.loads(env['wsgi.input'].read(reqSize))
        error = ''
        okResponse = ''
        dbError = False
        callsign = None
        updMeta = True
        # Authenticate: the JWT in 'token' carries the user's callsign.
        if data.has_key('token'):
            try:
                pl = jwt.decode(data['token'], secret, algorithms=['HS256'])
            except jwt.exceptions.DecodeError, e:
                start_response('400 Bad Request',
                               [('Content-Type', 'text/plain')])
                return 'Login expired'
            if pl.has_key('callsign'):
                callsign = pl['callsign']
        if callsign:
            #award settings
            if data.has_key( 'award' ) and ( ( data.has_key( 'track' ) \
                    and data.has_key( 'color' ) ) \
                    or data.has_key( 'stats_settings' ) ) :
                idParams = {'callsign': callsign, 'award': data['award']}
                updParams = spliceParams( data, \
                    [ 'track', 'color', 'settings', 'stats_settings' ] )
                if dxdb.paramUpdateInsert('users_awards_settings', idParams,
                                          updParams):
                    dxdb.commit()
                    okResponse = 'OK'
                else:
                    dbError = True
#check message
            elif data.has_key('checkMessage'):
                # Return the pending message (if any) and clear it.
                userData = dxdb.getObject( 'users', \
                        { 'callsign': callsign },\
                        False, True )
                if (userData['msg']):
                    dxdb.paramUpdate( 'users', { 'callsign': callsign }, \
                            { 'msg': None } )
                    dxdb.commit()
                start_response('200 OK',
                               [('Content-Type', 'application/json')])
                return json.dumps(userData['msg'])
#reload data
            elif data.has_key('reload'):
                userData = dxdb.getObject( 'users', \
                        { 'callsign': callsign },\
                        False, True )
                return sendUserData(userData, start_response)
#dxped admin
            elif data.has_key('dxpedition') and data['dxpedition'] == 'admin':
                # Only listed admins may create/update/delete dxpeditions.
                if callsign in admins:
                    idParams = {'callsign': data['callsign']}
                    if data.has_key('delete'):
                        if dxdb.paramDelete('dxpedition', idParams):
                            okResponse = 'OK'
                        else:
                            dbError = True
                    else:
                        updParams = spliceParams( data, [ 'dt_begin', 'dt_end', \
                            'descr', 'link' ] )
                        if dxdb.paramUpdateInsert( 'dxpedition', idParams, \
                                updParams ):
                            okResponse = 'OK'
                        else:
                            dbError = True
                    if okResponse:
                        dxdb.commit()
                        exportDXpedition(env)
                        updMeta = False

                else:
                    start_response( '403 Forbidden', \
                            [('Content-Type','text/plain')])
                    return 'Permission denied'
#adif
            elif data.has_key('adif'):
                # Store the uploaded ADIF file and append a job to the queue.
                adif = getUploadedFile(data['adif']['file'])
                fName = callsign + '-' + str(time.time()) + '.adif'
                with open(adifQueueDir + fName, 'w') as f:
                    f.write(adif)
                queue = loadJSON(adifQueueDir + 'queue.json')
                if not queue:
                    queue = []
                queue.append( { 'callsign': callsign, \
                        'awards': data['adif']['awards'],\
                        'file': fName } )
                with open(adifQueueDir + 'queue.json', 'w') as f:
                    f.write(json.dumps(queue))

                start_response('200 OK', [('Content-Type', 'text/plain')])
                return 'OK'
#email
            elif data.has_key('email'):
                # Remember the station callsign / recipient in users.misc,
                # then mail a QSL request with a one-record ADIF attachment.
                idParams = {'callsign': callsign}
                userData = dxdb.getObject('users', idParams)
                additionalCs = []
                emails = {}
                if userData['misc']:
                    if userData['misc'].has_key('additionalCs'):
                        additionalCs = userData['misc']['additionalCs']
                    if userData['misc'].has_key('emails'):
                        emails = userData['misc']['emails']
                if data['email']['stationCs'] != callsign and \
                        not data['email']['stationCs'] in additionalCs:
                    additionalCs.insert(0, data['email']['stationCs'])
                emails[data['email']['to']] = {
                    'cs': data['email']['stationCs']
                }
                dxdb.paramUpdate(
                    'users', idParams, {
                        'misc':
                        json.dumps({
                            'additionalCs': additionalCs,
                            'defaultCs': data['email']['stationCs'],
                            'emails': emails
                        })
                    })

                text = "Award: " + data['award'] + "\n"
                if data['email']['band']:
                    text += "Band: " + data['email']['band'] + "\n" + \
                        "Mode: " + data['email']['mode'] + "\n"
                text += "Date: " + data['email']['date'] + "\n" + \
                    "Time: " + data['email']['time'] + "\n" + \
                    "Your callsign: " + data['email']['stationCs'] + "\n" + \
                    "Freq: " + str( data['email']['freq'] ) + "\n" + \
                    "Worked callsign: " + data['email']['workedCs'] + "\n" + \
                    "Comments: " + data['email']['comments']

                adif = "ADIF export from adxcluster.com\n" + \
                    "Logs generated @ " + \
                    datetime.utcnow().strftime( "%Y-%m-%d %H:%M:%Sz" ) + "\n" + \
                    "<EOH>\n"
                adifFieldsList = {
                    "CALL": 'workedCs',
                    "BAND": 'band',
                    "STATION_CALLSIGN": 'stationCs',
                    "FREQ": 'freq',
                    "MODE": 'mode'
                }
                for (k, v) in adifFieldsList.iteritems():
                    if data['email'][v]:
                        adif += adiffield(k, data['email'][v])
                m, d, y = data['email']['date'].split('-')
                adif += adiffield('QSO_DATE', y + m + d)
                adif += adiffield( 'TIME_ON', \
                    data['email']['time'].replace( ':', '' ).replace( 'z', '' ) + '00' )
                adif += " <EOR>"

                sendEmail( text = text, fr = conf.get( 'email', 'address' ), \
                    to = data['email']['to'], subject = 'Adxcluster award message',\
                    attachments = ( { 'data': adif, 'name': 'adxcluster.adi'}, ) )

                start_response('200 OK', [('Content-Type', 'text/plain')])
                return 'OK'

#autocfm
            elif data.has_key('loadAutoCfm'):
                rTxt = ''
                r = False
                try:
                    cfmData = None
                    if data['award'] == 'IOTA':
                        cfmData = getUploadedFile(data['cfmData'])
                    r = loadAutoCfm(callsign, data['award'], cfmData)
                    rTxt = 'Your awards were updated.' if r \
                            else 'No new callsigns were found.'
                except:
                    rTxt = 'There was an error. Please try to update later.'
                    logging.exception( 'Error while loading autoCfm. Callsign: ' + \
                            callsign )
                start_response('200 OK',
                               [('Content-Type', 'application/json')])
                return json.dumps({'reload': r, 'text': rTxt})
#user award data
            elif data.has_key( 'award' ) and data.has_key( 'value' ) \
                    and ( data.has_key( 'confirmed' ) or data.has_key('delete') or \
                    data.has_key( 'cfm_paper' ) or data.has_key( 'cfm' ) ):

                params =  { 'callsign': callsign, 'award': data['award'], \
                    'value': data['value'], \
                    'band': data['band'] if data.has_key( 'band' ) else 'N/A',\
                    'mode': data['mode'] if data.has_key( 'mode' ) else 'N/A',\
                    }

                fl = False
                if data.has_key('delete') and data['delete']:
                    awardLookup = dxdb.getObject( 'user_awards', params, \
                        False, True )
                    if awardLookup:
                        fl = dxdb.paramDelete('user_awards', params)
                else:
                    updParams = spliceParams( data, \
                        [ 'confirmed', 'cfm_paper', 'cfm_eqsl', 'cfm_lotw', \
                        'cfm', 'worked_cs'] )
                    dxdb.verbose = True
                    fl = dxdb.paramUpdateInsert('user_awards', params,
                                                updParams)
                    dxdb.verbose = False
                if fl:
                    dxdb.commit()
                    okResponse = 'OK'
                else:
                    dbError = True
            elif data.has_key('award_value_worked_color'):
                del data['token']
                data['callsign'] = callsign
                if dxdb.updateObject('users', data, 'callsign'):
                    dxdb.commit()
                    start_response('200 OK', [('Content-Type', 'text/plain')])
                    return 'OK'
                else:
                    start_response( '500 Server Error', \
                            [('Content-Type','text/plain')])
                    return
#user profile
            elif ( data.has_key( 'email' ) and validEmail( data['email'] ) ) or \
                ( data.has_key( 'password' ) and len( data['password'] ) > 5  ):
                field = 'email' if data.has_key('email') else 'password'
                if field == 'password':
                    # Password change requires the matching current password.
                    if data.has_key('oldPassword'):
                        if data['oldPassword'] != \
                            dxdb.getObject( 'users', {'callsign': callsign} \
                                )['password']:
                            error = 'Wrong current password'
                    else:
                        error = 'No current password'
                if not error:
                    if dxdb.updateObject( 'users', \
                            { 'callsign': callsign, field: data[field] },
                            'callsign' ):
                        dxdb.commit()
                        okResponse = 'OK'
                    else:
                        dbError = True
#user's lists
            elif data.has_key('list_id'):
                #users's list create
                if data['list_id'] == 'new':
                    # NOTE(review): local name `list` shadows the builtin.
                    list = dxdb.getObject( 'users_lists',\
                            { 'callsign': callsign, \
                            'title': data['title'] if data.has_key( 'title' ) \
                                else None },\
                            True )
                    if list:
                        start_response( '200 OK', \
                                [('Content-Type', 'application/json')] )
                        return json.dumps({'list_id': list['id']})
                    else:
                        dbError = True
#users lists settings
                elif data.has_key( 'title' ) or data.has_key( 'track' ) or \
                        data.has_key( 'stats_settings' ) or \
                        data.has_key( 'full_title' ):
                    if dxdb.paramUpdate( 'users_lists', { 'id': data['list_id'] }, \
                            spliceParams( data, [ 'title', 'track', 'color', \
                                'stats_settings', 'full_title' ] ) ):
                        dxdb.commit()
                        okResponse = 'OK'
                    else:
                        dbError = True
#users lists items
                elif data.has_key('items'):
                    # Upsert every item; roll back and flag on first failure.
                    idParams = {'list_id': data['list_id']}
                    okResponse = 'OK'
                    for item in data['items']:
                        idParams['callsign'] = item['callsign']
                        if not dxdb.paramUpdateInsert( 'users_lists_items', \
                            idParams,
                            spliceParams( item, [ 'settings', 'pfx' ] ) ):
                            okResponse = ''
                            dbError = True
                            dxdb.rollback()
                        else:
                            dxdb.commit()
                elif data.has_key('callsign'):
                    if data.has_key('delete'):
                        if dxdb.paramDelete( 'users_lists_items',\
                            { 'list_id': data['list_id'], \
                            'callsign': data['callsign'] } ):
                            dxdb.commit()
                            okResponse = 'OK'
                        else:
                            dbError = True
                    else:
                        if dxdb.paramUpdateInsert( 'users_lists_items', \
                            spliceParams( data, [ 'list_id', 'callsign' ] ), \
                            spliceParams( data, [ 'settings', 'pfx' ] ) ):
                            dxdb.commit()
                            okResponse = 'OK'
                        else:
                            dbError = True


#user's list awards
                elif data.has_key('value'):
                    params =  { 'list_id': data['list_id'], \
                            'callsign': data['value'], \
                        'band': data['band'] if data.has_key( 'band' ) else 'N/A',\
                        'mode': data['mode'] if data.has_key( 'mode' ) else 'N/A',\
                        }
                    if data.has_key('delete') and data['delete']:
                        if dxdb.getObject( 'users_lists_awards', params, \
                            False, True ):
                            if dxdb.paramDelete('users_lists_awards', params):
                                dxdb.commit()
                                okResponse = 'OK'
                            else:
                                dbError = True
                    else:
                        if data.has_key('cfm'):
                            # Flatten the nested cfm dict into top-level keys.
                            for k, v in data['cfm'].items():
                                data[k] = v
                        if dxdb.paramUpdateInsert( 'users_lists_awards', params, \
                            spliceParams( data, \
                            ['cfm_paper', 'cfm_eqsl', 'cfm_lotw', 'worked_cs'] )):
                            dxdb.commit()
                            okResponse = 'OK'
                        else:
                            dbError = True
                elif data.has_key('delete'):
                    # Delete the whole list with its items and awards.
                    if dxdb.execute( """delete from users_lists_awards
                        where list_id = %(list_id)s""", data ) and \
                        dxdb.execute( """delete from users_lists_items
                        where list_id = %(list_id)s""", data ) and \
                        dxdb.execute( """delete from users_lists
                        where id = %(list_id)s""", data ):
                        dxdb.commit()
                        okResponse = 'OK'
                    else:
                        dbError = True

            elif not error:
                error = 'Bad user settings'
        if dbError:
            start_response( '500 Server Error', \
                    [('Content-Type','text/plain')])
            return
        if okResponse:
            # On success bump the user-metadata version so clients refetch.
            if updMeta:
                umd = readWebFile('userMetadata.json', env)
                if not umd:
                    umd = {}
                ts = time.time()
                umd[callsign] = ts
                writeWebFile(json.dumps(umd), 'userMetadata.json', env)
                start_response('200 OK',
                               [('Content-Type', 'application/json')])
                return json.dumps({'version': ts})
            else:
                start_response('200 OK', [('Content-Type', 'text/plain')])
                return okResponse

        start_response('400 Bad Request', [('Content-Type', 'text/plain')])
        return error
Beispiel #34
0
        except:
            logging.exception('ws send error')
            wsRemove(ws)


def wsRemove(ws):
    """Drop *ws* from the active websocket connections; no-op if absent."""
    if ws in wsConnections:
        wsConnections.remove(ws)


async def wsUpdate(data):
    """Broadcast *data* to every active websocket via wsSend."""
    for ws in wsConnections:
        await wsSend(ws, data)


# Encoder configuration, read once at startup from the site web root.
encodersSettings = loadJSON(conf.get('web', 'root') + '/encoders.json')

# Mutable module-level state; initEncData() repopulates encoders/encData.
# curEncoder starts at -1 — presumably "no encoder selected yet"; verify
# against the polling code.
encoders = []
encData = {}
curEncoder = -1


def initEncData():
    global encoders, encData
    encoders = []
    encData = {}
    for enc in encodersSettings['encoders']:
        encID = enc['id']
        encoders.append(encID)
        encData[encID] = {
            'lo': -1,
Beispiel #35
0
# Python 2 script fragment (urllib2): defines the SQL 'IN (...)' fragments
# that map ADIF mode names onto the tracked mode groups (CW / SSB / DATA).
import sys, decimal, re, datetime, os, logging, time, json, urllib2, xmltodict,\
        argparse

from common import appRoot, readConf, siteConf, loadJSON
from dxdb import dxdb, cursor2dicts, spliceParams
from dx import DX, DXData

argparser = argparse.ArgumentParser()
argparser.add_argument( '-t', action = 'store_true' )
args = vars( argparser.parse_args() )
testMode = args['t']

conf = siteConf()
webRoot = conf.get( 'web', ( 'test_' if testMode else '' ) + 'root' ) 

awards = loadJSON( webRoot + '/awards.json' )

# NOTE(review): the lists contain duplicates ('CONTESTIA', 'FT8') — harmless
# inside an SQL IN clause but worth cleaning up at the source.
dataModesFull = """('DIGI', 'HELL', 'MT63', 'THOR16', 'FAX', 'OPERA', 'PKT', 'SIM31',
'CONTESTI', 'CONTESTIA', 'AMTOR', 'JT6M', 'ASCI', 'FT8', 'MSK144', 'THOR', 'QRA64',
'CONTESTIA', 'DOMINO', 'JT4C', 'THROB', 'DIG', 'ROS', 'SIM63', 'FSQ', 'THRB', 'J3E',
'WSPR', 'ISCAT', 'CONTESTIA8', 'ALE', 'JT10', 'TOR', 'PACKET', 'RTTY',  'PSK31', 
'PSK63', 'PSK125', 'JT65', 'FSK', 'OLIVIA', 'SSTV', 'JT9', 'FT8' )"""
modes = { 'CW':  """( 'A1A' )""",
        'SSB': """( 'AM', 'PHONE' )""",
        'DATA':  """('DIGI', 'HELL', 'MT63', 'THOR16', 'FAX', 'OPERA', 'PKT', 'SIM31',
'CONTESTI', 'CONTESTIA', 'AMTOR', 'JT6M', 'ASCI', 'FT8', 'MSK144', 'THOR', 'QRA64',
'CONTESTIA', 'DOMINO', 'JT4C', 'THROB', 'DIG', 'ROS', 'SIM63', 'FSQ', 'THRB', 'J3E',
'WSPR', 'ISCAT', 'CONTESTIA8', 'ALE', 'JT10', 'TOR', 'PACKET')""" }


def joinModes( mode, modeAliases, awardsList = None ):
Beispiel #36
0
def loadSpecialLists():
    """Return the special callsign lists from the web root.

    Falls back to empty 'DXpedition'/'Special' lists when the file is
    missing or empty.
    """
    data = loadJSON(webRoot + '/specialLists.json')
    return data or {'DXpedition': [], 'Special': []}
Beispiel #37
0
def locationHandler(request):
    """aiohttp handler: update a station's position/status file.

    Accepts a JSON body that may contain a 'token' (station admin auth),
    a 'location' pair, 'freq', 'online', 'comments' and/or manual 'qth'
    field values.  Persists the merged status to the station's
    status.json and returns it as JSON; anonymous requests with only a
    location get back just the resolved QTH data.
    NOTE(review): generator-based coroutine ('yield from'), matching the
    legacy asyncio style used elsewhere in this file.
    """
    newData = yield from request.json()
    callsign = None
    stationPath = None
    stationSettings = None
    stationCallsign = None
    # A token identifies the station admin: resolve the station directory
    # and, while inside the declared activity period, its callsign.
    if ('token' in newData and newData['token']):
        # decodeToken returns an error response object on failure.
        callsign = decodeToken(newData)
        if not isinstance(callsign, str):
            return callsign
        stationPath = yield from getStationPathByAdminCS(callsign)
        stationSettings = loadJSON(stationPath + '/settings.json')
        if not stationSettings:
            return web.HTTPBadRequest(
                text='Expedition profile is not initialized.')
        if stationSettings and 'station' in stationSettings and\
            'callsign' in stationSettings['station'] and\
            stationSettings['station']['callsign'] and\
            'activityPeriod' in stationSettings['station'] and\
            stationSettings['station']['activityPeriod']:
            # Activity period is stored as 'dd.mm.yyyy' date strings; the
            # end date is extended by one day to make it inclusive.
            act_period = [datetime.strptime(dt, '%d.%m.%Y') for dt in\
                stationSettings['station']['activityPeriod'] if dt]
            if act_period and act_period[0] <= datetime.utcnow() <=\
                act_period[1] + timedelta(days=1):
                stationCallsign = stationSettings['station']['callsign']

    # Record the reported position on the shared "QTH now" maps.
    if 'location' in newData and newData['location']:
        qth_now_cs = None
        # Explicit callsign in the request wins over the station callsign.
        if 'callsign' in newData and newData['callsign']:
            qth_now_cs = newData['callsign']
        elif stationCallsign:
            qth_now_cs = stationCallsign
        logging.info('map callsign: %s' % qth_now_cs)

        if qth_now_cs:
            qth_now_cs = qth_now_cs.upper()
            save_qth_now_location(qth_now_cs, newData['location'],\
                webRoot + '/js/qth_now_locations.json')

        # The "all" map also records anonymous (callsign-less) fixes.
        save_qth_now_location(qth_now_cs, newData['location'],\
            webRoot + '/js/qth_now_locations_all.json')

    # Anonymous request: just resolve and return the QTH data.
    if ('token' not in newData
            or not newData['token']) and 'location' in newData:
        qth = yield from get_qth_data(newData['location'])
        return web.json_response({'qth': qth})
    # NOTE(review): a token-less request without 'location' reaches this
    # point with stationPath still None, so the concatenation below would
    # raise — confirm whether such requests can occur.
    fp = stationPath + '/status.json'
    data = loadJSON(fp)
    if not data:
        data = {}
    # Older status files lack 'locTs'; seed it from the last update time.
    if not 'locTs' in data and 'ts' in data:
        data['locTs'] = data['ts']
    dtUTC = datetime.utcnow()
    data['ts'] = int(time.time())
    data['date'], data['time'] = dtFmt(dtUTC)
    data['year'] = dtUTC.year
    if 'online' in newData:
        data['online'] = newData['online']
    # A frequency update is also announced in the station chat.
    if 'freq' in newData and newData['freq']:
        data['freq'] = {'value': newData['freq'], 'ts': data['ts']}
        fromCallsign = stationSettings['station']['callsign']
        insertChatMessage(path=stationPath + '/chat.json',\
            msg_data={'from': fromCallsign,\
            'text': '<b><i>' + newData['freq'] + '</b></i>'},\
            admin=True)
    country = stationSettings['qthCountry'] if 'qthCountry' in stationSettings\
        else None
    if 'location' in newData and newData['location']:
        location = newData['location']

        country = get_country(location)

        data['qth'] = yield from get_qth_data(location, country=country)

        if 'comments' in newData:
            data['comments'] = newData['comments']
        # Keep a copy of the previous fix so distance/speed can be derived.
        if 'location' in data and data['location']:
            data['prev'] = { 'location': data['location'][:], \
                    'ts': data['locTs'] }
        data['locTs'] = data['ts']
        data['location'] = newData['location']
        if 'prev' in data:
            # Haversine distance from the previous fix; 6373 is the Earth
            # radius in km, so 'd' is km.  sind/cosd are presumably
            # degree-based sin/cos helpers — defined elsewhere.
            lat = [data['location'][1], data['prev']['location'][1]]
            lon = [data['location'][0], data['prev']['location'][0]]
            dlon = lon[0] - lon[1]
            dlat = lat[0] - lat[1]
            a = (sind(dlat/2))**2 + cosd(lat[0]) * cosd(lat[1]) * (sind(dlon/2)) \
                    ** 2
            c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
            d = c * 6373
            data['d'] = d
            data['dt'] = data['locTs'] - data['prev']['ts']
            # Speed in km/h, assuming timestamps are in seconds.
            if float(data['locTs'] - data['prev']['ts']) != 0:
                data['speed'] = d / ( float( data['locTs'] - data['prev']['ts'] ) \
                        / 3600 )
            else:
                data['speed'] = 0

    # Manual QTH field edits override the auto-resolved values.
    if 'qth' in newData:

        if 'qth' not in data:
            data['qth'] = {'fields':\
                empty_qth_fields(country=country)}
        for key in newData['qth']['fields'].keys():
            data['qth']['fields']['values'][int(
                key)] = newData['qth']['fields'][key]
        if 'loc' in newData['qth']:
            data['qth']['loc'] = newData['qth']['loc']

    with open(fp, 'w') as f:
        json.dump(data, f, ensure_ascii=False)
    return web.json_response(data)
from datetime import datetime
from aiohttp import web
from common import siteConf, loadJSON, appRoot, startLogging, createFtpUser
from tqdb import DBConn, spliceParams

# DEBUG level so library (aiohttp/db) messages are captured as well.
logging.basicConfig( level = logging.DEBUG )

parser = argparse.ArgumentParser(description="tnxqso backend aiohttp server")
parser.add_argument('--test', action = "store_true" )
args = parser.parse_args()

conf = siteConf()
# --test switches both the database config section and the web root.
db = DBConn( conf.items( 'db_test' if args.test else 'db' ) )

webRoot = conf.get( 'web', 'root_test' if args.test else 'root' )
# Defaults applied to users whose stored settings lack newer keys.
defSettings = loadJSON( webRoot + '/js/defaultUserSettings.json' )
defStatus = defSettings['status']

@asyncio.coroutine
def main():
    """One-off migration: backfill missing settings keys for every user.

    NOTE(review): this block appears truncated in the source — the loop
    body ends abruptly right after reading 'userColumns', so the rest of
    the migration (and the final save) is not visible here.
    """
    yield from db.connect()
    users = yield from db.execute( "select callsign, settings from users" )
    for user in users:
        settings = user['settings']
        # Backfill keys introduced after the user row was created.
        if not 'status' in settings:
            settings['status'] = defStatus
        if not 'currentPositionIcon' in settings:
            settings['currentPositionIcon'] = 0
        if not 'userFields' in settings:
            settings['userFields'] = []
            columns = settings['log']['userColumns']
import sys, decimal, re, datetime, os, logging, time, json, urllib2, xmltodict,\
        argparse

from common import appRoot, readConf, siteConf, loadJSON
from dxdb import dxdb
from dx import DX, DXData

# Command line: -t switches the script to test mode (test web root).
argparser = argparse.ArgumentParser()
argparser.add_argument( '-t', action = 'store_true' )
args = vars( argparser.parse_args() )
testMode = args['t']

conf = siteConf()
webRoot = conf.get( 'web', ( 'test_' if testMode else '' ) + 'root' ) 
awards = loadJSON( webRoot + '/awards.json' )
# NOTE(review): Python 2 print statements — this fragment is py2-only.
if not awards:
    print 'No awards data!'
else:
    print 'Processing ' + webRoot + '/awards.json'
# userMetadata.json holds a version counter; bump it so clients reload.
udv = loadJSON( webRoot + '/userMetadata.json' )
if udv:
    udv = int( udv ) + 1
else:
    # NOTE(review): the initial value is the string '0' while the bumped
    # branch yields an int — inconsistent types, kept as-is.
    udv = '0'

def makeStr( list ):
    """Return the items as a comma-separated, single-quoted SQL-style list."""
    quoted = ["'%s'" % item for item in list]
    return ', '.join(quoted)

# Digital modes recognised by this older script, and their SQL IN-list form.
digiModes = ('RTTY', 'PSK31', 'PSK63', 'JT65' )
digiModesStr = makeStr( digiModes )
Beispiel #40
0
def getfilelistfromlocation(folderpath, location):
    """List folders, files and comment threads under *location* of a share.

    Parameters:
        folderpath -- absolute root directory of the synced folder
        location -- path inside the share, starting with '/'

    Returns a list of dicts with 'type' of 'folder', 'file' or 'thread'.
    Threads that reference a file are attached to that file entry's
    'threads' list instead of being returned as top-level entries.
    """
    result = []

    # Drop the leading '/'; the root location maps to the folder itself.
    location = location[1:]
    # Fixed: was the dated `cond and a or b` idiom; ternary is equivalent.
    locationfullpath = folderpath if location == '/' else os.path.join(folderpath, location)

    # Sorted sub-folders first; names containing '.' are skipped.
    for temp in [f for f in sorted(os.listdir(locationfullpath)) if os.path.isdir(os.path.join(locationfullpath, f)) and '.' not in f]:
        # Fixed: duplicated `fullpath = fullpath =` assignment typo.
        fullpath = os.path.join(folderpath, location, temp)
        result.append({
            'fullpath': fullpath,
            'name': temp,
            'type': 'folder',
            'insidepath': fullpath.replace(folderpath, '')
        })

    # Then sorted plain files.
    for temp in [f for f in sorted(os.listdir(locationfullpath)) if os.path.isfile(os.path.join(locationfullpath, f))]:
        fullpath = os.path.join(folderpath, location, temp)
        result.append({
            'fullpath': fullpath,
            'name': temp,
            'type': 'file',
            'unrolled': False,
            'threads': [],
            'insidepath': fullpath.replace(folderpath, '')
        })

    # No comment store for this location -> nothing more to do.
    if not os.path.exists(os.path.join(folderpath, '.Comments', location)) or not os.path.isdir(
            os.path.join(folderpath, '.Comments', location)):
        return result

    # Comment threads live in .Comments/<location>/<thread-dir>/.
    for temp in os.listdir(os.path.join(folderpath, '.Comments', location)):
        fullpath = os.path.join(folderpath, '.Comments', location, temp)

        if not pattern.match(temp) or not os.path.isdir(fullpath):
            continue

        # Thread metadata plus the newest comment (for its timestamp).
        metadata = loadJSON(os.path.join(fullpath, 'meta'))
        comments = sorted(os.listdir(fullpath), reverse=True)
        # NOTE(review): comments[1] assumes index 0 of the reverse-sorted
        # listing is the 'meta' file and index 1 the newest comment —
        # verify against the on-disk comment naming scheme.
        lastcomment = loadJSON(os.path.join(fullpath, comments[1]))

        # Main thread entry ('meta' is excluded from the comment count).
        data = {
            'timestamp': metadata['timestamp'],
            'name': metadata['topic'],
            'type': 'thread',
            'numberofcomments': len(os.listdir(fullpath)) - 1,
            'unreadcomment': False,
            'lastcomment': lastcomment['timestamp'],
            'fullpath': fullpath,
            'insidepath': fullpath.replace(folderpath, '')
        }

        # A comment is unread if this client's uid is absent from 'readby'.
        for comment in comments[1:]:
            comm = loadJSON(os.path.join(fullpath, comment))
            if config['uid'] not in comm['readby'].keys():
                data['unreadcomment'] = True
                break

        # Threads about a specific file are attached to that file entry;
        # free-standing threads go straight into the result list.
        if len(metadata['fileabout']) > 0:
            for res in result:
                if res['type'] != 'file':
                    continue

                if res['insidepath'] == metadata['fileabout']:
                    if data['unreadcomment']:
                        res['unreadcomment'] = True
                    res['threads'].append(copy.deepcopy(data))
        else:
            result.append(data)

    return result
Beispiel #41
0
def application(env, start_response):
    """Legacy Python 2 WSGI entry point.

    Dispatches on the last path segment ('news', 'location', 'qso',
    'chat', 'users', 'clearLog'), merges the posted item into the
    matching <webRoot>/<type>.json file and redirects or returns 'OK'.
    NOTE(review): 'type' shadows the builtin — kept as-is in this
    documentation-only pass.
    """
    try:
        reqSize = int(env.get('CONTENT_LENGTH', 0))
    except:
        # Missing/garbled Content-Length: treat the body as empty.
        reqSize = 0

    type = (env["PATH_INFO"].split('/'))[-1]
    if type == 'clearLog':
        os.remove(webRoot + '/qso.json')
        start_response('302 Found', [('Location', 'http://73.ru/rda/')])
        return

    postData = env['wsgi.input'].read(reqSize)
    newItem = {}
    data = []
    if type == 'news':
        # News arrives form-encoded; every other type is JSON.
        newItem = parse_qs(postData)
        newItem['time'] = datetime.now().strftime('%d %b %H:%M').lower()
        newItem['text'] = newItem['text'][0].decode('utf-8').replace(
            '\r\n', '<br/>')
    else:
        newItem = json.loads(postData)
        if newItem.has_key('location'):
            updateLocation(newItem)
            start_response('200 OK', [('Content-Type', 'text/plain')])
            return 'OK'

            # NOTE(review): everything below in this branch is dead code —
            # the 'return' above always exits first.  The return was
            # probably meant to come after this fallback/normalization.
            if not newItem['location']:
                prev = loadJSON(webRoot + '/location.json')
                if prev:
                    newItem['location'] = prev['location']

            type = 'location'
            data = newItem
            data['ts'] = int(datetime.now().strftime("%s"))
            data['locTs'] = data['ts']
            data['date'], data['time'] = dtFmt(datetime.utcnow())

        elif type == 'qso':
            logging.debug(newItem)
            fp = webRoot + '/qso.json'
            data = loadJSON(fp)
            # Unreadable but existing log file: keep a numbered .bak copy
            # before starting a fresh list.
            if not data and os.path.isfile(fp):
                bakNo = 0
                bakFp = fp + '.bak'
                while os.path.isfile(bakFp):
                    bakNo += 1
                    bakFp += str(bakNo)
                shutil.copyfile(fp, bakFp)
                data = []
            data.insert(0, newItem)
            dt = datetime.strptime(newItem['ts'], "%Y-%m-%d %H:%M:%S")
            if newItem['rda']:
                newItem['rda'] = newItem['rda'].upper()
            if newItem['wff']:
                newItem['wff'] = newItem['wff'].upper()
            newItem['date'], newItem['time'] = dtFmt(dt)
            # Touch location.json's timestamp so clients see activity.
            locFp = webRoot + '/location.json'
            locData = loadJSON(locFp)
            if not locData:
                locData = {}
            locData['ts'] = int(datetime.now().strftime("%s"))
            with open(locFp, 'w') as f:
                f.write(
                    json.dumps(locData, ensure_ascii=False).encode('utf-8'))

        elif type == 'chat' or type == 'users':
            newItem['cs'] = newItem['cs'].upper()
            # A ':123' suffix on the callsign acts as the admin password
            # for registered callsigns.
            pwd = newItem['cs'].endswith(':123')
            if pwd:
                newItem['cs'] = newItem['cs'][:-4]
                if newItem['cs'] in regCS:
                    newItem['admin'] = True
            if newItem['cs'] in regCS and not pwd:
                start_response('403 Forbidden')
                return 'This callsign is password protected'
            if type == 'chat':
                newItem['date'], newItem['time'] = dtFmt(datetime.utcnow())
            elif type == 'users':
                fp = webRoot + '/users.json'
                data = loadJSON(fp)
                if not data:
                    data = {}
                if newItem.has_key('delete'):
                    if data.has_key(newItem['cs']):
                        del data[newItem['cs']]
                else:
                    data[newItem['cs']] = { 'tab': newItem['tab'], \
                            'ts': int( datetime.now().strftime("%s") ) }
                with open(fp, 'w') as f:
                    f.write(
                        json.dumps(data, ensure_ascii=False).encode('utf-8'))
                start_response('200 OK', [('Content-Type', 'text/plain')])
                return 'OK'

    fp = webRoot + '/' + type + '.json'
    # Prepend the new item to the stored list unless a branch above already
    # produced the complete data structure.
    if not data:
        data = loadJSON(fp)
        if not data:
            data = []
        data.insert(0, newItem)
    with open(fp, 'w') as f:
        f.write(json.dumps(data, ensure_ascii=False).encode('utf-8'))
    if type == 'news':
        start_response('302 Found', [('Location', 'http://73.ru/rda/')])
        return
    else:
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return 'OK'
Beispiel #42
0
#!/usr/bin/python
#coding=utf-8

import sys, decimal, re, datetime, os, logging, time, json, urllib2, xmltodict,\
        argparse

from common import appRoot, readConf, siteConf, loadJSON
from dxdb import dxdb
from dx import DX, DXData

# At least one input CSV filename is required on the command line.
if len(sys.argv) < 2:
    raise Exception("No filename given!")

conf = siteConf()
# Cached award values; start with an empty list when the file is absent.
fnAwardsValues = conf.get('web', 'root') + '/awardsValues.json'
awardsValues = loadJSON(fnAwardsValues)
if not awardsValues:
    awardsValues = []


def getSplitLine(file, fr, to):
    """Read one line from *file* and return fields fr..to-1.

    Fields are split on ';' and stripped of double quotes and
    surrounding whitespace.  Returns None at end of file.
    """
    raw = file.readline()
    if not raw:
        return None
    fields = raw.split(';')[fr:to]
    return [field.strip('"\r\n ') for field in fields]


# Process each CSV file given on the command line; the first line holds
# the award's name, full name and country.
# NOTE(review): this fragment appears truncated — the loop body ends here.
for fName in sys.argv[1:]:
    with open(fName, 'r') as file:
        name, fullName, country = getSplitLine(file, 0, 3)
Beispiel #43
0
def loadQueue():
    """Return the pending ADIF upload queue parsed from queue.json."""
    queueFile = adifQueueDir + 'queue.json'
    return loadJSON(queueFile)
Beispiel #44
0
def userSettingsHandler(request):
    """aiohttp handler: update a user's station settings, log columns or account.

    The JSON body must carry a valid token; depending on which key is
    present the handler either
      * 'settings'    -- replaces the station profile (renaming/removing
                         the station directory and publish registry entry),
      * 'userColumns' -- updates the custom log columns, or
      * otherwise     -- updates email/password and the FTP account.
    NOTE(review): generator-based coroutine ('yield from'), matching the
    legacy asyncio style used elsewhere in this file.
    """
    data = yield from request.json()
    # decodeToken returns an error response object on failure.
    callsign = decodeToken(data)
    if not isinstance(callsign, str):
        return callsign
    if 'settings' in data:
        oldData = yield from getUserData(callsign)
        cs = oldData['settings']['station']['callsign']
        stationPath = getStationPath(cs) if cs else None
        publishPath = webRoot + '/js/publish.json'
        publish = loadJSON(publishPath)
        if not publish:
            publish = {}
        if cs != data['settings']['station']['callsign']:
            # Station callsign changed: drop the old station directory and
            # carry the publish flags over to the new callsign.
            newCs = data['settings']['station']['callsign']
            newPath = getStationPath(newCs) if newCs else None
            if stationPath and os.path.exists(stationPath):
                shutil.rmtree(stationPath)
            if newCs:
                if os.path.exists(newPath):
                    # Fixed: the message lacked a space before 'is'; an
                    # unreachable createStationDir call after this return
                    # was removed (the directory is created below).
                    return web.HTTPBadRequest( \
                        text = 'Station callsign ' + newCs.upper() + \
                            ' is already registered' )
                if cs and cs in publish:
                    if newCs:
                        publish[newCs] = publish[cs]
                    del publish[cs]
                cs = newCs
                stationPath = newPath
            else:
                stationPath = None
        if cs:
            if not cs in publish:
                publish[cs] = {'admin': True}
            publish[cs]['user'] = data['settings']['publish']
        with open(publishPath, 'w') as f:
            json.dump(publish, f, ensure_ascii=False)
        if stationPath:
            if not os.path.exists(stationPath):
                createStationDir(stationPath, callsign)
        yield from saveStationSettings(cs, callsign, data['settings'])
    elif 'userColumns' in data:
        userData = yield from getUserData(callsign)
        settings = userData['settings']
        userColumns = settings['userFields']
        # Overwrite existing columns in place, appending any new ones.
        # Fixed: the bounds check compared against len(settings) (the
        # settings dict) instead of the column list being updated.
        for c in range(0, len(data['userColumns'])):
            if len(userColumns) <= c:
                userColumns.append(data['userColumns'][c])
            else:
                userColumns[c] = data['userColumns'][c]
        # NOTE(review): this slice rebinds only the local name — surplus
        # columns are never trimmed from settings['userFields']; confirm
        # whether trimming was intended to persist.
        userColumns = userColumns[:len(data['userColumns'])]
        yield from saveStationSettings(
            userData['settings']['station']['callsign'], callsign, settings)
    else:
        # Account update: email/password in the DB plus the FTP password.
        yield from db.paramUpdate( 'users', { 'callsign': callsign }, \
            spliceParams( data, ( 'email', 'password' ) ) )
        setFtpPasswd(callsign, data['password'], test=args.test)
    return web.Response(text='OK')