Example No. 1
def login_post(request):  # tested
    # next = request.params.get('next') or request.route_url('home')
    login = request.POST.get('login', '')
    password = request.POST.get('password', '')
    # print(login)
    log.debug(login)
    user = DBSession.query(User).filter_by(login=login).first()
    if user and user.check_password(password):
        client = Client(user_id=user.id)
        user.clients.append(client)
        DBSession.add(client)
        DBSession.flush()
        headers = remember(request, principal=client.id)
        response = Response()
        response.headers = headers
        locale_id = user.default_locale_id
        if not locale_id:
            locale_id = 1
        response.set_cookie(key='locale_id', value=str(locale_id))
        response.set_cookie(key='client_id', value=str(client.id))
        # return HTTPFound(location=next, headers=response.headers)
        return HTTPOk(headers=response.headers, json_body={})
        # return {}
    return HTTPUnauthorized(location=request.route_url('login'))
Example No. 2
def signin(request):
    req = request.json_body
    login = req['login']
    password = req['password']
    # login = request.POST.get('login', '')
    # password = request.POST.get('password', '')
    desktop = req.get('desktop', False)

    user = DBSession.query(User).filter_by(login=login).first()
    if user and user.check_password(password):
        client = Client(user_id=user.id, is_browser_client=not desktop)
        user.clients.append(client)
        DBSession.add(client)
        DBSession.flush()
        headers = remember(request, principal=client.id, max_age=315360000)
        response = Response()
        response.headers = headers
        locale_id = user.default_locale_id
        if not locale_id:
            locale_id = 1
        response.set_cookie(key='locale_id',
                            value=str(locale_id),
                            max_age=datetime.timedelta(days=3650))
        response.set_cookie(key='client_id',
                            value=str(client.id),
                            max_age=datetime.timedelta(days=3650))
        result = dict()
        result['client_id'] = client.id
        request.response.status = HTTPOk.code
        # request.response.headers = headers
        # return response
        return HTTPOk(headers=response.headers, json_body=result)
        # return result
    return HTTPUnauthorized(location=request.route_url('login'))
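The view above returns the new client id both in the JSON body and as a cookie. A minimal usage sketch (not part of the source) exercising such a view with webtest, in the spirit of the setUp fixtures in the later examples; the '/signin' path, the config file and the credentials are assumptions:

import webtest
from pyramid import paster

# Build a test application the same way as the setUp examples below; 'testing.ini' is assumed.
app = webtest.TestApp(paster.get_app('testing.ini'))
# '/signin' is an assumed route; adjust to the actual route configuration.
resp = app.post_json('/signin', {'login': 'test', 'password': 'secret', 'desktop': False})
assert resp.status_code == 200
assert 'client_id' in resp.json      # returned in the JSON body by the view
assert 'client_id' in app.cookies    # set via response.set_cookie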
Example No. 3
def login_cheat(request):  # TODO: test
    next = request.params.get('next') or request.route_url('dashboard')
    login = request.json_body.get('login', '')
    passwordhash = request.json_body.get('passwordhash', '')
    log.debug("Logging in with cheat method:" + login)
    user = DBSession.query(User).filter_by(login=login).first()
    if user and user.password.hash == passwordhash:
        log.debug("Login successful")
        client = Client(user_id=user.id)
        user.clients.append(client)
        DBSession.add(client)
        DBSession.flush()
        headers = remember(request, principal=client.id)
        response = Response()
        response.headers = headers
        locale_id = user.default_locale_id
        if not locale_id:
            locale_id = 1
        response.set_cookie(key='locale_id', value=str(locale_id))
        response.set_cookie(key='client_id', value=str(client.id))
        return response

    log.debug("Login unsuccessful for " + login)
    return HTTPUnauthorized(location=request.route_url('login'))
Example No. 4
def setUp(self):
    self.config = testing.setUp()
    import webtest
    from pyramid import paster
    from sqlalchemy import create_engine
    engine = create_engine('sqlite://')
    myapp = paster.get_app('testing.ini')
    self.app = webtest.TestApp(myapp)
    from lingvodoc.models import (
        Base,
        Locale,
        User,
        Passhash,
        Client
        )
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        ru_locale = Locale(id=1, shortcut="ru", intl_name="Русский")
        DBSession.add(ru_locale)
        en_locale = Locale(id=2, shortcut="en", intl_name="English")
        DBSession.add(en_locale)
        DBSession.flush()
        new_user = User(id=1, login='******', default_locale_id=1)
        new_pass = Passhash(password='******')
        DBSession.add(new_pass)
        new_user.password = new_pass
        DBSession.add(new_user)
        new_client = Client(id=1, user=new_user)
        DBSession.add(new_client)
Example No. 5
def upload_user_blob(request):  # TODO: remove blob Object
    variables = {'auth': authenticated_userid(request)}
    response = dict()
    filename = request.POST['blob'].filename
    input_file = request.POST['blob'].file

    class Object(object):
        pass

    blob = Object()
    blob.client_id = variables['auth']
    client = DBSession.query(Client).filter_by(id=variables['auth']).first()
    blob.object_id = DBSession.query(UserBlobs).filter_by(client_id=client.id).count() + 1
    blob.data_type = request.POST['data_type']

    blob.filename = filename

    current_user = DBSession.query(User).filter_by(id=client.user_id).first()

    blob_object = UserBlobs(object_id=blob.object_id,
                            client_id=blob.client_id,
                            name=filename,
                            data_type=blob.data_type,
                            user_id=current_user.id)

    current_user.userblobs.append(blob_object)
    blob_object.real_storage_path, blob_object.content = create_object(request, input_file, blob_object, blob.data_type,
                                                                       blob.filename, json_input=False)
    DBSession.add(blob_object)
    DBSession.add(current_user)
    DBSession.flush()
    request.response.status = HTTPOk.code
    response = {"client_id": blob_object.client_id, "object_id": blob_object.object_id, "content": blob_object.content}
    return response
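upload_user_blob reads the uploaded file from the multipart field 'blob' plus a 'data_type' form field and answers with the new blob's ids and content. A rough sketch (not from the source) of calling it with webtest, reusing the TestApp from the sketch after Example No. 2; the '/blob/upload' path and file contents are assumptions:

# Hypothetical multipart upload; only the 'blob' and 'data_type' field names come from the view above.
resp = app.post('/blob/upload',
                params={'data_type': 'pdf'},
                upload_files=[('blob', 'report.pdf', b'%PDF-1.4 placeholder')])
print(resp.json['client_id'], resp.json['object_id'])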
Example No. 6
def signin(request):
    req = request.json_body
    login = req['login']
    password = req['password']
    # login = request.POST.get('login', '')
    # password = request.POST.get('password', '')

    user = DBSession.query(User).filter_by(login=login).first()
    if user and user.check_password(password):
        client = Client(user_id=user.id)
        user.clients.append(client)
        DBSession.add(client)
        DBSession.flush()
        headers = remember(request, principal=client.id)
        response = Response()
        response.headers = headers
        locale_id = user.default_locale_id
        if not locale_id:
            locale_id = 1
        response.set_cookie(key='locale_id', value=str(locale_id))
        response.set_cookie(key='client_id', value=str(client.id))
        result = dict()
        result['client_id'] = client.id
        request.response.status = HTTPOk.code
        # request.response.headers = headers
        # return response
        return HTTPOk(headers=response.headers, json_body=result)
        # return result
    return HTTPUnauthorized(location=request.route_url('login'))
Example No. 7
def login_post(request):  # tested
    next = request.params.get('next') or request.route_url('home')
    login = request.POST.get('login', '')
    password = request.POST.get('password', '')
    # print(login)
    log.debug(login)
    user = DBSession.query(User).filter_by(login=login).first()
    if user and user.check_password(password):
        client = Client(user_id=user.id)
        user.clients.append(client)
        DBSession.add(client)
        DBSession.flush()
        headers = remember(request, principal=client.id)
        response = Response()
        response.headers = headers
        locale_id = user.default_locale_id
        if not locale_id:
            locale_id = 1
        response.set_cookie(key='locale_id', value=str(locale_id))
        response.set_cookie(key='client_id', value=str(client.id))
        # return HTTPFound(location=next, headers=response.headers)
        return HTTPOk(headers=response.headers, json_body={})
        # return {}
    return HTTPUnauthorized(location=request.route_url('login'))
Example No. 8
def create_nested_field(field, perspective, client_id, upper_level, link_ids,
                        position):
    field_object = DictionaryPerspectiveToField(
        client_id=client_id,
        parent=perspective,
        field_client_id=field['client_id'],
        field_object_id=field['object_id'],
        upper_level=upper_level,
        position=position)
    if field.get('link'):
        field_object.link_client_id = field['link']['client_id']
        field_object.link_object_id = field['link']['object_id']
    DBSession.flush()
    contains = field.get('contains', None)
    if contains:
        inner_position = 1
        for subfield in contains:
            create_nested_field(subfield,
                                perspective,
                                client_id,
                                upper_level=field_object,
                                link_ids=link_ids,
                                position=inner_position)
            inner_position += 1
    return
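create_nested_field above walks a nested dictionary describing one perspective field. A hypothetical input showing the shape it reads (keys taken from the accesses in the function; the ids are placeholders):

# Hypothetical field description consumed by create_nested_field; all ids are placeholders.
field = {
    'client_id': 1,                     # becomes field_client_id
    'object_id': 10,                    # becomes field_object_id
    'link': {'client_id': 1,            # optional; copied to link_client_id / link_object_id
             'object_id': 11},
    'contains': [                       # optional subfields; their positions start at 1
        {'client_id': 1, 'object_id': 12},
        {'client_id': 1, 'object_id': 13},
    ],
}
# create_nested_field(field, perspective, client_id=1,
#                     upper_level=None, link_ids={}, position=1)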
Example No. 9
def create_field(translation_gist_client_id, translation_gist_object_id,
                 data_type_translation_gist_client_id,
                 data_type_translation_gist_object_id, client):
    try:

        if not client:
            raise KeyError(
                "Invalid client id (not registered on server). Try to logout and then login."
            )
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        field = Field(
            client_id=client.id,  ###
            data_type_translation_gist_client_id=
            data_type_translation_gist_client_id,
            data_type_translation_gist_object_id=
            data_type_translation_gist_object_id,
            translation_gist_client_id=translation_gist_client_id,
            translation_gist_object_id=translation_gist_object_id)

        DBSession.add(field)
        DBSession.flush()

        return {'object_id': field.object_id, 'client_id': field.client_id}
    except KeyError as e:

        return {'error': str(e)}
Example No. 10
def new_client_server(request):
    old_client = DBSession.query(Client).filter_by(
        id=authenticated_userid(request)).first()
    if old_client:
        user = old_client.user
        if user:
            client = Client(user_id=user.id, is_browser_client=False)
            user.clients.append(client)
            DBSession.add(client)
            DBSession.flush()
            headers = remember(request, principal=client.id)
            response = Response()
            response.headers = headers
            locale_id = user.default_locale_id
            if not locale_id:
                locale_id = 1
            response.set_cookie(key='locale_id', value=str(locale_id))
            response.set_cookie(key='client_id', value=str(client.id))
            result = dict()
            result['client_id'] = client.id
            request.response.status = HTTPOk.code
            # request.response.headers = headers
            # return response
            return HTTPOk(headers=response.headers, json_body=result)
            # return result
    return HTTPUnauthorized(location=request.route_url('login'))
Example No. 11
def login_cheat(request):  # TODO: test
    next = request.params.get('next') or request.route_url('dashboard')
    login = request.json_body.get('login', '')
    passwordhash = request.json_body.get('passwordhash', '')
    log.debug("Logging in with cheat method:" + login)
    user = DBSession.query(User).filter_by(login=login).first()
    if user and user.password.hash == passwordhash:
        log.debug("Login successful")
        client = Client(user_id=user.id)
        user.clients.append(client)
        DBSession.add(client)
        DBSession.flush()
        headers = remember(request, principal=client.id)
        response = Response()
        response.headers = headers
        locale_id = user.default_locale_id
        if not locale_id:
            locale_id = 1
        response.set_cookie(key='locale_id', value=str(locale_id))
        response.set_cookie(key='client_id', value=str(client.id))
        return response

    log.debug("Login unsuccessful for " + login)
    return HTTPUnauthorized(location=request.route_url('login'))
Example No. 12
def get_translation(translation_gist_client_id, translation_gist_object_id, locale_id):
    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)
    translation = DBSession.query(TranslationAtom).filter_by(parent_client_id=translation_gist_client_id,
                                                             parent_object_id=translation_gist_object_id,
                                                             locale_id=locale_id).first()
    DBSession.flush()
    return translation.content
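get_translation simply returns the content of the TranslationAtom matching the gist and locale; note that .first() can return None, in which case the attribute access raises. A hypothetical call (the gist ids are placeholders; locale_id=2 is English in the test fixtures below):

content = get_translation(translation_gist_client_id=1,
                          translation_gist_object_id=5,
                          locale_id=2)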
Example No. 13
def get_translation(translation_gist_client_id, translation_gist_object_id, locale_id):
    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)
    translation = DBSession.query(TranslationAtom).filter_by(parent_client_id=translation_gist_client_id,
                                                             parent_object_id=translation_gist_object_id,
                                                             locale_id=locale_id).first()
    DBSession.flush()
    return translation.content
Example No. 14
def create_group_entity(request):  # tested
    try:
        variables = {'auth': authenticated_userid(request)}
        response = dict()
        req = request.json_body
        client = DBSession.query(Client).filter_by(id=variables['auth']).first()
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.")
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException("This client id is orphaned. Try to logout and then login once more.")

        tags = []
        if 'tag' in req:
            tags += [req['tag']]
        for par in req['connections']:
            parent = DBSession.query(LexicalEntry).\
                filter_by(client_id=par['client_id'], object_id=par['object_id']).first()
            if not parent:
                request.response.status = HTTPNotFound.code
                return {'error': str("No such lexical entry in the system")}
            # par_tags = DBSession.query(GroupingEntity).\
            #     filter_by(entity_type=req['entity_type'], parent=parent).all()
            par_tags = None
            # the GroupingEntity query above is stubbed out, so only extend tags when it returns rows
            if par_tags:
                tags += [o.content for o in par_tags]
        if not tags:
            n = 10  # better read from settings
            tag = time.ctime() + ''.join(random.SystemRandom().choice(string.ascii_uppercase + string.digits)
                                         for c in range(n))
            tags += [tag]
        parents = req['connections']
        for par in parents:
            parent = DBSession.query(LexicalEntry).\
                filter_by(client_id=par['client_id'], object_id=par['object_id']).first()
            for tag in tags:
                # ent = DBSession.query(GroupingEntity).\
                #     filter_by(entity_type=req['entity_type'], content=tag, parent=parent).first()
                ent = None
                if not ent:
                    # entity = GroupingEntity(client_id=client.id, object_id=DBSession.query(GroupingEntity).filter_by(client_id=client.id).count() + 1,
                    #                         entity_type=req['entity_type'], content=tag, parent=parent)
                    entity = None
                    # entity creation is stubbed out above; only add it if something was built
                    if entity:
                        DBSession.add(entity)
                        DBSession.flush()
        log.debug('TAGS: %s', tags)
        request.response.status = HTTPOk.code
        return {}
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
Example No. 15
def create_language(request):  # tested & in docs
    try:
        variables = {'auth': request.authenticated_userid}

        req = request.json_body
        try:
            parent_client_id = req['parent_client_id']
            parent_object_id = req['parent_object_id']
        except KeyError:
            parent_client_id = None
            parent_object_id = None
        translation_gist_client_id = req['translation_gist_client_id']
        translation_gist_object_id = req['translation_gist_object_id']
        client = DBSession.query(Client).filter_by(id=variables['auth']).first()
        object_id = req.get('object_id', None)
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                           variables['auth'])
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException("This client id is orphaned. Try to logout and then login once more.")

        parent = None
        if parent_client_id and parent_object_id:
            parent = DBSession.query(Language).filter_by(client_id=parent_client_id, object_id=parent_object_id).first()
        language = Language(client_id=variables['auth'],
                            object_id=object_id,
                            translation_gist_client_id=translation_gist_client_id,
                            translation_gist_object_id=translation_gist_object_id)
        DBSession.add(language)
        if parent:
            language.parent = parent
        DBSession.flush()
        basegroups = []
        basegroups += [DBSession.query(BaseGroup).filter_by(name="Can edit languages").first()]
        basegroups += [DBSession.query(BaseGroup).filter_by(name="Can delete languages").first()]
        if not object_id:
            groups = []
            for base in basegroups:
                group = Group(subject_client_id=language.client_id, subject_object_id=language.object_id, parent=base)
                groups += [group]
            for group in groups:
                add_user_to_group(user, group)
        request.response.status = HTTPOk.code
        return {'object_id': language.object_id,
                'client_id': language.client_id}
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
Example No. 16
def create_lexical_entry_bulk(request):  # TODO: test
    try:
        dictionary_client_id = request.matchdict.get('dictionary_client_id')
        dictionary_object_id = request.matchdict.get('dictionary_object_id')
        perspective_client_id = request.matchdict.get('perspective_client_id')
        perspective_object_id = request.matchdict.get('perspective_object_id')

        count = request.json_body.get('count') or 0

        variables = {'auth': request.authenticated_userid}

        client = DBSession.query(Client).filter_by(
            id=variables['auth']).first()
        if not client:
            raise KeyError(
                "Invalid client id (not registered on server). Try to logout and then login.",
                variables['auth'])
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException(
                "This client id is orphaned. Try to logout and then login once more."
            )
        perspective = DBSession.query(DictionaryPerspective). \
            filter_by(client_id=perspective_client_id, object_id = perspective_object_id).first()
        if not perspective:
            request.response.status = HTTPNotFound.code
            return {'error': str("No such perspective in the system")}

        lexes_list = []
        for i in range(0, count):
            lexentr = LexicalEntry(client_id=variables['auth'],
                                   parent_object_id=perspective_object_id,
                                   parent=perspective)
            DBSession.add(lexentr)
            lexes_list.append(lexentr)
        DBSession.flush()
        lexes_ids_list = []
        for lexentr in lexes_list:
            lexes_ids_list.append({
                'client_id': lexentr.client_id,
                'object_id': lexentr.object_id
            })

        request.response.status = HTTPOk.code
        return lexes_ids_list
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
Example No. 17
def signup_post(request):  # tested
    try:
        req = request.json_body
        login = req['login']
        name = req['name']
        email = req['email']
        password = req['password']

        day = req.get('day')
        month = req.get('month')
        year = req.get('year')
        if day is None or month is None or year is None:
            request.response.status = HTTPBadRequest.code
            return {'Error': "day, month or year of the birth is missing"}
        # birthday = datetime.datetime.strptime(day + month + year, "%d%m%Y").date()
        try:
            birthday = datetime.date(year, month, day)
        except ValueError:
            request.response.status = HTTPBadRequest.code
            return {'Error': "Invalid birthday"}

        if DBSession.query(User).filter_by(login=login).first():
            raise CommonException("The user with this login is already registered")
        if DBSession.query(Email).filter_by(email=email).first():
            raise CommonException("The user with this email is already registered")
        new_user = User(login=login, name=name, signup_date=datetime.datetime.utcnow(), intl_name=login, birthday=birthday, is_active=True)
        pwd = Passhash(password=password)
        email = Email(email=email)
        new_user.password = pwd
        new_user.email.append(email)
        DBSession.add(new_user)
        basegroups = []
        basegroups += [DBSession.query(BaseGroup).filter_by(name="Can create dictionaries").first()]
        basegroups += [DBSession.query(BaseGroup).filter_by(name="Can create languages").first()]
        basegroups += [DBSession.query(BaseGroup).filter_by(name="Can create organizations").first()]
        basegroups += [DBSession.query(BaseGroup).filter_by(name="Can create translation strings").first()]
        groups = []
        for base in basegroups:
            groups += [DBSession.query(Group).filter_by(subject_override=True, base_group_id=base.id).first()]
        for group in groups:
            if group not in new_user.groups:
                new_user.groups.append(group)
        DBSession.flush()
        return {}

    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'status': request.response.status, 'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'status': request.response.status, 'error': str(e)}

    except ValueError as e:
        request.response.status = HTTPConflict.code
        return {'status': request.response.status, 'error': str(e)}
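signup_post expects the JSON body sketched below; day, month and year are required even though they are read with .get(). All values here are placeholders:

# Hypothetical signup payload; every value is a placeholder.
payload = {
    'login': 'new_user',
    'name': 'New User',
    'email': 'new_user@example.com',
    'password': 'secret',
    'day': 1,
    'month': 1,
    'year': 1990,   # combined into datetime.date(year, month, day)
}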
Example No. 18
def create_translationgist(request):
    try:
        variables = {'auth': request.authenticated_userid}

        req = request.json_body
        object_id = req.get('object_id', None)
        type = req['type']
        client = DBSession.query(Client).filter_by(
            id=variables['auth']).first()
        if not client:
            raise KeyError(
                "Invalid client id (not registered on server). Try to logout and then login.",
                variables['auth'])
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException(
                "This client id is orphaned. Try to logout and then login once more."
            )
        translationgist = TranslationGist(client_id=variables['auth'],
                                          object_id=object_id,
                                          type=type)
        DBSession.add(translationgist)
        DBSession.flush()
        basegroups = []
        basegroups += [
            DBSession.query(BaseGroup).filter_by(
                name="Can delete translationgist").first()
        ]
        if not object_id:
            groups = []
            for base in basegroups:
                group = Group(subject_client_id=translationgist.client_id,
                              subject_object_id=translationgist.object_id,
                              parent=base)
                groups += [group]
            for group in groups:
                add_user_to_group(user, group)
        request.response.status = HTTPOk.code
        return {
            'object_id': translationgist.object_id,
            'client_id': translationgist.client_id
        }
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
Example No. 19
def upload_user_blob(request):  # TODO: remove blob Object
    variables = {'auth': authenticated_userid(request)}
    response = dict()
    filename = request.POST['blob'].filename
    input_file = request.POST['blob'].file

    class Object(object):
        pass

    blob = Object()
    blob.client_id = variables['auth']
    client = DBSession.query(Client).filter_by(id=variables['auth']).first()
    blob.data_type = request.POST['data_type']

    blob.filename = filename

    current_user = DBSession.query(User).filter_by(id=client.user_id).first()
    object_id = request.POST.get('object_id', None)

    blob_object = UserBlobs(object_id=object_id,
                            client_id=blob.client_id,
                            name=filename,
                            data_type=blob.data_type,
                            user_id=current_user.id)

    current_user.userblobs.append(blob_object)
    blob_object.real_storage_path, blob_object.content = create_object(
        request,
        input_file,
        blob_object,
        blob.data_type,
        blob.filename,
        json_input=False)
    if blob.data_type == "sociolinguistics":
        try:
            check_socio(blob_object.real_storage_path)
        except Exception as e:
            request.response.status = HTTPBadRequest.code
            response = {"error": str(e)}
            return response

    DBSession.add(blob_object)
    DBSession.add(current_user)
    DBSession.flush()

    request.response.status = HTTPOk.code
    response = {
        "client_id": blob_object.client_id,
        "object_id": blob_object.object_id,
        "content": blob_object.content
    }
    return response
Example No. 20
def create_translationatom(request):
    try:
        variables = {'auth': request.authenticated_userid}

        req = request.json_body
        parent_client_id = req['parent_client_id']
        parent_object_id = req['parent_object_id']
        locale_id = req['locale_id']
        content = req['content']
        object_id = req.get('object_id', None)
        client = DBSession.query(Client).filter_by(id=variables['auth']).first()
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                           variables['auth'])
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException("This client id is orphaned. Try to logout and then login once more.")
        parent = DBSession.query(TranslationGist).filter_by(client_id=parent_client_id, object_id=parent_object_id).first()
        if not parent.marked_for_deletion:
            translationatom = TranslationAtom(client_id=variables['auth'],
                                              object_id=object_id,
                                              parent=parent,
                                              locale_id=locale_id,
                                              content=content)
            DBSession.add(translationatom)
            DBSession.flush()
            if not object_id:
                basegroups = []
                basegroups += [DBSession.query(BaseGroup).filter_by(name="Can edit translationatom").first()]
                groups = []
                for base in basegroups:
                    group = Group(subject_client_id=translationatom.client_id, subject_object_id=translationatom.object_id, parent=base)
                    groups += [group]
                for group in groups:
                    add_user_to_group(user, group)
            request.response.status = HTTPOk.code
            return {'object_id': translationatom.object_id,
                    'client_id': translationatom.client_id}
        else:
            raise KeyError("TranslationGist deleted")
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
Example No. 21
def upload_audio(sound_ids, ids_map, fields_dict, sound_and_markup_cursor, audio_hashes, markup_hashes, folder_name,
                        user_id, is_a_regular_form, client, storage):
    log = logging.getLogger(__name__)
    sound_field = "Sound"
    markup_field = "Markup"
    if "Sounds of Paradigmatic forms" in fields_dict:
        sound_field = "Sounds of Paradigmatic forms"
    if "Paradigm Markup" in fields_dict:
        markup_field = "Paradigm Markup"

    markup__without_audio_sequence = []
    audio_sequence = []
    for cursor in sound_and_markup_cursor:
        blob_id = cursor[0]
        description_type = int(cursor[5])
        if description_type == 1:
            audio = cursor[2]
            markup = cursor[1]
        common_name = str(cursor[3])
        word_id = cursor[4]
        if word_id in sound_ids:
            continue
        sound_ids.add(word_id)
        audio_hash = hashlib.sha224(audio).hexdigest()
        if audio_hash not in audio_hashes:
            ###filename = common_name + ".wav"
            if common_name:
                fname = os.path.splitext(common_name)[0]
                fname = fname.replace(".", "_")
                filename = "%s.wav" % fname
            else:
                filename = 'noname.noext'
            audio_hashes.add(audio_hash)
            audio_sequence.append((ids_map[int(word_id)][0], ids_map[int(word_id)][1], fields_dict[sound_field][0], fields_dict[sound_field][1],
                                    None, client, filename, audio))
            lvl = create_entity(ids_map[int(word_id)][0], ids_map[int(word_id)][1], fields_dict[sound_field][0], fields_dict[sound_field][1],
                    None, client, filename=filename, content=base64.urlsafe_b64encode(audio).decode(), folder_name=folder_name, storage=storage)
            if len(markup__without_audio_sequence) > 50:
                DBSession.flush()
        if len(audio_sequence) > 50:
            DBSession.flush()
            audio_sequence = []
            if len(markup__without_audio_sequence) > 50:
                DBSession.flush()
    if len(audio_sequence) != 0:
        DBSession.flush()
        audio_sequence = []
    if len(markup__without_audio_sequence) != 0:
        DBSession.flush()
Example No. 22
def upload_audio(sound_ids, ids_map, fields_dict, sound_and_markup_cursor, audio_hashes, markup_hashes, folder_name,
                        user_id, is_a_regular_form, client, storage):
    log = logging.getLogger(__name__)
    sound_field = "Sound"
    markup_field = "Markup"
    if "Sounds of Paradigmatic forms" in fields_dict:
        sound_field = "Sounds of Paradigmatic forms"
    if "Paradigm Markup" in fields_dict:
        markup_field = "Paradigm Markup"

    markup__without_audio_sequence = []
    audio_sequence = []
    for cursor in sound_and_markup_cursor:
        blob_id = cursor[0]
        description_type = int(cursor[5])
        if description_type == 1:
            audio = cursor[2]
            markup = cursor[1]
        common_name = str(cursor[3])
        word_id = cursor[4]
        if word_id in sound_ids:
            continue
        sound_ids.add(word_id)
        audio_hash = hashlib.sha224(audio).hexdigest()
        if audio_hash not in audio_hashes:
            ###filename = common_name + ".wav"
            if common_name:
                fname = os.path.splitext(common_name)[0]
                fname = fname.replace(".", "_")
                filename = "%s.wav" % fname
            else:
                filename = 'noname.noext'
            audio_hashes.add(audio_hash)
            audio_sequence.append((ids_map[int(word_id)][0], ids_map[int(word_id)][1], fields_dict[sound_field][0], fields_dict[sound_field][1],
                                    None, client, filename, audio))
            lvl = create_entity(ids_map[int(word_id)][0], ids_map[int(word_id)][1], fields_dict[sound_field][0], fields_dict[sound_field][1],
                    None, client, filename=filename, content=base64.urlsafe_b64encode(audio).decode(), folder_name=folder_name, storage=storage)
            if len(markup__without_audio_sequence) > 50:
                DBSession.flush()
        if len(audio_sequence) > 50:
            DBSession.flush()
            audio_sequence = []
            if len(markup__without_audio_sequence) > 50:
                DBSession.flush()
    if len(audio_sequence) != 0:
        DBSession.flush()
        audio_sequence = []
    if len(markup__without_audio_sequence) != 0:
        DBSession.flush()
Example No. 23
def convert_all(dictionary_client_id,
                dictionary_object_id,
                user_id,
                client_id,
                object_id,
                sqlalchemy_url,
                storage,
                eaf_url,
                sound_url=None):
    engine = create_engine(sqlalchemy_url)
    DBSession.configure(bind=engine)
    convert_five_tiers(dictionary_client_id, dictionary_object_id, user_id,
                       client_id, object_id, sqlalchemy_url, storage, eaf_url,
                       sound_url)
    DBSession.flush()
Example No. 24
def create_lexical_entry_bulk(request):  # TODO: test
    try:
        dictionary_client_id = request.matchdict.get('dictionary_client_id')
        dictionary_object_id = request.matchdict.get('dictionary_object_id')
        perspective_client_id = request.matchdict.get('perspective_client_id')
        perspective_object_id = request.matchdict.get('perspective_object_id')

        count = request.json_body.get('count') or 0

        variables = {'auth': request.authenticated_userid}

        client = DBSession.query(Client).filter_by(id=variables['auth']).first()
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                           variables['auth'])
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException("This client id is orphaned. Try to logout and then login once more.")
        perspective = DBSession.query(DictionaryPerspective). \
            filter_by(client_id=perspective_client_id, object_id = perspective_object_id).first()
        if not perspective:
            request.response.status = HTTPNotFound.code
            return {'error': str("No such perspective in the system")}

        lexes_list = []
        for i in range(0, count):
            lexentr = LexicalEntry(client_id=variables['auth'],
                                   parent_object_id=perspective_object_id, parent=perspective)
            DBSession.add(lexentr)
            lexes_list.append(lexentr)
        DBSession.flush()
        lexes_ids_list = []
        for lexentr in lexes_list:
            lexes_ids_list.append({'client_id': lexentr.client_id, 'object_id': lexentr.object_id})

        request.response.status = HTTPOk.code
        return lexes_ids_list
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
Example No. 25
def update_perspective_fields(req, perspective_client_id,
                              perspective_object_id, client):
    response = dict()
    perspective = SyncDBSession.query(DictionaryPerspective).filter_by(
        client_id=perspective_client_id,
        object_id=perspective_object_id).first()
    client = SyncDBSession.query(Client).filter_by(
        id=client.id).first()  # variables['auth']
    if not client:
        raise KeyError(
            "Invalid client id (not registered on server). Try to logout and then login."
        )

    if perspective and not perspective.marked_for_deletion:
        try:
            link_gist = SyncDBSession.query(TranslationGist) \
                .join(TranslationAtom) \
                .filter(TranslationGist.type == 'Service',
                        TranslationAtom.content == 'Link',
                        TranslationAtom.locale_id == 2).one()
            link_ids = {
                'client_id': link_gist.client_id,
                'object_id': link_gist.object_id
            }
        except NoResultFound:
            return {'error': str("Something wrong with the base")}
        fields = SyncDBSession.query(DictionaryPerspectiveToField) \
            .filter_by(parent=perspective) \
            .all()
        SyncDBSession.flush()
        for field in fields:  ## ?
            SyncDBSession.delete(field)
        position = 1
        for field in req:
            create_nested_field(field=field,
                                perspective=perspective,
                                client_id=client.id,
                                upper_level=None,
                                link_ids=link_ids,
                                position=position)
            position += 1

        return response
    else:
        return {'error': str("No such perspective in the system")}
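update_perspective_fields drops the perspective's existing DictionaryPerspectiveToField rows and recreates them from req, a list of nested field dictionaries with the shape shown after Example No. 8. A hypothetical argument (ids are placeholders):

# Hypothetical req argument; all ids are placeholders.
req = [
    {'client_id': 1, 'object_id': 10},
    {'client_id': 1, 'object_id': 11,
     'contains': [{'client_id': 1, 'object_id': 12}]},
]
# update_perspective_fields(req, perspective_client_id=1,
#                           perspective_object_id=7, client=client)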
Example No. 26
def upload_user_blob(request):  # TODO: remove blob Object
    variables = {'auth': authenticated_userid(request)}
    response = dict()
    filename = request.POST['blob'].filename
    input_file = request.POST['blob'].file

    class Object(object):
        pass

    blob = Object()
    blob.client_id = variables['auth']
    client = DBSession.query(Client).filter_by(id=variables['auth']).first()
    blob.data_type = request.POST['data_type']

    blob.filename = filename

    current_user = DBSession.query(User).filter_by(id=client.user_id).first()
    object_id = request.POST.get('object_id', None)

    blob_object = UserBlobs(object_id=object_id,
                            client_id=blob.client_id,
                            name=filename,
                            data_type=blob.data_type,
                            user_id=current_user.id)

    current_user.userblobs.append(blob_object)
    blob_object.real_storage_path, blob_object.content = create_object(request, input_file, blob_object, blob.data_type,
                                                                       blob.filename, json_input=False)
    if blob.data_type == "sociolinguistics":
        try:
            check_socio(blob_object.real_storage_path)
        except Exception as e:
            request.response.status = HTTPBadRequest.code
            response = {"error": str(e)}
            return response

    DBSession.add(blob_object)
    DBSession.add(current_user)
    DBSession.flush()

    request.response.status = HTTPOk.code
    response = {"client_id": blob_object.client_id, "object_id": blob_object.object_id, "content": blob_object.content}
    return response
Example No. 27
def create_organization(request):  # TODO: test
    try:

        variables = {'auth': request.authenticated_userid}
        req = request.json_body
        name = req['name']
        about = req['about']
        client = DBSession.query(Client).filter_by(
            id=variables['auth']).first()
        if not client:
            raise KeyError(
                "Invalid client id (not registered on server). Try to logout and then login."
            )
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException(
                "This client id is orphaned. Try to logout and then login once more."
            )

        organization = Organization(name=name, about=about)
        if user not in organization.users:
            organization.users.append(user)
        DBSession.add(organization)
        DBSession.flush()
        bases = DBSession.query(BaseGroup).filter_by(subject='organization')
        for base in bases:
            group = Group(parent=base, subject_object_id=organization.id)
            add_user_to_group(user, group)
            DBSession.add(group)
        request.response.status = HTTPOk.code
        return {'organization_id': organization.id}
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
Example No. 28
def setUp(self):
    self.config = testing.setUp()
    import webtest
    from pyramid import paster
    from sqlalchemy import create_engine
    engine = create_engine('sqlite://')
    myapp = paster.get_app('testing.ini')
    self.app = webtest.TestApp(myapp)
    from lingvodoc.models import (Base, Language, Locale,
                                  UserEntitiesTranslationString, User,
                                  Passhash, Client)
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        ru_locale = Locale(id=1, shortcut="ru", intl_name="Русский")
        DBSession.add(ru_locale)
        en_locale = Locale(id=2, shortcut="en", intl_name="English")
        DBSession.add(en_locale)
        DBSession.flush()
        new_user = User(id=1, login='******', default_locale_id=1)
        new_pass = Passhash(password='******')
        DBSession.add(new_pass)
        new_user.password = new_pass
        DBSession.add(new_user)
        new_client = Client(id=1, user=new_user)
        DBSession.add(new_client)
        DBSession.flush()
        new_uets = UserEntitiesTranslationString(object_id=1,
                                                 client_id=1,
                                                 locale_id=1,
                                                 translation_string='test',
                                                 translation='not working')
        DBSession.add(new_uets)
        new_lang = Language(client_id=1,
                            object_id=1,
                            translation_string='test')
        DBSession.add(new_lang)
        new_lang = Language(client_id=1,
                            object_id=2,
                            translation_string='testy')
        DBSession.add(new_lang)
Example No. 29
def setUp(self):
    self.config = testing.setUp()
    from lingvodoc.models import (
        Base,
        Language,
        UserEntitiesTranslationString,
        Locale
        )
    from sqlalchemy import create_engine
    engine = create_engine('sqlite://')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        ru_locale = Locale(id=1, shortcut="ru", intl_name="Русский")
        DBSession.add(ru_locale)
        DBSession.flush()
        new_uets = UserEntitiesTranslationString(object_id=1, client_id=1, locale_id=1,
                                                 translation_string='test', translation='working')
        DBSession.add(new_uets)
        new_lang = Language(client_id=1, object_id=1, translation_string='test')
        DBSession.add(new_lang)
Example No. 30
def create_organization(request):  # TODO: test
    try:

        variables = {'auth': request.authenticated_userid}
        req = request.json_body
        name = req['name']
        about = req['about']
        client = DBSession.query(Client).filter_by(id=variables['auth']).first()
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.")
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException("This client id is orphaned. Try to logout and then login once more.")

        organization = Organization(name=name,
                                    about=about)
        if user not in organization.users:
            organization.users.append(user)
        DBSession.add(organization)
        DBSession.flush()
        bases = DBSession.query(BaseGroup).filter_by(subject='organization')
        for base in bases:
            group = Group(parent=base, subject_object_id=organization.id)
            if user not in group.users:
                group.users.append(user)
            DBSession.add(group)
        request.response.status = HTTPOk.code
        return {'organization_id': organization.id}
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
Example No. 31
def setUp(self):
    self.config = testing.setUp()
    self.config.testing_securitypolicy(userid='1',
                                       permissive=True)
    import webtest
    from pyramid import paster
    from sqlalchemy import create_engine
    engine = create_engine('sqlite://')
    myapp = paster.get_app('testing.ini')
    self.app = webtest.TestApp(myapp)
    from lingvodoc.models import (
        Base,
        User,
        Client,
        Passhash,
        Locale,
        UserEntitiesTranslationString,
        Language
        )
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        ru_locale = Locale(id=1, shortcut="ru", intl_name="Русский")
        DBSession.add(ru_locale)
        en_locale = Locale(id=2, shortcut="en", intl_name="English")
        DBSession.add(en_locale)
        DBSession.flush()
        new_user = User(id=1, login='******', default_locale_id=1)
        new_pass = Passhash(password='******')
        DBSession.add(new_pass)
        new_user.password = new_pass
        DBSession.add(new_user)
        new_client = Client(id=1, user=new_user)
        DBSession.add(new_client)
        new_uets = UserEntitiesTranslationString(client_id=1, object_id=1, locale_id=2,
                                                 translation_string='imastring')
        DBSession.add(new_uets)
        new_lang = Language(client_id=1, object_id=1, translation_string='testy')
        DBSession.add(new_lang)
Example No. 32
def create_field(translation_gist_client_id, translation_gist_object_id, data_type_translation_gist_client_id,
                 data_type_translation_gist_object_id, client):
    try:

        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.")
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        field = Field(client_id=client.id,  ###
                      data_type_translation_gist_client_id=data_type_translation_gist_client_id,
                      data_type_translation_gist_object_id=data_type_translation_gist_object_id,
                      translation_gist_client_id=translation_gist_client_id,
                      translation_gist_object_id=translation_gist_object_id)

        DBSession.add(field)
        DBSession.flush()

        return {'object_id': field.object_id,
                'client_id': field.client_id}
    except KeyError as e:

        return {'error': str(e)}
Example No. 33
def setUp(self):
    self.config = testing.setUp()
    from lingvodoc.models import (Base, Language,
                                  UserEntitiesTranslationString, Locale)
    from sqlalchemy import create_engine
    engine = create_engine('sqlite://')
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        ru_locale = Locale(id=1, shortcut="ru", intl_name="Русский")
        DBSession.add(ru_locale)
        DBSession.flush()
        new_uets = UserEntitiesTranslationString(object_id=1,
                                                 client_id=1,
                                                 locale_id=1,
                                                 translation_string='test',
                                                 translation='working')
        DBSession.add(new_uets)
        new_lang = Language(client_id=1,
                            object_id=1,
                            translation_string='test')
        DBSession.add(new_lang)
Example No. 34
def update_perspective_fields(req,
                              perspective_client_id,
                              perspective_object_id,
                              client):
    response = dict()
    perspective = DBSession.query(DictionaryPerspective).filter_by(client_id=perspective_client_id,
                                                                   object_id=perspective_object_id).first()
    client = DBSession.query(Client).filter_by(id=client.id).first() #variables['auth']
    if not client:
        raise KeyError("Invalid client id (not registered on server). Try to logout and then login.")
    if perspective and not perspective.marked_for_deletion:
        try:
            link_gist = DBSession.query(TranslationGist)\
                .join(TranslationAtom)\
                .filter(TranslationGist.type == 'Service',
                        TranslationAtom.content == 'Link',
                        TranslationAtom.locale_id == 2).one()
            link_ids = {'client_id':link_gist.client_id, 'object_id': link_gist.object_id}
        except NoResultFound:
            return {'error': str("Something wrong with the base")}
        fields = DBSession.query(DictionaryPerspectiveToField)\
            .filter_by(parent=perspective)\
            .all()
        DBSession.flush()
        for field in fields: ## ?
            DBSession.delete(field)
        position = 1
        for field in req:
            create_nested_field(field=field,
                                perspective=perspective,
                                client_id=client.id,
                                upper_level=None,
                                link_ids=link_ids, position=position)
            position += 1

        return response
    else:
        return {'error': str("No such perspective in the system")}
Example No. 35
def create_nested_field(field, perspective, client_id, upper_level, link_ids, position):
    field_object = DictionaryPerspectiveToField(client_id=client_id,
                                                parent=perspective,
                                                field_client_id=field['client_id'],
                                                field_object_id=field['object_id'],
                                                upper_level=upper_level,
                                                position=position)
    if field.get('link'):
        field_object.link_client_id = field['link']['client_id']
        field_object.link_object_id = field['link']['object_id']
    DBSession.flush()
    contains = field.get('contains', None)
    if contains:
        inner_position = 1
        for subfield in contains:
            create_nested_field(subfield,
                                perspective,
                                client_id,
                                upper_level=field_object,
                                link_ids=link_ids,
                                position=inner_position)
            inner_position += 1
    return
Example No. 36
def convert_all(dictionary_client_id,
                dictionary_object_id,
                user_id,
                client_id,
                object_id,
                sqlalchemy_url,
                storage,
                eaf_url,
                sound_url=None
                ):
    engine = create_engine(sqlalchemy_url)
    DBSession.configure(bind=engine)
    convert_five_tiers(
                dictionary_client_id,
                dictionary_object_id,
                user_id,
                client_id,
                object_id,
                sqlalchemy_url,
                storage,
                eaf_url,
                sound_url
                )
    DBSession.flush()
Example No. 37
def diff_desk(request):
    import base64
    from lingvodoc.models import categories
    from time import sleep
    client = DBSession.query(Client).filter_by(id=authenticated_userid(request)).first()
    if not client:
        request.response.status = HTTPNotFound.code
        return {'error': str("Try to login again")}
    user = DBSession.query(User).filter_by(id=client.user_id).first()
    if not user:
        request.response.status = HTTPNotFound.code
        return {'error': str("Try to login again")}
    settings = request.registry.settings
    existing = [row2dict(entry) for entry in DBSession.query(ObjectTOC)]
    central_server = settings['desktop']['central_server']
    path = central_server + 'sync/difference/server'
    server = make_request(path, 'post', existing).json()
    language = list()
    dictionary = list()
    perspective = list()
    field = list()
    dictionaryperspectivetofield = list()
    lexicalentry = list()
    entity = list()
    userblobs = list()
    translationgist = list()
    translationatom = list()
    for entry in server:
        if entry['table_name'] == 'language':
            language.append(entry)
        if entry['table_name'] == 'dictionary':
            dictionary.append(entry)
        if entry['table_name'] == 'dictionaryperspective':
            perspective.append(entry)
        if entry['table_name'] == 'dictionaryperspectivetofield':
            dictionaryperspectivetofield.append(entry)
        if entry['table_name'] == 'lexicalentry':
            lexicalentry.append(entry)
        if entry['table_name'] == 'entity':
            entity.append(entry)
        if entry['table_name'] == 'userblobs':
            userblobs.append(entry)
        if entry['table_name'] == 'translationgist':
            translationgist.append(entry)
        if entry['table_name'] == 'translationatom':
            translationatom.append(entry)
        if entry['table_name'] == 'field':
            field.append(entry)
    # todo: batches
    for group in DBSession.query(Group).filter_by(subject_client_id=authenticated_userid(request)).all():
        path = central_server + 'group'
        gr_req = row2dict(group)
        gr_req['users'] = [user.id]
        status = make_request(path, 'post', gr_req)
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
    for entry in translationgist:
        desk_gist = DBSession.query(TranslationGist).filter_by(client_id=entry['client_id'],
                                                               object_id=entry['object_id']).one()
        path = central_server + 'translationgist'
        status = make_request(path, 'post', row2dict(desk_gist))
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
    for entry in translationatom:
        desk_atom = DBSession.query(TranslationAtom).filter_by(client_id=entry['client_id'],
                                                               object_id=entry['object_id']).one()
        path = central_server + 'translationatom'
        status = make_request(path, 'post', row2dict(desk_atom))
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
    for entry in language:
        desk_lang = DBSession.query(Language).filter_by(client_id=entry['client_id'],
                                                        object_id=entry['object_id']).one()
        path = central_server + 'language'
        status = make_request(path, 'post', row2dict(desk_lang))
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
    for entry in dictionary:
        desk_dict = DBSession.query(Dictionary).filter_by(client_id=entry['client_id'],
                                                          object_id=entry['object_id']).one()
        path = central_server + 'dictionary'
        desk_json = row2dict(desk_dict)
        desk_json['category'] = categories[desk_json['category']]
        status = make_request(path, 'post', desk_json)
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
    for entry in perspective:
        desk_persp = DBSession.query(DictionaryPerspective).filter_by(client_id=entry['client_id'],
                                                                      object_id=entry['object_id']).one()
        path = central_server + 'dictionary/%s/%s/perspective' % (
            desk_persp.parent_client_id, desk_persp.parent_object_id)
        status = make_request(path, 'post', row2dict(desk_persp))
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
    for entry in field:
        desk_field = DBSession.query(Field).filter_by(
            client_id=entry['client_id'],
            object_id=entry['object_id']).one()
        path = central_server + 'field'
        status = make_request(path, 'post', row2dict(desk_field))
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
    for entry in dictionaryperspectivetofield:
        desk_field = DBSession.query(DictionaryPerspectiveToField).filter_by(
            client_id=entry['client_id'],
            object_id=entry['object_id']).one()
        if desk_field.parent_client_id == client.id:
            persp = desk_field.parent
            path = central_server + 'dictionary/%s/%s/perspective/%s/%s/field' % (
                persp.parent_client_id,
                persp.parent_object_id,
                persp.client_id,
                persp.object_id)
            status = make_request(path, 'post', row2dict(desk_field))
            if status.status_code != 200:
                request.response.status = HTTPInternalServerError.code
                return {'error': str("internet error")}
    for entry in lexicalentry:
        desk_lex = DBSession.query(LexicalEntry).filter_by(client_id=entry['client_id'],
                                                           object_id=entry['object_id']).one()
        persp = desk_lex.parent
        path = central_server + 'dictionary/%s/%s/perspective/%s/%s/lexical_entry' % (persp.parent_client_id,
                                                                                      persp.parent_object_id,
                                                                                      persp.client_id,
                                                                                      persp.object_id)
        status = make_request(path, 'post', row2dict(desk_lex))
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
    grouping_tags = dict()
    for entry in entity:
        desk_ent = DBSession.query(Entity).filter_by(client_id=entry['client_id'],
                                                     object_id=entry['object_id']).one()
        lex = desk_ent.parent
        persp = lex.parent
        path = central_server + 'dictionary/%s/%s/perspective/%s/%s/lexical_entry/%s/%s/entity' % (
            persp.parent_client_id,
            persp.parent_object_id,
            persp.client_id,
            persp.object_id,
            lex.client_id,
            lex.object_id)  # todo: normal content upload
        ent_req = row2dict(desk_ent)
        content = desk_ent.content
        filename = None
        if desk_ent.additional_metadata:
            data_type = desk_ent.additional_metadata.get('data_type')
            if data_type:
                data_type = data_type.lower()
                if data_type == 'image' or data_type == 'sound' or 'markup' in data_type:
                    filename = desk_ent.content.split('/')[-1]
                    content = make_request(desk_ent.content)
                    if content.status_code != 200:
                        log.error(desk_ent.content)
                        DBSession.rollback()
                        request.response.status = HTTPInternalServerError.code
                        return {'error': str("internet error")}
                        # return

                    content = content.content
                    content = base64.urlsafe_b64encode(content)

        ent_req['content'] = content
        ent_req['filename'] = filename
        if desk_ent.field.data_type == 'Grouping Tag':
            field_ids = str(desk_ent.field.client_id) + '_' + str(desk_ent.field.object_id)
            if field_ids not in grouping_tags:
                grouping_tags[field_ids] = {'field_client_id': desk_ent.field.client_id,
                                            'field_object_id': desk_ent.field.object_id,
                                            'tag_groups': dict()}
            if desk_ent.content not in grouping_tags[field_ids]['tag_groups']:
                grouping_tags[field_ids]['tag_groups'][desk_ent.content] = [row2dict(desk_ent)]
            else:
                grouping_tags[field_ids]['tag_groups'][desk_ent.content].append(row2dict(desk_ent))
        else:

            status = make_request(path, 'post', ent_req)
            if status.status_code != 200:
                request.response.status = HTTPInternalServerError.code
                return {'error': str("internet error")}
    for entry in grouping_tags:
        path = central_server + 'group_entity/bulk'
        req = grouping_tags[entry]
        req['counter'] = client.counter
        status = make_request(path, 'post', req)
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
        client.counter = status.json()['counter']
        DBSession.flush()
    for entry in userblobs:
        desk_blob = DBSession.query(UserBlobs).filter_by(client_id=entry['client_id'],
                                                         object_id=entry['object_id']).one()
        path = central_server + 'blob'
        data = {'object_id': desk_blob.object_id, 'data_type': desk_blob.data_type}
        # Use a context manager so the blob file handle is closed after the upload.
        with open(desk_blob.real_storage_path, 'rb') as blob_file:
            status = make_request(path, 'post', data=data, files={'blob': blob_file})
        if status.status_code != 200:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("internet error")}
    return
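
The table-by-table routing above amounts to bucketing the server diff by table_name before uploading each bucket in dependency order. A self-contained sketch of that bucketing step; the rows are made-up placeholders.

# Self-contained sketch of the bucketing step (rows are made-up placeholders).
from collections import defaultdict

server_diff = [
    {'table_name': 'language', 'client_id': 1, 'object_id': 1},
    {'table_name': 'entity', 'client_id': 1, 'object_id': 7},
    {'table_name': 'entity', 'client_id': 1, 'object_id': 8},
]
buckets = defaultdict(list)
for entry in server_diff:
    buckets[entry['table_name']].append(entry)
# buckets['entity'] now holds the two entity rows to be pushed to the server,
# mirroring the per-table lists built at the top of diff_desk().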
Example No. 38
def edit_user_info(request):  # TODO: test
    from passlib.hash import bcrypt
    response = dict()

    req = request.json_body
    client_id = req.get('client_id')
    user_id = req.get('user_id')
    user = None
    if client_id:
        client = DBSession.query(Client).filter_by(id=client_id).first()
        if not client:
            request.response.status = HTTPNotFound.code
            return {'error': str("No such client in the system")}
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        user_id = client.user_id
        if not user:
            request.response.status = HTTPNotFound.code
            return {'error': str("No such user in the system")}
    else:
        user = DBSession.query(User).filter_by(id=user_id).first()
        if not user:
            request.response.status = HTTPNotFound.code
            return {'error': str("No such user in the system")}
    new_password = req.get('new_password')
    old_password = req.get('old_password')

    if new_password:
        if not old_password:
            request.response.status = HTTPBadRequest.code
            return {'error': str("Need old password to confirm")}
        old_hash = DBSession.query(Passhash).filter_by(user_id=user_id).first()
        if old_hash:
            if not user.check_password(old_password):
                request.response.status = HTTPBadRequest.code
                return {'error': str("Wrong password")}
            else:
                old_hash.hash = bcrypt.encrypt(new_password)
        else:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("User has no password")}

    name = req.get('name')
    if name:
        user.name = name
    default_locale_id = req.get('default_locale_id')
    if default_locale_id:
        user.default_locale_id = default_locale_id
    birthday = req.get('birthday')
    if birthday:
        try:
            year, month, day = birthday.split('-')
            user.birthday = datetime.date(int(year), int(month), int(day))
        except ValueError:
            request.response.status = HTTPBadRequest.code
            return {'error': str("Invalid birthday")}
    email = req.get('email')
    if email:
        if user.email:
            for em in user.email:
                em.email = email
        else:
            new_email = Email(user=user, email=email)
            DBSession.add(new_email)
            DBSession.flush()
    about = req.get('about')
    if about:
        if user.about:
            for ab in user.about:
                ab.content = about
        else:
            new_about = About(user=user, content=about)
            DBSession.add(new_about)
            DBSession.flush()
    # response['is_active']=str(user.is_active)
    request.response.status = HTTPOk.code
    return response
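
A hedged example of the JSON body edit_user_info reads; every key is optional except that new_password requires old_password, and all values below are placeholders.

# Hypothetical request body (placeholder values; keys follow the reads above).
example_request_body = {
    'client_id': 42,                 # or 'user_id' instead of 'client_id'
    'old_password': 'current-secret',
    'new_password': 'new-secret',
    'name': 'New Name',
    'default_locale_id': 2,
    'birthday': '1990-01-31',        # parsed as year-month-day
    'email': 'user@example.org',
    'about': 'Short description',
}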
Example No. 39
def convert_five_tiers(dictionary_client_id,
                       dictionary_object_id,
                       user_id,
                       origin_client_id,
                       origin_object_id,
                       sqlalchemy_url,
                       storage,
                       eaf_url,
                       sound_url=None):

    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)

    no_sound = True
    if sound_url:
        no_sound = False
    with warnings.catch_warnings():
        warnings.filterwarnings('error')
        try:
            from pydub import AudioSegment
        except Warning as e:
            no_sound = True
    if not no_sound:
        with tempfile.NamedTemporaryFile() as temp:
            try:
                sound_file = request.urlopen(sound_url)
            except HTTPError as e:
                return {'error': str(e.read().decode("utf8", 'ignore'))}
            with open(temp.name, 'wb') as output:
                output.write(sound_file.read())
            full_audio = AudioSegment.from_wav(temp.name)
            temp.flush()

    field_ids = {}
    with transaction.manager:
        client = DBSession.query(Client).filter_by(id=user_id).first()

        if not client:
            raise KeyError(
                "Invalid client id (not registered on server). Try to logout and then login.",
                user_id)
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        all_fieldnames = ("Markup", "Paradigm Markup", "Word", "Transcription",
                          "Translation", "Sound", "Etymology", "Backref",
                          "Word of Paradigmatic forms",
                          "Transcription of Paradigmatic forms",
                          "Translation of Paradigmatic forms",
                          "Sounds of Paradigmatic forms")
        for name in all_fieldnames:
            data_type_query = DBSession.query(Field) \
                .join(TranslationGist,
                      and_(Field.translation_gist_object_id == TranslationGist.object_id,
                           Field.translation_gist_client_id == TranslationGist.client_id))\
                .join(TranslationGist.translationatom)
            field = data_type_query.filter(
                TranslationAtom.locale_id == 2,
                TranslationAtom.content == name).one()
            field_ids[name] = (field.client_id, field.object_id)
        fp_fields = ("Word", "Transcription", "Translation", "Sound", "Markup",
                     "Etymology", "Backref")
        sp_fields = ("Word of Paradigmatic forms",
                     "Transcription of Paradigmatic forms",
                     "Translation of Paradigmatic forms",
                     "Sounds of Paradigmatic forms", "Paradigm Markup",
                     "Backref")
        fp_structure = set([field_ids[x] for x in fp_fields])
        sp_structure = set([field_ids[x] for x in sp_fields])
        DBSession.flush()
        resp = translation_service_search("WiP")
        state_translation_gist_object_id, state_translation_gist_client_id = resp[
            'object_id'], resp['client_id']
        for base in DBSession.query(BaseGroup).filter_by(
                dictionary_default=True):
            new_group = Group(parent=base,
                              subject_object_id=dictionary_object_id,
                              subject_client_id=dictionary_client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            DBSession.add(new_group)
            DBSession.flush()

        origin_metadata = {
            "origin_client_id": origin_client_id,
            "origin_object_id": origin_object_id
        }

        parent = DBSession.query(Dictionary).filter_by(
            client_id=dictionary_client_id,
            object_id=dictionary_object_id).first()
        if not parent:
            return {'error': str("No such dictionary in the system")}
        first_perspective = None
        second_perspective = None
        for perspective in DBSession.query(DictionaryPerspective).filter_by(
                parent=parent, marked_for_deletion=False):
            structure = set()
            fields = DBSession.query(DictionaryPerspectiveToField)\
                        .filter_by(parent=perspective)\
                        .all()
            DBSession.flush()
            for p_to_field in fields:
                structure.add(
                    (p_to_field.field_client_id, p_to_field.field_object_id))

            if structure == fp_structure:
                first_perspective = perspective
            elif structure == sp_structure:
                second_perspective = perspective
            structure.clear()
        lexes = []
        if first_perspective:
            lexes = DBSession.query(DictionaryPerspective, LexicalEntry, Entity)\
                .filter(and_(DictionaryPerspective.object_id==first_perspective.object_id,
                        DictionaryPerspective.client_id==first_perspective.client_id))\
                .join(LexicalEntry, and_( LexicalEntry.parent_object_id==DictionaryPerspective.object_id,
                                          LexicalEntry.parent_client_id==DictionaryPerspective.client_id))\
                .join(Entity, and_(LexicalEntry.object_id==Entity.parent_object_id,
                                   LexicalEntry.client_id==Entity.parent_client_id))
        p_lexes = []
        if second_perspective:
            p_lexes = DBSession.query(DictionaryPerspective, LexicalEntry, Entity)\
                .filter(and_(DictionaryPerspective.object_id==second_perspective.object_id,
                        DictionaryPerspective.client_id==second_perspective.client_id))\
                .join(LexicalEntry, and_( LexicalEntry.parent_object_id==DictionaryPerspective.object_id,
                                          LexicalEntry.parent_client_id==DictionaryPerspective.client_id))\
                .join(Entity, and_(LexicalEntry.object_id==Entity.parent_object_id,
                                   LexicalEntry.client_id==Entity.parent_client_id))

        hashes = [
            x[2].additional_metadata["hash"] for x in lexes
            if x[2].field.data_type == "Sound"
        ]
        hashes = hashes[:] + [
            x[2].additional_metadata["hash"]
            for x in p_lexes if x[2].field.data_type == "Sound"
        ]
        links = [((x[2].link.client_id, x[2].link.object_id),
                  (x[1].client_id, x[1].object_id)) for x in lexes
                 if x[2].field.data_type == "Link"]
        links = links[:] + [((x[2].link.client_id, x[2].link.object_id),
                             (x[1].client_id, x[1].object_id))
                            for x in p_lexes if x[2].field.data_type == "Link"]
        resp = translation_service_search("WiP")
        state_translation_gist_object_id, state_translation_gist_client_id = resp[
            'object_id'], resp['client_id']
        for base in DBSession.query(BaseGroup).filter_by(
                dictionary_default=True):
            new_group = Group(parent=base,
                              subject_object_id=dictionary_object_id,
                              subject_client_id=dictionary_client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            DBSession.add(new_group)
            DBSession.flush()
        """
        # FIRST PERSPECTIVE
        """
        if first_perspective is None:
            resp = translation_service_search_all("Lexical Entries")
            persp_translation_gist_client_id, persp_translation_gist_object_id = resp[
                'client_id'], resp['object_id']
            first_perspective = DictionaryPerspective(
                client_id=client.id,  ###
                state_translation_gist_object_id=
                state_translation_gist_object_id,
                state_translation_gist_client_id=
                state_translation_gist_client_id,
                parent=parent,
                # import_source=req.get('import_source'),
                # import_hash=req.get('import_hash'),
                additional_metadata=origin_metadata,
                translation_gist_client_id=persp_translation_gist_client_id,
                translation_gist_object_id=persp_translation_gist_object_id)

            first_perspective.additional_metadata = origin_metadata
            DBSession.add(first_perspective)
        owner_client = DBSession.query(Client).filter_by(
            id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(
                perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=first_perspective.object_id,
                              subject_client_id=first_perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
            DBSession.flush()
        first_perspective_client_id = first_perspective.client_id
        first_perspective_object_id = first_perspective.object_id
        """
        # SECOND PERSPECTIVE
        """
        resp = translation_service_search_all("Paradigms")
        persp_translation_gist_client_id, persp_translation_gist_object_id = resp[
            'client_id'], resp['object_id']
        if second_perspective is None:
            second_perspective = DictionaryPerspective(
                client_id=client.id,  ### variables['auth']
                state_translation_gist_object_id=
                state_translation_gist_object_id,
                state_translation_gist_client_id=
                state_translation_gist_client_id,
                parent=parent,
                # import_source=req.get('import_source'),
                # import_hash=req.get('import_hash'),
                additional_metadata=origin_metadata,
                translation_gist_client_id=persp_translation_gist_client_id,
                translation_gist_object_id=persp_translation_gist_object_id)
            second_perspective.additional_metadata = origin_metadata
            # if is_template is not None:
            #     perspective.is_template = is_template
            DBSession.add(second_perspective)
        owner_client = DBSession.query(Client).filter_by(
            id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(
                perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=second_perspective.object_id,
                              subject_client_id=second_perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
        second_perspective_client_id = second_perspective.client_id
        second_perspective_object_id = second_perspective.object_id

        fp_fields_dict = {}
        """
        # FIRST PERSPECTIVE FIELDS CREATION
        """
        fp_field_names = ("Word", "Transcription", "Translation", "Sound",
                          "Etymology", "Backref")
        fields_list = []
        for fieldname in fp_field_names:  #

            if fieldname == "Backref":
                fields_list.append({
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link": {
                        "client_id": second_perspective_client_id,
                        "object_id": second_perspective_object_id
                    }
                })

            elif fieldname == "Sound":
                fields_list.append({
                    "client_id":
                    field_ids[fieldname][0],
                    "object_id":
                    field_ids[fieldname][1],
                    "contains": [{
                        "client_id": field_ids["Markup"][0],
                        "object_id": field_ids["Markup"][1]
                    }]
                })
            else:
                fields_list.append({
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1]
                })
            fp_fields_dict[fieldname] = (field_ids[fieldname][0],
                                         field_ids[fieldname][1])
        fp_fields_dict["Markup"] = (field_ids["Markup"][0],
                                    field_ids["Markup"][1])
        update_perspective_fields(fields_list, first_perspective_client_id,
                                  first_perspective_object_id, client)
        """
        # Creating fields of the second perspective
        """
        sp_field_names = ("Word of Paradigmatic forms",
                          "Transcription of Paradigmatic forms",
                          "Translation of Paradigmatic forms",
                          "Sounds of Paradigmatic forms", "Backref")
        sp_fields_dict = {}
        fields_list = []
        for fieldname in sp_field_names:
            if fieldname == "Backref":
                fields_list.append({
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link": {
                        "client_id": first_perspective_client_id,
                        "object_id": first_perspective_object_id
                    }
                })
            elif fieldname == "Sounds of Paradigmatic forms":
                fields_list.append({
                    "client_id":
                    field_ids[fieldname][0],
                    "object_id":
                    field_ids[fieldname][1],
                    "contains": [{
                        "client_id": field_ids["Paradigm Markup"][0],
                        "object_id": field_ids["Paradigm Markup"][1]
                    }]
                })
            else:
                fields_list.append({
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1]
                })
            sp_fields_dict[fieldname] = (field_ids[fieldname][0],
                                         field_ids[fieldname][1])
        sp_fields_dict["Paradigm Markup"] = (field_ids["Paradigm Markup"][0],
                                             field_ids["Paradigm Markup"][1])
        update_perspective_fields(fields_list, second_perspective_client_id,
                                  second_perspective_object_id, client)
        dubl = []
        try:
            eaffile = request.urlopen(eaf_url)
        except HTTPError as e:
            return {'error': str(e.read().decode("utf8", 'ignore'))}
        with tempfile.NamedTemporaryFile() as temp:
            temp.write(eaffile.read())
            converter = elan_parser.Elan(temp.name)
            converter.parse()
            final_dicts = converter.proc()
            temp.flush()
        for phrase in final_dicts:
            curr_dict = None
            paradigm_words = []
            for word_translation in phrase:
                if type(word_translation) is not list:
                    curr_dict = word_translation
                    mt_words = [
                        word_translation[i][1].text for i in word_translation
                        if len(word_translation[i]) > 1
                        and type(word_translation[i][1].text) is str
                    ]
                    main_tier_text = " ".join(mt_words)
                    if main_tier_text:
                        # `word` is expected to have been assigned by the list
                        # branch below while processing an earlier item of this
                        # phrase; its time span is reused for the main-tier text.
                        paradigm_words.append(
                            elan_parser.Word(text=main_tier_text,
                                             tier="Word of Paradigmatic forms",
                                             time=word.time))
                else:
                    word = word_translation[0]
                    tier_name = word.tier
                    new = " ".join([i.text for i in word_translation])
                    paradigm_words.append(
                        elan_parser.Word(text=new,
                                         tier=tier_name,
                                         time=word.time))
            p_match_dict = defaultdict(list)
            for pword in paradigm_words:
                match = [x for x in p_lexes
                         if x[2].content == pword.text]  #LEX COUNT OR RANDOM
                for t in match:
                    if field_ids[EAF_TIERS[pword.tier]] == (
                            t[2].field.client_id, t[2].field.object_id):
                        p_match_dict[t[1]].append(t)
            p_match_dict = {
                k: v
                for k, v in p_match_dict.items() if len(v) >= 2
            }
            max_sim = None
            for le in p_match_dict:
                if max_sim is None:
                    max_sim = le
                else:
                    if len(p_match_dict[le]) >= len(p_match_dict[max_sim]):
                        max_sim = le
            if max_sim:
                sp_lexical_entry_client_id = max_sim.client_id
                sp_lexical_entry_object_id = max_sim.object_id
            else:
                lexentr = LexicalEntry(
                    client_id=client.id,
                    parent_object_id=second_perspective_object_id,
                    parent=second_perspective)
                DBSession.add(lexentr)
                sp_lexical_entry_client_id = lexentr.client_id
                sp_lexical_entry_object_id = lexentr.object_id

            for other_word in paradigm_words:
                if max_sim:
                    text_and_field = (other_word.text,
                                      field_ids[EAF_TIERS[other_word.tier]])
                    sim = [(x[2].content, (x[2].field.client_id,
                                           x[2].field.object_id))
                           for x in p_match_dict[max_sim]]
                    if text_and_field not in sim:
                        create_entity(sp_lexical_entry_client_id,
                                      sp_lexical_entry_object_id,
                                      field_ids[EAF_TIERS[other_word.tier]][0],
                                      field_ids[EAF_TIERS[other_word.tier]][1],
                                      None,
                                      client,
                                      other_word.text,
                                      filename=None,
                                      storage=storage)
                else:
                    create_entity(sp_lexical_entry_client_id,
                                  sp_lexical_entry_object_id,
                                  field_ids[EAF_TIERS[other_word.tier]][0],
                                  field_ids[EAF_TIERS[other_word.tier]][1],
                                  None,
                                  client,
                                  other_word.text,
                                  filename=None,
                                  storage=storage)
            if not no_sound:
                if word.time[1] < len(full_audio):
                    with tempfile.NamedTemporaryFile() as temp:
                        full_audio[word.time[0]:word.time[1]].export(
                            temp.name, format="wav")
                        audio_slice = temp.read()
                        if max_sim:
                            hash = hashlib.sha224(audio_slice).hexdigest()
                            if not hash in hashes:
                                hashes.append(hash)
                                create_entity(
                                    sp_lexical_entry_client_id,
                                    sp_lexical_entry_object_id,
                                    field_ids["Sounds of Paradigmatic forms"]
                                    [0],
                                    field_ids["Sounds of Paradigmatic forms"]
                                    [1],
                                    None,
                                    client,
                                    filename="%s.wav" % (word.index),
                                    folder_name="sound1",
                                    content=base64.urlsafe_b64encode(
                                        audio_slice).decode(),
                                    storage=storage)
                        else:
                            create_entity(
                                sp_lexical_entry_client_id,
                                sp_lexical_entry_object_id,
                                field_ids["Sounds of Paradigmatic forms"][0],
                                field_ids["Sounds of Paradigmatic forms"][1],
                                None,
                                client,
                                filename="%s.wav" % (word.index),
                                folder_name="sound1",
                                content=base64.urlsafe_b64encode(
                                    audio_slice).decode(),
                                storage=storage)

                        temp.flush()
            p_match_dict.clear()
            paradigm_words[:] = []
            for word in curr_dict:
                column = [word] + curr_dict[word]
                match_dict = defaultdict(list)
                for crt in tuple(i for i in column):
                    match = [x for x in lexes if x[2].content == crt.text]
                    for t in match:
                        if field_ids[EAF_TIERS[crt.tier]] == (
                                t[2].field.client_id, t[2].field.object_id):
                            match_dict[t[1]].append(t)
                match_dict = {
                    k: v
                    for k, v in match_dict.items() if len(v) >= 2
                }
                max_sim = None
                for le in match_dict:
                    if max_sim is None:
                        max_sim = le
                    else:
                        if len(match_dict[le]) >= len(match_dict[max_sim]):
                            max_sim = le
                if max_sim:
                    fp_lexical_entry_client_id = max_sim.client_id
                    fp_lexical_entry_object_id = max_sim.object_id
                else:
                    lexentr = LexicalEntry(
                        client_id=client.id,
                        parent_object_id=first_perspective_object_id,
                        parent=first_perspective)
                    DBSession.add(lexentr)
                    fp_lexical_entry_client_id = lexentr.client_id
                    fp_lexical_entry_object_id = lexentr.object_id
                for other_word in column:
                    if max_sim:
                        text_and_field = (
                            other_word.text,
                            field_ids[EAF_TIERS[other_word.tier]])
                        sim = [(x[2].content, (x[2].field.client_id,
                                               x[2].field.object_id))
                               for x in match_dict[max_sim]]
                        if text_and_field not in sim:
                            create_entity(
                                fp_lexical_entry_client_id,
                                fp_lexical_entry_object_id,
                                field_ids[EAF_TIERS[other_word.tier]][0],
                                field_ids[EAF_TIERS[other_word.tier]][1],
                                None,
                                client,
                                other_word.text,
                                filename=None,
                                storage=storage)
                    else:
                        create_entity(fp_lexical_entry_client_id,
                                      fp_lexical_entry_object_id,
                                      field_ids[EAF_TIERS[other_word.tier]][0],
                                      field_ids[EAF_TIERS[other_word.tier]][1],
                                      None,
                                      client,
                                      other_word.text,
                                      filename=None,
                                      storage=storage)
                if not no_sound:
                    if word.time[1] < len(full_audio):
                        with tempfile.NamedTemporaryFile() as temp:
                            full_audio[word.time[0]:word.time[1]].export(
                                temp.name, format="wav")
                            audio_slice = temp.read()
                            hash = hashlib.sha224(audio_slice).hexdigest()
                            if max_sim:
                                if not hash in hashes:
                                    hashes.append(hash)
                                    create_entity(
                                        fp_lexical_entry_client_id,
                                        fp_lexical_entry_object_id,
                                        field_ids["Sound"][0],
                                        field_ids["Sound"][1],
                                        None,
                                        client,
                                        filename="%s.wav" % (word.index),
                                        folder_name="sound1",
                                        content=base64.urlsafe_b64encode(
                                            audio_slice).decode(),
                                        storage=storage)
                            else:
                                create_entity(fp_lexical_entry_client_id,
                                              fp_lexical_entry_object_id,
                                              field_ids["Sound"][0],
                                              field_ids["Sound"][1],
                                              None,
                                              client,
                                              filename="%s.wav" % (word.index),
                                              folder_name="sound1",
                                              content=base64.urlsafe_b64encode(
                                                  audio_slice).decode(),
                                              storage=storage)
                            temp.flush()
                fp_le_ids = (fp_lexical_entry_client_id,
                             fp_lexical_entry_object_id)
                sp_le_ids = (sp_lexical_entry_client_id,
                             sp_lexical_entry_object_id)
                dubl_tuple = (sp_le_ids, fp_le_ids)
                if not dubl_tuple in dubl:
                    dubl.append(dubl_tuple)
                    if max_sim:
                        if not (sp_le_ids, fp_le_ids) in links:
                            create_entity(
                                sp_lexical_entry_client_id,
                                sp_lexical_entry_object_id,
                                field_ids["Backref"][0],
                                field_ids["Backref"][1],
                                None,
                                client,
                                filename=None,
                                link_client_id=fp_lexical_entry_client_id,
                                link_object_id=fp_lexical_entry_object_id,
                                storage=storage)
                        if not (fp_le_ids, sp_le_ids) in links:
                            create_entity(
                                fp_lexical_entry_client_id,
                                fp_lexical_entry_object_id,
                                field_ids["Backref"][0],
                                field_ids["Backref"][1],
                                None,
                                client,
                                filename=None,
                                link_client_id=sp_lexical_entry_client_id,
                                link_object_id=sp_lexical_entry_object_id,
                                storage=storage)
                    else:
                        create_entity(
                            sp_lexical_entry_client_id,
                            sp_lexical_entry_object_id,
                            field_ids["Backref"][0],
                            field_ids["Backref"][1],
                            None,
                            client,
                            filename=None,
                            link_client_id=fp_lexical_entry_client_id,
                            link_object_id=fp_lexical_entry_object_id,
                            storage=storage)
                        create_entity(
                            fp_lexical_entry_client_id,
                            fp_lexical_entry_object_id,
                            field_ids["Backref"][0],
                            field_ids["Backref"][1],
                            None,
                            client,
                            filename=None,
                            link_client_id=sp_lexical_entry_client_id,
                            link_object_id=sp_lexical_entry_object_id,
                            storage=storage)
                column[:] = []
                match_dict.clear()
    return
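
The entry-matching heuristic used twice above (once for paradigm entries, once for word entries) can be isolated as a small pure-Python step: keep only candidate lexical entries with at least two matching entities, then pick the one with the most matches. A self-contained sketch with made-up data.

# Self-contained sketch of the candidate selection heuristic (made-up data).
match_dict = {
    'lex_a': ['word', 'translation'],
    'lex_b': ['word', 'translation', 'transcription'],
    'lex_c': ['word'],
}
# Keep only entries with at least two matches, then take the largest
# (ties resolved in favour of the later entry, as in the code above).
match_dict = {k: v for k, v in match_dict.items() if len(v) >= 2}
max_sim = None
for le in match_dict:
    if max_sim is None or len(match_dict[le]) >= len(match_dict[max_sim]):
        max_sim = le
assert max_sim == 'lex_b'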
Example No. 40
def create_l2_entity(request):
    try:

        variables = {'auth': authenticated_userid(request)}
        response = dict()
        parent_client_id = request.matchdict.get('level_one_client_id')
        parent_object_id = request.matchdict.get('level_one_object_id')
        req = request.json_body
        client = DBSession.query(Client).filter_by(id=variables['auth']).first()
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.")
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException("This client id is orphaned. Try to logout and then login once more.")

        # parent = DBSession.query(LevelOneEntity).filter_by(client_id=parent_client_id, object_id=parent_object_id).first()
        # With the LevelOneEntity lookup above commented out, `parent` stays None
        # and the handler always returns 404 here, so the entity-creation code
        # below is effectively unreachable until those models are restored.
        parent = None
        if not parent:
            request.response.status = HTTPNotFound.code
            return {'error': str("No such level one entity in the system")}
        additional_metadata = req.get('additional_metadata')
        # entity = LevelTwoEntity(client_id=client.id, object_id=DBSession.query(LevelTwoEntity).filter_by(client_id=client.id).count() + 1, entity_type=req['entity_type'],
        #                         locale_id=req['locale_id'], additional_metadata=additional_metadata,
        #                         parent=parent)
        entity = None

        DBSession.add(entity)
        DBSession.flush()
        data_type = req.get('data_type')
        filename = req.get('filename')
        real_location = None
        url = None
        if data_type == 'image' or data_type == 'sound' or data_type == 'markup':
            real_location, url = create_object(request, req['content'], entity, data_type, filename)

        if url and real_location:
            entity.content = url
            old_meta = entity.additional_metadata

            need_hash = True
            if old_meta:
                new_meta = json.loads(old_meta)
                if new_meta.get('hash'):
                    need_hash = False
            if need_hash:
                hash = hashlib.sha224(base64.urlsafe_b64decode(req['content'])).hexdigest()
                hash_dict = {'hash': hash}
                if old_meta:
                    new_meta = json.loads(old_meta)
                    new_meta.update(hash_dict)
                else:
                    new_meta = hash_dict
                entity.additional_metadata = json.dumps(new_meta)
        else:
            entity.content = req['content']
        DBSession.add(entity)
        request.response.status = HTTPOk.code
        response['client_id'] = entity.client_id
        response['object_id'] = entity.object_id
        return response
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
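
Outside of the (currently disabled) entity creation, the content-hash bookkeeping is the reusable part of this handler: a sha224 digest of the decoded payload is stored in the JSON-encoded additional metadata unless one is already present. A self-contained sketch with a placeholder payload.

# Self-contained sketch of the hash bookkeeping (placeholder payload).
import base64
import hashlib
import json

content_b64 = base64.urlsafe_b64encode(b'example payload').decode()
old_meta = None  # or a JSON string that may already carry a 'hash'
new_meta = json.loads(old_meta) if old_meta else {}
if 'hash' not in new_meta:
    new_meta['hash'] = hashlib.sha224(
        base64.urlsafe_b64decode(content_b64)).hexdigest()
additional_metadata = json.dumps(new_meta)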
Example No. 41
def merge_perspectives_api(request):  # TODO: test
    try:
        req = request.json_body
        variables = {'auth': request.authenticated_userid}
        client = DBSession.query(Client).filter_by(id=variables['auth']).first()
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                           variables['auth'])
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException("This client id is orphaned. Try to logout and then login once more.")
        client_id = req.get('client_id')
        object_id = req.get('object_id')
        dictionary_client_id = req['dictionary_client_id']
        dictionary_object_id = req['dictionary_object_id']
        translation_string = req['translation_string']
        translation = translation_string
        if 'translation' in req:
            translation = req['translation']

        persps = req['perspectives']
        if len(persps) != 2:
            raise KeyError("Wrong number of perspectives to merge.",
                           len(persps))
        for persp in persps:
            perspe = DBSession.query(DictionaryPerspective).filter_by(client_id=persp['client_id'],
                                                                       object_id=persp['object_id']).first()
            if not perspe:
                raise KeyError("Perspective does not exist in the system")
            if dictionary_client_id != perspe.parent_client_id or dictionary_object_id != perspe.parent_object_id:
                raise KeyError("Both perspectives should be from the same dictionary.")
        base = DBSession.query(BaseGroup).filter_by(subject='merge', action='create').first()
        override = DBSession.query(Group).filter_by(base_group_id=base.id, subject_override=True).first()
        if user not in override.users:
            group = DBSession.query(Group).filter_by(base_group_id=base.id,
                                                  subject_client_id=dictionary_client_id,
                                                  subject_object_id=dictionary_object_id).first()
            if user not in group.users:
                raise KeyError("Not enough permission to do that")
            if client_id and object_id:
                gr = DBSession.query(Group).filter_by(base_group_id=base.id,
                                                      subject_client_id=client_id,
                                                      subject_object_id=object_id).first()
                if user not in gr.users:
                    raise KeyError("Not enough permission to do that")

        if not client_id and not object_id:
            subreq = Request.blank('/dictionary/%s/%s/perspective' % (dictionary_client_id, dictionary_object_id))
            subreq.method = 'POST'
            subreq.json = {'translation_string': translation_string, 'translation': translation}
            headers = {'Cookie':request.headers['Cookie']}
            subreq.headers = headers
            response = request.invoke_subrequest(subreq)
            client_id = response.json['client_id']
            object_id = response.json['object_id']
        new_persp = DBSession.query(DictionaryPerspective).filter_by(client_id=client_id, object_id=object_id).first()
        fields = []
        for persp in persps:
            for entry in persp['fields']:
                field = dict(entry)
                new_type = field.pop('new_type_name', None)
                if new_type:
                    field['entity_type'] = new_type
                    field['entity_type_translation'] = new_type
                if not field in fields:
                    entity_type_translation = field['entity_type_translation']
                    add_need = True
                    for fi in fields:
                        if fi['entity_type_translation'] == entity_type_translation:
                            add_need = False
                            break
                    if add_need:
                        fields.append(field)
        subreq = Request.blank('/dictionary/%s/%s/perspective/%s/%s/fields' %
                               (dictionary_client_id,
                                dictionary_object_id,
                                client_id,
                                object_id))
        subreq.method = 'POST'
        subreq.json = {'fields': fields}
        headers = {'Cookie':request.headers['Cookie']}
        subreq.headers = headers
        response = request.invoke_subrequest(subreq)
        for persp in persps:

            obj_id = persp['object_id']
            cli_id = persp['client_id']
            if (cli_id == client_id) and (obj_id == object_id):
                continue
            parent = DBSession.query(DictionaryPerspective).filter_by(client_id=cli_id, object_id=obj_id).first()
            lexes = DBSession.query(LexicalEntry).filter_by(parent_client_id=cli_id, parent_object_id=obj_id).all()

            for lex in lexes:
                metadata = dict()
                if lex.additional_metadata:
                    metadata = lex.additional_metadata
                metadata['came_from'] = {'client_id': lex.parent_client_id, 'object_id': lex.parent_object_id}
                lex.additional_metadata = metadata
                lex.parent = new_persp
                DBSession.flush()
                for ent in lex.leveloneentity:
                    for field in persp['fields']:
                        if ent.entity_type == field['entity_type']:
                            if 'new_type_name' in field:
                                ent.entity_type = field['new_type_name']
            bases = DBSession.query(BaseGroup).filter_by(perspective_default=True)
            groups = []
            for base in bases:

                group = DBSession.query(Group).filter_by(base_group_id=base.id,
                                                         subject_object_id=obj_id,
                                                         subject_client_id=cli_id).first()
                if group:
                    groups += [group]

            for group in groups:
                base = group.parent
                existing = DBSession.query(Group).filter_by(
                    parent=base,
                    subject_object_id=object_id,
                    subject_client_id=client_id).first()
                if existing:
                    users = []
                    for user in group.users:
                        users += [user]
                    for user in users:
                        if user in group.users:
                            group.users.remove(user)
                        if not user in existing.users:
                            existing.users.append(user)
                else:
                    new_group = Group(base_group_id=group.base_group_id,
                                      subject_object_id=object_id,
                                      subject_client_id=client_id)
                    DBSession.add(new_group)
                    users = []
                    for user in group.users:
                        users += [user]
                    for user in users:
                        if user in group.users:
                            group.users.remove(user)
                        if not user in new_group.users:
                            new_group.users.append(user)
                group.marked_for_deletion = True
            parent.marked_for_deletion = True
        new_persp.marked_for_deletion = False  # TODO: check where it is deleted
        request.response.status = HTTPOk.code
        return {'object_id': object_id,
                'client_id': client_id}
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
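
A hedged example of the JSON body merge_perspectives_api reads; the ids and field descriptions are placeholders, the target perspective's 'client_id'/'object_id' are optional (a new perspective is created via a subrequest when they are omitted), and the field keys follow the ones used above.

# Hypothetical request body (placeholder ids and field descriptions).
example_request_body = {
    'dictionary_client_id': 1,
    'dictionary_object_id': 2,
    'translation_string': 'Merged perspective',
    # 'client_id' / 'object_id' of an existing target perspective may also be given.
    'perspectives': [
        {'client_id': 1, 'object_id': 10,
         'fields': [{'entity_type': 'Word',
                     'entity_type_translation': 'Word'}]},
        {'client_id': 1, 'object_id': 11,
         'fields': [{'entity_type': 'Word',
                     'entity_type_translation': 'Word',
                     'new_type_name': 'Word (merged)'}]},
    ],
}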
Example No. 42
def convert_five_tiers(
                dictionary_client_id,
                dictionary_object_id,
                user_id,
                origin_client_id,
                origin_object_id,
                sqlalchemy_url,
                storage,
                eaf_url,
                sound_url=None
                ):

    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)

    no_sound = True
    if sound_url:
        no_sound = False
    with warnings.catch_warnings():
        warnings.filterwarnings('error')
        try:
            from pydub import AudioSegment
        except Warning as e:
            no_sound = True
    if not no_sound:
        with tempfile.NamedTemporaryFile() as temp:
            try:
                sound_file = request.urlopen(sound_url)
            except HTTPError as e:
                return {'error': str(e.read().decode("utf8", 'ignore'))}
            with open(temp.name, 'wb') as output:
                output.write(sound_file.read())
            full_audio = AudioSegment.from_wav(temp.name)
            temp.flush()

    field_ids = {}
    with transaction.manager:
        client = DBSession.query(Client).filter_by(id=user_id).first()

        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                           user_id)
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        all_fieldnames = ("Markup",
                          "Paradigm Markup",
                          "Word",
                          "Transcription",
                          "Translation",
                          "Sound",
                          "Etymology",
                          "Backref",
                          "Word of Paradigmatic forms",
                          "Transcription of Paradigmatic forms",
                          "Translation of Paradigmatic forms",
                          "Sounds of Paradigmatic forms"
                         )
        for name in all_fieldnames:
            data_type_query = DBSession.query(Field) \
                .join(TranslationGist,
                      and_(Field.translation_gist_object_id == TranslationGist.object_id,
                           Field.translation_gist_client_id == TranslationGist.client_id))\
                .join(TranslationGist.translationatom)
            field = data_type_query.filter(TranslationAtom.locale_id == 2,
                                                 TranslationAtom.content == name).one()
            field_ids[name] = (field.client_id, field.object_id)
        fp_fields = ("Word", "Transcription", "Translation", "Sound", "Markup", "Etymology", "Backref")
        sp_fields = ("Word of Paradigmatic forms",
                     "Transcription of Paradigmatic forms",
                     "Translation of Paradigmatic forms",
                     "Sounds of Paradigmatic forms",
                     "Paradigm Markup",
                     "Backref")
        fp_structure = set([field_ids[x] for x in fp_fields])
        sp_structure = set([field_ids[x] for x in sp_fields])
        DBSession.flush()
        resp = translation_service_search("WiP")
        state_translation_gist_object_id, state_translation_gist_client_id = resp['object_id'], resp['client_id']
        for base in DBSession.query(BaseGroup).filter_by(dictionary_default=True):
            new_group = Group(parent=base,
                              subject_object_id=dictionary_object_id, subject_client_id=dictionary_client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            DBSession.add(new_group)
            DBSession.flush()


        origin_metadata = {"origin_client_id": origin_client_id,
                           "origin_object_id": origin_object_id}

        parent = DBSession.query(Dictionary).filter_by(client_id=dictionary_client_id,
                                                       object_id=dictionary_object_id).first()
        if not parent:
            return {'error': str("No such dictionary in the system")}
        first_perspective = None
        second_perspective = None
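        # Reuse existing perspectives whose field structure matches the expected
        # lexical-entry (fp) or paradigm (sp) layout.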
        for perspective in DBSession.query(DictionaryPerspective).filter_by(parent=parent, marked_for_deletion=False):
            structure = set()
            fields = DBSession.query(DictionaryPerspectiveToField)\
                        .filter_by(parent=perspective)\
                        .all()
            DBSession.flush()
            for p_to_field in fields:
                structure.add((p_to_field.field_client_id, p_to_field.field_object_id))

            if structure == fp_structure:
                first_perspective = perspective
            elif structure == sp_structure:
                second_perspective = perspective
            structure.clear()
        lexes = []
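        # Preload existing entities of both perspectives so that sounds (by hash)
        # and backref links that are already present are not created twice.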
        if first_perspective:
            lexes = DBSession.query(DictionaryPerspective, LexicalEntry, Entity)\
                .filter(and_(DictionaryPerspective.object_id==first_perspective.object_id,
                        DictionaryPerspective.client_id==first_perspective.client_id))\
                .join(LexicalEntry, and_( LexicalEntry.parent_object_id==DictionaryPerspective.object_id,
                                          LexicalEntry.parent_client_id==DictionaryPerspective.client_id))\
                .join(Entity, and_(LexicalEntry.object_id==Entity.parent_object_id,
                                   LexicalEntry.client_id==Entity.parent_client_id))
        p_lexes = []
        if second_perspective:
            p_lexes = DBSession.query(DictionaryPerspective, LexicalEntry, Entity)\
                .filter(and_(DictionaryPerspective.object_id==second_perspective.object_id,
                        DictionaryPerspective.client_id==second_perspective.client_id))\
                .join(LexicalEntry, and_( LexicalEntry.parent_object_id==DictionaryPerspective.object_id,
                                          LexicalEntry.parent_client_id==DictionaryPerspective.client_id))\
                .join(Entity, and_(LexicalEntry.object_id==Entity.parent_object_id,
                                   LexicalEntry.client_id==Entity.parent_client_id))

        hashes = [x[2].additional_metadata["hash"] for x in lexes if x[2].field.data_type == "Sound"]
        hashes += [x[2].additional_metadata["hash"] for x in p_lexes if x[2].field.data_type == "Sound"]
        links = [((x[2].link.client_id, x[2].link.object_id), (x[1].client_id, x[1].object_id))
                 for x in lexes if x[2].field.data_type == "Link"]
        links += [((x[2].link.client_id, x[2].link.object_id), (x[1].client_id, x[1].object_id))
                  for x in p_lexes if x[2].field.data_type == "Link"]
        resp = translation_service_search("WiP")
        state_translation_gist_object_id, state_translation_gist_client_id = resp['object_id'], resp['client_id']
        for base in DBSession.query(BaseGroup).filter_by(dictionary_default=True):
            new_group = Group(parent=base,
                              subject_object_id=dictionary_object_id, subject_client_id=dictionary_client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            DBSession.add(new_group)
            DBSession.flush()

        """
        # FIRST PERSPECTIVE
        """
        if first_perspective is None:
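            # No structurally matching perspective was found, so create the
            # "Lexical Entries" perspective from scratch.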
            resp = translation_service_search_all("Lexical Entries")
            persp_translation_gist_client_id, persp_translation_gist_object_id = resp['client_id'], resp['object_id']
            first_perspective = DictionaryPerspective(client_id=client.id, ###
                                                state_translation_gist_object_id=state_translation_gist_object_id,
                                                state_translation_gist_client_id=state_translation_gist_client_id,
                                                parent=parent,
                                                # import_source=req.get('import_source'),
                                                # import_hash=req.get('import_hash'),
                                                additional_metadata=origin_metadata,
                                                translation_gist_client_id=persp_translation_gist_client_id,
                                                translation_gist_object_id=persp_translation_gist_object_id
                                                )

            first_perspective.additional_metadata = origin_metadata
            DBSession.add(first_perspective)
        owner_client = DBSession.query(Client).filter_by(id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=first_perspective.object_id,
                              subject_client_id=first_perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
            DBSession.flush()
        first_perspective_client_id = first_perspective.client_id
        first_perspective_object_id = first_perspective.object_id
        """
        # SECOND PERSPECTIVE
        """
        resp = translation_service_search_all("Paradigms")
        persp_translation_gist_client_id, persp_translation_gist_object_id = resp['client_id'], resp['object_id']
        if second_perspective is None:
            second_perspective = DictionaryPerspective(client_id=client.id, ### variables['auth']
                                                state_translation_gist_object_id=state_translation_gist_object_id,
                                                state_translation_gist_client_id=state_translation_gist_client_id,
                                                parent=parent,
                                                # import_source=req.get('import_source'),
                                                # import_hash=req.get('import_hash'),
                                                additional_metadata=origin_metadata,
                                                translation_gist_client_id=persp_translation_gist_client_id,
                                                translation_gist_object_id=persp_translation_gist_object_id
                                                )
            second_perspective.additional_metadata = origin_metadata
            # if is_template is not None:
            #     perspective.is_template = is_template
            DBSession.add(second_perspective)
        owner_client = DBSession.query(Client).filter_by(id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=second_perspective.object_id,
                              subject_client_id=second_perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
        second_perspective_client_id = second_perspective.client_id
        second_perspective_object_id = second_perspective.object_id

        fp_fields_dict = {}
        """
        # FIRST PERSPECTIVE FIELDS CREATION
        """
        fp_field_names = ("Word", "Transcription", "Translation", "Sound", "Etymology", "Backref")
        fields_list = []
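        # "Backref" links to the paradigm perspective and "Sound" nests the
        # "Markup" field; the remaining fields are plain.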
        for fieldname in fp_field_names: #

            if fieldname == "Backref":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link":{
                        "client_id": second_perspective_client_id,
                        "object_id": second_perspective_object_id
                    }
                    }
                )

            elif fieldname == "Sound":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "contains":[{
                       "client_id": field_ids["Markup"][0],
                       "object_id": field_ids["Markup"][1]
                    }
                    ]
                    }
                )
            else:
                fields_list.append({"client_id": field_ids[fieldname][0], "object_id": field_ids[fieldname][1]})
            fp_fields_dict[fieldname] = (field_ids[fieldname][0], field_ids[fieldname][1])
        fp_fields_dict["Markup"] = (field_ids["Markup"][0], field_ids["Markup"][1])
        update_perspective_fields(fields_list, first_perspective_client_id, first_perspective_object_id, client)
        """
        # Creating fields of the second perspective
        """
        sp_field_names = ("Word of Paradigmatic forms",
                          "Transcription of Paradigmatic forms",
                          "Translation of Paradigmatic forms",
                          "Sounds of Paradigmatic forms",
                          "Backref")
        sp_fields_dict = {}
        fields_list = []
        for fieldname in sp_field_names:
            if fieldname == "Backref":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link":{
                        "client_id": first_perspective_client_id,
                        "object_id": first_perspective_object_id
                    }
                    }
                )
            elif fieldname == "Sounds of Paradigmatic forms":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "contains":[{
                       "client_id": field_ids["Paradigm Markup"][0],
                       "object_id": field_ids["Paradigm Markup"][1]
                    }
                    ]
                    }
                )
            else:
                fields_list.append({"client_id": field_ids[fieldname][0], "object_id": field_ids[fieldname][1]})
            sp_fields_dict[fieldname] = (field_ids[fieldname][0], field_ids[fieldname][1])
        sp_fields_dict["Paradigm Markup"] = (field_ids["Paradigm Markup"][0], field_ids["Paradigm Markup"][1])
        update_perspective_fields(fields_list, second_perspective_client_id, second_perspective_object_id, client)
        dubl = []
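        # Download and parse the ELAN file; final_dicts (as produced by elan_parser)
        # holds one item per phrase, mixing a word-to-tiers dict with lists of tier words.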
        try:
            eaffile = request.urlopen(eaf_url)
        except HTTPError as e:
            return {'error': str(e.read().decode("utf8", 'ignore'))}
        with tempfile.NamedTemporaryFile() as temp:
            temp.write(eaffile.read())
            temp.flush()  # flush before the parser reads the file by name
            converter = elan_parser.Elan(temp.name)
            converter.parse()
            final_dicts = converter.proc()
        for phrase in final_dicts:
            curr_dict = None
            paradigm_words = []
            for word_translation in phrase:
                if type(word_translation) is not list:
                    curr_dict = word_translation
                    mt_words = [word_translation[i][1].text for i in word_translation
                                if len(word_translation[i]) > 1 and type(word_translation[i][1].text) is str]
                    main_tier_text = " ".join(mt_words)
                    if main_tier_text:
                        paradigm_words.append(elan_parser.Word(text=main_tier_text,
                                                               tier="Word of Paradigmatic forms",
                                                               time=word.time)
                                              )
                else:
                    word = word_translation[0]
                    tier_name = word.tier
                    new = " ".join([i.text for i in word_translation])
                    paradigm_words.append(elan_parser.Word(text=new, tier=tier_name, time=word.time))
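            # Attach the paradigm words to an existing lexical entry if possible:
            # pick the entry sharing the largest number of (text, field) pairs,
            # requiring at least two matches; otherwise create a new entry.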
            p_match_dict = defaultdict(list)
            for pword in paradigm_words:
                match = [x for x in p_lexes if x[2].content == pword.text]  # LEX COUNT OR RANDOM
                for t in match:
                    if field_ids[EAF_TIERS[pword.tier]] == (t[2].field.client_id, t[2].field.object_id):
                        p_match_dict[t[1]].append(t)
            p_match_dict = { k: v for k, v in p_match_dict.items() if len(v) >= 2 }
            max_sim = None
            for le in p_match_dict:
                if max_sim is None:
                    max_sim = le
                else:
                    if len(p_match_dict[le]) >= len(p_match_dict[max_sim]):
                        max_sim = le
            if max_sim:
                sp_lexical_entry_client_id = max_sim.client_id
                sp_lexical_entry_object_id = max_sim.object_id
            else:
                lexentr = LexicalEntry(client_id=client.id,
                                       parent_object_id=second_perspective_object_id,
                                       parent=second_perspective)
                DBSession.add(lexentr)
                sp_lexical_entry_client_id = lexentr.client_id
                sp_lexical_entry_object_id = lexentr.object_id

            for other_word in paradigm_words:
                if max_sim:
                    text_and_field = (other_word.text, field_ids[EAF_TIERS[other_word.tier]])
                    sim = [(x[2].content, (x[2].field.client_id, x[2].field.object_id)) for x in p_match_dict[max_sim]]
                    if text_and_field not in sim:
                        create_entity(sp_lexical_entry_client_id,
                                      sp_lexical_entry_object_id,
                                      field_ids[EAF_TIERS[other_word.tier]][0],
                                      field_ids[EAF_TIERS[other_word.tier]][1],
                                      None,
                                      client,
                                      other_word.text,
                                      filename=None,
                                      storage=storage)
                else:
                    create_entity(sp_lexical_entry_client_id, sp_lexical_entry_object_id, field_ids[EAF_TIERS[other_word.tier]][0], field_ids[EAF_TIERS[other_word.tier]][1],
                        None, client, other_word.text, filename=None, storage=storage)
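            # Attach the audio slice for this span, skipping slices whose hash is
            # already present when an existing entry was matched.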
            if not no_sound:
                if word.time[1] < len(full_audio):
                    with tempfile.NamedTemporaryFile() as temp:
                        full_audio[word.time[0]:word.time[1]].export(temp.name, format="wav")
                        audio_slice = temp.read()
                        if max_sim:
                            hash = hashlib.sha224(audio_slice).hexdigest()
                            if hash not in hashes:
                                hashes.append(hash)
                                create_entity(sp_lexical_entry_client_id,
                                              sp_lexical_entry_object_id,
                                              field_ids["Sounds of Paradigmatic forms"][0],
                                              field_ids["Sounds of Paradigmatic forms"][1],
                                              None,
                                              client,
                                              filename="%s.wav" % word.index,
                                              folder_name="sound1",
                                              content=base64.urlsafe_b64encode(audio_slice).decode(),
                                              storage=storage)
                        else:
                            create_entity(sp_lexical_entry_client_id,
                                          sp_lexical_entry_object_id,
                                          field_ids["Sounds of Paradigmatic forms"][0],
                                          field_ids["Sounds of Paradigmatic forms"][1],
                                          None,
                                          client,
                                          filename="%s.wav" % word.index,
                                          folder_name="sound1",
                                          content=base64.urlsafe_b64encode(audio_slice).decode(),
                                          storage=storage)

                        temp.flush()
            p_match_dict.clear()
            paradigm_words[:] = []
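            # Process each word column of the phrase against the first (lexical
            # entries) perspective, using the same best-match logic as above.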
            for word in curr_dict:
                column = [word] + curr_dict[word]
                match_dict = defaultdict(list)
                for crt in tuple(i for i in column):
                    match = [x for x in lexes if x[2].content == crt.text]
                    for t in match:
                        if field_ids[EAF_TIERS[crt.tier]] == (t[2].field.client_id, t[2].field.object_id):
                            match_dict[t[1]].append(t)
                match_dict = { k: v for k, v in match_dict.items() if len(v) >= 2 }
                max_sim = None
                for le in match_dict:
                    if max_sim is None:
                        max_sim = le
                    else:
                        if len(match_dict[le]) >= len(match_dict[max_sim]):
                            max_sim = le
                if max_sim:
                    fp_lexical_entry_client_id = max_sim.client_id
                    fp_lexical_entry_object_id = max_sim.object_id
                else:
                    lexentr = LexicalEntry(client_id=client.id,
                                           parent_object_id=first_perspective_object_id, parent=first_perspective)
                    DBSession.add(lexentr)
                    fp_lexical_entry_client_id = lexentr.client_id
                    fp_lexical_entry_object_id = lexentr.object_id
                for other_word in column:
                    if max_sim:
                        text_and_field = (other_word.text, field_ids[EAF_TIERS[other_word.tier]])
                        sim = [(x[2].content, (x[2].field.client_id, x[2].field.object_id)) for x in match_dict[max_sim]]
                        if text_and_field not in sim:
                            create_entity(fp_lexical_entry_client_id,
                                          fp_lexical_entry_object_id,
                                          field_ids[EAF_TIERS[other_word.tier]][0],
                                          field_ids[EAF_TIERS[other_word.tier]][1],
                                          None,
                                          client,
                                          other_word.text,
                                          filename=None,
                                          storage=storage)
                    else:
                        create_entity(fp_lexical_entry_client_id,
                                      fp_lexical_entry_object_id,
                                      field_ids[EAF_TIERS[other_word.tier]][0],
                                      field_ids[EAF_TIERS[other_word.tier]][1],
                                      None,
                                      client,
                                      other_word.text,
                                      filename=None,
                                      storage=storage)
                if not no_sound:
                    if word.time[1] < len(full_audio):
                        with tempfile.NamedTemporaryFile() as temp:
                            full_audio[word.time[0]:word.time[1]].export(temp.name, format="wav")
                            audio_slice = temp.read()
                            hash = hashlib.sha224(audio_slice).hexdigest()
                            if max_sim:
                                if hash not in hashes:
                                    hashes.append(hash)
                                    create_entity(fp_lexical_entry_client_id,
                                                  fp_lexical_entry_object_id,
                                                  field_ids["Sound"][0],
                                                  field_ids["Sound"][1],
                                                  None,
                                                  client,
                                                  filename="%s.wav" % word.index,
                                                  folder_name="sound1",
                                                  content=base64.urlsafe_b64encode(audio_slice).decode(),
                                                  storage=storage)
                            else:
                                create_entity(fp_lexical_entry_client_id,
                                              fp_lexical_entry_object_id,
                                              field_ids["Sound"][0],
                                              field_ids["Sound"][1],
                                              None,
                                              client,
                                              filename="%s.wav" % word.index,
                                              folder_name="sound1",
                                              content=base64.urlsafe_b64encode(audio_slice).decode(),
                                              storage=storage)
                            temp.flush()
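                # Cross-link the paradigm entry and the lexical entry with mutual
                # "Backref" entities unless the link already exists.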
                fp_le_ids = (fp_lexical_entry_client_id, fp_lexical_entry_object_id)
                sp_le_ids = (sp_lexical_entry_client_id, sp_lexical_entry_object_id)
                dubl_tuple = (sp_le_ids, fp_le_ids)
                if dubl_tuple not in dubl:
                    dubl.append(dubl_tuple)
                    if max_sim:
                        if (sp_le_ids, fp_le_ids) not in links:
                            create_entity(sp_lexical_entry_client_id,
                                          sp_lexical_entry_object_id,
                                          field_ids["Backref"][0],
                                          field_ids["Backref"][1],
                                          None,
                                          client,
                                          filename=None,
                                          link_client_id=fp_lexical_entry_client_id,
                                          link_object_id=fp_lexical_entry_object_id,
                                          storage=storage)
                        if (fp_le_ids, sp_le_ids) not in links:
                            create_entity(fp_lexical_entry_client_id,
                                          fp_lexical_entry_object_id,
                                          field_ids["Backref"][0],
                                          field_ids["Backref"][1],
                                          None,
                                          client,
                                          filename=None,
                                          link_client_id=sp_lexical_entry_client_id,
                                          link_object_id=sp_lexical_entry_object_id,
                                          storage=storage)
                    else:
                        create_entity(sp_lexical_entry_client_id,
                                      sp_lexical_entry_object_id,
                                      field_ids["Backref"][0],
                                      field_ids["Backref"][1],
                                      None,
                                      client,
                                      filename=None,
                                      link_client_id=fp_lexical_entry_client_id,
                                      link_object_id=fp_lexical_entry_object_id,
                                      storage=storage)
                        create_entity(fp_lexical_entry_client_id,
                                      fp_lexical_entry_object_id,
                                      field_ids["Backref"][0],
                                      field_ids["Backref"][1],
                                      None,
                                      client,
                                      filename=None,
                                      link_client_id=sp_lexical_entry_client_id,
                                      link_object_id=sp_lexical_entry_object_id,
                                      storage=storage)
                column[:] = []
                match_dict.clear()
    return
Exemplo n.º 43
0
def setUp(self):
    self.config = testing.setUp()
    import webtest
    from pyramid import paster
    from sqlalchemy import create_engine
    engine = create_engine('sqlite://')
    myapp = paster.get_app('testing.ini')
    self.app = webtest.TestApp(myapp)
    from lingvodoc.models import (
        Base,
        Dictionary,
        Language,
        Organization,
        Locale,
        User,
        Passhash,
        Client,
        DictionaryPerspective,
        UserEntitiesTranslationString
        )
    DBSession.configure(bind=engine)
    Base.metadata.create_all(engine)
    with transaction.manager:
        ru_locale = Locale(id=1, shortcut="ru", intl_name="Русский")
        DBSession.add(ru_locale)
        en_locale = Locale(id=2, shortcut="en", intl_name="English")
        DBSession.add(en_locale)
        DBSession.flush()
        new_user = User(id=1, login='******', default_locale_id=1)
        new_pass = Passhash(password='******')
        DBSession.add(new_pass)
        new_user.password = new_pass
        DBSession.add(new_user)
        new_client = Client(id=1, user=new_user)
        DBSession.add(new_client)
        new_user2 = User(id=2, login='******', default_locale_id=1)
        new_pass2 = Passhash(password='******')
        DBSession.add(new_pass2)
        new_user2.password = new_pass2
        DBSession.add(new_user2)
        new_client = Client(id=2, user=new_user2)
        DBSession.add(new_client)
        new_client = Client(id=3, user=new_user)
        DBSession.add(new_client)
        DBSession.flush()
        new_user3 = User(id=3, login='******', default_locale_id=1)
        new_pass3 = Passhash(password='******')
        DBSession.add(new_pass3)
        new_user3.password = new_pass3
        DBSession.add(new_user3)
        new_client = Client(id=4, user=new_user3)
        DBSession.add(new_client)
        new_user4 = User(id=4, login='******', default_locale_id=1)
        new_pass4 = Passhash(password='******')
        DBSession.add(new_pass4)
        new_user4.password = new_pass4
        DBSession.add(new_user4)
        new_client = Client(id=5, user=new_user4)
        DBSession.add(new_client)
        new_lang1 = Language(client_id=1, object_id=1, translation_string='head')
        DBSession.add(new_lang1)
        new_lang2 = Language(client_id=2, object_id=5, translation_string='left son', parent=new_lang1)
        DBSession.add(new_lang2)
        new_lang3 = Language(client_id=1, object_id=3, translation_string='right son', parent=new_lang1)
        DBSession.add(new_lang3)
        new_lang4 = Language(client_id=2, object_id=4, translation_string='first grand son', parent=new_lang3)
        DBSession.add(new_lang4)
        new_lang5 = Language(client_id=1, object_id=5, translation_string='second grand son', parent=new_lang3)
        DBSession.add(new_lang5)
        new_lang6 = Language(client_id=1, object_id=6, translation_string='third grand son', parent=new_lang3)
        DBSession.add(new_lang6)
        new_lang7 = Language(client_id=1, object_id=7, translation_string='grand grand son', parent=new_lang5)
        DBSession.add(new_lang7)
        new_lang8 = Language(client_id=1, object_id=8, translation_string='second head')
        DBSession.add(new_lang8)
        new_lang9 = Language(client_id=1, object_id=9, translation_string='second left son', parent=new_lang8)
        DBSession.add(new_lang9)
        new_lang10 = Language(client_id=1, object_id=10, translation_string='second right son', parent=new_lang8)
        DBSession.add(new_lang10)
        new_org1 = Organization(name='first')
        new_org1.users.append(new_user)
        new_org1.users.append(new_user3)
        DBSession.add(new_org1)
        new_org2 = Organization(name='second')
        DBSession.add(new_org2)
        new_dict = Dictionary(client_id=1, object_id=1, name='dict')
        DBSession.add(new_dict)
        DBSession.flush()
        new_persp1 = DictionaryPerspective(client_id=1, object_id=1, name='persp', parent=new_dict)
        DBSession.add(new_persp1)
        new_persp2 = DictionaryPerspective(client_id=2, object_id=2, name='persp', parent=new_dict)
        DBSession.add(new_persp2)
        uets = UserEntitiesTranslationString(locale_id=1, translation_string='persp', translation='персп')
def convert_db_new(blob_client_id, blob_object_id, language_client_id, language_object_id,
                   user_id, gist_client_id, gist_object_id, storage, locale_id=2):
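    # Import a Lingvodoc 0.98 desktop dictionary (Dialeqt sqlite blob) into the
    # server database as a dictionary with two linked perspectives.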
    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)

    time.sleep(4)
    field_ids = {}
    with transaction.manager:
        blob = DBSession.query(UserBlobs).filter_by(client_id=blob_client_id, object_id=blob_object_id).first()
        # DBSession.flush()
        filename = blob.real_storage_path
        log.debug("user_id: %s" % user_id)
        log.debug("Starting convert_one")
        log.debug("Creating session")
        sqconn = sqlite3.connect(filename)
        log.debug("Connected to sqlite3 database")
        client = DBSession.query(Client).filter_by(id=user_id).first()
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                           user_id)
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            log.debug("ERROR")


        all_fieldnames = ("Markup",
                          "Paradigm Markup",
                          "Word",
                          "Transcription",
                          "Translation",
                          "Sound",
                          "Etymology",
                          "Backref",
                          "Word of Paradigmatic forms",
                          "Transcription of Paradigmatic forms",
                          "Translation of Paradigmatic forms",
                          "Sounds of Paradigmatic forms"
                         )
        for name in all_fieldnames:
            data_type_query = DBSession.query(Field) \
                .join(TranslationGist,
                      and_(Field.translation_gist_object_id == TranslationGist.object_id,
                           Field.translation_gist_client_id == TranslationGist.client_id))\
                .join(TranslationGist.translationatom)
            field = data_type_query.filter(TranslationAtom.locale_id == 2,
                                           TranslationAtom.content == name).one()  # TODO: a way to find these fields if we cannot use .one()
            field_ids[name] = (field.client_id, field.object_id)

        DBSession.flush()

        dict_attributes = get_dict_attributes(sqconn)
        """
        dict_attributes = get_dict_attributes(sqconn)
        translationgist = TranslationGist(client_id=user_id, type="Dictionary")
        DBSession.add(translationgist)
        DBSession.flush()
        gist_client_id = translationgist.client_id
        gist_object_id = translationgist.object_id
        """
        parent_client_id = gist_client_id
        parent_object_id = gist_object_id

        parent = DBSession.query(TranslationGist).filter_by(client_id=parent_client_id, object_id=parent_object_id).first()

        """
        translationatom = TranslationAtom(client_id=client.id,
                                          parent=parent,
                                          locale_id=locale_id,
                                          content=dict_attributes["dictionary_name"])
        DBSession.add(translationatom)
        DBSession.flush()
        atom_client_id = translationatom.client_id
        atom_object_id = translationatom.object_id

        log.debug(dict_attributes["dictionary_name"])
        language_client_id = atom_client_id
        language_object_id = atom_object_id
        """
        lang_parent = DBSession.query(Language).filter_by(client_id=language_client_id, object_id=language_object_id).first()

        resp = translation_service_search("WiP")
        state_translation_gist_object_id, state_translation_gist_client_id = resp['object_id'], resp['client_id']
        dictionary = Dictionary(client_id=user_id,
                                state_translation_gist_object_id=state_translation_gist_object_id,
                                state_translation_gist_client_id=state_translation_gist_client_id,
                                parent=lang_parent,
                                translation_gist_client_id=gist_client_id,
                                translation_gist_object_id=gist_object_id)
                                # additional_metadata=additional_metadata
        DBSession.add(dictionary)
        DBSession.flush()

        dictionary_client_id = dictionary.client_id
        dictionary_object_id = dictionary.object_id
        for base in DBSession.query(BaseGroup).filter_by(dictionary_default=True):
            new_group = Group(parent=base,
                              subject_object_id=dictionary.object_id, subject_client_id=dictionary.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            DBSession.add(new_group)
            DBSession.flush()
        """
        # FIRST PERSPECTIVE
        """
        resp = translation_service_search_all("Lexical Entries")
        persp_translation_gist_client_id, persp_translation_gist_object_id = resp['client_id'], resp['object_id']
        parent = DBSession.query(Dictionary).filter_by(client_id=dictionary_client_id, object_id=dictionary_object_id).first()
        perspective = DictionaryPerspective(client_id=client.id, ###
                                            state_translation_gist_object_id=state_translation_gist_object_id,
                                            state_translation_gist_client_id=state_translation_gist_client_id,
                                            parent=parent,
                                            import_source="Lingvodoc-0.98",
                                            import_hash=dict_attributes['dialeqt_id'],
                                            # additional_metadata=additional_metadata,
                                            translation_gist_client_id=persp_translation_gist_client_id,
                                            translation_gist_object_id=persp_translation_gist_object_id
                                            )
        # if is_template is not None:
        #     perspective.is_template = is_template
        DBSession.add(perspective)
        owner_client = DBSession.query(Client).filter_by(id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=perspective.object_id, subject_client_id=perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
            DBSession.flush()
        first_perspective_client_id = perspective.client_id
        first_perspective_object_id = perspective.object_id

        """
        # SECOND PERSPECTIVE
        """
        resp = translation_service_search_all("Paradigms")
        persp_translation_gist_client_id, persp_translation_gist_object_id = resp['client_id'], resp['object_id']
        parent = DBSession.query(Dictionary).filter_by(client_id=dictionary_client_id, object_id=dictionary_object_id).first()
        if not parent:
            return {'error': str("No such dictionary in the system")}

        perspective = DictionaryPerspective(client_id=client.id, ### variables['auth']
                                            state_translation_gist_object_id=state_translation_gist_object_id,
                                            state_translation_gist_client_id=state_translation_gist_client_id,
                                            parent=parent,
                                            import_source="Lingvodoc-0.98",
                                            import_hash=dict_attributes['dialeqt_id'],
                                            # additional_metadata=additional_metadata,
                                            translation_gist_client_id=persp_translation_gist_client_id,
                                            translation_gist_object_id=persp_translation_gist_object_id
                                            )
        # if is_template is not None:
        #     perspective.is_template = is_template
        DBSession.add(perspective)
        owner_client = DBSession.query(Client).filter_by(id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=perspective.object_id, subject_client_id=perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
        second_perspective_client_id = perspective.client_id
        second_perspective_object_id = perspective.object_id
        get_fp_ids = sqconn.cursor()
        get_fp_ids.execute("select id from dictionary where is_a_regular_form=1")
        count_cursor = sqconn.cursor()
        count_cursor.execute("select count(*) from dictionary where is_a_regular_form=1")
        words_count = count_cursor.fetchone()[0]
        count_cursor2 = sqconn.cursor()
        count_cursor2.execute("select count(*) from dictionary where is_a_regular_form=0")
        words_count2 = count_cursor2.fetchone()[0]
        ids_dict = dict()
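        # Pre-create one lexical entry per regular form (first perspective) and one
        # per paradigmatic form (second perspective), keyed by ordinal position.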
        for i in range(words_count):
            perspective = DBSession.query(DictionaryPerspective).\
                filter_by(client_id=first_perspective_client_id, object_id = first_perspective_object_id).first()
            if not perspective:
                return {'error': str("No such perspective in the system")}
            lexentr = LexicalEntry( client_id=client.id,
                                   parent_object_id=first_perspective_object_id, parent=perspective)
            DBSession.add(lexentr)
            lexical_entry_client_id = lexentr.client_id
            lexical_entry_object_id = lexentr.object_id
            ids_dict[i] = (lexical_entry_client_id, lexical_entry_object_id)
        DBSession.flush()
        ids_dict2 = dict()
        for i in range(words_count2):
            perspective = DBSession.query(DictionaryPerspective).\
                filter_by(client_id=second_perspective_client_id, object_id=second_perspective_object_id).first()
            if not perspective:
                return {'error': str("No such perspective in the system")}
            lexentr = LexicalEntry( client_id=client.id,
                                   parent_object_id=second_perspective_object_id, parent=perspective)
            DBSession.add(lexentr)
            lexical_entry_client_id = lexentr.client_id
            lexical_entry_object_id = lexentr.object_id
            ids_dict2[i] = (lexical_entry_client_id, lexical_entry_object_id)
        DBSession.flush()
        get_sp_ids = sqconn.cursor()
        get_sp_ids.execute("select id from dictionary where is_a_regular_form=0")
        ids_mapping2 = dict()
        i = 0
        sp_le_id_dict = {}
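        # Map sqlite row ids to the server-side (client_id, object_id) pairs created
        # above; *_le_id_dict keeps the ordinal position for each row id.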
        for id_cursor in get_sp_ids:
            id = id_cursor[0]
            sp_le_id_dict[id] = i
            client_id = ids_dict2[i][0]
            object_id = ids_dict2[i][1]
            ids_mapping2[int(id)] = (client_id, object_id)
            i += 1
        get_fp_ids = sqconn.cursor()
        get_fp_ids.execute("select id from dictionary where is_a_regular_form=1")
        ids_mapping = dict()
        i = 0
        fp_le_id_dict = {}
        for id_cursor in get_fp_ids:
            id = id_cursor[0]
            fp_le_id_dict[id] = i
            client_id = ids_dict[i][0]
            object_id = ids_dict[i][1]
            ids_mapping[id] = (client_id, object_id)
            i += 1
        fp_fields_dict = {}
        """
        # FIRST PERSPECTIVE FIELDS CREATION
        """
        fp_field_names = ("Word", "Transcription", "Translation", "Sound", "Etymology", "Backref")
        fields_list = []
        for fieldname in fp_field_names: #

            if fieldname == "Backref":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link":{
                        "client_id": second_perspective_client_id,
                        "object_id": second_perspective_object_id
                    }
                    }
                )

            elif fieldname == "Sound":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "contains":[{
                       "client_id": field_ids["Markup"][0],
                       "object_id": field_ids["Markup"][1]
                    }
                    ]
                    }
                )
            else:
                fields_list.append({"client_id": field_ids[fieldname][0], "object_id": field_ids[fieldname][1]})
            fp_fields_dict[fieldname] = (field_ids[fieldname][0], field_ids[fieldname][1])
        fp_fields_dict["Markup"] = (field_ids["Markup"][0], field_ids["Markup"][1])
        update_perspective_fields(fields_list, first_perspective_client_id, first_perspective_object_id, client)
        """
        # Creating fields of the second perspective
        """
        sp_fields_dict = {}
        fields_list = []
        sp_field_names = ("Word of Paradigmatic forms", "Transcription of Paradigmatic forms", "Translation of Paradigmatic forms", "Sounds of Paradigmatic forms", "Backref")
        for fieldname in sp_field_names: #
            if fieldname == "Backref":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link":{
                        "client_id": first_perspective_client_id,
                        "object_id": first_perspective_object_id
                    }
                    }
                )
            elif fieldname == "Sounds of Paradigmatic forms":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "contains":[{
                       "client_id": field_ids["Paradigm Markup"][0],
                       "object_id": field_ids["Paradigm Markup"][1]
                    }
                    ]
                    }
                )
            else:
                fields_list.append({"client_id": field_ids[fieldname][0], "object_id": field_ids[fieldname][1]})
            sp_fields_dict[fieldname] = (field_ids[fieldname][0], field_ids[fieldname][1])
        sp_fields_dict["Paradigm Markup"] = (field_ids["Paradigm Markup"][0], field_ids["Paradigm Markup"][1])
        update_perspective_fields(fields_list, second_perspective_client_id, second_perspective_object_id, client)
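        # Copy the text columns of the sqlite dictionary into Word / Transcription /
        # Translation entities for both perspectives.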
        columns = ("word", "Transcription", "translation")
        # First Perspective entity
        sqcursor = sqconn.cursor()
        for column in columns:
            sqcursor.execute("select id,%s from dictionary where is_a_regular_form=1" % column)
            for row in sqcursor:
                row_id = int(row[0])
                content = row[1]
                name = None
                if column == "word":
                    name = "Word"
                if column == "Transcription":
                    name = "Transcription"
                if column == "translation":
                    name = "Translation"
                create_entity(ids_dict[fp_le_id_dict[row_id]][0], ids_dict[fp_le_id_dict[row_id]][1], fp_fields_dict[name][0], fp_fields_dict[name][1],
                    None, client, content, filename=None, storage=storage)
        # Second Perspective entity
        sqcursor = sqconn.cursor()
        for column in columns:
            sqcursor.execute("select id,%s from dictionary where is_a_regular_form=0" % column)
            for row in sqcursor:
                row_id = int(row[0])
                content = row[1]
                name = None
                if column == "word":
                    name = "Word of Paradigmatic forms"
                if column == "Transcription":
                    name = "Transcription of Paradigmatic forms"
                if column == "translation":
                    name = "Translation of Paradigmatic forms"
                create_entity(ids_dict2[sp_le_id_dict[row_id]][0], ids_dict2[sp_le_id_dict[row_id]][1], sp_fields_dict[name][0], sp_fields_dict[name][1],
                    None, client, content, filename=None, storage=storage)
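        # Link every paradigmatic form to its regular form with mutual "Backref" entities.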
        sqcursor = sqconn.cursor()
        sqcursor.execute("select id,regular_form from dictionary where is_a_regular_form=0")
        for le_cursor in sqcursor:
            fp_id = int(le_cursor[1])
            sp_id = int(le_cursor[0])
            if fp_id in ids_mapping:
                create_entity(ids_dict[fp_le_id_dict[fp_id]][0], ids_dict[fp_le_id_dict[fp_id]][1], fp_fields_dict["Backref"][0], fp_fields_dict["Backref"][1],
                    None, client, filename=None, link_client_id=ids_dict2[sp_le_id_dict[sp_id]][0], link_object_id=ids_dict2[sp_le_id_dict[sp_id]][1], storage=storage)
                create_entity(ids_dict2[sp_le_id_dict[sp_id]][0], ids_dict2[sp_le_id_dict[sp_id]][1], sp_fields_dict["Backref"][0], sp_fields_dict["Backref"][1],
                    None, client, filename=None, link_client_id=ids_dict[fp_le_id_dict[fp_id]][0], link_object_id=ids_dict[fp_le_id_dict[fp_id]][1], storage=storage)
        #DBSession.flush()
        # if req.get('is_translatable', None):
        #         field.is_translatable = bool(req['is_translatable'])
        audio_hashes = set()
        markup_hashes = set()
        DBSession.flush()
        """
        Sound and Markup
        """
        audio_ids = set()
        paradigm_audio_ids = set()
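        # Sound (mainblob) and Praat markup (secblob) blobs are read from the sqlite
        # database and attached to the matching entries: first the regular forms,
        # then the paradigmatic forms.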
        sound_and_markup_word_cursor = sqconn.cursor()
        sound_and_markup_word_cursor.execute("""select blobs.id,
                                                blobs.secblob,
                                                blobs.mainblob,
                                                dict_blobs_description.name,
                                                dictionary.id,
                                                dict_blobs_description.type
                                                from blobs, dict_blobs_description, dictionary
                                                where dict_blobs_description.blobid=blobs.id
                                                and dict_blobs_description.wordid=dictionary.id
                                                and dictionary.is_a_regular_form=1;""")

        folder_name = "praat_markup"
        upload_audio_with_markup(audio_ids, ids_mapping, fp_fields_dict, sound_and_markup_word_cursor, audio_hashes, markup_hashes, folder_name,
                            user_id, True, client, storage)
        sound_and_markup_word_cursor = sqconn.cursor()
        sound_and_markup_word_cursor.execute("""select blobs.id,
                                                blobs.secblob,
                                                blobs.mainblob,
                                                dict_blobs_description.name,
                                                dictionary.id,
                                                dict_blobs_description.type
                                                from blobs, dict_blobs_description, dictionary
                                                where dict_blobs_description.blobid=blobs.id
                                                and dict_blobs_description.wordid=dictionary.id
                                                and dictionary.is_a_regular_form=1;""")
        upload_audio(audio_ids, ids_mapping, fp_fields_dict, sound_and_markup_word_cursor, audio_hashes, markup_hashes, folder_name,
                            user_id, True, client, storage)
        paradigm_sound_and_markup_cursor = sqconn.cursor()
        paradigm_sound_and_markup_cursor.execute("""select blobs.id,
                                                    blobs.secblob,
                                                    blobs.mainblob,
                                                    dict_blobs_description.name,
                                                    dictionary.id,
                                                    dict_blobs_description.type
                                                    from blobs, dict_blobs_description, dictionary
                                                    where dict_blobs_description.blobid=blobs.id
                                                    and dict_blobs_description.wordid=dictionary.id
                                                    and dictionary.is_a_regular_form=0;""")


        folder_name = "paradigm_praat_markup"
        upload_audio_with_markup(paradigm_audio_ids, ids_mapping2, sp_fields_dict, paradigm_sound_and_markup_cursor, audio_hashes, markup_hashes, folder_name,
                            user_id, True, client, storage)
        paradigm_sound_and_markup_cursor = sqconn.cursor()
        paradigm_sound_and_markup_cursor.execute("""select blobs.id,
                                                    blobs.secblob,
                                                    blobs.mainblob,
                                                    dict_blobs_description.name,
                                                    dictionary.id,
                                                    dict_blobs_description.type
                                                    from blobs, dict_blobs_description, dictionary
                                                    where dict_blobs_description.blobid=blobs.id
                                                    and dict_blobs_description.wordid=dictionary.id
                                                    and dictionary.is_a_regular_form=0;""")
        upload_audio(paradigm_audio_ids, ids_mapping2, sp_fields_dict, paradigm_sound_and_markup_cursor, audio_hashes, markup_hashes, folder_name,
                            user_id, True, client, storage)
        """
        Etimology_tag
        """

        etymology_cursor = sqconn.cursor()
        etymology_cursor.execute("""select id, etimology_tag
                                    FROM dictionary
                                    WHERE etimology_tag NOT NULL
                                    and dictionary.is_a_regular_form=1; """)
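        # Each etymology tag becomes a grouping "Etymology" entity connected to the
        # corresponding regular-form lexical entry.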
        for cursor in etymology_cursor:
            id = int(cursor[0])
            client_id = ids_mapping[id][0]
            object_id = ids_mapping[id][1]
            item = {"entity_type": "Etymology", "tag": cursor[1],
                    "field_client_id": field_ids["Etymology"][0],
                    "field_object_id": field_ids["Etymology"][1],
                    "connections": [{"client_id": client_id, "object_id": object_id}]}
            create_group_entity(item, client, user)
            # status = session.post(connect_url, json=item)
            # log.debug(status.text)


        dictionary = {}
        return dictionary
Exemplo n.º 45
0
def basic_sync(request):
    import requests
    import transaction
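    # Pull the shared base tables (locales, users, clients, groups, translation
    # gists/atoms, fields, languages) from the central server and merge them into
    # the local desktop database.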

    return_date_time = lambda r: {key: datetime.datetime.fromtimestamp(r[key]) if key == 'created_at' else r[key] for
                                  key in r}
    settings = request.registry.settings
    existing = basic_tables_content()
    path = settings['desktop']['central_server'] + 'sync/basic/server'
    with open('authentication_data.json', 'r') as f:
        cookies = json.loads(f.read())
    session = requests.Session()
    session.headers.update({'Connection': 'Keep-Alive'})
    adapter = requests.adapters.HTTPAdapter(pool_connections=1, pool_maxsize=1, max_retries=10)
    session.mount('http://', adapter)
    status = session.get(path, cookies=cookies)
    server = status.json()
    new_entries = list()
    old_langs = dict()
    langs = list()
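    # Compare server rows with local rows table by table: changed rows are collected,
    # new rows are instantiated, and languages are held back so that parents can be
    # inserted before their children.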
    for table in [Locale, User, Client, BaseGroup, TranslationGist, TranslationAtom, Field, Group, Language]:
        curr_server = server[table.__tablename__]
        curr_existing = existing[table.__tablename__]
        curr_old = list()
        if hasattr(table, 'id'):
            for key in curr_server:
                if key in curr_existing:
                    if curr_server[key] != curr_existing[key]:
                        kwargs = return_date_time(curr_server[key])
                        curr_old.append(kwargs)
                else:
                    kwargs = return_date_time(curr_server[key])
                    if table != Language:
                        new_entries.append(table(**kwargs))
                    else:
                        langs.append(table(**kwargs))
        else:
            for client_id in curr_server:
                if client_id in curr_existing:
                    for object_id in curr_server[client_id]:
                        if object_id in curr_existing[client_id]:
                            if curr_server[client_id][object_id] != curr_existing[client_id][object_id]:
                                kwargs = return_date_time(curr_server[client_id][object_id])
                                curr_old.append(kwargs)
                        else:
                            kwargs = return_date_time(curr_server[client_id][object_id])
                            if table != Language:
                                new_entries.append(table(**kwargs))
                            else:
                                langs.append(table(**kwargs))

                else:
                    for object_id in curr_server[client_id]:
                        kwargs = return_date_time(curr_server[client_id][object_id])
                        if table != Language:
                            new_entries.append(table(**kwargs))
                        else:
                            langs.append(table(**kwargs))

        if table != Language:
            all_entries = DBSession.query(table).all()
            if hasattr(table, 'client_id'):
                for entry in all_entries:
                    client_id = str(entry.client_id)
                    object_id = str(entry.object_id)
                    if client_id in curr_server and object_id in curr_server[client_id]:
                        for key, value in list(return_date_time(curr_server[client_id][object_id]).items()):
                            setattr(entry, key, value)
            else:
                for entry in all_entries:
                    id = str(entry.id)
                    if id in curr_server:
                        for key, value in list(return_date_time(curr_server[id]).items()):
                            if key != 'counter' and table != User:
                                setattr(entry, key, value)
            new_entries.extend(all_entries)
        else:
            old_langs = curr_server
    DBSession.flush()
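    # Languages reference their parents, so insert them parent-first:
    # repeatedly admit languages whose parent ids are already known.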
    parent_langs_ids = DBSession.query(Language.client_id, Language.object_id).all()
    parent_langs = [lang for lang in langs if not lang.parent_client_id]
    parent_langs_ids.extend([(lang.client_id, lang.object_id) for lang in langs if not lang.parent_client_id])
    new_langs = [lang for lang in langs if (lang.client_id, lang.object_id) not in parent_langs_ids]
    while new_langs:
        parent_langs.extend([lang for lang in langs if (
        lang.client_id, lang.object_id) not in parent_langs_ids and (
        lang.parent_client_id, lang.parent_object_id) in parent_langs_ids])
        parent_langs_ids.extend([(lang.client_id, lang.object_id) for lang in langs if (
        lang.client_id, lang.object_id) not in parent_langs_ids and (
        lang.parent_client_id, lang.parent_object_id) in parent_langs_ids])
        new_langs = [lang for lang in langs if (lang.client_id, lang.object_id) not in parent_langs_ids]
    new_entries.extend(parent_langs)
    for entry in DBSession.query(Language).all():
        client_id = str(entry.client_id)
        object_id = str(entry.object_id)
        if client_id in old_langs and object_id in old_langs[client_id]:
            for key, value in list(return_date_time(old_langs[client_id][object_id]).items()):
                setattr(entry, key, value)
    DBSession.bulk_save_objects(new_entries)
    # client = DBSession.query(Client).filter_by(id=authenticated_userid(request)).first()
    # if not client:
    #     request.response.status = HTTPNotFound.code
    #     return {'error': str("Try to login again")}
    # user = DBSession.query(User).filter_by(id=client.user_id).first()
    # if not user:
    #     request.response.status = HTTPNotFound.code
    #     return {'error': str("Try to login again")}

    for entry in server['user_to_group_association']:
        if not DBSession.query(user_to_group_association).filter_by(user_id=entry[0], group_id=entry[1]).first():
            insertion = user_to_group_association.insert().values(user_id=entry[0], group_id=entry[1])
            DBSession.execute(insertion)
    request.response.status = HTTPOk.code
    return HTTPOk(json_body={})
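The parent-first ordering applied to Language rows above can be isolated into a small helper. The following is a minimal sketch of the same idea and is not part of the original code (the names Lang and order_parent_first are illustrative): an item is admitted only once its parent id is already known, and items whose parents never become known are dropped, exactly as in basic_sync.

from collections import namedtuple

Lang = namedtuple('Lang', ['client_id', 'object_id', 'parent_client_id', 'parent_object_id'])

def order_parent_first(langs, known_ids):
    # Reorder langs so that every entry appears only after its parent is known.
    ordered, known, admitted = [], set(known_ids), True
    while admitted:
        admitted = False
        for lang in langs:
            lang_id = (lang.client_id, lang.object_id)
            parent_id = (lang.parent_client_id, lang.parent_object_id)
            if lang_id not in known and (lang.parent_client_id is None or parent_id in known):
                ordered.append(lang)
                known.add(lang_id)
                admitted = True
    return ordered

# (2, 2) depends on (2, 1), which depends on the already-known (1, 1).
print(order_parent_first([Lang(2, 2, 2, 1), Lang(2, 1, 1, 1)], {(1, 1)}))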
Exemplo n.º 46
0
def edit_user_info(request):  # TODO: test
    from passlib.hash import bcrypt
    response = dict()

    req = request.json_body
    client_id = req.get('client_id')
    user_id = req.get('user_id')
    user = None
    if client_id:
        client = DBSession.query(Client).filter_by(id=client_id).first()
        if not client:

            request.response.status = HTTPNotFound.code
            return {'error': str("No such client in the system")}
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        user_id = client.user_id
        if not user:

            request.response.status = HTTPNotFound.code
            return {'error': str("No such user in the system")}
    else:
        user = DBSession.query(User).filter_by(id=user_id).first()
        if not user:

            request.response.status = HTTPNotFound.code
            return {'error': str("No such user in the system")}
    new_password = req.get('new_password')
    old_password = req.get('old_password')

    if new_password:
        if not old_password:
            request.response.status = HTTPBadRequest.code
            return {'error': str("Need old password to confirm")}
        old_hash = DBSession.query(Passhash).filter_by(user_id=user_id).first()
        if old_hash:
            if not user.check_password(old_password):
                request.response.status = HTTPBadRequest.code
                return {'error': str("Wrong password")}
            else:
                old_hash.hash = bcrypt.encrypt(new_password)
        else:
            request.response.status = HTTPInternalServerError.code
            return {'error': str("User has no password")}

    name = req.get('name')
    if name:
        user.name = name
    default_locale_id = req.get('default_locale_id')
    if default_locale_id:
        user.default_locale_id = default_locale_id
    birthday = req.get('birthday')
    if birthday:
        try:
            year, month, day = birthday.split('-')
            user.birthday = datetime.date(int(year), int(month), int(day))
        except ValueError:
            request.response.status = HTTPBadRequest.code
            return {'Error': "Invalid birthday"}
    email = req.get('email')
    if email:
        if user.email:
            user.email.email = email
        else:
            new_email = Email(user=user, email=email)
            DBSession.add(new_email)
            DBSession.flush()
    about = req.get('about')
    if about:
        meta = dict()
        if user.additional_metadata:
            meta = user.additional_metadata
        meta['about'] = about
        user.additional_metadata = meta
    # response['is_active']=str(user.is_active)
    request.response.status = HTTPOk.code
    return response
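For reference, a request body accepted by this handler could look like the sketch below; the key names come from the reads above, while the concrete values are purely illustrative.

# Illustrative payload only. Either client_id or user_id identifies the user;
# new_password requires old_password; all other keys are optional.
payload = {
    "client_id": 42,
    "new_password": "s3cret",
    "old_password": "old-s3cret",
    "name": "New Name",
    "default_locale_id": 2,
    "birthday": "1990-01-31",  # parsed as year-month-day
    "email": "user@example.com",
    "about": "Short bio stored in additional_metadata",
}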
Exemplo n.º 47
0
def convert_db_new(blob_client_id, blob_object_id, language_client_id, language_object_id, user_id, gist_client_id, gist_object_id, storage,
                   locale_id=2):
    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)

    time.sleep(4)
    field_ids = {}
    with transaction.manager:
        blob = DBSession.query(UserBlobs).filter_by(client_id=blob_client_id, object_id=blob_object_id).first()
        # DBSession.flush()
        filename = blob.real_storage_path
        log.debug("user_id: %s" % user_id)
        log.debug("Starting convert_one")
        log.debug("Creating session")
        sqconn = sqlite3.connect(filename)
        log.debug("Connected to sqlite3 database")
        client = DBSession.query(Client).filter_by(id=user_id).first()
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                           user_id)
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            log.debug("ERROR")


        all_fieldnames = ("Markup",
                          "Paradigm Markup",
                          "Word",
                          "Transcription",
                          "Translation",
                          "Sound",
                          "Etymology",
                          "Backref",
                          "Word of Paradigmatic forms",
                          "Transcription of Paradigmatic forms",
                          "Translation of Paradigmatic forms",
                          "Sounds of Paradigmatic forms"
                         )
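        # Resolve each field's (client_id, object_id) pair by its English name (locale_id == 2).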
        for name in all_fieldnames:
            data_type_query = DBSession.query(Field) \
                .join(TranslationGist,
                      and_(Field.translation_gist_object_id == TranslationGist.object_id,
                           Field.translation_gist_client_id == TranslationGist.client_id))\
                .join(TranslationGist.translationatom)
            field = data_type_query.filter(TranslationAtom.locale_id == 2,
                                           TranslationAtom.content == name).one()  # todo: a way to find these fields if we cannot use one()
            field_ids[name] = (field.client_id, field.object_id)

        DBSession.flush()


        """
        dict_attributes = get_dict_attributes(sqconn)
        translationgist = TranslationGist(client_id=user_id, type="Dictionary")
        DBSession.add(translationgist)
        DBSession.flush()
        gist_client_id = translationgist.client_id
        gist_object_id = translationgist.object_id
        """
        parent_client_id = gist_client_id
        parent_object_id = gist_object_id

        parent = DBSession.query(TranslationGist).filter_by(client_id=parent_client_id, object_id=parent_object_id).first()

        """
        translationatom = TranslationAtom(client_id=client.id,
                                          parent=parent,
                                          locale_id=locale_id,
                                          content=dict_attributes["dictionary_name"])
        DBSession.add(translationatom)
        DBSession.flush()
        atom_client_id = translationatom.client_id
        atom_object_id = translationatom.object_id

        log.debug(dict_attributes["dictionary_name"])
        language_client_id = atom_client_id
        language_object_id = atom_object_id
        """
        lang_parent = DBSession.query(Language).filter_by(client_id=language_client_id, object_id=language_object_id).first()

        resp = translation_service_search("WiP")
        state_translation_gist_object_id, state_translation_gist_client_id = resp['object_id'], resp['client_id']
        dictionary = Dictionary(client_id=user_id,
                                state_translation_gist_object_id=state_translation_gist_object_id,
                                state_translation_gist_client_id=state_translation_gist_client_id,
                                parent=lang_parent,
                                translation_gist_client_id=gist_client_id,
                                translation_gist_object_id=gist_object_id
                                      )
                                #additional_metadata=additional_metadata)
        DBSession.add(dictionary)
        DBSession.flush()

        dictionary_client_id = dictionary.client_id
        dictionary_object_id = dictionary.object_id
        for base in DBSession.query(BaseGroup).filter_by(dictionary_default=True):
            new_group = Group(parent=base,
                              subject_object_id=dictionary.object_id, subject_client_id=dictionary.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            DBSession.add(new_group)
            DBSession.flush()
        """
        # FIRST PERSPECTIVE
        """
        resp = translation_service_search_all("Lexical Entries")
        persp_translation_gist_client_id, persp_translation_gist_object_id = resp['client_id'], resp['object_id']
        parent = DBSession.query(Dictionary).filter_by(client_id=dictionary_client_id, object_id=dictionary_object_id).first()
        perspective = DictionaryPerspective(client_id=client.id, ###
                                            state_translation_gist_object_id=state_translation_gist_object_id,
                                            state_translation_gist_client_id=state_translation_gist_client_id,
                                            parent=parent,
                                            # import_source=req.get('import_source'),
                                            # import_hash=req.get('import_hash'),
                                            # additional_metadata=additional_metadata,
                                            translation_gist_client_id=persp_translation_gist_client_id,
                                            translation_gist_object_id=persp_translation_gist_object_id
                                            )
        # if is_template is not None:
        #     perspective.is_template = is_template
        DBSession.add(perspective)
        owner_client = DBSession.query(Client).filter_by(id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=perspective.object_id, subject_client_id=perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
            DBSession.flush()
        first_perspective_client_id = perspective.client_id
        first_perspective_object_id = perspective.object_id

        """
        # SECOND PERSPECTIVE
        """
        resp = translation_service_search_all("Paradigms")
        persp_translation_gist_client_id, persp_translation_gist_object_id = resp['client_id'], resp['object_id']
        parent = DBSession.query(Dictionary).filter_by(client_id=dictionary_client_id, object_id=dictionary_object_id).first()
        if not parent:
            return {'error': str("No such dictionary in the system")}

        perspective = DictionaryPerspective(client_id=client.id, ### variables['auth']
                                            state_translation_gist_object_id=state_translation_gist_object_id,
                                            state_translation_gist_client_id=state_translation_gist_client_id,
                                            parent=parent,
                                            # import_source=req.get('import_source'),
                                            # import_hash=req.get('import_hash'),
                                            # additional_metadata=additional_metadata,
                                            translation_gist_client_id=persp_translation_gist_client_id,
                                            translation_gist_object_id=persp_translation_gist_object_id
                                            )
        # if is_template is not None:
        #     perspective.is_template = is_template
        DBSession.add(perspective)
        owner_client = DBSession.query(Client).filter_by(id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=perspective.object_id, subject_client_id=perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
        second_perspective_client_id = perspective.client_id
        second_perspective_object_id = perspective.object_id
        get_fp_ids = sqconn.cursor()
        get_fp_ids.execute("select id from dictionary where is_a_regular_form=1")
        count_cursor = sqconn.cursor()
        count_cursor.execute("select count(*) from dictionary where is_a_regular_form=1")
        words_count = count_cursor.fetchone()[0]
        count_cursor2 = sqconn.cursor()
        count_cursor2.execute("select count(*) from dictionary where is_a_regular_form=0")
        words_count2 = count_cursor2.fetchone()[0]
        ids_dict = dict()
        for i in range(words_count):
            perspective = DBSession.query(DictionaryPerspective).\
                filter_by(client_id=first_perspective_client_id, object_id = first_perspective_object_id).first()
            if not perspective:
                return {'error': str("No such perspective in the system")}
            lexentr = LexicalEntry( client_id=client.id,
                                   parent_object_id=first_perspective_object_id, parent=perspective)
            DBSession.add(lexentr)
            lexical_entry_client_id = lexentr.client_id
            lexical_entry_object_id = lexentr.object_id
            ids_dict[i] = (lexical_entry_client_id, lexical_entry_object_id)
        DBSession.flush()
        ids_dict2 = dict()
        for i in range(words_count2):
            perspective = DBSession.query(DictionaryPerspective).\
                filter_by(client_id=second_perspective_client_id, object_id=second_perspective_object_id).first()
            if not perspective:
                return {'error': str("No such perspective in the system")}
            lexentr = LexicalEntry( client_id=client.id,
                                   parent_object_id=second_perspective_object_id, parent=perspective)
            DBSession.add(lexentr)
            lexical_entry_client_id = lexentr.client_id
            lexical_entry_object_id = lexentr.object_id
            ids_dict2[i] = (lexical_entry_client_id, lexical_entry_object_id)
        DBSession.flush()
        get_sp_ids = sqconn.cursor()
        get_sp_ids.execute("select id from dictionary where is_a_regular_form=0")
        ids_mapping2 = dict()
        i = 0
        sp_le_id_dict = {}
        for id_cursor in get_sp_ids:
            id = id_cursor[0]
            sp_le_id_dict[id] = i
            client_id = ids_dict2[i][0]
            object_id = ids_dict2[i][1]
            ids_mapping2[int(id)] = (client_id, object_id)
            i += 1
        get_fp_ids = sqconn.cursor()
        get_fp_ids.execute("select id from dictionary where is_a_regular_form=1")
        ids_mapping = dict()
        i = 0
        fp_le_id_dict = {}
        for id_cursor in get_fp_ids:
            id = id_cursor[0]
            fp_le_id_dict[id] = i
            client_id = ids_dict[i][0]
            object_id = ids_dict[i][1]
            ids_mapping[id] = (client_id, object_id)
            i += 1
        fp_fields_dict = {}
        """
        # FIRST PERSPECTIVE FIELDS CREATION
        """
        fp_field_names = ("Word", "Transcription", "Translation", "Sound", "Etymology", "Backref")
        fields_list = []
        for fieldname in fp_field_names: #

            if fieldname == "Backref":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link":{
                        "client_id": second_perspective_client_id,
                        "object_id": second_perspective_object_id
                    }
                    }
                )

            elif fieldname == "Sound":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "contains":[{
                       "client_id": field_ids["Markup"][0],
                       "object_id": field_ids["Markup"][1]
                    }
                    ]
                    }
                )
            else:
                fields_list.append({"client_id": field_ids[fieldname][0], "object_id": field_ids[fieldname][1]})
            fp_fields_dict[fieldname] = (field_ids[fieldname][0], field_ids[fieldname][1])
        fp_fields_dict["Markup"] = (field_ids["Markup"][0], field_ids["Markup"][1])
        update_perspective_fields(fields_list, first_perspective_client_id, first_perspective_object_id, client )
        """
        # Creating fields of the second perspective
        """
        sp_fields_dict = {}
        fields_list = []
        sp_field_names = ("Word of Paradigmatic forms", "Transcription of Paradigmatic forms", "Translation of Paradigmatic forms", "Sounds of Paradigmatic forms", "Backref")
        for fieldname in sp_field_names: #
            if fieldname == "Backref":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link":{
                        "client_id": first_perspective_client_id,
                        "object_id": first_perspective_object_id
                    }
                    }
                )
            elif fieldname == "Sounds of Paradigmatic forms":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "contains":[{
                       "client_id": field_ids["Paradigm Markup"][0],
                       "object_id": field_ids["Paradigm Markup"][1]
                    }
                    ]
                    }
                )
            else:
                fields_list.append({"client_id": field_ids[fieldname][0], "object_id": field_ids[fieldname][1]})
            sp_fields_dict[fieldname] = (field_ids[fieldname][0], field_ids[fieldname][1])
        sp_fields_dict["Paradigm Markup"] = (field_ids["Paradigm Markup"][0], field_ids["Paradigm Markup"][1])
        update_perspective_fields(fields_list, second_perspective_client_id, second_perspective_object_id, client)
        columns = ("word", "Transcription", "translation")
        # First Perspective entity
        sqcursor = sqconn.cursor()
        for column in columns:
            sqcursor.execute("select id,%s from dictionary where is_a_regular_form=1" % column)
            for row in sqcursor:
                row_id = int(row[0])
                content = row[1]
                name = None
                if column == "word":
                    name = "Word"
                if column == "Transcription":
                    name = "Transcription"
                if column == "translation":
                    name = "Translation"
                create_entity(ids_dict[fp_le_id_dict[row_id]][0], ids_dict[fp_le_id_dict[row_id]][1], fp_fields_dict[name][0], fp_fields_dict[name][1],
                    None, client, content, filename=None, storage=storage)
        # Second Perspective entity
        sqcursor = sqconn.cursor()
        for column in columns:
            sqcursor.execute("select id,%s from dictionary where is_a_regular_form=0" % column)
            for row in sqcursor:
                row_id = int(row[0])
                content = row[1]
                name = None
                if column == "word":
                    name = "Word of Paradigmatic forms"
                if column == "Transcription":
                    name = "Transcription of Paradigmatic forms"
                if column == "translation":
                    name = "Translation of Paradigmatic forms"
                create_entity(ids_dict2[sp_le_id_dict[row_id]][0], ids_dict2[sp_le_id_dict[row_id]][1], sp_fields_dict[name][0], sp_fields_dict[name][1],
                    None, client, content, filename=None, storage=storage)
        sqcursor = sqconn.cursor()
        sqcursor.execute("select id,regular_form from dictionary where is_a_regular_form=0")
        for le_cursor in sqcursor:
            fp_id = int(le_cursor[1])
            sp_id = int(le_cursor[0])
            if fp_id in ids_mapping:
                create_entity(ids_dict[fp_le_id_dict[fp_id]][0], ids_dict[fp_le_id_dict[fp_id]][1], fp_fields_dict["Backref"][0], fp_fields_dict["Backref"][1],
                    None, client, filename=None, link_client_id=ids_dict2[sp_le_id_dict[sp_id]][0], link_object_id=ids_dict2[sp_le_id_dict[sp_id]][1], storage=storage)
                create_entity(ids_dict2[sp_le_id_dict[sp_id]][0], ids_dict2[sp_le_id_dict[sp_id]][1], sp_fields_dict["Backref"][0], sp_fields_dict["Backref"][1],
                    None, client, filename=None, link_client_id=ids_dict[fp_le_id_dict[fp_id]][0], link_object_id=ids_dict[fp_le_id_dict[fp_id]][1], storage=storage)
        #DBSession.flush()
        # if req.get('is_translatable', None):
        #         field.is_translatable = bool(req['is_translatable'])
        audio_hashes = set()
        markup_hashes = set()
        DBSession.flush()
        """
        Sound and Markup
        """
        audio_ids = set()
        paradigm_audio_ids = set()
        sound_and_markup_word_cursor = sqconn.cursor()
        sound_and_markup_word_cursor.execute("""select blobs.id,
                                                blobs.secblob,
                                                blobs.mainblob,
                                                dict_blobs_description.name,
                                                dictionary.id,
                                                dict_blobs_description.type
                                                from blobs, dict_blobs_description, dictionary
                                                where dict_blobs_description.blobid=blobs.id
                                                and dict_blobs_description.wordid=dictionary.id
                                                and dictionary.is_a_regular_form=1;""")

        folder_name = "praat_markup"
        upload_audio_with_markup(audio_ids, ids_mapping, fp_fields_dict, sound_and_markup_word_cursor, audio_hashes, markup_hashes, folder_name,
                            user_id, True, client, storage)
        upload_audio(audio_ids, ids_mapping, fp_fields_dict, sound_and_markup_word_cursor, audio_hashes, markup_hashes, folder_name,
                            user_id, True, client, storage)
        paradigm_sound_and_markup_cursor = sqconn.cursor()
        paradigm_sound_and_markup_cursor.execute("""select blobs.id,
                                                    blobs.secblob,
                                                    blobs.mainblob,
                                                    dict_blobs_description.name,
                                                    dictionary.id,
                                                    dict_blobs_description.type
                                                    from blobs, dict_blobs_description, dictionary
                                                    where dict_blobs_description.blobid=blobs.id
                                                    and dict_blobs_description.wordid=dictionary.id
                                                    and dictionary.is_a_regular_form=0;""")


        folder_name = "paradigm_praat_markup"
        upload_audio_with_markup(paradigm_audio_ids, ids_mapping2, sp_fields_dict, paradigm_sound_and_markup_cursor, audio_hashes, markup_hashes, folder_name,
                            user_id, True, client, storage)
        upload_audio(paradigm_audio_ids, ids_mapping2, sp_fields_dict, paradigm_sound_and_markup_cursor, audio_hashes, markup_hashes, folder_name,
                            user_id, True, client, storage)
        """
        Etimology_tag
        """

        etymology_cursor = sqconn.cursor()
        etymology_cursor.execute("""select id, etimology_tag
                                    FROM dictionary
                                    WHERE etimology_tag NOT NULL
                                    and dictionary.is_a_regular_form=1; """)
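        # For every regular-form entry that carries an etimology_tag, create an "Etymology"
        # grouping entity connecting the entry to that tag.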
        for cursor in etymology_cursor:
            id = int(cursor[0])
            client_id = ids_mapping[id][0]
            object_id = ids_mapping[id][1]
            item = {"entity_type": "Etymology", "tag": cursor[1],
                    "field_client_id": field_ids["Etymology"][0],
                    "field_object_id": field_ids["Etymology"][1],
                    "connections": [{"client_id": client_id, "object_id": object_id}]}
            create_group_entity(item, client, user)
            # status = session.post(connect_url, json=item)
            # log.debug(status.text)


        dictionary = {}
        return dictionary
Exemplo n.º 48
0
def bulk_group_entities(request):  # tested
    try:
        variables = {'auth': authenticated_userid(request)}
        response = dict()
        req = request.json_body
        client = DBSession.query(Client).filter_by(id=variables['auth']).first()
        field_client_id = req['field_client_id']
        field_object_id = req['field_object_id']
        counter = req['counter']
        field = DBSession.query(Field).\
            filter_by(client_id=field_client_id, object_id=field_object_id).first()

        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.")
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException("This client id is orphaned. Try to logout and then login once more.")
        client.counter = counter
        DBSession.flush()
        if not field:
            request.response.status = HTTPNotFound.code
            return {'error': str("No such field in the system")}
        if field.data_type != 'Grouping Tag':
            raise KeyError("wrong field data type")

        for tag in req['tag_groups']:
            for tag_ent in req['tag_groups'][tag]:
                tag_entity = DBSession.query(Entity) \
                    .join(Entity.field) \
                    .filter(Entity.parent_client_id == tag_ent['parent_client_id'],
                            Entity.parent_object_id == tag_ent['parent_object_id'],
                            Field.client_id == tag_ent['field_client_id'],
                            Field.object_id == tag_ent['field_object_id'],
                            Entity.content == tag).first()
                if not tag_entity:
                    tag_entity = Entity(client_id=client.id,
                                        object_id=tag_ent['object_id'],
                                        field=field,
                                        content=tag_ent['content'],
                                        parent_client_id=tag_ent['parent_client_id'],
                                        parent_object_id=tag_ent['parent_object_id'])
                    lex = DBSession.query(LexicalEntry).filter_by(client_id=tag_ent['parent_client_id'],
                                                                  object_id=tag_ent['parent_object_id']).one()
                    group = DBSession.query(Group).join(BaseGroup).filter(
                        BaseGroup.subject == 'lexical_entries_and_entities',
                        Group.subject_client_id == lex.parent_client_id,
                        Group.subject_object_id == lex.parent_object_id,
                        BaseGroup.action == 'create').one()
                    if user in group.users:
                        tag_entity.publishingentity.accepted = True
        # if 'tag' in req:
        #     tags.append(req['tag'])

        for tag in req['tag_groups']:
            tags = list()
            tag_ent = req['tag_groups'][tag][0]
            parent = DBSession.query(LexicalEntry).\
                filter_by(client_id=tag_ent['parent_client_id'], object_id=tag_ent['parent_object_id']).first()
            if not parent:
                request.response.status = HTTPNotFound.code
                return {'error': str("No such lexical entry in the system")}
            par_tags = find_all_tags(parent, field_client_id, field_object_id)
            for tag in par_tags:
                if tag not in tags:
                    tags.append(tag)
            lexical_entries = find_lexical_entries_by_tags(tags, field_client_id, field_object_id)
            if parent not in lexical_entries:
                lexical_entries.append(parent)

            for lex in lexical_entries:
                for tag in tags:
                    tag_entity = DBSession.query(Entity) \
                        .join(Entity.field) \
                        .filter(Entity.parent == lex,
                                Field.client_id == field_client_id,
                                Field.object_id == field_object_id,
                                Entity.content == tag).first()
                    if not tag_entity:
                        tag_entity = Entity(client_id=client.id,
                                            field=field, content=tag, parent=lex)

                        group = DBSession.query(Group).join(BaseGroup).filter(
                            BaseGroup.subject == 'lexical_entries_and_entities',
                            Group.subject_client_id == lex.parent_client_id,
                            Group.subject_object_id == lex.parent_object_id,
                            BaseGroup.action == 'create').one()
                        if user in group.users:
                            tag_entity.publishingentity.accepted = True
        request.response.status = HTTPOk.code
        response['counter'] = client.counter
        return response
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
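For reference, the JSON body this view expects has roughly the following shape; the key names match the accesses above and the numeric values are illustrative only.

# Illustrative payload only. field_client_id/field_object_id must point to a
# field whose data type is 'Grouping Tag'; each tag maps to the entities that carry it.
payload = {
    "field_client_id": 1,
    "field_object_id": 213,
    "counter": 17,
    "tag_groups": {
        "etymology-tag-1": [
            {
                "object_id": 5,
                "content": "etymology-tag-1",
                "parent_client_id": 2,   # composite id of the LexicalEntry
                "parent_object_id": 10,
                "field_client_id": 1,
                "field_object_id": 213,
            },
        ],
    },
}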
Exemplo n.º 49
0
def convert_five_tiers(
                dictionary_client_id,
                dictionary_object_id,
                user_id,
                origin_client_id,
                origin_object_id,
                sqlalchemy_url,
                storage,
                eaf_url,
                sound_url=None
                ):

    log = logging.getLogger(__name__)
    log.setLevel(logging.DEBUG)
    no_sound = True
    if sound_url:
        no_sound = False
    with warnings.catch_warnings():
        warnings.filterwarnings('error')
        try:
            from pydub import AudioSegment
        except Warning as e:
            no_sound = True
    if not no_sound:
        with tempfile.NamedTemporaryFile() as temp:
            try:
                sound_file = request.urlopen(sound_url)
            except HTTPError as e:
                return {'error': str(e.read().decode("utf8", 'ignore'))}
            with open(temp.name,'wb') as output:
                output.write(sound_file.read())
            full_audio = AudioSegment.from_wav(temp.name)
            temp.flush()

    field_ids = {}
    with transaction.manager:
        client = DBSession.query(Client).filter_by(id=user_id).first()
        if not client:
            raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                           user_id)
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        all_fieldnames = ("Markup",
                          "Paradigm Markup",
                          "Word",
                          "Transcription",
                          "Translation",
                          "Sound",
                          "Etymology",
                          "Backref",
                          "Word of Paradigmatic forms",
                          "Transcription of Paradigmatic forms",
                          "Translation of Paradigmatic forms",
                          "Sounds of Paradigmatic forms"
                         )
        for name in all_fieldnames:
            data_type_query = DBSession.query(Field) \
                .join(TranslationGist,
                      and_(Field.translation_gist_object_id == TranslationGist.object_id,
                           Field.translation_gist_client_id == TranslationGist.client_id))\
                .join(TranslationGist.translationatom)
            field = data_type_query.filter(TranslationAtom.locale_id == 2,
                                           TranslationAtom.content == name).one()  # todo: a way to find these fields if we cannot use one()
            field_ids[name] = (field.client_id, field.object_id)

        DBSession.flush()
        """
        parent_client_id = gist_client_id
        parent_object_id = gist_object_id

        parent = DBSession.query(TranslationGist).filter_by(client_id=parent_client_id, object_id=parent_object_id).first()

        lang_parent = DBSession.query(Language).filter_by(client_id=language_client_id, object_id=language_object_id).first()

        resp = translation_service_search("WiP")
        state_translation_gist_object_id, state_translation_gist_client_id = resp['object_id'], resp['client_id']
        dictionary = Dictionary(client_id=user_id,
                                state_translation_gist_object_id=state_translation_gist_object_id,
                                state_translation_gist_client_id=state_translation_gist_client_id,
                                parent=lang_parent,
                                translation_gist_client_id=gist_client_id,
                                translation_gist_object_id=gist_object_id
                                      )
                                #additional_metadata=additional_metadata)
        DBSession.add(dictionary)
        DBSession.flush()

        dictionary_client_id = dictionary.client_id
        dictionary_object_id = dictionary.object_id
        """
        resp = translation_service_search("WiP")
        state_translation_gist_object_id, state_translation_gist_client_id = resp['object_id'], resp['client_id']
        for base in DBSession.query(BaseGroup).filter_by(dictionary_default=True):
            new_group = Group(parent=base,
                              subject_object_id=dictionary_object_id, subject_client_id=dictionary_client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            DBSession.add(new_group)
            DBSession.flush()
        """
        # FIRST PERSPECTIVE
        """
        resp = translation_service_search_all("Lexical Entries")
        persp_translation_gist_client_id, persp_translation_gist_object_id = resp['client_id'], resp['object_id']


        parent = DBSession.query(Dictionary).filter_by(client_id=dictionary_client_id, object_id=dictionary_object_id).first()
        origin_metadata= {"origin_client_id": origin_client_id,
                              "origin_object_id": origin_object_id
                              }
        perspective = DictionaryPerspective(client_id=client.id, ###
                                            state_translation_gist_object_id=state_translation_gist_object_id,
                                            state_translation_gist_client_id=state_translation_gist_client_id,
                                            parent=parent,
                                            # import_source=req.get('import_source'),
                                            # import_hash=req.get('import_hash'),
                                            additional_metadata=origin_metadata,
                                            translation_gist_client_id=persp_translation_gist_client_id,
                                            translation_gist_object_id=persp_translation_gist_object_id
                                            )
        perspective.additional_metadata = origin_metadata
        # if is_template is not None:
        #     perspective.is_template = is_template
        DBSession.add(perspective)
        owner_client = DBSession.query(Client).filter_by(id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=perspective.object_id, subject_client_id=perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
            DBSession.flush()
        first_perspective_client_id = perspective.client_id
        first_perspective_object_id = perspective.object_id
        """
        # SECOND PERSPECTIVE
        """
        resp = translation_service_search_all("Paradigms")
        persp_translation_gist_client_id, persp_translation_gist_object_id = resp['client_id'], resp['object_id']
        parent = DBSession.query(Dictionary).filter_by(client_id=dictionary_client_id, object_id=dictionary_object_id).first()
        if not parent:
            return {'error': str("No such dictionary in the system")}

        perspective = DictionaryPerspective(client_id=client.id, ### variables['auth']
                                            state_translation_gist_object_id=state_translation_gist_object_id,
                                            state_translation_gist_client_id=state_translation_gist_client_id,
                                            parent=parent,
                                            # import_source=req.get('import_source'),
                                            # import_hash=req.get('import_hash'),
                                            additional_metadata=origin_metadata,
                                            translation_gist_client_id=persp_translation_gist_client_id,
                                            translation_gist_object_id=persp_translation_gist_object_id
                                            )
        perspective.additional_metadata = origin_metadata
        # if is_template is not None:
        #     perspective.is_template = is_template
        DBSession.add(perspective)
        owner_client = DBSession.query(Client).filter_by(id=parent.client_id).first()
        owner = owner_client.user
        for base in DBSession.query(BaseGroup).filter_by(perspective_default=True):
            new_group = Group(parent=base,
                              subject_object_id=perspective.object_id, subject_client_id=perspective.client_id)
            if user not in new_group.users:
                new_group.users.append(user)
            if owner not in new_group.users:
                new_group.users.append(owner)
            DBSession.add(new_group)
        second_perspective_client_id = perspective.client_id
        second_perspective_object_id = perspective.object_id

        fp_fields_dict = {}
        """
        # FIRST PERSPECTIVE FIELDS CREATION
        """
        fp_field_names = ("Word", "Transcription", "Translation", "Sound", "Etymology", "Backref")
        fields_list = []
        for fieldname in fp_field_names: #

            if fieldname == "Backref":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link":{
                        "client_id": second_perspective_client_id,
                        "object_id": second_perspective_object_id
                    }
                    }
                )

            elif fieldname == "Sound":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "contains":[{
                       "client_id": field_ids["Markup"][0],
                       "object_id": field_ids["Markup"][1]
                    }
                    ]
                    }
                )
            else:
                fields_list.append({"client_id": field_ids[fieldname][0], "object_id": field_ids[fieldname][1]})
            fp_fields_dict[fieldname] = (field_ids[fieldname][0], field_ids[fieldname][1])
        fp_fields_dict["Markup"] = (field_ids["Markup"][0], field_ids["Markup"][1])
        update_perspective_fields(fields_list, first_perspective_client_id, first_perspective_object_id, client )
        """
        # Creating fields of the second perspective
        """
        sp_fields_dict = {}
        fields_list = []
        sp_field_names = ("Word of Paradigmatic forms", "Transcription of Paradigmatic forms", "Translation of Paradigmatic forms", "Sounds of Paradigmatic forms", "Backref")
        for fieldname in sp_field_names: #
            if fieldname == "Backref":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "link":{
                        "client_id": first_perspective_client_id,
                        "object_id": first_perspective_object_id
                    }
                    }
                )
            elif fieldname == "Sounds of Paradigmatic forms":
                fields_list.append(
                    {
                    "client_id": field_ids[fieldname][0],
                    "object_id": field_ids[fieldname][1],
                    "contains":[{
                       "client_id": field_ids["Paradigm Markup"][0],
                       "object_id": field_ids["Paradigm Markup"][1]
                    }
                    ]
                    }
                )
            else:
                fields_list.append({"client_id": field_ids[fieldname][0], "object_id": field_ids[fieldname][1]})
            sp_fields_dict[fieldname] = (field_ids[fieldname][0], field_ids[fieldname][1])
        sp_fields_dict["Paradigm Markup"] = (field_ids["Paradigm Markup"][0], field_ids["Paradigm Markup"][1])
        update_perspective_fields(fields_list, second_perspective_client_id, second_perspective_object_id, client)
        link_dict = defaultdict(list)
        dubl = []

        log = logging.getLogger(__name__)
        try:
            eaffile = request.urlopen(eaf_url)
        except HTTPError as e:
            return {'error': str(e.read().decode("utf8", 'ignore'))}
        with tempfile.NamedTemporaryFile() as temp:
            temp.write(eaffile.read())
            converter = elan_parser.Elan(temp.name)
            converter.parse()
            final_dicts = converter.proc()
            temp.flush()

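        # Each parsed phrase becomes a lexical entry in the paradigm (second) perspective;
        # its words are deduplicated into first-perspective entries via link_dict, and the
        # paired entries are connected with Backref links in both directions.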
        for phrase in final_dicts:
            perspective = DBSession.query(DictionaryPerspective).\
            filter_by(client_id=second_perspective_client_id, object_id = second_perspective_object_id).first() #sec?
            if not perspective:
                return {'error': str("No such perspective in the system")}
            lexentr = LexicalEntry(client_id=client.id,
                                   parent_object_id=second_perspective_object_id, parent=perspective)
            DBSession.add(lexentr)
            sp_lexical_entry_client_id = lexentr.client_id
            sp_lexical_entry_object_id = lexentr.object_id
            curr_dict = None
            for word_translation in phrase:
                if type(word_translation) is not list:
                    curr_dict = word_translation
                    main_tier_text = " ".join([word_translation[i][1].text for i in word_translation if len(word_translation[i]) > 1 and type(word_translation[i][1].text) is str])
                    if main_tier_text:
                        create_entity(sp_lexical_entry_client_id, sp_lexical_entry_object_id, field_ids["Word of Paradigmatic forms"][0], field_ids["Word of Paradigmatic forms"][1],
                            None, client, main_tier_text, filename=None, storage=storage)
                    if not no_sound:
                        if word.time[1] < len(full_audio):
                            with tempfile.NamedTemporaryFile() as temp:
                                full_audio[ word.time[0]: word.time[1]].export(temp.name, format="wav")
                                audio_slice = temp.read()
                                create_entity(sp_lexical_entry_client_id, sp_lexical_entry_object_id, field_ids["Sounds of Paradigmatic forms"][0], field_ids["Sounds of Paradigmatic forms"][1],
                                    None, client, filename="%s.wav" %(word.index) , folder_name="sound1", content=base64.urlsafe_b64encode(audio_slice).decode(), storage=storage)
                                temp.flush()


                else:
                    word = word_translation[0]
                    tier_name = word.tier
                    new = " ".join([i.text for i in word_translation])
                    create_entity(sp_lexical_entry_client_id, sp_lexical_entry_object_id, field_ids[EAF_TIERS[tier_name]][0], field_ids[EAF_TIERS[tier_name]][1],
                        None, client, new, filename=None, storage=storage)
            for word in curr_dict:
                column = [word] + curr_dict[word]
                cort = reversed(tuple(i.text for i in column))
                if cort in link_dict:
                    fp_lexical_entry_client_id, fp_lexical_entry_object_id = link_dict[cort]
                else:
                    perspective = DBSession.query(DictionaryPerspective).\
                    filter_by(client_id=first_perspective_client_id, object_id = first_perspective_object_id).first()
                    if not perspective:
                        return {'error': str("No such perspective in the system")}
                    lexentr = LexicalEntry(client_id=client.id,
                                           parent_object_id=first_perspective_object_id, parent=perspective)
                    DBSession.add(lexentr)
                    fp_lexical_entry_client_id = lexentr.client_id
                    fp_lexical_entry_object_id = lexentr.object_id
                    create_entity(fp_lexical_entry_client_id, fp_lexical_entry_object_id, field_ids[EAF_TIERS[word.tier]][0], field_ids[EAF_TIERS[word.tier]][1],
                        None, client, word.text, filename=None, storage=storage)

                    link_dict[cort] = (fp_lexical_entry_client_id, fp_lexical_entry_object_id)

                    for other_word in curr_dict[word]:
                        create_entity(fp_lexical_entry_client_id, fp_lexical_entry_object_id, field_ids[EAF_TIERS[other_word.tier]][0], field_ids[EAF_TIERS[other_word.tier]][1],
                            None, client, other_word.text, filename=None, storage=storage)
                    if not no_sound:
                        if word.time[1] < len(full_audio):
                            with tempfile.NamedTemporaryFile() as temp:
                                full_audio[ word.time[0]: word.time[1]].export(temp.name, format="wav")
                                audio_slice = temp.read()
                                create_entity(fp_lexical_entry_client_id, fp_lexical_entry_object_id, field_ids["Sound"][0], field_ids["Sound"][1],
                                    None, client, filename="%s.wav" %(word.index) , folder_name="sound1", content=base64.urlsafe_b64encode(audio_slice).decode(), storage=storage)
                                temp.flush()

                dubl_tuple = ((sp_lexical_entry_client_id, sp_lexical_entry_object_id), (fp_lexical_entry_client_id, fp_lexical_entry_object_id))
                if dubl_tuple not in dubl:
                    dubl.append(dubl_tuple)
                    create_entity(sp_lexical_entry_client_id, sp_lexical_entry_object_id, field_ids["Backref"][0], field_ids["Backref"][1],
                        None, client, filename=None, link_client_id=fp_lexical_entry_client_id, link_object_id=fp_lexical_entry_object_id, storage=storage)
                    create_entity(fp_lexical_entry_client_id, fp_lexical_entry_object_id, field_ids["Backref"][0], field_ids["Backref"][1],
                        None, client, filename=None, link_client_id=sp_lexical_entry_client_id, link_object_id=sp_lexical_entry_object_id, storage=storage)

    return
Exemplo n.º 50
0
def signup_post(request):  # tested
    try:
        req = request.json_body
        login = req['login']
        name = req['name']
        email = req['email']
        password = req['password']

        day = req.get('day')
        month = req.get('month')
        year = req.get('year')
        if day is None or month is None or year is None:
            request.response.status = HTTPBadRequest.code
            return {'Error': "day, month or year of birth is missing"}
        # birthday = datetime.datetime.strptime(day + month + year, "%d%m%Y").date()
        try:
            day = int(day)
            month = int(month)
            year = int(year)
            birthday = datetime.date(year, month, day)
        except ValueError:
            request.response.status = HTTPBadRequest.code
            return {'Error': "Invalid birthday"}

        if DBSession.query(User).filter_by(login=login).first():
            raise CommonException(
                "The user with this login is already registered")
        if DBSession.query(Email).filter_by(email=email).first():
            raise CommonException(
                "The user with this email is already registered")
        new_user = User(login=login,
                        name=name,
                        created_at=datetime.datetime.utcnow(),
                        intl_name=login,
                        birthday=birthday,
                        is_active=True)
        pwd = Passhash(password=password)
        email = Email(email=email)
        new_user.password = pwd
        new_user.email = email
        DBSession.add(new_user)
        basegroups = []
        basegroups += [
            DBSession.query(BaseGroup).filter_by(
                name="Can create dictionaries").first()
        ]
        basegroups += [
            DBSession.query(BaseGroup).filter_by(
                name="Can create languages").first()
        ]
        basegroups += [
            DBSession.query(BaseGroup).filter_by(
                name="Can create organizations").first()
        ]
        basegroups += [
            DBSession.query(BaseGroup).filter_by(
                name="Can create translation strings").first()
        ]
        groups = []
        for base in basegroups:
            groups += [
                DBSession.query(Group).filter_by(
                    subject_override=True, base_group_id=base.id).first()
            ]
        for group in groups:
            add_user_to_group(new_user, group)
        DBSession.flush()
        return {}

    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'status': request.response.status, 'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'status': request.response.status, 'error': str(e)}

    except ValueError as e:
        request.response.status = HTTPConflict.code
        return {'status': request.response.status, 'error': str(e)}
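For reference, a minimal registration body for this view could look like the sketch below; the key names are taken from the reads above and the values are illustrative.

# Illustrative payload only; every key shown is required by the handler above.
payload = {
    "login": "new_user",
    "name": "New User",
    "email": "new_user@example.com",
    "password": "s3cret",
    "day": 31,
    "month": 1,
    "year": 1990,
}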
Exemplo n.º 51
0
def move_lexical_entry_bulk(request):
    req = request.json_body
    real_delete = req.get(
        'real_delete')  # With great power comes great responsibility
    # Maybe there needs to be check for permission of some sort (can really delete only when updating dictionary)
    variables = {'auth': request.authenticated_userid}
    client = DBSession.query(Client).filter_by(id=variables['auth']).first()
    if not client:
        raise KeyError(
            "Invalid client id (not registered on server). Try to logout and then login.",
            variables['auth'])
    user = DBSession.query(User).filter_by(id=client.user_id).first()
    if not user:
        raise CommonException(
            "This client id is orphaned. Try to logout and then login once more."
        )
    groups = DBSession.query(Group)\
        .join(BaseGroup, BaseGroup.id == Group.base_group_id)\
        .filter(BaseGroup.subject == 'lexical_entries_and_entities')\
        .filter(BaseGroup.action == 'create')\
        .join(User, Group.users)\
        .filter(User.id == user.id)\
        .group_by(Group)\
        .order_by('subject_override')\
        .all()

    wat = [o for o in groups]
    override = False
    ids = [{
        'client_id': o.subject_client_id,
        'object_id': o.subject_object_id
    } for o in groups]
    for group in groups:
        if group.subject_override:
            override = True
            break
    for par in req['move_list']:
        cli_id = par['client_id']
        obj_id = par['object_id']
        parent = DBSession.query(LexicalEntry).filter_by(
            client_id=cli_id, object_id=obj_id).first()
        can = True
        if parent:
            if not override:
                if {
                        'client_id': parent.parent_client_id,
                        'object_id': parent.parent_object_id
                } not in ids:
                    can = False
            if can:
                for ent in par['lexical_entries']:
                    can = True
                    object_id = ent['object_id']
                    client_id = ent['client_id']
                    entry = DBSession.query(LexicalEntry).filter_by(
                        client_id=client_id, object_id=object_id).first()
                    if entry:
                        if not override:
                            if {
                                    'client_id': entry.parent_client_id,
                                    'object_id': entry.parent_object_id
                            } not in ids:
                                can = False

                        if can:
                            if entry:
                                if parent.moved_to is None:
                                    if entry.moved_to is None:

                                        if not entry.marked_for_deletion and not parent.marked_for_deletion:
                                            # l1e = DBSession.query(LevelOneEntity).filter_by(parent = entry).all()
                                            l1e = list()
                                            for entity in l1e:
                                                # ent = DBSession.query(LevelOneEntity)\
                                                #     .filter_by(parent=parent,
                                                #                entity_type=entity.entity_type,
                                                #                content = entity.content)\
                                                #     .first()
                                                ent = None
                                                if ent:
                                                    entity.marked_for_deletion = True
                                                    if real_delete:
                                                        for publent in entity.publishleveloneentity:
                                                            DBSession.delete(
                                                                publent)
                                                        DBSession.delete(
                                                            entity)
                                                        continue
                                                entity.parent = parent

                                                for publent in entity.publishleveloneentity:
                                                    publent.marked_for_deletion = True
                                                    publent.parent = parent
                                                DBSession.flush()
                                            # ge = DBSession.query(GroupingEntity).filter_by(parent = entry).all()
                                            ge = list()
                                            for entity in ge:
                                                entity.parent = parent
                                                for publent in entity.publishgroupingentity:
                                                    publent.marked_for_deletion = True
                                                    publent.parent = parent
                                                DBSession.flush()
                                            entry.moved_to = str(
                                                cli_id) + '/' + str(obj_id)
                                            entry.marked_for_deletion = True
    request.response.status = HTTPOk.code
    return {}
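Judging from the keys this view reads out of request.json_body, it expects a 'move_list' of target entries, each carrying the entries that should be merged into it. The sketch below is only a reconstruction from the code above, not part of the original source; every id in it is invented for illustration.

# Hypothetical payload for move_lexical_entry_bulk (all ids are made up).
move_request = {
    'real_delete': False,          # optional: duplicate entities would be hard-deleted (the lookup is currently stubbed out)
    'move_list': [
        {
            'client_id': 1,        # target lexical entry that survives the merge
            'object_id': 42,
            'lexical_entries': [   # entries whose content is moved into the target
                {'client_id': 1, 'object_id': 43},
                {'client_id': 2, 'object_id': 57},
            ],
        },
    ],
}

Entries are only re-parented when the caller holds a matching 'create' group (or a subject_override group); each moved entry then gets moved_to set to '<client_id>/<object_id>' of the target and is marked for deletion.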
Exemplo n.º 52
0
def move_lexical_entry(request):
    req = request.json_body
    variables = {'auth': request.authenticated_userid}
    client = DBSession.query(Client).filter_by(id=variables['auth']).first()
    if not client:
        raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                       variables['auth'])
    user = DBSession.query(User).filter_by(id=client.user_id).first()
    if not user:
        raise CommonException("This client id is orphaned. Try to logout and then login once more.")
    object_id = request.matchdict.get('object_id')
    client_id = request.matchdict.get('client_id')
    cli_id = req['client_id']
    obj_id = req['object_id']
    real_delete = req.get('real_delete')   # With great power comes great responsibility
    # Maybe there should be a permission check of some sort (real deletion should only be allowed when updating the dictionary)
    entry = DBSession.query(LexicalEntry).filter_by(client_id=client_id, object_id=object_id).first()
    parent = DBSession.query(LexicalEntry).filter_by(client_id=cli_id, object_id=obj_id).first()
    if entry and parent:
        groupoverride = DBSession.query(Group)\
            .filter_by(subject_override=True)\
            .join(BaseGroup)\
            .filter_by(subject='lexical_entries_and_entities')\
            .first()
        group = DBSession.query(Group)\
            .filter_by(subject_client_id=parent.parent_client_id, subject_object_id=parent.parent_object_id)\
            .join(BaseGroup)\
            .filter_by(subject='lexical_entries_and_entities')\
            .first()
        if user not in groupoverride.users and user not in group.users:
            raise CommonException("You should only move to lexical entries you own")
        if parent.moved_to is None:
            if entry.moved_to is None:

                if not entry.marked_for_deletion and not parent.marked_for_deletion:
                    # l1e = DBSession.query(LevelOneEntity).filter_by(parent = entry).all()
                    l1e = list()
                    for entity in l1e:
                        # ent = DBSession.query(LevelOneEntity)\
                        #     .filter_by(parent=parent, entity_type=entity.entity_type, content = entity.content)\
                        #     .first()
                        ent = None
                        if ent:
                            entity.marked_for_deletion = True
                            if real_delete:
                                for publent in entity.publishleveloneentity:
                                    DBSession.delete(publent)
                                DBSession.delete(entity)
                                continue
                        entity.parent = parent

                        for publent in entity.publishleveloneentity:
                            publent.marked_for_deletion = True
                            publent.parent = parent
                        DBSession.flush()
                    # ge = DBSession.query(GroupingEntity).filter_by(parent = entry).all()
                    ge = list()
                    for entity in ge:
                        entity.parent = parent
                        for publent in entity.publishgroupingentity:
                            publent.marked_for_deletion = True
                            publent.parent = parent
                        DBSession.flush()
                    entry.moved_to = str(cli_id) + '/' + str(obj_id)
                    entry.marked_for_deletion = True
                    request.response.status = HTTPOk.code
                    return {}
    request.response.status = HTTPNotFound.code
    return {'error': str("No such lexical entry in the system")}
Exemplo n.º 53
0
def move_lexical_entry(request):
    req = request.json_body
    variables = {'auth': request.authenticated_userid}
    client = DBSession.query(Client).filter_by(id=variables['auth']).first()
    if not client:
        raise KeyError(
            "Invalid client id (not registered on server). Try to logout and then login.",
            variables['auth'])
    user = DBSession.query(User).filter_by(id=client.user_id).first()
    if not user:
        raise CommonException(
            "This client id is orphaned. Try to logout and then login once more."
        )
    object_id = request.matchdict.get('object_id')
    client_id = request.matchdict.get('client_id')
    cli_id = req['client_id']
    obj_id = req['object_id']
    real_delete = req.get(
        'real_delete')  # With great power comes great responsibility
    # Maybe there should be a permission check of some sort (real deletion should only be allowed when updating the dictionary)
    entry = DBSession.query(LexicalEntry).filter_by(
        client_id=client_id, object_id=object_id).first()
    parent = DBSession.query(LexicalEntry).filter_by(client_id=cli_id,
                                                     object_id=obj_id).first()
    if entry and parent:
        groupoverride = DBSession.query(Group)\
            .filter_by(subject_override=True)\
            .join(BaseGroup)\
            .filter_by(subject='lexical_entries_and_entities')\
            .first()
        group = DBSession.query(Group)\
            .filter_by(subject_client_id=parent.parent_client_id, subject_object_id=parent.parent_object_id)\
            .join(BaseGroup)\
            .filter_by(subject='lexical_entries_and_entities')\
            .first()
        if user not in groupoverride.users and user not in group.users:
            raise CommonException(
                "You should only move to lexical entires you own")
        if parent.moved_to is None:
            if entry.moved_to is None:

                if not entry.marked_for_deletion and not parent.marked_for_deletion:
                    # l1e = DBSession.query(LevelOneEntity).filter_by(parent = entry).all()
                    l1e = list()
                    for entity in l1e:
                        # ent = DBSession.query(LevelOneEntity)\
                        #     .filter_by(parent=parent, entity_type=entity.entity_type, content = entity.content)\
                        #     .first()
                        ent = None
                        if ent:
                            entity.marked_for_deletion = True
                            if real_delete:
                                for publent in entity.publishleveloneentity:
                                    DBSession.delete(publent)
                                DBSession.delete(entity)
                                continue
                        entity.parent = parent

                        for publent in entity.publishleveloneentity:
                            publent.marked_for_deletion = True
                            publent.parent = parent
                        DBSession.flush()
                    # ge = DBSession.query(GroupingEntity).filter_by(parent = entry).all()
                    ge = list()
                    for entity in ge:
                        entity.parent = parent
                        for publent in entity.publishgroupingentity:
                            publent.marked_for_deletion = True
                            publent.parent = parent
                        DBSession.flush()
                    entry.moved_to = str(cli_id) + '/' + str(obj_id)
                    entry.marked_for_deletion = True
                    request.response.status = HTTPOk.code
                    return {}
    request.response.status = HTTPNotFound.code
    return {'error': str("No such lexical entry in the system")}
Exemplo n.º 54
0
def move_lexical_entry_bulk(request):
    req = request.json_body
    real_delete = req.get('real_delete')  # With great power comes great responsibility
    # Maybe there should be a permission check of some sort (real deletion should only be allowed when updating the dictionary)
    variables = {'auth': request.authenticated_userid}
    client = DBSession.query(Client).filter_by(id=variables['auth']).first()
    if not client:
        raise KeyError("Invalid client id (not registered on server). Try to logout and then login.",
                       variables['auth'])
    user = DBSession.query(User).filter_by(id=client.user_id).first()
    if not user:
        raise CommonException("This client id is orphaned. Try to logout and then login once more.")
    groups = DBSession.query(Group)\
        .join(BaseGroup, BaseGroup.id == Group.base_group_id)\
        .filter(BaseGroup.subject == 'lexical_entries_and_entities')\
        .filter(BaseGroup.action == 'create')\
        .join(User, Group.users)\
        .filter(User.id == user.id)\
        .group_by(Group)\
        .order_by('subject_override')\
        .all()

    override = False
    ids = [{'client_id': o.subject_client_id,'object_id': o.subject_object_id} for o in groups]
    for group in groups:
        if group.subject_override:
            override = True
            break
    for par in req['move_list']:
        cli_id = par['client_id']
        obj_id = par['object_id']
        parent = DBSession.query(LexicalEntry).filter_by(client_id=cli_id, object_id=obj_id).first()
        can = True
        if parent:
            if not override:
                if {'client_id': parent.parent_client_id, 'object_id': parent.parent_object_id} not in ids:
                    can = False
            if can:
                for ent in par['lexical_entries']:
                    can = True
                    object_id = ent['object_id']
                    client_id = ent['client_id']
                    entry = DBSession.query(LexicalEntry).filter_by(client_id=client_id, object_id=object_id).first()
                    if entry:
                        if not override:
                            if {'client_id': entry.parent_client_id, 'object_id': entry.parent_object_id} not in ids:
                                can = False

                        if can:
                            if entry:
                                if parent.moved_to is None:
                                    if entry.moved_to is None:

                                        if not entry.marked_for_deletion and not parent.marked_for_deletion:
                                            # l1e = DBSession.query(LevelOneEntity).filter_by(parent = entry).all()
                                            l1e = list()
                                            for entity in l1e:
                                                # ent = DBSession.query(LevelOneEntity)\
                                                #     .filter_by(parent=parent,
                                                #                entity_type=entity.entity_type,
                                                #                content = entity.content)\
                                                #     .first()
                                                ent = None
                                                if ent:
                                                    entity.marked_for_deletion = True
                                                    if real_delete:
                                                        for publent in entity.publishleveloneentity:
                                                            DBSession.delete(publent)
                                                        DBSession.delete(entity)
                                                        continue
                                                entity.parent = parent

                                                for publent in entity.publishleveloneentity:
                                                    publent.marked_for_deletion = True
                                                    publent.parent = parent
                                                DBSession.flush()
                                            # ge = DBSession.query(GroupingEntity).filter_by(parent = entry).all()
                                            ge = list()
                                            for entity in ge:
                                                entity.parent = parent
                                                for publent in entity.publishgroupingentity:
                                                    publent.marked_for_deletion = True
                                                    publent.parent = parent
                                                DBSession.flush()
                                            entry.moved_to = str(cli_id) + '/' + str(obj_id)
                                            entry.marked_for_deletion = True
    request.response.status = HTTPOk.code
    return {}
Exemplo n.º 55
0
def create_language(request):  # tested & in docs
    try:
        variables = {'auth': request.authenticated_userid}

        req = request.json_body
        try:
            parent_client_id = req['parent_client_id']
            parent_object_id = req['parent_object_id']
        except KeyError:
            parent_client_id = None
            parent_object_id = None
        translation_gist_client_id = req['translation_gist_client_id']
        translation_gist_object_id = req['translation_gist_object_id']
        client = DBSession.query(Client).filter_by(
            id=variables['auth']).first()
        object_id = req.get('object_id', None)
        if not client:
            raise KeyError(
                "Invalid client id (not registered on server). Try to logout and then login.",
                variables['auth'])
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException(
                "This client id is orphaned. Try to logout and then login once more."
            )

        parent = None
        if parent_client_id and parent_object_id:
            parent = DBSession.query(Language).filter_by(
                client_id=parent_client_id,
                object_id=parent_object_id).first()
        language = Language(
            client_id=variables['auth'],
            object_id=object_id,
            translation_gist_client_id=translation_gist_client_id,
            translation_gist_object_id=translation_gist_object_id)
        DBSession.add(language)
        if parent:
            language.parent = parent
        DBSession.flush()
        basegroups = []
        basegroups += [
            DBSession.query(BaseGroup).filter_by(
                name="Can edit languages").first()
        ]
        basegroups += [
            DBSession.query(BaseGroup).filter_by(
                name="Can delete languages").first()
        ]
        if not object_id:
            groups = []
            for base in basegroups:
                group = Group(subject_client_id=language.client_id,
                              subject_object_id=language.object_id,
                              parent=base)
                groups += [group]
            for group in groups:
                add_user_to_group(user, group)
        request.response.status = HTTPOk.code
        return {
            'object_id': language.object_id,
            'client_id': language.client_id
        }
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
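The payload for create_language mirrors what the view pulls from request.json_body: a required translation gist reference, an optional parent language and an optional preset object_id. A minimal sketch with invented ids:

# Hypothetical request body for create_language (all ids invented for illustration).
language_payload = {
    'translation_gist_client_id': 1,   # required: translation gist for the new language
    'translation_gist_object_id': 5,
    'parent_client_id': 1,             # optional: parent language
    'parent_object_id': 2,
    # 'object_id': 10,                 # optional: when supplied, the edit/delete groups are not created
}

On success the view answers with the new language's client_id and object_id.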
Exemplo n.º 56
0
def bulk_group_entities(request):  # tested
    try:
        variables = {'auth': authenticated_userid(request)}
        response = dict()
        req = request.json_body
        client = DBSession.query(Client).filter_by(
            id=variables['auth']).first()
        field_client_id = req['field_client_id']
        field_object_id = req['field_object_id']
        counter = req['counter']
        field = DBSession.query(Field).\
            filter_by(client_id=field_client_id, object_id=field_object_id).first()

        if not client:
            raise KeyError(
                "Invalid client id (not registered on server). Try to logout and then login."
            )
        user = DBSession.query(User).filter_by(id=client.user_id).first()
        if not user:
            raise CommonException(
                "This client id is orphaned. Try to logout and then login once more."
            )
        client.counter = counter
        DBSession.flush()
        if not field:
            request.response.status = HTTPNotFound.code
            return {'error': str("No such field in the system")}
        if field.data_type != 'Grouping Tag':
            raise KeyError("wrong field data type")

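        # First pass: ensure each descriptor in req['tag_groups'] has a matching tag Entity,
        # creating missing ones and auto-accepting them when the user holds a 'create' group
        # for the lexical entry's parent.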
        for tag in req['tag_groups']:
            for tag_ent in req['tag_groups'][tag]:
                tag_entity = DBSession.query(Entity) \
                    .join(Entity.field) \
                    .filter(Entity.parent_client_id == tag_ent['parent_client_id'],
                            Entity.parent_object_id == tag_ent['parent_object_id'],
                            Field.client_id == tag_ent['field_client_id'],
                            Field.object_id == tag_ent['field_object_id'],
                            Entity.content == tag).first()
                if not tag_entity:
                    tag_entity = Entity(
                        client_id=client.id,
                        object_id=tag_ent['object_id'],
                        field=field,
                        content=tag_ent['content'],
                        parent_client_id=tag_ent['parent_client_id'],
                        parent_object_id=tag_ent['parent_object_id'])
                    lex = DBSession.query(LexicalEntry).filter_by(
                        client_id=tag_ent['parent_client_id'],
                        object_id=tag_ent['parent_object_id']).one()
                    group = DBSession.query(Group).join(BaseGroup).filter(
                        BaseGroup.subject == 'lexical_entries_and_entities',
                        Group.subject_client_id == lex.parent_client_id,
                        Group.subject_object_id == lex.parent_object_id,
                        BaseGroup.action == 'create').one()
                    if user in group.users:
                        tag_entity.publishingentity.accepted = True
        # if 'tag' in req:
        #     tags.append(req['tag'])

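        # Second pass: starting from the first entry of each tag group, collect every connected
        # tag, find the lexical entries already sharing those tags, and make sure each of them
        # carries the complete tag set.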
        for tag in req['tag_groups']:
            tags = list()
            tag_ent = req['tag_groups'][tag][0]
            parent = DBSession.query(LexicalEntry).\
                filter_by(client_id=tag_ent['parent_client_id'], object_id=tag_ent['parent_object_id']).first()
            if not parent:
                request.response.status = HTTPNotFound.code
                return {'error': str("No such lexical entry in the system")}
            par_tags = find_all_tags(parent, field_client_id, field_object_id)
            for tag in par_tags:
                if tag not in tags:
                    tags.append(tag)
            lexical_entries = find_lexical_entries_by_tags(
                tags, field_client_id, field_object_id)
            if parent not in lexical_entries:
                lexical_entries.append(parent)

            for lex in lexical_entries:
                for tag in tags:
                    tag_entity = DBSession.query(Entity) \
                        .join(Entity.field) \
                        .filter(Entity.parent == lex,
                                Field.client_id == field_client_id,
                                Field.object_id == field_object_id,
                                Entity.content == tag).first()
                    if not tag_entity:
                        tag_entity = Entity(client_id=client.id,
                                            field=field,
                                            content=tag,
                                            parent=lex)

                        group = DBSession.query(Group).join(BaseGroup).filter(
                            BaseGroup.subject ==
                            'lexical_entries_and_entities',
                            Group.subject_client_id == lex.parent_client_id,
                            Group.subject_object_id == lex.parent_object_id,
                            BaseGroup.action == 'create').one()
                        if user in group.users:
                            tag_entity.publishingentity.accepted = True
        request.response.status = HTTPOk.code
        response['counter'] = client.counter
        return response
    except KeyError as e:
        request.response.status = HTTPBadRequest.code
        return {'error': str(e)}

    except IntegrityError as e:
        request.response.status = HTTPInternalServerError.code
        return {'error': str(e)}

    except CommonException as e:
        request.response.status = HTTPConflict.code
        return {'error': str(e)}
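The shape of the tag_groups payload follows from the keys the view reads: a grouping-tag field reference, a client-side counter, and a mapping from tag strings to descriptors of the entities that should carry them. A hypothetical body (ids and the tag string are invented):

# Hypothetical request body for bulk_group_entities (ids and the tag string are invented).
grouping_payload = {
    'field_client_id': 66,        # must reference a field whose data_type is 'Grouping Tag'
    'field_object_id': 25,
    'counter': 120,               # stored back onto the Client row
    'tag_groups': {
        'etymology-3f2a7c': [     # tag string -> descriptors of the entities carrying it
            {
                'parent_client_id': 1,     # lexical entry the tag entity belongs to
                'parent_object_id': 42,
                'field_client_id': 66,
                'field_object_id': 25,
                'object_id': 121,
                'content': 'etymology-3f2a7c',
            },
        ],
    },
}

On success the view echoes the updated counter back to the caller.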