Code example #1
def insert_properties(properties: Set[str],
                      session: scoped_session) -> Optional[Any]:
    """Insert all the properties as defined in the APIDocumentation into DB."""
    prop_list = [
        BaseProperty(name=prop) for prop in properties
        if not session.query(exists().where(
            BaseProperty.name == prop)).scalar()
    ]
    session.add_all(prop_list)
    session.commit()
    return None


# if __name__ == "__main__":
#     Session = sessionmaker(bind=engine)
#     session = Session()
#
#     doc = doc_gen("test", "test")
#     # Extract all classes with supportedProperty from both
#     classes = get_classes(doc.generate())
#
#     # Extract all properties from both
#     # import pdb; pdb.set_trace()
#     properties = get_all_properties(classes)
#     # Add all the classes
#     insert_classes(classes, session)
#     print("Classes inserted successfully")
#     # Add all the properties
#     insert_properties(properties, session)
#     print("Properties inserted successfully")
Code example #2
File: broadcast_test.py Project: earlbread/nekoyume
def test_broadcast_move(
        fx_server: WSGIServer,
        fx_session: scoped_session,
        fx_other_server: WSGIServer,
        fx_other_session: Session,
        fx_user: User,
        fx_novice_status: typing.Mapping[str, str],
):
    now = datetime.datetime.utcnow()
    node = Node(url=fx_server.url,
                last_connected_at=now)
    node2 = Node(url=fx_other_server.url,
                 last_connected_at=datetime.datetime.utcnow())
    move = fx_user.create_novice(fx_novice_status)
    fx_session.add_all([node, node2, move])
    fx_session.commit()
    assert not fx_other_session.query(Move).get(move.id)
    serialized = move.serialize(
        use_bencode=False,
        include_signature=True,
        include_id=True,
    )
    multicast(serialized=serialized, broadcast=broadcast_move)
    assert fx_other_session.query(Move).get(move.id)
    assert node.last_connected_at > now
Code example #3
def delete_object(query_info: Dict[str, str],
                  session: scoped_session,
                  collection: bool = False) -> None:
    """
    Delete the object from the database
    :param query_info: Dict containing the id and @type of the object that has to be deleted
    :param session: sqlalchemy session
    :param collection: True if the type_ is of a collection, False for any other class
    """
    type_ = query_info["@type"]
    id_ = query_info["id_"]
    database_class = get_database_class(type_)
    if collection:
        try:
            objects = session.query(database_class).filter_by(
                collection_id=id_).delete()
        except NoResultFound:
            raise InstanceNotFound(type_=type_, id_=id_)
        try:
            session.commit()
        except InvalidRequestError:
            session.rollback()
        return id_
    else:
        try:
            object_ = (session.query(database_class).filter(
                database_class.id == id_).one())
        except NoResultFound:
            raise InstanceNotFound(type_=type_, id_=id_)
        session.delete(object_)
        try:
            session.commit()
        except InvalidRequestError:
            session.rollback()
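
The non-collection branch above is a common query/one()/delete pattern: fetch exactly one row, translate NoResultFound into a domain error, then commit with a rollback fallback. A self-contained sketch of the same pattern, assuming SQLAlchemy 1.4+ (the Item model and delete_item function are illustrative stand-ins, not hydrus code):

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.exc import InvalidRequestError
from sqlalchemy.orm import declarative_base, scoped_session, sessionmaker
from sqlalchemy.orm.exc import NoResultFound

Base = declarative_base()

class Item(Base):
    # Stand-in model for this sketch only.
    __tablename__ = "item"
    id = Column(Integer, primary_key=True)
    name = Column(String)

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session = scoped_session(sessionmaker(bind=engine))
session.add(Item(id=1, name="drone"))
session.commit()

def delete_item(id_: int, session: scoped_session) -> None:
    try:
        item = session.query(Item).filter(Item.id == id_).one()
    except NoResultFound:
        raise ValueError(f"no Item with id {id_}")
    session.delete(item)
    try:
        session.commit()
    except InvalidRequestError:
        session.rollback()

delete_item(1, session)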
Code example #4
def test_move_broadcast(fx_session: scoped_session, fx_user: User,
                        fx_novice_status: typing.Mapping[str, str]):
    move = fx_user.create_novice(fx_novice_status)
    fx_session.add(move)
    fx_session.commit()
    with unittest.mock.patch('nekoyume.tasks.multicast') as m:
        move_broadcast(move.id,
                       'http://localhost:5000',
                       'http://localhost:5001',
                       session=fx_session)
        serialized = move.serialize(
            use_bencode=False,
            include_signature=True,
            include_id=True,
        )
        assert m.called
        args = m.call_args[1]
        assert serialized == args['serialized']
        assert isinstance(args['sent_node'], Node)
        assert args['sent_node'].url == 'http://localhost:5000'
        assert isinstance(args['my_node'], Node)
        assert args['my_node'].url == 'http://localhost:5001'
        broadcast = args['broadcast']
        assert isinstance(broadcast, typing.Callable)
        assert broadcast.__name__ == 'broadcast_move'
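
The test above relies on unittest.mock.patch plus call_args to verify keyword arguments without touching the network. A minimal, self-contained sketch of that pattern (notify and run_task are made-up stand-ins, not nekoyume code):

import unittest.mock

def notify(payload, channel):
    # Stand-in for a real side effect; patched out below.
    raise RuntimeError("should not run in tests")

def run_task(payload):
    # Code under test: delegates to notify with keyword arguments.
    notify(payload=payload, channel="moves")

with unittest.mock.patch(f"{__name__}.notify") as m:
    run_task({"id": 1})
    assert m.called
    kwargs = m.call_args[1]          # keyword arguments of the recorded call
    assert kwargs["payload"] == {"id": 1}
    assert kwargs["channel"] == "moves"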
Code example #5
File: crud.py Project: zelzhan/hydrus
def delete_multiple(id_: str, type_: str,
                    session: scoped_session) -> None:
    """
    To delete multiple rows in a single request
    :param id_: comma-separated string of ids for the objects to be deleted
    :param type_: type of object to be deleted
    :param session: sqlalchemy scoped session

    Raises:
        ClassNotFound: If `type_` does not represent a valid/defined RDFClass.
        InstanceNotFound: If any instance with type 'type_' and any id in 'id_' list
            does not exist.

    """
    id_ = id_.split(',')
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == type_).one()
    except NoResultFound:
        raise ClassNotFound(type_=type_)

    instances = list()
    data_III = list()
    data_IAC = list()
    data_IIT = list()

    for index in id_:
        try:
            instance = session.query(Instance).filter(
                Instance.id == index, Instance.type_ == rdf_class.id).one()
            instances.append(instance)
        except NoResultFound:
            raise InstanceNotFound(type_=rdf_class.name, id_=index)
        data_IIT += session.query(triples).filter(
            triples.GraphIIT.subject == index).all()
        data_IAC += session.query(triples).filter(
            triples.GraphIAC.subject == index).all()
        data_III += session.query(triples).filter(
            triples.GraphIII.subject == index).all()

    data = data_III + data_IIT + data_IAC
    for item in data:
        session.delete(item)

    for data in data_IIT:
        terminal = session.query(Terminal).filter(
            Terminal.id == data.object_).one()
        session.delete(terminal)

    for data in data_III:
        III_instance = session.query(Instance).filter(
            Instance.id == data.object_).one()
        III_instance_type = session.query(RDFClass).filter(
            RDFClass.id == III_instance.type_).one()
        # Get the III object type_
        delete(III_instance.id, III_instance_type.name, session=session)
    for instance in instances:
        session.delete(instance)
    session.commit()
Code example #6
File: broadcast_test.py Project: ipdae/nekoyume
def test_broadcast_node_same_url(fx_session: scoped_session):
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    fx_session.add(node)
    fx_session.commit()
    with Mocker() as m:
        broadcast_node(serialized={'url': url}, sent_node=node)
        assert not m.called
    assert node.last_connected_at == now
Code example #7
File: crud.py Project: raoashish10/hydrus
def insert_modification_record(method: str, resource_url: str,
                               session: scoped_session) -> int:
    """
    Insert a modification record into the database.
    :param method: HTTP method type of related operation.
    :param resource_url: URL of resource modified.
    :param session: sqlalchemy session.
    :return: ID of new modification record.
    """
    modification = Modification(method=method, resource_url=resource_url)
    session.add(modification)
    session.commit()
    return modification.job_id
Code example #8
File: broadcast_test.py Project: ipdae/nekoyume
def test_broadcast_my_node(fx_session: scoped_session):
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    fx_session.add(node)
    fx_session.commit()
    with Mocker() as m:
        m.post('http://test.neko/nodes', json={'result': 'success'})
        broadcast_node({'url': url}, my_node=node)
        assert node.last_connected_at > now
        # check request.json value
        assert m.request_history[0].json() == {
            'url': 'http://test.neko',
            'sent_node': 'http://test.neko'
        }
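
The Mocker used in these tests appears to be requests_mock.Mocker. A short self-contained sketch of that usage, assuming requests and requests_mock are installed (the URL and payload are illustrative only):

import requests
from requests_mock import Mocker

with Mocker() as m:
    # Register a fake endpoint, then inspect what the client actually sent.
    m.post("http://test.neko/nodes", json={"result": "success"})
    resp = requests.post("http://test.neko/nodes",
                         json={"url": "http://test.neko",
                               "sent_node": "http://test.neko"})
    assert resp.json() == {"result": "success"}
    assert m.called
    assert m.request_history[0].json() == {
        "url": "http://test.neko",
        "sent_node": "http://test.neko",
    }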
Code example #9
def delete(id_: str, type_: str, session: scoped_session) -> None:
    """Delete an Instance and all its relations from DB given id [DELETE].
    :param id_: id of object to be deleted
    :param type_: type of object to be deleted
    :param session: sqlalchemy scoped session

    Raises:
        ClassNotFound: If `type_` does not represent a valid/defined RDFClass.
        InstanceNotFound: If no instance of type `type_` with id `id_` exists.

    """
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == type_).one()
    except NoResultFound:
        raise ClassNotFound(type_=type_)
    try:
        instance = session.query(Instance).filter(
            Instance.id == id_, Instance.type_ == rdf_class.id).one()
    except NoResultFound:
        raise InstanceNotFound(type_=rdf_class.name, id_=id_)

    data_IIT = session.query(triples).filter(
        triples.GraphIIT.subject == id_).all()
    data_IAC = session.query(triples).filter(
        triples.GraphIAC.subject == id_).all()
    data_III = session.query(triples).filter(
        triples.GraphIII.subject == id_).all()

    data = data_III + data_IIT + data_IAC
    for item in data:
        session.delete(item)

    for data in data_IIT:
        terminal = session.query(Terminal).filter(
            Terminal.id == data.object_).one()
        session.delete(terminal)

    for data in data_III:
        III_instance = session.query(Instance).filter(
            Instance.id == data.object_).one()
        III_instance_type = session.query(RDFClass).filter(
            RDFClass.id == III_instance.type_).one()
        # Get the III object type_
        delete(III_instance.id, III_instance_type.name, session=session)

    session.delete(instance)
    session.commit()
Code example #10
File: broadcast_test.py Project: ipdae/nekoyume
def broadcast_move_failed(fx_session: scoped_session, fx_user: User,
                          fx_novice_status: typing.Mapping[str, str], error):
    now = datetime.datetime.utcnow()
    move = fx_user.create_novice(fx_novice_status)
    node = Node(url='http://test.neko', last_connected_at=now)
    fx_session.add_all([node, move])
    fx_session.commit()
    with Mocker() as m:
        serialized = move.serialize(
            use_bencode=False,
            include_signature=True,
            include_id=True,
        )
        m.post('http://test.neko', exc=error)
        broadcast_move(serialized=serialized)
    assert node.last_connected_at == now
Code example #11
File: broadcast_test.py Project: ipdae/nekoyume
def broadcast_node_failed(fx_session: scoped_session,
                          fx_other_session: Session, error):
    now = datetime.datetime.utcnow()
    node = Node(url='http://test.neko', last_connected_at=now)
    node2 = Node(url='http://other.neko',
                 last_connected_at=datetime.datetime.utcnow())
    fx_session.add(node)
    fx_session.commit()
    fx_other_session.add(node2)
    fx_other_session.commit()
    assert not fx_session.query(Node).filter(Node.url == node2.url).first()
    with Mocker() as m:
        m.post('http://test.neko', exc=error)
        broadcast_node(serialized={'url': fx_other_server.url})
    assert not fx_session.query(Node).filter(Node.url == node2.url).first()
    assert node.last_connected_at == now
Code example #12
File: broadcast_test.py Project: ipdae/nekoyume
def test_broadcast_move_same_url(fx_session: scoped_session, fx_user: User,
                                 fx_novice_status: typing.Mapping[str, str]):
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    move = fx_user.create_novice(fx_novice_status)
    fx_session.add_all([node, move])
    fx_session.commit()
    with Mocker() as m:
        serialized = move.serialize(
            use_bencode=False,
            include_signature=True,
            include_id=True,
        )
        broadcast_move(serialized=serialized, sent_node=node)
        assert not m.called
    assert node.last_connected_at == now
Code example #13
File: broadcast_test.py Project: ipdae/nekoyume
def test_broadcast_node(
    fx_server: WSGIServer,
    fx_session: scoped_session,
    fx_other_server: WSGIServer,
    fx_other_session: Session,
):
    now = datetime.datetime.utcnow()
    node = Node(url=fx_server.url, last_connected_at=now)
    node2 = Node(url=fx_other_server.url,
                 last_connected_at=datetime.datetime.utcnow())
    fx_session.add(node)
    fx_session.commit()
    fx_other_session.add(node2)
    fx_other_session.commit()
    assert not fx_session.query(Node).filter(Node.url == node2.url).first()
    broadcast_node(serialized={'url': fx_other_server.url})
    assert fx_session.query(Node).filter(Node.url == node2.url).first()
    assert node.last_connected_at > now
Code example #14
File: api_test.py Project: earlbread/nekoyume
def test_post_block_return_block_id(fx_test_client: FlaskClient, fx_user: User,
                                    fx_session: scoped_session):
    block = Block.create(fx_user, [])
    fx_session.add(block)
    fx_session.commit()
    block2 = Block.create(fx_user, [])
    des = block2.serialize(use_bencode=False,
                           include_suffix=True,
                           include_moves=True,
                           include_hash=True)
    des['id'] = 3
    resp = fx_test_client.post('/blocks',
                               data=json.dumps(des),
                               content_type='application/json')
    assert resp.status_code == 403
    data = json.loads(resp.get_data())
    assert data['result'] == 'failed'
    assert data['message'] == "new block isn't our next block."
    assert data['block_id'] == 2
Code example #15
def insert_classes(classes: List[Dict[str, Any]],
                   session: scoped_session) -> Optional[Any]:
    """Insert all the classes as defined in the APIDocumentation into DB."""
    # print(session.query(exists().where(RDFClass.name == "Datastream")).scalar())
    class_list = [
        RDFClass(name=class_["label"].strip('.')) for class_ in classes
        if "label" in class_ and not session.query(exists().where(
            RDFClass.name == class_["label"].strip('.'))).scalar()
    ]

    class_list = class_list + [
        RDFClass(name=class_["title"].strip('.')) for class_ in classes
        if "title" in class_ and not session.query(exists().where(
            RDFClass.name == class_["title"].strip('.'))).scalar()
    ]
    # print(class_list)
    session.add_all(class_list)
    session.commit()
    return None
Code example #16
File: tasks_test.py Project: ipdae/nekoyume
def test_block_broadcast(fx_session: scoped_session, fx_user: User):
    block = Block.create(fx_user, [])
    fx_session.add(block)
    fx_session.commit()
    with unittest.mock.patch('nekoyume.tasks.broadcast_block') as m:
        block_broadcast(block.id,
                        'http://localhost:5000',
                        'http://localhost:5001',
                        session=fx_session)
        serialized = block.serialize(use_bencode=False,
                                     include_suffix=True,
                                     include_moves=True,
                                     include_hash=True)
        assert m.called
        args = m.call_args[1]
        assert serialized == args['serialized']
        assert isinstance(args['sent_node'], Node)
        assert args['sent_node'].url == 'http://localhost:5000'
        assert isinstance(args['my_node'], Node)
        assert args['my_node'].url == 'http://localhost:5001'
Code example #17
File: broadcast_test.py Project: ipdae/nekoyume
def test_broadcast_move_my_node(fx_session: scoped_session, fx_user: User,
                                fx_novice_status: typing.Mapping[str, str]):
    url = 'http://test.neko'
    now = datetime.datetime.utcnow()
    node = Node(url=url, last_connected_at=now)
    move = fx_user.create_novice(fx_novice_status)
    fx_session.add_all([node, move])
    fx_session.commit()
    with Mocker() as m:
        m.post('http://test.neko/moves', json={'result': 'success'})
        expected = serialized = move.serialize(
            use_bencode=False,
            include_signature=True,
            include_id=True,
        )
        broadcast_move(serialized=serialized, my_node=node)
        expected['sent_node'] = 'http://test.neko'
        assert node.last_connected_at > now
        # check request.json value
        assert m.request_history[0].json() == expected
Code example #18
File: crud.py Project: thesagarsehgal/hydrus
def delete(id_: int, type_: str, session: scoped_session) -> None:
    """Delete an Instance and all its relations from DB given id [DELETE]."""
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == type_).one()
    except NoResultFound:
        print(type_)
        raise ClassNotFound(type_=type_)
    try:
        instance = session.query(Instance).filter(
            Instance.id == id_, Instance.type_ == rdf_class.id).one()
    except NoResultFound:
        raise InstanceNotFound(type_=rdf_class.name, id_=id_)

    data_IIT = session.query(triples).filter(
        triples.GraphIIT.subject == id_).all()
    data_IAC = session.query(triples).filter(
        triples.GraphIAC.subject == id_).all()
    data_III = session.query(triples).filter(
        triples.GraphIII.subject == id_).all()

    data = data_III + data_IIT + data_IAC
    for item in data:
        session.delete(item)

    for data in data_IIT:
        terminal = session.query(Terminal).filter(
            Terminal.id == data.object_).one()
        session.delete(terminal)

    for data in data_III:
        III_instance = session.query(Instance).filter(
            Instance.id == data.object_).one()
        III_instance_type = session.query(RDFClass).filter(
            RDFClass.id == III_instance.type_).one()
        # Get the III object type_
        delete(III_instance.id, III_instance_type.name, session=session)

    session.delete(instance)
    session.commit()
Code example #19
async def fix_directories(
    posts: list[api_table],
    subscription: user_types,
    database_session: scoped_session,
    api_type: str,
):
    new_directories = []
    authed = subscription.get_authed()
    api = authed.api
    site_settings = api.get_site_settings()

    async def fix_directories2(
        post: api_table, media_db: list[template_media_table], all_files: list[Path]
    ):
        delete_rows = []
        final_api_type = (
            os.path.join("Archived", api_type) if post.archived else api_type
        )
        post_id = post.post_id
        media_db = [x for x in media_db if x.post_id == post_id]
        for media in media_db:
            media_id = media.media_id
            if media.link:
                url_path = urlparse.urlparse(media.link).path
                url_path = Path(url_path)
            else:
                url_path = Path(media.filename)
            new_filename = url_path.name
            original_filename, ext = (url_path.stem, url_path.suffix)
            ext = ext.replace(".", "")

            file_directory_format = site_settings.file_directory_format
            filename_format = site_settings.filename_format
            date_format = site_settings.date_format
            text_length = site_settings.text_length
            download_path = subscription.directory_manager.root_download_directory
            option = {}
            option["site_name"] = api.site_name
            option["post_id"] = post_id
            option["media_id"] = media_id
            option["profile_username"] = authed.username
            option["model_username"] = subscription.username
            option["api_type"] = final_api_type
            option["media_type"] = media.media_type
            option["filename"] = original_filename
            option["ext"] = ext
            option["text"] = post.text
            option["postedAt"] = media.created_at
            option["price"] = post.price
            option["date_format"] = date_format
            option["text_length"] = text_length
            option["directory"] = download_path
            option["preview"] = media.preview
            option["archived"] = post.archived
            prepared_format = prepare_reformat(option)
            file_directory = await prepared_format.reformat_2(file_directory_format)
            prepared_format.directory = file_directory
            old_filepath = ""
            if media.linked:
                filename_format = filename_format.with_name(f"linked_{filename_format}")
            new_filepath = await prepared_format.reformat_2(filename_format)
            old_filepaths = [
                x
                for x in all_files
                if original_filename in x.name and x.parts != new_filepath.parts
            ]
            if not old_filepaths:
                old_filepaths = [x for x in all_files if str(media_id) in x.name]
            if not media.linked:
                old_filepaths: list[Path] = [
                    x for x in old_filepaths if "linked_" not in x.parts
                ]
            if old_filepaths:
                old_filepath = old_filepaths[0]
            # a = randint(0,1)
            # await asyncio.sleep(a)
            if old_filepath and old_filepath != new_filepath:
                moved = None
                while not moved:
                    try:
                        if old_filepath.exists():
                            _old_filename, old_ext = (url_path.stem, url_path.suffix)
                            if ".part" == old_ext:
                                old_filepath.unlink()
                                continue
                            if media.size:
                                media.downloaded = True
                            found_dupes = [
                                x
                                for x in media_db
                                if x.filename == new_filename and x.id != media.id
                            ]
                            delete_rows.extend(found_dupes)
                            os.makedirs(os.path.dirname(new_filepath), exist_ok=True)
                            if media.linked:
                                if os.path.dirname(old_filepath) == os.path.dirname(
                                    new_filepath
                                ):
                                    moved = shutil.move(old_filepath, new_filepath)
                                else:
                                    moved = shutil.copy(old_filepath, new_filepath)
                            else:
                                moved = shutil.move(old_filepath, new_filepath)
                        else:
                            break
                    except OSError as e:
                        print(traceback.format_exc())

            if os.path.exists(new_filepath):
                if media.size:
                    media.downloaded = True
            if prepared_format.text:
                pass
            media.directory = file_directory.as_posix()
            media.filename = os.path.basename(new_filepath)
            new_directories.append(os.path.dirname(new_filepath))
        return delete_rows

    base_directory = subscription.directory_manager.user.find_legacy_directory(
        "download", api_type
    )
    temp_files: list[Path] = await subscription.directory_manager.walk(base_directory)
    result = database_session.query(user_database.media_table)
    media_db = result.all()
    pool = api.pool
    # tasks = pool.starmap(fix_directories2, product(posts, [media_db]))
    tasks = [
        asyncio.ensure_future(fix_directories2(post, media_db, temp_files))
        for post in posts
    ]
    settings = {"colour": "MAGENTA", "disable": False}
    delete_rows = await tqdm.gather(*tasks, **settings)
    delete_rows = list(chain(*delete_rows))
    for delete_row in delete_rows:
        database_session.query(user_database.media_table).filter(
            user_database.media_table.id == delete_row.id
        ).delete()
    database_session.commit()
    new_directories = list(set(new_directories))
    return posts, new_directories
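
The filename handling above hinges on decomposing the path component of a media URL with pathlib. A tiny self-contained sketch of that step (the URL is illustrative only):

from pathlib import Path
from urllib import parse as urlparse

link = "https://cdn.example.com/files/123/photo_001.jpg?Expires=0"
url_path = Path(urlparse.urlparse(link).path)
print(url_path.name)    # photo_001.jpg
print(url_path.stem)    # photo_001
print(url_path.suffix)  # .jpg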
Code example #20
File: crud.py Project: thesagarsehgal/hydrus
def insert(object_: Dict[str, Any],
           session: scoped_session,
           id_: Optional[int] = None) -> int:
    """Insert an object to database [POST] and returns the inserted object."""
    rdf_class = None
    instance = None

    # Check for the class at the beginning
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == object_["@type"]).one()
    except NoResultFound:
        raise ClassNotFound(type_=object_["@type"])

    if id_ is not None:
        if session.query(exists().where(Instance.id == id_)).scalar():
            raise InstanceExists(type_=rdf_class.name, id_=id_)
        else:
            instance = Instance(id=id_, type_=rdf_class.id)
    else:
        instance = Instance(type_=rdf_class.id)
    session.add(instance)
    session.flush()

    for prop_name in object_:
        if prop_name not in ["@type", "@context"]:
            try:
                property_ = session.query(properties).filter(
                    properties.name == prop_name).one()
            except NoResultFound:
                # Adds new Property
                session.close()
                raise PropertyNotFound(type_=prop_name)

            # For insertion in III
            if type(object_[prop_name]) == dict:
                instance_id = insert(object_[prop_name], session=session)
                instance_object = session.query(Instance).filter(
                    Instance.id == instance_id).one()

                if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                    property_.type_ = "INSTANCE"
                    session.add(property_)
                    triple = GraphIII(subject=instance.id,
                                      predicate=property_.id,
                                      object_=instance_object.id)
                    session.add(triple)
                else:
                    session.close()
                    raise NotInstanceProperty(type_=prop_name)

            # For insertion in IAC
            elif session.query(exists().where(
                    RDFClass.name == str(object_[prop_name]))).scalar():
                if property_.type_ == "PROPERTY" or property_.type_ == "ABSTRACT":
                    property_.type_ = "ABSTRACT"
                    session.add(property_)
                    class_ = session.query(RDFClass).filter(
                        RDFClass.name == object_[prop_name]).one()
                    triple = GraphIAC(subject=instance.id,
                                      predicate=property_.id,
                                      object_=class_.id)
                    session.add(triple)
                else:
                    session.close()
                    raise NotAbstractProperty(type_=prop_name)

            # For insertion in IIT
            else:
                terminal = Terminal(value=object_[prop_name])
                session.add(terminal)
                session.flush()  # Assigns ID without committing

                if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                    property_.type_ = "INSTANCE"
                    session.add(property_)
                    triple = GraphIIT(subject=instance.id,
                                      predicate=property_.id,
                                      object_=terminal.id)
                    # Add things directly to session, if anything fails whole transaction is aborted
                    session.add(triple)
                else:
                    session.close()
                    raise NotInstanceProperty(type_=prop_name)

    session.commit()
    return instance.id
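
The insert() above calls session.flush() so that the new instance's autoincrement id is available for the triples before anything is committed, while the whole object graph still lands in one transaction. A self-contained sketch of that flush-then-commit pattern, assuming SQLAlchemy 1.4+ (the Instance and GraphIIT models below are stand-ins, not hydrus code):

from sqlalchemy import Column, ForeignKey, Integer, create_engine
from sqlalchemy.orm import declarative_base, scoped_session, sessionmaker

Base = declarative_base()

class Instance(Base):
    # Stand-in for this sketch only.
    __tablename__ = "instance"
    id = Column(Integer, primary_key=True)

class GraphIIT(Base):
    # Stand-in triple table for this sketch only.
    __tablename__ = "graph_iit"
    id = Column(Integer, primary_key=True)
    subject = Column(Integer, ForeignKey("instance.id"))

engine = create_engine("sqlite:///:memory:")
Base.metadata.create_all(engine)
session = scoped_session(sessionmaker(bind=engine))

instance = Instance()
session.add(instance)
session.flush()                      # id assigned here; nothing committed yet
session.add(GraphIIT(subject=instance.id))
session.commit()                     # both rows become permanent together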
Code example #21
def insert(object_: Dict[str, Any], session: scoped_session,
           id_: Optional[str] = None) -> str:
    """Insert an object to database [POST] and returns the inserted object.
    :param object_: object to be inserted
    :param session: sqlalchemy scoped session
    :param id_: id of the object to be inserted (optional param)
    :return: ID of object inserted


    Raises:
        ClassNotFound: If `object_["@type"]` is not a valid/defined RDFClass.
        InstanceExists: If an Instance `id_` already exists.
        PropertyNotFound: If any property name of `object_` other than `@type` or `@context`
            is not a valid/defined property.
        NotInstanceProperty: If any property of `object_` is a dictionary but
            not an Instance property
        NotAbstractProperty: If any property of `object_` is a
            valid/defined RDFClass but is neither a dictionary nor an Abstract Property

    """
    rdf_class = None
    instance = None
    # Check for the class at the beginning
    try:
        rdf_class = session.query(RDFClass).filter(
            RDFClass.name == object_["@type"]).one()
    except NoResultFound:
        raise ClassNotFound(type_=object_["@type"])
    if id_ is not None and session.query(exists().where(Instance.id == id_)).scalar():
        raise InstanceExists(type_=rdf_class.name, id_=id_)
    elif id_ is not None:
        instance = Instance(id=id_, type_=rdf_class.id)
    else:
        instance = Instance(type_=rdf_class.id)
    session.add(instance)
    session.flush()

    for prop_name in object_:

        if prop_name not in ["@type", "@context"]:
            try:
                property_ = session.query(properties).filter(
                    properties.name == prop_name).one()
            except NoResultFound:
                # Adds new Property
                session.close()
                raise PropertyNotFound(type_=prop_name)

            # For insertion in III
            if isinstance(object_[prop_name], dict):
                instance_id = insert(object_[prop_name], session=session)
                instance_object = session.query(Instance).filter(
                    Instance.id == instance_id).one()
                if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                    property_.type_ = "INSTANCE"
                    session.add(property_)
                    triple = GraphIII(
                        subject=instance.id,
                        predicate=property_.id,
                        object_=instance_object.id)
                    session.add(triple)
                else:
                    session.close()
                    raise NotInstanceProperty(type_=prop_name)

            # For insertion in IAC
            elif session.query(exists().where(RDFClass.name == str(object_[prop_name]))).scalar() \
                    and (property_.type_ == "PROPERTY" or property_.type_ == "ABSTRACT"):
                property_.type_ = "ABSTRACT"
                session.add(property_)
                class_ = session.query(RDFClass).filter(
                    RDFClass.name == object_[prop_name]).one()
                triple = GraphIAC(
                    subject=instance.id,
                    predicate=property_.id,
                    object_=class_.id)
                session.add(triple)
            elif session.query(exists().where(RDFClass.name == str(object_[prop_name]))).scalar():
                session.close()
                raise NotAbstractProperty(type_=prop_name)

            # For insertion in IIT
            else:
                terminal = Terminal(value=object_[prop_name])
                session.add(terminal)
                session.flush()  # Assigns ID without committing

                if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                    property_.type_ = "INSTANCE"
                    session.add(property_)
                    triple = GraphIIT(
                        subject=instance.id,
                        predicate=property_.id,
                        object_=terminal.id)
                    # Add things directly to session, if anything fails whole
                    # transaction is aborted
                    session.add(triple)
                else:
                    session.close()
                    raise NotInstanceProperty(type_=prop_name)

    session.commit()
    return instance.id
Code example #22
File: crud.py Project: sreetamparida/hydrus
def insert_multiple(objects_: List[Dict[str,
                                        Any]],
                    session: scoped_session,
                    id_: Optional[str] = "") -> List[int]:
    """
    Adds a list of objects with given ids to the database
    :param objects_: List of dict's to be added to the database
    :param session: scoped session from getSession in utils
    :param id_: optional parameter containing the ids of objects that have to be inserted
    :return: Ids that have been inserted
    """
    # instance list to store instances
    instance_list = list()
    triples_list = list()
    properties_list = list()
    instances = list()
    id_list = id_.split(',')
    instance_id_list = list()

    # the number of objects would be the same as number of instances
    for index in range(len(objects_)):
        try:
            rdf_class = session.query(RDFClass).filter(
                RDFClass.name == objects_[index]["@type"]).one()
        except NoResultFound:
            raise ClassNotFound(type_=objects_[index]["@type"])
        if index in range(len(id_list)) and id_list[index] != "":
            if session.query(
                    exists().where(
                        Instance.id == id_list[index])).scalar():
                print(session.query(
                    exists().where(
                        Instance.id == id_list[index])))
                # TODO: handle the case where the instance already exists; if the
                # instance is fetched later anyway, remove this
                raise InstanceExists(type_=rdf_class.name, id_=id_list[index])
            else:
                instance = Instance(id=id_list[index], type_=rdf_class.id)
                instances.append(instance)
        else:
            instance = Instance(type_=rdf_class.id)
            instances.append(instance)

    session.add_all(instances)
    session.flush()
    for i in range(len(instances)):
        instance_id_list.append(instances[i].id)

    for index in range(len(objects_)):
        for prop_name in objects_[index]:
            if prop_name not in ["@type", "@context"]:
                try:
                    property_ = session.query(properties).filter(
                        properties.name == prop_name).one()
                except NoResultFound:
                    # Adds new Property
                    session.close()
                    raise PropertyNotFound(type_=prop_name)

                # For insertion in III
                if isinstance(objects_[index][prop_name], dict):
                    instance_id = insert(
                        objects_[index][prop_name], session=session)
                    instance_object = session.query(Instance).filter(
                        Instance.id == instance_id).one()

                    if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                        property_.type_ = "INSTANCE"
                        properties_list.append(property_)
                        triple = GraphIII(
                            subject=instances[index].id,
                            predicate=property_.id,
                            object_=instance_object.id)
                        triples_list.append(triple)
                    else:
                        session.close()
                        raise NotInstanceProperty(type_=prop_name)

                # For insertion in IAC
                elif session.query(
                        exists().where(RDFClass.name == str(objects_[index][prop_name]))).scalar():
                    if property_.type_ == "PROPERTY" or property_.type_ == "ABSTRACT":
                        property_.type_ = "ABSTRACT"
                        properties_list.append(property_)
                        class_ = session.query(RDFClass).filter(
                            RDFClass.name == objects_[index][prop_name]).one()
                        triple = GraphIAC(
                            subject=instances[index].id,
                            predicate=property_.id,
                            object_=class_.id)
                        triples_list.append(triple)

                    else:
                        session.close()
                        raise NotAbstractProperty(type_=prop_name)

                # For insertion in IIT
                else:
                    terminal = Terminal(value=objects_[index][prop_name])
                    session.add(terminal)
                    session.flush()  # Assigns ID without committing

                    if property_.type_ == "PROPERTY" or property_.type_ == "INSTANCE":
                        property_.type_ = "INSTANCE"
                        properties_list.append(property_)
                        triple = GraphIIT(
                            subject=instances[index].id,
                            predicate=property_.id,
                            object_=terminal.id)
                        # Add things directly to session, if anything fails
                        # whole transaction is aborted
                        triples_list.append(triple)
                    else:
                        session.close()
                        raise NotInstanceProperty(type_=prop_name)
    session.bulk_save_objects(properties_list)
    session.bulk_save_objects(triples_list)
    session.commit()
    return instance_id_list
Code example #23
File: start.py Project: kr33g33/OnlyFans
def fix_directories(api, posts, all_files, database_session: scoped_session, folder, site_name, parent_type, api_type, username, base_directory, json_settings):
    new_directories = []

    def fix_directories(post: api_table, media_db: list[media_table]):
        delete_rows = []
        final_type = ""
        if parent_type:
            final_type = f"{api_type}{os.path.sep}{parent_type}"
        final_type = final_type if final_type else api_type
        post_id = post.post_id
        media_db = [x for x in media_db if x.post_id == post_id]
        for media in media_db:
            media_id = media.media_id
            if media.link:
                path = urlparse.urlparse(media.link).path
            else:
                path: str = media.filename
            new_filename = os.path.basename(path)
            original_filename, ext = os.path.splitext(new_filename)
            ext = ext.replace(".", "")
            file_directory_format = json_settings["file_directory_format"]
            filename_format = json_settings["filename_format"]
            date_format = json_settings["date_format"]
            text_length = json_settings["text_length"]
            download_path = base_directory
            today = datetime.today()
            today = today.strftime("%d-%m-%Y %H:%M:%S")
            option = {}
            option["site_name"] = site_name
            option["post_id"] = post_id
            option["media_id"] = media_id
            option["username"] = username
            option["api_type"] = final_type if parent_type else api_type
            option["media_type"] = media.media_type
            option["filename"] = original_filename
            option["ext"] = ext
            option["text"] = post.text
            option["postedAt"] = media.created_at
            option["price"] = post.price
            option["date_format"] = date_format
            option["text_length"] = text_length
            option["directory"] = download_path
            option["preview"] = media.preview
            prepared_format = prepare_reformat(option)
            file_directory = main_helper.reformat(
                prepared_format, file_directory_format)
            prepared_format.directory = file_directory
            old_filepath = ""
            if media.linked:
                filename_format = f"linked_{filename_format}"
            old_filepaths = [
                x for x in all_files if original_filename in os.path.basename(x)]
            if not old_filepaths:
                old_filepaths = [
                    x for x in all_files if str(media_id) in os.path.basename(x)]
            if not media.linked:
                old_filepaths = [x for x in old_filepaths if "linked_" not in x]
            if old_filepaths:
                old_filepath = old_filepaths[0]
            new_filepath = main_helper.reformat(
                prepared_format, filename_format)
            if old_filepath and old_filepath != new_filepath:
                if os.path.exists(new_filepath):
                    os.remove(new_filepath)
                moved = None
                while not moved:
                    try:
                        if os.path.exists(old_filepath):
                            if media.size:
                                media.downloaded = True
                            found_dupes = [
                                x for x in media_db if x.filename == new_filename and x.id != media.id]
                            delete_rows.extend(found_dupes)
                            os.makedirs(os.path.dirname(
                                new_filepath), exist_ok=True)
                            if media.linked:
                                if os.path.dirname(old_filepath) == os.path.dirname(new_filepath):
                                    moved = shutil.move(old_filepath, new_filepath)
                                else:
                                    moved = shutil.copy(old_filepath, new_filepath)
                            else:
                                moved = shutil.move(old_filepath, new_filepath)
                        else:
                            break
                    except OSError as e:
                        print(traceback.format_exc())

            if os.path.exists(new_filepath):
                if media.size:
                    media.downloaded = True
            if prepared_format.text:
                pass
            media.directory = file_directory
            media.filename = os.path.basename(new_filepath)
            new_directories.append(os.path.dirname(new_filepath))
        return delete_rows
    result = database_session.query(folder.media_table)
    media_db = result.all()
    pool = api.pool
    delete_rows = pool.starmap(fix_directories, product(posts, [media_db]))
    delete_rows = list(chain(*delete_rows))
    for delete_row in delete_rows:
        database_session.query(folder.media_table).filter(
            folder.media_table.id == delete_row.id).delete()
    database_session.commit()
    new_directories = list(set(new_directories))
    return posts, new_directories
Code example #24
def insert_object(object_: Dict[str, Any],
                  session: scoped_session,
                  collection: bool = False) -> str:
    """
    Insert the object in the database
    :param object_: Dict containing object properties
    :param session: sqlalchemy session
    :param collection: True if the type_ is of a collection, False for any other class
    :return: The ID of the inserted object
    """
    type_ = get_type(object_)
    database_class = get_database_class(type_)
    id_ = object_.get("id", None)
    if collection:
        # if type_ is of a collection class
        members = object_['members']
        collection_id = id_ if id_ else str(uuid.uuid4())
        for member in members:
            # add all the members of that collection
            inserted_object = database_class(
                members=member['id_'],
                collection_id=collection_id,
                member_type=member['@type'],
            )
            try:
                session.add(inserted_object)
                session.commit()
            except InvalidRequestError:
                session.rollback()
        return collection_id
    else:
        # when type_ is of a non-collection class
        if (id_ is not None and session.query(
                exists().where(database_class.id == id_)).scalar()):
            raise InstanceExists(type_, id_)
        foreign_keys = database_class.__table__.foreign_keys
        for fk in foreign_keys:
            # the name of the column through which this foreign key relationship
            # is being established
            fk_column = fk.info["column_name"]
            try:
                fk_object = object_[fk_column]
            except KeyError as e:
                wrong_property = e.args[0]
                raise PropertyNotGiven(type_=wrong_property)
            # insert the foreign key object
            fk_object_id = insert_object(fk_object, session)
            # put the id of the foreign instance in this table's column
            object_[fk_column] = fk_object_id
        try:
            # remove the @type from the object before using it to make an
            # instance of the sqlalchemy class
            object_.pop("@type")
            inserted_object = database_class(**object_)
        except TypeError as e:
            # extract the wrong property name from TypeError object
            wrong_property = e.args[0].split("'")[1]
            raise PropertyNotFound(type_=wrong_property)
        try:
            session.add(inserted_object)
            session.commit()
        except InvalidRequestError:
            session.rollback()

        return inserted_object.id
Code example #25
File: start_ofr.py Project: reneGadeXCS/OnlyFans
async def fix_directories(
    posts,
    api: onlyfans.start,
    subscription: create_user,
    all_files,
    database_session: scoped_session,
    folder,
    site_name,
    api_type,
    base_directory,
    json_settings,
):
    new_directories = []

    async def fix_directories2(post: api_table,
                               media_db: list[template_media_table]):
        delete_rows = []
        final_api_type = (os.path.join("Archived", api_type)
                          if post.archived else api_type)
        post_id = post.post_id
        media_db = [x for x in media_db if x.post_id == post_id]
        for media in media_db:
            media_id = media.media_id
            if media.link:
                path = urlparse.urlparse(media.link).path
            else:
                path: str = media.filename
            new_filename = os.path.basename(path)
            original_filename, ext = os.path.splitext(new_filename)
            ext = ext.replace(".", "")
            file_directory_format = json_settings["file_directory_format"]
            filename_format = json_settings["filename_format"]
            date_format = json_settings["date_format"]
            text_length = json_settings["text_length"]
            download_path = base_directory
            option = {}
            option["site_name"] = site_name
            option["post_id"] = post_id
            option["media_id"] = media_id
            option["profile_username"] = subscription.subscriber.username
            option["model_username"] = subscription.username
            option["api_type"] = final_api_type
            option["media_type"] = media.media_type
            option["filename"] = original_filename
            option["ext"] = ext
            option["text"] = post.text
            option["postedAt"] = media.created_at
            option["price"] = post.price
            option["date_format"] = date_format
            option["text_length"] = text_length
            option["directory"] = download_path
            option["preview"] = media.preview
            option["archived"] = post.archived
            prepared_format = prepare_reformat(option)
            file_directory = await main_helper.reformat(
                prepared_format, file_directory_format)
            prepared_format.directory = file_directory
            old_filepath = ""
            if media.linked:
                filename_format = f"linked_{filename_format}"
            old_filepaths = [
                x for x in all_files
                if original_filename in os.path.basename(x)
            ]
            if not old_filepaths:
                old_filepaths = [
                    x for x in all_files
                    if str(media_id) in os.path.basename(x)
                ]
            if not media.linked:
                old_filepaths = [
                    x for x in old_filepaths if "linked_" not in x
                ]
            if old_filepaths:
                old_filepath = old_filepaths[0]
            # a = randint(0,1)
            # await asyncio.sleep(a)
            new_filepath = await main_helper.reformat(prepared_format,
                                                      filename_format)
            if old_filepath and old_filepath != new_filepath:
                if os.path.exists(new_filepath):
                    os.remove(new_filepath)
                moved = None
                while not moved:
                    try:
                        if os.path.exists(old_filepath):
                            if media.size:
                                media.downloaded = True
                            found_dupes = [
                                x for x in media_db if
                                x.filename == new_filename and x.id != media.id
                            ]
                            delete_rows.extend(found_dupes)
                            os.makedirs(os.path.dirname(new_filepath),
                                        exist_ok=True)
                            if media.linked:
                                if os.path.dirname(
                                        old_filepath) == os.path.dirname(
                                            new_filepath):
                                    moved = shutil.move(
                                        old_filepath, new_filepath)
                                else:
                                    moved = shutil.copy(
                                        old_filepath, new_filepath)
                            else:
                                moved = shutil.move(old_filepath, new_filepath)
                        else:
                            break
                    except OSError as e:
                        print(traceback.format_exc())

            if os.path.exists(new_filepath):
                if media.size:
                    media.downloaded = True
            if prepared_format.text:
                pass
            media.directory = file_directory
            media.filename = os.path.basename(new_filepath)
            new_directories.append(os.path.dirname(new_filepath))
        return delete_rows

    result = database_session.query(folder.media_table)
    media_db = result.all()
    pool = api.pool
    # tasks = pool.starmap(fix_directories2, product(posts, [media_db]))
    tasks = [
        asyncio.ensure_future(fix_directories2(post, media_db))
        for post in posts
    ]
    settings = {"colour": "MAGENTA", "disable": False}
    delete_rows = await tqdm.gather(*tasks, **settings)
    delete_rows = list(chain(*delete_rows))
    for delete_row in delete_rows:
        database_session.query(folder.media_table).filter(
            folder.media_table.id == delete_row.id).delete()
    database_session.commit()
    new_directories = list(set(new_directories))
    return posts, new_directories