Example #1
def test_write():
    if WDUSER and WDPASS:
        login = wbi_login.Login(WDUSER, WDPASS)
        with pytest.raises(ValueError):
            wbi_core.FunctionsEngine.mediawiki_api_call_helper(
                data=None,
                login=login,
                mediawiki_api_url='https://unsdfdskfjljzkerezr.org/w/api.php')
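The same helper can also be pointed at a working endpoint. A minimal sketch, not part of the original test suite; the 'action'/'meta' parameters are standard MediaWiki API parameters assumed here for illustration:

def fetch_userinfo(login):
    # Query the logged-in user's info via the real Wikidata API
    return wbi_core.FunctionsEngine.mediawiki_api_call_helper(
        data={'action': 'query', 'meta': 'userinfo', 'format': 'json'},
        login=login,
        mediawiki_api_url='https://www.wikidata.org/w/api.php')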
Example #2
def import_entity(username, password, data, label="", item_id=""):
    login_instance = wbi_login.Login(user=username, pwd=password)

    entity = wbi_core.ItemEngine(data=data, item_id=item_id)
    if label:
        entity.set_label(label, ENGLISH)

    entity_id = entity.write(login_instance)
    return entity_id
def add_source_in_db(source_name, stable_url, username=None, password=None):
    """"""
    # Load Wikibase Integrator config with the environment
    wbi_config["MEDIAWIKI_API_URL"] = os.environ[API_URL]
    wbi_config["SPARQL_ENDPOINT_URL"] = os.environ[SPARQL_BIGDATA_URL]
    wbi_config["WIKIBASE_URL"] = SVC_URL

    if not username:
        username = os.environ[USERNAME]
    if not password:
        password = os.environ[PASSWORD]
    stable_url_prop = os.environ[STABLE_URL_PROP]
    catalog_prop = os.environ[CATALOG_PROP]

    login_instance = wbi_login.Login(
        user=username,
        pwd=password,
    )

    source_instance_of = wbi_core.ItemID(
        prop_nr=os.environ[INSTANCE_PROP], value=os.environ[GTFS_SCHEDULE_SOURCE_CODE]
    )
    try:
        source_stable_url = wbi_core.Url(value=stable_url, prop_nr=stable_url_prop)
    except ValueError as ve:
        print(f"url {stable_url} for source name {source_name} raised {ve}")
        raise ve
    source_catalog_ref = wbi_core.ItemID(
        prop_nr=catalog_prop,
        value=os.environ[GTFS_CATALOG_OF_SOURCES_CODE],  # fix this
    )
    source_catalog_entity = wbi_core.ItemEngine(
        item_id=os.environ[GTFS_CATALOG_OF_SOURCES_CODE]
    )

    # Create the source entity
    source_data = [source_instance_of, source_stable_url, source_catalog_ref]
    source_entity = wbi_core.ItemEngine(data=source_data, core_props={stable_url_prop})

    source_entity.set_label(source_name)
    source_entity_id = source_entity.write(login=login_instance)

    # Create the Archives ID using the name and the entity code of the source
    archives_id_prefix = source_name.replace("'s GTFS Schedule source", "")
    archives_id_prefix = archives_id_prefix[:15]
    archives_id_prefix = re.sub(
        NON_ALPHABETICAL_CHAR_REGEX, "-", archives_id_prefix
    ).lower()
    archives_id_suffix = source_entity_id.lower()
    source_archives_id = f"{archives_id_prefix}-gtfs-{archives_id_suffix}"

    source_archives_id_data = wbi_core.String(
        prop_nr=os.environ[ARCHIVES_ID_PROP],
        value=source_archives_id,
    )
    source_data_updated = [source_archives_id_data]

    source_entity_updated = wbi_core.ItemEngine(item_id=source_entity_id)
    source_entity_updated.update(source_data_updated)
    source_entity_updated.write(login=login_instance)

    # Update the catalog of sources
    source_entity_prop = wbi_core.ItemID(
        value=source_entity_id, prop_nr=os.environ[SOURCE_ENTITY_PROP], if_exists=APPEND
    )
    catalog_data = [source_entity_prop]
    source_catalog_entity.update(catalog_data)
    source_catalog_entity.write(login_instance)

    return source_entity_id, source_archives_id
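A hypothetical invocation of add_source_in_db; all values are placeholders, and it assumes the environment variables read above (API_URL, SPARQL_BIGDATA_URL, USERNAME, PASSWORD and the *_PROP codes) are already exported:

entity_id, archives_id = add_source_in_db(
    source_name="Example Agency's GTFS Schedule source",  # placeholder
    stable_url="https://example.org/gtfs/latest.zip",  # placeholder
)
print(entity_id, archives_id)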
Example #4
def add_usage_example(
    document_id=None,
    sentence=None,
    lid=None,
    form_id=None,
    sense_id=None,
    word=None,
    publication_date=None,
    language_style=None,
    type_of_reference=None,
    source=None,
    line=None,
):
    # Use WikibaseIntegrator aka wbi to upload the changes in one edit
    link_to_form = wbi_core.Form(prop_nr="P5830",
                                 value=form_id,
                                 is_qualifier=True)
    link_to_sense = wbi_core.Sense(prop_nr="P6072",
                                   value=sense_id,
                                   is_qualifier=True)
    if language_style == "formal":
        style = "Q104597585"
    else:
        if language_style == "informal":
            style = "Q901711"
        else:
            print(f"Error. Language style {language_style} " +
                  "not one of (formal,informal)")
            exit(1)
    logging.debug("Generating qualifier language_style " + f"with {style}")
    language_style_qualifier = wbi_core.ItemID(prop_nr="P6191",
                                               value=style,
                                               is_qualifier=True)
    # oral or written
    if type_of_reference == "written":
        medium = "Q47461344"
    else:
        if type_of_reference == "oral":
            medium = "Q52946"
        else:
            print(f"Error. Type of reference {type_of_reference} " +
                  "not one of (written,oral)")
            exit(1)
    logging.debug("Generating qualifier type of reference " + f"with {medium}")
    type_of_reference_qualifier = wbi_core.ItemID(prop_nr="P3865",
                                                  value=medium,
                                                  is_qualifier=True)
    if source == "riksdagen":
        if publication_date is not None:
            publication_date = datetime.fromisoformat(publication_date)
        else:
            print("Publication date of document {document_id} " +
                  "is missing. We have no fallback for that at the moment. " +
                  "Abort adding usage example.")
            return False
        stated_in = wbi_core.ItemID(prop_nr="P248",
                                    value="Q21592569",
                                    is_reference=True)
        document_id = wbi_core.ExternalID(
            prop_nr="P8433",  # Riksdagen Document ID
            value=document_id,
            is_reference=True)
        reference = [
            stated_in,
            document_id,
            wbi_core.Time(
                prop_nr="P813",  # Fetched today
                time=datetime.utcnow().replace(tzinfo=timezone.utc).replace(
                    hour=0,
                    minute=0,
                    second=0,
                ).strftime("+%Y-%m-%dT%H:%M:%SZ"),
                is_reference=True,
            ),
            wbi_core.Time(
                prop_nr="P577",  # Publication date
                time=publication_date.strftime("+%Y-%m-%dT00:00:00Z"),
                is_reference=True,
            ),
            type_of_reference_qualifier,
        ]
    if source == "europarl":
        stated_in = wbi_core.ItemID(prop_nr="P248",
                                    value="Q5412081",
                                    is_reference=True)
        reference = [
            stated_in,
            wbi_core.Time(
                prop_nr="P813",  # Fetched today
                time=datetime.utcnow().replace(tzinfo=timezone.utc).replace(
                    hour=0,
                    minute=0,
                    second=0,
                ).strftime("+%Y-%m-%dT%H:%M:%SZ"),
                is_reference=True,
            ),
            wbi_core.Time(
                prop_nr="P577",  # Publication date
                time="+2012-05-12T00:00:00Z",
                is_reference=True,
            ),
            wbi_core.Url(
                prop_nr="P854",  # reference url
                value="http://www.statmt.org/europarl/v7/sv-en.tgz",
                is_reference=True,
            ),
            # filename in archive
            wbi_core.String(
                (f"europarl-v7.{config.language_code}" +
                 f"-en.{config.language_code}"),
                "P7793",
                is_reference=True,
            ),
            # line number
            wbi_core.String(
                str(line),
                "P7421",
                is_reference=True,
            ),
            type_of_reference_qualifier,
        ]
    if reference is None:
        logging.error(f"No reference defined for source {source}, "
                      "cannot add usage example")
        return False
    # This is the usage example statement
    claim = wbi_core.MonolingualText(
        sentence,
        "P5831",
        language=config.language_code,
        # Add qualifiers
        qualifiers=[
            link_to_form,
            link_to_sense,
            language_style_qualifier,
        ],
        # Add reference
        references=[reference],
    )
    if config.debug_json:
        logging.debug(f"claim:{claim.get_json_representation()}")
    item = wbi_core.ItemEngine(
        data=[claim],
        append_value=["P5831"],
        item_id=lid,
    )
    # if config.debug_json:
    #     print(item.get_json_representation())
    if config.login_instance is None:
        # Authenticate with WikibaseIntegrator
        print("Logging in with Wikibase Integrator")
        config.login_instance = wbi_login.Login(user=config.username,
                                                pwd=config.password)
    result = item.write(
        config.login_instance,
        edit_summary="Added usage example with [[Wikidata:LexUse]]")
    if config.debug_json:
        logging.debug(f"result from WBI:{result}")
    return result
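A hypothetical call exercising the riksdagen branch; every identifier below is a placeholder shaped after the parameter names, not a real Wikidata entity:

add_usage_example(
    document_id="H8A1TU1",  # placeholder Riksdagen document id
    sentence="Detta är ett exempel.",
    lid="L40002",  # placeholder lexeme id
    form_id="L40002-F1",
    sense_id="L40002-S1",
    publication_date="2020-01-01",
    language_style="formal",
    type_of_reference="written",
    source="riksdagen",
)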
Example #5
# Licensed under GPLv3+ i.e. GPL version 3 or later.
import logging
from urllib.parse import urlparse, parse_qsl
from pprint import pprint
from csv import reader

from wikibaseintegrator import wbi_core, wbi_login

import config
import loglevel

# Constants
wd_prefix = "http://www.wikidata.org/entity/"

print("Logging in with Wikibase Integrator")
login_instance = wbi_login.Login(user=config.username, pwd=config.password)
# Download all Swedish lexemes via SPARQL (~23000 as of 2021-04-05).
# Dictionary with the word as key and a list as value:
# list[0] = lid
# list[1] = category Qid
print("Fetching all lexemes")
lexemes_data = {}
lexemes_list = []
for i in range(0, 10000, 10000):  # paging loop; with these bounds only the first batch of 10000 runs
    print(i)
    # Assumed completion: the lexicalCategory triple and the LIMIT/OFFSET
    # paging are reconstructed from the SELECT variables and the batch loop
    results = wbi_core.ItemEngine.execute_sparql_query(f"""
        SELECT ?lexemeId ?lemma ?category
        WHERE {{
          #hint:Query hint:optimizer "None".
          ?lexemeId dct:language wd:Q9027;
                    wikibase:lemma ?lemma;
                    wikibase:lexicalCategory ?category.
        }}
        LIMIT 10000
        OFFSET {i}""")
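    # A minimal sketch (assumed, not in the original snippet) of unpacking the
    # bindings into the structures declared above; the layout is the standard
    # SPARQL JSON that execute_sparql_query returns.
    for binding in results["results"]["bindings"]:
        lid = binding["lexemeId"]["value"].replace(wd_prefix, "")
        lemma = binding["lemma"]["value"]
        category = binding["category"]["value"].replace(wd_prefix, "")
        lexemes_data[lemma] = [lid, category]
        lexemes_list.append(lid)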
Example #6
def add_usage_example(
        document_id=None,
        sentence=None,
        lid=None,
        form_id=None,
        sense_id=None,
        word=None,
        publication_date=None,
        language_style=None,
        type_of_reference=None,
        source=None,
        line=None,
):
    # Use WikibaseIntegrator aka wbi to upload the changes in one edit
    link_to_form = wbi_core.Form(
        prop_nr="P5830",
        value=form_id,
        is_qualifier=True
    )
    link_to_sense = wbi_core.Sense(
        prop_nr="P6072",
        value=sense_id,
        is_qualifier=True
    )
    if language_style == "formal":
        style = "Q104597585"
    else:
        if language_style == "informal":
            style = "Q901711"
        else:
            print(_( "Error. Language style {} ".format(language_style) +
                     "not one of (formal,informal). Please report a bug at "+
                     "https://github.com/egils-consulting/LexUtils/issues" ))
            return
    logging.debug("Generating qualifier language_style " +
                  f"with {style}")
    language_style_qualifier = wbi_core.ItemID(
        prop_nr="P6191",
        value=style,
        is_qualifier=True
    )
    # oral or written
    if type_of_reference == "written":
        medium = "Q47461344"
    else:
        if type_of_reference == "oral":
            medium = "Q52946"
        else:
            print(_( "Error. Type of reference {} ".format(type_of_reference) +
                     "not one of (written,oral). Please report a bug at "+
                     "https://github.com/egils-consulting/LexUtils/issues" ))
            return
    logging.debug(_( "Generating qualifier type of reference " +
                  "with {}".format(medium) ))
    type_of_reference_qualifier = wbi_core.ItemID(
        prop_nr="P3865",
        value=medium,
        is_qualifier=True
    )
    if source == "riksdagen":
        if publication_date is not None:
            publication_date = datetime.fromisoformat(publication_date)
        else:
            print(_( "Publication date of document {} " +
                  "is missing. We have no fallback for that at the moment. " +
                  "Abort adding usage example.".format(document_id) ))
            return False
        stated_in = wbi_core.ItemID(
            prop_nr="P248",
            value="Q21592569",
            is_reference=True
        )
        document_id = wbi_core.ExternalID(
            prop_nr="P8433",  # Riksdagen Document ID
            value=document_id,
            is_reference=True
        )
        reference = [
            stated_in,
            document_id,
            wbi_core.Time(
                prop_nr="P813",  # Fetched today
                time=datetime.utcnow().replace(
                    tzinfo=timezone.utc
                ).replace(
                    hour=0,
                    minute=0,
                    second=0,
                ).strftime("+%Y-%m-%dT%H:%M:%SZ"),
                is_reference=True,
            ),
            wbi_core.Time(
                prop_nr="P577",  # Publication date
                time=publication_date.strftime("+%Y-%m-%dT00:00:00Z"),
                is_reference=True,
            ),
            type_of_reference_qualifier,
        ]
    if source == "europarl":
        stated_in = wbi_core.ItemID(
            prop_nr="P248",
            value="Q5412081",
            is_reference=True
        )
        reference = [
            stated_in,
            wbi_core.Time(
                prop_nr="P813",  # Fetched today
                time=datetime.utcnow().replace(
                    tzinfo=timezone.utc
                ).replace(
                    hour=0,
                    minute=0,
                    second=0,
                ).strftime("+%Y-%m-%dT%H:%M:%SZ"),
                is_reference=True,
            ),
            wbi_core.Time(
                prop_nr="P577",  # Publication date
                time="+2012-05-12T00:00:00Z",
                is_reference=True,
            ),
            wbi_core.Url(
                prop_nr="P854",  # reference url
                value="http://www.statmt.org/europarl/v7/sv-en.tgz",
                is_reference=True,
            ),
            # filename in archive
            wbi_core.String(
                (f"europarl-v7.{config.language_code}" +
                 f"-en.{config.language_code}"),
                "P7793",
                is_reference=True,
            ),
            # line number
            wbi_core.String(
                str(line),
                "P7421",
                is_reference=True,
            ),
            type_of_reference_qualifier,
        ]
    if source == "ksamsok":
        # No date is provided unfortunately, so we set it to unknown value
        stated_in = wbi_core.ItemID(
            prop_nr="P248",
            value="Q7654799",
            is_reference=True
        )
        document_id = wbi_core.ExternalID(
            # K-Samsök URI
            prop_nr="P1260",  
            value=document_id,
            is_reference=True
        )
        reference = [
            stated_in,
            document_id,
            wbi_core.Time(
                prop_nr="P813",  # Fetched today
                time=datetime.utcnow().replace(
                    tzinfo=timezone.utc
                ).replace(
                    hour=0,
                    minute=0,
                    second=0,
                ).strftime("+%Y-%m-%dT%H:%M:%SZ"),
                is_reference=True,
            ),
            wbi_core.Time(
                # We don't know the value of the publication dates unfortunately
                prop_nr="P577",  # Publication date
                time="",
                snak_type="somevalue",
                is_reference=True,
            ),
            type_of_reference_qualifier,
        ]
    if reference is None:
        logging.error(_("No reference defined, cannot add usage example"))
        exit(1)
    # This is the usage example statement
    claim = wbi_core.MonolingualText(
        sentence,
        "P5831",
        language=config.language_code,
        # Add qualifiers
        qualifiers=[
            link_to_form,
            link_to_sense,
            language_style_qualifier,
        ],
        # Add reference
        references=[reference],
    )
    if config.debug_json:
        logging.debug(f"claim:{claim.get_json_representation()}")
    item = wbi_core.ItemEngine(
        data=[claim], append_value=["P5831"], item_id=lid,
    )
    # if config.debug_json:
    #     print(item.get_json_representation())
    if config.login_instance is None:
        # Authenticate with WikibaseIntegrator
        print("Logging in with Wikibase Integrator")
        config.login_instance = wbi_login.Login(
            user=config.username, pwd=config.password
        )
    result = item.write(
        config.login_instance,
        edit_summary=_("Added usage example " +
                       "with [[Wikidata:LexUtils]] v{}".format(config.version)))
    if config.debug_json:
        logging.debug(f"result from WBI:{result}")
    # TODO add handling of result from WBI and return True == Success or False 
    return result
Example #7
    def add_usage_example(self,
                          form: Form = None,
                          sense: Sense = None,
                          usage_example: UsageExample = None):
        """This only has side effects"""
        # TODO convert to use OOP
        logger = logging.getLogger(__name__)
        if form is None:
            raise ValueError("form was None")
        if sense is None:
            raise ValueError("sense was None")
        if usage_example is None:
            raise ValueError("usage_example was None")
        logger.info("Adding usage example with WBI")
        # Use WikibaseIntegrator aka wbi to upload the changes in one edit
        link_to_form = WBIForm(
            prop_nr="P5830",
            # FIXME debug why this is the lexeme id
            value=form.id)
        link_to_sense = WBISense(prop_nr="P6072", value=sense.id)
        language_style_qualifier = Item(
            prop_nr="P6191", value=usage_example.record.language_style.value)
        type_of_reference_qualifier = Item(
            prop_nr="P3865",
            value=usage_example.record.type_of_reference.value)
        retrieved_date = Time(
            prop_nr="P813",  # Fetched today
            time=datetime.utcnow().replace(tzinfo=timezone.utc).replace(
                hour=0,
                minute=0,
                second=0,
            ).strftime("+%Y-%m-%dT%H:%M:%SZ"))
        if usage_example.record.source == SupportedExampleSources.RIKSDAGEN:
            logger.info("Riksdagen record detected")
            if usage_example.record.date is not None:
                if usage_example.record.date.day == 1 and usage_example.record.date.month == 1:
                    logger.info("Detected year precision on the date")
                    publication_date = Time(
                        prop_nr="P577",  # Publication date
                        time=usage_example.record.date.strftime(
                            "+%Y-%m-%dT00:00:00Z"),
                        # Precision is year if the date is 1/1
                        precision=9)
                else:
                    publication_date = Time(
                        prop_nr="P577",  # Publication date
                        time=usage_example.record.date.strftime(
                            "+%Y-%m-%dT00:00:00Z"),
                        precision=11)
            else:
                raise ValueError(
                    _("Publication date of document {} ".format(
                        usage_example.record.id) +
                      "is missing. We have no fallback for that at the moment. "
                      + "Aborting."))
            if usage_example.record.document_qid is not None:
                logger.info(
                    f"using document QID {usage_example.record.document_qid} as value for P248"
                )
                stated_in = Item(prop_nr="P248",
                                 value=usage_example.record.document_qid)
                reference = [
                    stated_in,
                    retrieved_date,
                    publication_date,
                    type_of_reference_qualifier,
                ]
            else:
                stated_in = Item(prop_nr="P248", value="Q21592569")
                document_id = ExternalID(
                    prop_nr="P8433",  # Riksdagen Document ID
                    value=usage_example.record.id)
                if publication_date is not None:
                    reference = [
                        stated_in,
                        document_id,
                        retrieved_date,
                        publication_date,
                        type_of_reference_qualifier,
                    ]
        elif usage_example.record.source == SupportedExampleSources.WIKISOURCE:
            logger.info("Wikisource record detected")
            usage_example.record.lookup_qid()
            if usage_example.record.document_qid is not None:
                logger.info(
                    f"using document QID {usage_example.record.document_qid} as value for P248"
                )
                stated_in = Item(prop_nr="P248",
                                 value=usage_example.record.document_qid)
                wikimedia_import_url = URL(prop_nr="P4656",
                                           value=usage_example.record.url())
                reference = [
                    stated_in,
                    wikimedia_import_url,
                    retrieved_date,
                    type_of_reference_qualifier,
                ]
            else:
                # TODO discuss whether we want to add this, it can rather easily
                #  be inferred from the import url and the QID of the work
                # search via sparql for english wikisource QID?
                # stated_in = Item(
                #     prop_nr="P248",
                #     value=
                # )
                wikimedia_import_url = URL(prop_nr="P4656",
                                           value=usage_example.record.url())
                reference = [
                    # stated_in,
                    wikimedia_import_url,
                    retrieved_date,
                    type_of_reference_qualifier,
                ]
        elif usage_example.record.source == SupportedExampleSources.HISTORICAL_ADS:
            logger.info("Historical Ad record detected")
            stated_in = Item(
                prop_nr="P248",
                value=SupportedExampleSources.HISTORICAL_ADS.value)
            # TODO wait for https://www.wikidata.org/wiki/Wikidata:Property_proposal/Swedish_Historical_Job_Ads_ID to be approved
            record_number = String(
                prop_nr="P9994",  #  record number
                value=usage_example.record.id)
            reference_url = URL(prop_nr="P854",
                                value=usage_example.record.url())
            published_date = Time(
                prop_nr="P577",
                time=usage_example.record.date.strftime("+%Y-%m-%dT00:00:00Z"),
                precision=11
                #     (
                #     # First parse the date string and then output it
                #     usage_example.record.date
                #         .strptime("+%Y-%m-%dT%H:%M:%SZ")
                #         .strftime("+%Y-%m-%dT%H:%M:%SZ")
                # )
            )
            historical_ads_retrieved_date = Time(
                prop_nr="P813",  # Fetched 2021-01-13
                time=datetime.strptime(
                    "2021-01-13",
                    "%Y-%m-%d").replace(tzinfo=timezone.utc).replace(
                        hour=0,
                        minute=0,
                        second=0,
                    ).strftime("+%Y-%m-%dT%H:%M:%SZ"))
            reference = [
                stated_in,
                record_number,
                reference_url,
                historical_ads_retrieved_date,
                published_date,
                type_of_reference_qualifier,
            ]
        # elif source == "europarl":
        #     stated_in = wbi_datatype.ItemID(
        #         prop_nr="P248",
        #         value="Q5412081",
        #         is_reference=True
        #     )
        #     reference = [
        #         stated_in,
        #         wbi_datatype.Time(
        #             prop_nr="P813",  # Fetched today
        #             time=datetime.utcnow().replace(
        #                 tzinfo=timezone.utc
        #             ).replace(
        #                 hour=0,
        #                 minute=0,
        #                 second=0,
        #             ).strftime("+%Y-%m-%dT%H:%M:%SZ"),
        #             is_reference=True,
        #         ),
        #         wbi_datatype.Time(
        #             prop_nr="P577",  # Publication date
        #             time="+2012-05-12T00:00:00Z",
        #             is_reference=True,
        #         ),
        #         wbi_datatype.Url(
        #             prop_nr="P854",  # reference url
        #             value="http://www.statmt.org/europarl/v7/sv-en.tgz",
        #             is_reference=True,
        #         ),
        #         # filename in archive
        #         wbi_datatype.String(
        #             (f"europarl-v7.{config.language_code}" +
        #              f"-en.{config.language_code}"),
        #             "P7793",
        #             is_reference=True,
        #         ),
        #         # line number
        #         wbi_datatype.String(
        #             str(line),
        #             "P7421",
        #             is_reference=True,
        #         ),
        #         type_of_reference_qualifier,
        #     ]
        # elif source == "ksamsok":
        #     # No date is provided unfortunately, so we set it to unknown value
        #     stated_in = wbi_datatype.ItemID(
        #         prop_nr="P248",
        #         value="Q7654799",
        #         is_reference=True
        #     )
        #     document_id = wbi_datatype.ExternalID(
        #         # K-Samsök URI
        #         prop_nr="P1260",
        #         value=document_id,
        #         is_reference=True
        #     )
        #     reference = [
        #         stated_in,
        #         document_id,
        #         wbi_datatype.Time(
        #             prop_nr="P813",  # Fetched today
        #             time=datetime.utcnow().replace(
        #                 tzinfo=timezone.utc
        #             ).replace(
        #                 hour=0,
        #                 minute=0,
        #                 second=0,
        #             ).strftime("+%Y-%m-%dT%H:%M:%SZ"),
        #             is_reference=True,
        #         ),
        #         wbi_datatype.Time(
        #             # We don't know the value of the publication dates unfortunately
        #             prop_nr="P577",  # Publication date
        #             time="",
        #             snak_type="somevalue",
        #             is_reference=True,
        #         ),
        #         type_of_reference_qualifier,
        #     ]
        else:
            raise ValueError(
                f"Did not recognize the source {usage_example.record.source.name.title()}"
            )
        if reference is None:
            raise ValueError(
                _("No reference defined, cannot add usage example"))
        else:
            # This is the usage example statement
            claim = MonolingualText(
                text=usage_example.text,
                prop_nr="P5831",
                language=usage_example.record.language_code.value,
                # Add qualifiers
                qualifiers=[
                    link_to_form,
                    link_to_sense,
                    language_style_qualifier,
                ],
                # Add reference
                references=[reference],
            )
            # if config.debug_json:
            #     logging.debug(f"claim:{claim.get_json_representation()}")
            if config.login_instance is None:
                # Authenticate with WikibaseIntegrator
                with console.status("Logging in with WikibaseIntegrator..."):
                    config.login_instance = wbi_login.Login(
                        auth_method='login',
                        user=config.username,
                        password=config.password,
                        debug=False)
                    # Set User-Agent
                    wbi_config.config["USER_AGENT_DEFAULT"] = config.user_agent
            wbi = WikibaseIntegrator(login=config.login_instance)
            lexeme = wbi.lexeme.get(form.lexeme_id)
            lexeme.add_claims([claim], action_if_exists=ActionIfExists.APPEND)
            # if config.debug_json:
            #     print(item.get_json_representation())

            result = lexeme.write(summary=(
                "Added usage example " +
                "with [[Wikidata:Tools/LexUtils]] v{}".format(config.version)))
            # logging.debug(f"result from WBI:{result}")
            # TODO add handling of result from WBI and return True == Success or False
            return result
Example #8
def bad_login():
    wbi_login.Login("name",
                    "pass",
                    mediawiki_api_url="www.wikidataaaaaaaaa.org")
Example #9
from wikibaseintegrator import wbi_login, wbi_core
import logging
logging.basicConfig(level=logging.INFO)

login_instance = wbi_login.Login(user='******', pwd='VP4ptJbLhNM9vB4')

my_first_wikidata_item = wbi_core.ItemEngine(item_id='Q1')

# to check successful installation and retrieval of the data, you can print the json representation of the item
print(my_first_wikidata_item.get_json_representation())

# data must be a list of wbi_core datatype objects, not a dict; Url is an
# assumption here, chosen because the value is a full entity URI
result = wbi_core.ItemEngine(
    item_id='Q1',
    data=[wbi_core.Url(value='http://www.wikidata.org/entity/Q65216433',
                       prop_nr='P3')])
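To persist the prepared statement, the engine still has to be written with the login created above; a minimal sketch of the assumed continuation:

# write() pushes the statement to the wiki in a single edit
result.write(login_instance)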
Example #10
def test_login():
    if WDUSER and WDPASS:
        wbi_login.Login(WDUSER, WDPASS)
    else:
        print("no WDUSER or WDPASS found in environment variables",
              file=sys.stderr)
Example #11
def create_dataset_entity_for_gtfs_metadata(gtfs_representation,
                                            api_url,
                                            username=None,
                                            password=None):
    """Create a dataset entity for a new dataset version on the Database.
    :param gtfs_representation: The representation of the GTFS dataset to process.
    :param api_url: API url, either PRODUCTION_API_URL or STAGING_API_URL.
    :return: The representation of the GTFS dataset post-execution.
    """
    validate_api_url(api_url)
    validate_gtfs_representation(gtfs_representation)
    metadata = gtfs_representation.metadata

    ###########################
    # 1. Process the core props
    ###########################

    # Begin with the core properties data,
    # used to verify whether the dataset entity already exists
    core_props_data = []

    # SHA-1 hash property
    if is_valid_instance(metadata.sha1_hash, str):
        core_props_data.append(
            wbi_core.String(value=metadata.sha1_hash,
                            prop_nr=os.environ[SHA1_HASH_PROP]))

    # Archives URL, from the stable URL property
    if is_valid_instance(metadata.stable_urls, dict):
        archives_url = metadata.stable_urls.get(ARCHIVES_URL)
        try:
            core_props_data.append(
                wbi_core.Url(
                    value=archives_url,
                    prop_nr=os.environ[STABLE_URL_PROP],
                    rank=PREFERRED,
                ))
        except ValueError as ve:
            print(
                f"url {archives_url} for source {metadata.source_entity_code} caused {ve}"
            )
            raise ve

    # If the 2 core props values were NOT added to core_props_data,
    # then it is not possible to verify whether the dataset entity already exists
    if len(core_props_data) != 2:
        raise MissingCorePropsException(core_props_data)

    # An existing dataset entity is considered the same as the one processed
    # if and only if 2 core props values are matching: the SHA-1 hash and the Archives URL
    # so the core properties threshold is 100%
    core_props_threshold = 1.0

    try:
        dataset_entity = wbi_core.ItemEngine(
            data=core_props_data,
            core_props={
                os.environ[STABLE_URL_PROP],
                os.environ[SHA1_HASH_PROP],
            },
            core_prop_match_thresh=core_props_threshold,
        )
    except ManualInterventionReqException as mi:
        print(
            f"ManualInterventionReqException : a core property value exists for multiple dataset entities."
        )
        raise mi
    except CorePropIntegrityException as cp:
        print(
            f"CorePropIntegrityException: a dataset entity exists with 1 of the 2 core props values."
        )
        raise cp
    except Exception as e:
        print(f"metadata : {metadata} raised {e}")
        raise e

    # If the retrieved dataset entity already has an item_id (entity id) value,
    # then we do nothing because the dataset already exists
    if dataset_entity.item_id != "":
        raise EntityAlreadyExistsException(dataset_entity.item_id)

    #################################################
    # 2. Add the other properties to the dataset data
    #################################################
    dataset_data = []

    # Add the core_props_data to the dataset_data
    dataset_data += core_props_data

    # Delete the archives_url from metadata.stable_urls
    # since it was part of the core_props_data
    del metadata.stable_urls[ARCHIVES_URL]

    # Stable urls property
    if is_valid_instance(metadata.stable_urls, dict):
        for url in metadata.stable_urls.values():
            try:
                dataset_data.append(
                    wbi_core.Url(value=url,
                                 prop_nr=os.environ[STABLE_URL_PROP],
                                 rank=NORMAL))
            except ValueError as ve:
                print(
                    f"url {url} for source {metadata.source_entity_code} caused {ve}"
                )
                raise ve

    # Instance property
    dataset_data.append(
        wbi_core.ItemID(
            value=os.environ[GTFS_SCHEDULE_DATA_FORMAT],
            prop_nr=os.environ[INSTANCE_PROP],
        ))

    # Source entity property
    dataset_data.append(
        wbi_core.ItemID(value=metadata.source_entity_code,
                        prop_nr=os.environ[SOURCE_ENTITY_PROP]))

    # Main timezone property
    if is_valid_instance(metadata.main_timezone, str):
        dataset_data.append(
            wbi_core.String(
                value=metadata.main_timezone,
                prop_nr=os.environ[TIMEZONE_PROP],
                rank=PREFERRED,
            ))

    # Other timezones property
    if is_valid_instance(metadata.other_timezones, list):
        for timezone in metadata.other_timezones:
            dataset_data.append(
                wbi_core.String(value=timezone,
                                prop_nr=os.environ[TIMEZONE_PROP],
                                rank=NORMAL))

    # Country code property
    if is_valid_instance(metadata.country_codes, list):
        for country_code in metadata.country_codes:
            dataset_data.append(
                wbi_core.String(
                    value=country_code,
                    prop_nr=os.environ[COUNTRY_CODE_PROP],
                    rank=NORMAL,
                ))

    # Main language code property
    if is_valid_instance(metadata.main_language_code, str):
        dataset_data.append(
            wbi_core.String(
                value=metadata.main_language_code,
                prop_nr=os.environ[MAIN_LANGUAGE_CODE_PROP],
                rank=PREFERRED,
            ))

    # Start service date property
    if is_valid_instance(metadata.start_service_date, str):
        dataset_data.append(
            wbi_core.String(
                value=metadata.start_service_date,
                prop_nr=os.environ[START_SERVICE_DATE_PROP],
            ))

    # End service date property
    if is_valid_instance(metadata.end_service_date, str):
        dataset_data.append(
            wbi_core.String(
                value=metadata.end_service_date,
                prop_nr=os.environ[END_SERVICE_DATE_PROP],
            ))

    # Start timestamp property
    if is_valid_instance(metadata.start_timestamp, str):
        dataset_data.append(
            wbi_core.String(value=metadata.start_timestamp,
                            prop_nr=os.environ[START_TIMESTAMP_PROP]))

    # End timestamp property
    if is_valid_instance(metadata.end_timestamp, str):
        dataset_data.append(
            wbi_core.String(value=metadata.end_timestamp,
                            prop_nr=os.environ[END_TIMESTAMP_PROP]))

    # Bounding box property
    if is_valid_instance(metadata.bounding_box, dict):
        for order_key, corner_value in metadata.bounding_box.items():
            dataset_data.append(
                create_geographical_property(order_key, corner_value,
                                             os.environ[BOUNDING_BOX_PROP]))

    # Bounding octagon property
    if is_valid_instance(metadata.bounding_octagon, dict):
        for order_key, corner_value in metadata.bounding_octagon.items():
            dataset_data.append(
                create_geographical_property(
                    order_key, corner_value,
                    os.environ[BOUNDING_OCTAGON_PROP]))

    # Stop counts
    if is_valid_instance(metadata.stops_count_by_type, dict):
        # Number of stops property
        stops_count = metadata.stops_count_by_type.get(STOP_KEY, None)
        if stops_count is not None:
            dataset_data.append(
                wbi_core.Quantity(
                    quantity=stops_count,
                    prop_nr=os.environ[NUM_OF_STOPS_PROP],
                ))

        # Number of stations property
        stations_count = metadata.stops_count_by_type.get(STATION_KEY, None)
        if stations_count is not None:
            dataset_data.append(
                wbi_core.Quantity(
                    quantity=stations_count,
                    prop_nr=os.environ[NUM_OF_STATIONS_PROP],
                ))

        # Number of entrances property
        entrances_count = metadata.stops_count_by_type.get(ENTRANCE_KEY, None)
        if entrances_count is not None:
            dataset_data.append(
                wbi_core.Quantity(
                    quantity=entrances_count,
                    prop_nr=os.environ[NUM_OF_ENTRANCES_PROP],
                ))

    if is_valid_instance(metadata.agencies_count, int):
        # Number of agencies property
        dataset_data.append(
            wbi_core.Quantity(
                quantity=metadata.agencies_count,
                prop_nr=os.environ[NUM_OF_AGENCIES_PROP],
            ))

    # Number of routes property
    if is_valid_instance(metadata.routes_count_by_type, dict):
        for route_key, route_value in metadata.routes_count_by_type.items():
            route_qualifier = [
                wbi_core.ItemID(
                    value=route_key,
                    prop_nr=os.environ[ROUTE_TYPE_PROP],
                    is_qualifier=True,
                )
            ]
            dataset_data.append(
                wbi_core.Quantity(
                    quantity=route_value,
                    prop_nr=os.environ[NUM_OF_ROUTES_PROP],
                    qualifiers=route_qualifier,
                ))

    # Download date
    if is_valid_instance(metadata.download_date, str):
        dataset_data.append(
            wbi_core.String(
                value=metadata.download_date,
                prop_nr=os.environ[DOWNLOAD_DATE_PROP],
            ))

    # Dataset version entity label
    version_name_label = metadata.dataset_version_name
    if not username:
        username = os.environ[USERNAME]
    if not password:
        password = os.environ[PASSWORD]
    login_instance = wbi_login.Login(user=username, pwd=password)

    #################################################
    # 3. Create the dataset entity on the database
    #################################################

    # Create the Dataset WITHOUT using the core_props.
    # For some reason, when using the core_props with all the data,
    # the WikibaseIntegrator library retrieves entities
    # that do not share data with the actual dataset entity,
    # which makes the process crash
    dataset_entity = wbi_core.ItemEngine(data=dataset_data)

    # Set the label (name)
    dataset_entity.set_label(version_name_label, ENGLISH)

    # Create the dataset entity on the database
    dataset_entity_id = dataset_entity.write(login_instance)
    metadata.dataset_version_entity_code = dataset_entity_id

    # Create the source data with the dataset entity code and property
    version_prop = wbi_core.ItemID(
        value=metadata.dataset_version_entity_code,
        prop_nr=os.environ[DATASET_PROP],
        if_exists=APPEND,
    )
    source_data = [version_prop]

    # Update the source entity
    # Try a maximum of 20 times in case there are edit conflicts
    try_count = 20
    has_succeeded = False

    while not has_succeeded and try_count > 0:
        source_entity = wbi_core.ItemEngine(
            item_id=metadata.source_entity_code)
        source_entity.update(source_data)
        try:
            source_entity.write(login_instance)
        except MWApiError as mwae:
            print(
                f"Failed to update: {source_entity.item_id} with data: {source_data} raised MWApiError. "
                f"{try_count} attempts left.")
            try_count -= 1
            # If the attempts have not succeeded, fail loudly
            if try_count == 0:
                print(
                    f"source_entity: {source_entity.get_json_representation()} with data: "
                    f"{source_data} raised MWApiError.")
                raise mwae
            # Wait 20 seconds before the next attempt so the database updates
            # preventing other edit conflicts
            # and not overloading the database with requests
            time.sleep(20)
        else:
            has_succeeded = True
            metadata.source_entity_code = source_entity.item_id

    return gtfs_representation
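A hypothetical top-level call; the gtfs_representation object is assumed to be built elsewhere in the codebase, and PRODUCTION_API_URL is the constant mentioned in the docstring:

updated_representation = create_dataset_entity_for_gtfs_metadata(
    gtfs_representation, api_url=PRODUCTION_API_URL)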