from rldd.client import Client
from rldd import config

db = Client(config.PROD).connect()

# Claims activated during 2020 (boundaries are MSK midnights stored as UTC).
query = {
    "activationDate": {
        "$gte": Client.ISODate("2019-12-31T21:00:00.000+0000"),
        "$lte": Client.ISODate("2020-12-03T21:00:00.000+0000")
    }
}
# Only identity and service-name fields are needed for the check below.
projection = {
    "_id": 1,
    "customClaimNumber": 1,
    "service.srguServiceId": 1,
    "service.srguServiceName": 1,
    "service.name": 1,
    "service.srguDepartmentId": 1,
    "service.srguDepartmentName": 1,
    "service.srguServicePassportId": 1,
    "service.srguServicePassportName": 1,
}

# FIX: the original hard-coded total_count = 11460885, which goes stale as the
# collection changes; compute it from the same query so the progress line stays true.
total_count = db["claims"].count_documents(query)

claims = db["claims"].find(query, projection).limit(10000)
iteration = 0
for claim in claims:
    iteration += 1
    print(f"{iteration} / {total_count}")
    ccn = claim["customClaimNumber"]
    # Print numbers of claims whose service record lacks a passport name.
    if "srguServicePassportName" not in claim["service"]:
        print(ccn)
projection = { "_id": 1, "customClaimNumber": 1, "resultStatus": 1, "service": 1, "currStatus": 1, "senderCode": 1 } statuses_projection = { "statusCode": 1, } result_file = open('claims_without_result.csv', 'w+') result_file.write('customClaimNumber;\n') query = {"activationDate": {'$gte': Client.ISODate("2020-12-15T21:00:00.000+0000")}, "resultStatus": {'$exists': False}} query1 = {"customClaimNumber": "P001-0259864459-40561582"} while True: try: claims = db["claims"].find(query, projection, no_cursor_timeout=True).skip(iteration) for claim in claims: iteration += 1 print(iteration) claim_id = claim["_id"] ccn = claim["customClaimNumber"] status = db["claims_status"].find_one({"claimId": str(claim_id), "statusCode": {"$in": ["3", "4"]}}, statuses_projection) if status: claim = db["claims"].find_one({"customClaimNumber": ccn}, projection) if "resultStatus" not in claim:
} while True: try: claims = db["claims"].find( { "docSendDate": { "$exists": True }, "senderCode": { "$ne": "RRTR01001" }, "resultStatus": { "$exists": True }, "activationDate": { '$gte': Client.ISODate("2019-12-31T21:00:00.000+0000") } }, projection, no_cursor_timeout=True).skip(iteration) for claim in claims: try: claimId = claim["_id"] ccn = claim["customClaimNumber"] activationDate = claim["activationDate"] deadlineDate = claim["deadlineDate"] docSendDate = claim["docSendDate"] iteration += 1 statuses = list(db["claims_status"].find({ "claimId": str(claimId),
from rldd import config from rldd.client import Client query = { "statusDate": { '$gte': Client.ISODate("2020-10-15T21:00:00.000+0000"), '$lte': Client.ISODate("2020-10-20T21:00:00.000+0000") }, "createDate": { '$gte': Client.ISODate("2020-09-22T21:00:00.000+0000"), '$lte': Client.ISODate("2020-09-23T21:00:00.000+0000") } } iteration = 0 count = 0 db = Client(config.PROD).connect() claims = db["claims_status_mku"].find(query, {"claimId": 1}) for data in claims: iteration += 1 isFound = False claimId = data["claimId"] claim = db["claims"].find_one({"_id": claimId}) if claim is None: continue ccn = claim["customClaimNumber"] if "deadlineStages" in claim: for index, stage in enumerate(claim["deadlineStages"]): if "stageName" in stage: if stage["stageName"] == "Корректировка срока":
from rldd import config import json def post_full_status(status_dict): body = status_dict headers = {'Content-Type': 'application/json'} return requests.post(url='http://10.10.80.54:8080/api/statuses', data=json.dumps(body), headers=headers) projection = {"customClaimNumber": 1, "_id": 1, "currStatus": 1} query = { "activationDate": { '$gte': Client.ISODate("2020-01-01T08:40:43.439+0000"), '$lte': Client.ISODate("2020-08-29T08:40:43.439+0000") }, "consultation": False, "service.srguServicePassportId": "5000000000193224739", "currStatus.senderCode": "RGIS05001" } db = Client(config.PROD).connect() claims = db["claims"].find(query, projection) for claim in claims: claimId = claim["_id"] ccn = claim["customClaimNumber"] statuses = list(db["claims_status"].find({ "claimId": str(claimId) }).sort("statusDate", pymongo.DESCENDING)) for i in range(len(statuses)):
"activationDate": 1, "customClaimNumber": 1, "resultStatus": 1, "docSendDate": 1 } db = Client(config.PROD).connect() claims = db["claims"].find( { "customClaimNumber": { "$in": claims_list }, "resultStatus": { "$exists": True }, "activationDate": { "$gte": Client.ISODate("2019-11-30T21:00:00.000+0000"), "$lte": Client.ISODate("2020-01-08T21:00:00.000+0000") }, "docSendDate": { "$lte": Client.ISODate("2020-01-27T21:00:00.000+0000") } }, jop, no_cursor_timeout=True) for claim in claims: iteration += 1 claimId = claim["_id"] ccn = claim["customClaimNumber"] statuses = list(db["claims_status"].find({ "claimId": str(claimId),
import datetime
from rldd.client import Client
from rldd import config

db = Client(config.PROD).connect()

# Claims of one specific service activated since 2020.
claims = db["claims"].find({
    "service.srguServiceId": "5000000000191297688",
    "activationDate": {'$gte': Client.ISODate("2020-01-01T10:50:34.251+0000")}
})

# Report claims where a status "53" follows a status "7" by more than one hour.
for claim in claims:
    ccn = claim["customClaimNumber"]
    if "statuses" not in claim:
        continue
    for index, status in enumerate(claim["statuses"]):
        if status["statusCode"] != "53":
            continue
        # BUG FIX: the original indexed statuses[index - 1] unconditionally,
        # so index == 0 silently wrapped around to the LAST status (Python
        # negative indexing) and compared unrelated entries.
        if index == 0:
            continue
        prev = claim["statuses"][index - 1]
        if prev["statusCode"] == "7":
            if status["statusDate"] - prev["statusDate"] > datetime.timedelta(hours=1):
                print(ccn)
iteration = 0 def get_statuses(arr): obj = [] for _s in arr: idPrefix = "_id" if "_id" in _s else "id" obj.append(str(_s[idPrefix])) return obj db = Client(config.PROD).connect() claims = db["claims"].find( { "activationDate": { '$gte': Client.ISODate("2020-09-21T00:00:00.000+0300"), '$lte': Client.ISODate("2020-09-28T00:00:00.000+0300") }, "oktmo": { '$ne': "99999999" } }, { "statuses": 1, "_id": 1, "customClaimNumber": 1 }, no_cursor_timeout=True) for claim in claims: iteration += 1 claimId = claim["_id"] ccn = claim[
from rldd.client import Client from rldd import config db = Client(config.PROD).connect() query = { "service.srguServiceId": "5000000010000047695", "activationDate": { '$gte': Client.ISODate("2020-09-24T21:00:00.000+0000") } } projection = { "_id": 1, "fields": 1, "customClaimNumber": 1, "activationDate": 1 } claims = db["claims"].find(query, projection) result_file = open('emptyFields.csv', 'w+') for claim in claims: claimId = claim["_id"] activationDate = claim["activationDate"] ccn = claim["customClaimNumber"] or None try: if len(claim["fields"]["sequenceValue"]) > 0: seq = claim["fields"]["sequenceValue"] for elem in seq: if "stringId" in elem: if elem["stringId"] == "carList": carList = elem["sequenceValue"]
from rldd.client import Client from rldd import config from bson.tz_util import FixedOffset from datetime import datetime from pymongo import MongoClient database = Client(config.PROD).connect() collection = database["claims"] pipeline = [{ "$match": { "service.srguServicePassportId": "5000000010000015448", "activationDate": { "$gte": Client.ISODate("2020-05-05T21:00:00.000+0000") } } }, { "$count": "Total" }] pipeline2 = [{ "$match": { "service.srguServicePassportId": "5000000010000015448", "activationDate": { "$gte": Client.ISODate("2020-05-05T21:00:00.000+0000") } } }, { "$group": { "_id": "$persons", "count": {
from rldd.client import Client
from rldd import config

db = Client(config.PROD).connect()
persons = Client(config.DPS, "dps").connect()

# FIX: the original leaked the CSV handle (never closed); "with" guarantees
# flush/close even if the cursor iteration raises.
with open("fio.csv", "w+") as result_file:
    result_file.write("ФИО;ФИО;id заявки\n")

    claims = db["claims"].find({
        "service.srguServiceId": "5000000000167006500",
        "activationDate": {'$gte': Client.ISODate("2019-12-31T21:00:00.000+0000")}
    })

    # Compare the surname snapshot stored on the claim with the current one
    # in the persons DB; log every mismatch (case-insensitive).
    for claim in claims:
        claim_id = claim["_id"]
        if "personsInfo" not in claim:
            continue
        for pers in claim["personsInfo"]:
            pers_id = pers["_id"]
            if "surname" not in pers:
                continue
            person_surname = pers["surname"]
            person = persons["persons"].find_one({"_id": pers_id})
            if person is None:
                continue
            person_surname_orig = person["surname"]
            if person_surname_orig.lower() != person_surname.lower():
                result_file.write(f"{person_surname};{person_surname_orig};{claim_id}\n")
                print(person_surname, person_surname_orig, claim_id)
from rldd.client import Client
from rldd import config

db = Client(config.PROD).connect()

# Output CSVs: one for juridical persons, one for individual entrepreneurs.
juridical_file = open('juridical.csv', 'w+')
ip_file = open('ip.csv', 'w+')
juridical_file.write('Фактический адрес;Адрес регистрации;ИНН\n')
ip_file.write('Фактический адрес;Адрес регистрации;ИНН\n')

iteration = 0

# Claims for the two target service passports, activated between the
# 2018-12-31 and 2020-12-15 MSK midnights (stored as UTC).
claims = db["claims"].find({
    "activationDate": {
        '$gte': Client.ISODate("2018-12-31T21:00:00.000+0000"),
        '$lte': Client.ISODate("2020-12-15T21:00:00.000+0000")
    },
    "service.srguServicePassportId": {
        "$in": ["5000000000184762039", "5000000000183970738"]
    }
})


def form_address(_address):
    """Build "region, street, house, room" from an address document,
    skipping parts that are empty or whitespace-only."""
    parts = (
        checkValue(_address, "region"),
        checkValue(_address, "street"),
        checkValue(_address, "houseNumber"),
        checkValue(_address, "room"),
    )
    return ", ".join(part for part in parts if part.strip() != "")
from rldd.client import Client
from rldd import config

db = Client(config.PROD).connect()

# IPGU-submitted claims activated in January 2021 (MSK boundaries as UTC).
claims = db["claims"].find({
    "senderCode": "IPGU01001",
    "activationDate": {
        "$gte": Client.ISODate("2020-12-31T21:00:00.000+0000"),
        "$lte": Client.ISODate("2021-01-30T21:00:00.000+0000")
    }
})

iteration = 0
# FIX: the original leaked the CSV handle and bound the exception to an
# unused variable (`except KeyError as k`); an explicit key check keeps the
# same behavior (claims without "persons" are skipped, no progress print).
with open("unique_persons.csv", 'w+') as result_file:
    for claim in claims:
        iteration += 1
        if "persons" not in claim:
            continue
        for person in claim["persons"]:
            result_file.write(f"{person}\n")
        print(iteration)
import datetime
from rldd.client import Client
from rldd.config import PROD

db = Client(PROD).connect()

year = 2021
# The blank-oktmo calendar appears to be the region-wide default one.
calendar = db["calendars"].find_one({"year": year, "oktmo": ""})

# Convert 1-based day-of-year numbers into datetimes shifted back 3 hours
# (MSK midnight expressed as naive UTC).
jan_first = datetime.datetime(year, 1, 1)
msk_offset = datetime.timedelta(hours=3)
daysOff = [
    jan_first + datetime.timedelta(day - 1) - msk_offset
    for day in calendar["daysOff"]
]

# Count claims activated since 2021 (MSK) whose deadline lands on a day off.
count = db["claims"].count_documents({
    "deadlineDate": {"$in": daysOff},
    "activationDate": {"$gte": Client.ISODate("2020-12-31T21:00:00.000+0000")}
})
print(count)
import time

from rldd.client import Client
from rldd import config

client = Client(config.PROD)
db = client.connect()

# Claims of the test service stuck in status "2" since the given date.
claims = db["claims"].find({
    "service.srguServiceId": "1234567891000000001",
    "currStatus.statusCode": "2",
    "claimCreate": {'$gte': Client.ISODate("2020-01-08T16:19:26.860+0000")}
})

iteration = 0
for claim in claims:
    iteration += 1
    claimId = claim["_id"]
    client.postStatus(claimId, 56, "Статус проставлен автоматически через РЛДД")
    client.postStatus(claimId, 3, "Статус проставлен автоматически через РЛДД")
    # Wait until status "62" appears for this claim.
    # FIX: the original busy-waited at full speed, re-querying MongoDB in a
    # tight loop; poll once a second instead.
    while db["claims_status"].find_one({"claimId": str(claimId), "statusCode": "62"}) is None:
        time.sleep(1)
    client.postStatus(claimId, 24, "Статус проставлен автоматически через РЛДД")
    print(f"Claim {claimId} is done. Iteration: {iteration}")
from rldd.client import Client from rldd.config import PKPVD def format_date(_date): return datetime.date.strftime(_date, "%d/%m/%Y %H:%M:%S") pvd = Client(PKPVD, "pvdrs").connect() result_file = open("rejects_suspenses.csv", "w+", newline="") result_file.write( "ID обращения;Номер обращения;Номер PKPVDMFC;Дата подачи;Дата приостановки;Кол-во приостановок;Дата отказа;Кол-во отказов;Регистрационные действия\n" ) messages = pvd["jrnl_ppozinmessage"].find({ "statusWhen": { "$gte": Client.ISODate("2020-06-09T21:00:00.000+0000"), "$lte": Client.ISODate("2020-12-31T21:00:00.000+0000") }, "statusCode": { "$in": ["suspended", "rejected"] } }) iteration = 0 for message in messages: iteration += 1 rejects = 0 suspense = 0 try: substanceId = message["substanceId"] requestNumber = message["requestNumber"] except KeyError:
result_file = open('ids.csv', 'w+') count_result = open('counts.csv', 'w+') f_count = 0 r_count = 0 db = Client(config.PROD).connect() dps = Client(config.DPS, "dps").connect() jact = {"_id": 1, "personsInfo": 1, "pguConsultationInfo": 1} iteration1 = 0 iteration2 = 0 while True: try: claims = db["claims"].find( { "activationDate": { '$gte': Client.ISODate("2019-12-31T21:00:00.000+0000"), '$lte': Client.ISODate("2020-09-28T21:00:00.000+0000") }, "customClaimNumber": { '$regex': '^M.*' }, "personsInfo": { '$exists': True } }, jact, no_cursor_timeout=True).skip(iteration1) try: for claim in claims: iteration1 += 1 print("first", iteration1)
from rldd.client import Client
from rldd import config

dps_dev = Client(config.DPS_DEV, 'dps-develop').connect()
dps = Client(config.DPS, 'dps').connect()

# Copy recent analytics documents from the develop DPS into the main DPS.
docs = list(dps_dev["analytics"].find({
    "esiaVerificationWidget.interactionDate": {
        "$gte": Client.ISODate("2021-02-04T15:30:00.000+0000")
    }
}))

# FIX: insert_many raises InvalidOperation when given an empty document
# sequence; guard so an empty query result is not a crash.
if docs:
    inserts = dps["analytics"].insert_many(docs)
    print(inserts.inserted_ids)
else:
    print("no documents to copy")
from rldd.client import Client
from rldd import config

db = Client(config.PROD).connect()

claims = db["claims"].find({
    "service.srguServicePassportId": "5000000000188307694",
    "activationDate": {'$gte': Client.ISODate("2020-08-01T00:00:00.000+0000")}
})

# Form-field stringIds whose presence disqualifies a claim from the report.
excluded_ids = {"proshuvkldtp", "proshuvklPZZ", "proshuvklPPT"}

# FIX: the original leaked the CSV handle; "with" closes it even on errors.
with open('res.csv', 'w+') as res_file:
    for claim in claims:
        if "fields" not in claim:
            continue
        ccn = claim["customClaimNumber"]
        # Keep only claims where none of the excluded stringIds appear.
        # (seq.get returns None when "stringId" is absent, which is never
        # in excluded_ids — same outcome as the original's membership check.)
        isCorrect = all(
            seq.get("stringId") not in excluded_ids
            for seq in claim["fields"]["sequenceValue"]
        )
        if isCorrect:
            res_file.write(f"{ccn}\n")
from rldd.client import Client
from rldd import config

db = Client(config.PROD).connect()

# Burial-service ("zahoron") claims submitted by MFC senders (codes starting
# with 50000) during one night of Oct 2020 (MSK midnights stored as UTC).
claims = db["claims"].find({
    "service.srguServicePassportId": "5000000000193224739",
    "senderCode": {'$regex': "^50000.*"},
    "activationDate": {
        "$gte": Client.ISODate("2020-10-11T21:00:00.000+0000"),
        "$lte": Client.ISODate("2020-10-12T21:00:00.000+0000")
    }
})

result_file = open("zahoron.csv", "w+")
result_file.write(
    "наименование МФЦ;наименование процедуры;ФИО заявителя;дата подачи;статус;дата проставления статуса\n"
)

for claim in claims:
    ccn = claim["customClaimNumber"]
    curr = claim["currStatus"]
    # One CSV row worth of data; dates are serialized as ISO-8601.
    result = {
        "dept": claim["creatorDeptId"],
        "srgu": claim["service"]["name"],
        "fio": claim["person"]["fio"],
        "actDate": claim["activationDate"].isoformat(),
        "statusCode": curr["statusCode"],
        "statusDate": curr["statusDate"].isoformat()
    }
    # NOTE(review): the fragment appears truncated here — `result`, `ccn` and
    # `isSecond` are not consumed in the visible code, and result_file is
    # never closed in view; confirm against the full script.
    isSecond = False
from rldd.client import Client
from rldd import config

db = Client(config.PROD).connect()

# Claims of this service passport that already carry a result and whose
# documents were dispatched after 2020-08-31 21:00 UTC.
claims = db["claims"].find({
    "service.srguServicePassportId": "5000000000186738813",
    "resultStatus": {"$exists": True},
    "docSendDate": {"$gte": Client.ISODate("2020-08-31T21:00:00.000+0000")}
})

# FIX: "with" closes the CSV even if the cursor iteration raises
# (the original only closed it on the happy path).
with open("claims_without_result_docs.csv", "w+") as result_file:
    result_file.write("customClaimNumber;docSendDate;resultStatus;service.name\n")
    for claim in claims:
        claimId = claim["_id"]
        ccn = claim["customClaimNumber"]
        # Existence check only: find_one avoids materialising every matching
        # document the way the original list(find(...)) did.
        result_doc = db["docs"].find_one({
            "ownerId": str(claimId),
            "title": {"$regex": "^Результат.*"}
        })
        if result_doc is None:
            print(ccn)
            result_file.write(
                f"{ccn};{claim['docSendDate'].isoformat()};{claim['resultStatus']};{claim['service']['name']}\n"
            )
# NOTE(review): Client/config are expected to be imported above this fragment
# — confirm against the full script.
db = Client(config.PROD).connect()

# "service" sub-document keys holding human-readable names.
names = [
    "name", "srguServiceName", "srguDepartmentName", "srguServicePassportName"
]
data_names = [
    "customClaimNumber",
    "activationDate",
]

result_file = open('nonames.csv', 'w+')

# Claims activated since 2020 (MSK midnight as UTC) with a real OKTMO.
query = {
    "activationDate": {'$gte': Client.ISODate("2019-12-31T21:00:00.000+0000")},
    "oktmo": {'$ne': "99999999"}
}
projection = {"_id": 1, "customClaimNumber": 1, "activationDate": 1, "service": 1}
claims = db["claims"].find(query, projection).limit(1000)


def form_string_from_dict(diction: dict) -> str:
    """Render the dict's values as one semicolon-separated CSV line.

    FIX: replaces the original quadratic `+=` accumulation followed by a
    trailing-separator slice; output is byte-identical, including the bare
    "\\n" produced for an empty dict.
    """
    return ";".join(diction.values()) + "\n"


def add_array_to_obj(obj, arr):
    """Initialise obj[key] = "" for every key in arr (mutates obj in place)."""
    for key in arr:
        obj[key] = ""
from rldd.client import Client
from rldd import config

db = Client(config.DEV).connect()

# Remove the "Корректировка срока" (deadline-correction) stage from DEV claims
# that have no result yet, are more than one day from their deadline, and whose
# deadline is exactly 2020-10-01 21:00 UTC.
update_result = db["claims"].update_many(
    {
        "deadlineDate": Client.ISODate("2020-10-01T21:00:00.000+0000"),
        "daysToDeadline": {'$gt': 1},
        "resultStatus": {"$exists": False}
    },
    {
        "$pull": {"deadlineStages": {"stageName": "Корректировка срока"}}
    }
)
print(f"{update_result.modified_count} / {update_result.matched_count}")
from bson import InvalidBSON from rldd.client import Client from rldd.config import PROD, DEV db = Client(DEV).connect() query = { "activationDate": { "$gte": Client.ISODate("2020-09-01T00:00:00.000+0000") }, "docSendDate": { "$exists": True } } pjct = { "_id": 1, "deadlineDate": 1, "docSendDate": 1, "customClaimNumber": 1, "daysToDeadline": 1 } iteration = 0 total_count = db["claims"].count_documents(query) while True: try: claims = db["claims"].find(query, pjct).skip(iteration) for claim in claims: iteration += 1 claimId = claim["_id"] daysToDeadline = (claim["deadlineDate"] -
import datetime
from rldd.client import Client
from rldd import config

db = Client(config.PROD).connect()

# Suspended, non-RRTR claims with a deadline after 2020-10-03 21:00 UTC.
claims = db["claims"].find({
    "deadlineDate": {'$gte': Client.ISODate("2020-10-03T21:00:00.000+0000")},
    "suspenseReason": {'$exists': True},
    "senderCode": {'$ne': "RRTR01001"}
})

# FIX: "with" closes the CSV even on errors (the original only closed it at
# the end of the happy path).
with open('result.csv', 'w+') as result_file:
    for claim in claims:
        ccn = claim["customClaimNumber"]
        deadlineDate = claim["deadlineDate"]
        # FIX: the query dates are stored with a +0000 offset, i.e. naive UTC
        # as PyMongo returns them, but the original compared against local
        # datetime.now(), skewing the overdue check by the host's TZ offset.
        endDate = claim.get("docSendDate", datetime.datetime.utcnow())
        # Negative difference means the claim was (or is) past its deadline.
        if (deadlineDate - endDate).days < 0:
            print(ccn)
            result_file.write(f"{ccn}\n")
import pymongo
from rldd.client import Client
from rldd.config import PROD

db = Client(PROD).connect()

result_file = open("services.csv", "w+", newline="")
result_file.write("Наименование услуги;Код процедуры\n")

# Every status-"53" event registered since 2019.
query = {
    "statusCode": "53",
    "statusDate": {
        "$gte": Client.ISODate("2019-01-01T00:00:00.000+0000")
    }
}
total_count = db["claims_status"].count_documents(query)
# no_cursor_timeout: the per-status work below can outlive the server-side
# cursor idle timeout on a large result set.
statuses = db["claims_status"].find(query, no_cursor_timeout=True)
serviceDict = {}
iteration = 0
for status in statuses:
    iteration += 1
    print(f"{iteration} / {total_count}")
    claimId = status["claimId"]
    # The claim's full status history, oldest first.
    statusesList = list(db["claims_status"].find({
        "claimId": claimId
    }).sort("statusDate", pymongo.ASCENDING))
    for index, statusElement in enumerate(statusesList):
        if statusElement["statusCode"] == "53":
            if len(statusesList) > index + 1:
                # A "53" immediately followed by a "2": fetch the claim itself.
                if statusesList[index + 1]["statusCode"] == "2":
                    claim = db["claims"].find_one(
                        {"_id": Client.getId(claimId)})
                    # NOTE(review): the fragment appears truncated here —
                    # `claim`, serviceDict and result_file are never used or
                    # closed in the visible code; confirm against the full script.
from bson import InvalidBSON
from rldd.client import Client
from rldd import config
from datetime import datetime, timedelta
import requests
from time import sleep

db = Client(config.PROD).connect()

# 2020-10-02 00:00 MSK expressed as naive UTC (minus the 3-hour offset).
newDate = datetime(2020, 10, 2) - timedelta(hours=3)

insert_count = 0
processed = 0
ind = 0

# Claims whose deadline falls inside this window.
# NOTE(review): the upper bound uses 20:00 while the lower uses 21:00 —
# presumably intentional (one hour short of the MSK midnight); confirm.
query = {
    "deadlineDate": {"$gte": Client.ISODate("2020-03-11T21:00:00.000+0000"),
                     "$lte": Client.ISODate("2020-10-01T20:00:00.000+0000")}
}
# Only deadline-related fields are needed by the processing loop.
# NOTE(review): the loop consuming these variables (and the uses of requests,
# sleep and InvalidBSON) is not in the visible fragment.
projection = {
    "_id": 1,
    "customClaimNumber": 1,
    "deadlineDate": 1,
    "deadlineStages": 1,
    "deadline": 1,
    "deadlineInWorkDays": 1,
    "activationDate": 1,
    "resultStatus": 1,
    "docSendDate": 1
}
from rldd.client import Client from rldd import config import datetime import requests from colors import console_colors yellow = console_colors.CYELLOW end = console_colors.CEND db = Client(config.PROD).connect() processed = 0 insert_count = 0 query = { "activationDate": { "$gte": Client.ISODate("2020-10-03T00:00:00.000+0000") }, "suspenseReason": { "$exists": True } } result_file = open('deadlines_correct.csv', 'w+') result_file.write( "customClaimNumber;activationDate;old DeadlineDate;new deadlineDate;daysToDeadline\n" ) projection = { "_id": 1, "oktmo": 1, "claimCreate": 1,
"PKG_STM_RETURN_REJECT_34": "ППОЗ отказал в приёме пакета заявлений", "PKG_IMG_READY_40": "Пакет документов готов к отправке", "PKG_IMG_PROCESS_41": "Отправка пакета документов", "PKG_IMG_PROBLEM_42": "Проблемы с отправкой пакета документов", "PKG_IMG_WAIT_PPOZ_43": "Обработка документов в ППОЗ", "PKG_IMG_RETURN_REJECT_44": "ППОЗ отказал в приёме пакета документов", "CANCELED_114": "Аннулировано", "ABNORMAL_TERMINATION_115": "Аварийное завершение" } result_file.write( "Дата создания;Код МФЦ;Наименование МФЦ;Номер обращения;Номер заявления;Количество;Заявители;Оператор приема;Наименование услуги\n" ) query = { "createEvent.dateWhen": { "$gte": Client.ISODate("2020-06-10T00:00:00.000+0300"), "$lte": Client.ISODate("2021-01-01T00:00:00.000+0300") }, "currentStep": { "$nin": ["CANCELED_114", "CREATE", "ATTACH_IMAGE"] }, "name": { "$nin": [ "Предоставление копии документов из реестрового дела объекта недвижимости", "Предоставление копии документов из реестрового дела зоны, территории или границ", "Предоставление сведений об объекте недвижимости", "Предоставление сведений о правообладателе", "Предоставление сведений о кадастровом квартале", "Предоставление сведений о зоне", "Предоставление сведений о границе", "Предоставление сведений доступом к ФГИС ЕГРН",