def get(self, request, *args, **kwargs):
    # Debug endpoint: checks the data type of the `price` column on a
    # hard-coded connection (id=1) and collection (`order_items`).
    provider_connection = self.queryset.get(id=1)
    mongo_db_manager = MongoDBManager()
    db, cache_db = mongo_db_manager.connection_mongo_by_provider(
        provider_connection=provider_connection)
    result = mongo_db_manager.check_column_data_type(db, 'order_items', 'price')
    return responses.ok(data=result,
                        method=constant.GET,
                        entity_name='db_provider_connection')
def get(self, request, *args, **kwargs):
    table_name = kwargs.get('table', None)
    connection_id = kwargs.get('pk', None)
    page = request.GET.get('page', 1)
    page_size = request.GET.get('page_size', 20)
    column_sort = request.GET.get('column_sort', None)
    sort = request.GET.get('sort', None)
    try:
        provider_connection = self.queryset.get(id=connection_id)
        provider = provider_connection.provider
        if provider:
            if provider.name == MONGO:
                mongo_db_manager = MongoDBManager()
                try:
                    db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                        provider_connection=provider_connection)
                    documents, count = mongo_db_manager.get_all_documents(
                        db=db, collection=table_name,
                        column_sort=column_sort, sort=sort,
                        page=page, page_size=page_size)
                    data = list(documents)
                    result = json.loads(dumps(data))
                    return responses.paging_data(data=result,
                                                 total_count=count,
                                                 method=constant.GET,
                                                 entity_name='db_provider_connection')
                except Exception as err:
                    return responses.bad_request(data=str(err),
                                                 message_code='BD_ERROR')
            else:
                # TODO: implement other providers in a later phase
                pass
        else:
            return responses.bad_request(data='Provider not found',
                                         message_code='PROVIDER_NOT_FOUND')
    except DBProviderConnection.DoesNotExist as err:
        return responses.not_found(data=None,
                                   message_code='PROVIDER_CONNECTION_NOT_FOUND',
                                   message_system=str(err))
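# A minimal client-side sketch for the paging endpoint above. The route is an
# assumption (URL patterns are not shown here); the query parameters come
# straight from the handler (`page`, `page_size`, `column_sort`, `sort`).
import requests

resp = requests.get(
    "http://localhost:8000/api/connections/1/tables/order_items/",  # assumed route
    params={"page": 2, "page_size": 20, "column_sort": "price", "sort": "desc"},
)
print(resp.json())  # paged rows plus total_count from responses.paging_data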
def list(self, request, *args, **kwargs):
    sql_function_id = kwargs.get('pk', None)
    page = request.GET.get('page', 1)
    page_size = request.GET.get('page_size', 20)
    try:
        mongo_db_manager = MongoDBManager()
        sql_function = SqlFunction.objects.get(id=sql_function_id)
        connection = sql_function.connection
        if connection.provider.name == MONGO:
            db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                provider_connection=connection)
            documents = mongo_db_manager.sql_function_exe(
                sql_function=sql_function, db=db, page=page, page_size=page_size)
            data = list(documents)
            # Guard against an empty result set before reading the first record.
            columns = list(data[0].keys()) if data else []
            result = json.loads(dumps(data))
            final_data = {'columns': columns, 'collections': result}
            return responses.paging_data(data=final_data,
                                         total_count=page_size,
                                         method=constant.GET,
                                         entity_name='db_provider_connection')
        else:
            return responses.ok(data=None,
                                method=constant.GET,
                                entity_name='sql_function')
    except Exception as err:
        return responses.bad_request(data=str(err), message_code='SQL_ERROR')
def post(self, request, *args, **kwargs):
    data = request.data
    serializer = self.get_serializer(data=data)
    serializer.is_valid(raise_exception=True)
    self.perform_create(serializer)
    try:
        connection = DBProviderConnection.objects.get(id=data.get('connection'))
        provider = connection.provider
        custom_column = CustomColumnType.objects.get(id=data.get('custom_column'))
        if provider.name == MONGO:
            mongo_db = MongoDBManager()
            db, cache_db = mongo_db.connection_mongo_by_provider(
                provider_connection=connection)
            # type in [str, int, float, datetime]
            table = data.get('table_name')
            column = data.get('real_column')
            data_type = custom_column.slug
            _ = mongo_db.update_convert_column_data_type(
                db=db, table=table, column=column, data_type=data_type,
                provider_connection_id=connection.id)
            return responses.ok(data=serializer.data,
                                method=constant.POST,
                                entity_name='custom_column_mapping')
        else:
            return responses.ok(data=None,
                                method=constant.POST,
                                entity_name='custom_column_mapping')
    except Exception as err:
        return responses.bad_request(data=str(err), message_code='MAPPING_ERROR')
def get(self, request, *args, **kwargs):
    pk = kwargs.get('pk', None)
    page = request.GET.get('page', 1)
    page_size = request.GET.get('page_size', 20)
    try:
        custom_column_fk = self.get_queryset().get(id=pk)
        custom_column_filter = CustomColumnFKFilter.objects.filter(
            custom_column_fk=custom_column_fk)
        provider_connection = custom_column_fk.connection
        provider = provider_connection.provider
        if provider.name == MONGO:
            mongo_db_manager = MongoDBManager()
            db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                provider_connection=provider_connection)
            documents, count = mongo_db_manager.find_by_fk(
                db, custom_column_fk.table_name, custom_column_filter,
                page=page, page_size=page_size)
            data = list(documents)
            result = json.loads(dumps(data))
            return responses.paging_data(data=result,
                                         total_count=count,
                                         method=constant.GET,
                                         entity_name='custom_column_fk')
        else:
            return responses.paging_data(data=None,
                                         total_count=0,
                                         method=constant.GET,
                                         entity_name='custom_column_fk')
    except Exception as err:
        return responses.bad_request(data=str(err),
                                     message_code='CUSTOM_COLUMN_FK_NOT_FOUND')
def post(self, request, *args, **kwargs):
    data = request.data
    connection_id = kwargs.get("connection_id")
    list_field = data.get("list_field")
    table_name = data.get("table_name")
    connection = DBProviderConnection.objects.filter(id=connection_id).first()
    if connection is None:
        return responses.bad_request(data=None,
                                     message_code="PROVIDER_CONNECTION_NOT_FOUND")
    if connection.provider.name == MONGO:
        mongo_db_manager = MongoDBManager()
        db, cache_db = mongo_db_manager.connection_mongo_by_provider(
            provider_connection=connection)
        collections = mongo_db_manager.get_all_collections(db=db, cache_db=cache_db)
        column_mapping = CustomColumnMapping.objects.filter(
            table_name=table_name, connection_id=connection.id).exists()
        # Only create the collection when no mapping exists and the name is free.
        if not column_mapping and table_name not in collections:
            mongo_db_manager.create_new_collection(db, table_name)
        else:
            return responses.bad_request(data=None,
                                         message_code="TABLE_NAME_IS_EXISTS")
        for field in list_field:
            try:
                custom_column = CustomColumnType.objects.get(
                    id=field.get('custom_column'))
                CustomColumnMapping.objects.create(
                    connection_id=connection.id,
                    table_name=table_name,
                    real_column=field.get("column_name"),
                    custom_column_name=field.get("column_name"),
                    custom_column_id=custom_column.id)
            except Exception as ex:
                print(ex)
                continue
    resp = CustomColumnMapping.objects.filter(connection_id=connection.id,
                                              table_name=table_name)
    serializer = self.get_serializer(resp, many=True)
    return responses.ok(data=serializer.data,
                        method=constant.POST,
                        entity_name="custom_column_mapping")
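# Illustrative request body for the create-table endpoint above. The field
# names ("table_name", "list_field", "column_name", "custom_column") are taken
# from the handler; the concrete values are made up.
example_payload = {
    "table_name": "customer_notes",
    "list_field": [
        {"column_name": "note", "custom_column": 1},    # custom_column = CustomColumnType id
        {"column_name": "rating", "custom_column": 2},
    ],
}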
def get(self, request, *args, **kwargs):
    table_name = kwargs.get('table_name', None)
    list_filter = kwargs.get('list_filter', None)
    list_column = kwargs.get('list_column', None)
    connection_id = kwargs.get('connection', None)
    try:
        provider_connection = self.queryset.get(id=connection_id)
        provider = provider_connection.provider
        if provider:
            if provider.name == MONGO:
                mongo_db_manager = MongoDBManager()
                try:
                    db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                        provider_connection=provider_connection)
                    documents = mongo_db_manager.export_db_by_column(
                        db=db, table=table_name,
                        list_filter=list_filter, list_column=list_column)
                    data = list(documents)
                    result = json.loads(dumps(data))
                    pdf = GeneratePdf(result, table_name, list_column)
                    response = pdf.generate_pdf(context={})
                    return response
                except Exception as err:
                    return responses.bad_request(data=str(err),
                                                 message_code='BD_ERROR')
            else:
                # TODO: implement other providers in a later phase
                pass
        else:
            return responses.bad_request(data='Provider not found',
                                         message_code='PROVIDER_NOT_FOUND')
    except DBProviderConnection.DoesNotExist as err:
        return responses.not_found(data=None,
                                   message_code='EXPORT_ERROR',
                                   message_system=str(err))
def update(self, request, *args, **kwargs):
    pk = kwargs.get('pk', None)
    try:
        data = request.data
        partial = kwargs.pop('partial', False)
        instance = CustomColumnMapping.objects.get(id=pk)
        serializer = self.serializer_class(instance, data=data, partial=partial)
        serializer.is_valid(raise_exception=True)
        self.perform_update(serializer)
        connection = DBProviderConnection.objects.get(id=data.get('connection'))
        provider = connection.provider
        custom_column = CustomColumnType.objects.get(id=data.get('custom_column'))
        if provider.name == MONGO:
            mongo_db = MongoDBManager()
            db, cache_db = mongo_db.connection_mongo_by_provider(
                provider_connection=connection)
            # type in [str, int, float, datetime]
            table = data.get('table_name')
            column = data.get('real_column')
            data_type = custom_column.slug
            _ = mongo_db.update_convert_column_data_type(
                db=db, table=table, column=column, data_type=data_type,
                provider_connection_id=connection.id)
            return responses.ok(data=serializer.data,
                                method=constant.PUT,
                                entity_name='custom_column_mapping')
        else:
            return responses.ok(data=None,
                                method=constant.PUT,
                                entity_name='custom_column_mapping')
    except Exception as err:
        # `return` was missing here, so errors previously fell through
        # and the view returned None.
        return responses.bad_request(data=str(err),
                                     message_code='CUSTOM_COLUMN_MAPPING_NOT_FOUND')
def put(self, request, *args, **kwargs):
    data = request.data
    convert_field = data.get("convert_field")
    data_type = data.get("data_type")
    table = data.get("table")
    provider_connection_id = data.get("provider_connection_id")
    mongo_db = MongoDBManager()
    provider_connection = DBProviderConnection.objects.filter(
        id=provider_connection_id).first()
    db, cache_db = mongo_db.connection_mongo_by_provider(
        provider_connection=provider_connection)
    # type in [str, int, float, datetime]
    is_convert = mongo_db.update_convert_column_data_type(
        db=db, table=table, column=convert_field, data_type=data_type,
        provider_connection_id=provider_connection_id)
    return responses.ok(data={"is_convert": is_convert},
                        method="put",
                        entity_name="test")
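# Illustrative body for the conversion endpoint above. Keys come from the
# handler; values are made up. Per the comment in the handler, data_type is
# one of str, int, float, datetime.
example_body = {
    "provider_connection_id": 1,
    "table": "order_items",
    "convert_field": "price",
    "data_type": "float",
}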
def get(self, request, *args, **kwargs):
    connection_id = kwargs.get('pk', None)
    try:
        if connection_id:
            provider_connection = self.queryset.get(id=connection_id)
            provider = provider_connection.provider
            if provider:
                if provider.name == MONGO:
                    mongo_db_manager = MongoDBManager()
                    db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                        provider_connection=provider_connection)
                    data = mongo_db_manager.get_all_collections(db=db,
                                                                cache_db=cache_db)
                    return responses.ok(data=data,
                                        method=constant.GET,
                                        entity_name='db_provider_connection')
                else:
                    # TODO: implement other providers in a later phase
                    pass
            else:
                return responses.bad_request(data='Provider not found',
                                             message_code='PROVIDER_NOT_FOUND')
        else:
            return responses.bad_request(data=None,
                                         message_code='PROVIDER_CONNECTION_ID_EMPTY')
    except Exception as err:
        return responses.not_found(data=None,
                                   message_code='PROVIDER_CONNECTION_NOT_FOUND',
                                   message_system=str(err))
def post(self, request, *args, **kwargs):
    try:
        sql_function_id = kwargs.get('sql_function_id')
        table_name = kwargs.get('table_name')
        sql_function = self.get_queryset().get(id=sql_function_id)
        connection = sql_function.connection
        provider = connection.provider
        if provider.name == MONGO:
            mongo_db_manager = MongoDBManager()
            db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                provider_connection=connection)
            collection = mongo_db_manager.create_new_collection(
                db=db, collection_name=table_name)
            mongo_db_manager.create_table_with_sql_function(db, collection,
                                                            sql_function)
            # The success path previously fell through without a response.
            return responses.ok(data=None,
                                method=constant.POST,
                                entity_name='sql_function')
        else:
            # entity_name expects a string, not the SqlFunction instance.
            return responses.ok(data=None,
                                method=constant.POST,
                                entity_name='sql_function')
    except Exception as err:
        return responses.bad_request(data=str(err),
                                     message_code='SQL_FUNCTION_ERROR')
def post(self, request, *args, **kwargs):
    try:
        user = request.user
        file_obj = request.FILES['file']
        file = file_obj.read().decode('utf-8')
        csv_data = csv.DictReader(StringIO(file))
        connection_id = kwargs.get("connection")
        table_name = kwargs.get("table_name")
        connection = DBProviderConnection.objects.filter(id=connection_id).first()
        if connection is None:
            return responses.bad_request(data=None,
                                         message_code="PROVIDER_CONNECTION_NOT_FOUND")
        if connection.provider.name == MONGO:
            mongo_db_manager = MongoDBManager()
            db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                provider_connection=connection)
            collections = mongo_db_manager.get_all_collections(db=db,
                                                               cache_db=cache_db)
            if table_name not in collections:
                mongo_db_manager.create_new_collection(db, table_name)
            else:
                # Existing collection: every CSV header must match a known column.
                headers = list(csv_data.fieldnames)
                try:
                    columns = mongo_db_manager.get_all_keys(db=db,
                                                            collection=table_name)
                    if columns:
                        for header in headers:
                            if header not in columns:
                                return responses.bad_request(
                                    data=f"Column '{header}' does not exist in table {table_name}",
                                    message_code="COLUMN_NOT_EXISTS")
                except Exception as err:
                    return responses.bad_request(data=str(err),
                                                 message_code=str(err))
            # Store the upload under MEDIA_ROOT/import with a timestamped name,
            # then record it for asynchronous processing.
            file_full_name = file_obj.name.split(".")
            time_stamp = datetime.datetime.now().timestamp()
            file_name = f"{file_full_name[0]}_{str(int(time_stamp))}.{file_full_name[1]}"
            file_name = file_name.replace(" ", "_")
            fs = FileSystemStorage(location=f"{settings.MEDIA_ROOT}/import",
                                   base_url=f"{settings.MEDIA_ROOT}/import")
            filename = fs.save(file_name, file_obj)
            uploaded_file_url = fs.url(filename)
            static_dir = f"{settings.MEDIA_ROOT}/import/{filename}"
            import_record = ImportData.objects.create(
                provider_connection_id=connection.id,
                username=user.username,
                table=table_name,
                file_url=static_dir)
            # process_import_database.delay(import_id=import_record.id)
            return responses.ok(data="waiting import data",
                                method='post',
                                entity_name='import_database')
        return responses.bad_request(data=None,
                                     message_code="SQL_PROVIDER_NOT_FOUND")
    except Exception as err:
        return responses.not_found(data=None,
                                   message_code='SQL_FUNCTION_NOT_FOUND',
                                   message_system=str(err))
def get(self, request, *args, **kwargs):
    provider_connection = self.queryset.get(id=1)
    provider = provider_connection.provider
    if provider:
        if provider.name == MONGO:
            mongo_db_manager = MongoDBManager()
            db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                provider_connection=provider_connection)
            collection = db['order_items']
            pipeline = [
                # Reduce the result set to a single document.
                {'$limit': 1},
                # Strip all fields except the id.
                {'$project': {'_id': 1}},
                # Strip the id. The document is now empty.
                {'$project': {'_id': 0}},
                {'$lookup': {
                    'from': 'order_items',
                    'pipeline': [
                        {'$match': {'order_item_id': '1'}},
                        {'$project': {'_id': 0, 'result': '$price'}},
                    ],
                    'as': 'collection1',
                }},
                {'$lookup': {
                    'from': 'order_reviews',
                    'pipeline': [
                        {'$match': {'review_score': '5'}},
                        {'$project': {'_id': 0, 'result': '$review_score'}},
                    ],
                    'as': 'collection2',
                }},
                {'$project': {'Union': {'$setUnion': ['$collection1',
                                                      '$collection2']}}},
                # Unwind the union collection into a result set.
                {'$unwind': '$Union'},
                # Replace the root to clean up the resulting documents.
                {'$replaceRoot': {'newRoot': '$Union'}},
            ]
            c = collection.aggregate(pipeline)
            page = 1
            page_size = 20
            data = list(c)
            result = json.loads(dumps(data))
            start_length = 0 if page == 1 else (page - 1) * page_size + 1
            end_length = page_size if page == 1 else page * page_size + 1
            return responses.ok(data={'count': len(result),
                                      'result': result[start_length:end_length]},
                                method=constant.GET,
                                entity_name='sql_function')
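# The pipeline above emulates a SQL UNION across two collections; roughly:
#   SELECT price AS result FROM order_items WHERE order_item_id = '1'
#   UNION
#   SELECT review_score AS result FROM order_reviews WHERE review_score = '5'
# A minimal standalone sketch of the same idea; the connection string and
# database name are assumptions, the stages mirror the handler's pipeline.
from pymongo import MongoClient

client = MongoClient("mongodb://localhost:27017")  # assumed connection string
db = client["ecommerce"]  # assumed database name
union = db["order_items"].aggregate([
    # Manufacture one empty root document for the $lookup stages to hang off.
    {"$limit": 1}, {"$project": {"_id": 1}}, {"$project": {"_id": 0}},
    {"$lookup": {"from": "order_items",
                 "pipeline": [{"$match": {"order_item_id": "1"}},
                              {"$project": {"_id": 0, "result": "$price"}}],
                 "as": "a"}},
    {"$lookup": {"from": "order_reviews",
                 "pipeline": [{"$match": {"review_score": "5"}},
                              {"$project": {"_id": 0, "result": "$review_score"}}],
                 "as": "b"}},
    # $setUnion deduplicates, so this behaves like UNION rather than UNION ALL.
    {"$project": {"u": {"$setUnion": ["$a", "$b"]}}},
    {"$unwind": "$u"},
    {"$replaceRoot": {"newRoot": "$u"}},
])
print(list(union))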
def get(self, request, *args, **kwargs):
    table_name = kwargs.get('table', None)
    connection_id = kwargs.get('pk', None)
    try:
        provider_connection = self.queryset.get(id=connection_id)
        provider = provider_connection.provider
        if provider:
            if provider.name == MONGO:
                mongo_db_manager = MongoDBManager()
                db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                    provider_connection=provider_connection)
                documents, count = mongo_db_manager.get_all_documents(
                    db=db, collection=table_name,
                    column_sort=None, sort=None, page=1, page_size=20)
                columns = []
                # Real columns come from the database itself.
                real_columns = mongo_db_manager.get_all_keys(db=db,
                                                             collection=table_name)
                # Custom column mappings may override how a column is presented.
                custom_columns = CustomColumnMapping.objects.filter(
                    connection_id=connection_id, table_name=table_name)
                if custom_columns.exists():
                    if count == 0:
                        for cc in custom_columns:
                            columns.append({
                                'id': cc.id,
                                'real_column': cc.real_column,
                                'custom_column_name': cc.custom_column_name,
                                'custom_column_id': cc.custom_column.id,
                            })
                    else:
                        for rc in real_columns:
                            is_append = False
                            for cc in custom_columns:
                                if rc == cc.real_column:
                                    columns.append({
                                        'id': cc.id,
                                        'real_column': cc.real_column,
                                        'custom_column_name': cc.custom_column_name,
                                        'custom_column_id': cc.custom_column.id,
                                    })
                                    is_append = True
                                    break
                            if not is_append:
                                columns.append({
                                    'id': None,
                                    'real_column': rc,
                                    'custom_column_name': None,
                                    'custom_column_id': None,
                                })
                else:
                    for rc in real_columns:
                        columns.append({
                            'id': None,
                            'real_column': rc,
                            'custom_column_name': None,
                            'custom_column_id': None,
                        })
                return responses.ok(data=columns,
                                    method=constant.GET,
                                    entity_name='db_provider_connection')
            else:
                # TODO: implement other providers in a later phase
                pass
        else:
            return responses.bad_request(data='Provider not found',
                                         message_code='PROVIDER_NOT_FOUND')
    except Exception as err:
        return responses.not_found(data=None,
                                   message_code='PROVIDER_CONNECTION_NOT_FOUND',
                                   message_system=str(err))
def get(self, request, *args, **kwargs):
    provider_connection = self.queryset.get(id=1)
    provider = provider_connection.provider
    if provider:
        if provider.name == MONGO:
            mongo_db_manager = MongoDBManager()
            db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                provider_connection=provider_connection)
            collection = db['order_items']
            pipeline = [
                {'$limit': 20},
                {'$skip': 0},
                {'$project': {'_id': 0}},
                # Join order_reviews on order_id (an inner join, since
                # preserveNullAndEmptyArrays is False).
                {'$lookup': {
                    'from': 'order_reviews',
                    'localField': 'order_id',
                    'foreignField': 'order_id',
                    'as': 'data',
                }},
                {'$unwind': {'path': '$data',
                             'preserveNullAndEmptyArrays': False}},
                {'$sort': {'order_id': -1}},
                # Flatten the joined document into the root.
                {'$replaceRoot': {'newRoot': {'$mergeObjects': ['$data',
                                                                '$$ROOT']}}},
                {'$project': {'data': 0, '_id': 0}},
            ]
            c = collection.aggregate(pipeline)
            data = list(c)
            result = json.loads(dumps(data))
            return responses.ok(data=result,
                                method=constant.GET,
                                entity_name='sql_function')
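# The pipeline above is roughly an inner join (a sketch, using the table and
# column names from the pipeline):
#   SELECT oi.*, orv.*
#   FROM order_items oi
#   JOIN order_reviews orv ON oi.order_id = orv.order_id
#   ORDER BY oi.order_id DESC
#   LIMIT 20
# One caveat worth flagging: $limit/$skip run *before* the $lookup here, so
# paging applies to order_items rows, not to the joined result rows.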
def get(self, request, *args, **kwargs):
    try:
        user = request.user
        connection_id = kwargs.get("connection")
        table_name = kwargs.get("table_name")
        list_filter = kwargs.get('list_filter', None)
        list_column = kwargs.get('list_column', None)
        provider_connection = self.queryset.get(id=connection_id)
        provider = provider_connection.provider
        if provider.name == MONGO:
            mongo_db_manager = MongoDBManager()
            db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                provider_connection=provider_connection)
            documents = mongo_db_manager.export_db_by_column(
                db=db, table=table_name,
                list_filter=list_filter, list_column=list_column)
            if documents.count() <= 1000:
                # Small exports are written to an in-memory workbook and
                # streamed back immediately.
                result = json.loads(dumps(list(documents)))
                headers = list(result[0].keys())
                output = io.BytesIO()
                workbook = xlsxwriter.Workbook(output)
                worksheet = workbook.add_worksheet()
                cell_format_header = workbook.add_format()
                cell_format_header.set_bold()
                for index in range(len(headers)):
                    worksheet.write(0, index, headers[index], cell_format_header)
                for row_num, columns in enumerate(result):
                    for index in range(len(headers)):
                        # The first column is assumed to be the ObjectId,
                        # serialized by bson as {'$oid': ...}.
                        value = (columns.get(headers[index]) if index != 0
                                 else columns.get(headers[index]).get('$oid'))
                        worksheet.write(row_num + 1, index, value)
                workbook.close()
                output.seek(0)
                today = datetime.now().strftime("%d%m%Y_%H%M%S")
                filename = f"ExportData-{table_name}-{today}.xlsx"
                response = HttpResponse(
                    output,
                    content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')
                response['Content-Disposition'] = 'attachment; filename=%s' % filename
                return response
            else:
                # Large exports are queued and the user is notified later.
                ExportData.objects.create(
                    provider_connection_id=provider_connection.id,
                    username=user.username,
                    table=table_name,
                    status=ExportData.INIT,
                    file_type=ExportData.EXCEL,
                    list_filter=list_filter,
                    list_column=list_column)
                return responses.ok(data="Waiting notify for export",
                                    method=constant.GET,
                                    entity_name='export-data')
        return responses.bad_request(data=None,
                                     message_code="SQL_PROVIDER_NOT_FOUND")
    except Exception as err:
        return responses.not_found(data=None,
                                   message_code='SQL_FUNCTION_NOT_FOUND',
                                   message_system=str(err))
def process_import_database():
    from wab.core.import_database.models import ImportData
    from wab.core.notifications.services.notifications_service import NotificationsService
    from wab.utils.constant import MONGO
    from wab.core.db_provider.models import DBProviderConnection
    from wab.utils.db_manager import MongoDBManager
    from wab.core.notifications.models import PUSH_NOTIFICATION, NOTIFY
    from bson import ObjectId
    import csv
    import os

    default_length = 20
    import_data = ImportData.objects.all()
    for import_record in import_data:
        # import_record = ImportData.objects.filter(id=import_id).first()
        if import_record.provider_connection.provider.name == MONGO:
            table_name = import_record.table
            connection = DBProviderConnection.objects.filter(
                id=import_record.provider_connection.id).first()
            mongo_db_manager = MongoDBManager()
            db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                provider_connection=connection)
            # collections = mongo_db_manager.get_all_collections(db=db, cache_db=cache_db)
            # if table_name not in collections:
            #     # table_name does not exist, skip this record
            #     continue
            table = db[table_name]
            max_records = []
            with open(import_record.file_url, mode='r') as csv_file:
                csv_reader = csv.DictReader(csv_file)
                for row in csv_reader:
                    max_records.append(row)
            # Import at most `default_length` rows per run; the remainder is
            # written back to the file for the next run.
            records = max_records[:default_length]
            for i in range(len(records)):
                row = records[i]
                records[i]["_id"] = ObjectId(row.get("_id")) if row.get("_id") else ObjectId()
            if records:  # insert_many raises on an empty list
                insert = table.insert_many(records)
                response_id = [str(ids) for ids in insert.inserted_ids]
                print(response_id)
            new_record = max_records[default_length:]
            if len(new_record) > 0:
                # Rewrite the file with the rows that are still pending.
                with open(import_record.file_url, 'w') as file:
                    header = new_record[0].keys()
                    writer = csv.DictWriter(file, fieldnames=header)
                    writer.writeheader()
                    for r in new_record:
                        writer.writerow(r)
            else:
                # Everything imported: notify the user and clean up.
                payload_single = {
                    "channel": PUSH_NOTIFICATION,
                    "title": "Insert data success",
                    "body": f"{import_record.username} insert data success",
                    "username": import_record.username,
                    "data": {
                        "username": import_record.username,
                        "action": "import_data",
                        "notification_type": NOTIFY,
                    },
                }
                notify_service = NotificationsService()
                notify_service.process_push_single_notification(data=payload_single)
                os.remove(import_record.file_url)
                import_record.delete()
def get(self, request, *args, **kwargs):
    try:
        user = request.user
        connection_id = kwargs.get("connection")
        table_name = kwargs.get("table_name")
        list_filter = kwargs.get('list_filter', None)
        list_column = kwargs.get('list_column', None)
        provider_connection = self.queryset.get(id=connection_id)
        provider = provider_connection.provider
        if provider.name == MONGO:
            mongo_db_manager = MongoDBManager()
            db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                provider_connection=provider_connection)
            documents = mongo_db_manager.export_db_by_column(
                db=db, table=table_name,
                list_filter=list_filter, list_column=list_column)
            if documents.count() <= 1000:
                result = json.loads(dumps(list(documents)))
                headers = list(result[0].keys())
                # Build a comma-separated header line followed by the rows.
                content = ''
                for header in headers:
                    content += header
                    content += ', ' if headers.index(header) != len(headers) - 1 else '\n'
                for value in result:
                    for header in headers:
                        if header == "_id":
                            content += value.get(header).get('$oid')
                        else:
                            try:
                                content += value.get(header)
                            except TypeError:
                                # Non-string values are skipped rather than cast.
                                content += ''
                        content += ', ' if headers.index(header) != len(headers) - 1 else '\n'
                today = datetime.now().strftime("%d%m%Y_%H%M%S")
                filename = f"ExportData-{table_name}-{today}.txt"
                response = HttpResponse(content, content_type='text/plain')
                response['Content-Disposition'] = 'attachment; filename=%s' % filename
                return response
            else:
                ExportData.objects.create(
                    provider_connection_id=provider_connection.id,
                    username=user.username,
                    table=table_name,
                    status=ExportData.INIT,
                    file_type=ExportData.EXCEL,  # NOTE: EXCEL even though this endpoint exports text
                    list_filter=list_filter,
                    list_column=list_column)
                return responses.ok(data="Waiting notify for export",
                                    method=constant.GET,
                                    entity_name='export-data')
        return responses.bad_request(data=None,
                                     message_code="SQL_PROVIDER_NOT_FOUND")
    except Exception as err:
        return responses.not_found(data=None,
                                   message_code='SQL_FUNCTION_NOT_FOUND',
                                   message_system=str(err))
def process_convert_data():
    from wab.core.custom_column.models import CustomColumnTaskConvert
    from wab.core.db_provider.models import DBProviderConnection
    from wab.utils.db_manager import MongoDBManager
    from wab.utils.operator import MongoColumnType
    from pymongo import UpdateOne
    from wab.core.notifications.models import PUSH_NOTIFICATION, NOTIFY
    from wab.core.notifications.services.notifications_service import NotificationsService

    convert_data = CustomColumnTaskConvert.objects.all()
    for data in convert_data:
        mongo_db = MongoDBManager()
        provider_connection = DBProviderConnection.objects.filter(
            id=data.connection.id).first()
        db, cache_db = mongo_db.connection_mongo_by_provider(
            provider_connection=provider_connection)
        value, name = MongoColumnType.get_type(data.data_real_type)
        r_value, r_name = MongoColumnType.get_type(data.data_type)
        if r_name:
            operations = []
            collection = db[data.table_name]
            list_doc = collection.find({
                data.column_name: {"$exists": True, "$type": value}
            }).limit(1000)
            for doc in list_doc:
                # Queue a conversion of the column value to the target type.
                operations.append(UpdateOne(
                    {"_id": doc["_id"]},
                    {"$set": {
                        data.column_name: mongo_db.convert_column_data_type(
                            doc.get(data.column_name), r_name)
                    }}))
                # Send once every 1000 in a batch.
                if len(operations) == 1000:
                    collection.bulk_write(operations, ordered=False)
                    operations = []
            # Flush whatever is left over.
            if len(operations) > 0:
                collection.bulk_write(operations, ordered=False)
            # Note: list_doc.count() counts all matching documents,
            # ignoring skip & limit.
            if list_doc.count() != 0:
                data.current_row += 1000
                data.save()
            else:
                data.delete()
                payload_single = {
                    "channel": PUSH_NOTIFICATION,
                    "title": "Convert data success",
                    "body": f"Send to {data.connection.creator.username}, system convert data success!",
                    "username": data.connection.creator.username,
                    "data": {
                        "username": data.connection.creator.username,
                        "notification_type": NOTIFY,
                    },
                }
                notify_service = NotificationsService()
                notify_service.process_push_single_notification(data=payload_single)
def get(self, request, *args, **kwargs):
    data = request.query_params
    page = int(data.get("page", '1'))
    page_size = int(data.get("page_size", '20'))
    sharing_key_encode = kwargs.get("sharing_key")
    try:
        # The sharing key is base64("connection_id;table_name").
        sharing_key_decode_utf8 = base64.b64decode(sharing_key_encode)
        sharing_key_decode = sharing_key_decode_utf8.decode("utf-8")
        sharing_key_array = sharing_key_decode.split(";")
        if len(sharing_key_array) == 2:
            connection_id = sharing_key_array[0]
            table_name = sharing_key_array[1]
            # Get data from connection and table name
            provider_connection = self.queryset.get(id=connection_id)
            provider = provider_connection.provider
            if provider:
                if provider.name == MONGO:
                    mongo_db_manager = MongoDBManager()
                    try:
                        db, cache_db = mongo_db_manager.connection_mongo_by_provider(
                            provider_connection=provider_connection)
                        columns = mongo_db_manager.get_all_keys(
                            db=db, collection=table_name)
                        documents, count = mongo_db_manager.get_all_documents(
                            db=db, collection=table_name,
                            column_sort=None, sort=None,
                            page=page, page_size=page_size)
                        data = list(documents)
                        result_document = json.loads(json_util.dumps(data))
                        result = {
                            'columns': columns,
                            'documents': result_document,
                        }
                        return responses.paging_data(data=result,
                                                     total_count=count,
                                                     method=constant.GET,
                                                     entity_name='sharing_files')
                    except Exception as err:
                        return responses.bad_request(data=str(err),
                                                     message_code='BD_ERROR')
                else:
                    # TODO: implement other providers in a later phase
                    return responses.ok(data=None,
                                        method=constant.GET,
                                        entity_name='sharing_files')
            else:
                return responses.bad_request(data='Provider not found',
                                             message_code='PROVIDER_NOT_FOUND')
        else:
            return responses.not_found(data=None,
                                       message_code='SHARING_FILES_GET_DATA_NOT_FOUND')
    except Exception as err:
        return responses.not_found(data=None,
                                   message_code='SHARING_FILES_GET_DATA_NOT_FOUND',
                                   message_system=str(err))
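# Counterpart sketch: how a sharing key for the endpoint above would be
# produced. The "connection_id;table_name" layout is taken from the decode
# logic in the handler; the helper name is made up for illustration.
import base64

def build_sharing_key(connection_id: int, table_name: str) -> str:
    raw = f"{connection_id};{table_name}"
    return base64.b64encode(raw.encode("utf-8")).decode("utf-8")

# build_sharing_key(1, "order_items") -> "MTtvcmRlcl9pdGVtcw=="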