    def handle(self, *args, **options):

        history_manager = HistoryManager()
        rcs_obj = RCS()

        collection = get_database()[Triple.collection_name]
        cur = collection.Triple.find({'_type': 'GAttribute'})
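
        # Some legacy GAttribute documents store 'attribute_type' as a raw
        # ObjectId instead of a DBRef; normalize those, drop orphaned
        # triples, and rebuild each triple's name and RCS history file.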

        for n in cur:
            if type(n['attribute_type']) == ObjectId:
                attr_type = collection.Node.one({'_id': n['attribute_type']})
                if attr_type:
                    collection.update({'_id': n['_id']}, {
                        '$set': {
                            'attribute_type': {
                                "$ref": attr_type.collection_name,
                                "$id": attr_type._id,
                                "$db": attr_type.db.name
                            }
                        }
                    })
                    # Keep the in-memory document consistent with the DB
                    # update so the name can be composed below.
                    n['attribute_type'] = attr_type
                else:
                    # Orphaned triple: its AttributeType no longer exists.
                    collection.remove({'_id': n['_id']})
                    continue

            subject_doc = collection.Node.one({'_id': n.subject})
            n.name = subject_doc.name + " -- " + n.attribute_type['name'] + " -- " + n.object_value

            # Creates a history (version-file) for GAttribute documents
            if history_manager.create_or_replace_json_file(n):
                fp = history_manager.get_file_path(n)
                message = "This document (" + n.name + ") is created on " + subject_doc.created_at.strftime(
                    "%d %B %Y")
                rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")
Example #3
from gnowsys_ndf.ndf.rcslib import RCS
from gnowsys_ndf.ndf.org2any import org2html
from gnowsys_ndf.ndf.views.methods import get_node_common_fields, get_translate_common_fields, get_page, get_resource_type, diff_string, get_node_metadata, create_grelation_list, get_execution_time, parse_data
from gnowsys_ndf.ndf.management.commands.data_entry import create_gattribute
from gnowsys_ndf.ndf.views.html_diff import htmldiff
from gnowsys_ndf.ndf.views.methods import get_versioned_page, get_page, get_resource_type, diff_string, node_thread_access
from gnowsys_ndf.ndf.views.methods import create_gattribute, create_grelation, get_group_name_id, create_thread_for_node, delete_grelation

from gnowsys_ndf.ndf.templatetags.ndf_tags import group_type_info, get_relation_value

from gnowsys_ndf.mobwrite.diff_match_patch import diff_match_patch

#######################################################################################################################################

gst_page = node_collection.one({'_type': 'GSystemType', 'name': GAPPS[0]})
history_manager = HistoryManager()
rcs = RCS()
app = gst_page
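# Module-level objects shared by all 'Page' views below; GAPPS[0] is
# expected to resolve to the 'Page' GSystemType here.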

#######################################################################################################################################
# VIEWS DEFINED FOR GAPP -- 'PAGE'
#######################################################################################################################################


@get_execution_time
def page(request, group_id, app_id=None, page_no=1):
    """Renders a list of all 'Page-type-GSystems' available within the database.
    """
    try:
        group_id = ObjectId(group_id)
    except:
        # Fall back to resolving the group by name.
        group_name, group_id = get_group_name_id(group_id)

Example #4
    def handle(self, *args, **options):
        global UNIT_IDS
        global UNIT_NAMES
        global log_file
        global log_file_path
        global USER_SERIES
        # Two-letter series codes; an Author belongs to a series when
        # '-<code>' appears in its name (matched by the regex below).
        USER_SERIES = ['sp', 'cc', 'mz', 'ct', 'tg', 'rj']
        if not args or args[0] not in USER_SERIES:
          print "\n\nPlease enter a valid user series."
          call_exit()
        
        pattern = '-'+args[0]
        rg = re.compile(pattern,re.IGNORECASE)
        #Fetch all the author objects belonging to the given series
        authornds = node_collection.find({'_type':'Author','name':rg})
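
        # Note: this author cursor only gates the run; the per-user cleanup
        # below works from unit membership rather than from this cursor.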
        
        if authornds.count() > 0:
          all_ann_units_cur = node_collection.find({'member_of': ann_unit_gst_id})
          print "\nTotal Units : ", all_ann_units_cur.count()
          for ind, each_ann_unit in enumerate(all_ann_units_cur, start=1):
            UNIT_IDS.append(each_ann_unit._id)
            UNIT_NAMES.append(each_ann_unit.name)
        
          print "\nUser Artifacts Cleaning of following Units:"
          print ("\n\t".join(["{0}. {1}".format(i,unit_name) for i, unit_name in enumerate(UNIT_NAMES, 1)]))

          proceed_flag = raw_input("\nEnter y/Y to Confirm: ")
          if proceed_flag.strip().lower() == 'y':
            try:

              datetimestamp = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
              log_file_name = 'users_cleaning_' + str(GSTUDIO_INSTITUTE_ID) + "_"+ str(datetimestamp)

              #TOP_PATH = os.path.join(GSTUDIO_DATA_ROOT, 'data_export',  log_file_name)
              #SCHEMA_MAP_PATH = TOP_PATH

              log_file_path = create_log_file(log_file_name)
              #setup_dump_path()


              log_file = open(log_file_path, 'w+')
              log_file.write("\n######### Script ran on : " + str(datetime.datetime.now()) + " #########\n\n")
              log_file.write("User Cleaning of given series: " + str(UNIT_IDS))

              non_admin_user_id_list = get_nonadmin_users(UNIT_IDS)

              print "user_ids", non_admin_user_id_list
              print "unit data cleaning", UNIT_IDS, UNIT_NAMES
              if non_admin_user_id_list:
                log_file.write("Users ids: " + str(non_admin_user_id_list))
              
                log_file.write("\n********************************")
                log_file.write("delete_user_artifacts getting triggered")
                delete_user_artifacts(non_admin_user_id_list)
                counternds = get_counter_ids(user_ids=non_admin_user_id_list)
                print "Total counter nodes",counternds.count()
                for eachnd in counternds:
                  log_file.write("\n*******************************\n")
                  log_file.write("Fetched the counternd of user {0}".format(eachnd.user_id))
                  log_file.write("\nFetching the visited nodes of the given group")
                  print "counternd",eachnd._id
                  #print "visited_nodes",eachnd.visited_nodes.keys()
                  visited_actnds = eachnd.visited_nodes.keys()
                  authorname = Author.get_author_usernames_list_from_user_id_list([eachnd.user_id])
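                  # Benchmark records are looked up via this user's visited
                  # nodes and username, then removed before the counter
                  # document itself (below).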
                  #print "before calling bnchmrkdata",visited_actnds,authorid
                  if len(visited_actnds) > 0:
                    bnchmrknds = get_bnchmrk_data(visited_actnds,authorname[0])
                    print "bnch",bnchmrknds.count(),type(bnchmrknds)
                    for eachbnchmrknd in bnchmrknds:
                      print "Removing :", eachbnchmrknd['_id']
                      #HistoryManager.delete_json_file(bnchmrk, type(bnchmrk))
                      benchmark_collection.collection.remove({'_id':ObjectId(eachbnchmrknd['_id'])})
                  HistoryManager.delete_json_file(eachnd, type(eachnd))
                  counter_collection.collection.remove({'_id':ObjectId(eachnd._id)})
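
                # The two updates below strip the deleted users from the
                # 'contributors' and 'author_set' lists of every GSystem
                # and Group document.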

                res = node_collection.collection.update({
                    "_type": {'$in': ['GSystem', 'Group']},
                    "contributors": {'$in':non_admin_user_id_list}
                }, {
                    "$pullAll": {"contributors": non_admin_user_id_list}
                },
                    upsert=False, multi=True
                )
                print "\n 7 >> contributors : \n", res

                res1 = node_collection.collection.update({
                    "_type": {'$in': ['GSystem', 'Group']},
                    "author_set": {'$in':non_admin_user_id_list}
                }, {
                    "$pullAll": {"author_set": non_admin_user_id_list}
                },
                    upsert=False, multi=True
                )
                print "\n 7 >> author_set : \n", res1

              else:
                log_file.write("No users with non-admin rights found.")
            except Exception as user_artifacts_cleaning_err:
              log_file.write("Error occurred: " + str(user_artifacts_cleaning_err))
              pass
            finally:
              log_file.write("\n*************************************************************")
              log_file.write("\n######### Script Completed at : " + str(datetime.datetime.now()) + " #########\n\n")
              print "\nSTART : ", str(datetimestamp)
              print "\nEND : ", str(datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S"))
              print "*"*70
              print "\n Log will be found at: ", log_file_path
              print "*"*70
              log_file.close()
              call_exit()
        else:
          call_exit()



# Pending:
# - check for grelation `profile_pic` and other to take decision of which object to keep
Example #5
from schema_mapping import create_factory_schema_mapper
from users_dump_restore import create_users_dump
from gnowsys_ndf.ndf.views.methods import get_group_name_id
from gnowsys_ndf.ndf.templatetags.simple_filters import get_latest_git_hash, get_active_branch_name

# global variables declaration
GROUP_CONTRIBUTORS = []
DUMP_PATH = None
TOP_PATH = os.path.join(GSTUDIO_DATA_ROOT, 'data_export')
GROUP_ID = None
DATA_EXPORT_PATH = None
MEDIA_EXPORT_PATH = None
RESTORE_USER_DATA = False
SCHEMA_MAP_PATH = None
log_file = None
historyMgr = HistoryManager()
DUMP_NODES_LIST = []
DUMPED_NODE_IDS = set()
ROOT_DUMP_NODE_ID = None
ROOT_DUMP_NODE_NAME = None
MULTI_DUMP = False
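# Assumption: DUMPED_NODE_IDS guards against dumping the same node twice,
# and MULTI_DUMP marks runs that export more than one root node.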

def create_log_file(dump_node_id):
    '''
        Creates the log file in gstudio-logs/ with
        the name of the dump folder.
    '''
    log_file_name = 'group_dump_' + str(dump_node_id) + '.log'
    if not os.path.exists(GSTUDIO_LOGS_DIR_PATH):
        os.makedirs(GSTUDIO_LOGS_DIR_PATH)

    # Assumed completion: the snippet is truncated here, and callers
    # (cf. Example #4) expect the full log-file path back.
    return os.path.join(GSTUDIO_LOGS_DIR_PATH, log_file_name)
Example #6
    def handle(self, *args, **options):
        try:
            triple_collection_name = Triple.collection_name
            node_collection_name = Node.collection_name
            
            if triple_collection_name not in db.collection_names():
                try:
                    # [A] Create Triples collection
                    info_message = "\n\n  Creating new collection named as \"" + triple_collection_name + "\"..."
                    print info_message
                    log_list.append(info_message)

                    db.create_collection(triple_collection_name)

                    info_message = "\n\tCollection (" + triple_collection_name + ") created successfully."
                    print info_message
                    log_list.append(info_message)

                    info_message = "\n==================================================================================================="
                    print info_message
                    log_list.append(info_message)
                except Exception as e:
                    error_message = "\n  Collection (" + triple_collection_name + ") NOT created as following error occurred: " + str(e)
                    print error_message
                    log_list.append(error_message)
                    return

            # Fetch "Nodes" collection
            node_collection = db[node_collection_name].Node

            # Fetch newly created "Triples" collection
            triple_collection = db[triple_collection_name].Triple

            info_message = "\n\n  Before shifting document(s) from " + node_collection_name + " collection into " + triple_collection_name + " collection: "
            print info_message
            log_list.append(info_message)

            gattribute_cur = node_collection.find({"_type": "GAttribute"})
            gattribute_cur_count = gattribute_cur.count()
            info_message = "\n\n\tNo. of GAttribute node(s) found in " + node_collection_name + " collection: " + str(gattribute_cur_count)
            print info_message
            log_list.append(info_message)

            grelation_cur = node_collection.find({"_type": "GRelation"})
            grelation_cur_count = grelation_cur.count()
            info_message = "\n\tNo. of GRelation node(s) found in " + node_collection_name + " collection: " + str(grelation_cur_count)
            print info_message
            log_list.append(info_message)

            if gattribute_cur.count() == 0 and grelation_cur.count() == 0:
                info_message = "\n\n  No records found in " + node_collection_name + " collection to be shifted into " + triple_collection_name + " collection."
                print info_message
                log_list.append(info_message)
                # info_message = "\n\n  Triples collection already created and indexes, too, set on it."
                # print info_message
                # log_list.append(info_message)

                # info_message = "\n\tExisting index information on \"" + triple_collection_name + "\" collection is as follows:" + \
                #     "\n" + json.dumps(triple_collection.index_information(), indent=2, sort_keys=False)
                # print info_message
                # log_list.append(info_message)

            else:
                gtattribute_cur = triple_collection.find({"_type": "GAttribute"})
                gtattribute_cur_count = gtattribute_cur.count()
                info_message = "\n\n\tNo. of GAttribute node(s) found in " + triple_collection_name + " collection: " + str(gtattribute_cur_count)
                print info_message
                log_list.append(info_message)

                gtrelation_cur = triple_collection.find({"_type": "GRelation"})
                gtrelation_cur_count = gtrelation_cur.count()
                info_message = "\n\tNo. of GRelation node(s) found in " + triple_collection_name + " collection: " + str(gtrelation_cur_count)
                print info_message
                log_list.append(info_message)

                info_message = "\n==================================================================================================="
                print info_message
                log_list.append(info_message)

                info_message = "\n\n  Existing index information on \"" + triple_collection_name + "\" collection are as follows:" + \
                    "\n" + json.dumps(triple_collection.collection.index_information(), indent=4, sort_keys=False)
                print info_message
                log_list.append(info_message)

                # [B] Creating following indexes for "Triples" collection
                info_message = "\n\n\tCreating following indexes for \"" + triple_collection_name + "\" collection..." + \
                    "\n\t\t1. _type(1) >> subject(1) >> attribute_type.$id(1) >> status(1)" + \
                    "\n\t\t2. _type(1) >> subject(1) >> relation_type.$id(1) >> status(1) >> right_subject(1)" + \
                    "\n\t\t3. _type(1) >> right_subject(1) >> relation_type.$id(1) >> status(1)"
                print info_message
                log_list.append(info_message)

                # 1. _type(1) >> subject(1) >> attribute_type.$id(1) >> status(1)
                index_val = triple_collection.collection.ensure_index([("_type", ASCENDING), ("subject", ASCENDING), ("attribute_type.$id", ASCENDING), ("status", ASCENDING)])
                if index_val:
                    info_message = "\n\n\t" + str(index_val) + " index created for " + str(triple_collection_name) + " collection successfully."
                else:
                    info_message = "\n\n\t_type_1_subject_1_attribute_type.$id_1_status_1 index already created for " + str(triple_collection_name) + " collection."
                print info_message
                log_list.append(info_message)

                # 2. _type(1) >> subject(1) >> relation_type.$id(1) >> status(1) >> right_subject(1)
                index_val = triple_collection.collection.ensure_index([("_type", ASCENDING), ("subject", ASCENDING), ("relation_type.$id", ASCENDING), ("status", ASCENDING), ("right_subject", ASCENDING)])
                if index_val:
                    info_message = "\n\t" + str(index_val) + " index created for " + str(triple_collection_name) + " collection successfully."
                else:
                    info_message = "\n\t_type_1_subject_1_relation_type.$id_1_status_1_right_subject_1 index already created for " + str(triple_collection_name) + " collection."
                print info_message
                log_list.append(info_message)

                # 3. _type(1) >> right_subject(1) >> relation_type.$id(1) >> status(1)
                index_val = triple_collection.collection.ensure_index([("_type", ASCENDING), ("right_subject", ASCENDING), ("relation_type.$id", ASCENDING), ("status", ASCENDING)])
                if index_val:
                    info_message = "\n\t" + str(index_val) + " index created for " + str(triple_collection_name) + " collection successfully."
                else:
                    info_message = "\n\t_type_1_subject_1_relation_type.$id_1_status_1_right_subject_1 index already created for " + str(triple_collection_name) + " collection."
                print info_message
                log_list.append(info_message)

                info_message = "\n\n  Modified index information on \"" + triple_collection_name + "\" collection are as follows:" + \
                    "\n" + json.dumps(triple_collection.collection.index_information(), indent=4, sort_keys=False)
                print info_message
                log_list.append(info_message)

                info_message = "\n==================================================================================================="
                print info_message
                log_list.append(info_message)

                # [C] Move GAttribute & GRelation nodes from Nodes collection to Triples collection
                info_message = "\n\n  Moving GAttribute (" + str(gattribute_cur_count) + ") & GRelation (" + str(grelation_cur_count) + ") node(s) from Nodes collection to Triples collection..." + \
                    "\n  THIS MAY TAKE MORE TIME DEPENDING UPON HOW MUCH DATA YOU HAVE.. SO PLEASE HAVE PATIENCE !"
                print info_message
                log_list.append(info_message)

                bulk_insert = triple_collection.collection.initialize_unordered_bulk_op()
                bulk_remove = node_collection.collection.initialize_unordered_bulk_op()
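                # Unordered bulk operations: inserts into Triples and removals
                # from Nodes are queued inside the loop and executed in one
                # batch each after it.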

                triple_cur = node_collection.find({"_type": {"$in": ["GAttribute", "GRelation"]}}, timeout=False)
                delete_nodes = []
                hm = HistoryManager()
                rcs_obj = RCS()
                existing_rcs_file = []
                newly_created_rcs_file = []
                at_rt_updated_node_list = []

                tf1 = time.time()
                for i, doc in enumerate(triple_cur):
                    info_message = "\n\n\tChecking attribute_type & relation_type fields of # " + str((i+1)) + " record :-"
                    print info_message
                    log_list.append(info_message)

                    if doc["_type"] == "GAttribute":
                        if (type(doc["attribute_type"]) != bson.dbref.DBRef) and ( (type(doc["attribute_type"]) == dict) or (type(doc["attribute_type"]) == AttributeType) ):
                            doc["attribute_type"] = node_collection.collection.AttributeType(doc["attribute_type"]).get_dbref()
                            at_rt_updated_node_list.append(str(doc._id))
                            info_message = "\n\tattribute_type field updated for # " + str((i+1)) + " record."
                            print info_message
                            log_list.append(info_message)

                    elif doc["_type"] == "GRelation":
                        if (type(doc["relation_type"]) != bson.dbref.DBRef) and ( (type(doc["relation_type"]) == dict) or (type(doc["relation_type"]) == RelationType) ):
                            doc["relation_type"] = node_collection.collection.RelationType(doc["relation_type"]).get_dbref()
                            at_rt_updated_node_list.append(str(doc._id))
                            info_message = "\n\trelation_type field updated for # " + str((i+1)) + " record."
                            print info_message
                            log_list.append(info_message)

                    delete_nodes.append(doc._id)

                    bulk_insert.insert(doc)

                    try:
                        node_rcs_file = hm.get_file_path(doc)

                        # As we have changed collection-name for Triple from Nodes to Triples
                        # Hence, we need to first replace Triples with Nodes
                        # In order to move rcs-files from Nodes into Triples directory
                        node_rcs_file = node_rcs_file.replace(triple_collection_name, node_collection_name)
                        info_message = "\n\n\tMoving # " + str((i+1)) + " Node rcs-file (" + node_rcs_file + ")..."
                        print info_message
                        log_list.append(info_message)

                        # Prefer the RCS version-file (",v") when present;
                        # otherwise keep the plain .json path as-is.
                        if os.path.exists(node_rcs_file + ",v"):
                            node_rcs_file = node_rcs_file + ",v"

                        info_message = "\n\t  node_rcs_file (json/,v) : " + node_rcs_file
                        print info_message
                        log_list.append(info_message)

                        # If exists copy to Triples directory
                        # Then delete it
                        if node_rcs_file[-2:] == ",v" and os.path.isfile(node_rcs_file):
                            info_message = "\n\t  File FOUND : " + node_rcs_file
                            print info_message
                            log_list.append(info_message)

                            # Replacing Node collection-name (Nodes) with Triple collection-name (Triples)
                            triple_rcs_file = node_rcs_file.replace(node_collection_name, triple_collection_name)
                            info_message = "\n\t  triple_rcs_file : " + triple_rcs_file
                            print info_message
                            log_list.append(info_message)

                            triple_dir_path = os.path.dirname(triple_rcs_file)
                            info_message = "\n\t  triple_dir_path : " + triple_dir_path
                            print info_message
                            log_list.append(info_message)

                            if not os.path.isdir(triple_dir_path):
                                # Creates required directory path for Triples collection in rcs-repo
                                os.makedirs(triple_dir_path)

                                info_message = "\n\t  CREATED PATH : " + triple_dir_path
                                print info_message
                                log_list.append(info_message)

                            # Copy files keeping metadata intact
                            shutil.copy2(node_rcs_file, triple_rcs_file)
                            info_message = "\n\t  COPIED TO : " + triple_rcs_file
                            print info_message
                            log_list.append(info_message)

                            # Deleting file from Nodes directory
                            os.remove(node_rcs_file)
                            info_message = "\n\t  DELETED : " + node_rcs_file
                            print info_message
                            log_list.append(info_message)

                            # Append to list to keep track of those Triple nodes
                            # for which corresponding rcs-file exists
                            existing_rcs_file.append(str(doc._id))

                        else:
                            error_message = "\n\t  Version-File (.json,v) NOT FOUND : " + node_rcs_file + " !!!"
                            print error_message
                            log_list.append(error_message)

                            if hm.create_or_replace_json_file(doc):
                                fp = hm.get_file_path(doc)
                                message = "This document (" + doc.name + ") is shifted (newly created) from Nodes collection to Triples collection on " + datetime.datetime.now().strftime("%d %B %Y")
                                rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")

                                if os.path.isdir(os.path.dirname(fp)):
                                    # Append to list to keep track of those Triple nodes
                                    # for which corresponding rcs-file doesn't exist
                                    newly_created_rcs_file.append(str(doc._id))

                                    info_message = "\n\t  CREATED rcs-file : " + fp
                                    print info_message
                                    log_list.append(info_message)

                    except OSError as ose:
                        error_message = "\n\t  OSError (" + node_rcs_file + ") : " + str(ose) + " !!!"
                        print error_message
                        log_list.append(error_message)
                        continue

                    except Exception as e:
                        error_message = "\n\t  Exception (" + node_rcs_file + ") : " + str(e) + " !!!"
                        print error_message
                        log_list.append(error_message)
                        continue

                tf2 = time.time()
                info_message = "\n\n\tTime taken by for loop (list) : " + str(tf2 - tf1) + " secs"
                print info_message
                log_list.append(info_message)

                t1 = time.time()
                bulk_insert.execute()
                t2 = time.time()
                info_message = "\n\tTime taken to copy given no. of Triple's docmuents : " + str(t2 - t1) + " secs"
                print info_message
                log_list.append(info_message)

                t3 = time.time()
                bulk_remove.find({"_id": {"$in": delete_nodes}}).remove()
                bulk_remove.execute()
                t4 = time.time()
                info_message = "\n\tTime taken to delete given no. of Triple's docmuents () : " + str(t4 - t3) + " secs"
                print info_message
                log_list.append(info_message)

                info_message = "\n==================================================================================================="
                print info_message
                log_list.append(info_message)

                info_message = "\n\n  After shifting document(s) from " + node_collection_name + " collection into " + triple_collection_name + " collection: "
                print info_message
                log_list.append(info_message)

                # Entries in Nodes collection
                gattribute_cur = node_collection.find({"_type": "GAttribute"})
                gattribute_cur_count = gattribute_cur.count()
                info_message = "\n\n\tNo. of GAttribute node(s) found in " + node_collection_name + " collection: " + str(gattribute_cur_count)
                print info_message
                log_list.append(info_message)

                grelation_cur = node_collection.find({"_type": "GRelation"})
                grelation_cur_count = grelation_cur.count()
                info_message = "\n\tNo. of GRelation node(s) found in " + node_collection_name + " collection: " + str(grelation_cur_count)
                print info_message
                log_list.append(info_message)

                # Entries in Triples collection
                gtattribute_cur = triple_collection.find({"_type": "GAttribute"})
                gtattribute_cur_count = gtattribute_cur.count()
                info_message = "\n\n\tNo. of GAttribute node(s) found in " + triple_collection_name + " collection: " + str(gtattribute_cur_count)
                print info_message
                log_list.append(info_message)

                gtrelation_cur = triple_collection.find({"_type": "GRelation"})
                gtrelation_cur_count = gtrelation_cur.count()
                info_message = "\n\tNo. of GRelation node(s) found in " + triple_collection_name + " collection: " + str(gtrelation_cur_count)
                print info_message
                log_list.append(info_message)

                # Information about attribute_type & relation_type fields updated
                info_message = "\n\n\tNo. of node(s) (# " + str(len(at_rt_updated_node_list)) + ") whose attribute_type & relation_type fields are updated: \n" + str(at_rt_updated_node_list)
                print info_message
                log_list.append(info_message)

                # Information about RCS files
                info_message = "\n\n\tRCS file(s) moved for follwoing node(s) (# " + str(len(existing_rcs_file)) + ") :-  \n" + str(existing_rcs_file)
                print info_message
                log_list.append(info_message)

                info_message = "\n\tRCS file(s) re-created for follwoing node(s) (# " + str(len(newly_created_rcs_file)) + ") :-  \n" + str(newly_created_rcs_file)
                print info_message
                log_list.append(info_message)

                if triple_cur.alive:
                    triple_cur.close()
                    info_message = "\n\n\tTriple's cursor closed."
                    print info_message
                    log_list.append(info_message)

                info_message = "\n\n==================================================================================================="
                print info_message
                log_list.append(info_message)

            """
            info_message = "\n\n  Looking for dict type value(s) in attribute_type" + \
                " and relation_type fields of respective GAttribute and GRelation" + \
                "\n\tIf found code will replace corresponding value(s) with respective AttributeType/RelationType instances" + \
                "\n\tTHIS MAY TAKE MORE TIME DEPENDING UPON HOW MUCH DATA YOU HAVE.. SO PLEASE HAVE PATIENCE !\n"
            print info_message
            log_list.append(info_message)

            triple_cur = triple_collection.collection.find({"_type": {"$in": ["GAttribute", "GRelation"]}}, timeout=False)
            import bson
            hm = HistoryManager()
            sc = []
            ec = []
            tc = triple_cur.count()
            for i, each in enumerate(triple_cur):
                try:
                    n = None
                    info_message = "\n\n\tChecking # " + str((i+1)) + " record :-"
                    print info_message
                    log_list.append(info_message)

                    if each["_type"] == "GAttribute":
                        if (type(each["attribute_type"]) != bson.dbref.DBRef) and (type(each["attribute_type"]) == dict):
                            each["attribute_type"] = node_collection.collection.AttributeType(each["attribute_type"])
                            n = triple_collection.collection.GAttribute(each)
                            n.save()
                            sc.append(str(n._id))
                    elif each["_type"] == "GRelation":
                        if (type(each["relation_type"]) != bson.dbref.DBRef) and (type(each["relation_type"]) == dict):
                            each["relation_type"] = node_collection.collection.RelationType(each["relation_type"])
                            n = triple_collection.collection.GRelation(each)
                            n.save()
                            sc.append(str(n._id))
                except Exception as e:
                    error_message = "\n Error (" + str(each["_id"]) + ") : ", str(e) + " !!!"
                    print error_message
                    log_list.append(error_message)
                    ec.append(str(each["_id"]))
                    continue

            info_message = "\n\n\tTotal node(s) found: " + str(tc)
            print info_message
            log_list.append(info_message)

            info_message = "\n\n\tTotal node(s) updated (" + str(len(sc)) + ") : \n" + str(sc)
            print info_message
            log_list.append(info_message)

            info_message = "\n\n\tTotal node(s) where error encountered (" + str(len(ec)) + ") : \n" + str(ec)
            print info_message
            log_list.append(info_message)

            if triple_cur.alive:
                triple_cur.close()
                info_message = "\n\n\tTriple's cursor closed."
                print info_message
                log_list.append(info_message)

            info_message = "\n\n==================================================================================================="
            print info_message
            log_list.append(info_message)
            """
        except Exception as e:
            error_message = str(e)
            print error_message
            log_list.append("\n  Error: " + error_message + " !!!\n")

        finally:
            if log_list:
                info_message = "\n\n================ End of Iteration ================\n"
                print info_message
                log_list.append(info_message)

                log_file_name = "shift_Triples" + ".log"
                log_file_path = os.path.join(SCHEMA_ROOT, log_file_name)
                with open(log_file_path, 'a') as log_file:
                    log_file.writelines(log_list)
Example #7
    def handle(self, *args, **options):
        HistoryManager().create_rcs_repo_collections(*VERSIONING_COLLECTIONS)
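
Example #7 simply ensures an RCS repository exists for each versioned collection before any check-ins happen. A minimal sketch of what such a helper amounts to, assuming a one-directory-per-collection layout under a repository root ('rcs_repo_dir' and the layout are illustrative guesses, not gnowsys-ndf's actual implementation):

    import os

    def create_rcs_repo_collections_sketch(rcs_repo_dir, *collection_names):
        # Create one sub-directory per versioned collection under the
        # repository root, skipping any that already exist.
        for name in collection_names:
            path = os.path.join(rcs_repo_dir, name)
            if not os.path.isdir(path):
                os.makedirs(path)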