Example #1
0
def main():
    """Command-line entry point for the ``mvg`` departure-lookup tool.

    Parses the CLI arguments and dispatches to the requested action:
    replay the most recent search (``--recent``), query departures for a
    stop (``--departures``), find the nearest stations to an address
    (``--station``), or — with no action — print the search history and
    the latest search's departures.
    """
    import argparse
    import os

    parser = argparse.ArgumentParser(prog="mvg")
    args_group = parser
    args_group.add_argument("--recent",
                            "-r",
                            action="store_true",
                            help="fetch the most recent search.")
    args_group.add_argument("--departures",
                            "-d",
                            help="Departures at Station/Stop",
                            nargs='+')
    args_group.add_argument("--limit", "-l", help="# results to fetch")
    args_group.add_argument(
        "--mode", "-m", help="Transportation Mode: bus, ubahn, sbahn, tram.")
    args_group.add_argument("--station",
                            "-s",
                            help="Gets stations closest to the address.",
                            nargs='+')

    args = parser.parse_args()
    recents_file_path = os.path.join(os.getcwd(), "recent.txt")
    history = HistoryManager(recents_file_path)

    if args.recent:
        result, latest_departure = history.get_latest()
        if not result:
            # get_latest() signals failure via ``result``; the payload is
            # then a message rather than a station name.
            print(latest_departure)
        else:
            display_departures(latest_departure, mode=args.mode)
    elif args.departures:
        station = ' '.join(args.departures)
        if args.limit:
            display_departures(station, int(args.limit), args.mode)
        else:
            display_departures(station, mode=args.mode)
        # Remember this query so --recent can replay it later.
        with open(recents_file_path, "w") as recent:
            recent.write(station)
    elif args.station:
        get_nearest_stations(' '.join(args.station))
    else:
        # No explicit action: show the history, then the latest search.
        top5 = history.get_top(5)
        print("Your most recent stations:")
        print("  ".join(
            "{}. {}".format(idx + 1, station)
            for idx, station in enumerate(top5)))
        # BUG FIX: ``latest_departure`` was previously referenced here
        # without being defined in this branch (NameError). Fetch it from
        # the history instead, mirroring the --recent branch.
        result, latest_departure = history.get_latest()
        if result:
            display_departures(latest_departure, mode=args.mode)
        else:
            print(latest_departure)
Example #2
0
 def __init__(self,
              marathon_client,
              logger=None,
              dd_client=None,
              cli_args=None):
     """Store the scaler's collaborators and optional CLI settings.

     :param marathon_client: client used to talk to Marathon
     :param logger: optional logger; defaults to the module logger
     :param dd_client: optional Datadog client, forwarded to HistoryManager
     :param cli_args: optional parsed CLI namespace providing
         ``enforce_version_match``
     """
     self.marathon_client = marathon_client
     self.dd_client = dd_client
     self.logger = logger or logging.getLogger(__name__)
     self.hm = HistoryManager(dd_client=dd_client)
     # Version matching is off unless the CLI explicitly configures it.
     self.enforce_version_match = (cli_args.enforce_version_match
                                   if cli_args is not None else False)
Example #3
0
    def version_dict(self):
        """Return a mapping of revision index to revision number for this node.

        Example::

            {"1": "1.1", "2": "1.2", "3": "1.3"}
        """
        return HistoryManager().get_version_dict(self)
Example #4
0
    def version_dict(self):
        """Return the revision numbers recorded for this node, keyed by index.

        e.g. ``{"1": "1.1", "2": "1.2", "3": "1.3"}``
        """
        hm = HistoryManager()
        return hm.get_version_dict(self)
Example #5
0
    def save(self, *args, **kwargs):
        """Persist this Filehive document and mirror it into the RCS store.

        New documents get ``uploaded_at`` stamped and their JSON snapshot
        checked in as revision 1; existing documents are checked out,
        re-serialised and checked back in.  Raises ``RuntimeError`` when
        the RCS bookkeeping fails.
        """

        is_new = False if ('_id' in self) else True

        if is_new:
            self.uploaded_at = datetime.datetime.now()

        super(Filehive, self).save(*args, **kwargs)

        # storing Filehive JSON in RSC system:
        history_manager = HistoryManager()
        rcs_obj = RCS()

        if is_new:

            # Create history-version-file
            if history_manager.create_or_replace_json_file(self):
                fp = history_manager.get_file_path(self)
                message = "This document (" + str(
                    self.md5) + ") is created on " + self.uploaded_at.strftime(
                        "%d %B %Y")
                rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")

        else:
            # Update history-version-file
            fp = history_manager.get_file_path(self)

            try:
                rcs_obj.checkout(fp, otherflags="-f")

            except Exception as err:
                # Checkout failed -- the version file is presumably missing
                # or corrupt, so try to re-create it from scratch.
                try:
                    if history_manager.create_or_replace_json_file(self):
                        fp = history_manager.get_file_path(self)
                        message = "This document (" + str(
                            self.md5
                        ) + ") is re-created on " + self.uploaded_at.strftime(
                            "%d %B %Y")
                        rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")

                except Exception as err:
                    # Re-creation failed too: drop the orphaned mongo record
                    # so the DB and the RCS store stay consistent.
                    print "\n DocumentError: This document (", self._id, ":", str(
                        self.md5), ") can't be re-created!!!\n"
                    node_collection.collection.remove({'_id': self._id})
                    raise RuntimeError(err)

            try:
                if history_manager.create_or_replace_json_file(self):
                    message = "This document (" + str(
                        self.md5
                    ) + ") is lastly updated on " + datetime.datetime.now(
                    ).strftime("%d %B %Y")
                    rcs_obj.checkin(fp, 1, message.encode('utf-8'))

            except Exception as err:
                print "\n DocumentError: This document (", self._id, ":", str(
                    self.md5), ") can't be updated!!!\n"
                raise RuntimeError(err)
 def __init__(self, marathon_client, logger=None, dd_client=None, cli_args=None):
     """Wire up the scaler's collaborators.

     ``enforce_version_match`` stays False unless supplied via ``cli_args``.
     """
     self.marathon_client = marathon_client
     self.dd_client = dd_client
     self.logger = logger or logging.getLogger(__name__)
     self.hm = HistoryManager(dd_client=dd_client)
     if cli_args is None:
         self.enforce_version_match = False
     else:
         self.enforce_version_match = cli_args.enforce_version_match
Example #7
0
    def save(self, *args, **kwargs):
        """Persist this Filehive document and record it in the RCS store.

        First-time saves stamp ``uploaded_at`` and check the JSON snapshot
        in as revision 1; later saves check the version file out, refresh
        the JSON and check it back in.  Raises ``RuntimeError`` when the
        RCS bookkeeping fails.
        """

        is_new = False if ('_id' in self) else True

        if is_new:
            self.uploaded_at = datetime.datetime.now()

        super(Filehive, self).save(*args, **kwargs)

        # storing Filehive JSON in RSC system:
        history_manager = HistoryManager()
        rcs_obj = RCS()

        if is_new:

            # Create history-version-file
            if history_manager.create_or_replace_json_file(self):
                fp = history_manager.get_file_path(self)
                message = "This document (" + str(self.md5) + ") is created on " + self.uploaded_at.strftime("%d %B %Y")
                rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")

        else:
            # Update history-version-file
            fp = history_manager.get_file_path(self)

            try:
                rcs_obj.checkout(fp, otherflags="-f")

            except Exception as err:
                # Checkout failed: the version file is presumably missing or
                # corrupt, so attempt to re-create it from the current doc.
                try:
                    if history_manager.create_or_replace_json_file(self):
                        fp = history_manager.get_file_path(self)
                        message = "This document (" + str(self.md5) + ") is re-created on " + self.uploaded_at.strftime("%d %B %Y")
                        rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")

                except Exception as err:
                    # Could not re-create either: remove the orphaned mongo
                    # record to keep DB and RCS store consistent.
                    print "\n DocumentError: This document (", self._id, ":", str(self.md5), ") can't be re-created!!!\n"
                    node_collection.collection.remove({'_id': self._id})
                    raise RuntimeError(err)

            try:
                if history_manager.create_or_replace_json_file(self):
                    message = "This document (" + str(self.md5) + ") is lastly updated on " + datetime.datetime.now().strftime("%d %B %Y")
                    rcs_obj.checkin(fp, 1, message.encode('utf-8'))

            except Exception as err:
                print "\n DocumentError: This document (", self._id, ":", str(self.md5), ") can't be updated!!!\n"
                raise RuntimeError(err)
Example #8
0
            'required_for':
            to_reduce_doc_requirement,
            'doc_id':
            self._id
        })

        #print "~~~~~~~~~~~~~~~~~~~~It is not present in the ToReduce() class collection.Message Coming from save() method ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",self._id
        if not old_doc:
            z = node_collection.collection.ToReduceDocs()
            z.doc_id = self._id
            z.required_for = to_reduce_doc_requirement
            z.save()

    #If you create/edit anything then this code shall add it in the URL

        history_manager = HistoryManager()
        rcs_obj = RCS()

        if is_new:
            # Create history-version-file
            try:
                if history_manager.create_or_replace_json_file(self):
                    fp = history_manager.get_file_path(self)
                    user_list = User.objects.filter(pk=self.created_by)
                    user = user_list[0].username if user_list else 'user'
                    # user = User.objects.get(pk=self.created_by).username
                    message = "This document (" + self.name + ") is created by " + user + " on " + self.created_at.strftime(
                        "%d %B %Y")
                    rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")
            except Exception as err:
                print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be created!!!\n"
Example #9
0
    def save(self, *args, **kwargs):
        """Validate, persist and version this Node document.

        Flow: bail out when called with ``is_changed=False``; stamp
        ``created_at`` (new docs) and ``last_update``; drop fields that are
        neither in the class structure nor declared in any member-of
        GSystemType's ``attribute_type_set``; merge buddy contributors when
        GSTUDIO_BUDDY_LOGIN is on; save via the superclass; register the
        document for map-reduce; finally snapshot the document as JSON via
        ``HistoryManager`` and check it into RCS, raising ``RuntimeError``
        when the RCS bookkeeping fails.
        """
	if "is_changed" in kwargs:
            if not kwargs["is_changed"]:
                #print "\n ", self.name, "(", self._id, ") -- Nothing has changed !\n\n"
                return

        is_new = False

        if not "_id" in self:
            is_new = True               # It's a new document, hence yet no ID!"

            # On save, set "created_at" to current date
            self.created_at = datetime.datetime.today()

        self.last_update = datetime.datetime.today()

        # Check the fields which are not present in the class
        # structure, whether do they exists in their GSystemType's
        # "attribute_type_set"; If exists, add them to the document
        # Otherwise, throw an error -- " Illegal access: Invalid field
        # found!!! "

        try:

            invalid_struct_fields = list(set(self.structure.keys()) - set(self.keys()))
            # print '\n invalid_struct_fields: ',invalid_struct_fields
            if invalid_struct_fields:
                for each_invalid_field in invalid_struct_fields:
                    if each_invalid_field in self.structure:
                        self.structure.pop(each_invalid_field)
                        # print "=== removed from structure", each_invalid_field, ' : ',


            keys_list = self.structure.keys()
            keys_list.append('_id')
            invalid_struct_fields_list = list(set(self.keys()) - set(keys_list))
            # print '\n invalid_struct_fields_list: ',invalid_struct_fields_list
            if invalid_struct_fields_list:
                for each_invalid_field in invalid_struct_fields_list:
                    if each_invalid_field in self:
                        self.pop(each_invalid_field)
                        # print "=== removed ", each_invalid_field, ' : ',


        except Exception as e:
            print e
            pass

        invalid_fields = []

        # Any remaining non-structure field must match an attribute-type
        # name on one of the node's GSystemTypes, else it is invalid.
        for key, value in self.iteritems():
            if key == '_id':
                continue

            if not (key in self.structure):
                field_found = False
                for gst_id in self.member_of:
                    attribute_set_list = node_collection.one({'_id': gst_id}).attribute_type_set

                    for attribute in attribute_set_list:
                        if key == attribute['name']:
                            field_found = True

                            # TODO: Check whether type of "value"
                            # matches with that of
                            # "attribute['data_type']" Don't continue
                            # searching from list of remaining
                            # attributes
                            break

                    if field_found:
                        # Don't continue searching from list of
                        # remaining gsystem-types
                        break

                if not field_found:
                    invalid_fields.append(key)
                    print "\n Invalid field(", key, ") found!!!\n"
                    # Throw an error: " Illegal access: Invalid field
                    # found!!! "

        # print "== invalid_fields : ", invalid_fields
        try:
            self_keys = self.keys()
            if invalid_fields:
                for each_invalid_field in invalid_fields:
                    if each_invalid_field in self_keys:
                        self.pop(each_invalid_field)
        except Exception as e:
            print "\nError while processing invalid fields: ", e
            pass

        # if Add-Buddy feature is enabled:
        #   - Get all user id's of active buddies with currently logged in user.
        #   - Check if each of buddy-user-id does not exists in contributors of node object, add it.
        if GSTUDIO_BUDDY_LOGIN:
            from buddy import Buddy
            buddy_contributors = Buddy.get_buddy_userids_list_within_datetime(
                                                    self.created_by,
                                                    self.last_update or self.created_at
                                                )
            # print 'buddy_contributors : ', buddy_contributors

            if buddy_contributors:
                for each_bcontrib in buddy_contributors:
                    if each_bcontrib not in self.contributors:
                        self.contributors.append(each_bcontrib)

        super(Node, self).save(*args, **kwargs)

        # This is the save method of the node class.It is still not
        # known on which objects is this save method applicable We
        # still do not know if this save method is called for the
        # classes which extend the Node Class or for every class There
        # is a very high probability that it is called for classes
        # which extend the Node Class only The classes which we have
        # i.e. the MyReduce() and ToReduce() class do not extend from
        # the node class Hence calling the save method on those objects
        # should not create a recursive function

        # If it is a new document then Make a new object of ToReduce
        # class and the id of this document to that object else Check
        # whether there is already an object of ToReduce() with the id
        # of this object.  If there is an object present pass else add
        # that object I have not applied the above algorithm

        # TO BE COMMENTED map-reduce code:
        # 
        # Instead what I have done is that I have searched the
        # ToReduce() collection class and searched whether the ID of
        # this document is present or not.  If the id is not present
        # then add that id.If it is present then do not add that id

        old_doc = node_collection.collection.ToReduceDocs.find_one({'required_for':to_reduce_doc_requirement,'doc_id':self._id})

        #print "~~~~~~~~~~~~~~~~~~~~It is not present in the ToReduce() class collection.Message Coming from save() method ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",self._id
        if  not old_doc:
            z = node_collection.collection.ToReduceDocs()
            z.doc_id = self._id
            z.required_for = to_reduce_doc_requirement
            z.save()

        #If you create/edit anything then this code shall add it in the URL

        history_manager = HistoryManager()
        rcs_obj = RCS()

        if is_new:
            # Create history-version-file
            try:
                if history_manager.create_or_replace_json_file(self):
                    fp = history_manager.get_file_path(self)
                    user_list = User.objects.filter(pk=self.created_by)
                    user = user_list[0].username if user_list else 'user'
                    # user = User.objects.get(pk=self.created_by).username
                    message = "This document (" + self.name + ") is created by " + user + " on " + self.created_at.strftime("%d %B %Y")
                    rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")
            except Exception as err:
                # RCS creation failed: drop the just-saved mongo record so
                # the DB and the RCS store stay consistent.
                print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be created!!!\n"
                node_collection.collection.remove({'_id': self._id})
                raise RuntimeError(err)

        else:
            # Update history-version-file
            fp = history_manager.get_file_path(self)

            try:
                rcs_obj.checkout(fp, otherflags="-f")
            except Exception as err:
                # Checkout failed -- the version file is presumably missing
                # or corrupt; try to re-create it from the current document.
                try:
                    if history_manager.create_or_replace_json_file(self):
                        fp = history_manager.get_file_path(self)
                        # user = User.objects.get(pk=self.created_by).username
                        user_list = User.objects.filter(pk=self.created_by)
                        user = user_list[0].username if user_list else 'user'
                        message = "This document (" + self.name + ") is re-created by " + user + " on " + self.created_at.strftime("%d %B %Y")
                        rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")

                except Exception as err:
                    print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be re-created!!!\n"
                    node_collection.collection.remove({'_id': self._id})
                    raise RuntimeError(err)

            try:
                if history_manager.create_or_replace_json_file(self):
                    # user = User.objects.get(pk=self.modified_by).username
                    user_list = User.objects.filter(pk=self.created_by)
                    user = user_list[0].username if user_list else 'user'
                    message = "This document (" + self.name + ") is lastly updated by " + user + " status:" + self.status + " on " + self.last_update.strftime("%d %B %Y")
                    rcs_obj.checkin(fp, 1, message.encode('utf-8'))

            except Exception as err:
                print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be updated!!!\n"
                raise RuntimeError(err)

        #update the snapshot feild
        if kwargs.get('groupid'):
            # gets the last version no.
            rcsno = history_manager.get_current_version(self)
            node_collection.collection.update({'_id':self._id}, {'$set': {'snapshot'+"."+str(kwargs['groupid']):rcsno }}, upsert=False, multi=True)
Example #10
0
 def current_version(self):
     """Return this object's current version number via a HistoryManager."""
     return HistoryManager().get_current_version(self)
Example #11
0
def history_mgr(testsettings):
    """Pytest fixture: build and sanity-check a ``HistoryManager``.

    :param testsettings: settings fixture (requested for its setup side
        effects; not used directly here).
    :return: a fresh ``HistoryManager`` instance.
    """
    _history_mgr = HistoryManager()
    # isinstance is the idiomatic type check (and accepts subclasses),
    # unlike the previous ``type(x) is HistoryManager`` identity test.
    assert isinstance(_history_mgr, HistoryManager)
    return _history_mgr
Example #12
0
class AutoScaler(object):
    """
    The source of the scaling decision.

    Combines per-application metrics with the app's scaling rules, votes
    on a direction, consults the scaling history (tolerance/backoff) and,
    when warranted, asks Marathon to resize the application.
    """
    def __init__(self,
                 marathon_client,
                 logger=None,
                 dd_client=None,
                 cli_args=None):
        """Store collaborators; read enforce_version_match from cli_args.

        :param marathon_client: client used to talk to Marathon
        :param logger: optional logger; defaults to the module logger
        :param dd_client: optional Datadog client, forwarded to HistoryManager
        :param cli_args: optional parsed CLI namespace providing
            ``enforce_version_match``
        """
        self.marathon_client = marathon_client
        self.logger = logger or logging.getLogger(__name__)
        self.dd_client = dd_client
        self.enforce_version_match = False
        self.hm = HistoryManager(dd_client=dd_client)
        if cli_args is not None:
            self.enforce_version_match = cli_args.enforce_version_match

    def scale(self, app_def, rule_manager):
        """ Take scale action
        :param app_def: dict of marathon application settings
        :param rule_manager: object of scaling properties.
        :return: marathon response
        """
        if not app_def.is_app_participating:
            return

        scale_factor = int(
            rule_manager.last_triggered_criteria.get("scale_factor"))
        min_instances = int(rule_manager.min_instances)
        max_instances = int(rule_manager.max_instances)

        # Clamp the requested size into the [min, max] instance window.
        scale_to = app_def.instances + scale_factor
        scale_to_size = clamp(scale_to, min_instances, max_instances)

        if app_def.instances == scale_to_size:
            msg = "{app_name}: application already scaled to {size}"
            self.logger.info(
                msg.format(app_name=app_def.app_name, size=scale_to_size))
            return

        self.marathon_client.scale_app(app_def.id, scale_to_size)
        msg = "{app_name}: scaled to {size}"
        self.logger.info(
            msg.format(app_name=app_def.app_name, size=scale_to_size))

    def decide(self, app_metrics_summary):
        """
        The decision-maker of the autoscaler.
        :param app_metrics_summary: dict of app definitions and metrics
        :return: None
        """
        self.logger.info("Decision process beginning.")

        app_scale_recommendations = {}
        for app, metrics_summary in app_metrics_summary.items():
            app_def = ApplicationDefinition(
                metrics_summary.get("application_definition"))
            rm = RulesManager(app_def)
            if rm.is_app_participating():
                vote = 0
                scale_factor = 0
                cpu = metrics_summary.get("cpu_avg_usage")
                mem = metrics_summary.get("memory_avg_usage")
                metrics = dict(cpu=cpu, mem=mem)

                rm.trigger_rules(metrics)

                if rm.last_triggered_criteria:
                    scale_factor = int(
                        rm.last_triggered_criteria.get("scale_factor"))
                    vote = 1 if scale_factor > 0 else -1

                app_scale_recommendations[app] = dict(
                    vote=vote,
                    checksum=app_def.version,
                    timestamp=datetime.now(),
                    rule=rm.last_triggered_rule)
                info_msg = "{app_name}: vote: {vote} ; scale_factor requested: {scale_factor}"
                self.logger.info(
                    info_msg.format(app_name=app_def.app_name,
                                    vote=vote,
                                    scale_factor=scale_factor))
                # Check if app is participating
                # Check if app is ready
                # Check if app instances is greater than or equal to min and less than max

                if (rm.is_app_ready() and rm.is_app_within_min_or_max()
                        and rm.last_triggered_criteria):
                    tolerance_reached = self.hm.tolerance_reached(
                        app, rm.last_triggered_criteria.get("tolerance"), vote)
                    within_backoff = self.hm.within_backoff(
                        app, rm.last_triggered_criteria.get("backoff"), vote)

                    # BUG FIX: was ``vote is not IDLE`` -- an identity
                    # comparison against an int constant, which is fragile
                    # and inconsistent with the ``vote == IDLE`` branch.
                    if vote != IDLE and tolerance_reached and not within_backoff:
                        self.logger.info("{app}: Decision made: Scale.".format(
                            app=app_def.app_name))
                        app_scale_recommendations[app]["decision"] = vote
                        self.scale(app_def, rm)
                    elif vote == IDLE:
                        app_scale_recommendations[app]["decision"] = IDLE
                        self.logger.info(
                            "{app}: Decision made: No Change.".format(
                                app=app_def.app_name))

        self.hm.add_to_perf_tail(app_scale_recommendations)
    def setUp(self):
        """Create a fresh LayerManager/HistoryManager pair for each test."""
        pygame.init()
        layers = LayerManager()
        self.A = layers
        self.B = HistoryManager(layers)
class TestHistoryManager(unittest.TestCase):
    """Exercises HistoryManager push/pull behaviour against a LayerManager."""

    def setUp(self):
        """Build a fresh LayerManager/HistoryManager pair per test."""
        pygame.init()
        self.A = LayerManager()
        self.B = HistoryManager(self.A)

    def test_history(self):
        """History is capped at maxhistory and pulls restore layer state."""
        hist = self.B

        # Pushing grows the history up to maxhistory entries.
        hist.maxhistory = 3
        self.assertEqual(len(hist.history), 0)
        hist.push_history()
        self.assertEqual(len(hist.history), 1)
        hist.push_history()
        hist.push_history()
        self.assertEqual(len(hist.history), 3)

        snap0, snap1, snap2 = hist.history

        # A fourth push evicts the oldest snapshot.
        hist.push_history()
        self.assertEqual(len(hist.history), 3)
        self.assertEqual(hist.history[0], snap1)
        self.assertEqual(hist.history[1], snap2)

        # Pulling restores the layer state snapshot by snapshot.
        hist.pull_history()
        hist.pull_history()
        self.assertEqual(self.A.layers, snap2)
        hist.pull_history()
        self.assertEqual(self.A.layers, snap1)
        self.assertEqual(len(hist.history), 0)

        # Pulling from an empty history is a no-op.
        hist.pull_history()
        self.assertEqual(self.A.layers, snap1)
        self.assertEqual(len(hist.history), 0)
Example #15
0
 def current_version(self):
     """Fetch this object's latest version number from the history store."""
     hm = HistoryManager()
     return hm.get_current_version(self)
class AutoScaler(object):
    """
    The source of the scaling decision.

    Inspects per-application metric summaries, votes on whether each app
    should scale, and issues scale commands through the Marathon client.
    Decision history is pushed to a HistoryManager for tolerance/backoff
    bookkeeping.
    """
    def __init__(self, marathon_client, logger=None, dd_client=None, cli_args=None):
        """
        :param marathon_client: client used to issue scale commands
        :param logger: optional logger; defaults to this module's logger
        :param dd_client: optional Datadog client, forwarded to HistoryManager
        :param cli_args: optional parsed CLI args; only
            ``enforce_version_match`` is read from it
        """
        self.marathon_client = marathon_client
        self.logger = logger or logging.getLogger(__name__)
        self.dd_client = dd_client
        self.enforce_version_match = False
        self.hm = HistoryManager(dd_client=dd_client)
        if cli_args is not None:
            self.enforce_version_match = cli_args.enforce_version_match

    def scale(self, app_def, rule_manager):
        """ Take scale action
        :param app_def: dict of marathon application settings
        :param rule_manager: object of scaling properties.
        :return: marathon response, or None when no action is taken
        """
        if not app_def.is_app_participating:
            return

        scale_factor = int(rule_manager.last_triggered_criteria.get("scale_factor"))
        min_instances = int(rule_manager.min_instances)
        max_instances = int(rule_manager.max_instances)

        # Target size is the current count shifted by the triggered factor,
        # clamped into the app's configured [min, max] window.
        scale_to = app_def.instances + scale_factor
        scale_to_size = clamp(scale_to, min_instances, max_instances)

        if app_def.instances == scale_to_size:
            # Clamping collapsed the request back to the current size.
            self.logger.info("%s: application already scaled to %s",
                             app_def.app_name, scale_to_size)
            return

        self.marathon_client.scale_app(app_def.id, scale_to_size)
        self.logger.info("%s: scaled to %s", app_def.app_name, scale_to_size)

    def decide(self, app_metrics_summary):
        """
        The decision-maker of the autoscaler.
        :param app_metrics_summary: dict of app definitions and metrics
        :return: None
        """
        self.logger.info("Decision process beginning.")

        app_scale_recommendations = {}
        for app, metrics_summary in app_metrics_summary.items():
            app_def = ApplicationDefinition(metrics_summary.get("application_definition"))
            rm = RulesManager(app_def)
            if rm.is_app_participating():
                vote = 0
                scale_factor = 0
                cpu = metrics_summary.get("cpu_avg_usage")
                mem = metrics_summary.get("memory_avg_usage")
                metrics = dict(cpu=cpu,
                               mem=mem)

                rm.trigger_rules(metrics)

                if rm.last_triggered_criteria:
                    scale_factor = int(rm.last_triggered_criteria.get("scale_factor"))
                    # Vote direction follows the sign of the requested factor.
                    vote = 1 if scale_factor > 0 else -1

                app_scale_recommendations[app] = dict(vote=vote,
                                                      checksum=app_def.version,
                                                      timestamp=datetime.now(),
                                                      rule=rm.last_triggered_rule)
                self.logger.info("%s: vote: %s ; scale_factor requested: %s",
                                 app_def.app_name, vote, scale_factor)

                # Only act when the app is ready, within its instance window,
                # and a rule actually triggered.
                if (rm.is_app_ready() and
                        rm.is_app_within_min_or_max() and
                        rm.last_triggered_criteria):
                    tolerance_reached = self.hm.tolerance_reached(app,
                                                                  rm.last_triggered_criteria.get("tolerance"),
                                                                  vote)
                    within_backoff = self.hm.within_backoff(app,
                                                            rm.last_triggered_criteria.get("backoff"),
                                                            vote)

                    # BUG FIX: was `vote is not IDLE` — identity comparison on
                    # an int constant only works by CPython small-int caching;
                    # value equality is the correct test.
                    if vote != IDLE and tolerance_reached and not within_backoff:
                        self.logger.info("%s: Decision made: Scale.", app_def.app_name)
                        app_scale_recommendations[app]["decision"] = vote
                        self.scale(app_def, rm)
                    elif vote == IDLE:
                        app_scale_recommendations[app]["decision"] = IDLE
                        self.logger.info("%s: Decision made: No Change.", app_def.app_name)

        # Record this cycle's recommendations for future tolerance/backoff checks.
        self.hm.add_to_perf_tail(app_scale_recommendations)
Example #17
0
    def save(self, *args, **kwargs):
        """Validate this node's fields, persist it, and version it under RCS.

        Steps, in order:
          1. Short-circuit when the caller passes is_changed=False.
          2. Stamp created_at (new docs only) and last_update.
          3. Drop fields that are neither in the mongokit structure nor
             declared in any member GSystemType's attribute_type_set.
          4. If buddy login is enabled, merge buddy user ids into contributors.
          5. Persist via the parent class and queue the doc in ToReduceDocs
             for map-reduce processing.
          6. Write a JSON snapshot of the doc and check it into RCS
             (initial checkin for new docs, checkout + checkin otherwise).
          7. Record the resulting RCS revision in the per-group 'snapshot'
             field when kwargs carries a 'groupid'.

        Raises RuntimeError when the RCS history file cannot be created or
        updated; for a failed create/re-create the document is also removed
        from the collection.

        NOTE(review): Python 2 code (print statements, iteritems); keep it so.
        """
        # Caller may explicitly tell us nothing changed — skip the save.
        if "is_changed" in kwargs:
            if not kwargs["is_changed"]:
                #print "\n ", self.name, "(", self._id, ") -- Nothing has changed !\n\n"
                return

        is_new = False

        if not "_id" in self:
            is_new = True  # It's a new document, hence yet no ID!"

            # On save, set "created_at" to current date
            self.created_at = datetime.datetime.today()

        self.last_update = datetime.datetime.today()

        # Check the fields which are not present in the class
        # structure, whether do they exists in their GSystemType's
        # "attribute_type_set"; If exists, add them to the document
        # Otherwise, throw an error -- " Illegal access: Invalid field
        # found!!! "

        try:

            # Pass 1: structure keys with no value in the document —
            # remove them from the structure so validation can't trip on them.
            invalid_struct_fields = list(
                set(self.structure.keys()) - set(self.keys()))
            # print '\n invalid_struct_fields: ',invalid_struct_fields
            if invalid_struct_fields:
                for each_invalid_field in invalid_struct_fields:
                    if each_invalid_field in self.structure:
                        self.structure.pop(each_invalid_field)
                        # print "=== removed from structure", each_invalid_field, ' : ',

            # Pass 2: document keys not in the structure ('_id' is always
            # allowed) — remove them from the document.
            keys_list = self.structure.keys()
            keys_list.append('_id')
            invalid_struct_fields_list = list(
                set(self.keys()) - set(keys_list))
            # print '\n invalid_struct_fields_list: ',invalid_struct_fields_list
            if invalid_struct_fields_list:
                for each_invalid_field in invalid_struct_fields_list:
                    if each_invalid_field in self:
                        self.pop(each_invalid_field)
                        # print "=== removed ", each_invalid_field, ' : ',

        except Exception as e:
            # Best-effort cleanup: log and continue with the save.
            print e
            pass

        invalid_fields = []

        # A field missing from the structure may still be legal if any of the
        # node's GSystemTypes declares it as an attribute type; collect the
        # rest as invalid.
        for key, value in self.iteritems():
            if key == '_id':
                continue

            if not (key in self.structure):
                field_found = False
                for gst_id in self.member_of:
                    attribute_set_list = node_collection.one({
                        '_id': gst_id
                    }).attribute_type_set

                    for attribute in attribute_set_list:
                        if key == attribute['name']:
                            field_found = True

                            # TODO: Check whether type of "value"
                            # matches with that of
                            # "attribute['data_type']" Don't continue
                            # searching from list of remaining
                            # attributes
                            break

                    if field_found:
                        # Don't continue searching from list of
                        # remaining gsystem-types
                        break

                if not field_found:
                    invalid_fields.append(key)
                    print "\n Invalid field(", key, ") found!!!\n"
                    # Throw an error: " Illegal access: Invalid field
                    # found!!! "

    # print "== invalid_fields : ", invalid_fields
        try:
            # Strip the collected invalid fields before persisting.
            self_keys = self.keys()
            if invalid_fields:
                for each_invalid_field in invalid_fields:
                    if each_invalid_field in self_keys:
                        self.pop(each_invalid_field)
        except Exception as e:
            print "\nError while processing invalid fields: ", e
            pass

    # if Add-Buddy feature is enabled:
    #   - Get all user id's of active buddies with currently logged in user.
    #   - Check if each of buddy-user-id does not exists in contributors of node object, add it.
        if GSTUDIO_BUDDY_LOGIN:
            from buddy import Buddy
            buddy_contributors = Buddy.get_buddy_userids_list_within_datetime(
                self.created_by, self.last_update or self.created_at)
            # print 'buddy_contributors : ', buddy_contributors

            if buddy_contributors:
                for each_bcontrib in buddy_contributors:
                    if each_bcontrib not in self.contributors:
                        self.contributors.append(each_bcontrib)

        super(Node, self).save(*args, **kwargs)

        # This is the save method of the node class.It is still not
        # known on which objects is this save method applicable We
        # still do not know if this save method is called for the
        # classes which extend the Node Class or for every class There
        # is a very high probability that it is called for classes
        # which extend the Node Class only The classes which we have
        # i.e. the MyReduce() and ToReduce() class do not extend from
        # the node class Hence calling the save method on those objects
        # should not create a recursive function

        # If it is a new document then Make a new object of ToReduce
        # class and the id of this document to that object else Check
        # whether there is already an object of ToReduce() with the id
        # of this object.  If there is an object present pass else add
        # that object I have not applied the above algorithm

        # TO BE COMMENTED map-reduce code:
        #
        # Instead what I have done is that I have searched the
        # ToReduce() collection class and searched whether the ID of
        # this document is present or not.  If the id is not present
        # then add that id.If it is present then do not add that id

        old_doc = node_collection.collection.ToReduceDocs.find_one({
            'required_for':
            to_reduce_doc_requirement,
            'doc_id':
            self._id
        })

        #print "~~~~~~~~~~~~~~~~~~~~It is not present in the ToReduce() class collection.Message Coming from save() method ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~",self._id
        if not old_doc:
            # Queue this document for the map-reduce pass exactly once.
            z = node_collection.collection.ToReduceDocs()
            z.doc_id = self._id
            z.required_for = to_reduce_doc_requirement
            z.save()

    #If you create/edit anything then this code shall add it in the URL

        history_manager = HistoryManager()
        rcs_obj = RCS()

        if is_new:
            # Create history-version-file
            try:
                if history_manager.create_or_replace_json_file(self):
                    fp = history_manager.get_file_path(self)
                    user_list = User.objects.filter(pk=self.created_by)
                    user = user_list[0].username if user_list else 'user'
                    # user = User.objects.get(pk=self.created_by).username
                    message = "This document (" + self.name + ") is created by " + user + " on " + self.created_at.strftime(
                        "%d %B %Y")
                    rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")
            except Exception as err:
                # Creation of the history file failed: roll back the mongo
                # insert so no document exists without a history file.
                print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be created!!!\n"
                node_collection.collection.remove({'_id': self._id})
                raise RuntimeError(err)

        else:
            # Update history-version-file
            fp = history_manager.get_file_path(self)

            try:
                rcs_obj.checkout(fp, otherflags="-f")
            except Exception as err:
                # Checkout failed (e.g. history file missing) — try to
                # re-create the version file from scratch.
                try:
                    if history_manager.create_or_replace_json_file(self):
                        fp = history_manager.get_file_path(self)
                        # user = User.objects.get(pk=self.created_by).username
                        user_list = User.objects.filter(pk=self.created_by)
                        user = user_list[0].username if user_list else 'user'
                        message = "This document (" + self.name + ") is re-created by " + user + " on " + self.created_at.strftime(
                            "%d %B %Y")
                        rcs_obj.checkin(fp, 1, message.encode('utf-8'), "-i")

                except Exception as err:
                    print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be re-created!!!\n"
                    node_collection.collection.remove({'_id': self._id})
                    raise RuntimeError(err)

            try:
                # Check in the updated JSON snapshot as a new revision.
                if history_manager.create_or_replace_json_file(self):
                    # user = User.objects.get(pk=self.modified_by).username
                    user_list = User.objects.filter(pk=self.created_by)
                    user = user_list[0].username if user_list else 'user'
                    message = "This document (" + self.name + ") is lastly updated by " + user + " status:" + self.status + " on " + self.last_update.strftime(
                        "%d %B %Y")
                    rcs_obj.checkin(fp, 1, message.encode('utf-8'))

            except Exception as err:
                print "\n DocumentError: This document (", self._id, ":", self.name, ") can't be updated!!!\n"
                raise RuntimeError(err)

    #update the snapshot feild
        if kwargs.get('groupid'):
            # gets the last version no.
            rcsno = history_manager.get_current_version(self)
            node_collection.collection.update(
                {'_id': self._id},
                {'$set': {
                    'snapshot' + "." + str(kwargs['groupid']): rcsno
                }},
                upsert=False,
                multi=True)