Example 1
 def convert_board(self, new_board):
     # Old posts live in an attribute tree on the MUSH board object; `DETAILS,
     # `TIMEOUT and `HDR are child attributes of each post attribute.
     old_board = new_board.mush
     old_posts = old_board.lattr('~`*')
     old_dict = dict()
     for old_post in old_posts:
         post_details = old_board.mushget(old_post + '`DETAILS').split('|')
         poster_name = post_details[0]
         poster_objid = post_details[1]
         poster_obj = objmatch(poster_objid)
         if poster_obj:
             owner = poster_obj.obj
         else:
             owner = create.create_object(typeclass='classes.characters.BaseCharacter', key=poster_name)
             dbref, csecs = poster_objid.split(':', 1)
             cdate = from_unixtimestring(csecs)
             MushObject.objects.create(objid=poster_objid, dbref=dbref, created=cdate, type=8, recreated=1, obj=owner)
         post_date = from_unixtimestring(post_details[2])
         text = old_board.mushget(old_post)
         timeout_secs = int(old_board.mushget(old_post + '`TIMEOUT'))
         new_timeout = datetime.timedelta(seconds=timeout_secs)
         subject = old_board.mushget(old_post + '`HDR')
         old_dict[old_post] = {'subject': subject, 'owner': owner, 'timeout': new_timeout,
                               'creation_date': post_date, 'text': text}
     for num, old_post in enumerate(sorted(old_posts, key=lambda old: old_dict[old]['creation_date'])):
         old_data = old_dict[old_post]
         new_board.posts.create(subject=old_data['subject'], owner=old_data['owner'],
                                creation_date=old_data['creation_date'], timeout=old_data['timeout'],
                                text=old_data['text'], order=num+1)
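
These converters lean on a few helpers from the surrounding project that are not shown on this page. Judging only from how it is called above, from_unixtimestring turns the seconds portion of a PennMUSH objid (the part after the ':') into an aware datetime. A minimal sketch under that assumption, not the project's actual implementation:

    import datetime
    import pytz

    def from_unixtimestring(secs_string):
        # Hypothetical sketch: objids such as '#123:1407856993' carry a creation
        # time in seconds after the colon; convert that string to an aware UTC
        # datetime, or return None if it is missing or malformed.
        try:
            secs = int(secs_string)
        except (TypeError, ValueError):
            return None
        return datetime.datetime.fromtimestamp(secs, tz=pytz.utc)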
Example 2
 def switch_fclist(self):
     old_themes = cobj('themedb').children.all()
     for old_theme in old_themes:
         new_theme, created = FCList.objects.get_or_create(key=old_theme.name)
         desc = old_theme.mushget('DESCRIBE')
         if desc:
             new_theme.description = desc
         powers = old_theme.mushget('POWERS')
         if powers:
             new_theme.powers = powers
         info = old_theme.mushget('INFO')
         if info:
             new_theme.info = info
         old_characters = [objmatch(char) for char in old_theme.mushget('CAST').split(' ') if objmatch(char)]
         for char in old_characters:
             if not char.obj:
                 continue
             # Character type and status come from the old D`FINGER` attribute tree.
             char_type = char.mushget('D`FINGER`TYPE') or 'N/A'
             status = char.mushget('D`FINGER`STATUS') or 'N/A'
             stat_kind, created = StatusKind.objects.get_or_create(key=status)
             type_kind, created = TypeKind.objects.get_or_create(key=char_type)
             stat_kind.characters.get_or_create(character=char.obj)
             type_kind.characters.get_or_create(character=char.obj)
             new_theme.cast.add(char.obj)
         new_theme.save()
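
objmatch is used throughout these examples to look up the imported MushObject row for a PennMUSH objid; the row's obj field points at the recreated Evennia object, and a falsy result means nothing was imported for that objid. A rough sketch of such a lookup, with the exact matching rules being an assumption:

    def objmatch(objid):
        # Hypothetical sketch: return the imported MushObject whose objid matches,
        # or None. MushObject is the bridge model used throughout these examples;
        # the real helper may also accept bare dbrefs or partial objids.
        return MushObject.objects.filter(objid=objid).first()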
Example 3
 def switch_groups(self):
     penn_groups = cobj('gop').children.all()
     for old_group in penn_groups:
         if not old_group.group:
             old_group.group, created = Group.objects.get_or_create(key=old_group.name)
             old_group.save(update_fields=['group'])
         new_group = old_group.group
         new_group.description = old_group.mushget('DESCRIBE')
         old_ranks = old_group.lattrp(r'RANK`\d+')
         old_rank_nums = [old_rank.split('`', 1)[1] for old_rank in old_ranks]
         rank_dict = dict()
         for num in old_rank_nums:
             new_rank, created = new_group.ranks.get_or_create(num=int(num))
             rank_name = old_group.mushget('RANK`%s`NAME' % num)
             if rank_name:
                 new_rank.name = sanitize_string(rank_name)
                 new_rank.save(update_fields=['name'])
             rank_dict[int(num)] = new_rank
         old_members = [objmatch(member) for member in old_group.mushget('MEMBERS').split(' ') if objmatch(member)]
         for old_member in old_members:
             if not old_member.obj:
                 continue
             # Guard against an empty RANK attribute: a missing or zero rank falls back to 4.
             old_num = int(old_member.mushget('D`GROUP`%s`RANK' % old_group.dbref) or 0) or 4
             title = old_member.mushget('D`GROUP`%s`NAME' % old_group.dbref)
             if not title:
                 title = None
             new_member, created = new_group.participants.get_or_create(character=old_member.obj, title=title,
                                                                        rank=rank_dict[old_num])
             for channel in [new_group.ic_channel, new_group.ooc_channel]:
                 if channel:
                     if channel.locks.check(new_member.character, 'listen'):
                         channel.connect(new_member.character)
         new_group.save()
         board_group, created = BoardGroup.objects.get_or_create(main=0, group=new_group)
         for old_board in old_group.contents.all():
             if not old_board.board:
                 old_board.board = board_group.make_board(key=old_board.name)
                 old_board.save(update_fields=['board'])
             new_board = old_board.board
             old_order = int(old_board.mushget('ORDER'))
             new_board.order = old_order
             new_board.save()
             self.convert_board(new_board)
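
sanitize_string, applied to the imported rank names above, is assumed to strip MUSH colour codes and stray whitespace from a string. A minimal sketch under that assumption; the project's real helper may handle more markup:

    import re

    def sanitize_string(text):
        # Hypothetical sketch: drop ANSI colour escape sequences that PennMUSH
        # data often carries and trim surrounding whitespace.
        ansi_pattern = re.compile(r'\x1b\[[0-9;]*m')
        return ansi_pattern.sub('', text or '').strip()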
Example 4
    def switch_scenes(self):

        # Establishing the MySQL connection!
        from commands.mysql import sql_dict
        db = MySQLdb.connect(host=sql_dict['site'], user=sql_dict['username'],
                             passwd=sql_dict['password'], db=sql_dict['database'], cursorclass=cursors.DictCursor)
        c = db.cursor()

        # Just like with jobs, we need to create stub characters for everyone who has ever used SceneSys and link
        # them to their SceneSys player IDs! Same code, believe it or not.
        c.execute("""SELECT * from scene_players""")
        old_players = c.fetchall()
        char_dict = dict()
        for old_player in old_players:
            match = objmatch(old_player['objid'])
            if match:
                char = match.obj
            else:
                key = old_player['player_name']
                char = create.create_object(typeclass='classes.characters.BaseCharacter', key=key)
                objid = old_player['objid']
                dbref, csecs = objid.split(':', 1)
                cdate = from_unixtimestring(csecs)
                new_mush = MushObject.objects.create(objid=objid, dbref=dbref, created=cdate, type=8, recreated=1, obj=char)
                new_mush.save()
            char_dict[old_player['player_id']] = char

        # Convert plots! This one's pretty easy.
        c.execute("""SELECT * FROM scene_plots ORDER BY plot_id""")
        old_plots = c.fetchall()
        plot_dict = dict()
        for old_plot in old_plots:
            if old_plot['start_date']:
                start_date = old_plot['start_date'].replace(tzinfo=pytz.utc)
            else:
                start_date = None
            if old_plot['end_date']:
                end_date = old_plot['end_date'].replace(tzinfo=pytz.utc)
            else:
                end_date = None
            owner = char_dict[old_plot['player_id']]
            description = penn_substitutions(old_plot['plot_desc'])
            plot_type = old_plot['plot_type']
            title = old_plot['title']
            new_plot = Plot.objects.create(owner=owner, description=description, title=title, date_start=start_date,
                                           date_end=end_date, type=plot_type)
            plot_dict[old_plot['plot_id']] = new_plot

        # Another easy one. Importing the Events calendar of scheduled scenes.
        event_dict = dict()
        c.execute("""SELECT * from scene_schedule ORDER BY schedule_id""")
        old_events = c.fetchall()
        for old_event in old_events:
            owner = char_dict[old_event['player_id']]
            schedule_date = old_event['schedule_date'].replace(tzinfo=pytz.utc)
            description = penn_substitutions(old_event['schedule_desc'])
            schedule_title = old_event['schedule_title']
            plot = plot_dict.get(old_event['plot_id'], None)

            new_event = Event.objects.create(owner=owner, date_schedule=schedule_date, description=description,
                                             title=schedule_title, plot=plot)
            event_dict[old_event['schedule_id']] = new_event

        # Now we begin the process of importing scenes. This is a very involved process!
        scene_dict = dict()
        c.execute("""SELECT * FROM scene_scenes ORDER BY scene_id""")
        old_scenes = c.fetchall()
        for old_scene in old_scenes:
            owner = char_dict[old_scene['player_id']]
            scene_title = old_scene['scene_title']
            scene_desc = old_scene['scene_desc']
            scene_status = int(old_scene['scene_state'])

            creation_date = old_scene['creation_date'].replace(tzinfo=pytz.utc)

            if old_scene['finish_date']:
                finish_date = old_scene['finish_date'].replace(tzinfo=pytz.utc)
            else:
                finish_date = None

            plot = plot_dict.get(old_scene['plot_id'], None)
            room_objid = old_scene['room_objid']
            old_loc = objmatch(room_objid)
            if old_loc and old_loc.obj:
                location = old_loc.obj
            else:
                room_name = old_scene['room_name']
                dbref, csecs = room_objid.split(':', 1)
                cdate = from_unixtimestring(csecs)
                location = create.create_object(typeclass='classes.rooms.BaseRoom', key=room_name)
                new_mush, created = MushObject.objects.get_or_create(objid=room_objid, dbref=dbref, type=1, created=cdate)
                new_mush.obj = location
                new_mush.save()

            new_scene = Scene.objects.create(owner=owner, title=scene_title, description=scene_desc, status=scene_status,
                                             date_created=creation_date, date_finished=finish_date, plot=plot)

            scene_dict[old_scene['scene_id']] = new_scene

            # In this section we'll be setting up the Participants for this scene and making an index dictionary
            # in preparation to import the poses.
            part_dict = dict()
            c.execute("""SELECT DISTINCT player_id FROM scene_poses WHERE scene_id=%s""", (old_scene['scene_id'],))
            posers = c.fetchall()
            for poser in posers:
                new_part = new_scene.participants.create(character=char_dict[poser['player_id']])
                part_dict[poser['player_id']] = new_part

            # Finally it's time to import the individual poses!
            pose_dict = dict()
            c.execute("""SELECT * from scene_poses WHERE scene_id=%s""", (old_scene['scene_id'],))
            old_poses = c.fetchall()
            for pose in old_poses:
                parse_pose = pose['pose'].decode('utf-8', errors='ignore')
                owner = part_dict[pose['player_id']]
                pose_date = pose['pose_time'].replace(tzinfo=pytz.utc)
                ignore = bool(int(pose['pose_ignore']))
                pose_text = penn_substitutions(parse_pose)

                new_pose = Pose.objects.create(owner=owner, ignore=ignore, text=pose_text, date_made=pose_date,
                                               location=location)
                pose_dict[pose['pose_id']] = new_pose
        db.close()
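
penn_substitutions is applied to every block of free text pulled out of MySQL here and in the jobs importer that follows. It is assumed to translate PennMUSH's inline substitution codes into plain characters; a minimal sketch covering only %r (newline) and %t (tab), which may well be a subset of what the real helper does:

    def penn_substitutions(text):
        # Hypothetical sketch: swap the most common PennMUSH substitution codes
        # for their literal equivalents before storing the text.
        if not text:
            return ''
        for code, replacement in (('%r', '\n'), ('%R', '\n'), ('%t', '\t'), ('%T', '\t')):
            text = text.replace(code, replacement)
        return text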
Example 5
    def switch_jobs(self):

        # Step one is importing all of the Job Categories from the MUSH data. Each category is a THING object,
        # so we don't need MySQL just yet.
        cat_dict = dict()
        old_categories = cobj('jobdb').children.all()
        for old_cat in old_categories:
            new_cat, created = JobCategory.objects.get_or_create(key=old_cat.name)
            if created:
                new_cat.setup()
            cat_dict[old_cat.objid] = new_cat

        # Establishing the MySQL connection!
        from commands.mysql import sql_dict
        db = MySQLdb.connect(host=sql_dict['site'], user=sql_dict['username'],
                             passwd=sql_dict['password'], db=sql_dict['database'], cursorclass=cursors.DictCursor)
        c = db.cursor()

        # Our next order of business is retrieving all of the players who've ever posted jobs.
        # This section searches the database by OBJID and builds a dictionary that links the old jobsys player_id
        # to the matching character object, creating one (and its MushObject entry) if necessary.
        c.execute("""SELECT * from jobsys_players""")
        old_players = c.fetchall()
        char_dict = dict()
        for old_player in old_players:
            match = objmatch(old_player['objid'])
            if match:
                char = match.obj
            else:
                key = old_player['player_name']
                char = create.create_object(typeclass='classes.characters.BaseCharacter', key=key)
                objid = old_player['objid']
                dbref, csecs = objid.split(':', 1)
                cdate = from_unixtimestring(csecs)
                MushObject.objects.create(objid=objid, dbref=dbref, created=cdate, type=8, recreated=1, obj=char)
            char_dict[old_player['player_id']] = char

        # Now that we have the player ID -> character dictionary, we can begin the process of actually importing job data!
        # We only want the jobs from categories that actually exist. Probably rare that any of them wouldn't be, but
        # just in case...
        cat_list = ', '.join("'%s'" % cat for cat in cat_dict.keys())
        c.execute("""SELECT * from jobsys_jobs WHERE job_objid IN (%s) ORDER BY job_id""" % cat_list)
        old_jobs = c.fetchall()
        for row in old_jobs:
            job_id = row['job_id']
            if row['close_date']:
                close_date = row['close_date'].replace(tzinfo=pytz.utc)
            else:
                close_date = None
            if row['due_date']:
                due_date = row['due_date'].replace(tzinfo=pytz.utc)
            else:
                due_date = None
            if row['submit_date']:
                submit_date = row['submit_date'].replace(tzinfo=pytz.utc)
            else:
                submit_date = None
            title = row['job_title']
            status = row['job_status']
            owner = char_dict[row['player_id']]
            text = penn_substitutions(row['job_text'])
            category = cat_dict[row['job_objid']]

            handler_dict = dict()
            # We have our job row data prepped! Now to create the job and its opening comment as well as the owner-handler.
            new_job = category.jobs.create(title=title, submit_date=submit_date, due_date=due_date,
                                           close_date=close_date, status=status)
            new_owner = new_job.characters.create(character=owner, is_owner=True, check_date=utcnow())
            new_owner.comments.create(text=text, date_made=submit_date)
            handler_dict[row['player_id']] = new_owner

            # Here it's time to import all of the job's claims, handlers, watchers, and create JobHandler rows for them.
            c.execute("""SELECT * from jobsys_claim WHERE job_id=%s""", (job_id,))
            claim_data = c.fetchall()
            for old_claim in claim_data:
                stub = char_dict[old_claim['player_id']]
                new_handler, created = new_job.characters.get_or_create(character=stub, defaults={'check_date': utcnow()})
                if old_claim['claim_mode'] == 0:
                    new_handler.is_handler = True
                if old_claim['claim_mode'] == 1:
                    new_handler.is_helper = True
                new_handler.save()
                handler_dict[old_claim['player_id']] = new_handler

            # Unfortunately it's also possible that people who didn't claim it might also need JobHandler entries so...
            c.execute("""SELECT DISTINCT player_id from jobsys_comments WHERE job_id=%s""", (job_id,))
            all_speakers = c.fetchall()
            for speaker in all_speakers:
                if speaker['player_id'] not in handler_dict:
                    new_handler, created = new_job.characters.get_or_create(character=char_dict[speaker['player_id']],
                                                                            check_date=utcnow())
                    handler_dict[speaker['player_id']] = new_handler

            # And another round. This time it's a matter of importing handlers for anyone who ever CHECKED a job.
            # Here we'll also import everyone's 'last date they checked the job'.
            c.execute("""SELECT * FROM jobsys_check WHERE job_id=%s""", (job_id,))
            old_checks = c.fetchall()
            for check in old_checks:
                if check['player_id'] not in handler_dict:
                    handler, created = new_job.characters.get_or_create(character=char_dict[check['player_id']],
                                                                        check_date=utcnow())
                    handler_dict[check['player_id']] = handler
                else:
                    handler = handler_dict[check['player_id']]
                handler.check_date = check['check_date'].replace(tzinfo=pytz.utc)
                handler.save(update_fields=['check_date'])

            # Now to import all of the comments and replies.
            c.execute("""SELECT * from jobsys_comments WHERE job_id=%s ORDER BY comment_id""", (job_id,))
            old_comments = c.fetchall()
            for old_com in old_comments:
                handler = handler_dict[old_com['player_id']]
                comment_text = penn_substitutions(old_com['comment_text'])
                comment_date = old_com['comment_date'].replace(tzinfo=pytz.utc)
                private = old_com['comment_type']
                handler.comments.create(text=comment_text, date_made=comment_date, is_private=private)
        db.close()
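
Both MySQL-backed importers read their connection settings from commands.mysql.sql_dict. The keys below are taken from the connect() calls above; the values are placeholders, since the real module holds site-specific credentials:

    # commands/mysql.py -- assumed shape, placeholder values only.
    sql_dict = {
        'site': 'localhost',        # host of the old game's MySQL server
        'username': 'mush_import',
        'password': 'change-me',
        'database': 'old_mush',
    }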