Example #1
class JavManagerDB:
    def __init__(self):
        self.jav_db = FileBackend('jav_manager.db')

    def upcreate_jav(self, jav_obj: dict):
        # normalize car to upper case
        jav_obj['car'] = str(jav_obj['car']).upper()
        # set pk to car
        jav_obj['pk'] = jav_obj['car']
        # set default to no opinion
        #0-want, 1-viewed, 2-no opinion 3-local 4-downloading
        jav_obj.setdefault('stat', 2)

        _jav_doc = JavObj(jav_obj)
        _jav_doc.save(self.jav_db)
        self.jav_db.commit()

    def get_by_pk(self, pk: str):
        return self.jav_db.get(JavObj, {'pk': pk.upper()})

    def pk_exist(self, pk: str):
        try:
            self.jav_db.get(JavObj, {'pk': pk.upper()})
            return True
        except DoesNotExist:
            return False
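
This example (and the fuller JavManagerDB in Example #2 below) assumes the usual blitzdb imports and a Document subclass for the stored records. A minimal sketch of those assumptions; only FileBackend and Document come from blitzdb, the class body and the DoesNotExist alias are illustrative:

from blitzdb import Document, FileBackend

class JavObj(Document):
    # one record per video; 'pk' mirrors the upper-cased 'car' identifier
    pass

# the bare DoesNotExist caught above can simply be the per-class exception
DoesNotExist = JavObj.DoesNotExist
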
Example #2
class JavManagerDB:
    def __init__(self):
        self.jav_db = FileBackend('jav_manager.db')

    def create_indexes(self):
        print('creating index for stat')
        self.jav_db.create_index(JavObj, 'stat')

    def rebuild_index(self):
        self.jav_db.rebuild_index(self.jav_db.get_collection_for_cls(JavObj),
                                  'stat')

    def bulk_list(self):
        return self.jav_db.filter(JavObj, {})

    def partial_search(self, search_string: str):
        rt = self.jav_db.filter(JavObj,
                                {'pk': {
                                    '$regex': search_string.upper()
                                }})[:20]
        return rt

    def query_on_filter(self, filter_on: dict, page=1, limit=8):
        rt = self.jav_db.filter(JavObj, filter_on)
        rt_max_page = ceil(len(rt) / limit)
        rt_list = rt[(page - 1) * limit:(page) * limit]

        return [dict(x) for x in rt_list], rt_max_page

    def upcreate_jav(self, jav_obj: dict):
        # normalize car to upper case
        jav_obj['car'] = str(jav_obj['car']).upper()
        # set pk to car
        jav_obj['pk'] = jav_obj['car']

        # pull existing data since this is an update function
        try:
            current_jav_obj = dict(self.get_by_pk(jav_obj['car']))
            # overwrite current db dict with input dict
            current_jav_obj.update(jav_obj)
            jav_obj = current_jav_obj
        except DoesNotExist:
            # set default to no opinion
            #0-want, 1-viewed, 2-no opinion 3-local 4-downloading
            jav_obj.setdefault('stat', 2)

        _jav_doc = JavObj(jav_obj)
        _jav_doc.save(self.jav_db)
        self.jav_db.commit()
        print('wrote', jav_obj)

    def get_by_pk(self, pk: str):
        return self.jav_db.get(JavObj, {'pk': pk.upper()})

    def pk_exist(self, pk: str):
        try:
            self.jav_db.get(JavObj, {'pk': pk.upper()})
            return True
        except DoesNotExist:
            return False
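
Example #2 also relies on ceil from the math module for the page count. A short, illustrative sketch of how the pagination helper would be called; the record values are made up:

from math import ceil  # used by query_on_filter above

db = JavManagerDB()
db.upcreate_jav({'car': 'abc-123', 'stat': 0})

# first page of everything marked 0 ("want"), eight records per page
items, max_page = db.query_on_filter({'stat': 0}, page=1, limit=8)
print(len(items), 'item(s) on page 1 of', max_page)
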
Example #3
File: geocode.py Project: brokenbike/tools
class Geocode:
	def __init__(self, geocoderCache = True, printStatus = False):
		self.printStatus = printStatus
		self.geocoderCache = geocoderCache
		if self.geocoderCache:
			self.db = FileBackend('./geo-cache')
	def getGeo(self, lon, lat):
		if self.geocoderCache:
			try:
				nodeObj = self.db.get(GeoAssign,{'lat' : lat, 'lon' : lon})
				nodeObj['cached'] = True
				return nodeObj
			except GeoAssign.DoesNotExist:
				pass
		if self.printStatus:
			print('lon: '+str(lon)+', lat: '+str(lat)+' not in cache - start lookup at Nominatim-API')
		geolocator = Nominatim()
		location = geolocator.reverse([lat, lon], timeout=20)
		if 'address' in location.raw:
			location = location.raw['address']
			nodeObj = GeoAssign({
				'lat' : lat,
				'lon' : lon,
				'payload' : location
				})
			self.db.save(nodeObj)
			self.db.commit()
			nodeObj['cached'] = False
			return nodeObj
		else:
			# got no results (i.e. coordinates are incorrect)
			return None
Example #4
File: geocode.py Project: ffov/tools
class Geocode:
    def __init__(self, geocoderCache=True, printStatus=False):
        self.printStatus = printStatus
        self.geocoderCache = geocoderCache
        if self.geocoderCache:
            self.db = FileBackend('./geo-cache')

    def getGeo(self, lon, lat):
        if self.geocoderCache:
            try:
                nodeObj = self.db.get(GeoAssign, {'lat': lat, 'lon': lon})
                nodeObj['cached'] = True
                return nodeObj
            except GeoAssign.DoesNotExist:
                pass
        if self.printStatus:
            print('lon: ' + str(lon) + ', lat: ' + str(lat) +
                  ' not in cache - start lookup at Nominatim-API')
        geolocator = Nominatim()
        location = geolocator.reverse([lat, lon], timeout=20)
        if 'address' in location.raw:
            location = location.raw['address']
            nodeObj = GeoAssign({'lat': lat, 'lon': lon, 'payload': location})
            self.db.save(nodeObj)
            self.db.commit()
            nodeObj['cached'] = False
            return nodeObj
        else:
            # got no results (i.e. coordinates are incorrect)
            return None
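
Both Geocode variants (Examples #3 and #4) assume a GeoAssign document class for the cache and geopy's Nominatim geocoder. A minimal sketch of that setup plus an illustrative call; the coordinates are made up and the class body is an assumption:

from blitzdb import Document
from geopy.geocoders import Nominatim

class GeoAssign(Document):
    # cached reverse-geocoding result, keyed by 'lat' and 'lon'
    pass

geo = Geocode(geocoderCache=True, printStatus=True)
result = geo.getGeo(13.3777, 52.5163)  # lon, lat
if result is not None:
    print('cached' if result['cached'] else 'fresh lookup', result['payload'])
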
Example #5
class ImageToTumblr(object):
    def __init__(self, bot):
        self.cfg = PluginConfig(self)
        self.image_filetypes = self.cfg.get("image_filetypes").split(",")
        self.db = FileBackend(self.cfg.get("main_db"))
        self.tumblr = pytumblr.TumblrRestClient(
            self.cfg.get("consumer_key"),
            self.cfg.get("consumer_secret"),
            self.cfg.get("oauth_token"),
            self.cfg.get("oauth_secret"),
        )
        irc3.base.logging.log(
            irc3.base.logging.WARN, "Tumblr poster ready! Posting all URLs with: %s" % self.image_filetypes
        )

    def post_image(self, text, poster):
        # Strip everything but the address
        m = re.match(r".*(?P<url>http.*)", text)
        url = m.group("url")
        # Make sure we didn't do this one already
        try:
            self.db.get(PostedImage, {"url": url})
        except PostedImage.DoesNotExist:
            try:
                # First we post it to tumblr
                p = self.tumblr.create_photo(
                    "mmerpimages", state="published", source=str(url), caption="Found by %s" % poster
                )
                irc3.base.logging.log(irc3.base.logging.WARN, "Posting image by %s: %s" % (poster, url))

                # And then record the fact that we did.
                self.db.save(PostedImage({"url": url}))
                self.db.commit()
            except Exception:
                irc3.base.logging.log(irc3.base.logging.WARN, "Could not post to tumblr: %s" % url)
                return
        else:
            irc3.base.logging.log(irc3.base.logging.WARN, "Not posting duplicate image: %s" % url)
            return

    @irc3.event(irc3.rfc.PRIVMSG)  # Triggered on every message anywhere.
    def parse_image(self, target, mask, data, event):
        for extension in self.image_filetypes:
            if "." + extension.lower() in data:
                self.post_image(data, mask.nick)
Example #6
class Activity(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.backend = FileBackend('./activity-db')
        self.backend.autocommit = True
        self.scanned = False
        bot.logger.info("Activity plugin ready")

    def _maintain(self, bot):
        """Query the database and apply any configured roles"""
        pass

    def _find_one(self, member: discord.Member) -> Union[ActivityRecord, None]:
        """Searches for a response in the DB, returning it if found, or None if it doesn't exist or there are multiples.
        This exists to tie up the Blitzdb boilerplate in one place."""
        try:
            comm = self.backend.get(ActivityRecord, {'author': member.id})
        except ActivityRecord.DoesNotExist:
            return None
        except ActivityRecord.MultipleDocumentsReturned:
            self.bot.logger.error(
                f"Discarding multiple results returned for '{member.display_name(member.id)}'"
            )
            return None
        else:
            return comm

    async def on_ready(self):
        if not self.scanned:
            self.scanned = True
            g = self.bot.guilds[0]  # type: discord.Guild
            self.bot.logger.info(
                f"Beginning server scan. {len(g.members)} to update.")
            for m in g.members:
                m: discord.Member
                if not m.bot and m.id != self.bot.id:
                    activity_record = self._find_one(m)
                    if activity_record:
                        continue
                    else:
                        a = ActivityRecord({
                            "author": m.id,
                            "lastspeak": datetime.datetime.now()
                        })
                        self.backend.save(a)

    @commands.Cog.listener()
    async def on_message(self, message: discord.Message):
        if not self.bot.ready():
            return
        activity_record = self._find_one(message.author)
        if not activity_record:
            # member has not been scanned into the database yet
            return
        activity_record["lastspeak"] = message.created_at
        self.backend.save(activity_record)
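
The Activity cog assumes an ActivityRecord document with the two fields it reads and writes. A minimal sketch of that assumption:

from blitzdb import Document

class ActivityRecord(Document):
    # 'author' holds the Discord member id, 'lastspeak' the datetime of their last message
    pass
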
Example #7
class Database:

    def __init__(self):
        self.backend = FileBackend(settings['database_location'])

    def save(self, entry):
        self.backend.save(entry)
        self.backend.commit()

    def search(self, table, query):
        if table == 'user':
            doc = User
        elif table == 'subm':
            doc = Submission
        else:
            # unknown table name; nothing to look up
            return None
        try:
            return self.backend.get(doc, query)
        except doc.DoesNotExist:
            return None
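
A brief usage sketch of the table-name dispatch above, assuming the project's settings dict and its User document are importable; the query fields are illustrative:

db = Database()

user = db.search('user', {'name': 'alice'})
if user is None:
    # not stored yet; Database.save() also commits
    db.save(User({'name': 'alice'}))
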
Example #8
class TwitchNotifier(commands.Cog):
    def __init__(self, bot):
        self.bot: 'PixlBot' = bot
        self.config = bot.config['TwitchNotifier']
        self.backend = FileBackend('db')
        self.backend.autocommit = True
        self.bot.logger.info("Twitch notifier plugin ready")
        self.uuids = []
        self.online_uuids = []
        self.sslcontext = ssl.SSLContext()
        self.sslcontext.load_cert_chain(self.config['cert_path'],
                                        self.config['key_path'])
        self._twitch_init_()

    def _twitch_init_(self):
        self.bot.logger.info("Registering with Twitch...")
        self.twitch = Twitch(self.config['id'], self.config['secret'])
        self.twitch.authenticate_app([])
        self.bot.logger.info(
            f"Registering webhook endpoint {self.config['myurl']} ...")
        self.hook = TwitchWebHook(self.config['myurl'],
                                  self.config['id'],
                                  self.config['port'],
                                  ssl_context=self.sslcontext)
        self.hook.authenticate(self.twitch)
        self.bot.logger.info("Clearing all hook subscriptions...")
        self.hook.unsubscribe_all(self.twitch)  # Clear all subs on startup
        self.hook.start()
        self._register_all()

    def _login_to_id(self, name: str) -> Optional[str]:
        """Returns the twitch ID for a given login name, or None if the name couldn't be resolved."""
        try:
            res: dict = self.twitch.get_users(logins=[name])
        except TwitchBackendException as e:
            self.bot.logger.error(f"Backend error fetching user! {e}")
            return None
        if not res.get('data'):
            return None
        else:
            return res['data'][0]['id']

    def _register_all(self):
        """Attempts to register stream_changed callbacks for all configured users."""
        self.bot.logger.info("Registering callbacks for all watched users..")
        users = self.backend.filter(TwitchWatchedUser,
                                    {'twitch_name': {
                                        "$exists": True
                                    }})
        if not users:
            self.bot.logger.info("No users to watch. No callbacks registered.")
        else:
            for u in users:
                self.bot.logger.info(f"Registering: {u['twitch_name']}")
                success, uuid = self.hook.subscribe_stream_changed(
                    u['twitch_id'], self._cb_stream_changed)
                if success and uuid:
                    self.uuids.append(uuid)
                    self.bot.logger.info(
                        f"{success}: registered subscription UUID: {uuid}")
                else:
                    self.bot.logger.error(
                        f"{success}: failed registering subscription: {uuid}")

    def _cb_stream_changed(self, uuid, data):
        """Callback for Twitch webhooks, fires on stream change event"""
        self.bot.logger.debug(f"Callback data for {uuid}: {data}")
        if data["type"] == "offline":
            if uuid in self.online_uuids:
                # Stupid twitch sending the same damn webhook multiple times...
                self.online_uuids.remove(uuid)
                return
            else:
                self.bot.logger.debug(
                    f"Ignoring duplicate offline callback for {uuid}")
                return
        elif data["type"] == "live":
            if uuid in self.online_uuids:
                self.bot.logger.debug(
                    f"Ignoring duplicate live callback for {uuid}")
                return
            else:
                self.online_uuids.append(uuid)
        else:
            self.bot.logger.error(
                f"Got a callback type we can't handle: {data['type']}")
            return

        if uuid not in self.uuids:
            self.bot.logger.error(
                f"Got a callback for a UUID we're not tracking: {uuid}, my UUIDs: {self.uuids}"
            )
            return

        try:
            item = self.backend.get(TwitchWatchedUser,
                                    {"twitch_id": data["user_id"]})
        except TwitchWatchedUser.DoesNotExist:
            self.bot.logger.error(
                f"Got a callback for a USER we're not tracking: {data['user_id']} -> {data['user_name']}"
            )
            return
        channel: discord.TextChannel = self.bot.get_channel(
            item['notify_channel'])

        width = 640
        height = 360
        url = data['thumbnail_url'].format(width=width, height=height)

        tu = self.twitch.get_users(data['user_id'])['data'][0]
        self.bot.logger.debug(tu)

        embed = discord.Embed(
            title=f"Now streaming {data['game_name']}",
            description=data['title'],
            color=discord.Color.green(),
        )
        embed.set_image(url=url)
        embed.set_thumbnail(url=tu["profile_image_url"])
        embed.set_author(name=item["twitch_name"],
                         url=f"https://twitch.tv/{data['user_name']}")
        embed.add_field(name="Watch live at",
                        value=f"https://twitch.tv/{data['user_name']}")
        # This isn't an async function, so enqueue the send manually
        self.bot.loop.create_task(channel.send(embed=embed))
        self.bot.logger.info(
            f"Successfully sent online notification for {data['user_id']}")

    @cog_ext.cog_subcommand(
        base="Twitchwatch",
        name="add_notification",
        description="Add a go live notification for Twitch",
        options=[twitch_name, notify_channel, notify_text],
        guild_ids=util.guilds)
    async def add_notification(self, ctx: SlashContext, twitch_name: str,
                               notify_channel: discord.TextChannel,
                               notify_text: str):
        twitch_id = self._login_to_id(twitch_name)
        try:
            self.backend.get(TwitchWatchedUser, {'twitch_name': twitch_name})
        except TwitchWatchedUser.DoesNotExist:
            pass
        except TwitchWatchedUser.MultipleDocumentsReturned:
            self.bot.logger.error(
                "Multiple users returned - database inconsistent???")
            return
        if not twitch_id:
            await ctx.send(embed=mkembed(
                'error',
                f"Unable to get the Twitch ID for the name {twitch_name}"))
            return
        await ctx.defer()  # This bit can take a minute.
        success, uuid = self.hook.subscribe_stream_changed(
            twitch_id, self._cb_stream_changed)
        if success and uuid:
            self.uuids.append(uuid)
            self.bot.logger.info(
                f"{success}: registered subscription UUID: {uuid}")
        else:
            self.bot.logger.error(
                f"{success}: failed registering subscription: {uuid}")
            await ctx.send("Bluh, couldn't register the webhook with twitch :("
                           )
            return
        item = TwitchWatchedUser({
            'twitch_name': twitch_name,
            'twitch_id': twitch_id,
            'discord_name': ctx.author.id,
            'notify_channel': notify_channel.id,
            'notify_text': notify_text,
            'uuid': str(uuid)
        })
        self.bot.logger.debug(f"DB object dump: {item.__dict__}")
        self.backend.save(item)
        await ctx.send(embed=mkembed("done",
                                     f"Notification added for {twitch_name}",
                                     channel=notify_channel.name))

    @cog_ext.cog_subcommand(
        base="Twitchwatch",
        name="del_notification",
        description="Remove a go live notification for Twitch",
        options=[twitch_name],
        guild_ids=util.guilds)
    async def del_notification(self, ctx: SlashContext, twitch_name: str):
        try:
            item = self.backend.get(TwitchWatchedUser,
                                    {'twitch_name': twitch_name})
        except TwitchWatchedUser.DoesNotExist:
            await ctx.send(embed=mkembed(
                "error", f"No notification exists for {twitch_name}"))
            return
        self.hook.unsubscribe(item['uuid'])
        self.bot.logger.info(f"Removing watch {item['uuid']}: {twitch_name}")
        self.backend.delete(item)
        if item['uuid'] in self.uuids:
            self.uuids.remove(item['uuid'])
        await ctx.send(
            embed=mkembed("done", f"Notification for {twitch_name} removed."))
Example #9
from blitzdb import FileBackend

backend = FileBackend("/tmp/movies")

backend.register(Movie,{'collection':'movies'})
backend.register(Actor,{'collection':'actors'})

backend.filter(Movie,{}).delete()
backend.filter(Actor,{}).delete()

the_godfather.save(backend)
marlon_brando.save(backend)
al_pacino.save(backend)

the_godfather = backend.get(Movie, {'pk': 1})
#or...
the_godfather = backend.get(Movie,{'name' : 'The Godfather'})

print(the_godfather)

movies_from_1972 = backend.filter(Movie,{'year' : 1972})

the_godfather.cast = {'Don Vito Corleone' : marlon_brando, 'Michael Corleone' : al_pacino}

#Documents stored within other objects will be automatically converted to database references.

marlon_brando.performances = [the_godfather]
al_pacino.performances = [the_godfather]

marlon_brando.save(backend)
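
The snippet assumes the Movie and Actor document classes and the three instances already exist. A minimal sketch of those definitions, with illustrative field values:

from blitzdb import Document

class Movie(Document):
    pass

class Actor(Document):
    pass

the_godfather = Movie({'pk': 1, 'name': 'The Godfather', 'year': 1972})
marlon_brando = Actor({'name': 'Marlon Brando'})
al_pacino = Actor({'name': 'Al Pacino'})
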
Example #10
class Responder(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.backend = FileBackend('./responder-db')
        self.backend.autocommit = True
        bot.logger.info("Responder plugin ready")

    def _find_one(self, name: str) -> Union[ResponseCommand, None]:
        """Searches for a response in the DB, returning it if found, or None if it doesn't exist or there are multiples.
        This exists to tie up the Blitzdb boilerplate in one place."""
        try:
            comm = self.backend.get(ResponseCommand, {'command': name})
        except ResponseCommand.DoesNotExist:
            return None
        except ResponseCommand.MultipleDocumentsReturned:
            self.bot.logger.error(
                f"_find_one discarding multiple results returned for '{name}'")
            return None
        else:
            return comm

    def _reply_allowed(self, comm: ResponseCommand,
                       message: discord.Message) -> bool:
        """Determine whether a message can be replied to based on its attributes
        In general, if a user or channel restriction is set on a command, it can only be used when called in the
        listed channel or by the listed user.
        """
        self.bot.logger.debug(f"Restriction dump: {comm.get('restrictions')}")
        if not comm.get("restrictions"):
            # No restrictions on this command, we can respond without doing anything else.
            return True
        else:
            if comm["restrictions"].get("channels"):
                channels = comm["restrictions"]["channels"]
                if message.channel.id in channels:
                    return True
                else:
                    return False
            elif comm["restrictions"].get("users"):
                users = comm["restrictions"]["users"]
                if message.author.id in users:
                    return True
                else:
                    return False
            else:
                return True

    @commands.command()
    async def addresponse(self, ctx: context, name: str, *responsen):
        """Adds an automatic response to (name) as (response)
        The first word (name) is the text that will be replied to. Everything else is what it will be replied to with.
        If you want to reply to an entire phrase, enclose name in quotes."""
        arg2 = " ".join(responsen)
        if self._find_one(name):
            await ctx.send(embed=mkembed('error', f"'{name}' already exists."))
            return
        else:
            comm = ResponseCommand({
                'command': name,
                'reply': arg2,
                'creator_str': str(ctx.author),
                'creator_id': ctx.author.id
            })
            self.backend.save(comm)
            self.bot.logger.info(
                f"'{name}' was added by {ctx.author.display_name}")
            await ctx.send(embed=mkembed('done', f"Saved: {name} => {arg2}"))

    @commands.command()
    async def delresponse(self, ctx: context, name: str):
        """Removes an autoresponse.
        Only the initial creator of a response can remove it."""
        comm = self._find_one(name)
        if not comm:
            await ctx.send(embed=mkembed('error', f"{name} is not defined."))
            return
        elif not ctx.author.id == comm['creator_id']:
            await ctx.send(embed=mkembed(
                'error',
                f"You are not the creator of {name}. Ask {comm['creator_str']}"
            ))
        else:
            self.backend.delete(comm)
            self.bot.logger.info(
                f"'{name}' was deleted by {ctx.author.display_name}")
            await ctx.send(embed=mkembed('error', f"{name} has been removed."))

    @commands.command()
    async def responselimit(self, ctx: context, name: str, *tags: str):
        """Adds a restriction mode on a response so it only triggers in certain circumstances.
        Tags is a space-separated list of users and channels that this responder will apply to. These must be
        discord tags or pings, i.e. discord must show them as links, not just text.
        If the word NONE is used, all restrictions are removed.
        """
        comm = self._find_one(name)
        if not comm:
            await ctx.send(embed=mkembed('error', f"{name} does not exist."))
            return
        if not ctx.author.id == comm['creator_id']:
            await ctx.send(embed=mkembed(
                'error',
                f"You are not the creator of {name}. Ask {comm['creator_str']}"
            ))
            return
        if tags[0] == "NONE":
            comm["restrictions"] = {}
            self.backend.save(comm)
            await ctx.send(
                embed=mkembed('done', f"All restrictions removed from {name}"))
            return
        if ctx.message.mentions:
            if not comm.get("restrictions"):
                comm["restrictions"] = {}
            elif not comm["restrictions"].get("users"):
                comm["restrictions"]["users"] = []
            comm["restrictions"]["users"] = list(
                set(comm["restrictions"]["users"] +
                    [u.id for u in ctx.message.mentions]))
            self.backend.save(comm)
            display_users = [
                self.bot.get_user(u).display_name
                for u in comm["restrictions"]["users"]
            ]
            await ctx.send(embed=mkembed('done',
                                         'User restriction updated:',
                                         command=comm['command'],
                                         users=display_users))
        if ctx.message.channel_mentions:
            if not comm.get("restrictions"):
                comm["restrictions"] = {}
            if not comm["restrictions"].get("channels"):
                comm["restrictions"]["channels"] = []
            comm["restrictions"]["channels"] = list(
                set(comm["restrictions"]["channels"] +
                    [c.id for c in ctx.message.channel_mentions]))
            display_channels = [
                self.bot.get_channel(c).name
                for c in comm["restrictions"]["channels"]
            ]
            self.backend.save(comm)
            await ctx.send(embed=mkembed('done',
                                         'Channel restriction updated:',
                                         Command=comm['command'],
                                         Channels=display_channels))

    @commands.command()
    async def responserestrictions(self, ctx: context, name: str):
        """Show the restriction list for a given command"""
        comm = self._find_one(name)
        if not comm:
            await ctx.send(embed=mkembed('error', f"{name} does not exist."))
            return
        await ctx.send(
            embed=mkembed('info',
                          f"Information for `{name}`",
                          Reply=comm['reply'],
                          Restrictions=comm.get('restrictions', 'None'),
                          Creator=comm['creator_str']))

    @commands.Cog.listener()
    async def on_message(self, message: discord.Message):
        comm = self._find_one(message.content)
        if comm and self._reply_allowed(comm, message):
            await message.channel.send(comm['reply'])
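
The restriction checks in _reply_allowed (here and in Example #11) expect a nested dict on the stored command. A sketch of the assumed shape; the ids are illustrative:

comm = ResponseCommand({
    'command': 'hello',
    'reply': 'hi there',
    'creator_str': 'alice#0001',
    'creator_id': 123456789012345678,
    # optional: limit where / for whom the reply fires;
    # note that _reply_allowed checks 'channels' before 'users'
    'restrictions': {
        'channels': [111111111111111111],  # only respond in these channel ids
        'users': [222222222222222222],     # or only to these user ids
    },
})
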
Example #11
class Responder(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.backend = FileBackend('db')
        self.backend.autocommit = True
        bot.logger.info("Responder plugin ready")

    def _find_one(self, name: str) -> Union[ResponseCommand, None]:
        """Searches for a response in the DB, returning it if found, or None if it doesn't exist or there are multiples.
        This exists to tie up the Blitzdb boilerplate in one place."""
        try:
            comm = self.backend.get(ResponseCommand, {'command': name})
        except ResponseCommand.DoesNotExist:
            return None
        except ResponseCommand.MultipleDocumentsReturned:
            self.bot.logger.error(f"_find_one discarding multiple results returned for '{name}'")
            return None
        else:
            return comm

    def _reply_allowed(self, comm: ResponseCommand, message: discord.Message) -> bool:
        """Determine whether a message can be replied to based on its attributes
        In general, if a user or channel restriction is set on a command, it can only be used when called in the
        listed channel or by the listed user.
        """
        self.bot.logger.debug(f"Restriction dump: {comm.get('restrictions')}")
        if not comm.get("restrictions"):
            # No restrictions on this command, we can respond without doing anything else.
            return True
        else:
            if comm["restrictions"].get("channels"):
                channels = comm["restrictions"]["channels"]
                if message.channel.id in channels:
                    return True
                else:
                    return False
            elif comm["restrictions"].get("users"):
                users = comm["restrictions"]["users"]
                if message.author.id in users:
                    return True
                else:
                    return False
            else:
                return True

    @cog_ext.cog_subcommand(base="Autoresponder", name="addresponse",
                            description="Adds an automatic response to certain text",
                            options=[respond_to, response], guild_ids=util.guilds)
    async def addresponse(self, ctx: SlashContext, respond_to: str, response: str):
        """Adds an automatic response to (name) as (response)
        The first word (name) is the text that will be replied to. Everything else is what it will be replied to with.
        If you want to reply to an entire phrase, enclose name in quotes."""
        if self._find_one(respond_to):
            await ctx.send(embed=mkembed('error', f"'{respond_to}' already exists."))
            return
        else:
            comm = ResponseCommand(
                {'command': respond_to, 'reply': response, 'creator_str': str(ctx.author), 'creator_id': ctx.author.id}
            )
            self.backend.save(comm)
            self.bot.logger.info(f"'{response}' was added by {ctx.author.display_name}")
            await ctx.send(embed=mkembed('done', "Autoresponse saved.", reply_to=respond_to, reply_with=response))

    @cog_ext.cog_subcommand(base="Autoresponder", name="delresponse",
                            description="Removes an automatic reponse from certain text",
                            options=[respond_to], guild_ids=util.guilds)
    async def delresponse(self, ctx: SlashContext, respond_to: str):
        """Removes an autoresponse.
        Only the initial creator of a response can remove it."""
        comm = self._find_one(respond_to)
        if not comm:
            await ctx.send(embed=mkembed('error', f"{respond_to} is not defined."))
            return
        elif not ctx.author.id == comm['creator_id']:
            await ctx.send(
                embed=mkembed('error', f"You are not the creator of {respond_to}. Ask {comm['creator_str']}"))
        else:
            self.backend.delete(comm)
            self.bot.logger.info(f"'{respond_to}' was deleted by {ctx.author.display_name}")
            await ctx.send(embed=mkembed('info', f"{respond_to} has been removed."))

    # @commands.command()
    # @cog_ext.cog_subcommand(base="Autoresponder", name="limit_user",
    #                         description="Limit a response to triggering on a certain user. Leave users blank to remove.",
    #                         options=[respond_to, restrict_user],
    #                         guild_ids=util.guilds)
    async def limitchannel(self, ctx: SlashContext, respond_to: str, **kwargs):
        comm = self._find_one(respond_to)
        if not comm:
            await ctx.send(embed=mkembed('error', f"'{respond_to}' does not exist."))
            return
        if not ctx.author.id == comm['creator_id']:
            await ctx.send(
                embed=mkembed('error', f"You are not the creator of '{respond_to}'. Ask {comm['creator_str']}"))
            return
        if len(kwargs) == 0:
            comm["restrictions"] = {}
            self.backend.save(comm)
            await ctx.send(embed=mkembed('done', f"All restrictions removed from {respond_to}"))
            return
        if kwargs['restrict_user']:
            if not comm.get("restrictions"):
                comm["restrictions"] = {}
            elif not comm["restrictions"].get("users"):
                comm["restrictions"]["users"] = []
            comm["restrictions"]["users"] = list(set(
                comm["restrictions"]["users"] + [u.id for u in restrict_user]
            ))
            self.backend.save(comm)
            display_users = [self.bot.get_user(u).display_name for u in comm["restrictions"]["users"]]
            await ctx.send(
                embed=mkembed('done', 'User restriction updated:', command=comm['command'], users=display_users)
            )
        if kwargs['restrict_channel']:
            if not comm.get("restrictions"):
                comm["restrictions"] = {}
            if not comm["restrictions"].get("channels"):
                comm["restrictions"]["channels"] = []
            comm["restrictions"]["channels"] = list(set(
                comm["restrictions"]["channels"] + [c.id for c in ctx.message.channel_mentions]
            ))
            display_channels = [self.bot.get_channel(c).name for c in comm["restrictions"]["channels"]]
            self.backend.save(comm)
            await ctx.send(
                embed=mkembed('done', 'Channel restriction updated:',
                              Command=comm['command'],
                              Channels=display_channels
                              )
            )

    @commands.command()
    async def responserestrictions(self, ctx: context, name: str):
        """Show the restriction list for a given command"""
        comm = self._find_one(name)
        if not comm:
            await ctx.send(embed=mkembed('error', f"{name} does not exist."))
            return
        await ctx.send(
            embed=mkembed('info', f"Information for `{name}`",
                          Reply=comm['reply'],
                          Restrictions=comm.get('restrictions', 'None'),
                          Creator=comm['creator_str']
                          )
        )

    @commands.Cog.listener()
    async def on_message(self, message: discord.Message):
        comm = self._find_one(message.content)
        if comm and self._reply_allowed(comm, message):
            await message.channel.send(comm['reply'])
Example #12
        backend.commit()

    username = '******'
    password = '******'
    s = Session(username, password)
    status, message = s.login()
    if status == STATUS_SUCCESS:
        html = s.get(URL_TOTALSCORE)
        if html.content is not None:
            backend = FileBackend("./emis.db")

            selector = etree.HTML(html.content.decode('gbk'))
            current_credit = int(selector.xpath(r'//*[@color="#FF0000"]/text()')[0])

            try:
                saved_credit = backend.get(Score, {'pk': 1})

                if current_credit != '':
                    if current_credit > int(saved_credit.credit):
                        saved_credit.credit = current_credit
                        saved_credit.save(backend)
                        backend.commit()
                        BmobSmsUtils().send_sms_template(['18395960722'], 'new_score')
                        print('Credit changed, SMS sent!')
                    elif current_credit == int(saved_credit.credit):
                        print('Credit not changed: ' + str(current_credit))
                    else:
                        save_new_credit(current_credit)
                        print('Credit: ' + str(current_credit) + ' is smaller than before!')
            except Score.DoesNotExist:
                save_new_credit(current_credit, pk=1)
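
The snippet references a Score document and a save_new_credit helper that are not shown. A hedged sketch of what they might look like; the names come from the code above, the bodies are assumptions:

from blitzdb import Document, FileBackend

class Score(Document):
    pass

def save_new_credit(credit, pk=1):
    # assumed helper: persist a fresh Score record under the given primary key
    backend = FileBackend("./emis.db")
    backend.save(Score({'pk': pk, 'credit': credit}))
    backend.commit()
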
Example #13
def main():
	backend = FileBackend("./my-db") #locate the database
	fin = file("Cu_calc_input.yaml","r") #read the yaml file
	in_param = yaml.load(fin) #load it
	material = in_param.get("Material") #extract the parameters
	circuit_param = in_param.get("Circuit_Design")
	exp_param = in_param.get("Experimental_Setup")
	step = in_param.get("Output").get("Current_Sweep_Step")

	c_w = circuit_param.get("Width")
	c_R = circuit_param.get("Radius")
	c_t = circuit_param.get("Thickness")
	c_w_else = circuit_param.get("Width_Else")
	c_l_else = circuit_param.get("Length_Else")

	file_str = "%s_w_%d_um_R_%d_um_t_%d_nm" %(material, c_w*1e6, c_R*1e6, c_t*1e9) #set the output file
	fout = open(file_str+".txt","w")
	
	mtrl = backend.get(Material, {"name": material}) #find the material information in the database
	print(mtrl.name)
	x = c_w/c_R #w/R ratio

	##########LIQUID NITROGEN TEMPERATURE CASE################

	#calculate the resistance of the trace to determine current upper limit
	trace_res_77 = CALC_RES(c_w, c_R, c_t, c_w_else, c_l_else, 77., mtrl.Tref, mtrl.res, mtrl.alpha) 
	current_limit_77 = min(exp_param.get("Current_Limit"), exp_param.get("Voltage_Limit")/trace_res_77) 
	
	#calculate the signals
	output_77 =  np.tile(np.arange(0,current_limit_77,step),(3,1)).transpose() #currents, V_G, V_S
	for i in range(0,output_77.shape[0]):
		output_77[i,1] = SIG_NORMAL(mtrl.n, c_w, c_t, output_77[i,0])*MODEL_CRC(x)
		output_77[i,2] = HALL_POT(mtrl.n, c_w, c_t, output_77[i,0])*MODEL_LIN(x)
	#Plot the data
	plt.ioff()
	fig = plt.figure()
	plt.plot(output_77[:,0],output_77[:,1], label=r"$V_G$ (77K)", marker="x",linestyle="None",color="k")
	plt.plot(output_77[:,0],output_77[:,2], label=r"$V_H$ (77K)", marker="+",linestyle="None",color="k")

	#Store the data
	fout.write("##########LIQUID NITROGEN TEMPERATURE CASE################\n")
	res_str = "Resistance = %.2e ohm\n\n" %trace_res_77
	fout.write(res_str)
	fout.write("I(A)\tV_G(V)\tV_H(V)\n")
	np.savetxt(fout,output_77,fmt="%.2e")


	##########ROOM TEMPERATURE CASE################

	trace_res_293 = CALC_RES(c_w, c_R, c_t, c_w_else, c_l_else, 293., mtrl.Tref, mtrl.res, mtrl.alpha) 
	current_limit_293 = min(exp_param.get("Current_Limit"), exp_param.get("Voltage_Limit")/trace_res_293) 

	output_293 =  np.tile(np.arange(0,current_limit_293,step),(3,1)).transpose() #currents, V_G, V_S
	for i in range(0,output_293.shape[0]):
		output_293[i,1] = SIG_NORMAL(mtrl.n, c_w, c_t, output_293[i,0])*MODEL_CRC(x)
		output_293[i,2] = HALL_POT(mtrl.n, c_w, c_t, output_293[i,0])*MODEL_LIN(x)

	plt.plot(output_293[:,0],output_293[:,1], label=r"$V_G$ (Room Temp.)", marker="s",mfc="None",linestyle="None",color="k")
	plt.plot(output_293[:,0],output_293[:,2], label=r"$V_H$ (Room temp.)", marker="D",mfc="None",linestyle="None",color="k")

	fout.write("\n##########ROOM TEMPERATURE CASE################\n")
	res_str = "Resistance = %.2e ohm\n\n" %trace_res_293
	fout.write(res_str)
	fout.write("I(A)\tV_G(V)\tV_H(V)\n")
	np.savetxt(fout,output_293,fmt="%.2e")

	
	##########SUPERCONDUCTING CASE################
	if mtrl.sc:
		output_sc =  np.tile(np.arange(0,exp_param.get("Current_Limit"),step),(2,1)).transpose() #currents, V_G, V_S
		for i in range(0,output_sc.shape[0]):
			output_sc[i,1] = SIG_SC(mtrl.L, c_w, c_t, output_sc[i,0])*MODEL_SCCJ(x)

		plt.plot(output_sc[:,0],output_sc[:,1], label=r"$V_G$ (Supercond.)",color="k")

		fout.write("\n##########SUPERCONDUCTING CASE################\n")
		fout.write("I(A)\tV_G(V)\n")
		np.savetxt(fout,output_sc,fmt="%.2e")

	#plot details
	plt.xlabel("Input current (A)",fontsize="15")
	plt.ylabel("Potential difference (V)",fontsize="15")
	plt.yscale("log")
	plt.title(material)
	plt.legend(loc=4)
	plt.savefig(file_str+".png",dpi=300,format="png")
	plt.close(fig)

	fin.close()
	fout.close()
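
The script looks up a Material document by name and reads the attributes n, res, alpha, Tref, sc and L. A sketch of how such a record might have been seeded; Material comes from the code above, the copper constants are textbook values and the field meanings are inferred from how the script uses them:

from blitzdb import Document, FileBackend

class Material(Document):
    pass

backend = FileBackend("./my-db")
backend.save(Material({
    'name': 'Cu',
    'n': 8.5e28,      # carrier density, m^-3
    'res': 1.68e-8,   # resistivity at Tref, ohm*m
    'alpha': 3.9e-3,  # temperature coefficient of resistance, 1/K
    'Tref': 293.0,    # reference temperature, K
    'sc': False,      # not a superconductor, so 'L' is unused here
    'L': 0.0,
}))
backend.commit()
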
Example #14
class VisinumDatabase:
    def __init__(self,
                 dbpath,
                 name=VisinumDB_name,
                 DbDocClass=DbDocFileMetadata):  # , attrs={}, **kwargs
        self.DbDocClass = DbDocClass
        self.name = name
        self.open_database(
            dbpath)  # sets self.dbpath and self.db as reference to database

    def open_database(self, dbpath):  # (self, dbpath, attrs={}, **kwargs)
        if os.path.basename(dbpath) == self.name:
            self.dbpath = dbpath  # opening existing Visinum database
        elif os.path.isdir(os.path.join(dbpath, self.name)):
            self.dbpath = os.path.join(dbpath,
                                       self.name)  # opening existing database
            logger.info("Found Visinum database %s in directory %s" %
                        (self.name, dbpath))
        elif os.path.isdir(dbpath):
            self.dbpath = os.path.join(dbpath, self.name)
            logger.info("Creating new Visinum database %s in directory %s" %
                        (self.name, dbpath))
        else:
            logger.error("Database path (dbpath) incorrectly specified %s" %
                         (dbpath, ))
            raise ValueError(
                "Database path (dbpath) incorrectly specified %s" % (dbpath, ))
        self.db = FileBackend(self.dbpath, {'serializer_class': 'json'})
        logger.info("Opening Visinum database %s" % (self.dbpath, ))
        config_attrs = self.get_config()
        """try:
            #config_attrs = self.db.get(self.DbDocClass, {'visinum_type' : 'db_config'})
            config_attrs = self.get_config()
        except self.DbDocClass.DoesNotExist:
            self.set_config( {'visinum_type' : 'db_config',
                            'title' : os.path.basename( os.path.dirname(self.dbpath) ),
                            'path_orig' : self.dbpath,
                            'UUID': make_uuid(version=3, namespace='uuid.NAMESPACE_URL', name=self.dbpath)
                            } )
            #config_attrs.update(attrs)
            #self.create_new_item(config_attrs, config_attrs, **kwargs )"""
        self.config_attrs = config_attrs

    def get_config(self, attr=None):
        try:
            dbitem = self.db.get(self.DbDocClass,
                                 {'visinum_type': 'db_config'})
            config_attrs = dbitem.attributes
        except self.DbDocClass.DoesNotExist:
            try:
                config_UUID = make_uuid(version=3,
                                        namespace='uuid.NAMESPACE_URL',
                                        name=self.dbpath)
                dbitem = self.db.get(self.DbDocClass, {'UUID': config_UUID})
                config_attrs = dbitem.attributes
            except self.DbDocClass.DoesNotExist:
                # cannot find db configuration, setup (reset) new configuration
                config_attrs = {
                    'visinum_type': 'db_config',
                    'title': os.path.basename(os.path.dirname(self.dbpath)),
                    'UUID': make_uuid(version=3,
                                      namespace='uuid.NAMESPACE_URL',
                                      name=self.dbpath),
                    'path_orig': self.dbpath
                }
                self.set_config(config_attrs, reset=True)
                logger.warning(
                    "Cannot find db configuration, re-setting configuration %s"
                    % (config_attrs['UUID'], ))
        if attr:  # if attr and attr in config_attrs:
            return config_attrs[attr]
        return config_attrs

    def set_config(self, attrs={}, reset=False, **kwargs):
        if reset:
            config_attrs = {}
        else:
            config_attrs = self.get_config()
        config_attrs.update(attrs)
        for k, v in kwargs.items():
            config_attrs[k] = v
        UUID = self.set_dbitem(config_attrs)
        self.config_attrs = config_attrs
        return UUID

    def extract_file_metadata(self, dirpath, update=True):
        rootNode = make_file_system_tree(dirpath, excludedirs=[self.name])
        """UUID = self.save_item(rootNode.to_vndict(), {'visinum_type':'datatree',
                               'name':'datatree ' + rootNode.name,
                               'visinum_datatree':'file_system',
                               'visinum_node':rootNode.__class__.__name__,
                               'fpath':dirpath})"""
        rootNode.__class__.metadata_to_dict = node_metadata_to_dict
        for node in list(rootNode):
            mdata = {}
            if update:
                try:
                    dbdoc = self.get_dbitem(node._UUID)
                    mdata.update(dbdoc.attributes)
                    del dbdoc
                except Exception:
                    pass
            mdata.update(node.metadata_to_dict(self.db, dirpath))
            dbdoc = self.DbDocClass(mdata)
            dbdoc.save(self.db)
            logger.info('Metadata extracted from %s' %
                        (node.get_path(dirpath), ))
        logger.info("Completed processing data tree %s " % (rootNode.name, ))
        self.db.commit()
        for node in list(rootNode):
            dbdoc = self.db.get(self.DbDocClass, {'UUID': node._UUID})
            if node is rootNode:
                dbdoc.parent = None
                #dbdoc.visinum_type = 'datatree'
                dbdoc.visinum_datatree = 'file_system'
                #self.set_config(filesystemtreeroot=rootNode._UUID)
                self.set_config(filesystemtreeroot=rootNode._UUID)  # filesystemtreeroot=dbdoc
                ##self.datatreelist.append( ('file system', rootNode._UUID) )
            else:
                #dbdoc.parent = node.parent.UUID
                dbdoc.visinum_parent = self.db.get(self.DbDocClass,
                                                   {'UUID': node.parent._UUID})
            dbdoc.visinum_node = node.__class__.__name__
            dbdoc.visinum_childs = []  # database will not accept _childs as a doc attribute
            for child in node._childs:
                #_childs.append(child.UUID)
                dbdoc.visinum_childs.append(
                    self.db.get(self.DbDocClass, {'UUID': child._UUID}))
            dbdoc.visinum_nchilds = len(node._childs)
            dbdoc.save(self.db)
        self.db.commit()
        logger.info("Metadata committed to DB %s" % (rootNode.name, ))

    def set_dbitem(self, attrs={}, commit=True, **kwargs):
        for k, v in kwargs.items():
            attrs[k] = v
        if not 'UUID' in attrs:
            attrs['UUID'] = make_uuid()
        dbItem = self.DbDocClass(attrs)
        dbItem.save(self.db)
        if commit:
            self.db.commit()
        return attrs['UUID']

    def get_dbitem(self, attrs={}, DbDocClass=None):
        if not DbDocClass:
            DbDocClass = self.DbDocClass
        if isinstance(attrs, str):
            attrs = {'UUID': attrs}
        return self.db.get(DbDocClass, attrs)

    def filter(self, query, DbDocClass=None):
        if not DbDocClass:
            DbDocClass = self.DbDocClass
        return self.db.filter(DbDocClass, query)

    def tree_from_db(self, dbitem, root_node=None, parent=None):
        #dbitem = self.get_dbitem(rootUUID)
        metadata = dbitem.attributes  # {k:v for k, v in dict_.items() if k != "_childs"}
        node = FsNode(metadata['path'], parent=parent, metadata=metadata)
        if not root_node:
            root_node = node
        if 'visinum_childs' in metadata:
            for child in metadata['visinum_childs']:
                self.tree_from_db(child, root_node, node)
        return root_node
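
get_dbitem accepts either a bare UUID string or a query dict. A brief usage sketch; the path is illustrative and DbDocFileMetadata is the class's default document type:

db = VisinumDatabase('/path/to/project-data')  # opens or creates <dbpath>/<VisinumDB_name>
uuid = db.set_dbitem({'visinum_type': 'note', 'text': 'hello'})  # returns the generated UUID
item = db.get_dbitem(uuid)            # a bare string is treated as a UUID
same = db.get_dbitem({'UUID': uuid})  # or pass an explicit query dict
hits = db.filter({'visinum_type': 'note'})
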
Example #15
class eventmanager:
    def __init__(self, trigger_id, jsonfilelist, triggerdir, datadir, real,
                 trigger_path):

        #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
        tstart = time.time()

        if real:
            self.backend = FileBackend("./realdb")
        else:
            self.backend = FileBackend("./testdb")

        try:
            thisevent = self.backend.get(Trigger, {'id': trigger_id})
            print 'Found this event in desgw database...'
        except Trigger.DoesNotExist:
            thisevent = Trigger({
                'id': trigger_id,
                'jsonfilelist': jsonfilelist,
                'triggerpath': triggerdir,
                'mapspath': datadir,
                'jobids': [
                    (0, 'jsonfile_corresponding_to_jobid.json'),
                ],
            })
            print 'Database entry created!'

        self.trigger_id = trigger_id
        self.trigger_path = trigger_path

        self.backend.save(thisevent)
        self.backend.commit()

        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.config = yaml.safe_load(f)
        self.filterobslist = np.array(self.config['exposure_filter'],
                                      dtype='str')
        self.strategydict = {}

        for f in np.unique(self.filterobslist):
            self.strategydict[f] = len(
                self.filterobslist[self.filterobslist == f])

        self.connection = ea.connect(DATABASE)
        self.cursor = self.connection.cursor()

        self.jsonfilelist = jsonfilelist

        print self.jsonfilelist
        if hardjson:
            self.jsonfilelist = hj

        self.trigger_id = trigger_id
        self.datadir = datadir
        self.triggerdir = triggerdir
        self.processingdir = os.path.join(self.triggerdir, 'PROCESSING')
        if not os.path.exists(self.processingdir):
            os.makedirs(self.processingdir)

        dire = './processing/' + trigger_id + '/'
        if not os.path.exists(dire):
            os.makedirs(dire)

        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.strategy = yaml.safe_load(f)

        with open("jobmanager.yaml", "r") as g:
            self.jmconfig = yaml.safe_load(g)


        q1 = "select expnum,nite,mjd_obs,telra,teldec,band,exptime,propid,obstype,object from exposure where " \
             "nite>20130828 and nite<20150101 and expnum<300000 and obstype='object' order by expnum"  # y1 images
        self.connection.query_and_save(q1, './processing/exposuresY1.tab')

        q2 = "select expnum,nite,mjd_obs,radeg,decdeg,band,exptime,propid,obstype,object from prod.exposure where " \
             "nite>20150901 and obstype='object' order by expnum"  # y2 and later
        self.connection.query_and_save(q2, './processing/exposuresCurrent.tab')

        os.system(
            'cat ./processing/exposuresY1.tab ./processing/exposuresCurrent.tab > ./processing/exposures.list'
        )

        self.submit_all_jsons_for_sejobs(
        )  #preps all DES images that already exist
        tfin = time.time()
        print 'TOTAL SE JOBS TIME', tfin - tstart
        #sys.exit()
        self.monitor_images_from_mountain(
        )  #A loop that waits for images off mountain and submits for processing

    def submit_all_jsons_for_sejobs(self):
        obsStartTime = self.getDatetimeOfFirstJson(
            self.jsonfilelist[0])  # THIS IS A DATETIME OBJ
        currentTime = dt.utcnow()
        print '***** The current time is UTC', currentTime, '*****'
        delt = obsStartTime - currentTime

        timedelta = td(days=delt.days,
                       seconds=delt.seconds).total_seconds() / 3600.
        print '***** The time delta is ', timedelta, 'hours *****'
        # if timedelta > np.pi:

        sejob_timecushion = self.jmconfig["sejob_timecushion"]

        if timedelta > sejob_timecushion:
            for jsonfile in self.jsonfilelist:
                print 'json', jsonfile
                try:  #check if this json file is already in the submitted preprocessing database
                    thisjson = self.backend.get(
                        preprocessing,
                        {'jsonfilename': os.path.join(self.datadir, jsonfile)})
                    print 'Found this json in desgw database...'
                except preprocessing.DoesNotExist:  #do submission and then add to database

                    print 'cd diffimg-proc; ./SEMaker_RADEC.sh ' + os.path.join(
                        self.datadir, jsonfile)
                    os.chdir("diffimg-proc")
                    out = os.popen(
                        './SEMaker_RADEC.sh ' +
                        os.path.join(self.datadir, jsonfile)).read()
                    #out = os.popen('ls').read()
                    os.chdir("..")
                    print out
                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(
                            self.trigger_id,
                            'Error in creating dag for .json: ' + out)
                    else:
                        for o in out.split('\n'):
                            if 'file://' in o:
                                dagfile = o.split('/')[-1]
                                self.dagfile = os.path.join(
                                    self.processingdir,
                                    jsonfile.split('/')[-1].split('.')[0] +
                                    '_' + dagfile)
                                os.system('cp diffimg-proc/' + dagfile + ' ' +
                                          self.dagfile)
                                jobsubmitline = copy(o)
                        print self.dagfile

                    print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile

                    out = os.popen(
                        'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                        'jobsub_submit_dag -G des --role=DESGW file://' +
                        self.dagfile).read()
                    print out
                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(
                            self.trigger_id,
                            'Error in submitting .json for preprocessing: ' +
                            out)
                    else:
                        if doimmediateremove:
                            for o in out.split('\n'):
                                if 'Use job id' in o:
                                    jobid = o.split()[3]
                            out = os.popen(
                                'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                'jobsub_rm --jobid=' + jobid +
                                ' --group=des --role=DESGW').read()
                            print out

                    thisjson = preprocessing({
                        'jsonfilename': os.path.join(self.datadir, jsonfile),
                        'jobid': jobid,
                        'dagfile': self.dagfile,
                        'status': 'Submitted'
                    })

                    self.backend.save(thisjson)
                    self.backend.commit()
                    print 'saved'
                #raw_input()
                #runProcessingIfNotAlready(image, self.backend)

                #sys.exit()

        print 'Finished submitting minidagmaker with all json files'
        #sys.exit()
        #raw_input()

    # Loop queries for images from mountain and submits them
    # Need to add complexity that monitors filter strategy and waits for entire groups of images to be co-added
    def monitor_images_from_mountain(self):
        #NEED TO ADD COADD LOGIC USING STRATEGY FROM CONFIG

        exposure_filter = np.array(self.strategy['exposure_filter'],
                                   dtype='str')
        uniquefilts = np.unique(self.strategy['exposure_filter'])

        filterstrategy = {}
        for f in uniquefilts:
            filterstrategy[f] = len(exposure_filter[exposure_filter == f])

        print 'filter strategy dictionary  ', filterstrategy

        starttime = time.time()
        pptime = time.time()
        keepgoing = True
        index = -1
        submission_counter = 0
        maxsub = 10000
        postprocessingtime = 10  #every half hour fire off Tim's code for post-processing
        while keepgoing:
            #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
            index += 1
            newfireds = []
            if time.time() - starttime > 50000:
                keepgoing = False
                continue

            ofile = open(os.path.join(self.triggerdir, 'latestquery.txt'), 'w')

            ofile.write(
                "--------------------------------------------------------------------------------------------------\n"
            )
            ofile.write(
                "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT\n"
            )
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n"
            )

            print "--------------------------------------------------------------------------------------------------"
            print "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT"
            print "--------------------------------------------------------------------------------------------------"

            query = "SELECT expnum,nite,band,exptime,radeg,decdeg,propid,object FROM prod.exposure@desoper WHERE " \
                    "expnum > 475900 and propid=" + propid + "and obstype='object' ORDER BY expnum"  # latest

            self.cursor.execute(query)

            for s in self.cursor:
                ofile.write(
                    str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" +
                    str(s[3]) + "\t" + str(s[4]) + "\t" + str(s[5]) + "\t" +
                    str(s[6]) + "\t" + str(s[7]) + '\n')
                print str(s[0]) + "\t" + str(s[1]) + "\t" + str(
                    s[2]) + "\t" + str(s[3]) + "\t" + str(s[4]) + "\t" + str(
                        s[5]) + "\t" + str(s[6]) + "\t" + str(s[7])

                if not 'DESGW' in str(s[7]): continue
                #print 'exptime',float(s[3])
                if not float(s[3]) > 29.:
                    continue  #exposure must be longer than 30 seconds

                expnum = str(s[0])
                nite = str(s[1])
                band = str(s[2])
                exptime = str(s[3])

                #FIRST CHECK HERE THAT THE EXPOSURE NUMBER ISNT ALREADY IN THE DATABASE
                try:
                    exposure = self.backend.get(exposures, {'expnum': expnum})
                    print 'Found this exposure in desgw database...'
                    # print exposure.attributes
                    # if expnum == 506432:
                    #     sys.exit()
                    # self.backend.delete(exposure)
                    # self.backend.commit()
                    # exposure = self.backend.get(exposures, {'expnum': expnum})

                except exposures.DoesNotExist:  # add to database
                    #runProcessingIfNotAlready(image,self.backend)

                    print './diffimg-proc/getTiling.sh ' + expnum

                    res = os.popen('./diffimg-proc/getTiling.sh ' +
                                   expnum).readlines()
                    print res
                    #sys.exit()
                    field, tiling = res[-2], res[-1]
                    #print 'field_tiling',field_tiling
                    hexnite = field.strip() + '_' + tiling.strip() + '_' + str(
                        nite)
                    #print hexnite
                    #sys.exit()
                    #print 'hexnite',hexnite
                    print 'Creating exposure in database...', hexnite
                    #raw_input()
                    if '--' in hexnite:
                        print 'found bad example'
                        #raw_input()

                    exposure = exposures({
                        'expnum': expnum,
                        'nite': nite,
                        'field': field,
                        'tiling': tiling,
                        'hexnite': hexnite,
                        'band': band,
                        'jobid': np.nan,
                        'exptime': exptime,
                        'status': 'Awaiting additional exposures',
                        'triggerid': self.trigger_id,
                        'object': str(s[7])
                    })

                    self.backend.save(exposure)
                    self.backend.commit()

                hexnite = exposure.hexnite
                print 'hexnite', hexnite
                if '--' in hexnite:
                    print exposure.attributes
                    #raw_input()
                #raw_input()
                #sys.exit()
                try:
                    hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #self.backend.delete(hex)
                    #self.backend.commit()
                    #hex = self.backend.get(hexes, {'hexnite': hexnite})

                    #print 'Found this hex in desgw database...'
                except hexes.DoesNotExist:
                    hex = hexes({
                        'hexnite':
                        hexnite,
                        'strategy':
                        self.strategy['exposure_filter'],
                        'num_target_g':
                        len(exposure_filter[exposure_filter == 'g']),
                        'num_target_r':
                        len(exposure_filter[exposure_filter == 'r']),
                        'num_target_i':
                        len(exposure_filter[exposure_filter == 'i']),
                        'num_target_z':
                        len(exposure_filter[exposure_filter == 'z']),
                        'observed_g': [],
                        'observed_r': [],
                        'observed_i': [],
                        'observed_z': [],
                        'exposures': [],
                        'status':
                        'Awaiting additional exposures',
                        'dagfile':
                        None,
                    })

                    self.backend.save(hex)
                    self.backend.commit()
                    print hex.attributes
                    print 'created new hex'
                    #raw_input()

                if hex.status == 'Submitted for processing':
                    print 'This hex has already been submitted for processing'
                    continue

                # if '--' in hexnite:
                #     print hex.attributes
                #     raw_input()
                # if hex.status == 'Submitted for processing':
                #     print 'Hex ',hexnite,' band',band,'exposure',expnum,'has already been submitted for processing'
                #     #raw_input()
                #     continue

                if band == 'g':
                    if not expnum in hex.observed_g:
                        hex.observed_g.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'r':
                    if not expnum in hex.observed_r:
                        hex.observed_r.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'i':
                    if not expnum in hex.observed_i:
                        hex.observed_i.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'z':
                    if not expnum in hex.observed_z:
                        hex.observed_z.append(expnum)
                        hex.exposures.append(expnum)

                self.backend.save(hex)
                self.backend.commit()

                print hex.attributes

                didwork = False
                if len(hex.observed_g) == hex.num_target_g:
                    if len(hex.observed_r) == hex.num_target_r:
                        if len(hex.observed_i) == hex.num_target_i:
                            if len(hex.observed_z) == hex.num_target_z:
                                print 'All exposures in strategy satisfied! '
                                #raw_input()
                                submissionPassed = True

                                for target, exps in zip([
                                        hex.num_target_g, hex.num_target_r,
                                        hex.num_target_i, hex.num_target_z
                                ], [
                                        hex.observed_g, hex.observed_r,
                                        hex.observed_i, hex.observed_z
                                ]):

                                    if target == 0: continue
                                    exposurestring = ''
                                    logstring = ''
                                    for ex in exps:
                                        exposurestring += ex + ' '
                                        logstring += ex + '_'

                                    print 'cd diffimg-proc; source DAGMaker.sh ' + exposurestring
                                    os.chdir("diffimg-proc")
                                    #out = os.popen('ls').read()
                                    out = os.popen('./DAGMaker.sh ' +
                                                   exposurestring).read()
                                    os.chdir("..")
                                    print out
                                    f = open(
                                        os.path.join(
                                            self.processingdir,
                                            logstring + hexnite + '.log'), 'w')
                                    f.write(out)
                                    f.close()
                                    tt = time.time()
                                    if not 'To submit this DAG do' in out:
                                        dt.sendEmailSubject(
                                            self.trigger_id,
                                            'Error in creating dag for desgw hex: '
                                            + out)
                                        submissionPassed = False
                                    else:
                                        for o in out.split('\n'):
                                            if 'file://' in o:
                                                dagfile = o.split('/')[-1]
                                                self.dagfile = os.path.join(
                                                    self.processingdir,
                                                    logstring + 'job.dag')
                                                os.system('cp diffimg-proc/' +
                                                          dagfile + ' ' +
                                                          self.dagfile)
                                        print self.dagfile

                                    print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile

                                    out = os.popen(
                                        'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                        'jobsub_submit_dag -G des --role=DESGW file://'
                                        + self.dagfile).read()
                                    print out
                                    if 'non-zero exit status' in out:
                                        dt.sendEmailSubject(
                                            self.trigger_id,
                                            'Error in submitting hex dag for processing: '
                                            + out)
                                        submissionPassed = False
                                    else:
                                        if doimmediateremove:
                                            for o in out.split('\n'):
                                                if 'Use job id' in o:
                                                    jobid = o.split()[3]
                                            out = os.popen(
                                                'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                                'jobsub_rm --jobid=' + jobid +
                                                ' --group=des --role=DESGW'
                                            ).read()
                                            print out
                                    ttt = time.time()
                                    #print 'TOTAL JOBSUB FOR A SINGLE DESGW IMAGE',ttt-tt

                                    #sys.exit()
                                #raw_input()
                                if submissionPassed:

                                    hex.status = 'Submitted for processing'
                                    hex.dagfile = self.dagfile
                                    self.backend.save(hex)
                                    self.backend.commit()

                                    for expn in hex.exposures:
                                        print expn, 'updated in database to Submitted For Processing'
                                        exp = self.backend.get(
                                            exposures, {'expnum': expn})
                                        exp.status = 'Submitted for processing'
                                        self.backend.save(exp)
                                        self.backend.commit()
                                    didwork = True
                                print 'didwork', didwork
                                print 'dagfile', self.dagfile
                                #raw_input()

                if not didwork:
                    print 'Could not find all images in strategy for this hex... Added hex', hexnite,' to database ' \
                        'and will continue waiting...'
                    #raw_input()

                #HERE YOU NEED TO ADD TO HEXSTRATEGYDICT DATABASE

                if time.time() - pptime > postprocessingtime:  # fire post-processing every postprocessingtime seconds
                    pptime = time.time()
                    print '***** Firing post processing script *****'
                    #sys.exit()
                    self.submit_post_processing()
                #sys.exit()
                print 'Waiting 10s to check from mountain...'
                #sys.exit()
                time.sleep(10)  #looping over checking the mountain top

            # cfiles = os.listdir(os.path.join(trigger_path,trigger_id,'candidates'))
            # for f in cfiles:
            #     if f.split('.')[-1] == 'npz':
            #         cp.makeNewPage(f)

    def submit_post_processing(self):
        firedlist = open('./processing/firedlist.txt', 'r')
        fl = firedlist.readlines()
        firedlist.close()
        print fl
        fl = ['475914', '475915', '475916', '482859', '482860', '482861']  # hard-coded override of the fired list read above
        expnumlist = ''
        for f in fl:
            expnumlist += f.strip() + ' '

        print 'FIRING TIMs CODE'

        gwpostdir = os.environ['GWPOST_DIR']
        print 'source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
                        python '                                +os.path.join(gwpostdir,'postproc.py')\
                         +' --expnums ' + expnumlist\
                         + ' --outputdir ' + os.path.join(self.trigger_path,self.trigger_id,'candidates')\
                         + ' --triggerid '+self.trigger_id+' --season 46 --ups True'
        # os.system('source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
        #                  python '+os.path.join(gwpostdir,'postproc.py')\
        #                  +' --expnums ' + expnumlist\
        #                  + ' --outputdir ' + os.path.join(trigger_path,trigger_id,'candidates')\
        #                  + ' --triggerid '+trigger_id+' --season 46 --ups True' )

        #pid = os.spawnlp(os.P_WAIT, "source", os.path.join(gwpostdir, 'diffimg_setup.sh'))
        args = ['ssh -t [email protected] "source '+ os.path.join(gwpostdir, 'mi_setup.sh')+'; '+
                        'yes | python '+os.path.join(gwpostdir,'postproc.py')\
                         +' --expnums ' + expnumlist\
                         + ' --outputdir ' + os.path.join(self.trigger_path,self.trigger_id,'candidates')\
                         + ' --triggerid '+self.trigger_id+' --season 46 --ups True"'
                ]
        print args

        #p = subprocess.Popen(args,stdout=PIPE, stderr=PIPE,shell=True)
        #print p.communicate()
        #p = subprocess.Popen(args,stdin=None, stdout=None, stderr=None, close_fds=True,shell=True)
        return

    def getDatetimeOfFirstJson(self, jsonstring):
        js = jsonstring.split('UTC')[1]  #-2015-12-27-3:2:00.json
        #date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S.json')
        date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S-test.json')
        print '***** Datetime of first observation UTC', date_object, '*****'
        return date_object

    def sortHexes(self):
        pass
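The DAG-submission loops in this script repeatedly scan the jobsub output for the "Use job id" line and take its fourth token as the job id. A minimal sketch of that pattern as a standalone helper (the name parse_jobsub_jobid is hypothetical, not part of the original script):

def parse_jobsub_jobid(out):
    """Return the job id from jobsub_submit/jobsub_submit_dag output, or None if absent."""
    # Mirrors the inline pattern used above: find the "Use job id" line and
    # take its fourth whitespace-separated token.
    for line in out.split('\n'):
        if 'Use job id' in line:
            return line.split()[3]
    return None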
Example #16
0

for file in os.listdir(dir):
    if file.endswith(".json"):
        fdata = open(dir + file).read()
        data = json.loads(fdata)

        total_words = data['total_words']
        word_matches = data['match_total']
        tf = computeTF(total_words, word_matches)
        idf = computeIDF()
        tf_idf = computeTF_IDF(tf, idf)
        data['tf'] = tf
        data['idf'] = idf
        data['tf_idf'] = tf_idf
        link = backend.get(Link, {'pk': data['uri']})
        data['location'] = link.location
        uri[data['tf_idf']] = data

table = open("tfidf_table.tex", "w")
table.write("\\begin{table}[h!]\n")
table.write("\\small\n")
table.write("\\centering\n")
table.write(" \\begin{tabular}{|c | c | c | c | p{10cm} |}\n")
table.write(" \\hline\n")
table.write("Rank & TFIDF & TF & IDF & URI \\\\\n")
table.write("\\hline\n")
count = 1
for d in reversed(sorted(uri.keys())):
    val = uri[d]
    line = str(count) + " & {:10.6f} & {:10.6f} & {:10.6f} & ".format(val['tf_idf'], val['tf'], val['idf'])+val['location']
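This snippet assumes computeTF, computeIDF and computeTF_IDF helpers, a Link document class, a populated uri dict, and a backend defined elsewhere. A rough sketch of what the three helpers might look like, under the assumption that TF is matches over total words and IDF is derived from module-level document counts (docs_total and docs_with_term are assumed names, not from the original):

import math

docs_total = 100      # assumed: number of documents in the corpus
docs_with_term = 12   # assumed: number of documents containing the query term

def computeTF(total_words, word_matches):
    # term frequency: matches normalised by document length
    return float(word_matches) / total_words if total_words else 0.0

def computeIDF():
    # inverse document frequency from the module-level counts
    return math.log(float(docs_total) / docs_with_term) if docs_with_term else 0.0

def computeTF_IDF(tf, idf):
    return tf * idf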
Example #17
0
loc = "./my-db"
backend = FileBackend(loc)

print("Created Backend", loc)

the_godfather.save(backend)
marlon_brando.save(backend)
al_pacino.save(backend)

print("Backend Saved and committed:", os.path.realpath(os.curdir))


# print(backend.get(Movie,{'pk':1}))
# or...
the_godfather = backend.get(Movie,{'name' : 'The Godfather'})
print("the_godfather",the_godfather)

the_godfather.cast = {'Don Vito Corleone' : marlon_brando, 'Michael Corleone' : al_pacino}

#Documents stored within other objects will be automatically converted to database references.

marlon_brando.performances = [the_godfather]
al_pacino.performances = [the_godfather]

marlon_brando.save(backend)
al_pacino.save(backend)
the_godfather.save(backend)
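
This snippet assumes that Movie and Actor Document subclasses and the the_godfather, marlon_brando and al_pacino instances already exist. A minimal setup sketch (the attribute values are illustrative assumptions):

import os
from blitzdb import Document, FileBackend

class Movie(Document):
    pass

class Actor(Document):
    pass

the_godfather = Movie({'name': 'The Godfather', 'year': 1972})
marlon_brando = Actor({'name': 'Marlon Brando'})
al_pacino = Actor({'name': 'Al Pacino'})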


Example #18
0
class Profiles(object):
    def __init__(self, bot):
        self.bot = bot

        self.cfg = PluginConfig(self)
        self.db = FileBackend(self.cfg.get('main_db'))

        mtt = MessageRetargeter(bot)
        self.msg = mtt.msg

        web = Flask(__name__, template_folder=tmpl_dir)
        mako = MakoTemplates()
        mako.init_app(web)

        # Add routes here
        web.add_url_rule('/edit_web/<args>', 'edit_web', self.edit_web, methods=['GET', 'POST'])

        _thread.start_new_thread(web.run, (), {'host': '0.0.0.0'})

    @command
    def learn(self, mask, target, args):
        """
        Stores information allowing for later retrieval. Names are downcased for sanity.


        Usage:
            %%learn <name> <information>...
        """
        name = args['<name>'].lower()
        info = ' '.join(args['<information>'])

        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            profile = Profile(
                {
                    'name': name,
                    'owner': mask.nick.lower(),
                    'lines': [info],
                    'random': False,
                    'public': False
                }
            )
            profile.save(self.db)
            self.db.commit()
            self.msg(mask, target, 'Your data "%s" has been stored.' % name)
            return

        except Profile.MultipleDocumentsReturned:
            self.msg(mask, target, "Found more than one %s. This is bad! Please notify the bot owner." % name)
            return

        if is_allowed_to(Action.edit, mask.nick, profile):
            lines_to_append = profile.lines
            lines_to_append.append(info)
            profile.save(self.db)
            self.db.commit()
            self.msg(mask, target, 'Your data "%s" has been updated.' % name)
            return
        else:
            self.msg(mask, target, 'You are not authorized to edit "%s". Ask %s instead.'
                     % (name, profile.owner))
            return

    @command
    def query(self, mask, target, args):
        """
        Retrieve the information associated with <name>. If the item is marked random, then one random item will be
        returned.

        Usage:
            %%query <name>
            ?? <name>
        """
        name = args['<name>'].lower()

        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return

        if profile.random:
            self.msg(mask, target, get_flags(profile) + random.choice(profile.lines))
        else:
            for line in profile.lines:
                self.msg(mask, target, get_flags(profile) + line)
                if len(profile.lines) >= int(self.cfg.get('throttle_max')):
                    sleep(int(self.cfg.get('throttle_time')))

    @command
    def forget(self, mask, target, args):
        """
        Delete <name> from the records. Only the person who created the item can remove it.

        Usage:
            %%forget <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return

        if is_allowed_to(Action.delete, mask.nick, profile):
            self.db.delete(profile)
            self.db.commit()
            self.msg(mask, target, "%s has been deleted." % name)
        else:
            self.msg(mask, target, 'You are not authorized to delete "%s". Ask %s instead.'
                     % (name, profile.owner))

    @command(permission='admin', show_in_help_list=False)
    def rmf(self, mask, target, args):
        """
        Delete <name> from the records without checking permissions.

        Usage:
            %%rmf <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return

        self.db.delete(profile)
        self.db.commit()
        self.msg(mask, target, "%s has been deleted." % name)

    @command(permission='admin', show_in_help_list=False)
    def chown(self, mask, target, args):
        """
        Change the owner of <name> to <newowner>.

        Usage:
            %%chown <name> <newowner>
        """
        name = args['<name>'].lower()
        newowner = args['<newowner>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        profile.owner = newowner
        self.db.save(profile)
        self.db.commit()
        self.msg(mask, target, "%s is now owned by %s." % (name, newowner))

    @command
    def toggle_public(self, mask, target, args):
        """
        Changes whether <name> is publicly editable or not

        Usage:
            %%toggle_public <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return

        if is_allowed_to(Action.edit, mask.nick, profile):
            if profile.public:
                profile.public = False
                self.msg(mask, target, '"%s" is no longer publicly editable.' % name)
            else:
                profile.public = True
                self.msg(mask, target, '"%s" is now publicly editable.' % name)
            self.db.save(profile)
            self.db.commit()
            return
        else:
            self.msg(mask, target, 'You are not authorized to edit "%s". Ask %s instead.'
                     % (name, profile.owner))
            return

    @command
    def toggle_random(self, mask, target, args):
        """
        Toggle the randomness of an item, so that it shows a single random line instead of all lines when queried.

        Usage:
            %%toggle_random <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return

        if is_allowed_to(Action.edit, mask.nick, profile):
            profile.random = not profile.random
            self.msg(mask, target, 'Random mode for %s is set to: %s' % (profile.name, profile.random))
            profile.save(self.db)
            self.db.commit()
        else:
            self.msg(mask, target, 'You are not authorized to edit "%s". Ask %s instead.'
                     % (name, profile.owner))
            irc3.base.logging.log(irc3.base.logging.WARN,
                                  "%s tried to edit %s, but can't since it's owned by %s" %
                                  (mask.nick, profile.name, profile.owner)
                                  )

    @event("(@(?P<tags>\S+) )?:(?P<mask>\S+) PRIVMSG (?P<target>\S+) :\?\? (?P<data>.*)")
    def easy_query(self, mask, target, data):
        self.bot.get_plugin(Commands).on_command(cmd='query', mask=mask, target=target, data=data)

    ####
    # All web stuff below this point
    #

    @command
    def edit(self, mask, target, args):
        """
        Sends you a webpage link to edit <name>. Great for longer profiles. Make sure to keep the URL you are given
        secure, as with it, anyone can edit your profiles.

        Usage:
            %%edit <name>
        """
        # TODO: Clear any existing sessions the user has

        data = {
            'id': str(uuid.uuid4()),
            'name': mask.nick,
            'profile': args['<name>']
        }

        name = args['<name>'].lower()

        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return

        if is_allowed_to(Action.fulledit, mask.nick, profile):
            newses = Session(data)
            self.db.save(newses)
            self.db.commit()
            self.bot.privmsg(mask.nick,
                             "An editor has been set up for you at http://skaianet.tkware.us:5000/edit_web/%s" % str(
                                 data['id']))
            self.bot.privmsg(mask.nick,
                             "Be very careful not to expose this address - with it, anyone can edit your stuff")
        else:
            self.msg(mask, target, 'You are not authorized to webedit "%s". Ask %s instead.'
                     % (name, profile.owner))

    def edit_web(self, args):
        # Web endpoint: /edit_web/<args>

        if request.method == 'GET':
            # Does the session exist?
            try:
                edit_session = self.db.get(Session, {'id': args})
            except Session.DoesNotExist:
                return render_template('youfail.html',
                                       bot=self.bot,
                                       failreason='Invalid Session',
                                       userfail=True)
            # Does the profile exist?
            name = edit_session.profile
            try:
                profile = self.db.get(Profile, {'name': name.lower()})
            except Profile.DoesNotExist:
                return render_template('youfail.html',
                                       bot=self.bot,
                                       failreason='I cannot find "%s" in the records.' % name
                                       )
            # Kick off to the edit page!
            return render_template('edit.html',
                                   bot=self.bot,
                                   profile=profile,
                                   username=edit_session.name,
                                   sessionid=edit_session.id
                                   )
        elif request.method == 'POST':
            # We have to look up the session ID one more time. Something could have happened to the profile
            # since we created the session.
            try:
                edit_session = self.db.get(Session, {'id': request.form['ID']})
            except Session.DoesNotExist:
                return render_template('youfail.html',
                                       bot=self.bot,
                                       failreason='Invalid Session',
                                       userfail=True)
            name = request.form['profile']
            try:
                profile = self.db.get(Profile, {'name': request.form['profile']})
            except Profile.DoesNotExist:
                return render_template('youfail.html',
                                       bot=self.bot,
                                       failreason='I cannot find "%s" in the records.' % name,
                                       userfail=True
                                       )

            # Now with the profile in hand, blank the lines field and rebuild it from the form.
            # Here we grab all numeric items from the submission, sort it, and one by one refill the DB object.
            lines = [item for item in request.form if item.isdigit()]
            lines.sort()
            profile.lines = []
            for item in lines:
                profile.lines.append(request.form[item])
            self.db.save(profile)
            self.db.delete(edit_session)
            self.db.commit()
            return render_template('done.html',
                                   bot=self.bot,
                                   profile=profile.name
                                   )
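
The Profiles plugin above relies on Profile and Session Document classes, an Action enum and an is_allowed_to permission helper defined elsewhere in the project. A rough sketch of the shapes this code appears to expect (the enum members and the permission rule are assumptions inferred from the calls above):

from enum import Enum
from blitzdb import Document

class Profile(Document):
    pass   # fields used above: name, owner, lines, random, public

class Session(Document):
    pass   # fields used above: id, name, profile

class Action(Enum):
    edit = 1
    delete = 2
    fulledit = 3

def is_allowed_to(action, nick, profile):
    # Assumed rule: owners may do anything; public profiles may be edited by anyone.
    if nick.lower() == profile.owner:
        return True
    return action == Action.edit and profile.public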
Example #19
0
class BlitzDB(Database):

    key = 'pk'
    _database_type = 'blitzdb'

    class Data(Document):
        pass

    class Cutout(Document):
        pass

    class Fingerprint(Document):
        pass

    class Similarity(Document):
        pass

    def _get_table(self, table_name):
        if table_name == 'data':
            return BlitzDB.Data
        elif table_name == 'cutout':
            return BlitzDB.Cutout
        elif table_name == 'fingerprint':
            return BlitzDB.Fingerprint
        elif table_name == 'similarity':
            return BlitzDB.Similarity
        else:
            log.error('BAD TABLE NAME {}'.format(table_name))

    def __init__(self, filename):
        self._filename = filename
        self._backend = FileBackend(self._filename)

    def save(self, table, data):

        # Convert to dict if not one already
        if not isinstance(data, dict):
            data = data.save()

        blitz_table = self._get_table(table)
        data.update({'pk': data['uuid']})
        save_id = self._backend.save(blitz_table(data))
        self._backend.commit()
        return save_id['pk']

    def find(self, table, key=None):
        blitz_table = self._get_table(table)
        factory = get_factory(table)

        if key is None:
            return [
                factory(dict(x), db=self)
                for x in self._backend.filter(blitz_table, {})
            ]
        elif isinstance(key, list):
            return [
                factory(dict(x), db=self) for x in self._backend.filter(
                    blitz_table, {'pk': {
                        '$in': key
                    }})
            ]
        else:
            return factory(dict(self._backend.get(blitz_table, {'pk': key})),
                           db=self)

    def count(self, table):
        blitz_table = self._get_table(table)
        return len(self._backend.filter(blitz_table, {}))

    def update(self, table, key, data):
        blitz_table = self._get_table(table)
        entry = self._backend.get(blitz_table, {'pk': key})
        for k, v in data.items():
            setattr(entry, k, v)
        entry.save()
        self._backend.commit()

    def close(self):
        pass

    def delete_database(self):
        shutil.rmtree(self._filename)
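
A short usage sketch for the wrapper above; the database path, table name and field values are illustrative assumptions:

db = BlitzDB('./example-db')
db.save('data', {'uuid': 'abc-123', 'filename': 'image_001.fits'})
print(db.count('data'))   # number of documents in the 'data' table
db.delete_database()      # removes the on-disk directory created by FileBackend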
Example #20
0
from blitzdb import FileBackend

loc = "./my-db"
backend = FileBackend(loc)

print("Created Backend", loc)

the_godfather.save(backend)
marlon_brando.save(backend)
al_pacino.save(backend)

print("Backend Saved and committed:", os.path.realpath(os.curdir))

# print(backend.get(Movie,{'pk':1}))
# or...
the_godfather = backend.get(Movie, {'name': 'The Godfather'})
print("the_godfather", the_godfather)

the_godfather.cast = {
    'Don Vito Corleone': marlon_brando,
    'Michael Corleone': al_pacino
}

#Documents stored within other objects will be automatically converted to database references.

marlon_brando.performances = [the_godfather]
al_pacino.performances = [the_godfather]

marlon_brando.save(backend)
al_pacino.save(backend)
the_godfather.save(backend)
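
Note that this snippet never calls backend.commit(); BlitzDB's FileBackend normally buffers writes until commit() is called (unless it was created with autocommit enabled), so a commit is usually needed before the saves above are persisted. A small follow-up sketch:

# Flush the pending saves to disk.
backend.commit()

# The movie can now be read back, references and all.
print(backend.get(Movie, {'name': 'The Godfather'}).name)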
Example #21
0
File: jobmanager.py Project: djbrout/gw_mi
class eventmanager:
    def __init__(self, trigger_id, jsonfilelist, triggerdir, datadir, real, trigger_path):

        #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
        tstart = time.time()

        if real:
            self.backend = FileBackend("./realdb")
        else:
            self.backend = FileBackend("./testdb")

        try:
            self.thisevent = self.backend.get(Trigger, {'id': trigger_id})
            print 'Found this event in desgw database...'
        except Trigger.DoesNotExist:
            self.thisevent = Trigger({
                'id':trigger_id,
                'jsonfilelist':jsonfilelist,
                'triggerpath':triggerdir,
                'mapspath':datadir,
                'jobids':[
                    (0,'jsonfile_corresponding_to_jobid.json'),
                ],
                'postprocint': 0
            })
            print 'Database entry created!'


        self.trigger_id = trigger_id
        self.trigger_path = trigger_path

        self.backend.save(self.thisevent)
        self.backend.commit()

        with open(os.path.join(triggerdir,"strategy.yaml"), "r") as f:
            self.config = yaml.safe_load(f);
        self.filterobslist = np.array(self.config['exposure_filter'],dtype='str')
        self.strategydict = {}

        for f in np.unique(self.filterobslist):
            self.strategydict[f] = len(self.filterobslist[self.filterobslist == f])

        self.connection = ea.connect(DATABASE)
        self.cursor = self.connection.cursor()

        self.jsonfilelist = jsonfilelist

        print self.jsonfilelist
        if hardjson:
            self.jsonfilelist = hj

        #self.pp = subprocess.Popen('echo starting',stdout=PIPE, stderr=PIPE,shell=True)

        self.trigger_id = trigger_id
        self.datadir = datadir
        self.triggerdir = triggerdir
        self.processingdir = os.path.join(self.triggerdir,'PROCESSING')
        if not os.path.exists(self.processingdir):
            os.makedirs(self.processingdir)

        dire = './processing/' + trigger_id + '/'
        if not os.path.exists(dire):
            os.makedirs(dire)

        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.strategy = yaml.safe_load(f)

        with open("jobmanager.yaml", "r") as g:
            self.jmconfig = yaml.safe_load(g);


        q1 = "select expnum,nite,mjd_obs,telra,teldec,band,exptime,propid,obstype,object from exposure where " \
             "nite>20130828 and nite<20150101 and expnum<300000 and obstype='object' order by expnum"  # y1 images
        self.connection.query_and_save(q1, './processing/exposuresY1.tab')

        q2 = "select expnum,nite,mjd_obs,radeg,decdeg,band,exptime,propid,obstype,object from prod.exposure where " \
             "nite>20150901 and obstype='object' order by expnum"  # y2 and later
        self.connection.query_and_save(q2, './processing/exposuresCurrent.tab')

        os.system('cat ./processing/exposuresY1.tab ./processing/exposuresCurrent.tab > ./processing/exposures.list')

        #self.submit_post_processing()

        self.submit_all_jsons_for_sejobs()#preps all DES images that already exist
        tfin = time.time()
        print 'TOTAL SE JOBS TIME', tfin - tstart
        #sys.exit()
        self.monitor_images_from_mountain()#A loop that waits for images off mountain and submits for processing

    def submit_all_jsons_for_sejobs(self):
        obsStartTime = self.getDatetimeOfFirstJson(self.jsonfilelist[0])  # THIS IS A DATETIME OBJ
        currentTime = dt.utcnow()
        print '***** The current time is UTC', currentTime, '*****'
        delt = obsStartTime - currentTime

        timedelta = td(days=delt.days, seconds=delt.seconds).total_seconds() / 3600.
        print '***** The time delta is ', timedelta, 'hours *****'
        # if timedelta > np.pi:

        sejob_timecushion = self.jmconfig["sejob_timecushion"]

        if timedelta > sejob_timecushion:
            for jsonfile in self.jsonfilelist:
                print 'json',jsonfile
                try: #check if this json file is already in the submitted preprocessing database
                    thisjson = self.backend.get(preprocessing, {'jsonfilename': os.path.join(self.datadir, jsonfile)})
                    print 'Found this json in desgw database...'
                except preprocessing.DoesNotExist: #do submission and then add to database

                    print 'cd diffimg-proc; ./SEMaker_RADEC.sh '+os.path.join(self.datadir, jsonfile)
                    os.chdir("diffimg-proc")
                    out = os.popen('./SEMaker_RADEC.sh '+os.path.join(self.datadir, jsonfile)).read()
                    of = open(os.path.join(self.processingdir,jsonfile.split('/')[-1].split('.')[0]+'.SEMakerlog'),'w')
                    of.write(out)
                    of.close()
                    #out = os.popen('ls').read()
                    os.chdir("..")
                    print out
                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(self.trigger_id,'Error in creating SEMaker dag for .json: '+out)
                    else:
                        for o in out.split('\n'):
                            if 'file://' in o:
                                dagfile = o.split('/')[-1]
                                self.dagfile = os.path.join(self.processingdir,jsonfile.split('/')[-1].split('.')[0]+'_'+dagfile)
                                os.system('cp diffimg-proc/'+dagfile+' '+self.dagfile)
                                jobsubmitline = copy(o)
                        print self.dagfile

                    print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://'+self.dagfile

                    out = os.popen(
                        'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                        'jobsub_submit_dag -G des --role=DESGW file://'+self.dagfile).read()
                    print out

                    of = open(os.path.join(self.processingdir,
                                           jsonfile.split('/')[-1].split('.')[0] + '.SEdagsubmitlog'), 'w')
                    of.write(out)
                    of.close()

                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(self.trigger_id, 'Error in submitting .json for preprocessing: ' + out)
                    else:
                        for o in out.split('\n'):
                            if 'Use job id' in o:
                                jobid = o.split()[3]
                        if doimmediateremove:
                            out = os.popen(
                                'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                'jobsub_rm --jobid=' + jobid + ' --group=des --role=DESGW').read()
                            print out

                    thisjson = preprocessing({
                        'jsonfilename': os.path.join(self.datadir, jsonfile),
                        'jobid': jobid,
                        'dagfile': self.dagfile,
                        'status' : 'Submitted'
                    })

                    self.backend.save(thisjson)
                    self.backend.commit()
                    print 'saved'
                    #sys.exit()
                #raw_input()
                #runProcessingIfNotAlready(image, self.backend)

                #sys.exit()

        print 'Finished submitting minidagmaker with all json files'
        #sys.exit()
        #raw_input()

    # Loop queries for images from mountain and submits them
    # Need to add complexity that monitors filter strategy and waits for entire groups of images to be co-added
    def monitor_images_from_mountain(self):
        #NEED TO ADD COADD LOGIC USING STRATEGY FROM CONFIG

        exposure_filter = np.array(self.strategy['exposure_filter'],dtype='str')
        uniquefilts = np.unique(self.strategy['exposure_filter'])

        filterstrategy = {}
        for f in uniquefilts:
            filterstrategy[f] = len(exposure_filter[exposure_filter == f])

        print 'filter strategy dictionary  ', filterstrategy

        starttime = time.time()
        pptime = time.time()
        keepgoing = True
        index = -1
        submission_counter = 0
        maxsub = 10000
        postprocessingtime = 2000 #every half hour fire off Tim's code for post-processing
        while keepgoing:
            #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
            index += 1
            newfireds = []
            if time.time() - starttime > 50000:
                keepgoing = False
                continue

            ofile = open(os.path.join(self.triggerdir , 'latestquery.txt'), 'w')

            ofile.write(
                "--------------------------------------------------------------------------------------------------\n")
            ofile.write("EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT\n")
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n")

            print "--------------------------------------------------------------------------------------------------"
            print "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT"
            print "--------------------------------------------------------------------------------------------------"

            query = "SELECT expnum,nite,band,exptime,radeg,decdeg,propid,object FROM prod.exposure@desoper WHERE " \
                    "expnum > 475900 and propid=" + propid + "and obstype='object' ORDER BY expnum"  # latest


            self.cursor.execute(query)

            for s in self.cursor:
                ofile.write(
                    str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" + str(s[3]) + "\t" + str(s[4]) + "\t" + str(
                        s[5]) + "\t" + str(s[6]) + "\t" + str(s[7]) + '\n')
                print str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" + str(s[3]) + "\t" + str(
                    s[4]) + "\t" + str(s[5]) + "\t" + str(s[6]) + "\t" + str(s[7])

                if not 'DESGW' in str(s[7]): continue
                #print 'exptime',float(s[3])
                if not float(s[3]) > 29.: continue #exposure must be longer than 30 seconds

                expnum = str(s[0])
                nite = str(s[1])
                band = str(s[2])
                exptime = str(s[3])


                #FIRST CHECK HERE THAT THE EXPOSURE NUMBER ISNT ALREADY IN THE DATABASE
                try:
                    exposure = self.backend.get(exposures, {'expnum': expnum})
                    print 'Found this exposure in desgw database...'
                    # print exposure.attributes
                    # if expnum == 506432:
                    #     sys.exit()
                    # self.backend.delete(exposure)
                    # self.backend.commit()
                    # exposure = self.backend.get(exposures, {'expnum': expnum})

                except exposures.DoesNotExist:  # add to database
                    #runProcessingIfNotAlready(image,self.backend)

                    print './diffimg-proc/getTiling.sh '+expnum

                    res = os.popen('./diffimg-proc/getTiling.sh '+expnum).readlines()
                    print res
                    #sys.exit()
                    field,tiling =res[-2],res[-1]
                    #print 'field_tiling',field_tiling
                    hexnite = field.strip()+'_'+tiling.strip()+'_'+str(nite)
                    #print hexnite
                    #sys.exit()
                    #print 'hexnite',hexnite
                    print 'Creating exposure in database...',hexnite
                    #raw_input()
                    if '--' in hexnite:
                        print 'found bad example'
                        #raw_input()

                    exposure = exposures({
                        'expnum':expnum,
                        'nite':nite,
                        'field':field,
                        'tiling':tiling,
                        'hexnite':hexnite,
                        'band':band,
                        'jobid':np.nan,
                        'exptime':exptime,
                        'status':'Awaiting additional exposures',
                        'triggerid': self.trigger_id,
                        'object':str(s[7])
                    })

                    self.backend.save(exposure)
                    self.backend.commit()

                hexnite = exposure.hexnite
                print 'hexnite',hexnite
                if '--' in hexnite:
                    print exposure.attributes
                    #raw_input()
                #raw_input()
                #sys.exit()
                try:
                    hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #self.backend.delete(hex)
                    #self.backend.commit()
                    #hex = self.backend.get(hexes, {'hexnite': hexnite})

                    #print 'Found this hex in desgw database...'
                except hexes.DoesNotExist:
                    hex = hexes({
                        'hexnite': hexnite,
                        'strategy': self.strategy['exposure_filter'],
                        'num_target_g': len(exposure_filter[exposure_filter == 'g']),
                        'num_target_r': len(exposure_filter[exposure_filter == 'r']),
                        'num_target_i': len(exposure_filter[exposure_filter == 'i']),
                        'num_target_z': len(exposure_filter[exposure_filter == 'z']),
                        'observed_g': [],
                        'observed_r': [],
                        'observed_i': [],
                        'observed_z': [],
                        'exposures': [],
                        'status': 'Awaiting additional exposures',
                        'dagfile' : None,
                    })

                    self.backend.save(hex)
                    self.backend.commit()
                    print hex.attributes
                    print 'created new hex'
                    #raw_input()

                if hex.status == 'Submitted for processing':
                    print 'This hex has already been submitted for processing'
                    continue

                # if '--' in hexnite:
                #     print hex.attributes
                #     raw_input()
                # if hex.status == 'Submitted for processing':
                #     print 'Hex ',hexnite,' band',band,'exposure',expnum,'has already been submitted for processing'
                #     #raw_input()
                #     continue

                if band == 'g':
                    if not expnum in hex.observed_g:
                        hex.observed_g.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'r':
                    if not expnum in hex.observed_r:
                        hex.observed_r.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'i':
                    if not expnum in hex.observed_i:
                        hex.observed_i.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'z':
                    if not expnum in hex.observed_z:
                        hex.observed_z.append(expnum)
                        hex.exposures.append(expnum)

                self.backend.save(hex)
                self.backend.commit()

                print hex.attributes

                didwork = False
                if len(hex.observed_g) == hex.num_target_g:
                    if len(hex.observed_r) == hex.num_target_r:
                        if len(hex.observed_i) == hex.num_target_i:
                            if len(hex.observed_z) == hex.num_target_z:
                                print 'All exposures in strategy satisfied! '
                                #raw_input()
                                submissionPassed = True

                                for target, exps in zip([hex.num_target_g,hex.num_target_r,hex.num_target_i,hex.num_target_z],
                                                        [hex.observed_g,hex.observed_r,hex.observed_i,hex.observed_z]):

                                    if target == 0: continue
                                    exposurestring = ''
                                    logstring = ''
                                    for ex in exps:
                                        exposurestring += ex+' '
                                        logstring += ex+'_'

                                    print 'cd diffimg-proc; source DAGMaker.sh ' + exposurestring
                                    os.chdir("diffimg-proc")
                                    #out = os.popen('ls').read()
                                    out = os.popen('./DAGMaker.sh ' + exposurestring ).read()


                                    os.chdir("..")
                                    print out
                                    f = open(os.path.join(self.processingdir,logstring+hexnite+'.dagmakerlog'),'w')
                                    f.write(out)
                                    f.close()
                                    tt = time.time()
                                    if not 'To submit this DAG do' in out:
                                        dt.sendEmailSubject(self.trigger_id, 'Error in creating dag for desgw hex: ' + out)
                                        submissionPassed = False
                                    else:
                                        for o in out.split('\n'):
                                            if 'file://' in o:
                                                dagfile = o.split('/')[-1]
                                                self.dagfile = os.path.join(self.processingdir,logstring+'job.dag')
                                                os.system('cp diffimg-proc/' + dagfile + ' ' + self.dagfile)
                                        print self.dagfile

                                    print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile

                                    out = os.popen(
                                        'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                        'jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile).read()
                                    print out
                                    f = open(os.path.join(self.processingdir, logstring + hexnite + '.dagsubmitlog'), 'w')
                                    f.write(out)
                                    f.close()

                                    if 'non-zero exit status' in out:
                                        dt.sendEmailSubject(self.trigger_id,
                                                            'Error in submitting hex dag for processing: ' + out)
                                        submissionPassed = False
                                    else:
                                        if doimmediateremove:
                                            for o in out.split('\n'):
                                                if 'Use job id' in o:
                                                    jobid = o.split()[3]
                                            out = os.popen(
                                                'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                                'jobsub_rm --jobid=' + jobid + ' --group=des --role=DESGW').read()
                                            print out
                                    ttt = time.time()
                                    #print 'TOTAL JOBSUB FOR A SINGLE DESGW IMAGE',ttt-tt

                                    #sys.exit()
                                #raw_input()
                                if submissionPassed:

                                    hex.status = 'Submitted for processing'
                                    hex.dagfile = self.dagfile
                                    self.backend.save(hex)
                                    self.backend.commit()

                                    for expn in hex.exposures:
                                        print expn, 'updated in database to Submitted For Processing'
                                        exp = self.backend.get(exposures, {'expnum': expn})
                                        exp.status = 'Submitted for processing'
                                        self.backend.save(exp)
                                        self.backend.commit()
                                    didwork = True
                                print 'didwork',didwork
                                print 'dagfile',self.dagfile
                                #sys.exit()
                                #raw_input()

                if not didwork:
                    print 'Could not find all images in strategy for this hex... Added hex', hexnite,' to database ' \
                        'and will continue waiting...'
                    #raw_input()


                if time.time() - pptime > postprocessingtime: #happens every 30 minutes or so...
                    pptime = time.time()
                    print '***** Firing post processing script *****'
                    #sys.exit()

                    #ppout = self.pp.communicate()

                    # if self.thisevent.postprocint > 0:
                    #     print ppout
                    #     f = open(os.path.join(self.processingdir,'postproc_attempt'+str(int(self.thisevent.postprocint))+'.log'),'w')
                    #     f.write(ppout)
                    #     f.close()
                    self.thisevent.postprocint += 1
                    self.backend.save(self.thisevent)
                    self.backend.commit()

                    self.submit_post_processing()
                #sys.exit()
                #print 'Waiting 10s to check from mountain...'
                #sys.exit()
                time.sleep(10)#looping over checking the mountain top

            # cfiles = os.listdir(os.path.join(trigger_path,trigger_id,'candidates'))
            # for f in cfiles:
            #     if f.split('.')[-1] == 'npz':
            #         cp.makeNewPage(f)

    def submit_post_processing(self):
        #firedlist = open('./processing/firedlist.txt', 'r')
        #fl = firedlist.readlines()
        #firedlist.close()
        #print fl
        #fl = ['475914','475915','475916','482859','482860','482861']

        fl = self.backend.filter(exposures, {'triggerid': self.trigger_id})

        expnumlist = ''
        for f in fl:
            expnumlist += f.expnum.strip()+' '
        print expnumlist
        #expnumlist = '475905  475909  475913  475917  475921  475925  475929  475933  475937  475941  475945  475949  475953  475957  475961'
        print 'FIRING TIMs CODE'
        try:
            os.mkdir(os.path.join(self.trigger_path,self.trigger_id,'candidates'))
        except:
            print 'Candidates directory exists,',os.path.join(self.trigger_path,self.trigger_id,'candidates')
            pass
        #sys.exit()
        gwpostdir = os.environ['GWPOST_DIR']
        print 'source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
                        python '+os.path.join(gwpostdir,'postproc.py')\
                         +' --expnums ' + expnumlist\
                         + ' --outputdir ' + os.path.join(self.trigger_path,self.trigger_id,'candidates')\
                         + ' --triggerid '+self.trigger_id+' --season 70 --ups True'
        # os.system('source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; \
        #                  python '+os.path.join(gwpostdir,'postproc.py')\
        #                  +' --expnums ' + expnumlist\
        #                  + ' --outputdir ' + os.path.join(trigger_path,trigger_id,'candidates')\
        #                  + ' --triggerid '+trigger_id+' --season 46 --ups True' )

        #pid = os.spawnlp(os.P_WAIT, "source", os.path.join(gwpostdir, 'diffimg_setup.sh'))
        # args = ['yes | ssh -t [email protected] "source '+ os.path.join(gwpostdir, 'mi_setup.sh')+'; '+
        #                 'yes | python '+os.path.join(gwpostdir,'postproc.py')\
        #                  +' --expnums ' + expnumlist\
        #                  + ' --outputdir ' + os.path.join(self.trigger_path,self.trigger_id,'candidates')\
        #                  + ' --triggerid '+self.trigger_id+' --season 46 --ups True"'
        #         ]

        args = ['yes | python ' + os.path.join(gwpostdir, 'postproc.py') \
                + ' --expnums ' + expnumlist \
                + ' --outputdir ' + os.path.join(self.trigger_path, self.trigger_id, 'candidates') \
                + ' --triggerid ' + self.trigger_id + ' --season 70 --ups True']
        print args

        #sys.exit()
        f = open(os.path.join(self.processingdir,'postproc_attempt'+str(int(self.thisevent.postprocint)))+'.log','w')
        self.pp = subprocess.Popen(args,stdout=f, stderr=f,shell=True)
        #p = subprocess.Popen(args, stdin=None, stdout=None, stderr=None, close_fds=True, shell=True)
        #print 'going'*1000
        #print self.pp.communicate()
        #print 'gone'*1000
        #p = subprocess.Popen(args,stdin=None, stdout=None, stderr=None, close_fds=True,shell=True)
        #p.communicate()
        #sys.exit()
        return



    def getDatetimeOfFirstJson(self,jsonstring):
        #'M263920-30-UTC-2016-12-1-0:44:00.json'
        js = jsonstring.split('UTC')[1]#-2015-12-27-3:2:00.json
        #date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S.json')
        date_object = dt.strptime(js, '-%Y-%m-%d-%H:%M:%S.json')
        print '***** Datetime of first observation UTC',date_object,'*****'
        return date_object

    def sortHexes(self):
        pass
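
The eventmanager class assumes Document subclasses for the tables it reads and writes (Trigger, preprocessing, exposures, hexes) plus module-level settings such as DATABASE, propid, doimmediateremove, hardjson and hj. A minimal sketch of those definitions, inferred from the usage above; all values shown are placeholders, not taken from the source:

from blitzdb import Document

class Trigger(Document):
    pass   # id, jsonfilelist, triggerpath, mapspath, jobids, postprocint

class preprocessing(Document):
    pass   # jsonfilename, jobid, dagfile, status

class exposures(Document):
    pass   # expnum, nite, field, tiling, hexnite, band, jobid, exptime, status, triggerid, object

class hexes(Document):
    pass   # hexnite, strategy, num_target_*, observed_*, exposures, status, dagfile

DATABASE = 'desoper'          # placeholder easyaccess database name
propid = "'2015B-0187'"       # placeholder; quoted so it drops into the SQL string above
doimmediateremove = False     # remove jobs right after submission (used for testing)
hardjson = False              # when True, override the json file list with hj
hj = []                       # placeholder hard-coded json file list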