class Geocode:
    def __init__(self, geocoderCache=True, printStatus=False):
        self.printStatus = printStatus
        self.geocoderCache = geocoderCache
        if self.geocoderCache:
            self.db = FileBackend('./geo-cache')

    def getGeo(self, lon, lat):
        if self.geocoderCache:
            try:
                nodeObj = self.db.get(GeoAssign, {'lat': lat, 'lon': lon})
                nodeObj['cached'] = True
                return nodeObj
            except GeoAssign.DoesNotExist:
                pass
        if self.printStatus:
            print('lon: ' + str(lon) + ', lat: ' + str(lat) +
                  ' not in cache - start lookup at Nominatim-API')
        geolocator = Nominatim()
        location = geolocator.reverse([lat, lon], timeout=20)
        if 'address' in location.raw:
            location = location.raw['address']
            nodeObj = GeoAssign({'lat': lat, 'lon': lon, 'payload': location})
            if self.geocoderCache:  # guard added: self.db only exists when caching is on
                self.db.save(nodeObj)
                self.db.commit()
            nodeObj['cached'] = False
            return nodeObj
        else:
            # got no results (i.e. coordinates are incorrect)
            return None
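# Usage sketch (added for illustration, not from the original source): assumes
# GeoAssign is a blitzdb Document and geopy's Nominatim is importable, as the
# class above expects.
from blitzdb import Document

class GeoAssign(Document):
    pass

geo = Geocode(printStatus=True)
node = geo.getGeo(13.40, 52.52)   # first call goes to the Nominatim API
node = geo.getGeo(13.40, 52.52)   # second call is served from ./geo-cache
if node is not None:
    print(node['cached'], node['payload'])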
def open_database(self, dbpath):  # (self, dbpath, attrs={}, **kwargs)
    if os.path.basename(dbpath) == self.name:
        self.dbpath = dbpath  # opening existing Visinum database
    elif os.path.isdir(os.path.join(dbpath, self.name)):
        self.dbpath = os.path.join(dbpath, self.name)  # opening existing database
        logger.info("Found Visinum database %s in directory %s" % (self.name, dbpath))
    elif os.path.isdir(dbpath):
        self.dbpath = os.path.join(dbpath, self.name)
        logger.info("Creating new Visinum database %s in directory %s" % (self.name, dbpath))
    else:
        logger.error("Database path (dbpath) incorrectly specified %s" % (dbpath,))
        raise ValueError("Database path (dbpath) incorrectly specified %s" % (dbpath,))
    self.db = FileBackend(self.dbpath, {'serializer_class': 'json'})
    logger.info("Opening Visinum database %s" % (self.dbpath,))
    config_attrs = self.get_config()
    """try:
        #config_attrs = self.db.get(self.DbDocClass, {'visinum_type' : 'db_config'})
        config_attrs = self.get_config()
    except self.DbDocClass.DoesNotExist:
        self.set_config( {'visinum_type' : 'db_config',
                          'title' : os.path.basename( os.path.dirname(self.dbpath) ),
                          'path_orig' : self.dbpath,
                          'UUID': make_uuid(version=3, namespace='uuid.NAMESPACE_URL',
                                            name=self.dbpath) } )
    #config_attrs.update(attrs)
    #self.create_new_item(config_attrs, config_attrs, **kwargs )"""
    self.config_attrs = config_attrs
def on_data(self, data):
    dict_data = json.loads(data)
    db_data = {}
    try:
        # Ignore retweets
        if dict_data['retweeted'] == False:
            try:
                pp("GET DATA")
                # Get relevant key value pairs only
                for KEY in DESIRED_KEYS:
                    db_data[KEY] = dict_data[KEY]
                db_data['screen_name'] = dict_data['user']['screen_name']
                cards = parse_text(db_data['text'])
                # Add it to the db_data dict to import into BlitzDB
                db_data['cards'] = cards
                pp(db_data)
            except:
                pp("GET DATA FAILED")
            try:
                # Import into BlitzDB
                backend = FileBackend("./test-db")
                tweet = Tweet(db_data)
                backend.save(tweet)
                # backend.commit()
                pp("DATABASE INSERTION SUCCESSFUL")
            except:
                pp("DATA INSERTION FAILED")
    except:
        pp("Retweet detected, skipping...")
    return True
class JavManagerDB:
    def __init__(self):
        self.jav_db = FileBackend('jav_manager.db')

    def upcreate_jav(self, jav_obj: dict):
        # uniform car to upper case
        jav_obj['car'] = str(jav_obj['car']).upper()
        # set pk to car
        jav_obj['pk'] = jav_obj['car']
        # set default to no opinion
        # 0-want, 1-viewed, 2-no opinion, 3-local, 4-downloading
        jav_obj.setdefault('stat', 2)
        _jav_doc = JavObj(jav_obj)
        _jav_doc.save(self.jav_db)
        self.jav_db.commit()

    def get_by_pk(self, pk: str):
        return self.jav_db.get(JavObj, {'pk': pk.upper()})

    def pk_exist(self, pk: str):
        try:
            self.jav_db.get(JavObj, {'pk': pk})
            return True
        except DoesNotExist:
            return False
def removeHexFromProcessingDB(hexnite, real):
    if real:
        backend = FileBackend("./realdb")
    else:
        backend = FileBackend("./testdb")
    hex = backend.filter(hexes, {'hexnite': hexnite})
    backend.delete(hex)
    print 'Hexnite', hexnite, 'removed from database'
class Activity(commands.Cog):
    def __init__(self, bot):
        self.bot = bot
        self.backend = FileBackend('./activity-db')
        self.backend.autocommit = True
        self.scanned = False
        bot.logger.info("Activity plugin ready")

    def _maintain(self, bot):
        """Query the database and apply any configured roles"""
        pass

    def _find_one(self, member: discord.Member) -> Union[ActivityRecord, None]:
        """Searches for a response in the DB, returning it if found, or None if
        it doesn't exist or there are multiples. This exists to tie up the
        Blitzdb boilerplate in one place."""
        try:
            comm = self.backend.get(ActivityRecord, {'author': member.id})
        except ActivityRecord.DoesNotExist:
            return None
        except ActivityRecord.MultipleDocumentsReturned:
            self.bot.logger.error(
                f"Discarding multiple results returned for '{member.display_name}' ({member.id})"
            )
            return None
        else:
            return comm

    async def on_ready(self):
        if not self.scanned:
            self.scanned = True
            g = self.bot.guilds[0]  # type: discord.Guild
            self.bot.logger.info(f"Beginning server scan. {len(g.members)} to update.")
            for m in g.members:
                m: discord.Member
                if not m.bot and m.id != self.bot.id:
                    activity_record = self._find_one(m)
                    if activity_record:
                        continue
                    else:
                        a = ActivityRecord({
                            "author": m.id,
                            "lastspeak": datetime.datetime.now()
                        })
                        self.backend.save(a)

    @commands.Cog.listener()
    async def on_message(self, message: discord.message):
        if not self.bot.ready():
            return
        activity_record = self._find_one(message.author)
        activity_record["lastspeak"] = message.created_at
        self.backend.save(activity_record)
def __init__(self):
    self.jav_db = None  # initialized so the check below can't hit a NameError
    retry = 0
    while retry < 3:
        try:
            self.jav_db = FileBackend('jav_manager.db')
            break
        except Exception as e:
            print(f'read file db error {e}, gonna retry')
            retry += 1
            sleep(5)
    if not self.jav_db:
        raise Exception('read local db error')
def get_project_and_backend(path):
    try:
        project_config = get_project_config(path + "/.checkmate")
    except (IOError,):
        sys.stderr.write("No project configuration found!\n")
        exit(-1)
    backend_config = project_config['backend']
    project_class = project_config.get('project_class', 'DiskProject')
    if not project_class in settings.models:
        sys.stderr.write("Invalid project type: %s. Maybe the plugin is missing?" % project_class)
        exit(-1)
    ProjectClass = settings.models[project_class]
    if backend_config['driver'] == 'mongo':
        if not pymongo_support:
            sys.stderr.write("Encountered pymongo backend, but pymongo is not installed!")
            exit(-1)
        pymongo_db = pymongo.MongoClient()[backend_config['db']]
        backend = MongoBackend(pymongo_db, autoload_embedded=False,
                               allow_documents_in_query=False)
    elif backend_config['driver'] == 'file':
        backend = FileBackend(path + "/.checkmate", autoload_embedded=False)
    else:
        sys.stderr.write("Unknown backend driver: %s" % backend_config['driver'])
        exit(-1)  # added: without this, `backend` would be unbound below
    try:
        project = backend.get(ProjectClass, {'pk': project_config['project_id']})
    except ProjectClass.DoesNotExist:
        project = ProjectClass({'pk': project_config['project_id']})
        backend.save(project)
    project.path = path
    backend.save(project)
    backend.commit()
    return project, backend
def vote(pk):
    print(pk)
    if not current_user:
        return redirect("/login")
    if request.method == 'GET':
        if pk + '_vote' in request.cookies:
            return redirect('/already_voted')
    redirect_to_index = redirect("/thankyou")
    response = current_app.make_response(redirect_to_index)
    response.set_cookie(pk + '_vote', value='true')
    waifu = current_app.dbbackend.get(Waifu, {"pk": pk})
    ip = request.environ.get('HTTP_X_REAL_IP', request.remote_addr)
    try:
        if ip in waifu.votes_l:
            return redirect("/already_voted")
        waifu.votes_l.append(ip)
    except:
        waifu.votes_l = list()
        waifu.votes_l.append(ip)
    user_m = current_user._get_current_object()
    if waifu in user_m.voted_waifus:
        return redirect("/already_voted")
    user_m.voted_waifus.append(waifu)
    current_app.dbbackend.save(user_m)
    waifu.votes = waifu.votes + 1
    current_app.dbbackend.save(waifu)
    current_app.dbbackend.commit()
    current_app.dbbackend = FileBackend("./database")
    return response
class Database:
    def __init__(self):
        self.backend = FileBackend(settings['database_location'])

    def save(self, entry):
        self.backend.save(entry)
        self.backend.commit()

    def search(self, table, query):
        if table == 'user':
            doc = User
        elif table == 'subm':
            doc = Submission
        try:
            return self.backend.get(doc, query)
        except doc.DoesNotExist:
            return None
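# Usage sketch (added): assumes User/Submission are blitzdb Documents and
# settings['database_location'] points at a writable directory.
db = Database()
db.save(User({'name': 'alice'}))
found = db.search('user', {'name': 'alice'})  # a User document, or None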
def main():
    backend = FileBackend("./my-db")  # locate the backend
    fin = file("Cu_material_db.yaml", "r")  # read the yaml file in
    in_param = yaml.load(fin)  # load it
    name = in_param.get("Name")  # extract out the parameters
    cond = in_param.get("Conductivity")
    n = in_param.get("Carrier_Density")
    supercond = in_param.get("Superconductivity")
    res = in_param.get("Resistance")
    right_input_flag = True
    # check the superconducting type matches the number of critical fields
    if supercond.get("Superconducting"):
        # create a new entry for a superconductor
        right_input_flag = False if len(supercond.get("Critical_Fields")) != supercond.get("Type") else True
        if right_input_flag:
            new = Material({"name": name, "cond": cond, "n": n,
                            "sc": supercond.get("Superconducting"),
                            "sctype": supercond.get("Type"),
                            "Tc": supercond.get("Critical_Temperature"),
                            "nc": supercond.get("Critical_Current_Density"),
                            "Hc": supercond.get("Critical_Fields"),
                            "L": supercond.get("Penetration_Depth"),
                            "Tref": res.get("Ref_Temperature"),
                            "res": res.get("Resistivity"),
                            "alpha": res.get("Temperature_Coefficient")})
        else:
            print "Superconducting type and number of critical fields don't match!"
    else:
        # create a new entry for a non-superconductor
        new = Material({"name": name, "cond": cond, "n": n,
                        "sc": supercond.get("Superconducting"),
                        "Tref": res.get("Ref_Temperature"),
                        "res": res.get("Resistivity"),
                        "alpha": res.get("Temperature_Coefficient")})
    if right_input_flag:
        # always update the database when there's an old entry of this material
        # in the database (delete old entries first, then save the new one)
        mtrl = backend.filter(Material, {"name": name})
        mtrl.delete()
        new.save(backend)  # save the entry in the database
        backend.commit()
        print "Input succeeded!"
class Database(object):
    """Blitzdb database."""

    def __init__(self):
        """Load backend."""
        self.db = FileBackend(expanduser("~/.omesa/db"))

    def _query(self, f, q):
        try:
            out = f(*q)
        except KeyError:
            self.db = FileBackend(expanduser("~/.omesa/db"))
            f = self.db.filter
            out = f(*q)
        return out

    def save(self, doc):
        """Save document to db."""
        self.db.save(doc)
        self.db.commit()

    def fetch(self, doc, q):
        """Filter and return first entry."""
        try:
            return self._query(self.db.filter, (doc, q))[0]
        except IndexError:
            print(str(doc), str(q))
            print("File does not exist.")

    def get_component(self, doc, name):
        # FIXME: see if returning non-decoded is relevant for anything
        try:
            return sr.decode(dict(self._query(
                self.db.filter, (doc, {'name': name}))[0]))
        except IndexError:
            print(str(doc), {'name': name})
            print("File does not exist.")

    def getall(self, doc):
        """Return all entries in db."""
        return [d for d in self._query(self.db.filter, (doc, {}))]
def __init__(self, driver_type, connection_string):
    super(BlitzDBDALDriver, self).__init__()
    self.database_name = 'datmo_db'
    if driver_type == "file":
        from blitzdb import FileBackend
        self.backend = FileBackend(connection_string)
    elif driver_type == "mongo":
        from pymongo import MongoClient
        from blitzdb.backends.mongo import Backend as MongoBackend
        c = MongoClient(connection_string)
        # create a new BlitzDB backend using a MongoDB database
        self.backend = MongoBackend(c[self.database_name])
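# Usage sketch (added); the connection strings are made-up examples:
# dal = BlitzDBDALDriver("file", "/tmp/datmo_test_db")    # file-backed store
# dal = BlitzDBDALDriver("mongo", "mongodb://localhost")  # MongoDB-backed store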
class DataProvider:
    def __init__(self, db_path):
        self.db = FileBackend(db_path)

    def last(self, user):
        return get_first_func(self.db.filter(WeightRecord, {'user': user, 'last': True}))

    def all_mornings(self, user):
        db_filter = self.db.filter(WeightRecord, {'user': user, 'morning': True})
        #if db_filter:
        #    for i in db_filter:
        #        logging.debug("T0 {}".format(i.time))
        sor = sorted(db_filter, key=lambda x: x.time, reverse=False)
        #if sor:
        #    for i in sor:
        #        logging.debug("T1 {}".format(i.time))
        return sor

    def last_morning(self, data):
        return get_first_func(self.db.filter(WeightRecord, {
            'last': True, 'morning': True, 'user': data.user}))

    def today_morning(self, data):
        return get_first_func(self.db.filter(WeightRecord, {
            'year': data.year, 'month': data.month, 'day': data.day,
            'user': data.user, 'morning': True}))

    def save(self, record):
        record.save(self.db)

    def commit(self):
        self.db.commit()
def wrapped_f(bot, update, *args, **kwargs):
    msg_entry = Message(json.loads(update.to_json()))
    backend = FileBackend(settings.ACCESS_LOG_DB_PATH)
    backend.save(msg_entry)
    backend.commit()
    return f(bot, update, *args, **kwargs)
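# Context sketch (added): wrapped_f reads like the inner function of an
# access-logging decorator for bot handlers. A plausible (assumed) enclosing
# definition would be:
def log_access(f):
    def wrapped_f(bot, update, *args, **kwargs):
        msg_entry = Message(json.loads(update.to_json()))
        backend = FileBackend(settings.ACCESS_LOG_DB_PATH)
        backend.save(msg_entry)
        backend.commit()
        return f(bot, update, *args, **kwargs)
    return wrapped_f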
def cleardb(real=False):
    password = '******'
    user_input = raw_input('Please Enter Password: ')
    # password check reconstructed around the redacted literal above
    if user_input != password:
        sys.exit('Incorrect Password, terminating... \n')
    if real:
        backend = FileBackend("./realdb")
    else:
        backend = FileBackend("./testdb")
    images = backend.filter(preprocessing, {'status': 'Submitted'})
    images.delete()
    hexen = backend.filter(hexes, {'num_target_g': 0})
    hexen.delete()
    exposuren = backend.filter(exposures, {'status': 'Awaiting additional exposures'})
    exposuren.delete()
    backend.commit()
class ImageToTumblr(object):
    def __init__(self, bot):
        self.cfg = PluginConfig(self)
        self.image_filetypes = self.cfg.get("image_filetypes").split(",")
        self.db = FileBackend(self.cfg.get("main_db"))
        self.tumblr = pytumblr.TumblrRestClient(
            self.cfg.get("consumer_key"),
            self.cfg.get("consumer_secret"),
            self.cfg.get("oauth_token"),
            self.cfg.get("oauth_secret"),
        )
        irc3.base.logging.log(
            irc3.base.logging.WARN,
            "Tumblr poster ready! Posting all URLs with: %s" % self.image_filetypes
        )

    def post_image(self, text, poster):
        # Strip everything but the address
        m = re.match(r".*(?P<url>http.*)", text)
        url = m.group("url")
        # Make sure we didn't do this one already
        try:
            self.db.get(PostedImage, {"url": url})
        except PostedImage.DoesNotExist:
            try:
                # First we post it to tumblr
                p = self.tumblr.create_photo(
                    "mmerpimages", state="published", source=str(url),
                    caption="Found by %s" % poster
                )
                irc3.base.logging.log(irc3.base.logging.WARN,
                                      "Posting image by %s: %s" % (poster, url))
                # And then record the fact that we did.
                self.db.save(PostedImage({"url": url}))
                self.db.commit()
            except:
                irc3.base.logging.log(irc3.base.logging.WARN,
                                      "Could not post to tumblr: %s" % url)
            return
        else:
            irc3.base.logging.log(irc3.base.logging.WARN,
                                  "Not posting duplicate image: %s" % url)
            return

    @irc3.event(irc3.rfc.PRIVMSG)  # Triggered on every message anywhere.
    def parse_image(self, target, mask, data, event):
        for extension in self.image_filetypes:
            if "." + extension.lower() in data:
                self.post_image(data, mask.nick)
from blitzdb import Document, FileBackend

backend = FileBackend('./mob-db')

'''
A user shall have the following attributes: name, email, username/handle,
twitter/facebook ID, auth token, karma points, as well as future references
to posts, comments and votes.
'''
class User(Document):
    pass

'''
A post shall have the following attributes: title, content, a reference to
the user who posted it, timestamp, a list containing IDs of comments in
chronological order, as well as a list of vote IDs.
'''
class Post(Document):
    pass

'''
A comment shall have the following attributes: content, a reference to the author.
'''
class Comment(Document):
    pass
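# Usage sketch (added; attribute names follow the comments above, the values
# are made up):
alice = User({'name': 'Alice', 'email': 'alice@example.com', 'karma': 0})
post = Post({'title': 'Hello', 'content': 'First post', 'user': alice,
             'comments': [], 'votes': []})
backend.save(alice)
backend.save(post)
backend.commit()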
from blitzdb import Document
from blitzdb import FileBackend
from lxml import etree
import Benotete_Abgabe3.Teilaufgabe1.Aufgabe3 as aufg1

db = FileBackend("./my-db")

class Proceedings(Document):
    pass

class Inproceedings(Document):
    pass

# The XML data source
#datei = '../test.xml'
datei = '../dblp-2017-05-02.xml'

# Subtask 2, No. 1
def get_inproceedings_by_year(datei, year):
    inproceedings = []
    # iterparse for reading the file, considering only the "end" events
    context = etree.iterparse(datei, events=('end',), load_dtd=True,
                              encoding='ISO-8859-1', huge_tree=True)
    # Iterate over all elements of the iterparser
def main():
    backend = FileBackend("./my-db")  # locate the database
    fin = file("Cu_calc_input.yaml", "r")  # read the yaml file
    in_param = yaml.load(fin)  # load it
    material = in_param.get("Material")  # extract the parameters
    circuit_param = in_param.get("Circuit_Design")
    exp_param = in_param.get("Experimental_Setup")
    step = in_param.get("Output").get("Current_Sweep_Step")
    c_w = circuit_param.get("Width")
    c_R = circuit_param.get("Radius")
    c_t = circuit_param.get("Thickness")
    c_w_else = circuit_param.get("Width_Else")
    c_l_else = circuit_param.get("Length_Else")
    file_str = "%s_w_%d_um_R_%d_um_t_%d_nm" % (material, c_w*1e6, c_R*1e6, c_t*1e9)  # set the output file
    fout = file(file_str + ".txt", "w")
    mtrl = backend.get(Material, {"name": material})  # find the material information in the database
    print mtrl.name
    x = c_w / c_R  # w/R ratio

    ########## LIQUID NITROGEN TEMPERATURE CASE ################
    # calculate the resistance of the trace to determine current upper limit
    trace_res_77 = CALC_RES(c_w, c_R, c_t, c_w_else, c_l_else, 77., mtrl.Tref, mtrl.res, mtrl.alpha)
    current_limit_77 = min(exp_param.get("Current_Limit"),
                           exp_param.get("Voltage_Limit") / trace_res_77)
    # calculate the signals
    output_77 = np.tile(np.arange(0, current_limit_77, step), (3, 1)).transpose()  # currents, V_G, V_H
    for i in range(0, output_77.shape[0]):
        output_77[i, 1] = SIG_NORMAL(mtrl.n, c_w, c_t, output_77[i, 0]) * MODEL_CRC(x)
        output_77[i, 2] = HALL_POT(mtrl.n, c_w, c_t, output_77[i, 0]) * MODEL_LIN(x)
    # Plot the data
    plt.ioff()
    fig = plt.figure()
    plt.plot(output_77[:, 0], output_77[:, 1], label=r"$V_G$ (77K)", marker="x", linestyle="None", color="k")
    plt.plot(output_77[:, 0], output_77[:, 2], label=r"$V_H$ (77K)", marker="+", linestyle="None", color="k")
    # Store the data
    fout.write("##########LIQUID NITROGEN TEMPERATURE CASE################\n")
    res_str = "Resistance = %.2e ohm\n\n" % trace_res_77
    fout.write(res_str)
    fout.write("I(A)\tV_G(V)\tV_H(V)\n")
    np.savetxt(fout, output_77, fmt="%.2e")

    ########## ROOM TEMPERATURE CASE ################
    trace_res_293 = CALC_RES(c_w, c_R, c_t, c_w_else, c_l_else, 293., mtrl.Tref, mtrl.res, mtrl.alpha)
    current_limit_293 = min(exp_param.get("Current_Limit"),
                            exp_param.get("Voltage_Limit") / trace_res_293)
    output_293 = np.tile(np.arange(0, current_limit_293, step), (3, 1)).transpose()  # currents, V_G, V_H
    for i in range(0, output_293.shape[0]):
        output_293[i, 1] = SIG_NORMAL(mtrl.n, c_w, c_t, output_293[i, 0]) * MODEL_CRC(x)
        output_293[i, 2] = HALL_POT(mtrl.n, c_w, c_t, output_293[i, 0]) * MODEL_LIN(x)
    plt.plot(output_293[:, 0], output_293[:, 1], label=r"$V_G$ (Room Temp.)", marker="s", mfc="None", linestyle="None", color="k")
    plt.plot(output_293[:, 0], output_293[:, 2], label=r"$V_H$ (Room temp.)", marker="D", mfc="None", linestyle="None", color="k")
    fout.write("\n##########ROOM TEMPERATURE CASE################\n")
    res_str = "Resistance = %.2e ohm\n\n" % trace_res_293
    fout.write(res_str)
    fout.write("I(A)\tV_G(V)\tV_H(V)\n")
    np.savetxt(fout, output_293, fmt="%.2e")

    ########## SUPERCONDUCTING CASE ################
    if mtrl.sc:
        output_sc = np.tile(np.arange(0, exp_param.get("Current_Limit"), step), (2, 1)).transpose()  # currents, V_G
        for i in range(0, output_sc.shape[0]):
            output_sc[i, 1] = SIG_SC(mtrl.L, c_w, c_t, output_sc[i, 0]) * MODEL_SCCJ(x)
        plt.plot(output_sc[:, 0], output_sc[:, 1], label=r"$V_G$ (Supercond.)", color="k")
        fout.write("\n##########SUPERCONDUCTING CASE################\n")
        fout.write("I(A)\tV_G(V)\n")
        np.savetxt(fout, output_sc, fmt="%.2e")

    # plot details
    plt.xlabel("Input current (A)", fontsize="15")
    plt.ylabel("Potential difference (V)", fontsize="15")
    plt.yscale("log")
    plt.title(material)
    plt.legend(loc=4)
    plt.savefig(file_str + ".png", dpi=300, format="png")
    plt.close(fig)
    fin.close()
    fout.close()
#!/usr/bin/env python
import os
import json
import math

from blitzdb import Document
from blitzdb import FileBackend

backend = FileBackend("./my-db")
dir = "selected_docs/"
corpus = 20000000000.0
corpus_matches = 640000000.0
uri = {}

class Link(Document):
    pass

def log2(x):
    return math.log(x) / math.log(2)

def computeTF(total_words, word_matches):
    tf = float(word_matches) / float(total_words)
    return tf

def computeIDF():
    idf = log2(corpus / corpus_matches)
    return idf
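# Usage sketch (added): a tf-idf weight from the helpers above, using made-up
# counts (a 1000-word document containing the term 3 times):
tf = computeTF(1000, 3)
idf = computeIDF()  # idf is fixed by the corpus constants defined above
print(tf * idf)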
def create_app(configfile=None):
    app = Flask(__name__)
    AppConfig(app, configfile)  # Flask-Appconfig is not necessary, but
                                # highly recommended =)
                                # https://github.com/mbr/flask-appconfig
    Bootstrap(app)

    login_manager = LoginManager()
    login_manager.init_app(app)
    login_manager.login_view = 'login'

    # NoSQL backend
    backend = FileBackend("/tmp/wakeonlan.db")
    backend.create_index(Device, fields={'id': 1}, unique=True)

    # TEST devices
    alldevices = backend.filter(Device, {})
    if len(alldevices) == 0:
        try:
            pc1 = Device({"id": "001122334411", "name": "PC 1", "mac": "00:11:22:33:44:11",
                          "ip": "192.168.222.111", 'status': ''})
            backend.save(pc1)
            pc2 = Device({"id": "001122334422", "name": "PC 2", "mac": "00:11:22:33:44:22",
                          "ip": "192.168.222.112", 'status': ''})
            backend.save(pc2)
            pc3 = Device({"id": "001122334433", "name": "Router", "mac": "00:11:22:33:44:33",
                          "ip": "192.168.222.1", 'status': ''})
            backend.save(pc3)
            backend.commit()
        except:
            backend.revert()

    # in a real app, these should be configured through Flask-Appconfig
    app.config['SECRET_KEY'] = 'devkey'
    # app.config['RECAPTCHA_PUBLIC_KEY'] = \
    #     '6Lfol9cSAAAAADAkodaYl9wvQCwBMr3qGR_PPHcw'

    def getDeviceById(id):
        device = None
        try:
            device = backend.get(Device, {'id': id})
        except:
            pass
        return device

    def pingDeviceById(id):
        # Get device
        device = backend.get(Device, {'id': id})
        if device:
            # Get device's IP
            ip = device['ip']
            result = pingDeviceByIp(ip)
            # Update status UP/DOWN/''
            if result == 0:
                device['status'] = 'UP'
            else:
                device['status'] = 'DOWN'
            backend.save(device)
            return result
        return None

    def wolDeviceById(id):
        # Get device
        device = backend.get(Device, {'id': id})
        if device:
            # WoL for device MAC
            mac = device['mac']
            wolDeviceByMac(mac)
        return None

    @login_manager.user_loader
    def user_loader(user_id):
        """Given *user_id*, return the associated User object.

        :param unicode user_id: user_id (email) of user to retrieve
        """
        user_entry = User.getById(user_id)
        if user_entry is not None:
            user = User(user_entry[0], user_entry[1])
            return user
        else:
            return None

    @app.route('/', methods=('GET', 'POST'))
    @login_required
    def index():
        form = ExampleForm()
        form.validate_on_submit()  # to get error messages to the browser
        # flash('critical message', 'critical')
        # flash('error message', 'error')
        # flash('warning message', 'warning')
        # flash('info message', 'info')
        # flash('debug message', 'debug')
        # flash('different message', 'different')
        # flash('uncategorized message')
        alldevices = backend.filter(Device, {}).sort('name')
        # app.logger.info('Devices: %s' % (len(alldevices)))
        return render_template('index.html', form=form, devices=alldevices)

    @app.route('/login', methods=('GET', 'POST'))
    def login():
        if request.method == 'GET':
            form = LoginForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('login.html', form=form)
        username = request.form['username']
        password = request.form['password']
        user_entry = User.get(username, password)
        if user_entry is None:
            flash('Username or Password is invalid', 'error')
            return redirect(url_for('login'))
        user = User(user_entry[0], user_entry[1])
        login_user(user, remember=True)
        return redirect(request.args.get('next') or url_for('index'))

    @app.route("/logout", methods=["GET"])
    @login_required
    def logout():
        """Logout the current user."""
        user = current_user
        user.authenticated = False
        logout_user()
        return redirect(url_for('login'))

    @app.route('/addDevice', methods=('GET', 'POST'))
    @login_required
    def addDevice():
        if request.method == 'GET':
            form = AddDeviceForm()
            form.validate_on_submit()  # to get error messages to the browser
            return render_template('add_device.html', form=form)
        name = request.form['name']
        mac = request.form['mac']
        ip = request.form['ip']
        id = mac.replace(':', '')
        try:
            newDevice = Device({"id": id, "name": name, "mac": mac, "ip": ip, 'status': ''})
            backend.save(newDevice)
            backend.commit()
        except:
            flash('Error creating new Device', 'error')
        return redirect(url_for('index'))

    @app.route('/editListDevice', methods=('GET', 'POST'))
    @login_required
    def editListDevice():
        alldevices = backend.filter(Device, {}).sort('name')
        return render_template('list_device.html', devices=alldevices)

    @app.route('/pingDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def pingDevice(deviceId):
        app.logger.info('pingDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        result = pingDeviceById(deviceId)
        app.logger.info('pingDevice: %s' % (result))
        if result is None:
            flash('Ping - Error on device %s' % (device['name']), 'error')
        elif result == 0:
            flash('Device %s is UP' % (device['name']), 'info')
        else:
            flash('Device %s is DOWN' % (device['name']), 'error')
        return redirect(url_for('index'))

    @app.route('/wolDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def wolDevice(deviceId):
        app.logger.info('wolDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        result = wolDeviceById(deviceId)
        if device:
            flash('WoL sent to %s' % (device['name']), 'info')
        else:
            flash('WoL error', 'error')
        return redirect(url_for('index'))

    @app.route('/deleteDevice/<deviceId>', methods=('GET', 'POST'))
    @login_required
    def deleteDevice(deviceId):
        app.logger.info('deleteDevice: %s' % (deviceId))
        device = getDeviceById(deviceId)
        try:
            backend.delete(device)
            backend.commit()
            flash('%s Deleted' % (device['name']), 'info')
        except:
            flash('Delete error', 'error')
        return redirect(url_for('editListDevice'))

    return app
from collections import Counter
from blitzdb import Document, FileBackend

# XML file path
file_path = "dblp-2017-05-02.xml"

# For BlitzDB
class Inproceedings(Document):
    pass

class Proceedings(Document):
    pass

backend = FileBackend("./dblp-db")
# End BlitzDB

# Main menu for simple navigation between the subtasks
def hauptmenu():
    print("\nPlease choose from the following options:")
    while True:
        print("\n--Main Menu--")
        print("\n1 - Subtask 1.1"
              "\n2 - Subtask 1.2"
              "\n3 - Subtask 1.3"
              "\n4 - Subtask 2.1"
              "\n5 - Subtask 2.2"
import os

from blitzdb import Document
from blitzdb import FileBackend

backend = FileBackend('./file_db')

class File(Document):
    pass

class Category(Document):
    pass

class CategoryItem():
    def __init__(self):
        self.cat_name = ''
        self.cat_screen_name = ''
        self.cat_path = ''

    def save_cat(self):
        catinfo = {}
        catinfo['cat_name'] = self.cat_name
        catinfo['cat_screen_name'] = self.cat_screen_name
        catinfo['cat_slug'] = self.cat_name.replace(' ', '-')
        catinfo['cat_path'] = self.cat_path
        cat = Category(catinfo)
        backend.save(cat)
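# Usage sketch (added; values are made up). Note that save_cat() only stages
# the document, so the caller has to commit:
item = CategoryItem()
item.cat_name = 'summer photos'
item.cat_screen_name = 'Summer Photos'
item.cat_path = '/data/summer'
item.save_cat()
backend.commit()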
from blitzdb import Document
from blitzdb import FileBackend
import csv

backend = FileBackend("my-db")
output_folder = 'output/'

class inproceedings(Document):
    pass

# Takes data (list), a header list and a filename, and saves the data into a csv file
def save_csv(save_data, save_header, filename):
    with open(output_folder + filename, 'w+', encoding='windows-1252') as f:
        writer = csv.DictWriter(f, save_header, delimiter=';',
                                extrasaction='ignore', lineterminator='\n')
        writer.writeheader()
        writer.writerows(save_data)
        print('Records saved!')

# Finds all inproceedings of a given editor
def find_data_by_editor(editor):
    results = backend.filter(inproceedings, {})
    result_list = []
    for result in results:
        try:
            if (result["proc:editor"] == editor):
                result_list.append(result)
#!/usr/bin/python
# -*- coding: utf-8 -*-
import tweepy, secrets, time, random, json
from blitzdb import Document, FileBackend

backend = FileBackend(".db")

class User(Document):
    pass

with open('phrases.json') as f:
    phrases = json.load(f)

users = backend.filter(User, {'twitted': False})

auth = tweepy.OAuthHandler(secrets.CONSUMER_KEY, secrets.CONSUMER_SECRET)
auth.set_access_token(secrets.ACCES_TOKEN, secrets.ACCES_TOKEN_SECRET)
api = tweepy.API(auth)

for startup in users:
    user = startup.get('user').split('?')[0]
    print user
    if len(user) > 1:
        phrase = random.choice(phrases)
        print phrase % user
        api.update_status(phrase % user)
        startup.twitted = True
        startup.save(backend)
    else:
class JavManagerDB:
    def __init__(self):
        self.jav_db = FileBackend('jav_manager.db')

    def create_indexes(self):
        print('creating index for stat')
        self.jav_db.create_index(JavObj, 'stat')

    def rebuild_index(self):
        self.jav_db.rebuild_index(self.jav_db.get_collection_for_cls(JavObj), 'stat')

    def bulk_list(self):
        return self.jav_db.filter(JavObj, {})

    def partial_search(self, search_string: str):
        rt = self.jav_db.filter(JavObj, {'pk': {'$regex': search_string}})[:20]
        return rt

    def query_on_filter(self, filter_on: dict, page=1, limit=8):
        rt = self.jav_db.filter(JavObj, filter_on)
        rt_max_page = ceil(len(rt) / limit)
        rt_list = rt[(page - 1) * limit:page * limit]
        return [dict(x) for x in rt_list], rt_max_page

    def upcreate_jav(self, jav_obj: dict):
        # uniform car to upper case
        jav_obj['car'] = str(jav_obj['car']).upper()
        # set pk to car
        jav_obj['pk'] = jav_obj['car']
        # pull existing data since this is an update function
        try:
            current_jav_obj = dict(self.get_by_pk(jav_obj['car']))
            # overwrite current db dict with input dict
            current_jav_obj.update(jav_obj)
        except DoesNotExist:
            # set default to no opinion
            # 0-want, 1-viewed, 2-no opinion, 3-local, 4-downloading
            jav_obj.setdefault('stat', 2)
        _jav_doc = JavObj(jav_obj)
        _jav_doc.save(self.jav_db)
        self.jav_db.commit()
        print('wrote ', jav_obj)

    def get_by_pk(self, pk: str):
        return self.jav_db.get(JavObj, {'pk': pk.upper()})

    def pk_exist(self, pk: str):
        try:
            self.jav_db.get(JavObj, {'pk': pk})
            return True
        except DoesNotExist:
            return False
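# Usage sketch (added): assumes JavObj is a blitzdb Document as used above.
db = JavManagerDB()
db.upcreate_jav({'car': 'abc-123', 'stat': 0})
print(db.pk_exist('ABC-123'))          # True: pks are stored upper-cased
print(dict(db.get_by_pk('abc-123')))   # get_by_pk upper-cases the pk as well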
class Profiles(object):
    def __init__(self, bot):
        self.bot = bot
        self.cfg = PluginConfig(self)
        self.db = FileBackend(self.cfg.get('main_db'))
        mtt = MessageRetargeter(bot)
        self.msg = mtt.msg
        web = Flask(__name__, template_folder=tmpl_dir)
        mako = MakoTemplates()
        mako.init_app(web)
        # Add routes here
        web.add_url_rule('/edit_web/<args>', 'edit_web', self.edit_web, methods=['GET', 'POST'])
        _thread.start_new_thread(web.run, (), {'host': '0.0.0.0'})

    @command
    def learn(self, mask, target, args):
        """
        Stores information allowing for later retrieval. Names are downcased for sanity.

        Usage:
            %%learn <name> <information>...
        """
        name = args['<name>'].lower()
        info = ' '.join(args['<information>'])
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            profile = Profile({
                'name': name,
                'owner': mask.nick.lower(),
                'lines': [info],
                'random': False,
                'public': False
            })
            profile.save(self.db)
            self.db.commit()
            self.msg(mask, target, 'Your data "%s" has been stored.' % name)
            return
        except Profile.MultipleDocumentsReturned:
            self.msg(mask, target, "Found more than one %s. This is bad! Please notify the bot owner." % name)
            return
        if is_allowed_to(Action.edit, mask.nick, profile):
            lines_to_append = profile.lines
            lines_to_append.append(info)
            profile.save(self.db)
            self.db.commit()
            self.msg(mask, target, 'Your data "%s" has been updated.' % name)
            return
        else:
            self.msg(mask, target, 'You are not authorized to edit "%s". Ask %s instead.' % (name, profile.owner))
            return

    @command
    def query(self, mask, target, args):
        """
        Retrieve the information associated with <name>. If the item is marked
        random, then one random item will be returned.

        Usage:
            %%query <name>
            ?? <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if profile.random:
            self.msg(mask, target, get_flags(profile) + random.choice(profile.lines))
        else:
            for line in profile.lines:
                self.msg(mask, target, get_flags(profile) + line)
                if len(profile.lines) >= int(self.cfg.get('throttle_max')):
                    sleep(int(self.cfg.get('throttle_time')))

    @command
    def forget(self, mask, target, args):
        """
        Delete <name> from the records. Only the person who created the item can remove it.

        Usage:
            %%forget <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if is_allowed_to(Action.delete, mask.nick, profile):
            self.db.delete(profile)
            self.db.commit()
            self.msg(mask, target, "%s has been deleted." % name)
        else:
            self.msg(mask, target, 'You are not authorized to delete "%s". Ask %s instead.' % (name, profile.owner))

    @command(permission='admin', show_in_help_list=False)
    def rmf(self, mask, target, args):
        """
        Delete <name> from the records without checking permissions.

        Usage:
            %%rmf <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        self.db.delete(profile)
        self.db.commit()
        self.msg(mask, target, "%s has been deleted." % name)

    @command(permission='admin', show_in_help_list=False)
    def chown(self, mask, target, args):
        """
        Change the owner of <name> to <newowner>.

        Usage:
            %%chown <name> <newowner>
        """
        name = args['<name>'].lower()
        newowner = args['<newowner>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        profile.owner = newowner
        self.db.save(profile)
        self.db.commit()
        self.msg(mask, target, "%s is now owned by %s." % (name, newowner))

    @command
    def toggle_public(self, mask, target, args):
        """
        Changes whether <name> is publicly editable or not.

        Usage:
            %%toggle_public <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if is_allowed_to(Action.edit, mask.nick, profile):
            if profile.public:
                profile.public = False
                self.msg(mask, target, '"%s" is no longer publicly editable.' % name)
            else:
                profile.public = True
                self.msg(mask, target, '"%s" is now publicly editable.' % name)
            self.db.save(profile)
            self.db.commit()
            return
        else:
            self.msg(mask, target, 'You are not authorized to edit "%s". Ask %s instead.' % (name, profile.owner))
            return

    @command
    def toggle_random(self, mask, target, args):
        """
        Toggle the randomness of an item, so that it shows a single random line
        instead of all lines when queried.

        Usage:
            %%toggle_random <name>
        """
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if is_allowed_to(Action.edit, mask.nick, profile):
            profile.random = not profile.random
            self.msg(mask, target, 'Random mode for %s is set to: %s' % (profile.name, profile.random))
            profile.save(self.db)
            self.db.commit()
        else:
            self.msg(mask, target, 'You are not authorized to edit "%s". Ask %s instead.' % (name, profile.owner))
            irc3.base.logging.log(irc3.base.logging.WARN,
                                  "%s tried to edit %s, but can't since it's owned by %s"
                                  % (mask.nick, profile.name, profile.owner))

    @event("(@(?P<tags>\S+) )?:(?P<mask>\S+) PRIVMSG (?P<target>\S+) :\?\? (?P<data>.*)")
    def easy_query(self, mask, target, data):
        self.bot.get_plugin(Commands).on_command(cmd='query', mask=mask, target=target, data=data)

    ####
    # All web stuff below this point
    ####

    @command
    def edit(self, mask, target, args):
        """
        Sends you a webpage link to edit <name>. Great for longer profiles.
        Make sure to keep the URL you are given secure, as with it, anyone can
        edit your profiles.

        Usage:
            %%edit <name>
        """
        # TODO: Clear any existing sessions the user has
        data = {
            'id': str(uuid.uuid4()),
            'name': mask.nick,
            'profile': args['<name>']
        }
        name = args['<name>'].lower()
        try:
            profile = self.db.get(Profile, {'name': name})
        except Profile.DoesNotExist:
            self.msg(mask, target, 'I cannot find "%s" in the records.' % name)
            return
        if is_allowed_to(Action.fulledit, mask.nick, profile):
            newses = Session(data)
            self.db.save(newses)
            self.db.commit()
            self.bot.privmsg(mask.nick,
                             "An editor has been set up for you at http://skaianet.tkware.us:5000/edit_web/%s"
                             % str(data['id']))
            self.bot.privmsg(mask.nick,
                             "Be very careful not to expose this address - with it, anyone can edit your stuff")
        else:
            self.msg(mask, target, 'You are not authorized to webedit "%s". Ask %s instead.' % (name, profile.owner))

    def edit_web(self, args):
        # Web endpoint: /edit_web/<args>
        if request.method == 'GET':
            # Does the session exist?
            try:
                edit_session = self.db.get(Session, {'id': args})
            except Session.DoesNotExist:
                return render_template('youfail.html', bot=self.bot,
                                       failreason='Invalid Session', userfail=True)
            # Does the profile exist?
            name = edit_session.profile
            try:
                profile = self.db.get(Profile, {'name': name.lower()})
            except Profile.DoesNotExist:
                return render_template('youfail.html', bot=self.bot,
                                       failreason='I cannot find "%s" in the records.' % name)
            # Kick off to the edit page!
            return render_template('edit.html', bot=self.bot, profile=profile,
                                   username=edit_session.name, sessionid=edit_session.id)
        elif request.method == 'POST':
            # We have to look up the session ID one more time. Something could have
            # happened to the profile since we created the session.
            try:
                edit_session = self.db.get(Session, {'id': request.form['ID']})
            except Session.DoesNotExist:
                return render_template('youfail.html', bot=self.bot,
                                       failreason='Invalid Session', userfail=True)
            name = request.form['profile']
            try:
                profile = self.db.get(Profile, {'name': request.form['profile']})
            except Profile.DoesNotExist:
                return render_template('youfail.html', bot=self.bot,
                                       failreason='I cannot find "%s" in the records.' % name,
                                       userfail=True)
            # Now with the profile in hand, blank the lines field and rebuild it
            # from the form. Here we grab all numeric items from the submission,
            # sort them, and one by one refill the DB object.
            lines = [item for item in request.form if item.isdigit()]
            lines.sort()
            profile.lines = []
            for item in lines:
                profile.lines.append(request.form[item])
            self.db.save(profile)
            self.db.delete(edit_session)
            self.db.commit()
            return render_template('done.html', bot=self.bot, profile=profile.name)
import os
from blitzdb import Document, FileBackend  # imports added so the snippet runs standalone

class Movie(Document):
    pass

class Actor(Document):  # Actor class added: it is used below but was not defined here
    pass

the_godfather = Movie({'name': 'The Godfather', 'year': 1972, 'pk': 1})
marlon_brando = Actor({'name': 'Marlon Brando', 'pk': 2})
al_pacino = Actor({'name': 'Al Pacino', 'pk': 3})

print("Backend")
loc = "./my-db"
backend = FileBackend(loc)
print("Created Backend", loc)

the_godfather.save(backend)
marlon_brando.save(backend)
al_pacino.save(backend)
backend.commit()  # commit added so the print below is accurate
print("Backend Saved and committed:", os.path.realpath(os.curdir))

# print(backend.get(Movie, {'pk': 1}))
# or...
the_godfather = backend.get(Movie, {'name': 'The Godfather'})
print("the_godfather", the_godfather)
from blitzdb import Document

class Movie(Document):
    pass

class Actor(Document):
    pass

the_godfather = Movie({'name': 'The Godfather', 'year': 1972, 'pk': 1L})
marlon_brando = Actor({'name': 'Marlon Brando', 'pk': 1L})
al_pacino = Actor({'name': 'Al Pacino', 'pk': 2L})

from blitzdb import FileBackend

backend = FileBackend("/tmp/movies")
backend.register(Movie, {'collection': 'movies'})
backend.register(Actor, {'collection': 'actors'})

backend.filter(Movie, {}).delete()
backend.filter(Actor, {}).delete()

the_godfather.save(backend)
marlon_brando.save(backend)
al_pacino.save(backend)

the_godfather = backend.get(Movie, {'pk': 1L})
# or...
the_godfather = backend.get(Movie, {'name': 'The Godfather'})
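# Follow-up sketch (added), modeled on the blitzdb README: documents can hold
# references to other documents, and blitzdb stores the references for you.
the_godfather.cast = {'Don Vito Corleone': marlon_brando,
                      'Michael Corleone': al_pacino}
the_godfather.save(backend)
backend.commit()
brando_again = backend.get(Actor, {'name': 'Marlon Brando'})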
if __name__ == '__main__':
    def save_new_credit(data, pk=None):
        new_credit = Score({'credit': data, 'pk': pk}) if pk else Score({'credit': data})
        new_credit.save(backend)
        backend.commit()

    username = '******'
    password = '******'
    s = Session(username, password)
    status, message = s.login()
    if status == STATUS_SUCCESS:
        html = s.get(URL_TOTALSCORE)
        if html.content is not None:
            backend = FileBackend("./emis.db")
            selector = etree.HTML(html.content.decode('gbk'))
            current_credit = int(selector.xpath(r'//*[@color="#FF0000"]/text()')[0])
            try:
                saved_credit = backend.get(Score, {'pk': 1})
                if current_credit != '':
                    if current_credit > int(saved_credit.credit):
                        saved_credit.credit = current_credit
                        saved_credit.save(backend)
                        backend.commit()
                        BmobSmsUtils().send_sms_template(['18395960722'], 'new_score')
                        print('Credit changed, SMS sent!')
                    elif current_credit == int(saved_credit.credit):
class Memos(object):
    def __init__(self, bot):
        self.bot = bot
        self.cfg = PluginConfig(self)
        self.db = FileBackend(self.cfg.get('main_db'))
        mtt = MessageRetargeter(bot)
        self.msg = mtt.msg

    @command
    def note(self, target, mask, args):
        """
        Leaves a note for <name>, containing <text>. The next time I see <name>
        speak, I will deliver any notes they have waiting. Notes taken in
        private are delivered in private, and vice versa.

        Usage:
            %%note <name> <text>...
        """
        if mask.is_channel:
            pubmsg = True
        else:
            pubmsg = False
        if args['<name>'] == self.bot.nick:
            self.msg(mask, target, "You can't leave notes for me, silly :)")
            return
        newmemo = Memo({
            'sender': target.nick.lower(),
            'recipient': args['<name>'].lower(),
            'public': pubmsg,
            'timestamp': ctime(),
            'text': ' '.join(args['<text>'])
        })
        newmemo.save(self.db)
        self.db.commit()
        confirmation_msg = "Your note for %s has been queued for delivery." % args['<name>']
        self.msg(mask, target, confirmation_msg)

    @irc3.event(irc3.rfc.PRIVMSG)  # Triggered on every message anywhere.
    def check_notes(self, target, mask, data, event):
        del data, event
        try:
            msgs = self.db.filter(Memo, {'recipient': mask.nick.lower()})
            msgword = "message" if len(msgs) < 2 else "messages"  # Fix: "I have 1 messages for you!"
        except Memo.DoesNotExist:
            return
        if len(msgs) == 0:
            return
        # Avoid telling people they have messages in public, if any of them are set public=False
        if contains_private_messages(msgs):
            self.msg(mask, mask.nick, "I have %s %s for you, %s!" % (len(msgs), msgword, mask.nick))
        else:
            self.msg(mask, target, "I have %s %s for you, %s!" % (len(msgs), msgword, mask.nick))
        # Actually deliver the memos
        for msg in msgs:
            # This looks ridiculous but we don't care about the timezone really,
            # only the relative time from the local system clock.
            now = datetime.datetime.strptime(ctime(), "%a %b %d %H:%M:%S %Y")
            reltime = humanize.naturaltime(now - datetime.datetime.strptime(msg.timestamp, "%a %b %d %H:%M:%S %Y"))
            message_text = "%s // %s // %s" % (msg.sender, reltime, msg.text)
            if msg.public:
                self.msg(mask, target, message_text)
                self.db.delete(msg)
            else:
                self.bot.privmsg(mask.nick, message_text)
                self.db.delete(msg)
        self.db.commit()
def start(args):
    """
    Start the app

    :param args: input parameters
    :type args: Namespace
    """
    # --------------------------------------------------------------------------
    # Set database connection
    # --------------------------------------------------------------------------
    if args.DB_TYPE == "file":
        # File oriented DB
        if not args.FILE_DB_PATH:
            _path = os.path.join(os.getcwd(), "service_db")
        else:
            _path = os.path.join(args.FILE_DB_PATH, "service_db")
        backend = FileBackend(_path, {'serializer_class': 'pickle'})
        backend.create_index(Service, 'name', ephemeral=False, fields=["name"])
    else:
        import socket
        from pymongo import MongoClient, DESCENDING

        if not args.MONGODB_HOST:
            raise ValueError("You must specify a MongoDB host")

        _mongo_password = "" if not args.MONGODB_PASSWORD else args.MONGODB_PASSWORD
        _mongo_user = "" if not args.MONGODB_USER else args.MONGODB_USER
        _mongo_port = 27018 if not args.MONGODB_PORT else args.MONGODB_PORT
        _mongo_uri = 'mongodb://%(user)s%(sep)s%(password)s%(arr)s%(host)s' % dict(
            user=_mongo_user,
            sep=":" if _mongo_user else "",
            arr="@" if _mongo_user else "",
            password=_mongo_password,
            host=args.MONGODB_HOST
        )

        # PyMongo doesn't check socket timeout -> test manually
        try:
            sock = socket.socket()
            sock.connect((args.MONGODB_HOST, _mongo_port))
        except socket.error:
            raise ConnectionError("Can't connect to MongoDB host")

        # MongoDB
        c = MongoClient(_mongo_uri, port=_mongo_port, connectTimeoutMS=5000)
        # Get database and scheme
        db = c["pyregister" if not args.MONGODB_DB else args.MONGODB_DB]
        col = db["services" if not args.MONGODB_SCHEME else args.MONGODB_SCHEME]
        # Create index
        col.create_index([("name", DESCENDING)])
        # Create a new BlitzDB backend using a MongoDB database
        backend = MongoBackend(col)

    # # Link backend to web-server property
    # backend.autocommit = True

    # --------------------------------------------------------------------------
    # Routes
    # --------------------------------------------------------------------------
    # Catalog
    routes_catalog(app)
    app.config['APP_DB'] = backend

    # --------------------------------------------------------------------------
    # Enable doc?
    # --------------------------------------------------------------------------
    if args.NOD_DOC is False:
        Swagger(app)

    app.run(host=args.IP, port=args.PORT)
import os
import json
from blitzdb import Document, FileBackend
from lxml import etree as ET  # import added: iterparse with load_dtd needs lxml

class Inproceedings(Document):
    pass

class Proceedings(Document):
    pass

file_path = "input/dplp-2017-05-02.xml"
output_folder = 'output/'
db = FileBackend("./My-DB")

# Parses the xml file via iterparse and counts the number of inproceedings,
# proceedings and journals
def parsertest():
    inproceedings = 0
    proceedings = 0
    journals = 0
    for event, elem in ET.iterparse(file_path, events=("start", "end"), load_dtd=True):
        if elem.tag == "inproceedings":
            if event == "end" and len(list(elem)) > 0:
                inproceedings += 1
                elem.clear()
class TwitchNotifier(commands.Cog):
    def __init__(self, bot):
        self.bot: 'PixlBot' = bot
        self.config = bot.config['TwitchNotifier']
        self.backend = FileBackend('db')
        self.backend.autocommit = True
        self.bot.logger.info("Twitch notifier plugin ready")
        self.uuids = []
        self.online_uuids = []
        self.sslcontext = ssl.SSLContext()
        self.sslcontext.load_cert_chain(self.config['cert_path'],
                                        self.config['key_path'])
        self._twitch_init_()

    def _twitch_init_(self):
        self.bot.logger.info("Registering with Twitch...")
        self.twitch = Twitch(self.config['id'], self.config['secret'])
        self.twitch.authenticate_app([])
        self.bot.logger.info(
            f"Registering webhook endpoint {self.config['myurl']} ...")
        self.hook = TwitchWebHook(self.config['myurl'],
                                  self.config['id'],
                                  self.config['port'],
                                  ssl_context=self.sslcontext)
        self.hook.authenticate(self.twitch)
        self.bot.logger.info("Clearing all hook subscriptions...")
        self.hook.unsubscribe_all(self.twitch)  # Clear all subs on startup
        self.hook.start()
        self._register_all()

    def _login_to_id(self, name: str) -> Optional[str]:
        """Return the Twitch ID for a given login name, or None if the name
        couldn't be resolved."""
        try:
            res: dict = self.twitch.get_users(logins=[name])
        except TwitchBackendException as e:
            self.bot.logger.error(f"Backend error fetching user! {e}")
            return None
        # The API returns a dict with a 'data' list; checking len(res) (as the
        # original did) never detects an empty result, so check the list itself.
        if len(res['data']) == 0:
            return None
        else:
            return res['data'][0]['id']

    def _register_all(self):
        """Attempt to register stream_changed callbacks for all configured users."""
        self.bot.logger.info("Registering callbacks for all watched users..")
        users = self.backend.filter(TwitchWatchedUser,
                                    {'twitch_name': {"$exists": True}})
        if not users:
            self.bot.logger.info("No users to watch. No callbacks registered.")
        else:
            for u in users:
                self.bot.logger.info(f"Registering: {u['twitch_name']}")
                success, uuid = self.hook.subscribe_stream_changed(
                    u['twitch_id'], self._cb_stream_changed)
                if success and uuid:
                    self.uuids.append(uuid)
                    self.bot.logger.info(
                        f"{success}: registered subscription UUID: {uuid}")
                else:
                    self.bot.logger.error(
                        f"{success}: failed registering subscription: {uuid}")

    def _cb_stream_changed(self, uuid, data):
        """Callback for Twitch webhooks; fires on stream change events."""
        self.bot.logger.debug(f"Callback data for {uuid}: {data}")
        if data["type"] == "offline":
            if uuid in self.online_uuids:
                # Twitch sends the same webhook multiple times, so only the
                # first offline event for a live stream is acted on.
                self.online_uuids.remove(uuid)
                return
            else:
                self.bot.logger.debug(
                    f"Ignoring duplicate offline callback for {uuid}")
                return
        elif data["type"] == "live":
            if uuid in self.online_uuids:
                self.bot.logger.debug(
                    f"Ignoring duplicate live callback for {uuid}")
                return
            else:
                self.online_uuids.append(uuid)
        else:
            self.bot.logger.error(
                f"Got a callback type we can't handle: {data['type']}")
            return

        if uuid not in self.uuids:
            self.bot.logger.error(
                f"Got a callback for a UUID we're not tracking: {uuid}, my UUIDs: {self.uuids}")
            return
        try:
            item = self.backend.get(TwitchWatchedUser,
                                    {"twitch_id": data["user_id"]})
        except TwitchWatchedUser.DoesNotExist:
            self.bot.logger.error(
                f"Got a callback for a USER we're not tracking: {data['user_id']} -> {data['user_name']}")
            return
        channel: discord.TextChannel = self.bot.get_channel(
            item['notify_channel'])
        width = 640
        height = 360
        url = data['thumbnail_url'].format(width=width, height=height)
        tu = self.twitch.get_users(data['user_id'])['data'][0]
        self.bot.logger.debug(tu)
        embed = discord.Embed(
            title=f"Now streaming {data['game_name']}",
            description=data['title'],
            color=discord.Color.green(),
        )
        embed.set_image(url=url)
        embed.set_thumbnail(url=tu["profile_image_url"])
        embed.set_author(name=item["twitch_name"],
                         url=f"https://twitch.tv/{data['user_name']}")
        embed.add_field(name="Watch live at",
                        value=f"https://twitch.tv/{data['user_name']}")
        # channel.send is a coroutine and this callback isn't async, so
        # enqueue the send on the bot's event loop manually.
        self.bot.loop.create_task(channel.send(embed=embed))
        self.bot.logger.info(
            f"Successfully sent online notification for {data['user_id']}")

    @cog_ext.cog_subcommand(
        base="Twitchwatch",
        name="add_notification",
        description="Add a go live notification for Twitch",
        options=[twitch_name, notify_channel, notify_text],
        guild_ids=util.guilds)
    async def add_notification(self, ctx: SlashContext, twitch_name: str,
                               notify_channel: discord.TextChannel,
                               notify_text: str):
        twitch_id = self._login_to_id(twitch_name)
        try:
            self.backend.get(TwitchWatchedUser, {'twitch_name': twitch_name})
        except TwitchWatchedUser.DoesNotExist:
            pass
        except TwitchWatchedUser.MultipleDocumentsReturned:
            self.bot.logger.error(
                "Multiple users returned - database inconsistent???")
            return
        if not twitch_id:
            await ctx.send(embed=mkembed(
                'error',
                f"Unable to get the Twitch ID for the name {twitch_name}"))
            return
        await ctx.defer()  # This bit can take a minute.
        success, uuid = self.hook.subscribe_stream_changed(
            twitch_id, self._cb_stream_changed)
        if success and uuid:
            self.uuids.append(uuid)
            self.bot.logger.info(
                f"{success}: registered subscription UUID: {uuid}")
        else:
            self.bot.logger.error(
                f"{success}: failed registering subscription: {uuid}")
            await ctx.send("Bluh, couldn't register the webhook with twitch :(")
            return
        item = TwitchWatchedUser({
            'twitch_name': twitch_name,
            'twitch_id': twitch_id,
            'discord_name': ctx.author.id,
            'notify_channel': notify_channel.id,
            'notify_text': notify_text,
            'uuid': str(uuid)
        })
        self.bot.logger.debug(f"DB object dump: {item.__dict__}")
        self.backend.save(item)
        await ctx.send(embed=mkembed("done",
                                     f"Notification added for {twitch_name}",
                                     channel=notify_channel.name))

    @cog_ext.cog_subcommand(
        base="Twitchwatch",
        name="del_notification",
        description="Remove a go live notification for Twitch",
        options=[twitch_name],
        guild_ids=util.guilds)
    async def del_notification(self, ctx: SlashContext, twitch_name: str):
        try:
            item = self.backend.get(TwitchWatchedUser,
                                    {'twitch_name': twitch_name})
        except TwitchWatchedUser.DoesNotExist:
            await ctx.send(embed=mkembed(
                "error", f"No notification exists for {twitch_name}"))
            return
        self.hook.unsubscribe(item['uuid'])
        self.bot.logger.info(f"Removing watch {item['uuid']}: {twitch_name}")
        self.backend.delete(item)
        if item['uuid'] in self.uuids:
            self.uuids.remove(item['uuid'])
        await ctx.send(
            embed=mkembed("done", f"Notification for {twitch_name} removed."))
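# discord.py loads extensions through a module-level setup() hook; the
# original file presumably defines one outside this excerpt. A minimal sketch
# of the conventional shape (TwitchNotifier is the cog defined above):
def setup(bot):
    bot.add_cog(TwitchNotifier(bot))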
#!/usr/bin/python
# -*- coding: utf-8 -*-
import json

from blitzdb import Document, FileBackend

backend = FileBackend(".db")


class User(Document):
    pass


with open('startups.json') as f:
    data = json.load(f)

for startup in data:
    if 'twitter' in startup:
        u = User({'twitted': False,
                  'name': startup.get('title'),
                  'twitter': startup.get('twitter'),
                  'user': '******' + startup.get('twitter').split('/')[-1]})
        u.save(backend)

backend.commit()
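# A follow-up sketch, not part of the original script: blitzdb can filter the
# saved User documents back out, e.g. to list the startups whose 'twitted'
# flag (the field defined above) is still False.
not_yet_tweeted = backend.filter(User, {'twitted': False})
print(len(not_yet_tweeted), 'startups waiting to be tweeted')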
def __init__(self):
    self.backend = FileBackend(settings['database_location'])
def __init__(self): """Load backend.""" self.db = FileBackend(expanduser("~/.omesa/db"))
def __init__(self, db_path):
    self.db = FileBackend(db_path)
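# The three constructors above differ only in where the database path comes
# from: a global settings dict, a fixed per-user location, or a caller-supplied
# argument. The last form is the easiest to test, since a throwaway path can
# be injected; a minimal sketch, with Store standing in for any such class:
import tempfile

from blitzdb import FileBackend


class Store:
    def __init__(self, db_path):
        self.db = FileBackend(db_path)


store = Store(tempfile.mkdtemp())  # isolated, disposable database for a test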
import configparser
import json

from blitzdb import FileBackend

config_section = 'Project'

config = configparser.RawConfigParser()
config.read('miner.cfg')
project_name = config.get(config_section, 'name')

db_dir = '../projects_data/db'
array_path = '../projects_data/arrays.js'

# Connect to the database
db = FileBackend('{0}/{1}'.format(db_dir, project_name))
print('Total: {0}'.format(len(db)))

items = db.all()
# NB: the dedup loop below walks items, so sorted_items is computed but unused
sorted_items = sorted(items, key=lambda item: (item['long'], item['lat']))

ids = []
unique_items = []
for item in items:
    id_ = item['id']
    if id_ not in ids:
        unique_items.append(item)
        ids.append(id_)

# Generate the array
json_array = map(lambda item: [item['lat'], item['long'], 1], unique_items)
with open(array_path, 'w') as outfile:
    json.dump(list(json_array), outfile)  # write the [lat, long, weight] triples
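# The id-based deduplication above is quadratic because of the list membership
# test. An equivalent sketch with the same first-occurrence-wins behaviour,
# using a dict keyed by id, runs in linear time:
unique_by_id = {}
for item in items:
    unique_by_id.setdefault(item['id'], item)
unique_items = list(unique_by_id.values())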
class eventmanager:
    def __init__(self, trigger_id, jsonfilelist, triggerdir, datadir, real,
                 trigger_path):
        #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
        tstart = time.time()
        if real:
            self.backend = FileBackend("./realdb")
        else:
            self.backend = FileBackend("./testdb")
        try:
            thisevent = self.backend.get(Trigger, {'id': trigger_id})
            print 'Found this event in desgw database...'
        except Trigger.DoesNotExist:
            thisevent = Trigger({
                'id': trigger_id,
                'jsonfilelist': jsonfilelist,
                'triggerpath': triggerdir,
                'mapspath': datadir,
                'jobids': [
                    (0, 'jsonfile_corresponding_to_jobid.json'),
                ],
            })
            print 'Database entry created!'
        self.trigger_id = trigger_id
        self.trigger_path = trigger_path
        self.backend.save(thisevent)
        self.backend.commit()

        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.config = yaml.safe_load(f)
        self.filterobslist = np.array(self.config['exposure_filter'],
                                      dtype='str')
        self.strategydict = {}
        for f in np.unique(self.filterobslist):
            self.strategydict[f] = len(
                self.filterobslist[self.filterobslist == f])

        self.connection = ea.connect(DATABASE)
        self.cursor = self.connection.cursor()

        self.jsonfilelist = jsonfilelist
        print self.jsonfilelist
        if hardjson:
            self.jsonfilelist = hj

        self.trigger_id = trigger_id
        self.datadir = datadir
        self.triggerdir = triggerdir
        self.processingdir = os.path.join(self.triggerdir, 'PROCESSING')
        if not os.path.exists(self.processingdir):
            os.makedirs(self.processingdir)
        dire = './processing/' + trigger_id + '/'
        if not os.path.exists(dire):
            os.makedirs(dire)

        with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
            self.strategy = yaml.safe_load(f)
        with open("jobmanager.yaml", "r") as g:
            self.jmconfig = yaml.safe_load(g)

        q1 = "select expnum,nite,mjd_obs,telra,teldec,band,exptime,propid,obstype,object from exposure where " \
             "nite>20130828 and nite<20150101 and expnum<300000 and obstype='object' order by expnum"  # y1 images
        self.connection.query_and_save(q1, './processing/exposuresY1.tab')
        q2 = "select expnum,nite,mjd_obs,radeg,decdeg,band,exptime,propid,obstype,object from prod.exposure where " \
             "nite>20150901 and obstype='object' order by expnum"  # y2 and later
        self.connection.query_and_save(q2, './processing/exposuresCurrent.tab')
        os.system('cat ./processing/exposuresY1.tab ./processing/exposuresCurrent.tab > ./processing/exposures.list')

        self.submit_all_jsons_for_sejobs()  # preps all DES images that already exist
        tfin = time.time()
        print 'TOTAL SE JOBS TIME', tfin - tstart
        #sys.exit()
        self.monitor_images_from_mountain()  # A loop that waits for images off mountain and submits them for processing

    def submit_all_jsons_for_sejobs(self):
        obsStartTime = self.getDatetimeOfFirstJson(
            self.jsonfilelist[0])  # THIS IS A DATETIME OBJ
        currentTime = dt.utcnow()
        print '***** The current time is UTC', currentTime, '*****'
        delt = obsStartTime - currentTime
        timedelta = td(days=delt.days,
                       seconds=delt.seconds).total_seconds() / 3600.
        print '***** The time delta is ', timedelta, 'hours *****'
        # if timedelta > np.pi:
        sejob_timecushion = self.jmconfig["sejob_timecushion"]
        if timedelta > sejob_timecushion:
            for jsonfile in self.jsonfilelist:
                print 'json', jsonfile
                try:
                    # check if this json file is already in the submitted
                    # preprocessing database
                    thisjson = self.backend.get(
                        preprocessing,
                        {'jsonfilename': os.path.join(self.datadir, jsonfile)})
                    print 'Found this json in desgw database...'
                except preprocessing.DoesNotExist:
                    # do submission and then add to database
                    print 'cd diffimg-proc; ./SEMaker_RADEC.sh ' + os.path.join(
                        self.datadir, jsonfile)
                    os.chdir("diffimg-proc")
                    out = os.popen('./SEMaker_RADEC.sh ' +
                                   os.path.join(self.datadir, jsonfile)).read()
                    #out = os.popen('ls').read()
                    os.chdir("..")
                    print out
                    if 'non-zero exit status' in out:
                        dt.sendEmailSubject(
                            self.trigger_id,
                            'Error in creating dag for .json: ' + out)
                    else:
                        for o in out.split('\n'):
                            if 'file://' in o:
                                dagfile = o.split('/')[-1]
                                self.dagfile = os.path.join(
                                    self.processingdir,
                                    jsonfile.split('/')[-1].split('.')[0] +
                                    '_' + dagfile)
                                os.system('cp diffimg-proc/' + dagfile + ' ' +
                                          self.dagfile)
                                jobsubmitline = copy(o)
                        print self.dagfile
                        print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile
                        out = os.popen(
                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                            'jobsub_submit_dag -G des --role=DESGW file://' +
                            self.dagfile).read()
                        print out
                        if 'non-zero exit status' in out:
                            dt.sendEmailSubject(
                                self.trigger_id,
                                'Error in submitting .json for preprocessing: ' + out)
                        else:
                            if doimmediateremove:
                                for o in out.split('\n'):
                                    if 'Use job id' in o:
                                        jobid = o.split()[3]
                                        out = os.popen(
                                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                            'jobsub_rm --jobid=' + jobid +
                                            ' --group=des --role=DESGW').read()
                                        print out
                            thisjson = preprocessing({
                                'jsonfilename': os.path.join(self.datadir, jsonfile),
                                'jobid': jobid,
                                'dagfile': self.dagfile,
                                'status': 'Submitted'
                            })
                            self.backend.save(thisjson)
                            self.backend.commit()
                            print 'saved'
                            #raw_input()
                #runProcessingIfNotAlready(image, self.backend)
        #sys.exit()
        print 'Finished submitting minidagmaker with all json files'
        #sys.exit()
        #raw_input()

    # Loop queries for images from mountain and submits them.
    # Need to add complexity that monitors filter strategy and waits for
    # entire groups of images to be co-added.
    def monitor_images_from_mountain(self):
        # NEED TO ADD COADD LOGIC USING STRATEGY FROM CONFIG
        exposure_filter = np.array(self.strategy['exposure_filter'],
                                   dtype='str')
        uniquefilts = np.unique(self.strategy['exposure_filter'])
        filterstrategy = {}
        for f in uniquefilts:
            filterstrategy[f] = len(exposure_filter[exposure_filter == f])
        print 'filter strategy dictionary ', filterstrategy
        starttime = time.time()
        pptime = time.time()
        keepgoing = True
        index = -1
        submission_counter = 0
        maxsub = 10000
        postprocessingtime = 10  # every half hour fire off Tim's code for post-processing
        while keepgoing:
            #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
            index += 1
            newfireds = []
            if time.time() - starttime > 50000:
                keepgoing = False
                continue
            ofile = open(os.path.join(self.triggerdir, 'latestquery.txt'), 'w')
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n")
            ofile.write("EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT\n")
            ofile.write(
                "--------------------------------------------------------------------------------------------------\n")
            print "--------------------------------------------------------------------------------------------------"
            print "EXPNUM\tNITE\tBAND\tEXPTIME\tRADEG\t DECDEG\tPROPID\tOBJECT"
            print "--------------------------------------------------------------------------------------------------"
            # Note the leading space before "and": the original concatenation
            # produced "propid=...and obstype", which is invalid SQL.
            query = "SELECT expnum,nite,band,exptime,radeg,decdeg,propid,object FROM prod.exposure@desoper WHERE " \
                    "expnum > 475900 and propid=" + propid + " and obstype='object' ORDER BY expnum"  # latest
            self.cursor.execute(query)
            for s in self.cursor:
                ofile.write(
                    str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" +
                    str(s[3]) + "\t" + str(s[4]) + "\t" + str(s[5]) + "\t" +
                    str(s[6]) + "\t" + str(s[7]) + '\n')
                print str(s[0]) + "\t" + str(s[1]) + "\t" + str(s[2]) + "\t" + \
                    str(s[3]) + "\t" + str(s[4]) + "\t" + str(s[5]) + "\t" + \
                    str(s[6]) + "\t" + str(s[7])
                if 'DESGW' not in str(s[7]):
                    continue
                #print 'exptime', float(s[3])
                if not float(s[3]) > 29.:
                    continue  # exposure must be longer than 30 seconds
                expnum = str(s[0])
                nite = str(s[1])
                band = str(s[2])
                exptime = str(s[3])

                # FIRST CHECK HERE THAT THE EXPOSURE NUMBER ISNT ALREADY IN THE DATABASE
                try:
                    exposure = self.backend.get(exposures, {'expnum': expnum})
                    print 'Found this exposure in desgw database...'
                    # print exposure.attributes
                    # if expnum == 506432:
                    #     sys.exit()
                    # self.backend.delete(exposure)
                    # self.backend.commit()
                    # exposure = self.backend.get(exposures, {'expnum': expnum})
                except exposures.DoesNotExist:
                    # add to database
                    #runProcessingIfNotAlready(image, self.backend)
                    print './diffimg-proc/getTiling.sh ' + expnum
                    res = os.popen('./diffimg-proc/getTiling.sh ' +
                                   expnum).readlines()
                    print res
                    #sys.exit()
                    field, tiling = res[-2], res[-1]
                    #print 'field_tiling', field_tiling
                    hexnite = field.strip() + '_' + tiling.strip() + '_' + str(nite)
                    #print 'hexnite', hexnite
                    #sys.exit()
                    print 'Creating exposure in database...', hexnite
                    #raw_input()
                    if '--' in hexnite:
                        print 'found bad example'
                        #raw_input()
                    exposure = exposures({
                        'expnum': expnum,
                        'nite': nite,
                        'field': field,
                        'tiling': tiling,
                        'hexnite': hexnite,
                        'band': band,
                        'jobid': np.nan,
                        'exptime': exptime,
                        'status': 'Awaiting additional exposures',
                        'triggerid': self.trigger_id,
                        'object': str(s[7])
                    })
                    self.backend.save(exposure)
                    self.backend.commit()

                hexnite = exposure.hexnite
                print 'hexnite', hexnite
                if '--' in hexnite:
                    print exposure.attributes
                    #raw_input()
                #sys.exit()
                try:
                    hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #self.backend.delete(hex)
                    #self.backend.commit()
                    #hex = self.backend.get(hexes, {'hexnite': hexnite})
                    #print 'Found this hex in desgw database...'
                except hexes.DoesNotExist:
                    hex = hexes({
                        'hexnite': hexnite,
                        'strategy': self.strategy['exposure_filter'],
                        'num_target_g': len(exposure_filter[exposure_filter == 'g']),
                        'num_target_r': len(exposure_filter[exposure_filter == 'r']),
                        'num_target_i': len(exposure_filter[exposure_filter == 'i']),
                        'num_target_z': len(exposure_filter[exposure_filter == 'z']),
                        'observed_g': [],
                        'observed_r': [],
                        'observed_i': [],
                        'observed_z': [],
                        'exposures': [],
                        'status': 'Awaiting additional exposures',
                        'dagfile': None,
                    })
                    self.backend.save(hex)
                    self.backend.commit()
                    print hex.attributes
                    print 'created new hex'
                    #raw_input()

                if hex.status == 'Submitted for processing':
                    print 'This hex has already been submitted for processing'
                    continue
                # if '--' in hexnite:
                #     print hex.attributes
                #     raw_input()
                # if hex.status == 'Submitted for processing':
                #     print 'Hex ', hexnite, ' band', band, 'exposure', expnum, 'has already been submitted for processing'
                #     #raw_input()
                #     continue

                if band == 'g':
                    if expnum not in hex.observed_g:
                        hex.observed_g.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'r':
                    if expnum not in hex.observed_r:
                        hex.observed_r.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'i':
                    if expnum not in hex.observed_i:
                        hex.observed_i.append(expnum)
                        hex.exposures.append(expnum)
                if band == 'z':
                    if expnum not in hex.observed_z:
                        hex.observed_z.append(expnum)
                        hex.exposures.append(expnum)
                self.backend.save(hex)
                self.backend.commit()
                print hex.attributes

                didwork = False
                if len(hex.observed_g) == hex.num_target_g:
                    if len(hex.observed_r) == hex.num_target_r:
                        if len(hex.observed_i) == hex.num_target_i:
                            if len(hex.observed_z) == hex.num_target_z:
                                print 'All exposures in strategy satisfied! '
                                #raw_input()
                                submissionPassed = True
                                for target, exps in zip(
                                        [hex.num_target_g, hex.num_target_r,
                                         hex.num_target_i, hex.num_target_z],
                                        [hex.observed_g, hex.observed_r,
                                         hex.observed_i, hex.observed_z]):
                                    if target == 0:
                                        continue
                                    exposurestring = ''
                                    logstring = ''
                                    for ex in exps:
                                        exposurestring += ex + ' '
                                        logstring += ex + '_'
                                    print 'cd diffimg-proc; source DAGMaker.sh ' + exposurestring
                                    os.chdir("diffimg-proc")
                                    #out = os.popen('ls').read()
                                    out = os.popen('./DAGMaker.sh ' + exposurestring).read()
                                    os.chdir("..")
                                    print out
                                    f = open(
                                        os.path.join(self.processingdir,
                                                     logstring + hexnite + '.log'), 'w')
                                    f.write(out)
                                    f.close()
                                    tt = time.time()
                                    if 'To submit this DAG do' not in out:
                                        dt.sendEmailSubject(
                                            self.trigger_id,
                                            'Error in creating dag for desgw hex: ' + out)
                                        submissionPassed = False
                                    else:
                                        for o in out.split('\n'):
                                            if 'file://' in o:
                                                dagfile = o.split('/')[-1]
                                                self.dagfile = os.path.join(
                                                    self.processingdir,
                                                    logstring + 'job.dag')
                                                os.system('cp diffimg-proc/' + dagfile + ' ' + self.dagfile)
                                        print self.dagfile
                                        print 'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile
                                        out = os.popen(
                                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                            'jobsub_submit_dag -G des --role=DESGW file://' + self.dagfile).read()
                                        print out
                                        if 'non-zero exit status' in out:
                                            dt.sendEmailSubject(
                                                self.trigger_id,
                                                'Error in submitting hex dag for processing: ' + out)
                                            submissionPassed = False
                                        else:
                                            if doimmediateremove:
                                                for o in out.split('\n'):
                                                    if 'Use job id' in o:
                                                        jobid = o.split()[3]
                                                        out = os.popen(
                                                            'source /cvmfs/fermilab.opensciencegrid.org/products/common/etc/setup; setup jobsub_client; '
                                                            'jobsub_rm --jobid=' + jobid + ' --group=des --role=DESGW').read()
                                                        print out
                                    ttt = time.time()
                                    #print 'TOTAL JOBSUB FOR A SINGLE DESGW IMAGE', ttt - tt
                                    #sys.exit()
                                    #raw_input()
                                if submissionPassed:
                                    hex.status = 'Submitted for processing'
                                    hex.dagfile = self.dagfile
                                    self.backend.save(hex)
                                    self.backend.commit()
                                    for expn in hex.exposures:
                                        print expn, 'updated in database to Submitted For Processing'
                                        exp = self.backend.get(exposures, {'expnum': expn})
                                        exp.status = 'Submitted for processing'
                                        self.backend.save(exp)
                                        self.backend.commit()
                                    didwork = True
                                    print 'didwork', didwork
                                    print 'dagfile', self.dagfile
                                    #raw_input()
                if not didwork:
                    print 'Could not find all images in strategy for this hex... Added hex', hexnite, ' to database ' \
                        'and will continue waiting...'
                    #raw_input()
                # HERE YOU NEED TO ADD TO HEXSTRATEGYDICT DATABASE

            if time.time() - pptime > postprocessingtime:  # happens every 30 minutes or so...
                pptime = time.time()
                print '***** Firing post processing script *****'
                #sys.exit()
                self.submit_post_processing()
            #sys.exit()
            print 'Waiting 10s to check from mountain...'
            #sys.exit()
            time.sleep(10)  # looping over checking the mountain top

        # cfiles = os.listdir(os.path.join(trigger_path, trigger_id, 'candidates'))
        # for f in cfiles:
        #     if f.split('.')[-1] == 'npz':
        #         cp.makeNewPage(f)

    def submit_post_processing(self):
        firedlist = open('./processing/firedlist.txt', 'r')
        fl = firedlist.readlines()
        firedlist.close()
        print fl
        # NB: kept from the original - the file contents read above are
        # immediately overridden by this hardcoded test list.
        fl = ['475914', '475915', '475916', '482859', '482860', '482861']
        expnumlist = ''
        for f in fl:
            expnumlist += f.strip() + ' '
        print 'FIRING TIMs CODE'
        gwpostdir = os.environ['GWPOST_DIR']
        print 'source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; ' \
            + 'python ' + os.path.join(gwpostdir, 'postproc.py') \
            + ' --expnums ' + expnumlist \
            + ' --outputdir ' + os.path.join(self.trigger_path, self.trigger_id, 'candidates') \
            + ' --triggerid ' + self.trigger_id + ' --season 46 --ups True'
        # os.system('source ' + os.path.join(gwpostdir, 'diffimg_setup.sh') + '; '
        #           'python ' + os.path.join(gwpostdir, 'postproc.py')
        #           + ' --expnums ' + expnumlist
        #           + ' --outputdir ' + os.path.join(trigger_path, trigger_id, 'candidates')
        #           + ' --triggerid ' + trigger_id + ' --season 46 --ups True')
        #pid = os.spawnlp(os.P_WAIT, "source", os.path.join(gwpostdir, 'diffimg_setup.sh'))
        args = ['ssh -t [email protected] "source ' + os.path.join(gwpostdir, 'mi_setup.sh') + '; '
                + 'yes | python ' + os.path.join(gwpostdir, 'postproc.py')
                + ' --expnums ' + expnumlist
                + ' --outputdir ' + os.path.join(self.trigger_path, self.trigger_id, 'candidates')
                + ' --triggerid ' + self.trigger_id + ' --season 46 --ups True"']
        print args
        #p = subprocess.Popen(args, stdout=PIPE, stderr=PIPE, shell=True)
        #print p.communicate()
        #p = subprocess.Popen(args, stdin=None, stdout=None, stderr=None, close_fds=True, shell=True)
        return

    def getDatetimeOfFirstJson(self, jsonstring):
        js = jsonstring.split('UTC')[1]  # -2015-12-27-3:2:00.json
        #date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S.json')
        date_object = dt.strptime(js, '-%Y-%m-%d-%H_%M_%S-test.json')
        print '***** Datetime of first observation UTC', date_object, '*****'
        return date_object

    def sortHexes(self):
        pass
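# The get / DoesNotExist / save / commit pattern recurs above for Trigger,
# preprocessing, exposures, and hexes documents alike. A small get-or-create
# helper (a sketch, not part of the original class; works with any blitzdb
# FileBackend and Document subclass) would collapse those try/except blocks:
def get_or_create(backend, doc_class, query, defaults):
    try:
        return backend.get(doc_class, query), False
    except doc_class.DoesNotExist:
        doc = doc_class(dict(query, **defaults))
        backend.save(doc)
        backend.commit()
        return doc, True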
def __init__(self):
    self.jav_db = FileBackend('jav_manager.db')
def __init__(self, bot):
    self.bot = bot
    self.cfg = PluginConfig(self)
    self.db = FileBackend(self.cfg.get('main_db'))
    mtt = MessageRetargeter(bot)
    self.msg = mtt.msg
import os

from blitzdb import Document, FileBackend


class Actor(Document):  # referenced below; definition restored for completeness
    pass


class Movie(Document):
    pass


the_godfather = Movie({'name': 'The Godfather', 'year': 1972, 'pk': 1})
marlon_brando = Actor({'name': 'Marlon Brando', 'pk': 2})
al_pacino = Actor({'name': 'Al Pacino', 'pk': 3})

print("Backend")
loc = "./my-db"
backend = FileBackend(loc)
print("Created Backend", loc)

the_godfather.save(backend)
marlon_brando.save(backend)
al_pacino.save(backend)
print("Backend Saved and committed:", os.path.realpath(os.curdir))

# print(backend.get(Movie, {'pk': 1}))
# or...
the_godfather = backend.get(Movie, {'name': 'The Godfather'})
print("the_godfather", the_godfather)

# cast assignment completed following blitzdb's documented example
the_godfather.cast = {'Don Vito Corleone': marlon_brando,
                      'Michael Corleone': al_pacino}
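# Continuing the example: the updated document has to be saved and committed
# for the cast to persist, and can then be read back. This mirrors blitzdb's
# documented usage; the embedded Actor documents are stored as references.
the_godfather.save(backend)
backend.commit()
print("cast:", backend.get(Movie, {'pk': 1}).cast)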
def __init__(self, trigger_id, jsonfilelist, triggerdir, datadir, real,
             trigger_path):
    #os.system('kinit -k -t /var/keytab/desgw.keytab desgw/des/[email protected]')
    tstart = time.time()
    if real:
        self.backend = FileBackend("./realdb")
    else:
        self.backend = FileBackend("./testdb")
    try:
        self.thisevent = self.backend.get(Trigger, {'id': trigger_id})
        print 'Found this event in desgw database...'
    except Trigger.DoesNotExist:
        self.thisevent = Trigger({
            'id': trigger_id,
            'jsonfilelist': jsonfilelist,
            'triggerpath': triggerdir,
            'mapspath': datadir,
            'jobids': [
                (0, 'jsonfile_corresponding_to_jobid.json'),
            ],
            'postprocint': 0
        })
        print 'Database entry created!'
    self.trigger_id = trigger_id
    self.trigger_path = trigger_path
    self.backend.save(self.thisevent)
    self.backend.commit()

    with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
        self.config = yaml.safe_load(f)
    self.filterobslist = np.array(self.config['exposure_filter'], dtype='str')
    self.strategydict = {}
    for f in np.unique(self.filterobslist):
        self.strategydict[f] = len(self.filterobslist[self.filterobslist == f])

    self.connection = ea.connect(DATABASE)
    self.cursor = self.connection.cursor()

    self.jsonfilelist = jsonfilelist
    print self.jsonfilelist
    if hardjson:
        self.jsonfilelist = hj

    #self.pp = subprocess.Popen('echo starting', stdout=PIPE, stderr=PIPE, shell=True)
    self.trigger_id = trigger_id
    self.datadir = datadir
    self.triggerdir = triggerdir
    self.processingdir = os.path.join(self.triggerdir, 'PROCESSING')
    if not os.path.exists(self.processingdir):
        os.makedirs(self.processingdir)
    dire = './processing/' + trigger_id + '/'
    if not os.path.exists(dire):
        os.makedirs(dire)

    with open(os.path.join(triggerdir, "strategy.yaml"), "r") as f:
        self.strategy = yaml.safe_load(f)
    with open("jobmanager.yaml", "r") as g:
        self.jmconfig = yaml.safe_load(g)

    q1 = "select expnum,nite,mjd_obs,telra,teldec,band,exptime,propid,obstype,object from exposure where " \
         "nite>20130828 and nite<20150101 and expnum<300000 and obstype='object' order by expnum"  # y1 images
    self.connection.query_and_save(q1, './processing/exposuresY1.tab')
    q2 = "select expnum,nite,mjd_obs,radeg,decdeg,band,exptime,propid,obstype,object from prod.exposure where " \
         "nite>20150901 and obstype='object' order by expnum"  # y2 and later
    self.connection.query_and_save(q2, './processing/exposuresCurrent.tab')
    os.system('cat ./processing/exposuresY1.tab ./processing/exposuresCurrent.tab > ./processing/exposures.list')

    #self.submit_post_processing()
    self.submit_all_jsons_for_sejobs()  # preps all DES images that already exist
    tfin = time.time()
    print 'TOTAL SE JOBS TIME', tfin - tstart
    #sys.exit()
    self.monitor_images_from_mountain()  # A loop that waits for images off mountain and submits them for processing