def addentity():
    """Add a new entity from the 'entity' JSON query parameter.

    Admin-only endpoint. Expected payload shape:

        entity = {
            'name': 'Brighton, NY',
            'description': 'Town of Brighton, NY',
            'website': 'http://www.townofbrighton.org/',
            'creationdatetime': '2014-02-23 21:24:26',
        }

    Returns a JSON string: {"success": true|false}.
    """
    if not ADMIN:
        return ''
    success = True
    try:
        # A missing or malformed 'entity' arg means bad input, not a crash.
        try:
            entity = json.loads(request.args['entity'])
        except Exception:
            entity = None
        if entity is not None:
            access = Access()
            access.addentity(entity)
        else:
            success = False
    except Exception:
        # Access-layer failure: report failure rather than a 500.
        success = False
    return json.dumps({'success': success})
def __init__(self, DEBUG=False):
    """Set up the debug flag, DB access layer, and thread-control state."""
    self.DEBUG = DEBUG
    self.access = Access()
    self._interval = 1           # seconds between polls
    self._stop = threading.Event()
def addorg():
    """Add a new organisation from the 'org' JSON query parameter.

    Admin-only endpoint. Expected payload shape:

        org = {
            'name': 'Brighton Town Board',
            'description': 'Brighton, NY Town Board',
            'matchs': ['town board', 'brighton'],
            'entityid': ObjectID(' ... '),
            'creationdatetime': '2014-03-14 22:01:30',
        }

    Returns a JSON string: {"success": true|false}.
    """
    if not ADMIN:
        return ''
    success = True
    try:
        # A missing or malformed 'org' arg means bad input, not a crash.
        try:
            org = json.loads(request.args['org'])
        except Exception:
            org = None
        if org is not None:
            access = Access()
            access.addorg(org)
        else:
            success = False
    except Exception:
        # Access-layer failure: report failure rather than a 500.
        success = False
    return json.dumps({'success': success})
def __init__(self, downloaddir='./downloads', entityid=None, DEBUG=False):
    """Set up the converter: un-PDF helper, worker id, and DB access layer.

    downloaddir -- directory scraped documents are read from
    entityid    -- if given, presumably restricts conversion to one entity;
                   only logged here -- TODO confirm against the rest of class
    DEBUG       -- enable verbose progress printing
    """
    #threading.Thread.__init__(self)
    self._stop = threading.Event()    # signal used to stop the worker loop
    self._interval = 1                # polling interval -- units unconfirmed
    self.downloaddir = downloaddir
    self.entityid = entityid
    self.DEBUG = DEBUG
    if self.DEBUG:
        if self.entityid != None:
            print "Using conditional EntityID."
    self.unpdfer = Unpdfer()          # PDF -> text conversion helper
    #self.searchapi = Search()
    self.myid = str(uuid.uuid4())     # unique id for this worker instance
    self.dbaccess = Access(DEBUG=True)
    # setup access layer
    #self.myid = str(uuid.uuid4())
    #self.busaccess = BusAccess(myid=self.myid,DEBUG=True)
    #self.busaccess.setcallback(self._callback)
    # start seperate thread with listener in it
    #self.listenthread = threading.Thread(target=self.busaccess.listen)
    #self.listenthread.start()
    if self.DEBUG:
        print "Converter INIT completed successfully."
class TestRAAM(unittest.TestCase):
    """Tests for Access.raam() on a grid with a single demand location."""

    def setUp(self):
        # 5x5 supply grid; demand is one randomly sampled cell from it.
        n = 5
        supply_grid = tu.create_nxn_grid(n)
        demand_grid = supply_grid.sample(1)
        cost_matrix = tu.create_cost_matrix(supply_grid, 'euclidean')
        # The same matrix serves as both direct and neighbor cost.
        self.model = Access(demand_df = demand_grid, demand_index = 'id',
                            demand_value = 'value',
                            supply_df = supply_grid, supply_index = 'id',
                            supply_value = 'value',
                            cost_df = cost_matrix, cost_origin = 'origin',
                            cost_dest = 'dest', cost_name = 'cost',
                            neighbor_cost_df = cost_matrix,
                            neighbor_cost_origin = 'origin',
                            neighbor_cost_dest = 'dest',
                            neighbor_cost_name = 'cost')

    def test_raam_single_demand_location_equals_sum_of_supply(self):
        # With one demand point, all supply should accrue to it.
        self.model.raam()
        expected = self.model.supply_df.value.sum()
        actual = self.model.access_df['raam_value'].iloc[0]
        self.assertEqual(expected, actual)

    def test_raam_single_demand_location_equals_sum_of_supply_initial_step_int(self):
        self.model.raam(initial_step=1)
        expected = self.model.supply_df.value.sum()
        actual = self.model.access_df['raam_value'].iloc[0]
        self.assertEqual(expected, actual)

    def test_raam_single_demand_location_equals_sum_of_supply_min_step(self):
        self.model.raam(min_step=1, verbose=True)
        expected = self.model.supply_df.value.sum()
        actual = self.model.access_df['raam_value'].iloc[0]
        self.assertEqual(expected, actual)

    def test_raam_run_again_and_test_overwrite(self):
        # Running twice must overwrite, not accumulate, the result column.
        self.model.raam()
        self.model.raam()
        expected = self.model.supply_df.value.sum()
        actual = self.model.access_df['raam_value'].iloc[0]
        self.assertEqual(expected, actual)

    def test_raam_single_demand_location_equals_sum_of_supply_normalize(self):
        # 25 matches the 5x5 grid's total supply -- TODO confirm the
        # normalisation semantics against the access library docs.
        self.model.raam(normalize=True)
        actual = self.model.access_df['raam_value'].iloc[0]
        self.assertEqual(actual, 25)
def addentity():
    """Add a new entity from the 'entity' JSON query parameter.

    Admin-only endpoint. Expected payload shape:

        entity = {
            'name': 'Brighton, NY',
            'description': 'Town of Brighton, NY',
            'website': 'http://www.townofbrighton.org/',
            'creationdatetime': '2014-02-23 21:24:26',
        }

    Returns a JSON string: {"success": true|false}.
    """
    if not ADMIN:
        return ''
    success = True
    try:
        # A missing or malformed 'entity' arg means bad input, not a crash.
        try:
            entity = json.loads(request.args['entity'])
        except Exception:
            entity = None
        if entity is not None:
            access = Access()
            access.addentity(entity)
        else:
            success = False
    except Exception:
        # Access-layer failure: report failure rather than a 500.
        success = False
    return json.dumps({'success': success})
def test_weighted_catchment_with_gravity_weights(self):
    """Known-value regression test for weighted_catchment with gravity weights."""
    n = 5
    supply_grid = tu.create_nxn_grid(n)
    demand_grid = supply_grid
    cost_matrix = tu.create_cost_matrix(supply_grid, 'euclidean')
    self.model = Access(demand_df=demand_grid, demand_index='id',
                        demand_value='value', supply_df=supply_grid,
                        supply_index='id', supply_value='value',
                        cost_df=cost_matrix, cost_origin='origin',
                        cost_dest='dest', cost_name='cost')
    gravity = weights.gravity(scale=60, alpha=1)
    self.model.weighted_catchment(name='gravity', weight_fn=gravity)
    # Pinned regression baseline -- presumably captured from a known-good
    # run; re-derive before changing.
    ids = [1, 5, 13, 19, 24]
    expected_vals = [
        1.322340210,
        1.322340210,
        0.780985109,
        0.925540119,
        1.133733026,
    ]
    for id, expected in zip(ids, expected_vals):
        actual = self.model.access_df.gravity_value.loc[id]
        self.assertAlmostEqual(actual, expected)
def searches():
    """Return all stored searches as a JSON array ('[]' on any failure)."""
    try:
        access = Access()
        results = access.getsearches()
    except Exception:
        # Best-effort endpoint: degrade to an empty list rather than a 500.
        results = []
    return json.dumps(results)
def statuses():
    """Return all statuses as a JSON array with ObjectIds stringified."""
    try:
        access = Access()
        statuses = access.getstatuses()
        # Mongo ObjectId is not JSON-serialisable; convert in place.
        for status in statuses:
            status['_id'] = str(status['_id'])
    except Exception:
        statuses = []
    return json.dumps(statuses)
def statuses():
    """Return all statuses as a JSON array with ObjectIds stringified."""
    try:
        access = Access()
        statuses = access.getstatuses()
        # Mongo ObjectId is not JSON-serialisable; convert in place.
        for status in statuses:
            status['_id'] = str(status['_id'])
    except Exception:
        statuses = []
    return json.dumps(statuses)
def getdoc():
    """Look up one document by the 'docurl' query arg; '{}' on any failure."""
    try:
        docurl = request.args['docurl']
        access = Access()
        doc = access.getdoc(docurl)
        if doc is None:
            doc = {}
    except Exception:
        doc = {}
    return json.dumps(doc)
def __init__(self, address='localhost', exchange='monroeminutes', DEBUG=False):
    """Create the underlying Dispatcher, the DB access layer, and an empty URL list."""
    self.urls = []
    self.access = Access()
    self.dispatcher = Dispatcher(address=address, exchange=exchange,
                                 DEBUG=DEBUG)
def getdocs():
    """Return documents for the 'entityid' query arg as a JSON array.

    Example document shape:

        doc = {
            "scrapedatetime": "2014-02-22 03:25:49",
            "docfilename": ".../4028_1393057549.41.download",
            "being_processed": false,
            "being_converted": true,
            "pdfhash": "",
            "docurl": "http://www.townofbrighton.org/DocumentCenter/View/4028",
            "linktext": "View here",
            "converted": false,
            "pdftext": "",
            "_id": "53085f0ea70f9e0e63aeb15a",
            "urldata": { ... scraper run metadata ... },
            "processed": false
        }

    Returns '[]' when entityid is missing or any lookup step fails.
    """
    try:
        access = Access()
        try:
            entityid = request.args['entityid']
        except Exception:
            entityid = ''
        if entityid != '':
            docs = access.getdocsbyentityid(entityid)
            # ObjectId/datetime are not JSON-serialisable; stringify in place.
            for doc in docs:
                doc['_id'] = str(doc['_id'])
                doc['created'] = str(doc['created'])
        else:
            docs = []
    except Exception:
        docs = []
    return json.dumps(docs)
def get(self, mode=""):
    """Twitter OAuth web handler; dispatches on *mode*.

    Modes: "login" -> redirect to Twitter auth; "verify" -> store the
    returned access token; "timeline"/"query" -> read the stored feed;
    anything else -> show a login link.
    """
    # key for the app
    # SECURITY NOTE(review): app credentials are hard-coded in source;
    # they belong in configuration/environment instead.
    CONSUMER_KEY = "fgPzVQVDu8pUn8tsz8ixog"
    CONSUMER_SECRET = "MMEEaWtsEIY2CzYs8CXcgX981zext7y4kDv1Hvjfw"
    # This is where the user is sent to after they have
    # authenticated with Twitter.
    callback_url = "%s/verify" % self.request.host_url
    client = oauth.TwitterClient(CONSUMER_KEY, CONSUMER_SECRET, callback_url)
    if mode == "login":
        return self.redirect(client.get_authorization_url())
    elif mode == "verify":
        auth_token = self.request.get("oauth_token")
        auth_verifier = self.request.get("oauth_verifier")
        user_info = client.get_user_info(auth_token, auth_verifier=auth_verifier)
        ACCESS_TOKEN = user_info.get("token")
        ACCESS_SECRET = user_info.get("secret")
        # WARNING: only keeps one user's access key!
        db.delete(Access.all())
        user_access = Access()
        user_access.store(ACCESS_TOKEN, ACCESS_SECRET)
        self.response.out.write("<a href='/timeline'>Go to my news feed</a>")
    elif mode == "timeline":
        twdict = self.getNewsfeed(client)
        tweets = ""
        # self.response.out.write(twdict)
        # NOTE(review): debug-only -- prints keys of the first tweet's user.
        for k,v in twdict[0]["user"].items():
            print k
        # for tweet in twdict:
        #     tweets += tweet["text"] + " "
        #     self.response.out.write("{text: '%s'}" % tweet["text"])
        #     self.response.out.write("<br /><br />")
    elif mode == "query":
        keyword = self.request.get("kw")
        twdict = self.getNewsfeed(client)
        tweets = []
        for tweet in twdict:
            if keyword in tweet["text"]:
                # re.compile(r'\b({0})\b'.format(keyword), flags=re.IGNORECASE).search
                # NOTE(review): '+=' appends each CHARACTER of the string to
                # the list, and 4 values are supplied for 3 '%s' placeholders
                # (the masked "from_user" is a literal) -- this branch looks
                # broken; confirm intent before relying on it.
                tweets += '{"text": "%s", "from_user": "******", "from_user_name": "%s", "profile_image_url": "%s"}' \
                    % (tweet["text"], tweet["user"]["name"], tweet["user"]["screen_name"], tweet["user"]["profile_image_url_https"])
        self.response.out.write(tweets)
    else:
        self.response.out.write("<a href='/login'>Login via Twitter</a>")
def getdocs():
    """Return documents for the 'entityid' query arg as a JSON array.

    Example document shape:

        doc = {
            "scrapedatetime": "2014-02-22 03:25:49",
            "docfilename": ".../4028_1393057549.41.download",
            "being_processed": false,
            "being_converted": true,
            "pdfhash": "",
            "docurl": "http://www.townofbrighton.org/DocumentCenter/View/4028",
            "linktext": "View here",
            "converted": false,
            "pdftext": "",
            "_id": "53085f0ea70f9e0e63aeb15a",
            "urldata": { ... scraper run metadata ... },
            "processed": false
        }

    Returns '[]' when entityid is missing or any lookup step fails.
    """
    try:
        access = Access()
        try:
            entityid = request.args['entityid']
        except Exception:
            entityid = ''
        if entityid != '':
            docs = access.getdocsbyentityid(entityid)
            # ObjectId/datetime are not JSON-serialisable; stringify in place.
            for doc in docs:
                doc['_id'] = str(doc['_id'])
                doc['created'] = str(doc['created'])
        else:
            docs = []
    except Exception:
        docs = []
    return json.dumps(docs)
def setUp(self):
    """Build a 5x5 supply grid, one sampled demand cell, and an Access model."""
    n = 5
    supply_grid = tu.create_nxn_grid(n)
    demand_grid = supply_grid.sample(1)
    cost_matrix = tu.create_cost_matrix(supply_grid, 'euclidean')
    # The same matrix serves as both direct and neighbor cost.
    self.model = Access(demand_df = demand_grid, demand_index = 'id',
                        demand_value = 'value',
                        supply_df = supply_grid, supply_index = 'id',
                        supply_value = 'value',
                        cost_df = cost_matrix, cost_origin = 'origin',
                        cost_dest = 'dest', cost_name = 'cost',
                        neighbor_cost_df = cost_matrix,
                        neighbor_cost_origin = 'origin',
                        neighbor_cost_dest = 'dest',
                        neighbor_cost_name = 'cost')
def login():
    """Authenticate a voter and return the login result as JSON.

    Reads 'electoral_key' and 'pass' from the POSTed form, checks them via
    the Access layer, and returns a CORS-enabled JSON response.
    """
    connection = mysql.connector.connect(user=os.getenv("DB_USER"),
                                         password=os.getenv("DB_PASS"),
                                         host=os.getenv("DB_HOST"),
                                         database=os.getenv("DB_NAME"))
    try:
        cursor = connection.cursor()
        login_access = Access(connection, cursor)
        electoral_key = request.form.get('electoral_key')
        password = request.form.get('pass')
        response = make_response(
            json.dumps(login_access.login(str(electoral_key), str(password))))
        response.headers['Access-Control-Allow-Origin'] = '*'
        response.headers['Access-Control-Allow-Methods'] = 'GET, POST, OPTIONS'
        response.headers['Access-Control-Allow-Headers'] = 'Content-Type'
    finally:
        # Close the connection even when login raises (it leaked before).
        connection.close()
    return response
def __init__(self, address):
    """Connect to the device at *address* and expose its functional sub-APIs.

    Each attribute wraps one remote subsystem. Streaming is optional: if
    the Streaming class was never defined (platform-dependent import),
    construction continues with a warning.
    """
    super().__init__(address)
    self.about = About(self)
    self.access = Access(self)
    self.adjustment = Adjustment(self)
    self.axis = Axis(self)
    self.displacement = Displacement(self)
    self.ecu = Ecu(self)
    self.functions = Functions(self)
    self.manual = Manual(self)
    self.network = Network(self)
    self.nlc = Nlc(self)
    self.pilotlaser = Pilotlaser(self)
    self.realtime = Realtime(self)
    self.system = System(self)
    self.system_service = System_service(self)
    self.update = Update(self)
    try:
        self.streaming = Streaming(self)
    except NameError as e:
        # Only swallow the NameError for the missing Streaming symbol;
        # re-raise anything else.
        if "Streaming" in str(e):
            print("Warning: Streaming is not supported on your platform")
        else:
            raise e
def __init__(self,downloaddir='./downloads',entityid=None,DEBUG=False):
    """Set up the converter: un-PDF helper, worker id, and DB access layer.

    downloaddir -- directory scraped documents are read from
    entityid    -- if given, presumably restricts conversion to one entity;
                   only logged here -- TODO confirm against the rest of class
    DEBUG       -- enable verbose progress printing
    """
    #threading.Thread.__init__(self)
    self._stop = threading.Event()    # signal used to stop the worker loop
    self._interval = 1                # polling interval -- units unconfirmed
    self.downloaddir = downloaddir
    self.entityid = entityid
    self.DEBUG = DEBUG
    if self.DEBUG:
        if self.entityid != None:
            print "Using conditional EntityID."
    self.unpdfer = Unpdfer()          # PDF -> text conversion helper
    #self.searchapi = Search()
    self.myid = str(uuid.uuid4())     # unique id for this worker instance
    self.dbaccess = Access(DEBUG=True)
    # setup access layer
    #self.myid = str(uuid.uuid4())
    #self.busaccess = BusAccess(myid=self.myid,DEBUG=True)
    #self.busaccess.setcallback(self._callback)
    # start seperate thread with listener in it
    #self.listenthread = threading.Thread(target=self.busaccess.listen)
    #self.listenthread.start()
    if self.DEBUG:
        print "Converter INIT completed successfully."
def signup():
    """Register a voter and return the registration result.

    Reads 'electoral_key', 'pass' and 'email' from the POSTed form and
    returns the Access.register() result as a CORS-enabled response.
    """
    connection = mysql.connector.connect(user=os.getenv("DB_USER"),
                                         password=os.getenv("DB_PASS"),
                                         host=os.getenv("DB_HOST"),
                                         database=os.getenv("DB_NAME"))
    try:
        cursor = connection.cursor()
        signup_access = Access(connection, cursor)
        electoral_key = request.form.get('electoral_key')
        password = request.form.get('pass')
        mail = request.form.get('email')
        response = make_response(
            str(signup_access.register(str(electoral_key), str(password),
                                       str(mail))))
        response.headers.add('Access-Control-Allow-Origin', '*')
    finally:
        # Close the connection even when register raises (it leaked before).
        connection.close()
    return response
def __init__(self, sid, username, password, last_result):
    """Record the submission context and open a remote-OJ session."""
    self.mysql = MySQL()
    self.sid = sid
    self.get_info(sid)           # fills in self.oj (and friends) from the DB
    self.username = username
    self.last_result = last_result
    self.ce_info = ''
    self.ac = Access(self.oj, username, password)
def setUp(self):
    """Two buffered demand points, one supply point, and a symmetric cost matrix."""
    demand_data = pd.DataFrame({
        'id': [0, 1],
        'x': [0, 0],
        'y': [0, 1],
        'value': [1, 1]
    })
    demand_grid = gpd.GeoDataFrame(demand_data,
                                   geometry=gpd.points_from_xy(
                                       demand_data.x, demand_data.y))
    # Demand geometries become 0.5-radius polygons around each point.
    demand_grid['geometry'] = demand_grid.buffer(.5)
    supply_data = pd.DataFrame({
        'id': [1],
        'x': [0],
        'y': [1],
        'value': [1]
    })
    supply_grid = gpd.GeoDataFrame(supply_data,
                                   geometry=gpd.points_from_xy(
                                       supply_data.x, supply_data.y))
    cost_matrix = pd.DataFrame({
        'origin': [0, 0, 1, 1],
        'dest': [1, 0, 0, 1],
        'cost': [1, 0, 1, 0]
    })
    # The same matrix serves as both direct and neighbor cost.
    self.model = Access(demand_df=demand_grid, demand_index='id',
                        demand_value='value', supply_df=supply_grid,
                        supply_index='id', supply_value='value',
                        cost_df=cost_matrix, cost_origin='origin',
                        cost_dest='dest', cost_name='cost',
                        neighbor_cost_df=cost_matrix,
                        neighbor_cost_origin='origin',
                        neighbor_cost_dest='dest',
                        neighbor_cost_name='cost')
def test_access_initialize_with_supply_value_col_in_dict_raises_value_error(
        self):
    """supply_value given as a dict must be rejected with ValueError."""
    supply_value_dict = {'value': ''}
    with self.assertRaises(ValueError):
        self.model = Access(demand_df=self.demand_grid,
                            demand_index='id',
                            demand_value='value',
                            supply_df=self.supply_grid,
                            supply_index='id',
                            supply_value=supply_value_dict)
class Dispatch(object):
    """Feeds entity websites from the database into the scraper Dispatcher."""

    def __init__(self, address='localhost', exchange='monroeminutes', DEBUG=False):
        self.dispatcher = Dispatcher(address=address, exchange=exchange,
                                     DEBUG=DEBUG)
        self.access = Access()
        self.urls = []

    def _getentities(self):
        """Build one scraper work package per entity in the database."""
        # get urls
        entities = self.access.getentities()
        # build urls
        urls = []
        for entity in entities:
            now = str(strftime("%Y-%m-%d %H:%M:%S"))
            pkg = {
                'targeturl': entity['website'],        # url to scrape
                'title': entity['name'],               # town/village/city name
                'description': entity['description'],  # town/village/city description
                'entityid': str(entity['_id']),        # entityid
                'maxlinklevel': 4,                     # if its more than this, we're screwed ...
                'creationdatetime': now,               # current ISO date/time
                'doctype': 'application/pdf',          # pdf documents
                'frequency': 10080,                    # in minutes
                'allowdomains': [],
            }
            urls.append(pkg)
        return urls

    def updateurls(self):
        """Refresh the dispatcher's URL list from the entity table."""
        # get the entities list
        urls = self._getentities()
        # set urls
        self.dispatcher.seturls(urls)

    def start(self):
        """Start the dispatcher (blocking call)."""
        print "Starting Dispatcher ..."
        # start the dispatcher with the URL list
        #try:
        if True:
            self.dispatcher.start()
def test_access_initialize_without_supply_value_col_in_list_raises_value_error(
        self):
    """A list naming a column absent from the supply df must raise ValueError."""
    missing_col = ['Not a col in supply df']
    with self.assertRaises(ValueError):
        Access(demand_df=self.demand_grid,
               demand_index='id',
               demand_value='value',
               supply_df=self.supply_grid,
               supply_index='id',
               supply_value=missing_col)
def test_access_initialize_without_demand_index_col_raises_value_error(
        self):
    """A demand_index not present in the demand df must raise ValueError."""
    missing_index = 'Not a col in demand df'
    with self.assertRaises(ValueError):
        Access(demand_df=self.demand_grid,
               demand_index=missing_index,
               demand_value='value',
               supply_df=self.supply_grid,
               supply_index='id',
               supply_value='value')
def copy_img_match(dbpath, hospitname, srcfield, imgdir, destdir):
    """Copy each apply-no's image directory from *imgdir* to *destdir*.

    Distinct values of *srcfield* are read from the hospital's table; for
    each one the matching directory under *imgdir* is copied to *destdir*
    (suffixed with a timestamp if the target already exists). Counts of
    missing / empty / copied directories are logged at the end.

    NOTE(review): *srcfield* and the table name are interpolated directly
    into the SQL -- safe only for trusted internal values.
    """
    ObjSql = SqlAccess()
    objDB = Access(dbpath)
    # dirs = glob.glob
    sqls = r" SELECT DISTINCT t.%s FROM %s AS t " % (srcfield, TABLENAME[hospitname])
    paths = ""
    if not objDB.sql_excute(sqls):
        RunError(objDB, sqls)
    emptycnt = 0    # apply-nos whose directory is missing (or ambiguous)
    nofilecnt = 0   # directories that exist but contain no files
    cnt = 0         # directories successfully copied
    allcnt = 0      # total cursor rows processed
    while True:
        allcnt += 1
        row = objDB.sql_cur_fetchone()
        if not row:
            # End of cursor (the message also fires on a normal exhaustion).
            print("error in row")
            break
        applynostr = row[0]
        dirs = os.path.join(imgdir, applynostr)
        paths = glob.glob(dirs)
        # NOTE(review): only 0 or exactly 2 matches are treated as bad;
        # 3+ matches fall through -- confirm whether that is intended.
        if len(paths) == 0 or len(paths) == 2:
            tstr = " %s not exist or multipy %d" % (dirs, len(paths))
            print(tstr)
            s1 = tstr.encode("utf-8")
            # regexlog.info(s1)
            regexlog.info("%s not or multipy" % applynostr)
            emptycnt += 1
            continue
        if 0 == len(os.listdir(paths[0])):
            tstr = "%s have no file" % paths[0]
            print(tstr)
            s1 = tstr.encode("utf-8")
            # regexlog.info(s1)
            regexlog.info("%s null" % applynostr)
            nofilecnt += 1
            continue
        else:
            tstr = " %s have files " % paths[0]
            s1 = tstr.encode("utf-8")
            # regexlog.info(s1)
            regexlog.info("%s ok" % applynostr)
            cnt += 1
            srcpath = paths[0]
            destpath = os.path.join(destdir, applynostr)
            if os.path.isdir(destpath):
                # Avoid clobbering an existing copy: append a timestamp.
                logtime = time.strftime("%Y%m%d%H%M%S", time.localtime(time.time()))
                destpath += '_' + logtime
                regexlog.info(destpath)
            shutil.copytree(srcpath, destpath)
        if 0 == (allcnt % 100):
            # Progress marker every 100 rows.
            print(allcnt)
    objDB.dbclose()
    tstr = "no file %d, empty file %d, have file %d, allfile %d" % (
        emptycnt, nofilecnt, cnt, allcnt)
    regexlog.info(tstr)
    print(tstr)
def updata_user(id, oj, username, password, last_rid, user_last_rid, user_id, mysql):
    """Sync a user's remote-OJ submissions into the local database.

    Crawls *username*'s status pages on *oj* (newest first) until reaching
    *user_last_rid*, inserting unseen problems, solve records, CE info and
    solved-flags, while updating a progress percentage on the account row.

    NOTE(review): all SQL is built by string interpolation; values come
    from the crawler itself, but parameterised queries would be safer.
    """
    # Mark the account as in-use before crawling.
    sql = "UPDATE users_oj_account SET is_using = '1', updating = '0' WHERE id = '%s'" % id
    mysql.update(sql)
    ac = Access(oj, username, password)
    if ac.logined():
        status = []
        url = user_status[oj] % ('', username)
        html = ac.get_html(url=url)
        match = re.compile(get_status[oj], re.M | re.I | re.S)
        result = match.findall(html)
        i = 0
        # Walk submissions newest-first until the last previously-synced rid.
        while result[i][0] != user_last_rid:
            p_id = mysql.query(
                "SELECT id FROM problem_problem WHERE oj = '%s' AND problem_id = '%s'" % (oj, result[i][3]))
            if not p_id:
                # Unknown problem: insert a placeholder row; details are
                # presumably filled in elsewhere.
                mysql.update(
                    "INSERT INTO problem_problem (oj, problem_id, defunct, judge_type, date, title, description, input, output, sample_input, sample_output, hint, source, submit, solved, type, memory_limit_c, memory_limit_java, time_limit_c, time_limit_java, data_number) VALUES('%s', '%s', '1', '1', '%s', ' ', ' ', ' ', ' ', ' ', ' ', ' ', ' ', '0', '0', ' ', '0', '0', '0', '0', '-1')" % (
                        oj, result[i][3], date.today()))
                p_id = mysql.get_id()
            else:
                p_id = p_id[0][0]
            status.append(
                [result[i][2], p_id, result[i][4], result[i][5], result[i][6], result[i][7], result[i][1], result[i][0]])
            i += 1
            if i == len(result):
                # Current page exhausted: fetch the next (older) status page.
                url = user_status[oj] % (int(result[i - 1][0]) - 1, username)
                html = ac.get_html(url=url)
                result = match.findall(html)
                if len(result) == 0:
                    break
                i = 0
        l = len(status)
        j = 1
        for i in status:
            # Fetch the source code for each collected submission.
            code = get_code(ac.get_html(url=get_code_url[oj] % i[7]), oj)
            sql = "INSERT INTO status_solve (status, submit_time, problem_id, use_time, use_memory, length, language, code, user_id) VALUES('%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s', '%s')" % (
                i[0], i[6], i[1], i[2], i[3], i[4], i[5], code, user_id)
            mysql.update(sql)
            solve_id = mysql.get_id()
            if i[0] == 'Compilation Error':
                ce_info_t = get_ce_info(ac.get_html(url=get_ce_url[oj] % i[7]), oj)
                sql = "INSERT INTO status_ce_info (info, solve_id) VALUES('%s', '%s')" % (ce_info_t, solve_id)
                mysql.update(sql)
            # Maintain the per-user solved flag for this problem.
            sql = "SELECT ac FROM users_submit_problem WHERE user_id = '%s' AND problem_id = '%s'" % (user_id, i[1])
            result = mysql.query(sql)
            if result:
                if i[0] == 'Accepted' and int(result[0][0]) == 0:
                    sql = "UPDATE users_submit_problem SET ac = '1' WHERE user_id = '%s' AND problem_id = '%s'" % (
                        user_id, i[1])
                    mysql.update(sql)
            else:
                sql = "INSERT INTO users_submit_problem (ac, problem_id, user_id) VALUES('%s', '%s', '%s')" % (
                    '1' if i[0] == 'Accepted' else '0', i[1], user_id)
                mysql.update(sql)
            # Report sync progress as a percentage on the account row.
            sql = "UPDATE users_oj_account SET updating = '%d' WHERE id = '%s'" % (int(j / float(l) * 100), id)
            mysql.update(sql)
            j += 1
    # Release the account and record the newest rid seen.
    sql = "UPDATE users_oj_account SET is_using = '0', last_rid = '%s' WHERE id = '%s'" % (last_rid, id)
    mysql.update(sql)
def test_access_initialize_with_supply_value_col_in_list(self):
    """supply_value given as a one-element list is accepted and recorded."""
    wrapped_value = ['value']
    self.model = Access(demand_df=self.demand_grid,
                        demand_index='id',
                        demand_value='value',
                        supply_df=self.supply_grid,
                        supply_index='id',
                        supply_value=wrapped_value)
    self.assertEqual(self.model.supply_types, ['value'])
def __init__(self, address='localhost', exchange='monroeminutes', downloaddir="./downloads", DEBUG=False):
    """Connect to the message bus and bind a consumer to a fanout exchange.

    address     -- RabbitMQ host
    exchange    -- fanout exchange name shared by the workers
    downloaddir -- where fetched documents are written
    DEBUG       -- verbose logging flag
    """
    self.exchange = exchange
    self.downloaddir = downloaddir
    self.DEBUG = DEBUG
    self.access = Access()
    #setup message bus
    self.reqcon = pika.BlockingConnection(
        pika.ConnectionParameters(host=address))
    self.reqchan = self.reqcon.channel()
    self.reqchan.exchange_declare(exchange=self.exchange, type='fanout')
    # Exclusive auto-named queue: this worker gets its own copy of every
    # message published to the exchange.
    result = self.reqchan.queue_declare(exclusive=True)
    queue_name = result.method.queue
    self.reqchan.queue_bind(exchange=self.exchange, queue=queue_name)
    # no_ack=True: messages are not redelivered if this worker dies mid-task.
    self.reqchan.basic_consume(self.reqcallback, queue=queue_name, no_ack=True)
def getNewsfeed(self, client):
    """Fetch the stored user's Twitter home timeline as parsed JSON.

    NOTE(review): iterates the stored Access rows and fetches per row; the
    companion verify handler warns it "only keeps one user's access key",
    so a single row is presumably expected. With zero rows this raises or
    returns None -- confirm the single-row invariant before relying on it.
    """
    user_acs=Access.all()
    for ua in user_acs:
        ACCESS_TOKEN = ua.token
        ACCESS_SECRET = ua.secret
        timeline_url = "http://api.twitter.com/1/statuses/home_timeline.json"
        results = client.make_request(url=timeline_url.encode('utf-8'),
                                      additional_params={"count":100},
                                      token=ACCESS_TOKEN,
                                      secret=ACCESS_SECRET)
        twdict = simplejson.loads(results.content)
    return twdict
class Dispatch(object):
    """Feeds entity websites from the database into the scraper Dispatcher."""

    def __init__(self,address='localhost',exchange='monroeminutes',DEBUG=False):
        self.dispatcher = Dispatcher(address=address,exchange=exchange,
                                     DEBUG=DEBUG)
        self.access = Access()
        self.urls = []

    def _getentities(self):
        """Build one scraper work package per entity in the database."""
        # get urls
        entities = self.access.getentities()
        # build urls
        urls = []
        for entity in entities:
            now = str(strftime("%Y-%m-%d %H:%M:%S"))
            pkg = {
                'targeturl': entity['website'],        # url to scrape
                'title': entity['name'],               # town/village/city name
                'description': entity['description'],  # town/village/city description
                'entityid': str(entity['_id']),        # entityid
                'maxlinklevel': 4,                     # if its more than this, we're screwed ...
                'creationdatetime': now,               # current ISO date/time
                'doctype': 'application/pdf',          # pdf documents
                'frequency': 10080,                    # in minutes
                'allowdomains': [],
            }
            urls.append(pkg)
        return urls

    def updateurls(self):
        """Refresh the dispatcher's URL list from the entity table."""
        # get the entities list
        urls = self._getentities()
        # set urls
        self.dispatcher.seturls(urls)

    def start(self):
        """Start the dispatcher (blocking call)."""
        print "Starting Dispatcher ..."
        # start the dispatcher with the URL list
        #try:
        if True:
            self.dispatcher.start()
def test_access_initialize_with_valid_neighbor_cost_name_in_dict_raises_value_error(
        self):
    """neighbor_cost_name given as a dict must be rejected with ValueError."""
    name_as_dict = {'cost': ''}
    with self.assertRaises(ValueError):
        self.model = Access(demand_df=self.demand_grid,
                            demand_index='id',
                            demand_value='value',
                            supply_df=self.supply_grid,
                            supply_index='id',
                            supply_value='value',
                            neighbor_cost_df=self.cost_matrix,
                            neighbor_cost_origin='origin',
                            neighbor_cost_dest='dest',
                            neighbor_cost_name=name_as_dict)
def test_access_initialize_without_valid_cost_dest_raises_value_error(
        self):
    """An unknown cost_dest column must be rejected with ValueError."""
    invalid_dest = "Not a valid cost dest column"
    with self.assertRaises(ValueError):
        Access(demand_df=self.demand_grid,
               demand_index='id',
               demand_value='value',
               supply_df=self.supply_grid,
               supply_index='id',
               supply_value='value',
               cost_df=self.cost_matrix,
               cost_origin='origin',
               cost_dest=invalid_dest,
               cost_name='cost')
class UserDAO(object):
    """Data-access object for the `user` table (name, username, password)."""

    __database = None
    __cursor = None

    def __init__(self):
        self.__database = Access()
        self.__cursor = self.__database.getCursor()
        self.initDatabase()

    def initDatabase(self):
        """Create the user table if needed; failures are deliberately ignored."""
        try:
            self.__cursor.execute(""" create table user (name text, username text, password text) """)
            self.__database.commit()
        except:
            pass

    def insert(self, user):
        """Insert *user* unless the username already exists."""
        if len(self.getUser(user.getUsername())) == 0:
            users = [(user.getName(), user.getUsername(), user.getPassword()), ]
            self.__cursor.executemany("INSERT INTO user VALUES (?,?,?)", users)
            self.__database.commit()

    def update(self, user):
        """Update name and password for the row matching *user*'s username."""
        users = [(user.getName(), user.getPassword(), user.getUsername())]
        self.__cursor.executemany("UPDATE user SET name = ?, password = ? where username = ? ", users)
        self.__database.commit()

    def delete(self, username):
        # NOTE(review): this statement appears corrupted (a masked string
        # fused with a SELECT); it cannot delete by username as written.
        # Recover the original from version control before relying on it.
        self.__cursor.execute("DELETE FROM user WHERE username = "******"SELECT * FROM user")
        print self.__cursor.fetchall()

    def getUser(self, username):
        """Return all rows whose username matches (list of tuples)."""
        self.__cursor.execute("SELECT * FROM user WHERE username = ?",[(username)] )
        return self.__cursor.fetchall()

    def log(self, user, request):
        """Append "<user> >>> <request>" to the log file."""
        flines = user.toString() + " >>> " + request + "\n"
        f = open(log, 'a')
        f.writelines([flines,])
        f.close()
def addorg():
    """Add a new organisation from the 'org' JSON query parameter.

    Admin-only endpoint. Expected payload shape:

        org = {
            'name': 'Brighton Town Board',
            'description': 'Brighton, NY Town Board',
            'matchs': ['town board', 'brighton'],
            'entityid': ObjectID(' ... '),
            'creationdatetime': '2014-03-14 22:01:30',
        }

    Returns a JSON string: {"success": true|false}.
    """
    if not ADMIN:
        return ''
    success = True
    try:
        # A missing or malformed 'org' arg means bad input, not a crash.
        try:
            org = json.loads(request.args['org'])
        except Exception:
            org = None
        if org is not None:
            access = Access()
            access.addorg(org)
        else:
            success = False
    except Exception:
        # Access-layer failure: report failure rather than a 500.
        success = False
    return json.dumps({'success': success})
def test_access_initialize_without_valid_neighbor_cost_name_in_list_raises_value_error(
        self):
    """A list naming an unknown neighbor-cost column must raise ValueError."""
    unknown_cols = ["Not a valid cost name column"]
    with self.assertRaises(ValueError):
        Access(demand_df=self.demand_grid,
               demand_index='id',
               demand_value='value',
               supply_df=self.supply_grid,
               supply_index='id',
               supply_value='value',
               neighbor_cost_df=self.cost_matrix,
               neighbor_cost_origin='origin',
               neighbor_cost_dest='dest',
               neighbor_cost_name=unknown_cols)
def test_access_initialize_with_valid_neighbor_cost_name_in_list(self):
    """neighbor_cost_name as a one-element list is accepted and recorded."""
    wrapped_name = ['cost']
    self.model = Access(demand_df=self.demand_grid,
                        demand_index='id',
                        demand_value='value',
                        supply_df=self.supply_grid,
                        supply_index='id',
                        supply_value='value',
                        neighbor_cost_df=self.cost_matrix,
                        neighbor_cost_origin='origin',
                        neighbor_cost_dest='dest',
                        neighbor_cost_name=wrapped_name)
    self.assertEqual(self.model.neighbor_cost_names, ['cost'])
def __init__(self, address='localhost', exchange='monroeminutes', downloaddir="./downloads", DEBUG=False):
    """Connect to the message bus and bind a consumer to a fanout exchange.

    address     -- RabbitMQ host
    exchange    -- fanout exchange name shared by the workers
    downloaddir -- where fetched documents are written
    DEBUG       -- verbose logging flag
    """
    self.exchange = exchange
    self.downloaddir = downloaddir
    self.DEBUG = DEBUG
    self.access = Access()
    #setup message bus
    self.reqcon = pika.BlockingConnection(pika.ConnectionParameters(host=address))
    self.reqchan = self.reqcon.channel()
    self.reqchan.exchange_declare(exchange=self.exchange,type='fanout')
    # Exclusive auto-named queue: this worker gets its own copy of every
    # message published to the exchange.
    result = self.reqchan.queue_declare(exclusive=True)
    queue_name = result.method.queue
    self.reqchan.queue_bind(exchange=self.exchange,queue=queue_name)
    # no_ack=True: messages are not redelivered if this worker dies mid-task.
    self.reqchan.basic_consume(self.reqcallback,queue=queue_name,no_ack=True)
def __init__(self, address):
    """Connect to the device at *address* and expose its functional sub-APIs.

    Each attribute wraps one remote subsystem of the controller.
    """
    super().__init__(address)
    self.about = About(self)
    self.access = Access(self)
    self.amcids = Amcids(self)
    self.control = Control(self)
    self.description = Description(self)
    self.diagnostic = Diagnostic(self)
    self.functions = Functions(self)
    self.move = Move(self)
    self.network = Network(self)
    self.res = Res(self)
    self.rotcomp = Rotcomp(self)
    self.rtin = Rtin(self)
    self.rtout = Rtout(self)
    self.status = Status(self)
    self.system_service = System_service(self)
    self.update = Update(self)
def show_applyno_match_ZD(dbpath, hospitname, imgdir):
    """Report which ApplyNo image directories exist and contain files.

    For every distinct ApplyNo in the hospital's table, checks whether a
    same-named directory exists under *imgdir* and whether it is empty,
    logging one line per ApplyNo and a final summary of the counts.
    """
    ObjSql = SqlAccess()
    objDB = Access(dbpath)
    # dirs = glob.glob
    sqls = r" SELECT DISTINCT t.ApplyNo FROM %s AS t " % TABLENAME[hospitname]
    paths = ""
    if not objDB.sql_excute(sqls):
        RunError(objDB, sqls)
    emptycnt = 0    # missing (or ambiguous) directories
    nofilecnt = 0   # directories present but empty
    cnt = 0         # directories with files
    while True:
        row = objDB.sql_cur_fetchone()
        if not row:
            # End of cursor (the message also fires on a normal exhaustion).
            print("error in row")
            break
        # NOTE(review): row['applyno'] requires a dict-style cursor row; the
        # sibling copy routine indexes row[0] -- confirm the cursor type.
        applynostr = row['applyno']
        dirs = os.path.join(imgdir, applynostr)
        paths = glob.glob(dirs)
        # NOTE(review): only 0 or exactly 2 matches are treated as bad;
        # 3+ matches fall through -- confirm whether that is intended.
        if len(paths) == 0 or len(paths) == 2:
            tstr = " %s not exist or multipy %d" % (dirs, len(paths))
            print(tstr)
            s1 = tstr.encode("utf-8")
            # regexlog.info(s1)
            regexlog.info("%s not or multipy" % applynostr)
            emptycnt += 1
            continue
        if 0 == len(os.listdir(paths[0])):
            tstr = "%s have no file" % paths[0]
            print(tstr)
            s1 = tstr.encode("utf-8")
            # regexlog.info(s1)
            regexlog.info("%s null" % applynostr)
            nofilecnt += 1
            continue
        else:
            tstr = " %s have files " % paths[0]
            s1 = tstr.encode("utf-8")
            # regexlog.info(s1)
            regexlog.info("%s ok" % applynostr)
            cnt += 1
    objDB.dbclose()
    tstr = "empty file %d, no file %d, have file %d" % (emptycnt, nofilecnt, cnt)
    regexlog.info(tstr)
    print(tstr)
def getruns():
    """Return all scraper runs serialised as a JSON array."""
    runs = Access().getruns()
    return json.dumps(runs)
import json
from access import Access

if __name__ == '__main__':
    # One-shot export of MonroeMinutes data with ObjectId/datetime fields
    # stringified so the records can be JSON-serialised.
    print "Downloading MonroeMinutes data ..."
    a = Access()
    runs = a.getruns()
    docs = a.getdocs()
    entities = a.getentities()
    orgs = a.getorgs()   # NOTE(review): fetched but never converted below
    runsdata = []
    for run in runs:
        run['_id'] = str(run['_id'])
        runsdata.append(run)
    docsdata = []
    for doc in docs:
        doc['_id'] = str(doc['_id'])
        doc['created'] = str(doc['created'])
        docsdata.append(doc)
    entitiesdata = []
    for entity in entities:
        entity['_id'] = str(entity['_id'])
        entitiesdata.append(entity)
def __init__(self, address='localhost', exchange='monroeminutes', DEBUG=False):
    """Create the underlying Dispatcher, the DB access layer, and an empty URL list."""
    self.urls = []
    self.access = Access()
    self.dispatcher = Dispatcher(address=address, exchange=exchange,
                                 DEBUG=DEBUG)
def search():
    """Full-text search endpoint.

    Query args: phrase (required), entityid (required), orgid (optional),
    page (optional, default 0). Response shape:

        {
            "count": 0,
            "phrase": "test",
            "results": [ {}, ],
            "success": true,
            "error": ""
        }
    """
    results = {}
    try:
        # Missing args surface as KeyError (werkzeug's BadRequestKeyError
        # subclasses it); fall back to empty defaults.
        try:
            phrase = request.args['phrase']
        except KeyError:
            phrase = ''
        # We need at least the entity id to do a search.
        try:
            entityid = request.args['entityid']
        except KeyError:
            entityid = ''
        # orgid is optional: the search works without it.
        try:
            orgid = request.args['orgid']
        except KeyError:
            orgid = ''
        try:
            page = request.args['page']
        except KeyError:
            page = 0
        if phrase != '' and entityid != '':
            access = Access()
            results = access.search(
                phrase=phrase,
                orgid=orgid,
                entityid=entityid,
                pagesize=25,
                page=page
            )
        else:
            results['error'] = 'Invalid Input'
            results['success'] = False
    except Exception:
        results['error'] = 'General Error'
        results['success'] = False
    return json.dumps(results)
def getorgs():
    """Return all organisations as a JSON array with ObjectIds stringified."""
    access = Access()
    orgs = access.getorgs()
    # Mongo ObjectId is not JSON-serialisable; convert in place.
    for org in orgs:
        org['_id'] = str(org['_id'])
    return json.dumps(orgs)
class Vjudge:
    """Virtual judge: relays a local submission to a remote OJ and polls the verdict.

    NOTE(review): per-OJ dispatch uses eval('self.<oj>_...'); *oj* comes
    from the local DB, but getattr-based dispatch would be safer/clearer.
    """

    def __init__(self, sid, username, password, last_result):
        self.mysql = MySQL()
        self.sid = sid
        self.get_info(sid)       # loads problem/language/code/uid and oj/pid
        self.username = username
        self.ac = Access(self.oj, username, password)
        self.ce_info = ''
        self.last_result = last_result

    def get_info(self, sid):
        """Load submission row *sid* and its problem's remote identity (oj, pid)."""
        result = self.mysql.query("SELECT problem_id, language, code, user_id FROM status_solve WHERE id = '%s'" % sid)
        self.problem_id = result[0][0]
        self.language = result[0][1]
        self.code = result[0][2]
        self.uid = result[0][3]
        result = self.mysql.query("SELECT oj, problem_id FROM problem_problem WHERE id = '%s'" % self.problem_id)
        self.oj = result[0][0]
        self.pid = result[0][1]

    def submit(self):
        """POST the code to the remote OJ's submit endpoint."""
        url = url_submit[self.oj]
        referer = url_referer[self.oj] + self.pid
        postdata = post_data[self.oj]
        postdata[judge_listmap[self.oj][0]] = self.pid
        postdata[judge_listmap[self.oj][1]] = language_map[self.oj][self.language]
        postdata[judge_listmap[self.oj][2]] = self.code
        self.ac.get_html(url, postdata, referer)

    def hdu_get_status(self):
        """Scrape HDU for (status, time, memory) of our latest run id."""
        if not self.rid:
            # First poll: locate our newest run id on the status page.
            url = url_status[self.oj][0]
            html = self.ac.get_html(url)
            match = re.search(re_string[self.oj][0] % self.username, html, re.M | re.I | re.S)
            s = match.group()
            self.rid = re.findall(re_string[self.oj][1], s, re.M | re.I | re.S)[-1]
        url = url_status[self.oj][1] + self.rid
        html = self.ac.get_html(url)
        match = re.search(re_string[self.oj][2] % self.rid, html, re.M | re.I | re.S)
        if match.group(1) == 'Compilation Error':
            # Capture the compiler output for later persistence.
            url = url_ce[self.oj] + self.rid
            html = self.ac.get_html(url)
            t = re.search(re_string[self.oj][3], html, re.M | re.I | re.S)
            self.ce_info = t.group(1)
        return match.group(1), match.group(2), match.group(3)

    def hdu_again(self, s):
        """True while the HDU verdict is still non-final."""
        if s == 'Queuing' or s == 'Compiling' or s == 'Running':
            return True
        return False

    def run(self):
        """Submit, poll until the verdict is final, and persist the result."""
        self.submit()
        self.rid = ''
        sleep(0.5)
        o = eval('self.' + self.oj + '_get_status')()
        self.mysql.update(
            "UPDATE status_solve SET status = '%s', use_time = '%s', use_memory = '%s' WHERE id = '%s'" % (
                o[0], o[1], o[2], self.sid))
        sleep(0.2)
        while eval('self.' + self.oj + '_again')(o[0]):
            o = eval('self.' + self.oj + '_get_status')()
            self.mysql.update(
                "UPDATE status_solve SET status = '%s', use_time = '%s', use_memory = '%s' WHERE id = '%s'" % (
                    o[0], o[1], o[2], self.sid))
            sleep(0.2)
        # print o
        if self.ce_info:
            # Upsert the compile-error details for this solve.
            sql = "SELECT info FROM status_ce_info WHERE solve_id = '%s'" % self.sid
            result = self.mysql.query(sql)
            if result:
                sql = "UPDATE status_ce_info SET info = '%s' WHERE solve_id = '%s'" % (self.ce_info, self.sid)
                self.mysql.update(sql)
            else:
                sql = "INSERT INTO status_ce_info (info, solve_id) VALUES('%s', '%s')" % (self.ce_info, self.sid)
                self.mysql.update(sql)
        # Maintain the per-user solved flag for this problem.
        sql = "SELECT ac FROM users_submit_problem WHERE user_id = '%s' AND problem_id = '%s'" % (self.uid, self.problem_id)
        result = self.mysql.query(sql)
        if result:
            if o[0] == 'Accepted' and int(result[0][0]) == 0:
                sql = "UPDATE users_submit_problem SET ac = '1' WHERE user_id = '%s' AND problem_id = '%s'" % (
                    self.uid, self.problem_id)
                self.mysql.update(sql)
        else:
            sql = "INSERT INTO users_submit_problem (ac, problem_id, user_id) VALUES('%s', '%s', '%s')" % (
                '1' if o[0] == 'Accepted' else '0', self.problem_id, self.uid)
            self.mysql.update(sql)
def __init__(self): self.__database = Access() self.__cursor = self.__database.getCursor() self.initDatabase()
def geturls(): access = Access() urls = access.geturls() return json.dumps(urls)
from access import Access if __name__ == '__main__': print "Resetting database flags ..." a = Access() a.resetflags() print "Done."
def get_last_rid(oj, username): ac = Access(oj=oj) url = user_status[oj] % ('', username) html = ac.get_html(url=url) match = re.search(user_last_rid[oj], html, re.M) return match.group(1)
def test_account(oj, username, password): ac = Access(oj, username, password) return ac.logined()
from access import Access from time import strftime if __name__ == '__main__': access = Access() towns = [] towns.append(['http://www.townofbrighton.org/','Town of Brighton','Brighton, NY']) towns.append(['http://www.brockportny.org/','Village of Brockport','Brockport, NY']) towns.append(['http://www.townofchili.org/','Town of Chili','Chili, NY']) towns.append(['http://www.churchville.net/','Town of Churchville','Churchville, NY']) towns.append(['http://www.clarksonny.org/','Town of Clarkson','Clarkson, NY']) towns.append(['http://eastrochester.org/','Town of East Rochester','East Rochester, NY']) towns.append(['http://www.village.fairport.ny.us/','Village of Fairport','Fairport, NY']) towns.append(['http://www.townofgates.org/','Town of Gates','Gates, NY']) towns.append(['http://greeceny.gov/','Town of Greece','Greece, NY']) towns.append(['http://www.hamlinny.org/','Town of Hamlin','Hamlin, NY']) towns.append(['http://www.henrietta.org/','Town of Henrietta','Henrietta, NY']) towns.append(['http://www.hiltonny.org/','Village of Hilton','Hilton, NY']) towns.append(['http://www.villageofhoneoyefalls.org/','Village of Honeoye Falls','Honeye Falls, NY']) towns.append(['http://www.townofmendon.org/','Town of Medon','Mendon, NY']) #towns.append(['http://www.ogdenny.com','Town of Ogden','Ogdon, NY',['http://www.ecode360.com/documents/list/OG0089/quick/']]) towns.append(['http://www.parmany.org/','Town of Parma','Parma, NY']) towns.append(['http://www.penfield.org/','Town of Penfield','Penfield, NY']) towns.append(['http://www.perinton.org/','Town of Perinton','Perinton, NY']) towns.append(['http://townofpittsford.org/','Town of Pitsford','Pitsford, NY']) towns.append(['http://www.townofriga.org/','Town of Riga','Riga, NY'])
class Archiver(object):
    """Listens on the message bus for scraper messages and archives found
    documents: downloads each new PDF and records it via the Access layer.

    (All prints use the single-argument ``print(...)`` form, which emits
    identical output on Python 2 and 3.)
    """

    def __init__(self, address='localhost', exchange='monroeminutes', downloaddir="./downloads", DEBUG=False):
        self.exchange = exchange
        self.downloaddir = downloaddir
        self.DEBUG = DEBUG
        self.access = Access()
        # Set up the message bus: fanout exchange + exclusive queue bound to it.
        self.reqcon = pika.BlockingConnection(pika.ConnectionParameters(host=address))
        self.reqchan = self.reqcon.channel()
        self.reqchan.exchange_declare(exchange=self.exchange, type='fanout')
        result = self.reqchan.queue_declare(exclusive=True)
        queue_name = result.method.queue
        self.reqchan.queue_bind(exchange=self.exchange, queue=queue_name)
        self.reqchan.basic_consume(self.reqcallback, queue=queue_name, no_ack=True)

    def start(self):
        """Block forever, dispatching incoming bus messages to reqcallback()."""
        if self.DEBUG:
            print("Listening for new documents ...")
        self.reqchan.start_consuming()

    def reqcallback(self, ch, method, properties, body):
        """Handle one bus message.

        Commands handled: 'found_doc' (download + record a new PDF),
        'scraper_status_simple' (log status), 'scraper_finished' (log run),
        'global_shutdown' (raise to exit the consume loop).
        """
        response = simplejson.loads(body)
        if self.DEBUG:
            print("Archiver: Message Recieved ('{0}')".format(response['command']))
        if response['command'] == 'found_doc':
            # A scraper found a PDF link; archive it if we don't have it yet.
            if self.DEBUG:
                print("Processing new document ...")
            docurl = response['message']['docurl']
            linktext = response['message']['linktext']
            scrapedatetime = response['message']['scrapedatetime']
            urldata = response['message']['urldata']
            if self.DEBUG:
                print("Document meta data decoded ...")
            # Skip anything whose URL is already in the database.
            docid = self.access.getdocbydocurl(docurl)
            if docid is None:
                filename, datetime, success = self.download(docurl)
                if not success:
                    if self.DEBUG:
                        print("Unable to download PDF.")
                    return
                # Human-readable document name from the URL's last segment.
                docname = urllib2.unquote(docurl.split('/')[-1])
                docid = self.access.adddoc(docurl, linktext, docname, filename, scrapedatetime, urldata)
                if docid is None:
                    if self.DEBUG:
                        print("Document already in database.")
                else:
                    if self.DEBUG:
                        print("New document added to the database.")
            else:
                if self.DEBUG:
                    print("Document already in database, not downloading.")
        elif response['command'] == 'scraper_status_simple':
            if self.DEBUG:
                print("Logging scraper simple status packet.")
            scraperid = response['sourceid']
            statuspacket = response['message']
            self.access.logstatus(scraperid, statuspacket)
        elif response['command'] == 'scraper_finished':
            if self.DEBUG:
                print("Scraper Finished, Logging Run.")
            # Log the completed scraper run within the database.
            self.access.logrun(response['message'])
        elif response['command'] == 'global_shutdown':
            if self.DEBUG:
                print("Global Shutdown Command Seen")
            raise Exception("Archiver Exiting.")

    def download(self, docurl):
        """Download docurl into self.downloaddir.

        Returns (filename, isodatetime, success); on failure filename is ''
        and success is False.
        """
        success = True
        try:
            urlfile = docurl[docurl.rfind("/") + 1:]
            t = time.time()
            _filename = "{0}/{1}_{2}.download".format(self.downloaddir, urlfile, t)
            # BUGFIX: the collision retry referenced an undefined name
            # ('dest') and never refreshed the timestamp, so a name clash
            # raised NameError (and would otherwise loop forever).
            while self._fileexists(_filename):
                t = time.time()
                _filename = "{0}/{1}_{2}.download".format(self.downloaddir, urlfile, t)
            filename, _headers = urllib.urlretrieve(docurl, _filename)
            if self.DEBUG:
                print("Download Successful: '{0}'".format(_filename))
        except Exception:
            filename = ""
            success = False
            if self.DEBUG:
                print("Error trying to download document.")
        isodatetime = str(strftime("%Y-%m-%d %H:%M:%S"))
        return (filename, isodatetime, success)

    def _fileexists(self, filename):
        """Return True when *filename* names an openable file."""
        if filename is None:
            return False
        try:
            with open(filename):
                return True
        except Exception:
            return False
from access import Access if __name__ == '__main__': print "Resetting database flags ..." a = Access() a._resetstates() print "Done."
class TestEuclidean(unittest.TestCase):
    """Tests for Access.create_euclidean_distance on a minimal geometry:
    one demand and one supply location, each buffered into a square of
    side 1, centred at (0, 0) and (0, 1)."""

    def setUp(self):
        demand_data = pd.DataFrame({
            'id': [0],
            'x': [0],
            'y': [0],
            'value': [1]
        })
        demand_grid = gpd.GeoDataFrame(demand_data,
                                       geometry=gpd.points_from_xy(
                                           demand_data.x, demand_data.y))
        demand_grid['geometry'] = demand_grid.buffer(.5)

        supply_data = pd.DataFrame({
            'id': [1],
            'x': [0],
            'y': [1],
            'value': [1]
        })
        supply_grid = gpd.GeoDataFrame(supply_data,
                                       geometry=gpd.points_from_xy(
                                           supply_data.x, supply_data.y))
        supply_grid['geometry'] = supply_grid.buffer(.5)

        cost_matrix = pd.DataFrame({'origin': [0], 'dest': [1], 'cost': [1]})

        self.model = Access(demand_df=demand_grid,
                            demand_index='id',
                            demand_value='value',
                            supply_df=supply_grid,
                            supply_index='id',
                            supply_value='value',
                            cost_df=cost_matrix,
                            cost_origin='origin',
                            cost_dest='dest',
                            cost_name='cost',
                            neighbor_cost_df=cost_matrix,
                            neighbor_cost_origin='origin',
                            neighbor_cost_dest='dest',
                            neighbor_cost_name='cost')

    def test_euclidean_point_to_point(self):
        # Centroid-to-centroid: (0,0) -> (0,1) is exactly 1.
        self.model.create_euclidean_distance(name='euclidian',
                                             threshold=2,
                                             centroid_o=True,
                                             centroid_d=True)
        actual = self.model.cost_df['euclidian'][0]
        self.assertAlmostEqual(actual, 1)

    def test_euclidean_point_to_poly(self):
        # Demand centroid to the supply square's nearest edge: 0.5.
        self.model.create_euclidean_distance(name='euclidian',
                                             threshold=2,
                                             centroid_o=True,
                                             centroid_d=False)
        actual = self.model.cost_df['euclidian'][0]
        self.assertAlmostEqual(actual, .5)

    def test_euclidean_poly_to_poly(self):
        # The two buffered squares touch, so polygon-to-polygon is 0.
        self.model.create_euclidean_distance(name='euclidian',
                                             threshold=2,
                                             centroid_o=False,
                                             centroid_d=False)
        actual = self.model.cost_df['euclidian'][0]
        self.assertAlmostEqual(actual, 0)

    def test_euclidean_without_geopandas_demand_dataframe_raises_TypeError(
            self):
        with self.assertRaises(TypeError):
            self.model.demand_df = self.model.demand_df[['x', 'y', 'value']]
            self.model.create_euclidean_distance()

    def test_euclidean_without_geopandas_supply_dataframe_raises_TypeError(
            self):
        with self.assertRaises(TypeError):
            self.model.supply_df = self.model.supply_df[['x', 'y', 'value']]
            self.model.create_euclidean_distance()

    def test_euclidean_sets_euclidean_as_default_if_no_default_exists(self):
        delattr(self.model, '_default_cost')
        self.model.create_euclidean_distance()
        actual = hasattr(self.model, '_default_cost')
        # FIX: assertEquals is a deprecated alias, removed in Python 3.12.
        self.assertEqual(actual, True)
def getentities(): access = Access() entities = access.getentities() for i in range(0,len(entities)): entities[i]['_id'] = str(entities[i]['_id']) return json.dumps(entities)