def enter_server_loop():
    """
    The main loop of the MetaShop Server application: maintains the running
    state of the server, updates the FLANN image index when needed, and
    spawns client-servicing threads.
    """
    global main_server, image_processor

    config.create_directories()
    database.open_connection(config.DATABASE_PATHNAME)

    # To ensure the existing index isn't being saved concurrently; the
    # context manager releases the lock even if loading raises.
    with shared_data.lock:  # Critical section
        if os.path.isfile(os.getcwd() + "/matcher.bin"):
            image_matcher = ImageMatcher()
            image_matcher.load(os.getcwd())
            image_processor.update_image_matcher(image_matcher)

    # main_server = Server("localhost", 32304, image_processor)
    main_server = Server("0.0.0.0", 32304, image_processor)
    main_server.enter_main_loop()

    database.close_connection()
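# A minimal sketch of the lock-guarded reload pattern used above, assuming
# shared_data.lock is a standard threading.Lock. The helper name and the
# matcher_factory parameter are hypothetical, introduced for illustration.
import os

def reload_matcher(lock, processor, matcher_factory, matcher_dir=None):
    """Swap a freshly loaded matcher into the processor under the shared lock."""
    matcher_dir = matcher_dir or os.getcwd()
    with lock:  # the context manager releases the lock even if load() raises
        if os.path.isfile(os.path.join(matcher_dir, "matcher.bin")):
            matcher = matcher_factory()
            matcher.load(matcher_dir)
            processor.update_image_matcher(matcher)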
def probe_ip(ip):
    whois = IPWhoisResult(ip)
    db.open_connection()
    db.exec_query(select_websrv_result.format(ip=ip))
    db_row = db.cur.fetchone()
    if db_row:
        # Take the result from the local DB
        whois.contact_name = db_row[3]
        whois.contact_email = db_row[11]
        whois.contact_address = db_row[12]
        whois.geo_ip.organization = db_row[3]
        whois.geo_ip.isp = db_row[4]
        whois.geo_ip.city = db_row[8]
        whois.geo_ip.country = db_row[5]
        whois.geo_ip.region = db_row[9]
        whois.geo_ip.postal_code = db_row[10]
        whois.geo_ip.as_desc = db_row[2]
        whois.geo_ip.coordinates = GeoCoordinates(db_row[6], db_row[7])
    else:
        # Perform queries on remote DBs and cache the result locally
        whois.get_from_whois()
        whois.get_from_geoip()
        db.exec_query(add_websrv_result.format(
            ip=ip,
            as_desc=whois.geo_ip.as_desc,
            organization=whois.geo_ip.organization,
            isp=whois.geo_ip.isp,
            country=whois.geo_ip.country,
            region=whois.geo_ip.region,
            city=whois.geo_ip.city,
            postal_code=whois.geo_ip.postal_code,
            contact_email=whois.contact_email,
            contact_address=whois.contact_address,
            lat=whois.geo_ip.coordinates.lat,
            lon=whois.geo_ip.coordinates.lon,
        ))
        db.commit()
    db.close_connection()
    return whois
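# The string-formatted SQL above is open to injection if `ip` is ever
# attacker-controlled. A minimal sketch of the same cached lookup using
# DB-API placeholders instead; sqlite3, the table name, and the column
# layout are assumptions, not the module's actual schema.
import sqlite3

def cached_lookup(db_path, ip):
    conn = sqlite3.connect(db_path)
    try:
        # `?` placeholders let the driver escape the value safely
        cur = conn.execute("SELECT * FROM websrv_results WHERE ip = ?", (ip,))
        return cur.fetchone()
    finally:
        conn.close()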
def get(self, event_id):
    conn = database.open_connection(db_file)
    with conn:
        try:
            return database.get_event(conn, event_id), 200
        except database.NonExistingError:
            abort(400)
    abort(500)
def delete(self, user_email, event_id):
    conn = database.open_connection(db_file)
    with conn:
        try:
            database.unregister_event(conn, user_email, event_id)
        except database.NonExistingError:
            abort(400)
        return {}
    abort(500)
def delete(self, event_id):
    conn = database.open_connection(db_file)
    with conn:
        try:
            database.delete_event(conn, event_id)
            return {}
        except database.NonExistingError:
            abort(400)
    abort(500)
def post(self):
    conn = database.open_connection(db_file)
    with conn:
        try:
            event_id = database.add_event_from_json(conn, request.json)
            return {'event_id': event_id}, 200
        except Exception:
            # Malformed JSON or a constraint violation
            abort(400)
    abort(500)
def get(self, token, event_id):
    # Reject unauthorized requests before opening a connection
    if token != secret_token:
        abort(401)
    conn = database.open_connection(db_file)
    with conn:
        try:
            return database.get_users_attending_event(conn, event_id), 200
        except database.NonExistingError:
            abort(400)
    abort(500)
def post(self, user_email, event_id):
    conn = database.open_connection(db_file)
    with conn:
        # Automatically register the user in the DB
        try:
            database.register_user(conn, user_email, event_id)
        except database.DuplicateError:
            # Ignore if the user already exists
            pass
        try:
            database.register_event(conn, user_email, event_id)
            event = database.get_event(conn, event_id)
            email_utils.send_notification(smtp_context, user_email, event_id)
            email_utils.send_calendar_invite(smtp_context, user_email, event)
        except database.NonExistingError:
            name_space.abort(400)
        except database.DuplicateError:
            # Ignore if already registered
            pass
        return {}
    abort(500)
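# A hedged usage sketch for the registration endpoint above. The mount point
# /users/<user_email>/events/<event_id> and the host/port are assumptions,
# not taken from the original routing code.
import requests

resp = requests.post("http://localhost:5000/users/alice@example.com/events/42")
resp.raise_for_status()  # 400 if the event does not exist; {} on success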
#!/usr/bin/env python
import json
import os

import gdata.spreadsheet.service as service

import config
import database
import database_util
from model.preload import ParameterType, ValueEncoding, CodeSet, Unit, FillValue, FunctionType, ParameterFunction, \
    Parameter, Stream, StreamDependency

database.initialize_connection(database.PreloadDatabaseMode.EMPTY_FILE)
database.open_connection()

key = config.SPREADSHEET_KEY
use_cache = config.USE_CACHED_SPREADSHEET
cachedir = '.cache'

IGNORE_SCENARIOS = ['VOID']


def sheet_generator(name):
    cache_path = os.path.join(cachedir, name)
    rows = []
    if use_cache and os.path.exists(cache_path):
        try:
            rows.extend(json.load(open(cache_path)))
            print 'used cache'
            for row in rows:
                yield row
            return
        except (IOError, ValueError):
            # Cache missing or unreadable; fall through to the live sheet
            pass
import database
import interface
from datetime import datetime

connection, cursor = database.open_connection()
# database.init_database(connection, cursor)
# database.insert_data(connection, cursor)
interface.start()
database.close_connection(connection, cursor)
#!/usr/bin/env python
import codecs
import os
import shutil
import logging

import jinja2

import database
from model.preload import Stream

database.initialize_connection(database.PreloadDatabaseMode.POPULATED_MEMORY)
database.open_connection()

DROP_KEYSPACE = 'drop keyspace ooi;\n\n'
CREATE_KEYSPACE = "create keyspace ooi with replication = { 'class' : 'SimpleStrategy', 'replication_factor' : 1 };\n\n"

CREATE_PROVENANCE = '''
CREATE TABLE ooi.dataset_l0_provenance (
    subsite text,
    node text,
    sensor text,
    method text,
    deployment int,
    id uuid,
    fileName text,
    parserName text,
    parserVersion text,
    PRIMARY KEY((subsite, node, sensor), method, deployment, id)
);
'''
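# A minimal sketch of how per-stream CQL might be rendered with jinja2,
# given the imports above. The inline template and the column attributes
# are illustrative assumptions, not the script's actual template.
import jinja2

TABLE_TEMPLATE = jinja2.Template(
    'CREATE TABLE ooi.{{ name }} (\n'
    '{% for col in columns %}'
    '    {{ col.name }} {{ col.type }},\n'
    '{% endfor %}'
    '    PRIMARY KEY (id)\n'
    ');\n'
)

def render_table(name, columns):
    # columns: iterable of objects exposing .name and .type
    return TABLE_TEMPLATE.render(name=name, columns=columns)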
def get(self):
    conn = database.open_connection(db_file)
    with conn:
        return database.list_events(conn), 200
    abort(500)
def get(self, user_email):
    conn = database.open_connection(db_file)
    with conn:
        return database.list_user_events(conn, user_email), 200
    abort(500)
def setUpClass(cls):
    database.initialize_connection(PreloadDatabaseMode.POPULATED_MEMORY)
    database.open_connection()
def tearDown(self):
    conn = database.open_connection(self.db_file)
    conn.execute("DELETE FROM attendings")
    conn.execute("DELETE FROM users")
    conn.commit()
def count_events(self, email):
    conn = database.open_connection(self.db_file)
    return database.count_events(conn, email)
def __init__(self):
    super().__init__()
    # Series title kept as-is (a proper noun), roughly "Yebuyu's Strange Files"
    self.note = Note("夜不语诡异档案")
    self.connection = database.open_connection()
    self.lock = mp.Lock()