Example #1
class TestStore(unittest.TestCase):
    def setUp(self):
        self.redis_storage = RedisStorage(host=os.getenv('REDIS_HOST'),
                                          port=int(os.getenv('REDIS_PORT')))
        self.store = Store(self.redis_storage)
        self.key = 'key1'
        self.value = 'value1'

    def test_store_connected(self):
        self.assertTrue(self.store.storage.set(self.key, self.value))
        self.assertEqual(self.store.get(self.key), self.value)

    def test_store_disconnected(self):
        self.redis_storage.db.get = MagicMock(side_effect=ConnectionError())

        self.assertRaises(ConnectionError, self.store.get, self.key)
        self.assertEqual(self.redis_storage.db.get.call_count, Store.max_retries)

    def test_cache_connected(self):
        self.assertTrue(self.store.cache_set(self.key, self.value))
        self.assertEqual(self.store.cache_get(self.key), self.value)

    def test_cache_disconnected(self):
        self.redis_storage.db.get = MagicMock(side_effect=ConnectionError())
        self.redis_storage.db.set = MagicMock(side_effect=ConnectionError())

        self.assertEqual(self.store.cache_get(self.key), None)
        self.assertEqual(self.store.cache_set(self.key, self.value), None)
        self.assertEqual(self.redis_storage.db.get.call_count, Store.max_retries)
        self.assertEqual(self.redis_storage.db.set.call_count, Store.max_retries)
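
For context, the assertions above imply a retry contract on the Store wrapper: get() retries the underlying client Store.max_retries times and then re-raises, while cache_get()/cache_set() swallow connection failures and return None. Below is a minimal sketch of a wrapper with that behaviour; it is an illustration only, and the project's real Store/RedisStorage classes may differ.

# Minimal sketch (assumption): a Store wrapper matching the contract the tests above rely on.
class Store(object):
    max_retries = 3

    def __init__(self, storage):
        self.storage = storage

    def _retry(self, func, *args):
        last_error = None
        for _ in range(self.max_retries):
            try:
                return func(*args)
            except ConnectionError as exc:
                last_error = exc
        raise last_error

    def get(self, key):
        # "Required" reads: propagate ConnectionError after max_retries failed attempts.
        return self._retry(self.storage.db.get, key)

    def cache_get(self, key):
        # Cache reads degrade gracefully: return None when the backend is unreachable.
        try:
            return self._retry(self.storage.db.get, key)
        except ConnectionError:
            return None

    def cache_set(self, key, value):
        try:
            return self._retry(self.storage.db.set, key, value)
        except ConnectionError:
            return None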
Example #2
class OperateCsv(object):
    """for csv file
    """
    def __init__(self):
        self.store = Store()

    def read_csv_columns(self, source_file, *cared_title, **kwargs):
        """read_csv_columns to read csv specific one or more columns.

        +--------------------+------+-------------------------------------------+
        | Input Parameters   | Man. | Description                               |
        +--------------------+------+-------------------------------------------+
        | source_file        | yes  | Path of the csv file                      |
        +--------------------+------+-------------------------------------------+
        | cared_title        | yes  | One or more column titles to read         |
        +--------------------+------+-------------------------------------------+
        | return             |      | [colA_list] or [[colA_list], [colB_list]] |
        +--------------------+------+-------------------------------------------+

        Example:

        +------------------+--------------+-------------+------------+
        | read_csv_columns | /opt/xxx.csv | Time        |            |
        +------------------+--------------+-------------+------------+
        | read_csv_columns | /opt/xxx.csv | Time        | SFN        |
        +------------------+--------------+-------------+------------+

        """

        result = []
        csv_obj = CsvHandler(source_file, cared_title[0])
        self.store.add(csv_obj, alias=kwargs.get('alias'))
        result = csv_obj.get_csv_columns_list(*cared_title)
        self.store.remove(alias=kwargs.get('alias'))
        return result
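For illustration, a hypothetical call pattern matching the docstring table above (the file path and column titles mirror the documented examples):

# Hypothetical usage of read_csv_columns; '/opt/xxx.csv', 'Time' and 'SFN' are placeholder values.
csv_helper = OperateCsv()
time_column = csv_helper.read_csv_columns('/opt/xxx.csv', 'Time')          # -> [time_list]
time_and_sfn = csv_helper.read_csv_columns('/opt/xxx.csv', 'Time', 'SFN')  # -> [[time_list], [sfn_list]]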
Example #3
def test_get_store_one_item(mocker):
    with open("test_config.json", "w") as f:
        f.write(
            '{ "store_name": "MyShop", "database_name": "MyShop.db",  "timeout": 3600}'
        )
    mocker.patch.object(Database, 'get_record')
    mocker.patch.object(Database, 'get_records')
    mocker.patch.object(User, "check_session")
    User.check_session.return_value = True
    mocker.patch.object(Store, 'get_categories')
    Database.get_records.return_value = {
        "1": {
            "sku": "1001",
            "name": "",
            "price": "",
            "discount": "",
            "final_price": "",
            "description": "",
            "material": "",
            "color": "",
            "size": 123
        }
    }
    Database.get_record.return_value = {"name": "", "terms": ""}
    mocker.patch.object(json, 'load')
    store = Store("test_config.json")
    mocker.patch.object(Store, 'get_categories')
    store.get_store()
    assert 1 == len(store.product_list)
    os.remove("test_config.json")
Example #4
 def setup(dbhost='localhost', dbport=7474, dburl=None, querypath=None):
     '''
     Program to set up for running our REST server.
     We do these things:
         - Attach to the database
         - Initialize our type objects so things like ClientQuery will work...
         - Load the queries into the database from flat files
             Not sure if doing this here makes the best sense, but it
             works, and currently we're the only one who cares about them
             so it seems reasonable -- at the moment ;-)
             Also, the queries relating to the REST server are the ones we're most
             likely to iterate on, so being able to update them just by restarting
             the REST server seems to make a lot of sense (at the moment)
         - Remember the set of queries in the 'allqueries' hash table
     '''
     if dburl is None:
         dburl = ('http://%s:%d/db/data/' % (dbhost, dbport))
     print >> sys.stderr, 'CREATING Graph("%s")' % dburl
     neodb = neo4j.Graph(dburl)
     qstore = Store(neodb, None, None)
     print GraphNode.classmap
     for classname in GraphNode.classmap:
         GraphNode.initclasstypeobj(qstore, classname)
     print "LOADING TREE!"
     if querypath is None:
         querypath = "/home/alanr/monitor/src/queries"
     queries = ClientQuery.load_tree(qstore, querypath)
     for q in queries:
         allqueries[q.queryname] = q
     qstore.commit()
     for q in allqueries:
         allqueries[q].bind_store(qstore)
 def _jsonstr_other(self, thing):
     'Do our best to make JSON out of a "normal" python object - the final "other" case'
     ret = '{'
     comma = ''
     attrs = thing.__dict__.keys()
     attrs.sort()
     if Store.has_node(thing) and Store.id(thing) is not None:
         ret += '"_id": %s' %  str(Store.id(thing))
         comma = ','
     for attr in attrs:
         skip = False
         for prefix in self.filterprefixes:
             if attr.startswith(prefix):
                 skip = True
                 continue
         if skip:
             continue
         value = getattr(thing, attr)
         if self.maxJSON > 0 and attr.startswith('JSON_') and len(value) > self.maxJSON:
             continue
         if self.expandJSON and attr.startswith('JSON_') and value.startswith('{'):
             js = pyConfigContext(value)
             if js is not None:
                 value = js
         ret += '%s"%s":%s' % (comma, attr, self._jsonstr(value))
         comma = ','
     ret += '}'
     return ret
Example #7
 def create_player(cls, name):
     """
     :type name: str
     """
     if cls.get_player(name):
         raise RuntimeError('Player already exists: %s' % name)
     Store.get_store().set_player(Player(name, trueskill.Rating()))
Example #8
    def members_ring_order(self):
        'Return all the Drones that are members of this ring - in ring order'
        ## FIXME - There's a cypher query that will return these all in one go
        # START Drone=node:Drone(Drone="drone000001")
        # MATCH Drone-[:RingNext_The_One_Ring*]->NextDrone
        # RETURN NextDrone.designation, NextDrone

        if self._insertpoint1 is None:
            #print >> sys.stderr, 'NO INSERTPOINT1'
            return
        if Store.is_abstract(self._insertpoint1):
            #print >> sys.stderr, ('YIELDING INSERTPOINT1:', self._insertpoint1
            #,       type(self._insertpoint1))
            yield self._insertpoint1
            return
        startid = Store.id(self._insertpoint1)
        # We can't pre-compile this, but hopefully we won't use it much...
        q = '''START Drone=node(%s)
             MATCH p=Drone-[:%s*0..]->NextDrone
             WHERE length(p) = 0 or Drone <> NextDrone
             RETURN NextDrone''' % (startid, self.ournexttype)
        query = neo4j.CypherQuery(CMAdb.cdb.db, q)
        for elem in CMAdb.store.load_cypher_nodes(query, Drone):
            yield elem
        return
Example #9
    def initStigmergy(self):
        # default travel time (sec) = length (m) / speed (m/sec)
        self.long_term_stigmergies = dict([(edge_id, stigmergy.Stigmergy(
            netutil.freeFlowTravelTime(self.sumo_net, edge_id))) for edge_id in self.edge_ids])

        self.short_term_stigmergies = copy.deepcopy(self.long_term_stigmergies)

        # use stored data in long term stigmergy
        if self.conf.short_cut != -1:
            before_read_time = time.clock()

            if self.conf.redis_use:
                store = Store(self.conf.redis_host, self.network_file)
            else:
                store = StoreJSON(self.conf.short_cut_file, self.network_file, 'r')

            past_stigmergy_list = store.getLongTermStigmergy(self.conf.weight_of_past_stigmergy, self.conf.short_cut)

            for k in past_stigmergy_list:
                key = k.replace(store.createFileNameWithoutKey(
                    self.conf.weight_of_past_stigmergy,
                    self.conf.short_cut), "")
                data_list = [float(travel_time) for travel_time in store.getLongTermStigmergyWithKey(k)]
                self.long_term_stigmergies[key].addStigmergy(data_list)
            print("read long term stigmergy from redis(" + str(time.clock() - before_read_time) + "sec)")
    def play(self):
        """
        main method
        """
        Database.connect()
        if Database().is_empty():
            Category.importing()
            Store.importing()
            Product.importing()
        user = input("""1 - Quel aliment souhaitez-vous remplacer ?
2 - Retrouver mes aliments substitués.
3 - Quitter
:""")
        try:
            user = int(user)
            assert 0 < user <= 3
        except ValueError:
            return self.play()
        except AssertionError:
            return self.play()
        if user == 1:
            self.find_substitute()
            Database.disconnect()
        elif user == 2:
            products = Substitute.load()
            for product in products:
                print(Product.get(product[0]).display())
            self.play()
        elif user == 3:
            print('You\'re leaving the program')
            Database.disconnect()
            exit()
        else:
            print('Choice has to be 1, 2 or 3')
            return self.play()
Example #11
def domain_list():
    store = Store()
    domains = store.index()
    if request.headers.get("Accept") == "text/plain":
        return template("domains.text", domains=domains)
    else:
        return template("domains.html", domains=domains, url=url, flashed_messages=get_flashed_messages())
Example #12
 def test_cache_get(self, value):
     self.redis_base.flushall()
     store = Store(RedisStore())
     key, val = value
     self.redis_base.set(key, val)
     self.assertEqual(val, store.cache_get(key))
     self.assertIsInstance(store.cache_get(key), basestring)
Example #13
 def __init__(self, id=ID, size=4, instance_type='m5.large'):
     self.id = id
     with Session() as session:
         # Get stack
         if self.id in session:
             self.stack = session[self.id]
         else:
             self.stack = {}
             session[self.id] = self.stack
         # Get Store
         if 'store' in self.stack:
             self.store = self.stack['store']
         else:
             self.store = Store(id=self.id)
             self.stack['store'] = self.store
         # Get Cluster
         if 'cluster' in self.stack:
             self.cluster = self.stack['cluster']
         else:
             self.cluster = Cluster(id=self.id,
                                    size=size,
                                    region_name='eu-west-1',
                                    instance_type=instance_type,
                                    instance_role=HOME_INSTANCE_ROLE)
             self.stack['cluster'] = self.cluster
    def add_mac_ip(self, drone, macaddr, IPlist):
        '''We process all the IP addresses that go with a given MAC address (NICNode)
        The parameters are expected to be canonical address strings like str(pyNetAddr(...)).
        '''
        nicnode = self.store.load_or_create(NICNode, domain=drone.domain, macaddr=macaddr)
        if not Store.is_abstract(nicnode):
            # This NIC already existed - let's see what IPs it already owned
            currips = {}
            oldiplist = self.store.load_related(nicnode, CMAconsts.REL_ipowner, IPaddrNode)
            for ipnode in oldiplist:
                currips[ipnode.ipaddr] = ipnode
                #print >> sys.stderr, ('IP %s already related to NIC %s'
                #%       (str(ipnode.ipaddr), str(nicnode.macaddr)))
            # See what IPs still need to be added
            ips_to_add = []
            for ip in IPlist:
                if ip not in currips:
                    ips_to_add.append(ip)
            # Replace the original list of IPs with those not already there...
            IPlist = ips_to_add

        # Now we have a NIC and IPs which aren't already related to it
        for ip in IPlist:
            ipnode = self.store.load_or_create(IPaddrNode, domain=drone.domain
            ,       ipaddr=ip)
            #print >> sys.stderr, ('CREATING IP %s for NIC %s'
            #%       (str(ipnode.ipaddr), str(nicnode.macaddr)))
            if not Store.is_abstract(ipnode):
                # Then this IP address already existed,
                # but it wasn't related to our NIC...
                # Make sure it isn't related to a different NIC
                for oldnicnode in self.store.load_in_related(ipnode, CMAconsts.REL_ipowner
                    , GraphNode.factory):
                    self.store.separate(oldnicnode, CMAconsts.REL_ipowner, ipnode)
            self.store.relate(nicnode, CMAconsts.REL_ipowner, ipnode)
Example #15
 def __init__(self, id=ID, size=4, instance_type='p2.xlarge'):
     self.id = id
     with Session() as session:
         # Get stack
         if self.id in session:
             self.stack = session[self.id]
         else:
             self.stack = {}
             session[self.id] = self.stack
         # Get Store
         if 'store' in self.stack:
             self.store = self.stack['store']
         else:
             self.store = Store(id=self.id,
                                profile_name='federate',
                                tags=OATH_TAGS)
             self.stack['store'] = self.store
         # Get Cluster
         if 'cluster' in self.stack:
             self.cluster = self.stack['cluster']
         else:
             self.cluster = Cluster(id=self.id,
                                    size=size,
                                    profile_name='federate',
                                    region_name='eu-west-1',
                                    image_id=OATH_EU_WEST_1_AMI,
                                    subnet_id='subnet-ea60c68e',
                                    instance_type=instance_type,
                                    instance_role=OATH_INSTANCE_ROLE,
                                    tags=OATH_TAGS,
                                    ip_mask=OATH_NETWORK)
             self.stack['cluster'] = self.cluster
Example #16
class Entry(object):
    def __init__(self):
        self.store = Store()

    def load_image(self, file_path, name):
        if not os.path.exists(file_path):
            print("Image file not existed")
            return -1

        image_hash = self.compute_hash(file_path)
        if self.store.get_face_by_hash(image_hash):
            print("Face already recorded.")
            return -2

        try:
            image = face_recognition.load_image_file(file_path)
            face_encoding = face_recognition.face_encodings(image)[0]
        except Exception:
            print("Failed to recognition face")
            return -3

        face = {
            "name": name,
            "hash": image_hash,
            "face_encoding": list(face_encoding)
        }

        self.store.create_face(face)

    def compute_hash(self, file_path):
        with open(file_path, "r") as f:
            data = f.read()
            image_md5 = hashlib.md5(data)
            return image_md5.hexdigest()
Example #17
    def main(self):
        """ set everything up, then invoke go() """

        (options, args) = self.parser.parse_args()

        log_level = logging.ERROR
        if options.debug:
            log_level = logging.DEBUG
        elif options.verbose:
            log_level = logging.INFO
        logging.basicConfig(level=log_level)    #, format='%(message)s')


        if options.test:
            self.store = DummyStore(self.name, self.doc_type)
        else:
            # load in config file for real run
            config = ConfigParser.ConfigParser()
            config.readfp(open(options.ini_file))
            auth_user = config.get("DEFAULT",'user')
            auth_pass = config.get("DEFAULT",'pass')
            server = config.get("DEFAULT",'server')

            self.store = Store(self.name, self.doc_type, auth_user=auth_user, auth_pass=auth_pass, server=server)


        if options.cache:
            logging.info("using .cache")
            opener = urllib2.build_opener(CacheHandler(".cache"))
            urllib2.install_opener(opener)

        self.go(options)
Example #18
  def performTask(self, task):
    self.performing_task = task
    
    # Creating new store
    store = Store(task.index_fields)

    # Reading data
    self.state = 'reading data from log %s' % task.log
    data = self.filekeeper.read(task.log)
    logging.debug("[worker %s] read %d bytes from %s" % (task.collector_name, len(data), task.log))

    # Parsing and pushing data
    self.state = 'parsing data from log %s' % task.log
    total = 0
    parsed = 0
    t1 = time.time()
    for line in data.split('\n'):
      record = task.parser.parseLine(line+'\n')
      if task.parser.successful:
        parsed += 1
        # after_parse_callbacks
        for callback in task.after_parse_callbacks:
          func = callback[0]
          args = [record] + callback[1:]
          try:
            func(*args)
          except Exception, e:
            logging.debug('[%s.%s] %s' % (func.__module__, func.__name__, repr(e)))
        store.push(record)
      total += 1
Example #19
def auth(request):

    try:
        checkAuth = Store()
    except Exception as e:
        log().data(
            "error",
            e).msg("Unexpected exception while attempting to initialise store")
        return "", 404

    try:
        authHeader = request.headers[AUTH_HEADER][5:].split(":")
        user = authHeader[0]
        password = authHeader[1]
    except Exception as e:
        log().data("error", e).msg(
            "Aborting authentication attempt, missing required request headers"
        )
        return "", 404

    if checkAuth.isAuthorised(user, password):
        log().data("user", user).msg("User authenticated, returning 200")
        return "", 200
    else:
        log().data("user", user).msg("User not authenticated, returning 404")
        return "", 404
Example #20
def total():
    result = ['<head/><body><table><tr><td>Name</td><td>Total</td></tr>']
    s = Store('localhost:27017')
    for record in s.find('test', 'total', {}):
      result.append('<tr><td>%s</td><td>%d</td></tr>' % (record['_id'], record['value']))
    result.append('</table><body>')
    return '\n'.join(result)
Example #21
class Chann:
    """
    Communication and user-friendliness between Store and web page
    """
    def __init__(self):
        "position - also, keep a list of Store's for history and back and such"
        self.p = 0
        self.st = Store()

    def move(self, how):
        # 'how' is a keyboard keyCode from the web page: 37=left, 38=up (jump to start), 39=right, 40=down (jump to end)
        q = self.p
        if how == 37 and self.p > 0:
            self.p -= 1
        elif how == 38:
            self.p = 0
        elif how == 39 and self.p < self.st.sz:
            self.p += 1
        elif how == 40:
            self.p = self.st.sz
        return str(q) + " > " + str(self.p)

    def put(self, ch):
        self.st.put(self.p, ch)
        self.p += 1
        return self.st.writ()

    def wh(self):
        return str(self.p)
Example #22
class AddYoutubeListThread(threading.Thread):
    def __init__(self, jukebox, list_url, add_playlist):
        threading.Thread.__init__(self)
        self.jukebox = jukebox
        self.list_url = list_url
        self.add_playlist = add_playlist
        self.store = Store()

    def run(self):
        infos = youtube.get_video_infos_from_list(self.list_url)
        for info in infos:
            print(info)
            try:
                song = Song()
                song.url = info["url"]
                song.uid = info["uid"]
                song.title = info["title"]
                song.img_url = info["image_url"]
                song.duration = info['duration']

                if song.title and song.duration:
                    # Only add the song when the data is valid
                    
                    self.store.update_or_new(song)

                    if self.add_playlist:
                        self.jukebox.append_song_playlist(song)
                
            except Exception as e:
                print(e)
Example #23
 def test_cache_set(self, value):
     self.redis_base.flushall()
     store = Store(RedisStore())
     key, val = value
     is_set = store.cache_set(key, val, 10)
     self.assertTrue(is_set)
     self.assertEqual(val, self.redis_base.get(key))
     self.assertIsInstance(self.redis_base.get(key), basestring)
Example #24
def insert(name, data):
    global connection
    doc = {'time': datetime.utcnow(), 'name': name, 'data': data}
    print doc
    s = Store('localhost:27017')
    s.store('test', 'doc', doc)
    connection.push('stats', doc)
    return 'Inserted [%s:%d]' % (name, data)
Example #25
    def test_cache_set_expire(self, value):
        store = Store(RedisStore())
        key = hashlib.md5("".join(value) + time.ctime()).hexdigest()

        is_set = store.cache_set(key, value[1], 1)
        self.assertTrue(is_set)
        time.sleep(1.1)
        self.assertIs(None, self.redis_base.get(key))
Example #26
def create_domain():
    domain = request.forms.domain
    if domain:
        store = Store()
        store.create(domain)
        flash("%s added" % domain)
        fanout.update()
    return redirect(url("domains"))
Example #27
    def test_concept_storing(self):
        s = Store()
        s.add_concept(1)
        s.add_concept(2)

        l = set(s.concepts())

        self.assertEqual(l, {1, 2})
Example #28
 def test_receipt_nonexempt_output(self):
     store = Store([{
         'quantity': '1',
         'item': 'music CD',
         'price': '14.99'
     }])
     expected = "1 music CD: 16.49\nSales Taxes: 1.50\nTotal: 16.49"
     self.assertEqual(expected, store.print_receipt())
Example #29
    def __init__(self):
        if JukeBox.music_player == None:
            JukeBox.music_player = AsyncMusicPlayer()
            JukeBox.music_player.start()
        self.store = Store()

        songs = self.store.get_songs()
        self.append_song_list(songs)
Example #30
 def test_receipt_imported_exempt(self):
     store = Store([{
         'quantity': '1',
         'item': 'imported box of chocolates',
         'price': '10.00'
     }])
     expected = "1 imported box of chocolates: 10.50\nSales Taxes: 0.50\nTotal: 10.50"
     self.assertEqual(expected, store.print_receipt())
Example #31
 def test_receipt_imported_nonexempt(self):
     store = Store([{
         'quantity': '1',
         'item': 'imported bottle of perfume',
         'price': '47.50'
     }])
     expected = "1 imported bottle of perfume: 54.65\nSales Taxes: 7.15\nTotal: 54.65"
     self.assertEqual(expected, store.print_receipt())
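The receipt tests in Examples #28, #30 and #31 are consistent with the classic sales-tax rules: 10% basic tax on non-exempt goods, 5% import duty, and each tax amount rounded up to the nearest 0.05. A minimal sketch of that calculation under those assumed rules (the Store class under test may implement it differently):

from decimal import Decimal, ROUND_UP

def sales_tax(price, exempt=False, imported=False):
    """Tax implied by the expected receipts: 10% basic tax on non-exempt items,
    5% import duty, each rounded up to the nearest 0.05."""
    rate = Decimal('0')
    if not exempt:
        rate += Decimal('0.10')
    if imported:
        rate += Decimal('0.05')
    raw = Decimal(price) * rate
    # Round up to the nearest 0.05.
    return (raw / Decimal('0.05')).to_integral_value(rounding=ROUND_UP) * Decimal('0.05')

# sales_tax('14.99')                             -> Decimal('1.50')  (receipt total 16.49)
# sales_tax('10.00', exempt=True, imported=True) -> Decimal('0.50')  (receipt total 10.50)
# sales_tax('47.50', imported=True)              -> Decimal('7.15')  (receipt total 54.65)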
Example #32
	def __init__(self, file_path):
		if Store.is_usable:
			Store.file_name = file_path
			print "\nFound Your File!\n\n"
		else:
			Store.file_name = file_path
			Store.create_file()
			print "Your file has been created!\n\n"
Example #33
 def fill_tables():
     """ fill the tables after creation except historic table:
         five tables can be filled with the datas at the beginning ."""
     Category.insert()
     Product.insert()
     Store.insert()
     CategoryProduct.insert()
     StoreProduct.insert()
Example #34
 def setUp(self):
     self.redis_storage = RedisStore(host=os.getenv('REDIS_HOST',
                                                    'localhost'),
                                     port=os.getenv('REDIS_PORT', 6379))
     self.store = Store(self.redis_storage)
     self.store.connect()
     self.key = 'sample_key'
     self.value = 'sample_value'
Example #35
 def __init__(self, import_name, downstream_channels=[]):
     super(FlaskGCRun, self).__init__(import_name)
     self.PROJECT_ID = os.getenv('GCP_PROJECT')
     if os.getenv('BUCKET_PIPELINE') != None:
         self._store = Store(os.getenv('BUCKET_PIPELINE'))
     if os.getenv('BUCKET_OUTPUT') != None:
         self._output_store = Store(os.getenv('BUCKET_OUTPUT'))
     self.downstream_channels = downstream_channels
     self.init_app()
Example #36
 def load(self, iterator, append=True):
     if not append:
         self.records = Store()
     count = 0
     for record in iterator:
         record['record_has_been_loaded'] = True
         self.write(**record)
         count += 1
     return count
Example #37
 def __init__(self):
     super().__init__()
     self.setupUi(self)
     self.store = Store()
     self.refresh()
     self.pushButton.clicked.connect(self.search_func)
     self.add_btn.clicked.connect(self.adding)
     self.rmv_btn.clicked.connect(self.remove)
     self.ed_btn.clicked.connect(self.edit)
Example #38
 def __init__(self, botName, version, author, subreddit, trigger):
     self.bot = redditBot.RedditBot(botName, version, author)
     self.botName = botName
     self.subreddit = self.bot.get_subreddit(subreddit)
     self.trigger = trigger
     self.retries = 0
     self.startTime = datetime.datetime.now()
     self.pusher = Trigger(self.botName, "triggered")
     self.store = Store(self.botName, "notifications")
Example #39
File: test.py Project: ildarkit/hw3
def has_storage():
    result = False
    try:
        store = Store(connect_timeout=2, attempts=2)
        store.connect()
        result = True
    except redis.ConnectionError:
        pass
    return result
Example #40
 def setUp(self):
     HOST = 'host'
     PASSWORD = '******'
     PORT = 15377
     TIMEOUT = 3
     DB = 0
     MAX_RETRIES = 3
     self.context = {}
     self.store = Store(host=HOST, port=PORT, password=PASSWORD, db=DB, timeout=TIMEOUT, max_retries=MAX_RETRIES)
Example #41
    def issue_coupons(self, store: Store) -> None:
        assert store in self.stores, f"Cannot issue coupons to {store.name} since {store.name} is not registered!"

        # populate initial coupon stock for each type of coupon
        for rule, coupon_amount in INITIAL_COUPON_STOCK.items():
            coupons_list = []
            for i in range(coupon_amount):
                coupons_list += [self.generator.generate_coupon(rule)]
            store.add_coupons_to_inventory(coupons_list, rule)
Example #42
 def validate(self):
     """Verify current configuration and updates KID"""
     opt = Store(kid=self.kid_base)
     opt.desc = self.desc
     failed = opt.validate()
     if len(failed) > 0:
         print 'Failed validations', failed
     for k, v in opt.desc.iteritems():
         self.desc[k] = v
     return failed
Example #43
    def load_face(self):
        store = Store()
        self.faces = store.list_faces()

        self.known_face_encodings = []
        self.known_face_names = []

        for face in self.faces["faces"]:
            self.known_face_encodings.append(face["face_encoding"])
            self.known_face_names.append(face["name"])
Example #44
    def test_parse_interference_with_child_links(self):
        store = Store()
        # F links to person and slow
        store.integrate(parse("F(person,slow)"))

        # this should not link itself to the integrated "slow" concept
        parse("F(person,slow)", store)

        integrated = store.get_concept(parse("slow"))
        self.assertEqual(len(integrated.children), 1)
Example #45
def success_get_batch_more_than_values_count():
    values = [
        np.asarray([1., 2., 3.]),
        np.asarray([4., 5., 6.]),
        np.asarray([7., 8., 9.])
    ]
    store = Store(values, 2019)
    next_values = store.next(4)
    expected = np.asarray([values[1], values[2], values[0], values[2]])
    equals(expected, next_values)
Example #46
    def GET(self,id1,id2,id3,id4,type):
#		image_url_regex = r'/([a-z0-9]{2})/([a-z0-9]{2})/([a-z0-9]{19,36})(-[sc]\d{2,4})?\.(gif|jpg|jpeg|png)$'
		id = '{0}{1}{2}'.format(id1,id2,id3)
		from store import Store
		store = Store()
		file = store.get(id)
		if file is None:
		    store.close()
		    return render.error("not found",'/')

		org_path = '{0}/{1}/{2}.{4}'.format(id1,id2,id3,id4,type)
		org_file = '{0}/{1}'.format(THUMB_ROOT, org_path)
		if not os.path.exists(org_file):
		    save_file(file, org_file)
		if id4 is None:
		    dst_path = org_path
		    dst_file = org_file
		else:
		    dst_path = '{0}/{1}/{2}{3}.{4}'.format(id1,id2,id3,id4,type)
		    dst_file = '{0}/{1}'.format(THUMB_ROOT, dst_path)
		    #print(ids[3][1:])
		    size = int(id4[2:])
		    if size not in SUPPORTED_SIZE:
		        print('unsupported size: {0}'.format(size))
		        store.close()
		        return render.error("not found",'/')
		    thumb_image(org_file, size, dst_file)
#		print(org_file)
#		print(dst_file)
#		print web.ctx.env
		server_soft = web.ctx.env['SERVER_SOFTWARE']
#		print server_soft
		if server_soft[:5] == 'nginx' and os.name != 'nt':
			print("in")
			store.close()
			#start_response('200 OK', [('X-Accel-Redirect', '{0}/{1}'.format(THUMB_PATH, dst_path))])
			web.header('X-Accel-Redirect', '{0}/{1}'.format(THUMB_PATH, dst_path))
			return ;
		
#		print(file.type) 
		web.header('Content-Type',  str(file.type))
		web.header('Content-Length', '{0.length}'.format(file))
		web.header('Via','store')
		#print(headers)
		
		# TODO: response file content
		distfile = open(dst_file, 'rb')
		data = distfile.read()
		store.close()
		return data; #200OK
		#return [data]
		
		#fd = open(dst_file,'r')
		#return environ['wsgi.file_wrapper'](fd, 4096)
		return render.error("not found",'/')
Example #47
 def test_buy(self):
     store = Store(2)
     count = 8
     while count > 0:
         buy_res = store.buy("PROVINCE")
         count -= 1
         self.assertEqual(store.base_inventory["PROVINCE"], count)
         self.assertEqual(buy_res, "PROVINCE")
     self.assertEqual(store.buy("PROVINCE"), None)
     self.assertEqual(store.buy("PROVINCE"), None)
     self.assertEqual(store.base_inventory["PROVINCE"], 0)
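For context, a minimal sketch of the buy() contract the test above exercises: decrement the named pile and return the card name, or return None once the pile is empty. Sizing the base inventory at 8 provinces for a 2-player game is inferred from the test and is an assumption about the real Store.

# Minimal sketch (assumption): Store(num_players) builds a base inventory and
# buy() draws from the named pile until it runs out.
class Store(object):
    def __init__(self, num_players):
        self.base_inventory = {"PROVINCE": 8 if num_players <= 2 else 12}

    def buy(self, card_name):
        if self.base_inventory.get(card_name, 0) <= 0:
            return None
        self.base_inventory[card_name] -= 1
        return card_name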
 def setUp(self):
     self.port = self.get_port()
     self.handler_url = f'http://localhost:{self.port}/method/'
     self.server = HTTPServer(('localhost', self.port), api.MainHTTPHandler)
     self.server_thread = Thread(target=self.server.serve_forever)
     self.server_thread.setDaemon(True)
     self.server_thread.start()
     self.store = Store(MemcacheAdapter(
         address=os.environ['STORE_PORT_11211_TCP_ADDR'],
         port=os.environ['STORE_PORT_11211_TCP_PORT']
     ))
Example #49
def main():
    new_store = Store()

    with codecs.open('data/nouns.tsv', 'r', 'utf-8') as f:
        new_store.read_nouns(f)

    with codecs.open('data/verbs.tsv', 'r', 'utf-8') as f:
        new_store.read_verbs(f)

    # pickle data is binary, so write the file in 'wb' mode rather than as utf-8 text
    with open('data/store.p', 'wb') as w:
        pickle.dump(new_store, w)
Example #50
    def add_mac_ip(self, drone, macaddr, IPlist):
        '''We process all the IP addresses that go with a given MAC address (NICNode)
        The parameters are expected to be canonical address strings like str(pyNetAddr(...)).
        '''
        nicnode = self.store.load_or_create(NICNode,
                                            domain=drone.domain,
                                            macaddr=macaddr)
        macprefix = str(nicnode.macaddr)[0:8]
        try:
            org = str(netaddr.EUI(nicnode.macaddr).oui.registration().org)
        except netaddr.NotRegisteredError:
            local_OUI_map = self.config['OUI']
            if macprefix in local_OUI_map:
                org = local_OUI_map[macprefix]
            else:
                org = macprefix
        if not Store.is_abstract(nicnode):
            # This NIC already existed - let's see what IPs it already owned
            currips = {}
            oldiplist = self.store.load_related(nicnode, CMAconsts.REL_ipowner,
                                                IPaddrNode)
            for ipnode in oldiplist:
                currips[ipnode.ipaddr] = ipnode
                #print >> sys.stderr, ('IP %s already related to NIC %s'
                #%       (str(ipnode.ipaddr), str(nicnode.macaddr)))
            # See what IPs still need to be added
            ips_to_add = []
            for ip in IPlist:
                if ip not in currips:
                    ips_to_add.append(ip)
            # Replace the original list of IPs with those not already there...
            IPlist = ips_to_add

        # Now we have a NIC and IPs which aren't already related to it
        for ip in IPlist:
            ipnode = self.store.load_or_create(IPaddrNode,
                                               domain=drone.domain,
                                               ipaddr=ip)
            #print >> sys.stderr, ('CREATING IP %s for NIC %s'
            #%       (str(ipnode.ipaddr), str(nicnode.macaddr)))
            if not Store.is_abstract(ipnode):
                # Then this IP address already existed,
                # but it wasn't related to our NIC...
                # Make sure it isn't related to a different NIC
                for oldnicnode in self.store.load_in_related(
                        ipnode, CMAconsts.REL_ipowner, GraphNode.factory):
                    self.store.separate(oldnicnode, CMAconsts.REL_ipowner,
                                        ipnode)
            print >> sys.stderr, (
                'RELATING (%s)-[:ipowner]->(%s)	[%s]' %
                (str(nicnode.macaddr), str(ipnode.ipaddr), org))
            self.store.relate(nicnode, CMAconsts.REL_ipowner, ipnode)
            if org != macprefix and not hasattr(nicnode, 'OUI'):
                nicnode.OUI = org
 def delrole(self, roles):
     'Delete a role from our GraphNode'
     if isinstance(roles, tuple) or isinstance(roles, list):
         for role in roles:
             self.delrole(role)
         return self.roles
     assert isinstance(roles, str) or isinstance(roles, unicode)
     if roles in self.roles:
         self.roles.remove(roles)
     # Make sure the 'roles' attribute gets marked as dirty...
     Store.mark_dirty(self, 'roles')
     return self.roles
Example #52
 def post_db_init(self):
     '''Create IS_A relationship to our 'class' node in the database, and set creation time'''
     if not self._baseinitfinished:
         self._baseinitfinished = True
         if Store.is_abstract(self) and self.nodetype != CMAconsts.NODE_nodetype:
             store = Store.getstore(self)
             if self.nodetype not in GraphNode.classtypeobjs:
                 GraphNode.initclasstypeobj(store, self.nodetype)
             store.relate(self, CMAconsts.REL_isa, GraphNode.classtypeobjs[self.nodetype])
             assert GraphNode.classtypeobjs[self.nodetype].name == self.nodetype
             self.time_create_ms = int(round(time.time()*1000))
             self.time_create_iso8601  = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime())
Example #53
def analyze_bonus():
    s = Store()
    codes = s.get_all_stocks()
    arr = []
    for code in codes:
        print "analyzing %s..."%(code)
        bonus = s.get_bonus(code)
        if len(bonus) > 0:
            bonus = bonus.set_index("announce_date")
            bonus = bonus.sort_index()
        else:
            continue
        finance = s.get_finance(code)
        if len(finance) > 0:
            finance = finance.set_index("date")
            finance = finance.sort_index()
        
        sql = "select MIN(date) from market where code = \"%s\""%(code)
        cursor = conn.cursor()
        count = cursor.execute(sql)
        if count == 0:
            continue
        start_date = cursor.fetchone()[0]
        cursor.close()
        bonus_index = 0
        for i in range(0, len(finance)):
            if bonus_index >= len(bonus):
                break;
            
            bonus_date = bonus.index[bonus_index]
            date = finance.index[i]
            if date < start_date:
                continue
            next_date = datetime.date(2199, 12, 31)
            if i < len(finance) - 1:
                next_date = finance.index[i + 1]
            
            if bonus_date > date and bonus_date <= next_date:
                f = finance.iloc[i]
                b = bonus.iloc[bonus_index]
                dict = {}
                dict["date"] = date
                dict["announce_date"] = bonus_date
                dict["code"] = f.code
                dict["fund"] = f.fund_per_share
                dict["total"] = b.bonus_stock + b.tranadd_stock
                dict["exright_date"] = b.exright_date
                arr.append(dict)
                bonus_index = bonus_index + 1
    
    return pandas.DataFrame(arr)
def initstore():
    global version_printed
    db = neo4j.Graph(None)
    if not version_printed:
        print >> sys.stderr, 'USING NEO4J VERSION %s' % str(db.neo4j_version)
        print >> sys.stderr, 'USING py2neo VERSION %s' % str(py2neo.__version__)
        version_printed = True
    GraphNode.clean_graphnodes()
    db.delete_all()
    CMAinit(None)
    OurStore = Store(db, uniqueindexmap=uniqueindexes, classkeymap=keymap)
    OurStore.clean_store()
    CreateIndexes(db, [cls.__name__ for cls in Classes])
    return OurStore
    def __str__(self):
        "Default routine for printing GraphNodes"
        result = "%s({" % self.__class__.__name__
        comma = ""
        for attr in Store.safe_attrs(self):
            result += "%s%s = %s" % (comma, attr, str(getattr(self, attr)))
            comma = ",\n    "
        if Store.has_node(self):
            if Store.is_abstract(self):
                result += comma + 'HasNode = "abstract"'
            else:
                result += comma + "HasNode = %d" % Store.id(self)

        result += "\n})"
        return result
Example #56
    def __str__(self):
        'Default routine for printing CMAclass objects'
        result = '%s({' % self.__class__.__name__
        comma  = ''
        for attr in Store.safe_attrs(self):
            result += '%s%s = %s'% (comma, attr, str(getattr(self, attr)))
            comma = ",\n    "
        if Store.has_node(self):
            if Store.is_abstract(self):
                result += comma + 'HasNode = "abstract"'
            else:
                result += (comma + 'HasNode = %d' %Store.id(self))

        result += "\n})"
        return result
Example #57
def init():
    if intconf('use_hdfs_tree') != 0:
        shell_cmd("mv %s %s.bak" % (conf('tree_pb'), conf('tree_pb')))
        shell_cmd("hadoop fs -get %s/%s/%s %s" % (conf('upload_url'), conf('data_dir'), conf('tree_filename'), conf('tree_pb')))

    _LIB_NAME = 'libselector.so'
    ctypes.CDLL(_LIB_NAME, ctypes.RTLD_GLOBAL)

    from store import Store
    from dist_tree import DistTree
    s = Store(conf('tree_store_config'))
    s.load(conf('tree_pb'))
    tree = DistTree()
    tree.set_store(s.get_handle())
    tree.load()
Example #58
    def add_mac_ip(self, drone, macaddr, IPlist):
        '''We process all the IP addresses that go with a given MAC address (NICNode)
        The parameters are expected to be canonical address strings like str(pyNetAddr(...)).
        '''
        nicnode = self.store.load_or_create(NICNode, domain=drone.domain, macaddr=macaddr)
        macprefix = str(nicnode.macaddr)[0:8]
        try:
            org = str(netaddr.EUI(nicnode.macaddr).oui.registration().org)
        except netaddr.NotRegisteredError:
            local_OUI_map = self.config['OUI']
            if macprefix in local_OUI_map:
                org = local_OUI_map[macprefix]
            else:
                org = macprefix
        if not Store.is_abstract(nicnode):
            # This NIC already existed - let's see what IPs it already owned
            currips = {}
            oldiplist = self.store.load_related(nicnode, CMAconsts.REL_ipowner, IPaddrNode)
            for ipnode in oldiplist:
                currips[ipnode.ipaddr] = ipnode
                #print >> sys.stderr, ('IP %s already related to NIC %s'
                #%       (str(ipnode.ipaddr), str(nicnode.macaddr)))
            # See what IPs still need to be added
            ips_to_add = []
            for ip in IPlist:
                if ip not in currips:
                    ips_to_add.append(ip)
            # Replace the original list of IPs with those not already there...
            IPlist = ips_to_add

        # Now we have a NIC and IPs which aren't already related to it
        for ip in IPlist:
            ipnode = self.store.load_or_create(IPaddrNode, domain=drone.domain
            ,       ipaddr=ip)
            #print >> sys.stderr, ('CREATING IP %s for NIC %s'
            #%       (str(ipnode.ipaddr), str(nicnode.macaddr)))
            if not Store.is_abstract(ipnode):
                # Then this IP address already existed,
                # but it wasn't related to our NIC...
                # Make sure it isn't related to a different NIC
                for oldnicnode in self.store.load_in_related(ipnode, CMAconsts.REL_ipowner
                    , GraphNode.factory):
                    self.store.separate(oldnicnode, CMAconsts.REL_ipowner, ipnode)
            print >> sys.stderr, ('RELATING (%s)-[:ipowner]->(%s)	[%s]'
            %       (str(nicnode.macaddr), str(ipnode.ipaddr), org))
            self.store.relate(nicnode, CMAconsts.REL_ipowner, ipnode)
            if org != macprefix and not hasattr(nicnode, 'OUI'):
                nicnode.OUI = org
Example #59
 def initclasstypeobj(store, nodetype):
     '''Initialize GraphNode.classtypeobjs for our "nodetype"
     This involves
      - Ensuring that there's an index for this class, and the NODE_nodetype class
      - Caching the class that goes with this nodetype
      - setting up all of our IS_A objects, including the root object if necessary,
      - updating the store's uniqueindexmap[nodetype]
      - updating the store's classkeymap[nodetype]
      - updating GraphNode.classtypeobjs[nodetype]
      This should eliminate the need to do any of these things for any class.
     '''
     if nodetype != CMAconsts.NODE_nodetype and CMAconsts.NODE_nodetype not in store.classkeymap:
         # Have to make sure our root type node exists and is set up properly
         GraphNode.initclasstypeobj(store, CMAconsts.NODE_nodetype)
     ourclass = GraphNode.classmap[nodetype]
     rootclass = GraphNode.classmap[CMAconsts.NODE_nodetype]
     if nodetype not in store.classkeymap:
         store.uniqueindexmap[nodetype] = True
         keys = ourclass.__meta_keyattrs__()
         ckm_entry = {'kattr': keys[0], 'index': nodetype}
         if len(keys) > 1:
             ckm_entry['vattr'] = keys[1]
         else:
             ckm_entry['value'] = 'None'
         store.classkeymap[nodetype] = ckm_entry
     store.db.get_or_create_index(neo4j.Node, nodetype)
     ourtypeobj = store.load_or_create(rootclass, name=nodetype)
     assert ourtypeobj.name == nodetype
     if Store.is_abstract(ourtypeobj) and nodetype != CMAconsts.NODE_nodetype:
         roottype = store.load_or_create(rootclass, name=CMAconsts.NODE_nodetype)
         store.relate(ourtypeobj, CMAconsts.REL_isa, roottype)
     GraphNode.classtypeobjs[nodetype] = ourtypeobj
    def __init__(self, designation, port=None, startaddr=None
    ,       primary_ip_addr=None, domain=CMAconsts.globaldomain
    ,       status= '(unknown)', reason='(initialization)', roles=None, key_id=''):
        '''Initialization function for the Drone class.
        We mainly initialize a few attributes from parameters as noted above...

        The first time around we also initialize a couple of class-wide query
        strings for a few queries we know we'll need later.

        We also behave as though we're a dict from the perspective of JSON attributes.
        These discovery strings are converted into pyConfigContext objects and are
        then searchable like dicts themselves - however updating these dicts has
        no direct impact on the underlying JSON strings stored in the database.

        The reason for treating these as a dict is so we can easily change
        the implementation to put JSON strings in separate nodes, or perhaps
        eventually in a separate data store.

        This is necessary because the performance of putting lots of really large
        strings in Neo4j is absolutely horrible. Putting large strings in is dumb
        and what Neo4j does with them is even dumber...
        The result is at least DUMB^2 -not 2*DUMB ;-)
        '''
        SystemNode.__init__(self, domain=domain, designation=designation)
        if roles is None:
            roles = ['host', 'drone']
        self.addrole(roles)
        self._io = CMAdb.io
        self.lastjoin = 'None'
        self.status = status
        self.reason = reason
        self.key_id = key_id
        self.startaddr = str(startaddr)
        self.primary_ip_addr = str(primary_ip_addr)
        self.time_status_ms = int(round(time.time() * 1000))
        self.time_status_iso8601 = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime())
        if port is not None:
            self.port = int(port)
        else:
            self.port = None

        self.monitors_activated = False

        if Drone.IPownerquery_1 is None:
            Drone.IPownerquery_1 = (Drone.IPownerquery_1_txt
                                    % (CMAconsts.REL_ipowner, CMAconsts.REL_nicowner))
            Drone.OwnedIPsQuery_subtxt = (Drone.OwnedIPsQuery_txt    \
                                          % (CMAconsts.REL_nicowner, CMAconsts.REL_ipowner))
            Drone.OwnedIPsQuery =  Drone.OwnedIPsQuery_subtxt
        self.set_crypto_identity()
        if Store.is_abstract(self) and not CMAdb.store.readonly:
            #print 'Creating BP rules for', self.designation
            from bestpractices import BestPractices
            bprules = CMAdb.io.config['bprulesbydomain']
            rulesetname = bprules[domain] if domain in bprules else bprules[CMAconsts.globaldomain]
            for rule in BestPractices.gen_bp_rules_by_ruleset(CMAdb.store, rulesetname):
                #print >> sys.stderr, 'ADDING RELATED RULE SET for', \
                    #self.designation, rule.bp_class, rule
                CMAdb.store.relate(self, CMAconsts.REL_bprulefor, rule,
                                   properties={'bp_class': rule.bp_class})