Example #1
    def test_add_update_delete(self):
        # Start from a clean slate in case a previous run left X.json behind
        if os.path.exists('testing/data/reference_types/Type1/X.json'):
            os.remove('testing/data/reference_types/Type1/X.json')
        ds = DataStore('testing/data/reference_types')
        typex = Type1(ref='X', name='Type_X', desc='Type X')
        self.assertFalse(
            os.path.exists('testing/data/reference_types/Type1/X.json'))

        # Adding the reference persists it as Type1/X.json
        ds.type('Type1').add(typex)
        self.assertTrue(
            os.path.exists('testing/data/reference_types/Type1/X.json'))

        # Re-open the store and check the added reference round-trips
        ds = DataStore('testing/data/reference_types')
        added = ds.type('Type1').get('X')
        self.assertEqual(typex, added)

        # Update the reference and check the change is persisted
        added.name = 'UPDATED'
        ds.type('Type1').update(added)
        ds = DataStore('testing/data/reference_types')
        updated = ds.type('Type1').get('X')
        self.assertEqual(added, updated)

        # Deleting removes the backing file; further gets raise DoesNotExist
        self.assertTrue(
            os.path.exists('testing/data/reference_types/Type1/X.json'))
        ds.type('Type1').delete(updated)
        self.assertFalse(
            os.path.exists('testing/data/reference_types/Type1/X.json'))
        with self.assertRaises(Type1.DoesNotExist):
            ds.type('Type1').get('X')
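The clean-up at the top of this test could instead live in the test fixture so every test starts from a known state; a minimal sketch, assuming a unittest-style suite and the Type1/X.json layout shown above (the class name is hypothetical):

import os
import unittest


class ReferenceTypeFixture(unittest.TestCase):
    # Hypothetical base class; the real suite may organise this differently.
    X_JSON = 'testing/data/reference_types/Type1/X.json'

    def setUp(self):
        # Remove any X.json left behind by an earlier, failed run
        if os.path.exists(self.X_JSON):
            os.remove(self.X_JSON)

    tearDown = setUp  # clean up after each test as well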
Example #2
    def __init__(self, prodj):
        super().__init__()
        self.prodj = prodj
        self.queue = Queue()

        self.pdb_enabled = True
        self.pdb = PDBProvider(prodj)

        self.dbc_enabled = True
        self.dbc = DBClient(prodj)

        # DB queries seem to work if we submit player number 0 everywhere
        # (NOTE: this only seems to work when fewer than 4 players are on the network).
        # However, this sometimes messes up rendering on the players
        # (e.g. when querying metadata while a player has its browser open).
        # Alternatively, we can use a player number from 1 to 4 without rendering
        # issues, but then at most 3 real players can be used.
        self.own_player_number = 0
        self.request_retry_count = 3

        self.metadata_store = DataStore()  # map of (player_number, slot, track_id) -> metadata
        self.artwork_store = DataStore()  # map of (player_number, slot, artwork_id) -> artwork_data
        self.waveform_store = DataStore()  # map of (player_number, slot, track_id) -> waveform_data
        self.preview_waveform_store = DataStore()  # map of (player_number, slot, track_id) -> preview_waveform_data
        self.beatgrid_store = DataStore()  # map of (player_number, slot, track_id) -> beatgrid_data
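The comments above describe each store as a map keyed by a (player_number, slot, id) tuple; a minimal sketch of that access pattern, assuming a plain dict-like interface (the real prodj DataStore may expose different methods):

# Illustration only: a tuple-keyed cache of the kind the comments describe.
metadata_store = {}

def cache_metadata(player_number, slot, track_id, metadata):
    metadata_store[(player_number, slot, track_id)] = metadata

def lookup_metadata(player_number, slot, track_id):
    # Returns None when the track has not been cached yet
    return metadata_store.get((player_number, slot, track_id))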
Example #3
    def test_open_data_store_raises_FileNotFound(self):
        with self.assertRaisesRegex(
                FileNotFoundError,
                "The root of the data store: 'testing/data/does_not_exist' does not exist"):
            DataStore('testing/data/does_not_exist')
        with self.assertRaisesRegex(
                FileNotFoundError,
                "The root of the data store: 'testing/data/xxx' is not a directory"):
            DataStore('testing/data/xxx')
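The messages asserted here imply the constructor validates its root path up front; a sketch of validation that would raise matching errors, offered as an assumption about the tested behaviour rather than the library's actual source:

import os


def _check_root(root):
    # Raise FileNotFoundError with the wording the test expects
    if not os.path.exists(root):
        raise FileNotFoundError(
            "The root of the data store: '%s' does not exist" % root)
    if not os.path.isdir(root):
        raise FileNotFoundError(
            "The root of the data store: '%s' is not a directory" % root)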
Example #4
    def setUp(self):
        db = sqlite3.connect(":memory:")

        # Populate the datastore
        self.ds = DataStore(db)
        data = [1, 2, 3, 4, 5]
        self.ds.segments.addSegment(0x1234, len(data), "ROM", data)
        self.seg = next(iter(self.ds.segments))
        del self.ds

        # Reload the datastore from the db
        self.ds = DataStore(db)
Example #5
    def test_add_reference_raises_DuplicateReference(self):
        ds = DataStore('testing/data/reference_types')
        type1 = ds.type('Type1').get("1")
        with self.assertRaises(Type1.DuplicateReference):
            ds.type('Type1').add(type1)
        with self.assertRaises(Type1.DuplicateReference):
            ds.type('Type1').add({}, ref="1")
Example #6
    def test_get_references(self):
        ds = DataStore('testing/data/reference_types')
        with self.assertRaises(NotAManagedType):
            ds.type('XXX')
        references = ds.type('Type1')
        self.assertTrue(hasattr(references, 'get'),
                        'References does not include a get attribute')
        self.assertTrue(callable(getattr(references, 'get')),
                        'References.get is not callable')
        self.assertTrue(hasattr(references, 'add'),
                        'References does not include an add attribute')
        self.assertTrue(callable(getattr(references, 'add')),
                        'References.add is not callable')
        self.assertTrue(hasattr(references, 'update'),
                        'References does not include an update attribute')
        self.assertTrue(callable(getattr(references, 'update')),
                        'References.update is not callable')
        self.assertTrue(hasattr(references, 'filter'),
                        'References does not include a filter attribute')
        self.assertTrue(callable(getattr(references, 'filter')),
                        'References.filter is not callable')
        self.assertTrue(hasattr(references, 'delete'),
                        'References does not include a delete attribute')
        self.assertTrue(callable(getattr(references, 'delete')),
                        'References.delete is not callable')
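The repeated hasattr/callable assertions could be collapsed into a loop; a sketch using unittest's subTest, assuming the same References interface as above:

    def test_get_references_compact(self):
        ds = DataStore('testing/data/reference_types')
        references = ds.type('Type1')
        for method in ('get', 'add', 'update', 'filter', 'delete'):
            with self.subTest(method=method):
                self.assertTrue(callable(getattr(references, method, None)),
                                'References.%s is missing or not callable' % method)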
Example #7
    def test_filter_reference(self):
        ds = DataStore('testing/data/reference_types')
        results = ds.type('Type1').filter(name='Item_1')
        self.assertEqual(1, len(results))
        self.assertEqual('1', results[0].ref)
        self.assertEqual('Item_1', results[0].name)
        self.assertEqual('Item 1', results[0].description)
Example #8
    def test_delete_reference_raises_DoesNotExist(self):
        ds = DataStore('testing/data/reference_types')
        typex = Type1(ref='X', name='Type_X', desc='Type X')
        with self.assertRaises(Type1.DoesNotExist):
            ds.type('Type1').delete(typex)
        with self.assertRaises(Type1.DoesNotExist):
            ds.type('Type1').delete({}, ref="X")
Example #9
    def getcompany_info(self, name, url):
        logger = Logger(logname='error.log', logger="58com").getlog()
        ds = DataStore()
        try:
            company_text = []
            html = proxy.proxy_request(url)
            soup = BeautifulSoup(html, 'html.parser')
            tag = soup.find(class_="basicMsg")
            ul = tag.find("ul")
            li_tags = ul.find_all(name='li')
            strinfo = re.compile(r'\s')  # strips all whitespace
            for li in li_tags:
                txt = strinfo.sub('', li.get_text())
                company_text.append(txt.split(':')[1])
            # Fetch business registration info (currently disabled)
            #gongshang_info = tianyan.tianyan_search(name)
            #gongshang_info = ','.join(gongshang_info)
            ds.insert_database(name, company_text)

        except urllib.error.URLError as e:
            if hasattr(e, "code"):
                print(e.code)
            if hasattr(e, "reason"):
                print(e.reason)
            logger.error("Get company info fail, company name: %s, url: %s",
                         name, url)  # log the company and URL that failed to parse
        except Exception as e:
            print("exception:" + str(e))
            sleep(1)
Example #10
 def test_overriding_unique_constraint(self):
     with self.assertWarns(UserWarning):
         datastore = DataStore(indices_config={
             ('id', ): True,
             ('id', 'name'): False,
         })
     self.assertTrue(datastore.indices_config[('id', 'name')])
Example #11
def init_datastore():
    host = os.getenv('REDIS_HOST')
    port = os.getenv('REDIS_PORT')
    logging.info("Connecting to {host}:{port}".format(host=host, port=port))

    ds = DataStore(host=host, port=int(port))
    return ds
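Note that int(port) in this helper raises a TypeError when REDIS_PORT is unset; a defensive variant, assuming localhost:6379 is an acceptable fallback (the defaults are illustrative, not taken from the original project):

import logging
import os


def init_datastore():
    # Fall back to common Redis defaults when the environment variables are missing
    host = os.getenv('REDIS_HOST', 'localhost')
    port = int(os.getenv('REDIS_PORT', '6379'))
    logging.info("Connecting to %s:%s", host, port)
    return DataStore(host=host, port=port)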
Example #12
    def handle(self, *args, **options):
        db = DataStore()
        if db.exists():
            db.delete()
            self.stdout.write('Successfully deleted DataStore structure and data.')
        else:
            self.stdout.write('DataStore structure not created. Skipping delete action')
Example #13
def render(cid):
    """
    Fetch the data for given category id from datastore.
    If available, render an html script with the data tree.
    If unavailable, print an error message.
    """

    # connect to database
    datastore = DataStore()
    datastore.connect()

    # fetch record from database with given id
    record = datastore.retrieve_by_cid(cid)

    # check if table doesn't exist
    if record == -1:
        return

    # if record doesn't exist in table, return
    if not record:
        print("No category with ID: {}".format(cid))
        return

    # if exists, fetch all children

    records = fetch_all_children(datastore, record)

    # connection close
    datastore.disconnect()

    # create an html file with name as cid
    create_html_page(cid, records)
Example #14
    def test_save_empty(self):
        table = self.create_session_table()
        d = DataStore('dynamodb', 'Sessions', 'PatientId', 'SessionId')
        response = d.save(SESSION_INFO_EMPTY)
        self.assertEqual('BAD', response['status'])
        response = table.scan()
        self.assertEqual(0, len(response['Items']))
Example #15
    def __init__(self):
        self.state = State.Init
        self.config = {
            'hover_height': 1.0,

            # States for which movement.fix_hover() will NOT be called
            # (to make sure the drone is at `hover_height`)
            'exclude_from_fix_hover': [
                State.Init,
                State.Takeoff,
                State.Land,
                State.Done,
            ],

            # Radius in meters around a blacklisted goal that the robot will ignore
            'blacklisted_goal_radius': 2.0,
        }

        self.store = DataStore()
        self.movement = MovementHandler(core=self, datastore=self.store)
        self.actions = Actions(core=self,
                               datastore=self.store,
                               movement_handler=self.movement)
        self.planner = Planner(core=self,
                               datastore=self.store,
                               movement_handler=self.movement)

        # Aux files
        self.temp_data = {}
        self.last_goal = None
Example #16
def preprocess(dbPath):
    '''
    This is a preprocess module
    '''
    logging = DefaultLogger()

    if not os.path.exists(dbPath):
        logging.debug('PreProcess: can\'t find database at path')
        return

    datastore = DataStore(dbPath)
    loopcount = 0

    while True:
        sleep(5)

        if loopcount % 10 == 0:
            logging.debug('PreProcess is alive')
        loopcount += 1

        data = datastore.recordsForHashing()
        for record in data:
            logging.debug(record)

            key_id = record.id
            filePath = record.fileName

            if not os.path.exists(filePath):
                logging.debug(
                    'PreProcess: Will update record status as the file no longer exists'
                )
                datastore.updateRecordAsMissingWithID(key_id)
                continue

            try:
                logging.debug('PreProcess: locking file to calculate hash...')
                ##UPDATE HASH OPERATION START HERE
                startTime = datetime.datetime.now().strftime(
                    "%Y-%m-%d %H:%M:%S")
                datastore.updateRecordWithHashStart(startTime, key_id)

                fileToHash = open(filePath, 'rb')
                portalocker.lock(fileToHash, portalocker.LOCK_SH)
                hashString = "NO_OP"  #hashForFile(fileToHash)
                endTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                fileToHash.close()

                logging.debug('PreProcess: unlocking file...')
                logging.debug(
                    'PreProcess: Will update record status with Hash string and times'
                )

                datastore.updateRecordWithHashForStartTimeAndEndTime(
                    hashString, startTime, endTime, key_id)

            except Exception as e:
                info = ('PreProcess: There was an error when calculating the hash for file: '
                        + os.path.basename(filePath) + ' ' + str(e))
                sendFailureEmail(info)
                logging.error(str(e))
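The open/lock/hash/close sequence in this loop leaves the file handle open if hashing raises; a sketch of the same steps wrapped in a with block (closing the file also releases the shared lock), assuming the hashForFile helper referenced in the commented-out code:

import datetime

import portalocker


def hash_locked_file(filePath, hashForFile):
    # The with block guarantees the file is closed even if hashing fails,
    # which also releases the shared lock taken below.
    with open(filePath, 'rb') as fileToHash:
        portalocker.lock(fileToHash, portalocker.LOCK_SH)
        hashString = hashForFile(fileToHash)
    endTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    return hashString, endTime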
Example #17
    def __init__(self, entity_name=None, source_language_script=ENGLISH_LANG, translation_enabled=False):
        """
        Initializes a TextDetector object with given entity_name

        Args:
            entity_name: A string by which the detected substrings that correspond to text entities would be replaced
                         with on calling detect_entity()
            source_language_script: ISO 639 code for language of entities to be detected by the instance of this class
            translation_enabled: True if messages need to be translated in case the detector does not support a
                                 particular language, else False
        """
        # assigning values to superclass attributes
        self._supported_languages = [ENGLISH_LANG, HINDI_LANG]
        super(TextDetector, self).__init__(source_language_script, translation_enabled)

        self.text = None
        self.text_dict = {}
        self.tagged_text = None
        self.text_entity_values = []
        self.original_texts = []
        self.processed_text = None
        self.entity_name = entity_name
        self.tag = '__' + self.entity_name + '__'

        # defaults for auto mode
        self._fuzziness = "auto:4,7"
        self._fuzziness_lo, self._fuzziness_hi = 4, 7
        self._min_token_size_for_fuzziness = self._fuzziness_lo
        # self.set_fuzziness_threshold(fuzziness=(self._fuzziness_lo, self._fuzziness_hi))

        # defaults for non-auto mode
        self.set_fuzziness_threshold(fuzziness=1)
        self._min_token_size_for_fuzziness = 4

        self.db = DataStore()
Example #18
    def mainloop(self, filenames):
        if len(filenames) < 1:
            print("usage: %s filename" % sys.argv[0])
            sys.exit(-1)

        self.filename = filenames[0]

        self.app = QtWidgets.QApplication(sys.argv)

        if not os.path.exists(self.filename):
            # File doesn't exist, show arch
            # prompt window and create the datastore
            apw = ArchPromptWindow(self.newWithArchCallback)
            apw.show()
        else:
            # File exists, make sure the architecture type is properly set
            self.ds = DataStore(self.filename, applogic.tools.typeFactory)
            self.user_ds = DefaultMockProxy(self.ds)
            try:
                self.global_archname = \
                    self.ds.properties.get("f0fd.HACK_arch_name")

                self.engineSetupCompletion()
                self.createMainWindow()

            except KeyError:
                apw = ArchPromptWindow(self.runShowCallBack)
                apw.show()

        self.app.exec_()
Example #19
def init(args):

    if args.dev_mode and not Path("db_dev.json").exists():
        print("Create db_dev.json first")
        return
    elif not args.dev_mode and not Path("db.json").exists():
        print("Create db.json first")
        return

    print("Downloading nltk packages")
    nltk.download("stopwords")
    nltk.download("punkt")
    nltk.download("wordnet")
    nltk.download("averaged_perceptron_tagger")

    print("Creating the database tables")
    datastore = DataStore(args)
    datastore.create_tables()
    datastore.close()

    print("Creating the classifier")
    Model(args)

    print("Scraping the data")
    scrape_data(args)
Example #20
    def __init__(self, entity_name=None):
        """
        Initializes a TextDetector object with given entity_name

        Args:
            entity_name: A string by which the detected substrings that correspond to text entities would be replaced
                         with on calling detect_entity()
        """
        self.text = None
        self.regx_to_process = Regex([(r'[\'\/]', r'')])
        self.text_dict = {}
        self.tagged_text = None
        self.text_entity = []
        self.original_text_entity = []
        self.processed_text = None
        self.entity_name = entity_name
        self.tag = '__' + self.entity_name + '__'

        # defaults for auto mode
        self._fuzziness = "auto:4,7"
        self._fuzziness_lo, self._fuzziness_hi = 4, 7
        self._min_token_size_for_fuzziness = self._fuzziness_lo
        # self.set_fuzziness_threshold(fuzziness=(self._fuzziness_lo, self._fuzziness_hi))

        # defaults for non-auto mode
        self.set_fuzziness_threshold(fuzziness=1)
        self._min_token_size_for_fuzziness = 4

        self.db = DataStore()
Example #21
    def test_dataStoreOverlap(self):
        ds = DataStore(":memory:", arch.getDecoder)
        data = [0x02, 0xFF, 0x9A, 0x00, 0x00, 0x00]   # 8051 ljmp + 3 * 0x0

        ds.segments.addSegment(0x00, len(data), "ROM", data)
        m = ds.infostore.setType(0, "8051")

        # Verify we can look up objects by an address within them
        rc, val = ds.infostore.lookup(2)
        self.assertEqual(rc, InfoStore.LKUP_OVR)
        self.assertEqual(val, m)

        rc, val = ds.infostore.lookup(3)

        # Verify that we can't find an address that doesn't exist,
        #   but is in memory
        self.assertEqual(rc, InfoStore.LKUP_NONE)
        self.assertEqual(val, None)

        # Verify that mapped addresses without objects don't have a startaddr
        self.assertEqual(None, ds.infostore.findStartForAddress(3))

        # Verify that objects work properly
        self.assertEqual(0, ds.infostore.findStartForAddress(2))
        self.assertEqual(0, ds.infostore.findStartForAddress(1))
        self.assertEqual(0, ds.infostore.findStartForAddress(0))

        # Make sure that non-mapped addresses don't have a startaddr
        self.assertEqual(None, ds.infostore.findStartForAddress(24))
Example #22
    def handle(self, *args, **options):
        entity_data_directory_path = None
        csv_file_paths = None
        if ('entity_data_directory_path' in options and options['entity_data_directory_path']) or \
                ('csv_file_paths' in options and options['csv_file_paths']):

            if 'entity_data_directory_path' in options and options['entity_data_directory_path']:
                entity_data_directory_path = options['entity_data_directory_path']
                if not os.path.exists(entity_data_directory_path):
                    entity_data_directory_path = None

            if 'csv_file_paths' in options and options['csv_file_paths']:
                csv_file_paths = options['csv_file_paths'].split(',')
                csv_file_paths = [csv_file_path for csv_file_path in csv_file_paths if csv_file_path and
                                  csv_file_path.endswith('.csv')]
            db = DataStore()
            db.repopulate(entity_data_directory_path=entity_data_directory_path, csv_file_paths=csv_file_paths)
            if entity_data_directory_path:
                self.stdout.write(
                    'Successfully Repopulated entity data from csv files at "%s"' % entity_data_directory_path)
            if csv_file_paths:
                self.stdout.write('Successfully Repopulated entity data from %d other csv file(s)'
                                  % len(csv_file_paths))
        else:
            self.stdout.write(self.style.ERROR('argument --entity_data_directory_path or --csv_file_paths required'))
Example #23
def _find_datastore():
    """Get the shared datastore in DyanmoDB"""
    datastore_table_name = os.environ.get('DATASTORE_TABLE_NAME',
                                          'url_shortener_table')
    datastore = DataStore(datastore_table_name,
                          ['shortcodes', 'api_keys', 'config'],
                          uncached=['api_keys'])
    return datastore
Example #24
    def handle(self, *args, **options):
        if 'entity_name' in options and options['entity_name']:
            entity_name = options['entity_name']
            db = DataStore()
            db.delete_entity(entity_name=entity_name)
            self.stdout.write('Successfully deleted entity data for "%s"' % entity_name)
        else:
            self.stdout.write(self.style.ERROR('argument --entity_name required'))
Example #25
    def newWithArchCallback(self, archname):
        self.global_archname = archname

        self.ds = DataStore(self.filename, applogic.tools.typeFactory)
        self.user_ds = DefaultMockProxy(self.ds)

        self.ds.properties.set("f0fd.HACK_arch_name", archname)
        self.engineSetupCompletion()
        self.createMainWindow()
Example #26
def unknown_negative():
    global ds
    _ds = DataStore('/tmp/pood/ds')
    paths = _ds.fetch(1).all().paths()
    if len(paths) > 0:
        path = paths[0]
        ds.store(0).tile(Image.open(open(path, mode='rb')), 100, size=(16, 16))
        os.unlink(path)
    return redirect('/')
Example #27
    def test_actualPersistence(self):
        sql = sqlite3.connect(":memory:")

        ds = DataStore(sql, getDecoder)
        data = [0, 1, 2, 3, 4, 5, 6, 7]
        ds.segments.addSegment(0x0, len(data), "ROM", data)

        ds.infostore.setType(1, "8051")

        ds.flush()
        del ds

        #print [i for i in sql.execute("SELECT * from memory_info")]
        ds2 = DataStore(sql, getDecoder)
        rc, obj = ds2.infostore.lookup(1)

        self.assertEqual(rc, InfoStore.LKUP_OK)
        self.assertEqual(obj.addr, 1)
Example #28
def unknown():
    paths = DataStore('/tmp/pood/ds').fetch(1).all().paths()

    if len(paths) == 0:
        # return 404
        return 'No images found', 404

    resp = send_file(paths[0])
    resp.headers['Cache-Control'] = 'no-cache, must-revalidate'
    return resp
Example #29
    def test_defaultsNoPropogate(self):
        ds = DataStore(":memory:")

        defs = DefaultMockProxy(ds)
        ds.segments.addSegment(0x0, 3, "ROM", [1, 2, 3])

        defs.infostore[0]

        rc, obj = ds.infostore.lookup(0)
        self.assertEqual(rc, InfoStore.LKUP_NONE)
Example #30
    def test_inBasicDNoSDefault(self):
        ds = DataStore(":memory:")
        data = [0, 1, 2, 3, 4, 5, 6, 7]
        ds.segments.addSegment(0x0, len(data), "ROM", data)

        # These are IDENT based addresses
        self.assertEqual(False, -1 in ds.infostore)
        self.assertEqual(False, 0 in ds.infostore)
        self.assertEqual(False, 7 in ds.infostore)
        self.assertEqual(False, 8 in ds.infostore)