Example #1
0
class Achievements:
    """Tracks GW2 account achievements.

    Uses the Borg pattern: every instance shares one attribute dict
    (__borg_state), so state persists across instantiations.
    """

    __borg_state = {}
    __achievements_id = "account_achievements"

    def __init__(self, config):
        # Borg pattern: all instances share the same __dict__.
        self.__dict__ = self.__borg_state
        self.config = config
        self.api = GW2_API(config)
        self.ds = DataStore(config)
        self.render = Render(config)

    def _get_current(self):
        """Get the current achievements for the account, keyed by id."""
        cheeves = self.api.get("account/achievements")
        cheeves_by_id = {}
        for cheeve in cheeves:
            # Fix: the API returns dicts (the other methods use
            # cheeve['id'] / cheeve['done']), not objects with .id.
            cheeves_by_id[cheeve['id']] = cheeve
        return cheeves_by_id

    def _get_new_unlock_cheeves(self, old_cheeves, new_cheeves):
        """Given a dict of old and a list of new Achievements, find those
        that are newly unlocked.

        Returns a tuple of:
            (unlocks, newness) where
                    -unlocks is newly completed cheeves
                    -newness is new added cheeves
        """
        unlocks = []
        newness = []
        for cheeve in new_cheeves:
            if cheeve['id'] not in old_cheeves:
                newness.append(cheeve)
            elif cheeve['done'] != old_cheeves[cheeve['id']]['done']:
                unlocks.append(cheeve)
        return (unlocks, newness)

    def _get_new_progress_cheeves(self, old_cheeves, new_cheeves):
        """Given a dict of old and a list of new Achievements, return
        those that have new progress on them."""
        new_prog = []
        for cheeve in new_cheeves:
            # Brand-new achievements are reported by _get_new_unlock_cheeves
            # as 'newness'; skip them here to avoid a KeyError.
            if cheeve['id'] not in old_cheeves:
                continue
            if cheeve.get('current', 0) != \
                    old_cheeves[cheeve['id']].get('current', 0):
                new_prog.append(cheeve)
        # Fix: the original built new_prog but never returned it.
        return new_prog

    def update(self, cheeves=None):
        """Will update the datastore with the current cheeves. Intended to be
        called once per day, per week, per cycle (whatever).

        If 'cheeves' is omitted, will get the current cheeves via API."""
        if cheeves is None:
            cheeves = self._get_current()

        self.ds.put(self.__achievements_id, cheeves)
Example #2
0
def handle(config):
    """Evaluate every JMX condition in *config* against its stored baseline.

    Returns True when all conditions hold, False when any condition fails
    or when an error occurs.
    """
    try:
        flagcheck = True
        logger.info("calling jmx for metrics")
        # TODO add code for handling metrics from multiple JMX hosts
        jpype.attachThreadToJVM()
        jmx = JMX(host='96.119.153.107', port=9999)
        DS = DataStore()
        for condition in config.get('if').get('jmx'):
            baseline = DS.getbaseline(condition)
            current = jmx.get_attr(condition.get('object'),
                                   condition.get('type'),
                                   condition.get('attribute'))
            logger.debug(current)
            logger.debug(str(current) + condition.get('operator') + repr(baseline))
            # SECURITY: eval() of a config-supplied operator string; this is
            # only safe if the configuration is trusted. Consider mapping the
            # operator string to functions from the `operator` module instead.
            out = eval(str(current) + condition.get('operator') + repr(baseline))
            if not bool(out):
                flagcheck = False
                break
            DS.setbaseline(current.floatValue(), baseline, condition)
        del jmx
        return flagcheck
    except Exception as e:
        # Fix: original mixed tabs/spaces (TabError) and used the
        # Python-2-only `except Exception, e` syntax.
        print("in exception")
        print(e)
        logger.error(e)
        return False
Example #3
0
class EncryptorWatcher(LoggingEventHandler):
    '''
    This class enters all file 'created' events to a database pointed to by dbFolder
    '''
    def __init__(self, pathStructure, dbFolder):
        # Fix: super() must be passed the subclass, not the parent class;
        # the original skipped LoggingEventHandler.__init__ in the MRO.
        super(EncryptorWatcher, self).__init__()
        self.pathStructure = pathStructure
        self.dataStore = DataStore(dbFolder)

    def on_modified(self, event):
        """Log a 'modified' event together with the file's current size."""
        path = os.path.join(self.pathStructure['inBox'], event.src_path)
        logging.debug("encryptorWatch on_modified file")
        info = "Modified: " +  event.src_path + " " + str(os.path.getsize(path))
        logging.debug(info)

    def on_created(self, event):
        """Record a newly created file in the datastore; directories are ignored."""
        path = os.path.join(self.pathStructure['inBox'], event.src_path)

        if os.path.isdir(os.path.abspath(event.src_path)):
            logging.debug('WatchProcess: Folder Encryption is not supported.')
            return

        self.dataStore.addFilePathToDataBaseStoreWithType(os.path.abspath(event.src_path), self.pathStructure['watchType'], self.pathStructure['name'])

        info = "Created: " +  event.src_path + " " + str(os.path.getsize(path))
        logging.debug("encryptorWatch on_created file")
        logging.debug(info)
Example #4
0
def perform_search(search_str, results_collector):
    """Fetch a craigslist search results page, persist each listing, and
    append every new listing to *results_collector*.

    search_str -- full URL of the craigslist search to scrape.
    results_collector -- mutable list that receives ds.new_listings.
    """
    base_url = 'http://sfbay.craigslist.org'

    cl_html_results = requests.get(search_str)
    soup = BeautifulSoup(cl_html_results.text, 'html.parser')

    ds = DataStore(storetype='sql')

    # Each listing row carries a numeric data-pid attribute.
    for result in soup.find_all(attrs={"data-pid": re.compile(r'\d+')}):

        link_title = result.find(id='titletextonly')

        if link_title is None:
            # Fix: the original used a bare `next`, which just references
            # the builtin and does nothing; `continue` was intended.
            continue

        datapid = result.attrs['data-pid']
        link_title_text = link_title.text

        link = '{0}{1}'.format(base_url, result.find('a').attrs['href'])

        ds.save_entry(datapid=datapid, title=link_title_text, url=link)

    for i in ds.new_listings:
        results_collector.append(i)
Example #5
0
def main():
	"""Parse the command line arguments, expecting one of the following formats:
		-) (-i ChannelID | -u Username) (add | check | remove)
		-) check | list
	and perform the appropriate action
	"""
	args = get_parser().parse_args()

	yt = YouTube()
	store = DataStore('%s-data.sqlite3' % sys.argv[0], 'schema.sql')

	# Resolve the target channel, if one was named on the command line.
	target = None
	if args.username is not None:
		target = yt.get_channel_by_username(args.username)
	elif args.id is not None:
		target = yt.get_channel_by_id(args.id)

	action = args.action
	if action == 'add':
		store.store_channel(target)
	elif action == 'remove':
		store.remove_channel(target)
	elif action == 'list':
		# Tabulate every stored channel with humanized timestamps.
		rows = [
			[
				item['id'],
				item['title'],
				arrow.get(item['added_on']).humanize(),
				arrow.get(item['last_checked']).humanize()
			]
			for item in store.get_channels()
		]
		pretty_print(['ID', 'Title', 'Added', 'Last Checked'], rows)
	elif action == 'check':
		# A specific channel restricts the check; otherwise every
		# previously added channel is checked for new uploads.
		if target is not None:
			channels = [store.get_channel_by_id(target['id'])]
		else:
			channels = store.get_channels()

		to_check = {item['id']: item['last_checked'] for item in channels}

		rows = []
		for upload in yt.get_uploads(to_check):
			rows.append([
				upload['channel_title'],
				upload['title'],
				arrow.get(upload['published_at']).humanize(),
				'https://youtube.com/watch?v=%s' % (upload['id'], )
			])

		pretty_print(['Channel', 'Title', 'Published', 'Link'], rows)

		# Record that each channel was checked just now.
		for channel_id in to_check.keys():
			store.update_last_checked(channel_id)
Example #6
0
 def close(self):
     """Flush pending batches, commit, and release the DB connection.

     NOTE(review): the explicit base-class call suggests DataStore.close
     performs extra cleanup -- confirm against the DataStore definition.
     """
     self.commitStockBatch()
     self.commitBatch()
     self.conn.commit()
     self.conn.close()
     self.conn = None  # drop the handle so any later use fails fast
     DataStore.close(self)
Example #7
0
class singleFileWatcher(LoggingEventHandler):
    '''
    This class enters all file 'created' events to a database pointed to by dbFolder
    '''
    def __init__(self, pathStructure, dbFolder):
        # Fix: super() must be passed the subclass, not the parent class;
        # the original skipped LoggingEventHandler.__init__ in the MRO.
        super(singleFileWatcher, self).__init__()
        self.pathStructure = pathStructure
        self.dataStore = DataStore(dbFolder)

    def on_created(self, event):
        """Add a newly created file to the database, ignoring OS metadata
        files and directories."""

        for ignoreFile in ['.DS_Store', 'Thumbs.db']:
            if ignoreFile in os.path.abspath(event.src_path):
                info = 'File ignored: ' +  os.path.abspath(event.src_path)
                logging.debug(info)
                return

        info = 'On created: ' +  os.path.abspath(event.src_path)
        logging.debug(info)

        if os.path.isdir(os.path.abspath(event.src_path)):
            info = 'Directory analysis is not available'
            logging.debug(info)
            return

        self.dataStore.addFileToDatabase(os.path.abspath(event.src_path))
        info = 'adding ' + event.src_path + ' to the database'
        logging.debug(info)
Example #8
0
 def test1(self):
     """An index created for a collection reports its field name."""
     connection = make_dbcon()
     store = DataStore(connection)
     users = store.collection("users")

     email_index = Index(connection, users, 'email')

     self.assertEqual(email_index.name(), 'email')
def handle_get(key):
    """Return a tuple containing True if the key exists and the message
    to send back to the client."""
    if key not in POROCESSING:
        store = DataStore()
        data = store.get(key)
        if data:
            payload = (data[0], data[1])
            return (True, payload)
    return (False, 'ERROR: Key [{}] not found'.format(key))
Example #10
0
def update_poi(id):
    """Update the point of interest identified by *id* from the request body.

    Returns the updated POI as JSON on success, or the validation
    errors as JSON otherwise.
    """
    store = DataStore()
    poi = PointOfInterest.from_request(request)
    poi.id = id
    valid = poi.validate()
    # Idiom fix: an empty error container is falsy; no len() needed.
    if not valid:
        poi = store.update_poi(poi)
        return to_json(poi)
    else:
        return to_json(valid)
Example #11
0
def update_poi_test(id):
    """Exercise store.update_poi with a canned test item and return it as JSON."""
    store = DataStore()
    poi = PointOfInterest.get_test_item(id)
    # Overwrite every mutable field with recognizable test values.
    poi.name = "t_name_toUpdate"
    poi.category = "t_category_toUpdate"
    poi.creator = "t_creator_toUpdate"
    poi.description = "_description_toUpdate"
    poi.latitude = 20.00000
    poi.longitude = 10.00000
    return to_json(store.update_poi(poi))
Example #12
0
 def test3(self):
     """Deleting a document also removes it from the collection's indexes."""
     store = DataStore(make_dbcon())
     users = store.collection("users")
     users.add_index("email")

     uuid = users.save({'email': '[email protected]', 'name': 'John Doe!'})
     self.assertIsNotNone(users.index("email").find("[email protected]"))

     users.delete(uuid)

     self.assertEqual(len(users.index("email").find("[email protected]")), 0)
Example #13
0
def start():
    """Configure logging, sanity-check the datastore, then launch the web UI."""
    fileConfig('conf/log.conf')
    garage_log = logging.getLogger('garage')
    garage_log.log(logging.DEBUG, 'Log setup complete')

    garage_log.log(logging.DEBUG, 'Initializing datastore ')
    db = DataStore(setup='true')
    db.shutdown()
    garage_log.log(logging.DEBUG, 'Complete')

    butler = Butler()
    web.start(butler)
def get_tasks():
	"""Look up coupon results for the URL posted in the request form.

	Tries the exact URL (and its modifications) first, then falls back
	to the parent URL's aggregated results. Returns a JSON response or
	the literal string 'No Response'.
	"""
	u = request.form['url'].lower()
	
	url = Utilities.get_shortened_url(u)
	url_3 = Utilities.get_shortened_url(u,3)

	return_only_parent = False

	# If url is same as parent url, return everything just for parent
	# Dont redundantly return for parent and itself
	if url == url_3 or url+'/' == url_3:
			return_only_parent = True

	ds = DataStore()

	if not return_only_parent:

		all_urls = Utilities.modify_url(url)
		print all_urls

		# If the same url is also a parent url, return all results of parent .
		# And skip individual url results

		# NOTE(review): this loop rebinds `url`, so the parent lookup
		# below uses the *last tried* variant -- confirm that is intended.
		for url in all_urls:
			result = ds.fetch(url)
			if result == False:
				print " Tried for url " + url
			else:
				x = {"result":result}
				return jsonify(x)

	# If for our exact url and its modifications , nothing got returned

	outer_url = "parent::" + Utilities.get_shortened_url(url,3)
	print outer_url
	
	result = ds.fetch_all_from_parent(outer_url)
	if result : 
		x = {"result":result}
		return jsonify(x)
	else:
		# Retry with the trailing slash toggled, since parent keys may be
		# stored either way.
		if outer_url[-1] == '/':
			result = ds.fetch_all_from_parent(outer_url[:-1])
		else:
			result = ds.fetch_all_from_parent(outer_url + '/')
		if result : 
			x = {"result":result}
			return jsonify(x)

	# If there is still nothing to show
	return 'No Response'
def handle_put(seq, key, value):
    """Return a tuple containing True and the message
    to send back to the client."""
    # Guard: refuse keys that are already being processed.
    if key in POROCESSING:
        return (False, 'Could Not be added')

    POROCESSING.append(key)
    store = DataStore(key, value)
    if store.put(seq):
        POROCESSING.remove(key)
        return (True, 'Key [{}] set to [{}]'.format(key, value))

    # The put failed: undo it and release the key.
    store.roll_back(seq)
    POROCESSING.remove(key)
    return (False, 'Could Not be added')
Example #16
0
def get_pois():
    """Return POIs as JSON, filtered by any query parameters whose names
    match PointOfInterest attributes; unfiltered when none are given."""
    store = DataStore()
    filters = dict()
    for name in PointOfInterest().__dict__.keys():
        value = request.args.get(name, None)
        if value is not None:
            filters[name] = value

    # A non-empty filter dict selects the filtered query path.
    if filters:
        pois = store.get_poi_filtered(filters)
    else:
        pois = store.get_all_poi()
    return to_json(pois)
Example #17
0
    def test2(self):
        """A saved document is retrievable through its email index entry.

        NOTE(review): asserting the lookup is None and then subscripting
        the result of the same lookup is contradictory -- the two email
        strings were probably meant to differ (likely lost to redaction).
        Confirm the intended fixture values before changing.
        """
        con = make_dbcon()
        ds = DataStore(con)
        
        col = ds.collection("users")  
        col.add_index("email")
        
        uuid = col.save({'email': '[email protected]', 'name':'My Name!'})

        i = col.index('email')
        self.assertIsNone(i.findone('[email protected]'))
        
        doc = i.findone('[email protected]')
        self.assertEqual(doc['uuid'], uuid)
def handle_delete(seq, key):
    """Return a tuple containing True if the key could be deleted and
    the message to send back to the client.

    Reads the key via datastore.get before attempting datastore.delete.
    """
    store = DataStore()
    if store.get(key):
        POROCESSING.append(key)
        if store.delete(seq, key):
            POROCESSING.remove(key)
            return (True, 'Done')
        # Delete failed: undo and release the key before reporting failure.
        store.roll_back(seq)
        POROCESSING.remove(key)
    return (False, 'ERROR: Key [{}] not found and could not be deleted'.format(key))
Example #19
0
 def test4(self):
     """Removing a field from a document and re-saving drops it from the index."""
     store = DataStore(make_dbcon())
     users = store.collection("users")
     users.add_index("email")

     uuid = users.save({'email': '[email protected]', 'name': 'John Doe!'})
     doc = users.load(uuid)

     self.assertIsNotNone(users.index.email.findone('[email protected]'))

     del doc['email']
     users.save(doc)

     self.assertIsNone(users.index.email.findone('[email protected]'))
Example #20
0
    def test3(self):
        """A document deletes exactly once; afterwards it cannot be loaded."""
        store = DataStore(make_dbcon())
        users = store.collection("users")
        uuid = users.save({
            'email': '[email protected]',
            'name': 'Roel Gerrits'
        })

        # First delete succeeds; a repeated delete reports failure.
        self.assertTrue(users.delete(uuid))
        self.assertFalse(users.delete(uuid))

        # The deleted document is gone.
        doc = users.load(uuid)
        self.assertIsNone(doc)
Example #21
0
 def test2(self):
     """A saved document round-trips through load, gaining its uuid."""
     store = DataStore(make_dbcon())
     users = store.collection("users")
     uuid = users.save({
         'email': '[email protected]',
         'name': 'Roel Gerrits'
     })

     loaded = users.load(uuid)

     self.assertDictEqual(loaded, {
         'uuid': uuid,
         'email': '[email protected]',
         'name': 'Roel Gerrits'
     })
Example #22
0
    def test1(self):
        """A document saved with an explicit uuid is loadable under that uuid."""
        store = DataStore(make_dbcon())
        users = store.collection("users")
        users.save({
            'uuid': 'f8e5519ee55811e2839a0024219c6b57',
            'email': '[email protected]',
            'name': 'Roel Gerrits'
        })

        loaded = users.load('f8e5519ee55811e2839a0024219c6b57')

        self.assertDictEqual(loaded, {
            'uuid': 'f8e5519ee55811e2839a0024219c6b57',
            'email': '[email protected]',
            'name': 'Roel Gerrits'
        })
Example #23
0
def preprocess(dbPath):
	'''
	Poll the database at dbPath for records awaiting hashing and update
	each record with its hash plus start/end timestamps.

	Runs forever (worker-process entry point); returns early only when
	the database path does not exist.
	'''
	# Renamed from `logging` to avoid shadowing the stdlib module name.
	log = DefaultLogger()

	if not os.path.exists(dbPath):
		log.debug('PreProcess: can\'t find database at path')
		return

	datastore = DataStore(dbPath)
	loopcount = 0

	while True:
		sleep(5)

		# Heartbeat roughly every 50 seconds.
		if loopcount % 10 == 0:
			log.debug('PreProcess is alive')
		loopcount += 1

		data = datastore.recordsForHashing()
		for record in data:
			log.debug(record)

			key_id = record.id
			filePath = record.fileName

			if not os.path.exists(filePath):
				log.debug('PreProcess: Will update record status as the file no longer exists')
				datastore.updateRecordAsMissingWithID(key_id)
				continue

			try:
				log.debug('PreProcess: locking file to calculate hash...')
				startTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
				datastore.updateRecordWithHashStart(startTime, key_id)

				# Fix: context manager guarantees the handle is closed even
				# if locking or hashing raises (the original leaked it).
				with open(filePath, 'rb') as fileToHash:
					portalocker.lock(fileToHash, portalocker.LOCK_SH)
					hashString = "NO_OP"  # hashForFile(fileToHash)
					endTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")

				log.debug('PreProcess: unlocking file...')
				log.debug('PreProcess: Will update record status with Hash string and times')

				datastore.updateRecordWithHashForStartTimeAndEndTime(hashString, startTime, endTime, key_id)

			except Exception as e:
				info = 'PreProcess: There was an error when calculating the hash for file: ' + os.path.basename(filePath) + ' ' + e.message
				sendFailureEmail(info)
				log.error(e.message)
Example #24
0
	def initialize_connections(self):
		"""Refill self.socket_table with the highest-rated peers once it
		drops below TaskHandler.PEER_REPICK entries."""
		if len(self.socket_table) < TaskHandler.PEER_REPICK:
			if DBGMODE : print("initialize_connections [start]")
			DataStore.mutex.acquire()
			try:
				self.datastore = DataStore()
				self.peers = self.datastore.get_highest_rating(10)
				for peer in self.peers:
					# Skip peers already tracked, and our own endpoint.
					if peer.id not in self.socket_table and (peer.ip != self.remote_ip or peer.port !=self.remote_port):
						self.socket_table[peer.id] = (peer.ip, int(peer.port))
				self.datastore.close()
			finally:
				# Fix: release the lock even when a datastore call raises;
				# the original would deadlock every later acquirer.
				DataStore.mutex.release()
Example #25
0
 def __init__(self, filename):
     """Initializes a Text."""
     assert filename
     self.filename = filename
     self.text = self.get_text_from_txt_file()

     # Recompute the datastore keys/values only when the store says so.
     if DataStore.is_to_be_computed(filename):
         self.ds = DataStore(filename, self.__compute_ds_keys_values())
     else:
         self.ds = DataStore(filename)
Example #26
0
	def update_database(self, peers):
		"""Merge *peers* (dicts with id/ip/port) into the peer database:
		refresh ratings of known nodes, insert unknown ones at rating 30."""
		DataStore.mutex.acquire()
		try:
			database_conn = DataStore()
			current_nodes = database_conn.get_all_peers()
			new_list = []
			for peer in peers:
				overlay = False
				for node in current_nodes:
					if node.ip == peer["ip"] and node.port == peer["port"]:
						# Known peer: ensure its rating is at least 30.
						if node.rating < 30:
							node.rating = 30
						database_conn.update(node)
						overlay = True

				if not overlay:
					new_list.append(PeerNode(0, peer["id"], peer["ip"], peer["port"], 30))
			database_conn.insert_peers(new_list)
			database_conn.close()
		finally:
			# Fix: release the lock even when a database call raises;
			# the original would deadlock every later acquirer.
			DataStore.mutex.release()
    def __init__(self):
        """Build the calendar window from calendar_view.glade, wire its
        signal handlers, and populate it with hard-coded demo tasks."""
        super(CalendarPlugin, self).__init__()
        self.first_day = self.last_day = self.numdays = None

        builder = Gtk.Builder()
        builder.add_from_file("calendar_view.glade")
        # Map glade signal names to this plugin's callbacks.
        handlers = {
            "on_window_destroy": Gtk.main_quit,
            "on_today_clicked": self.on_today_clicked,
            "on_combobox_changed": self.on_combobox_changed,
            "on_add_clicked": self.on_add_clicked,
            "on_edit_clicked": self.on_edit_clicked,
            "on_remove_clicked": self.on_remove_clicked,
            "on_next_clicked": self.on_next_clicked,
            "on_previous_clicked": self.on_previous_clicked,
        }
        builder.connect_signals(handlers)

        self.window = builder.get_object("window")
        # NOTE(review): re-running __init__ on a builder-created window is
        # unusual -- confirm this is required and not a leftover.
        self.window.__init__()
        self.window.set_title("GTG - Calendar View")
        self.window.connect("destroy", Gtk.main_quit)

        # DataStore object
        self.ds = DataStore()
        self.req = Requester(self.ds)
        self.ds.populate()  # hard-coded tasks

        self.today_button = builder.get_object("today")
        self.header = builder.get_object("header")

        # The controller widget is inserted just below the header row.
        self.controller = Controller(self, self.req)
        vbox = builder.get_object("vbox")
        vbox.add(self.controller)
        vbox.reorder_child(self.controller, 1)

        self.current_view = None
        self.combobox = builder.get_object("combobox")
        self.combobox.set_active(2)

        self.statusbar = builder.get_object("statusbar")

        self.window.show_all()
Example #28
0
            "reference:": reference,
            "title": documents[0]["title"].decode("utf-8"),
            "date": datestring,
            "attachment": solr_attachments.values(),
            "session": solr_sessions.values(),
            "committee": solr_committees.values(),
            "street": unique(solr_streets.keys()),
            "person": solr_people.values(),
            "content": solr_body,
            #'location': positions_for_streetnames(unique(solr_streets.keys()))
        }


if __name__ == "__main__":
    s = solr.SolrConnection(config.SOLR_URL)
    db = DataStore(config.DBNAME, config.DBHOST, config.DBUSER, config.DBPASS)
    parser = OptionParser()
    parser.add_option(
        "-s",
        "--sample",
        dest="sample",
        default=1,
        help="z.B. die Zahl 10 um nur jedes zehnte Dokument zu importieren. Beschleunigt den Import beim Entwickeln.",
    )
    parser.add_option(
        "-v",
        "--verbose",
        dest="verbose",
        default=False,
        action="store_true",
        help="Aktiviert die detailliertere Ausgabe (Dokumententitel etc.).",
	def on_data(self,data):
		"""Twitter stream callback: parse a tweet, dedupe against recent
		tweets, extract a coupon code (and optional expiry) plus the
		linked vendor URL, and store the result in the DataStore.

		Returns True on a successful insert; returns None (silently) when
		the tweet is filtered out at any stage.
		"""
		try:

			data = json.loads(data)
			newd = {}

			# Get Tweet
			tweet = Utilities.clean_tweet(data['text'])

			# Drop near-duplicates of recently seen tweets.
			for key in self.recent_tweets:
				#print Utilities.similarity(key,tweet)
				if Utilities.similarity(key,tweet) > 70:
					return
			'''
			if tweet in self.recent_tweets:
				return
			else:
			'''
			# Keep the recent-tweet cache bounded at ~50 entries.
			if len(self.recent_tweets) > 50:
				self.recent_tweets.popitem(last=False)
			self.recent_tweets[tweet] = True
			#print tweet

			# Get Redirected url
			try:
				url_name = Utilities.get_redirected_url(str(data['entities']['urls'][0]['expanded_url']))
			except:
				# NOTE(review): the raise below is unreachable after return.
				return
				raise BaseException("Url for tweet did not exist")

			# Get shortened url for key --> Upto 5th '/' or entire address (whichever is shorter)

			url_name = Utilities.get_shortened_url(url_name).lower()

			#Get timestamp
			timestamp = str(data['created_at'])

			# Verify authenticity of website by checking if it has the word coupon
			# If it does , assume it is not a vendor site. Maybe blog, maybe coupon site

			try:
				Utilities.check_url_validity(url_name)
			except:
				# NOTE(review): the raise below is unreachable after return.
				return
				raise BaseException("Url was not a valid site")


			with open("x.txt","a") as f:
				f.write(tweet + '\n')
				f.write("--------------------" + '\n')
			# Code to extract important information from this tweet
			#self.tweets += 1
			#print tweet
			#print "Tweet Number : " + str(self.tweets)
			e = Extraction()
			code,date = e.extract_all(tweet)
			if not code:
				#print " --------------- "
				# NOTE(review): the raise below is unreachable after return.
				return
				raise BaseException("Did not have coupon code information")

			# Default expiry (seconds) when no date was extracted.
			if not date :
				date = 183600
			else :
				self.tweets_with_dates += 1
				self.total_expiry_time += date
				self.exp_time.append(date/3600)
				print self.tweets_with_dates, int(numpy.median(self.exp_time))
				#print date
				#print self.tweets_with_dates
				print tweet
				#print " ----------------------------------- "
				#print "Tweet : ",

				#print "Url : ",
				#print url_name
				#print "Date : "


			#print "Coupons : " + str(self.tweets_with_coupons)
			#print "Dates : " + str(self.tweets_with_dates)
			#print "Total Expiry Time :" + str(self.total_expiry_time/3600) + "hours"
			#print "Avg Expiry Time :" + str((self.total_expiry_time/(self.tweets_with_dates+1))/3600) + "hours"
			print '--------------------------------------'

			#print "CODE : " + code
			# Storage key combines the vendor URL and the coupon code.
			key = url_name + ':::' + code
			#print "KEY : " + key

			#print "Tweet : "
			#print tweet
			#print "Url : ",
			#print url_name
			#print " ----------------------------------- "

			ds = DataStore()
			#print url_name,code,date
			#get outer url - url uptil 3 '/'s . eg - http://www.etsy.com/
			outer_url = "parent::"+Utilities.get_shortened_url(url_name,3)
			ds.insert(key,url_name,code,tweet,date,outer_url)
			#print '-----------------------'

			return True
		except BaseException as e:
			# Missing 'text' key is common (non-tweet stream events);
			# everything else is also swallowed here, with a throttle.
			if str(e) != "'text'":
				#print " *************** " + str(e) + " *************** "
				#print "----------------------------------------"
				pass
			time.sleep(1)
def handle_keys():
    """Return (True, keys) where keys is a '-'-joined string of the first
    field of every key row in the datastore."""
    store = DataStore()
    rows = store.mykeys()
    first_fields = [row[0] for row in rows]
    joined = '-'.join(str(field) for field in first_fields)
    return (True, joined)