Example #1
 def __init__(self, settings):
     self.settings = settings
     self.fileRestClient = ParseFileRestClient(settings)
     self.galleryService = GalleryService(settings)
     register(self.settings["parse"]["application_id"],
              self.settings["parse"]["rest_api_key"])
     self.batcher = ParseBatcher()
Example #2
def setup_ghosts(g):
    """
    Create 1-50 Ghost objects by "batch_save"-ing them to Parse using
    ParsePy's ParseBatcher().
    """

    # Start a function timer.
    function_start_time = time.time()

    # We must subclass Object for the class names we want to use from Parse.
    class Ghost(Object):
        pass

    list_of_Ghost_objects_to_upload = []

    for ghost_number in range(1, g + 1, 1):
        new_Ghost_object = Ghost(
            username = "******",
            ghostNum = ghost_number,
            firstName = "Ghost",
            sex = "G",
            array_eventsRegistered = [1,2,3,4,5,6,7,8,9,10]
        )
        list_of_Ghost_objects_to_upload.append(new_Ghost_object)

    batcher = ParseBatcher()
    batcher.batch_save(list_of_Ghost_objects_to_upload)

    print ("\n{} Ghost objects uploaded to Parse in {} seconds.\n".format(g, time.time() - function_start_time))
Example #3
 def tearDown(self):
     city_name = getattr(self.sao_paulo, 'name', None)
     game_score = getattr(self.score, 'score', None)
     if city_name:
         ParseBatcher().batch_delete(City.Query.filter(name=city_name))
     if game_score:
         ParseBatcher().batch_delete(
             GameScore.Query.filter(score=game_score))
Example #4
 def tearDown(self):
     game_score = getattr(self.score1, 'score', None)
     game_name = getattr(self.game, 'name', None)
     if game_score:
         ParseBatcher().batch_delete(
             GameScore.Query.filter(score=game_score))
     if game_name:
         ParseBatcher().batch_delete(Game.Query.filter(name=game_name))
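The two tearDown methods above (Examples #3 and #4) repeat the same guard-then-delete pattern. It could be factored into a small helper; a sketch, where delete_matching is a hypothetical name and the models expose the usual parse_rest Query.filter interface:

from parse_rest.connection import ParseBatcher

def delete_matching(model, **filters):
    # Batch-delete the model's objects matching the filters, if every value is set.
    if all(filters.values()):
        ParseBatcher().batch_delete(model.Query.filter(**filters))

Each tearDown then collapses to calls like delete_matching(GameScore, score=game_score).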
Example #5
def main():
	soup = BeautifulSoup(requests.get('https://www.mturk.com/mturk/viewhits?searchWords=&pageNumber=4&searchSpec=HITGroupSearch%23T%231%2310%23-1%23T%23%21%23%21NumHITs%211%21%23%21&sortType=NumHITs%3A1&selectedSearchType=hitgroups').text, "html.parser")
	titles = soup.findAll('a', {"class" : "capsulelink"})

	num_results = int(soup.findAll('td', {"class" : "title_orange_text"})[0].text.strip()[8:-7])

	print("\nTotal number of HITs: " + str(num_results))
	count = 0
	page = 1
	requestErrors = 0
	privateCount = 0

	register("DKJjvfvhnCGRK0cAdOpJN9MwR7zhIpuYya5xvbuF", "d8hIYrBrcW4r2ujEkL79vE03FmLxE2QCJgSwuXYv")
	HITClass = ParseObject.factory("HIT")
	all_hits = HITClass.Query.all()
	batcher = ParseBatcher()
	batcher.batch_delete(all_hits)
	while (count < 200):
		soup = BeautifulSoup(requests.get('https://www.mturk.com/mturk/viewhits?searchWords=&pageNumber=' + str(page)  + '&searchSpec=HITGroupSearch%23T%231%2310%23-1%23T%23%21%23%21NumHITs%211%21%23%21&sortType=NumHITs%3A1&selectedSearchType=hitgroups').text, "html.parser")
		titles = soup.findAll('a', {"class" : "capsulelink"})
		for t in titles:
			time.sleep(.3)
			count = count + 1
			print("\n" + str(count) + "\nTitle: " + t.text.strip())
			linkA = t.parent.parent.findAll('span')[1].a
			# check if the link is public
			if linkA.has_attr('href'):
				link = linkA['href']
				hitPage = BeautifulSoup(requests.get('https://www.mturk.com' + link).text, "html.parser")
				form = hitPage.findAll('form', {'name' : 'hitForm'})
				# Check for error 
				if len(form) >= 3:
					form = form[2]
					requester = form.find("input", {'name' : 'prevRequester'})['value']
					print('Requester: ' + requester)
					reward = form.find("input", {'name' : 'prevReward'})['value']
					print('Reward: ' + reward)
					groupID = form.find("input", {'name' : 'groupId'})['value']
					print('Group id: ' + groupID)
				  	
					anyObject = HIT(requester=requester, reward=float(reward[3:]), 
									title=t.text.strip(), groupID=groupID)
					anyObject.save()

				else:
					requestErrors = requestErrors + 1
					print(link)
					print(form)
			else:
				link = linkA['id']
				print(link)
				privateCount = privateCount + 1
		page = page + 1

	print("\n\nErrors: " + str(requestErrors))
	print("Private HITs: " + str(privateCount))
Example #6
 def tearDown(self):
     city_name = getattr(self.sao_paulo, 'name', None)
     game_score = getattr(self.score, 'score', None)
     collected_item_type = getattr(self.collected_item, 'type', None)
     if city_name:
         ParseBatcher().batch_delete(City.Query.filter(name=city_name))
     if game_score:
         ParseBatcher().batch_delete(GameScore.Query.filter(score=game_score))
     if collected_item_type:
         ParseBatcher().batch_delete(CollectedItem.Query.filter(type=collected_item_type))
Example #7
    def accept_post(self, post):
        snippet_posts = [Post.get(snip['objectId']) for snip in self.snippets]
        for snippet_post in snippet_posts:
            snippet_post.archived = True
            snippet_post.original_story = self

        batcher = ParseBatcher()
        batcher.batch_save(snippet_posts)

        self.snippets = []
        self.accepted_posts.append(post)
Example #8
    def setUpClass(cls):
        """save a bunch of GameScore objects with varying scores"""
        # first delete any that exist
        ParseBatcher().batch_delete(GameScore.Query.all())
        ParseBatcher().batch_delete(Game.Query.all())

        cls.game = Game(title="Candyland", creator=None)
        cls.game.save()

        cls.scores = [GameScore(score=s, player_name='John Doe', game=cls.game) for s in range(1, 6)]
        ParseBatcher().batch_save(cls.scores)
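As excerpted, setUpClass(cls) only makes sense as a classmethod on a test case; the decorator and class header were presumably stripped along with the surrounding context. The full shape would be roughly (class name hypothetical):

import unittest

class GameScoreTests(unittest.TestCase):   # hypothetical name
    @classmethod
    def setUpClass(cls):
        ...   # body as in the example above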
Example #9
 def accept_post(self, post):
     snippet_posts = [Post.get(snip['objectId']) for snip in self.snippets]
     for snippet_post in snippet_posts:
         snippet_post.archived = True
         snippet_post.original_story = self
         
     batcher = ParseBatcher()
     batcher.batch_save(snippet_posts)
 
     self.snippets = []
     self.accepted_posts.append(post)
Example #10
    def testBatch(self):
        """test saving, updating and deleting objects in batches"""
        scores = [
            GameScore(score=s, player_name='Jane', cheat_mode=False)
            for s in range(5)
        ]
        batcher = ParseBatcher()
        batcher.batch_save(scores)
        self.assertEqual(
            GameScore.Query.filter(player_name='Jane').count(), 5,
            "batch_save didn't create objects")
        self.assertTrue(all(s.objectId is not None for s in scores),
                        "batch_save didn't record object IDs")

        # test updating
        for s in scores:
            s.score += 10
        batcher.batch_save(scores)

        updated_scores = GameScore.Query.filter(player_name='Jane')
        self.assertEqual(sorted([s.score for s in updated_scores]),
                         list(range(10, 15)),
                         msg="batch_save didn't update objects")

        # test deletion
        batcher.batch_delete(scores)
        self.assertEqual(
            GameScore.Query.filter(player_name='Jane').count(), 0,
            "batch_delete didn't delete objects")
Example #11
    def testCanBatchUpdate(self):
        user = self._get_logged_user()
        phone_number = "555-0134"

        original_updatedAt = user.updatedAt

        user.phone = phone_number
        batcher = ParseBatcher()
        batcher.batch_save([user])

        self.assertTrue(User.Query.filter(phone=phone_number).exists(),
                        'Failed to batch update user data. New info not on Parse')
        self.assertNotEqual(user.updatedAt, original_updatedAt,
                            'Failed to batch update user data: updatedAt not changed')
Example #12
 def batchSaveList(self, listOfParseObjects):
     if len(listOfParseObjects)==0:
         return
         
     print 'batch saving objects'
     self.appendToLog('batch saving %d objects' % len(listOfParseObjects))
     
     #batch save a list of parseobjects. the batch limit is 50!
     batcher = ParseBatcher()
     batchLimit = 50
     while len(listOfParseObjects)> 0:
         #save the first @batchLimit amount of objects
         batcher.batch_save(listOfParseObjects[0:batchLimit])
         
         #clear the list of those saved objects
         listOfParseObjects = listOfParseObjects[batchLimit:]
Example #13
def storeMonthlyPrices():
    all_scores = Crop.Query.all()
    data = retrieveMonthly("", 12, 2014)
    #length = 0
    crops = []
    #batches = []
    #batches.append(batcher)
    if data and len(data) > 0:
        for x in range(0, 49):
            crop = Crop(name=data[x]['commodity'],
                        price=data[x]['mean'],
                        size=random.randint(1, 5),
                        soil="all")
            crops.append(crop)
    batcher = ParseBatcher()
    batcher.batch_save(crops)
Example #14
    def testRelations(self):
        """Make some maps, make a Game Mode that has many maps, find all maps
        given a Game Mode"""
        maps = [GameMap(name="map " + i) for i in ['a', 'b', 'c', 'd']]
        ParseBatcher().batch_save(maps)

        gm = GameMode(name='test mode')
        gm.save()
        gm.addRelation("maps", GameMap.__name__, [m.objectId for m in maps])

        modes = GameMode.Query.all()
        self.assertEqual(len(modes), 1)
        mode = modes[0]
        maps_for_mode = GameMap.Query.filter(maps__relatedTo=mode)
        self.assertEqual(len(maps_for_mode), 4)

        gm.delete()
        ParseBatcher().batch_delete(maps)
Example #15
def load_spell_data(db):
	conn = get_db_connection(db)
	curs = conn.cursor()
	find_central_index(curs, **{"type": "spell"})
	index_lines = curs.fetchall()
	batch = []
	batcher = ParseBatcher()
	count = 0
	for line in index_lines:
		spell = get_parse_spell(line['url'])
		if spell:
			batch.append(make_spell(conn, line, spell))
		else:
			batch.append(make_spell(conn, line))
		if len(batch) >= 50:
			batcher.batch_save(batch)
			batch = []
			count += 50
			print "Saving through %s" % count
	batcher.batch_save(batch)
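If the number of index lines is an exact multiple of 50, the final batch_save(batch) fires with an empty list; a one-line guard avoids the wasted request (the same applies to the copy of this function in Example #23 below):

	if batch:   # skip the trailing request when nothing is left over
		batcher.batch_save(batch)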
Example #16
class BatchSaver:
    def __init__(self, delay):
        self.batcher = ParseBatcher()
        self.objects_to_save = []
        self.delay = delay
        self.save_count = 0

    def add_object_to_save(self, object_to_save):
        self.objects_to_save.append(object_to_save)
        if len(self.objects_to_save) == 50:
            self.save()

    def save(self, delay=True):
        if len(self.objects_to_save) > 0:
            print "Saving", self.save_count + 1, "-", self.save_count + len(
                self.objects_to_save)
            self.batcher.batch_save(self.objects_to_save)
            self.save_count += len(self.objects_to_save)
            self.objects_to_save = []
            if delay:
                time.sleep(self.delay)
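A sketch of driving the class above, assuming pending_objects is a hypothetical list of parse_rest Object instances; the delay is in seconds:

saver = BatchSaver(delay=2.0)        # sleep 2 s after each full batch
for obj in pending_objects:          # hypothetical list of parse_rest objects
    saver.add_object_to_save(obj)    # flushes automatically at 50 objects
saver.save(delay=False)              # flush the final partial batch without sleeping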
Example #17
def push():
    old_recipes = get_recipes() if not force_override else []
    raw_recipes = yaml.safe_load(open(recipes_filename).read())
    recipes = []
    ingredients = []
    for recipe in raw_recipes:
        if recipe not in old_recipes:
            recipes.append(Recipe(dictionary=recipe))
            for ingredient in recipe["ingredients"]:
                ingredient.update({"recipe": recipe['name']})
                ingredients.append(Ingredient(dictionary=ingredient))

    print "Found {} new recipes.".format(len(recipes))
    print "Found {} new ingredients.".format(len(ingredients))

    if add_cocktaildb_url:
        filename = "cocktaildb.csv"
        cocktaildb_data = open(filename).readlines()
        cocktaildb_data = [line.split(",") for line in cocktaildb_data]
        cocktaildb_data = {
            line[0].lower(): line[1].strip()
            for line in cocktaildb_data
        }

    old_bases = get_bases() if not force_override else []
    raw_bases = yaml.safe_load(open(bases_filename).read())
    bases = []
    brands = []
    for base in raw_bases:
        if base not in old_bases:
            base_object = IngredientBase(dictionary=base)
            bases.append(base_object)
            for brand in base["brands"]:
                brand.update({"base": base["name"]})
                brands.append(Brand(dictionary=brand))
            if add_cocktaildb_url:
                base_object.cocktaildb = cocktaildb_data.get(
                    base_object.name.lower(), "")

    print "Found {} new bases.".format(len(bases))
    print "Found {} new brands.".format(len(brands))

    print "Pushing data."

    max_batch_size = 50
    for chunk in chunks(recipes + bases + ingredients + brands,
                        max_batch_size):
        ParseBatcher().batch_save(chunk)
        print "Pushed {} objects.".format(len(chunk))
Example #18
class ParseService(Base, LogMixin):

    def __init__(self, settings):
        self.settings = settings
        self.fileRestClient = ParseFileRestClient(settings)
        self.galleryService = GalleryService(settings)
        register(self.settings["parse"]["application_id"], self.settings["parse"]["rest_api_key"])
        self.batcher = ParseBatcher()

    def getByFilePath(self, filePath):
        return ContentItem.Query.get(filePath=filePath)

    def post(self, item):
        return item.save()

    def drop(self):
        # There is no truncate on parse, so we iterate and delete all...
        if(self.settings["drop"]):
            items = ContentItem.Query.all()
            #self.logger.info(dir(items)
            self.logger.info("Truncating items... %s" % items.count())
            if items.count() > 0:
                self.batcher.batch_delete(items)
            self.logger.info("Done.")
Example #19
def update_answers_in_Parse():

    # Get a list of all Answers in Parse.
    ct_a = Answer.Query.all().count()
    queryset = []
    batcher = ParseBatcher()
    print("{} Answers exist in Parse.".format(ct_a))
    if ct_a == 0: # None exist; upload whole list
        pass
    elif ct_a > 0: # There's at least 1 to get
        for i in range(0, ct_a, min(ct_a,1000)): # for each chunk of <= 1000 answers
            queryset += list(Answer.Query.all().skip(i).limit(1000)) # get the chunk, add to queryset
        queryset.sort(key = attrgetter("num"))
        for A, a in zip(queryset, [a for a in _Answer.LIA if queryset[a.num-1].num == a.num]): # for each answer with the same num
            # compare all attributes of the _Answer class.
            # if different, set Parse object's attribute to _Answer object's attribute;
            # if all are same, keep in Parse and delete from LIA
            for key in _Answer.LI_ATTR: # for all attributes of the _Answer class
                if getattr(A, key) != getattr(a, key): # if different
                    print(key, getattr(A,key), getattr(a,key))
                    batcher.batch_delete([A])
                    batcher.batch_save([a])
                    # print("{} updated in Parse".format(a.ID))
                    break
                elif _Answer.LI_ATTR[-1] == key:
                    _Answer.LIA.remove(a)
        print("{} Answers updated in Parse.".format(len(queryset)-len(_Answer.LIA)))
        print("{} Answers must be created in Parse.".format(len(_Answer.LIA)))

    # Now, upload those remaining in _Answer.LIA to Parse
    # (should put batch_upload_with_sleep in a separate function)
    # batch_save in chunks of no more than 50
    len_lia = len(_Answer.LIA)
    batcher = ParseBatcher()
    lili_chunks = [_Answer.LIA[i:i+50] for i in range(0, len_lia, 50)]
    for index, chunk in enumerate(lili_chunks):
        while True:
            try:
                batcher.batch_save(chunk)
                print "\r{} of {} Answers uploaded to Parse".format(50*(index+1), len_lia),
                sys.stdout.flush()
                break
            except:
                print("Locked. Sleeping for 5 seconds.")
                time.sleep(5)
    print
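The bare except: in the retry loop above treats every error, including typos, as a transient lock. A slightly tidier sketch that still assumes any request failure is temporary, but gives up after a few attempts:

import time

def batch_save_with_retry(batcher, chunk, retries=5, delay=5):
    # Retry a batch_save, sleeping between attempts; re-raise on the final failure.
    for attempt in range(retries):
        try:
            batcher.batch_save(chunk)
            return
        except Exception:
            if attempt == retries - 1:
                raise
            time.sleep(delay)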
Example #20
class ParseService(Base, LogMixin):
    def __init__(self, settings):
        self.settings = settings
        self.fileRestClient = ParseFileRestClient(settings)
        self.galleryService = GalleryService(settings)
        register(self.settings["parse"]["application_id"],
                 self.settings["parse"]["rest_api_key"])
        self.batcher = ParseBatcher()

    def getByFilePath(self, filePath):
        return ContentItem.Query.get(filePath=filePath)

    def post(self, item):
        return item.save()

    def drop(self):
        # There is no truncate on parse, so we iterate and delete all...
        if (self.settings["drop"]):
            items = ContentItem.Query.all()
            #self.logger.info(dir(items)
            self.logger.info("Truncating items... %s" % items.count())
            if items.count() > 0:
                self.batcher.batch_delete(items)
            self.logger.info("Done.")
Example #21
def put_answers_in_Parse():

    # Query for first 1000 Answers
    queryset = list(Answer.Query.all().limit(1000))
    while True:
        if not queryset:
            print("No Answers to delete from Parse -- none exist.")
            break # skip to batch_save without deleting
        elif len(queryset) == len(_Answer.LIA):
            print("{} Answers already exist in Parse.".format(len(queryset)))
            srsly_delete_stuff = raw_input("Continue with delete anyway? (Y/n): ")
            if srsly_delete_stuff != "Y":
                print "Delete skipped. Upload skipped."
                return
        else:
            print("There are {} Answers to delete from Parse.".format(len(queryset)))
            srsly_delete_stuff = raw_input("Delete Answers from Parse? (Y/n): ")
            if srsly_delete_stuff != "Y":
                print "Delete skipped. Upload skipped."
                return

        # batch_delete in chunks of no more than 50
        batcher = ParseBatcher()
        lili_chunks = [queryset[i:i+50] for i in range(0, len(queryset), 50)]
        for index, chunk in enumerate(lili_chunks):
            batcher.batch_delete(chunk)
            print "\r{} of {} Answers deleted from Parse".format(50*(index+1), len(queryset)),
            sys.stdout.flush()
        print
        break # go to batch_save

    # batch_save in chunks of no more than 50
    len_lia = len(_Answer.LIA)
    batcher = ParseBatcher()
    lili_chunks = [_Answer.LIA[i:i+50] for i in range(0, len_lia, 50)]
    for index, chunk in enumerate(lili_chunks):
        while True:
            try:
                batcher.batch_save(chunk)
                print "\r{} of {} Answers uploaded to Parse".format(50*(index+1), len_lia),
                sys.stdout.flush()
                break
            except:
                print("Locked. Sleeping for 5 seconds.")
                time.sleep(5)
    print
    pass
Example #22
    def testBatch(self):
        """test saving, updating and deleting objects in batches"""
        scores = [GameScore(score=s, player_name="Jane", cheat_mode=False) for s in range(5)]
        batcher = ParseBatcher()
        batcher.batch_save(scores)
        self.assertEqual(GameScore.Query.filter(player_name="Jane").count(), 5, "batch_save didn't create objects")
        self.assertTrue(all(s.objectId is not None for s in scores), "batch_save didn't record object IDs")

        # test updating
        for s in scores:
            s.score += 10
        batcher.batch_save(scores)

        updated_scores = GameScore.Query.filter(player_name="Jane")
        self.assertEqual(
            sorted([s.score for s in updated_scores]), list(range(10, 15)), msg="batch_save didn't update objects"
        )

        # test deletion
        batcher.batch_delete(scores)
        self.assertEqual(GameScore.Query.filter(player_name="Jane").count(), 0, "batch_delete didn't delete objects")
Example #23
def load_spell_data(db):
    conn = get_db_connection(db)
    curs = conn.cursor()
    find_central_index(curs, **{"type": "spell"})
    index_lines = curs.fetchall()
    batch = []
    batcher = ParseBatcher()
    count = 0
    for line in index_lines:
        spell = get_parse_spell(line['url'])
        if spell:
            batch.append(make_spell(conn, line, spell))
        else:
            batch.append(make_spell(conn, line))
        if len(batch) >= 50:
            batcher.batch_save(batch)
            batch = []
            count += 50
            print "Saving through %s" % count
    batcher.batch_save(batch)
Example #24
def put_questions_in_Parse():

    # Query for Questions
    queryset = list(Question.Query.all().limit(1000))
    while True:
        if not queryset:
            print("No Questions to delete from Parse -- none exist.")
            break
        elif len(queryset) == len(_Question.LIQ):
            print("{} Questions already exist in Parse.".format(len(queryset)))
            srsly_delete_stuff = raw_input("Continue with delete anyway? (Y/n): ")
            if srsly_delete_stuff != "Y":
                print("Delete skipped. Upload skipped.")
                return
        else:
            print("There are {} Questions to delete from Parse.".format(len(queryset)))
            srsly_delete_stuff = raw_input("Delete Questions from Parse? (Y/n): ")
            if srsly_delete_stuff != "Y":
                print("Delete skipped. Upload skipped.")
                return

        # batch_delete in chunks of no more than 50
        batcher = ParseBatcher()
        lili_chunks = [queryset[i:i+50] for i in range(0, len(queryset), 50)]
        for index, chunk in enumerate(lili_chunks):
            batcher.batch_delete(chunk)
            print("\r{} of {} Questions deleted from Parse".format(50*(index+1), len(queryset)), end = "\r")
            sys.stdout.flush()
        print()
        break

    # batch_save in chunks of no more than 50
    len_li_q = len(_Question.LIQ)
    batcher = ParseBatcher()
    lili_chunks = [_Question.LIQ[i:i+50] for i in range(0, len_li_q, 50)]
    for index, chunk in enumerate(lili_chunks):
        batcher.batch_save(chunk)
        print("\r{} of {} Questions uploaded to Parse".format(50*(index+1), len_li_q), end = "\r")
        sys.stdout.flush()
    print()
Example #25
 def tearDown(self):
     ParseBatcher().batch_delete(Review.Query.all())
Example #26
def setup_event_users(m, f, mg, fg, ep):
    """
    Create zE0001_User objects by "batch_save"-ing them to Parse using 
    ParsePy's ParseBatcher(). Event User objects are _User objects whose 
    array_eventsRegistered contains the eventNum of this current event.

    """

    # Start a function timer.
    function_start_time = time.time()

    # Get the correct class name from the ep = Event Prefix (passed in).
    eventUser_ClassName = ep + "_User"
    eventUser_Class = Object.factory(eventUser_ClassName)

    # add some Users
    qset_all_users = User.Query.all().order_by("userNum")
    li_meu = list(qset_all_users.filter(sex = "M").limit(m))
    li_feu = list(qset_all_users.filter(sex = "F").limit(f))
    li_mgeu = list(qset_all_users.filter(sex = "MG").limit(mg))
    li_fgeu = list(qset_all_users.filter(sex = "FG").limit(fg))

    li_users_at_event = li_meu + li_feu + li_mgeu + li_fgeu

    count_eu = len(li_users_at_event)

    li_eu_obj_to_upload = []

    for n, eu_obj in enumerate(li_users_at_event):
        new_EU_object = eventUser_Class(
            user_objectId = eu_obj.objectId,
            event_userNum = n + 1,
            username = eu_obj.username,
            sex = eu_obj.sex
        )
        li_eu_obj_to_upload.append(new_EU_object)

    # # now add some ghosts
    # g, mg, fg, s = determine_ghosts_and_stations(meu_count, feu_count)
    # qset_all_ghosts = User.Query.filter(userNum__gte = 1000000).order_by("userNum")
    # list_male_ghosts = list(qset_all_ghosts.filter(userNum__lte = 1000007))[:mg]
    # list_female_ghosts = list(qset_all_ghosts.filter(userNum__gte = 1000006))[:fg]
    # list_ghosts_at_event = list_male_ghosts + list_female_ghosts
    # print (len(list_ghosts_at_event))
    # print (len(list_male_ghosts))
    # print (len(list_female_ghosts))
    # print (list_ghosts_at_event)
    # print (list_male_ghosts)
    # print (list_female_ghosts)
    # print (g)

    # for gu_num in range(g):
    #     new_Event_User_object = zE0001_User(
    #         user_objectId = list_ghosts_at_event[gu_num].objectId,
    #         event_userNum = gu_num + 100 + 1,
    #         username = list_ghosts_at_event[gu_num].username,
    #         sex = list_ghosts_at_event[gu_num].sex
    #         )
    #     list_of_eu_objects_to_upload.append(new_Event_User_object)


    # Call batcher.batch_save on slices of the list no larger than 50.
    batcher = ParseBatcher()

    for k in range(count_eu/50 + 1):

        lo = 50*k
        hi = min(50*(k + 1), count_eu)
        batcher.batch_save(li_eu_obj_to_upload[lo:hi])


    print ("\n{} zE0001_User objects uploaded to Parse in {} seconds.\n"
          .format(count_eu, round(time.time() - function_start_time, 2)))

    return li_eu_obj_to_upload
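count_eu/50 + 1 relies on Python 2 integer division; under Python 3 it yields a float and range raises. Stepping over the list directly sidesteps the arithmetic (a sketch; slices clamp at the list's end):

    for lo in range(0, count_eu, 50):
        batcher.batch_save(li_eu_obj_to_upload[lo:lo + 50])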
Example #27
 def __init__(self, settings):
     self.settings = settings
     self.fileRestClient = ParseFileRestClient(settings)
     self.galleryService = GalleryService(settings)
     register(self.settings["parse"]["application_id"], self.settings["parse"]["rest_api_key"])
     self.batcher = ParseBatcher()
Example #28
        
    client.Name = name
    client.Email = email
    client.Company = {"__type": "Pointer", "className": "Companies", "objectId": "AgViN0JqQq"}
    client.Key = company
    client.Country = country
    #client.save()
    
    clientslist.append(client)
    
    print email
    
    counter += 1
    
    if(counter == 20):
    
        batcher = ParseBatcher()
        batcher.batch_save(clientslist)
        clientslist = []

        print "Esperando algunos segundos"

        #esperar un poco
        time.sleep(.1)
        
        counter=0
        
        print "Trabajando ..."
    
print str(len(lines)) + " lines analized"
Example #29
def searchEventbrite(location, date):
	
	date = datetime.datetime(date.year, date.month, date.day, 0, 0, 0)
	beg_date = date - datetime.timedelta(days=0)
	end_date = date + datetime.timedelta(days=6)
	
	# run Parse query
	parse_event = get_event_type()
	events = parse_event.Query.all().filter(City=location)
	events = events.order_by("createdAt")
	events = events.limit(1)
	events = [e for e in events]
	if events:
		last_update = events[0].createdAt
	else:
		last_update = None
	"""
	eb_auth_tokens = {'app_key':  EVENTBRITEKEYS['app_key'],
					  'access_code': EVENTBRITEKEYS['access_code']}
	eb_client = eventbrite.EventbriteClient(eb_auth_tokens)
	
	event_search = eb_client.event_search({'city': location, 'start_date.range_start': beg_date, 'start_date.range_end': end_date})
	"""
	params =   {'token': EVENTBRITEKEYS['access_code'],
				'venue.city': location,
				'start_date.range_start': '%s-%s-%sT00:00:00Z' % (beg_date.year, beg_date.month, beg_date.day),
				'start_date.range_end': '%s-%s-%sT00:00:00Z' % (end_date.year, end_date.month, end_date.day),
				}
			

	try:
		event_search = sendRequest('https://www.eventbriteapi.com/v3/events/search', data=params)
		if not event_search['success']:
			raise Exception("Error searching EventBrite for %s - %s" % (location, event_search['error']))
		
		results = []
		for event in event_search['response']['events']:
			results.append(event)
		
		total_pages = event_search['response']['pagination']['page_count']
		
		if total_pages > 1:
			for p in range(2, total_pages+1):
				# rerun search on next page
				params['page'] = p
				event_search = sendRequest('https://www.eventbriteapi.com/v3/events/search', data=params)
				if not event_search['success']:
					raise Exception("Error searching EventBrite for %s" % (location))
				for event in event_search['response']['events']:
					results.append(event)
				

		# get address data
		for i in results:
			venue_search = sendRequest(i['venue']['resource_uri'], data={'token': EVENTBRITEKEYS['access_code'],})
			if not venue_search['success']:
				raise Exception("Error searching EventBrite for %s" % (location))
			i['venue'] = venue_search['response']


		# save event data into parse
		batcher = ParseBatcher()
		items = []
		index = 1
		
		for i in results:
		
			if index == 50:
				try:		
					batcher.batch_save(items)
				except Exception as err:
					raise Exception("Error saving to Parse for %s - %s" % (location,err))
				
				items = []
				index = 1
			
			entry = Event()	
				
			entry.Name = i['name']['text']
			
			entry.Address = "" 
			if i['venue']['address_1']:
				  entry.Address += i['venue']['address_1']
			if i['venue']['address_2']:
				  entry.Address += " %s" % (i['venue']['address_2'])
			entry.City = location
			entry.Country = "United States"
			entry.Lat = i['venue']['latitude']
			entry.Lng = i['venue']['longitude']

			entry.Capacity = int(i['capacity'])

			entry.Timezone = i['start']['timezone']
			dt = i['start']['local'].split("T")
			d = dt[0].split("-")
			entry.StartTime = dt[1]
			entry.StartDay = int(d[2])
			entry.StartMonth = int(d[1])
			entry.StartYear = int(d[0])
			dt = i['end']['local'].split("T")
			d = dt[0].split("-")
			entry.EndTime = dt[1]
			entry.EndDay = int(d[2])
			entry.MonthEnd = int(d[1])
			entry.YearEnd = int(d[0])

			entry.StartDate = Date(datetime.datetime(entry.StartYear, entry.StartMonth, entry.StartDay,0,0,0))
			entry.EndDate = Date(datetime.datetime(entry.YearEnd, entry.MonthEnd, entry.EndDay,0,0,0))
			
			items.append(entry)
			index += 1
			
		if items:
			try:		
				batcher.batch_save(items)
			except Exception as err:
				raise Exception("Error saving to Parse for %s - %s" % (location,err))

		return True 
		
	except Exception as err:
		return err	
Example #30
import json
from pprint import pprint
from parse_rest.connection import register, ParseBatcher
from parse_rest.datatypes import Object as ParseObject
from parse_rest.datatypes import ParseType, ParseResource

APPLICATION_ID = "2yokKd96SUq3dKCQDcSI7LlGPJ7ZddnCMwbCIvX7"
REST_API_KEY = "MyfLxYfGm8iaxVahmsTCeKSNNuiz2wKzkQIOCyhS"

register(APPLICATION_ID, REST_API_KEY)

with open('doc.json') as data_file:
	data = json.load(data_file)
data_to_upload = []
for course in range(len(data)):
	current = data[course]
	if current['Term'] == '20151':
		if current['DivisionCode'] == 'CC' or current['DivisionName'] == 'SCH OF ENGR & APP SCI: UGRAD' or current['DivisionCode'] == 'BC' or current['DivisionCode'] == 'GS':
				newClass = ParseObject()
				newClass.class_code = current['Course']
				newClass.instructor = current['Instructor1Name']
				newClass.name = current['CourseTitle']
				#call function that gets location, start, and end time
				#newClass.location, newClass.startTime, newClass.endTime = parseMeetString(current)
				data_to_upload.append(newClass)

batcher = ParseBatcher()
for x in range(0, len(data_to_upload), 50):
	batcher.batch_save(data_to_upload[x : x+50 if (x+50) < len(data_to_upload) else len(data_to_upload)])
Example #31
 label = myfont.render("selected: ", 1, (0,0,0))
 screen.blit(label, (450, 68))
 
 # back button
 if button_alloc((450, 618, 85, 20)," remove last"):
     enemies[selected_object].pop()  # pop() drops the last element; remove() would delete the first equal one
     
 # export
 if button_alloc((450, 648, 85, 20)," export"):
     # delete last one
     dev_delete = pointBase.Query.filter(id_devices=device_ID)
     dev_number = 1
     while 1:
         this_delete = dev_delete.limit(49)
         if this_delete.exists():
             batcher = ParseBatcher()
             batcher.batch_delete(this_delete)
             dev_delete.skip(49*dev_number).limit(49)
             dev_number += 1
         else:
             break
     
     for rocket_num, one in enumerate(enemies):
         # create new one
         rocket_id = str(time.time())
         pointts = []
         
         batcher = ParseBatcher()
         save_limit = 0
         all_object = 0
         for count in range(0, len(one)):
Example #32
exit()


for agency in Agencies.Query.all():
    
    print agency.Company.objectId
    
    agency.Company = {"__type": "Pointer", "className": "Companies", "objectId": "ATTeW8GRQt"}

    storeslist.append(agency)

    counter += 1
    
    if(counter == 20):
        total += 20
        batcher = ParseBatcher()
        batcher.batch_save(storeslist)
        storeslist = []

        print "Esperando algunos segundos"

        #esperar un poco
        time.sleep(.1)
        
        counter=0
        
        print "Trabajando ..."

print "Total: ", total
Example #33
articles = []
for entry in d.entries:
    # Is this a real news item with a source? The RSS feed returns images and other stuff
    if 'source' in entry:
        source = createSource(**entry.source)

        articles.append(createArticle(
            title=entry.title,
            description=entry.description,
            source=source,
            date=entry.published
        ))

printExplain("To save several objects, use the batcher")

batcher = ParseBatcher()
batcher.batch_save(articles)

print "Our news sources:"

for source in sources.values():
    printTab(source.title)

print "The news from ", sources.values()[0].title

for new in Article.Query.filter(source=sources.values()[0]):
    printSubTitle(new.title)
    print new.description

printTitle("Conclusion")
print """
#     connection.request('POST', '/1/batch', json.dumps({
#         "requests": requests_list
#         }), {
#            "X-Parse-Application-Id": "AKJFNWcTcG6MUeMt1DAsMxjwU62IJPJ8agbwJZDJ",
#            "X-Parse-Master-Key": "LbaxSV6u64DRUKxdtQphpYQ7kiaopBaRMY1PgCsv",
#            "Content-Type": "application/json"
#          })
#     creation_result = json.loads(connection.getresponse().read())
#     batch_upload_counter += 1
#     print "Batch {} of 4 containing {} _User objects uploaded to Parse.\n".format(
#         batch_upload_counter, len(creation_result))



# upload Ghost objects
batcher = ParseBatcher()
batcher.batch_save(list_of_ghosts_to_upload)
print "\nBatch 1 of 1 containing 50 Ghost objects uploaded to Parse.\n"

print "Program complete.\n"

print "Program time: {} seconds.\n".format(time.time() - program_start_time)








Example #35
from parse_rest.datatypes import Object
from parse_rest.connection import ParseBatcher
from parse_rest.connection import register
register('bR0fkne3E6YiRq4Q6UOU8DQsn5bzorLepjKZpFDz', 'dskQ992bFTvxTszIkMUtx5WmLO4cJKZpr7d0VNoA')

class Deal(Object):
    pass

deals = []

input_file = open("grocerydata.csv")
for line in input_file:
    line = line.split(',')
    deals.append(Deal(name=line[0], store=line[1], price=line[2], discount=line[3], image=line[4]))

# Save once after the loop, in chunks of 50 (the Parse batch limit);
# saving inside the loop would re-save the whole growing list for every row.
batcher = ParseBatcher()
for i in range(0, len(deals), 50):
    batcher.batch_save(deals[i:i+50])
Example #36
	time = meetString[1][:13]
	startTime, endTime = time.split('-')
	startTime += 'M'
	endTime += 'M'
	startTime = datetime.time(datetime.strptime(startTime, "%I:%M%p"))
	endTime = datetime.time(datetime.strptime(endTime, "%I:%M%p"))
	building = ''.join([i for i in meetString[2] if not i.isdigit()])
	roomNumber = filter(str.isdigit, meetString[2])

	return [m, t, w, r, f], startTime, endTime, building, roomNumber

with open('doc.json') as data_file:
	data = json.load(data_file)
data_to_upload = []
for course in range(len(data)):
	current = data[course]
	if current['Term'] == '20151' and current['Meets1'] != '' and 'RTBA' not in str(current['Meets1']):
		if current['DivisionCode'] == 'CC' or current['DivisionName'] == 'SCH OF ENGR & APP SCI: UGRAD' or current['DivisionCode'] == 'BC' or current['DivisionCode'] == 'GS':
				newClass = ParseObject()
				newClass.class_code = current['Course']
				newClass.instructor = current['Instructor1Name']
				newClass.name = current['CourseTitle']
				#call function that gets location, start, and end time
				newClass.days, newClass.startTime, newClass.endTime, newClass.building, newClass.roomNumber = parseMeetString(current['Meets1'])
				data_to_upload.append(newClass)

print "x"
batcher = ParseBatcher()
for x in range(0, len(data_to_upload), 50):
	batcher.batch_save(data_to_upload[x: x+50 if (x+50) < len(data_to_upload) else len(data_to_upload)])
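The conditional in that final slice is redundant: Python slices clamp to the sequence length, so the plain form behaves identically (the same holds for the fixed slice in Example #30):

	batcher.batch_save(data_to_upload[x : x+50])   # slices clamp at len(data_to_upload)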
Example #37
        
    agency.Name = name
    agency.Email = email
    agency.Company = {"__type": "Pointer", "className": "Companies", "objectId": "ATTeW8GRQt"}
    agency.Key = company
    agency.Country = country
    #client.save()
    
    agencieslist.append(agency)
    
    print email
    
    counter += 1
    
    if(counter == 20):
    
        batcher = ParseBatcher()
        batcher.batch_save(agencieslist)
        agencieslist = []

        print "Esperando algunos segundos"

        #esperar un poco
        time.sleep(.1)
        
        counter=0
        
        print "Trabajando ..."
    
print str(len(lines)) + " lines analized"
Example #38
def prepare_R1(m, f, mg, fg, ep, li_eu):
    """
    (males, females, male ghosts, female ghosts)

    Create Round-1 Interactions.

    For now, station Nums, iPad Nums, etc. start at 1, 2, 3, ...,
    but that will be changed so that the correct Nums are chosen 
    and put into lists.

    (It's tricky to do these random simulations because to make it realistic
        I'd have to pull only those objects with array_eventsRegistered 
        containing the eNum. To truly randomize things, I guess I could
        create an event with a random number of men and women every time...
        and while that seems like it's a lot more work, it also seems helpful
        in the long run -- testing will be easier, and the DEV program will
        be closer to the real thing. There's NO reason to rush into getting
        some kind of version of this completed...I've built a version before,
        so I already know I can do it. Now I just need to do it really well.
    """

    # Get the correct class names from the ep = Event Prefix (passed in).
    eventUser_ClassName = ep + "_User" # "zE####_User"
    eventUser_Class = Object.factory(eventUser_ClassName)

    eventIxnR1_ClassName = ep + "R1" # "zE####R1"
    eventIxnR1_Class = Object.factory(eventIxnR1_ClassName)

    ###
    ### Create all other vars and lists needed.
    ###

    # Calculate nums of stations, questions, and iPads from given parameters.
    s = m + mg
    i = s * 2
    q = s

    # Make lists of station nums and iPad nums, to be rotated between subrounds.
    li_staNums = list(x+1 for x in range(s))
    li_m_ipadNums = list(x+1 for x in range(0, s, 1))
    li_f_ipadNums = list(x+1 for x in range(s, 2*s, 1))
    li_qNums = list(x+1 for x in range(s))

    # Split the event user list into males and females; make enumeration useful
    # and list rotation between subrounds possible. Includes "ghosts."
    li_males = li_eu[:m+mg]
    li_females = li_eu[m+mg:]

    # Initiate interaction counter and list-to-upload.
    counter_ixn = 0
    li_R1_obj_to_upload = []

    # Iterate through the subrounds.
    for j in range(s): # s = stations, and also subrounds, as used here

        for k, eu_obj in enumerate(li_males):
            # enumerator goes through the male half (0-m)

            # increment the interaction counter
            counter_ixn += 1
        
            # create one interaction object
            new_ixn_object = eventIxnR1_Class(
                ixNum = counter_ixn,
                subNum = j + 1,
                staNum = li_staNums[k],
                m_userNum = li_males[k].event_userNum,
                f_userNum = li_females[k].event_userNum,
                m_username = li_males[k].username,
                f_username = li_females[k].username,
                m_user_objectId = li_males[k].user_objectId,
                f_user_objectId = li_females[k].user_objectId,
                m_thisEvent_objectId = li_males[k].objectId,
                f_thisEvent_objectId = li_females[k].objectId,
                m_ipadNum = li_m_ipadNums[k],
                f_ipadNum = li_f_ipadNums[k],
                qNum = li_qNums[k]
            )

            # add the object to a list to upload to Parse
            li_R1_obj_to_upload.append(new_ixn_object)

            print(new_ixn_object.m_thisEvent_objectId)

            # add the object to Firebase
            ### INSERT CODE HERE

        ###
        ### Rotate the lists between subrounds (in "for j in range(s)" loop).
        ###   (li_staNum will be iterated through correctly without alteration,
        ###   as will the lists of ipadNums.)
        ###

        # the m list will have its last item put in the front
        li_males = [li_males[-1]] + li_males[:-1]

        # the f list will have its first item put in the back
        li_females = li_females[1:] + [li_females[0]]

        # the qNums list happens to move the first two to the back
        li_qNums = li_qNums[2:] + li_qNums[:2]      

    ### END ITERATIONS ###

    # # Save objects to Firebase, grouped by iPad.
    # # Structure looks like:

    # """

    # "zE####R1_inx_obj_by_iPadNum": {

    #     "ipadNum_####": {

    #         "ixnNum_####": {

    #             "{}".format(li_R1_obj_to_upload.ixnNum): {

    #                 "subNum": li_R1_obj_to_upload.subNum,
    #                 "staNum": li_R1_obj_to_upload.staNum,
    #                 ...      

    #             }, 

    #             ...

    #         }, 

    #         ...
       
    #     }, 

    #     ...
    
    # }

    # """
    
    # # Create references to Firebase.
    # ref_root = Firebase('https://burning-fire-8681.firebaseio.com')
    # ref_R1_ixn_objs_by_iPadNum = ref_root.child(
    #     '{}R1_inx_objs_by_iPadNum'.format(ep)
    #     )

    # # Create references for all iPads, and put them in a dictionary.
    # # In the dictionary, the keys are the ipadNums, and the values are
    # # lists containing the FB reference and a list of interaction objects.
    # # { "i" : [ ref, [ ixn_obj, ixn_obj, ... ] ] }
    # dll_ix_objs = {} 
    # for a in li_m_ipadNums + li_f_ipadNums:
    #     ref_ipadNum = ref_R1_ixn_objs_by_iPadNum.child('{}'.format(a))
    #     # ref_ipadNum.patch(
    #     #     {
    #     #         "something": "goes here"
    #     #     })
    #     print ("iPad {} has been put into Firebase.".format(a))
    #     #li_ref_ipadNum["{}".format(a)] = [ref_ipadNum, []]
    #     #li_ref_ipadNum.append([a, ref_ipadNum, []])
    #     dll_ix_objs[str(a)] = [ref_ipadNum, []]

    # pprint(dll_ix_objs)

    # # Iterate through all objects, adding them to the right place in 
    # # the dictionary, then upload the dictionary into Firebase
    # for ix_obj in li_R1_obj_to_upload:
    #     str_correct_m_iPad = str(ix_obj.m_ipadNum)
    #     str_correct_f_iPad = str(ix_obj.f_ipadNum)
    #     dll_ix_objs[str_correct_m_iPad][1].append(ix_obj)
    #     dll_ix_objs[str_correct_f_iPad][1].append(ix_obj)

    # pprint(dll_ix_objs)

    # # put into Firebase
    # for k,v in dll_ix_objs.iteritems():
    #     ref = Firebase(v[0])
    #     #ref.patch(v[1])
    #     print("List added for iPad with ipadNum = {} at location: {}".format(k, ref))

    # for obj in li_R1_obj_to_upload:
    #     pprint(vars(obj))



    # Save multiple objects to Parse.
    # Call batcher.batch_save on slices of the list no larger than 50.
        # Parse will timeout if 1800 requests are made in 60 seconds,
        # hence the time.sleep(1.67) every 50 objects saved. I could probably
        # get away with sleeping less, but no reason to take chances.
    batcher = ParseBatcher()
    for b in range(counter_ixn/50 + 1):
        lo = 50*b
        hi = min(50 * (b+1), counter_ixn)
        batcher.batch_save(li_R1_obj_to_upload[lo:hi])

        sys.stdout.write("\r{} of {} new inx's uploaded ({}{})".format(
            50 + (50*b), 
            counter_ixn, 
            int(round((50*(b+1)*100.0)/counter_ixn, 0)), 
            "%"
            ))
        sys.stdout.flush() # must be done for it to work (why?)
        # time.sleep(1.67) # explained above

    sys.stdout.write("\n") # move the cursor to the next line after we're done
Example #39
from parse_rest.connection import register
from parse_rest.datatypes import Object
import re

register("8fbBNwG2gvwFskbc3SjlO34qmidJkF3pCVPTuVc0", "qNgE46H7emOYu3wsuRLGpMSZVeNxCUfCP81hFSxz", master_key="HhJryin0t8OMP2mOBC3UkJKqyIDFxXMfVGFLtxCq")
class Media(Object):
	pass

medias = []
media = Media.Query.all().limit(1000)
for m in media:
	if hasattr(m, "desc"):
		m.desc = re.sub("<[br|img].*>", "", m.desc)
		print m.desc
		medias.append(m)

def chunks(l, n):
    """ Yield successive n-sized chunks from l.
    """
    for i in xrange(0, len(l), n):
        yield l[i:i+n]

from parse_rest.connection import ParseBatcher
batcher = ParseBatcher()
for chunk in chunks(medias, 50):
	batcher.batch_save(chunk)
Example #40
				break
	except Exception, e:
		country=''
	
	url = 'https://maps.googleapis.com/maps/api/place/nearbysearch/json?rankby=prominence&location='+lat+','+lng+'&key='+ keys['API_KEY']+'&radius=500000'
	response = urllib2.urlopen(url)
	obj2 = json.loads(response.read())['results']
	register(keys['APPLICATION_ID'], keys['REST_API_KEY'], master_key=keys['MASTER_KEY'])
	try:
		for landmark in obj2:
			parse(city.replace('+', ' '), landmark, country) 
	except Exception,e:
		print city, "Error:", e
	global videos
	if len(videos)>0:
		batcher = ParseBatcher()
		batcher.batch_save(videos)
		videos=[]

if __name__ == "__main__":
	with open("keys.txt") as myfile:
		for line in myfile:
			name, var = line.partition("=")[::2]
			keys[str(name.strip())] = str(var.strip())
	

	cities = []
	done = ['Goa', 'Kathmandu', 'Oslo', 'St Petersburg', 'Doha', 'Bucharest', 'Budapest', 'Stockholm', 'Al Ain', 'Abu Dhabi', 'Glasgow', 'Birmingham', 'Montreal', 'Chicago', 'Lisbon', 'Dallas', 'Bangkok', 'Los Angeles', 'Taipei', 'Milan', 'Seoul', 'Hong Kong', 'Kuala Lumpur', 'Florida', 'Washington', 'San Francisco', 'Osaka', 'Las Vegas', 'Damascus', 'Madina', 'Mecca', 'Santiago', 'Sao Paulo', 'Brasilia', 'Colombia', 'Interlaken', 'Candy', 'Bangalore', 'Wellington', 'Pune', 'Sharjah', 'Fujairah', 'Copenhagen', 'Amsterdam', 'London', 'Tripoli', 'Buenos Aires', 'Ecuador', 'Caracas', 'El Salvador', 'Nairobi', 'Ontario', 'Riyadh', 'Johannesburg', 'Cape Town', 'Colombo', 'Tibet', 'Bhutan', 'Novosibirsk', 'Saint Petersburg', 'Perth', 'Adelaide', 'Melbourne', 'Sydney', 'Tehran', 'Muscat', 'Brussels', 'Bali', 'Honolulu', 'Edinburgh', 'Wellington', 'Jakarta', 'Zurich', 'Dublin', 'Miami', 'Shanghai', 'Istanbul', 'Cairo', 'Prague', 'Vienna', 'Rio de Janeiro', 'Berlin', 'Tokyo', 'Mexico City', 'Munich', 'Boston', 'Baghdad', 'Warsaw', 'Johannesburg', 'Moscow', 'Mumbai', 'Delhi', 'Kolkata', 'Chennai', 'Lahore', 'Karachi', 'Dammam', 'Barcelona', 'Rome', 'Egypt', 'Cape Town', 'Krakow', 'Brazil', 'Florence', 'Peru', 'Paris', 'Canberra', 'Hamburg', 'Venice', 'Sydney', 'Rome', 'Maldives', 'Singapore']
	cities = list(set(cities)-set(done))
	for city in cities:
		landmarks(city, '')
Example #41
 def __init__(self, delay):
     self.batcher = ParseBatcher()
     self.objects_to_save = []
     self.delay = delay
     self.save_count = 0
Example #42
def setup_questions(q):

    """
    """

    # Start a function timer.
    function_start_time = time.time()

    # We must subclass Object for the class names we want to use.
    class Question(Object):
        pass

    # Print the "function is starting" message.
    # (Later, I'd like to make a decorator that does this.)
    print("\nFunction \"setup_questions({})\" is now running.".format(q))
    
    # Instantiate the list to upload.
    list_Question_objects_to_upload = []

    # Get a list of "q" questions and a list of "q" 4-answer lists.
    list_q_question_strings = get_q_question_strings(q)
    list_q_answer_lists = get_q_answer_lists(q)

    # # Initialize the question counter.
    # question_counter = 0

    # Create new Question objects and put them into a big ol' list.
    for index, question_str in enumerate(list_q_question_strings):

        new_Question_object = Question(
            qNum = index + 1,
            qText = question_str,
            liAnswers = list_q_answer_lists[index]
            )

        list_Question_objects_to_upload.append(new_Question_object)

    # Upload the list of new iPad objects to Parse.
    # The Parse batch request limit is 50, 
    #     and the Parse request limit is 30/sec = 1800/min.
    # Other functions are being run surrounding this, so to avoid going over
    #     the 1800/min limit, call time.sleep(q/30 - time_spent_uploading). 

    
    # Create a ParseBatcher object.
    batcher = ParseBatcher()

    print("\n{} Question objects are being uploaded...".format(q))

    # Start an "uploading" timer.
    uploading_start_time = time.time()

    # Call batcher.batch_save on slices of the list no larger than 50.
    for k in range(q/50 + 1):
        ### lower = 50*k
        ### upper = 
        try:
            batcher.batch_save(list_Question_objects_to_upload[
                50*k : 50*(k + 1)
                ])
        except:
            batcher.batch_save(list_Question_objects_to_upload[
                50*k : q
                ])

    # Calculate time spent uploading and how long to sleep for.
    time_spent_uploading = time.time() - uploading_start_time
    how_long_to_sleep_for = (q/30.0) - time_spent_uploading
    how_long_to_sleep_for_rounded = round(how_long_to_sleep_for, 3)
    print ("\nUploading took {} seconds.".format(round(time_spent_uploading, 3)))

    # Sleep.
    for k in range(1, 101, 1):
        sys.stdout.write("\r{}{} of {}s sleep complete.".format(k, "%", 
            how_long_to_sleep_for_rounded)
            ) # \r puts cursor back to start of line i/o onto the next line
        sys.stdout.flush() # this must be called to refresh the line
        time.sleep(how_long_to_sleep_for / 100.0)
    sys.stdout.write("\n") # move the cursor to the next line

    # Print results.
    function_total_time = round(time.time() - function_start_time, 3)

    print ("\n\
            \n***********************************************************************\
            \n*****                                                             *****\
            \n*****   Function \"setup_questions({})\" ran in {} seconds.   *****\
            \n*****                                                             *****\
            \n***********************************************************************\
            \n_______________________________________________________________________\
            \n=======================================================================\
            \n\n\
            ".format(q, function_total_time))
Example #43
 def tearDownClass(cls):
     '''delete all GameScore and Game objects'''
     # `chain` comes from the standard library: from itertools import chain
     ParseBatcher().batch_delete(chain(cls.scores, [cls.game]))
Example #44
        try:
            print "-- " + i.Telephone
            i.Telephone = i.Telephone.splitlines()[0]
        except:
            if hasattr(i, "Telephone"):
                i.Telephone = ""
        
            
        try:
            i.Email = i.Email.splitlines()[0]
        except:
            if hasattr(i, "Email"):
                i.Email = ""
            
        cc += 1
            
        #i.save()
        
        
        if len(batchclients) == 50:
            batcher = ParseBatcher()
            batcher.batch_save(batchclients)
            
            batchclients = []
        
    n = len(res)
    counter += n


print len(clients)
Example #45
 def tearDown(self):
     '''delete additional helper objects created in perticular tests'''
     if self.test_objects:
         ParseBatcher().batch_delete(self.test_objects)
         self.test_objects = []
Example #46
def setup_ipads(i, purchaseDate = time.strftime("%Y.%m.%d")):

    """ Create i objects of the class "IPad" and upload them to Parse with ParsePy.

          *  'purchaseDate' is a string formatted like this: "2015.04.18".

          *   WARNING: Program currently assumes there are no existing IPad objects,
              so it starts at iPadNum = 1. Will be fixed to query the existing IPad object with
              the highest iPadNum and begin from that number + 1.                                     
    """

    # Start a function timer.
    function_start_time = time.time()

    # We must subclass Object for the class names we want to use.
    class IPad(Object):
        pass

    # Print the "function is starting" message.
    # (Later, I'd like to make a decorator that does this.)
    print ("\
        \n\n*********************************************************\
        \n*****                                               *****\
        \n*****   Function \"setup_ipads\" is now running.   *****\
        \n*****                                               *****\
        \n*****                                               *****\
        \n\n{} IPad objects are being created...".format(i))

    # Instantiate the list to upload.
    list_IPad_objects_to_upload = []

    # Get a (fictitious) list of i serial numbers for our new IPad objects.
    list_iPadSerialNumbers = get_s_ipad_serial_numbers(i)

    # Create new iPad objects and put them into a big ol' list.
    for index, serial_number in enumerate(list_iPadSerialNumbers):
        
        new_IPad_object = IPad(
            ipNum = index + 1,
            ipSerialNum = serial_number,
            purchaseDate = purchaseDate
            )
        list_IPad_objects_to_upload.append(new_IPad_object)

    print("Done.")

    # Upload the list of new iPad objects to Parse.
        # The Parse batch request limit is 50, and the Parse request limit is 30/sec = 1800/min.
        # Other functions are being run before and/or after this, so to avoid going over
        #     the 1800/min limit, call time.sleep(i/30 - time_spent_uploading). 
    
    # Create a ParseBatcher object.
    batcher = ParseBatcher()

    print ("\n{} IPad objects are being uploaded...".format(i))

    # Start an "uploading" timer.
    uploading_start_time = time.time()

    # Call batcher.batch_save on slices of the list no larger than 50.
    for k in range(i/50 + 1):
        ### lower = 50*k
        ### upper = 
        try:
            batcher.batch_save(list_IPad_objects_to_upload[
                50*k : 50*(k + 1)
                ])
        except:
            batcher.batch_save(list_IPad_objects_to_upload[
                50*k : i
                ])

    # Calculate time spent uploading and how long to sleep for.
    time_spent_uploading = round(time.time() - uploading_start_time, 3)
    how_long_to_sleep_for = (i/30.0) - time_spent_uploading
    how_long_to_sleep_for_rounded = round(how_long_to_sleep_for, 3)
    print("Done.\n")
    print("{} IPad objects uploaded in {} seconds.\n".format(i, time_spent_uploading))

    # Sleep.
    for k in range(1, 101, 1):
        sys.stdout.write("\r{}{} of {}s sleep complete.".format(k, "%", how_long_to_sleep_for_rounded)) # \r puts the cursor back to the start of the line i/o onto the next line
        sys.stdout.flush()
        time.sleep(how_long_to_sleep_for / 100.0)
    sys.stdout.write("\n") # move the cursor to the next line

    # Print results.
    function_total_time = round(time.time() - function_start_time, 3)

    print_str = "*****   Function \"setup_ipads({})\" ran in {} seconds.   *****".format(i, function_total_time)
    ast_str = "*" * (len(print_str))
    space_str = "*****   {}   *****".format(" "*(len(print_str) - 16))
    und_str = ("_" * (len(print_str))) + "\n" + ("=" * (len(print_str)))

    print ("\n\n{}\n{}\n{}\n{}\n{}\n{}\n\n".format(space_str, space_str, print_str, space_str, ast_str, und_str))
Example #47
def postEvents(events):
    batcher = ParseBatcher()
    batcher.batch_save(events)

file_names = [
    'data/2014-2 UNLOCODE CodeListPart3.csv'
]
for file_name in file_names:
    with open(file_name, 'rb') as csvfile:
        reader = csv.reader(csvfile, delimiter=',', quotechar='"')
        for row in reader:
            country_codes.add(row[1])

# Create regions for countries that have subdivisions
regions = []
with open('data/2014-2 SubdivisionCodes.csv', 'rb') as csvfile:
    reader = csv.reader(csvfile, delimiter=',', quotechar='"')
    for row in reader:
        region = Region(countryISOCode=row[0], regionISOCode=row[1])
        regions.append(region)
        country_codes.discard(row[0])

# Create regions for countries that dont have subdivisions
for country_code in country_codes:
    region = Region(countryISOCode=country_code)
    regions.append(region)

# Save all the created regions to parse
batcher = ParseBatcher()
if len(regions) > 0:
    for index, chunk in enumerate(chunks(regions, 50)):
        print "Saving", index * 50, "-", min((index + 1) * 50 - 1,
                                             len(regions))
        # batcher.batch_save(chunk)
        time.sleep(args.delay)
	# if we're going too fast for the request limit of 1800 per minute, slow down
	# Test 1: Slept after batch 35 for 47.557 seconds. Success (no errors, all batches saved).
	# Test 2: Slept after batch 35 for 49.203 seconds. Success.
	# Test 3: Slept after batch 35 for 45.496 seconds. Success.

	time_uploading_before_sleep = time.time() - batch_uploading_start_time
	if (time_uploading_before_sleep < 60) and (interaction_counter > 1799):
		print "\nSleeping for {} seconds.".format(round((60 - time_uploading_before_sleep), 3))
		pause_time = time.time()
		time.sleep(60 - time_uploading_before_sleep)
		print "\nUploading will now resume.\n"
		resume_time = time.time()

	# save these 50 interactions to Parse
	batcher = ParseBatcher()
	batcher.batch_save(interactions_list_to_be_saved)
	print "batch " + str(subround + 1) + " of 50 has been saved."



	# rotate lists
	# (I'm getting an error that says "Slice is not supported for now.",
	# so I've had to do something slightly more complicated.)

	# males: take the last, put in front 
	# (guys are moving toward increasing station nums)
	all_males_at_event.insert(0, all_males_at_event[-1])
	all_males_at_event.pop(-1) # default is -1, but I left it in for clarity

	# females: take the first, put in back