def check_if_current_goal_is_reached(type_of_activity: str,
                                     coll_of_activity: pymongo.collection,
                                     data_of_activities: pymongo.collection):
    number_of_reached_goals = 0
    for param in ["duration", "distance"]:
        current_goal = coll_of_activity.find_one(
            {'activity': type_of_activity})['objective_' + param]
        current_value = sum(
            sum_parameter(param, type_of_activity, data_of_activities))
        if current_value >= current_goal:
            number_of_reached_goals += 1
            # Bump the objective to the next milestone above the current total.
            next_goal = list(
                filter(lambda y: y > current_value, road_to_master))[0]
            coll_of_activity.update_one(
                {'activity': type_of_activity},
                {"$set": {'objective_' + param: next_goal}})
    if number_of_reached_goals > 0:
        msg = QMessageBox()
        msg.setWindowTitle("Achievements")
        msg.setText(
            "You have completed {} challenges".format(number_of_reached_goals))
        msg.setIcon(QMessageBox.Information)
        msg.setStandardButtons(QMessageBox.Ok)
        msg.exec_()

def change_offer_status(col: pymongo.collection, offer_id: int,
                        closed_date=None, was_executed=None):
    """ Changes closed_date and/or was_executed """
    try:
        if col.find_one({'offer_id': offer_id}):
            if closed_date:
                col.update_one({'offer_id': offer_id},
                               {"$set": {'closed_date': closed_date}})
            if was_executed:
                col.update_one({'offer_id': offer_id},
                               {"$set": {'was_executed': was_executed}})
        else:
            print('Offer not found')
    except Exception:
        print('Offer status could not be changed')

def new_credit(col: pymongo.collection, credit_data: dict):
    """ Saves credit data in the collection specified. """
    try:
        col.insert_one(credit_data)
    except Exception:
        print('Credit could not be saved')

def new_offer(col: pymongo.collection, offer_data: dict):
    """ Saves offer data in the collection specified. """
    try:
        # TODO: check whether an offer with this offer_id already exists before inserting.
        col.insert_one(offer_data)
    except Exception:
        print('Offer could not be saved')

def mongo_multi_column(docs: list, collect: collection) -> namedtuple:
    """
    docs == [{"time": datetime, <symbol>: float}, ]
    :param docs:
    :param collect:
    :return:
    """
    duplicate_count = 0  # not used at the moment; would need an extra find before update
    new_doc_count = 0
    modified_count = 0
    result = namedtuple('result',
                        ['new_doc_count', 'modified_count', 'duplicate_count'])
    if not docs:
        logger.warning("empty docs list")
        return result(new_doc_count, modified_count, duplicate_count)
    for doc in docs:
        time = doc["time"]
        document = dict(doc)
        logger.debug("time= {}, document= {}".format(time, document))
        # Upsert by timestamp so each time bucket holds one document with many symbols.
        update_result = collect.update_one({'time': time},
                                           {'$set': document},
                                           upsert=True)
        if update_result.upserted_id is not None:
            new_doc_count += 1
        modified_count += update_result.modified_count
    return result(new_doc_count, modified_count, duplicate_count)

def update_history(docs: list, collection_: collection) -> namedtuple:
    """
    input like:
    [{'rhodium': 1015.0, 'time': datetime.datetime(2017, 3, 24, 0, 0, tzinfo=<UTC>)},
     {'iron-ore-price': 88.35, 'time': datetime.datetime(2017, 3, 24, 0, 0, tzinfo=<UTC>)},
     {'copper-price': 5804.76, 'time': datetime.datetime(2017, 3, 24, 0, 0, tzinfo=<UTC>)}]

    collection like timeseries DB, modify or add field in collection for time
    (which is _id in DB)
    :param docs: list({'time': datetime, <commodity_name>: float})
    :param collection_: DATABASE['commodities']
    :return: namedtuple('update_history', ['matched_count', 'modified_count', 'upserted'])
    """
    result_obj = namedtuple('update_history',
                            ['matched_count', 'modified_count', 'upserted'])
    matched_count, modified_count, upserted = 0, 0, 0
    for doc in docs:
        if not allowed_to_insert(doc, collection_):
            continue
        _id = doc.pop('time')  # remove key 'time' and use its value as _id
        keys = list(doc.keys())
        # convert into metric units
        doc[keys[0]] = convert_bushel_tonn(keys[0], doc[keys[0]])
        update_result = collection_.update_one({'_id': _id},
                                               {'$set': doc},
                                               upsert=True)
        matched_count += update_result.matched_count
        modified_count += update_result.modified_count
        logger.debug('updating in DB doc= {}'.format(doc))
        if update_result.upserted_id is not None:
            upserted += 1
    result = result_obj(matched_count=matched_count,
                        modified_count=modified_count,
                        upserted=upserted)
    logger.info('"update_history" result= {}'.format(result))
    return result

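# Hedged usage sketch for update_history. The connection URI and the database
# and collection names below are assumptions, not taken from the original code,
# and it presumes allowed_to_insert, convert_bushel_tonn and logger are
# available in the surrounding module.
import datetime
from pymongo import MongoClient

commodities = MongoClient("mongodb://localhost:27017")["market"]["commodities"]
docs = [
    {'rhodium': 1015.0,
     'time': datetime.datetime(2017, 3, 24, tzinfo=datetime.timezone.utc)},
    {'copper-price': 5804.76,
     'time': datetime.datetime(2017, 3, 24, tzinfo=datetime.timezone.utc)},
]
stats = update_history(docs, commodities)
print(stats.matched_count, stats.modified_count, stats.upserted)
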
def change_credit_status(col: pymongo.collection, credit_id: int,
                         end_date=None, earn_money=None, paid_fees=None):
    """ Changes end_date, earn_money and/or paid_fees """
    try:
        if col.find_one({'credit_id': credit_id}):
            if end_date:
                col.update_one({'credit_id': credit_id},
                               {"$set": {'end_date': end_date}})
            if earn_money:
                col.update_one({'credit_id': credit_id},
                               {"$set": {'earn_money': earn_money}})
            if paid_fees:
                col.update_one({'credit_id': credit_id},
                               {"$set": {'paid_fees': paid_fees}})
        else:
            print('Credit not found')
    except Exception:
        print('Credit status could not be changed')

def new_wallet_snapshot(col: pymongo.collection, doc_data: dict):
    """
    Parameters
    ----------
    col : pymongo.collection
        Collection where wallet snapshots are stored.
    doc_data : dict
        Dictionary with keys:
        ['uid', 'timestamp', 'currency', 'amount', 'available', 'rate_curr_usd']

    Returns
    -------
    None.
    """
    try:
        col.insert_one(doc_data)
    except Exception:
        print('Snapshot could not be saved')

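# Hedged usage sketch for new_wallet_snapshot; the connection URI, the
# database/collection names and the field values are illustrative assumptions.
from datetime import datetime, timezone
from pymongo import MongoClient

wallets = MongoClient("mongodb://localhost:27017")["finance"]["wallet_snapshots"]
new_wallet_snapshot(wallets, {
    'uid': 42,
    'timestamp': datetime.now(timezone.utc),
    'currency': 'BTC',
    'amount': 0.5,
    'available': 0.4,
    'rate_curr_usd': 27000.0,
})
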
def bulk_insert_documents(db_collection: collection, documents: list):
    """
    Inserts multiple documents into a collection.
    :param db_collection: the collection to insert to
    :param documents: the list of documents to insert
    :return: a list of ObjectId for the inserted documents
    """
    inserted_documents = db_collection.insert_many(documents)
    return inserted_documents.inserted_ids

def query_document(db_collection: collection, filters: dict = None):
    """
    Queries a collection for the first document that matches the filters.
    :param db_collection: the collection to make the query in
    :param filters: the dict containing key-value pairs to filter with
    :return: a dict representing the found document
    """
    document = db_collection.find_one(filters)
    return document

def insert_document(db_collection: collection, document: dict):
    """
    Inserts a single document into a collection.
    :param db_collection: the collection to insert to
    :param document: the document to insert
    :return: an ObjectId for the inserted document
    """
    inserted_document = db_collection.insert_one(document)
    return inserted_document.inserted_id

def query_all_documents(db_collection: collection, filters: dict = None):
    """
    Queries a collection for all the documents that match the filters.
    :param db_collection: the collection to make the query in
    :param filters: the dict containing the key-value pairs to filter with
    :return: a cursor over the found documents
    """
    documents = db_collection.find(filters)
    return documents

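# Hedged usage sketch tying the four generic helpers above together; the
# connection URI and database/collection names are assumptions.
from pymongo import MongoClient

users = MongoClient("mongodb://localhost:27017")["app"]["users"]
new_id = insert_document(users, {"name": "Ada", "role": "admin"})
new_ids = bulk_insert_documents(users, [{"name": "Bob"}, {"name": "Eve"}])
admin = query_document(users, {"role": "admin"})
for doc in query_all_documents(users, {"name": "Bob"}):
    print(doc)
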
def get_random_word(collection: pymongo.collection,
                    should_be_high_frequency=False) -> dict:
    '''
    Given the collection instance, retrieve details for a random word.
    '''
    print("[VOCABUILDER] Getting random word...")
    if should_be_high_frequency:
        words = [
            x for x in collection.find(
                {"is_high_frequency_word": should_be_high_frequency})
        ]
    else:
        words = [x for x in collection.find()]
    # randint is inclusive on both ends, so cap the index at len(words) - 1.
    random_index = random.randint(0, len(words) - 1)
    word_data = words[random_index]
    print(f"[VOCABUILDER] Found `{word_data.get('word')}`")
    return word_data

def credit_status(col: pymongo.collection, credit_id: int):
    """ Checks status, returns the credit's end_date (or 'Not_Found'). """
    try:
        credit = col.find_one({'credit_id': credit_id})
        end_date = credit['end_date']
        print(f"Credit's end_date value is {end_date}")
        return end_date
    except Exception:
        print('Credit not found')
        return 'Not_Found'

def insert_record_list(list_of_records: list, collection: pymongo.collection,
                       threshold: float) -> int:
    """
    Insert all records in a list into a pymongo collection, merging records
    with records in the collection using astropy coordinate matching
    :param list_of_records:
    :param collection:
    :param threshold:
    :return: count of records inserted
    """
    final_record_list = []
    for new_record in list_of_records:
        # Generate mongo query to find objects whose coordinates are within the
        # bounds of the threshold (a square area)
        # (limits the number of comparisons while using actual coordinate matching)
        ra_min = new_record[COORDS_KEY]["ra"]["min"] - threshold
        ra_max = new_record[COORDS_KEY]["ra"]["max"] + threshold
        dec_min = new_record[COORDS_KEY]["dec"]["min"] - threshold
        dec_max = new_record[COORDS_KEY]["dec"]["max"] + threshold
        query = {
            "%s.%s.%s" % (COORDS_KEY, "ra", "min"): {"$lte": ra_max},
            "%s.%s.%s" % (COORDS_KEY, "ra", "max"): {"$gte": ra_min},
            "%s.%s.%s" % (COORDS_KEY, "dec", "min"): {"$lte": dec_max},
            "%s.%s.%s" % (COORDS_KEY, "dec", "max"): {"$gte": dec_min}
        }
        # Compare against existing records matching the query
        # (since threshold is a radius, not the side length of a square)
        for existing_record in collection.find(query):
            if should_merge_by_distance(new_record, existing_record, threshold):
                new_record = merge_records(new_record, existing_record)
                collection.delete_one(existing_record)
        final_record_list.append(new_record)
    inserted_count = insert_records(collection, final_record_list)
    return inserted_count

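# Hedged sketch of the record shape insert_record_list appears to expect,
# inferred from how the bounding-box query above is built. COORDS_KEY's value
# and the surrounding fields are assumptions, not a confirmed schema.
COORDS_KEY = "coords"  # assumed value of the module-level constant
example_record = {
    COORDS_KEY: {
        "ra": {"min": 150.01, "max": 150.03},
        "dec": {"min": 2.20, "max": 2.22},
    },
    # ...plus whatever catalog fields merge_records combines
}
# inserted = insert_record_list([example_record], catalog_collection, threshold=0.001)
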
def get_previous_value(commodity: str, current_time: datetime,
                       collection_: collection) -> dict:
    """
    :param commodity:
    :param current_time:
    :param collection_:
    :return: dict of previous data for commodity
        { "_id" : datetime, 'commodity': 185.66 }
    """
    doc = collection_.find_one({commodity: {'$exists': True},
                                '_id': {'$lt': current_time}},
                               projection={commodity: True},
                               sort=[('_id', -1)],
                               limit=1)
    logger.debug('previous data for {} = {}'.format(commodity, doc))
    return doc

def offer_status(col: pymongo.collection, offer_id: int):
    """ Checks status, returns a tuple. """
    try:
        offer = col.find_one({'offer_id': offer_id})
        closed_date = offer['closed_date']
        was_executed = offer['was_executed']
        print(f"Offer's closed_date value is {closed_date}")
        print(f"Offer's was_executed value is {was_executed}")
        status = (closed_date, was_executed)
    except Exception:
        print('Offer not found')
        status = ('Not_Found', 'Not_Found')
    return status

def insert_records(collection: pymongo.collection, record_list: list) -> int:
    """
    Inserts many records into a Mongo DB.

    :param collection: Mongo collection to insert into
    :param record_list: List of records (dicts) to insert
    :return: Number of records successfully written
    """
    log.info('Inserting %d records... ' % len(record_list))
    try:
        insert_result = collection.insert_many(record_list)
        return len(insert_result.inserted_ids)
    except pymongo.errors.OperationFailure as of:
        log.error("Insertion of %d records failed..." % len(record_list))
        log.error("%s" % str(of))
        exit(1)

def get_sub_and_sol_of_day(Record: pymongo.collection, username: str,
                           date: datetime) -> Tuple[int, int]:
    all_record = []
    records = Record.find({
        "username": username,
        "date": {
            "$gte": date,
            "$lt": date + datetime.timedelta(days=1)
        }
    })
    for record in records:
        all_record.append(
            (record["submission"], record["solved"], record["date"]))
    # Return the counts from the latest record of the day, if any.
    all_record.sort(key=lambda r: r[2])
    if all_record:
        return all_record[-1][0], all_record[-1][1]
    return 0, 0

async def triggercookieclaim(message: discord.Message, ax: pymongo.collection,
                             bot: commands.Bot):
    if len(message.attachments) == 1:
        msg = await message.reply(
            "Thanks for the screenshot. Please wait while we verify the screenshot.",
            mention_author=True)
        await msg.add_reaction("🍪")
        await msg.add_reaction("👎")
        try:
            def check2(reaction1: discord.Reaction, user1: discord.Member):
                approved_roles = [
                    "Admin (Discord)", "Mod (Discord)", "Admin (Mindustry)",
                    "Mod (Mindustry)", "Admin (Minecraft)", "Mod (Minecraft)"
                ]
                has_perms = False
                for role in user1.roles:
                    if str(role) in approved_roles:
                        has_perms = True
                return has_perms and (str(reaction1.emoji) in ["🍪", "👎"]) \
                    and reaction1.message == msg

            reaction2, user2 = await bot.wait_for('reaction_add',
                                                  timeout=3600.0 * 24,
                                                  check=check2)
            duuid = message.author.id
            if str(reaction2.emoji) == "🍪":  # approved
                balance = 0
                if ax.find_one({"duuid": duuid}) is None:
                    ax.insert_one({"duuid": duuid, "ax": 0})
                else:
                    balance = ax.find_one({"duuid": duuid})["ax"]
                ax.find_one_and_replace({"duuid": duuid},
                                        {"duuid": duuid, "ax": balance + 10})
                msg_react = await message.reply(
                    f"Claim approved by {user2.mention}. 10{ej.ax_emoji} awarded to {message.author.mention}."
                    f"\nYou now have {balance + 10} {ej.ax_emoji}.",
                    mention_author=True)
                await msg_react.add_reaction(ej.blob_emoji)
            else:
                await message.reply(f"Claim rejected by {user2.mention}.",
                                    mention_author=True)
        except asyncio.TimeoutError:
            await message.reply(
                "Claim not processed, please ping a moderator.",
                mention_author=True)
    else:
        await message.reply("Please attach 1 screenshot for review.",
                            mention_author=True)

def longest_continuous_series(collection: pymongo.collection) -> int:
    date_daily_activity = [
        daily_post["time"] for daily_post in collection.find({}).sort("time")
    ]
    difference_between_activity = [
        (date_daily_activity[i + 1] - date_daily_activity[i]).days
        for i in range(len(date_daily_activity[:-1]))
    ]
    max_days = tmp = 0
    for diff in difference_between_activity:
        if diff in [0, 1]:
            tmp += 1
        else:
            max_days = max(max_days, tmp)
            tmp = 0
    # Account for a streak that runs through to the most recent activity.
    return max(max_days, tmp)

def sum_parameter(param: str, type_of_activity: str,
                  coll_of_activity: pymongo.collection,
                  period: int = 1) -> list:
    value_of_param = [
        daily_post[param] for daily_post in coll_of_activity.find({
            "activity": type_of_activity
        }).sort("time")
    ]

    def divide_list(list_to_divide: list, n: int):
        # Yield successive chunks of n values.
        for i in range(0, len(list_to_divide), n):
            yield list_to_divide[i:i + n]

    chunks = list(divide_list(value_of_param, period))
    sum_in_duration = [sum(chunk) for chunk in chunks]
    return sum_in_duration

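# Hedged usage sketch for sum_parameter: with period=7 the per-day values are
# grouped into consecutive 7-day chunks and summed, giving weekly totals.
# The connection URI, database and collection names are assumptions.
from pymongo import MongoClient

activities = MongoClient("mongodb://localhost:27017")["tracker"]["daily_activities"]
weekly_distance = sum_parameter("distance", "running", activities, period=7)
print(weekly_distance)  # e.g. [32.5, 41.0, 28.7]
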
def _try(coll: collection, args: dict):
    try:
        coll.insert_one(args)
    except errors.DuplicateKeyError:
        print(f"{PrintColors.FAIL} \"{args['_id']}\" key already exists, "
              f"skipped...{PrintColors.ENDC}")

def get_all_words(collection: pymongo.collection) -> list:
    '''
    Given the collection instance, retrieve all words from it.
    '''
    return [_delete_key_from_dict(word, "_id") for word in collection.find()]