def fetch_members_for_list(self, list_id):
    """Fetch every member of the given mailing list, upsert each into
    the Member collection, and store the resulting member-id list on
    the matching List document.

    :param list_id: id of the list to refresh members for
    """
    member_ids = []
    for member in self.mw.get_members(list_id):
        # upsert keyed on member_id so repeated fetches update in place
        Member.objects(member_id=member["member_id"]).update_one(
            upsert=True,
            set__email=member["email"],
            set__updated_at=datetime.utcnow(),
        )
        member_ids.append(member["member_id"])
    List.objects(list_id=list_id).update(
        set__members_euid=member_ids,
        set__updated_at=datetime.utcnow(),
    )
def get_lists(self):
    """Return this user's active lists as dicts of the form
    {"id": <list_id>, "name": <name>}."""
    active_lists = List.objects(user_id=self.user_id, active=True)
    return [
        {"id": entry["list_id"], "name": entry["name"]}
        for entry in active_lists
    ]
def create_list(list_name, user):
    """Persist a new List named *list_name* owned by *user*; return it."""
    created = List(list_name=list_name, user=user)
    db.session.add(created)
    db.session.commit()
    return created
def create_list(self, wlist):
    """POST *wlist* to the Wunderlist lists endpoint and return the
    newly created list wrapped in a List object.

    :param wlist: object exposing get_json() with the list payload
    :returns: List built from the decoded response body

    NOTE(review): the HTTP status is not checked; a failed request will
    only surface when the body fails to parse as JSON — confirm callers
    are fine with that.
    """
    self.service.url = WunderList.LISTS_URL
    self.service.json = wlist.get_json()
    self.service.method = 'POST'
    prepared = self.service.prepare()
    response = self.session.send(prepared)
    # Fix: removed leftover `print response` debug statement — it spammed
    # stdout and is a syntax error under Python 3.
    payload = json.loads(response.content)
    return List(payload)
def add_new_list(user_id, list_name):
    """Insert a List row for *user_id* with *list_name* and return it."""
    row = List(user_id=user_id, list_name=list_name)
    db.session.add(row)
    db.session.commit()
    return row
def form_segment(self, node_oid):
    """Create the segment of members applicable to a drip-campaign node
    and record it in a new Segment document.

    Two cases:
      1. node is the initial node -> the segment is the whole list
      2. otherwise -> union of euids routed to this node by the triggers
         on the previous nodes' segments

    :param node_oid: object id of the Node to build a segment for
    """
    # create the Segment doc first so its oid can be embedded in the
    # segment name and linked from the node
    new_segment = Segment()
    new_segment.save()
    name = "%s_seg_%s" % (self.PREFIX, new_segment.id)
    node = Node.objects(id=node_oid)[0]
    list_id = DripCampaign.objects(id=node["drip_campaign_id"])[0]["list_id"]
    node.update(set__segment_oid=new_segment.id, set__updated_at=datetime.utcnow())

    # gather all users that apply for this node after triggers on previous nodes
    all_euids = set()
    if node["initial"]:
        all_euids = set(List.objects(list_id=list_id)[0]["members_euid"])
    else:
        for trg in Trigger.objects(node_to=node_oid):
            for euids, to_node_oid in self.segment_by_triggers(trg["node_from"]):
                if to_node_oid == node_oid:
                    all_euids.update(set(euids))

    # intersect euids with the current state of the list — members may
    # have been removed from the list since the previous email
    self.fetch_members_for_list(list_id)
    all_euids = all_euids & set(List.objects(list_id=list_id)[0]["members_euid"])
    all_euids = list(all_euids)

    # apply the user list to the segment; if it is empty, save only the
    # meta info and don't touch mailchimp at all
    if all_euids:
        segment_id = self.mw.create_segment(list_id, name)
        self.mw.update_segment_members(list_id, segment_id, all_euids)
    else:
        segment_id = None
    # Fix: members_euid previously lacked the set__ prefix, silently
    # relying on mongoengine's implicit $set; made explicit for
    # consistency with every other update kwarg in this method.
    new_segment.update(set__segment_id=segment_id,
                       set__name=name,
                       set__members_euid=all_euids,
                       set__updated_at=datetime.utcnow())
def get_lists(self):
    """GET every list from the Wunderlist API and return them wrapped
    as List objects."""
    self.service.url = WunderList.LISTS_URL
    self.service.method = 'GET'
    prepared = self.service.prepare()
    raw = self.session.send(prepared)
    parsed = json.loads(raw.content)
    return [List(item) for item in parsed]
def update_lists(self):
    """Sync stored lists with the lists currently on the account.

    Three cases:
      * in current but not in db  -> saved as active
      * in both                   -> stale db copy deleted, re-saved active
      * in db but not in current  -> marked inactive

    Returns all active lists (via get_lists()).
    """
    current_lists = self.mw.get_lists()
    current_ids = {lst["list_id"] for lst in current_lists}
    previous_ids = {lst["list_id"] for lst in List.objects(user_id=self.user_id)}

    # deactivate everything that disappeared from the account
    List.objects(list_id__in=list(previous_ids - current_ids)).update(
        set__active=False, set__updated_at=datetime.utcnow())

    # drop stale copies of lists that are about to be re-saved
    List.objects(list_id__in=list(previous_ids & current_ids)).delete()

    # save every current list afresh with an empty member list
    for lst in current_lists:
        List(user_id=self.user_id, name=lst["name"], list_id=lst["list_id"],
             active=True, members_euid=[]).save()

    return self.get_lists()
def create_new_list():
    """Flask handler: create a list for the session user from the posted
    form, then redirect to the home page."""
    user = session.get("user")
    list_name = request.form["list_name"]
    created_at = datetime.now()
    new_list = List(user_id=user['id'], name=list_name, date_created=created_at)
    # TODO: error handling around the add/commit
    db.session.add(new_list)
    db.session.commit()
    # Redirect to home for now — the per-list redirect below currently
    # only returns json, plus it's buggy:
    # return redirect("/list/" + str(new_list.list_id))
    return redirect("/")
def post_list():
    """Create a List from the request's JSON body.

    Expects {"user": ..., "label": ..., "done": <bool, optional>}.
    Returns (400, error json) when user or label is missing, otherwise
    (201, serialized new list).
    """
    user = request.json.get('user', None)
    label = request.json.get('label', None)
    done = request.json.get('done', False)
    if not user:
        return jsonify({"msg": "name is required"}), 400
    if not label:
        return jsonify({"msg": "label is required"}), 400
    # Fix: renamed the local from `list`, which shadowed the builtin.
    new_list = List()
    new_list.user = user
    new_list.label = label
    new_list.done = done
    new_list.save()
    return jsonify(new_list.serialize()), 201
# Train a batch-normalized LSTM language model on the small PTB
# reference configuration and dump the results to disk.
from ptb_small_ref import (vocab_size, layer_size, optimizer, train_set,
                           valid_set, test_set, epochs)
from model import List
from layers import *

# model: embedding -> two BN-LSTM layers -> linear projection back to vocab
model = List([
    Embed(vocab_size, layer_size),
    BNLSTM(layer_size, layer_size),
    BNLSTM(layer_size, layer_size),
    Linear(layer_size, vocab_size)
])

if __name__ == '__main__':
    model.train(train_set, valid_set, test_set, optimizer, epochs)
    model.dump('ptb_small_norm_results.pkl')
def test_pickle():
    """Pickle a tiny Embed->Softmax model, then load it back from disk."""
    net = List([Embed(input_size, layer_size), Softmax()])
    net.pickle()
    from utils import unpickle_model
    unpickle_model('model.pkl')
def test_train():
    """Train a tiny Embed->Softmax model for 10 epochs on dummy
    datasets, then dump it to disk.

    NOTE(review): this passes two Dataset args to train() while other
    callers pass three plus an optimizer — confirm the intended arity.
    """
    net = List([Embed(input_size, layer_size), Softmax()])
    net.train(Dataset(), Dataset(), SGD(), 10)
    net.dump('test.pkl')
# Train a batch-normalized LSTM language model with dropout on the large
# PTB reference configuration, with learning-rate decay, and dump the
# results to disk.
from ptb_large_ref import (vocab_size, layer_size, drop_prob, weight_init,
                           decay_epoch, decay_rate, optimizer, train_set,
                           valid_set, test_set, epochs)
from model import List
from layers import *

# model: embedding -> (dropout + BN-LSTM) x2 -> dropout -> linear to vocab
model = List([
    Embed(vocab_size, layer_size, weight_init=weight_init),
    Dropout(drop_prob),
    BNLSTM(layer_size, layer_size, weight_init=weight_init),
    Dropout(drop_prob),
    BNLSTM(layer_size, layer_size, weight_init=weight_init),
    Dropout(drop_prob),
    Linear(layer_size, vocab_size, weight_init=weight_init)
])
# learning-rate decay schedule: decay by decay_rate starting at decay_epoch
model.decay_epoch = decay_epoch
model.decay_rate = decay_rate

if __name__ == '__main__':
    model.train(train_set, valid_set, test_set, optimizer, epochs)
    model.dump('ptb_large_norm_results.pkl')