import random

from django.contrib.gis.geos import GEOSGeometry

# Assumes the Event and MyUser models are importable from the app's models module.


def run():
    # Seed the database with 100 random "Lunch" events owned by the user with
    # pk=1, scattered across roughly lat 60-61 / lon 17-18 (around Uppsala).
    y = 0
    while y < 100:
        longitude = random.uniform(17.0, 18.0)
        lat = random.uniform(60.0, 61.0)
        point = GEOSGeometry('POINT(%s %s)' % (longitude, lat), srid=4326)
        e = Event(
            user=MyUser.objects.get(pk=1),
            title="Lunch",
            # Swedish: "A really good lunch is promised, chicken and potatoes.
            # Will only cost around 30 kr."
            description="En riktigt god lunch utlovas, kyckling och potatis. "
                        "Kommer endast att kosta runt 30 kr.",
            numOfPeople=random.randint(3, 12),
            date_start="2016-07-%s 13:00:00.00Z" % random.randint(1, 18),
            price=3,
            location=point,
        )
        e.save()
        print(y)
        y += 1

from datetime import datetime

import pytz


def createTestEvent():
    # Build an Event populated with fixed test data.
    # NOTE: .astimezone() on a naive datetime assumes the machine's local
    # timezone (Python 3.6+); use pytz.utc.localize(...) instead if these
    # strings are meant as UTC wall-clock times.
    e = Event(
        id="1234",
        name="Test Event",
        description="Test Description - isn't it long? No? Better? lorium ipsum?",
        url="http://notarealevent.com/1234",
        vanity_url="http://iamsupercool.com/1234",
        start=datetime.strptime("2019-10-13 10:00:00", "%Y-%m-%d %H:%M:%S").astimezone(pytz.utc),
        end=datetime.strptime("2019-10-13 12:00:00", "%Y-%m-%d %H:%M:%S").astimezone(pytz.utc),
        org_id="1234",
        date_created=datetime.now(pytz.utc),
        date_modified=datetime.now(pytz.utc),
        date_published=datetime.now(pytz.utc),
        status="live",
        currency="USD",
        online_event=True,
        hide_start_date=False,
        hide_end_date=False,
        max_price=10.32,
        min_price=7.34,
    )
    return e

def __map_to_event_bo(self, suggestion_id):
    # Build a system-generated action event for the given suggestion.
    event_bo = Event()
    event_bo.created = datetime.now()
    event_bo.modified = datetime.now()
    event_bo.event_type = EventTypes.ACTION
    event_bo.sub_type = EventActionSubTypes.SYSTEM
    event_bo.text = "Lisätty tunnisteet: "  # Finnish: "Added tags: "
    event_bo.suggestion_id = suggestion_id
    return event_bo

def post(self, request):
    # Create an Event from the JSON request body, validated through EventForm.
    data = json.loads(request.body)
    form = EventForm(data)
    if form.is_valid():
        event = Event.create(**form.cleaned_data)
        data = {'event': event.to_dict()}
    else:
        data = {'error': form.errors}
    return render_json_response(request, data)

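# The render_json_response helper used throughout these views is not defined in
# this section. A minimal sketch, assuming Django's JsonResponse is available;
# the project's actual helper may set headers or cookies differently.
from django.http import JsonResponse


def render_json_response(request, data, status=200):
    # Serialize `data` and return it with an application/json content type.
    return JsonResponse(data, status=status)
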
def get(self, request, *args, **kwargs):
    # List every Event as JSON, along with a CSRF token for the client.
    csrf_token = get_token(request)
    dict_events = [event.to_dict() for event in Event.query()]
    data = {'events': dict_events, 'csrf_token': csrf_token}
    return render_json_response(request, data)

def get(self, request, *args, **kwargs):
    # Fetch a single Event by primary key; return an empty object if not found.
    csrf_token = get_token(request)
    id = kwargs.get('id')
    event = Event.get_by_pk(id)
    if event is None:
        data = {'event': {}, 'csrf_token': csrf_token}
    else:
        data = {'event': event.to_dict(), 'csrf_token': csrf_token}
    return render_json_response(request, data)

def handle(self, *args, **options):
    # Scrape golfer names from the given URL, create any missing Golfer rows,
    # then create a new active Event named via an interactive prompt.
    url = options['url']
    scraper.scraper(url, scraper_op)
    for i in golfers:
        try:
            g = Golfer.objects.get(name=i)
        except Golfer.DoesNotExist:
            print(i + " does not exist. Creating...")
            g = Golfer(name=i)
            g.save()
    name = input('Name of event?: ')
    event = Event(name=name, active=True)
    event.save()

def put(self, request, *args, **kwargs):
    # Update an existing Event from the JSON request body.
    id = kwargs.get('id')
    data = json.loads(request.body)
    form = EventForm(data)
    if form.is_valid():
        event = Event.update(id, form.cleaned_data)
        data = {'event': event.to_dict()}
    else:
        data = {'error': form.errors}
    return render_json_response(request, data)

def create(fields: dict) -> Event:
    # Replace the raw category ids with resolved category objects before
    # constructing the Event.
    event_categories_ids = fields.pop("event_categories_ids", [])
    event_categories = eventCategoryService.getAllIds(event_categories_ids)
    if event_categories:
        fields["event_categories"] = event_categories
    new_event = Event(**fields)
    db.session.add(new_event)
    db.session.commit()
    return new_event

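# Hypothetical usage of create(); the field names below are illustrative and
# assume an Event model with a `name` column and an `event_categories`
# relationship, plus an active Flask-SQLAlchemy session.
new_event = create({
    "name": "Spring Meetup",
    "event_categories_ids": [1, 2],
})
print(new_event.id)  # Assigned by the database after commit
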
import json


def handle_event_proxy(event, chain):
    # Persist a web3 log entry exactly once; duplicate deliveries are skipped
    # based on the derived event key.
    event_key = get_event_key(event)
    existed_event = Event.query.filter(Event.key == event_key).first()
    if existed_event:
        return
    serialized_event = dict(
        event.__dict__,
        **{
            'args': event['args'].__dict__,
            'transactionHash': event['transactionHash'].hex(),
            'blockHash': event['blockHash'].hex(),
        },
    )
    # TODO: event handler
    print(chain, event['event'])
    new_event = Event(
        key=event_key,
        chain=chain,
        type=event['event'],
        content=json.dumps(serialized_event),
    )
    new_event.save()

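# get_event_key is not defined in this section. One common scheme for uniquely
# identifying a web3 log entry is the transaction hash plus the log index; the
# sketch below is an assumption, not necessarily this project's implementation.
def get_event_key(event):
    # logIndex disambiguates multiple logs emitted by the same transaction.
    return '%s:%d' % (event['transactionHash'].hex(), event['logIndex'])
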
def test_dates_between_threshold(self):
    # 2019-08-18 is a Sunday; expect every Monday, Wednesday and Friday up to
    # and including 2019-08-31.
    start = date(2019, 8, 18)
    end = date(2019, 8, 31)
    weekdays = [MONDAY, WEDNESDAY, FRIDAY]
    expected = [
        date(2019, 8, 19),
        date(2019, 8, 21),
        date(2019, 8, 23),
        date(2019, 8, 26),
        date(2019, 8, 28),
        date(2019, 8, 30),
    ]
    result = Event.calculate_dates_in_threshold(start, end, weekdays)
    self.assertEqual(result, expected)

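# A minimal sketch of calculate_dates_in_threshold that satisfies the test
# above, assuming MONDAY/WEDNESDAY/FRIDAY follow Python's date.weekday()
# numbering (Monday == 0). On the real model this would be a @staticmethod of
# Event; the actual implementation may differ.
from datetime import timedelta


def calculate_dates_in_threshold(start, end, weekdays):
    # Walk day by day from start to end (inclusive), keeping requested weekdays.
    result = []
    current = start
    while current <= end:
        if current.weekday() in weekdays:
            result.append(current)
        current += timedelta(days=1)
    return result
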
def generate_event(self, event_num):
    # Create a random one-hour sample event starting within +/- 5 days of
    # today, at random coordinates.
    e = Event()
    min_date = (date.today() + timedelta(days=-5)).toordinal()
    max_date = (date.today() + timedelta(days=5)).toordinal()
    e.start_date = date.fromordinal(random.randint(min_date, max_date))
    e.end_date = e.start_date + timedelta(hours=1)  # duration 60 minutes
    e.title = 'Event %d' % event_num
    e.descr = 'Description for event %d' % event_num
    # NOTE: for Finland (lat ~60, lon ~25) these two ranges appear swapped.
    e.lat = random.randint(24000, 26000) / 1000.0
    e.lon = random.randint(58000, 62000) / 1000.0
    # Address fields intentionally left unset:
    # e.street_address = models.CharField(max_length=500)
    # e.city = models.CharField(max_length=500)
    # e.country = models.CharField(max_length=100)
    e.org_name = 'Sample Organizer'
    e.org_email = '*****@*****.**'
    e.org_phone = '+358501234567'
    e.save()
    return

def extract_events(self, url):
    # Parse the AaltoEvents RSS feed at `url` and create an Event for each
    # item that has not been imported before.
    events = []
    handle = urllib2.urlopen(url)
    dom = parse(handle)
    sanitizer_re = re.compile(r'(style|id|class)="[^"]*"')
    items = dom.getElementsByTagName('item')
    for item in items:
        e = Event()
        e.remote_source_name = 'AaltoEvents'
        e.remote_url = self.getdata(item, 'link')
        if Event.objects.filter(remote_url__iexact=e.remote_url).count() > 0:
            print('Event already exists, continuing.')
            continue
        e.title = smart_unicode(self.getdata(item, 'title'))
        e.descr = smart_unicode(self.getdata(item, 'description'))
        e.descr = sanitizer_re.sub('', e.descr)  # Strip style, id, class attrs
        try:
            e.start_date = dateutil.parser.parse(self.getdata(item, 'xcal:dtstart'))
        except Exception:
            print('Error parsing start date')
        try:
            e.end_date = dateutil.parser.parse(self.getdata(item, 'xcal:dtend'))
        except Exception:
            print('Error parsing end date')
        # Coordinates come as a "lat lon" pair, possibly with ',' decimals.
        try:
            point = self.getdata(item, 'georss:point')
            if not point:
                raise Exception('No georss:point')
            point = point.replace(',', '.').split(' ')
            e.lat = float(point[0])
            e.lon = float(point[1])
        except Exception:
            e.lat = 0
            e.lon = 0
        e.org_name = smart_unicode(self.getdata(item, 'author'))
        # Reverse-geocode a street address from the coordinates when available.
        try:
            if not (e.lat and e.lat != 0 and e.lon and e.lon != 0):
                raise Exception('Missing lat or lon')
            res = reverse_geocode(e.lat, e.lon)
            e.street_address = res.street_address if hasattr(res, 'street_address') else ''
            e.city = res.city if hasattr(res, 'city') else ''
            e.country = res.country if hasattr(res, 'country') else 'Finland'
        except Exception:
            e.street_address = ''
            e.city = ''
            e.country = ''
        e.org_email = ''
        e.org_phone = ''
        e.save()
        events.append(e)
    return events

def delete(self, request, *args, **kwargs):
    # Delete the Event with the given primary key, tolerating a missing event
    # instead of raising on None.
    id = kwargs.get('id')
    event = Event.get_by_pk(id)
    if event is not None:
        event.delete()
    data = {'event': {}}
    return render_json_response(request, data)

def handle(self, *args, **options):
    # Scrape up to six months of the AYY event calendar, following the
    # "next month" link on each listing page and parsing each event's
    # detail page into an Event.
    print("Scraping AYY event calendar")
    datere = re.compile(r"(\d+\.\d+\.\d+), (\d+)\.(\d+)")
    baseurl = "http://ayy.fi/kalenteri/"
    url = baseurl
    fetched_main_pages = 0
    while fetched_main_pages < 6 and url:
        print("Getting month [current+%d]: %s" % (fetched_main_pages, url))
        event_listing = urllib2.urlopen(url)
        fetched_main_pages += 1
        soup = BeautifulSoup(event_listing.read())
        try:
            url = baseurl + soup.find(id="content").find_all("a")[1]["href"]
            print("Next url: %s" % url)
        except Exception:
            print("No next url found, this will be the last page")
            url = None
        for row in soup.find_all("tr"):
            try:
                link = row.a
                if not link:
                    continue  # No links found
                detail_url = link["href"]
                if detail_url == "":
                    continue  # No valid url
                print("\tParsing: %s" % detail_url)
                # Get data, parse
                detail_data = urllib2.urlopen(detail_url)
                dsoup = BeautifulSoup(detail_data.read())
                event = Event()
                event.remote_source_name = "ayy"
                event.remote_url = detail_url
                container = dsoup.find(id="content")
                if not container:
                    continue
                # Extract title
                try:
                    event.title = container.find("h1", {"class": "main-title"}).string
                except Exception as e:
                    print("\tError extracting title: %s" % e)
                # Extract description, stripping scripts and rating widgets
                try:
                    content = container.find("div", {"class": "entry-content"})
                    for c in content.find_all("script"):
                        c.extract()  # Remove scripts
                    for c in content.find_all("div", {"class": "pd-rating"}):
                        c.extract()
                    event.descr = ""
                    for c in content.contents:
                        event.descr += str(c)
                except Exception as e:
                    print("\tError extracting description: %s" % e)
                # Extract times
                try:
                    metadata = content.next_sibling.next_sibling.next_sibling
                    start_time = metadata.p
                    for c in start_time.find_all("b"):
                        c.extract()  # Remove b tags == titles
                    start_time = start_time.get_text().split("\n")  # Split remaining by line
                    end_time = start_time[1]
                    start_time = start_time[0]
                    # Dates may use '.' as the time separator (e.g. "18.30");
                    # rewrite it to ':' so dateutil can parse the time.
                    s = datere.match(start_time)
                    if s:
                        start_time = s.expand(r"\1 \2:\3")
                    s = datere.match(end_time)
                    if s:
                        end_time = s.expand(r"\1 \2:\3")
                    # Parse
                    event.start_date = parser.parse(start_time, dayfirst=True)
                    event.end_date = parser.parse(end_time, dayfirst=True)
                except Exception as e:
                    print("\tError resolving date: %s" % e)
                    raise e  # Fatal, dates are required
                # Extract location; labels are Finnish: Paikka = venue,
                # Osoite = street address, Kaupunki = city
                try:
                    info = metadata.contents[7].get_text().split("\n")
                    event.venue = info[0].split("Paikka: ")[1]
                    event.street_address = info[1].split("Osoite: ")[1]
                    event.city = info[2].split("Kaupunki: ")[1]
                    event.country = "Finland"
                    query = ""
                    if event.street_address:
                        query += "%s, " % self.normalize_street_address(event.street_address)
                    if event.city:
                        query += "%s, " % event.city
                    if event.country:
                        query += "%s" % event.country
                    query = smart_unicode(query)
                    geores = geocoder.geocode(query)
                    if geores:
                        event.lat = geores["lat"]
                        event.lon = geores["lon"]
                    else:
                        print("\tUnable to resolve coordinates for query %s" % query)
                except Exception as e:
                    print("\tError resolving location: %s" % e)
                # Extract links (currently unused beyond these local variables)
                try:
                    metadata = metadata.next_sibling.next_sibling
                    info = metadata.contents[3]
                    links = info.find_all("a")
                    homepage = links[0]["href"]
                    facebookev = links[1]["href"]
                except Exception as e:
                    print("\tError resolving links: %s" % e)
                # Extract contact info; labels are Finnish: Järjestäjä = organizer,
                # Sähköposti = email, Puhelin = phone
                try:
                    info = metadata.contents[7].get_text().split("\n")
                    event.org_name = info[0].split(u"Järjestäjä: ")[1]
                    event.org_email = info[1].split(u"Sähköposti: ")[1]
                    event.org_phone = info[2].split("Puhelin: ")[1]
                except Exception as e:
                    print("\tError resolving organizer info: %s" % e)
                event.save()
            except Exception:
                print("\tFATAL ERROR handling event, discarded")
    return