def poll_leganes_wu():
    """Poll the Weather Underground PWS API for the Leganes station and store one Record."""
    attempt = 0
    while attempt < 5:
        try:
            location = Location.get(Location.name == 'leganes')
            response = requests.get(
                'https://api.weather.com/v2/pws/observations/current',
                params={
                    'apiKey': CONFIG['wu_api_key'],
                    'stationId': 'ILEGAN9',
                    'numericPrecision': 'decimal',
                    'format': 'json',
                    'units': 'm',
                })
            observation = response.json().get('observations')[0]
            # Store the observation timestamp as a naive Europe/Madrid datetime.
            local_date = arrow.get(observation['epoch']).to(
                'Europe/Madrid').datetime.replace(tzinfo=None)
            Record.get_or_create(
                date=local_date,
                location=location,
                defaults={
                    'temperature': observation['metric']['temp'],
                    'humidity': observation['humidity'],
                })
            break
        except Exception:
            # Best-effort retry: wait a minute between failed attempts.
            time.sleep(60)
            attempt += 1
def get_records_by_execution_id(execution_id, conn):
    """Return the list of 'source'/'generated' Record objects for one execution id.

    Args:
        execution_id: the execution identifier to filter on.
        conn: an open DB-API connection whose rows are addressable by column name.

    Returns:
        list of Record objects (empty, with an error message printed, if none found).
    """
    records = []
    cursor = conn.cursor()
    # Parameterized query: never interpolate caller-supplied values into SQL
    # (the original used '%s' string formatting — an injection risk).
    sql = ("select id, business_key, execution_id, record_hash, record_type "
           "from Record where execution_id = ? "
           "and record_type in ('source', 'generated')")
    for row in cursor.execute(sql, (execution_id,)).fetchall():
        # The business key could be a simple int but we handle it as string
        # since python is not statically typed.
        record = Record(str(row["business_key"]),
                        get_columns_by_record_id(row["id"], conn))
        record.record_hash = row["record_hash"]
        record.record_type = row["record_type"]
        records.append(record)
    # Generated records (record_type = 'framework') have no use-case yet but
    # could be used to re-produce a generated report file at any time.
    if not records:
        print("Error : no record found for execution " + execution_id)
    return records
def merge_author(d=None):
    """Merge author records keyed by a normalized "name_organization" string.

    Args:
        d: mapping of arbitrary keys to author Record objects (defaults to empty).

    Returns:
        dict mapping "<name>_<organization>" (lowercased, spaces normalized) to a
        merged Record whose `first` and `other` lists combine all duplicates.
    """
    # Fix the mutable-default-argument pitfall: a literal {} default is shared
    # across calls.
    if d is None:
        d = {}
    merged = {}
    for r in d.values():
        key = "{0}_{1}".format(r.name.replace(" ", '-').lower(),
                               r.organization.replace(' ', "_").lower())
        if key not in merged:
            merged[key] = Record(key)
            merged[key].name = r.name
            merged[key].organization = r.organization
        merged[key].first.extend(r.first)
        merged[key].other.extend(r.other)
    return merged
def calculate_tweet_score(self, text, followers):
    """Score a tweet: classifier probability of class '1' scaled by audience size."""
    record = Record(1, text, followers)
    # Build the record's feature set against the classifier's known features.
    record.get_features_set(self.feature_set)
    distribution = self.classifier.prob_classify(record.features_set)
    record.probability = distribution.prob('1')
    # Impact = probability weighted by follower count (+1 so zero followers
    # still yields a non-zero score).
    record.impact_score = record.probability * (float(record.followers) + 1)
    return record.impact_score
def save_info(file_name):
    """Parse a log file and load its lines into the Record collection.

    Drops the existing collection first. Returns the string 'file not exists'
    when the path is missing (kept as-is for caller compatibility).
    """
    if not os.path.exists(file_name):
        return 'file not exists'
    Record.dropCollection()
    with open(file_name) as f:  # renamed from `file` to avoid shadowing the builtin
        # Iterate the file object directly instead of readlines(): streams
        # line-by-line instead of materializing the whole file in memory.
        for line in f:
            result = Record.create()
            time, name, view, ip = line_parser(line)
            result["ip"] = ip
            result["name"] = name
            result["view"] = view
            result["time"] = datetime.strptime(time, "%Y-%m-%dT%H:%M:%S")
            result.save()
def get(self):
    """Load records from ./input.txt (CSV with a header row) into the database."""
    data = genfromtxt("./input.txt", delimiter=',', skip_header=1, dtype=str)
    for row in data.tolist():
        # Column 0 is assumed to be an id/index column and is skipped — TODO confirm.
        record = Record(description=row[1],
                        datetime=row[2],
                        longitude=row[3],
                        latitude=row[4],
                        elevation=row[5])
        db.session.add(record)
        # db.session.flush()
    # Commit once after the loop: a single transaction for the whole file
    # instead of a round-trip per row.
    db.session.commit()
    return {"message": "CSV file uploaded to database!"}
def get_last_record_for_location(location, delay_minutes=0):
    """Return the newest Record for `location` at least `delay_minutes` old, or None."""
    cutoff = datetime.datetime.now() - datetime.timedelta(minutes=delay_minutes)
    query = (Record.select()
             .where(Record.location == location, Record.date <= cutoff)
             .order_by(Record.date.desc()))
    return query.first()
def postData():
    """Persist a workout record posted by a logged-in user; returns a JSON status."""
    if request.method == 'POST':
        username = request.form.get('username')
        distance = request.form.get('distance')
        duration = request.form.get('duration')
        averagespeed = request.form.get('averagespeed')
        pathline = request.form.get('pathline')
        startpoint = request.form.get('startpoint')
        endpoint = request.form.get('endpoint')
        date = request.form.get('date')
        if session.get(username):
            newRecord = Record(username, distance, duration, averagespeed,
                               pathline, startpoint, endpoint, date)
            try:
                db.session.add(newRecord)
                db.session.commit()
                db.session.close()
                print('用户 {} 添加一条记录'.format(username))
                return jsonify({'msg': '同步记录成功!', 'state': 'success'})
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
                # still propagate; roll back the failed transaction.
                db.session.rollback()
                return jsonify({'msg': 'sql 执行错误!', 'state': 'fail'})
        else:
            return jsonify({'msg': '用户未登录!', 'state': 'fail'})
def poll_leganes_cm():
    """Scrape the Madrid regional station page for Leganes weather readings.

    Retries the whole scrape up to 5 times with a 60 s pause on any failure;
    stores one Record per table row via get_or_create so re-polls never
    duplicate data.
    """
    attempts = 0
    while attempts < 5:
        try:
            leganes_location = Location.get(Location.name == 'leganes')
            response = requests.get(
                'http://gestiona.madrid.org/azul_internet/html/web/DatosEstacion24Accion.icm?ESTADO_MENU=2_1',
                params={
                    'estaciones': 2,
                    'aceptar': 'Aceptar'
                })
            soup = BeautifulSoup(response.text, 'html.parser')
            for table in soup.findAll('table'):
                # Only the table titled 'Parámetros Meteorológicos' holds weather rows.
                if table.find('td', text='Parámetros Meteorológicos'):
                    for record in table.find('tbody').findAll('tr'):
                        try:
                            values = record.findAll('td')
                            record_time = values[0].text.strip()
                            record_time = arrow.get(record_time, 'HH:mm', tzinfo='UTC')
                            now = arrow.utcnow()
                            # The page shows only HH:mm; a wall-clock time later
                            # than "now" must belong to yesterday.
                            day = now if record_time.time() <= now.time(
                            ) else now.shift(days=-1)
                            record_time = day.replace(
                                hour=record_time.hour,
                                minute=record_time.minute,
                                second=0,
                                microsecond=0
                            ).to('Europe/Madrid').datetime.replace(tzinfo=None)
                            # Cells 3-5 appear to be temperature, relative humidity
                            # and pressure — TODO confirm against the live page.
                            record_temp, record_hr, record_pre = map(
                                lambda x: float(x.text.strip()), values[3:6])
                            Record.get_or_create(date=record_time,
                                                 location=leganes_location,
                                                 defaults={
                                                     'pressure': record_pre,
                                                     'temperature': record_temp,
                                                     'humidity': record_hr
                                                 })
                        except Exception:
                            # Skip malformed rows (e.g. header/summary lines) silently.
                            continue
            break
        except Exception:
            time.sleep(60)
            attempts += 1
def add_record(self):
    """add new record to the database"""
    details = view.enter_trip_details(input, input, input)
    # Guard clause: reject invalid input before touching the model.
    if not mdl.check_validity(details):
        view.invalid_value()
        return
    self.model.records.append(Record(details[0], details[1], details[2]))
def addRecord(**args):
    """Create (or reuse) a Catagory, attach a Task and a Record, and persist them.

    Expected keys in args: 'cat', 'task', 'date', 'start_time', 'big_or_not'.
    """
    session = connectdb()
    # Run the category lookup once: the original executed the same query twice
    # (.all() to test existence, then .one() to fetch). one_or_none() keeps the
    # same MultipleResultsFound behavior while returning None when absent.
    catobj = session.query(Catagory).filter_by(name=args['cat']).one_or_none()
    if catobj is None:
        catobj = Catagory(args['cat'])
    taskobj = Task(args['task'])
    taskobj.cat = catobj
    record = Record(args['date'], args['start_time'], args['big_or_not'])
    record.task = taskobj
    # cat and task will save/update automatically and cascade due to the
    # session's default setting.
    session.merge(record)
    my_db_commit(session)
    session.close()
def parse_query_result(query_result, this_execution, columns, columns_mapping):
    """Reads and parses data source query result and add it to existing execution instance.

    Args:
        query_result: iterable of rows addressable by column name.
        this_execution: Execution instance receiving the parsed records.
        columns: column names to copy into each record.
        columns_mapping: per-column config; "is_business_key" == '1' marks key columns.
    """
    # Collect the columns that make up the business key (the record id).
    columns_in_business_key = []
    for column_name, column_definition in columns_mapping.items():
        if column_definition.get("is_business_key") == '1':
            columns_in_business_key.append(str(column_name))
    for row in query_result:
        # Build the business key deterministically: sort by column name so the
        # id is independent of the column order in the report SQL.
        business_key = [(a_column, row[a_column])
                        for a_column in columns_in_business_key]
        business_key.sort(key=lambda column_tuple: column_tuple[0])
        # Coerce every key part to str before joining: the original joined raw
        # row values and would crash on non-string (e.g. int) key columns.
        record_id = ''.join(str(column_tuple[1]) for column_tuple in business_key)
        this_record = Record(record_id=record_id)
        this_record.record_type = 'source'  # This record was read from a report query
        for a_column in columns:
            a_column = str(a_column)
            this_column = Column(
                a_column, str(row[a_column]),
                columns_mapping[a_column]["is_used_for_compare"])
            this_record.columns.append(this_column)
        this_record.hash_record()
        this_execution.add_record(this_record)
def addRecord(**args):
    """Create (or reuse) a Catagory, attach a Task and a Record, and persist them.

    Expected keys in args: 'cat', 'task', 'date', 'start_time', 'big_or_not'.
    """
    session = connectdb()
    # Run the category lookup once: the original executed the same query twice
    # (.all() to test existence, then .one() to fetch). one_or_none() keeps the
    # same MultipleResultsFound behavior while returning None when absent.
    catobj = session.query(Catagory).filter_by(name=args['cat']).one_or_none()
    if catobj is None:
        catobj = Catagory(args['cat'])
    taskobj = Task(args['task'])
    taskobj.cat = catobj
    record = Record(args['date'], args['start_time'], args['big_or_not'])
    record.task = taskobj
    # cat and task will save/update automatically and cascade due to the
    # session's default setting.
    session.merge(record)
    my_db_commit(session)
    session.close()
def load_record():
    """Seed the database with two sample food-diary Record rows dated today."""
    today = datetime.date.today()
    samples = [
        Record(profile_id=1, food_id=1, date=today, serving_qty=1,
               serving_unit='container', serving_weight_grams=111),
        Record(profile_id=1, food_id=2, date=today, serving_qty=3,
               serving_unit='cup', serving_weight_grams=246),
    ]
    for sample in samples:
        db.session.add(sample)
    db.session.commit()
def user_populate_records(user):
    """Create one fresh 'available' Record per challenge for the given user."""
    for challenge in CHALLENGES:
        new_uuid = str(uuid.uuid4())
        app.logger.info("user %s: adding record %s %s", user.email,
                        challenge.identifier, new_uuid)
        record = Record.create(uuid=new_uuid,
                               user=user,
                               state="available",
                               challenge=challenge.identifier)
        record.save()
def add_record(self, classes, day):
    """Add a lesson Record for `classes` on `day`, plus one linked Homework per student."""
    record = Record(Day=day)
    classes.record.append(record)
    # Create the associated Homework stub (unsubmitted, unscored) for each student.
    for student in classes.student:
        record.homework.append(
            Homework(student_id=student.id, submit=False, score=None))
    self.Session.commit()
def save_image(self, image, user_id, detected_keyword):
    """Write one JPEG per detected keyword and persist a matching Record row."""
    for keyword in detected_keyword:
        # Timestamp-based filename; JPEG quality 20 keeps stored frames small.
        image_name = f'{time.time()}.jpg'
        cv2.imwrite(f'/media/storage/images/{image_name}', image,
                    [cv2.IMWRITE_JPEG_QUALITY, 20])
        self.session.add(
            Record(channel=self.args.channelname,
                   time=datetime.datetime.now(),
                   keyword=keyword,
                   image=image_name,
                   user_id=user_id))
        self.session.flush()
        self.session.commit()
def filter_info(days):
    """Aggregate hit counts per (ip, view) pair over the last `days` days, busiest first."""
    now = datetime.now()
    window_start = now - timedelta(days)
    # Match documents whose timestamp falls inside (window_start, now).
    match_stage = {"$and": [{"time": {"$gt": window_start}},
                            {"time": {"$lt": now}}]}
    pipeline = [
        {"$match": match_stage},
        # Group by the composite (ip, view) key and count occurrences.
        {"$group": {"_id": {"ip": "$ip", "view": "$view"},
                    "num": {"$sum": 1}}},
        {"$sort": {"num": -1}},
    ]
    return Record.aggregate(pipeline)["result"]
def generate_statistics():
    """Back-fill daily Statistics rows (max/min/avg temperature) from raw Records.

    Scans every day from the first stored record up to yesterday, skipping days
    that already have a Statistics row. Only outdoor, non-hidden, remote
    locations are considered. Finishes by triggering the daily notification.
    """
    locations = Location.select().where(
        Location.outdoor == True,
        Location.hidden == False,
        Location.remote == True,
    )
    record_qs = Record.select().where(Record.location.in_(locations))
    # Start at midnight of the oldest record's day.
    start = record_qs.order_by(Record.date).first().date
    start = start.replace(hour=0, minute=0, second=0, microsecond=0)
    yesterday = datetime.datetime.today() - datetime.timedelta(days=1)
    date_range = list(rrule(DAILY, dtstart=start, until=yesterday))
    for d in tqdm.tqdm(date_range):
        if not Statistics.select().where(Statistics.date == d.date()).exists():
            record_max, record_min = None, None
            temperature_avg = 10000  # sentinel: any real daily average is lower
            for location in locations:
                location_record_max, location_record_min = None, None
                # All records for this location within the 24h window starting at d.
                records = record_qs.where(
                    Record.date >= d,
                    Record.date < d + datetime.timedelta(days=1),
                    Record.location == location.id)
                if not records.exists():
                    continue
                # Per-location extremes for the day.
                for r in records:
                    if not location_record_max or r.temperature > location_record_max.temperature:
                        location_record_max = r
                    if not location_record_min or r.temperature < location_record_min.temperature:
                        location_record_min = r
                # NOTE(review): the overall daily max keeps the LOWEST of the
                # per-location maxima (and the min the lowest of the minima) —
                # looks like a deliberate "most conservative station" rule, but
                # confirm it is not an inverted comparison.
                if not record_max or location_record_max.temperature < record_max.temperature:
                    record_max = location_record_max
                if not record_min or location_record_min.temperature < record_min.temperature:
                    record_min = location_record_min
                # Overall average is the smallest per-location daily mean.
                temperature_avg = min(
                    temperature_avg,
                    (sum(map(lambda record: record.temperature, records)) /
                     len(records)))
            Statistics(date=d.date(),
                       temperature_max=record_max.temperature,
                       temperature_min=record_min.temperature,
                       temperature_avg=round(temperature_avg, 1),
                       time_max=record_max.date.time(),
                       time_min=record_min.date.time()).save()
    send_daily_statistics()
def poll_aemet():
    """Fetch the latest AEMET observations for station 3195 and store them."""
    for _ in range(5):
        try:
            station = Location.get(Location.name == 'aemet')
            # The first request returns a short-lived URL holding the actual data.
            meta = requests.get(
                'https://opendata.aemet.es/opendata/api/observacion/convencional/datos/estacion/3195/',
                params={'api_key': CONFIG['aemet_api_key']})
            data_response = requests.get(meta.json().get('datos'))
            for observation in data_response.json():
                observed_at = arrow.get(
                    observation['fint']).datetime.replace(tzinfo=None)
                Record.get_or_create(date=observed_at,
                                     location=station,
                                     defaults={
                                         'pressure': observation['pres'],
                                         'temperature': observation['ta'],
                                         'humidity': observation['hr']
                                     })
            break
        except Exception:
            # Best-effort retry with a one-minute backoff, at most 5 attempts.
            time.sleep(60)
def poll_sensor(mac, location_id):
    """Read temperature/humidity from a Xiaomi Mi BT sensor and save one Record."""
    for _ in range(5):
        try:
            poller = MiTempBtPoller(mac, BluepyBackend)
            temperature = poller.parameter_value(MI_TEMPERATURE, read_cached=False)
            humidity = poller.parameter_value(MI_HUMIDITY, read_cached=False)
            Record(temperature=temperature,
                   humidity=humidity,
                   date=datetime.datetime.now(),
                   location=location_id).save()
            break
        except BluetoothBackendException:
            # Bluetooth is flaky: wait 10 s and retry, up to 5 attempts.
            time.sleep(10)
def load_records():
    """Load fake records with fake information into database"""
    # NOTE(review): 'rb' is the Python 2 csv convention; under Python 3 this
    # should be text mode with newline='' — confirm the target interpreter.
    with open('seed_data/records.csv', 'rb') as csvfile:
        for row in csv.reader(csvfile):
            (record_id, user_id, common_name, date_time, latitude, longitude,
             notes, seen, num_birds) = row
            db.session.add(
                Record(record_id=record_id,
                       user_id=user_id,
                       common_name=common_name,
                       date_time=date_time,
                       latitude=latitude,
                       longitude=longitude,
                       notes=notes,
                       seen=seen,
                       num_birds=num_birds))
    db.session.commit()
def _on_message(self, client, userdata, msg):
    """MQTT callback: upsert the current minute's Record for this handler's location."""
    payload = json.loads(msg.payload.decode())
    readings = {
        'temperature': payload.get('temperature'),
        'humidity': payload.get('humidity'),
        'pressure': payload.get('pressure'),
    }
    # Truncate to the minute and drop tzinfo so rows align on minute boundaries.
    now = arrow.now().replace(second=0,
                              microsecond=0).datetime.replace(tzinfo=None)
    record, _ = Record.get_or_create(date=now,
                                     location=self.location.id,
                                     defaults=readings)
    # Overwrite even when the row already existed: the newest reading wins.
    record.temperature = readings['temperature']
    record.humidity = readings['humidity']
    record.pressure = readings['pressure']
    record.save()
def make_plt(start, location, temperature, humidity, single=True, i=0,
             historical_lines=False):
    """Plot temperature and/or humidity curves for one location since `start`."""
    records = Record.select().where(
        Record.date >= start,
        Record.location == location).order_by(Record.date)
    times, temps, hums = [], [], []
    for record in records:
        times.append(record.date)
        temps.append(record.temperature)
        hums.append(record.humidity)
    if temperature:
        if historical_lines:
            # Overlay the 1975-2019 climatological max/min for each calendar day.
            with open('utils/mean_temps.bin', 'rb') as fh:
                mean_temps = pickle.load(fh)
            tmax, tmin = [], []
            for moment in times:
                date = '%02d-%02d' % (moment.month, moment.day)
                tmax.append(mean_temps[date][0])
                tmin.append(mean_temps[date][1])
            plt.plot(times, tmax, label='max_temperature 1975-2019', color='Red')
            plt.plot(times, tmin, label='min_temperature 1975-2019', color='Blue')
        plt.plot(times,
                 temps,
                 label='temperature' + ('' if single else f' {location.name}'),
                 color=cm.colors[4 + i])
    if humidity:
        plt.plot(times,
                 hums,
                 label='humidity' + ('' if single else f' {location.name}'),
                 color=cm.colors[i])
def post(self):
    """Create a Record from the request JSON; 400/422 on bad input, 201 on success."""
    payload = request.get_json(force=True)
    if not payload:
        return {'message': 'No input data provided'}, 400
    # Validate and deserialize (marshmallow 2.x style: load -> (data, errors)).
    data, errors = record_schema.load(payload)
    if errors:
        return errors, 422
    fields = {key: data[key]
              for key in ('description', 'longitude', 'latitude', 'elevation')}
    record = Record(**fields)
    db.session.add(record)
    db.session.commit()
    result = record_schema.dump(record).data
    return {"status": 'success', 'data': result}, 201
def addRecord():
    """Persist a timed reminder for the logged-in user and schedule its job."""
    if 'username' in session:
        print('I will add record in ' + session['username'])
        theDatetime = request.form['theDatetime']
        content = request.form['content']
        # Build and insert the new record (state 0 = pending).
        record = Record(username=session['username'],
                        content=content,
                        state=0,
                        timing=theDatetime)
        db.session.add(record)
        db.session.commit()
        # Parse the HTML datetime-local value and schedule the one-shot job.
        print(theDatetime)
        run_at = datetime.strptime(theDatetime, "%Y-%m-%dT%H:%M")
        print(run_at)
        scheduler.add_job(doJob,
                          'date',
                          run_date=run_at,
                          args=[session['username'], content])
        return redirect(url_for('show'))
    # NOTE(review): falls through and returns None when no user is logged in —
    # confirm whether a redirect/401 response is intended here.
def get_speakers(poster, speaker_dict=None):
    """Collect speaker Records referenced on one poster's detail page.

    Args:
        poster: object with `id` and `title` attributes.
        speaker_dict: optional dict of speaker-id -> Record to accumulate into.
            Defaults to a fresh dict. The original used a mutable default
            argument, which silently shared (and mutated) one dict across calls.

    Returns:
        The (possibly newly created) speaker dict.
    """
    if speaker_dict is None:
        speaker_dict = {}
    print("get speakers by poster: " + poster.id)
    try:
        r = session.get(poster_detail_url.format(poster.id))
        soup = BeautifulSoup(r.text, features="html.parser")
        tags = soup.find_all('button', {"onclick": re.compile(r"showSpeaker.*")})
        for i, a in enumerate(tags):
            ids = re.compile(r"[\d | -]+").findall(a["onclick"])
            if len(ids) > 0:
                speaker_id = ids[0]  # renamed from `id` to avoid shadowing the builtin
                if speaker_id not in speaker_dict:
                    speaker_dict[speaker_id] = Record(ids[0])
                record = speaker_dict[speaker_id]
                # The first button is the lead speaker; the rest are co-speakers.
                if i == 0 and (poster.title not in record.first):
                    record.add_first(poster.title)
                if i > 0 and (poster.title not in record.other):
                    record.add_other(poster.title)
                get_speakers_detail(record)
    except requests.exceptions.RequestException as e:
        # Network failure: log it and return whatever was collected so far.
        print(e)
    return speaker_dict
def handle_maths_game(json):
    """Store the player's final maths score, keeping only the day's best, then broadcast."""
    if json['status'] == 'finish':
        final_score = json['final_score']
        # NOTE(review): this lookup filters by score_type and date only, not by
        # the player's email — it could match another user's row; confirm.
        existing = Record.query.filter_by(
            score_type="math", record_date=datetime.now().date()).first()
        if existing is None:
            new_record = Record(email=session['ind_email'],
                                score_type="math",
                                score=final_score)
            db.session.add(new_record)
        else:
            existing.score = max(existing.score, final_score)
        db.session.commit()
    emit('maths',
         json,
         broadcast=True,
         namespace='/individual/' + session['ind_uuid'])
    print('Maths received json: ' + str(json))
def new_record():
    """Form for user to add new record. Store in database"""
    fields = {
        "user_id": request.form.get("user_id"),
        "common_name": request.form.get("common_name"),
        "date_time": request.form.get("date_time"),
        "latitude": request.form.get("lat"),
        "longitude": request.form.get("lng"),
        "notes": request.form.get("notes"),
        "seen": request.form.get("seen"),
        "num_birds": request.form.get("num_birds"),
    }
    db.session.add(Record(**fields))
    db.session.commit()
    # Echo a display-friendly subset back as JSON — this is just for display
    # purposes, so it doesn't need everything (user_id intentionally omitted).
    return jsonify({
        "common name": fields["common_name"],
        "date_time": fields["date_time"],
        "latitude": fields["latitude"],
        "longitude": fields["longitude"],
        "notes": fields["notes"],
        "seen": fields["seen"],
        "num birds": fields["num_birds"],
    })
def store_record(patient, date_text, injury_text, treatment_text, followup_text):
    # Build the visit record, hand its details to the controller, then bring
    # the patient frame back to the front.
    # TODO(review): treatment_text is passed twice below — likely one of the
    # duplicates should be a different field; confirm against the Record
    # constructor's signature before changing.
    record = Record(date_text,
                    injury_text,
                    treatment_text,
                    treatment_text,
                    followup_text
                    )
    print record.record_details
    controller.store_record(patient['stu_id'],record.record_details)
    call_patient_frame_on_top(patient)
def souvik_verify(start_date, days=365, data_present=False):
    """Drive the 'souvik' processing pipeline over a span of solar images.

    Fetches HMI/VIS/AIA images (from the server unless data_present is True),
    pairs each HMI image with its corresponding AIA and VIS images, runs
    do_souvik_work for dates without an existing Record, and deletes the
    intermediate products afterwards.

    Relies on command-line arguments: sys.argv[6] (download-only flag),
    sys.argv[7] (shard divisor) and sys.argv[8] (shard remainder).
    """
    _start_date = start_date
    sys.stdout.write('Startng work for Year: {}\n'.format(_start_date))
    if not data_present:
        hmi_images, vis_images, aia_images = get_all_images_from_server(
            _start_date, days)
    else:
        hmi_images, vis_images, aia_images = get_data_objects_from_local(
            _start_date)
    # When the download-only flag is set, stop after fetching the images.
    download_only = int(sys.argv[6])
    if download_only == 1:
        return
    get_corresponding_images = prepare_get_corresponding_images(
        aia_images, vis_images)
    # Shard the work across parallel invocations: this process handles only
    # indices where index % divisor == remainder.
    divisor = int(sys.argv[7])
    remainder = int(sys.argv[8])
    hmi_images.sort(key=get_julian_day)
    for index, hmi_image in enumerate(hmi_images):
        if index % divisor != remainder:
            continue
        aia_image, vis_image, status = get_corresponding_images(hmi_image)
        if not status:
            sys.stdout.write(
                'No AIA or VIS image found for filename: {}\n'.format(
                    hmi_image.filename))
            continue
        _date = get_date(hmi_image)
        sys.stdout.write('Startng work for Date: {}\n'.format(_date))
        record = Record.find_by_date(_date)
        if not record:
            do_souvik_work(hmi_image, aia_image, vis_image)
        else:
            sys.stdout.write('Data Exists for Date: {}\n'.format(_date))
        # Clean up every intermediate product whether or not new work was done,
        # to bound disk usage.
        aia_image.delete('aiaprep')
        aia_image.delete('aligned_data')
        aia_image.delete('ldr')
        aia_image.delete('mask', suffix='plages')
        aia_image.delete('mask', suffix='active_networks')
        vis_image.delete('aiaprep')
        vis_image.delete('mask')
        hmi_image.delete('aiaprep')
        hmi_image.delete('crop_hmi_afterprep')
        hmi_image.delete('souvik')
def create(self, request, relpath, name=None, value=None):
    """Store a (name, value) Record via Cloud NDB and acknowledge with "OK"."""
    client = ndb.Client()
    # NDB datastore operations must run inside a client context.
    with client.context():
        Record(name=name, value=value).put()
    return "OK"
def retrieve(self, id: str) -> Optional[Record]:
    """Return the Record stored under `id`, or None when it is absent.

    Uses a single EAFP dict access instead of the original's `in` check plus
    `.get()` (two hash lookups for the same key).
    """
    data = self._load_database()
    try:
        raw = data[id]
    except KeyError:
        return None
    return Record.from_dict(id, raw)
def save(self, record: Record) -> Record:
    """Persist `record` into the JSON database file and return it.

    Bug fix: the signature promises a Record return value but the original
    fell off the end and returned None.
    """
    data = self._load_database()
    data[record.id] = record.to_dict()
    with open(self._file_path, 'w') as json_file:
        json.dump(data, json_file, sort_keys=True, indent=2)
    return record
def add_chat_record(self, content):
    """Save one chat Record pairing `content` with the current user."""
    Record(content, self.user).save()