def pressed(self):
    """Handle a physical button press.

    Presses arriving faster than BUTTON_PRESS_THRESHOLD seconds apart are
    throttled (no chime/notification/camera). The LED update and the
    doorbell sound restart below run on every press regardless.
    """
    logger.info('Button has been pressed')
    delta = datetime.now() - self.__last_pressed
    # BUGFIX: use total_seconds() instead of .seconds — .seconds wraps at
    # one day, so a press arriving more than 24h after the previous one
    # could be wrongly throttled.
    if delta.total_seconds() >= int(config.get('BUTTON_PRESS_THRESHOLD')):
        chime = Chime()
        chime.start()
        notification = Notification()
        notification.start()
        camera = Camera()
        camera.start()
        self.__last_pressed = datetime.now()
    else:
        logger.debug('Button was pressed too quickly!')
    if not self._led_always_on:
        # Sundial mode: pressing inverts the resting LED state
        # (released() restores it).
        if Sundial().is_day():
            self.__led.on()
        else:
            self.__led.off()
    else:
        self.__led.off()
    # Stop play doorbell wav file (if any)
    Sender.send({
        'action': SoundReceiver.STOP,
        'file': 'doorbell'
    }, SoundReceiver.TYPE)
    # Start play doorbell wav file
    Sender.send({
        'action': SoundReceiver.START,
        'file': 'doorbell'
    }, SoundReceiver.TYPE)
def return_response(cls, data=None, status=200, message="", headers=None):
    """Build a Flask JSON response from data, status and message.

    Optionally narrows the payload via a ``?filters=a,b,c`` query param and
    serializes db.Model instances (or lists of them) through ``format()``.
    Every step is best-effort: failures are logged and the previous value
    of ``data`` is kept.

    BUGFIX: the mutable defaults ``data={}`` / ``headers={}`` were shared
    across calls; replaced with None sentinels (backward-compatible). Bare
    ``except:`` clauses narrowed to ``except Exception:`` so system-exiting
    exceptions propagate.
    """
    data = {} if data is None else data
    headers = {} if headers is None else headers
    try:
        logger.debug('return %s %s', status, message)
    except Exception:
        logger.error(traceback.format_exc())
    try:
        # Optional ?filters=a,b,c query param keeps only the listed keys.
        filters = request.args.get('filters')
        if filters:
            data = BaseDict(data).filter(*filters.split(','))
    except Exception:
        logger.error(traceback.format_exc())
    try:
        # Serialize single models or non-empty lists of models.
        if isinstance(data, db.Model):
            data = data.format()
        elif isinstance(data, list) and data and \
                isinstance(data[0], db.Model):
            data = [o.format() for o in data]
    except Exception:
        logger.error(traceback.format_exc())
    res = cls(
        data=data, status=status, message=message
    ).to_dict()
    return make_response(jsonify(res), status, headers)
def get_children(self, curr_num, curr_depth):
    """
    Recursively fetches the children of the current patent up to the maximum
    depth and add to the edge list

    :param curr_num: the current patent being munged
    :param curr_depth: the current depth away from the root patent number
    """
    if curr_depth == 1:
        # Each depth-1 call corresponds to one branch off the root patent.
        self.completed_branches += 1
    info = self.query({
        "q": {
            "patent_number": curr_num
        },
        "f": self.query_fields
    })
    if curr_depth == 0:
        logger.debug("Exploring {} branches".format(
            len(info['patents'][0]['citedby_patents'])))
    if curr_depth > self.depth:
        return
    if info.get('patents') is not None:
        # TODO: include bcites, and recurse once more to get bcites for the
        # leaves but not fcites
        df = self.query_to_dataframe(info, bcites=False)
        if self.df is None:
            self.df = df
        else:
            # NOTE(review): DataFrame.append is removed in pandas >= 2.0;
            # switch to pd.concat when upgrading.
            self.df = self.df.append(df, ignore_index=True)
        # iterate through all children, recursively
        if curr_depth + 1 < self.depth:
            for patent in info['patents']:
                # BUGFIX: citedby_patents may be absent/None for leaf
                # patents; guard so the for-loop doesn't raise TypeError.
                for fcite in patent.get('citedby_patents') or []:
                    self.get_children(fcite['citedby_patent_number'],
                                      curr_depth + 1)
def send_request(self, url, additional_headers=None):
    """Check for erfpacht at MijnErfpacht based on a BSN

    :param url: fully-qualified endpoint to GET
    :param additional_headers: optional dict merged over the API-key header
    :returns: the requests Response object on success
    :raises Exception: when the api key is missing, on 403, or any 4xx
    """
    if not API_KEY:
        raise Exception(
            "No api key found in environment variables or key is None/empty string"
        )
    headers = {"X-API-KEY": API_KEY}
    if additional_headers:
        headers.update(additional_headers)
    res = requests.get(url, headers=headers, timeout=ERFPACHT_API_REQUEST_TIMEOUT)
    logger.debug("Response status: {}, text: {}".format(
        res.status_code, res.text))
    if res.status_code == 403:
        raise Exception(
            "Unable to authenticate to source API. Check if the provided api key is correct and if you are making the request through a whitelisted environment (e.g. secure VPN)."
        )
    # Handle 400 range responses
    # BUGFIX: compare numerically instead of inspecting the first character
    # of the stringified status code.
    if 400 <= res.status_code < 500:
        raise Exception(
            "The source API responded with 4xx status code, saying: {}".
            format(res.text))
    return res
def statistics_article(cls):
    """Run ``crawler_self()`` on every queried article, logging elapsed time."""
    started = time.time()
    for article in Article.query_items():
        article.crawler_self()
    logger.debug('statistics_article time: %s', (time.time() - started))
def query_data(self):
    """Fetch the citation tree rooted at this patent and log branch stats."""
    # TODO - also query patent features and include as attributes in network
    timer = Timer("Fetching children recursively")
    self.get_children(self.patent_number, 0)
    logger.debug("Examined {} branches".format(self.completed_branches))
    timer.log()
def test_forecasting(endpoint, bin_size=20, relative_series=False):
    """Regression-test forecasting models on cached patent time-series data.

    :param endpoint: which regression to run — "arima" or "pooled"
    :param bin_size: time-bin width in weeks
    :param relative_series: forwarded to the ARIMA regression
    """
    cache_name = os.path.abspath("data/regression/forecasting_cache.pkl")
    # Dataset keys snapshotted on 25 Mar 2019.
    keys = [
        "engines_25Mar19", "radio_25Mar19", "robots_25Mar19",
        "transportation_25Mar19", "xray_25Mar19", "coherent-light_25Mar19"
    ]
    bin_size_weeks = np.timedelta64(bin_size, 'W')
    try:
        # Reuse the transformed frame cached by a previous run.
        df = pickle.load(open(cache_name, 'rb'))
    except FileNotFoundError:
        df = FeatureTransformer(get_stacked_df(keys, endpoint="TIME-DATA")).fit_transform()
        pickle.dump(df, open(cache_name, 'wb'))
    # Endogenous series: log forward citations per patent over time.
    df_endog = df[["log(knowledge_forward_cites)", "t", "patent_date"]]
    features, protected = get_features(True, df)
    # Exogenous regressors taken at the first time bin only (t == 0).
    df_exog = df[features][df["t"] == 0]
    logger.debug(features)
    if endpoint == "arima":
        regress_arima(df_endog, bin_size_weeks, relative_series)
    if endpoint == "pooled":
        # regress_pooled(df_endog, df_exog, bin_size_weeks)
        entity_res = fit_write(None, "entity")
        plot_coeffs(entity_res)
def run(self):
    """Capture a snapshot (motionEye HTTP or local webcam binary), send a
    notification with it, and install it as the WebRTC call background.

    Any failure is logged and swallowed — this is best-effort.
    """
    try:
        # BUGFIX: mkstemp() returns an open OS-level file descriptor that
        # was previously discarded, leaking one fd per capture. Close it —
        # only the path is needed here.
        fd, temp_path = mkstemp()
        os.close(fd)
        if config.get('USE_MOTION') is True:
            response = requests.get(config.get('MOTION_EYE_SNAPSHOT_URL'))
            buffered_reader = BufferedReader(BytesIO(response.content))
            logger.debug('Image retrieved from motionEye')
            with open(temp_path, 'wb') as capture:
                capture.write(response.content)
        else:
            command = [
                config.get('WEBCAM_BIN'),
                '--device', config.get('WEBCAM_DEVICE', '/dev/video0'),
                '--resolution', config.get('WEBCAM_RESOLUTION', '1920x1080'),
                '--no-banner',
                '--delay', config.get('WEBCAM_DELAY', '2'),
                '--rotate', config.get('WEBCAM_ROTATE', '180'),
                '--jpeg', config.get('WEBCAM_JPG_COMP', '80'),
                temp_path
            ]
            # Suppress webcam tool output unless we are debugging.
            if logger.level > logging.DEBUG:
                command.insert(1, '--quiet')
            call(command)
            buffered_reader = open(temp_path, 'rb')
            logger.debug('Image {} captured from webcam'.format(temp_path))
        notification = Notification(buffered_reader)
        notification.run()
        # Move picture in place as background for WebRTC call.
        destination_folder = os.path.join(
            config.ROOT_PATH, 'app', 'www', 'mobile', 'static', 'img'
        )
        # Create destination folder if it does not exist
        if not os.path.exists(destination_folder):
            os.mkdir(destination_folder)
        destination = os.path.join(
            destination_folder, 'capture.jpg'
        )
        move(temp_path, destination)
    except Exception as e:
        logger.error('Camera Helper: {}'.format(str(e)))
    return
def load(self, *kwargs):
    """Load a cached ``(df_endog, params)`` pair from ``self.cache``.

    NOTE(review): the signature uses ``*kwargs`` (a tuple of positionals),
    not ``**kwargs`` — kept as-is because the stored params are compared
    against that tuple; confirm against callers.

    :returns: the cached endogenous frame
    :raises ValueError: when the cached params differ from ``kwargs``
    """
    logger.debug("Attempting to load from cache {}".format(self.cache))
    # BUGFIX: close the cache file deterministically instead of relying on
    # GC to collect the anonymous open() handle.
    with open(self.cache, 'rb') as fh:
        df_endog, stored = pickle.load(fh)
    if stored != kwargs:
        # logger.warn is deprecated; warning() is the supported spelling.
        logger.warning("Load failed due to param mismatch, refitting")
        raise ValueError
    return df_endog
def __del__(self):
    """Release the button and LED GPIO handles on garbage collection."""
    button = self.__button
    if button:
        button.close()
        logger.debug("Button's GPIO is closed")
    led = self.__led
    if led:
        led.close()
        logger.debug("LED's GPIO is closed")
def identity(value):
    """Pass a JSON-compatible value through.

    Containers are returned unchanged, scalars are re-wrapped in their own
    class, anything else maps to None.
    """
    logger.debug('JSON {}'.format(value))
    if isinstance(value, (list, dict)):
        return value
    if isinstance(value, (str, bool, int, float)):
        return value.__class__(value)
    return None
def released(self):
    """Restore the LED to its resting state after a button release."""
    logger.debug('Button has been released')
    if self._led_always_on:
        self.__led.on()
        return
    # Sundial mode: the LED rests off by day and on by night.
    if Sundial().is_day():
        self.__led.off()
    else:
        self.__led.on()
def query_items(cls, **kw):
    """Return available, non-bot wxnacy.com visit rows for one date.

    :param kw: optional ``date`` (ISO string); defaults to today
    """
    query_date = kw.get('date', date.today().isoformat())
    logger.debug(query_date)
    condition = ('is_bot = 0 and is_available = 1 and '
                 'url like "%wxnacy.com%" and visit_date = :date')
    return (cls.query
            .filter(text(condition))
            .params(date=query_date)
            .order_by(desc(cls.create_ts))
            .all())
def delete_policy(policy: Policy, db: Session) -> bool:
    """Delete a policy row, mapping any DB failure to HTTP 404.

    :returns: True once the deletion is committed
    :raises HTTPException: 404 when the delete/commit fails
    """
    try:
        db.delete(policy)
        db.commit()
        return True
    except Exception as exc:
        logger.debug(exc)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                            detail="not found")
def handle_external(self):
    """Strip citation edges referencing patents outside the original query.

    When ``allow_external`` is False the network keeps only edges whose
    both endpoints appear in ``self.queried_numbers``.
    """
    if self.allow_external:
        return
    # now erase any edges containing patents that aren't in the original
    # query (the source list); this limits the network to only patents that
    # were in the original query
    size_before = self.df.size
    logger.debug("Size before {}".format(size_before))
    src_in = self.df[self.get_citation_keys()[0]].isin(self.queried_numbers)
    dst_in = self.df[self.get_citation_keys()[1]].isin(self.queried_numbers)
    self.df = self.df[src_in & dst_in]
    logger.debug("Stripped {} external cites".format(
        size_before - self.df.size))
def run(self):
    """Pulse the buzzer ``self.__times`` times, then let the thread end."""
    try:
        logger.debug('Ring bell, ring bell...')
        for _ in range(self.__times):
            self.__buzzer.on()
            sleep(self.PAUSE_BETWEEN_STATES)
            self.__buzzer.off()
            sleep(self.PAUSE_BETWEEN_STATES)
    except Exception as exc:
        logger.error(exc)
    return  # Close thread
def __call__(self, user: User = Depends(get_current_active_user)):
    """FastAPI dependency: ensure the user's role grants every required
    permission; return the user or raise HTTP 403."""
    for permission in self.permissions:
        # Actions the user's role allows on this permission's resource.
        granted_actions = [
            policy.permission.action
            for policy in user.role.policies
            if policy.permission.resource.name == permission.resource
        ]
        if permission.action in granted_actions:
            continue
        logger.debug(
            f"user with role {user.role} not allowed to perform {permission.action} on {permission.resource}"
        )
        raise HTTPException(status_code=403,
                            detail="operation not permitted")
    return user
def query_rank(cls, **kw):
    """Rank URLs by visit count for a given day.

    :param kw: optional ``day``; defaults to today
    """
    logger.debug(kw)
    target_day = kw.get('day', date.today())
    rank_col = func.count(cls.url).label('rank')
    # NOTE(review): `cls.rank.name.desc()` looks odd (ordering via the
    # label's name attribute) — confirm the intended ordering expression.
    return (db.session.query(cls.url, rank_col)
            .filter(cls.visit_date == target_day)
            .group_by(cls.url)
            .order_by(cls.rank.name.desc())
            .all())
def delete_otp(revoke_code: str, db: Session):
    """Delete the OTP row matching ``revoke_code``.

    :raises HTTPException: 404 when the code cannot be found or deleted
    """
    try:
        statement = select(OTP).filter_by(revoke_code=revoke_code)
        otp = db.execute(statement).scalar()
        db.delete(otp)
        db.commit()
    except Exception as e:
        # BUGFIX: the previous IntegrityError and Exception handlers were
        # byte-identical, and `logger.debug(e, e.__class__.__name__)`
        # misused lazy %-formatting (the exception text has no %s
        # placeholder). Collapsed into one handler with a valid call.
        logger.debug("%s: %s", e.__class__.__name__, e)
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND,
                            detail="code not found.")
def update_role(role_id: int, role_data: RoleModelUpdate, db: Session):
    """Apply ``role_data`` to the role with ``role_id``.

    :returns: the refreshed Role row
    :raises HTTPException: 500 on any DB failure
    """
    statement = (update(Role)
                 .where(Role.id == role_id)
                 .values(**role_data.dict()))
    try:
        db.execute(statement)
        db.commit()
        return db.execute(select(Role).where(Role.id == role_id)).scalar()
    except Exception as exc:
        logger.debug(exc)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="unknown error.")
def cron(cls):
    """
    To be called by `app.threads.cron.Cron()`.
    See `CRON_TASKS` in `app.config.default.py:DefaultConfig`

    Hangs up calls whose heartbeat is older than twice the configured
    interval ("dead" calls).
    """
    heartbeat_interval = config.get('WEBRTC_CALL_HEARTBEAT_INTERVAL')
    cutoff = datetime.now() - timedelta(seconds=heartbeat_interval * 2)
    stale_calls = Call.select().where(Call.status == Call.ON_CALL,
                                      Call.modified_date < cutoff)
    if stale_calls.exists():
        logger.debug('Dead calls have been found')
        cls.hang_up()
    return
def parse_literal(ast):
    """Convert a GraphQL AST literal into the equivalent Python value.

    Strings/booleans pass through, numerics are coerced, lists and objects
    recurse; unknown node types map to None.
    """
    logger.debug('JSON {}'.format(ast))
    if isinstance(ast, (StringValue, BooleanValue)):
        return ast.value
    if isinstance(ast, IntValue):
        return int(ast.value)
    if isinstance(ast, FloatValue):
        return float(ast.value)
    if isinstance(ast, ListValue):
        return [JSON.parse_literal(item) for item in ast.values]
    if isinstance(ast, ObjectValue):
        return {
            field.name.value: JSON.parse_literal(field.value)
            for field in ast.fields
        }
    return None
def update_policy(name: str, policy_data: PolicyModelUpdate, db: Session):
    """Apply ``policy_data`` to the policy named ``name``.

    :returns: the refreshed Policy row
    :raises HTTPException: 500 on any DB failure
    """
    statement = (update(Policy)
                 .where(Policy.name == name)
                 .values(**policy_data.dict()))
    try:
        db.execute(statement)
        db.commit()
        return db.execute(
            select(Policy).where(Policy.name == name)).scalar()
    except Exception as exc:
        logger.debug(exc)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="unknown error.")
def update_user(username: str, user_data: UserModelUpdate, db: Session):
    """Apply ``user_data`` to the user with ``username``.

    :returns: the refreshed User row
    :raises HTTPException: 500 on any DB failure
    """
    statement = (update(User)
                 .where(User.username == username)
                 .values(**user_data.dict()))
    try:
        db.execute(statement)
        db.commit()
        return db.execute(
            select(User).where(User.username == username)).scalar()
    except Exception as exc:
        logger.debug(exc)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="unknown error.")
def query_by(cls, **kw):
    """Generic paginated query helper.

    :param kw: ``order_by`` (default 'create_ts desc'), ``filter_by`` raw
        SQL text, ``params`` bind parameters, ``page``/``per_page``
        pagination controls
    :returns: a Flask-SQLAlchemy Pagination object
    """
    logger.debug(kw)
    order_by = kw.pop('order_by', 'create_ts desc')
    # BUGFIX: removed the dead `if 'order_by' in kw` re-read — pop() above
    # already removed the key, so that branch could never execute.
    filter_by = kw.pop('filter_by', '')
    params = kw.pop('params', {})
    page = kw.pop('page', 1)
    per_page = kw.pop('per_page', 10)
    logger.debug(order_by)
    items = cls.query.filter(text(filter_by)).params(params).order_by(
        order_by).paginate(page, per_page, False)
    return items
def _statistics(vd):
    """Aggregate per-URL visitor-log page views for date ``vd`` into
    per-article daily rows.

    NOTE(review): references ``cls`` without defining it — presumably this
    function is nested inside a classmethod and closes over ``cls``;
    confirm against the full file.

    :param vd: the visit_date to aggregate
    """
    begin = time.time()
    VL = VisitorLog
    # Count visits per URL for the given day.
    items = db.session.query(VL.url, func.count(
        VL.url)).filter(VL.visit_date == vd).group_by(VL.url).all()
    for item in items:
        # item is a (url, count) row.
        article = Article.query_or_create(url=item[0])
        if article:
            ad = cls.query_or_create(article_id=article.id, visit_date=vd)
            ad.pv = item[1]
            db.session.add(ad)
    db.session.commit()
    logger.debug('statistics_article_data time: %s', (time.time() - begin))
def fit(self, df_endog, bin_size_weeks, n, ascending=True):
    """Reshape per-patent citation series into a fixed-length wide frame.

    Each patent's series is padded forward to the global max date by
    repeating its last observed row, sorted, trimmed to ``n`` rows, and the
    per-patent columns are concatenated side by side.

    :param df_endog: frame with ``patent_date``, ``t`` and
        ``log(knowledge_forward_cites)`` columns — assumed schema; TODO
        confirm against the transformer that produces it
    :param bin_size_weeks: np.timedelta64 width of one time bin
    :param n: number of time bins retained per patent
    :param ascending: sort direction of each patent's time axis
    :returns: wide DataFrame, one column per retained patent series
    """
    df_endog["i"] = ForecastingTransformer.entity_id(df_endog)
    # Convert the bin index t into an absolute timestamp.
    df_endog["t"] = df_endog.patent_date + df_endog.t * bin_size_weeks
    df_endog = df_endog.set_index(
        [df_endog.index.get_level_values(0), "i", "t"]
    ).drop(columns="patent_date")
    # iterate through patents ("i" index) with groupby
    # for each patent
    # 1. extend the maximum date to the maximum end-of-data date
    # 2. join the dataframes on the date index up to n weeks ago
    date_max = df_endog.index.get_level_values("t").max()
    manager = enlighten.get_manager()
    ticker = manager.counter(total=df_endog.index.get_level_values("i").nunique(),
                             desc='Patent Samples Transformed',
                             unit='patents')
    subs = []
    for date, subdf in df_endog.groupby(level="i"):
        local_max = subdf.index.get_level_values("t").max()
        # Padding rows needed to reach the global max date.
        num_new_vals = int((date_max - local_max) / bin_size_weeks) + 1
        subdf = subdf.reset_index().drop(["i", "level_0"], 1)
        # Repeat the last observed row for each padded bin.
        vals = np.full((num_new_vals, subdf.shape[1]), subdf[-1:].values)
        index = np.array(
            [pd.Timestamp(np.datetime64(local_max + (i + 1) * bin_size_weeks))
             for i in range(num_new_vals - 1)] + [date_max]
        )
        vals[:, 0] = index
        df_append = pd.DataFrame(
            data=vals,
            columns=["t", "log(knowledge_forward_cites)"]
        )
        subdf = subdf.append(df_append).set_index("t").sort_index(
            level="t", ascending=ascending).reset_index(drop=True)
        # Only keep patents with at least n bins after padding.
        if subdf.shape[0] >= n:
            subs.append(subdf.head(n))
        ticker.update()
    ticker.close()
    # One column per patent series, renumbered 0..k-1.
    df_endog = pd.concat(subs, axis=1)
    df_endog.columns = range(df_endog.shape[1])
    logger.debug(df_endog.describe())
    df = df_endog
    logger.debug("Loaded transformed endogenous set")
    return df
def update_permission(permission_id: int,
                      permission_data: PermissionModelUpdate, db: Session):
    """Apply ``permission_data`` to the permission with ``permission_id``.

    :returns: the refreshed Permission row
    :raises HTTPException: 500 on any DB failure
    """
    statement = (update(Permission)
                 .where(Permission.id == permission_id)
                 .values(**permission_data.dict()))
    try:
        db.execute(statement)
        db.commit()
        return db.execute(
            select(Permission).where(
                Permission.id == permission_id)).scalar()
    except Exception as exc:
        logger.debug(exc)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="unknown error.")
def activate_led(self, cron_mode=False):
    """Drive the button LED according to the configured mode.

    Always-on mode: turn the LED on (skipped when called from cron).
    Sundial mode: off by day, on by night, acting only when the day/night
    state has changed since the last check.
    """
    if self._led_always_on:
        if cron_mode is not True:
            logger.debug('LED should be always on, turn it on')
            self.__led.on()
        return
    sundial = Sundial()
    is_day = sundial.is_day()
    if self.__is_day == is_day:
        # No day/night transition since the last check — nothing to do.
        return
    if sundial.mode == Sundial.DAY:
        logger.info("Day mode: Turn button's LED off")
        self.__led.off()
    else:
        logger.info("Night mode: Turn button's LED on")
        self.__led.on()
    self.__is_day = is_day
def read(cls, message, last_time_received):
    """Handle an incoming doorbell message.

    Messages arriving faster than BUTTON_PRESS_THRESHOLD seconds apart are
    ignored; back-door messages trigger a Telegram notification and the
    back-door chime.

    :param message: dict with a 'timestamp' key and optionally 'device'
    :param last_time_received: datetime of the previous accepted message
    :returns: False when the message lacks required keys, otherwise None
    """
    try:
        message_datetime = datetime.fromtimestamp(message['timestamp'])
        delta = message_datetime - last_time_received
        # BUGFIX: use total_seconds() instead of .seconds — .seconds wraps
        # at one day, so a message arriving more than 24h later could be
        # wrongly throttled.
        if delta.total_seconds() >= int(config.get('BUTTON_PRESS_THRESHOLD')):
            if message.get('device') == config.get(
                    'BACK_DOORBELL_DEVICE_MAC'):
                telegram = Notification(front_door=False)
                telegram.start()
                chime = Chime(
                    times=config.get('BACK_DOORBELL_RINGS_NUMBER'))
                chime.run()  # No need to run it as thread
        else:
            logger.debug('Relax dude! Stop pushing the button')
    except KeyError:
        logger.error('Message is invalid.')
        return False