def get_submits(self):
    start, size = 0, 50
    while True:
        url = httputil.url_concat(self.status_url, self._gen_status_params(start, size))
        response = yield self.fetch(url)
        res = json.loads(response.body.decode('utf-8'))
        status_data = res['aaData']
        if len(status_data) == 0:
            break
        status_list = []
        for row in status_data:
            run_time = row[5][:-3] if row[5] != '' else '-1'
            memory = row[6][:-3] if row[6] != '' else '-1'
            status = {
                'type': DataType.Submit,
                'account': self.account,
                'status': submit.SubmitStatus.BROKEN,
                'run_id': row[1],
                'pro_id': row[2],
                'result': row[3],
                'lang': row[4],
                'run_time': run_time,
                'memory': memory,
                'submit_time': row[8],
                'code': None
            }
            status_list.append(status)
        logger.debug('{} {} Success to get {} new status'.format(
            self.TAG, self.account, len(status_list)))
        self.put_queue(status_list)
        start += size
async def get_subjects_json(subjects, term, cookies, page):
    """Gets the JSON representation of each subject that is crawled.

    :param subjects: List of subjects
    :param term: Term dictionary containing code and description
    :param cookies: Page cookies
    :param page: Pyppeteer page
    :return: JSON list of the subjects crawled
    """
    subjects_json = []
    for idx, subject in enumerate(subjects):
        logger.debug(
            "Crawling subject",
            extra={
                "subject": subject["description"],
                "subjectIndex": idx + 1,
                "totalSubjects": len(subjects),
                "term": term["description"],
            },
        )
        unique_session_id = await pyppeteer.get_unique_session_id(page)
        authenticate_current_session(term, unique_session_id, cookies)
        sched_json = get_schedule_json(subject, term, unique_session_id, cookies)
        if "data" in sched_json:
            subjects_json.append(sched_json["data"])
        else:
            logger.warning(
                "No course data found.", extra={"subject": subject["description"]}
            )
    return subjects_json
def get_slot(name):
    try:
        slot = intent.slots[name].value
    except (KeyError, TypeError):
        logger.debug(f"failed to read value of slot '{name}'")
        slot = ''
    return slot
async def create_user(self, name, email, password) -> User:
    sql = "INSERT INTO users (id, name, email, password, created) VALUES ($1, $2, $3, $4, $5);"
    user_id = uuid4()
    hashed = hashpw(password.encode("utf8"), gensalt()).decode("utf8")
    created = datetime.utcnow()
    try:
        async with self.pool.acquire() as con:  # type: Connection
            await con.execute(sql, user_id, name, email, hashed, created)
    except UniqueViolationError as exc:
        logger.debug(str(exc))
        logger.warning("Tried to create user: " + name + " but e-mail: " + email + " was already in use")
        return None
    sql = 'INSERT INTO placements ("user", points, level) VALUES ($1, $2, $3);'
    async with self.pool.acquire() as con:  # type: Connection
        await con.execute(sql, user_id, 0, 1)
    email_dao = EmailDao(self.pool)
    link = await email_dao.create_email_verify_link(email)
    await send_email(email, "Welcome to Crew DB", "Please confirm that this is your e-mail.", True, link)
    user = User()
    user.id = user_id
    user.name = name
    user.email = email
    user.created = created
    return user
async def verify_email_by_link(self, link: UUID) -> bool:
    sql = 'SELECT "email" FROM email_verify_links WHERE link = $1;'
    async with self.pool.acquire() as con:  # type: Connection
        row = await con.fetchrow(sql, link)
    if row is None or row["email"] is None or row["email"] == "":
        logger.debug('Verify ID: "' + str(link) + '" was not associated with any email')
        return False
    email = row["email"]
    sql = 'INSERT INTO verified_emails ("email") VALUES ($1);'
    try:
        async with self.pool.acquire() as con:  # type: Connection
            await con.execute(sql, email)
    except Exception as e:
        logger.error('Failed to insert email "' + email + '" into database table "verified_emails"')
        logger.error(str(e))
        return False
    logger.debug('E-mail: "' + email + '" was verified')
    sql = 'DELETE FROM email_verify_links WHERE "email" = $1;'
    async with self.pool.acquire() as con:  # type: Connection
        await con.execute(sql, email)
    return True
def save_to_database(data):
    committee_preferred = data["committee_preferred"]
    choice = data["choice"]
    email = data["email"]
    name = data["name"]
    senate_division = data["senate_division"]
    department_name = data["department"]
    job_title = data["job_title"]
    is_interested = data["is_interested"]
    expertise = data["expertise"]

    logger.debug(f"Saving committee preferences for {name}")

    committee_id = None
    if committee_preferred:
        committee_record = CommitteeManager.add_committee(committee_preferred)
        committee_id = committee_record.committee_id

    department_record = DepartmentManager.add_department(department_name)
    department_id = department_record.department_id

    SenateDivisionManager.add_senate_division(senate_division)

    faculty_record = FacultyManager.add_faculty(name, email, job_title, senate_division)
    faculty_email = faculty_record.email

    SurveyChoiceManager.add_survey_choice(faculty_email, committee_id, choice)
    SurveyDataManager.add_survey_data(faculty_email, is_interested, expertise)
    DepartmentAssociationsManager.add_department_association(email, department_id)
def _collect_essid_hamming(essid: str, hamming_dist_max=1):
    essid_hamming = set()
    essid_hamming.update(hamming_ball(s=essid, n=hamming_dist_max))
    essid_hamming.update(hamming_ball(s=essid.lower(), n=hamming_dist_max))
    logger.debug(
        f"Essid {essid} -> {len(essid_hamming)} hamming cousins with dist={hamming_dist_max}"
    )
    return essid_hamming
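# A minimal standalone sketch of what `hamming_ball` is assumed to provide here
# (the real helper lives elsewhere in this project): every string within
# Hamming distance `n` of `s`, substituting characters from a fixed alphabet.
import itertools
import string

def hamming_ball_sketch(s: str, n: int = 1,
                        alphabet: str = string.ascii_letters + string.digits) -> set:
    ball = {s}  # distance 0
    for positions in itertools.combinations(range(len(s)), n):
        for replacements in itertools.product(alphabet, repeat=n):
            chars = list(s)
            for pos, ch in zip(positions, replacements):
                chars[pos] = ch  # may re-insert the original char, covering dist < n
            ball.add(''.join(chars))
    return ball

# e.g. hamming_ball_sketch("abc") contains "abc", "Abc", "aXc", "ab9", ...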
def process(self, handler_input, response):
    if os.environ['log_level'] == 'debug' and os.environ.get('LOG_ALL_EVENTS', 'false') == 'true':
        if 'AWS_EXECUTION_ENV' in os.environ:
            logger.debug(f"Response: {response}")
        else:
            logger.debug(f"Response: {json.dumps(response.to_dict(), indent=2)}")
    handler_input.attributes_manager.session_attributes = session.attributes
def get_submits(self):
    first = ''
    while True:
        status_list = yield self.fetch_status(first)
        if not status_list:
            break
        logger.debug('{} {} Success to get {} new status'.format(self.TAG, self.account, len(status_list)))
        self.put_queue(status_list)
        first = int(status_list[-1]['run_id']) - 1
def get_suggestion(title, req_year=None):
    if not session.attributes['zooqle']['suggestions']:
        logger.debug(f'searching zooqle for {title}')
        session.attributes['zooqle']['suggestions'] = get_zooqle_suggestions(title, req_year=req_year)
    try:
        return session.attributes['zooqle']['suggestions'][0]
    except IndexError:
        return None
def handle(self, handler_input):
    """(STATEMENT) Handles the 'cancel' built-in intent."""
    logger.debug('CancelIntent')
    speech = render_template('cancel_bye')
    handler_input.response_builder.speak(speech).set_should_end_session(True)
    return handler_input.response_builder.response
def handle(self, handler_input):
    """(QUESTION) Handles the 'help' built-in intent.

    You can provide context-specific help here by rendering templates
    conditional on the help referrer.
    """
    logger.debug('HelpIntent')
    speech = render_template('help_text')
    handler_input.response_builder.speak(speech)
    return handler_input.response_builder.response
def _run_essid_hamming(self, hcap_fpath_essid: Path, essid: str, hamming_dist_max=2):
    essid_hamming = set()
    essid_hamming.update(hamming_ball(s=essid, n=hamming_dist_max))
    essid_hamming.update(hamming_ball(s=essid.lower(), n=hamming_dist_max))
    logger.debug(f"Essid {essid} -> {len(essid_hamming)} hamming cousins with dist={hamming_dist_max}")
    with tempfile.NamedTemporaryFile(mode='w') as f:
        f.write('\n'.join(essid_hamming))
        f.flush()  # make sure the wordlist is on disk before hashcat reads it
        hashcat_cmd = self.new_cmd(hcap_file=hcap_fpath_essid)
        hashcat_cmd.add_wordlists(f.name)
        subprocess_call(hashcat_cmd.build())
def _crack_async(attack: CapAttack):
    """
    Called in background process.

    :param attack: hashcat attack to crack uploaded capture
    """
    attack.check_not_empty()
    attack.run_all()
    attack.read_key()
    logger.info(f"Finished cracking {attack.file_22000}")
    for name, timer in attack.timers.items():
        elapsed = timer['elapsed'] / timer['count']
        logger.debug(f"Timer {name}: {elapsed:.2f} sec")
def get_db(request: Request) -> Session:
    """Fetch the database session.

    Args:
        request (Request): request

    Returns:
        Session: the DB session held on the request state
    """
    logger.debug(f'request: {request}')
    db = request.state.db
    logger.debug(f'db: {db}')
    return db
def download(self):
    if self.path is None or self.path.exists():
        return
    if self.url is None:
        return
    gzip_file = self.url.split('/')[-1]
    gzip_file = self.path.with_name(gzip_file)
    logger.debug(f"Downloading {gzip_file}")
    while calculate_md5(gzip_file) != self.checksum:
        subprocess_call(['wget', self.url, '-O', gzip_file])
    with lock_app:
        subprocess_call(['gzip', '-d', gzip_file])
    logger.debug(f"Downloaded and extracted {self.path}")
def convert_hash(target: str, encode: str = "utf-8") -> str:
    """Convert a string to its hash.

    Args:
        target (str): string to be hashed
        encode (str, optional): encoding of the string. Defaults to "utf-8".

    Returns:
        str: the hashed string (SHA-256 hex digest)
    """
    logger.debug(f'target: {target}')
    hash_str = hashlib.sha256(target.encode(encode)).hexdigest()
    logger.debug(f'hash_str: {hash_str}')
    return hash_str
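# Usage example: the helper above returns the SHA-256 hex digest of the encoded
# input, so the result is always 64 hex characters long.
import hashlib

assert convert_hash("hello") == hashlib.sha256("hello".encode("utf-8")).hexdigest()
assert len(convert_hash("hello")) == 64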
def is_accessible(self):
    if current_user.is_anonymous:
        logger.warning('anonymous user is trying to access the admin panel')
        return False
    if not current_user.is_active or not current_user.is_authenticated:
        logger.warning('someone is trying to break into the admin panel')
        return False
    logger.debug('UsersReportAdmin current_user: {}'.format(current_user.id))
    # if current_user.has_role('superuser'):
    #     return True
    return True
def get_code(self, run_id):
    url = self.source_code_prefix.format(run_id)
    logger.debug(url)
    try:
        response = yield self.load_page(url, {'cookie': self.cookie})
        if not response:
            return False
        soup = self.get_lxml_bs4(response.body)
        pre_node = soup.find('pre')
        if not pre_node:
            return False
        logger.debug("fetch code {} success".format(run_id))
        return pre_node.text
    except Exception as ex:
        logger.error(ex)
        logger.error("{} fetch {}'s {} code error".format(self.TAG, self.account, run_id))
def subprocess_call(args: List[str]):
    """
    :param args: shell args
    """
    args = list(map(str, args))
    logger.debug(">>> {}".format(' '.join(args)))
    if not all(args):
        raise ValueError(f"Empty arg in {args}")
    completed = subprocess.run(args, universal_newlines=True,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if completed.stderr or completed.returncode != 0:
        logger.debug(completed.stdout)
        logger.error(completed.stderr)
    return completed.stdout, completed.stderr
def check_uuid(uuid: Union[UUID, str]) -> Union[UUID, None]:
    if uuid is None:
        logger.warning("UUID is None")
        return None
    if type(uuid) is str:
        try:
            uuid = UUID(uuid)
        except ValueError as exc:
            logger.debug(exc)
            logger.warning("Badly formatted UUID string: " + uuid)
            return None
    elif type(uuid) is not UUID:
        logger.warning("UUID is wrong type: " + type(uuid).__name__)
        return None
    return uuid
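# Usage example: a UUID instance passes through, a well-formed string is
# parsed, and a malformed string is logged and mapped to None.
from uuid import UUID

assert check_uuid(UUID('123e4567-e89b-12d3-a456-426614174000')) == UUID('123e4567-e89b-12d3-a456-426614174000')
assert check_uuid('123e4567-e89b-12d3-a456-426614174000') == UUID('123e4567-e89b-12d3-a456-426614174000')
assert check_uuid('not-a-uuid') is None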
def subprocess_call(args: List[str]):
    """
    :param args: shell args
    """
    args = list(map(str, args))
    logger.debug(">>> {}".format(' '.join(args)))
    if not all(args):
        raise ValueError(f"Empty arg in {args}")
    process = subprocess.Popen(args, universal_newlines=True,
                               stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = process.communicate()
    if err or process.returncode != 0:
        logger.debug(out)
        logger.error(err)
    return out, err
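# Usage example (POSIX): arguments are stringified before the call, so Path
# objects and ints work too; stdout/stderr come back as text.
out, err = subprocess_call(['echo', 'hello'])
assert out.strip() == 'hello' and err == ''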
def get_code(self, run_id):
    url = self.source_code_prefix.format(run_id)
    logger.debug(url)
    try:
        response = yield self.load_page(url, {'cookie': self.cookie})
        if not response:
            return False
        soup = self.get_lxml_bs4(response.body)
        pre_node = soup.find('pre')
        if not pre_node:
            return False
        logger.debug("fetch code {} success".format(run_id))
        return pre_node.text
    except Exception as ex:
        logger.error(ex)
        logger.error("{} fetch {}'s {} code error".format(self.TAG, self.account, run_id))
def send(from_email, to_email, subject, content, content_type="text/plain"):
    sg = SendGridAPIClient(apikey=settings.SENDGRID_API_KEY)
    from_email = Email(from_email)
    to_email = Email(to_email)
    subject = "[pystock] {}".format(subject)
    content = Content(content_type, content)
    mail = Mail(from_email, subject, to_email, content)
    try:
        response = sg.client.mail.send.post(request_body=mail.get())
    except Exception:
        response = False
    if response and response.status_code != 202:
        logger.debug("Failed to send message: {}. Status code {}".format(content, response.status_code))
    elif not response:
        logger.debug("Failed to send message: {}. response is {}".format(content, response))
def get_code(self, run_id):
    url = self.code_prefix.format(run_id)
    try:
        response = yield self.load_page(url, {'cookie': self.cookie})
        if not response:
            logger.error('{} {} Fail to load code {} page'.format(self.TAG, self.account, run_id))
            logger.error('{}: {}'.format(self.TAG, response))
            return False
        res = json.loads(response.body.decode('utf-8'))
        code = res['source']
        logger.debug('{} {} Success to load code {} page'.format(self.TAG, self.account, run_id))
        return unescape(code)
    except Exception as ex:
        logger.error("{} fetch {}'s {} code error {}".format(self.TAG, self.account, run_id, ex))
def get_code(self, run_id):
    url = self.source_code_prefix.format(run_id)
    try:
        response = yield self.load_page(url, {'Cookie': self.cookie})
        if not response:
            return False
        soup = self.get_lxml_bs4(response.body)
        code_area = soup.find('textarea', id='usercode')
        if not code_area:
            logger.error('{} {} Fail to load code {} page'.format(self.TAG, self.account, run_id))
            logger.error('{}: {}'.format(self.TAG, code_area))
            return False
        code = code_area.text
        logger.debug('{} {} Success to load code {} page'.format(self.TAG, self.account, run_id))
        return code
    except Exception as ex:
        logger.error(ex)
        logger.error("{} fetch {}'s {} code error".format(self.TAG, self.account, run_id))
def create_pivot_value_request_param(request_params: dict, postgrest_host: str) -> dict:
    """Find the pivot value for seek pagination.

    This should only happen if the user is sorting by anything other than
    `int_id` and wants to get data after a certain `int_id`.

    NOTE: This function performs an O(1) SELECT to find the pivot value.

    Args:
        request_params (dict): Original request params
        postgrest_host (str): URL to PostgREST

    Returns:
        dict: Request params with the pivot value added
    """
    try:
        split_sort = request_params['order'].split(",")
        int_id_q = request_params.get('int_id', None)
        split_int_id_q = int_id_q.split(".")
    except (KeyError, AttributeError):
        # no 'order' param, or no int_id filter to seek past
        return request_params
    if split_sort[0] != "int_id" and split_int_id_q[0] == "gt":
        sort_col = split_sort[0]
        try:
            int_id = int(split_int_id_q[1])
        except ValueError:
            logger.debug("Could not get int_id.")
            return request_params
        pivot_value_payload = {"int_id": int_id, "col": sort_col}
        resp = requests.post(urljoin(postgrest_host, "rpc/pivot_value"), data=pivot_value_payload)
        if resp.status_code >= 300:
            raise PostgrestHTTPException(resp)
        pivot_value = resp.json()
        if pivot_value:
            return {**request_params, sort_col: f"gte.{pivot_value}"}
    return request_params
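# Illustration with hypothetical values: a request sorted by "subject" that
# seeks past int_id 100 has the pivot value (here assumed to be "Math", as
# returned by the rpc/pivot_value endpoint for that row) added as a gte filter:
params_in = {"order": "subject,int_id", "int_id": "gt.100"}
params_out = {"order": "subject,int_id", "int_id": "gt.100", "subject": "gte.Math"}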
def process(self, handler_input):
    if os.environ['log_level'] == 'debug':
        try:
            if handler_input.request_envelope.request.object_type == 'IntentRequest':
                intent_name = handler_input.request_envelope.request.intent.name
            else:
                intent_name = handler_input.request_envelope.request.object_type
        except AttributeError:
            intent_name = ''
        logger.debug('intent: {}'.format(intent_name))
        if os.environ.get('LOG_ALL_EVENTS', 'false') == 'true':
            if 'AWS_EXECUTION_ENV' in os.environ:
                logger.debug(f"Incoming request\n{handler_input.request_envelope}")
            else:
                request = handler_input.request_envelope.to_dict()
                request['request']['timestamp'] = str(request['request']['timestamp'])
                logger.debug(f"Incoming request\n{json.dumps(request, indent=2)}")
    session.attributes = handler_input.attributes_manager.session_attributes
    session.user_id = handler_input.request_envelope.session.user.user_id
    try:
        intent.slots = handler_input.request_envelope.request.intent.slots
    except AttributeError:
        intent.slots = {}
def login():
    logger.debug('login url: {} {} {}'.format(flask.request.url, flask.request.method, flask.request.data))
    # Here we use a class of some kind to represent and validate our
    # client-side form data. For example, WTForms is a library that will
    # handle this for us, and we use a custom LoginForm to validate.
    form = LoginForm()
    if form.validate_on_submit():
        # Log in and validate the user.
        # user should be an instance of your `User` class
        login = flask.request.form['login']
        password = flask.request.form['password']
        user = get_user(login, password)
        if not user:
            logger.warning('Failed login attempt (login: {})'.format(login))
            return flask.render_template(
                flask.url_for('auth.login'),
                form=form,
                error='Incorrect login or password. Please try again.')
        flask_login.login_user(user)
        flask.flash('Logged in successfully.')
        logger.debug('Logged in successfully.')
        next = flask.request.args.get('next')
        logger.debug('Redirect: next({}) or {}'.format(next, flask.url_for('utmbill.utmpays_statistic')))
        # is_safe_url should check if the url is safe for redirects.
        # See http://flask.pocoo.org/snippets/62/ for an example.
        if not is_safe_url(next):
            return flask.abort(400)
        return flask.redirect(next or flask.url_for('index'))
    return flask.render_template('login.html', form=form)
def upload_to_bucket(contents):
    """Uploads contents to Cloud Storage bucket.

    :param contents: The contents to put in the bucket
    :return: None
    """
    assert isinstance(contents, dict), f"Expected dict but got {type(contents)}"
    storage_client = storage.Client()
    bucket_name = config.BUCKET_NAME
    bucket = storage_client.lookup_bucket(bucket_name)
    if bucket is None:
        bucket = storage_client.create_bucket(bucket_name)
        logger.debug("Bucket {} created.".format(bucket.name))
    else:
        logger.debug("Bucket {} already exists.".format(bucket.name))
    filename = utils.generate_filename()
    term_code = next(iter(contents))
    lambda_filename = write_lambda_file(filename, contents)
    blob = bucket.blob(filename)
    # uploads the file in the cloud function to cloud storage
    blob.upload_from_filename(lambda_filename)
    renamed_filename = f"{term_code}/{filename}"
    bucket.rename_blob(blob, renamed_filename)
    logger.debug("File {} uploaded to {}.".format(renamed_filename, bucket_name))
def _request(self, url, args=None):
    # avoid a mutable default argument; the dict is mutated below
    args = dict(args or {})
    if self.token:
        args["X-Plex-Token"] = self.token
    try:
        result = self.session.get("%s%s" % (self.url, url), params=args)
        logger.debug(u"PLEX => requested url: %(url)s" % {"url": url})
        logger.debug(u"PLEX => requests args: %s" % args)
        if result.status_code == 401 and config.PMS_USER != "username" and config.PMS_PASS != "password":
            logger.debug(u"PLEX => request failed, trying with auth")
            self.session.headers.update({'X-Plex-Client-Identifier': 'plexivity'})
            self.session.headers.update({'Content-Length': 0})
            self.session.auth = (config.PMS_USER, config.PMS_PASS)
            x = self.session.post("https://my.plexapp.com/users/sign_in.xml")
            if x.ok:
                user_json = xml2json(x.content, strip_ns=False)
                self.token = user_json["user"]["authentication-token"]
                args["X-Plex-Token"] = self.token
                logger.debug(u"PLEX => auth successful, requesting url %(url)s again" % {"url": url})
                result = self.session.get("%s%s" % (self.url, url), params=args)
            else:
                return False
        if result and "xml" in result.headers['content-type']:
            import xml.etree.ElementTree as ET
            #json = xml2json(result.content, strip_ns=False)
            return ET.fromstring(result.content)
        elif result.ok:
            return result.content
        else:
            logger.error(u"PLEX => there was an error with the request")
            return False
    except requests.ConnectionError:
        logger.error(u"PLEX => could not connect to server!")
        return False
def set_notified(db_key):
    logger.debug("setting %s to notified" % db_key)
    res = get_from_db(db_key)
    res.notified = 1
    db.session.merge(res)
    db.session.commit()
def notify(info):
    if "orig_user" in info and info["orig_user"] in config.EXCLUDE_USERS:
        logger.info("'%s' is set as an EXCLUDE_USER, I'm not sending a notification!" % info["orig_user"])
        return True

    # notify all providers with the given info...
    message = False  # stays False if the ntype is disabled or the template fails
    if info["ntype"] == "recentlyadded" and config.NOTIFY_RECENTLYADDED:
        try:
            message = config.RECENTLYADDED_MESSAGE % info
        except KeyError:
            logger.error("Unable to map info to your recently added notification string. Please check your settings!")
    elif info["ntype"] == "start" and config.NOTIFY_START:
        try:
            message = config.START_MESSAGE % info
        except KeyError:
            logger.error("Unable to map info to your start notification string. Please check your settings!")
    elif info["ntype"] == "stop" and config.NOTIFY_STOP:
        try:
            message = config.STOP_MESSAGE % info
        except KeyError:
            logger.error("Unable to map info to your stop notification string. Please check your settings!")
    elif info["ntype"] == "pause" and config.NOTIFY_PAUSE:
        try:
            message = config.PAUSE_MESSAGE % info
        except KeyError:
            logger.error("Unable to map info to your pause notification string. Please check your settings!")
    elif info["ntype"] == "resume" and config.NOTIFY_RESUME:
        try:
            message = config.RESUME_MESSAGE % info
        except KeyError:
            logger.error("Unable to map info to your resume notification string. Please check your settings!")
    elif info["ntype"] == "test":
        message = "plexivity notification test"

    status = False
    if message and config.USE_PPSCRIPTS:
        from app.providers import scripts
        scripts.run_scripts(info, message)

    if message:
        # only log notify args if it actually calls notify!
        logger.debug("notify called with args: %s" % info)
        if config.NOTIFY_PUSHOVER:
            from app.providers import pushover
            status = pushover.send_notification(message)
        if config.NOTIFY_PUSHBULLET:
            from app.providers import pushbullet
            status = pushbullet.send_notification(message)
        if config.NOTIFY_MAIL:
            from app.providers import mail
            status = mail.send_notification(message)
        if config.NOTIFY_BOXCAR:
            from app.providers import boxcar
            status = boxcar.send_notification(message)
        if config.NOTIFY_TWITTER:
            from app.providers import twitter
            status = twitter.send_notification(message)
        return status
    return False
def process_update(xml, session_id):
    # xml = ElementTree element
    z, sess, key = session_id.split("_")
    # check for valid xml
    if not xml.get("title"):
        return False
    if not xml.get("key"):
        return False
    status_change = False
    if session_id:
        ## get paused status -- needed for real time watched
        extra = ""
        p_counter = 0
        state = xml.find("Player").get("state")
        if "buffering" in state:
            state = "playing"
        p = db.session.query(models.Processed).filter(models.Processed.session_id == session_id).first()
        p_counter = p.paused_counter
        if not p_counter:
            p_counter = 0
        p_epoch = p.paused
        if p_epoch:
            prev_state = "paused"
        else:
            prev_state = "playing"
        if state and prev_state != state:
            #status_change = 1
            logger.debug("Video State: %s [prev: %s]" % (state, prev_state))
        cur = db.session.query(models.Processed).filter(models.Processed.session_id == session_id).first()
        now = datetime.datetime.now()
        if state and "paused" in state:
            if not p_epoch:
                extra = "%s, paused = %s" % (extra, now)
                logger.debug("Marking as Paused on %s [%s]" % (now, now))
                status_change = "pause"
                cur.paused = now
            else:
                p_counter += (now - p_epoch).total_seconds()
                # debug display, no update!
                logger.debug("Already marked as Paused on %s" % p_epoch)
        else:
            if p_epoch:
                sec = (now - p_epoch).total_seconds()
                p_counter += sec
                extra = "%s,paused = null" % extra
                extra = "%s,paused_counter = %s" % (extra, p_counter)
                logger.debug("removing Paused state and setting paused counter to %s seconds [this duration %s sec]" % (p_counter, sec))
                status_change = "resume"
            cur.paused = None
        cur.paused_counter = int(p_counter)
        logger.debug("total paused duration: %s [p_counter seconds]" % int(p_counter))
        cur.xml = ET.tostring(xml)
        cur.progress = "%.0f" % float(float(xml.get("viewOffset")) / float(xml.get("duration")) * 100)
        db.session.merge(cur)
        db.session.commit()
    return status_change
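# Worked illustration of the paused-counter arithmetic above (hypothetical
# timestamps): a play paused at 12:00:00 and resumed at 12:00:45 adds 45
# seconds to paused_counter and clears the paused marker.
import datetime

paused_at = datetime.datetime(2021, 1, 1, 12, 0, 0)
resumed_at = datetime.datetime(2021, 1, 1, 12, 0, 45)
assert (resumed_at - paused_at).total_seconds() == 45.0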
def get_from_db(session_id):
    logger.debug("loading database entry for %s" % session_id)
    return db.session.query(models.Processed).filter(models.Processed.session_id == session_id).first()
def task():
    p = plex.Server(config.PMS_HOST, config.PMS_PORT)
    live = p.currentlyPlaying()
    started = get_started()
    playing = dict()
    recentlyAdded = p.recentlyAdded()
    if len(recentlyAdded):
        logger.debug("processing recently added media")
        for x in recentlyAdded:
            check = db.session.query(models.RecentlyAdded).filter(models.RecentlyAdded.item_id == x.get("ratingKey")).first()
            if check:
                logger.debug("already notified for recently added '%s'" % check.title)
                continue
            xml = None
            if x.get("type") == "season" or x.get("type") == "episode":
                fullseason = p.episodes(x.get("ratingKey"))
                for ep in fullseason:
                    if x.get("addedAt") == ep.get("addedAt"):
                        xml = p.getInfo(ep.get("ratingKey")).find("Video")
            else:
                xml = p.getInfo(x.get('ratingKey')).find("Video")
            if not xml:
                logger.error("error loading xml for recently added entry")
                continue
            info = info_from_xml(xml, "recentlyadded", 1, 1, 0)
            info["added"] = datetime.datetime.fromtimestamp(float(x.get("addedAt"))).strftime("%Y-%m-%d %H:%M")
            if notify(info):
                logger.info(u"adding %s to recently added table" % info["title"])
                new = models.RecentlyAdded()
                new.item_id = x.get("ratingKey")
                new.time = datetime.datetime.now()
                new.filename = xml.find("Media").find("Part").get("file")
                new.title = info["title"]
                new.debug = "%s" % info
                db.session.merge(new)
                db.session.commit()
    else:
        logger.debug("nothing was recently added")

    if live and not len(live):
        logger.debug("seems like nothing is currently played")
    for session in live:
        #logger.debug(session.tostring())
        userID = session.find('User').get('id')
        if not userID:
            userID = "Local"
        db_key = "%(id)s_%(key)s_%(userid)s" % {"id": session.get('sessionKey'), "key": session.get('key'), "userid": userID}
        playing[db_key] = 1
    logger.debug(playing)

    did_unnotify = 0
    un_done = get_unnotified()
    if un_done:
        logger.debug("processing unnotified entries from database")
        for k in un_done:
            start_epoch = k.time
            stop_epoch = k.stopped
            if not stop_epoch:
                stop_epoch = datetime.datetime.now()
            ntype = "stop"
            if k.session_id in playing:
                ntype = "start"
            paused = get_paused(k.session_id)
            info = info_from_xml(k.xml, ntype, start_epoch, stop_epoch, paused)
            logger.debug(info)
            logger.debug("sending notification for: %s : %s" % (info["user"], info["orig_title_ep"]))
            #TODO: fix this.... for now just don't notify again!
            if notify(info):
                k.notified = 1
                #make sure we have a stop time if we are not playing this anymore!
                if ntype == "stop":
                    k.stopped = stop_epoch
                    k.progress = int(info["percent_complete"])
            db.session.commit()
            set_notified(k.session_id)
            did_unnotify = 1
    else:
        logger.info("nothing found to send (new) notifications for")
        did_unnotify = 1

    ## notify stopped
    ## redo this! currently everything started is set to stopped?
    if did_unnotify:
        logger.info("processing recently started entries from db and checking for stopped")
        #started = get_started()
        for k in started:
            logger.debug("checking if %s is still in playing list" % k.session_id)
            if not k.session_id in playing:
                logger.debug("%s is stopped!" % k.session_id)
                start_epoch = k.time
                stop_epoch = datetime.datetime.now()
                xml = ET.fromstring(k.xml)
                xml.find("Player").set('state', 'stopped')
                process_update(xml, k.session_id)
                paused = get_sec_paused(k.session_id)
                info = info_from_xml(k.xml, "stop", start_epoch, stop_epoch, paused)
                k.stopped = datetime.datetime.now()
                k.paused = None
                k.notified = 0
                #set_stopped(started[k.session_id], stop_epoch)
                #https://github.com/ljunkie/plexWatch/blob/master/plexWatch.pl#L552
                info["decoded"] = 1
                if notify(info):
                    k.notified = 1
                k.progress = info['percent_complete']
                db.session.merge(k)
                db.session.commit()

    ## notify start/now playing
    logger.debug("processing live content")
    was_started = dict()
    for k in live:
        if k.get('librarySectionID') in config.EXCLUDE_SECTIONS:
            logger.info("Watching something from section: %s which is in EXCLUDE_SECTIONS: %s" % (k.get('librarySectionID'), config.EXCLUDE_SECTIONS))
            continue
        if k.get('type') == "clip":
            logger.info("Skipping video clips like trailers, specials, scenes, interviews etc.")
            continue
        start_epoch = datetime.datetime.now()
        stop_epoch = None  # not stopped yet
        xml_string = ET.tostring(k)
        info = info_from_xml(k, "start", start_epoch, stop_epoch, 0)
        info["decoded"] = 1
        #logger.debug(info)
        userID = info["userID"]
        if not userID:
            userID = "Local"
        db_key = "%(id)s_%(key)s_%(userid)s" % {"id": k.get('sessionKey'), "key": k.get('key'), "userid": userID}
        logger.debug("plex returned a live element: %s " % db_key)

        ## ignore content that has already been notified
        #TODO: get_started should return a dict accessible by db_key
        #so we can check: if x in started: check for change, if not mark as started now
        #first go through all started items and check for status changes
        if started:
            logger.debug("we still have unstopped entries in our database, checking for matches")
            for x in started:
                logger.debug("checking if db entry '%s' is in live content" % x.session_id)
                state_change = False
                if x.session_id == db_key:
                    logger.debug("that was a match! check for status changes")
                    #already in database, only check for status changes!
                    state_change = process_update(k, db_key)
                    was_started[db_key] = x
                    if state_change:
                        info["ntype"] = state_change
                        logger.debug("%s: %s: state changed [%s] notify called" % (info["user"], info["title"], info["state"]))
                        notify(info)
        else:
            logger.debug("all entries in our database have been set to stopped")

        #also check if there is an element in the db which may be a resumed play from up to 24 hours ago
        if not db_key in was_started:
            logger.debug("trying to search for similar plays which stopped in the last 24 hours")
            view_offset = k.get("viewOffset")
            max_time = datetime.datetime.now() - datetime.timedelta(hours=24)
            like_what = "%" + k.get('key') + "_" + userID
            restarted = db.session.query(models.Processed).filter(models.Processed.session_id.like(like_what)).filter(models.Processed.time > max_time).filter(models.Processed.view_offset <= view_offset).filter(models.Processed.stopped != None).first()
            if restarted:
                logger.debug("seems like someone repeated a stopped play, updating db key from %s to %s" % (restarted.session_id, db_key))
                restarted.session_id = db_key
                restarted.stopped = None
                db.session.commit()
                state_change = process_update(k, db_key)
                was_started[db_key] = restarted
                info["ntype"] = "resume"
                notify(info)
            else:
                #if still not processed by now, it's a new play!
                logger.debug("we got those entries which already were in the database: %s " % was_started)
                logger.info("seems like this is a new entry: %s" % db_key)
                #unnotified: insert into db and notify
                process_start(xml_string, db_key, info)
                if notify(info):
                    set_notified(db_key)