Example #1
 def _persist_data(self,
                   var_data,
                   session_id="",
                   channel="",
                   business_name="",
                   flow_id="",
                   business_id=""):
     # TODO: make this method asynchronous
     if var_data == {}:
         return 1
     object_id = str(uuid.uuid4())
     timestamp = datetime.datetime.utcnow()
     collection = DB["user_data"]
     document = {
         "_id": object_id,
         "user_id": self.user_id,
         "flow_id": flow_id,
         "session_id": session_id,
         "business_id": business_id,
         "data": var_data,
         "channel": channel,
         "business_name": business_name,
         "timestamp": timestamp
     }
     try:
         saved_document_id = collection.insert_one(document).inserted_id
         logger.info(
             f"Variable data saved with object_id {saved_document_id}")
         return 1
     except Exception as err:
         logger.error(err)
         raise
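
The comment at the top of _persist_data asks for an asynchronous version. A minimal sketch of one way to do that, assuming a shared ThreadPoolExecutor (the pool, the method name and the callback below are illustrative additions, not part of the original code):

from concurrent.futures import ThreadPoolExecutor

_PERSIST_POOL = ThreadPoolExecutor(max_workers=4)  # hypothetical shared pool

def _persist_data_async(self, var_data, **kwargs):
    # Hand the existing synchronous writer to a worker thread so the caller
    # does not block on the MongoDB insert.
    future = _PERSIST_POOL.submit(self._persist_data, var_data, **kwargs)
    # Log failures instead of raising, since nothing awaits the result here.
    future.add_done_callback(
        lambda f: logger.error(f.exception()) if f.exception() else None)
    return future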
Example #2
    def cmdloop(self):
        line = ''
        args = []
        stop = Command.SUCCESS
        logger.debug(f"Start the command loop on class `{__class__.__name__}`")
        self.preloop()
        while stop != Command.EXIT:
            try:
                line = self.raw_input(self.prompt)
                try:
                    args = self.parsecmd(line)
                except ValueError as e:
                    logger.debug('Failed to parse the command line')
                    logger.error(str(e))
                    continue

                args = self.precmd(args)

                stop = self.cmdhandler(args)
                stop = self.postcmd(stop, args)
            except (KeyboardInterrupt, EOFError):
                stop = self.interrupt_handler(args)
            except BaseException as e:
                stop = self.exception_handler(args, e)

        self.postloop()
        logger.debug(f"End the command loop on class `{__class__.__name__}`")
Example #3
    def run(self):

        ret = self.operationWarmup()

        if ret:
            try:
                self.subSytemDatabase.conn.commit()
            except Exception as e:
                logger.error(f"{e}")
                logger.error(e, exc_info=True)
                return
            else:
                self.signalConsole.emit(f"warmup complete, scrapping now...")

        else:
            self.signalConsole.emit(f"error: warmup failed, not scrapping")
            return

        ret = self.operationInit()

        if ret:
            self.signalConsole.emit(f"scrap completed, performing post process functions")
        else:
            self.signalConsole.emit(f"error: scrap process failed!")
            return

        self.subSytemScrap.driver.close()
Example #4
    def __get_next_node_id(cls, data, state, node_data):

        next_node_id = node_data.get('NextNodeId', '')  # Fallback node id

        for button in node_data.get('Buttons', []):
            try:
                root_key = re.split(r'\.|\[',
                                    button.get("ConditionMatchKey"))[0]

                if data.get(root_key) is None:
                    data[root_key] = None

                logger.debug("rootKey " + root_key)

                path = button.get("ConditionMatchKey")
                obj = {root_key: data[root_key]}
                variable_value = Util.deep_find(obj, path)

                match_operator = button.get("ConditionOperator")
                match_value = AnaHelper.verb_replacer(text=button.get(
                    "ConditionMatchValue", ""),
                                                      state=state)

                condition_matched = AnaHelper.is_condition_match(
                    variable_value, match_operator, match_value)

                if condition_matched:
                    next_node_id = button["NextNodeId"]
                    break
            except Exception:
                logger.error("error in btn " + str(button))

        return next_node_id
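
For illustration, a made-up node_data payload in the shape this routing helper expects (the key names come from the code above; the values and the 'EQUALS' operator are invented):

node_data = {
    'NextNodeId': 'fallback_node',
    'Buttons': [
        {
            'ConditionMatchKey': 'order.status',
            'ConditionOperator': 'EQUALS',
            'ConditionMatchValue': 'shipped',
            'NextNodeId': 'shipped_node',
        },
    ],
}
# __get_next_node_id(data, state, node_data) returns 'shipped_node' when the
# button condition matches, and the fallback 'fallback_node' otherwise.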
Example #5
    def send_messages(messages, sending_to):

        endpoints = {"USER": application_config["GATEWAY_URL"], \
                "AGENT": application_config["AGENT_URL"]}
        url = endpoints.get(sending_to)

        if url is None:
            return 0

        headers = {"Content-Type": "application/json"}
        if messages == []:
            logger.info(f"No messages to send to {sending_to}")
            return 1
        # This is deliberately synchronous to maintain the order of messages
        # being sent
        for message in messages:
            json_message = json.dumps(message)
            try:
                response = requests.post(url,
                                         headers=headers,
                                         data=json_message)
                logger.info(response)
                logger.info(f"Message sent to {sending_to} {message}")
            except Exception as err:
                logger.error(err)
                return 0
        return 1
Example #6
def test_for_nans(data, name):
    if not len(data):
        raise ValueError('Data is empty!')
    nan_rows = data[data.isnull().values.any(axis=1)]

    if not len(nan_rows):
        logger.info('\t\tNo Nans found in {}'.format(name))
        return

    logger.info(
        '\t\tFound {} rows ({:.1f}%) and {} columns (out of {}) with Nans in {}'
        .format(len(nan_rows), 100 * len(nan_rows) / len(data),
                data.isnull().values.any(axis=0).sum(), len(data.columns),
                name))

    nan_counts = {}
    for source, source_data in data.groupby('source'):
        source_nans = {}
        for column in source_data.columns:
            col_nans = source_data[column].isnull().values.sum()
            if col_nans:
                source_nans[column] = col_nans
        nan_counts[source] = source_nans

    for k, v in nan_counts.items():
        logger.error('\t\tFor source {}, found NaNs in {} columns:'.format(
            k, len(v)))
        for col, count in v.items():
            logger.error('\t\t\t{:20} {:8} ({:.1f}%)'.format(
                col, count, 100 * count / len(data)))

    raise ValueError('NaNs found in {}'.format(name))
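
A small usage sketch (the DataFrame below is made up): the function expects a 'source' column for the per-source breakdown and raises ValueError as soon as any NaN is present.

import numpy as np
import pandas as pd

df = pd.DataFrame({
    'source': ['feed_a', 'feed_a', 'feed_b'],
    'value': [1.0, np.nan, 3.0],
})
test_for_nans(df, 'example dataset')  # logs the per-source NaN counts, then raises ValueError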
Example #7
def parse_settings(settings_file) -> dict:
    """ Creates multiple dictionaries containing the settings parsed from a settings file.
    Each type of plot has its own settings dictionary.
    
    settings_file is the name of the text file containing the settings

    Return values:
    data is a pandas.DataFrame object which contains the alternative splicing data
    hive_plot_settings is a dictionary containing the settings for the hive plot
    struct_plot_settings is a dictionary containing the settings for the structure plot
    """
    try:
        config = configparser.ConfigParser()
        logger.info('Reading settings from {0}...'.format(settings_file))
        config.read(settings_file)

        # hive_plot_settings = parse_hive_plot_settings(config)
        # struct_plot_settings = parse_struct_plot_settings(config)

        return parse_sashimi_settings(config)

        # logger.error('Done reading settings.')
        # return hive_plot_settings, struct_plot_settings, sashimi_plot_settings
    except IOError:
        logger.error('{0} is not a valid file path'.format(settings_file))
        sys.exit(1)
Example #8
 def hook_start(self, session: WebshellSession):
     '''Executed when the session has been created successfully'''
     super().hook_start(session)
     p = PHPPayload('php/base/baseinfo.php')
     ret = self.eval(p)
     if not ret.is_success():
         logger.error("Basic info gather failed!")
         return
     info = json.loads(ret.data)
     for k, v in info.items():
         if isinstance(v, str):
             info[k] = base64.b64decode(v.encode()).decode(
                 self.options.encoding, 'ignore')
     session.state['name'] = info['host']
     session.state['pwd'] = info.get('pwd').strip()
     session.state['description'] = self.help.lstrip('\r\n ').split('\n')[0]
     session.server_info.lang = self.PHP
     session.server_info.user = info.get('user').strip()
     session.server_info.webshell_root = info.get('pwd').strip()
     session.server_info.os_type = info.get('os_type').strip()
     session.server_info.tmpdir = info.get('tmpdir').strip()
     session.server_info.sep = info.get('sep').strip()
     session.server_info.domain = info.get('domain')
     session.server_info.group = info.get('group')
     session.server_info.os_bits = info.get('os_bits')
Example #9
 def exception_handler(self, args: Cmdline, e: BaseException) -> int:
     if isinstance(e, SystemExit):
         return Command.STOP
     elif isinstance(e, CommandNotFound):
         logger.error(e)
         return Command.STOP
     return super().exception_handler(args, e)
Example #10
    def _payload(self, _____path: str, isstring=False, **kw) -> Payload:
        '''Construct the payload according to the server-side language
        '''
        if not isinstance(self.session, WebshellSession):
            return None
        payload = None
        if self.session.server_info.lang == WebshellBase.PHP:
            payload = PHPPayload(None, **kw)
        elif self.session.server_info.lang == WebshellBase.ASP_NET_CS:
            payload = CSharpPayload(None, **kw)
        else:
            payload = Payload(None, **kw)

        if payload:
            if isstring:
                payload.from_string(_____path)
            else:
                path = os.path.join(os.path.dirname(call_path(2)), _____path)
                try:
                    payload.from_file(path)
                except FileNotFoundError:
                    logger.error(f"Cannot find file `{path}`, maybe the accessed payload is not implemented!")
                    raise SystemExit

        return payload
Example #11
def filter_album_tracks(oauth_token: str, recent_tracks: list) -> list:
    try:
        tracks = []
        for track in recent_tracks:
            album_tracks = get_album_tracks(oauth_token, track['album_id'])
            df_album_tracks = pd.DataFrame(album_tracks)
            df_album_tracks = df_album_tracks.filter(['items'])
            df_album_tracks = pd.json_normalize(
                df_album_tracks.to_dict('records'))
            df_album_tracks = df_album_tracks.filter(
                ['items.id', 'items.track_number'])
            df_album_tracks.rename(columns={
                'items.id': 'track_id',
                'items.track_number': 'track_number'
            },
                                   inplace=True)
            df_album_tracks[
                'track_number'] = df_album_tracks['track_number'] - 1
            current_track = pd.merge(pd.DataFrame(recent_tracks),
                                     df_album_tracks,
                                     on='track_id',
                                     how='inner').to_dict('records')[0]
            tracks.append(current_track)
        logger.info('Album tracks have been filtered successfully')
        return tracks
    except Exception as error:
        logger.error(f'Error to filter album tracks: {error}')
        raise
Example #12
    def get_dir_content(self, dir_path):
        if dir_path in ("/", "\\"):
            dir_path = ""
        entries = []
        response = self.dbx.files_list_folder(dir_path)
        while True:
            for entry in response.entries:
                if isinstance(entry, dropbox.files.FolderMetadata):
                    entries.append(FolderInfo(entry.name, entry.path_display))
                elif isinstance(entry, dropbox.files.FileMetadata):
                    entries.append(
                        FileInfo(entry.name, entry.size, entry.path_display))
                else:
                    logger.error(
                        "DROPBOX : Unknown entry in dir {0}".format(dir_path))
            if not response.has_more:
                break
            response = self.dbx.files_list_folder_continue(response.cursor)

        return entries
Example #13
def send_email(args):
    logger.info("input parameter is {} {}".format(args.PLATFORM,
                                                  args.REPORTWEEKLY))
    if args.REPORTWEEKLY:
        weekly = args.REPORTWEEKLY
    else:
        weekly = "WW" + str(int(time.strftime("%W")) + 1)
    if args.PLATFORM == "all":
        platforms = allplatform
    else:
        platforms = args.PLATFORM.split(":")
    logger.info("{} | {}".format(platforms, weekly))
    for platform in platforms:
        execlpath, emaillist = get_config(platform)
        logger.info("{} | {}".format(execlpath, emaillist))
        if execlpath:
            execlcontent = read_execl(execlpath, weekly)
            summaryinfo = analysis_reult(execlcontent)
            print(summaryinfo)
            template_dir = os.path.join(cur_dir, "email_template")
            mail_title = "{0} Platform {1} Cycling Test Report".format(
                platform, weekly)
            #mail_sender = "*****@*****.**"
            mail_sender = "*****@*****.**"
            template_file = "template.html"
            logser = r"\\ccr\ec\proj\deg\PID\BKC\Auto\ProductCyclingRecord"
            if platform in dirdict:
                print(platform)
                logdir = os.path.join(logser, dirdict[platform])
            else:
                logdir = logser
            print('Generate mail content')
            res_dict = {
                "test_result_list": execlcontent,
                "log_dir": logdir,
                "summary_info": summaryinfo,
                "platform": platform,
                "weekly": weekly
            }
            env = Environment(loader=FileSystemLoader(template_dir))
            template = env.get_template(template_file)
            mail_body = template.render(res_dict)

            logger.info('Send mail to {}'.format(emaillist))
            attchment_list = []
            if os.path.exists(execlpath):
                attchment_list.append(execlpath)
            if send_mail(mail_sender,
                         emaillist,
                         mail_title,
                         mail_body,
                         ccs="",
                         attachment=attchment_list):
                print('Mail has been sent successfully')
            else:
                print('Failed to send mail')
        else:
            logger.error("Can not found the execl file")
Example #14
 def deep_find(obj, path):
     try:
         val = jsonpath(obj, path)
         if bool(val) and isinstance(val, list) and len(val) == 1:
             val = val[0]
         return val
     except Exception as err:
         logger.error(err)
         return None
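
A brief usage sketch (the object and path are made up, and the exact path syntax depends on which jsonpath implementation backs the helper). As Example #4 shows, it is called as Util.deep_find; a single-element match list is unwrapped and errors come back as None:

obj = {'user': {'address': {'city': 'Paris'}}}
city = Util.deep_find(obj, 'user.address.city')
logger.info(city)  # expected 'Paris' when the path resolves to exactly one value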
Example #15
    def log_event(self, type_of_event, data):

        logger_method = getattr(self, "log_%s" % type_of_event.lower(), None)

        if logger_method is None:
            logger.error(f"Unknown event type published {type_of_event}")
            return None

        message_logged = logger_method(data)
        return message_logged
Example #16
def _task_status_from_pull_request(pull_request: PullRequest) -> str:
    if not pull_request.closed():
        return "Open"
    elif pull_request.closed() and pull_request.merged():
        return "Merged"
    elif pull_request.closed() and not pull_request.merged():
        return "Closed"
    else:
        logger.error("Pull request is in an invalid state")
        return ""
Example #17
def _automate_auth():
    user_name = creds_conf["ib_user_name"]
    password = creds_conf["ib_password"]
    is_live = creds_conf["is_live_account"]

    sms_receiver = MFASMSReceiver(60)
    if is_live:
        sms_receiver.start_listening_for_auth_code()

    driver = webdriver.PhantomJS(service_args=['--ignore-ssl-errors=true'],
                                 service_log_path="/tmp/phantom_logs.log")
    driver.get("https://localhost:5000")

    un_box = driver.find_element_by_id("user_name")
    pw_box = driver.find_element_by_id("password")
    submit_btn = driver.find_element_by_id("submitForm")

    un_box.send_keys(user_name)
    pw_box.send_keys(password)
    submit_btn.click()

    if is_live:
        # IB uses MFA only for live accounts
        SECURITY_CODE_BOX_ID = "chlginput"
        try:
            WebDriverWait(driver, 5).until(
                EC.presence_of_element_located((By.ID, SECURITY_CODE_BOX_ID)))
        except TimeoutException:
            logger.error(
                "IB auth failed: MFA security code element wasn't present")
            driver.quit()
            raise

        auth_code = sms_receiver.auth_code
        if auth_code is not None:
            sec_code_box = driver.find_element_by_id(SECURITY_CODE_BOX_ID)
            sec_code_box.send_keys(auth_code)

            submit_btn = driver.find_element_by_id("submitForm")
            submit_btn.click()
        else:
            raise SystemError("IB auth code for MFA was not received")

    try:
        WebDriverWait(driver, 10).until(
            EC.text_to_be_present_in_element((By.CSS_SELECTOR, "pre"),
                                             LOGIN_SUCCEEDS_PHRASE))
    except TimeoutException:
        logger.error(
            f"Login to IB failed, success page loading timed out, page source: {driver.page_source}"
        )
        raise
    finally:
        driver.quit()

    return True
Example #18
 def cmdhandler(self, args: Cmdline) -> int:
     if args.cmd is None:
         return Command.STOP
     cmd = args.cmd
     func = self.session.command_map.get(cmd)
     if func is None:
         logger.error(f'No command named `{cmd}`.')
         return Command.STOP
     ret = func.run(args)
     logger.debug(f'A command line `{args.cmdline}` is executed, returns `{ret}`')
     return ret
Example #19
 def normalize(self, url):
     if url.endswith('.'):
         url = url[:-1]
     try:
         session = requests.Session()
         resp = session.head(url, allow_redirects=True, timeout=5)
         unshorten_url = resp.url
         return url_normalize(unshorten_url)
     except requests.exceptions.RequestException as e:
         logger.error('Error {e} {url}'.format(e=e, url=url))
         return INVALID_URL
Example #20
def filter_user_playback(oauth_token: str, recent_tracks: list) -> None:
    try:
        for recent_track in reversed(recent_tracks):
            status_code = start_user_playback(oauth_token,
                                              recent_track['album_id'],
                                              recent_track['track_number'])
            if status_code == 204:
                time.sleep(7)
        logger.info('Recent tracks have been played successfully')
    except Exception as error:
        logger.error(f'Error to filter user playback: {error}')
        raise
Example #21
def main():
    load_configuration()
    logger.info("Private Cloud started")

    while True:
        try:
            dispatch_remote_iteration(g.dropbox_client)
            dispatch_local_iteration(g.dropbox_client)
            logger.info("Finish iteration. sleeping...")
            time.sleep(g.sync_interval_in_sec)
        except Exception as e:
            logger.error("Exception occurred: {0}".format(str(e)))
Example #22
 def __init__(self,
              response: requests.Response,
              hint: str = None,
              description: str = None,
              message: str = None):
     super().__init__(response.status_code, hint, description, message)
     logger.error(
         f"[PostgREST] Error {response.status_code} - {response.url}")
     self.response = response
      # NOTE: Maybe this is bad? We are redefining error_body with a new value
     self.error_body = self.postgres_error_details(self.response,
                                                   self.error_body)
Example #23
def message_handler():
    message = request.get_json()

    logger.info(f"Message Received {message}")

    handle_message = MessageHandlerPool.submit(
        MessageProcessor(message).respond_to_message)
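    # Note: assuming MessageHandlerPool is a concurrent.futures executor,
    # Future.exception() blocks until the submitted task finishes, so this
    # handler waits for the message to be processed before responding.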
    exception = handle_message.exception()
    if exception:
        logger.error(exception)

    return jsonify(status="received")
Example #24
    def _make_dataframe(self):
        file_data = self._bucket.get_response_body(self._filename)
        try:
            dataframe: pd.DataFrame = pd.read_json(file_data)
        except Exception:
            logger.error(f'ERROR {self._filename}')
            # pandas has trouble opening file bce5476a-09e4-4e44-a3cc-eca0090a106c
            # and throws a protocol error, but the built-in json module handles it fine
            file_data = json.loads(file_data)
            dataframe = pd.DataFrame(file_data)

        return dataframe
Example #25
 def CreateConnection(self) -> bool:
     try:
         self.socket = Socket(AF_INET, SOCK_DGRAM)
     except Exception as e:
         logger.error(f"could not create socket\n{e}")
         return False
     else:
         logger.info("binding socket")
         # set socket receive buffer to 1 packet to avoid buffer bloat and packet accumulation
         self.socket.setsockopt(SOL_SOCKET, SO_RCVBUF, UDP_MSGLEN)
         self.socket.settimeout(self.timeout)
     return True
Example #26
def configure_telegram():
    """
    Configures the bot with a Telegram Token.

    Returns a bot instance.
    """

    TELEGRAM_TOKEN = os.environ.get('TELEGRAM_TOKEN')
    if not TELEGRAM_TOKEN:
        logger.error('The TELEGRAM_TOKEN must be set')
        raise NotImplementedError

    return telegram.Bot(TELEGRAM_TOKEN)
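
A short usage sketch (the token and chat id are placeholders): with TELEGRAM_TOKEN set in the environment, the helper returns a python-telegram-bot Bot instance.

import os
import telegram

os.environ.setdefault('TELEGRAM_TOKEN', '<your-bot-token>')  # placeholder value
bot = configure_telegram()
# On python-telegram-bot v13 this is a plain call; on v20+ it must be awaited.
bot.send_message(chat_id='<chat-id>', text='Hello from the bot')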
Example #27
    def handle_set_session_data(self, event):

        data = event.get("data", "{}")

        try:
            dict_data = json.loads(data)
            var_data = self.state.get("var_data", {})
            final_var_data = Util.merge_dicts(var_data, dict_data)
            self.state["var_data"] = final_var_data
        except ValueError:
            logger.error(f"Set session data payload is not in json format {data}")

        return []
Example #28
    def __construct_user_messages(cls, input_type, data):
        messages_data = []
        if input_type == "TEXT":
            message_type = MessageType.get_value("SIMPLE")
            text = data.get("val", "")
            message_content = MessageContent(text=text, mandatory=1).trim()
            message_data = MessageData(type=message_type,
                                       content=message_content).trim()
        else:
            logger.error(f"Unsupported input_type for agent {input_type}")
            return messages_data

        messages_data.append(message_data)
        return messages_data
Example #29
    def get_contents(self):

        response = CACHE.get(self.node_key)

        if response is None:
            logger.warning(f"Data not found for {self.node_key}")
            return {}
        try:
            response_dict = json.loads(response)
            return response_dict
        except Exception as err:
            logger.error(err)
            raise
Example #30
def dispatch_remote_iteration(client):
    """
    Walk over the remote cloud files:
        * Create a directory if it exists only on the remote
        * If the file name starts with the upload magic prefix, upload from local to remote (if configured)
        * If the file name starts with the download magic prefix, download from remote to local (if configured)
    :param client: ClientBase
    """
    logger.info("dispatch_remote_iteration started")
    remote_dir = client.get_dir_content('/')
    while len(remote_dir) != 0:
        current = remote_dir.pop()
        if current.__class__ == FolderInfo:
            remote_dir += client.get_dir_content(current.folder_path)
            if not os.path.exists(os.path.join(g.local_cloud_path, current.folder_path)):
                os.mkdir(os.path.join(g.local_cloud_path, current.folder_path))
            elif not os.path.isdir(os.path.join(g.local_cloud_path, current.folder_path)):
                logger.error("dispatch_remote_iteration : remote path is folder, and on local its a file")
        else:
            if g.enable_upload and current.file_name.startswith(g.upload_prefix):
                logger.info("dispatch_remote_iteration : found file name start with upload magic,"
                            " uploading from local cloud")
                local_file = get_local_path_without_prefix(g.upload_prefix, g.local_cloud_path,
                                                           current.file_path, current.file_name)
                remote_file = get_remote_path_without_prefix(g.upload_prefix, current.file_path, current.file_name)

                if not os.path.exists(local_file):
                    logger.error("dispatch_remote_iteration : "
                                 "remote file start with prefix and there is no local file")
                client.delete_file(current.file_path)

                client.upload_file(local_file, remote_file)

            elif g.enable_download and current.file_name.startswith(g.download_prefix):
                logger.info("dispatch_remote_iteration : found file name start with download magic,"
                            " download to local cloud and leave thin version on remote cloud")
                local_path = get_local_path_without_prefix(g.download_prefix, g.local_cloud_path,
                                                           current.file_path, current.file_name)
                remote_path = get_remote_path_without_prefix(g.download_prefix, current.file_path, current.file_name)
                partial_file_path = os.path.join(g.tmp_path, os.path.basename(remote_path))

                client.download_file(current.file_path, local_path)
                client.delete_file(current.file_path)
                create_partial_file(local_path, g.thin_mode_byte_length, partial_file_path)
                client.upload_file(partial_file_path, remote_path)
                os.remove(partial_file_path)

            elif not os.path.exists(os.path.join(g.local_cloud_path, current.file_path)) \
                    and not_start_with_saved_prefix(current.file_name):
                logger.info("dispatch_remote_iteration : found file that only on remote cloud, downloading it")
                client.download_file(current.file_path, os.path.join(g.local_cloud_path, current.file_path))