Beispiel #1
0
def search(query='', field='q1', _operator='and', sort=None,
           _filter=None, size=1000, _id=False):
    """Search the 'qwerty' Elasticsearch index.

    Args:
        query: Query string; when empty a match_all query is used.
        field: Document field the match query targets.
        _operator: Match operator ('and'/'or').
        sort: List of (field, order) tuples; defaults to score then quoted_by.
        _filter: Filter clause for the filtered query; defaults to no filter.
        size: Maximum number of hits to return.
        _id: When True, yield the full hit dicts (including '_id');
            otherwise yield only each hit's '_source' document.

    Returns:
        A generator over hits or their '_source' bodies.
    """
    # Bug fix: the original used mutable default arguments (a shared list
    # and dict) which would leak state across calls if ever mutated.
    if sort is None:
        sort = [('_score', 'desc'), ('quoted_by', 'desc')]
    if _filter is None:
        _filter = {}
    es = Elasticsearch([elasticsearch_setting])
    if query:
        es_query = {
            'match': {
                field: {
                    'query': query,
                    'operator': _operator,
                    'minimum_should_match': '85%'
                }
            }
        }
    else:
        es_query = {"match_all": {}}
    body = {
        "query": {
            # NOTE(review): 'filtered' queries were removed in ES >= 5 —
            # confirm the cluster version this targets.
            "filtered": {
                "query": es_query,
                "filter": _filter
            }
        },
        'size': size
    }
    sort_item = _build_sort(sort)
    if sort_item:
        body.update({'sort': sort_item})
    logger.debug(body)
    result = es.search(index='qwerty', body=body, _source=True, timeout=55)
    if _id:
        return (x for x in result['hits']['hits'])
    return (x['_source'] for x in result['hits']['hits'])
Beispiel #2
0
    def run(self):
        """Configure and start a local SMB server on 0.0.0.0:445.

        Builds an in-memory smb config containing a dummy share plus the
        mandatory IPC$ share, then serves until interrupted.
        """
        # Here we write a mini config for the server
        smbConfig = ConfigParser.ConfigParser()
        smbConfig.add_section("global")
        smbConfig.set("global", "server_name", "server_name")
        smbConfig.set("global", "server_os", "UNIX")
        smbConfig.set("global", "server_domain", "WORKGROUP")
        smbConfig.set("global", "log_file", self.__smbserver_log)
        smbConfig.set("global", "credentials_file", "")

        # Let's add a dummy share
        smbConfig.add_section(self.__smbserver_share)
        smbConfig.set(self.__smbserver_share, "comment", "")
        smbConfig.set(self.__smbserver_share, "read only", "no")
        smbConfig.set(self.__smbserver_share, "share type", "0")
        smbConfig.set(self.__smbserver_share, "path", self.__smbserver_dir)

        # IPC always needed
        smbConfig.add_section("IPC$")
        smbConfig.set("IPC$", "comment", "")
        smbConfig.set("IPC$", "read only", "yes")
        smbConfig.set("IPC$", "share type", "3")
        # Bug fix: the value argument was missing, leaving "path" as None
        # (every other option is set explicitly); use "" like the others.
        smbConfig.set("IPC$", "path", "")

        self.localsmb = smbserver.SMBSERVER(("0.0.0.0", 445), config_parser=smbConfig)

        logger.info("Setting up SMB Server")
        self.localsmb.processConfigFile()
        logger.debug("Ready to listen...")

        try:
            self.localsmb.serve_forever()
        except Exception:
            # Narrowed from a bare 'except:' so Ctrl-C/SystemExit still
            # stop the process; server errors stay best-effort ignored.
            pass
Beispiel #3
0
def lib_user_login(username, password):
    """Authenticate a user against the local redis 'user' hash.

    Returns a response dict: {'status': 'ok', 'data': {'username': ...}}
    on success, or {'status': 'err', 'data': <error message>} on failure.
    """
    hash_key = 'user'
    response = {}

    try:
        redis_client = redis.StrictRedis.from_url(config['REDIS_LOCAL_URL'])
        redis_data = redis_client.hget(hash_key, username)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        logger.error("ERROR! Cannot connect to {}".format(config['REDIS_LOCAL_URL']))
        response['status'] = 'err'
        response['data'] = "连接数据库错误!"
        return response

    if redis_data is None:
        logger.warning("WARNING! User {} not exist".format(username))
        response['status'] = 'err'
        response['data'] = "该用户尚未注册!"
        return response

    user_data = json.loads(redis_data.decode('utf-8'))

    # NOTE(review): passwords are compared (and therefore stored) in
    # plaintext — consider salted hashing.
    if user_data['password'] != password:
        logger.debug("User {} password is not correct".format(username))
        response['status'] = 'err'
        response['data'] = "密码错误!"
    else:
        logger.debug("User {} login success".format(username))
        response['status'] = 'ok'
        response['data'] = {"username": username}
    return response
Beispiel #4
0
def main():
    """Entry point: set up optional file logging, run the scan, report."""
    if output:
        # Mirror INFO-level messages into the requested log file.
        file_handler = logging.FileHandler(output)
        file_handler.setLevel(logging.INFO)
        logger.addHandler(file_handler)

    logger.info('-----Start-----')

    if not (target and wordlist):
        # No custom wordlist: run the default scan.
        logger.info('[*] Starting dork scanner on ' + target)
        actions.scan(target)
        logger.info('[*] Scan completed')
    else:
        if not os.path.isfile(wordlist):
            logger.error("[-] ERROR: File not exist.")
            sys.exit(1)
        logger.debug("File exist")
        logger.info('[*] Starting dork scanner from' + wordlist + ' on ' + target)
        actions.scan_wordlist(target, wordlist)
        logger.info('[*] Scan completed')

    if output:
        logger.info('[+] File log written: ' + output)

    logger.info('-----End-----\n')
Beispiel #5
0
def lib_send_sms_message(message):
    """Send an SMS for a chat message through the configured gateway.

    message: JSON string with 'fromUser', 'toUser' and 'messageBody' keys.
    Returns True on success, False on HTTP or gateway-reported failure.
    """
    parse_message = json.loads(message)
    sms_from = parse_message["fromUser"]
    sms_to = parse_message["toUser"]
    sms_message = parse_message["messageBody"] + config["SMS_SUFFIX"]

    # Bug fix: hashlib.md5() requires a bytes-like object on Python 3;
    # encode the configured password when it is a str.
    password = config["SMS_PASSWORD"]
    if isinstance(password, str):
        password = password.encode("utf-8")

    sms_data = {
        "username": config["SMS_USERNAME"],
        "password_md5": hashlib.md5(password).hexdigest(),
        "apikey": config["SMS_API_KEY"],
        "encode": "UTF-8",
        "mobile": sms_to,
        "content": sms_message + "【Drchat: " + sms_from + "】",
    }

    res = requests.post(config["SMS_API_URL"], data=sms_data)

    if res.status_code != 200:
        logger.error("Cannot connect to sms platform: {}".format(config["SMS_API_URL"]))
        return False

    # The gateway reports success as a "success:..." response body.
    if res.text.split(":")[0] != "success":
        logger.error("Send sms error: {}".format(res.text))
        return False

    logger.debug("Send sms to {} success!".format(sms_to))
    return True
Beispiel #6
0
def lib_user_register(username, password):
    """Register a new user in the remote redis 'user' hash.

    Returns {'status': 'ok', 'data': {'username': ...}} on success, or an
    error response when redis is unreachable or the user already exists.
    """
    hash_key = 'user'
    response = {}

    try:
        redis_client = redis.StrictRedis.from_url(config['REDIS_REMOTE_URL'])
        redis_data = redis_client.hget(hash_key, username)
    except Exception:
        # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit
        # are no longer swallowed.
        logger.error("ERROR! Cannot connect to {}".format(config['REDIS_REMOTE_URL']))
        response['status'] = 'err'
        response['data'] = "连接数据库错误!"
        return response

    if redis_data:
        logger.warning("WARNING! User {} already exist".format(username))
        response['status'] = 'err'
        response['data'] = "用户已被注册!"
        return response

    # NOTE(review): the password is stored in plaintext — consider
    # salted hashing before persisting.
    save_data = {
        'username' : username,
        'password' : password,
    }

    redis_client.hset(hash_key, username, json.dumps(save_data))
    logger.debug("Success register user {}".format(username))
    response['status'] = 'ok'
    response['data'] = { "username" : username }
    return response
Beispiel #7
0
def _validate_post(post):
    """Return the cleaned post text when the post qualifies, else False."""
    # A qualifying post has no author, a q1 of at most 140 chars, and a
    # non-None text field (short-circuit keeps the len() safe).
    has_required_shape = (
        'author' not in post
        and 'q1' in post
        and len(post['q1']) <= 140
        and 'text' in post
        and post['text'] is not None
    )
    if not has_required_shape:
        return False
    # Strip anchor tags and surrounding whitespace, mutating the post.
    cleaned = regex.re_a_tag.sub('', post['text'].strip())
    post['text'] = cleaned
    # Reject overly long texts and two stock throwaway replies.
    if len(cleaned) < 125 and cleaned != 'はい' and cleaned != 'はい(;´Д`)':
        logger.debug(post)
        return cleaned
    return False
Beispiel #8
0
 def update(self, post_id, body, _op_type):
     """Queue an Elasticsearch bulk action for post_id and mirror the
     document into redis.

     _op_type is a bulk op name ('index' or 'update'); for 'update' the
     currently stored document is merged with body before caching.
     """
     self.actions.append({'_index': "qwerty", '_type': "log",
                          '_op_type': _op_type, '_id': post_id, '_source': body.copy()})
     if _op_type == 'update':
         # Merge the partial update into the stored document so the redis
         # mirror stays a complete copy.
         b = self.find(post_id)
         b.update(body)
         body = b
     # Serialize the datetime before JSON-encoding the cached copy.
     body['dt'] = body['dt'].strftime('%Y-%m-%d-%H-%M-%S')
     logger.debug('%s: %s' % (_op_type.upper(), body))
     # NOTE(review): arguments are (key, value, ttl) — redis-py >= 3
     # expects setex(name, time, value); confirm the client version.
     self.db.setex('qwerty:%s' % post_id, json.dumps(body), ONE_DAY)
Beispiel #9
0
    def process_message(sid, data):
        """Relay an incoming chat message to redis and, locally, to SMS."""
        logger.debug('process message: {}'.format(data))

        # Push the raw payload onto the redis message queue first.
        lib_send_redis_message(data)

        # TODO: need to reconstruct to use cloud server to send sms message
        payload = json.loads(data)
        # Only the LOCAL deployment sends SMS, and only when requested.
        should_send_sms = config['SERVER_TYPE'] == 'LOCAL' and payload['sendSmsFlag']
        if should_send_sms:
            lib_send_sms_message(data)
Beispiel #10
0
def scan(target):
    """Validate the target URL and run the default wordlist scan on it."""
    if core.is_valid_url(target):
        logger.debug("Host Validation OK")
        logger.info("[+] Url Accepted")
        logger.info("[*] Performing scan")
        try:
            core.scan(target, settings.WORDLISTFILE)
        except Exception as e:
            # Bug fix: 'except Exception, e' / 'print str(e)' are
            # Python-2-only syntax; this form works on both 2 and 3.
            print(str(e))
            logger.error("[-] ERROR")
Beispiel #11
0
 def run(self):
     """Index recent kuzuha log posts into the 'qwerty' Elasticsearch index.

     Fetches recent posts (presumably the last 20 minutes — confirm
     gen_params semantics), queues an index action for each changed
     document, then flushes the bulk queue and refreshes the index.
     """
     params = kuzuha.gen_params('', {'minute': 20})
     # Fall back to an empty mapping when the log fetch returns nothing.
     posts = kuzuha.get_log_as_dict('qwerty', params, url=True) or {}
     for (post_id, post) in posts.items():
         if 'date' not in post:
             # Skip malformed posts without a timestamp.
             continue
         stored_body = self.find(post_id)
         body = self.build_body(post, post_id)
         if body and body != stored_body:
             # Only re-index when the document actually changed.
             self.update(post_id, body, 'index')
         else:
             logger.debug('NO CHANGE: %s' % body)
     if self.actions:
         logger.info(helpers.bulk(self.es, self.actions))
         self.es.indices.refresh(index='qwerty')
Beispiel #12
0
 def build_body(self, post, post_id):
     """Build an Elasticsearch document body for a post.

     Side effect: when the post quotes another stored post, the quoted
     post's 'quoted_by' list is updated (via self.update) to include
     post_id. Raises KeyError when post has no 'date' field.
     """
     body = {}
     if 'quote' in post:
         quoted_post = self.find(post['quote'])
         if quoted_post and post_id not in quoted_post.get('quoted_by', []):
             quoted_by = quoted_post.get('quoted_by', [])
             quoted_by.append(post_id)
             # De-duplicate; the order of quoted_by is not preserved.
             quoted_by = list(set(quoted_by))
             self.update(post['quote'], {'doc': {'quoted_by': quoted_by}}, 'update')
         else:
             # NOTE(review): this branch also fires when the quoted post
             # exists but already links post_id, so the message can mislead.
             logger.debug('Not found post id: %s' % post['quote'])
     body['dt'] = datetime.strptime(post['date'], '%Y-%m-%d-%H-%M-%S')
     # Copy over only the whitelisted fields that exist on the post.
     for idx in ('q1', 'q2', 'text', 'quote', 'quoted_by', 'author', 'to'):
         if idx in post:
             body[idx] = post[idx]
     return body
Beispiel #13
0
def lib_delete_contact(username, contact_username):
    """Delete a contact from the user's redis contact hash.

    Returns {'status': 'ok'} on success, or an error response when redis
    is unreachable.
    """
    hash_key = 'contact:' + username
    response = {}

    try:
        redis_client = redis.StrictRedis.from_url(config['REDIS_REMOTE_URL'])
        redis_client.hdel(hash_key, contact_username)
    except Exception:
        # Narrowed from a bare 'except:'.
        logger.error("ERROR! Cannot connect to {}".format(config['REDIS_REMOTE_URL']))
        response['status'] = 'err'
        response['data'] = "连接数据库错误!"
        return response

    # Bug fix: the log message had the format arguments swapped relative
    # to the key actually touched (hash 'contact:<username>', field
    # <contact_username>).
    logger.debug("Success delete contact contact:{} {}".format(username, contact_username))
    response['status'] = 'ok'
    return response
Beispiel #14
0
def update_ghdb():
    """Synchronize the local wordlist with new dorks from exploit-db.

    Starts fetching at the id one past the current end of
    settings.WORDLISTFILE, appending one line per entry (placeholder
    lines for gaps so line numbers track exploit-db ids). Retries the
    whole update up to 3 times on failure.
    """
    global retry
    logger.debug("Starting ghdb update")
    logger.info("[*] Updating Database")
    try:
        fname = settings.WORDLISTFILE
        with open(fname, 'r') as f:
            content = f.readlines()
        # Bug fix: the original called f.close() after each 'with' block;
        # the context manager already closes the file.
        num = len(content) + 1  # next exploit-db id to fetch
        while True:
            dork = source.get_dork_from_exploit_db(num)
            if dork:
                retry = 0
                with codecs.open(fname, 'a', "utf-8") as f:
                    f.write(dork + "\n")
                logger.info("[+] Loaded " + dork)
            else:
                # A gap in ids: write placeholder lines so line numbers
                # keep matching the exploit-db ids.
                check = source.check_exploit_db(num)
                if check:
                    cont = 0
                    while cont < check:
                        with codecs.open(fname, 'a', "utf-8") as f:
                            f.write(" " + "\n")
                        cont += 1
                    num += check - 1
                else:
                    break
            num += 1
        logger.debug("Database update ok")
        logger.info("[+] Database is up to date")
        # sys.exit is (ab)used as a jump to the SystemExit handler below.
        sys.exit(1)
    except SystemExit:
        logger.debug("End update")
    except Exception:
        # Narrowed from a bare 'except:' which would also swallow
        # KeyboardInterrupt.
        retry += 1
        logger.debug("Database update error")
        logger.error("[-] ERROR: Database update error")
        if retry < 3:
            logger.info("[*] Retrying update")
            update_ghdb()
        else:
            # Typo fix: "donwn" -> "down".
            logger.error("[-] CRITICAL ERROR: Maybe Exploit-db or network is down")
            sys.exit(1)
Beispiel #15
0
 def respond(self, mention):
     """Build a reply for a mention tweet.

     Returns the response dict with '@screen_name'-prefixed text and the
     mention's id, or None when the tweet is skipped as invalid.
     """
     mention['text'] = self.normalize(mention['text'])
     logger.debug('{id} {user[screen_name]} {text} {created_at}'.format(**mention))
     (valid, reason) = self.is_valid_tweet(mention)
     if not valid:
         logger.debug('skip because this tweet %s' % reason)
         return
     user_info = self.get_userinfo(mention)
     response = self.make_response(mention['text'], user_info, self.global_context)
     # Keep the rolling conversation context bounded (~100 entries).
     if len(self.global_context) > 100:
         self.global_context.pop(0)
     self.global_context.append(response)
     # Persist the shared context in redis.
     self.db.setex('global_context', json.dumps(self.global_context), ONE_WEEK)
     self.store_userinfo(user_info, mention, response)
     response['text'] = '@%s ' % mention['user']['screen_name'] + response['text']
     response['id'] = mention['id']
     return response
Beispiel #16
0
 def run(self):
     """Main loop: stream tweets and respond to mentions/timeline posts.

     Returns -1 immediately when another instance is already running.
     """
     if self.is_duplicate_launch():
         logger.debug('TwitterResponder is already launched')
         return -1
     last_time = time.time()
     for tweet in self.twitter.stream_api.user():
         if 'text' in tweet:
             if tweet['text'].startswith('@sw_words'):
                 # Direct mention: always respond.
                 self.respond(self.reply_responder, tweet)
             elif (np.random.randint(100) < 2 and self.is_valid_tweet(tweet['text']) and
                   self.db.get('latest_tl_replied') != tweet['user']['screen_name']):
                 # Timeline tweet: respond with ~2% probability, but never
                 # to the same user twice in a row.
                 self.respond(self.tl_responder, tweet, tl=True)
         if time.time() - last_time > TWO_MINUTES:
             # Periodically catch up on mentions the stream may have missed.
             mentions = self.twitter.api.statuses.mentions_timeline(count=200)
             for mention in mentions[::-1]:
                 self.respond(self.reply_responder, mention)
             last_time = time.time()
Beispiel #17
0
def reverse(target, extensive):
    """Reverse-lookup domains hosted on target and log each one found."""
    logger.debug("Reverse probing")
    hosts = core.get_reversed_hosts(target, extensive)
    if hosts:
        # The two original branches were identical except for the
        # singular/plural noun in the summary line.
        noun = "Domain" if len(hosts) == 1 else "Domains"
        logger.info("[+] " + str(len(hosts)) + " " + noun + " found")
        for host in hosts:
            logger.info(host)
    else:
        logger.error("[-] No Domains found")
Beispiel #18
0
 def respond(self, tweet):
     """Build a reply for a tweet.

     Returns the response dict with '@screen_name'-prefixed text and the
     tweet's id, or None when the tweet is skipped or no response text
     was generated.
     """
     text = tweet['text']
     text = self.normalize(text)
     logger.debug('{id} {user[screen_name]} {text} {created_at}'.format(**tweet))
     (valid, reason) = self.is_valid_tweet(tweet)
     if not valid:
         logger.debug('skip because this tweet %s' % reason)
         return
     user_info = self.get_userinfo(tweet)
     response = self.make_response(text, user_info, self.global_context)
     # Only act when a non-empty response text was generated.
     if response and response.get('text'):
         # Keep the rolling conversation context bounded (~100 entries).
         if len(self.global_context) > 100:
             self.global_context.pop(0)
         self.global_context.append(response)
         # Persist the shared context in redis.
         self.db.setex('global_context', json.dumps(self.global_context), reply.ONE_WEEK)
         self.store_userinfo(user_info, tweet, response)
         response['text'] = '@%s ' % tweet['user']['screen_name'] + response['text']
         response['id'] = tweet['id']
         return response
Beispiel #19
0
def get_dork_from_exploit_db(value):
    """Fetch the dork title for an exploit-db GHDB entry.

    Returns the dork text, " " for an entry that exists but is empty, or
    False when the page could not be fetched or parsed.
    """
    url = "https://www.exploit-db.com/ghdb/"
    html = core.get_html_from_url(url + str(value))
    if not html:
        logger.debug("exploit-db returned badly")
        return False
    parser = BeautifulSoup(html.decode("utf-8"), "html.parser")
    table = parser.find("table", {"class": "category-list"})
    # Idiom fix: 'table != None' replaced with the identity check; early
    # returns flatten the nesting.
    if table is None:
        logger.debug("exploit-db returned error")
        return False
    data = table.find("a").get_text().strip()
    if len(data) == 0:
        # Keep the single-space placeholder convention for empty entries.
        return " "
    return data
Beispiel #20
0
 def run(self):
     """Main loop: stream tweets and respond to mentions/timeline posts.

     Returns -1 immediately when another instance is already running.
     """
     if self.is_duplicate_launch():
         logger.debug("TwitterResponder is already launched")
         return -1
     last_time = time.time()
     for tweet in self.twitter.stream_api.user():
         if "text" in tweet:
             if tweet["text"].startswith("@sw_words"):
                 # Direct mention: always respond.
                 self.respond(self.reply_responder, tweet)
             elif (
                 np.random.randint(100) < RESPONDING_PROBABILITY
                 and self.is_valid_tweet(tweet["text"])
                 and self.db.get("latest_tl_replied") != tweet["user"]["screen_name"]
             ):
                 # Timeline tweet: respond with RESPONDING_PROBABILITY
                 # percent chance, but never to the same user twice in a row.
                 self.respond(self.tl_responder, tweet, tl=True)
         if time.time() - last_time > TWO_MINUTES:
             # Periodically catch up on mentions the stream may have missed.
             mentions = self.twitter.api.statuses.mentions_timeline(count=200)
             for mention in mentions[::-1]:
                 self.respond(self.reply_responder, mention)
             last_time = time.time()
Beispiel #21
0
 def summarize(self, text):
     """Shorten and normalize a sentence; returns the summary or None.

     Applies preparation, sentence simplification, paraphrase and
     car-name shortening maps, then a few hard-coded conjugation fix-ups.
     """
     text = self.prepare(text)
     logger.debug(text)
     text = self.simplify_sentence(text)
     if text:
         text = text.replace('"', '')
         if text.endswith('んじゃ'):
             text = text.replace('んじゃ', '')
         # Apply longer replacement keys first so a shorter key cannot
         # clobber part of a longer match.
         for (old, new) in sorted(self.paraphrases['after'].items(), key=lambda x: len(x[0]),
                                  reverse=True):
             text = text.replace(old, new)
         for (old, new) in sorted(self.car_shorten_map.items(), key=lambda x: len(x[0]),
                                  reverse=True):
             text = text.replace(old, new)
         # Drop a trailing na/da particle.
         if text and text[-1] in ('な', 'だ'):
             text = text[:-1]
         # Fix conjugation artifacts introduced by the replacements above.
         text = text.replace('るてる', 'ってる')
         text = text.replace('書くた', '書いた')
         logger.debug(text)
         return text
Beispiel #22
0
def scan_wordlist(target, wordlist):
    """Validate the target URL and run a scan using the given wordlist.

    Exits with status 1 when the target URL is invalid.
    """
    if core.is_valid_url(target):
        logger.debug("Host Validation OK")
        logger.info("[+] Url Accepted")
        logger.info("[*] Performing scan")
        try:
            core.scan(target, wordlist)
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt and
            # SystemExit are no longer swallowed.
            logger.error("[-] ERROR")

    else:
        msg = "[-] ERROR: You must provide a valid target. Given: " + target
        showhelp()
        logger.error(msg)
        sys.exit(1)
Beispiel #23
0
def lib_add_contact(username, contact_username, contact_nickname):
    """Add (or overwrite) a contact entry in the user's contact hash.

    Returns {'status': 'ok'} on success, or an error response when redis
    is unreachable.
    """
    hash_key = 'contact:' + username
    response = {}

    params = {
        'username' : contact_username,
        'nickname' : contact_nickname
    }

    try:
        redis_client = redis.StrictRedis.from_url(config['REDIS_REMOTE_URL'])
        redis_client.hset(hash_key, contact_username, json.dumps(params))
    except Exception:
        # Narrowed from a bare 'except:'.
        logger.error("ERROR! Cannot connect to {}".format(config['REDIS_REMOTE_URL']))
        response['status'] = 'err'
        response['data'] = "连接数据库错误!"
        return response

    logger.debug("Success add contact {} to user {}".format(contact_username, username))
    response['status'] = 'ok'
    return response
Beispiel #24
0
    def verify_symbol(self, file, symbol):
        """Return True when `symbol` appears in the ELF file's .symtab.

        Logs and returns False when the symbol is unspecified, the file
        does not exist, or the file has no symbol table.
        """
        # (Fixed: the two lines below were indented with 3 spaces.)
        if self.is_param_none(symbol):
            log.error("Symbol not specified.")
            return False
        status = False
        if os.path.isfile(file):
            with open(file, 'rb') as f:
                elffile = ELFFile(f)
                log.debug('  %s sections' % elffile.num_sections())
                section = elffile.get_section_by_name('.symtab')
                if not section:
                    log.error("Invalid ELF file no symbol table found.")
                    status = False
                elif isinstance(section, SymbolTableSection):
                    # Scan the symbol table for an exact name match.
                    status = any(
                        section.get_symbol(i).name == symbol
                        for i in range(section.num_symbols())
                    )
        else:
            log.error("File {} does not exist.".format(file))

        return status
Beispiel #25
0
    def linux(self, handler, hostname=None):
        """Collect disk information from a Linux host.

        In debug mode a canned fixture file is parsed instead of running
        a remote command. Returns the BaseResponse as a dict; on any
        exception the response carries status=False and the traceback.
        """
        response = BaseResponse()

        try:
            if self.debug:
                # Read the local fixture file instead of querying the host.
                with open(os.path.join(self.base_dir, 'files', 'disk.out')) as f:
                    ret = f.read()
                # NOTE(review): the debug path uses self.parse while the
                # live path uses self.parse_disk — confirm this asymmetry
                # is intended.
                response.data=self.parse(ret)

            else:
                # ret = handler.cmd('sudo MegaCli  -PDList -aALL', hostname)
                ret = handler.cmd("lsblk | grep '^s' | grep 'disk'", hostname).decode('utf-8')

                response.data = self.parse_disk(ret)

        except Exception:
            error = traceback.format_exc()
            response.status = False
            response.error = error
            logger.debug(error)

        return response.dict
Beispiel #26
0
def tags_handler(event, context):
    """Lambda entry point: return the tags attached to an NCR resource."""
    ncr_id = urllib.parse.unquote(event['pathParameters']['ncrId'])
    try:
        id_parts = ncr_table.parse_ncr_id(ncr_id)
    except TypeError:
        raise HttpInvalidException('invalid NCR ID')

    # The NCR id decomposes into five parts; only two are needed here.
    _, _, account_id, resource_id, _ = id_parts

    # The caller must be authorized to read this account.
    authz.require_can_read_account(event['userRecord'], [account_id])

    # Establish the connection to the ElasticSearch cluster before querying.
    init_configuration_es()

    raw_results = es_tag_query(account_id, resource_id)
    logger.debug('ES query results: %s', json.dumps(raw_results, default=str))
    ncr_tags = parse_es_tag_results(raw_results)
    logger.debug('Parsed results: %s', json.dumps(ncr_tags, default=str))

    return {'ncrTags': {'ncrId': ncr_id, 'tags': ncr_tags}}
Beispiel #27
0
def create_type_class(name, supertype, params):
    """
    Dynamically creates types of a given name, supertype, and attributes
    with essential fields for the metaclass used by ctypes.

    @note - Intended to be used for new types that inherit from a ctypes type.

    @param name: The name of the new type
    @param supertype: The supertype of the new type
    @param params: A dictionary of fields for the new type
    """
    # Attributes every generated ctypes-derived type needs.
    class_attrs = dict(
        _pack_=1,
        _fields_=params,
        __init__=dynamic_init,
        __str__=to_string,
    )
    try:
        return type(name, (supertype,), class_attrs)
    except Exception as exception:
        log.error("Failed to create type class {}!".format(name))
        log.debug(exception)
        raise CtfTestError("Error in create_type_class") from exception
Beispiel #28
0
def write_to_s3(workbook: Workbook, s3_key: str):
    """
    Function to save workbook to s3.

    Parameters:
    workbook (Workbook): The workbook to be saved.
    s3_key (str): If not a global scan, this will be used for the prefix. Otherwise ignored if global scan.
    """
    logger.debug('Writing spreadsheet to s3://%s/%s', BUCKET, s3_key)
    # Serialize the workbook into an in-memory buffer.
    buffer = io.BytesIO()
    workbook.save(buffer)
    payload = buffer.getbuffer().tobytes()

    if os.getenv('WRITE_LOCAL'):
        # Debug escape hatch: dump to disk instead of uploading.
        logger.debug('Writing to local disk, not uploading to s3')
        with open('local.xlsx', 'wb') as local_file_xlsx:
            local_file_xlsx.write(payload)
        return

    S3.put_object(Bucket=BUCKET, Key=s3_key, Body=payload)
Beispiel #29
0
def set_domains():
    """Populate the global domains list from the CLI and/or a file.

    Always guarantees the blank domain '' is present; logs the unique
    count when any real domains were loaded.
    """
    global domains

    logger.info('Loading domains')

    if conf.domain is not None:
        logger.debug('Loading domains from command line')
        add_domain(conf.domain)

    if conf.domainsfile is not None:
        logger.debug('Loading domains from file %s' % conf.domainsfile)
        parse_domains_file(conf.domainsfile)

    # Deduplicate (original order is discarded).
    domains = list(set(domains))

    if not domains:
        logger.info('No domains specified, using a blank domain')
        domains.append('')
    else:
        # The original 'elif len(domains) > 0' was redundant after the
        # == 0 check above.
        if '' not in domains:
            domains.append('')

        logger.info('Loaded %s unique domain%s' % (len(domains), 's' if len(domains) > 1 else ''))
Beispiel #30
0
def add_credentials(user=None,
                    password='',
                    lmhash='',
                    nthash='',
                    domain='',
                    line=None):
    """Build a Credential from explicit fields or a credentials-file line.

    Returns the Credential, or None when the line fails to parse or no
    user is available.
    """
    if line is not None:
        try:
            user, password, lmhash, nthash = parse_credentials(line)

            # Split an optional 'DOMAIN\\user' prefix into its parts
            # (clearer than the original throwaway '_' juggling).
            if user.count('\\') == 1:
                domain, user = user.split('\\')
        except credentialsError:
            # logger.warn is a deprecated alias; use warning().
            logger.warning('Bad line in credentials file %s: %s' %
                           (conf.credsfile, line))
            return

    if user is not None:
        credential = Credential(user, password, lmhash, nthash, domain)

        logger.debug('Parsed credentials: %s' % credential.get_identity())
        return credential
Beispiel #31
0
def bump_pdns_db_version(new_version, old_version):
    """
        Update the PowerDNS version stored in pdns_meta. Also store the previous version.
    """
    conn = DB()
    try:
        cursor = conn.create_cursor()
        # Parameterized to avoid SQL injection / quoting bugs from the
        # original f-string interpolation of the version strings.
        query = ("UPDATE pdns_meta SET db_version=?, db_version_previous=? "
                 "WHERE db_version=?")
        log.debug(f"Bumping DB version: [{old_version} -> {new_version}]")
        cursor.execute(query, (new_version, old_version, old_version))
        conn.commit()

    except Exception as error:
        # sqlite3.Error is an Exception subclass, so the original
        # (Exception, sqlite3.Error) tuple was redundant.
        conn.rollback()
        log.error(
            f"Was unable to bump the version number to the latest! [{old_version} -> {new_version}]"
        )
        log.debug(error)
        sys.exit(1)

    finally:
        if conn is not None:
            conn.close_all()
Beispiel #32
0
    def Auto(self):
        """Fly the vehicle through all configured waypoints automatically.

        Returns True when every waypoint was processed, False when
        cancelled, on navigation failure, or when no waypoints are set.
        """
        logger.debug('Auto(AI) start ...')
        flag = True
        if self.vehicle.wp.isNull():
            logger.warn('Waypoint is Null.Please set Waypoint')
            return False
        self.publish('Mode', 'AI_Auto')
        # Watcher lets an external cancel request abort the mission loop.
        watcher = CancelWatcher()
        for point in self.vehicle.wp.points:
            if watcher.IsCancel():
                logger.warn('Cancel Auto')
                flag = False
                break
            self.publish('Target', point)
            result = self.full_auto()
            if not result:
                logger.error("Navigation except exit")
                flag = False
                break
            # NOTE(review): increments self.wp, not self.vehicle.wp —
            # confirm these refer to the same waypoint object.
            self.wp.add_number()

        # Always run the vehicle's auto-mode cleanup, success or not.
        self.vehicle.Auto_finally()
        return flag
Beispiel #33
0
    def check_output(self,
                     output_contains=None,
                     output_does_not_contain=None,
                     exit_code=0,
                     name="default"):
        """
        Compares the output of the most recently executed command.
        ExecutionRunRemoteCommand or ExecutionRunLocalCommand must be called first.

        @param name: A name already registered with SSH_RegisterTarget to identify the connection. (Optional)
        @param output_contains: A substring that must be contained in stdout. (Example: "PASS") (Optional)
        @param output_does_not_contain: A substring that should not be contained in stdout. (Example: "FAIL") (Optional)
        @param exit_code: The expected exit code after the shell command is executed. (Optional default = 0)

        @return bool: True if successful, False otherwise.

        @par Example:
        @code
        {
            "command": "SSH_CheckOutput",
            "wait": 0,
            "data": {
                "name": "workstation",
                "output_contains": "Built target mission-install",
                "output_does_not_contain": "Error",
                "exit_code": 0
            }
        }
        """
        log.debug("SshPlugin.check_output")
        if name in self.targets:
            # Delegate the actual comparison to the registered target.
            return self.targets[name].check_output(output_contains,
                                                   output_does_not_contain,
                                                   exit_code)
        log.error("No Execution target named {}".format(name))
        return False
Beispiel #34
0
def host_inspect(target, extensive):
    """Resolve and geolocate a host, optionally gathering extended data.

    target may be an IP or hostname; exits with status 1 when it is
    neither.
    """
    if core.is_valid_ip(target):
        logger.debug("Ip Validation OK")
        logger.info("[+] Valid ip")
        logger.info("[*] Performing hostname conversion")
        try:
            value = core.get_host_by_ip(target)
            util.list_to_string(value)
        except Exception:
            # Narrowed from a bare 'except:'.
            logger.error("[-] ERROR: Cannot resolve hostname")

    elif core.is_valid_hostname(target):
        logger.debug("Host Validation OK")
        logger.info("[+] Valid host")
        logger.info("[*] Performing ip conversion")
        try:
            value = core.get_host_by_name(target)
            util.list_to_string(value)
        except Exception:
            # Narrowed from a bare 'except:'.
            logger.error("[-] ERROR: Cannot resolve hostname")

    else:
        msg = "[-] ERROR: You must provide a valid target. Given: " + target
        showhelp()
        logger.error(msg)
        sys.exit(1)

    db = GEOIPFILE
    geo = core.ip_to_country(core.get_ip(target), db)
    if geo:
        logger.info("[+] The host is situated in " + geo)
    else:
        logger.warning("[-] Cannot geolocalize the host")

    if extensive:
        logger.debug("Extensive probing")

        logger.info("[*] Starting extensive information gathering")

        # NOTE(review): results are gathered but never returned or used
        # here — presumably the calls log/print internally.
        whois = core.get_extensive_data(target, 0)

        info = core.get_extensive_data(target, 1)

        dns = core.get_extensive_data(target, 2)
Beispiel #35
0
def host_inspect(target, extensive):
    """Inspect a host: validate it as ip/hostname, resolve the other form,
    geolocate it and, when requested, collect extensive data.
    Exits with status 1 on an invalid target.
    """
    if core.is_valid_ip(target):
        logger.debug("Ip Validation OK")
        logger.info("[+] Valid ip")
        logger.info("[*] Performing hostname conversion")
        try:
            resolved = core.get_host_by_ip(target)
            util.list_to_string(resolved)
        except:
            logger.error("[-] ERROR: Cannot resolve hostname")

    elif core.is_valid_hostname(target):
        logger.debug("Host Validation OK")
        logger.info("[+] Valid host")
        logger.info("[*] Performing ip conversion")
        try:
            resolved = core.get_host_by_name(target)
            util.list_to_string(resolved)
        except:
            logger.error("[-] ERROR: Cannot resolve hostname")

    else:
        error_message = "[-] ERROR: You must provide a valid target. Given: " + target
        showhelp()
        logger.error(error_message)
        sys.exit(1)

    geo = core.ip_to_country(core.get_ip(target), GEOIPFILE)
    if geo:
        logger.info("[+] The host is situated in " + geo)
    else:
        logger.warning("[-] Cannot geolocalize the host")

    if extensive:
        logger.debug("Extensive probing")
        logger.info("[*] Starting extensive information gathering")

        whois = core.get_extensive_data(target, 0)
        info = core.get_extensive_data(target, 1)
        dns = core.get_extensive_data(target, 2)
Beispiel #36
0
    def _navigation(self):
        """Steer toward the current target until it is reached or cancelled.

        Repeatedly re-reads location/heading, yaws toward the target and
        moves forward; stops when within self.radius metres of the target
        or when the target falls outside the forward 90-degree cone.

        @return bool: True when the target is reached, False on an
                      AssertionError from the vehicle accessors; None when
                      cancelled via CancelWatcher.
        """
        watcher = CancelWatcher()
        radius = self.radius
        frequency = self.frequence
        try:
            target = self.get_target()
            CLocation = self.get_location()
            CYaw = self.get_heading()

            init_angle = angle_heading_target(CLocation, target, CYaw)
            self.condition_yaw(init_angle)

            while not watcher.IsCancel():
                CLocation = self.get_location()
                CYaw = self.get_heading()
                distance = get_distance_metres(CLocation, target)
                angle = angle_heading_target(CLocation, target, CYaw)

                if not self.InAngle(angle, 90) or distance <= radius:
                    logger.info("Reached Target!")
                    self.brake()
                    return True

                # half-angle subtended by the acceptance circle at this distance
                EAngle = int(math.degrees(math.asin(radius / distance)))

                logger.debug('{} {} {}'.format(distance, angle, EAngle))

                if not self.InAngle(angle, max(EAngle, self.Epsilon)):
                    self.brake()
                    self.condition_yaw(angle)
                self.forward()
                time.sleep(frequency)
                # raw_input('next')
        except AssertionError as e:  # fixed Python-2-only "except AssertionError, e"
            self.brake()
            logger.error(e)
            return False
Beispiel #37
0
 def process_types_second_pass(self, json_list):
     """Second pass over CCSDS type definitions: resolve remaining aliases.

     Any alias left undefined by the first pass is mapped to the (custom)
     type its actual_name resolved to; unknown actual names are logged as
     errors. Parse failures are logged and the entry is skipped.

     @param json_list: list of typedef dicts with 'alias_name'/'actual_name'.
     """
     for typedef in json_list:
         try:
             if 'alias_name' in typedef and 'actual_name' in typedef:
                 # Any aliases left undefined should now evaluate to a custom type
                 alias_name = typedef['alias_name']
                 actual_name = typedef['actual_name']
                 if alias_name not in self.type_dict:
                     if actual_name in self.type_dict:
                         self.type_dict[alias_name] = self.type_dict[
                             actual_name]
                         if self.config.log_ccsds_imports:
                             # BUG FIX: arguments were swapped relative to the
                             # message ("Mapped alias X to type Y")
                             log.debug("Mapped alias {} to type {}".format(
                                 alias_name, actual_name))
                     else:
                         log.error("Unknown alias name {} in {}".format(
                             actual_name, self.current_file_name))
                 elif self.config.log_ccsds_imports:
                     log.debug(
                         "Alias {} already defined, skipping...".format(
                             alias_name))
         except Exception as e:
             log.error("Unable to parse type definition in {}: {}".format(
                 self.current_file_name, e))
Beispiel #38
0
    def validate_cc_value(self, mid_dict, cc):
        """
        Resolve *cc* (a CC name, literal value, stringified value, or macro)
        to a CC name present in mid_dict['CC'].
        @return str: the valid CC name if found, else None
        """
        # pylint: disable=invalid-name
        found = False
        try:
            found = cc in mid_dict['CC']
            if not found:
                resolved = cc
                # strings may be macros and/or numeric literals
                if isinstance(resolved, str):
                    resolved = self.resolve_macros(resolved)
                    try:
                        resolved = int(resolved, 0)
                    except (TypeError, ValueError):
                        pass

                # integers are matched against each command code
                if isinstance(resolved, int):
                    for name, info in mid_dict['CC'].items():
                        if info['CODE'] == resolved:
                            resolved = name
                            break

                cc = resolved
                found = cc in mid_dict['CC']
        except TypeError as exception:
            log.error("Failed to query the MID dictionary.")
            log.debug(exception)

        if not found:
            log.error(
                "{0} not in MID object. Ensure {0} is defined in CCSDS Exports."
                .format(cc))

        return cc if found else None
Beispiel #39
0
    def initialize(self):
        """
        Set up the CfsController: process CCSDS exports to build the mid map,
        then construct the command, telemetry and local CFS interfaces.
        @return bool: True when the local CFS interface initialized, False otherwise.
        """
        log.debug("Initializing CfsController")
        if not self.process_ccsds_files():
            return False

        log.info("Starting Local CFS Interface to {}:{}".format(
            self.config.cfs_target_ip, self.config.cmd_udp_port))
        cmd_interface = CommandInterface(self.ccsds, self.config.cmd_udp_port,
                                         self.config.cfs_target_ip,
                                         self.config.endianess_of_target)
        tlm_interface = TlmListener(self.config.ctf_ip, self.config.tlm_udp_port)
        self.cfs = LocalCfsInterface(self.config, tlm_interface, cmd_interface,
                                     self.mid_map, self.ccsds)
        result = self.cfs.init_passed
        if result:
            log.info("CfsController Initialized")
        else:
            log.error("Failed to initialize LocalCfsInterface")
        return result
Beispiel #40
0
def prepare_requirement_scores(accounts: list, scores: Scores, requirements: dict) -> list:
    """
    Build the rows consumed by the scorecard's itemized worksheet.

    Parameters:
    accounts (list): accounts to report scores for
    scores (Scores): detailed scores keyed by accountId then requirementId
    requirements (dict): requirement definitions keyed by requirementId

    Returns:
    list: one dict per requirement:
        {
            description: requirement description
            severity: requirement severity
            source: requirement source
            numFailing: [score, score, score] # one score per account in order
        }
    """
    logger.debug('Preparing initial data')
    matrix_rows = []
    for requirement_id, requirement in requirements.items():
        num_failing = []
        for account in accounts:
            try:
                detailed_score = scores[account['accountId']][requirement_id]
                first_score = next(iter(detailed_score['score'].values()))
                num_failing.append(first_score['numFailing'])
            except: # pylint: disable=bare-except
                # add 'Err' if the score doesn't exist for some reason (it should always be created)
                num_failing.append('Err')
        matrix_rows.append({
            'description': requirement['description'],
            'requirementId': requirement['requirementId'],
            'severity': requirement['severity'],
            'numFailing': num_failing,
        })
    return matrix_rows
Beispiel #41
0
    def driver_commend(self, **kwargs):
        """Build Appium desired capabilities, start a Remote webdriver session
        and register it (and its device name) on the shared queues.

        @param kwargs: capability overrides merged into the defaults below;
                       expected to include at least 'ip', 'port' and 'name'
                       — TODO confirm with callers.
        """
        # Default capabilities; real values come from kwargs.
        local.desired_caps = {
            'platformName': '',
            'platformVersion': '',
            'deviceName': '',
            "unicodeKeyboard": "True",
            "resetKeyboard": "True",
            'udid': '',
            'noReset': 'True'
        }
        local.desired_caps.update(kwargs)
        # Appium server endpoint built from the caller-supplied ip/port.
        url = 'http://{ip}:{port}/wd/hub'.format(
            port=local.desired_caps.get('port'),
            ip=local.desired_caps.get('ip'))
        logger.debug('启动的Url:%s' % url)
        driver = webdriver.Remote(url, local.desired_caps)

        # Hand the session and device name to the consumers via queues.
        driver_queue.put(driver)
        device_name_queue.put(local.desired_caps.get('name'))
        # Per-device screenshot directory: clear it if present, else create it.
        app = APPPICTUREPATH.format(local.desired_caps.get('name'))
        if os.path.exists(app):
            self.tool.app_clear(app)
        else:
            os.makedirs(app)
Beispiel #42
0
    def init_connection(self,
                        host,
                        user=None,
                        port=None,
                        gateway=None,
                        ssh_config_path=None,
                        args=None,
                        name="default"):
        """
        Open an SSH connection to *host* on the execution target registered
        under *name*. Must be called before other remote commands will work.
        May be called again with the same name to connect to a different host,
        or with different names to hold concurrent connections.
           - **host**: hostname or IP to connect to, which may include the username and/or port.

        @param name: A name already registered with `SSH_RegisterTarget` to identify the connection. (Optional)
        @param user: User name for the connection; omit if embedded in `host`. (Optional)
        @param port: Port number for the connection; omit if embedded in `host`. (Optional)
        @param gateway: SSH gateway command string to proxy the connection to `host` (Optional)
        @param ssh_config_path: Path to an ssh config file with host definitions or extra parameters;
                                defaults to `~/.ssh/config` when not given. (Optional)
        @param args: Additional SSH connection options, as needed. See [Paramiko API docs] (Optional)
            (http://docs.paramiko.org/en/latest/api/client.html#paramiko.client.SSHClient.connect) for relevant values.

        @return bool: True if successful, False otherwise.
        """
        log.debug("SshPlugin.init_connection")
        # Guard: the name must have been registered beforehand.
        if name not in self.targets:
            log.error("No Execution target named {}".format(name))
            return False
        try:
            return self.targets[name].init_connection(
                host, user, port, gateway, ssh_config_path, args)
        except CtfTestError:
            return False
Beispiel #43
0
    def parse_command_packet(self, buffer):
        """
        Parse command packets from received buffer.

        Extracts the CCSDS command header, looks up the MID's command
        dictionary, deserializes the argument payload for the packet's
        function code, and forwards {"CC", "ARGS"} to on_packet_received.
        Unknown or malformed packets are logged and dropped.
        """
        try:
            header = self.ccsds.CcsdsCommand.from_buffer(
                buffer[0:self.cmd_header_offset])
            mid = header.get_msg_id()
        except ValueError:
            log.debug("Cannot retrieve command header.")
            return

        if mid not in self.mid_payload_map:
            self.log_unknown_packet_mid(mid)
            return

        cmd_dict = self.mid_payload_map[mid]
        cc_class = None
        # Skip the header bytes when the argument class does not include them.
        offset = self.cmd_header_offset if self.should_skip_header else 0
        # Find the argument class registered for this packet's function code.
        for value in cmd_dict.values():
            if value["CODE"] == header.get_function_code():
                cc_class = value["ARG_CLASS"]

        try:
            # Commands without arguments carry ARGS = None.
            payload = {
                "CC":
                header.get_function_code(),
                "ARGS":
                cc_class.from_buffer(buffer[offset:])
                if cc_class is not None else None
            }
        except (ValueError, IOError):
            self.log_invalid_packet(mid)
            return

        self.on_packet_received(mid, payload)
Beispiel #44
0
    def stor_file_FTP(self, path, file):
        """Upload a single local file to the connected FTP server via STOR.

        @param path: local directory containing the file.
        @param file: file name to upload (also used as the remote name).
        @return bool: True on success, False on any failure.
        """
        status = True
        if self.ftpConnect:
            fileToUpload = os.path.abspath(os.path.join(path, file))
            log.debug("Uploading {}...".format(fileToUpload))
            if os.path.isfile(fileToUpload):
                # 'with' guarantees the handle is closed even if STOR raises
                with open(fileToUpload, 'rb') as fh:
                    if self.ftp:
                        try:
                            self.ftp.storbinary('STOR %s' % file, fh)
                        except Exception:  # narrowed from bare except
                            log.warn("FTP put file failed {}".format(file))
                            status = False
                    else:
                        log.warn("FTP connection invalid for: %s." % self.ip)
                        status = False
            else:
                log.warn("File does not exist %s" % fileToUpload)
                status = False
        else:
            log.warn("FTP not Connected")
        return status
Beispiel #45
0
def ncr_handler(event, context):
    """Lambda handler: return non-compliant-resource (NCR) records for a scan.

    Reads accountId (multi-value) and requirementId from the query string,
    checks read authorization, then queries the NCR table per account —
    by requirement when requirementId is given, otherwise for all
    requirements — and returns {'ncrRecords': [...]}.
    """
    scan_id = event['scanId']
    user = event['userRecord']

    # Query-string maps may be absent (None) in the Lambda proxy event.
    multivalue_querystring_parameters = event.get(
        'multiValueQueryStringParameters') or {}
    querystring_parameters = event.get('queryStringParameters') or {}
    account_ids = multivalue_querystring_parameters.get('accountId', [])
    requirement_id = querystring_parameters.get('requirementId', False)
    logger.debug('Account Ids: %s', json.dumps(account_ids, default=str))
    logger.debug('Requirement ID: %s', json.dumps(requirement_id, default=str))
    authz.require_can_read_account(user, account_ids)

    # get requirements
    if requirement_id:
        requirements = {requirement_id: requirements_table.get(requirement_id)}
    else:
        all_requirements = requirements_table.scan_all()
        requirements = {}
        for requirement in all_requirements:
            requirements[requirement['requirementId']] = requirement
    logger.debug('Requirements: %s', json.dumps(requirements, default=str))

    ncr_records, to_parse = [], []
    for account_id in account_ids:
        if isinstance(requirement_id, str):
            # Single requirement: query the by-scanId index on requirement#account.
            to_parse = ncr_table.query_all(
                IndexName='by-scanId',
                KeyConditionExpression=Key('scanId').eq(scan_id)
                & Key('rqrmntId_accntId').eq('{}#{}'.format(
                    requirement_id, account_id)))
        else:
            # All requirements: range key starts with the account id.
            to_parse = ncr_table.query_all(
                KeyConditionExpression=Key('scanId').eq(scan_id)
                & Key('accntId_rsrceId_rqrmntId').begins_with(account_id))
        logger.debug('To parse: %s', json.dumps(to_parse, default=str))

        for item in to_parse:
            ncr = prepare_allowed_actions_output(
                initialize_output(scan_id, item), item, user, account_id,
                requirements[item['requirementId']])
            ncr_records.append(prepare_resource_output(ncr, item))

    return {'ncrRecords': ncr_records}
Beispiel #46
0
def gen_spreadsheets_handler(event, context):
    """Lambda handler: generate scorecard spreadsheets for the accounts in
    *event* and, for the global sheet type, also export JSON scores/resources.

    Uses module-level CACHE/CACHED_SCAN to reuse loaded data within one scan.
    """
    scan_id = event['openScan']['scanId']

    # clear CACHE if the scan has changed
    if CACHED_SCAN['scanId'] != scan_id:
        logger.debug('Clearing cache')
        CACHE.clear()
        CACHED_SCAN['scanId'] = scan_id

    accounts, s3_key, sheet_type = get_accounts(event)

    if not accounts:
        logger.info('No accounts, nothing to do')
        return

    load_scores(accounts)
    scores = get_scores()
    requirements = get_requirements()
    ncr_data = get_ncr(scan_id, accounts, sheet_type)

    workbook = create_base_workbook(ncr_data, accounts, requirements, scores)

    # Summary tabs only apply above the single-account sheet type.
    if sheet_type in [SheetTypes.GLOBAL, SheetTypes.PAYER_ACCOUNT]:
        add_accounts_tab(workbook, accounts, scores)
        add_sponsor_tab(workbook, accounts, scores)

    if sheet_type == SheetTypes.GLOBAL:
        # write date stamped global spreadsheet
        write_to_s3(workbook, '{}/global/scorecard-{}.xlsx'.format(PREFIX, date.today()))

        logger.debug('Writing global json scores')
        score_export = create_score_export(scan_id, accounts, scores, requirements)
        write_global_json_scores(score_export)

        logger.debug('Writing resource json')
        resource_export = create_resource_export(ncr_data, requirements)
        write_global_resources(resource_export)

    logger.debug('Writing to s3')
    write_to_s3(workbook, s3_key)
Beispiel #47
0
    async def _feature_model_0330_check_for_insert(self,
                                                   cloud_status: SyncStatus,
                                                   remote_status: SyncStatus):
        """Copy feature_model_0330 rows present in the cloud DB but missing
        from the remote DB.

        Rows with ids in (remote_status.feature_model_0330_id,
        cloud_status.feature_model_0330_id] are fetched in batches and
        inserted one by one; begin_id advances to the last inserted row id.

        @param cloud_status: sync status of the cloud (source) database.
        @param remote_status: sync status of the remote (target) database.
        """
        begin_id = remote_status.feature_model_0330_id
        end_id = cloud_status.feature_model_0330_id
        logger.info(
            "[_feature_model_0330_check_for_insert]->begin: ({}, {})".format(
                begin_id, end_id))

        if begin_id >= end_id:
            logger.info(
                "feature_check_for_insert, begin_id >= end_id, begin_id:{}, end_id:{}"
                .format(begin_id, end_id))
        else:
            while begin_id < end_id:
                rows = await self._cloud_feature_proxy.query_feature_model_0330_range(
                    self.province_code, self.city_code, self.town_code,
                    begin_id, end_id)
                # BUG FIX: the format string had 5 placeholders for 6 args,
                # so the row count was never actually logged.
                logger.debug("query_data:{} {} {} {} {}, count:{}".format(
                    self.province_code, self.city_code, self.town_code,
                    begin_id, end_id, len(rows)))
                if not rows:
                    break

                for row in rows:
                    begin_id = row["id"]
                    logger.debug(
                        "[insert_feature_model_0330]->begin {}".format(
                            begin_id))
                    logger.debug("prepare insert data:{}".format(row))
                    await self._remote_feature_proxy.add_feature_model_0330(row)
                    logger.debug(
                        "[insert_feature_model_0330]->end {}".format(begin_id))

        logger.info(
            "[_feature_model_0330_check_for_insert]->end: ({}, {})".format(
                begin_id, end_id))
Beispiel #48
0
    def test_server(self):
        """Poll the configured ports until every service is listening.

        Runs ``netstat -ano | grep <port>`` for each pending port; a port
        producing output is considered up and removed from self.ports.
        Retries every 5 seconds until the list is empty.

        @return bool: always True (returns only once all services are up).
        """
        while True:
            # BUG FIX: iterate over a snapshot — removing from the list being
            # iterated silently skips the element after each removed port.
            for port in list(self.ports):
                test_out_put = subprocess.getoutput("netstat -ano | grep %s" %
                                                    port)

                if test_out_put:
                    logger.debug('检验服务启动:%s' % test_out_put)
                    self.ports.remove(port)
                else:
                    logger.debug('端口 [%s] 服务启动失败5秒钟后尝试' % port)
            if not self.ports:
                break
            time.sleep(5)
        logger.debug('全部服务启动成功!')
        return True
Beispiel #49
0
 def test_server(self):
     """Poll the configured ports until every service is listening (Windows,
     via findstr).

     @return bool: always True (returns only once all services are up).
     """
     while True:
         # BUG FIX: iterate over a snapshot — removing from the list being
         # iterated silently skips the element after each removed port.
         for port in list(self.ports):
             # Non-empty netstat output means something is listening on the port.
             test_out_put = subprocess.getoutput("netstat -ano | findstr %s" % port)
             # Remove ports that are up; the outer loop ends when the list is empty.
             if test_out_put:
                 logger.debug('检验服务启动:%s' % test_out_put)
                 self.ports.remove(port)
             else:
                 logger.debug('端口 [%s] 服务启动失败5秒钟后尝试' % port)
         if not self.ports:
             break
         time.sleep(5)
     logger.debug('全部服务启动成功!')
     return True
 def update_exclusion(self, new_exclusion, delete_exclusion=None):
     """Update an exclusion so long as the exclusion has all required keys.

     Puts *new_exclusion* and/or deletes *delete_exclusion* in a single
     DynamoDB transaction, so a replace (delete old key + put new item)
     cannot partially apply.

     @param new_exclusion: item to write (serialized via self.serialize).
     @param delete_exclusion: item to remove, identified by its
                              accountId / rqrmntId_rsrceRegex key. (Optional)
     @raise exceptions.HttpServerErrorException: when the transaction is cancelled.
     """
     logger.debug('New exclusion: %s', json.dumps(new_exclusion,
                                                  default=str))
     logger.debug('Delete exclusion: %s',
                  json.dumps(delete_exclusion, default=str))
     transaction_items = []
     if new_exclusion:
         transaction_items.append({
             'Put': {
                 'TableName': self.table_name,
                 'Item': self.serialize(new_exclusion),
             },
         })
     if delete_exclusion:
         # Only the key attributes are needed for the Delete element.
         transaction_items.append({
             'Delete': {
                 'TableName':
                 self.table_name,
                 'Key':
                 self.serialize({
                     'accountId':
                     delete_exclusion['accountId'],
                     'rqrmntId_rsrceRegex':
                     delete_exclusion['rqrmntId_rsrceRegex'],
                 }),
             },
         })
     logger.debug('Transaction items: %s',
                  json.dumps(transaction_items, default=str))
     try:
         self.dynamodb.transact_write_items(TransactItems=transaction_items)
     except self.dynamodb.exceptions.TransactionCanceledException as err:
         logger.debug('Error making dynamodb transaction: %s',
                      json.dumps(err.response, default=str))
         raise exceptions.HttpServerErrorException('Internal server error')
Beispiel #51
0
 def process_types(self, json_list):
     """First pass over CCSDS type definition entries.

     Handles three entry shapes:
       - alias_name/actual_name: record aliases that resolve to ctypes
         (non-ctype aliases are resolved later by the second pass)
       - constant_name/constant_value: record macro constants
       - target/mids: collect MID name->value mappings for the configured target
     Entries that fail to parse are logged and skipped.

     @param json_list: list of typedef dicts from a CCSDS export file.
     """
     for typedef in json_list:
         try:
             if 'alias_name' in typedef and 'actual_name' in typedef:
                 # Aliases are type names that evaluate to ctype or custom types. Custom types must be mapped last.
                 alias_name = typedef['alias_name']
                 actual_name = typedef['actual_name']
                 if alias_name not in self.type_dict:
                     if actual_name in ctypes.__dict__:
                         c_type = ctypes.__dict__[actual_name]
                         self.type_dict[alias_name] = c_type
                     elif self.config.log_ccsds_imports:
                         log.debug(
                             "Alias {} is not a ctype, skipping...".format(
                                 alias_name))
                 elif self.config.log_ccsds_imports:
                     log.debug(
                         "Alias {} already defined, skipping...".format(
                             alias_name))
             elif 'constant_name' in typedef and 'constant_value' in typedef:
                 # Constants are CFS macros that evaluate to literal values
                 constant_name = typedef['constant_name']
                 constant_value = typedef['constant_value']
                 if constant_name not in self.type_dict:
                     self.type_dict[constant_name] = constant_value
                 elif self.config.log_ccsds_imports:
                     log.debug(
                         "Alias {} already defined, skipping...".format(
                             constant_name))
             elif 'target' in typedef and 'mids' in typedef:
                 # Targets are CFS target names that map MID names to values
                 if typedef['target'] == self.config.CCSDS_target:
                     if self.config.log_ccsds_imports:
                         log.info("Found {} MIDs for {}".format(
                             len(typedef['mids']),
                             self.config.CCSDS_target))
                     # MID values are strings in any base ("0x1820"); int(x, 0)
                     # auto-detects the base.
                     self.mids.update({
                         mid['mid_name']: int(mid['mid_value'], 0)
                         for mid in typedef['mids']
                     })
             else:
                 log.error("Invalid type definition in {}".format(
                     self.current_file_name))
         except Exception as e:
             log.error("Unable to parse type definition in {}: {}".format(
                 self.current_file_name, e))
Beispiel #52
0
    def navigation(self):
        """Fly toward the current target waypoint, rolling left/right to
        correct the heading, until the waypoint is reached or cancelled.

        @return bool: True when the waypoint is reached, False on an
                      AssertionError from the vehicle accessors; None when
                      cancelled via CancelWatcher.
        """
        watcher = CancelWatcher()
        radius = self.radius
        frequency = self.frequence
        try:
            target = self.get_target()
            CLocation = self.get_location()
            CYaw = self.get_heading()

            init_angle = angle_heading_target(CLocation, target, CYaw)
            self.condition_yaw(init_angle)

            while not watcher.IsCancel():
                CLocation = self.get_location()
                CYaw = self.get_heading()

                distance = get_distance_metres(CLocation, target)
                angle = angle_heading_target(CLocation, target, CYaw)

                if not self.InAngle(angle, 90) or distance <= radius:
                    # if distance <= radius:
                    logger.info("Reached Target Waypoint!")
                    self.brake()
                    return True
                # half-angle subtended by the acceptance circle at this distance
                EAngle = int(math.degrees(math.asin(radius / distance)))

                logger.debug('{} {} {}'.format(distance, angle, EAngle))

                if self.InAngle(angle, max(EAngle, self.Epsilon)):
                    self.control_FRU(ELE=1)
                else:
                    if angle > EAngle and angle <= 90:
                        logger.debug('Roll Left')
                        self.control_FRU(AIL=-1, ELE=1)
                    elif angle >= 270 and angle < 360 - EAngle:
                        logger.debug('Roll Right')
                        self.control_FRU(AIL=1, ELE=1)
                    else:
                        # Target far off-axis: stop and yaw directly toward it.
                        self.brake()
                        self.condition_yaw(angle)
                time.sleep(frequency)
        except AssertionError as e:  # fixed Python-2-only "except AssertionError, e"
            self.brake()
            logger.error(e)
            return False
Beispiel #53
0
    def check_noLMhash_policy(self):
        """Query the NoLmHash value under SYSTEM\\CurrentControlSet\\Control\\Lsa
        via the remote registry and report whether LM hashes are disabled.

        @return bool: True when NoLmHash == 1 (LM hashes are NOT stored),
                      False otherwise (including when the value is absent).
        """
        logger.debug('Checking NoLMHash Policy')
        ans = rrp.hOpenLocalMachine(self.__rrp)
        self.__regHandle = ans['phKey']
        ans = rrp.hBaseRegOpenKey(self.__rrp, self.__regHandle,
                                  'SYSTEM\\CurrentControlSet\\Control\\Lsa')
        keyHandle = ans['phkResult']
        try:
            dataType, noLMHash = rrp.hBaseRegQueryValue(
                self.__rrp, keyHandle, 'NoLmHash')
        except Exception:  # was bare except; value absent -> default 0 (hashes stored)
            noLMHash = 0

        if noLMHash == 1:
            logger.debug('LM hashes are NOT being stored')
            return True
        logger.debug('LM hashes are being stored')
        return False
Beispiel #54
0
    def sendCommand(
            self,
            cmd,
            clientAddres=EnumCommunicator.I2CEnum.I2C_CLIENT_YAI_MOTOR.value):
        """Send *cmd* over I2C, splitting it into two packets when it exceeds
        MAX_I2C_CONTENT, then read back and return the device's response.

        @param cmd: command string to transmit.
        @param clientAddres: I2C address of the target device.
        @return: the response string from the device, utf-8 encoded.
        """
        bus = smbus.SMBus(self.I2C_DEV)
        log.info(" >> %s" % cmd)
        # Long commands are split into two framed packets.
        totalParts = 1
        if len(cmd) > self.MAX_I2C_CONTENT:
            totalParts = 2
        cmd1 = cmd[:self.MAX_I2C_CONTENT]
        cmd1 = self.buildI2Cpackage(cmd1, totalParts, 1)
        log.debug("[p1]>>" + cmd1)
        messageInBytes = self.StringToBytes(cmd1)
        bus.write_i2c_block_data(clientAddres, 0, messageInBytes)

        # Give the device time to consume the first packet.
        time.sleep(.25)
        if totalParts > 1:
            cmd2 = cmd[self.MAX_I2C_CONTENT:]
            cmd2 = self.buildI2Cpackage(cmd2, totalParts, 2)
            log.debug("[p2]>>" + cmd2)
            messageInBytes = self.StringToBytes(cmd2)
            bus.write_i2c_block_data(clientAddres, 0, messageInBytes)

        time.sleep(0.2)
        data_received_from_Arduino = bus.read_i2c_block_data(
            clientAddres, 0, 32)
        # join is linear; the previous chr-by-chr += loop was quadratic
        smsMessage = "".join(chr(byte) for byte in data_received_from_Arduino)

        log.debug(data_received_from_Arduino)
        msgStr = smsMessage.encode('utf-8')

        log.info(" << %s" % msgStr)

        return msgStr
Beispiel #55
0
 def simplify_sentence(self, sentence):
     """Parse *sentence* into morpheme nodes and return its simplified form.

     Returns None when parsing produced no nodes.
     """
     parsed = self.to_nodes(sentence)
     for node in parsed:
         logger.debug('%s,%s' % (node.surface, ','.join(node.feature)))
     if not parsed:
         return None
     return self._to_natural_sentence(parsed)
Beispiel #56
0
 def resolve(self, sock, address, req):
     """Resolve a raw DNS request and, on success, send the response back
     to the requesting address over *sock*."""
     logger.debug("({}) received request from {}: '{}'".format(decode_msg_id(req), address, req))
     answer = rightdns_resolve(req)
     if answer:
         sock.sendto(answer, address)
Beispiel #57
0
 def disconnect(sid):
     """Socket.IO handler: log that the client *sid* disconnected."""
     message = 'user disconnect {}'.format(sid)
     logger.debug(message)
Beispiel #58
0
 def connect(sid, environ):
     """Socket.IO handler: log that the client *sid* connected."""
     message = 'user connect {}'.format(sid)
     logger.debug(message)
Beispiel #59
0
 def _to_natural_sentence(self, nodes):
     """Rebuild a simplified sentence string from morpheme *nodes*.

     Walks the nodes in order, dropping or replacing colloquial particles
     and auxiliaries and substituting root forms where appropriate, then
     joins the kept surfaces. Each kept element is a (text, node) pair so
     later rules can inspect the previously kept node's POS/features.
     The rule order matters: earlier elif branches take precedence.
     """
     result = [(nodes[0].surface, nodes[0])]
     for (i, node) in enumerate(nodes[1:]):
         # Drop binding particles は/も.
         if node.subpos == '係助詞' and node.rootform in ('は', 'も'):
             logger.debug('Skip 係助詞 は,も: %d' % i)
             pass
         elif node.surface == 'ん' and '未然形' in result[-1][1].feature:
             result.append((node.surface, node))
         elif node.surface == 'から' and node.subpos == '接続助詞':
             result.append((node.surface, node))
         # Replace conjunctive けど/けれど with が.
         elif node.surface in ('けど', 'けれど') and node.subpos == '接続助詞':
             result.append(('が', node))
             logger.debug('Replace %s -> が: %d' % (node.surface, i))
         # Sentence-final particle: drop it, and also drop a preceding
         # auxiliary verb (unless that auxiliary is ん).
         elif node.subpos == '終助詞' and result:
             if result[-1][1].pos == '助動詞' and result[-1][0] != 'ん':
                 del_node = result.pop(-1)
                 logger.debug('Delete %s: %d' % (del_node[0], i))
             logger.debug('Ignore %s: %d' % (node.surface, i))
         elif node.surface in ('たら', 'た') and '連用タ接続' in result[-1][1].feature:
             if result[-1][1].rootform not in ('*', ''):
                 result[-1] = (result[-1][1].surface, result[-1][1])
             result.append((node.surface, node))
             logger.debug('連用タ接続 %s %s: %d' % (result[-1][0], node.surface, i))
         elif '連用タ接続' in node.feature and node.rootform not in ('よい', '良い'):
             if node.rootform not in ('*', ''):
                 result.append((node.rootform, node))
                 logger.debug('連用タ接続 %s %s: %d' % (node.surface, node.rootform, i))
         # Auxiliary following ん: drop the ん instead of keeping both.
         elif node.pos == '助動詞' and result and result[-1][0] == 'ん':
             del_node = result.pop(-1)
             logger.debug('Delete %s: %d' % (del_node[0], i))
         elif (node.surface == 'か' and node.pos == '助詞' and
                 (result[-1][0] in ('ん', 'の') and result[-1][1].pos == '名詞')):
             del_node = result.pop(-1)
             logger.debug('Delete %s: %d' % (del_node[0], i))
         # Symbol after question particle か: replace the か with the symbol.
         elif node.pos == '記号' and result and (result[-1][0] == 'か' and result[-1][1].pos == '助詞'):
             del_node = result.pop(-1)
             result.append((node.surface, node))
             logger.debug('Replace %s -> %s: %d' % (del_node[0], result[-1][0], i))
         # Particle classes that trigger replacing the previous word by its
         # root form (the particle itself is not appended).
         elif (result and result[-1][0] != 'ん' and 
                 node.subpos in ('接続助詞', '格助詞', '終助詞', 'フィラー', '副詞化')):
             if result[-1][1].rootform not in ('*', ''):
                 result[-1] = (result[-1][1].rootform, result[-1][1])
                 logger.debug('Replace %s -> %s: %d' % (result[-1][1].surface, result[-1][0], i))
         # NOTE(review): ('接続詞') is a plain string, not a 1-tuple, so this
         # is a substring test — equality works here but looks unintended; confirm.
         elif node.pos in ('接続詞'):
             pass
         elif node.pos == '助詞' and node.subpos == '連体化':
             pass
         elif node.pos == '助動詞' and node.rootform == 'です':
             pass
         elif node.pos == '名詞':
             # Prefer the shorter root form for nouns when it is defined.
             if len(node.rootform) < len(node.surface) and node.rootform not in ('*', ''):
                 result.append((node.rootform, node))
             else:
                 result.append((node.surface, node))
         else:
             result.append((node.surface, node))
     return ''.join([n[0] for n in result])
Beispiel #60
0
def search(value):
    msg = "Search probing"
    logger.debug(msg)
    msg = "[-] Not Implemented Yet"
    logger.error(msg)