Example #1
    async def __get_reg_id(self, vid_data):
        """Get the video's register (target) id.

        Args:
            vid_data: a dict like the following
            {
                "F": 2,
                "V": "c00231h58yj",
                "E": 1
            }
        """
        _, vid, number = [value for key, value in vid_data.items()]  # relies on the F, V, E key order shown above
        jquery_id = f"jQuery_{str(int(time.time()))}"
        response = await self.__async_session.get(
            f"https://bullet.video.qq.com/fcgi-bin/target/regist?callback={jquery_id}&otype=json&vid={vid}&cid=p69wlzli02uqwms&lid=&g_tk=1115548900&_={str(int(time.time()))}"
        )
        targetid = json.loads(response.text[len(jquery_id) +
                                            1:-1]).get("targetid")
        if targetid:
            success(f"targetid [{targetid}]")
            self.__results[vid] = {
                "number": number,
                "targetid": targetid,
                "datas": {},
                "single_max_count": 0 if not conf.max_time else conf.max_time,
            }
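
The regist endpoint replies with JSONP: the JSON body is wrapped in the callback name passed as callback=, which is why the code slices off len(jquery_id) + 1 leading characters plus the trailing parenthesis before json.loads. A minimal sketch of that unwrapping, with a made-up callback name and payload:

import json

jquery_id = "jQuery_1700000000"                    # hypothetical callback name
raw = jquery_id + '({"targetid": "6416881458"})'   # shape of the JSONP response body
payload = json.loads(raw[len(jquery_id) + 1:-1])   # strip the "callback(" prefix and ")" suffix
assert payload["targetid"] == "6416881458"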
Example #2
        async def GetItem(item, point=0):
            async with self.__limits:
                try:
                    single_max_count = item["single_max_count"]
                    url = f"https://mfm.video.qq.com/danmu?otype=json&callback={jquery_id}&timestamp={point}&target_id={item['targetid']}&count=80&second_count=5&session_key=45094%2C118%2C{str(int(time.time()))}&_={str(int(time.time()))}"
                    addupdate()
                    warning(url)
                    response = await self.__async_session.get(url)
                    assert response.status_code == 200
                    jsondata = json.loads(response.text[len(jquery_id) + 1:-1],
                                          strict=False)
                    assert jsondata["comments"]
                    for item in jsondata["comments"]:
                        self.__results[key]["datas"][item["commentid"]] = item
                        addtotal()

                    if single_max_count == 0:
                        self.__results[key].update({
                            i: jsondata[i]
                            for i in ["tol_up", "single_max_count"]
                        })
                        self.__results[key]["single_max_count"] = jsondata[
                            "single_max_count"]
                    addsucess()
                    success(url)
                except Exception as e:
                    addfailed()
Example #3
File: add.py Project: chshouyu/ursa
def getPkgOpen(subpkgs):
    """Fetch the open module.
    Module repo: http://ufo.sogou-inc.com/git/open.git
    """
    targetfolder = os.path.join(conf.getConfig()['path'], 'static', 'js')
    utils.createfolder(TEMP_FOLDER)

    if not os.path.exists(targetfolder):
        utils.createfolder(targetfolder)

    subpkgs = subpkgs or conf.getConfig()['PKG_OPEN']

    subpkgs.insert(0, 'common')

    os.system('git clone http://ufo.sogou-inc.com/git/open.git ' + os.path.join(TEMP_FOLDER, 'open'))
    successpkg = []
    for pkg in subpkgs:
        source = os.path.join(TEMP_FOLDER, 'open', pkg)
        if not os.path.exists(source):
            log.warn('Sub package ' + pkg + ' does not exist in Open.')
            continue
        utils.copyfiles(source, os.path.join(targetfolder, 'open', pkg))
        successpkg.append(pkg)

    utils.removefolder(TEMP_FOLDER)
    log.success('Added Open packages: ' + ','.join(successpkg))
Example #4
def telegram_with_pic(pics, details, sid, rid):
    try:
        target = pics[0]
        gps_data = None

        for pic in pics:
            try:
                gps_data = read_exif_gps(copy.deepcopy(pic))
                if gps_data:
                    success(f"find gps data: {gps_data}")
                    target = pic
                    break
            except Exception as e:
                error(e)
        bot.sendPhoto(
            rid, target,
            details + (f"\nEXIF GPS: {gps_data}" if gps_data else ""))
        if gps_data:
            bot.sendLocation(rid, *gps_data[::-1])
        query = DarkNet_Notice.update({
            "telegram": True
        }).where(DarkNet_Notice.sid == sid)
        query.execute()
    except Exception as e:
        error(f"telegram_with_pic: {e}")
Example #5
 def update_cookie_domain(self):
     jar = requests.cookies.RequestsCookieJar()
     for key, value in self.session.cookies.items():
         jar.set(key, value, domain=self.domain)
     self.session.cookies = jar
     success(
         f"update all cookies to current domain: {self.session.cookies}")
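
The method rebuilds the jar so every cookie is pinned to the current domain, which matters when a site rotates mirror domains mid-session. A standalone sketch of the same idea against the public requests API (the domain name is illustrative):

import requests

session = requests.Session()
session.cookies.set("sid", "abc123")               # cookie collected earlier, any domain
jar = requests.cookies.RequestsCookieJar()
for key, value in session.cookies.items():
    jar.set(key, value, domain="mirror.example")   # re-pin every cookie to the new domain
session.cookies = jar                              # requests now sends them for mirror.example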
Example #6
    def __login(self):
        try:
            """
                ### Retry
                1. Retry because of network problems

                ### Re-register
                2. Retry because the account was banned
                3. Retry because of account authentication errors
            """
            warning(f"Login -> [{self.usr}:{self.pwd}]")
            self.__login_payload.update({
                "password": self.pwd,
                "username": self.usr
            })
            resp = self.__refresh_new_target(
                self.session.post(
                    self.__login_url,
                    data=self.__login_payload,
                    verify=False,
                    timeout=120,
                ))
            if self.usr in resp.text and "暗网欢迎您" in resp.text:
                success("Auth Success")
                self.types = Parser.get_current_type(resp)
                # assert self.types != {}
            else:
                error(f"Auth Failed: {self.__clean_log(resp)}")
                self.__save_error("__login.html", resp)
                if "已被封禁" in resp.text:
                    Cursor.ban_user(self.usr)
                    self.__reg()
                raise ValueError
        except KeyboardInterrupt:
            exit()
Example #7
def mail_to(
    content: str,
    subject: str = "",
    usr: str = Config.mail_usr,
    pwd: str = Config.mail_pwd,
    host: str = Config.mail_host,
    port: int = Config.mail_port,
    targets: list = None,
) -> None:
    targets = Config.mail_targets  # note: this always overrides the 'targets' argument
    if not targets:
        targets = [usr]

    warning(
        f"收到邮件发送任务  发件人: [{usr}]  主机: [{host}:{port}]  目标: [{targets}]  内容: [{len(content)}]"
    )
    with yagmail.SMTP(
            user=usr,
            password=pwd,
            port=port,
            smtp_ssl=False,
            smtp_skip_login=False if pwd else True,
            soft_email_validation=False,
            host=host if host else "smtp." + usr.split("@")[-1],
    ) as client:

        client.send(to=targets, subject=subject, contents=content)
        success(
            f"成功发送  发件人: [{usr}]  主机: [{host}:{port}]  目标: [{targets}]  内容: [{len(content)}]"
        )
Example #8
def getMetaData(fullname, playlists):
    log.info('accessing metadata...')
    index = 0
    tagInfo = []
    for track in playlists:
        name = playlists[track]
        if os.path.isfile(name):
            try:
                filename = os.path.basename(name)
                log.success('-------------------------')
                tag = TinyTag.get(name)
                if tag.title != '' or tag.artist != '':
                    song = str(tag.title+':'+tag.artist)
                    tagInfo.append(song)
                    log.warn('tag info:', filename.encode("ascii", "ignore"))
                    log.info('Artist:', tag.artist)
                    log.info('Album:', tag.album)
                    log.info('Title:', tag.title.encode("ascii", "ignore"))
                    log.info('Track number:', tag.track)
                    index += 1
                else:
                    log.warn('WARN: no id3 info provided')

            except Exception as e:
                log.err("An error occurred while getting metadata of the file:", name)
                log.err("Error:", e)
        else:
            log.err("The file: %s does not exist, check the path or filename" % (name))
    print
    log.err('track processing:', str(index))
    saveMetaData(fullname, tagInfo, index)
    return tagInfo
Example #9
def create_json(datas, filename="res.json"):
    if not datas:
        return
    with check_times():
        with open(filename, "w") as f:
            f.write(json.dumps(datas, ensure_ascii=False, indent=4))
            success(f"Saved {filename}")
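
A hedged usage sketch; check_times is assumed to be a timing context manager defined elsewhere in the same project:

results = {"c00231h58yj": {"number": 1, "targetid": "6416881458", "datas": {}}}
create_json(results, filename="danmaku.json")   # writes pretty-printed JSON with ensure_ascii=False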
Example #10
def ProcessImages(documents_to_process, documents, tempDir):

    if len(documents_to_process) == 0:
        return 0

    idx = 0
    for docID in documents_to_process:

        idx += 1
        log.info('[%i/%i] Processing images for docdb %i' %
                 (idx, len(documents_to_process), docID))

        document = next((doc for doc in documents if doc['id'] == docID), None)

        tempDocDir = tempDir + str(document['id']) + '/'
        thumDir = tempDocDir + '/thumbs/'
        os.mkdir(thumDir)

        for aFile in document['files']:
            base = aFile['base']
            exts = aFile['exts']

            # Favoured versions to make a thumbnail version from
            srcs = ['png', 'jpg', 'jpeg', 'eps', 'pdf', 'ps']

            for src in srcs:
                if src in exts:
                    # Create thumbnail
                    opt = ''
                    opt2 = None
                    if src == 'pdf':
                        opt = ' -define pdf:use-cropbox=true -transparent-color white '
                        opt2 = ' -transparent-color white '  # sometimes the cropbox is trouble
                    cmd = 'convert ' + opt + tempDocDir + base + '.' + src + ' -resize 400 -quiet ' + thumDir + base + '_thumb.png'
                    cmd2 = None
                    if opt2:
                        cmd2 = 'convert ' + opt2 + tempDocDir + base + '.' + src + ' -resize 400 -quiet ' + thumDir + base + '_thumb.png'

                    # In case of tarballs etc there can be subdirs required
                    # in the thumbs directory. Maybe we should have made
                    # them up-front?
                    try:
                        os.makedirs(os.path.dirname(thumDir + base))
                    except:
                        pass

                    if cmd2:
                        os.system(cmd + ' || ' + cmd2)
                    else:
                        os.system(cmd)
                    log.success('Created thumbnail from ' + base + '.' + src +
                                ': ' + thumDir + base + '_thumb.png')
                    break

    os.system('cp -rpf ' + tempDir + '* ' + config.WEB_PATH +
              config.PLOT_SUBDIR)
    shutil.rmtree(tempDir)

    return 0
Example #11
 def register(self):
     try:
         warning("register confirm")
         resp = self.refresh_new_target(
             self.session.get(
                 self.register_url,
                 headers={
                     "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
                     "Content-Type": "application/x-www-form-urlencoded",
                     "Referer": self.register_url,
                 },
             )
         )
         token, creation_time = Parser.get_token_and_creation_time(resp)
         warning("start register")
         resp = self.session.post(
             self.register_url,
             data={
                 "agreed": "===好的,我已明白,请跳转到下一页继续注册====",
                 "autim": self.autim,
                 "change_lang": "",
                 "creation_time": creation_time,
                 "form_token": token,
             },
             headers=self.make_reg_headers(resp),
         )
         token, creation_time = Parser.get_token_and_creation_time(resp)
         confirm_code, confirm_id = Parser.get_captcha(self.get_pic, resp)
         self.create_random_author()
         data = {
             "username": self.usr,
             "new_password": self.pwd,
             "password_confirm": self.pwd,
             "email": "*****@*****.**",
             "lang": "zh_cmn_hans",
             "tz_date": "UTC+08:00 - Antarctica/Casey - "
             + moment.now().format("YYYY-MM-DD HH:mm"),
             "tz": "Asia/Hong_Kong",
             "agreed": "true",
             "change_lang": "0",
             "confirm_code": confirm_code,
             "confirm_id": [confirm_id, confirm_id],
             "creation_time": creation_time,
             "form_token": token,
             "submit": " 用户名与密码已填好, 点此提交 ",
             "autim": self.autim,
         }
         resp = self.session.post(
             self.register_url, data=data, headers=self.make_reg_headers(resp)
         )
         assert "感谢注册" in resp.text
         success("register success")
         Cursor.create_new_user({"user": self.usr, "pwd": self.pwd})
     except KeyboardInterrupt:
         exit()
     except AssertionError as e:
         error("register failed")
         error(self.clean_log(resp))
         raise e
Example #12
def doArchive(manager, archiveDir, filePath, size, md5):
    """
    Perform the archive operation: record it in the database, copy the file, and so on.
    """
    relativePath = file_utils.resolveSavePath(archiveDir, filePath)
    manager.insertRecord(relativePath, size, md5)
    copyfile(filePath, f'{archiveDir}/{relativePath}')
    log.success(f'doArchive, filePath={filePath}, size={size}, md5={md5}')
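
A hedged usage sketch of doArchive; manager and file_utils come from the surrounding project, and the paths and hash below are illustrative:

# record the file in the database, then copy it into the archive directory
doArchive(manager, "/data/archive", "/tmp/report-2021-01.pdf",
          size=20480, md5="0123456789abcdef0123456789abcdef")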
Example #13
def CreateXLSX(datas, columns, filename='res.xlsx'):
    with checkTimes():
        xlsx = pandas.DataFrame(datas)
        xlsx.rename(columns=columns, inplace=True)
        writer = pandas.ExcelWriter(
            filename, options={'strings_to_urls': False})
        xlsx.to_excel(writer, "data")
        writer.save()
        success(f'Created {filename}')
Example #14
    def login(self):
        try:
            """
                ### Retry
                1. Retry because of network problems

                ### Re-register
                2. Retry because the account was banned
                3. Retry because of account authentication errors
            """
            warning(f"login -> [{self.usr}:{self.pwd}]")
            self.login_payload.update({
                "password": self.pwd,
                "username": self.usr
            })
            resp = self.session.post(
                self.login_url,
                data=self.login_payload,
                headers={
                    "Content-Type": "application/x-www-form-urlencoded",
                    "Referer": f"http://{self.domain}/ucp.php?mode=login",
                },
                allow_redirects=False,
            )
            debug(f"login[1] requests header: {resp.request.headers}")
            debug(f"login[1] response header: {resp.headers}")
            if resp.status_code == 302 and "Location" in resp.headers:
                resp = self.refresh_new_target(
                    self.session.get(
                        resp.headers.get("Location"),
                        headers={
                            "Referer":
                            f"http://{self.domain}/ucp.php?mode=login&sid={self.sid}",
                            "Cookie": self.get_cookie_string(),
                        },
                    ))
            else:
                Cursor.ban_user(self.usr)
                self.update_random_user()
                return
            debug(f"login[2] requests header: {resp.request.headers}")
            debug(f"login[2] response header: {resp.headers}")
            if self.usr in resp.text and "暗网欢迎您" in resp.text:
                success("Auth Success")
                self.types = Parser.get_current_type(resp)
            else:
                error(f"Auth Failed: {self.clean_log(resp)}")
                if re.findall("已被封禁|无效的|违规被处理", resp.text):
                    Cursor.ban_user(self.usr)
                    self.update_random_user()
                    # if not self.register():
                    #     return
                    # else:
                    #     raise ValueError
        except KeyboardInterrupt:
            exit()
Example #15
def make_new_tor_id():
    info("New Tor ID")
    controller = Controller.from_port(port=9151)
    controller.authenticate()
    controller.signal(Signal.NEWNYM)
    resp = requests.get(
        "https://ipinfo.info/html/my_ip_address.php",
        proxies={"https": "socks5://127.0.0.1:9150"},
    )
    success(f'Current IP: {jq(resp.text)("#Text10 > p > span > b").text()}')
Example #16
def WriteJSON(documents):
    os.system('cp -pf ' + config.BLESSED_PLOTS + ' ' + config.WEB_PATH)
    log.success('Copied ' + config.BLESSED_PLOTS + ' to ' + config.WEB_PATH)
    jsonSerialized = json.dumps(documents, sort_keys=True, indent=2)

    jsonFile = open(config.WEB_PATH + config.JSON_FILENAME, 'w')
    jsonFile.write(jsonSerialized)
    jsonFile.close()

    log.success('Wrote ' + config.WEB_PATH + config.JSON_FILENAME)
Example #17
def create_xlsx(datas, columns, filename="res.xlsx"):
    if not datas:
        return
    with check_times():
        xlsx = pandas.DataFrame(datas)
        xlsx.rename(columns=columns, inplace=True)
        writer = pandas.ExcelWriter(filename, options={"strings_to_urls": False})
        xlsx.to_excel(writer, "data")
        writer.save()
        success(f"Created {filename}")
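
A hedged usage sketch; the columns mapping renames DataFrame columns to human-readable headers, and check_times is assumed to be a project-local timing helper. Note that recent pandas releases replace options= with engine_kwargs= and writer.save() with writer.close(), so the snippet above targets the older API.

rows = [
    {"vid": "c00231h58yj", "count": 3},
    {"vid": "x81u02abcde", "count": 7},
]
create_xlsx(rows, columns={"vid": "Video ID", "count": "Comment count"}, filename="report.xlsx")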
Example #18
 def notice(self, data):
     """
     Trigger an alert.
     """
     if datetime.datetime.today().strftime(
             "%m-%d") != data["date"] and not DEBUG:
         return
     title, body = self.format_msg(data)
     success(f"notice {title}")
     feishu.delay(f"{title}\n\n{body}")
     mail_to.delay(body, title)
Example #19
 def AcceptConnections(self):
     log.success("Server", "Server started accepting connections")
     while self._ServerOn == True:
         c, addr = self.s.accept()
         log.info("New connection", "We have a new connection!!")
         c.settimeout(self._timeout)
         conn = connection(c, addr, self)
         new_thread = threading.Thread(target=conn.CheckForData)
         new_thread.start()
         self._connections.append(conn)
     return
Example #20
def getMetadata(identifier,
                user=ezidapp.models.AnonymousUser,
                prefixMatch=False):
    """
  Returns all metadata for a given qualified identifier, e.g.,
  "doi:10.5060/FOO".  'user' is the requestor and should be an
  authenticated StoreUser object.  The successful return is a pair
  (status, dictionary) where 'status' is a string that includes the
  canonical, qualified form of the identifier, as in:

    success: doi:10.5060/FOO

  and 'dictionary' contains element (name, value) pairs.  Unsuccessful
  returns include the strings:

    error: forbidden
    error: bad request - subreason...
    error: internal server error
    error: concurrency limit exceeded

  If 'prefixMatch' is true, prefix matching is enabled and the
  returned identifier is the longest identifier that matches a
  (possibly proper) prefix of the requested identifier.  In such a
  case, the status string resembles:

    success: doi:10.5060/FOO in_lieu_of doi:10.5060/FOOBAR
  """
    nqidentifier = util.normalizeIdentifier(identifier)
    if nqidentifier == None: return "error: bad request - invalid identifier"
    tid = uuid.uuid1()
    if not _acquireIdentifierLock(nqidentifier, user.username):
        return "error: concurrency limit exceeded"
    try:
        log.begin(tid, "getMetadata", nqidentifier, user.username, user.pid,
                  user.group.groupname, user.group.pid, str(prefixMatch))
        si = ezidapp.models.getIdentifier(nqidentifier, prefixMatch)
        if not policy.authorizeView(user, si):
            log.forbidden(tid)
            return "error: forbidden"
        d = si.toLegacy()
        util2.convertLegacyToExternal(d)
        if si.isDoi: d["_shadowedby"] = si.arkAlias
        log.success(tid)
        if prefixMatch and si.identifier != nqidentifier:
            return ("success: %s in_lieu_of %s" %
                    (si.identifier, nqidentifier), d)
        else:
            return ("success: " + nqidentifier, d)
    except ezidapp.models.StoreIdentifier.DoesNotExist:
        log.badRequest(tid)
        return "error: bad request - no such identifier"
    except Exception, e:
        log.error(tid, e)
        return "error: internal server error"
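
A hedged usage sketch of the call described in the docstring; the identifier is the docstring's own example, and the branching follows the documented string-vs-tuple returns:

result = getMetadata("doi:10.5060/FOO")
if isinstance(result, str):        # unsuccessful calls return an "error: ..." string
    print(result)
else:
    status, metadata = result      # successful calls return a (status, element dict) pair
    print(status)                  # e.g. "success: doi:10.5060/FOO"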
Example #21
def run_test_number(test_number):
    info('Running test {}'.format(test_number))
    with open('test-{}.txt'.format(test_number), 'r') as f, open('test-{}.log'.format(test_number), 'w') as log_file, open('start-{}.kwld'.format(test_number)) as start_kwld:
        contents = f.read().replace('\n', ' ')
        info('Parsing sentence: {}'.format(contents))
        try:
            actions = kjr_parser.parse_sentences(contents, log_file)
        except Exception:
            error('Exception while parsing sentence')
            traceback.print_exc()
            return 0
        robot_pattern = re.compile(r'robot (\d+) (\d+) (\w+) (\d+)')
        robot = None
        for line in start_kwld:
            if 'robot' in line:
                match = re.match(robot_pattern, line)
                y = int(match.group(1))
                x = int(match.group(2))
                direction = match.group(3)
                beepers = int(match.group(4))
                robot = RobotPos(x=x, y=y, direction=direction, beepers=beepers)
        java_file = 'TestRobot{}.java'.format(test_number)
        if robot is not None:
            generate_code(actions, test_number, 'TestRobot.template', java_file,
                          street=robot.y, avenue=robot.x, direction=robot.direction,
                          beepers=robot.beepers)
        else:
            generate_code(actions, test_number, 'TestRobot.template', java_file)

    with open('test-{}.log'.format(test_number), 'a') as log_file:
        info('Compiling {}'.format(java_file))
        ret = subprocess.call(['javac', '-cp', '.:KarelJRobot.jar', java_file], stdout=log_file, stderr=log_file)
        if ret == 0:
            info('Compilation succeeded')
        else:
            error('Compilation failed. See test-{}.log for details'.format(test_number))
            return 0
        info('Running {}'.format(java_file))
        try:
            ret = subprocess.call(['java', '-cp', '.:KarelJRobot.jar', 'TestRobot{}'.format(test_number)], stdout=log_file, timeout=TIMEOUT_LENGTH)
        except subprocess.TimeoutExpired:
            error('TestRobot{} timed out'.format(test_number))
            return 0
        if ret == 0:
            info('TestRobot{} returned with code {}'.format(test_number, ret))
        else:
            error('TestRobot{} returned with code {}'.format(test_number, ret))
            return 0
    score = evaluate('end-{}-test.kwld'.format(test_number), 'end-{}.kwld'.format(test_number))
    if score == 1:
        success('Test {} passed'.format(test_number))
    else:
        error('Test {} failed'.format(test_number))
    return score
Example #22
        def AcceptConnections(self):
                log.success("Server", "Server started accepting connections")
                while self._ServerOn == True:
                        c, addr = self.s.accept()
                        log.info("New connection", "We have a new connection!!")
                        c.settimeout(self._timeout)
                        conn = connection(c, addr, self)
                        new_thread = threading.Thread(target=conn.CheckForData)
                        new_thread.start()
                        self._connections.append(conn)
                return
Example #23
def parse_question_from_file(file):
    f = open(file, mode="r")
    q2_raw = f.read()
    f.close()

    soup2 = BeautifulSoup(q2_raw, 'html.parser')

    question = parse_question(soup2)

    log.success(f" question {question.id} parsed", module="parser")
    return question
Example #24
        def CheckForData(self):
                while self._server.ServerOn == True:
                        log.success("Connection", "Waiting for data from connection")
                        try:
                                data = self._conn.recv(64)
                                if not data:
                                        return
                                self._conn.sendall(self.SendAction(data))
                        except:
                                log.info("Connection", "Connection has expired")
                                self.Close()
                                return
Example #25
def add_git_remotes(workspace_path, repo):
    if 'remotes' not in repo or len(repo['remotes']) == 0:
        return

    for remote in repo['remotes']:
        if not remote_exists(workspace_path, repo, remote):
            log.info(' - adding remote "%s" to %s' % (remote['name'], repo['name']))
            repo_path = os.path.join(workspace_path, repo['name'])
            Process('git remote add %s %s && git fetch %s' % (remote['name'], remote['url'], remote['name']),
                    cwd=repo_path,
                    exit_on_fail=True).run()
            log.success(' - remote "%s" added to %s' % (remote['name'], repo['name']))
Example #26
def build_grid():
    """
    Builds the initial grid within the main map area, then used by the categorization algorithm.

    Success log: when the grid has been correctly built.
    """

    data = _load_data("file:///opt/hdfs/app/data/topics.twitter.json")
    grid = _compute_grid(data)
    _save_grid(grid)

    log.success("grid built")
Example #27
def saveMetaData(fullname, data, count):
    log.warn('saving metadata of %s...' % fullname)
    filename = fullname
    thefile = open(filename, 'w')

    thefile.write("[musikker]\n")
    thefile.write("entries=%s\n" % str(count))

    for item in data:
        thefile.write("tag=%s\n" % item)
    thefile.close()
    log.success('metadata saved in:', filename)
Example #28
def get_pic_array(url, path):
    resp = requests.get(url)
    success(f"GET PIC From: {url}")
    with open(path, "wb") as f:
        f.write(resp.content)
    new_path = conf.words_background
    if new_path:
        if os.path.exists(new_path):
            path = new_path
        else:
            error(f"pic [{new_path}] not found!")
    return np.array(Image.open(path))
Example #29
def setup_git_workspace(workspace):
    if not workspace_exists(workspace['path']):
        os.makedirs(workspace['path'])

    for repo in workspace['repos']:
        if not repo_exists(workspace['path'], repo):
            setup_git_repo(workspace['path'], repo)
        else:
            log.success('%s %s' % (u'\u2713', repo['name']))

        add_git_remotes(workspace['path'], repo)
        configure_git_repo(workspace['path'], repo)
Example #30
def configure_git_repo(workspace_path, repo):
    if 'gitconfig' not in repo:
        return

    repo_path = os.path.join(workspace_path, repo['name'])

    for key, value in repo['gitconfig'].iteritems():
        current_value = Process('git config %s' % key, cwd=repo_path, exit_on_fail=False).run().stdout.strip()
        if current_value != value:
            log.info(' - setting %s to "%s"' % (key, value))
            Process('git config %s "%s"' % (key, value), cwd=repo_path, exit_on_fail=True).run()
            log.success(' - %s set to "%s"' % (key, value))
Example #31
 def CheckForData(self):
     while self._server.ServerOn == True:
         log.success("Connection", "Waiting for data from connection")
         try:
             data = self._conn.recv(64)
             if not data:
                 return
             self._conn.sendall(self.SendAction(data))
         except:
             log.info("Connection", "Connection has expired")
             self.Close()
             return
Example #32
def executeSearch(
    user,
    constraints,
    from_,
    to,
    orderBy=None,
    selectRelated=defaultSelectRelated,
    defer=defaultDefer,
):
    """
  Executes a search database query, returning an evaluated QuerySet.
  'user' is the requestor, and should be an authenticated StoreUser
  object or AnonymousUser.  'from_' and 'to' are range bounds, and
  must be supplied.  'constraints', 'orderBy', 'selectRelated', and
  'defer' are as in formulateQuery above.
  """
    tid = uuid.uuid1()
    try:
        _modifyActiveCount(1)
        qs = formulateQuery(constraints,
                            orderBy=orderBy,
                            selectRelated=selectRelated,
                            defer=defer)
        log.begin(
            tid, "search/results", "-", user.username, user.pid,
            user.group.groupname, user.group.pid, str(orderBy), str(from_),
            str(to),
            *reduce(operator.__concat__,
                    [[k, unicode(v)] for k, v in constraints.items()]))
        qs = qs[from_:to]
        c = len(qs)
    except Exception, e:
        # MySQL's FULLTEXT engine chokes on a too-frequently-occurring
        # word (call it a "bad" word) that is not on its own stopword
        # list.  We weed out bad words using our own stopword list, but
        # not if they're quoted, and unfortunately MySQL chokes on bad
        # words quoted or not.  Furthermore, we are unable to add to
        # MySQL's stopword list.  If MySQL chokes, we retry the query
        # without any quotes in the hopes that any quoted bad words will
        # be removed by our own processing.
        if _isMysqlFulltextError(e) and any('"' in constraints.get(f, "")
                                            for f in _fulltextFields):
            constraints2 = constraints.copy()
            for f in _fulltextFields:
                if f in constraints2:
                    constraints2[f] = constraints2[f].replace('"', " ")
            log.success(tid, "-1")
            return executeSearch(user, constraints2, from_, to, orderBy,
                                 selectRelated, defer)
        else:
            log.error(tid, e)
            raise
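
A hedged usage sketch; the constraint key and attribute name are illustrative (the accepted fields are defined by formulateQuery, which is not shown here), and user must be an authenticated StoreUser or AnonymousUser:

# fetch the first page (rows 0-24) of results matching a keyword constraint
results = executeSearch(user, {"keywords": "climate"}, 0, 25)
for si in results:                 # an evaluated QuerySet of search identifiers
    print(si.identifier)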
Example #33
def run(port=8150, handler_class=PrHandler):
    try:
        httpd = BaseHTTPServer.HTTPServer(('', port), handler_class)
        log.success('server in http://localhost:' + str(port))
        httpd.serve_forever()
    except (KeyboardInterrupt, SystemExit):
        log.log("^C received, shutting down")
        httpd.socket.close()
    except socket.error:
        log.error('Maybe port ' + str(port) + ' already in use')
        log.error('You can try another port by use "ursa start 8234"')
        raise
        sys.exit(1)
Example #34
def run(port = 8150 , handler_class = PrHandler):
    try:
        httpd = BaseHTTPServer.HTTPServer(('', port), handler_class)
        log.success('server in http://localhost:' + str(port) )
        httpd.serve_forever()
    except (KeyboardInterrupt , SystemExit):
        log.log("^C received, shutting down")
        httpd.socket.close()
    except socket.error:
        log.error('Maybe port ' + str(port) + ' already in use')
        log.error('You can try another port by use "ursa start 8234"')
        raise
        sys.exit(1)
Example #35
def make_new_tor_id():
    info("reload tor")
    controller = None
    try:
        controller = Controller.from_port(port=9151)
        controller.authenticate()
        controller.signal(Signal.NEWNYM)
        resp = requests.get("http://icanhazip.com/",
                            proxies={"https": "socks5://127.0.0.1:9150"})
        success(f"current ip: {resp.text.strip()}")
    finally:
        if controller:                    # from_port may have failed before assignment
            controller.close()
Example #36
def entry(host, dry_run):
    try:
        local = Context()
        c = util.connect(host, sudo=True)

        PASS.unlock()  # TODO: only open if needed

        context = build_context(local)

        if dry_run:
            print("DRY RUN")

            @dataclass
            class success:
                ok: bool = True
                exited: int = 0
                stdout: str = ""

            def just_print(*args, **kwargs):
                args = " ".join(args)
                print(f"{args}")
                return success()

            c.run = just_print
            c.sudo = just_print
            c.put = just_print

        # TODO: validate context with jsonschema

        start_time = datetime.now()

        pre_deploy(c, local, context)
        deploy(c, context)
        post_deploy(c, context)

        # util.print_json(context)

        end_time = datetime.now()

        elapsed = end_time - start_time
        total_seconds = int(elapsed.total_seconds())
        hours, remainder = divmod(total_seconds, 60 * 60)
        minutes, seconds = divmod(remainder, 60)

        log.success(
            f"deployment complete, took {hours:02d}:{minutes:02d}:{seconds:02d}"
        )
    except KeyboardInterrupt:
        pass
    except Exception as err:
        log.error(err)
Example #37
def run(params , options):
    ptype = 'mobile' if ( len(params) and params[0]=='mobile') else 'pc'
    if os.listdir( conf.getConfig()['path'] ):
        log.warn('Not an empty folder.\nContinue may change your exists files.\nStill Continue?')
        iscontinue = utils.isyes(raw_input('(y/n):'))
        if not iscontinue:
            log.log('User cancel init.')
            sys.exit(1)

    log.log('Begin to init current folder')
        
    copyfiles(ptype)

    log.success('Init success.')
Example #38
        def CloseServer(self):
                for conn in self._connections:
                        conn.ForceClosing()

                self._ServerOn = False

                s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                s.connect(("127.0.0.1", self._PORT))  # Find another way
                # close all services
                for sr in self._services:
                        self._services[sr].CloseService()

                log.success("Server", "Server is closed.")
                self.s.close()
                return
Example #39
    def CloseServer(self):
        for conn in self._connections:
            conn.ForceClosing()

        self._ServerOn = False

        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s.connect(("127.0.0.1", self._PORT))  #Find another way
        #close all services
        for sr in self._services:
            self._services[sr].CloseService()

        log.success("Server", "Server is closed.")
        self.s.close()
        return
Example #40
    def __exit__(self, type, value, traceback):
        handled = False

        if not (type and value and traceback):
            log.success("OK")
            handled = True
        elif type == GuardWarning:
            log.warning("WARNING")
            log.warning(value)
            handled = True
        else:
            log.error("FAILED")

        sys.stdout.flush()
        return handled
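
A sketch of how this __exit__ pairs with a guard-style context manager; Guard, GuardWarning and copy_artifacts are assumed names modelled on the snippet above (a clean body logs "OK", a GuardWarning is logged and suppressed, anything else logs "FAILED" and propagates):

with Guard():
    copy_artifacts()                        # clean exit -> logs "OK"

with Guard():
    raise GuardWarning("nothing to copy")   # logged as a warning, then suppressed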
Example #41
    def __login(self):
        try:
            """
                ### Retry
                1. Retry because of network problems

                ### Re-register
                2. Retry because the account was banned
                3. Retry because of account authentication errors
            """
            warning(f"Login -> [{self.usr}:{self.pwd}]")
            self.__login_payload.update({"password": self.pwd, "username": self.usr})
            resp = self.__refresh_new_target(
                self.session.post(
                    self.__login_url,
                    data=self.__login_payload,
                    verify=False,
                    timeout=120,
                    # headers={
                    #     "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
                    #     "Accept-Encoding": "gzip, deflate",
                    #     "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
                    #     "Cache-Control": "no-cache",
                    #     "DNT": "1",
                    #     "Host": self.__domain,
                    #     "Pragma": "no-cache",
                    #     "Origin":self.__main_url,
                    #     "Referer": f"{self.__main_url}/ucp.php?mode=login&autim={self.__login_payload['autim']}",
                    #     "Upgrade-Insecure-Requests": "1",
                    #     "User-Agent": "Mozilla/5.0 (Windows NT 6.1; rv:60.0) Gecko/20100101 Firefox/60.0"
                    # }
                )
            )
            debug(resp.history)
            debug(resp.request.headers)
            if self.usr in resp.text and "暗网欢迎您" in resp.text:
                success("Auth Success")
                self.types = Parser.get_current_type(resp)
                # assert self.types != {}
            else:
                error(f"Auth Failed: {self.__clean_log(resp)}")
                self.__save_error("__login.html", resp)
                if "已被封禁" in resp.text:
                    Cursor.ban_user(self.usr)
                    self.__reg()
                raise ValueError
        except KeyboardInterrupt:
            exit()
Example #42
def run(params, options):
    ptype = 'mobile' if (len(params) and params[0] == 'mobile') else 'pc'
    if os.listdir(conf.getConfig()['path']):
        log.warn(
            'Not an empty folder.\nContinue may change your exists files.\nStill Continue?'
        )
        iscontinue = utils.isyes(raw_input('(y/n):'))
        if not iscontinue:
            log.log('User cancel init.')
            sys.exit(1)

    log.log('Begin to init current folder')

    copyfiles(ptype)

    log.success('Init success.')
Example #43
        def StartServer(self):
                self.s = socket.socket()
                self.s.bind(("", self._PORT))
                self.s.listen(5)
                # self.s.settimeout(self._timeout)
                self._ServerOn = True
                self._connections = []
                acc_connections_thread = threading.Thread(target=self.AcceptConnections)
                acc_connections_thread.start()

                # initializing services
                log.success("Server", "Initializing services")
                for string in self._services_array:
                        self._services[string] = __import__("{}.service".format(string)).service.service()

                log.success("Server", "Server is on")
                return
Example #44
    def generate_next_packet(self):
        time_delta = distributions.gamma(settings.GAMMA_SHAPE, settings.GAMMA_SCALE)
        packet = Packet(self.id, Scheduler.time+time_delta)
        self.packets_generated += 1

        if len(self.queue)-1 < settings.BUFFER:                                                     # since the queue adds each packet as it
            # logging                                                                               # is *decided* when it is generated and
            log.success(" ".join([str(self.id), "generated packet", str(packet.id)]))               # not as it *is* generated, one element
            # logging                                                                               # the queue is not counted
            self.queue.append(packet)
            heap_push(Scheduler.events, packet)
        else:
            # logging
            log.error(" ".join([str(self.id), "lost packet", str(packet.id), "(queue full)"]))
            # logging
            self.packets_lost += 1
            packet.is_lost = True
            heap_push(Scheduler.events, packet)                                                     # the packet is pushed into the event
Example #45
    def step():
        # logging
        log.plain("\n")
        log.success("stepping")
        # logging

        packet = heap_pop(Scheduler.events)

        Scheduler.time_previous = Scheduler.time
        Scheduler.time = packet.time

        current_node = NODES[packet.sender]                                                         # this works because the way the list is
        if not packet.is_queued:                                                                    # built introduces a 1:1 relation between
            current_node.generate_next_packet()                                                     # the id and the index;
        if not packet.is_lost:                                                                      # if the packet was lost (i.e. the queue
            current_node.handle_packet(packet)                                                      # was full), take no action

        # logging
        log.plain(Scheduler())
Example #46
def eventBattle(player):
    print("Woaw ! An ennemy ship is here. Get ready for battle !")
    # generate ennemy ship
    ennemy = character.Character(5 * player._level, 1 * player._level, 5 * player._level, 0, 0, 10, 0, 1, "ennemy")
    # Fight !
    turn = 0
    while ennemy._life != 0 and player._life != 0:
        order = input("What do you want to do ? [fight - escape - status] \n >>>")
        if order == "fight":
            if turn % 2 == 0:  # player's turn
                ennemy.getDamages(player.dealtDamages())
            else:
                player.getDamages(ennemy.dealtDamages())
        elif order == "escape":
            rand_event = random.randint(1, 3)
            if rand_event == 2:
                log.success("Escaping succeeded !")
                return True
            else:
                log.failure("failed escape")
                player.getDamages(ennemy.dealtDamages())
        elif order == "status":
            player.status()
            ennemy.status()

        turn = turn + 1

    if ennemy._life == 0 and player._life != 0:
        log.success("You defeated the ennemy !")
        player.earnXP(10)
        player.earnCredits(10)
        player.levelUp()
        return True
    elif player._life == 0 and ennemy._life != 0:
        log.failure("You've been defeated !")
        return False
    else:
        log.debug("Should not happen")
        return False
Example #47
def compileHTML(needCompress=False, needHtml=False):
    """Add a timestamp to every tpl file."""
    base = os.path.join(PATH, 'build', 'template')
    tplfiles = []
    for dirpath, dirnames, filenames in os.walk(base):
        tplfiles.extend([os.path.join(dirpath, f) for f in filenames if f.endswith('.tpl')])
    if COMPILE_FOLDER:
        for dirpath, dirnames, filenames in os.walk(os.path.join(PATH, 'build', COMPILE_FOLDER)):
            tplfiles.extend([os.path.join(dirpath, f) for f in filenames])

    for tpl in tplfiles:
        f = parser.compileHTML(tpl, needCompress)
        utils.writefile(tpl, f)

    if needHtml:
        log.log('Render html file.\nIt will under build folder.')
        files = os.listdir(os.path.join(base))
        tplfiles = []
        for dirpath, dirnames, filenames in os.walk(base):
            tplfiles.extend([os.path.join(dirpath, f) for f in filenames if f.endswith('.tpl')])
        for fname in tplfiles:
            token = fname.replace(base + '/', '').replace('.tpl', '')
            try:  # some pages need to be force-compiled
                html = parser.parseTpl(token, isbuild=True)

                if token.find('/') != -1:
                    subfolder = os.path.join(PATH, 'build', 'html', token.split('/')[0])
                    if not os.path.exists(subfolder):
                        utils.createfolder(subfolder)

                utils.writefile(os.path.join(PATH, 'build', 'html', token + '.html'), html)
            except Exception as e:
                log.error(str(e))
                if not conf.getConfig().get('html_force_output'):
                    raise
        log.success('Render html success')
Example #48
 def handle_packet(self, packet):                                                                # a new packet, from the point in time
     if self.is_idle():                                                                          # where it *would* have been handled
         # logging
         log.success(" ".join([str(self.id), "sent packet", str(packet.id)]))
         # logging
         self._send_packet(packet)
     elif self.is_sending() and packet.time == Scheduler.time_previous:                          # the node had multiple packets scheduled
         # logging                                                                               # at the same time, so if it is already
         log.success(" ".join([str(self.id), "queued packet", str(packet.id)]))                  # sending it queues the next one(s)
         # logging
         self._queue_packet(packet)
     elif self.is_receiving() and packet.time == Scheduler.time_previous:                        # two (or more) queued packets are sent
         # logging                                                                               # at the same time by neighbouring nodes
         log.warning(" ".join([str(self.id), "sent packet", str(packet.id), "(expect a collision)"]))
         # logging
         self._send_packet(packet)
     else:
         # logging
         log.success(" ".join([str(self.id), "queued packet", str(packet.id)]))
         # logging
         self._queue_packet(packet)
Example #49
def setup_git_repo(workspace_path, repo):
    repo_path = os.path.join(workspace_path, repo['name'])
    log.info('cloning "%s" repo (%s) into %s' % (repo['name'], repo['origin'], repo_path))

    command = 'git clone %s %s' % (repo['origin'], repo['name'])
    Process(command,
            cwd=workspace_path,
            error_msg='cloning "%s" (%s) failed' % (repo['name'], repo['origin']),
            exit_on_fail=True).run()
    log.success('%s repo (%s) cloned into %s' % (repo['name'], repo['origin'], repo_path))

    if 'upstream' in repo:
        log.info('adding upstream to %s' % repo['name'])
        if not remote_exists(workspace_path, repo, {'name': 'upstream'}):
            upstream = repo['upstream']
            command = 'git remote add upstream %s && git fetch %s' % (upstream, upstream)
            Process(command,
                    cwd=repo_path,
                    error_msg='adding upstream to %s failed' % repo['name'],
                    exit_on_fail=True).run()
            log.success('added upstream to %s' % repo['name'])
        else:
            log.success('upstream already exists')
Example #50
 def Close(self):
         self._server.CloseConnection(self)
         self._conn.close()
         log.success("Connection", "Connection is closed!")
         return
Example #51
	def levelUp(self):
		if self._xp >= self._level * 100:
			self._xp = 0
			self._level = self._level + 1
			log.success("You leveled up !")
Example #52
def run(params, options):
    """
    """
    tmbegin = time.time()

    buildtype = None
    if params and len(params):
        if conf.getConfig().get(params[0]):
            buildtype = params[0]
        else:
            log.error("No such build type:" + params[0])
            sys.exit(1)

    utils.removefolder(BUILD_DIR)
    utils.createfolder(BUILD_DIR)
    # work directly inside the build directory
    utils.copyfiles("template", os.path.join(BUILD_DIR, "template"))
    utils.copyfiles("static", os.path.join(BUILD_DIR, "static"))

    if COMPILE_FOLDER:
        utils.copyfiles(COMPILE_FOLDER, os.path.join(BUILD_DIR, COMPILE_FOLDER))

    # Modules are split into CSS and JS. 'require_modules' used to stand for JS;
    # 'require_js_modules' is now preferred, while 'require_css_modules' stands for CSS.
    if conf.getConfig().get("require_modules"):
        log.warn("'require_modules' is deprecated,you should use 'require_js_modules' instead!")

    require_modules = conf.getConfig().get("require_js_modules") or conf.getConfig().get("require_modules") or ["main"]
    require_js_modules = require_modules  # [email protected]: legacy alias
    require_css_modules = conf.getConfig().get("require_css_modules") or ["main"]

    # @deprecated
    # maincss  = os.path.join( PATH , BUILD_DIR , 'static' , 'css' , conf.getConfig().get('css_folder') or '' , 'main.css' )

    try:
        log.log("Combine css&js with r.js")
        for module in require_js_modules:
            js = os.path.join(PATH, BUILD_DIR, "static", "js", conf.getConfig().get("js_folder") or "", module + ".js")
            p = subprocess.Popen(
                "r.js -o name="
                + module
                + " out="
                + js
                + " optimize=none baseUrl="
                + os.path.join(PATH, BUILD_DIR, "static", "js", conf.getConfig().get("js_folder") or ""),
                stdout=subprocess.PIPE,
                stderr=subprocess.PIPE,
                shell=True,
            )  # compression is done with YUICompressor; this step only concatenates
            (o, e) = p.communicate()
            print o
            print e
            if not p.returncode == 0:
                raise IOError("Combine js failed")

        # [email protected]: combine the css set
        for module in require_css_modules:
            css = os.path.join(
                PATH, BUILD_DIR, "static", "css", conf.getConfig().get("css_folder") or "", module + ".css"
            )
            p = subprocess.Popen(
                "r.js -o cssIn=" + css + " out=" + css, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True
            )  # the cssIn argument keeps the .css suffix
            (o, e) = p.communicate()
            print o
            print e
            if not p.returncode == 0:
                raise IOError("Combine css failed")
        log.success("Combine css&js with r.js success.")
    except:
        log.error("Please insure you have installed r.js on your computer")
        raise

    log.log("Begin to add Timestamps...", True)
    # @todo: css inside HTML may also need timestamps
    compileCss()
    log.success("Success!")

    if options.get("html"):
        utils.createfolder(os.path.join(BUILD_DIR, "html"))

    log.log("Begin to compile tpls...")
    compileHTML(options.get("compress"), options.get("html"))
    log.success("Success!")

    log.log("Begin to replace all token...", True)

    compileCommon(buildtype)
    log.success("Success!")

    if options.get("compress"):
        log.log("Begin to compile Js...", True)
        for module in require_js_modules:
            js = os.path.join(PATH, BUILD_DIR, "static", "js", conf.getConfig().get("js_folder") or "", module + ".js")
            subprocess.call(
                "java -jar " + YCPATH + " --type js --charset " + conf.getConfig()["encoding"] + " " + js + " -o " + js,
                shell=True,
            )
        log.success("Success!")

        log.log("Begin to compile Css...", True)
        for module in require_css_modules:  # [email protected]: compile the css set
            css = os.path.join(
                PATH, BUILD_DIR, "static", "css", conf.getConfig().get("css_folder") or "", module + ".css"
            )
            subprocess.call(
                "java -jar "
                + YCPATH
                + " --type css --charset "
                + conf.getConfig()["encoding"]
                + " "
                + css
                + " -o "
                + css,
                shell=True,
            )
        # subprocess.call( 'java -jar ' + YCPATH + ' --type css --charset ' + conf.getConfig()['encoding'] + ' ' + maincss + ' -o ' + maincss , shell=True);
        log.success("Success!")

    log.success("Compiled successfully.")
    log.success("Time cost %s s." % (time.time() - tmbegin))
Example #53
def run(params , options):
    """
    """
    tmbegin = time.time()

    buildtype = None
    if params and len(params):
        if conf.getConfig().get(params[0]):
            buildtype = params[0]
        else:
            log.error('No such build type:' + params[0])
            sys.exit(1)

    utils.removefolder(BUILD_DIR);
    utils.createfolder(BUILD_DIR);
    # work directly inside the build directory
    utils.copyfiles( 'template' , os.path.join(BUILD_DIR , 'template') )
    utils.copyfiles( 'static' , os.path.join(BUILD_DIR , 'static') )
    
    if COMPILE_FOLDER:
        utils.copyfiles( COMPILE_FOLDER , os.path.join(BUILD_DIR , COMPILE_FOLDER) )
        
    # Modules are split into CSS and JS. 'require_modules' used to stand for JS;
    # 'require_js_modules' is now preferred, while 'require_css_modules' stands for CSS.
    if conf.getConfig().get('require_modules'):
        log.warn("'require_modules' is deprecated,you should use 'require_js_modules' instead!")

    require_modules = conf.getConfig().get('require_js_modules') or conf.getConfig().get('require_modules') or ['main']
    require_js_modules = require_modules  # [email protected]: legacy alias
    require_css_modules = conf.getConfig().get('require_css_modules') or ['main']
    
    #@deprecated
    #maincss  = os.path.join( PATH , BUILD_DIR , 'static' , 'css' , conf.getConfig().get('css_folder') or '' , 'main.css' )

    try:
        log.log( 'Combine css&js with r.js' )
        for module in require_js_modules:
            js = os.path.join(PATH, BUILD_DIR , 'static' , 'js' , conf.getConfig().get('js_folder') or '' , module + '.js' )
            subprocess.call( 'node ' + RJSPATH +' -o name=' + module + ' out='+ js + ' optimize=none baseUrl=' + os.path.join(PATH , BUILD_DIR , 'static' , 'js' , conf.getConfig().get('js_folder') or '')  , shell=True)  # compression is done with YUICompressor; this step only concatenates
        
        # [email protected]: combine the css set
        for module in require_css_modules:
            css=os.path.join( PATH , BUILD_DIR , 'static' , 'css' , conf.getConfig().get('css_folder') or '' , module + '.css' )
            subprocess.call( 'node ' + RJSPATH + ' -o cssIn=' + css + ' out=' + css  , shell=True)  # the cssIn argument keeps the .css suffix
        log.success( 'Combine css&js with r.js success.' )
    except:
        log.error('Please insure you have installed r.js on your computer')
        raise
    
    log.log('Begin to add Timestamps...' , True)
    # @todo: css inside HTML may also need timestamps
    compileCss();
    log.success('Success!')


    if options.get('html'):
        utils.createfolder( os.path.join( BUILD_DIR ,  'html'))

    log.log('Begin to compile tpls...' )
    compileHTML(options.get('compress') , options.get('html'))
    log.success('Success!')
    

    log.log('Begin to replace all token...', True)

    compileCommon(buildtype)
    log.success('Success!')

    if options.get('compress'):
        log.log('Begin to compile Js...' , True)
        for module in require_js_modules:
            js = os.path.join(PATH, BUILD_DIR , 'static' , 'js', conf.getConfig().get('js_folder') or ''  , module + '.js' )
            subprocess.call( 'java -jar ' + YCPATH + ' --type js --charset ' + conf.getConfig()['encoding'] + ' ' + js + ' -o ' + js , shell=True );
        log.success('Success!')

        log.log('Begin to compile Css...' , True)
        for module in require_css_modules:  # [email protected]: compile the css set
            css = os.path.join(PATH, BUILD_DIR , 'static' , 'css', conf.getConfig().get('css_folder') or ''  , module + '.css' )
            subprocess.call( 'java -jar ' + YCPATH + ' --type css --charset ' + conf.getConfig()['encoding'] + ' ' + css + ' -o ' + css , shell=True );
       # subprocess.call( 'java -jar ' + YCPATH + ' --type css --charset ' + conf.getConfig()['encoding'] + ' ' + maincss + ' -o ' + maincss , shell=True);
        log.success('Success!')


    log.success('Compiled successfully.')
    log.success('Time cost %s s.' % (time.time()-tmbegin) )