Ejemplo n.º 1
0
def sitemap_data():
    """Build sitemap URL entries for posts, code snippets and projects.

    :return: list of dicts with ``loc``, ``lastmod`` (W3C datetime with a
        "Z" suffix) and ``chgfreq`` keys; an empty list on any error.
    """
    def _entry(loc, stamp, freq):
        # Convert the stored "YYYY-mm-dd HH:MM:SS" stamp to sitemap format.
        lastmod = datetime.datetime.strptime(
            stamp, "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%dT%H:%M:%SZ")
        return dict(loc=loc, lastmod=lastmod, chgfreq=freq)

    urlset = []
    try:
        for post in Post.where(status=1):
            urlset.append(_entry(
                "http://www.talkincode.org/news/post/view/%s" % post.id,
                post.modified, "daily"))

        for code in Code.where():
            urlset.append(_entry(
                "http://www.talkincode.org/code/view/%s" % code.id,
                code.create_time, "monthly"))

        for proj in Project.where():
            urlset.append(_entry(
                "http://www.talkincode.org/open/proj/view/%s" % proj.id,
                proj.created, "monthly"))

        return urlset
    except Exception as e:
        # BUG FIX: the message used to say "login error" (copy-paste slip)
        # and the Py2-only "except Exception, e" syntax was replaced.
        logger.error("sitemap data error,%s" % e)
        return []
Ejemplo n.º 2
0
def get_tags(limit):
    """Return the ``limit`` most frequent tags across codes and posts.

    :param limit: maximum number of (tag, count) pairs to return
    :return: list of (tag, count) tuples sorted by count, descending
    :raises Exception: re-raised after logging
    """
    tagset = {}

    def _count(tag_string):
        # Tags are stored comma-separated; None/empty means no tags.
        if not tag_string:
            return
        for t in tag_string.split(","):
            # dict.get replaces the Py2-only has_key() calls.
            tagset[t] = tagset.get(t, 0) + 1

    try:
        for code in Code.where():
            _count(code.tags)
        for post in Post.where(status=1):
            _count(post.tags)
        sort_tags = sorted(tagset.items(), key=lambda d: d[1], reverse=True)
        # Leftover `print sort_tags` debug statement (Py2-only syntax) removed.
        return sort_tags[:limit]
    except Exception as e:
        logger.error("get tags error,%s" % e)
        # Bare raise keeps the original traceback (was `raise e`).
        raise
Ejemplo n.º 3
0
    def open(self):
        """Websocket open handler: detect the client device type from the
        query arguments, then validate or reuse the token to register this
        socket for the user."""
        # TO DO: Handle possible errors
        token = self.get_argument('token', None)
        app_token = self.get_argument('fbtoken', None)

        # Device detection: an Android id wins, then an iOS id, else web.
        device_type = 'WEB'
        device_id = self.get_argument('idandroid', None)
        if device_id is not None:
            device_type = 'ANDROID'
        else:
            device_id = self.get_argument('idios', None)
            if device_id is not None:
                device_type = 'IOS'

        logger.info('WEBSOCKET OPEN: token={}'.format(token))

        if not token:
            logger.error("Websocket attempt without token : IP {} HOST {}".format(self.request.remote_ip,
                                                                                  self.request.host))
            return

        if token in TOKENS:
            # Already-validated token: attach this socket to the known user.
            self.add_websocket(TOKENS[token])
        else:
            # Unknown token: kick off asynchronous validation.
            self.handle_token(token=token,
                              device_id=device_id,
                              device_type=device_type,
                              app_token=app_token)
 def findAppsWithAutoscaleLabels(self):
     """Load all Marathon apps and register those opting into autoscaling.

     Apps carrying LABEL_FOR_AUTOSCALE_ENABLE are wrapped in a MarathonApp,
     their mandatory/optional labels are copied onto it as attributes
     (numeric values coerced to int) and the result is stored in
     ``self.dict_apps`` keyed by app id.
     """
     def _copy_label(target, labels, label):
         # Numeric label values become ints so they can be used in math.
         value = labels[label]
         if value.isnumeric():
             value = int(value)
         setattr(target, label, value)

     # Renamed from `list`, which shadowed the builtin.
     apps = self.marathon_cli.list_apps(embed_counts=True,
                                        embed_task_stats=True)
     logger.debug('Lista recebida {}'.format(apps))
     if len(apps) == 0:
         logger.warning('0 apps loaded. Your marathon have apps?')
     for app in apps:
         if LABEL_FOR_AUTOSCALE_ENABLE not in app.labels:
             logger.debug(
                 'App: [{}] :: dont have {} = True. If you want to scale, please add labels.'
                 .format(app.id, LABEL_FOR_AUTOSCALE_ENABLE))
             continue
         new_app = MarathonApp(app.id)
         new_app.tasksRunning = app.tasks_running
         new_app.tasksStaged = app.tasks_staged
         for label in MANDATORY_LABELS_APP:
             if label in app.labels:
                 _copy_label(new_app, app.labels, label)
             else:
                 logger.error(
                     'App: [{}] :: dont have MANDATORY_LABELS :: {}'.
                     format(app.id, label))
         for label in OPTIONAL_LABELS_APP:
             if label in app.labels:
                 _copy_label(new_app, app.labels, label)
         self.dict_apps[app.id] = new_app
Ejemplo n.º 5
0
    async def update_by_conditions(cls,
                                   values: dict,
                                   conditions: "list | None" = None,
                                   return_fields: "list | set | None" = None,
                                   force: bool = False) -> "list | bool":
        """Update rows of this table matching ``conditions``.

        :param values: column -> new value mapping passed to ``values()``
        :param conditions: SQLAlchemy filter clauses, ANDed together
        :param return_fields: column names for RETURNING; defaults to ['id']
        :param force: allow an unconditional (whole-table) update when no
            conditions are given
        :return: fetched RETURNING rows on success, ``False`` otherwise

        NOTE: the old annotations ``list or set`` / ``dict or bool``
        evaluated to just ``list`` / ``dict``; string annotations keep the
        intent without importing typing.
        """
        result = False
        if not conditions and force:
            # An explicit empty list means "update every row".
            conditions = []
        # BUG FIX: the gate used to be `if conditions:`, so force=True
        # produced an empty list that failed the truthiness test and the
        # update silently never ran.
        if conditions is not None:
            # create query
            query = update(cls).values(values)
            for condition in conditions:
                query = query.where(condition)

            # list returning-fields
            if not return_fields:
                return_fields = ['id']

            # execute query
            try:
                result = await DBManager().query_fetch(
                    query.returning(
                        # getattr replaces cls.__getattribute__(cls, field).
                        *(getattr(cls, field) for field in return_fields)))
            except Exception as e:
                logger.error(
                    'common.entity.baseEntity.BaseEntity#update: {}'.format(e))

        return result
Ejemplo n.º 6
0
    async def handle_async_response(self, response):
        """Handle the token-validation HTTP response: on success register the
        websocket for the user and, for mobile clients, upsert the device
        credentials in Mongo."""
        if not (response and response.code == 200):
            logger.error('Error on token validation: {}'.format(response.error))
            return

        user_id = json.loads(response.body.decode())['id']
        TOKENS[self.token] = user_id
        self.add_websocket(user_id)
        logger.info('WEBSOCKET VALIDATED TOKEN: user={}'.format(user_id))

        if self.device_type == 'WEB':
            return

        db = self.settings['db']

        # Drop any stale record bound to this physical device...
        await db.test.find_one_and_delete({'device_id': self.device_id})

        # ...then bind the device to the freshly validated user.
        await db.user_credentials.update_one(
            {'user_id': user_id, 'device_id': self.device_id},
            {'$set': {'app_token': self.app_token,
                      'device_type': self.device_type}},
            upsert=True,
        )
Ejemplo n.º 7
0
def apply_transactions(transactions, auto=False):
    ''' Apply renaming transactions.
    apply_transactions(transactions)
    transactions = [(old_path, new_path), ...] of Path-like objects.
    Manual review of transactions is required unless ``auto`` is true.
    '''
    if auto:
        logger.warning('Auto is On. No confirmation required.')
    print('=' * 30)
    if not transactions:
        logger.debug('NO TRANSACTIONS')
        # sys.exit raises SystemExit; the unreachable `return` that used to
        # follow it has been removed.
        sys.exit('No Transactions to apply.')

    for src, dst in transactions:
        print('[{}] > [{}]'.format(src.name, dst.name))
    print('{} Transactions to apply. Renaming...'.format(len(transactions)))
    count = 0
    if auto or input('EXECUTE ? [y]\n>') == 'y':
        for src, dst in transactions:
            try:
                src.rename(dst)
            except OSError:
                # Narrowed from a bare `except:` so that KeyboardInterrupt /
                # SystemExit are no longer swallowed.
                logger.error(sys.exc_info()[0].__name__)
                logger.error('Could not rename: [{}]>[{}]'.format(src, dst))
            else:
                logger.debug('[{}] renamed to [{}]'.format(src, dst))
                count += 1

        print('{} folders renamed.'.format(count))
Ejemplo n.º 8
0
async def filter_errors_request(request: web.Request, handler) -> web.Response:
    """Middleware: run ``handler`` and translate known exceptions into HTTP
    responses; OPTIONS requests pass straight through with 200."""
    if request.method == hdrs.METH_OPTIONS:
        return web.json_response(status=200)

    try:
        return await handler(request)
    except IncorrectParamsException as e:
        # exception "default params not validate", code = 400
        logger.error(e)
        return web.json_response(status=400, data=dict(errors=e.errors))
    except AccessException as e:
        # exception "access denied"
        return web.Response(status=e.code, reason=e.msg)
    # TODO: Write the correct handling of errors
    except Exception as e:
        # In debug mode re-raise for a full traceback...
        if asyncio.get_event_loop().get_debug():
            raise e
        # ...otherwise answer with a generic 500.
        logger.error('Fail request, err: {}'.format(repr(e)))
        return web.json_response(
            status=500,
            data={
                'errors': {
                    'reason':
                    'Error on running API-handlers: {}'.format(repr(e))
                }
            })
Ejemplo n.º 9
0
def create_net(num_classes, dnn='resnet20', **kwargs):
    """Instantiate the network architecture named ``dnn``.

    :param num_classes: number of output classes (where applicable)
    :param dnn: architecture name
    :param kwargs: extra args some nets need (datapath, vocab_size, batch_size)
    :return: (net, ext) -- ext is only set by 'lstman4', otherwise None
    :raises ValueError: for an unknown architecture name
    """
    ext = None
    if dnn in ['resnet20', 'resnet56', 'resnet110']:
        net = models.__dict__[dnn](num_classes=num_classes)
    elif dnn == 'resnet50':
        net = models.__dict__['resnet50'](num_classes=num_classes)
    elif dnn == 'mnistnet':
        net = MnistNet()
    elif dnn == 'mnistflnet':
        net = MnistFLNet()
    elif dnn == 'cifar10flnet':
        net = Cifar10FLNet()
    elif dnn == 'vgg16':
        net = models.VGG(dnn.upper())
    elif dnn == 'alexnet':
        net = torchvision.models.alexnet()
    elif dnn == 'lstman4':
        net, ext = models.LSTMAN4(datapath=kwargs['datapath'])
    elif dnn == 'lstm':
        net = lstmpy.lstm(vocab_size=kwargs['vocab_size'], batch_size=kwargs['batch_size'])
    else:
        errstr = 'Unsupport neural network %s' % dnn
        logger.error(errstr)
        # BUG FIX: `raise errstr` raised a bare string, which is itself a
        # TypeError in Python 3; raise a proper exception instead.
        raise ValueError(errstr)
    return net, ext
Ejemplo n.º 10
0
async def insert_data(pg, table, values, res=False):
    """
    Universal method for inserting data into table
    :param pg: connect to DB engine(PostgreSQL)
    :param table: table object to insert into
    :param values: list of row value mappings
    :param res: If true - return the list of inserted ids
    :return: None or list of table_id
    """
    if verbose_insert_data:
        logger.debug('INSERT_DATA_INTO: %s' % table)
    async with pg.acquire() as conn:
        insert_stmt = table.insert().values(values)
        try:
            if not res:
                await conn.execute(insert_stmt)
                return
            cursor = await conn.execute(insert_stmt.returning(table.c.id))
            rows = await cursor.fetchall()
            return [row[0] for row in rows]
        except psycopg2.ProgrammingError as e:
            logger.error('INSERT_DATA_INTO: %s' % e)
def create_occurrence(username, body):
    """Validate and persist a new Occurrence for ``username``.

    :return: a (payload, status_code) tuple in every branch
    """
    last_occurrences, code = db.get_all(Occurrence, {'user': [username]})
    if code != 200:
        # BUG FIX: this branch used to return a bare string (which web
        # frameworks treat as a 200 body); propagate the failing status code.
        return "Erro ao carregar ocorrências passadas do usuário", code

    errors = validate_create_occurrence(body, last_occurrences)
    if errors:
        return errors, 400

    try:
        occurrence = Occurrence(
            user=username,
            occurrence_date_time=datetime.datetime.strptime(
                body['occurrence_date_time'], '%Y-%m-%d %H:%M:%S'),
            physical_aggression=body['physical_aggression'],
            victim=body['victim'],
            police_report=body['police_report'],
            gun=body['gun'],
            location=body['location'],
            occurrence_type=body['occurrence_type'])
        result, code = db.insert_one(occurrence)

        return result, code
    except Exception as error:
        logger.error(error)
        # NOTE(review): 401 (unauthorized) looks odd for a generic failure --
        # 400/500 is probably intended; kept for backward compatibility.
        return str(error), 401
Ejemplo n.º 12
0
    def write_new_student(self, telegram: str, email: str):
        """Insert one (telegram, email) row into the ``students`` table.

        :param telegram: student's telegram handle
        :param email: student's email address
        :return: True on success
        :raises Exception: if the cursor is unavailable or the insert fails
        """
        conn = self.conn_to_db()
        cursor = conn.cursor()
        if not cursor:
            logger.error("Something wrong with Postgres - cursor is None")
            raise Exception("Something wrong with Postgres - cursor is None")

        # email add validation
        insert_student_command = "INSERT INTO students (telegram, email) VALUES (%s, %s)"
        try:
            logger.info(
                f"Trying execute command {insert_student_command} ")
            cursor.execute(insert_student_command, (telegram, email))
            conn.commit()
            logger.info(
                f"Command {insert_student_command} was executed succesfully"
            )
            # BUG FIX: the insert used to be executed a SECOND time after the
            # commit, writing a duplicate row; that call has been removed.
            cursor.close()
            logger.info("Cursor closed ")
            conn.close()
            logger.info("Connection closed")
            return True
        except Exception as e:
            logger.error(e)
            # Bare raise keeps the original exception type and traceback
            # (was `raise Exception(e)`).
            raise
Ejemplo n.º 13
0
    async def _init_connect(self, retry_delay=REDIS_RECONNECT_DELAY,
                            num_retries=REDIS_RECONNECT_RETRIES):
        """
        Create the Redis connection pool from ``self.conf``.
          If the Redis server refuses the connection, retry up to
          ``num_retries`` times with ``retry_delay`` seconds between attempts.

        :param int retry_delay: delay between retries
        :param int num_retries: number of retries

        :return: None
        """
        attempts_left = num_retries
        while attempts_left > 0:
            attempts_left -= 1
            try:
                self.pool = await aioredis.create_pool(
                    (self.conf['host'], self.conf['port']),
                    db=self.conf['db'],
                    password=self.conf['password'],
                    encoding=self.conf['encoding'],
                    minsize=self.conf['minsize'],
                    maxsize=self.conf['maxsize'],
                    loop=self.loop)
                return
            except ConnectionRefusedError:
                logger.error(
                    'Cant establish connection to redis. Retry after %s s.', retry_delay)
            await asyncio.sleep(retry_delay)
Ejemplo n.º 14
0
def get_tests_groups_from_jenkins(runner_name, build_number, distros):
    """Map each sub-build's job suffix to the test group it ran.

    The group is scraped from the job console (`run_tests.py ... --group=X`);
    the suffix is the part of the job name after `.<distro>.`, or after the
    last dot when no known distro appears in the name.
    """
    runner_build = Build(runner_name, build_number)
    res = {}
    for sub in runner_build.build_data['subBuilds']:

        # Scrape every --group= keyword from the sub-job's console output.
        sub_build = Build(sub['jobName'], sub['buildNumber'])
        groups = []
        for line in sub_build.get_job_console():
            if 'run_tests.py' not in line:
                continue
            for keyword in line.split():
                if '--group=' in keyword:
                    groups.append(keyword.split('=')[1])
        if not groups:
            logger.error(
                "No test group found in console of the job {0}/{1}".format(
                    sub['jobName'], sub['buildNumber']))
            continue
        # Use the last group (there can be several groups in upgrade jobs)
        test_group = groups[-1]

        # Get the job suffix
        job_name = sub['jobName']
        for distro in distros:
            if distro in job_name:
                job_suffix = job_name.split('.' + distro + '.')[-1]
                break
        else:
            job_suffix = job_name.split('.')[-1]
        res[job_suffix] = test_group
    return res
Ejemplo n.º 15
0
    def discover_functions(self):
	"""Synchronise the local function_name inventory with the functions
	currently present in the production DB's public schema: retire local
	records that vanished from production, create records for new ones.
	"""
	self.self_cursor.execute("SELECT pro_oid,func_name,id FROM function_name WHERE {0}={1} AND alive".format(self.sub_fk,self.id))
	local_funcs=self.self_cursor.fetchall()
	try:
	    # All non-built-in (prolang <> 12) functions in the "public"
	    # schema of the production database.
	    self.prod_cursor.execute("""SELECT p.oid AS pro_oid,p.proname AS funcname,p.proretset,t.typname,l.lanname
FROM pg_proc p
LEFT JOIN pg_namespace n ON n.oid = p.pronamespace
JOIN pg_type t ON p.prorettype=t.oid
JOIN pg_language l ON p.prolang=l.oid
WHERE (p.prolang <> (12)::oid)
AND n.oid=(SELECT oid FROM pg_namespace WHERE nspname='public')""")
	except Exception as e:
	    logger.error("Cannot execute function discovery query: {0}".format(e.pgerror))
	    return
	prod_funcs=self.prod_cursor.fetchall()
	# Retire local records whose (oid, name) pair no longer exists in
	# production (for/else: else runs when no match was found).
	for l_func in local_funcs:
	    for p_func in prod_funcs:
		if l_func[0]==p_func[0] and l_func[1]==p_func[1]:
		    break
	    else:
		logger.info("Retired function {0} in schema {1}".format(l_func[1],self.db_fields['sch_name']))
		old_func=FunctionName(l_func[2])
#		old_func.populate()
		old_func.retire()
	# Create records for production functions not yet tracked locally.
	for p_func in  prod_funcs:
	    for l_func in local_funcs:
		if p_func[0]==l_func[0] and p_func[1]==l_func[1]:
		    break
	    else:
		logger.info("Created new function: {0} in schema {1}".format(p_func[1],self.db_fields['sch_name']))
		new_func=FunctionName()
		new_func.set_fields(sn_id=self.id,pro_oid=p_func[0],func_name=p_func[1],proretset=p_func[2],prorettype=p_func[3],prolang=p_func[4])
		new_func.create()
		new_func.truncate()
Ejemplo n.º 16
0
def _small_talk(message="Привет"):
    """
        This method connects to the Api.ai Small Talk domain.
        :param message: input message
        :return: answer text from Api.ai, or None when no speech is present
            or the response structure is missing an attribute
    """
    ai = apiai.ApiAI(api_ai_conf['client_access_token'])
    request = ai.text_request()
    request.lang = 'ru'  # optional, default value equal 'en'
    request.session_id = api_ai_conf['sessid'][:35]
    request.query = message

    response = request.getresponse().read()
    obj = json.loads(response.decode('utf-8'))
    try:
        alternate_result = obj.get('alternateResult')
        if alternate_result:
            # Prefer the Small Talk (alternateResult) answer when it carries
            # speech.  (The old code re-fetched obj['alternateResult'] here.)
            answer_from_domain = alternate_result.get('fulfillment').get('speech')
            if answer_from_domain:
                return answer_from_domain
        # Fall back to the agent's own result.
        return obj.get('result').get('fulfillment').get('speech')
    except AttributeError as e:
        logger.error('Handle ERROR: {0}'.format(e))
Ejemplo n.º 17
0
async def filter_errors_request(request: web.Request, handler) -> web.Response:
    """Middleware: pass OPTIONS through with 200, map validation failures to
    400, and (for now) re-raise anything else -- see TODO below."""
    if request.method == hdrs.METH_OPTIONS:
        return web.json_response(status=200)

    try:
        return await handler(request)
    except IncorrectParamsException as e:
        # exception "default params not validate", code = 400
        logger.error(e)
        return web.json_response(status=400, data=dict(errors=e.errors))
    # TODO: Write the correct handling of errors
    except Exception as e:
        # if dev - exception
        raise e
        # TODO: enable code by after fix DEV_MOD
        # if DEV_MOD:
        #     raise e
        # # else return response with code 500
        # else:
        #     logger.error('Fail request, err: ', e)
        #     response = web.json_response(
        #         status=500,
        #         data={'errors': {
        #             'reason': 'Error on running API-handlers: %s' % e
        #         }}
        #     )
Ejemplo n.º 18
0
async def set_to_cache(cache: BaseCache, key: str, value: Dict) -> None:
    """Store ``value`` under ``key`` for one hour; any backend error is
    logged and otherwise ignored."""
    one_hour = 60 * 60  # cache successful items for an hour
    try:
        await cache.set(key, value, ttl=one_hour)
    except Exception as e:
        logger.error(e)
Ejemplo n.º 19
0
 def fetch(self, table, cond):
     """Yield documents from collection ``table`` matching filter ``cond``;
     a Mongo server-selection timeout is re-raised as a ConnectionError."""
     try:
         collection = getattr(self.db, table)
         for document in collection.find(cond):
             yield document
     except errors.ServerSelectionTimeoutError as e:
         logger.error("timed out to fetch {0}".format(table))
         raise exceptions.ConnectionError("connecting timeout")
Ejemplo n.º 20
0
def txt_2_xlsx(src_file, rows, category, project_id):
    """Split ``src_file`` into partitions of ``rows`` lines each and write
    them out as xlsx files, numbering on from the project's previously
    recorded end position.

    :raises ConvertingError: on uneven partitions or when no previous end
        value can be retrieved after 10 retries
    """
    record_manager = RecordManager()
    partitions = smash_merged(src_file, rows)
    if not is_valid(partitions, rows):
        logger.error("rows in each partitions are not equal.")
        raise ConvertingError

    header = settings.XLSX_HEADERS[category]
    xlsx_path = os.path.join(settings.SAVE_DIRECTORY, str(project_id))

    # Poll for the previous end marker, one lookup per minute.
    last_end = record_manager.get_previous_end(project_id, category)
    retry_time = 0
    while not last_end:
        logger.info(
            "retriving value for last end failed, waiting for the next lookup"
        )
        time.sleep(60)  # waiting for next looking up
        retry_time += 1
        last_end = record_manager.get_previous_end(project_id, category)
        # TODO: use an alternative way to recover
        if retry_time > 10:
            logger.error("unable to find an avaible value for %d" %
                         project_id)
            raise ConvertingError

    start = last_end + 1
    generate_xlsxs(partitions, header, xlsx_path, category, project_id,
                   start)
    record_manager.update_record(project_id, start, start + len(partitions), rows)
Ejemplo n.º 21
0
 def fetch_result(self, cond=None):
     """Yield documents from the Result collection matching ``cond``.

     :param cond: Mongo filter document; None (the new default) means {},
         i.e. all documents.  BUG FIX: the default used to be the mutable
         literal ``{}``, shared across calls.
     :raises exceptions.ConnectionError: on server-selection timeout
     """
     if cond is None:
         cond = {}
     try:
         for item in self.db.Result.find(cond):
             yield item
     except errors.ServerSelectionTimeoutError as e:
         # BUG FIX: the log line referenced an undefined name `table`,
         # which raised a NameError inside the handler.
         logger.error("timed out to fetch {0}".format("Result"))
         raise exceptions.ConnectionError("connecting timeout")
Ejemplo n.º 22
0
def apply_transactions(transactions, auto=False):
    ''' Apply renaming transactions.
    apply_transactions(transactions)
    transactions = [(old_path, new_path), ...] of Path-like objects.
    Manual review of transactions is required unless ``auto`` is true.
    '''
    if auto:
        logger.warning('Auto is On. No confirmation required.')
    print('='*30)
    if not transactions:
        logger.debug('NO TRANSACTIONS')
        # sys.exit raises SystemExit; the unreachable `return` that used to
        # follow it has been removed.
        sys.exit('No Transactions to apply.')

    for src, dst in transactions:
        print('[{}] > [{}]'.format(src.name, dst.name))
    print('{} Transactions to apply. Renaming...'.format(len(transactions)))
    count = 0
    if auto or input('EXECUTE ? [y]\n>') == 'y':
        for src, dst in transactions:
            try:
                src.rename(dst)
            except OSError:
                # Narrowed from a bare `except:` so that KeyboardInterrupt /
                # SystemExit are no longer swallowed.
                logger.error(sys.exc_info()[0].__name__)
                logger.error('Could not rename: [{}]>[{}]'.format(src, dst))
            else:
                logger.debug('[{}] renamed to [{}]'.format(src, dst))
                count += 1

        print('{} folders renamed.'.format(count))
Ejemplo n.º 23
0
    def get_qty(self):
        """Scrape the package quantity from the product page and derive the
        unit price from ``self.current_price``.

        Sets ``self.pkg_qty`` and, when the price parses, ``self.unit_price``
        (a "$"-prefixed string).  Failures are logged, never raised.

        NOTE(review): assumes the configured regex yields exactly one
        space-separated attribute string -- confirm against settings.WELL.
        """
        # Text of every element matching the configured QTY CSS selector.
        qty_res = [p.text for p in self.s.select(
            settings.WELL['qty_css_selector'])]

        try:
            # Extract product attributes: |Size|QTY|Weight|Unit_Price|Price
            qty_regex_result = re.findall(
                settings.WELL['qty_regex'], qty_res[0])
            assert(len(qty_regex_result) == 1)

            # Strip unnecessary stuff
            qty_regex_result[0] = re.sub(
                r"swatch_stock_list\[0\]='.*'; ", "", qty_regex_result[0])
            qty_regex_results = qty_regex_result[0].split(" ")

            # Second element is package quantity
            self.pkg_qty = qty_regex_results[1]

            try:
                # Calculate unit price
                self.unit_price = "%.2f" % (
                    (float)((self.current_price).strip('$')) / (float)(self.pkg_qty))
                # Reconvert to string by adding the $ sign
                self.unit_price = "$" + self.unit_price

            except ValueError as err:
                logger.error("Unable calculating price: %s" %
                             (err), exc_info=True)

        except (AssertionError, IndexError) as err:
            # No match / no selector hit: qty_res[0] or the assert failed.
            logger.error("Unable to get qty: %s" % (err), exc_info=True)
Ejemplo n.º 24
0
    def get(self):
        """Daily "keep" endpoint: rate-limit to once per uid/type per day,
        then credit the pet with the configured score.

        NOTE(review): the success path falls through without writing a
        response body -- confirm whether a final jsonify(...) is missing.
        """
        uid = self.current_user
        type_ = self.get_argument('type', None)
        if not type_:
            self.set_status(400)
            result = dict(code=40011, msg=u'缺少type参数')
            return self.jsonify(result)

        keep_info = self.keep_map(type_)

        # One attempt per day: the first INCR creates the key and sets an
        # expiry at midnight; any later increment means "already tried".
        key = "uid:{}:keep:{}".format(uid, type_)
        times = rdb.incr(key)
        if times == 1:
            rdb.expire(key, get_to_tomorrow())
        else:
            logger.warning('have try times {}'.format(times))
            result = dict(code=40010, msg=u'每天只能{}一次哦!'.format(keep_info['name']))
            return self.jsonify(result)

        try:
            row = Pet.keep(uid=uid, score=keep_info['score'])
            logger.info('keep pet {}'.format(row))
        # BUG FIX: Python-2-only `except Exception, e` replaced with the
        # `as` form accepted by both Python 2.6+ and Python 3.
        except Exception as e:
            self.set_status(500)
            logger.error('keep pet error {}'.format(e))
            result = dict(code=40012, msg=u'更新服务器错误, 请稍后重试!')
            return self.jsonify(result)
Ejemplo n.º 25
0
async def get_from_cache(cache: BaseCache, key: str) -> Optional[Dict]:
    """Fetch ``key`` from the cache; any backend error is logged and
    reported as a miss (None)."""
    try:
        cached = await cache.get(key)
    except Exception as e:
        logger.error(e)
        return None
    return cached
Ejemplo n.º 26
0
 def rank_source_file_reply(self):
     """Handle the network reply for a daily-rank source-file request.

     On success the payload is written to
     LOCAL_SPIDER_SRC/cffex/rank/<variety>_<date>.csv and a success message
     is emitted; on error a failure message is emitted instead.
     """
     reply = self.sender()
     request_url = reply.request().url().url()
     # Parse the requested variety out of the file name in the URL.
     request_filename = request_url.rsplit("/", 1)[1]
     request_variety = request_filename.split("_")[0]
     if reply.error():
         reply.deleteLater()
         self.spider_finished.emit(
             "获取{}排名数据文件。\n失败:{}".format(request_variety[:2],
                                         str(reply.error())), True)
         # NOTE(review): request_url[:2] logs only the URL's first two
         # characters -- request_variety[:2] was probably intended; confirm.
         logger.error("获取{}排名数据文件失败了!".format(request_url[:2]))
         return
     save_path = os.path.join(
         LOCAL_SPIDER_SRC,
         'cffex/rank/{}_{}.csv'.format(request_variety,
                                       self.date.strftime("%Y-%m-%d")))
     file_data = reply.readAll()
     file_obj = QFile(save_path)
     is_open = file_obj.open(QFile.WriteOnly)
     if is_open:
         file_obj.write(file_data)
         file_obj.close()
     reply.deleteLater()
     # Variety "T" means the file covers every variety for the day.
     tip = "获取中金所{}_{}日持仓排名数据保存到文件成功!".format(
         request_variety, self.date.strftime("%Y-%m-%d"))
     if request_variety == "T":
         tip = "获取中金所{}日所有品种持仓排名数据保存到文件成功!".format(
             self.date.strftime("%Y-%m-%d"))
     self.spider_finished.emit(tip, True)
     self.event_loop.quit()
Ejemplo n.º 27
0
    def discover_tables(self):
	"""Synchronise the local table_name inventory with the ordinary
	tables (relkind='r') in the production DB's public schema: retire
	vanished tables, create records for new ones.
	"""
	self.self_cursor.execute("SELECT obj_oid,tbl_name,id FROM table_name WHERE {0}={1} AND alive".format(self.sub_fk,self.id))
	local_tbls=self.self_cursor.fetchall()
	try:
	    # has_parent marks tables that inherit from another table.
	    self.prod_cursor.execute("""SELECT r.oid,r.relname,
CASE WHEN h.inhrelid IS NULL THEN 'f'::boolean ELSE 't'::boolean END AS has_parent
FROM pg_class r
LEFT JOIN pg_inherits h ON r.oid=h.inhrelid
WHERE r.relkind='r'
AND r.relnamespace=(SELECT oid FROM pg_namespace WHERE nspname='public')""")
	except Exception as e:
	    logger.error("Cannot execute tables discovery query: {0}".format(e.pgerror))
	    return
	prod_tbls=self.prod_cursor.fetchall()
	# Retire local records whose (oid, name) pair no longer exists in
	# production (for/else: else runs when no match was found).
	for l_table in local_tbls:
	    for p_table in prod_tbls:
		if l_table[0]==p_table[0] and l_table[1]==p_table[1]:
		    break
	    else:
		logger.info("Retired table {0} in schema {1}".format(l_table[1],self.db_fields['sch_name']))
		old_table=TableName(l_table[2])
#		old_table.populate()
		old_table.retire()
	# Create records for production tables not yet tracked locally.
	for p_table in  prod_tbls:
	    for l_table in local_tbls:
		if p_table[0]==l_table[0] and p_table[1]==l_table[1]:
		    break
	    else:
		logger.info("Created new table: {0} in schema {1}".format(p_table[1],self.db_fields['sch_name']))
		new_table=TableName()
		new_table.set_fields(sn_id=self.id,tbl_name=p_table[1],obj_oid=p_table[0],has_parent=p_table[2])
		new_table.create()
		new_table.truncate()
Ejemplo n.º 28
0
    def discover_indexes(self):
	"""Synchronise the local index_name inventory with the indexes of
	this table (matched by obj_oid) in the production database: retire
	vanished indexes, create records for new ones.
	"""
	self.self_cursor.execute("SELECT obj_oid,idx_name,id FROM index_name WHERE tn_id={0} AND alive".format(self.id))
	local_idxs=self.self_cursor.fetchall()
	try:
	    # All indexes attached to this table's oid, with their
	    # uniqueness / primary-key flags.
	    self.prod_cursor.execute("""SELECT i.indexrelid,c.relname,i.indisunique,i.indisprimary
FROM pg_index i
JOIN pg_class c ON i.indexrelid=c.oid
WHERE i.indrelid={0}""".format(self.db_fields['obj_oid']))
	except Exception as e:
	    logger.error("Cannot execute index discovery query: {0}".format(e.pgerror))
	    return
	prod_idxs=self.prod_cursor.fetchall()
	# Retire local records whose (oid, name) pair no longer exists in
	# production (for/else: else runs when no match was found).
	for l_idx in local_idxs:
	    for p_idx in prod_idxs:
		if l_idx[0]==p_idx[0] and l_idx[1]==p_idx[1]:
		    break
	    else:
		logger.info("Retired index {0} in table {1}".format(l_idx[1],self.db_fields['tbl_name']))
		old_idx=IndexName(l_idx[2])
		old_idx.retire()
	# Create records for production indexes not yet tracked locally.
	for p_idx in prod_idxs:
	    for l_idx in local_idxs:
		if l_idx[0]==p_idx[0] and l_idx[1]==p_idx[1]:
		    break
	    else:
		logger.info("Create new index {0} in table {1}".format(p_idx[1],self.db_fields['tbl_name']))
		new_index=IndexName()
		new_index.set_fields(tn_id=self.id,obj_oid=p_idx[0],idx_name=p_idx[1],is_unique=p_idx[2],is_primary=p_idx[3])
		new_index.create()
		new_index.truncate()
Ejemplo n.º 29
0
 def saveUserInfo(self, appid, username, password):
     """
     Persist a user registration: one row in ``userinfo`` plus the encoded
     password in ``localauth``.

     :param appid: application id the user belongs to
     :param username: login name
     :param password: clear-text password (stored encoded)
     :return: True on success, False if either insert fails
     """
     # NOTE(review): the two inserts are not atomic -- a localauth failure
     # leaves an orphaned userinfo row; wrapping them in one transaction
     # would fix this.  (A dead, unused multi-statement `sql` string that
     # hinted at that intent was removed.)
     insert_userinfo = "INSERT INTO userinfo (appid_id, username, createtime, updatetime) VALUES (%s, %s, %s, %s)"
     insert_localauth = "INSERT INTO localauth (userid_id, password) VALUES (%s, %s)"
     now = str(int(time.time()))
     try:
         userid_id = self.db.insert(insert_userinfo, appid, username, now, now)
     except Exception as e:
         logger.error(str(e))
         return False
     try:
         self.db.insert(insert_localauth, userid_id, utils.encodePassword(password))
     except Exception as e:
         logger.error(str(e))
         return False
     return True
Ejemplo n.º 30
0
def r_file(filename, mode=None, content="", action=""):
	"""Create ``filename`` (replacing any existing file) and write ``content``.

	:param filename: path of the file to (re)create
	:param mode: unused, kept for backward compatibility
	:param content: text written to the new file
	:param action: only "create" does anything; other values are a no-op
	"""
	if action != "create":
		return

	try:
		# The two near-identical branches were collapsed: removing a
		# pre-existing file first reproduces the old remove+mknod path,
		# and mknod alone reproduces the create path.
		if os.path.exists(filename):
			os.remove(filename)
		os.mknod(filename)
		logger.info("Create File Ok.")

		with open(filename, 'w+') as f:
			f.write(content)

	# BUG FIX: Python-2-only `except OSError, e` replaced with `as`.
	except OSError as e:
		logger.error("filename: %s " % (filename) + str(e))
		sys.exit(1)
Ejemplo n.º 31
0
async def send_message(sid, message):
    """
    Custom event handler with event_name and
    Socket.IO namespace for the event. This handler works like echo-server.
    :param sid: Session ID of the client
    :param message: message payload
    :return: None
    """
    # Added transport mode checker
    transport_mode = sio.transport(sid)
    logger.debug('MESSAGE TRANSPORT MODE (%s): %s' % (sid, transport_mode))
    logger.debug('EVENT("sendMessage"): %s' % message['data'])
    try:
        if not isinstance(message, dict):
            raise TypeError('Message should be dict: {"data": "some text"}')
        if message.get('data') is None:
            raise ValueError('Message should have key("data")')
        api_ai_message = await run_small_talk(
            message['data'])  # TODO change to the json server_message
        # api_ai_message = await get_server_message(sio.pg, message)
        await sio.emit('sendMessageResponse', {'data': api_ai_message},
                       room=sid,
                       namespace='/chat')
        logger.debug('EVENT("sendMessageResponse"): %s' %
                     api_ai_message)
    except (ValueError, TypeError) as e:
        # The two byte-identical ValueError / TypeError handlers were merged.
        logger.error('Handle ERROR: %s' % e)
Ejemplo n.º 32
0
async def send_binary_message(sid):
    """
    Custom event handler with event_name and
    Socket.IO namespace for the event. This handler send
    image file in base64 gzip.
    :param sid: Session ID of the client
    :return: emit file base64 gzip
    """
    # BUG FIX: initialise as bytes -- with the old str default (''), the
    # content_b64.decode('utf-8') below raised AttributeError whenever
    # the file could not be read (the OSError path).
    content_b64 = b''
    hash_sum = ''
    try:
        async with aiofiles.open('static/test.png', mode='rb') as image_file:
            content = await image_file.read()
            gzip_file = gzip.compress(content)
            content_b64 = base64.b64encode(gzip_file)
            # hash of the encoded payload, used by the client to verify
            hash_sum = hashlib.md5(content_b64).hexdigest()
    except OSError as e:
        logger.error('Handle ERROR: %s' % e)
    await sio.emit('file response', {
        'data': content_b64.decode('utf-8'),
        'hash_sum': hash_sum
    },
                   room=sid,
                   namespace='/chat',
                   callback=call_back_from_client)
    logger.debug('My EVENT(FILE) (%s): %s' % (sid, content_b64[:20]))
    del content_b64
Ejemplo n.º 33
0
def get_tests_groups_from_jenkins(runner_name, build_number, distros):
    """Map each sub-build's job-name suffix to the test group it executed.

    :param runner_name: name of the Jenkins runner job
    :param build_number: build number of the runner job
    :param distros: distro names used to split the suffix out of job names
    :return: dict {job_suffix: test_group}
    """
    runner = Build(runner_name, build_number)
    result = {}
    for sub_build in runner.build_data['subBuilds']:
        job = sub_build['jobName']
        number = sub_build['buildNumber']

        # Unfinished sub-builds have no result yet -- skip them.
        if sub_build['result'] is None:
            logger.debug("Skipping '{0}' job (build #{1}) because it's still "
                         "running...".format(job, number,))
            continue

        # Pull every --group= value out of the job's console log.
        console = Build(job, number).get_job_console()
        groups = [token.split('=')[1]
                  for line in console if 'run_tests.py' in line
                  for token in line.split() if '--group=' in token]
        if not groups:
            logger.error("No test group found in console of the job {0}/{1}"
                         .format(job, number))
            continue
        # Use the last group (there can be several groups in upgrade jobs)
        group = groups[-1]

        # Derive the job suffix; default to the part after the last dot.
        suffix = job.split('.')[-1]
        for distro in distros:
            if distro in job:
                suffix = job.split('.' + distro + '.')[-1]
                break
        result[suffix] = group
    return result
Ejemplo n.º 34
0
    def get_check_create_test_run(self, plan, cases):
        """Return the test run for this suite in *plan*, creating it if absent.

        :param plan: test plan object; re-fetched by id for a fresh state
        :param cases: unused here -- NOTE(review): kept for interface
            compatibility, confirm callers before removing
        :return: an existing matching Run, the newly created Run, or None
            when the suite has no cases
        """
        # Re-fetch the plan so its runs reflect the server's current state.
        plan = self.project.plans.get(plan.id)
        suite_cases = self.suite.cases()
        run_name = self.get_run_name()
        runs = plan.runs.find_all(name=run_name)
        # Delegate the "reuse or create?" decision to the helper.
        run = self.check_need_create_run(plan,
                                         runs,
                                         suite_cases)

        if run is None:
            logger.info('Run not found in plan "{}", create: "{}"'.format(
                plan.name, run_name))

            # Create new test run with cases from test suite
            suite_cases = self.get_suite_cases()

            if not suite_cases:
                logger.error('Empty test cases set.')
                return None

            # suite_cases = self.suite.cases.find(type_id=type_ids[0])
            run = Run(name=run_name,
                      description=self.run_description,
                      suite_id=self.suite.id,
                      milestone_id=self.milestone.id,
                      config_ids=[],
                      case_ids=[x.id for x in suite_cases]
                      )
            plan.add_run(run)
            logger.debug('Run created "{}"'.format(run_name))
        return run
Ejemplo n.º 35
0
def dataframe_to_orm(orm, dataframe):
    """Convert *dataframe* rows into a list of *orm* model instances.

    Columns are selected and type-coerced according to the ORM mapper's
    column list and each column's SQLAlchemy visit name; 'nan'/'None'
    strings and NaN numerics are normalised to None.

    :param orm: SQLAlchemy declarative model class
    :param dataframe: anything accepted by the DataFrame constructor
    :return: list of *orm* instances, one per row
    """
    field_name_list = orm.__mapper__.tables[0].columns._data._list
    frame = DataFrame(dataframe)[field_name_list]
    for field_name in field_name_list:
        try:
            stype = orm.__dict__.get(field_name).type.__visit_name__
            if stype in ('string', 'text'):
                # merged: the 'string' and 'text' branches were identical
                frame[field_name] = frame[field_name].astype(str)
                frame[field_name].replace({
                    'nan': None,
                    'None': None
                },
                                          inplace=True)
            elif stype == 'numeric':
                frame[field_name] = frame[field_name].astype(float)
                frame[field_name] = frame[field_name].where(
                    frame[field_name].notnull(), None)
            elif stype == 'datetime':
                frame[field_name] = pd.to_datetime(frame[field_name])
        except Exception:
            # BUG FIX: was a bare ``except:`` that also swallowed
            # SystemExit/KeyboardInterrupt.
            logger.error('数据类型转换出错')
    # BUG FIX: orient='record' (abbreviation) was removed in pandas 2.0;
    # 'records' is the long-supported spelling with identical output.
    df_dict = frame.to_dict(orient='records')
    return [orm(**row) for row in df_dict]
Ejemplo n.º 36
0
    def get_check_create_test_run(self, plan, cases):
        """Find this suite's run inside *plan*, creating one when missing.

        Returns the existing or newly created run, or None when the suite
        contains no cases.  *cases* is accepted for interface
        compatibility.
        """
        plan = self.project.plans.get(plan.id)
        current_suite_cases = self.suite.cases()
        name = self.get_run_name()
        matching_runs = plan.runs.find_all(name=name)
        existing = self.check_need_create_run(plan, matching_runs,
                                              current_suite_cases)
        if existing is not None:
            return existing

        logger.info('Run not found in plan "{}", create: "{}"'.format(
            plan.name, name))

        # Build a fresh run from the full set of suite cases.
        cases_for_run = self.get_suite_cases()
        if not cases_for_run:
            logger.error('Empty test cases set.')
            return None

        # suite_cases = self.suite.cases.find(type_id=type_ids[0])
        new_run = Run(name=name,
                      description=self.run_description,
                      suite_id=self.suite.id,
                      milestone_id=self.milestone.id,
                      config_ids=[],
                      case_ids=[case.id for case in cases_for_run])
        plan.add_run(new_run)
        logger.debug('Run created "{}"'.format(name))
        return new_run
Ejemplo n.º 37
0
    async def update(cls, values: dict, rec_id: int = None, conditions: list = None, return_fields: set = None) -> tuple:
        """Update records of this entity and return the updated row.

        :param values: column -> new value mapping
        :param rec_id: optional primary key; adds an ``id == rec_id`` condition
        :param conditions: optional list of SQLAlchemy column conditions
        :param return_fields: field names for RETURNING (default {'id'})
        :return: (fetched row or False, error message or '')
        """
        # message/error
        msg = ''
        # result update
        ru = False
        # BUG FIX: the old signature used a mutable default
        # (``conditions: list=[]``) which was shared between calls and
        # mutated by the append below; also copy caller-supplied lists so
        # the caller's list is never modified.
        conditions = list(conditions) if conditions else []
        # add condition by id
        if rec_id:
            conditions.append(cls.id == rec_id)
        # list returning-fields
        if not return_fields:
            return_fields = {'id'}
        # create and execute query
        try:
            # query to db
            query = update(cls).values(values).returning(*map(lambda field: cls.__getattribute__(cls, field), return_fields))
            for condition in conditions:
                # only genuine SQLAlchemy column expressions are applied
                if isinstance(condition, elements.ColumnElement):
                    query = query.where(condition)

            ru = await DBManager().query_fetchrow(query)
        except Exception as e:
            logger.error('common.entity.baseEntity.BaseEntity#update: {}'.format(e))
            msg = 'Error update record'

        return ru, msg
Ejemplo n.º 38
0
    def create(self, request):
        """Register a new user together with its UserProfile.

        Expects ``email``, ``username`` and ``password`` (or ``password1``)
        in the request body.  Returns 201 with the serialized user on
        success, 412 on validation failure or missing fields.

        NOTE(review): when the serializer is valid but
        ``UserProfile.objects.create`` returns a falsy value, no Response
        is returned (the view yields None) -- confirm that path cannot
        occur.
        """
        # BUG FIX: identity comparison with None (was ``value != None``).
        if all(value is not None for value in request.data.values()):
            email = request.data.get("email")
            username = request.data.get("username")
            password = request.data.get("password") or request.data.get(
                "password1")
            logger.info(f"TRYING SERIALIZE NEW USER: {request.data}")

            serializer = UserSerializer(data={
                "username": username,
                "email": email,
                "password": password
            })
            if serializer.is_valid():
                serializer.save()
                logger.info(f"NEW USER CREATED: {serializer.data} ")
                new_user = User.objects.get(email=email, username=username)
                if UserProfile.objects.create(user=new_user,
                                              id=new_user.id,
                                              user_courses=[]):
                    logger.info(f"USER PROFILE WAS CREATED - {new_user.id}")
                    return Response(serializer.data,
                                    status=status.HTTP_201_CREATED)
            else:
                logger.error(
                    f"NEW USER WAS NOT CREATED {serializer.data} BECAUSE OF {serializer.errors}"
                )
                return Response(serializer.errors,
                                status=status.HTTP_412_PRECONDITION_FAILED)
        else:
            logger.error(
                f"NEW USER WAS NOT CREATED - SOME EMPTY INPUT FIELDS: {request.data}"
            )
            return Response(status=status.HTTP_412_PRECONDITION_FAILED)
Ejemplo n.º 39
0
    def parse_data(self):
        """Parse info for each product: price, UPC, quantity and size.

        Sets self.regular_price / self.current_price / self.upc where the
        corresponding page elements are found; failures are logged and
        parsing continues with the next field.
        """
        # Extract price information
        regular_price = self.s.select(
            settings.TOYSRUS['regular_price_css_selector'])

        try:
            # BUG FIX: explicit checks instead of ``assert`` so the
            # validation still runs under ``python -O`` (asserts are
            # stripped there, but AssertionError drives this control flow).
            # Sometimes regular price is lower than current
            if len(regular_price) > 1:
                raise AssertionError("more than one regular price element")
            current_price = self.s.select(
                settings.TOYSRUS['current_price_css_selector'])
            if len(current_price) != 1:
                raise AssertionError("expected exactly one current price element")

            if regular_price:
                self.regular_price = regular_price[0].text.strip()
            self.current_price = current_price[0].text.strip()
        except AssertionError as err:
            logger.error("Unable to get price: %s" % (err), exc_info=True)

        # Extract upc info
        upc = self.s.select(settings.TOYSRUS['upc_css_selector'])
        try:
            if len(upc) != 1:
                raise AssertionError("expected exactly one UPC element")
            self.upc = upc[0].text.strip()
        except AssertionError as err:
            logger.error("Unable to get UPC code: %s" % (err), exc_info=True)

        # Extract qty and unit price info
        self.get_qty()

        # Extract size information
        self.get_size()
Ejemplo n.º 40
0
def send_email(from_, to_, subject, body_text, body_html):
    """Build a multipart (plain+html) email and log it.

    Returns False when either address fails a basic sanity regex,
    True otherwise.

    NOTE(review): actual SMTP delivery is disabled -- the original code
    returned True immediately after logging, making the smtplib block
    below it (SMTP_SSL/ehlo/login/sendmail/close) unreachable dead code,
    presumably a debugging leftover.  The dead code has been removed;
    restore delivery deliberately if this function is meant to send.
    """
    if not from_:
        from_ = SMTP["default_sender"]
    # Minimal address sanity check, not full RFC 5322 validation.
    if not re.match(r"[^@]+@[^@]+\.[^@]+", from_) or not re.match(
            r"[^@]+@[^@]+\.[^@]+", to_):
        logger.error("Invalid email address from {} -- to {}".format(
            from_, to_))
        return False

    # Create message container - the correct MIME type is multipart/alternative.
    msg = MIMEMultipart('alternative')
    msg['Subject'] = subject
    msg['From'] = from_
    msg['To'] = to_

    # Record the MIME types of both parts - text/plain and text/html.
    parts = []
    if body_text:
        parts.append(MIMEText(body_text, 'plain'))
    if body_html:
        parts.append(MIMEText(body_html, 'html'))

    for part in parts:
        msg.attach(part)

    logger.warning("EMAIL: " + msg.as_string())
    return True
Ejemplo n.º 41
0
def package(pkg_name, option="-y"):
	"""Install *pkg_name* with yum, exiting the process if yum fails.

	:param pkg_name: package name(s), passed verbatim into the shell
	    command; a falsy value makes this a no-op
	:param option: extra yum flag, "-y" (assume yes) by default
	SECURITY: pkg_name/option are interpolated into an os.system shell
	command -- never pass untrusted input here; prefer
	subprocess.run(["yum", option, "install", pkg_name]).
	"""
	if pkg_name:
		command_line_str = "yum " + option + " install " + pkg_name
		# os.system returns non-zero when yum fails
		if os.system(command_line_str)!=0:
			logger.error("exec: %s error" % (command_line_str))
			sys.exit(1)
Ejemplo n.º 42
0
	def exec_dcfg(self):
		"""Run the default post-install configuration for MySQL and Tomcat.

		Installs the MySQL system tables, copies both services' boot
		scripts into /etc/init.d, patches the MySQL script's base/data
		dirs, then start/status/stops each service once as a smoke test.
		Exits the process on any OSError from the shell commands.

		NOTE(review): the sed commands patch fixed line numbers (46/47)
		of mysql.server -- brittle across MySQL versions; verify against
		the bundled script.
		"""
		# first exec support script to install system tables.
		try:
			logger.info("============MYSQL DEFAULT CONFIG===========")
			logger.info("install system db.")
			os.chdir(mysql_home)
			exec_command('./scripts/mysql_install_db --user=mysql')

			logger.info("copy boot script to correct directory.")
			exec_command('cp ' + mysql_boot_script + ' /etc/init.d/')

			# sed config
			exec_command('sed -i -e "46s/basedir=/basedir=\/opt\/magima\/mysql/g" /etc/init.d/mysql.server')

			exec_command('sed -i -e "47s/datadir=/datadir=\/opt\/magima\/mysql\/data/g" /etc/init.d/mysql.server')

			# smoke-test the MySQL boot script
			exec_command("/etc/init.d/mysql.server start")
			exec_command("/etc/init.d/mysql.server status")
			exec_command("/etc/init.d/mysql.server stop")
			logger.info("==============TOMCAT DEFAULT CONFIG==============")
			logger.info("copy tomcat bootscript to /etc/init.d/")
			exec_command("cp " + tomcat_bootstrap + " /etc/init.d/tomcat6")
			exec_command("sudo /etc/init.d/tomcat6 start")
			exec_command("sudo /etc/init.d/tomcat6 status")
			exec_command("sudo /etc/init.d/tomcat6 stop")

		except OSError , oserr:
			logger.error("os error: %s " % str(oserr))
			sys.exit(1)
Ejemplo n.º 43
0
    def load_data(self, data_path, test_size=0.15):
        """Load pickled (feature, label) pairs and split train/test sets.

        :param data_path: path to a pickle file holding an iterable of
            (feature, label) tuples
        :param test_size: fraction held out for testing (default 0.15)
        Side effects: sets self.train_data / self.test_data /
        self.train_label / self.test_label.  Exits the process when the
        pickle cannot be loaded.
        """
        logger.info("Loading data.")
        try:
            # BUG FIX: use a context manager -- the old
            # ``pickle.load(open(data_path, "rb"))`` leaked the file handle.
            with open(data_path, "rb") as fh:
                data = pickle.load(fh)
            logger.debug(f"Loaded {len(data)} samples.")
        except Exception as err:
            # TODO Exception -- narrow to (OSError, pickle.UnpicklingError)
            logger.error(err)
            exit()

        X = []
        y = []
        for feature, label in data:
            X.append(feature)
            y.append(label)

        # assumes square RGB images of side app_cfg['img_size'] -- TODO confirm
        X = np.array(
            X).reshape(-1, app_cfg['img_size'], app_cfg['img_size'], 3)
        X = X/255.0

        y = keras.utils.to_categorical(y, num_classes=None)

        logger.info('Splitting dataset.')
        # TODO find another way to split dataset, using too much memory.
        self.train_data, self.test_data, self.train_label, self.test_label = train_test_split(
            X, y, test_size=test_size, random_state=1)
        logger.info(
            f'Data load complete. (Train_data:{len(self.train_data)}; Test_data:{len(self.test_data)})')
Ejemplo n.º 44
0
	def parse_blocks(self):
		"""Parse self.lines into a list of interval dicts.

		Cycles through BLOCK_PATTERN (bp_iter) line by line; when a text
		value spans multiple lines it switches to MULTILINES_PATTERN
		(mp_iter).  Each completed interval is stamped with the line
		number where it ended ('lineno') and collected into the result.
		"""
		lineno, interval, intervals = 0, {}, []

		def block_ends(interval, intervals):
			# close the current block: stamp the end line and store it
			# (``lineno`` comes from the enclosing loop's closure)
			interval['lineno'] = lineno
			intervals.append(interval)

		# iterator for MULTILINES_PATTERN
		mp_iter = CycleIterator(self.__pack(TextgridBlocksParser.PATTERN_KEYS, TextgridBlocksParser.MULTILINES_PATTERN))
		# iterator for BLOCK_PATTERN
		bp_iter = CycleIterator(self.__pack(TextgridBlocksParser.PATTERN_KEYS, TextgridBlocksParser.BLOCK_PATTERN))
		line_pattern = bp_iter.next()

		for line in self.lines:
			lineno += 1

			# always try to match the begining pattern at first to avoid missing a normal block 
			# therefore, reset the block parsing once a line was matched to the begining pattern
			# but unmatched to the current one.
			if not bp_iter.begins() and self.__match(bp_iter.head(), line):
				logger.error('unable to parse line %d, ignored' % (lineno-1))
				interval, line_pattern = {}, bp_iter.reset()

			# to match the pattern one by one until it ends
			if self.__match(line_pattern, line):
				self.__update(interval, line_pattern, line)

				# if the end of block was matched
				# block ends here for most situation
				if bp_iter.ends():
					block_ends(interval, intervals)
					interval = {}

			# when a text existed in multiple lines
			elif bp_iter.ends():
					# match the begining of text in multi-lines
					if self.__match(mp_iter.head(), line):
						self.__update(interval, mp_iter.head(), line)
						continue # should not to call the next block pattern

					# match the pattern of end line
					# block also may end here for multiple lines
					elif self.__match(mp_iter.tail(), line): 
						self.__update(interval, mp_iter.tail(), line, append=True)
						block_ends(interval, intervals)
						interval = {}

					# match the pattern without quotes
					else:
						# append the middle part of the text
						self.__update(interval, mp_iter.index(1), line, append=True)
						continue
			else:
				# does not match anything
				# logger.error('unable to parse line %d, ignored' % (lineno-1))
				continue
			
			line_pattern = bp_iter.next()	# match the next pattern

		return intervals
def weater(lat, lng, timezone, unit='celsius', weather_key_=weather_key):
    """Fetch a 3-hour weather forecast for the given coordinates via OWM.

    NOTE(review): ``res`` and ``observation`` are built but never used or
    returned -- the function always falls through returning None, so this
    snippet looks truncated; ``timezone`` and ``unit`` are also unused
    here.  Uses Python-2 ``except`` syntax and ``e.message``.
    """
    res = {}
    try:
        owm = pyowm.OWM(weather_key_)
        observation = owm.three_hours_forecast_at_coords(lat, lng)
    except Exception, e:
        logger.error("weater {0}".format(e.message))
Ejemplo n.º 46
0
    def parse_nodes(self):
        """Build a list of node dicts (name, loopback, prefixes, id and
        interfaces) from the controller topology.

        Nodes whose interface fetch fails are logged and omitted.
        """
        nodes = []

        for controller_node in self.topology[self.NODE]:
            entry = {}
            prefixes = []
            id_parts = html_style(controller_node[self.NODE_ID])
            attrs = controller_node[self.NODE_ATTRIBUTES]

            # Collect advertised prefixes, when present.
            if self.PREFIX in attrs:
                prefixes = [p[self.PREFIX] for p in attrs[self.PREFIX]]

            # Prefer an explicit name; fall back to a reverse lookup of the
            # router id, then to the id parsed out of the node identifier.
            if self.ROUTER_ID in attrs:
                if self.NAME in attrs:
                    entry[self.NAME] = attrs[self.NAME]
                else:
                    ok, resolved = name_check(attrs[self.ROUTER_ID][0])
                    entry[self.NAME] = resolved if ok else id_parts[self.ROUTER]
                entry[self.LOOPBACK] = attrs[self.ROUTER_ID][0]
            else:
                entry[self.NAME] = id_parts[self.ROUTER]
                entry[self.LOOPBACK] = "0.0.0.0"

            entry[self.PREFIX] = prefixes
            entry[self.ID] = controller_node[self.NODE_ID]

            try:
                interfaces = self.fetch_interfaces(entry[self.NAME])
                entry[self.INTERFACE] = self.parse_interfaces(interfaces)
                nodes.append(entry)
            except HttpClientException as e:
                logger.error("Fetch interfaces for node '{}' failed.".format(entry[self.NAME]))
                logger.exception(e.message)

        return nodes
Ejemplo n.º 47
0
def crypto_in():
    """IPN (instant payment notification) endpoint for crypto deposits.

    POST: authenticates via the ``ipn_auth`` form field, looks up the
    user owning ``address`` and credits the difference between the
    confirmed amount and the user's recorded net deposits, then echoes
    the payload back as JSON.  Non-POST requests are rejected with 401.

    NOTE(review): the ``database.Session(autocommit=False)`` created
    below is never committed or closed on any path -- verify the session
    lifecycle.
    NOTE(review): ``float(request.form.get(...))`` raises if a field is
    missing -- presumably the gateway always sends both; confirm.
    """
    if request.method == "POST":
        auth = request.form.get("ipn_auth")
        if auth != IPN_AUTH:
            abort(401)
        address = request.form.get("address")
        confirmed = float(request.form.get("confirmed"))
        unconfirmed = float(request.form.get("unconfirmed"))
        logger.warning("IPN address {} -- confirmed {} -- unconfirmed {}".format(address, confirmed, unconfirmed))
        session = database.Session(autocommit=False)
        user = CryptoAddress.get_address_user(address=address, session=session)
        if user:
            logger.warning("IPN for user {} -- address {}".format(user, address))
            dep = UserTransactions.get_user_netdeposits(user, session=session)
            # only credit the portion not already recorded as deposited
            if confirmed > dep:
                UserTransactions.add_transaction(user, confirmed-dep, "deposit", reference="IPN", session=session)
                logger.warning("Deposit confirmed! user {} -- address {} -- amount {}".format(user, address, confirmed-dep))
            else:
                logger.warning("Confirmed is lower than net deposit yet. User {} -- address {} -- conf {} -- dep {} "
                               .format(user, address, confirmed, dep))
        else:
            logger.error("Address {} don't match any user.".format(address))
        data = {
            "address": address,
            "confirmed": confirmed,
            "unconfirmed": unconfirmed,
            "ipn_auth": auth
        }
        return jsonify(data)
    else:
        logger.error("Invalid request at IPN url!")
        abort(401)
Ejemplo n.º 48
0
	def process(self, raw_data):
		"""Decrypt one (data, name) pair with the shared AES decipherer.

		Pads the ciphertext with '0' characters up to a multiple of the
		16-byte AES block size.  Returns (plaintext, name), or None when
		decryption fails (the ValueError is logged and swallowed).
		Python-2 ``except`` syntax.
		"""
		data, name = raw_data
		try:
			# cipher_text must be a multiple of 16, fill the margin with 0
			data = data if len(data)%16==0 else ''.join([data, '0'*(16-len(data)%16)])
			return (AESDecipherer.decipherer.decrypt(data), name)
		except ValueError, e:
			logger.error(u'unable to decrypt %s by the means of AES' % name)
Ejemplo n.º 49
0
	def retrieve(self, *args, **kwargs):
		"""Wait for pending queue work, then re-queue every item produced
		by the underlying storage's retrieve().

		ArgumentsError from the storage is logged and swallowed.
		Python-2 ``except`` syntax.
		"""
		# block until all previously queued items have been processed
		self.queue.join()
		try:
			for	data in self.storage.retrieve(*args, **kwargs):
				for t in data:
					self.queue.put(t)
		except ArgumentsError, e:
			logger.error(e.msg)
Ejemplo n.º 50
0
 def getMetadata(self, tag):
     """Return the text of the first element matching *tag* under the root.

     Logs an error and returns None when no element matches; warns and
     returns the first element's text when several match.
     """
     elems = self._root.findall(tag)
     if not elems:
         # BUG FIX: the tag was never interpolated into the log message
         # ("Tag %s is not valid!" had no % argument).
         logger.error("Tag %s is not valid!" % tag)
         return None
     elif len(elems) >1:
         logger.warning("Tag %s has more than one element (len = %d)! Returning first!"%(tag,len(elems)))
     return elems[0].text
Ejemplo n.º 51
0
 def OnRspOrderAction(self, pInputOrderAction, pRspInfo, nRequestID,
                      bIsLast):
     """CTP callback for an order-action (cancel) response.

     Without a strategy attached, only log a non-zero error (ErrorMsg is
     gb2312-encoded bytes); otherwise forward the whole response to the
     strategy handler.
     """
     if not self.strategy:
         if pRspInfo.ErrorID != 0:
             logger.error(pRspInfo.ErrorMsg.decode('gb2312'))
         return
     self.strategy.on_rsp_order_action(pInputOrderAction, pRspInfo,
                                       nRequestID, bIsLast)
Ejemplo n.º 52
0
 def close(self):
     """Close the DB connection and drop the cursor/connection references.

     Raises AttributeError when the connection was already closed
     (self.conn is None).
     NOTE(review): ``raise AttributeError`` re-raises a fresh, bare
     exception, discarding the caught one -- a plain ``raise`` would
     preserve it.  Python-2 ``except`` syntax.
     """
     try:
         self.conn.close()
         self.conn = None
         self.cursor = None
     except AttributeError, e:
         logger.error("connection closed already, invalid call")
         raise AttributeError
Ejemplo n.º 53
0
 def save_to_db(self):
     """Persist the parsed deal through the DiaperDeals DB pipeline.

     Items without unit_price information are skipped and logged.
     """
     if self.unit_price is None:
         logger.error("Error processing item to database: no unit_price info!")
         logger.error("Link to product for debug: %s" % (self.url))
         return

     pipeline = db_handler.DiaperDealsPipeline()
     pipeline.process_item(self.map_deal_dict())
Ejemplo n.º 54
0
	def process(self, raw_data):
		"""Download one file from an SMB share into an in-memory buffer.

		:param raw_data: (service_name, path) tuple
		OperationFailure from the SMB layer is logged, then re-raised.
		NOTE(review): the filled ``file_obj`` is not returned here --
		confirm whether retrieve_file's side effects are what callers
		rely on.  Python-2 ``except`` syntax.
		"""
		service_name, path = raw_data
		file_obj = StringIO()
		try:
			self.smb_conn.retrieve_file(service_name, path, file_obj)
		except OperationFailure, e:
			logger.error('unable to retrieve files in path %s' % path)
			raise e
Ejemplo n.º 55
0
 def OnRspOrderInsert(self, pInputOrder, pRspInfo, nRequestID, bIsLast):
     """CTP callback for an order-insert response.

     Without a strategy attached, only log a non-zero error; otherwise
     forward the whole response to the strategy handler.
     """
     if not self.strategy:
         if pRspInfo.ErrorID != 0:
             # BUG FIX: decode the gb2312 ErrorMsg itself before
             # formatting -- previously .decode() was applied to the
             # already-formatted string, inconsistent with the sibling
             # OnRspOrderAction handler (and str has no .decode on py3).
             logger.error('{}, requestID: {}'.format(
                 pRspInfo.ErrorMsg.decode('gb2312'), nRequestID))
         return
     self.strategy.on_rsp_order_insert(pInputOrder, pRspInfo, nRequestID,
                                       bIsLast)
Ejemplo n.º 56
0
 def on_connection_closed(self, connection, reply_code, reply_text):
     """Callback: the AMQP connection was closed.

     During an intentional shutdown (self._closing) stop the ioloop;
     otherwise schedule a reconnect attempt in 5 seconds.
     """
     self._channel = None
     if self._closing:
         self.connection.ioloop.stop()
     else:
         logger.error('lost connection...')
         logger.error('reconnect 5 seconds later...')
         self.connection.add_timeout(5, self.reconnect)
Ejemplo n.º 57
0
    def retire(self):
        """Soft-delete this record: set alive=FALSE for self.id in self.table.

        No-op when self.id is unset; execution errors are logged and
        swallowed.
        NOTE(review): table name and id are interpolated into the SQL
        string -- acceptable for internal values, but parameterise if
        either can ever come from user input.
        """
        # BUG FIX: the original mixed tabs and spaces in its indentation
        # (a TabError under Python 3); normalised, and the commented-out
        # debug print was removed.
        if self.id:
            upd_stat = "UPDATE {0} SET alive=FALSE WHERE id={1}".format(self.table, self.id)
            try:
                self.self_cursor.execute(upd_stat)
            except Exception as e:
                logger.error("Cannot retire table {0}. {1}".format(self.table, e.pgerror))
                return
Ejemplo n.º 58
0
	def check(*args, **kargs):
		"""Decorator wrapper: run *func* only with root privileges,
		otherwise log and exit(1).

		NOTE(review): the guard only fires when BOTH uid and gid are
		non-zero; if uid == 0 alone should be required, this likely
		wants ``or``.  ``func`` comes from the enclosing decorator's
		closure (outside this snippet).
		"""
		uid, gid = os.getuid(), os.getgid()

		if uid != 0 and gid != 0:
			logger.error("please use root privilege to exec.")
			sys.exit(1)

		return func(*args, **kargs)
Ejemplo n.º 59
0
def exec_command(command):
	"""Run *command* through the shell; exit(1) if it fails or cannot run.

	SECURITY: the command string is executed by the shell via os.system --
	never pass untrusted input.  Python-2 ``except`` syntax.
	"""
	try:
		# non-zero exit status means the command failed
		if os.system(command) != 0:
			logger.error("exec " + command + " error!")
			sys.exit(1)
	except OSError, oserr:
		logger.error(str(oserr))
		sys.exit(1)
Ejemplo n.º 60
0
 def get_price(self):
     """Extract the current price from the page and store it on self.

     Expects exactly one element to match the configured CSS selector;
     anything else is logged and self.current_price is left untouched.
     """
     current_price_list = [
         p.text for p in self.s.select(settings.WELL['current_price_css_selector'])]
     try:
         # BUG FIX: explicit check instead of ``assert`` so the validation
         # still runs under ``python -O`` (asserts are stripped there, but
         # AssertionError drives this control flow).
         if len(current_price_list) != 1:
             raise AssertionError(
                 "expected exactly one price element, got %d" % len(current_price_list))
         self.current_price = current_price_list[0].strip()
     except AssertionError as err:
         logger.error("Unable to get price information: %s" %
                      (err), exc_info=True)