Example #1
0
    def __on_receive_input(self, fd, cond):

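        # read a chunk of raw bytes from the remote's event device and
        # map the non-zero bytes to a key code tuple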
        data = fd.read(32)
        code = tuple([ord(c) for c in data if 0x0 < ord(c) < 0xff])
        print code

        if (code == _KEY_REPEAT):
            if (self.__accept_repeat_time + 1.0 > time.time() >
                    self.__accept_repeat_time):
                keycode = self.__last_keycode
            else:
                return True

        elif (code == _FLAT_BATTERY and not self.__warned_about_battery):
            self.call_service(msgs.NOTIFY_SVC_SHOW_MESSAGE,
                              "AppleRemote has weak battery")
            logging.warning("AppleRemote has weak battery")
            self.__warned_about_battery = True
            return True

        else:
            keycode = _KEYCODES.get(code)
            self.__accept_repeat_time = time.time() + 0.5

        if (keycode):
            self.emit_message(keycode)
            self.__last_keycode = keycode
        else:
            logging.warning("AppleRemote sent unknown code: %s", ` code `)

        return True
Example #2
0
 def process_IN_CREATE(self, event):
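     # handle a filesystem create event: watch new directories recursively,
     # classify new image files via getLabel and report results over HTTP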
     print("Create event:", event.pathname)
     logging.info("Create event : " + event.pathname)
     if (os.path.isdir(event.pathname)):
         watch_manager.add_watch(event.pathname,
                                 config.MONTIOR_MASK,
                                 rec=True)
     else:
         if (os.path.splitext(
                 event.pathname)[1] == config.IMAGE_FORMAT):  # only checks the file extension; not rigorous
             stream_id = get_path_dir(event.pathname, -1)
             label, percent = getLabel(stream_id, event.pathname)
             if (label != -1):
                 # send the label
                 logging.info("stream: " + str(stream_id) + " | label " +
                              str(label) + " | percent" + str(percent))
                 http_notify(config.CLASSIFY_CALLBACK_ADDR, {
                     'stream_id': stream_id,
                     'label': label,
                     'percent': percent
                 })
             else:
                 # report that the stream failed
                 logging.warning("stream " + str(stream_id) +
                                 " got -1 from getLabel")
                 http_notify(
                     config.STREAM_STATUS_CALLBACK_ADDR, {
                         'stream_id': stream_id,
                         'code': config.CALLBACK_CODE_ONLINE_ERROR_2
                     })
             if (percent > config.DEFAULT_MIN_PERCENT):
                 os.remove(event.pathname)
         else:
             logging.warning("There are error format into! : " +
                             event.pathname)
Example #3
0
    def fetch_all_from_db(returns):
        return_raw = 'raw' in returns
        return_ids = 'ids' in returns
        return_dates = 'dates' in returns

        logging.warning(f"Fetching DB: all channels for their ids and dates")

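        # a single query fetches every Channel node; ids and parsed dates
        # are derived from the raw rows only when requested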
        res_raw = g.run("""MATCH (c:Channel)
            RETURN c.id as id, c.updated_at as updated_at, c.added_to_at as added_to_at
            """).data()

        res_ids = [o['id'] for o in res_raw] if return_ids else None

        if return_dates:
            res_dates = {
                o['id']: {
                    'updated_at':
                    datetime.strptime(o['updated_at'],
                                      '%Y-%m-%dT%H:%M:%S.%fZ'),
                    'added_to_at':
                    datetime.strptime(o['added_to_at'],
                                      '%Y-%m-%dT%H:%M:%S.%fZ'),
                }
                for o in res_raw
            }
        else:
            res_dates = None

        if not return_raw:
            res_raw = None

        return res_raw, res_ids, res_dates
Example #4
0
    def blocks_from_api(self):
        res = []
        per = 100
        current_page = 0
        total_pages = 1

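        # page through the API until the reported total number of pages is reached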
        while (current_page < total_pages):
            self.log(
                f"Fetching API: all blocks for this channel: page {str(current_page + 1)}/{str(total_pages)}"
            )

            response = requests.request(
                "GET",
                "http://api.are.na/v2/channels/" + str(self.id),
                timeout=60,
                headers={"Authorization": "Bearer " + ARENA_API_TOKEN},
                params={
                    "per": per,
                    "page": current_page + 1
                },
            )
            r = response.json()

            res += r['contents']

            current_page += 1
            total_pages = math.ceil(r['length'] / per)

        self.log(f"Done fetching API: {str(len(res))} blocks")
        if len(res) != r['length']:
            logging.warning(
                f"Promised {str(r['length'])} blocks but gotten {str(len(res))}"
            )

        return res
Example #5
0
    def parse_url(self, url, headers=''):
        ''' get soup from url '''

        if headers == '':
            headers = {'User-Agent': self.rotate_UA()}
            content = None
        else:
            # headers were passed in by the caller (e.g. from unit tests)
            pass

        try:
            response = requests.get(url, headers=headers)
            ct = response.headers['Content-Type'].lower().strip()

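            # only build a BeautifulSoup tree for HTML responses; other
            # content types are returned with soup set to None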
            if 'text/html' in ct:
                content = response.content
                soup = BeautifulSoup(content, "html.parser")
            else:
                content = response.content
                soup = None
        except Exception as e:
            log.warning('Parsing error: %s', e)
            return False

        return content, soup, ct
Example #6
0
    def fetch_all_from_api(user_id):
        res = []
        per = 100
        current_page = 0
        total_pages = 1

        while (current_page < total_pages):
            logging.warning(
                f"Fetching API: all channels for user {user_id}: page {str(current_page + 1)}/{str(total_pages)}"
            )
            response = requests.request(
                "GET",
                "https://api.are.na/v2/users/" + str(user_id) + "/channels",
                timeout=60,
                headers={"Authorization": "Bearer " + ARENA_API_TOKEN},
                params={
                    "per": per,
                    "page": current_page + 1
                },
            )
            r = response.json()

            res += r['channels']

            current_page = r['current_page']
            total_pages = r['total_pages']

        return res
Example #7
0
    def _get_position(self):
    
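        # ask mplayer for the stream length and current position, waiting up
        # to one second for each reply on the main loop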
        if (self.__maybe_eof > 0):
            return (0, 0)
    
        if (self.__media_length == 0):
            self.__send_cmd("get_time_length")
            self.__media_length = -1
            timeout = time.time() + 1.0
            gobject.timeout_add(1000, lambda : False)
            while (self.__media_length == -1 and time.time() < timeout):
                gtk.main_iteration(True)
            if (time.time() >= timeout):
                logging.warning("timeout reached for mplayer.get_time_length")

        elif (self.__media_length == -1 and self.__id_length > 0):
            self.__media_length = self.__id_length
        #end if


        self.__media_position = -1
        #if (self.__media_length > 0):
        #    self.__send_cmd("get_percent_pos")
        #else:
        self.__send_cmd("get_time_pos")
        timeout = time.time() + 1.0
        gobject.timeout_add(1000, lambda : False)
        while (self.__media_position == -1 and time.time() < timeout):
            gtk.main_iteration(True)
        if (time.time() >= timeout):
            logging.warning("timeout reached for mplayer.get_time_pos")

        return (self.__media_position, self.__media_length)
async def choose_tasks_action(query: types.CallbackQuery, state: FSMContext):
    #print("TEXT", query.data)
    try:

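        # the callback data carries a task id; if the task exists, offer
        # Delete/Update/Cancel on a reply keyboard, otherwise bail out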
        id_ = int(query.data)
        Session = sessionmaker(bind=engine)
        session = Session()
        response = session.query(Task).filter(Task.id_ == id_).all()
        if len(response) > 0:

            keyboard = types.ReplyKeyboardMarkup(resize_keyboard=True)
            keyboard.row("Delete", "Update")
            keyboard.add("Cancel")

            await state.update_data(id_=id_)
            await bot.send_message(
                query["from"]["id"],
                f"Выбрана задача ({id_}). Выберите действие",
                reply_markup=keyboard)
            await ManageTasks.waiting_for_action_choice.set()
        else:
            await bot.send_message(
                query["from"]["id"],
                "Неправильно выбрано действие, выхожу из интерфейса",
                reply_markup=ReplyKeyboardRemove())
            await state.finish()
            return

    except Exception as e:
        logging.warning("SNAFU")
        logging.warning(str(e))
        return
Example #9
0
async def send_pm(user: Player, p: bytes) -> None:
    d = reader.handle_packet(p, (("msg", osuTypes.message),))

    msg = d["msg"].msg
    tarname = d["msg"].tarname

    if not (target := await glob.players.get(name=tarname)):
        warning(f"{user.name} tried to send message to offline user {tarname}")
        return
Example #10
0
    def _preprocess(self):
        '''
        Load model and set data loader, collect the variable names for sampling,
        and set activation variables to be persistable.
        '''
        feed_vars = [fluid.framework._get_var(var.name, self._program) \
            for var in self._feed_list]

        self._data_loader = fluid.io.DataLoader.from_generator(
            feed_list=feed_vars, capacity=3 * self._batch_size, iterable=True)
        self._data_loader.set_sample_list_generator(self._dataset.generator(
            self._batch_size, drop_last=True),
                                                    places=self._place)

        # collect the variable names for sampling
        persistable_var_names = []
        for var in self._program.list_vars():
            if var.persistable:
                persistable_var_names.append(var.name)

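        # record the activation and weight variable names feeding each quantizable op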
        for op in self._program.global_block().ops:
            op_type = op.type
            if op_type in self._quantizable_op_type:
                if op_type in ("conv2d", "depthwise_conv2d"):
                    self._quantized_act_var_name.add(op.input("Input")[0])
                    self._quantized_weight_var_name.add(op.input("Filter")[0])
                    self._quantized_act_var_name.add(op.output("Output")[0])
                elif op_type == "mul":
                    if self._is_input_all_not_persistable(
                            op, persistable_var_names):
                        op._set_attr("skip_quant", True)
                        logging.warning(
                            "Skip quantizing this mul op because its input variables are not persistable"
                        )
                    else:
                        self._quantized_act_var_name.add(op.input("X")[0])
                        self._quantized_weight_var_name.add(op.input("Y")[0])
                        self._quantized_act_var_name.add(op.output("Out")[0])
                else:
                    # process other quantizable op type, the input must all not persistable
                    if self._is_input_all_not_persistable(
                            op, persistable_var_names):
                        input_output_name_list = self._op_real_in_out_name[
                            op_type]
                        for input_name in input_output_name_list[0]:
                            for var_name in op.input(input_name):
                                self._quantized_act_var_name.add(var_name)
                        for output_name in input_output_name_list[1]:
                            for var_name in op.output(output_name):
                                self._quantized_act_var_name.add(var_name)

        # set activation variables to be persistable, so can obtain
        # the tensor data in sample_data
        for var in self._program.list_vars():
            if var.name in self._quantized_act_var_name:
                var.persistable = True
Example #11
0
 def fetch_one_from_api(user_id):
     logging.warning(f"Fetching API: user id {str(user_id)}")
     response = requests.request(
         "GET",
         "https://api.are.na/v2/users/" + str(user_id),
         timeout=60,
         headers={"Authorization": "Bearer " + ARENA_API_TOKEN},
     )
     r = response.json()
     return r
Example #12
0
    def go_for_it(test_mode=True):
        logging.warning("starting backup, starting at user node, user_id " +
                        str(ARENA_USER_ID))

        u = User.from_api(user_id=ARENA_USER_ID)

        u.merge_in_db()

        u.backup_channels(test_mode=test_mode)

        logging.warning("backup finished all ok")

        return "ok"
Example #13
0
    def get_data(self, url):

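        # scrape name, price, minimum order and supplier for each product
        # listing on the page, then write the collected data to a CSV file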
        content, soup, ct = self.parse_url(url)
        product_list, prod_name_l, prod_price_l, prod_minorder_l, prod_suppliers_l, prod_asin_l = [], [], [], [], [], []

        try:
            # dissect soup
            for soup_src in soup.find_all(
                    "div",
                {
                    "class":
                    "organic-offer-wrapper organic-gallery-offer-inner m-gallery-product-item-v2 img-switcher-parent"
                }):

                product_names = self.extract_text_vtags(
                    soup_src, 'h4', prod_name_l)
                product_prices = self.extract_text_vattributes(
                    soup_src, {"data-e2e-name": "price@@normal"}, prod_price_l)
                product_min_orders = self.extract_text_vattributes(
                    soup_src, {"data-e2e-name": "minOrder"}, prod_minorder_l)
                product_suppliers = self.extract_text_vattributes(
                    soup_src, {"flasher-type": "supplierName"},
                    prod_suppliers_l)

            #generating dict to csv output
            product_dict = {
                'Item Name': product_names,
                'Price Range': product_prices,
                'Minimum Order': product_min_orders,
                'Supplier': product_suppliers
            }

            log.info('Product List: %s', product_dict)

            try:
                df = pd.DataFrame(product_dict)

                # saving the dataframe to csv
                file_name = 'outputs/extract_file_{}-{}.csv'.format(
                    str(date.today()), str(random.randrange(0, 999999)))
                df.to_csv(file_name)
            except Exception as e:
                log.warning('Generating csv report error: %s', e)
                # save to db
                return False

        except Exception as e:
            log.warning('Generating data error: %s', e)
            return False
Example #14
0
    def join_match(self, match: Match, pw: str) -> None:
        if self.match:
            self.enqueue(writer.matchJoinFail())
            return

        if self is not match.host:
            if pw != match.pw:
                warning(
                    f"{self.name} tried to join multiplayer {match.name} with incorrect password",
                )
                self.enqueue(writer.matchJoinFail())
                return

            if not (id := match.next_free()):
                self.enqueue(writer.matchJoinFail())
                return
Example #15
0
    def fetch_all_from_db(returns):
        return_raw = 'raw' in returns
        return_ids = 'ids' in returns
        return_dates = 'dates' in returns

        logging.warning(f"Fetching DB: all blocks for their ids")

        res_raw = g.run("""MATCH (c:Block)
            RETURN c.id as id
            """).data()

        res_ids = [o['id'] for o in res_raw] if return_ids else None

        res_dates = None

        if not return_raw:
            res_raw = None

        return res_raw, res_ids, res_dates
Example #16
0
    def handle_COM_EV_APP_STARTED(self):

        logging.profile(values.START_TIME, "[app] startup complete")

        # load state
        try:
            path, play_files, play_folder, current_file = state.load(
                _STATEFILE)

            path_stack = []
            for p in path:
                f = self.call_service(msgs.CORE_SVC_GET_FILE, p)
                if (f):
                    path_stack.append(f)
                    self.emit_message(msgs.CORE_EV_FOLDER_VISITED, f)
                #end if
            #end for
            self.__browser.set_path_stack(path_stack)

            #self.__play_files = [ self.call_service(msgs.CORE_SVC_GET_FILE, p)
            #                      for p in play_files
            #                      if self.call_service(msgs.CORE_SVC_GET_FILE, p) ]
            self.__play_folder = self.call_service(msgs.CORE_SVC_GET_FILE,
                                                   play_folder)
            self.__current_file = self.call_service(msgs.CORE_SVC_GET_FILE,
                                                    current_file)

        except:
            logging.warning("could not restore navigator state:\n%s",
                            logging.stacktrace())

        self.__arr.set_visible(True)
        self.render()

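        # open a file or URL passed on the command line, if any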
        if (values.uri and
            (values.uri.startswith("http://") or os.path.exists(values.uri))):
            ext = os.path.splitext(values.uri)[1]
            mimetype = mimetypes.ext_to_mimetype(ext)
            f = self.call_service(
                msgs.CORE_SVC_GET_FILE,
                "adhoc://" + File.pack_path("/", values.uri, mimetype))
            self.__load_file(f, True)
Example #17
0
def typecheck_field_access(node, c, method, t_i, c_i):
    if node.name != 'FieldAccess':
        logging.error('FATAL ERROR: invalid node %s for field access' %
            node.name)
        sys.exit(1)

    receiver_type = typecheck_expr(node[1][0], c, method, t_i,
        c_i)
    
    field_name = node[0][0].value.value

    if receiver_type is None:
        logging.warning("FATAL: FieldAccess")
        return

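    # array types only expose the implicit 'length' field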
    if is_array_type(receiver_type):
        if field_name == 'length':
            node.typ = 'Int'
            return 'Int'
        else:
            logging.error('Invalid field access on array type %s' %
                receiver_type)
            sys.exit(42)
    elif primitives.is_primitive(receiver_type):
        logging.error('Invalid field access on primitive type %s' %
            receiver_type)
    else:
        field_decl = name_resolve.field_accessable(c_i, t_i, receiver_type,
            field_name, c.name, False)

        if field_decl is None:
            logging.error('Cannot access field %s of type %s from class %s' %
                (field_name, receiver_type, c.name))
            sys.exit(42)
        elif 'Static' in field_decl.obj.mods:
            logging.error('Instance field method on non-static field')
            sys.exit(42)
        else:
            node.typ = field_decl[1].canon
            return node.typ
Example #18
0
    def __init__(self):

        self.__is_connected = True

        self.__click_count = 0
        self.__click_handler = None

        Component.__init__(self)

        # monitor headset button
        try:
            bus = maemo.get_system_bus()
            obj = bus.get_object(_HAL_SERVICE, _HEADSET_PATH)
            device = dbus.Interface(obj, _HAL_DEVICE_IFACE)
            device.connect_to_signal("Condition", self.__on_hal_condition)
        except:
            logging.warning("unable to monitor headset button")

        # monitor AVRCP
        try:
            bus = maemo.get_system_bus()
            obj = bus.get_object(_HAL_SERVICE, _AVRCP_PATH)
            device = dbus.Interface(obj, _HAL_DEVICE_IFACE)
            device.connect_to_signal("Condition", self.__on_hal_condition)
        except:
            logging.warning("unable to monitor AVRCP status")

        # monitor headset status
        try:
            fd = open(_HEADPHONE_SYS, "r")
            self.__on_connect(None, None)
            self.__watcher = gobject.io_add_watch(fd, gobject.IO_PRI,
                                                  self.__on_connect)
        except:
            logging.warning("unable to monitor headset connection")

        # set up speaker control
        try:
            bus = maemo.get_session_bus()
            obj = bus.get_object("com.nokia.osso_hp_ls_controller",
                                 "/com/nokia/osso_hp_ls_controller")
            self.__speaker = dbus.Interface(
                obj, "com.nokia.osso_hp_ls_controller.loudspeaker")
        except:
            logging.warning("cannot force loudspeaker")
            self.__speaker = None
 def on_disconnect(self, notice):
     logging.warning(notice)
Example #20
0
 def log(self, message, error=False):
     msg = f"Block {str(self.class_).ljust(5)[0:5]} {str(self.id)}: {message}"
     if error:
         logging.error(msg)
     else:
         logging.warning(msg)
 def on_warning(self, warning):
     logging.warning(warning)
Example #22
0
 def log(self, message):
     logging.warning(
         f"Channel {str(self.id)} {str(self.title).ljust(10)[0:10]}: {message}"
     )
Example #23
0
                        # the corresponding folder is /data/model/{{ stream_id }}/
                        logging.debug('model exists')
                        http_notify(
                            config.STREAM_STATUS_CALLBACK_ADDR, {
                                'stream_id': child.stream_id,
                                'code':
                                config.CALLBACK_CODE_MANUAL_MODEL_EXISTS
                            })
                    else:
                        logging.debug('model not exists')
            elif (child.stream_status == config.STREAM_STATUS_ONLINE):
                logging.debug("SUCCESS online process")
                # the online screenshot process ran successfully

        else:  # process execution failed
            logging.warning("ERROR! pid is: " + str(child.pid) +
                            " | stream id is " + str(child.stream_id))
            if (child.stream_status == config.STREAM_STATUS_COLLECTION):
                logging.warning("ERROR collect process")
                # a process error usually means the stream was dropped
                http_notify(
                    config.STREAM_STATUS_CALLBACK_ADDR, {
                        'stream_id': child.stream_id,
                        'code': config.CALLBACK_CODE_COLLECT_ERROR_1
                    })
            elif (child.stream_status == config.STREAM_STATUS_MANUAL):
                logging.warning("ERROR manual process")
            elif (child.stream_status == config.STREAM_STATUS_ONLINE):
                logging.warning("ERROR online process")
                http_notify(
                    config.STREAM_STATUS_CALLBACK_ADDR, {
                        'stream_id': child.stream_id,
 def on_disconnect(self, notice):
     logging.warning(notice)
 def on_timeout(self):
     logging.warning('TIMEOUT')
Example #26
0
path_root = os.path.dirname(os.path.abspath(__file__))
path = path_root + '/../' + config.DATA_DIR

s3 = boto3.resource('s3')

if args.summarize:
    print 'Generating summary of', file_name, '...'
    summary_name = SUMMARY_TEMPLATE.format(date_to_copy)
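    # keep the first line plus every SUMMARIZE_FREQUENCY-th line of the source file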
    cmd = "awk 'NR == 1 || NR % {freq} == 0' {path}/{source} > {path}/{dest}"
    cmd = cmd.format(freq=config.SUMMARIZE_FREQUENCY, path=path, source=file_name, dest=summary_name)

    try:
        subprocess.call(cmd, shell=True)
        print 'Summary created!'
    except OSError as ex:
        logging.warning(ex)
        print 'Summary creation failed for reason:', ex

    try:
        upload_file(summary_name, path)
        print 'Summary copied!'
    except OSError as ex:
        logging.warning(ex)
        print 'Summary copy failed for reason:', ex

print '[{}] Copying tweets from {}...'.format(today, file_name)
try:
    upload_file(file_name, path)
    print 'Tweets copied!'
except OSError as ex:
    logging.warning(ex)
 def on_warning(self, warning):
     logging.warning(warning)
Example #28
0
def typecheck_expr(node, c, method, t_i, c_i):
    # see if type for expr has already been resolved
    if hasattr(node, 'typ') and node.typ != None:
        return node.typ
    
    t = None

    # DO STUFF HERE...
    if node.name == 'Assignment':
        t = typecheck_assignment(node, c, method, t_i, c_i)
    elif node.name == 'MethodInvocation':
        t = typecheck_method_invocation(node, c, method, t_i, c_i)
    elif node.name == 'CreationExpression':
        t = typecheck_creation(node, c, method, t_i, c_i)
    elif node.name in ['AndExpression', 'OrExpression']:
        t = typecheck_conditional(node, c, method, t_i, c_i)
    elif node.name in ['EqualExpression', 'NotEqualExpression']:
        t = typecheck_equality(node, c, method, t_i, c_i)
    elif node.name in ['LessThanExpression', 'LessThanEqualExpression',
        'GreaterThanExpression', 'GreaterThanEqualExpression']:
        t = typecheck_relational(node, c, method, t_i, c_i)
    elif node.name in ['AddExpression', 'SubtractExpression']:
        t = typecheck_add(node, c, method, t_i, c_i)
    elif node.name in ['MultiplyExpression', 'DivideExpression',
        'ModuloExpression']:
        t = typecheck_mult(node, c, method, t_i, c_i)
    elif node.name in ['NotExpression', 'NegateExpression']:
        t = typecheck_unary(node, c, method, t_i, c_i)
    elif node.name == 'PostfixExpression':
        t = typecheck_postfix(node, c, method, t_i, c_i)
    elif node.name == 'CastExpression':
        t = typecheck_cast_expression(node, c, method, t_i, c_i)
    elif node.name == 'InstanceofExpression':
        t = typecheck_instanceof(node, c, method, t_i, c_i)
    elif node.name in ['BinaryAndExpression', 'BinaryOrExpression']:
        t = typecheck_eager_boolean(node, c, method, t_i, c_i)

    # Statements
    elif node.name == 'ReturnStatement':
        t = typecheck_return(node, c, method, t_i, c_i)
    elif node.name == 'IfStatement':
        t = typecheck_if(node, c, method, t_i, c_i)
    elif node.name == 'WhileStatement':
        t = typecheck_while(node, c, method, t_i, c_i)
    elif node.name == 'ForStatement':
        t = typecheck_for(node, c, method, t_i, c_i)
    elif node.name == 'LocalVariableDeclaration' or node.name == 'FieldDeclaration':
        t = typecheck_local_var_decl(node, c, method, t_i, c_i)

    # Primarys
    elif node.name == 'Literal':
        t = typecheck_literal(node, c, method, t_i, c_i)
    elif node.name == 'This':
        t = c.name
    elif node.name == 'FieldAccess':
        t = typecheck_field_access(node, c, method, t_i, c_i)
    elif node.name == 'ArrayAccess':
        t = typecheck_array_access(node, c, method, t_i, c_i)

    elif node.name == 'ExclusiveOrExpression':
        logging.error("SHOULD NOT SEE THESE")
        sys.exit(1)
    else:
        logging.warning("typecheck could not run on " + node.name)

    if not isinstance(t, str) and t is not None:
        logging.warning("typecheck found a non-type: %s %s", node.name, t)

    # set type
    node.typ = t

    # return the type
    return t
Example #29
0
async def auth(_name: str, md5: str, req: Request) -> bool:
    name = unquote(_name)

    if not (player := await glob.players.find_login(name, md5)):
        warning(f"{name} failed authentication")
        return False
 def on_timeout(self):
     logging.warning('TIMEOUT')
Example #31
0
    file_name = obj.key
    date_to_copy = file_name.split('tweets.')[1].split('.summary.json')[0]

    print 'Downloading', file_name
    s3.Bucket(config.SUMMARY_BUCKET).download_file(
        file_name, '{}/{}'.format(path, file_name))

    print 'Generating CSV of', file_name, '...'
    csv_name = SUMMARY_CSV_TEMPLATE.format(date_to_copy)
    try:
        to_csv('{}/{}'.format(path, file_name),
               '{}/{}'.format(path, csv_name),
               level=3)
        print 'CSV created!'
    except Exception as ex:
        logging.warning(ex)
        print 'CSV creation failed for reason:', ex

    try:
        upload_file(csv_name, path, config.SUMMARY_CSV_BUCKET)
        print 'CSV copied!'
    except OSError as ex:
        logging.warning(ex)
        print 'Copy failed for reason:', ex
    else:
        os.remove('{}/{}'.format(path, csv_name))
        print 'CSV deleted!'

    print 'Deleting {}...'.format(file_name)
    os.remove('{}/{}'.format(path, file_name))
    print 'File deleted!'
Example #32
0
        ):  # format client data (hash, utc etc.) & ensure it is valid
            request.resp_headers[
                "cho-token"
            ] = "no"  # client knows there is something up if we set token to 'no'
            return writer.userID(-2)

        username = base_info[0]
        pw = base_info[
            1
        ].encode()  # password in md5 form, we will use this to compare against db's stored bcrypt later

        user = await glob.db.fetchrow("SELECT * FROM users WHERE name = %s", [username])
        if (
            not user
        ):  # ensure user actually exists before attempting to do anything else
            warning(f"User {username} does not exist.")

            request.resp_headers[
                "cho-token"
            ] = "no"  # client knows there is something up if we set token to 'no'
            return writer.userID(-1)

        # if server is migrated then passwords are previously stored as bcrypt
        # lets check if we need to convert and do so if needed
        if glob.config.server_migration and (
            "$" in user["pw"] and len(user["pw"]) == 60
        ):
            user_pw = user["pw"].encode()
            if not bcrypt.checkpw(pw, user_pw):
                warning(
                    f"{username}'s login attempt failed: provided an incorrect password",
Example #33
0
 def log(self, message):
     logging.warning(
         f"User {str(self.id)} {str(self.slug).ljust(10)[0:10]}: {message}")
Example #34
0
            embed.set_author(
                url=f"https://{glob.config.domain}/u/{self.id}",
                icon_url=f"https://a.{glob.config.domain}/{self.id}",
                name=self.name,
            )

            embed.add_field(
                name="New banned user",
                value=f"{self.name} has been banned by {fr.name} for {reason}.",
                inline=True,
            )

            wh.add_embed(embed)
            await wh.post()

        warning(f"{self.name} has been banned for {reason}.")

    async def freeze(self, reason: str, fr: "Player") -> None:
        expire = datetime.now() + timedelta(days=7)

        if self.frozen:
            return  # ?

        self.frozen = True
        self.freeze_timer = expire.timestamp()

        await self.add_priv(Privileges.Frozen)
        await glob.db.execute(
            "UPDATE users SET freeze_timer = %s WHERE id = %s",
            [self.freeze_timer, self.id],
        )