Example #1
0
File: main.py Project: Rlnitsua/Nas
def processDoubanFilm(path, filmName):
    for root, dirs, files in os.walk(path, True):
        if (len(files) == 1 and Utils.isVideoFile(files[0])):
            processOneFilm(root, files[0], filmName)
        elif (len(files) == 0):
            Log.d(tag, "delete -- " + path)
            os.rmdir(path)
Example #2
0
 def delWin(self, friend_jid):
     # The window was closed, so leave the conversation. Note: leave() is a coroutine and must be scheduled on the event loop
     asyncio.ensure_future(
         self.conversationList[str(friend_jid)]['conversation'].leave())
     del self.conversationList[str(friend_jid)]
     self._Sign_Close_Conv.emit(str(friend_jid))
     Log.info("关闭聊天窗口", self.conversationList)
Example #3
0
File: main.py Project: Rlnitsua/Nas
def deleteDownloadingFiles(root, files):
    for file in files:
        reg = "^.*downloading$"
        if (re.match(reg, file)):
            path = os.path.join(root, file)
            Log.d(tag, "remove : " + path)
            os.remove(path)
Example #4
0
 def start(self, user):
     Log.info('Starting login', user)
     self.jid = aioxmpp.JID.fromstr(user['JID'])
     self.password = user['PWD']
     try:
         # no_verify=True is a big pitfall! Note that certificate verification is disabled; see: https://docs.zombofant.net/aioxmpp/devel/api/public/security_layer.html
         # You may need to define your own security_layer to secure the connection
         self.client = aioxmpp.PresenceManagedClient(
             self.jid,
             aioxmpp.make_security_layer(self.password, no_verify=True),
             loop=self._loop)
         self.client.set_presence(
             aioxmpp.PresenceState(available=True,
                                   show=aioxmpp.PresenceShow.FREE_FOR_CHAT),
             {aioxmpp.structs.LanguageTag.fromstr('en'): 'Online'})
         self.client.on_failure.connect(self.on_failure)  # called when startup fails
         self.client.on_stream_established.connect(
             self.on_login_success)  # called when the connection to the server is established
         self.client.on_stream_suspended.connect(
             self.on_internet_disconnect)  # called when the server connection is suspended
         self.client.on_stream_destroyed.connect(
             self.on_internet_disconnect)  # called when the server connection is destroyed
     except ConnectionRefusedError:
         self._sign_login.emit(3)
         return
     except Exception:
         return
Example #5
0
def create_partition(data: dict, row_list: list, part_num: int,
                     output_path: str, oi: list) -> None:
    """
    根据指定的行列表、分块数、输出路径,生成数据集字典的分块文件

    :param data: 数据集字典
    :param row_list: 行列表(由index组成的列表)
    :param part_num: 分块数
    :param output_path: 输出路径
    :param oi: 分块数据文件时保留原索引的数据表列表
    :return: 无返回值
    """
    if output_path is None:
        Log.critical('No output path configured for the partition files')
        return
    directory = os.path.join(output_path, str(part_num + 1))
    if os.path.exists(directory):
        return
    else:
        os.makedirs(directory)
        for table_name in data:
            tmp_df = pd.DataFrame(data[table_name])
            if table_name in oi:
                tmp_df_subset = tmp_df[tmp_df.index.isin(
                    row_list)].copy().reset_index()
            else:
                tmp_df_subset = tmp_df[tmp_df.index.isin(
                    row_list)].copy().reset_index(drop=True)
            tmp_df_subset.to_csv('{}/{}.csv'.format(directory, table_name),
                                 index=False)
    return
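As a point of reference, here is a minimal standalone sketch of the subsetting step used above: rows are selected with index.isin(row_list), and reset_index() either keeps or drops the original index depending on whether the table name is in the oi list. All values below are made up for illustration.

import pandas as pd

# Toy dataset dictionary, row list and oi list (all hypothetical)
data = {'users': {'name': ['a', 'b', 'c', 'd']},
        'orders': {'amount': [10, 20, 30, 40]}}
row_list = [1, 3]
oi = ['users']  # tables that keep their original index as a column

for table_name in data:
    tmp_df = pd.DataFrame(data[table_name])
    subset = tmp_df[tmp_df.index.isin(row_list)].copy()
    if table_name in oi:
        subset = subset.reset_index()           # original index kept as a column
    else:
        subset = subset.reset_index(drop=True)  # original index discarded
    print(table_name)
    print(subset)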
Example #6
0
    def __init__(self, repository_path, name=None, db_session=None, branch='master'):
        """ Initialize the Repository Miner

        Args:
            repository_path: The url to the repository
            name: Optional. The unique name of this repository. Defaults to the last part of the path.
            db_session: Optional. If not specified, a new one will be created.
            branch: Optional. The branch to mine. Defaults to the master branch.
        """

        self.repository = Repo(repository_path)
        self.branch = branch

        self.PROGRAMMING_LANGUAGES = Config.programming_languages
        self.EMPTY_TREE_SHA = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"

        # Size of the return character for calculating file size
        self.RETURN_SIGN_SIZE = 2

        self.init_db_sessions(db_session=db_session)

        self.existing_commit_ids = set()
        self.repository_id = self.__create_new_repository(self.db_session, name, repository_path)

        Log.info("Start mining the repository with path: " + repository_path)
        commits = self.__get_commits()
        self.iterate_commits(commits)
Example #7
0
    def open_session(self, key):

        # If a new session is requested several times it is probably
        # due to transparency or an error, so just keep using the
        # same variable and key
        try:
            self.session_jar
        except:
            # Don't have a session, so go on..
            pass
        else:
            # Session already initiated, just return..
            return

        if type(key) == type(''):
            self.session_key = key
        else:
            try:
                self.session_key = str(hash(key))
            except:
                raise Exception

        Log.out('PLUGIN::SESSION -> try key: %s' % self.session_key)
        self.session_jar = self._open_session_from_disk(self.session_key)

        return self.session_key
Example #8
0
    def iterate_commits(self, commits):
        """Iterate all commits and do the work
        Args:
            commits:
        """
        threads = []
        log_interval = 1

        if len(commits) > 1000:
            log_interval = 100

        for i, commit in enumerate(commits):
            if i % log_interval == 0:
                prc = i / len(commits) * 100
                Log.info("{0:0.2f}% - processed commits: {1}".format(prc, i))

            if commit.parents:
                previous_commit = commit.parents[0]
            else:
                previous_commit = self.EMPTY_TREE_SHA

            if (len(commit.parents) <= 1) and (not self.__commit_exists(str(commit))):
                project_file_count = self.__get_project_file_count(self.repository_id)
                project_size = self.__get_project_size(self.repository_id)
                commit_orm = self.__process_commit(commit, previous_commit, project_size, project_file_count,
                                                   db_session=self.db_session)
                self.db_session.commit()

                # Prevents half-processed commits from staying in the database (when gtsoog dies)
                commit_orm.complete = True
                self.db_session.commit()
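For context, a rough standalone sketch of how commits and their first parents can be walked with GitPython; repo_path is a hypothetical local checkout, and the empty-tree SHA is used as the diff base for root commits, as in the miner above. This illustrates the iteration pattern only, not the miner itself.

from git import Repo  # GitPython

EMPTY_TREE_SHA = "4b825dc642cb6eb9a060e54bf8d69288fbee4904"
repo_path = "/path/to/some/repo"  # hypothetical path

repo = Repo(repo_path)
commits = list(repo.iter_commits("master"))

for i, commit in enumerate(reversed(commits)):
    # Diff against the first parent, or against the empty tree for the root commit
    parent = commit.parents[0] if commit.parents else EMPTY_TREE_SHA
    diffs = commit.diff(parent)
    print("{}: {} ({} changed files)".format(i, commit.hexsha[:8], len(diffs)))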
Example #9
0
    def account(self, update, context):
        session = Session.get_from(context.user_data)
        accounts = session.datasource.account()
        reply_markup = None
        if isinstance(accounts, dict) and accounts.get('Success') and len(
                accounts.get('Data', [])):
            msg = 'Select the account'
            keyboard = []
            keyboardline = []
            for row in accounts.get('Data', []):
                if len(keyboardline) <= 1:
                    keyboardline.append(
                        InlineKeyboardButton(row.get('name'),
                                             callback_data='account_{}'.format(
                                                 row.get('id'))))
                if len(keyboardline) == 2:
                    keyboard.append(keyboardline)
                    keyboardline = []

            if len(keyboardline):
                keyboard.append(keyboardline)
            if len(keyboard):
                reply_markup = InlineKeyboardMarkup(keyboard)
            Log.info(accounts)
        else:
            Log.error(accounts)
            msg = "Could not get accounts, try later /done"

        self.base_handler.reply_text(update,
                                     context,
                                     text=msg,
                                     reply_markup=reply_markup)
        return self.WELCOME
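A minimal sketch of the two-buttons-per-row keyboard layout built above, assuming python-telegram-bot's InlineKeyboardButton and InlineKeyboardMarkup; the account rows are made-up sample data.

from telegram import InlineKeyboardButton, InlineKeyboardMarkup

# Hypothetical account rows as they might come back from the datasource
rows = [{'id': 1, 'name': 'Cash'}, {'id': 2, 'name': 'Bank'}, {'id': 3, 'name': 'Savings'}]

keyboard, keyboardline = [], []
for row in rows:
    keyboardline.append(InlineKeyboardButton(row['name'],
                                             callback_data='account_{}'.format(row['id'])))
    if len(keyboardline) == 2:   # two buttons per keyboard row
        keyboard.append(keyboardline)
        keyboardline = []
if keyboardline:                 # trailing partial row
    keyboard.append(keyboardline)

reply_markup = InlineKeyboardMarkup(keyboard)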
Example #10
0
 def on_chatwith(self,room):
     jid = str(room.jid)
     if jid not in self.roomList:
         win = self.getWin(room)
         Log.info("新开窗口", self.roomList[jid]['room'])
         self.roomList[jid]['room'].on_message.connect(partial(self.on_message,self.roomList[jid]['room']))
     self.roomList[jid]['win'].show()
Example #11
0
def cal_distinct(pred_list, n_gram=1):
    """
    calculate distinct

    Args:
        pred_list: list of predicted sequences
        n_gram: int, n-gram order, default 1
    Returns:
        distinct_score: float dist-n score
    """
    Log.info(
        "calculate distinct score start: pred_id_list_size = {}, n_gram = {}".
        format(len(pred_list), n_gram))
    ngram_vectorizer = CountVectorizer(ngram_range=(n_gram, n_gram),
                                       decode_error="ignore",
                                       token_pattern=r'\b\w+\b')
    ngram_arr = ngram_vectorizer.fit_transform(pred_list).toarray()
    exist = (ngram_arr > 0) * 1.0
    factor = np.ones(ngram_arr.shape[1])
    dis_ngram_arr = np.dot(exist, factor)
    sum_arr = np.sum(ngram_arr, 1)
    indics = sum_arr != 0
    sum_arr = sum_arr[indics]
    dis_ngram_arr = dis_ngram_arr[indics]
    # sum_arr[sum_arr == 0] = sys.maxsize
    distinct_arr = dis_ngram_arr / sum_arr
    distinct_score = np.mean(distinct_arr)
    Log.info("calculate distinct score success")
    return distinct_score
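To make the metric concrete, a small self-contained illustration of the distinct-n idea (unique n-grams divided by total n-grams per sequence, then averaged), written in plain Python rather than with CountVectorizer.

def distinct_n(pred_list, n_gram=1):
    """Average ratio of unique n-grams to total n-grams over all predictions."""
    scores = []
    for sent in pred_list:
        tokens = sent.split()
        ngrams = [tuple(tokens[i:i + n_gram]) for i in range(len(tokens) - n_gram + 1)]
        if not ngrams:
            continue  # skip sequences that are shorter than n_gram
        scores.append(len(set(ngrams)) / len(ngrams))
    return sum(scores) / len(scores) if scores else 0.0

print(distinct_n(["the cat sat on the mat", "a dog barks"], n_gram=1))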
Example #12
0
def vec_extrema(tgt_embed_list, pred_embed_list, word_dim=None):
    """
    calculate the vector extrema
    
    Args:
        tgt_embed_list: list of the target sequences' word embeddings
        pred_embed_list: list of the predicted sequences' word embeddings
        word_dim: int, word embedding dimension
    Returns:
        score: float score of vector extrema
    """
    Log.info("calculate the vec extrema start: size = {}".format(
        len(tgt_embed_list)))
    if not word_dim:
        word_dim = tgt_embed_list[0][0].shape[0]
    vec_extrema_list = []
    for tgt_embed, pred_embed in zip(tgt_embed_list, pred_embed_list):
        if len(tgt_embed) == 0 or len(pred_embed) == 0:
            vec_extrema_list.append(0.0)
            continue

        tgt_extrema_embed = cal_extrema_embed(tgt_embed, word_dim)
        pred_extrema_embed = cal_extrema_embed(pred_embed, word_dim)
        vec_extrema_score = cal_cosine_similarity(tgt_extrema_embed,
                                                  pred_extrema_embed)
        vec_extrema_list.append(vec_extrema_score)
    vec_extrema_score = np.mean(vec_extrema_list)
    Log.info("calculate the vec extrema success")
    return vec_extrema_score
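cal_extrema_embed and cal_cosine_similarity are not shown above; the sketch below illustrates the usual vector-extrema pooling (per dimension, keep the most extreme value: the max if its magnitude beats the min's, otherwise the min) followed by cosine similarity, on toy data. It is an assumption about those helpers, not their actual implementation.

import numpy as np

def extrema_pool(embeds):
    """Per-dimension extrema pooling over a sequence of word vectors."""
    embeds = np.asarray(embeds, dtype=float)
    max_v, min_v = embeds.max(axis=0), embeds.min(axis=0)
    return np.where(np.abs(max_v) >= np.abs(min_v), max_v, min_v)

def cosine(a, b):
    return float(np.dot(a, b) / (np.linalg.norm(a) * np.linalg.norm(b)))

tgt_embed = np.random.rand(5, 8) - 0.5   # 5 target words, 8-dim embeddings (toy data)
pred_embed = np.random.rand(3, 8) - 0.5  # 3 predicted words
print(cosine(extrema_pool(tgt_embed), extrema_pool(pred_embed)))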
Example #13
0
def embed_avg(tgt_embed_list, pred_embed_list):
    """
    calculate the embedding average

    Args:
        tgt_embed_list: list of the target sequences' word embeddings
        pred_embed_list: list of the predicted sequences' word embeddings
    Returns:
        embed_avg_score: float score of embedding average
    """
    Log.info("calculate the embed avg start: size = {}".format(
        len(tgt_embed_list)))
    embed_avg_list = []
    for tgt_embed, pred_embed in zip(tgt_embed_list, pred_embed_list):
        if len(tgt_embed) == 0 or len(pred_embed) == 0:
            embed_avg_list.append(0.0)
            continue
        tgt_avg_embed = np.divide(np.sum(tgt_embed, 0),
                                  np.linalg.norm(np.sum(tgt_embed, 0)))
        pred_avg_embed = np.divide(np.sum(pred_embed, 0),
                                   np.linalg.norm(np.sum(pred_embed, 0)))
        embed_avg_score = cal_cosine_similarity(tgt_avg_embed, pred_avg_embed)
        embed_avg_list.append(embed_avg_score)
    embed_avg_score = np.mean(embed_avg_list)
    Log.info("calculate the embed avg success!")
    return embed_avg_score
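The same average-then-cosine computation on toy vectors, as a compact standalone illustration: each sequence is pooled by dividing its vector sum by the sum's norm, exactly as above, and the score is the cosine of the two pooled vectors.

import numpy as np

def avg_pool(embeds):
    """Sum of a sequence's word vectors, normalized to unit length."""
    summed = np.sum(embeds, 0)
    return np.divide(summed, np.linalg.norm(summed))

tgt_embed = [np.array([1.0, 0.0]), np.array([0.0, 1.0])]  # toy 2-dim word vectors
pred_embed = [np.array([1.0, 1.0])]
# Both pooled vectors are unit length, so the dot product equals the cosine
score = float(np.dot(avg_pool(tgt_embed), avg_pool(pred_embed)))
print(score)  # 1.0 (up to floating point) for this toy data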
Example #14
0
    def predict_on_test(self, test):
        """

        :param dataset.Dataset test:
        :return:
        """
        saver = tf.train.Saver()
        with tf.Session() as sess:
            Log.log("Testing model over test set")
            saver.restore(sess, self.model_name)

            y_pred = []

            for batch_data in self._next_batch(data=test):
                feed_dict = {
                    **batch_data,
                    self.dropout_embedding: 1.0,
                    self.dropout_lstm: 1.0,
                    self.dropout_cnn: 1.0,
                    self.dropout_hidden: 1.0,
                    self.is_training: False,
                }
                preds = sess.run(self.labels_pred, feed_dict=feed_dict)
                y_pred.extend(preds)

        return y_pred
Example #15
0
    def predict(self, test):
        """

        :param dataset.Dataset test:
        :return:
        """
        saver = tf.train.Saver()
        with tf.Session() as sess:
            Log.log('Testing model over test set')
            saver.restore(sess, self.model_name)

            y_pred = []
            num_batch = len(test.labels) // self.batch_size + 1

            data = {
                'words': test.words,
                'labels': test.labels,
            }

            for idx, batch in enumerate(
                    self._next_batch(data=data, num_batch=num_batch)):
                words, labels, sequence_lengths = batch
                feed_dict = {
                    self.word_ids: words,
                    self.sequence_lens: sequence_lengths,
                    self.dropout_embedding: 1.0,
                    self.dropout_lstm: 1.0,
                    self.dropout_cnn: 1.0,
                    self.dropout_hidden: 1.0,
                    self.is_training: False,
                }
                preds = sess.run(self.labels_pred, feed_dict=feed_dict)
                y_pred.extend(preds)

        return y_pred
Example #16
0
 def start_tcp_relay(self):
     if not hasattr(settings, 'iproxy_process'):
         if not path.exists('/var/run/usbmuxd.pid'):
             Utils.run('usbmuxd', shell=True)
             Log.w('Please press Trust on your device within the next 10 seconds. Unplug and plug back in if no Trust popup shows up.')
             sleep(10)
         settings.iproxy_process = Utils.run('{iproxy} 2222 22'.format(iproxy=settings.iproxy), shell=True, process=True)
Example #17
0
 def dump_keychain(self, dest='/tmp'):
     dest = dest.replace('\ ', ' ')
     if self.KEYCHAIN_DUMP:
         Log.d('Dumping Keychain data to {dest} with {bin}'.format(bin=self.KEYCHAIN_DUMP, dest=dest))
         self.run_on_ios('cd "{working}"; {keychain}'.format(working=dest, keychain=self.KEYCHAIN_DUMP), shell=True)
     else:
         Log.d('Error: No keychain dump binary found - was prepare_analysis run?')
Example #18
0
 def dump_file_protect(self, file):
     file = file.replace('\ ', ' ')
     if self.DUMP_FILE_PROTECT:
         Log.d('Dumping file protection flags of {file} with {bin}'.format(bin=self.DUMP_FILE_PROTECT, file=file))
         return self.run_on_ios('{dfp} "{file}"'.format(dfp=self.DUMP_FILE_PROTECT, file=file), shell=True)[0]
     else:
         Log.d('Error: No file protection dump binary found - was prepare_analysis run?')
Example #19
0
    def __init__(self, title, issue_id, findings, finding_details=None):
        self.ISSUE_TITLE = title
        self.ID = issue_id
        self.FINDINGS = findings or ''
        self.DETAILS = finding_details or ''

        Log.d('\n'+self.issue())
Example #20
0
    def __create_new_repository(self, db_session, name, repository_url):
        """Create new repository record in database if not exists

        Args:
            db_session:
            name:
            repository_url:

        Returns: repository record

        """
        # Try to retrieve the repository record; if none is found, a new one is created.
        if name is None:
            name = os.path.split(repository_url)[1]

        self.repository_orm = db_session.query(Repository).filter(Repository.name == name).one_or_none()
        if not self.repository_orm:
            # create new repository
            self.repository_orm = Repository(
                name=name,
                url=repository_url[0:MAX_URL_LENGTH]
            )
            db_session.add(self.repository_orm)
            db_session.flush()
            db_session.commit()
        else:
            # read existing commit ids into memory
            Log.info("Repository " + str(self.repository_orm.name) + " already in database.")
            self.__read_existings_commit_ids(self.repository_orm.id)

        return self.repository_orm.id
Example #21
0
def mix_dataset(data: dict, d_name: set = None) -> dict:
    """
    Mix the datasets in the given dataset dictionary, identified by the given set of dataset names

    :param data: dataset dictionary
    :param d_name: set of dataset names; there are two cases:
        1. When all data tables live in one directory and are read in with dataset names like X_A and X_B, pass {A, B}.
        2. When the data tables are stored and read in per dataset folder, this parameter is not needed
    :return: the mixed dataset dictionary
    """
    need_mix = {}
    if d_name is None:
        d_name = Run_Val.dataset_names
    for k in data:
        for x in d_name:
            if str(k).endswith(x):
                tmp_name = str(k).replace('_' + x, '')
                if tmp_name in need_mix:
                    need_mix[tmp_name].add(k)
                else:
                    need_mix[tmp_name] = {k}
    for k, v in need_mix.items():
        if len(v) > 1:
            Log.info('Merging data tables [{}] into [{}]'.format(','.join(v), k))
            data[k] = pd.concat([data.get(x) for x in v],
                                sort=True,
                                ignore_index=True)
            for x in v:
                del data[x]
    return data
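A minimal sketch of the merge step above on made-up tables: tables sharing a prefix but carrying different dataset suffixes (here orders_A and orders_B) are concatenated into a single table and the originals are dropped.

import pandas as pd

# Hypothetical tables read in as <table>_<dataset>
data = {
    'orders_A': pd.DataFrame({'amount': [1, 2]}),
    'orders_B': pd.DataFrame({'amount': [3]}),
}

data['orders'] = pd.concat([data['orders_A'], data['orders_B']],
                           sort=True, ignore_index=True)
del data['orders_A'], data['orders_B']
print(data['orders'])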
Example #22
0
 def open_session(self, key):
     
     # If a new session is requested several times it is probably
     # due to transparency or an error, so just keep using the
     # same variable and key
     try: 
         self.session_jar
     except:
         # Don't have a session, so go on.. 
         pass
     else: 
         # Session already initiated, just return..
         return
     
     if type(key) == type(''): 
         self.session_key = key
     else:
         try: 
             self.session_key = str( hash( key )  )
         except: 
             raise Exception
     
     Log.out('PLUGIN::SESSION -> try key: %s' % self.session_key)
     self.session_jar = self._open_session_from_disk(self.session_key)
     
     return self.session_key
Example #23
0
    def on_privmsg(self, serv, ev):
        """
        If the published message is a command, format helper as a command,
        else format as a message.
        Call on_privmsg for all enabled plugins.
        """
        try:
            helper = {
                'event':ev.eventtype(),
                'author':irclib.nm_to_n(ev.source()),
                'author_nm':ev.source(),
                'chan':ev.target(),
                'msg':ev.arguments()[0].strip(),
            }

            if helper['msg'][0] == self.settings['cmd prefix']:
                helper['event'] = 'cmd'
                helper['cmd'] = helper['msg'].split(' ')[0][1:]
                helper['args'] = helper['msg'].split(' ')[1:]
                del helper['msg']

                for p in self.enabled_plugins:
                    p.on_cmd(serv, ev, helper)
            else:
                for p in self.enabled_plugins:
                    p.on_privmsg(serv, ev, helper)

        except Exception, e:
            Log.log(Log.log_lvl.ERROR, 'Failed to process privmsg event. Catches exception %s' % e)
            Log.log(Log.log_lvl.DEBUG, format_exc())
Example #24
0
def greedy_match(tgt_embed_list, pred_embed_list):
    """
    calculate the greedy matching

    Args:
        tgt_embed_list: list of the target sequences' word embeddings
        pred_embed_list: list of the predicted sequences' word embeddings
    Returns:
        greedy_score: float score of greedy match
    """
    Log.info("calculate the greedy match start: size = {}".format(
        len(tgt_embed_list)))
    greedy_list = []
    for tgt_embed, pred_embed in zip(tgt_embed_list, pred_embed_list):
        if len(tgt_embed) == 0 or len(pred_embed) == 0:
            greedy_list.append(0.0)
            continue

        greedy1 = cal_greedy(tgt_embed, pred_embed)
        greedy2 = cal_greedy(pred_embed, tgt_embed)
        greedy_score = (greedy1 + greedy2) / 2
        greedy_list.append(greedy_score)
    greedy_score = np.mean(greedy_list)
    Log.info("calculate the greedy match success!")
    return greedy_score
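cal_greedy is not shown above; the sketch below is one plausible standalone reading of greedy matching: every word vector on one side is matched to its best cosine similarity on the other side, the maxima are averaged, and the two directions are averaged again, as in the loop above. Toy embeddings only.

import numpy as np

def greedy_direction(a_embeds, b_embeds):
    """Average over vectors in a_embeds of their best cosine similarity in b_embeds."""
    a = np.asarray(a_embeds, dtype=float)
    b = np.asarray(b_embeds, dtype=float)
    a = a / np.linalg.norm(a, axis=1, keepdims=True)
    b = b / np.linalg.norm(b, axis=1, keepdims=True)
    sims = a.dot(b.T)                    # pairwise cosine similarities
    return float(sims.max(axis=1).mean())

tgt_embed = np.random.rand(4, 8)   # toy embeddings: 4 target words, 8 dims
pred_embed = np.random.rand(6, 8)  # 6 predicted words
greedy_score = (greedy_direction(tgt_embed, pred_embed) +
                greedy_direction(pred_embed, tgt_embed)) / 2
print(greedy_score)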
Example #25
0
    def parse_dimension(self, line):
        """Currently don't support arithmetic expressions"""
        dim_name, dim_value = Utils.clean_split(line, '=')
        dim_value = int(dim_value)  # Explicitly convert to int

        Log.log("{}: Registering dimension: {}".format(dim_name, dim_value))
        dimension = {'name': dim_name, 'value': dim_value}
        return dimension
Example #26
0
 def get_special(self, line):
     special_features = ['psd', 'nsd', 'diagonal', 'nonnegative']
     special = set()
     for special_feature in special_features:
         if special_feature in line:
             special.add(special_feature)
             Log.log("{}: Registering special behavior: {}".format(special_feature, special))
     return special
Example #27
0
 def get_plist(self, file, ios=True):
     try:
         self.plist_to_xml(file, ios=ios)
         return self.plist_to_dict(self.read_file(file, ios=ios))
     except Exception:
         Log.d('Error getting the plist {file}'.format(file=file))
         Log.d(traceback.format_exc())
         return {}
Example #28
0
    def get_url(self):
        tpl = ('http://www2.nrk.no/teksttv/'
               'index.asp?channel=1&page=%d&subpage=%d')

        url = tpl % (self.page, self.sub)
        if DEBUG:
            Log.out('nrk ttv url: %s' % url)
        return url
Example #29
0
 def next(self):
     Log.notice('PLUGIN -> Stack contains %d elements' % len(self.stack))
     for item in self.stack:
         return_object = self._iter_handler(item)
         if return_object is not None:
             yield return_object
             #item.update(return_object)
     self._post_iter()
Example #30
0
 def get_special(self, line):
     special_features = ['psd', 'nsd', 'diagonal', 'nonnegative']
     special = set()
     for special_feature in special_features:
         if special_feature in line:
             special.add(special_feature)
             Log.log("{}: Registering special behavior: {}".format(special_feature, special))
     return special
Example #31
0
 def next(self):
     Log.notice('PLUGIN -> Stack contains %d elements' % len(self.stack))
     for item in self.stack:
         return_object = self._iter_handler(item)
         if return_object is not None:
             yield return_object
             #item.update(return_object)
     self._post_iter()
Example #32
0
 def get_url(self):
     tpl = ( 'http://www2.nrk.no/teksttv/'
             'index.asp?channel=1&page=%d&subpage=%d' )
             
     url = tpl % ( self.page, self.sub )
     if DEBUG: 
         Log.out('nrk ttv url: %s' % url)
     return url
Example #33
0
 def load_compliment(self, serv, ev, helper):
     try:
         with open(self.polite_file, "r") as f:
             self.compliments = [l.strip() for l in f.readlines()]
     except:
         Log.log(Log.log_lvl.ERROR, "politebot: politefile %s can't be read" % self.polite_file)
         return False
     else:
         return True
Example #34
0
    def query_provider(self, provider, projection='', selection=''):
        if '"' in projection or '"' in selection:
            Log.d('Error: cannot query providers with "')
            return ('', '')

        projection = '--projection \\\"{projection}\\\"'.format(projection=projection) if projection else ''
        selection = '--where \\\"{selection}\\\"'.format(selection=selection) if selection else ''
        query = 'shell su -c "content query --uri \'content://{provider}\' {projection} {selection}"'.format(provider=provider, projection=projection, selection=selection)
        return self.ADB._run_on_device(query, shell=True)
Example #35
0
 def on_chatwith_room(self, data):
     roomJID = '{}@{}'.format(data['roomName'], Config._mucService)
     Log.info("发起会话", roomJID)
     if roomJID not in self._ConversationList:
         # A nickname is required to enter the chat room
         room, future = self.muc_service.join(JID.fromstr(roomJID),
                                              str(self.core.jid))
         self._ConversationList[roomJID] = room
         Log.info("进入房间", roomJID)
Example #36
0
 def reload_extension(self, name):
     Log.auto(f'reloading extension: {name}')
     old = super().get_cog(name)
     try:
         super().reload_extension(name)
     except ExtensionError as e:
         Log.error(e)
         super().add_cog(old)
         return False
     return True
Example #37
0
    def close(self):
        """
        Shutdown failbot
        """
        for p in self.enabled_plugins:
            p._on_shutdown()

        self.connection.disconnect(self.settings['quit message'])
        Log.close()
        exit(0)
Example #38
0
 def on_welcome(self, serv, ev):
     """
     On welcome, join channels sets in settings.
     """
     if self.settings['password']:
         serv.privmsg('nickserv', 'identify %s' % self.settings['password'])
     for c in self.settings['channels']:
         channel = c[0] + (' ' + c[1] if c[1] else '')
         Log.log(Log.log_lvl.INFO, 'joining %s' % channel)
         serv.join(channel)
Example #39
0
    def parse_dimension(self, line):
        """Currently don't support arithmetic expressions"""
        dim_name, dim_value = Utils.clean_split(line, '=')
        dim_value = int(dim_value)  # Explicitly convert to int

        Log.log("{}: Registering dimension: {}".format(dim_name, dim_value))
        dimension = {
            'name': dim_name,
            'value': dim_value
        }
        return dimension
Example #40
0
def main():
    parser = argparse.ArgumentParser(description='CVXGEN Python Binding Generator')
    parser.add_argument('path', metavar='path', default='./images',
                        help='Give the target path')
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Decide verbosity')
    args = parser.parse_args()
    Log.set_verbose(args.verbose)
    path = os.path.realpath(args.path)
    parsed_cvx = ParseCVX.read_file(path)
    write_text = GenCPP.make_cvx_binding(parsed_cvx)
    print write_text
Example #41
0
    def save_image_archive(self):
    
        if not self.image_archive_key:
            return
        Log.out('PLUGIN::IMAGEARCHIVER -> save with key: "%s"' % self.image_archive_key)

        if self.cache is True and self.image_archive_is_changed is True:
            Session().put_in_session_jar(
                    self.image_archive_key,
                    self.image_archive,
                    persistent = True
                )
Example #42
0
    def consume_parameter(self, line):
        # -- Array handling
        # Is it an array?
        is_array = self.is_array(line)
        if is_array is not None:

            index_var, name = is_array

            # - Check if we have an initializer, like x[0] or something
            # Required to be 't', for now
            if index_var.isdigit() or (index_var != 't'):
                is_array_initializer = True
                array_bounds = index_var
            else:
                is_array_initializer = False
                array_bounds = self.get_array_bounds(line)

            Log.log("Registering array {} with indexing variable, {}".format(name, index_var))
            if is_array_initializer:
                Log.log("{}: Is an initializer".format(name))

        # -- Not an array
        else:
            array_bounds = None
            is_array_initializer = False
            name = Utils.clean_split(line, None, maxsplit=1)[0]
            Log.log("Registering non-array {}".format(name))

        # -- Get dimensions
        dimensions = self.get_dimensions(line)
        if dimensions is None:
            _type = 'scalar'
        elif dimensions['cols'] != '1':
            _type = 'matrix'
        else:
            _type = 'vector'
        if dimensions is not None:
            Log.log("{}: Registering dimensions as {}x{}".format(name, dimensions['rows'], dimensions['cols']))
        else:
            Log.log("{}: Registering as sclar".format(name))

        special = self.get_special(line)

        parameter = {
            'name': name,
            'dimensions': dimensions,
            'array_bounds': array_bounds,
            'type': _type,
            'special': special,
            'initializer': is_array_initializer
        }
        return parameter
Example #43
0
 def _test_image(self, item):
     """ Takes care of xbmc's lack of support for thumbnails
         without a file extension. Done by getting the MIME type from the header
     """
     
     ext = self.imga.archive_image(self.state, item.thumbnail, item.key.id)
     if ext:
         match  = re.search(nrk.regex.image_identity, item.thumbnail)
         if match:
             item.thumbnail = nrk.uri.content_image(match.group('id'), ext)
             Log.debug('PLUGIN::THUMBNAIL -> new url: "%s"' % item.thumbnail)
             
     return item
Example #44
0
    def read(self, data):
        '''Parse a list of stripped lines
        TODO:
            - Also include constraints
        '''
        section = None
        sections = ['dimensions', 'parameters', 'variables', 'minimize', 'end']

        content = {
            'dimensions': [],
            'parameters': [],
            'variables': [],
        }

        section_dict = {
            'dimensions': self.parse_dimension,
            'parameters': self.parse_parameter,
            'variables': self.parse_parameter,
        }

        for l_num, dirty_line in enumerate(data):
            Log.log("On line {}".format(l_num))

            line = dirty_line.strip()
            if '#' in line:
                line = line.split('#', 1)[0]

            if line == '':
                pass

            elif line.startswith('#'):
                continue

            elif (line in sections) or (section is None):
                if line == 'end':
                    section = None
                else:
                    section = line

            elif section == 'dimensions':
                content[section].append(self.parse_dimension(line))

            elif section in ['parameters', 'variables']:
                content[section].append(self.parse_parameter(line))

            else:
                Log.warn("Unknown line {}: {}".format(l_num, line))

        return content
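For orientation, a self-contained toy version of the section switching performed by read(), run on a made-up CVXGEN-style description; the real parse_dimension/parse_parameter calls are replaced by simply collecting the raw lines.

description = """\
dimensions
  n = 4   # problem size
end
parameters
  Q (n,n) psd
end
variables
  x (n)
end
"""

section = None
sections = {'dimensions', 'parameters', 'variables'}
content = {name: [] for name in sections}

for raw in description.splitlines():
    line = raw.split('#', 1)[0].strip()   # drop comments and surrounding whitespace
    if not line:
        continue
    if line == 'end':
        section = None
    elif line in sections:
        section = line
    elif section is not None:
        content[section].append(line)

print(content)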
Example #45
0
    def make_assignment(self, cvx_var):
        if cvx_var['array_bounds'] is not None:
            # Deal with arrays of scalars, vectors and so on
            Log.error("Could not handle {}".format(cvx_var['name']))

        if cvx_var['dimension_expr'] is not None:
            text = self.make_copy_loop(cvx_var)

        elif cvx_var['dimension_expr'] is None:
            text = cvx_var['name'] 

        else:
            Log.warn("Did not handle {}".format(cvx_var['name']))

        return text
Example #46
0
    def play(self):

        item = self.item
        li = ListItem(item.title, thumbnailImage=item.thumbnail, path=item.url)
        Log.notice('PLUGIN::SPAM -> play url %s' % item.url)
        self.player = utils.PluginPlayer()
            
        if self.state.playable: 
            self.dir.resolve_url(li, True)
        else:
            self.player.play(item.url, li)
        

        if not self.session.data_objects['watched'].has_key(item.id):
            Log.notice('PLUGIN::SPAM -> mark video as watched')
            self.session.update_object('watched')[item.id] = 1
Example #47
0
    def set(self, opt, val):
        if opt == 'cmd prefix':
            self.settings[opt] = val
        elif opt == 'verbose':
            val = int(val)
            if val in range(Log.log_lvl.NONE, Log.log_lvl.DEBUG + 1):
                self.settings[opt] = val
                Log.verbosity = self.settings[opt]
            else:
                return False
        elif opt == 'log file':
            self.settings[opt] = val
            Log.setLogFile(self.settings[opt])
        else:
            return False

        return True
Example #48
0
 def on_cmd(self, serv, ev, helper):
     """
     """
     if helper['author'] in self.allowed_users and helper['cmd'] in self.cmd:
         try:
             ret = self.cmd[helper['cmd']]['func'](serv, ev, helper, *helper['args'])
             if ret == True:
                 self.respond(serv, ev, helper,  helper['author'] + ': done')
             elif ret == False:
                 self.respond(serv, ev, helper,  helper['author'] + ': something failed')
         except TypeError:
             self.respond(serv, ev, helper,  helper['author'] + ': something failed, wrong args')
         except SystemExit:
             raise
         except Exception, e:
             self.respond(serv, ev, helper,  helper['author'] + ': something failed, unknown')
             Log.log(Log.log_lvl.ERROR, 'Admin command %s failed. Catches exception %s' % (helper['cmd'], e))
             Log.log(Log.log_lvl.DEBUG, format_exc())
Example #49
0
 def _open_session_from_disk(self, key):
 
     filepath = self.get_path(key)
     
     if not os.path.isfile(filepath):
         Log.out('PLUGIN::SESSION -> no saved data. new empty session created')
         return {}
         
     try:
         file_handle = open(filepath, 'rb')
         data = pickle.load(file_handle)
         file_handle.close()
         
     except pickle.UnpicklingError:
         print 'PLUGIN::SESSION -> An error occured unpickling saved object'
         return {}
     else:
         print 'PLUGIN::SESSION -> data with key "%s" read from disk' % key
         return data
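A minimal self-contained sketch of the pickle-on-disk round trip that these session helpers rely on; the file path is a throwaway temp location chosen purely for illustration.

import os
import pickle
import tempfile

filepath = os.path.join(tempfile.gettempdir(), 'session_demo.pickle')  # illustrative path

def save_session(data, path):
    with open(path, 'wb') as file_handle:
        pickle.dump(data, file_handle)

def load_session(path):
    if not os.path.isfile(path):
        return {}                      # no saved data: start with an empty session
    try:
        with open(path, 'rb') as file_handle:
            return pickle.load(file_handle)
    except pickle.UnpicklingError:
        return {}                      # corrupted file: fall back to an empty session

save_session({'watched': {42: 1}}, filepath)
print(load_session(filepath))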
Example #50
0
    def __init__(self):
        super(self.__class__, self).__init__()
        # sync the clock on restart or at midnight
        try:
            self.base_time_diff = datetime.timedelta(0)
            self.base_host = cfg.BASE_TIME_SERVER
            self.pre_snapshot = {}
            self.adjust_day = None

            self.clock_diff = {}
            self.clock_diff[self.base_host] = datetime.timedelta(0)

            self.redis_host_set = MessageSet(cfg.AGENT_OSS_HOSTS)
            self.redis_hash_map = MessageHashMap(cfg.AGENT_OSS_CLOCK)

            self.running = True
            self.cycle = cfg.CLOCK_QUERY_SYCLE
        except:
            Log.error('error happened')
Example #51
0
    def _setup_session(self):

        if not self.session_key:
            Log.debug('Session key not set')
            self.session_key = str( hash( self.state ) )
            
        sdata = {
                'cache-time': self.settings['cache_time'], 
                'cache':      self.settings['cache_files'] and self.cache,
                'cache-path': self.cachepath,
                'key':        self.session_key
            }
            
        self.session     = Session(sdata)
        self.session_key = self.session.session_key
        
        cache = self.settings['cache_files'] and self.cache
        self.api  = nrk.Api(self.settings['connection_speed'], cache)
        self.imga = ImageArchiver(cache)
Example #52
0
    def reset_plugin(self, plug_name = None):
        """
        Reset plugins. Reload plugin module and settings.
        If plug_name is None, all plugins are reloaded
        If plug_name is a list, all plugin within this list will be reloaded.
        If plug_name is a str, just this plugin will be reloaded
        """
        try:
            reload(modules['settings.settings'])
            plug_name = [] if not plug_name else [str(plug_name)] if not isinstance(plug_name, list) else plug_name

            for plug in plug_name:
                plug_in = [ p for p in self.enabled_plugins if p.plugin_name == plug ]
                for p in plug_in:
                    p.base_init(settings.plugin_settings[plug] if plug in settings.plugin_settings else {})
        except Exception, e:
            Log.log(Log.log_lvl.ERROR, 'Failed to reset plugin %s. Catches exception %s' % (', '.join(plug_name), e))
            Log.log(Log.log_lvl.DEBUG, format_exc())

            return False
Example #53
0
 def _pre_iter(self):
     """ Actions done before iterating through the stack """
     
     if len(self.stack) == 1 and self.settings['transparency']:
         # Got only one item, so if transparent folders are enabled
         # in the settings, make it transparent..
         if ( (self.stack[0].key.type == nrk.SHOW_CLIP) and 
               self.settings('transparent_media') == False ):
             return True
              
         Log.debug('PLUGIN::SPAM -> Create transparent transparency')
         item = self.stack.pop(0)
         item.key.transparent = True
         return_object = self._iter_handler(item)
         self.state    = item.key
         self.session.swap_session(self.state.parent_session)
         self.dir.is_open = True
         self.open()
         return
     return True
Example #54
0
    def load_plugin(self, plug_name):
        """
        Load a module in plugins package.
        The module must (MUST !) have one subclass of Plugin in it.
        """
        import_string = 'from plugins import %s' % plug_name
        try:
            # don't even try to import if the plugin is already loaded
            if not 'plugins.' + plug_name in modules:
                mod = import_module('plugins.%s' % plug_name)
                candidates = []
                # Find a unique class that is subclass of Plugin in the module
                for attr in dir(mod):
                    try:
                        candidate = getattr(mod, attr)
                        if issubclass(candidate, Plugin) and candidate is not Plugin:
                            candidates.append(candidate)
                    except TypeError:
                        pass

                if len(candidates) != 1:
                    Log.log(Log.log_lvl.ERROR, 'cannot load plugin {p} : one and only one plugin class must be defined in plugins.{p}.'.format(p=plug_name))
                    return False

                self.plugins[plug_name] = candidates[0]

                return True
            else:
                Log.log(Log.log_lvl.WARNING, 'failed to load plugin %s. seems already loaded.' % plug_name)

                return False
        except ImportError:
            Log.log(Log.log_lvl.ERROR, 'Cannot load %s, module does not exist' % plug_name)
            return False
        except Exception, e:
            Log.log(Log.log_lvl.ERROR, 'Failed to load plugin %s. Catches exception %s' % (plug_name, e))
            Log.log(Log.log_lvl.DEBUG, format_exc())

            return False
Example #55
0
def execute() :
	patches = PATCHES()
	Log.warn("Starting patches!", remaining = len(patches))
	for patch in patches :
		try :
			patch.start()
		except Exception as e :
			Log.error("Patch went wrong", exception = e)
	Log.warn("All patches done.")
Example #56
0
    def disable_plugin(self, plug_name, disable_all=False):
        """
        """
        try:
            to_del = []
            i = 0

            for p in self.enabled_plugins:
                if p.plugin_name == plug_name:
                    to_del.append(i)
                    if self.plugins[plug_name].unique or not disable_all:
                        break
                i = i + 1

            to_del.reverse()

            for a in to_del:
                del self.enabled_plugins[a]

        except Exception, e:
            Log.log(Log.log_lvl.ERROR, 'Failed to disable plugin %s. Catches exception %s' % (plug_name, e))
            Log.log(Log.log_lvl.DEBUG, format_exc())

            return False
Example #57
0
    def __init__(self, settings, plugins):
        """
        """

        self.settings = {
            'nickname':settings.get('nickname', 'failbot'),
            'password':settings['password'],
            'realname':settings.get('realname', 'failbot'),
            'reconnect interval':settings.get('reconnect interval', 60),
            'quit message':settings.get('quit message', 'I\'m out.'),
            'verbose':settings.get('verbose', Log.log_lvl.ERROR),
            'log file':settings.get('log file', None),
            'server':settings['server'],
            'port':settings['port'],
            'channels':settings['channels'],
            'cmd prefix':settings.get('cmd prefix', '!'),
        }

        Log.verbosity = self.settings['verbose']
        Log.setLogFile(self.settings.get('log file', None))

        Log.log(Log.log_lvl.DEBUG, 'Settings loaded: %s' % self.settings)

        ircbot.SingleServerIRCBot.__init__(self,
                                    server_list = [(
                                        self.settings['server'],
                                        self.settings['port']
                                    )],
                                    nickname = self.settings['nickname'],
                                    realname = self.settings['realname'],
                                    reconnection_interval = self.settings['reconnect interval']
                               )
        Log.log(Log.log_lvl.INFO, 'Connection to %s:%d' % (settings['server'], settings['port']))

        # load some plugins
        for f in [ path.splitext(f)[0] for f in listdir(path.dirname(path.realpath(__file__)) + '/plugins')
                                           if f.endswith('.py') and f != '__init__.py' and f != 'plugbase.py' ]:
            if self.load_plugin(f):
                if f in plugins:
                    self.enable_plugin(f)

        Log.log(Log.log_lvl.INFO,
                    "\n".join([p[0] + (' [enabled]' if p[1] else ' [disabled]') for p in self.list_plugins()]))
Example #58
0
    def unload_plugin(self, plug_name):
        """
        """
        try:
            if 'plugins.' + plug_name in modules:
                self.disable_plugin(plug_name, disable_all = True)
                del self.plugins[plug_name]
                del modules['plugins.' + plug_name]
                Log.log(Log.log_lvl.INFO, 'plugin %s unloaded ' % plug_name)
                
                return True
            else:
                Log.log(Log.log_lvl.WARNING, 'cannot unload plugin %s. plugin not loaded' % plug_name)

                return False
        except Exception, e:
            Log.log(Log.log_lvl.ERROR, 'Failed to unload plugin %s. Catches exception %s' % (plug_name, e))
            Log.log(Log.log_lvl.DEBUG, format_exc())

            return False
Example #59
0
 def open_image_archive(self, key):
 
     self.image_archive_key = key
     self.image_archive_is_changed = False
     
     Log.out('PLUGIN::IMAGEARCHIVER -> open with key: "%s"' % key)
     
     data = Session()._checkout_session_jar(key)
     
     if data:
         Log.out('PLUGIN::IMAGEARCHIVER ->: cache found')
         self.image_archive = data
     else: 
         Log.out('PLUGIN::IMAGEARCHIVER -> Create new archive')
         self.image_archive = {}
Example #60
0
    def __init__(self, model_class=None, parameters=None, qsub=False, client=None, num_engines=None, storage_mode=None,
                 pickled_cluster_input_file=None, log_filename=None):
        """ Constructor """

        self.my_class_name = 'DistributedEnsemble'
        self.log = Log(log_filename=log_filename)

        if model_class is None and pickled_cluster_input_file is None:
            self.log.write_log("Invalid configuration. Either provide a model class object or its pickled file.",
                               logging.ERROR)
            raise MolnsUtilException("Invalid configuration. Either provide a model class object or its pickled file.")

        if model_class is not None and pickled_cluster_input_file is not None:
            self.log.write_log("Invalid configuration. Both a model class and a pickled file are provided.",
                               logging.ERROR)
            raise MolnsUtilException("Invalid configuration. Both a model class and a pickled file are provided.")

        if model_class is not None:
            self.cluster_execution = False
            self.model_class = cloudpickle.dumps(model_class)
        else:
            self.cluster_execution = True
            self.pickled_cluster_input_file = pickled_cluster_input_file

        # Not checking here for parameters = None, as they could be present in the model class.
        self.parameters = [parameters]
        self.number_of_trajectories = 0
        self.seed_base = generate_seed_base()
        self.storage_mode = storage_mode
        # A chunk list
        self.result_list = {}
        self.qsub = qsub
        self.num_engines = num_engines

        if self.qsub is False:
            # Set the Ipython.parallel client
            self._update_client(client)