Example no. 1
 def __init__(self, time, sample=None, config=None, genqueue=None, outputqueue=None, loggingqueue=None):
     # Logger already setup by config, just get an instance
     # setup default options
     self.profiler = config.profiler
     self.config = config
     self.sample = sample
     self.end = getattr(self.sample, "end", -1)
     self.endts = getattr(self.sample, "endts", None)
     self.generatorQueue = genqueue
     self.outputQueue = outputqueue
     self.time = time
     self.stopping = False
     self.countdown = 0
     self.executions = 0
     self.interval = getattr(self.sample, "interval", config.interval)
     logger.debug('Initializing timer for %s' % (sample.name if sample is not None else "None"))
     # load plugins
     if self.sample is not None:
         rater_class = self.config.getPlugin('rater.' + self.sample.rater, self.sample)
         self.rater = rater_class(self.sample)
         self.generatorPlugin = self.config.getPlugin('generator.' + self.sample.generator, self.sample)
         self.outputPlugin = self.config.getPlugin('output.' + self.sample.outputMode, self.sample)
         if self.sample.timeMultiple < 0:
             logger.error("Invalid setting for timeMultiple: {}, value should be positive".format(
                 self.sample.timeMultiple))
         elif self.sample.timeMultiple != 1:
             self.interval = self.sample.interval
             logger.debug("Adjusting interval {} with timeMultiple {}, new interval: {}".format(
                 self.sample.interval, self.sample.timeMultiple, self.interval))
         logger.info(
             "Starting '%s' generatorWorkers for sample '%s'" % (self.sample.config.generatorWorkers, self.sample.name))
Example no. 2
 def send_message(self, message):
     logger.info("[MQ] ready to send message, queue: %s, message: %s" %
                 (settings.ALIYUN_MNS_QUEUE_NAME, message))
     self.connection.lpush("MQ:LOCAL:" + settings.ALIYUN_MNS_QUEUE_NAME,
                           message)
     logger.info("[MQ] push message to queue % success, message: %s" %
                 (settings.ALIYUN_MNS_QUEUE_NAME, message))
 def run(self):
     logger.info('{} start'.format(self.name))
     # Compute obstacles in 120 directions
     self.CalcDirection()
     # Save the data as a .npy file
     self.SaveNumpyByNpy()
     logger.info('{} end'.format(self.name))
Example no. 4
    def next_generation_models(self):
        self.current_generation += 1
        logger.info('============================================\n' +
                    f'Generation {self.current_generation}\n' +
                    '============================================')
        # Elite Selection
        elite = self.elite_model(self.current_generation - 1)
        next_gen_models = [elite]

        # slowest Training Time changes
        n = self.number_of_models_tobe_changed_based_on_training_time
        slow_models = self.top_n_slowest_models(self.current_generation - 1, n)
        for i, slow_model in enumerate(slow_models):
            new_model = CNN.change_for_slow_training_time(slow_model)
            new_model = CNN.change_name_to(new_model, f'model_gen{self.current_generation}_{i}')
            next_gen_models.append(new_model)

        # Fix Under-fitting and over-fitting for the rest
        prev_gen_model_names = {model['name'] for model in self.current_generation_models}
        elite_names = {elite['name']}
        slow_model_names = {model['name'] for model in slow_models}
        under_fitted_models = list(prev_gen_model_names - elite_names - slow_model_names)
        # Offset the index so the new names don't collide with the slow-model renames above
        for i, prev_gen_model in enumerate(under_fitted_models, start=len(slow_models)):
            model_hp = self.models[prev_gen_model]
            if self.metrics.loc[prev_gen_model, 'over-fit'] == 1:
                new_model = CNN.change_for_over_fit(model_hp, self.input_shape)
            else:
                new_model = CNN.change_for_under_fitting(model_hp, self.input_shape, self.output_size)
            new_model = CNN.change_name_to(new_model, f'model_gen{self.current_generation}_{i}')
            next_gen_models.append(new_model)

        # Run the New Generation models
        self.current_generation_models = next_gen_models
        self.train_current_generation()
Example no. 5
    def train_current_generation(self):
        logger.info(f'Training generation {self.current_generation}')
        for model in self.current_generation_models:
            model_name = model['name']
            logger.info(f'Training model {model_name}.')
            try:
                model_runs = [CNN(model, verbose=1) for _ in range(self.model_reruns)]
            except Exception as error:
                logger.error(error)
                # revert Changes
                prev_model = model['prev_model']
                model = self.models[prev_model]
                model = CNN.add_change_log(model, f'Reverted to model {prev_model} due to an exception on training.')
                model_name = model['name']
                model_runs = [CNN(model, verbose=1) for _ in range(self.model_reruns)]

            logger.info(f'Training model {model_name} completed')
            # Keep the worst accuracy and the slowest training time across reruns
            self.metrics.loc[model_name, 'test_Accuracy'] = np.min([cnn.accuracy[0] for cnn in model_runs])
            self.metrics.loc[model_name, 'train_Accuracy'] = np.min([cnn.accuracy[1] for cnn in model_runs])
            self.metrics.loc[model_name, 'training_time'] = np.max([cnn.Training_time for cnn in model_runs])
            self.metrics.loc[model_name, 'over-fit'] = np.any([cnn.is_over_fitted for cnn in model_runs])
            self.metrics.loc[model_name, 'prev_model'] = model['prev_model']
            self.metrics.loc[model_name, 'generation'] = self.current_generation
            model['layers_input_output_shape'] = [ f'layer.name: {layer.input_shape} --- {layer.output_shape}'
                                                  for layer in model_runs[0].model.layers]
            self.save_model(model)
            logger.info(f'Performance results for {model_name}:-\n{self.metrics.loc[model_name, :]}')
        logger.info(f'Generation {self.current_generation} Training completed.\n------------------\n')
Example no. 6
 def _increase_failed_times(self):
     login_failed_times_key = LOGIN_FAILED_TIMES_FORMAT.format(
         self.form.username.data)
     self.connection.incr(login_failed_times_key)
     self.connection.expire(login_failed_times_key,
                            settings.LOGIN_FAILED_EXPIRED_SECONDS)
     logger.info(u"{} login failed. source: {}".format(
         self.form.username.data, self.form.source.data))
 def print_diversity(self):
     diversity = []
     for gene in genes:
         total = 0
         for i in range(self.population_size):
             total += self.population[i].dict_genes[gene]
         diversity.append(total / self.population_size)
     logger.info(f"Average value for every gene: {diversity}")
Example no. 8
 def flush(self, q):
     self.s.connect(
         (self._tcpDestinationHost, int(self._tcpDestinationPort)))
     logger.info("Socket connected to {0}:{1}".format(
         self._tcpDestinationHost, self._tcpDestinationPort))
      for x in q:
          # sendall blocks until the whole payload is written (send may stop short)
          self.s.sendall(x['_raw'].rstrip() + '\n')
     self.s.close()
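On Python 3 a socket sends bytes, not str. A sketch of an encoding-safe variant of the method above (assuming '_raw' holds a str):

# Python 3-safe sketch of the flush method above: encode to bytes and
# use sendall, which retries until the whole payload is written.
def flush(self, q):
    self.s.connect((self._tcpDestinationHost, int(self._tcpDestinationPort)))
    try:
        for x in q:
            payload = x['_raw'].rstrip() + '\n'
            self.s.sendall(payload.encode('utf-8'))
    finally:
        self.s.close()  # close the socket even if sendall raises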
Example no. 9
    def _validateSeed(self, value):
        """Callback to set random seed"""
        logger.debug("Validating random seed {}".format(value))
        try:
            value = int(value)
        except (TypeError, ValueError):
            logger.error("Could not parse int for seed {}".format(value))
            raise ValueError("Could not parse int for seed {}".format(value))

        logger.info("Using random seed {}".format(value))
        random.seed(value)
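Seeding makes the generator reproducible, which is the point of this callback. A quick usage sketch:

import random

# The same seed always reproduces the same sequence of values.
random.seed(1234)
first_run = [random.random() for _ in range(3)]
random.seed(1234)
second_run = [random.random() for _ in range(3)]
assert first_run == second_run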
Example no. 10
def push_to_hub(repo=None):
    if repo is None:
        return
    try:
        doc.login(username=doc_user, password=doc_pass, email='*****@*****.**')
        response = [line for line in doc.push(repo, stream=True, insecure_registry=True)]
        if not response:
            logger.info('Pushing to Docker Hub might not have completed successfully.')
        else:
            logger.info(response)
    except Exception as err:
        logger.error('Pushing to Docker Hub failed with error: %s', err)
Example no. 11
 def _success(self, user):
     user_info = {
         'login_name': user.login_name,
         'mobile': user.mobile,
         'roles': [role.role for role in user.roles]
     }
     new_token_id = self.session_manager.set(user_info,
                                             self.form.token.data)
     logger.info(
         u"{} login successful. source: {}, token_id: {}, user_info: {}".
         format(self.form.username.data, self.form.source.data,
                new_token_id, user_info))
     return new_token_id, user_info
Example no. 12
 def send_message(self, message):
     # send some messages
     logger.info("[MQ] ready to send message, queue: %s, message: %s" %
                 (settings.ALIYUN_MNS_QUEUE_NAME, message))
     try:
         msg = Message(message)
         re_msg = self.my_queue.send_message(msg)
         logger.info(
             "[MQ] send message success, queue: %s, MessageID: %s, message: %s"
             % (settings.ALIYUN_MNS_QUEUE_NAME, re_msg.message_id, message))
     except MNSExceptionBase as e:
         if e.type == "QueueNotExist":
             logger.error(
                 "[MQ] Send Message Fail! Queue not exist, queue name:%s" %
                 settings.ALIYUN_MNS_QUEUE_NAME)
         logger.error("[MQ] Send Message Fail! Exception:%s" % e)
Example no. 13
def overflow(command=None, text=None):
    """

    :param command:
    :param text:
    :return:
    """
    if command is None or text is None:
        return False

    if '/overflow' == command:

        try:
            qs = so.search(intitle=text, sort=Sort.Votes, order=DESC)
        except UnicodeEncodeError:
            return dict(msg='Only English language is supported. %s is not valid input.' % text,
                        content_type='text/plain; charset=utf-8')

        resp_qs = ['Stack Overflow Top Questions for "%s"\n' % text]
        resp_qs.extend(map(get_response_string, qs[:MAX_QUESTIONS]))

        if len(resp_qs) == 1:
            resp_qs.append(('No questions found. Please try a broader search or '
                            'search directly on '
                            '<https://stackoverflow.com|StackOverflow>.'))

        res = dict(msg='\n'.join(resp_qs), content_type='text/plain; charset=utf-8')

    elif '/todo' == command:

        repo = gh.repository('lots0logs', 'compi')
        res = repo.issues(labels='feature', state='open')
        issues = []
        for i in res:
            issue_str = ':slack: <%s|%s>' % (i.html_url, i.title)
            issues.append(issue_str)

        logger.info(issues)

        resp_qs = []
        resp_qs.extend(issues)
        resp_qs.reverse()
        resp_qs.insert(0, '*Feature Roadmap For Compi*\n')

        res = dict(msg='\n'.join(resp_qs), content_type='text/plain; charset=utf-8')

    return res
Example no. 14
def main(args):
    mydb = mysql.connector.connect(host="localhost",
                                   user=args.user,
                                   passwd=args.passwd,
                                   database="starwar")

    mycursor = mydb.cursor()

    with open(args.answer_shapes_path, 'r') as fin:
        answer_shapes = json.load(fin)

    result_shapes = {}
    shape_correct = 0
    error = 0
    with open(args.file_path, 'r') as fin:
        lines = fin.readlines()
        for i, line in enumerate(lines):
            try:
                mycursor.execute(line)
            except Exception as err:
                error += 1
                logger.error(err, exc_info=True)
            else:
                results = mycursor.fetchall()
                for x in results:
                    logger.info(x)
                result_shapes[i + 1] = np.array(results).shape

                result_shape = np.array(results).shape
                answer_shape = answer_shapes[f"{i+1}"]
                if answer_shape != list(result_shape):
                    logger.error(
                        f"Question {i+1} shape not match: "
                        f"yours: {result_shape} / ans: {answer_shape}")
                else:
                    shape_correct += 1
                    logger.info(f"Question {i+1} shape correct")

    logger.info("-------------------------------------------------------")
    logger.info(f"Shape correct: {shape_correct} / {len(answer_shapes)}")
    logger.info(f"Error: {error} / {len(answer_shapes)}")
    logger.warning("Note that this checker only checks the shape."
                   "Your answer may still be wrong.")
    logger.warning("The answer is not guaranteed to be correct as well; "
                   "open a issue if you think the answer shape is incorrect.")
Example no. 15
    def _buildConfDict(self):
        """Build configuration dictionary that we will use """

        # Abstracts grabbing configuration from Splunk or directly from Configuration Files
        if self.splunkEmbedded and not STANDALONE:
            logger.info('Retrieving eventgen configurations from /configs/eventgen')
            import splunk.entity as entity
            self._confDict = entity.getEntities('configs/conf-eventgen', count=-1, sessionKey=self.sessionKey)
        else:
            logger.info('Retrieving eventgen configurations with ConfigParser()')
            # We assume we're in a bin directory and that there are default and local directories
            conf = ConfigParser()
            # Make case sensitive
            conf.optionxform = str
            conffiles = []
            # 2/1/15 CS  Moving to argparse way of grabbing command line parameters
            if self.configfile:
                if os.path.exists(self.configfile):
                    # 2/1/15 CS Adding a check to see whether we're instead passed a directory
                    # In which case we'll assume it's a splunk app and look for config files in
                    # default and local
                    if os.path.isdir(self.configfile):
                        conffiles = [
                            os.path.join(self.grandparentdir, 'default', 'eventgen.conf'),
                            os.path.join(self.configfile, 'default', 'eventgen.conf'),
                            os.path.join(self.configfile, 'local', 'eventgen.conf')]
                    else:
                        conffiles = [os.path.join(self.grandparentdir, 'default', 'eventgen.conf'), self.configfile]
            if len(conffiles) == 0:
                conffiles = [
                    os.path.join(self.grandparentdir, 'default', 'eventgen.conf'),
                    os.path.join(self.grandparentdir, 'local', 'eventgen.conf')]

            logger.debug('Reading configuration files for non-splunkembedded: %s' % conffiles)
            conf.read(conffiles)

            sections = conf.sections()
            ret = {}
            for section in sections:
                ret[section] = dict(conf.items(section))
                # For compatibility with Splunk's configs, need to add the app name to an eai:acl key
                ret[section]['eai:acl'] = {'app': self.grandparentdir.split(os.sep)[-1]}
            self._confDict = ret

        logger.debug("ConfDict returned %s" % pprint.pformat(dict(self._confDict)))
Example no. 16
def grade():
    '''
    Get test results of all students in src/students/
    '''

    # Get path of this file
    dir_path = os.path.dirname(os.path.realpath(__file__))

    # Save results to a dict
    results = {}

    student_ids = os.listdir(os.path.join(dir_path, 'students'))
    student_ids = [x[:-3] for x in student_ids if x[-3:] == '.py']
    for student_id in student_ids:
        student_result = {}
        student_module = None
        try:
            student_module = importlib.import_module(
                f'students.{student_id}')  # NOQA
        except Exception as err:
            logger.info(err, exc_info=True)
            student_result['import'] = "Failed"
        else:
            student_result['import'] = "Success"

        # Check each task
        for task_id in range(1, TASK_NUM + 1):
            logger.info(f"Testing {student_id} Task {task_id}")
            try:
                eval(f"student_module.task_{task_id}()")
            except Exception as err:
                logger.error(err, exc_info=True)
                student_result[f"task_{task_id}"] = "WA"
            else:
                student_result[f"task_{task_id}"] = "AC"

        # Check flake8
        style_guide = flake8.get_style_guide()
        student_file = os.path.join(dir_path, 'students', student_id + '.py')
        report = style_guide.check_files([student_file])
        if (report.get_statistics('E') == []
                and report.get_statistics('W') == []):
            student_result['flake8'] = "Pass"
        else:
            # Log the offending statistics only when the check fails
            logger.info(report.get_statistics('E'))
            logger.info(report.get_statistics('W'))
            student_result['flake8'] = "Fail"
        results[student_id] = student_result
    return {
        "results": results,
        "task_num": TASK_NUM,
        "student_num": len(student_ids)
    }
def play_game_with_gen(dict_genes, engine):
    engine.clear()
    sl = 0
    for t in count():
        actions_name, placement, actions = genetic_agent.select_action(
            engine, engine.shape, engine.anchor, engine.board, dict_genes)
        # Observations
        state, reward, done, cleared_lines, sent_lines = engine.step_to_final(
            actions)
        # Perform one step of the optimization (on the target network)
        sl += sent_lines
        logger.info(engine)
        logger.info(f"Sent lines: {sl}")
        time.sleep(.1)
        if done:
            break
    logger.info("")
    logger.info("")
    logger.info("")
Example no. 18
def sign_packages(pkgname=None):
    """

    :param pkgname:
    :return:
    """
    if pkgname:
        db.publish('build-output', 'Signing package..')
        pkgs2sign = glob.glob(
            '/srv/antergos.info/repo/iso/testing/uefi/antergos-staging/x86_64/%s-***.xz' % pkgname)
        pkgs2sign32 = glob.glob(
            '/srv/antergos.info/repo/iso/testing/uefi/antergos-staging/i686/%s-***.xz' % pkgname)
        pkgs2sign = pkgs2sign + pkgs2sign32
        logger.info('[PKGS TO SIGN] %s' % pkgs2sign)

        if pkgs2sign:
            return batch_sign(pkgs2sign)

    return False
Example no. 19
def autograde(student_id, tasks):
    ''' Grade tasks specified in args.'''
    # Get path of this file
    dir_path = os.path.dirname(os.path.realpath(__file__))

    # Check if student's file exists
    student_file = os.path.join(dir_path, 'students', student_id + '.py')
    assert os.path.exists(student_file), f"{student_file} does not exist"

    # Import student's file as module
    student_module = importlib.import_module(f'students.{student_id}')  # NOQA

    # Run each task
    for task_id in tasks:
        logger.info(f"Testing Task {task_id}")
        # Use try-except to catch errors in order to run through all tasks
        try:
            eval(f"student_module.task_{task_id}()")
        except Exception as err:
            logger.error(err, exc_info=True)
Example no. 20
def get_data_and_ans_paths():
    public_data_filename = os.path.join(DIR_PATH, 'test_data',
                                        'public_data.yaml')
    public_ans_filename = os.path.join(DIR_PATH, 'test_data',
                                       'public_answers.yaml')

    private_data_filename = os.path.join(DIR_PATH, 'test_data',
                                         'private_data.yaml')
    private_ans_filename = os.path.join(DIR_PATH, 'test_data',
                                        'private_answers.yaml')

    # Download private data
    try:
        private_data_url = os.environ.get('PRIVATE_DATA_URL')
        urllib.request.urlretrieve(private_data_url, private_data_filename)
        private_ans_url = os.environ.get('PRIVATE_ANS_URL')
        urllib.request.urlretrieve(private_ans_url, private_ans_filename)
    except Exception as err:
        logger.info(err, exc_info=True)

    return (public_data_filename, public_ans_filename, private_data_filename,
            private_ans_filename)
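os.environ.get returns None for unset variables, and urlretrieve(None, ...) then raises, which the broad try/except above silently absorbs. An explicit guard (a sketch) states the intent:

# Sketch: skip the private-data download explicitly when the env vars
# are unset, instead of letting urlretrieve fail on a None URL.
private_data_url = os.environ.get('PRIVATE_DATA_URL')
private_ans_url = os.environ.get('PRIVATE_ANS_URL')
if private_data_url and private_ans_url:
    urllib.request.urlretrieve(private_data_url, private_data_filename)
    urllib.request.urlretrieve(private_ans_url, private_ans_filename)
else:
    logger.info("PRIVATE_DATA_URL / PRIVATE_ANS_URL not set; skipping private data")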
Example no. 21
    def play_game(self):
        # Initialization
        self.setup()

        game_over = False
        while time.time() - self.start_time < self.game_time and not game_over:
            try:
                self.update_screen()
            except Exception:
                # Debug aid: drop into the debugger if the screen update fails
                import pdb
                pdb.set_trace()
            game_over = self.update_engines()

        winner, max_score = self.compare_score()
        self.update_stats(winner)
        if not self.use_gui:
            self.stdscr.clear()
            self.stdscr.addstr(
                f'Game Over, winner: {winner}, States: {self.engine_states}\n')
        else:
            logger.info(f"Winner: {winner}")
            logger.info(f"States: {self.engine_states}")

        return winner
Example no. 22
def grade():
    '''
    Get test results of all students in src/students/
    '''
    # Save results to a dict
    results = {}

    student_ids = os.listdir(os.path.join(DIR_PATH, 'students'))
    student_ids = [x[:-3] for x in student_ids if x[-3:] == '.py']
    for student_id in student_ids:
        student_result = {}

        (public_data_filename, public_ans_filename, private_data_filename,
         private_ans_filename) = get_data_and_ans_paths()
        # Test public data
        try:
            logger.info("Testing public data")
            student_result['public_scores'] = autograde(
                student_id, range(1, TASK_NUM + 1), public_data_filename,
                public_ans_filename)
            student_result['import'] = "Success"
        except Exception as err:
            logger.info(err, exc_info=True)
            student_result['import'] = "Failed"

        # Test private data
        try:
            logger.info("Testing private data")
            student_result['private_scores'] = autograde(
                student_id, range(1, TASK_NUM + 1), private_data_filename,
                private_ans_filename)
        except Exception as err:
            logger.info(err, exc_info=True)

        # Check flake8
        student_file = os.path.join(DIR_PATH, 'students', student_id + '.py')
        student_result['flake8'] = check_flake8(student_file)

        # Add to all results
        results[student_id] = student_result
    return {
        "results": results,
        "task_num": TASK_NUM,
        "student_num": len(student_ids)
    }
Example no. 23
def batch_sign(paths, uid=gpg_key, passphrase=password, is_iso=False):
    """
    Batch sign several files with the key matching the given UID.

    If no passphrase is given then the user is prompted for one.

    The passphrase is returned to avoid further prompts.
    :param paths:
    :param uid:
    :param passphrase:
    """
    if not isinstance(paths, list):
        logger.error('paths must be a list')
        return False
    for path in paths:
        db.publish('build-output', 'Creating detached signature for %s' % path)
        logger.info('[SIGN PKG] Creating detached signature for %s' % path)
        # Verify existing signatures. This fails if the sig is invalid or
        # non-existent. Either way a new one will be needed.
        cmd = [GPG_BIN, '--verify', path + SIG_EXT]
        with open(os.devnull, 'w') as f:
            p = subprocess.Popen(cmd, stdout=f, stderr=f)
            e = p.wait()
            if e == 0:
                continue

        sigpath = path + SIG_EXT
        try:
            os.remove(sigpath)
        except OSError:
            pass

        db.publish('build-output', 'Signing %s' % path)
        logger.info('[SIGN PKG] Signing %s' % path)
        if not passphrase:
            return False
            # passphrase = getpass.getpass("Enter passphrase for %s: " % uid).encode('utf-8')
        cmd = [GPG_BIN, '-sbu', 'Antergos', '--batch', '--passphrase-fd', '0', path]
        p = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        out, err = p.communicate(passphrase)
        if len(out) > 0:
            db.publish('build-output', 'GPG OUTPUT is: %s' % out)
            logger.info('GPG OUTPUT is: %s' % out)
        if len(err) > 0:
            db.publish('build-output', 'Signing FAILED for %s. Error output: %s' % (path, err))
            logger.error('[SIGN PKG] Signing FAILED for %s. Error output: %s' % (path, err))
            paths = [p for p in paths if not os.path.isdir(p) and not is_iso]
            for p in paths:
                remove(p)
                remove(p + '.sig')
            return False

    return True
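The verify step shells out to gpg and only inspects the exit code. Factoring it into a helper (a sketch using subprocess.run, available on Python 3.5+) keeps the main loop shorter:

import subprocess

def sig_is_valid(path, sig_ext='.sig'):
    # True iff gpg accepts the existing detached signature.
    # Assumes the gpg binary is on PATH (GPG_BIN in the example above).
    result = subprocess.run(['gpg', '--verify', path + sig_ext],
                            stdout=subprocess.DEVNULL,
                            stderr=subprocess.DEVNULL)
    return result.returncode == 0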
Example no. 24
def check_flake8(filename):
    style_guide = flake8.get_style_guide()
    report = style_guide.check_files([filename])
    score = 0
    if report.get_statistics('E') == []:
        score += 4
        if report.get_statistics('W') == []:
            score += 3
        else:
            logger.info(report.get_statistics('W'))
        if report.get_statistics('F') == []:
            score += 3
        else:
            logger.info(report.get_statistics('F'))
    else:
        logger.info(report.get_statistics('E'))
    return score
Example no. 25
def check_flake8(filename):
    style_guide = flake8.get_style_guide()
    report = style_guide.check_files([filename])
    score = 0
    if report.get_statistics('E') == []:
        score += 3
    else:
        logger.info(report.get_statistics('E'))
    if report.get_statistics('W') == []:
        score += 2
    else:
        logger.info(report.get_statistics('W'))
    if report.get_statistics('F') == []:
        score += 5
    else:
        logger.info(report.get_statistics('F'))
    return score
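Both variants assume flake8 here is flake8's legacy scripting API module. A usage sketch (the file path is hypothetical):

# Usage sketch for check_flake8, via flake8's legacy API.
from flake8.api import legacy as flake8

score = check_flake8('students/sample.py')  # hypothetical path
print(f'style score: {score}')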
#!/usr/bin/env python

import bz2
from collections import defaultdict
import operator
import xml.etree.ElementTree as ET

from logging_config import logger

if __name__ == "__main__":
    fname = "data/enwiki-latest-pages-articles1.xml-p000000010p000010000-shortened.bz2"

    f = bz2.BZ2File(fname)

    tree = ET.parse(f)
    root = tree.getroot()
    
    seen = defaultdict(lambda: 0)

    namespaces = {'xmlns': 'http://www.mediawiki.org/xml/export-0.8/'}
    for title in root.findall('xmlns:page/xmlns:revision/xmlns:contributor/xmlns:username', namespaces=namespaces):
        seen[title.text] += 1

    for key, value in sorted(seen.items(), key=operator.itemgetter(1)):
        logger.info("%s : %s" % (key, value))
Example no. 27
from flask import Flask
from flask_jsonrpc import JSONRPC
from flask_sqlalchemy import SQLAlchemy
from flask_redis import FlaskRedis
#from celery import Celery
from datetime import timedelta
import os
import config
from logging_config import logger

config_model = 'development'
if os.environ.get('production', None) is not None:
    config_model = 'production'
elif os.environ.get('testing', None) is not None:
    config_model = 'testing'

logger.info('current config model is %s' % config_model)

app = Flask(__name__)
app.config.from_object(config.config[config_model])

redis_store = FlaskRedis(app)


@app.route('/')
def hello_world():
    return 'Hello World Cup 2018!'


static_dir = os.path.join(os.path.dirname(__file__), 'dist')

@app.route('/world_cup/<path>')
#!/usr/bin/env python

import bz2
from xml.dom import minidom

from logging_config import logger

if __name__ == "__main__":
    fname = "data/enwiki-latest-pages-articles1.xml-p000000010p000010000-shortened.bz2"

    f = bz2.BZ2File(fname)

    doc = minidom.parse(f)

    for element in doc.getElementsByTagName("title"):

        text_node = element.childNodes[0]
        logger.info(text_node.data)
 def endDocument(self):
     for k, v in sorted(self.SEEN.items(), key=lambda x: x[1]):
         logger.info("%s: %d" % (k, v))
#!/usr/bin/env python

import bz2
from lxml import etree

from logging_config import logger

if __name__ == "__main__":
    fname = "data/enwiki-latest-pages-articles1.xml-p000000010p000010000-shortened.bz2"

    f = bz2.BZ2File(fname)

    doc = etree.parse(f)
    
    for element in doc.findall("{http://www.mediawiki.org/xml/export-0.8/}page/{http://www.mediawiki.org/xml/export-0.8/}title"):
        
        logger.info(element.text)

 def characters(self, content):
     if self.IN_TITLE:
         logger.info(content)
 def evolve_the_beasts(self):
     self.population.calc_fitness_prob()
     logger.info(f"Generation {self.population.current_generation}")
     logger.info(f"Max fitness: {self.population.max_fitness}")
     logger.info(
         f"Max lines sent from best: {self.population.best.sent_lines}")
     logger.info(f"Best child: {self.population.best}")
     logger.info(f"Average fitness: {self.population.get_avg_fitness()}")
     self.population.print_diversity()
     for i in range(self.num_generations):
         self.population.generate_next_generation()
         self.population.calc_fitness_prob()
         logger.info(f"Generation {self.population.current_generation}")
         logger.info(f"Max fitness: {self.population.max_fitness}")
         logger.info(
             f"Max lines sent from best: {self.population.best.sent_lines}")
         logger.info(f"Best child: {self.population.best}")
         logger.info(
             f"Average fitness: {self.population.get_avg_fitness()}")
         self.population.print_diversity()
     with open("best_genes.txt", "a") as fh:
         fh.write(str(self.population.best.dict_genes))
     time.sleep(5)
     play_game_with_gen(self.population.best.dict_genes, self.engine)
Example no. 34
        except Exception as err:
            points[task_id] = 0
            logger.error(err, exc_info=True)
    logger.info(f"TaskID/Points {points}")
    return points


if __name__ == '__main__':
    args = parse_args()
    test_data_filename = os.path.join(
        DIR_PATH, 'test_data', 'public_data.yaml')
    test_answers_filename = os.path.join(
        DIR_PATH, 'test_data', 'public_answers.yaml')
    if args.all:
        student_ids = os.listdir(os.path.join(DIR_PATH, 'students'))
        student_ids = [
            x[:-3] for x in student_ids if x[-3:] == '.py' and
            'sample' not in x
        ]
        for student_id in student_ids:
            logger.info(f"Student {student_id}")
            autograde(
                student_id, args.tasks,
                test_data_filename, test_answers_filename
            )
    else:
        autograde(
            args.student_id, args.tasks,
            test_data_filename, test_answers_filename
        )
#!/usr/bin/env python

import bz2
import xml.etree.ElementTree as ET

from logging_config import logger


if __name__ == "__main__":
    fname = "data/enwiki-latest-pages-articles1.xml-p000000010p000010000-shortened.bz2"

    f = bz2.BZ2File(fname)

    tree = ET.parse(f)
    root = tree.getroot()

    # for title in root.findall('{http://www.mediawiki.org/xml/export-0.8/}page/{http://www.mediawiki.org/xml/export-0.8/}title'):
    # or,
    namespaces = {'xmlns': 'http://www.mediawiki.org/xml/export-0.8/'}
    for title in root.findall('xmlns:page/xmlns:title', namespaces=namespaces):
        logger.info(title.text)
Example no. 36
def autograde(student_id, tasks, test_data_filename, test_answers_filename):
    ''' Grade tasks specified in args.'''
    # Get path of this file

    # Check if student's file exists
    student_file = os.path.join(DIR_PATH, 'students', student_id + '.py')
    assert os.path.exists(student_file), f"{student_file} does not exist"

    # Import student's file as module
    student_module = importlib.import_module(f'students.{student_id}')  # NOQA

    # Load testing data
    test_data = parse_yaml(test_data_filename)

    # Load testing answers
    test_answers = parse_yaml(test_answers_filename)

    # Run each task
    points = {}
    for task_id in tasks:
        logger.info(f"Testing Task {task_id}")
        # Use try-except to catch errors in order to run through all tasks
        try:
            # This part is a bit dirty. If you have a better way, send a PR to
            # improve!
            if task_id == 7:
                time = strftime("%Y-%m-%d %H:%M:%S", gmtime())
                student = student_module.task_7(student_id, time)
                assert student.student_id == student_id
                assert student.time == time
                assert student.words_to_say != "initial value"
                points[task_id] = test_answers[task_id]['points']
            elif task_id == 8:
                image = student_module.task_8()
                # crude check that the returned object is a PIL image class
                assert str(type(image))[8:11] == 'PIL', type(image)
                points[task_id] = test_answers[task_id]['points']
            else:
                result = eval(
                    f"student_module.task_{task_id}(**{test_data[task_id]})")
                # No check
                if test_answers[task_id]['check'] == 0:
                    points[task_id] = test_answers[task_id]['points']
                # Check equality
                elif test_answers[task_id]['check'] == 1:
                    if result == test_answers[task_id]['answer']:
                        points[task_id] = test_answers[task_id]['points']
                    else:
                        logger.error(f"Your result {result}")
                        logger.error("is different from ")
                        logger.error(f"{test_answers[task_id]['answer']}")
                        points[task_id] = 0
                # Check list equality
                elif test_answers[task_id]['check'] == 2:
                    if set(result) == set(test_answers[task_id]['answer']):
                        points[task_id] = test_answers[task_id]['points']
                    else:
                        logger.error(f"Your result {result}")
                        logger.error("is different from ")
                        logger.error(f"{test_answers[task_id]['answer']}")
                        points[task_id] = 0
                # Other checks (should not be processed here)
                else:
                    points[task_id] = None

        except Exception as err:
            points[task_id] = 0
            logger.error(err, exc_info=True)
    logger.info(f"TaskID/Points {points}")
    return points
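parse_yaml is referenced but not shown in these examples; a plausible implementation (a sketch assuming PyYAML) is a thin safe_load wrapper:

# Sketch of the parse_yaml helper assumed above, using PyYAML.
import yaml

def parse_yaml(filename):
    with open(filename) as f:
        return yaml.safe_load(f)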
Example no. 37
def autograde(student_id, tasks, test_data_filename, test_answers_filename):
    ''' Grade tasks specified in args.'''
    # Get path of this file

    # Check if student's file exists
    student_file = os.path.join(DIR_PATH, 'students', student_id + '.py')
    assert os.path.exists(student_file), f"{student_file} does not exist"

    # Import student's file as module
    student_module = importlib.import_module(f'students.{student_id}')  # NOQA

    # Load testing data
    test_data = parse_yaml(test_data_filename)

    # Load testing answers
    test_answers = parse_yaml(test_answers_filename)

    # Run each task
    points = {}
    for task_id in tasks:
        logger.info(f"Testing Task {task_id}")
        # Use try-except to catch errors in order to run through all tasks
        try:
            # This part is a bit dirty. If you have a better way, send a PR to
            # improve!
            if task_id == 6:
                answer = test_answers[task_id]['answer']
                result = eval(
                    f"student_module.task_{task_id}(**{test_data[task_id]})")
                if utils.floating_judge(result, answer):
                    points[task_id] = test_answers[task_id]['points']
                else:
                    logger.error(f"Your result {result}")
                    logger.error(f"is different from ")
                    logger.error(f"{test_answers[task_id]['answer']}")
                    points[task_id] = 0
            elif task_id == 7:
                time = strftime("%Y-%m-%d %H:%M:%S", gmtime())
                student = student_module.task_7(student_id, time)
                assert student.student_id == student_id
                assert student.time == time
                assert student.words_to_say != "initial value"
                points[task_id] = test_answers[task_id]['points']
            elif task_id == 8:
                image = student_module.task_8()
                # crude check that the returned object is a PIL image class
                assert str(type(image))[8:11] == 'PIL', type(image)
                points[task_id] = test_answers[task_id]['points']
            else:
                result = eval(
                    f"student_module.task_{task_id}(**{test_data[task_id]})")
                # No check
                if test_answers[task_id]['check'] == 0:
                    points[task_id] = test_answers[task_id]['points']
                # Check equality
                elif test_answers[task_id]['check'] == 1:
                    if result == test_answers[task_id]['answer']:
                        points[task_id] = test_answers[task_id]['points']
                    else:
                        logger.error(f"Your result {result}")
                        logger.error("is different from ")
                        logger.error(f"{test_answers[task_id]['answer']}")
                        points[task_id] = 0
                # Check list equality
                elif test_answers[task_id]['check'] == 2:
                    if set(result) == set(test_answers[task_id]['answer']):
                        points[task_id] = test_answers[task_id]['points']
                    else:
                        logger.error(f"Your result {result}")
                        logger.error("is different from ")
                        logger.error(f"{test_answers[task_id]['answer']}")
                        points[task_id] = 0
                # Other checks (should not be processed here)
                else:
                    points[task_id] = None

        except Exception as err:
            points[task_id] = 0
            logger.error(err, exc_info=True)
    logger.info(f"TaskID/Points {points}")
    return points
Example no. 38
def direct_crawl_eth_token_deposits():
    addresses = get_eth_token_deposit_address()
    logger.debug("eth addresses count: %d" % len(addresses))
    last_crawled_eth_block_height = get_last_crawled_eth_block_height()
    start_blockheight = 0 if last_crawled_eth_block_height is None else last_crawled_eth_block_height
    api_key = app.config['ETHERSCAN_API_KEY']
    contract_addr = app.config['BLOCKLINK_ERC20_CONTRACT_ADDRESS']
    assert contract_addr is not None and len(contract_addr) > 0
    min_confirmations = 5
    latest_height = get_eth_latest_block_height()
    has_error = False

    for address in addresses:
        token_txs_api_url = "http://api.etherscan.io/api?module=account&action=tokentx&address=%s&startblock=%d&endblock=%d&sort=asc&apikey=%s" % (
            address, start_blockheight, latest_height, api_key)
        try:
            res = requests.get(token_txs_api_url).json()
            if str(res.get('status', None)) == '0' and res.get(
                    'message') == 'No transactions found':
                continue
            if int(res['status']) == 1:
                txs = res['result']
                for tx in txs:
                    tx_block_number = int(tx['blockNumber'])
                    tx_timestamp = datetime.datetime.fromtimestamp(
                        int(tx['timeStamp']))
                    tx_hash = tx['hash'].lower()
                    nonce = tx['nonce']
                    block_hash = tx['blockHash'].lower()
                    from_addr = tx['from'].lower()
                    tx_contract_addr = tx['contractAddress'].lower()
                    to_addr = tx['to'].lower()
                    tx_value = int(tx['value'])
                    tx_token_name = tx['tokenName']
                    tx_token_symbol = tx['tokenSymbol']
                    if tx_token_symbol is None or len(tx_token_symbol) < 1:
                        continue
                    try:
                        tx_token_decimal = int(tx['tokenDecimal'])
                    except (TypeError, ValueError):
                        continue
                    tx_gas = int(tx['gas'])
                    tx_gas_price = int(tx['gasPrice'])
                    tx_gas_used = int(tx['gasUsed'])
                    tx_confirmations = int(tx['confirmations'])
                    if str(tx_contract_addr).lower() != str(
                            contract_addr).lower():
                        continue
                    if to_addr != address.lower() or from_addr.lower(
                    ) == address.lower():
                        continue
                    if tx_confirmations < min_confirmations:
                        continue
                    deposit_tx = db.session.query(
                        EthTokenDepositOrder).filter_by(
                            trx_id=tx_hash).first()
                    if deposit_tx is not None:
                        continue
                    user = db.session.query(User).filter_by(
                        eth_address=address).first()
                    deposit_tx = EthTokenDepositOrder(
                        from_addr,
                        to_addr,
                        str(tx_value),
                        tx_token_decimal,
                        tx_hash,
                        tx_timestamp,
                        tx_token_symbol,
                        tx_block_number,
                        tx_contract_addr,
                        'SUCCESS',
                        user_id=user.id if user else None,
                        simple_token_amount=Decimal(tx_value) /
                        Decimal(10**tx_token_decimal))
                    db.session.add(deposit_tx)
                    # add unpayed_balance to user
                    if user is not None:
                        user.unpayed_balance = str(
                            Decimal(user.unpayed_balance) +
                            (Decimal(tx_value) /
                             Decimal(10**tx_token_decimal)))
                        db.session.add(user)
                    logger.info("eth token tx crawled: %s" % tx)
                db.session.commit()
        except Exception as e:
            logger.error("process eth tx error", e)
            has_error = True
            db.session.rollback()

    if not has_error:
        update_last_crawled_eth_block_height(latest_height)
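Interpolating the query string by hand works, but requests can encode the parameters itself. A sketch of the same Etherscan call:

# Sketch: let requests build and encode the Etherscan query string.
import requests

def fetch_token_txs(address, start_block, end_block, api_key):
    params = {
        'module': 'account',
        'action': 'tokentx',
        'address': address,
        'startblock': start_block,
        'endblock': end_block,
        'sort': 'asc',
        'apikey': api_key,
    }
    return requests.get('https://api.etherscan.io/api',
                        params=params, timeout=30).json()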
Example no. 39
                # Other checks (should not be processed here)
                else:
                    points[task_id] = None

        except Exception as err:
            points[task_id] = 0
            logger.error(err, exc_info=True)
    logger.info(f"TaskID/Points {points}")
    return points


if __name__ == '__main__':
    args = parse_args()
    test_data_filename = os.path.join(DIR_PATH, 'test_data',
                                      'public_data.yaml')
    test_answers_filename = os.path.join(DIR_PATH, 'test_data',
                                         'public_answers.yaml')
    if args.all:
        student_ids = os.listdir(os.path.join(DIR_PATH, 'students'))
        student_ids = [
            x[:-3] for x in student_ids
            if x[-3:] == '.py' and 'sample' not in x
        ]
        for student_id in student_ids:
            logger.info(f"Student {student_id}")
            autograde(student_id, args.tasks, test_data_filename,
                      test_answers_filename)
    else:
        autograde(args.student_id, args.tasks, test_data_filename,
                  test_answers_filename)
 def endDocument(self):
     logger.info("[ DOCUMENT DONE PARSING ]")
Example no. 41
def sweep_deposit_eth_accounts_balances():
    """做以太充值账户的归账操作"""
    # 计算合约的一次转账操作需要的gas(可以估计一个固定值)
    token_contract_addr = app.config['BLOCKLINK_ERC20_CONTRACT_ADDRESS']
    gas_limit = 100000  # TODO: 不同token合约可能需要不同的gas_limit
    gas_price = 1 * (10**9)
    encrypt_password = app.config['ETH_ENCRYPT_PASSWORD'].encode('utf8')
    min_sweep_blocklink_token_amount = app.config[
        'MIN_SWEEP_BLOCKLINK_TOKEN_AMOUNT']
    sweep_to_eth_address = app.config['SWEEP_TO_ETH_ADDRESS']
    sweep_gas_spender_eth_address = app.config['SWEEP_GAS_SPENDER_ETH_ADDRESS']
    sweep_gas_spender_eth_private_key = app.config[
        'SWEEP_GAS_SPENDER_ETH_PRIVATE_KEY']
    # TODO: also sweep the ETH held by deposit accounts (it may have come from
    # sweep_gas_spender, so it is not simply returned to the user)
    try:
        eth_accounts = db.session.query(EthAccount).all()
        token_balances_of_accounts = eth_helpers.query_eth_addresses_balances_of_token(
            [account.address for account in eth_accounts], token_contract_addr)
        eth_balances_of_accounts = eth_helpers.query_eth_addresses_balances_of_eth(
            [account.address for account in eth_accounts])
        logger.debug("balances: %s / %s" % (token_balances_of_accounts, eth_balances_of_accounts))
        nonce_of_sweep_gas_spender_address = eth_helpers.get_eth_address_nonce(
            sweep_gas_spender_eth_address)
        for eth_account in eth_accounts:
            eth_privatekey = eth_helpers.try_decrypt_eth_privatekey(
                eth_account.encrypted_private_key, encrypt_password)
            # Check that the deposit account's private key matches its address; skip it if not
            if eth_privatekey is None:
                logger.info(
                    "found eth address %s private key error when sweeping deposit eth accounts"
                    % str(eth_account.address))
                continue
            recently_sweep_history = db.session.query(EthTokenSweepTransaction) \
                .filter_by(from_address=eth_account.address) \
                .filter(
                EthTokenSweepTransaction.created_at > (datetime.datetime.utcnow() - datetime.timedelta(hours=3))) \
                .order_by(EthTokenSweepTransaction.created_at.desc()).first()
            if recently_sweep_history is not None:
                # Skip this address if it was already swept within the last 3 hours
                continue
            token_balance = token_balances_of_accounts.get(
                eth_account.address,
                eth_helpers.EthAccountBalance(eth_account.address, 0,
                                              token_contract_addr))
            if token_balance.balance < min_sweep_blocklink_token_amount:
                # Token balance too small to be worth sweeping
                logger.debug("balance %s (simple %s) below sweep minimum %s" % (
                    token_balance.balance, token_balance.simple_balance,
                    min_sweep_blocklink_token_amount))
                logger.info(
                    "eth account has too little blocklink ERC20 token to sweep(%s)"
                    % str(token_balance.simple_balance))
                continue
            eth_balance = eth_balances_of_accounts.get(
                eth_account.address,
                eth_helpers.EthAccountBalance(eth_account.address, 0))
            if int(eth_balance.balance) <= (gas_price * gas_limit):
                # Not enough ETH in the deposit account to pay gas for the token
                # transfer; top it up from the gas spender account
                to_send_eth_amount = gas_limit * gas_price
                transfer_eth_for_gas_tx_dict = {
                    # 'from': sweep_gas_spender_eth_address,
                    'to': eth_account.address,
                    'value': to_send_eth_amount,
                    'gas': 25200,  # gas for a plain ETH transfer (21000 plus headroom)
                    'gasPrice': gas_price,
                    'nonce': nonce_of_sweep_gas_spender_address,
                }
                nonce_of_sweep_gas_spender_address += 1
                signed_raw_tx = eth_helpers.eth_signtransaction(
                    transfer_eth_for_gas_tx_dict,
                    sweep_gas_spender_eth_private_key)
                logger.info("signed raw tx for send eth is: %s" %
                            str(signed_raw_tx))
                tx_id = eth_helpers.send_eth_rawtransaction_to_ether(
                    signed_raw_tx)
                logger.info(
                    "response of transfer gas eth from sweep address to %s is %s"
                    % (eth_account.address, str(tx_id)))
                # Wait for the next task cycle; resume sweeping once this address holds ETH
                continue
            # Build and broadcast the token transfer from the deposit account to the collection address
            account_nonce = eth_helpers.get_eth_address_nonce(
                eth_account.address)
            transfer_token_tx_dict = eth_helpers.make_eth_call_params(
                eth_account.address, token_contract_addr, gas_limit, gas_price,
                0, eth_helpers.get_eth_contract_token_transfer_signature(),
                [sweep_to_eth_address,
                 int(token_balance.balance)], account_nonce)
            signed_raw_tx = eth_helpers.eth_signtransaction(
                transfer_token_tx_dict, eth_privatekey)
            logger.info(
                "signed raw tx for send ERC20 token %s from %s to %s: %s" %
                (str(token_balance.simple_balance), eth_account.address,
                 sweep_to_eth_address, str(signed_raw_tx)))
            tx_id = eth_helpers.send_eth_rawtransaction_to_ether(signed_raw_tx)
            logger.info(
                "response of transfer token from %s to sweep eth address is %s"
                % (eth_account.address, str(tx_id)))

            # Record the sweep transaction in the database
            sweep_tx = EthTokenSweepTransaction(tx_id, eth_account.address,
                                                sweep_to_eth_address,
                                                token_contract_addr,
                                                token_balance.simple_balance)
            db.session.add(sweep_tx)
            db.session.commit()
            logger.info(
                "processed one token sweep(amount %s) transaction of %s to %s"
                % (str(token_balance.simple_balance), eth_account.address,
                   sweep_to_eth_address))
    except Exception as e:
        logger.error("sweep deposit eth accounts balances error: %s" % str(e))
        db.session.rollback()
import bz2
from collections import defaultdict
import operator
import xml.etree.ElementTree as ET

from logging_config import logger

if __name__ == "__main__":
    fname = "data/enwiki-latest-pages-articles1.xml-p000000010p000010000-shortened.bz2"

    f = bz2.BZ2File(fname)

    tree = ET.parse(f)
    root = tree.getroot()
    
    seen = defaultdict(lambda: 0)

    namespaces = {'xmlns': 'http://www.mediawiki.org/xml/export-0.8/'}
    for title in root.findall('xmlns:page/xmlns:revision/xmlns:contributor/xmlns:username', namespaces=namespaces):
        seen[title.text] += 1

    for key, value in sorted(seen.items(), key=operator.itemgetter(1)):
        logger.info("%s : %s" % (key, value))

    # Or report just the single most frequent contributor:
    logger.info("##### MAX #####")
    logger.info(max(seen.items(), key=lambda pair: pair[1]))
    logger.info("###############")
Example no. 43
def make_dirs(dirname):
    if not os.path.exists(dirname):
        logger.info(f"Made new directory {dirname}")
        os.makedirs(dirname)
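The check-then-create pattern races if another process creates the directory in between. On Python 3, exist_ok=True removes the race; a sketch:

# Race-free sketch: makedirs(exist_ok=True) tolerates a directory that
# appeared between the check and the call.
def make_dirs(dirname):
    existed = os.path.isdir(dirname)
    os.makedirs(dirname, exist_ok=True)
    if not existed:
        logger.info(f"Made new directory {dirname}")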
Example no. 44
def sweep_deposit_eth_accounts_balances_task():
    logger.info("sweep_deposit_eth_accounts_balances task")
    sweep_deposit_eth_accounts_balances()
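The wrapper reads like a periodic job (the commented-out Celery import in Example no. 27's project suggests Celery beat in production). A minimal stand-in scheduler, purely as a sketch:

import time

# Hypothetical scheduler loop: run the sweep task every 10 minutes.
# A real deployment would use Celery beat or cron instead.
if __name__ == '__main__':
    while True:
        sweep_deposit_eth_accounts_balances_task()
        time.sleep(600)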