Example 1
def bitauto_trace(model, deadline):
    URL_FORMAT = "http://baa.bitauto.com/{model}/index-all-all-{page}-1.html"

    for page in count(1):
        url = URL_FORMAT.format(page=page, model=model)
        logger.debug("bitauto trace {url}".format(url=url))
        content = get_content(url)
        d = pq(content)
        rows = d("div.postslist_fist_title").nextAll("div.postslist_xh")

        for row in rows:
            i = pq(row)
            title = i("li.bt span").text().strip()
            href = i("li.bt a").attr("href")
            author = i("li.zz a").html().strip()

            now = datetime.datetime.utcnow()
            published = (
                now.strftime("%Y-%m-%d") if (":" in i("li.zhhf").html().strip()) else i("li.zhhf").html().strip()
            )
            now = now.strftime("%Y-%m-%dT%H:%M:%SZ")

            if published < deadline:
                return

            yield dict(title=title, href=href, author=author, published=published, insert_dt=now)
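Note that published and deadline are compared as plain strings, so the deadline passed in should be an ISO-style date ('YYYY-MM-DD'). A minimal consumer of this generator might look like the sketch below; the model slug is invented for illustration.

# Sketch only: 'some-model' is an invented bitauto model slug, not taken from the project.
for post in bitauto_trace('some-model', deadline='2020-01-01'):
    logger.debug('new post %(title)s by %(author)s (%(published)s)', post)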
Example 2
def insert_questionnair_records(records):
    """
    Insert the questionnaire records that were read into the database

    Parameters
    -----------
    records : list
        list of QuestionnaireRecord
    """
    conn = connect_database()
    with conn.cursor() as cur:
        for record in records:
            sql = 'insert into xy_wenjuan ' \
                  '(barcode, lbcode, lbname, qcode, question, answer) ' \
                  'VALUES ' \
                  '(\'%s\', \'%s\', \'%s\', \'%s\', \'%s\', \'%s\')' % (
                      record.tjid,
                      record.lbbm,
                      record.lbmc,
                      record.qcode,
                      record.question,
                      record.answer,
                  )
            try:
                logger.debug(sql)
                cur.execute(sql)
            except Exception:
                logger.warning(
                    'Insertion failed when trying to insert %s!' % record.line
                )
        conn.commit()
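The interpolated SQL above will break on values that contain single quotes; a parameterized variant, assuming the cursor comes from a DB-API 2.0 driver that uses %s placeholders (for example pymysql), is sketched here as a replacement for the statement-building inside the loop.

# Sketch: same insert, but letting the driver handle quoting.
sql = ('insert into xy_wenjuan '
       '(barcode, lbcode, lbname, qcode, question, answer) '
       'VALUES (%s, %s, %s, %s, %s, %s)')
params = (record.tjid, record.lbbm, record.lbmc,
          record.qcode, record.question, record.answer)
logger.debug('%s with params %s', sql, params)
cur.execute(sql, params)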
Example 3
def get_content(url):
    r = requests.get(url, headers={"User-Agent": random.choice(USER_AGENTS)})
    assert r.status_code == 200
    if r.encoding in ("ISO-8859-1", "gb2312"):
        logger.debug("Turn coding from {coding} to gbk".format(coding=r.encoding))
        r.encoding = "gbk"
    return r.text
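All of the snippets on this page assume a module-level logger (and this one also a USER_AGENTS list); a minimal setup that makes the debug calls visible would be something like:

import logging
import random

logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s %(name)s %(levelname)s %(message)s')
logger = logging.getLogger(__name__)

# Placeholder pool; the real projects keep their own lists of User-Agent strings.
USER_AGENTS = ['Mozilla/5.0 (Windows NT 10.0; Win64; x64)']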
Example 4
def post_vote():
  user = current_user
  if not request.json or not 'talkId' in request.json:
    abort(400)

  talkId = request.json['talkId']

  try:
    vote = db.session.query(Vote).filter(Vote.talkId==talkId).filter(Vote.email==user.email).first()
  except:
    logger.error("Unexpected error loading the vote:", sys.exc_info()[0])
    raise

  try:
    if vote == None:
      vote = Vote()
      vote.talkId = talkId
      vote.email = user.email
    vote.fitsTechfest = request.json['fitsTechfest']
    vote.fitsTrack = request.json['fitsTrack']
    vote.expectedAttendance = request.json['expectedAttendance']
    db.session.add(vote)
    db.session.commit()
    logger.debug('User {} voted on talkId {} - {}/{}/{}.'.format(user.email,
      talkId, vote.fitsTechfest, vote.fitsTrack, vote.expectedAttendance))
  except:
    logger.error("Unexpected error saving the vote:", sys.exc_info()[0])
    raise

  return json.dumps(vote.serialize), 201
Example 5
def wait_for_operation(project, zone, operation):
    """
    Checks if operation demanded (create/start/stop/delete) is completed

    :param project: Project name on google cloud
    :param zone: zone the vm_instance resides in
    :param operation: which operation is being run
    :return: True when completed
    """
    logger.debug('Waiting for operation to finish...')

    while True:
        try:
            compute = discovery.build('compute', 'v1', credentials=get_compute_engine_credentials())
            req = compute.zoneOperations().get(
                project=project,
                zone=zone,
                operation=operation
            )
            result = req.execute()

            if result['status'] == 'DONE':
                return result['status']

            time.sleep(1)
        except Exception as e:
            logger.debug('Checking if operation is completed failed: %s' % e)
Example 6
def create_disk_for_vm(name, source_image, disk_size, zone=DEFAULT_VM_ZONE, project=PROJECT_NAME):
    """
    Creates disk on Google Cloud compute engine to be used alongside a vm. Disk is generated from
    an image that is also stored on Google Cloud compute engine

    :param name: Name of disk (Usually same name as VM_name)
    :param source_image: Image for disk to replicate (stored on google cloud compute engine/ images)
    :param disk_size: Size of disk
    :param zone: The zone the disk should be created in (same as VM zone)
    :param project: Name of project
    :return: Link of disk if successful, False if unsuccessful
    """
    try:
        compute = discovery.build('compute', 'v1', credentials=get_compute_engine_credentials())

        config = {
            'name': name,
            'description': '',
            'sizeGb': disk_size,
            'sourceImage': source_image,
        }

        req = compute.disks().insert(project=project, zone=zone, body=config)
        resp = req.execute()

        completed = wait_for_operation(project, zone, resp['name'])

        if completed == 'DONE':
            link = resp['targetLink'].split('/v1/')[1]
            return link

    except Exception as e:
        logger.debug("Creation of disk failed: %s" % e)
        print(e)
        return False
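A hedged usage sketch; the disk name and source image path below are invented, real source images live under the project's Compute Engine images.

# Illustrative call only; arguments are not taken from the original project.
link = create_disk_for_vm(name='worker-1-disk',
                          source_image='global/images/my-base-image',
                          disk_size=10)
if link:
    logger.debug('Disk ready: %s' % link)
else:
    logger.debug('Disk creation failed')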
Example 7
def get_votes():
  user = current_user
  logger.debug('Getting votes for user {}.'.format(user.email))
  sys.stdout.flush()
  items = Vote.query.filter(Vote.email==user.email).all()
  logger.debug('Found {} votes entered by user {}.'.format(len(items), user.email))
  return Response(json.dumps([item.serialize for item in items]), mimetype='application/json')
Example 8
File: utils.py Project: bonfy/cba
def autohome_trace(model, deadline):
    URL_FORMAT = 'http://club.autohome.com.cn/bbs/forum-c-{model}-{page}.html?orderby=dateline&qaType=-1'
    BASE_URL = 'http://club.autohome.com.cn'
    for page in count(1):

        url = URL_FORMAT.format(page=page, model=model)
        logger.debug('autohome trace {url}'.format(url=url))
        content = get_content(url)
        d = pq(content)
        rows = d('dl.bluebg:last').nextAll('dl.list_dl')

        for row in rows:

            i = pq(row)
            title = i('dt a').html().strip()
            href = BASE_URL + i('dt a').attr('href')
            author = i('dd:eq(0) a').html()
            published = i('dd:eq(0) span.tdate').html()
            now = datetime.datetime.utcnow()
            now = now.strftime('%Y-%m-%dT%H:%M:%SZ')

            if published < deadline:
                return

            yield dict(title=title, href=href, author=author, published=published, insert_dt=now)
Example 9
File: utils.py Project: bonfy/cba
def bitauto_trace(model, deadline):
    URL_FORMAT = 'http://baa.bitauto.com/{model}/index-all-all-{page}-1.html'

    for page in count(1):
        url = URL_FORMAT.format(page=page, model=model)
        logger.debug('bitauto trace {url}'.format(url=url))
        content = get_content(url)
        d = pq(content)
        rows = d('div.postslist_fist_title').nextAll('div.postslist_xh')

        for row in rows:
            i = pq(row)
            title = i('li.bt span').text().strip()
            href = i('li.bt a').attr('href')
            author = i('li.zz a').html().strip()

            now = datetime.datetime.utcnow()
            published = now.strftime(
                '%Y-%m-%d') if (':' in i('li.zhhf').html().strip()) else i('li.zhhf').html().strip()
            now = now.strftime('%Y-%m-%dT%H:%M:%SZ')

            if published < deadline:
                return

            yield dict(title=title, href=href, author=author, published=published, insert_dt=now)
Example 10
    def compute_hiperspaces(self):
        # The heuristic used when looking for connections between
        # different clusters can make qhull fail,
        # so we redirect its output to stay silent in those cases
        if not len(self.points) > 0:
            logger.error('No points to compute hull!')
            raise Exception('No points to compute hull!')
        stderr_fd = sys.stderr.fileno()
        with open('/tmp/qhull-output.log', 'w') as f, stderr_redirected(f):
            points = list(self.points)
            logger.info('Searching for hull in dimension %s based on %s points',
                    len(points[0]), len(points))
            output = qconvex('n', points)
            if len(output) == 1:
                logger.debug('Could not get Hull. Joggle input?')
        try:
            dim, facets_nbr, facets = self.__parse_hs_output(output)
        except IncorrectOutput:
            logger.warning('Could not get hull')
            raise CannotGetHull()
        logger.info('Found hull in dimension %s of %s facets',
                dim, len(facets))
        self.dim = dim
        self.facets = facets
        if self.verbose:
            print "Computed MCH with", facets_nbr, "halfspaces"
            print 'Here they are:\n'
            for facet in self.facets:
                print facet
        return self.dim
Example 11
def upload_file(local_path, remote_name, bucket=STORAGE_BUCKET):
    """
    Upload a file to Google Storage
    :param local_path: The local path to the file to upload
    :param remote_name: The name of the file in the google cloud storage
    :param bucket: The bucket on google cloud storage you want to upload the file to
    :return: True if uploaded, False otherwise
    """
    try:
        service = discovery.build('storage', 'v1', credentials=get_storage_credentials())
        logger.info("Uploading %s to google cloud" % local_path)
        req = service.objects().insert(
            bucket=bucket,
            name=remote_name,
            # predefinedAcl="publicRead",         Uncomment this line if you want your files to be accessible to anyone
            media_body=local_path)
        req.execute()

        uploaded = check_if_file_exists(remote_name)

        if uploaded is True:
            logger.info("Upload complete!")
            return True
        else:
            return False

    except Exception as e:
        logger.debug("Unable to upload file %s to google cloud: %s" % (local_path, e))
        return False
Example 12
    def check(self, suggested, data):
        """Return two collections of tags from 'suggested': first is the set
           of tags that passed their checks and second is a dict of tags whose
           check(s) did not pass (key is tag name and value is why it failed)."""
        passed = set()
        failed = {}
        # Execute all relevant checks and store their results in the 'results' dict
        results = {}
        suggested_set = set(suggested)
        relevant_mods = [mod for mod in self.checks if not set(mod.tags).isdisjoint(suggested_set)]
        for mod in relevant_mods:
            result = None
            try:
                result = mod.main(data)
                results[mod.__name__] = {'out': result, 'tags': mod.tags, 'desc': mod.desc}
            except:
                logger.exception('Something failed')
            logger.debug("Check %s returned %s" % (mod.__name__, result))
        # Now take the results of individual checks and compile lists of passed
        # and failed tags
        for result in results.itervalues():
            if result["out"]:
                for tag in result["tags"]:
                    passed.add(tag)
            else:
                for tag in result["tags"]:
                    failed.setdefault(tag, list()).append(result["desc"])

        return passed, failed
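The check modules iterated here are assumed to expose tags, desc and main(data); a hypothetical minimal check, with all identifiers invented for illustration, could look like:

# Hypothetical check module; 'ip_addr' is an invented data label.
tags = ['network']
desc = 'a non-loopback IPv4 address is configured'

def main(data):
    # 'data' is assumed to be indexable by label and to return lines of command output.
    return any('inet ' in line and '127.0.0.1' not in line for line in data['ip_addr'])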
Example 13
def get_webinar_info(input_file, details_mark):
    """
    Gather information about the webinar
    
    :input: a csv file of attendees for a GotoWebinar to read from
    :return: a list of two lists containing the webinar details 
             headers and corresponding header values
    """
    try:
        with open(input_file, 'rb') as csv_file:
            rdr = reader(csv_file)
            # read Generated info and advance to next useful headers
            rdr.next()
            keys = rdr.next()
            vals = rdr.next()
            rdr.next()
            # read the rest of webinar info
            while details_mark not in keys:
                try:
                    headers += clear_empty_from_list(keys)
                    values += clear_empty_from_list(vals)
                except NameError:
                    headers = clear_empty_from_list(keys)
                    values = clear_empty_from_list(vals)
                keys = rdr.next()
                vals = rdr.next()
        return [headers, values]
    except IOError as e:
        logger.error("Cannot read file '{}'".format(input_file))
        logger.debug("Exception:\n{}".format(e))
Example 14
def get_participants_info(input_file, webinar_id, details_mark):
    """
    Gather information about the webinar participants
    
    :input: a csv file of attendees for a GotoWebinar to read from
            the webinar id number
    :return: a list of two lists containing the webinar participants 
            details headers and a list of items representing corresponding 
            header values
    """
    
    reading_details = 0
    values_list = []
    remove_row_marker = '*If an attendee left and rejoined the session, the In Session Duration column only includes their first visit.'
    try: 
        with open(input_file, 'rb') as csv_file:
            rdr = reader(csv_file)
            for row in rdr:
                if not reading_details:
                    if details_mark in row:
                        headers = ['Webinar ID'] + rdr.next()
                        reading_details = 1
                        continue
                elif remove_row_marker not in row:
                    values_list.append([webinar_id] + row)
        return [headers, values_list]
    except IOError as e:
        logger.error("Cannot read file '{}'".format(input_file))
        logger.debug("Exception:\n{}".format(e))
Example 15
    def compute_hiperspaces(self):
        if not len(self.points) > 0:
            logger.error('No points to compute hull!')
            raise Exception('No points to compute hull!')

        # The heuristic used when searching for connections between
        # different clusters can make qhull fail,
        # so we redirect stderr to keep those errors
        # from being visible to the user
        stderr_fd = sys.stderr.fileno()
        with open('/tmp/qhull-output.log', 'w') as f, stderr_redirected(f):
            points = list(self.points)
            logger.info('Searching for hull in dimension %s based on %s points',
                    len(points[0]),len(points))
            output = qconvex('n',points)
            if len(output) == 1:
                logger.debug('Could not get Hull. Joggle input?')
        try:
            dim, facets_nbr, facets = self.__parse_hs_output(output)
        except IncorrectOutput:
            logger.error('Could not get hull')
            raise CannotGetHull()
        logger.info('Found hull in dimension %s of %s facets',
                dim,facets_nbr)
        self.dim = dim
        self.facets = facets
        return self.dim
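stderr_redirected is not shown on this page; one plausible implementation, assuming it is a context manager that temporarily points the process-level stderr (file descriptor 2) at the given file, is:

import os
import sys
from contextlib import contextmanager

@contextmanager
def stderr_redirected(to_file):
    # Duplicate the real stderr fd, point fd 2 at to_file, restore it afterwards.
    stderr_fd = sys.stderr.fileno()
    saved_fd = os.dup(stderr_fd)
    try:
        sys.stderr.flush()
        os.dup2(to_file.fileno(), stderr_fd)
        yield
    finally:
        sys.stderr.flush()
        os.dup2(saved_fd, stderr_fd)
        os.close(saved_fd)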
Example 16
def read_prototype(celltype, cdict):
    """Read the cell prototype file for the specified class. The
    channel properties are updated using values in cdict."""
    filename = '%s/%s.p' % (config.modelSettings.protodir, celltype)
    logger.debug('Reading prototype file %s' % (filename))
    adjust_chanlib(cdict)
    cellpath = '%s/%s' % (config.modelSettings.libpath, celltype)
    if moose.exists(cellpath):
        return moose.element(cellpath)
    for handler in logger.handlers:
        handler.flush()
    proto = moose.loadModel(filename, cellpath)
    # If prototype files do not have absolute compartment positions,
    # set the compartment positions to the origin. This avoids
    # incorrect assignment of position when the x/y/z values in the
    # prototype file are only there to set the compartment length.
    if not config.modelSettings.morph_has_postion:
        for comp in moose.wildcardFind('%s/#[TYPE=Compartment]' % (proto.path)):
            comp.x = 0.0
            comp.y = 0.0
            comp.z = 0.0
    leveldict = read_keyvals('%s/%s.levels' % (config.modelSettings.protodir, celltype))
    depths = read_keyvals('%s/%s.depths' % (config.modelSettings.protodir, celltype))
    depthdict = {}
    for level, depthset in list(depths.items()):
        if len(depthset) != 1:
            raise Exception('Depth set must have only one entry.')
        depthdict[level] = depthset.pop()
    assign_depths(proto, depthdict, leveldict)
    config.logger.debug('Read %s with %d compartments' % (celltype, len(moose.wildcardFind('%s/#[TYPE=Compartment]' % (proto.path)))))
    return proto
Example 17
    def get_qhull(self, neg_points=[]):
        """ From a Petrinet, gets it's representationas a Convex Hull
        """
        # Create an empty Convex Hull
        qhull = Qhull(neg_points=neg_points)
        # The default normal for each facet
        dim = len(self.transitions)
        tmpl_normal = [0]*dim
        # Each transition corresponds to one dimension
        # transition.label -> dimension number
        transitions = self.event_dictionary
        # Each facet corresponds to one place
        # place.id -> {normal->[arc.value], offset->marking}
        facets_dict = {}
        # Iterate over the arcs
        for arc in self.arcs:
            # There should be no null arcs
            if not arc.value:
                logger.error('We found a zero arc: %s',arc)
                raise Exception('We found a zero arc: %s',arc)
            # NOTE remember that our internal HS representation is
            # the reverse of the paper's (we use <= 0 instead of >= 0)
            if isinstance(arc.source,Transition):
                # If the arc leaves a transition the coefficient is < 0
                coef = -1*arc.value
                transition = arc.source
                place = arc.destination
            else:
                # If the arc leaves a place the coefficient is > 0
                coef = arc.value
                place = arc.source
                transition = arc.destination
            x = transitions.setdefault(transition.label,len(transitions))
            facet = facets_dict.setdefault(place.id,{'normal':list(tmpl_normal),
                                                    'in_transitions':[],
                                                    'out_transitions':[],
                                                    'offset': -1*place.marking,
                                                    'id':place.id})
            if coef < 0:
                facet['in_transitions'].append(transition.label)
            else:
                facet['out_transitions'].append(transition.label)
            if facet['normal'][x]:
                logger.debug('Coefficient already loaded. Dummy place')
                coef = 0
            facet['normal'][x] = coef

        facets = []
        for pl_id, facet in facets_dict.items():
            # Do not create the facet for dummy places
            if not any(facet['normal']):
                continue
            # Values are always integer
            hs = Halfspace(facet['normal'], facet['offset'], integer_vals=False)
            logger.debug('Adding facet %s',hs)
            facets.append(hs)
        qhull.dim = dim
        qhull.facets = facets
        return qhull
Example 18
File: utils.py Project: bonfy/cba
def get_content(url):
    r = requests.get(url, headers={'User-Agent': random.choice(USER_AGENTS)})
    assert r.status_code == 200
    if r.encoding in ('ISO-8859-1', 'gb2312'):
        logger.debug(
            'Turn coding from {coding} to gbk'.format(coding=r.encoding))
        r.encoding = 'gbk'
    return r.text
Example 19
def process_csv_info():
    """
    Processes the read information:
        Separate headers from webinar and participant details.
        Detect differences in participant headers and cope with them
            (keep the longest header and add empty fields in the right
             position for participant info rows that are shorter than
             the longest header).
        Basic error checking and debug message logging.

    :return: 1 on error and 0 on success
    """
    global w_dict, w_header, w_values, w_info
    global p_header, p_values, p_headers_list

    # get headers and values for webinars
    w_header = w_info[0]
    w_values = []
    for key in w_dict:
        w_values += w_dict[key]

    # get headers and values for participants
    p_header, p_values, diffs = p_headers_list[0], [], []
    for h in p_headers_list[1:]:
        # try to find differences in participants headers 
        if len(p_header) < len(h):
            diffs = [x for x in h if x not in p_header]
            p_header = h
            break
        elif len(h) < len(p_header):
            diffs = [x for x in p_header if x not in h]
            break
    if diffs:
        diffs_pos = [p_header.index(x) for x in diffs]
    for key in p_dict:
        for row in p_dict[key]: 
            if len(row) < len(p_header):
                # handle differences in input files headers
                if not diffs:
                    logger.error("Header longer than row but no diffs detected.")
                    return 1
                for pos in diffs_pos:
                    insert_pos = int(pos)
                    row.insert(insert_pos, "")
            elif len(row) > len(p_header):
                logger.error("Participants row longer than header.Exiting...")
                logger.debug('''
webinar id:{}
final_participants_header:{}
row:{}
'''.format(key, p_header, row))
                return 1
            else:
                break
        p_values += p_dict[key]

    return 0
Example 20
def init():
    models = [m.User,m.Post,m.Image,m.Comment,m.BlogData]
    for t in reversed(models):
        logger.debug("Dropping %s" % t)
        t.drop_table(True)
    for t in models:
        logger.debug("Creating {}.\n\tColumns: {}".format(
            t, ", ".join(t._meta.columns.keys())))
        t.create_table(True)
Example 21
    def emit(self, *args, **kwargs):

        if len(args) != len(self.args):
            raise ValueError('incorrect number of arguments {} for signal {}'.format(repr(args), repr(self.name)))

        for handler in self.__handlers:
            logger.debug('firing {} for {}'.format(self, handler))
            handler(*args, **kwargs)
Example 22
def list_vm_instances(project=PROJECT_NAME, zone=DEFAULT_VM_ZONE):
    try:
        compute = discovery.build('compute', 'v1', credentials=get_compute_engine_credentials())
        req = compute.instances().list(project=project, zone=zone)
        response = req.execute()
        print(json.dumps(response['items'], indent='\n'))
        return response

    except Exception as e:
        logger.debug("Unable to list instances: %s" % e)
Example 23
 def build_url(self, hostname, ipaddr):
     qstring = urlencode({"hostname": hostname,
                          "myip": ipaddr,
                          "wildcard": "NOCHG",
                          "mx": "NOCHG",
                          "backmx": "NOCHG"})
     upd_url = "%(url)s?%(qstring)s" % {"url": URL_UPDATE,
                                      "qstring": qstring}
     logger.debug("Built url: %s", upd_url)
     return upd_url
Example 24
 def run(self, tags, rules, data):
   """Run rules (run all when "rules" is empty, othervise run only these
      listed there) and return dict with their answers"""
   results = []
   for mod in self.rules:
     # Skip rule if we are supposed to run only specific rules and this
     # one is not the chosen one
     if len(rules) > 0 and mod.__name__ not in rules:
       logger.debug("Skipping %s because only specific rules are supposed to run" % mod.__name__)
       continue
     # Skip this rule if there is no intersection between the tags we should run
     # and the tags this rule should be run for
     if len([val for val in tags if val in mod.tags]) == 0:
       logger.debug("Skipping %s because it is not tagged with provided tags" % mod.__name__)
       continue
     # Finally run the rule
     func = getattr(mod, 'main')
     func_text = getattr(mod, 'text')
     name = getattr(mod, 'name')
     result = None
     used = []
     text = ''
     # Reset list of data rule used
     data.reset_access_list()
     # Now run the rule
     try:
       result = func(data)
     except DataNotAvailable:
       logger.error("Data not available for %s" % mod.__name__)
       result = False
     except:
       logger.exception("Something failed badly when executing %s" % mod.__name__)
       result = False
     logger.info("Rule %s returned %s" % (mod.__name__, result))
     # Store list of data rule has used
     used = data.get_access_list()
     # Now, if necessary, get a description of what's wrong
     if result:
       try:
         text = func_text(result)
       except:
         logger.exception("Something failed badly when getting description for %s" % mod.__name__)
     # Determine what the result was
     if result:
       status = 'FAIL'
     elif result is False:
       status = 'SKIP'
     elif result is None:
       status = 'PASS'
     else:
       logger.error("Failed to understand to result of %s" % result)
       continue
     # Save what was returned
     results.append({'label': mod.__name__, 'status': status, 'result': result, 'name': name, 'text': text, 'used': used})
   return results
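Each rule module is assumed to expose name, tags, main(data) and text(result), with main returning a truthy value to mean FAIL, False to mean SKIP and None to mean PASS; a hypothetical rule following that contract (all names invented) might be:

# Hypothetical rule module; 'swap_free_kb' is an invented data label.
name = 'Swap is almost exhausted'
tags = ['memory']

def main(data):
    free_kb = int(data['swap_free_kb'][0])
    if free_kb < 102400:
        return {'free_kb': free_kb}   # truthy -> reported as FAIL
    return None                       # nothing to report -> PASS

def text(result):
    return 'Only %s kB of swap left' % result['free_kb']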
Example 25
def start_volatile_status(battle, split_msg):
    if is_opponent(battle, split_msg):
        pkmn = battle.opponent.active
    else:
        pkmn = battle.user.active

    volatile_status = normalize_name(split_msg[3].split(":")[-1])
    if volatile_status not in pkmn.volatile_statuses:
        logger.debug("Starting the volatile status {} on {}".format(
            volatile_status, pkmn.name))
        pkmn.volatile_statuses.append(volatile_status)
Example 26
def singleturn(battle, split_msg):
    if is_opponent(battle, split_msg):
        side = battle.opponent
    else:
        side = battle.user

    move_name = normalize_name(split_msg[3].split(':')[-1])
    if move_name in constants.PROTECT_VOLATILE_STATUSES:
        # set to 2 because the `upkeep` function will decrement by 1 on every end-of-turn
        side.side_conditions[constants.PROTECT] = 2
        logger.debug("{} used protect".format(side.active.name))
Example 27
def startPythm():
    """Start the Pythm and renice if it was requested
    """
    config = PythmConfig()
    renice_level = config.get("pythm", "renice", default=-5, dtype=int)
    if renice_level != 0:
        logger.debug("Renicing pythm to %d" % renice_level)
        try:
            os.nice(renice_level)
        except OSError as e:
            logger.error("Failed to renice: %s" % e)
Example 28
def fieldend(battle, split_msg):
    """Remove the battle's field condition"""
    field_name = normalize_name(split_msg[2].split(':')[-1].strip())

    # trick room shows up as a `-fieldend` item but is separate from the other fields
    if field_name == constants.TRICK_ROOM:
        logger.debug("Removing trick room")
        battle.trick_room = False
    else:
        logger.debug("Setting the field to None")
        battle.field = None
Example 29
def fieldstart(battle, split_msg):
    """Set the battle's field condition"""
    field_name = normalize_name(split_msg[2].split(':')[-1].strip())

    # trick room shows up as a `-fieldstart` item but is separate from the other fields
    if field_name == constants.TRICK_ROOM:
        logger.debug("Setting trickroom")
        battle.trick_room = True
    else:
        logger.debug("Setting the field to {}".format(field_name))
        battle.field = field_name
Example 30
def sideend(battle, split_msg):
    """Remove a side effect such as stealth rock or sticky web"""
    condition = split_msg[3].split(':')[-1].strip()
    condition = normalize_name(condition)

    if is_opponent(battle, split_msg):
        logger.debug("Side condition {} ending for opponent".format(condition))
        battle.opponent.side_conditions[condition] = 0
    else:
        logger.debug("Side condition {} ending for bot".format(condition))
        battle.user.side_conditions[condition] = 0
Example 31
    def make_move(self, level, player, monster):
        
        if monster.has_condition('paralyzed'):
            return

        if monster.has_condition('asleep'):
            if self._should_wake_up(level, player, monster):
                monster.clear_condition('asleep')
            return True

        level.set_fov(monster)

        m_tile = level.tile_for(monster)
        p_tile = level.tile_for(player)

        # if we can fight
        if hasattr(monster.actions, 'melee'):

            # if we're next to the player then attack
            #FIXME being_distance seems to be high by 1
            if level.being_distance(player, monster) < 2:
                logger.debug('The {} on {} melees with you.'.format(monster, m_tile))
                monster.actions.melee(p_tile)
                return True

        # if we can't even move, give up
        if not hasattr(monster.actions, 'move'):
            return False

        #chase if we are not confused
        if not monster.has_condition('confused'):
            logger.debug('The {} on {} is chasing you.'.format(monster, m_tile))
            tile = level.chase_player(monster)
        else:
            tile = None

        # if we can't chase, move randomly
        if not tile:
            logger.debug('The {} on {} could not chase you.'.format(monster, m_tile))
            tile = self._random_walk(player, level, monster)

        # if we can't move, give up
        if not tile:
            logger.debug('The {} on {} could not find a tile to move to.'.format(monster, m_tile))
            return False

        # don't attack other monsters
        if tile.being:
            logger.debug('The {} on {} tried to attack another monster.'.format(monster, m_tile))
            return False
        # else just move to the square
        else:
            monster.actions.move(tile)
        return True
Example 32
 async def get_balance(self):
     method = 'GET'
     url_path = '/account/v1/balance'
     headers = self.generate_headers(method, url_path)
     url = API_URL + url_path
     response = await self.api_call(url, method, headers)
     if 'usd' in response:
         self.balance = Balance(**response).usd
         logger.debug(f'BALANCE: {self.balance}')
         return self.balance
     else:
         logger.debug(f'{response}')
Example 33
 def post(self):
     json_data = request.json
     if 'payload' in json_data:
         payload = json_data['payload']
         logger.debug(payload)
         #TODO Handle quality checking and save to db.
         item = payload
         data_item.save_item(item)
         return "Not today."
     else:
         logger.warning("No payload in post json data.")
         return "Not here."
Example 34
 def delete(self, id):
     session = Session()
     username = oidc.user_getfield('username')
     grant = session.query(RoleGrant).get(id)
     if not grant:
         json_abort(404)
     if not grant.topic.user == username:
         json_abort(403)
     session.delete(grant)
     session.commit()
     logger.debug(grant)
     return jsonify(grant)
Example 35
 async def update_base(self):
     final_skins = list()
     for game in GAMES:
         logger.debug(game)
         skins = await self.get_items(self.min_price, self.max_price, game)
         skins = [
             s for s in skins if not self.select_skin.skin_existence(s)
         ]
         final_skins += await self.filter_skins(skins, self.min_price,
                                                self.max_price)
     self.select_skin.create_all_skins(final_skins)
     logger.info(f'Total skins analyzed: {len(final_skins)}')
Example 36
def status(battle, split_msg):
    if is_opponent(battle, split_msg):
        pkmn = battle.opponent.active
    else:
        pkmn = battle.user.active

    if len(split_msg) > 4 and 'item: ' in split_msg[4]:
        pkmn.item = normalize_name(split_msg[4].split('item:')[-1])

    status_name = split_msg[3].strip()
    logger.debug("{} got status: {}".format(pkmn.name, status_name))
    pkmn.status = status_name
Example 37
 def __load_config(self):
   """Loads mapping of label to actual command in case we are running on live
      system and label to possible files in case we are running from dump."""
   config = ConfigParser.SafeConfigParser()
   config.optionxform = str
   config.read(self.__config_filename)
   self.config = {}
   self.config['commands'] = dict(config.items(self.__config_commands_section))
   self.config['files'] = {}
   for k, v in dict(config.items(self.__config_files_section)).iteritems():
     self.config['files'][k] = v.splitlines()
   logger.debug("Loaded commands and files config %s" % self.config)
Example 38
def get_profile_from_id(instance_session, instance_url, instance_key, instance_profile, instance_name=''):
    instance_profiles = get_quality_profiles(instance_session=instance_session, instance_url=instance_url, instance_key=instance_key)

    profile = next((item for item in instance_profiles if item["name"].lower() == instance_profile.lower()), False)
    if not profile:
        logger.error('Could not find profile_id for instance {} profile {}'.format(instance_name, instance_profile))
        exit_system()

    instance_profile_id = profile.get('id')
    logger.debug(f'found profile_id (instance{instance_name}) "{instance_profile_id}" from profile "{instance_profile}"')

    return instance_profile_id
Example 39
def inactive(battle, split_msg):
    regex_string = "(\d+) sec this turn"
    if split_msg[2].startswith(constants.TIME_LEFT):
        capture = re.search(regex_string, split_msg[2])
        try:
            battle.time_remaining = int(capture.group(1))
            logger.debug("Time remaining: {}".format(capture.group(1)))
        except ValueError:
            logger.warning("{} is not a valid int".format(capture.group(1)))
        except AttributeError:
            logger.warning("'{}' does not match the regex '{}'".format(
                split_msg[2], regex_string))
Example 40
 def _on_failure(self):
     '''
     Increments failure counter and switches state if allowed_fails is
     reached
     '''
     self._failure_count += 1
     logger.debug("Failure encountered, failure count: {}".format(
         self._failure_count))
     if self._failure_count >= self._allowed_fails:
         current_state = self._check_state()
         if current_state != OPEN:
             self._open()
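_on_failure is one half of a circuit breaker; the matching success path is not shown here, but a sketch consistent with the attributes used above (CLOSED and _close() are assumed counterparts of OPEN and _open()) would be:

 def _on_success(self):
     '''
     Sketch only: reset the failure counter and close the circuit again.
     CLOSED and _close() are assumed counterparts of OPEN/_open() above.
     '''
     self._failure_count = 0
     if self._check_state() != CLOSED:
         self._close()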
Example 41
def startServer(socket, host, port):
    # bind to the given socket to begin listening
    # for connections
    try:
        socket.bind((config.host, port))
        socket.listen(config.queue_length)
        l.debug("Server started at '%s' : %d", host, port,
                extra={'host': config.host, 'id': 0})
        return True, None
    except Exception as e:
        print("Server failed to start", str(e))
        return False, e.errno
Example 42
def view():
    global PROJECT_DIR
    ret = {}
    logger.info('project view : view')
    for root, dirs, files in os.walk(PROJECT_DIR):
        for file in files:
            if file[-7:] == '.pickle':
                with open(os.path.join(root, file), 'rb') as f:
                    pj = pickle.load(f)
                ret[file[:-7]] = pj
    logger.debug('project view return : {}'.format(ret))
    return ret
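view() only reads back <name>.pickle files; the writing side is not part of this snippet, but a minimal counterpart consistent with the naming scheme it expects might be:

def save(name, pj):
    # Sketch of the assumed saving side: one <name>.pickle per project under PROJECT_DIR.
    path = os.path.join(PROJECT_DIR, name + '.pickle')
    with open(path, 'wb') as f:
        pickle.dump(pj, f)
    logger.debug('project saved : {}'.format(path))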
Example 43
async def pokemon_battle(ps_websocket_client, pokemon_battle_type):
    battle = await start_battle(ps_websocket_client, pokemon_battle_type)
    chatted = False
    while True:
        msg = await ps_websocket_client.receive_message()
        if battle_is_finished(msg):
            winner = msg.split(constants.WIN_STRING)[-1].split('\n')[0].strip()
            logger.debug("Winner: {}".format(winner))
            await ps_websocket_client.send_message(battle.battle_tag, [config.battle_ending_message])
            await ps_websocket_client.leave_battle(battle.battle_tag, save_replay=config.save_replay)
            return winner
        else:
            action_required = await async_update_battle(battle, msg)
            if action_required and not battle.wait:
                best_move = await async_pick_move(battle)

                playerSide = battle.user
                opponentSide = battle.opponent
                pCount = 6
                oCount = 6
                score = 5
                for pkm in opponentSide.reserve:
                    if pkm.hp == 0:
                        oCount -= 1
                for pkm in playerSide.reserve:
                    if pkm.hp == 0:
                        pCount -= 1
                if pCount == 1 and oCount == 1:
                    score = 0
                if oCount == 1:
                    score = 1
                if pCount == 1:
                    score = 2
                if pCount >= oCount + 3:
                    score = 3
                if chatted:
                    pass
                elif score == 0: # both sides have one pokemon left
                    chatted = True
                    await ps_websocket_client.send_message(battle.battle_tag, ['Close game!'])
                elif score == 1: # opponent has one pokemon left
                    chatted = True
                    await ps_websocket_client.send_message(battle.battle_tag, ['Nice try, guy.'])
                elif score == 2: # player has one pokemon left
                    chatted = True
                    await ps_websocket_client.send_message(battle.battle_tag, ['Please forfeit :c'])
                elif score == 3: # player has at least three more Pokemon than the opponent
                    chatted = True
                    await ps_websocket_client.send_message(battle.battle_tag, ['I must be playing against a bot.'])


                await ps_websocket_client.send_message(battle.battle_tag, best_move)
Example 44
def check_choice_band_or_specs(battle, damage_dealt):
    if (
        battle.opponent.active is None or
        battle.opponent.active.item != constants.UNKNOWN_ITEM or
        damage_dealt.crit or
        damage_dealt.move in constants.WEIGHT_BASED_MOVES or
        damage_dealt.move in constants.SPEED_BASED_MOVES or
        not battle.opponent.active.can_have_choice_item
    ):
        return

    try:
        move_dict = all_move_json[damage_dealt.move]
    except KeyError:
        logger.debug("Could not find the move {}, skipping choice item check".format(move))
        return

    if move_dict[constants.CATEGORY] == constants.PHYSICAL:
        choice_item = 'choiceband'
        spread = 'adamant', '0,252,0,0,0,0'
    elif move_dict[constants.CATEGORY] == constants.SPECIAL:
        choice_item = 'choicespecs'
        spread = 'modest', '0,0,0,252,0,0'
    else:
        # don't guess anything if the move was neither physical nor special
        return

    if battle.battle_type == constants.RANDOM_BATTLE:
        spread = 'serious', '85,85,85,85,85,85'

    max_damage = float('-inf')
    potential_battles = battle.prepare_battles(guess_mega_evo_opponent=False, join_moves_together=True)

    battle_copy = deepcopy(battle)
    battle_copy.user.from_json(battle.request_json)
    for b in potential_battles:
        if b.opponent.active.item != choice_item:
            b.opponent.active.set_spread(*spread)
            b.user.active.stats = battle_copy.user.active.stats

            state = b.create_state()

            damage = calculate_damage(state, constants.OPPONENT, damage_dealt.move, battle.user.last_used_move.move, calc_type='max')[0]
            max_damage = max(max_damage, damage)

    # dont infer if we did not find a damage amount
    if max_damage == float('-inf'):
        return

    if (damage_dealt.percent_damage * battle.user.active.max_hp) > (max_damage * 1.2):  # multiply to avoid rounding errors
        logger.debug("{} has {}".format(battle.opponent.active.name, choice_item))
        battle.opponent.active.item = choice_item
Example 45
async def parse_message(ps_websocket_client, msg, battles):
    split_msg = msg.split('|')

    if split_msg[1].strip() == 'updatechallenges':
        await ps_websocket_client.accept_challenge(split_msg, battles)
        return

    if split_msg[1].strip() == 'init' and split_msg[2].strip() == 'battle':
        battle = None
        for curr in battles:
            if curr.battle_tag == 'pending':
                battle = curr
                battle.battle_tag = split_msg[0].replace('>', '').strip()
                user_name = split_msg[-1].replace('☆', '').strip()
                battle.opponent.account_name = split_msg[4].replace(
                    user_name, '').replace('vs.', '').strip()
                battle.opponent.name = 'pending'
                break
        if battle == None:
            logger.debug("ERROR: can't find pending slot")
        return

    if 'battle' in split_msg[0]:
        battle = None
        i = 0
        for curr in battles:
            if curr.battle_tag == split_msg[0].replace('>', '').strip():
                battle = curr
                break
            i += 1
        if battle == None:
            logger.debug("ERROR: can't find battle slot")
            return
        if battle.opponent.name == 'pending':
            await initialize_battle(ps_websocket_client, battle, split_msg)
        elif battle.started == False:
            if battle.battle_type == constants.STANDARD_BATTLE:
                await run_start_standard_battle(ps_websocket_client, battle,
                                                msg)
                return
            else:
                await run_start_random_battle(ps_websocket_client, battle, msg)
                return
        else:
            ended = await pokemon_battle(ps_websocket_client, battle, msg)
            if (ended):
                battles[i] = Battle('empty')
            return

    if split_msg[1].strip() == 'pm' and '$' in split_msg[4]:
        await ps_websocket_client.parse_command(split_msg, battles)
        return
Example 46
    def get_statuses_list(account_id: int):

        response = requests.get(
            url=base_url + '/account/{account_id}/vacancy/statuses'.format(
                account_id=account_id),
            headers=default_headers)

        logger.debug(json.loads(response.text))

        if response.status_code == 200:
            return response.json()
        else:
            raise Exception(response)
Example 47
    def __init__(self,classdec):
        """
        input:
            - classdec: java ast aterm for this class
        stores following info about class:
            .name classname
            .decorators : ["public","static",...]
        """

        self.name = str(classdec.ClassDecHead[1])
        logger.debug("Java_Class.__init__ %s",self.name)
        #logger.debug(pp_aterm(classdec))
        self.decorators = decorators(classdec.ClassDecHead[0])
Example 48
    def get_me():
        """
        Get information about the user.
        :return: Response object
        """
        response = requests.get(base_url + '/me', headers=default_headers)

        logger.debug(json.loads(response.text))

        if response.status_code == 200:
            return response
        else:
            raise Exception('Error when try connect to API.')
Example 49
 def pach(self):
     logger.debug('Starting parsing')
     self.parse()
     logger.debug('Starting modeling')
     self.model()
     # Remove unnecessary facets wrt neg traces (if any)
     self.no_smt_simplify()
     # Remove unnecessary facets wrt neg traces
     # Apply smt_simplify.
     # Options are on the hull level, on every facet or none
     self.smt_simplify()
     self.generate_output_file()
     return self.complexity
Example 50
 def _ensure_request_json(self) -> None:
     if not self.url.endswith(".json"):
         dot_position = self.url.rfind(".")
         if dot_position > 0:
             self.url = self.url.replace(self.url[dot_position:], ".json")
         else:
             self.url = f"{self.url}.json"
         logger.warning(
             "URL suffix adjusted to a supported format. "
             "For more details see: "
             "https://config-client.amenezes.net/docs/1.-overview/#default-values"
         )
     logger.debug(f"Target URL configured: {self.url}")
Example 51
 async def receive_message(self):
     message = await self.websocket.recv()
     # check to see if we have a chat message and if we're tracking messages
     if "\n\n|c|" in message and config.track_chat == "True":
         chatRegex = re.compile('\\n\\n\|c\|(.+)\\n$')
         chatSearch = chatRegex.search(message)
         with open("chatMessages.txt", "a") as f:
             try:
                 f.write(chatSearch.group(1) + '\n')
             except:
                 f.write(f"ERROR: COULD NOT PARSE THIS MESSAGE:{message}\n")
     logger.debug("Received from websocket: {}".format(message))
     return message
Example 52
def dispatch(intent_request):
    """
    Called when the user specifies an intent for this bot.
    """
    logger.debug('dispatch userId={}, intentName={}'.format(
        intent_request['userId'], intent_request['currentIntent']['name']))
    intent_name = intent_request['currentIntent']['name']

    # Dispatch to your bot's intent handlers
    try:
        return intent_functions[intent_name](intent_request)
    except KeyError:
        raise Exception('Intent with name ' + intent_name + ' not supported')
Example 53
  def Delete(self, table, cond=None):
    self.lock.acquire()

    statement = 'DELETE FROM %s' % table
    if cond:
      statement += ' WHERE %s' % cond

    logger.debug(statement)
    self.cursor.execute(statement)
    affected = self.cursor.rowcount

    self.lock.release()
    return affected
Example 54
    def get_applicant_sources(account_id: int):

        response = requests.get(
            url=base_url + '/account/{account_id}/applicant/sources'.format(
                account_id=account_id),
            headers=default_headers)

        logger.debug(json.loads(response.text))

        if response.status_code == 200:
            return response
        else:
            raise Exception(response)
Example 55
    def Select(self, columns, table, cond=None):
        self.lock.acquire()

        statement = 'SELECT %s FROM %s' % (', '.join(columns), table)
        if cond:
            statement += ' WHERE %s' % cond

        logger.debug(statement)
        self.cursor.execute(statement)
        results = self.cursor.fetchall()

        self.lock.release()
        return results
Example 56
    def Delete(self, table, cond=None):
        self.lock.acquire()

        statement = 'DELETE FROM %s' % table
        if cond:
            statement += ' WHERE %s' % cond

        logger.debug(statement)
        self.cursor.execute(statement)
        affected = self.cursor.rowcount

        self.lock.release()
        return affected
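The Select and Delete methods above share a lock, log the statement, then execute it; an Insert written in the same (string-built, trusted-input-only) style, not part of the original class, might look like:

    def Insert(self, table, values):
        self.lock.acquire()

        # Values are interpolated like the other methods, so this is only safe for trusted input.
        statement = 'INSERT INTO %s VALUES (%s)' % (
            table, ', '.join(repr(v) for v in values))

        logger.debug(statement)
        self.cursor.execute(statement)
        affected = self.cursor.rowcount

        self.lock.release()
        return affected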
Example 57
    def run(self):
        # get the payload
        payload = self.request.get_json()

        # get user_id
        user_id = payload["wm.euId"]

        # convert user_id to email
        user_email = lookup_user_by_id(env, user_id)

        if len(user_email) <= 0:
            logger.info(
                f"User email not found, most likely system was accessed without auth"
            )
            user_email = "None"
        else:
            logger.info(f"User email: {user_email}")

        # remove tabs and spaces from e-mail before adding to payload
        payload["user_email"] = user_email.replace(" ",
                                                   "").replace("\t",
                                                               "").lower()

        logger.debug(f"Payload with user_email: {payload}")

        # change any keys with a . to a _ because mongo doesn't like .
        updated_payload = {}
        for k, v in payload.items():
            updated_payload[k.replace(".", "_")] = payload[k]

        logger.debug(f"Updated payload: {updated_payload}")

        # default to tasks table
        collection = db[tasks_tbl]

        # if it's a task, store it in task collection
        # with proper names
        if self.kind == "task":
            collection = db[tasks_tbl]

        # if it's a SWT started event, store it in the started collection
        elif self.kind == "started":
            collection = db[started_tbl]

        # if it's a survey event, store it in the survey collection
        elif self.kind == "survey":
            collection = db[survey_tbl]

        insert = collection.insert_one(updated_payload)
        logger.info(insert.inserted_id)
        logger.info("Successfully inserted data into DB")
Example 58
 def __load(self, label):
     """Get output of coresponding command or if __data_dir is set load
    content of coresponding file and store it to self.__data[label].
    If command fails, usually "['']" is stored."""
     assert label not in self.__data
     # Are we running on live system or from directory?
     if self.__data_dir == None:
         if label not in self.__access_list:
             self.__access_list[label] = self.config['commands'][label]
         # TODO: Add some timeouts, ulimit, nice... (?)
         logger.debug("Going to execute '%s' for '%s'" %
                      (self.config['commands'][label], label))
         # FIXME: is it OK to have shell=True here from a security standpoint?
         process = subprocess.Popen([self.config['commands'][label]],
                                    shell=True,
                                    stdout=subprocess.PIPE,
                                    stderr=subprocess.PIPE)
         # FIXME: is this the correct way to run this? Couldn't we get stuck when the data are too big?
         stdout, stderr = process.communicate()
         if len(stderr) != 0:
             logger.warn("Command '%s' failed with '%s'" %
                         (self.config['commands'][label], stderr))
             raise DataNotAvailable(
                 "Command '%s' failed with '%s'" %
                 (self.config['commands'][label], stderr))
         self.__data[label] = stdout.strip().split("\n")
     else:
         our_file = None
         our_file_rel = None
         for relative_file in self.config['files'][label]:
             f = os.path.join(self.__data_dir, relative_file)
             if os.path.isfile(f):
                 our_file = f
                 our_file_rel = relative_file
                 break
         if our_file:
             logger.debug("Going to load '%s' for '%s'" % (f, label))
             if label not in self.__access_list:
                 self.__access_list[label] = our_file_rel
             try:
                 fp = open(f, 'r')
             except IOError:
                 logger.warn("Failed to load %s for %s" % (f, label))
                 raise DataNotAvailable("Failed to load %s for %s" %
                                        (f, label))
             self.__data[label] = fp.read().splitlines()
             fp.close()
         else:
             logger.warn("Suitable file for %s not found" % label)
             raise DataNotAvailable("Suitable file for %s not found" %
                                    label)
Example 59
    def commit_condition_data(self):
        fields = {
            "air_quality_index": self.air_quality_index,
            "condition": self.condition, 
            "dewpoint": self.dewpoint,
            "humidity": self.humidity,
            "pressure": self.pressure,
            "temperature": self.temperature,
            "visibility": self.visibility,
            "wind_direction": self.wind_direction,
            "wind_speed": self.wind_speed,
            "wind_gust": self.wind_gust,
        }
        # tags = {"type":"temp"}
        tags = {}
        tags.update(self.extra_data.get("tags",{}))
        # print(tags)
        json_body = [
            {
                "measurement": "weather_data",
                "tags": self.extra_data.get("tags",{}),
                "time": time.asctime(self.updated_at),
                "fields": fields
            },
            {
                "measurement": "weather_data",
                "tags": tags,
                # "tags": {
                #     "region": "oshawa"
                # },

                "time": time.asctime(time.gmtime()),
                "fields": fields
            }
        ]
        # print(json_body)
        # tsdb.write_points(json_body, database='weather_data')
        # result = tsdb.query('select value from weather_data;',database='weather_data')
        # print("Result: {0}".format(result))

        # now = datetime.datetime.now(tz=datetime.timezone.utc)
        # hour = now.replace(minute=0,second=0,microsecond=0)
        ss = (f"""delete from "weather_data" where region='{tags['region']}' and time > '{time.strftime('%Y-%m-%dT%H:%M:%SZ',self.updated_at)}' and time < '{time.strftime('%Y-%m-%dT%H:%M:%SZ',time.gmtime())}' """)
        # ss = (f"""delete from "weather_data" where time > '{time.asctime(self.updated_at)}' and time < '{(str(hour))[:-6]}' """)
        # print(ss,self.updated_at)
        tsdb.query(ss)
        # print(json_body)
        tsdb.write_points(json_body, database='weather_data')

        logger.debug(f'Region: "{tags["region"]:10}". temp: {self.temperature} C')