def __init__(self, slicedModel):
     # TODO - remove or reduce dependency on slicedModel
     self.slicedModel = slicedModel
     
     self.infillSolidity = config.getfloat('fill', 'infill.solidity.ratio')
     self.infillWidthOverThickness = config.getfloat('fill', 'extrusion.lines.extra.spacer.scaler')
     self.infillPerimeterOverlap = config.getfloat('fill', 'infill.overlap.over.perimeter.scaler')
     self.extraShellsAlternatingSolidLayer = config.getint('fill', 'shells.alternating.solid')
     self.extraShellsBase = config.getint('fill', 'shells.base')
     self.extraShellsSparseLayer = config.getint('fill', 'shells.sparse')
     self.solidSurfaceThickness = config.getint('fill', 'fully.filled.layers')
     self.doubleSolidSurfaceThickness = self.solidSurfaceThickness + self.solidSurfaceThickness
     self.startFromChoice = config.get('fill', 'extrusion.sequence.start.layer')
     self.threadSequenceChoice = config.get('fill', 'extrusion.sequence.print.order')
     self.threadSequence = self.threadSequenceChoice.split(",")
     self.diaphragmPeriod = config.getint('fill', 'diaphragm.every.n.layers')
     self.diaphragmThickness = config.getint('fill', 'diaphragm.thickness')
     self.infillBeginRotation = math.radians(config.getfloat('fill', 'infill.rotation.begin'))
     self.infillBeginRotationRepeat = config.getint('fill', 'infill.rotation.repeat')
     self.infillOddLayerExtraRotation = math.radians(config.getfloat('fill', 'infill.rotation.odd.layer'))
     self.bridgeWidthMultiplier = config.getfloat('inset', 'bridge.width.multiplier.ratio')
     self.extrusionWidth = config.getfloat('carve', 'extrusion.width')
     self.infillWidth = self.extrusionWidth * self.infillWidthOverThickness * (0.7853)
     self.betweenWidth = self.extrusionWidth * self.infillWidthOverThickness * (0.7853)
     self.previousExtraShells = -1
     self.oldOrderedLocation = None
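
The constructor above reads every setting through a section-and-key interface (get, getint, getfloat), which matches a ConfigParser-style API. A minimal standalone sketch of that lookup style, using Python 3's configparser and a hypothetical [fill] snippet rather than the project's real configuration file:

import configparser

# Hypothetical values; the option names are the ones read in the example above.
config = configparser.ConfigParser()
config.read_string("""
[fill]
infill.solidity.ratio = 0.35
fully.filled.layers = 3
extrusion.sequence.print.order = perimeter,loops,infill
""")

solidity = config.getfloat('fill', 'infill.solidity.ratio')                # 0.35
solid_layers = config.getint('fill', 'fully.filled.layers')                # 3
sequence = config.get('fill', 'extrusion.sequence.print.order').split(',')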
Example 2
def main():
    print "Starting up..."
    state = SharedState()
    bot = BotFactory(config, state)
    reactor.connectSSL(config.get('server'), config.get('port'), bot, ssl.ClientContextFactory())
    cooperate(SlackBot(state, config).listen_to_slack())
    reactor.run()
Example 3
    def process_callback(self):
        """"
        Will Finish Facebook Login
        1 - Capture Token
        2 - Create User if necessary
        3 - Login user
        """

        token = facebook.get_access_token_from_code(
            self.callback_code,
            self._callback_url(),
            config.get('fb_api_key'),
            config.get('fb_secret')
        )

        access_token = token['access_token']
        provider_user_info = self.get_user_info(access_token)
        provider_id = provider_user_info['id']

        social_user = SocialUser.get_by_provider_and_uid('facebook', provider_id)
        if social_user:
            user = social_user.user.get()
        else:
            user = self.create_user(provider_user_info)
            self.create_social_user(user, provider_user_info, access_token)

        return user
Example 4
def send_email(section, key, value):
    client = boto3.client("ses",
                          aws_access_key_id=config.get(section, "aws_access_key"),
                          aws_secret_access_key=config.get(section, "aws_secret_key"),
                          region_name=config.get(section, "aws_region"))
    response = client.send_email(
        Source=config.get("email", "from"),
        Destination={
            'ToAddresses': [config.get(section, "email_to")],
            'CcAddresses': [],
            'BccAddresses': []
        },
        Message={
            'Subject': {
                'Data': 'SQS queue {} key {} value {} not changing'.format(section, key, value),
                'Charset': 'utf-8'
            },
            'Body': {
                'Text': {
                    'Data': 'SQS queue {} key {} value {} not changing'.format(section, key, value),
                    'Charset': 'utf-8',
                },
                'Html': {
                    'Data': 'SQS queue {} key {} value {} not changing'.format(section, key, value),
                    'Charset': 'utf-8',
                }
            }
        },
    )
Example 5
 def refresh(self):
     log.debug('Refresh: %s', self.__class__.__name__)
     if self.height <= 0:
         return
     if self.pos == 0:
         lines = self.built_lines[-self.height:]
     else:
         lines = self.built_lines[-self.height-self.pos:-self.pos]
     with_timestamps = config.get("show_timestamps")
     nick_size = config.get("max_nick_length")
     self._win.move(0, 0)
     self._win.erase()
     offset = 0
     for y, line in enumerate(lines):
         if line:
             msg = line.msg
             if line.start_pos == 0:
                 offset = self.write_pre_msg(msg, with_timestamps, nick_size)
             elif y == 0:
                 offset = self.compute_offset(msg, with_timestamps, nick_size)
             self.write_text(y, offset, line.prepend
                             + line.msg.txt[line.start_pos:line.end_pos])
         else:
             self.write_line_separator(y)
         if y != self.height-1:
             self.addstr('\n')
     self._win.attrset(0)
     self._refresh()
Example 6
    def login(self):
        self.status['text'] = _('Logging in...')
        self.button['state'] = self.theme_button['state'] = tk.DISABLED
        self.w.update_idletasks()
        try:
            self.session.login(config.get('username'), config.get('password'))
            self.status['text'] = ''
        except companion.VerificationRequired:
            # don't worry about authentication now - prompt on query
            self.status['text'] = ''
        except companion.ServerError as e:
            self.status['text'] = unicode(e)
        except Exception as e:
            if __debug__: print_exc()
            self.status['text'] = unicode(e)

        # Try to obtain exclusive lock on flight log ASAP
        if config.getint('output') & config.OUT_LOG_FILE:
            try:
                flightlog.openlog()
            except Exception as e:
                if __debug__: print_exc()
                if not self.status['text']:
                    self.status['text'] = unicode(e)

        if not getattr(sys, 'frozen', False):
            self.updater.checkForUpdates()	# Sparkle / WinSparkle does this automatically for packaged apps

        self.cooldown()
Example 7
  def __create_tags(self, databuffer):
    databuffer.create_tag("tag")
    add = databuffer.create_tag("add")
    rem = databuffer.create_tag("rem")

    add.set_property("background", config.get("colour", "diffadd"))
    rem.set_property("background", config.get("colour", "diffsub"))
Example 8
 def load(self):
     '''Load saved player configuration.'''
     if not self.__need_load_prefs:
         return
     
     # get uri
     uri = config.get("player", "uri")
     
     # get seek
     seek = int(config.get("player", "seek"))
     
     # get state 
     state = config.get("player", "state")
     
     # Init player state
     play = False
     self.logdebug("player load %s in state %s at %d", uri, state, seek)
     if config.getboolean("player", "play_on_startup") and state == "playing":
         play = True
         
     # load uri
     if uri:    
         song = MediaDB.get_song(uri)
         if song and song.exists():
             if not config.getboolean("player", "resume_last_progress") or not play:
                 seek = None
             self.set_song(song, play, self.get_crossfade() * 2, seek)
     self.emit("loaded")        
Example 9
 def get_next_song(self, manual=False):
     del self.select_rows[:]
     self.queue_draw()
     self.reset_error_items()
     
     if self.is_empty():
         if config.getboolean("setting", "empty_random"):
             return MediaDB.get_random_song("local")
     else:    
         if manual:
             if config.get("setting", "loop_mode") != "random_mode":
                 return self.get_manual_song()
             else:
                 return self.get_random_song()
         
         elif config.get("setting", "loop_mode") == "list_mode":
             return self.get_manual_song()
         
         elif config.get("setting", "loop_mode") == "order_mode":            
             return self.get_order_song()
         
         elif config.get("setting", "loop_mode") == "single_mode":
             if self.highlight_item != None:
                 return self.highlight_item.get_song()
             
         elif config.get("setting", "loop_mode") == "random_mode":    
             return self.get_random_song()
Example 10
    def login(self):
        self.status['text'] = _('Logging in...')
        self.button['state'] = tk.DISABLED
        self.w.update_idletasks()
        try:
            self.session.login(config.get('username'), config.get('password'))
            self.view_menu.entryconfigure(_('Status'), state=tk.NORMAL)
            self.status['text'] = ''
        except companion.VerificationRequired:
            # don't worry about authentication now - prompt on query
            self.status['text'] = ''
        except companion.ServerError as e:
            self.status['text'] = unicode(e)
        except Exception as e:
            if __debug__: print_exc()
            self.status['text'] = unicode(e)

        # Try to obtain exclusive lock on flight log ASAP
        if config.getint('output') & config.OUT_LOG_FILE:
            try:
                flightlog.openlog()
            except Exception as e:
                if __debug__: print_exc()
                if not self.status['text']:
                    self.status['text'] = unicode(e)

        if not self.status['text'] and monitor.restart_required():
            self.status['text'] = _('Re-start Elite: Dangerous for automatic log entries')	# Status bar message on launch
        elif not getattr(sys, 'frozen', False):
            self.updater.checkForUpdates()	# Sparkle / WinSparkle does this automatically for packaged apps

        self.cooldown()
Example 11
    def login(self):
        self.status['text'] = 'Logging in...'
        self.button['state'] = tk.DISABLED
        self.w.update_idletasks()
        try:
            self.session.login(config.get('username'), config.get('password'))
            self.status['text'] = ''

            # Try to obtain exclusive lock on flight log ASAP
            if config.getint('output') & config.OUT_LOG:
                try:
                    flightlog.openlog()
                except Exception as e:
                    if __debug__: print_exc()
                    self.status['text'] = str(e)

        except companion.VerificationRequired:
            # don't worry about authentication now - prompt on query
            self.status['text'] = ''
        except companion.ServerError as e:
            self.status['text'] = str(e)
        except Exception as e:
            if __debug__: print_exc()
            self.status['text'] = str(e)
        self.cooldown()
Example 12
def main():
    username = config.get("username", "slask")
    icon = config.get("icon", ":poop:")

    # ignore messages we sent ourselves
    msguser = request.form.get("user_name", "").strip()
    if username == msguser or msguser.lower() == "slackbot":
        return ""

    text = None
    try:
        text = handle_message(request.form) 
    except Exception as err:
        text = str(err)

    if not text:
        return ""
    
    response = {
        "text": text,
        "username": username,
        "icon_emoji": icon,
        "parse": "full",
    }
    return json.dumps(response)
Example 13
def start_spark(timeout=None, max_items_per_rdd_sent=None):
    sc = SparkContext("local[4]", "twitter.trending")
    ssc = StreamingContext(sc, 5)

    ssc.checkpoint('hdfs://localhost:9000/user/spark/checkpoint/')

    kafka_params = {
        'zookeeper.connect': config.get('zookeeper', 'host'),
        'group.id': config.get('kafka', 'group_id'),
        'metadata.broker.list': config.get('kafka', 'hosts')
    }

    ksc = KafkaUtils.createDirectStream(ssc,
                                        [config.get('kafka', 'topic')],
                                        kafka_params)

    hashtag_counts = get_word_counts(ksc)
    filtered_tweet_count = filter_tweets(hashtag_counts)
    send_dstream_data(filtered_tweet_count, max_items_per_rdd_sent)
    ssc.start()
    if timeout:
        ssc.awaitTermination(timeout)
        ssc.stop(stopSparkContext=True, stopGraceFully=True)
    else:
        ssc.awaitTermination()
Example 14
def sync(musthavelatest=config.get("pkgsync", {}).get("musthavelatest", ""),
            musthave=config.get("pkgsync", {}).get("musthave", ""),
            mayhave=config.get("pkgsync", {}).get("mayhave", ""),
            maynothave=config.get("pkgsync", {}).get("maynothave", "")):
    """
    Sync a system's packages with a pre-defined manifest. 
    The ``mayhave`` list currently gets ignored.
    """
    pkgspec = {}
    if os.path.isfile(musthavelatest):
        pkgspec.update(generate_package_list(musthavelatest, "latest"))
    
    if os.path.isfile(musthave):
        pkgspec.update(generate_package_list(musthave, "installed"))
    
    if os.path.isfile(maynothave):
        pkgspec.update(generate_package_list(maynothave, "removed"))
    
    if len(pkgspec) == 0:
        print yellow("No packages specified")
        return
        
    import apt
    ok, fail = apt.ensure(**pkgspec)
    if fail == 0:
        print green("%s: %d packages sync'ed." % (env.host, ok))
    elif ok == 0:
        print red("%s: %d packages failed to sync." % (env.host, fail))
    else:
        print yellow("%s: %d packages sync'd and %d packages failed" % (env.host, ok, fail))
Example 15
 def __init__(self, **kwargs):
     self.triplestore_url = kwargs.get("triplestore_url", None)
     if not self.triplestore_url:
         self.triplestore_url = "{}:{}/{}".format(
             config.get('triplestore').get('host'),
             config.get('triplestore').get('port'),
             config.get('triplestore').get('path'))
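
A note on the lookup above: config.get('triplestore').get('host') raises AttributeError when the 'triplestore' section is missing, because the outer get returns None. If the config object is a plain nested dict, a defensive variant looks like the standalone sketch below (the sample values and defaults are illustrative, not the project's):

# Standalone sketch; section name from the example, values/defaults illustrative.
config = {'triplestore': {'host': 'localhost', 'port': 9999}}

ts = config.get('triplestore') or {}
triplestore_url = "{}:{}/{}".format(ts.get('host', 'localhost'),
                                    ts.get('port', 9999),
                                    ts.get('path', 'sparql'))
print(triplestore_url)   # localhost:9999/sparql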
Example 16
    def post(self):
        data = simplejson.loads(self.request.body)
        logging.info("Received event for PR %s" % data['pull_request']['number'])
        pullrequest = get_pullrequest(data['repository']['full_name'],
                          data['pull_request']['number'])

        # Update Redmine issues
        update_redmine_issues(pullrequest, data)

        # Update PR description
        update_pr_description(pullrequest)

        # Trigger jenkins jobs
        jobs = config.get('repository.mapping.%s:%s' %
                (data['repository']['full_name'],
                data['pull_request']['base']['ref'])
        )

        if not jobs:
            jobs = config.get('repository.mapping.%s' %
                data['repository']['full_name']
            )

        if jobs:
            if isinstance(jobs, list):
                for job in jobs:
                    run_jenkins_job(job)
            else:
                run_jenkins_job(jobs)
        else:
            logging.info("No Jenkins job mappings found")
Example 17
def writelog(timestamp, system, edsmlookupfn):

    try:
        # Look up the system before adding it to the log, since adding it to the log has the side-effect of creating it
        edsmlookupfn()

        r = requests.get(
            "http://www.edsm.net/api-logs-v1/set-log?commanderName=%s&apiKey=%s&systemName=%s&dateVisited=%s&fromSoftware=%s&fromSoftwareVersion=%s"
            % (
                urllib.quote(config.get("edsm_cmdrname")),
                urllib.quote(config.get("edsm_apikey")),
                urllib.quote(system),
                urllib.quote(time.strftime("%Y-%m-%d %H:%M:%S", time.gmtime(timestamp))),
                urllib.quote(applongname),
                urllib.quote(appversion),
            ),
            timeout=EDSM._TIMEOUT,
        )
        r.raise_for_status()
        reply = r.json()
        (msgnum, msg) = reply["msgnum"], reply["msg"]
    except:
        if __debug__:
            print_exc()
        raise Exception(_("Error: Can't connect to EDSM"))

    # Message numbers: 1xx = OK, 2xx = fatal error, 3xx = error (but not generated in practice), 4xx = ignorable errors
    if msgnum // 100 not in (1, 4):
        raise Exception(_("Error: EDSM {MSG}").format(MSG=msg))

    if not config.getint("edsm_historical"):
        config.set("edsm_historical", 1)
        thread = threading.Thread(target=export_historical, name="EDSM export")
        thread.daemon = True
        thread.start()
Example 18
    def add_message(self, txt, time=None, nickname=None,
                    nick_color=None, history=None, user=None, highlight=False,
                    identifier=None, str_time=None, jid=None, ack=0):
        """
        Create a message and add it to the text buffer
        """
        msg = self.make_message(txt, time, nickname, nick_color, history,
                                user, identifier, str_time=str_time,
                                highlight=highlight, jid=jid, ack=ack)
        self.messages.append(msg)

        while len(self.messages) > self.messages_nb_limit:
            self.messages.pop(0)

        ret_val = None
        show_timestamps = config.get('show_timestamps')
        nick_size = config.get('max_nick_length')
        for window in self.windows: # make the associated windows
                                    # build the lines from the new message
            nb = window.build_new_message(msg, history=history,
                                          highlight=highlight,
                                          timestamp=show_timestamps,
                                          nick_size=nick_size)
            if ret_val is None:
                ret_val = nb
            if window.pos != 0:
                window.scroll_up(nb)

        return ret_val or 1
Example 19
 def __init__(self):
     self._connection = None
     self._server = config.get('database', 'server')
     self._database = config.get('database', 'database')
     self._user = config.get('database', 'username')
     self._password = config.get('database', 'password')
     self._port = config.get('database', 'port')
Example 20
    def __init__(self,
                 node,
                 job_finder,
                 prepare_namespaces_states=False,
                 prepare_flow_stats=False,
                 statistics=None):
        logger.info("Created NodeInfoUpdater")
        self.__node = node
        self.statistics = statistics
        self.job_finder = job_finder
        self._namespaces_states = CachedGzipResponse()
        self._flow_stats = {}
        self.__tq = timed_queue.TimedQueue()
        self.__session = elliptics.Session(self.__node)
        wait_timeout = config.get('elliptics', {}).get('wait_timeout') or config.get('wait_timeout', 5)
        self.__session.set_timeout(wait_timeout)
        self.__nodeUpdateTimestamps = (time.time(), time.time())

        self.__cluster_update_lock = threading.Lock()

        if prepare_namespaces_states and statistics is None:
            raise AssertionError('Statistics is required for namespaces states calculation')
        if prepare_flow_stats and statistics is None:
            raise AssertionError('Statistics is required for flow stats calculation')
        self._prepare_namespaces_states = prepare_namespaces_states
        self._prepare_flow_stats = prepare_flow_stats
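
The timeout lookup above falls back from the section-level key to a top-level key and finally to a literal default. Because the chain is built with or, an explicit wait_timeout of 0 would also fall through to the default. A standalone sketch (assuming plain dicts) that keeps an explicit zero:

# An explicit 0 in the section is honoured; the `or` chain would discard it.
config = {'elliptics': {'wait_timeout': 0}}

elliptics_cfg = config.get('elliptics', {})
if 'wait_timeout' in elliptics_cfg:
    wait_timeout = elliptics_cfg['wait_timeout']   # keeps 0
else:
    wait_timeout = config.get('wait_timeout', 5)
print(wait_timeout)   # 0, whereas the `or` chain yields 5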
Example 21
def prefs_changed(cmdr, is_beta):
    changed = config.getint('inara_out') != this.log.get()
    config.set('inara_out', this.log.get())

    # Override standard URL functions
    if config.get('system_provider') == 'Inara':
        this.system_link['url'] = this.system
    if config.get('station_provider') == 'Inara':
        this.station_link['url'] = this.station or this.system

    if cmdr and not is_beta:
        this.cmdr = cmdr
        this.FID = None
        cmdrs = config.get('inara_cmdrs') or []
        apikeys = config.get('inara_apikeys') or []
        if cmdr in cmdrs:
            idx = cmdrs.index(cmdr)
            apikeys.extend([''] * (1 + idx - len(apikeys)))
            changed |= (apikeys[idx] != this.apikey.get().strip())
            apikeys[idx] = this.apikey.get().strip()
        else:
            config.set('inara_cmdrs', cmdrs + [cmdr])
            changed = True
            apikeys.append(this.apikey.get().strip())
        config.set('inara_apikeys', apikeys)

        if this.log.get() and changed:
            this.newuser = True	# Send basic info at next Journal event
            add_event('getCommanderProfile', time.strftime('%Y-%m-%dT%H:%M:%SZ', time.gmtime()), { 'searchName': cmdr })
            call()
Example 22
def update_redmine_issues(pullrequest, data):
    issues = get_issues_from_pr(pullrequest)
    if not issues:
        logging.info("No issues found")
    else:
        logging.info(
            "Updating Redmine issues %s" % ", ".join(map(str, issues))
        )

    if issues and not config.get('dry-run'):
        c = Corgi(
            config['redmine.url'], config['redmine.auth_key'],
            config.get('user.mapping.%s' % data['sender']['login'])
        )
        if not c.connected:
            logging.error("Connection to Redmine failed")
            return

    if data['action'] == 'closed' and data['pull_request']['merged']:
        data['action'] = 'merged'
    status = config.get('redmine.status.on-pr-%s' % data['action'])
    update_message = create_issue_update(pullrequest, data)
    logging.debug(update_message)

    if not config.get('dry-run'):
        for issue in issues:
            c.update_issue(issue, update_message, status)
            logging.info("Added comment to issue %s" % issue)
Example 23
    def __init__(self, node):
        self.node = node
        self.meta_session = self.node.meta_session

        self.commands = {}
        self.history = {}
        self.active_hosts = []

        self.__tq = timed_queue.TimedQueue()
        self.__tq.start()

        self.__tq.add_task_in(self.MAKE_IOLOOP, 0,
            self._make_tq_thread_ioloop)

        self.__tq.add_task_in(self.HISTORY_FETCH,
            5, self._fetch_history)

        self.__tq.add_task_in(self.STATE_FETCH,
            5, self._fetch_states)

        self.minion_headers = ({'X-Auth': config['minions']['authkey']}
                               if config.get('minions', {}).get('authkey') else
                               None)
        self.minion_port = config.get('minions', {}).get('port', 8081)

        self.__commands_lock = threading.Lock()
        self.__active_hosts_lock = threading.Lock()
Example 24
    def apply(self):
        credentials = (config.get('username'), config.get('password'))
        config.set('username', self.username.get().strip())
        config.set('password', self.password.get().strip())

        config.set('output',
                   (self.out_eddn.get() and config.OUT_EDDN) +
                   (self.out_bpc.get() and config.OUT_BPC) +
                   (self.out_td.get() and config.OUT_TD) +
                   (self.out_csv.get() and config.OUT_CSV) +
                   (self.out_ship_eds.get() and config.OUT_SHIP_EDS) +
                   (self.out_log_file.get() and config.OUT_LOG_FILE) +
                   (self.out_ship_coriolis.get() and config.OUT_SHIP_CORIOLIS) +
                   (self.out_log_edsm.get() and config.OUT_LOG_EDSM) +
                   (self.out_log_auto.get() and config.OUT_LOG_AUTO))
        config.set('outdir', self.outdir.get().startswith('~') and join(config.home, self.outdir.get()[2:]) or self.outdir.get())

        config.set('edsm_autoopen', self.edsm_autoopen.get())
        config.set('edsm_cmdrname', self.edsm_cmdr.get().strip())
        config.set('edsm_apikey',   self.edsm_apikey.get().strip())

        if platform in ['darwin','win32']:
            config.set('hotkey_code', self.hotkey_code)
            config.set('hotkey_mods', self.hotkey_mods)
            config.set('hotkey_always', int(not self.hotkey_only.get()))
            config.set('hotkey_mute', int(not self.hotkey_play.get()))

        config.set('anonymous', self.out_anon.get())

        self._destroy()
        if credentials != (config.get('username'), config.get('password')) and self.callback:
            self.callback()
Example 25
 def get_previous_song(self):
     del self.select_rows[:]
     self.queue_draw()
     self.reset_error_items()
     
     if self.is_empty():
         if config.get("setting", "empty_random") == "true":
             return MediaDB.get_random_song("local")
     else:    
         valid_items = self.get_valid_items()
         if not valid_items: return None
         
         if config.get("setting", "loop_mode") == "random_mode":
             return self.get_random_song()
         
         if self.highlight_item != None:
             if self.highlight_item in valid_items:
                 current_index = valid_items.index(self.highlight_item)
                 prev_index = current_index - 1
                 if prev_index < 0:
                     prev_index = len(valid_items) - 1
                 highlight_item = valid_items[prev_index]    
         else:        
             highlight_item = valid_items[0]
         self.set_highlight(highlight_item)    
         return highlight_item.get_song()
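
Note that this example compares the raw string (config.get("setting", "empty_random") == "true") while get_next_song above uses config.getboolean for the same option. If the config object mirrors ConfigParser, getboolean also accepts 1/yes/on, which the raw-string comparison would miss. A standalone illustration:

import configparser

cfg = configparser.ConfigParser()
cfg.read_string("[setting]\nempty_random = yes\n")
print(cfg.getboolean("setting", "empty_random"))        # True
print(cfg.get("setting", "empty_random") == "true")     # False: the raw value is "yes"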
Example 26
    def handleRealmList(self, data):
        ST_REALM = Struct('Realm',
                ULInt8('icon'),
                ULInt8('lock'),
                ULInt8('color'),
                CString('name'),
                CString('address'),
                LFloat32('population'),
                ULInt8('nb_characters'),
                ULInt8('timezone'),
                ULInt8('unk')
        )

        ST_REALM_LIST_S_HEADER = Struct('REALM_LIST_S_HEADER',
            ULInt8('opcode'),
            ULInt16('size')
        )

        ST_REALM_LIST_S_PAYLOAD = Struct('REALM_LIST_S',
            ULInt32('unk1'),
            ULInt16('nb_realms'),
            Array(lambda ctx: ctx['nb_realms'], ST_REALM),
            ULInt8('unk2'),
            ULInt8('unk3')
        )

        ST_REALM_LIST_S_FULL = Struct('REALM_LIST_S',
            Embed(ST_REALM_LIST_S_HEADER),
            Embed(ST_REALM_LIST_S_PAYLOAD)
        )

        try:
            realms = ST_REALM_LIST_S_FULL.parse(data)
            realmsList = realms.Realm
        except:
            return None

        if len(data)-3 < realms.size:
            return None
        
        for r in realmsList:
            r.address = '%s:%s' % (config.get('GLOBAL', 'proxyhost'), config.get('WORLD', 'localport'))
            r.name += ' - PROXY'

        c = Container(
            unk1 = 0,
            nb_realms = len(realmsList),
            Realm = realmsList,
            unk2 = 0x10,
            unk3 = 0
        )
        pkt_p = ST_REALM_LIST_S_PAYLOAD.build(c)
        c = Container(
            opcode = 0x10,
            size = len(pkt_p)
        )
        pkt_h = ST_REALM_LIST_S_HEADER.build(c)

        return pkt_h + pkt_p
Example 27
    def authorize(self, payload):
        # Handle OAuth authorization code callback. Returns access token if successful, otherwise raises CredentialsError
        if not '?' in payload:
            print 'Auth\tMalformed response "%s"' % payload.encode('utf-8')
            raise CredentialsError()	# Not well formed

        data = urlparse.parse_qs(payload[payload.index('?')+1:])
        if not self.state or not data.get('state') or data['state'][0] != self.state:
            print 'Auth\tUnexpected response "%s"' % payload.encode('utf-8')
            raise CredentialsError()	# Unexpected reply

        if not data.get('code'):
            print 'Auth\tNegative response "%s"' % payload.encode('utf-8')
            if data.get('error_description'):
                raise CredentialsError('Error: %s' % data['error_description'][0])
            elif data.get('error'):
                raise CredentialsError('Error: %s' % data['error'][0])
            elif data.get('message'):
                raise CredentialsError('Error: %s' % data['message'][0])
            else:
                raise CredentialsError()

        try:
            r = None
            data = {
                'grant_type': 'authorization_code',
                'client_id': CLIENT_ID,
                'code_verifier': self.verifier,
                'code': data['code'][0],
                'redirect_uri': protocolhandler.redirect,
            }
            r = self.session.post(SERVER_AUTH + URL_TOKEN, data=data, timeout=auth_timeout)
            data = r.json()
            if r.status_code == requests.codes.ok:
                print 'Auth\tNew token for %s' % self.cmdr.encode('utf-8')
                cmdrs = config.get('cmdrs')
                idx = cmdrs.index(self.cmdr)
                tokens = config.get('fdev_apikeys') or []
                tokens = tokens + [''] * (len(cmdrs) - len(tokens))
                tokens[idx] = data.get('refresh_token', '')
                config.set('fdev_apikeys', tokens)
                config.save()	# Save settings now for use by command-line app
                return data.get('access_token')
        except:
            print 'Auth\tCan\'t get token for %s' % self.cmdr.encode('utf-8')
            print_exc()
            if r: self.dump(r)
            raise CredentialsError()

        print 'Auth\tCan\'t get token for %s' % self.cmdr.encode('utf-8')
        self.dump(r)
        if data.get('error_description'):
            raise CredentialsError('Error: %s' % data['error_description'])
        elif data.get('error'):
            raise CredentialsError('Error: %s' % data['error'])
        elif data.get('message'):
            raise CredentialsError('Error: %s' % data['message'])
        else:
            raise CredentialsError()
Example 28
 def login(self, username=None, password=None, passkey=None):
     password = password or config.get("KG", "password")
     username = username or config.get("KG", "username")
     response = session.post(self.baseURL + "/takelogin.php", data={"username": username, "password": password}).text
     if response.find('action="takelogin.php"') != -1:
         print response
         raise KGAPIException("Failed to log in")
     self.loggedIn = True
Example 29
def init_socket_server(loop):
    server = yield from loop.create_server(LinkedDataFragmentsServer, 
                                           config.get('port'), 
                                           config.get(7000))
    if config.get('debug'):
        print("Running Socket Server at {} {}".format(config.get('port'), 
                                                      config.get(7000)))
    return server
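
One detail worth flagging: with a dict-like config, config.get(7000) looks up the literal key 7000 and returns None if it is absent; it does not supply a default port. If the intent was to fall back to port 7000 when none is configured (an assumption about intent, not something this project states), the usual spelling is:

# Assumed intent only: key lookup with a default value.
port = config.get('port', 7000)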
Example 30
 def __on_config_changed(self, dispatcher, section, option, value):
     if section == "plugins" and option.startswith("globalkey_") and "_last" not in option:
         self.__try_unbind(config.get(section, option + "_last", value))
         
         if value:            
             self.__bind(config.get(section, option, value), option)
             
         config.set(section, option + "_last", value)
Example 31
from config import config
from manual_locks import manual_locker

i = iter(xrange(100))
logger.info("trace %d" % (i.next()))


def term_handler(signo, frame):
    # required to guarantee execution of cleanup functions registered
    # with atexit.register
    sys.exit(0)


signal.signal(signal.SIGTERM, term_handler)

nodes = config.get('elliptics', {}).get('nodes',
                                        []) or config["elliptics_nodes"]
logger.debug("config: %s" % str(nodes))

logger.info("trace %d" % (i.next()))
log = elliptics.Logger(str(config["dnet_log"]), config["dnet_log_mask"])

node_config = elliptics.Config()
node_config.io_thread_num = config.get('io_thread_num', 1)
node_config.nonblocking_io_thread_num = config.get('nonblocking_io_thread_num',
                                                   1)
node_config.net_thread_num = config.get('net_thread_num', 1)

logger.info('Node config: io_thread_num {0}, nonblocking_io_thread_num {1}, '
            'net_thread_num {2}'.format(node_config.io_thread_num,
                                        node_config.nonblocking_io_thread_num,
                                        node_config.net_thread_num))
Example 32
    def _tag_record(self, cr, rec, data_node=None, mode=None):
        rec_model = rec.get("model").encode('ascii')
        model = self.pool[rec_model]
        rec_id = rec.get("id", '').encode('ascii')
        rec_context = rec.get("context", {})
        if rec_context:
            rec_context = unsafe_eval(rec_context)

        if self.xml_filename and rec_id:
            rec_context['install_mode_data'] = dict(
                xml_file=self.xml_filename,
                xml_id=rec_id,
                model=rec_model,
            )

        self._test_xml_id(rec_id)
        # in update mode, the record won't be updated if the data node explicitly
        # opts out using @noupdate="1". A second check will be performed in
        # ir.model.data#_update() using the record's ir.model.data `noupdate` field.
        if self.isnoupdate(data_node) and self.mode != 'init':
            # check if the xml record has no id, skip
            if not rec_id:
                return None

            if '.' in rec_id:
                module, rec_id2 = rec_id.split('.')
            else:
                module = self.module
                rec_id2 = rec_id
            id = self.pool['ir.model.data']._update_dummy(
                cr, self.uid, rec_model, module, rec_id2)
            if id:
                # if the resource already exists, don't update it but store
                # its database id (can be useful)
                self.idref[rec_id] = int(id)
                return None
            elif not self.nodeattr2bool(rec, 'forcecreate', True):
                # if it doesn't exist and we shouldn't create it, skip it
                return None
            # else create it normally

        res = {}
        for field in rec.findall('./field'):
            #TODO: most of this code is duplicated above (in _eval_xml)...
            f_name = field.get("name").encode('utf-8')
            f_ref = field.get("ref", '').encode('utf-8')
            f_search = field.get("search", '').encode('utf-8')
            f_model = field.get("model", '').encode('utf-8')
            if not f_model and f_name in model._fields:
                f_model = model._fields[f_name].comodel_name
            f_use = field.get("use", '').encode('utf-8') or 'id'
            f_val = False

            if f_search:
                q = unsafe_eval(f_search, self.idref)
                assert f_model, 'Define an attribute model="..." in your .XML file !'
                f_obj = self.pool[f_model]
                # browse the objects searched
                s = f_obj.browse(cr, self.uid, f_obj.search(cr, self.uid, q))
                # column definitions of the "local" object
                _fields = self.pool[rec_model]._fields
                # if the current field is many2many
                if (f_name in _fields) and _fields[f_name].type == 'many2many':
                    f_val = [(6, 0, map(lambda x: x[f_use], s))]
                elif len(s):
                    # otherwise (we are probably in a many2one field),
                    # take the first element of the search
                    f_val = s[0][f_use]
            elif f_ref:
                if f_name in model._fields and model._fields[
                        f_name].type == 'reference':
                    val = self.model_id_get(cr, f_ref)
                    f_val = val[0] + ',' + str(val[1])
                else:
                    f_val = self.id_get(cr, f_ref)
            else:
                f_val = _eval_xml(self, field, self.pool, cr, self.uid,
                                  self.idref)
                if f_name in model._fields:
                    if model._fields[f_name].type == 'integer':
                        f_val = int(f_val)
            res[f_name] = f_val

        id = self.pool['ir.model.data']._update(
            cr,
            self.uid,
            rec_model,
            self.module,
            res,
            rec_id or False,
            not self.isnoupdate(data_node),
            noupdate=self.isnoupdate(data_node),
            mode=self.mode,
            context=rec_context)
        if rec_id:
            self.idref[rec_id] = int(id)
        if config.get('import_partial'):
            cr.commit()
        return rec_model, id
Example 33
    def authorize(self, payload):
        # Handle OAuth authorization code callback. Returns access token if successful, otherwise raises CredentialsError
        if not '?' in payload:
            print 'Auth\tMalformed response "%s"' % payload.encode('utf-8')
            raise CredentialsError()  # Not well formed

        data = urlparse.parse_qs(payload[payload.index('?') + 1:])
        if not self.state or not data.get(
                'state') or data['state'][0] != self.state:
            print 'Auth\tUnexpected response "%s"' % payload.encode('utf-8')
            raise CredentialsError()  # Unexpected reply

        if not data.get('code'):
            print 'Auth\tNegative response "%s"' % payload.encode('utf-8')
            if data.get('error_description'):
                raise CredentialsError('Error: %s' %
                                       data['error_description'][0])
            elif data.get('error'):
                raise CredentialsError('Error: %s' % data['error'][0])
            elif data.get('message'):
                raise CredentialsError('Error: %s' % data['message'][0])
            else:
                raise CredentialsError()

        try:
            r = None
            data = {
                'grant_type': 'authorization_code',
                'client_id': CLIENT_ID,
                'code_verifier': self.verifier,
                'code': data['code'][0],
                'redirect_uri': protocolhandler.redirect,
            }
            r = self.session.post(SERVER_AUTH + URL_TOKEN,
                                  data=data,
                                  timeout=auth_timeout)
            data = r.json()
            if r.status_code == requests.codes.ok:
                print 'Auth\tNew token for %s' % self.cmdr.encode('utf-8')
                cmdrs = config.get('cmdrs')
                idx = cmdrs.index(self.cmdr)
                tokens = config.get('fdev_apikeys') or []
                tokens = tokens + [''] * (len(cmdrs) - len(tokens))
                tokens[idx] = data.get('refresh_token', '')
                config.set('fdev_apikeys', tokens)
                config.save()  # Save settings now for use by command-line app
                return data.get('access_token')
        except:
            print 'Auth\tCan\'t get token for %s' % self.cmdr.encode('utf-8')
            print_exc()
            if r: self.dump(r)
            raise CredentialsError()

        print 'Auth\tCan\'t get token for %s' % self.cmdr.encode('utf-8')
        self.dump(r)
        if data.get('error_description'):
            raise CredentialsError('Error: %s' % data['error_description'])
        elif data.get('error'):
            raise CredentialsError('Error: %s' % data['error'])
        elif data.get('message'):
            raise CredentialsError('Error: %s' % data['message'])
        else:
            raise CredentialsError()
Example 34
 def has_permission(self, request, view):
     return config.get('enable_teams')
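
With the has_permission(self, request, view) signature the return value is treated as a boolean, so if this config store hands back strings, a value such as "false" would still count as truthy and grant access. A hedged sketch under that assumption (the accepted spellings are illustrative):

# Sketch assuming the stored flag may arrive as a string such as "false" or "0".
def has_permission(self, request, view):
    value = config.get('enable_teams', False)
    return str(value).strip().lower() in ('1', 'true', 'yes', 'on')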
Example 35
# Setup logging
logging.basicConfig(filename=config['log_file'], level=logging.INFO)
start_message = 'Started taking %(period)s snapshots at %(date)s' % {
    'period': period,
    'date': datetime.today().strftime('%d-%m-%Y %H:%M:%S')
}
message += start_message + "\n\n"
logging.info(start_message)

# Get settings from config.py
aws_access_key = config['aws_access_key']
aws_secret_key = config['aws_secret_key']
ec2_region_name = config['ec2_region_name']
ec2_region_endpoint = config['ec2_region_endpoint']
sns_arn = config.get('arn')
proxyHost = config.get('proxyHost')
proxyPort = config.get('proxyPort')

region = RegionInfo(name=ec2_region_name, endpoint=ec2_region_endpoint)

# Number of snapshots to keep
keep_week = config['keep_week']
keep_day = config['keep_day']
keep_month = config['keep_month']
keep_hour = config['keep_hour']
count_success = 0
count_total = 0

# Connect to AWS using the credentials provided above or in Environment vars or using IAM role.
print 'Connecting to AWS'
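
This script mixes required keys read with config['aws_access_key'] (which fails with a bare KeyError) and optional keys read with config.get('arn'). A small sketch of validating the required settings up front with a clearer message (key names taken from the example, error text illustrative):

# Fail fast on missing required settings instead of relying on a bare KeyError.
required = ('aws_access_key', 'aws_secret_key', 'ec2_region_name',
            'ec2_region_endpoint', 'keep_week', 'keep_day', 'keep_month', 'keep_hour')
missing = [key for key in required if key not in config]
if missing:
    raise SystemExit('Missing config settings: %s' % ', '.join(missing))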
Example 36
    def run(self):
        try:
            hostname = get_hostname()
        except HostnameException as e:
            logging.critical(
                "{} - You can define one in datadog.yaml or in your hosts file"
                .format(e))
            sys.exit(1)

        logging.info("Starting the agent, hostname: %s", hostname)

        # init Forwarder
        logging.info("Starting the Forwarder")
        api_key = config.get('api_key')
        dd_url = config.get('dd_url')
        if not dd_url:
            logging.error('No Datadog URL configured - cannot continue')
            sys.exit(1)
        if not api_key:
            logging.error('No API key configured - cannot continue')
            sys.exit(1)

        # get proxy settings
        proxies = get_proxy()
        logging.debug('Proxy configuration used: %s', proxies)

        forwarder = Forwarder(
            api_key,
            dd_url,
            proxies=proxies,
        )
        forwarder.start()

        # aggregator
        aggregator = MetricsAggregator(
            hostname,
            interval=config.get('aggregator_interval'),
            expiry_seconds=(config.get('min_collection_interval') +
                            config.get('aggregator_expiry_seconds')),
            recent_point_threshold=config.get('recent_point_threshold'),
            histogram_aggregates=config.get('histogram_aggregates'),
            histogram_percentiles=config.get('histogram_percentiles'),
        )

        # serializer
        serializer = Serializer(
            aggregator,
            forwarder,
        )

        # instantiate collector
        collector = Collector(config, aggregator)
        collector.load_check_classes()
        collector.instantiate_checks()

        # instantiate AgentRunner
        runner = AgentRunner(collector, serializer, config)

        # instantiate API
        api = APIServer(config, aggregator.stats)

        handler = SignalHandler()
        # components
        handler.register('runner', runner)
        handler.register('forwarder', forwarder)
        handler.register('api', api)
        # signals
        handler.handle(signal.SIGTERM)
        handler.handle(signal.SIGINT)

        # start signal handler
        handler.start()

        runner.start()
        api.start()

        runner.join()
        logging.info("Agent done...")

        api.join()
        logging.info("API done...")

        handler.stop()
        handler.join()
        logging.info("Signal handler done...")

        logging.info("Thank you for shopping at DataDog! Come back soon!")

        sys.exit(0)
Example 37
import jwt

from datetime import datetime, timedelta

from apierrors import errors
from config import config
from database.model.auth import Role

from .auth_type import AuthType
from .payload import Payload

token_secret = config.get('secure.auth.token_secret')


class Token(Payload):
    default_expiration_sec = config.get(
        'apiserver.auth.default_expiration_sec')

    def __init__(self,
                 exp=None,
                 iat=None,
                 nbf=None,
                 env=None,
                 identity=None,
                 entities=None,
                 **_):
        super(Token, self).__init__(AuthType.bearer_token,
                                    identity=identity,
                                    entities=entities)
        self.exp = exp
        self.iat = iat
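
The Token example reads dotted keys such as 'secure.auth.token_secret'; some config wrappers resolve these against nested sections. A standalone, purely illustrative resolver for that style (not taken from this project's config module):

# Illustrative dotted-path lookup over a nested dict.
def get_path(cfg, dotted, default=None):
    node = cfg
    for part in dotted.split('.'):
        if not isinstance(node, dict) or part not in node:
            return default
        node = node[part]
    return node

settings = {'secure': {'auth': {'token_secret': 'example-secret'}}}
print(get_path(settings, 'secure.auth.token_secret'))                      # example-secret
print(get_path(settings, 'apiserver.auth.default_expiration_sec', 3600))   # 3600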
Example 38
def journal_entry(cmdr, is_beta, system, station, entry, state):

    # Send any unsent events when switching accounts
    if cmdr and cmdr != this.cmdr:
        call()

    this.cmdr = cmdr
    this.FID = state['FID']
    this.multicrew = bool(state['Role'])

    if entry['event'] == 'LoadGame' or this.newuser:
        # clear cached state
        if entry['event'] == 'LoadGame':
            # User setup Inara API while at the loading screen - proceed as for new session
            this.newuser = False
            this.newsession = True
        else:
            this.newuser = True
            this.newsession = False
        this.undocked = False
        this.suppress_docked = False
        this.cargo = None
        this.materials = None
        this.lastcredits = 0
        this.storedmodules = None
        this.loadout = None
        this.fleet = None
        this.shipswap = False
        this.system = None
        this.station = None
    elif entry['event'] in [
            'Resurrect', 'ShipyardBuy', 'ShipyardSell', 'SellShipOnRebuy'
    ]:
        # Events that mean a significant change in credits so we should send credits after next "Update"
        this.lastcredits = 0
    elif entry['event'] in [
            'ShipyardNew', 'ShipyardSwap'
    ] or (entry['event'] == 'Location' and entry['Docked']):
        this.suppress_docked = True

    # Send location and status on new game or StartUp. Assumes Cargo is the last event on a new game (other than Docked).
    # Always send an update on Docked, FSDJump, Undocked+SuperCruise, Promotion, EngineerProgress and PowerPlay affiliation.
    # Also send material and cargo (if changed) whenever we send an update.

    if (config.getint('inara_out') and not is_beta
            and not this.multicrew and credentials(cmdr)):
        try:
            # Will only send existing events if we add a new event below
            old_events = len(this.events)

            # Dump starting state to Inara

            if (this.newuser or entry['event'] == 'StartUp'
                    or (this.newsession and entry['event'] == 'Cargo')):
                this.newuser = False
                this.newsession = False

                # Send rank info to Inara on startup
                add_event('setCommanderRankPilot', entry['timestamp'], [
                    OrderedDict([
                        ('rankName', k.lower()),
                        ('rankValue', v[0]),
                        ('rankProgress', v[1] / 100.0),
                    ]) for k, v in state['Rank'].iteritems() if v is not None
                ])
                add_event('setCommanderReputationMajorFaction',
                          entry['timestamp'], [
                              OrderedDict([
                                  ('majorfactionName', k.lower()),
                                  ('majorfactionReputation', v / 100.0),
                              ]) for k, v in state['Reputation'].iteritems()
                              if v is not None
                          ])
                if state['Engineers']:  # Not populated < 3.3
                    add_event('setCommanderRankEngineer', entry['timestamp'], [
                        OrderedDict([
                            ('engineerName', k),
                            type(v) is tuple and ('rankValue', v[0]) or
                            ('rankStage', v),
                        ]) for k, v in state['Engineers'].iteritems()
                    ])

                # Update location
                add_event(
                    'setCommanderTravelLocation',
                    entry['timestamp'],
                    OrderedDict([
                        ('starsystemName', system),
                        ('stationName', station),  # Can be None
                    ]))

                # Update ship
                if state['ShipID']:  # Unknown if started in Fighter or SRV
                    data = OrderedDict([
                        ('shipType', state['ShipType']),
                        ('shipGameID', state['ShipID']),
                        ('shipName', state['ShipName']),  # Can be None
                        ('shipIdent', state['ShipIdent']),  # Can be None
                        ('isCurrentShip', True),
                    ])
                    if state['HullValue']:
                        data['shipHullValue'] = state['HullValue']
                    if state['ModulesValue']:
                        data['shipModulesValue'] = state['ModulesValue']
                    data['shipRebuyCost'] = state['Rebuy']
                    add_event('setCommanderShip', entry['timestamp'], data)

                    this.loadout = make_loadout(state)
                    add_event('setCommanderShipLoadout', entry['timestamp'],
                              this.loadout)

            # Promotions
            elif entry['event'] == 'Promotion':
                for k, v in state['Rank'].iteritems():
                    if k in entry:
                        add_event(
                            'setCommanderRankPilot', entry['timestamp'],
                            OrderedDict([
                                ('rankName', k.lower()),
                                ('rankValue', v[0]),
                                ('rankProgress', 0),
                            ]))
            elif entry['event'] == 'EngineerProgress' and 'Engineer' in entry:
                add_event(
                    'setCommanderRankEngineer', entry['timestamp'],
                    OrderedDict([
                        ('engineerName', entry['Engineer']),
                        'Rank' in entry and ('rankValue', entry['Rank'])
                        or ('rankStage', entry['Progress']),
                    ]))

            # PowerPlay status change
            if entry['event'] == 'PowerplayJoin':
                add_event(
                    'setCommanderRankPower', entry['timestamp'],
                    OrderedDict([
                        ('powerName', entry['Power']),
                        ('rankValue', 1),
                    ]))
            elif entry['event'] == 'PowerplayLeave':
                add_event(
                    'setCommanderRankPower', entry['timestamp'],
                    OrderedDict([
                        ('powerName', entry['Power']),
                        ('rankValue', 0),
                    ]))
            elif entry['event'] == 'PowerplayDefect':
                add_event(
                    'setCommanderRankPower', entry['timestamp'],
                    OrderedDict([
                        ('powerName', entry['ToPower']),
                        ('rankValue', 1),
                    ]))

            # Ship change
            if entry['event'] == 'Loadout' and this.shipswap:
                data = OrderedDict([
                    ('shipType', state['ShipType']),
                    ('shipGameID', state['ShipID']),
                    ('shipName', state['ShipName']),  # Can be None
                    ('shipIdent', state['ShipIdent']),  # Can be None
                    ('isCurrentShip', True),
                ])
                if state['HullValue']:
                    data['shipHullValue'] = state['HullValue']
                if state['ModulesValue']:
                    data['shipModulesValue'] = state['ModulesValue']
                data['shipRebuyCost'] = state['Rebuy']
                add_event('setCommanderShip', entry['timestamp'], data)

                this.loadout = make_loadout(state)
                add_event('setCommanderShipLoadout', entry['timestamp'],
                          this.loadout)
                this.shipswap = False

            # Location change
            elif entry['event'] == 'Docked':
                if this.undocked:
                    # Undocked and now docking again. Don't send.
                    this.undocked = False
                elif this.suppress_docked:
                    # Don't send initial Docked event on new game
                    this.suppress_docked = False
                else:
                    add_event(
                        'addCommanderTravelDock', entry['timestamp'],
                        OrderedDict([
                            ('starsystemName', system),
                            ('stationName', station),
                            ('shipType', state['ShipType']),
                            ('shipGameID', state['ShipID']),
                        ]))
            elif entry['event'] == 'Undocked':
                this.undocked = True
                this.station = None
            elif entry['event'] == 'SupercruiseEntry':
                if this.undocked:
                    # Staying in system after undocking - send any pending events from in-station action
                    add_event(
                        'setCommanderTravelLocation', entry['timestamp'],
                        OrderedDict([
                            ('starsystemName', system),
                            ('shipType', state['ShipType']),
                            ('shipGameID', state['ShipID']),
                        ]))
                this.undocked = False
            elif entry['event'] == 'FSDJump':
                this.undocked = False
                this.system = None
                add_event(
                    'addCommanderTravelFSDJump', entry['timestamp'],
                    OrderedDict([
                        ('starsystemName', entry['StarSystem']),
                        ('jumpDistance', entry['JumpDist']),
                        ('shipType', state['ShipType']),
                        ('shipGameID', state['ShipID']),
                    ]))

                if entry.get('Factions'):
                    add_event(
                        'setCommanderReputationMinorFaction',
                        entry['timestamp'], [
                            OrderedDict([
                                ('minorfactionName', f['Name']),
                                ('minorfactionReputation', f['MyReputation']),
                            ]) for f in entry['Factions']
                        ])

            # Override standard URL functions
            if config.get('system_provider') == 'Inara':
                this.system_link['url'] = this.system
            if config.get('station_provider') == 'Inara':
                this.station_link['url'] = this.station or this.system

            # Send event(s) to Inara
            if entry['event'] == 'ShutDown' or len(this.events) > old_events:

                # Send cargo and materials if changed
                cargo = [
                    OrderedDict([('itemName', k),
                                 ('itemCount', state['Cargo'][k])])
                    for k in sorted(state['Cargo'])
                ]
                if this.cargo != cargo:
                    add_event('setCommanderInventoryCargo', entry['timestamp'],
                              cargo)
                    this.cargo = cargo
                materials = []
                for category in ['Raw', 'Manufactured', 'Encoded']:
                    materials.extend([
                        OrderedDict([('itemName', k),
                                     ('itemCount', state[category][k])])
                        for k in sorted(state[category])
                    ])
                if this.materials != materials:
                    add_event('setCommanderInventoryMaterials',
                              entry['timestamp'], materials)
                    this.materials = materials

                # Queue a call to Inara
                call()

        except Exception as e:
            if __debug__: print_exc()
            return unicode(e)

        #
        # Events that don't need to be sent immediately but will be sent on the next mandatory event
        #

        # Send credits and stats to Inara on startup only - otherwise may be out of date
        if entry['event'] == 'LoadGame':
            add_event(
                'setCommanderCredits', entry['timestamp'],
                OrderedDict([
                    ('commanderCredits', state['Credits']),
                    ('commanderLoan', state['Loan']),
                ]))
            this.lastcredits = state['Credits']
        elif entry['event'] == 'Statistics':
            add_event('setCommanderGameStatistics', entry['timestamp'],
                      state['Statistics'])  # may be out of date

        # Selling / swapping ships
        if entry['event'] == 'ShipyardNew':
            add_event(
                'addCommanderShip', entry['timestamp'],
                OrderedDict([
                    ('shipType', entry['ShipType']),
                    ('shipGameID', entry['NewShipID']),
                ]))
            this.shipswap = True  # Want subsequent Loadout event to be sent immediately

        elif entry['event'] in [
                'ShipyardBuy', 'ShipyardSell', 'SellShipOnRebuy',
                'ShipyardSwap'
        ]:
            if entry['event'] == 'ShipyardSwap':
                this.shipswap = True  # Don't know new ship name and ident 'til the following Loadout event
            if 'StoreShipID' in entry:
                add_event(
                    'setCommanderShip', entry['timestamp'],
                    OrderedDict([
                        ('shipType', entry['StoreOldShip']),
                        ('shipGameID', entry['StoreShipID']),
                        ('starsystemName', system),
                        ('stationName', station),
                    ]))
            elif 'SellShipID' in entry:
                add_event(
                    'delCommanderShip', entry['timestamp'],
                    OrderedDict([
                        ('shipType', entry.get('SellOldShip',
                                               entry['ShipType'])),
                        ('shipGameID', entry['SellShipID']),
                    ]))

        elif entry['event'] == 'SetUserShipName':
            add_event(
                'setCommanderShip',
                entry['timestamp'],
                OrderedDict([
                    ('shipType', state['ShipType']),
                    ('shipGameID', state['ShipID']),
                    ('shipName', state['ShipName']),  # Can be None
                    ('shipIdent', state['ShipIdent']),  # Can be None
                    ('isCurrentShip', True),
                ]))

        elif entry['event'] == 'ShipyardTransfer':
            add_event(
                'setCommanderShipTransfer', entry['timestamp'],
                OrderedDict([
                    ('shipType', entry['ShipType']),
                    ('shipGameID', entry['ShipID']),
                    ('starsystemName', system),
                    ('stationName', station),
                    ('transferTime', entry['TransferTime']),
                ]))

        # Fleet
        if entry['event'] == 'StoredShips':
            fleet = sorted(
                [{
                    'shipType': x['ShipType'],
                    'shipGameID': x['ShipID'],
                    'shipName': x.get('Name'),
                    'isHot': x['Hot'],
                    'starsystemName': entry['StarSystem'],
                    'stationName': entry['StationName'],
                    'marketID': entry['MarketID'],
                } for x in entry['ShipsHere']] + [
                    {
                        'shipType': x['ShipType'],
                        'shipGameID': x['ShipID'],
                        'shipName': x.get('Name'),
                        'isHot': x['Hot'],
                        'starsystemName': x.get(
                            'StarSystem'),  # Not present for ships in transit
                        'marketID': x.get('ShipMarketID'),  #   "
                    } for x in entry['ShipsRemote']
                ],
                key=itemgetter('shipGameID'))
            if this.fleet != fleet:
                this.fleet = fleet
                this.events = [
                    x for x in this.events
                    if x['eventName'] != 'setCommanderShip'
                ]  # Remove any unsent
                for ship in this.fleet:
                    add_event('setCommanderShip', entry['timestamp'], ship)

        # Loadout
        if entry['event'] == 'Loadout' and not this.newsession:
            loadout = make_loadout(state)
            if this.loadout != loadout:
                this.loadout = loadout
                this.events = [
                    x for x in this.events
                    if x['eventName'] != 'setCommanderShipLoadout'
                    or x['shipGameID'] != this.loadout['shipGameID']
                ]  # Remove any unsent for this ship
                add_event('setCommanderShipLoadout', entry['timestamp'],
                          this.loadout)

        # Stored modules
        if entry['event'] == 'StoredModules':
            items = dict([(x['StorageSlot'], x)
                          for x in entry['Items']])  # Impose an order
            modules = []
            for slot in sorted(items):
                item = items[slot]
                module = OrderedDict([
                    ('itemName', item['Name']),
                    ('itemValue', item['BuyPrice']),
                    ('isHot', item['Hot']),
                ])

                # Location can be absent if in transit
                if 'StarSystem' in item:
                    module['starsystemName'] = item['StarSystem']
                if 'MarketID' in item:
                    module['marketID'] = item['MarketID']

                if 'EngineerModifications' in item:
                    module['engineering'] = OrderedDict([
                        ('blueprintName', item['EngineerModifications'])
                    ])
                    if 'Level' in item:
                        module['engineering']['blueprintLevel'] = item['Level']
                    if 'Quality' in item:
                        module['engineering']['blueprintQuality'] = item['Quality']

                modules.append(module)

            if this.storedmodules != modules:
                # Only send on change
                this.storedmodules = modules
                this.events = [
                    x for x in this.events
                    if x['eventName'] != 'setCommanderStorageModules'
                ]  # Remove any unsent
                add_event('setCommanderStorageModules', entry['timestamp'],
                          this.storedmodules)

        # Missions
        if entry['event'] == 'MissionAccepted':
            data = OrderedDict([
                ('missionName', entry['Name']),
                ('missionGameID', entry['MissionID']),
                ('influenceGain', entry['Influence']),
                ('reputationGain', entry['Reputation']),
                ('starsystemNameOrigin', system),
                ('stationNameOrigin', station),
                ('minorfactionNameOrigin', entry['Faction']),
            ])
            # optional mission-specific properties
            for (iprop, prop) in [
                ('missionExpiry', 'Expiry'),  # Listed as optional in the docs, but always seems to be present
                ('starsystemNameTarget', 'DestinationSystem'),
                ('stationNameTarget', 'DestinationStation'),
                ('minorfactionNameTarget', 'TargetFaction'),
                ('commodityName', 'Commodity'),
                ('commodityCount', 'Count'),
                ('targetName', 'Target'),
                ('targetType', 'TargetType'),
                ('killCount', 'KillCount'),
                ('passengerType', 'PassengerType'),
                ('passengerCount', 'PassengerCount'),
                ('passengerIsVIP', 'PassengerVIPs'),
                ('passengerIsWanted', 'PassengerWanted'),
            ]:
                if prop in entry:
                    data[iprop] = entry[prop]
            add_event('addCommanderMission', entry['timestamp'], data)

        elif entry['event'] == 'MissionAbandoned':
            add_event('setCommanderMissionAbandoned', entry['timestamp'],
                      {'missionGameID': entry['MissionID']})

        elif entry['event'] == 'MissionCompleted':
            for x in entry.get('PermitsAwarded', []):
                add_event('addCommanderPermit', entry['timestamp'],
                          {'starsystemName': x})

            data = OrderedDict([('missionGameID', entry['MissionID'])])
            if 'Donation' in entry:
                data['donationCredits'] = entry['Donation']
            if 'Reward' in entry:
                data['rewardCredits'] = entry['Reward']
            if 'PermitsAwarded' in entry:
                data['rewardPermits'] = [{
                    'starsystemName': x
                } for x in entry['PermitsAwarded']]
            if 'CommodityReward' in entry:
                data['rewardCommodities'] = [{
                    'itemName': x['Name'],
                    'itemCount': x['Count']
                } for x in entry['CommodityReward']]
            if 'MaterialsReward' in entry:
                data['rewardMaterials'] = [{
                    'itemName': x['Name'],
                    'itemCount': x['Count']
                } for x in entry['MaterialsReward']]
            factioneffects = []
            for faction in entry.get('FactionEffects', []):
                effect = OrderedDict([('minorfactionName', faction['Faction'])])
                for influence in faction.get('Influence', []):
                    if 'Influence' in influence:
                        # Keep whichever influence string represents the larger gain
                        if len(influence['Influence']) >= len(effect.get('influenceGain', '')):
                            effect['influenceGain'] = influence['Influence']
                if 'Reputation' in faction:
                    effect['reputationGain'] = faction['Reputation']
                factioneffects.append(effect)
            if factioneffects:
                data['minorfactionEffects'] = factioneffects
            add_event('setCommanderMissionCompleted', entry['timestamp'], data)

        elif entry['event'] == 'MissionFailed':
            add_event('setCommanderMissionFailed', entry['timestamp'],
                      {'missionGameID': entry['MissionID']})

        # Combat
        if entry['event'] == 'Died':
            data = OrderedDict([('starsystemName', system)])
            if 'Killers' in entry:
                data['wingOpponentNames'] = [
                    x['Name'] for x in entry['Killers']
                ]
            elif 'KillerName' in entry:
                data['opponentName'] = entry['KillerName']
            add_event('addCommanderCombatDeath', entry['timestamp'], data)

        elif entry['event'] == 'Interdicted':
            add_event(
                'addCommanderCombatInterdicted', entry['timestamp'],
                OrderedDict([
                    ('starsystemName', system),
                    ('opponentName', entry['Interdictor']),
                    ('isPlayer', entry['IsPlayer']),
                    ('isSubmit', entry['Submitted']),
                ]))

        elif entry['event'] == 'Interdiction':
            data = OrderedDict([
                ('starsystemName', system),
                ('isPlayer', entry['IsPlayer']),
                ('isSuccess', entry['Success']),
            ])
            if 'Interdictor' in entry:
                data['opponentName'] = entry['Interdictor']
            elif 'Faction' in entry:
                data['opponentName'] = entry['Faction']
            elif 'Power' in entry:
                data['opponentName'] = entry['Power']
            add_event('addCommanderCombatInterdiction', entry['timestamp'],
                      data)

        elif entry['event'] == 'EscapeInterdiction':
            add_event(
                'addCommanderCombatInterdictionEscape', entry['timestamp'],
                OrderedDict([
                    ('starsystemName', system),
                    ('opponentName', entry['Interdictor']),
                    ('isPlayer', entry['IsPlayer']),
                ]))

        elif entry['event'] == 'PVPKill':
            add_event(
                'addCommanderCombatKill', entry['timestamp'],
                OrderedDict([
                    ('starsystemName', system),
                    ('opponentName', entry['Victim']),
                ]))

        # Community Goals
        if entry['event'] == 'CommunityGoal':
            this.events = [
                x for x in this.events if x['eventName'] not in
                ['setCommunityGoal', 'setCommanderCommunityGoalProgress']
            ]  # Remove any unsent
            for goal in entry['CurrentGoals']:

                data = OrderedDict([
                    ('communitygoalGameID', goal['CGID']),
                    ('communitygoalName', goal['Title']),
                    ('starsystemName', goal['SystemName']),
                    ('stationName', goal['MarketName']),
                    ('goalExpiry', goal['Expiry']),
                    ('isCompleted', goal['IsComplete']),
                    ('contributorsNum', goal['NumContributors']),
                    ('contributionsTotal', goal['CurrentTotal']),
                ])
                if 'TierReached' in goal:
                    data['tierReached'] = int(goal['TierReached'].split()[-1])
                if 'TopRankSize' in goal:
                    data['topRankSize'] = goal['TopRankSize']
                if 'TopTier' in goal:
                    data['tierMax'] = int(goal['TopTier']['Name'].split()[-1])
                    data['completionBonus'] = goal['TopTier']['Bonus']
                add_event('setCommunityGoal', entry['timestamp'], data)

                data = OrderedDict([
                    ('communitygoalGameID', goal['CGID']),
                    ('contribution', goal['PlayerContribution']),
                    ('percentileBand', goal['PlayerPercentileBand']),
                ])
                if 'Bonus' in goal:
                    data['percentileBandReward'] = goal['Bonus']
                if 'PlayerInTopRank' in goal:
                    data['isTopRank'] = goal['PlayerInTopRank']
                add_event('setCommanderCommunityGoalProgress',
                          entry['timestamp'], data)

        # Friends
        if entry['event'] == 'Friends':
            if entry['Status'] in ['Added', 'Online']:
                add_event(
                    'addCommanderFriend', entry['timestamp'],
                    OrderedDict([
                        ('commanderName', entry['Name']),
                        ('gamePlatform', 'pc'),
                    ]))
            elif entry['Status'] in ['Declined', 'Lost']:
                add_event(
                    'delCommanderFriend', entry['timestamp'],
                    OrderedDict([
                        ('commanderName', entry['Name']),
                        ('gamePlatform', 'pc'),
                    ]))

        this.newuser = False
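# --- Editor's sketch, not part of the original plugin excerpt -----------------------
# add_event() and call() are helpers defined elsewhere in this file. Judging by how
# this.events is filtered above on x['eventName'], add_event() presumably just queues
# one small dict per Inara API event and call() later flushes the whole batch to the
# API. A minimal guess at its shape (field names follow the Inara API convention, not
# necessarily the exact original code):
#
#     def add_event(name, timestamp, data):
#         this.events.append(OrderedDict([
#             ('eventName', name),
#             ('eventTimestamp', timestamp),
#             ('eventData', data),
#         ]))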
Esempio n. 39
0
# -*- coding: utf-8 -*-
import json
import uuid
import random

from aliyunsdkdysmsapi.request.v20170525 import SendSmsRequest
from aliyunsdkcore.client import AcsClient
from aliyunsdkcore.profile import region_provider
from config import config
Conf = config.get('message')

acs_client = AcsClient(Conf.get('ACCESS_KEY_ID'),
                       Conf.get('ACCESS_KEY_SECRET'), Conf.get('REGION'))
region_provider.add_endpoint(Conf.get('PRODUCT_NAME'), Conf.get('REGION'),
                             Conf.get('DOMAIN'))


def send_sms(business_id,
             phone_numbers,
             sign_name,
             template_code,
             template_param=None):
    smsRequest = SendSmsRequest.SendSmsRequest()
    smsRequest.set_TemplateCode(template_code)

    if template_param is not None:
        smsRequest.set_TemplateParam(template_param)

    smsRequest.set_OutId(business_id)
    smsRequest.set_SignName(sign_name)
    smsRequest.set_PhoneNumbers(phone_numbers)
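    # --- Editor's sketch: the original snippet is cut off here. With the aliyunsdkcore
    # client created above, the request is typically dispatched like this (returning
    # the raw response body is an assumption, not shown in the original):
    sms_response = acs_client.do_action_with_exception(smsRequest)
    return sms_response  # raw JSON body returned by the SMS API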
Esempio n. 40
0
 def open_lrc_dir(self):
     save_dir = os.path.expanduser(
         config.get("lyrics", "save_lrc_path", "~/.lyrics"))
     utils.run_command("xdg-open %s" % save_dir)
Esempio n. 41
0
File: views.py Progetto: rtm516/core
 def get_queryset(self):
     if self.request.user.is_staff and self.request.user.should_deny_admin():
         return Category.objects.none()
     team = self.request.user.team
     if team is not None:
         solves = Solve.objects.filter(team=team, correct=True)
         solved_challenges = solves.values_list('challenge')
         challenges = Challenge.objects.prefetch_related(
             'unlocked_by').annotate(
                 unlocked=Case(
                     When(auto_unlock=True, then=Value(True)),
                     When(Q(unlocked_by__in=Subquery(solved_challenges)),
                          then=Value(True)),
                     default=Value(False),
                     output_field=models.BooleanField()),
                 solved=Case(When(Q(id__in=Subquery(solved_challenges)),
                                  then=Value(True)),
                             default=Value(False),
                             output_field=models.BooleanField()),
                 solve_count=Count('solves',
                                   filter=Q(solves__correct=True)),
                 unlock_time_surpassed=Case(
                     When(release_time__lte=timezone.now(),
                          then=Value(True)),
                     default=Value(False),
                     output_field=models.BooleanField(),
                 ))
     else:
         challenges = (Challenge.objects.filter(
             release_time__lte=timezone.now()).annotate(
                 unlocked=Case(When(auto_unlock=True, then=Value(True)),
                               default=Value(False),
                               output_field=models.BooleanField()),
                 solved=Value(False, models.BooleanField()),
                 solve_count=Count('solves'),
                 unlock_time_surpassed=Case(
                     When(release_time__lte=timezone.now(),
                          then=Value(True)),
                     default=Value(False),
                     output_field=models.BooleanField(),
                 )))
     x = challenges.prefetch_related(
         Prefetch('hint_set',
                  queryset=Hint.objects.annotate(
                      used=Case(When(id__in=HintUse.objects.filter(
                          team=team).values_list('hint_id'),
                                     then=Value(True)),
                                default=Value(False),
                                output_field=models.BooleanField())),
                  to_attr='hints'),
         Prefetch('file_set', queryset=File.objects.all(), to_attr='files'),
         Prefetch(
             'tag_set',
             queryset=Tag.objects.all() if
             time.time() > config.get('end_time') else Tag.objects.filter(
                 post_competition=False),
             to_attr='tags'), 'unlocks', 'first_blood', 'hint_set__uses')
     if self.request.user.is_staff:
         categories = Category.objects
     else:
         categories = Category.objects.filter(
             release_time__lte=timezone.now())
     qs = categories.prefetch_related(
         Prefetch('category_challenges', queryset=x, to_attr='challenges'))
     return qs
Esempio n. 42
0
File: views.py Progetto: rtm516/core
    def post(self, request):
        if not config.get('enable_flag_submission') or \
                (not config.get('enable_flag_submission_after_competition') and time.time() > config.get('end_time')):
            return FormattedResponse(m='flag_submission_disabled',
                                     status=HTTP_403_FORBIDDEN)

        with transaction.atomic():
            team = Team.objects.select_for_update().get(
                id=request.user.team.id)
            user = get_user_model().objects.select_for_update().get(
                id=request.user.id)
            flag = request.data.get('flag')
            challenge_id = request.data.get('challenge')
            if not flag or not challenge_id:
                return FormattedResponse(status=HTTP_400_BAD_REQUEST)

            challenge = get_object_or_404(
                Challenge.objects.select_for_update(), id=challenge_id)
            solve_set = Solve.objects.filter(challenge=challenge)
            if solve_set.filter(team=team, correct=True).exists() \
                    or not challenge.is_unlocked(user):
                return FormattedResponse(m='already_solved_challenge',
                                         status=HTTP_403_FORBIDDEN)

            if challenge.challenge_metadata.get("attempt_limit"):
                count = solve_set.filter(team=team).count()
                if count > challenge.challenge_metadata['attempt_limit']:
                    flag_reject.send(sender=self.__class__,
                                     user=user,
                                     team=team,
                                     challenge=challenge,
                                     flag=flag,
                                     reason='attempt_limit_reached')
                    return FormattedResponse(d={'correct': False},
                                             m='attempt_limit_reached')

            flag_submit.send(sender=self.__class__,
                             user=user,
                             team=team,
                             challenge=challenge,
                             flag=flag)
            plugin = plugins.plugins['flag'][challenge.flag_type](challenge)
            points_plugin = plugins.plugins['points'][challenge.points_type](
                challenge)

            if not plugin.check(flag, user=user, team=team):
                flag_reject.send(sender=self.__class__,
                                 user=user,
                                 team=team,
                                 challenge=challenge,
                                 flag=flag,
                                 reason='incorrect_flag')
                points_plugin.register_incorrect_attempt(
                    user, team, flag, solve_set)
                return FormattedResponse(d={'correct': False},
                                         m='incorrect_flag')

            solve = points_plugin.score(user, team, flag, solve_set)
            if challenge.first_blood is None:
                challenge.first_blood = user
                challenge.save()

            user.save()
            team.save()
            flag_score.send(sender=self.__class__,
                            user=user,
                            team=team,
                            challenge=challenge,
                            flag=flag,
                            solve=solve)
            ret = {'correct': True}
            if challenge.post_score_explanation:
                ret["explanation"] = challenge.post_score_explanation
            return FormattedResponse(d=ret, m='correct_flag')
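# --- Editor's sketch, not part of the original view ---------------------------------
# plugins.plugins is a registry defined elsewhere in the project; a 'flag' plugin only
# needs the constructor and check() signature used above. A hypothetical plaintext
# plugin (the attribute holding the expected flag is assumed, not taken from the
# original code) could look roughly like:
#
#     class PlaintextFlagPlugin:
#         def __init__(self, challenge):
#             self.challenge = challenge
#
#         def check(self, flag, user=None, team=None):
#             return flag == self.challenge.flag_metadata.get('flag')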
Esempio n. 43
0
 def env(self):
     _args = ["env", config.get("machine_name")]
     return self.command.run(_args, show_stream=True)
Esempio n. 44
0
# You should have received a copy of the GNU General Public License
# along with Mod Organizer.  If not, see <http://www.gnu.org/licenses/>.

from unibuild import Project
from unibuild.modules import sourceforge, build
from subprocess import Popen
from config import config
from glob import glob
import os
import logging
import python
import errno
import shutil

sip_version = "4.19.1"
python_version = config.get('python_version', "2.7") + config.get(
    'python_version_minor', ".13")


def sip_environment():
    result = config['__environment'].copy()
    result['LIB'] += os.path.join(config['paths']['build'],
                                  "python-{}".format(python_version),
                                  "PCbuild", "amd64")
    return result


def make_sure_path_exists(path):
    try:
        os.makedirs(path)
    except OSError as exception:
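        # --- Editor's note: the snippet breaks off here; the conventional completion
        # of this idiom (errno is already imported above) is assumed to be:
        if exception.errno != errno.EEXIST:
            raise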
Esempio n. 45
0
def patchboost(context):
    try:
        savedpath = os.getcwd()
        os.chdir(boost_path)
        pset = patch.fromfile(
            os.path.join(config['__Umbrella_path'], "patches",
                         "boost_python_libname.patch"))
        pset.apply()
        os.chdir(savedpath)
        return True
    except OSError:
        return False


if config.get('binary_boost', True):
    boost_prepare = Project("boost_prepare")
    boost = Project("boost").depend(
        urldownload.URLDownload(
            "https://github.com/ModOrganizer2/modorganizer-umbrella/releases/download/1.0/boost_prebuilt_{}.7z"
            .format(boost_tag_version.replace(".", "_"))).set_destination(
                "boost_{}".format(boost_tag_version.replace(".", "_"))))
    if config['architecture'] == 'x86_64':
        boost_stage = Patch.Copy(
            os.path.join(
                "{}/lib{}-msvc-{}/lib/boost_python{}-vc{}-mt-{}-{}.dll".format(
                    boost_path,
                    "64" if config['architecture'] == 'x86_64' else "32",
                    vc_version, config["python_version"].replace(".", ""),
                    vc_version.replace(".", ""),
                    "x64" if config['architecture'] == "x86_64" else "x86",
Esempio n. 46
0
class Token(Payload):
    default_expiration_sec = config.get(
        'apiserver.auth.default_expiration_sec')

    def __init__(self,
                 exp=None,
                 iat=None,
                 nbf=None,
                 env=None,
                 identity=None,
                 entities=None,
                 **_):
        super(Token, self).__init__(AuthType.bearer_token,
                                    identity=identity,
                                    entities=entities)
        self.exp = exp
        self.iat = iat
        self.nbf = nbf
        self._env = env or config.get('env', '<unknown>')

    @property
    def env(self):
        return self._env

    @property
    def exp(self):
        return self._exp

    @exp.setter
    def exp(self, value):
        self._exp = value

    @property
    def iat(self):
        return self._iat

    @iat.setter
    def iat(self, value):
        self._iat = value

    @property
    def nbf(self):
        return self._nbf

    @nbf.setter
    def nbf(self, value):
        self._nbf = value

    def get_log_entry(self):
        d = super(Token, self).get_log_entry()
        d.update(iat=self.iat, exp=self.exp, env=self.env)
        return d

    def encode(self, **extra_payload):
        payload = self.to_dict(**extra_payload)
        return jwt.encode(payload, token_secret)

    @classmethod
    def decode(cls, encoded_token, verify=True):
        return jwt.decode(encoded_token, token_secret, verify=verify)

    @classmethod
    def from_encoded_token(cls, encoded_token, verify=True):
        decoded = cls.decode(encoded_token, verify=verify)
        try:
            token = Token.from_dict(decoded)
            assert isinstance(token, Token)
            if not token.identity:
                raise errors.unauthorized.InvalidToken(
                    'token missing identity')
            return token
        except Exception as e:
            raise errors.unauthorized.InvalidToken('failed parsing token, %s' %
                                                   e.args[0])

    @classmethod
    def create_encoded_token(cls,
                             identity,
                             expiration_sec=None,
                             entities=None,
                             **extra_payload):
        if identity.role not in (Role.system, ):
            # limit expiration time for all roles but an internal service
            expiration_sec = expiration_sec or cls.default_expiration_sec

        now = datetime.utcnow()

        token = cls(identity=identity, entities=entities, iat=now)

        if expiration_sec:
            # add 'expiration' claim
            token.exp = now + timedelta(seconds=expiration_sec)

        return token.encode(**extra_payload)
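# --- Editor's sketch, not part of the original class --------------------------------
# token_secret is a module-level value referenced by encode()/decode() but not shown
# in this excerpt. A typical round trip, assuming an `identity` object from the
# surrounding codebase:
#
#     encoded = Token.create_encoded_token(identity, expiration_sec=3600)
#     token = Token.from_encoded_token(encoded)   # raises InvalidToken on bad input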
Esempio n. 47
0
 def get_lrc_filepath(self, song):
     save_path = os.path.expanduser(config.get("lyrics", "save_lrc_path"))
     if not os.path.exists(save_path):
         utils.makedirs(save_path)
     fillpath = os.path.join(save_path, self.get_lrc_search_str(song))
     return fillpath
Esempio n. 48
0
def benchmark_model_accuracy(df):
    """Extract the win percentage

    Compare accuracy with result.mean
    x = df[(df.elo_prob1 > 0.2) & (df.elo_prob1 < 0.3)]
    # Accuracy:
    print(accuracy_score(x.result, x.elo_prob1 > .5))
    print(x.result == (x.elo_prob1 > 0.5).astype('int'))
    # Calibration (should be between 0.2 and 0.3 if calibrated)
    print(x.result.mean())
    """

    # Prediction = Home team win pct > 0.5
    df["pred_home_winpct"] = (df["team1_lag1_win_pct"] > 0.5).astype("int")
    df["acc_pred_home_winpct"] = (
        df["result"] == df["pred_home_winpct"]).astype("int")

    acc_home_win_pct = breakdown_accuracy(
        df,
        result="result",
        proba="team1_lag1_win_pct",
        pred="pred_home_winpct",
        name="home_winpct",
    )

    # Don't use breakdown_accuracy() function for these simple aggregates
    df_tmp = df[df.date < config.get("date", "today")].copy()
    # home team win pct
    acc_home = df_tmp.result.mean()
    # home win pct > away win pct.
    pred = (df_tmp["team1_lag1_win_pct"] >
            df_tmp["team2_lag1_win_pct"]).astype("int")
    acc_home_vs_away = accuracy_score(y_true=df_tmp.result, y_pred=pred)

    # Combine all accuracies
    df_acc = acc_home_win_pct.append(
        pd.DataFrame(
            {"all": [acc_home, acc_home_vs_away]},
            index=["home_overall", "home_vs_away_winpct"],
        ),
        sort=False,
    )

    # Add Bryan's predictions
    for ver in ["v01", "v02"]:
        acc_ver = breakdown_accuracy(
            df,
            result="result",
            proba=f"result_{ver}_prob1",
            pred=f"pred_result_{ver}_prob1",
            name=ver,
        )
        df_acc = df_acc.append(acc_ver)

    # Does prediction improve after filtering out first month?
    # df = df[df.ymdhms > '2019-12-01']
    # pred = (df['home_lag1_win_pct'] > .5).astype('int')
    # accuracy_score(y_true=df.result, y_pred=pred)

    # Predicting using the away team win pct
    # pred = 1-(df['away_lag1_win_pct'] > .5).astype('int')
    # accuracy_score(y_true=df.result, y_pred=pred)

    # Five-thirty-eight models
    # Compare Nate Silver's results
    models = ["elo", "carm-elo", "raptor"]
    for model in models:
        df[f"pred_{model}_prob1"] = (df[f"{model}_prob1"] > 0.5).astype("int")
        acc = breakdown_accuracy(
            df,
            result="result",
            proba=f"{model}_prob1",
            pred=f"pred_{model}_prob1",
            name=model,
        )
        df_acc = df_acc.append(acc)

        # Save out binary accuracy for groupby-accuracy by season_week later
        df[f"acc_pred_{model}_prob1"] = (
            df[f"pred_{model}_prob1"] == df["result"]).astype("int")

    # reset index because rownames contain the accuracy breakdowns
    df_acc = df_acc.transpose().fillna("").reset_index()
    return df, df_acc
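# --- Editor's sketch, not part of the original snippet ------------------------------
# breakdown_accuracy() is defined elsewhere; judging by how its result is combined
# above (an "all" column, rows labelled by `name`), it presumably returns a small
# DataFrame of accuracy figures. A rough stand-in under those assumptions:
def breakdown_accuracy(df, result, proba, pred, name):
    played = df[df[result].notna()]                    # rows with a known outcome
    acc = (played[result] == played[pred]).mean()      # overall hit rate of `pred`
    # the real helper presumably also breaks accuracy down by bands of `proba`
    return pd.DataFrame({"all": [acc]}, index=[name])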
Esempio n. 49
0
def parse_command(text):
    if text.find(str(config.get("activate_command"))) != -1:
        return str(config.get("activate_command"))
    if text.find(str(config.get("deactivate_command"))) != -1:
        return str(config.get("deactivate_command"))
    raise KeyError('Commands not found')
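# --- Editor's sketch, not part of the original snippet: with hypothetical config
# values activate_command="!start" and deactivate_command="!stop", the helper returns
# whichever command string occurs in the text, e.g.
#
#     parse_command("please !start the bot")   # -> "!start"
#     parse_command("nothing relevant here")   # -> raises KeyError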
Esempio n. 50
0
 def status(self):
     _args = ["status", config.get("machine_name")]
     return self.command.run(_args)
Esempio n. 51
0
app.register_blueprint(user)
app.register_blueprint(admin)

# --- FLASK CONFIG ---
app.config["SECRET_KEY"] = os.getenv("SECRET_KEY", "secret")
app.config["SQLALCHEMY_TRACK_MODIFICATIONS"] = False
app.config["SQLALCHEMY_DATABASE_URI"] = os.getenv("DB_URI",
                                                  "sqlite:///depot.db")

if app.config["SECRET_KEY"] == "secret":
    app.logger.warning(
        "SECRET_KEY environment variable not set. Do not do this in a production environment."
    )

# --- CHECK FOR BAD VALUES ---
if config.get("OMEKA_URI") is None:
    raise RuntimeError(
        f"Omeka URI is not set. Please set the value of OMEKA_URI in an environment variable."
    )
else:
    config["OMEKA_URI"] = f"http://{config['OMEKA_URI']}/api"

r = requests.get(config["OMEKA_URI"])
if r.status_code != 500:
    raise RuntimeError(
        f"Failed to connect to Omeka URI: {config['OMEKA_URI']} returned {r.status_code}"
    )

db.init_app(app)
csrf.init_app(app)
socketio.init_app(app)
Esempio n. 52
0
def find_in_path(name):
    path = os.environ.get('PATH', os.defpath).split(os.pathsep)
    if config.get('bin_path') and config['bin_path'] != 'None':
        path.append(config['bin_path'])
    return which(name, path=os.pathsep.join(path))
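# --- Editor's sketch, not part of the original snippet: the helper searches PATH plus
# an optional extra directory taken from config['bin_path'], e.g. (values hypothetical)
#
#     config['bin_path'] = r'C:\tools\bin'
#     find_in_path('cmake')    # -> full path to cmake if found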
Esempio n. 53
0
def create_table():
    wechat_article_list_table = '''
    CREATE TABLE IF NOT EXISTS `wechat_article_list` (
      `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
      `title` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `digest` varchar(2000) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `url` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `source_url` varchar(1000) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `cover` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `subtype` int(11) DEFAULT NULL,
      `is_multi` int(11) DEFAULT NULL,
      `author` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `copyright_stat` int(11) DEFAULT NULL,
      `duration` int(11) DEFAULT NULL,
      `del_flag` int(11) DEFAULT NULL,
      `type` int(11) DEFAULT NULL,
      `publish_time` datetime DEFAULT NULL,
      `sn` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `spider_time` datetime DEFAULT NULL,
      `__biz` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      PRIMARY KEY (`id`),
      UNIQUE KEY `sn` (`sn`)
    ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC
    '''

    wechat_article_task_table = '''
    CREATE TABLE IF NOT EXISTS `wechat_article_task` (
      `id` bigint(20) unsigned NOT NULL AUTO_INCREMENT,
      `sn` varchar(50) DEFAULT NULL,
      `article_url` varchar(255) DEFAULT NULL,
      `state` int(11) DEFAULT '0' COMMENT 'Article crawl state: 0 pending, 2 in progress, 1 done, -1 failed',
      `__biz` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      PRIMARY KEY (`id`),
      UNIQUE KEY `sn` (`sn`) USING BTREE,
      KEY `state` (`state`) USING BTREE
    ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
    '''

    wechat_article_dynamic_table = '''
    CREATE TABLE IF NOT EXISTS `wechat_article_dynamic` (
      `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
      `sn` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `read_num` int(11) DEFAULT NULL,
      `like_num` int(11) DEFAULT NULL,
      `comment_count` int(11) DEFAULT NULL,
      `spider_time` datetime DEFAULT NULL,
      `__biz` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      PRIMARY KEY (`id`),
      UNIQUE KEY `sn` (`sn`)
    ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC
    '''

    wechat_article_comment_table = '''
    CREATE TABLE IF NOT EXISTS `wechat_article_comment` (
      `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
      `comment_id` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'Links the comment to its article',
      `nick_name` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `logo_url` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `content` varchar(2000) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `create_time` datetime DEFAULT NULL,
      `content_id` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL COMMENT 'ID of this individual comment',
      `like_num` int(11) DEFAULT NULL,
      `is_top` int(11) DEFAULT NULL,
      `spider_time` datetime DEFAULT NULL,
      `__biz` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      PRIMARY KEY (`id`),
      UNIQUE KEY `content_id` (`content_id`),
      KEY `comment_id` (`comment_id`)
    ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC
    '''

    wechat_article_table = '''
    CREATE TABLE IF NOT EXISTS `wechat_article` (
      `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
      `account` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `title` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `url` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `author` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `publish_time` datetime DEFAULT NULL,
      `__biz` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `digest` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `cover` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `pics_url` text CHARACTER SET utf8mb4 COLLATE utf8mb4_unicode_ci,
      `content_html` text COLLATE utf8mb4_unicode_ci,
      `source_url` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `comment_id` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `sn` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `spider_time` datetime DEFAULT NULL,
      PRIMARY KEY (`id`),
      UNIQUE KEY `sn` (`sn`),
      KEY `__biz` (`__biz`)
    ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC
    '''

    wechat_account_task_table = '''
    CREATE TABLE IF NOT EXISTS `wechat_account_task` (
      `id` int(11) unsigned NOT NULL AUTO_INCREMENT,
      `__biz` varchar(50) DEFAULT NULL,
      `last_publish_time` datetime DEFAULT NULL COMMENT 'Publish time of the most recently crawled article, used for incremental collection',
      `last_spider_time` datetime DEFAULT NULL COMMENT 'Time of the last crawl, so each account is rescanned at intervals',
      `is_zombie` int(11) DEFAULT '0' COMMENT 'Zombie account: no posts for 3 months (default), no longer checked',
      PRIMARY KEY (`id`)
    ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci
    '''

    wechat_account_table = '''
    CREATE TABLE IF NOT EXISTS `wechat_account` (
      `id` int(10) unsigned NOT NULL AUTO_INCREMENT,
      `__biz` varchar(50) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `account` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `head_url` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `summary` varchar(500) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `qr_code` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `verify` varchar(255) COLLATE utf8mb4_unicode_ci DEFAULT NULL,
      `spider_time` datetime DEFAULT NULL,
      PRIMARY KEY (`id`),
      UNIQUE KEY `__biz` (`__biz`)
    ) ENGINE=InnoDB AUTO_INCREMENT=1 DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_unicode_ci ROW_FORMAT=DYNAMIC
    '''

    if config.get('mysqldb').get('auto_create_tables'):
        mysqldb = MysqlDB(**config.get('mysqldb'))
        _create_table(mysqldb, wechat_article_list_table)
        _create_table(mysqldb, wechat_article_task_table)
        _create_table(mysqldb, wechat_article_dynamic_table)
        _create_table(mysqldb, wechat_article_comment_table)
        _create_table(mysqldb, wechat_article_table)
        _create_table(mysqldb, wechat_account_task_table)
        _create_table(mysqldb, wechat_account_table)
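# --- Editor's sketch, not part of the original snippet ------------------------------
# _create_table() is defined elsewhere; given the calls above it presumably just runs
# the DDL through the MysqlDB wrapper and reports failures. A rough stand-in (the
# MysqlDB method name is an assumption):
def _create_table(mysqldb, table_sql):
    try:
        mysqldb.execute(table_sql)
    except Exception as e:
        print("failed to create table: {}".format(e))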
Esempio n. 54
0
 def create(self):
     _args = [
         "create", "--driver", "virtualbox",
         config.get("machine_name")
     ]
     return self.command.run(_args, show_stream=True)
Esempio n. 55
0
def convert_csv_import(cr,
                       module,
                       fname,
                       csvcontent,
                       idref=None,
                       mode='init',
                       noupdate=False):
    '''Import csv file :
        quote: "
        delimiter: ,
        encoding: utf-8'''
    if not idref:
        idref = {}
    model = ('.'.join(fname.split('.')[:-1]).split('-'))[0]
    #remove folder path from model
    head, model = os.path.split(model)

    input = cStringIO.StringIO(csvcontent)  #FIXME
    reader = csv.reader(input, quotechar='"', delimiter=',')
    fields = reader.next()
    fname_partial = ""
    if config.get('import_partial'):
        fname_partial = module + '/' + fname
        if not os.path.isfile(config.get('import_partial')):
            pickle.dump({}, file(config.get('import_partial'), 'w+'))
        else:
            data = pickle.load(file(config.get('import_partial')))
            if fname_partial in data:
                if not data[fname_partial]:
                    return
                else:
                    for i in range(data[fname_partial]):
                        reader.next()

    if not (mode == 'init' or 'id' in fields):
        _logger.error(
            "Import specification does not contain 'id' and we are in init mode, Cannot continue."
        )
        return

    uid = 1
    datas = []
    for line in reader:
        if not (line and any(line)):
            continue
        try:
            datas.append(map(misc.ustr, line))
        except:
            _logger.error("Cannot import the line: %s", line)

    registry = yuancloud.registry(cr.dbname)
    result, rows, warning_msg, dummy = registry[model].import_data(
        cr, uid, fields, datas, mode, module, noupdate, filename=fname_partial)
    if result < 0:
        # Report failed import and abort module install
        raise Exception(
            _('Module loading %s failed: file %s could not be processed:\n %s')
            % (module, fname, warning_msg))
    if config.get('import_partial'):
        data = pickle.load(file(config.get('import_partial')))
        data[fname_partial] = 0
        pickle.dump(data, file(config.get('import_partial'), 'wb'))
        cr.commit()
Esempio n. 56
0
 def ready(self):
     _args = ["config", config.get("machine_name")]
     _, _, exit_code = self.command.run(_args)
     return exit_code == 0
Esempio n. 57
0
 def _expiration():
     return config.get('repo_update_frequency',
                       60 * 60 * 24)  # default: one day
Esempio n. 58
0
 def register_incorrect_attempt(self, user, team, flag, solves, *args, **kwargs):
     if config.get('enable_track_incorrect_submissions'):
         Solve(team=team, solved_by=user, challenge=self.challenge, flag=flag, correct=False, score=None).save()
Esempio n. 59
0
 def _max_workers(self):
     return config.get("services.events.max_metrics_concurrency", 4)
Esempio n. 60
0
            lastversion = sorted(items, key=versioncmp)[-1]
            if versioncmp(lastversion) > versioncmp(appversion):
                latest = ' (%s is available)' % items[lastversion]
        except:
            pass	# Quietly suppress timeouts etc.
        print '%.2f%s' % (float(''.join(appversion.split('.')[:3])) / 100, latest)	# just first three digits
        sys.exit(EXIT_SUCCESS)

    if args.j:
        # Import and collate from JSON dump
        data = json.load(open(args.j))
        config.set('querytime', getmtime(args.j))
    else:
        # Get state from latest Journal file
        try:
            logdir = config.get('journaldir') or config.default_journal_dir
            logfiles = sorted([x for x in os.listdir(logdir) if x.startswith('Journal') and x.endswith('.log')],
                              key=lambda x: x.split('.')[1:])
            logfile = join(logdir, logfiles[-1])
            with open(logfile, 'r') as loghandle:
                for line in loghandle:
                    try:
                        monitor.parse_entry(line)
                    except:
                        if __debug__:
                            print 'Invalid journal entry "%s"' % repr(line)
        except Exception as e:
            sys.stderr.write("Can't read Journal file: %s\n" % unicode(e).encode('ascii', 'replace'))
            sys.exit(EXIT_SYS_ERR)

        if not monitor.cmdr: