Example #1
def get_all_threads():
  api_link = 'https://disqus.com/api/3.0/threads/list.json'
  info = {
    'api_secret': settings.get('disqus_secret_key'),
    'forum': settings.get('disqus_short_code'),
  }
  return do_api_request(api_link, 'GET', info)
Example #2
def do_api_request(api_link, method='GET', params=None):
    # avoid a shared mutable default argument; start from a fresh dict each call
    if params is None:
        params = {}
    # add sendgrid user & api key
    params.update({
            'api_user': settings.get('sendgrid_user'),
            'api_key': settings.get('sendgrid_secret')
    })
    try:
        if method.upper() == 'GET':
            if len(params.keys()) > 0:
                r = requests.get(
                        api_link,
                        params=params,
                        verify=False
                )
            else:
                r = requests.get(
                        api_link,
                        verify=False
                )
        else:
            r = requests.post(
                    api_link,
                    params=params,
                    verify=False
            )
        response = r.json()
    except:
        response = {}
    if settings.get('environment') == "dev":
        logging.info("=================")
        logging.info(api_link)
        logging.info(json.dumps(params, indent=4))
        logging.info(response)
        logging.info("=================")
    return response
Example #3
	def post(self):
		# Get submitted form data
		to_name = self.get_argument('to_name', '')
		to_email = self.get_argument('to_email', '')
		for_name = self.get_argument('for_name', '')
		for_email = self.get_argument('for_email', '')
		purpose = self.get_argument('purpose', '')
		intro = {'to_name': to_name, 'to_email': to_email, 'for_name': for_name, 'for_email': for_email, 'purpose': purpose}

		# TODO: Server side error handling? 

		# Save intro to database
		try:
			intro['sent_initial'] = datetime.datetime.now()
			introdb.save_intro(intro)
		except:
			return self.redirect('introbot?err=%s' % 'Failed to save file to database. Email was not sent.')

		# Send initial email
		try:
			name = settings.get('name')
			email = settings.get('email')
			subject = "Intro to %s?" % intro['for_name']
			response_url = "%s/response" % settings.get('base_url')
			if "http://" not in response_url:
				response_url = "http://" + response_url
			text_body = 'Hi %s, %s wants to meet with you to %s If you are open to the connection please email reply to [email protected]. This will automatically generate an email from [email protected] to connect the two of you. Thanks! Brittany' % (intro['to_name'], intro['for_name'], intro['purpose'])
			html_body = 'Hi %s,<br><br> %s wants to meet with you to %s <br><br>If you are open to the connection please <a href="%s?id=%s">click here</a>. This will automatically generate an email from %s to connect the two of you. <br><br> Thanks! %s' % (intro['to_name'], intro['for_name'], intro['purpose'], response_url, intro['id'], email, name)
			response = self.send_email(name, intro['to_email'], subject, text_body, html_body, from_name=name)
			if response.status_code != 200:
				raise Exception 
			return self.redirect('?sent=%s (%s)' % (intro['to_name'], intro['to_email'])) # Always redirect after successful POST
		except:
			introdb.remove_intro(intro)
			return self.redirect('?err=%s' % 'Email failed to send.')
Example #4
def send_daily_email(email):
	recipients = userdb.get_newsletter_recipients()
	recipient_usernames = [r['user']['username'] for r in recipients]
	email_sent = False
	for user in recipients:
		# send email
		if settings.get('environment') != "prod":
			print "If this were prod, we would have sent email to %s" % user['email_address']
		else:
			requests.post(
				"https://sendgrid.com/api/mail.send.json",
				data={
					"api_user":settings.get('sendgrid_user'),
					"api_key":settings.get('sendgrid_secret'),
					"from": email['from'],
					"to": user['email_address'],
					"subject": email['subject'],
					"html": email['body_html']
				},
				verify=False
			)
			email_sent = True
	# log it
	if email_sent:
		log_daily_email(email, recipient_usernames)
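send_daily_email reads the 'from', 'subject' and 'body_html' keys from its email argument; a minimal call sketch (the field names come from the function body above, the values are made up):

daily_email = {
    'from': 'digest@example.com',           # hypothetical sender address
    'subject': 'Your daily digest',         # hypothetical subject line
    'body_html': '<p>Today\'s new posts...</p>',
}
send_daily_email(daily_email)  # sends (or, outside prod, just prints) and logs if anything was sent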
Example #5
def pubsub_ping():
    """
    Ping a PubSub hub. Might be overtailored to Superfeedr docs; 
    your pubsub hub may differ.

    Also, I'd love to use requests here, but this is a bit minor.
    Trying to keep those reqs down.
    """
    hub_url = settings.get('PUBSUB_URL')
    blog_url = settings.get('BLOG_URL')
    if not hub_url or not blog_url:
        print "Need to have BLOG_URL and PUBSUB_URL set for pubsub to work."
        return

    import urllib, urllib2

    rss_url  = blog_url+'feed/rss.xml'
    atom_url = blog_url+'feed/atom.xml'

    rss_args = { 'hub.mode':'publish', 'hub.url': rss_url }
    rss_req = urllib2.Request(hub_url)
    rss_req.add_data(urllib.urlencode(rss_args))
    rss_res = urllib2.urlopen(rss_req).read()

    atom_args = { 'hub.mode':'publish', 'hub.url': atom_url }
    atom_req = urllib2.Request(hub_url)
    atom_req.add_data(urllib.urlencode(atom_args))
    atom_res = urllib2.urlopen(atom_req).read()

    return
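The docstring above notes the author avoided requests to keep dependencies down; purely as a hedged illustration (not part of the original), the same publish ping written with requests would look roughly like this:

import requests

def pubsub_ping_requests(hub_url, feed_url):
    # form-encoded POST of hub.mode/hub.url, equivalent to the
    # urllib.urlencode + urllib2.urlopen calls above
    resp = requests.post(hub_url, data={'hub.mode': 'publish', 'hub.url': feed_url})
    return resp.status_code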
Example #6
def generate_response(thumbnail_url="", width=0, height=0, format="plaintext", callback="", request=None):
    if format.lower() == "json":
        json_response = json.dumps(
            {
                "type": "photo",
                "version": "1.0",
                "provider_name": settings.get("provider_name"),
                "provider_url": settings.get("provider_url"),
                "cache_age": settings.get("suggested_cache_time"),
                "url": str(thumbnail_url),
                "height": height,
                "width": width,
            }
        )

        if callback != "":
            logging.debug("Response format is JSONP (callback: %s).\nResponse: %s" % (callback, json_response))
            json_response = "%s(%s);" % (callback, json_response)
        else:
            logging.debug("Response format is JSON.\nResponse: %s" % json_response)
        return json_response
    elif format.lower() == "plaintext":
        logging.debug("Response format is Plaintext. Response: %s" % thumbnail_url)
        return thumbnail_url
    elif format.lower() == "redirect":
        # not implemented yet
        logging.debug("Response format is a 302 redirect to URL: %s" % thumbnail_url)
        return thumbnail_url
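A short usage sketch for generate_response (values are hypothetical, not from the source):

# plain text: returns just the thumbnail URL
generate_response(thumbnail_url='http://example.com/thumb.jpg',
                  width=320, height=240, format='plaintext')

# JSONP: the JSON document above wrapped in the supplied callback
generate_response(thumbnail_url='http://example.com/thumb.jpg',
                  width=320, height=240, format='json', callback='onEmbed')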
Example #7
def send_message(message):
    apikey = settings.get('facebook', 'apikey')
    secretkey = settings.get('facebook', 'sessionkey')
    fb = facebook.Facebook(apikey, secretkey)
    fb.session_key = settings.get('facebook', 'sessionkey')
    fb.secret = settings.get('facebook', 'secret')
    fb.status.set([message])
Example #8
  def get(self):
    code = self.get_argument('code','')
    req_host = self.request.headers['host']
    api_key = settings.get('disqus_public_key')
    api_secret = settings.get('disqus_secret_key')

    link = 'https://disqus.com/api/oauth/2.0/access_token/'
    data = {
      'grant_type':'authorization_code',
      'client_id':api_key,
      'client_secret':api_secret,
      'redirect_uri': 'http://%s/disqus' % req_host,
      'code' : code
    }
    try:
      account = userdb.get_user_by_screen_name(self.current_user)
      if account:
        response = urllib2.urlopen(link, urllib.urlencode(data))
        #  access_token should look like access_token=111122828977539|98f28d8b5b8ed787b585e69b.1-537252399|1bKwe6ghzXyS9vPDyeB9b1fHLRc
        user_data = json.loads(response.read())
        # refresh the user token details
        account['disqus_username'] = user_data['username']
        account['disqus_user_id'] = user_data['user_id']
        account['disqus_access_token'] = user_data['access_token']
        account['disqus_expires_in'] = user_data['expires_in']
        account['disqus_refresh_token'] = user_data['refresh_token']
        account['disqus_token_type'] = user_data['token_type']
        userdb.save_user(account)
    except:
      # trouble logging in
      data = {}
    self.redirect('/your_account?section=social_accounts')
Example #9
    def __init__(self, controller):
        """Create a Frame instance."""
        wx.Frame.__init__(self, parent=None, id=-1, size=(800, 600))

        self._controller = controller

        self._debug("Gui frame initialised")

        self._updateStatus()

        self._createMenus()

        self.iconized = False

        self.Bind(wx.EVT_MOVE, self.OnMove)
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        self.Bind(wx.EVT_ICONIZE, self.OnIconize)

        self.BuildWindow()

        # Load saved location/size settings
        x = settings.get("win.main.pos.x", -1)
        y = settings.get("win.main.pos.y", -1)
        pos = wx.Point(int(x), int(y))

        h = settings.get("win.main.size.h", -1)
        w = settings.get("win.main.size.w", -1)
        size = wx.Size(int(h), int(w))

        self.SetSize(size)
        self.Move(pos)
Example #10
 def __init__(self):
     self.username_soa = settings.get("username_soa")
     self.password_soa = settings.get("password_soa")
     self.client = Client(self.url)
     security = Security()
     security.tokens.append(UsernameToken(self.username_soa, self.password_soa))
     self.client.set_options(wsse=security)
Example #11
def conc_register(self, user_id, corpus_id, subc_name, subchash, query, samplesize, time_limit):
    """
    Register concordance calculation and initiate the calculation.

    arguments:
    user_id -- an identifier of the user who entered the query (used to specify subc. directory if needed)
    corpus_id -- a corpus identifier
    subc_name -- a sub-corpus identifier (None if not used)
    subchash -- a MD5 checksum of the sub-corpus data file
    query -- a query tuple
    samplesize -- a row number limit (if 0 then unlimited - see Manatee API)
    time_limit -- a time limit (in seconds) for the main conc. task

    returns:
    a dict(cachefile=..., pidfile=..., stored_pidfile=...)
    """
    reg_fn = concworker.TaskRegistration(task_id=self.request.id)
    subc_path = os.path.join(settings.get('corpora', 'users_subcpath'), str(user_id))
    pub_path = os.path.join(settings.get('corpora', 'users_subcpath'), 'published')
    initial_args = reg_fn(corpus_id, subc_name, subchash, (subc_path, pub_path), query, samplesize)
    if not initial_args['already_running']:   # we are first trying to calc this
        app.send_task('worker.conc_calculate',
                      args=(initial_args, user_id, corpus_id,
                            subc_name, subchash, query, samplesize),
                      soft_time_limit=time_limit)
    return initial_args
Example #12
    def __init__(self, parent=None, id=-1, title=version.__title__,
                 pos=wx.DefaultPosition, size=(800,600), 
                 style=wx.DEFAULT_FRAME_STYLE):
        """Create a Frame instance."""
        wx.Frame.__init__(self, parent, id, title, pos, size, style)

        settings.loadSettings()

        self.CreateStatusBar()
        self.SetStatusText('Version %s' % self.revision)
        self._createMenus()

        self.iconized = False

        self.Bind(wx.EVT_MOVE, self.OnMove)
        self.Bind(wx.EVT_CLOSE, self.OnClose)
        self.Bind(wx.EVT_ICONIZE, self.OnIconize)

        # Handle incoming comms and settings when the UI is idle
        self.Bind(wx.EVT_IDLE, self.OnIdle)

        self.BuildWindow()

        # Load saved location/size settings
        x = settings.get('win.main.pos.x', -1)
        y = settings.get('win.main.pos.y', -1)
        pos  = wx.Point(int(x), int(y))
        
        h = settings.get('win.main.size.h', -1)
        w = settings.get('win.main.size.w', -1)
        size = wx.Size(int(h), int(w))

        self.SetSize(size)
        self.Move(pos)
Example #13
 def _get_template_filename():
     name = os.path.join(os.path.dirname(__file__), 'templates')
     if settings.get('theme'):
         theme = os.path.join(name, settings.get('theme'))
         if os.path.exists(theme):
             name = theme
     return os.path.join(name, template_name)
Example #14
def corp_freqs_cache_path(corp, attrname):
    """
    Generates an absolute path to an 'attribute' directory/file. The path
    consists of two parts: 1) absolute path to corpus indexed data
    2) filename given by the 'attrname' argument. It is also dependent
    on whether you pass a subcorpus (files are created in user's assigned directory)
    or a regular corpus (files are created in the 'cache' directory).

    arguments:
    corp -- manatee corpus instance
    attrname -- name of an attribute

    returns:
    a path encoded as an 8-bit string (i.e. unicode paths are encoded)
    """
    if hasattr(corp, "spath"):
        ans = corp.spath.decode("utf-8")[:-4] + attrname
    else:
        if settings.contains("corpora", "freqs_cache_dir"):
            cache_dir = os.path.abspath(settings.get("corpora", "freqs_cache_dir"))
            subdirs = (corp.corpname,)
        else:
            cache_dir = os.path.abspath(settings.get("corpora", "cache_dir"))
            subdirs = (corp.corpname, "freqs")
        for d in subdirs:
            cache_dir = "%s/%s" % (cache_dir, d)
            if not os.path.exists(cache_dir):
                os.makedirs(cache_dir)
        ans = "%s/%s" % (cache_dir, attrname)
    return ans.encode("utf-8")
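A hypothetical walk-through of the path construction (corpus name and directories are assumed, not from the source):

# settings: corpora/freqs_cache_dir = /var/cache/freqs
# corp.corpname = 'syn2015', regular corpus (no 'spath' attribute)
# corp_freqs_cache_path(corp, 'word')
#   -> creates /var/cache/freqs/syn2015 if it does not exist
#   -> returns b'/var/cache/freqs/syn2015/word'
# without freqs_cache_dir it falls back to <corpora/cache_dir>/syn2015/freqs/word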
Example #15
def send_message(message):
    blog = settings.get('tumblr', 'blog')
    email = settings.get('tumblr', 'email')
    password = settings.get('tumblr', 'password')
    api = Api(blog, email, password)
    post = api.write_regular(title=None, body=message)
    print post['url']
Example #16
def can_read_page(title, user, is_admin):
    """Returns True if the user is allowed to read the specified page.

    Admins and global readers and editors are allowed to read all pages.  Other
    users are allowed to read all pages if the wiki is open or if the user is
    listed in the readers/editors page property.

    Otherwise no access."""
    if is_admin:
        return True

    is_user_reader = user and (user.email() in settings.get('readers', []) or user.email() in settings.get('editors', []))
    if is_user_reader:
        return True

    page = model.WikiContent.get_by_title(title)
    options = util.parse_page(page.body or '')

    is_open_wiki = settings.get('open-reading', 'yes') == 'yes'
    if is_open_wiki:
        if options.get('private') != 'yes':
            return True
        return user and (user.email() in options.get('readers', []) or user.email() in options.get('editors', []))
    elif settings.get('open-reading') == 'login':
        return options.get('public') == 'yes' or user
    else:
        return options.get('public') == 'yes'
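A minimal usage sketch, assuming the App Engine users API (which matches the user.email() calls above); the page title is made up:

from google.appengine.api import users

user = users.get_current_user()  # None for anonymous visitors
allowed = can_read_page('Welcome', user, users.is_current_user_admin())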
Example #17
    def __init__(self, parent):
        grid.Grid.__init__(self, parent)

        key = 'ui.commsdiagnostics.row.size.'
        width = []
        width.append( settings.get(key+'0', 110)    )
        width.append( settings.get(key+'1', 45)     )
        width.append( settings.get(key+'2', 75)     )
        width.append( settings.get(key+'3', 530)    )

        self.CreateGrid(        1, 4                )
        self.SetRowLabelSize(   50                  )
        self.SetColLabelValue(  0, 'Time'           )
        self.SetColSize(        0, int(width[0])    )
        self.SetColLabelValue(  1, 'Flags'          )             
        self.SetColSize(        1, int(width[1])    )
        self.SetColLabelValue(  2, 'Id'             )
        self.SetColSize(        2, int(width[2])    )
        self.SetColLabelValue(  3, 'Payload'        )
        self.SetColSize(        3, int(width[3])    )

        self.SetDefaultCellFont(wx.Font(8, wx.MODERN, wx.NORMAL, wx.NORMAL))

        # Get all column resizing
        self.Bind(grid.EVT_GRID_COL_SIZE, self.onResize)

        # Bind to connection
        self.conn = comms.getConnection()
        self.conn.bindSendWatcher(self.printSentPacket)
        self.conn.bindRecieveWatcher(self.printRecievedPacket)
Example #18
def send_concordance_url(auth, plugin_api, recipient, url):
    user_id = plugin_api.session['user']['id']
    user_info = auth.get_user_info(user_id)
    user_email = user_info['email']
    username = user_info['username']
    smtp_server = settings.get('mailing', 'smtp_server')
    sender = settings.get('mailing', 'sender')

    text = _('KonText user %s has sent a concordance link to you') % (username, ) + ':'
    text += '\n\n'
    text += url + '\n\n'
    text += '\n---------------------\n'
    text += time.strftime('%d.%m. %Y %H:%M')
    text += '\n'

    s = smtplib.SMTP(smtp_server)

    msg = MIMEText(text, 'plain', 'utf-8')
    msg['Subject'] = _('KonText concordance link')
    msg['From'] = sender
    msg['To'] = recipient
    msg.add_header('Reply-To', user_email)
    try:
        s.sendmail(sender, [recipient], msg.as_string())
        ans = True
    except Exception as ex:
        logging.getLogger(__name__).warn(
            'There were errors sending concordance link via e-mail(s): %s' % (ex,))
        ans = False
    finally:
        s.quit()
    return ans
Example #19
def redirect_uri():
    """
    Both Auth and AuthReturn need redirect_uri
    """
    if settings.get('environment') in ['dev', 'prod']:
        return '%s/auth/google/return' % settings.get('base_url')
    else:
        return 'http://localhost:8001/auth/google/return'
Example #20
 def get(self):
   if not self.current_user_can('manage_disqus'):
     return self.write("not authorized")
   
   from disqusapi import DisqusAPI
   disqus = DisqusAPI(settings.get('disqus_secret_key'), settings.get('disqus_public_key'))
   for result in disqus.trends.listThreads():
       self.write(result)
Example #21
	def __init__(self):
		self._playlist_prefix = settings.get('playlist_prefix')
		self._video_prefix = settings.get('video_prefix')
		self.playlists = settings.get('playlists').split(',')
		self._cache_path = settings.get('cache_path')
		self._make_dirs('current')
		self.stamps = self._make_stamps('current')
		logging.info('Instantiated sync manager')
Example #22
def test_6(finfo):
    bck_type = settings.get('periodic_tasks', 'type')
    conf = settings.get('periodic_tasks', 'conf')
    if bck_type in ('celery', 'konserver'):
        return finfo.file_exists(os.path.join(conf))
    elif bck_type:
        return False, UnsupportedValue('/global/periodic_tasks', bck_type)
    return True, None
Example #23
def get_thread_details(post):
    api_link = 'https://disqus.com/api/3.0/threads/details.json'
    info = {
      'api_key': settings.get('disqus_public_key'),
      'thread:link': template_helpers.post_permalink(post),
      'forum': settings.get('disqus_short_code'),
    }
    return do_api_request(api_link, 'GET', info)
Example #24
 def __init__(self):
     """
     Initializes the application and persistent objects/modules (settings, plugins,...)
     """
     super(KonTextWsgiApp, self).__init__()
     self.cleanup_runtime_modules()
     os.environ['MANATEE_REGISTRY'] = settings.get('corpora', 'manatee_registry')
     setup_plugins()
     translation.load_translations(settings.get('global', 'translations'))
Example #25
    def __init__(self, *args, **kwargs):
        delimiter = settings.get('CSV_DELIMITER', '\t')
        kwargs['delimiter'] = delimiter

        fields_to_export = settings.get('FIELDS_TO_EXPORT', [])
        if fields_to_export :
            kwargs['fields_to_export'] = fields_to_export

        super(KindleCsvItemExporter, self).__init__(*args, **kwargs)
Example #26
def test_6(finfo):
    bck_type = settings.get('calc_backend', 'type')
    conf = settings.get('calc_backend', 'conf')
    if conf:  # otherwise user uses direct configuration which is taken care of by RelaxNG validation
        if bck_type in ('celery', 'konserver'):
            return finfo.file_exists(os.path.join(conf))
        elif bck_type != 'multiprocessing':
            return False, UnsupportedValue('/global/calc_backend', bck_type)
    return True, None
Example #27
def subscribe_to_thread(thread_id, access_token):
  api_link = 'https://disqus.com/api/3.0/threads/subscribe.json'
  info = {
    'api_secret': settings.get('disqus_secret_key'),
    'api_key': settings.get('disqus_public_key'),
    'access_token': access_token,
    'thread': thread_id,
  }
  return do_api_request(api_link, 'POST', info)
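A hypothetical call (the thread id is made up), reusing the access token stored by the Disqus OAuth handler shown earlier:

subscribe_to_thread(thread_id='1234567890',
                    access_token=account['disqus_access_token'])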
Example #28
 def get(self):
   consumer_key = settings.get('twitter_consumer_key')
   consumer_secret = settings.get('twitter_consumer_secret')
   callback_host = 'http://%s/twitter' % self.request.headers['host']
   auth = tweepy.OAuthHandler(consumer_key, consumer_secret, callback_host)
   auth_url = auth.get_authorization_url(True)
   self.set_secure_cookie("request_token_key", auth.request_token.key)
   self.set_secure_cookie("request_token_secret", auth.request_token.secret)
   self.redirect(auth_url)
Example #29
 def test_SettingHierarchy(self):
   # test that user settings overwrite global settings where they are present
   g = settings.set('an_index',is_global=True,value='global value')
   u = settings.set('an_index',value='user value')
   self.assert_(g.key() != u.key(),"User settings should not overwrite global settings")
   s_one = settings.get('an_index') #should be global
   self.assert_(s_one['value'] == 'global value',"settings.get should return the global value unless user_first is set")
   s_two = settings.get('an_index',user_first=True)
   self.assert_(s_two['value'] == 'user value',"User setting should be returned in favour of the global setting when user_first is True")
Example #30
def can_upload_image(user=None, is_admin=False):
    if is_admin:
        return True

    if settings.get('image-uploading') == 'yes':
        return True
    if user and settings.get('image-uploading') == 'login':
        return True
    return False
Example #31
 def __init__(self):
     self.mails_addicionals = settings.get("mails_addicionals")
     self.patrons_mails_addicionals \
         = settings.get("patrons_mail_addicionals")
Example #32
def init():
    import settings
    mongo_init(settings.get('mongo_database'))
    redis_init(settings.get('redis_para'))
Example #33
def test_16(finfo):
    conf = settings.get('plugins', 'sessions')
    return _test_plugin_common('sessions', conf)
Example #34
def test_21(finfo):
    conf = settings.get('plugins', 'user_items')
    return _test_plugin_common('user_items', conf)
Example #35
def test_18(finfo):
    conf = settings.get('plugins', 'conc_persistence')
    return _test_plugin_common('conc_persistence', conf)
Example #36
    def post(self):
        if self.current_user not in settings.get('staff'):
            return self.redirect('/')

        vote = self.get_vote()
        tweet_beginning = self.get_tweet_beginning()
        tweet_text = self.get_argument('tweet_text', '')

        # Check if re-POSTing. I did this once: nothing breaks, but the tweets fail,
        # so the saved tweet document ends up recording 0 accounts tweeted
        existing_tweet = tweetdb.find_one({'vote': vote})
        if existing_tweet:
            return self.redirect('admin/?err=tweet_exists')

        if len(
                tweet_text
        ) > 110:  # poorly hardcoded. calculated from get_tweet_beginning()
            err = 'Some tweets will exceed 140 characters in length!'
            return self.render('admin/tweet.html',
                               err=err,
                               tweet_beginning=tweet_beginning,
                               vote=vote,
                               form=self.get_tweet_form())

        else:
            vote['fields'] = 'voter_ids'
            individual_votes = congress.votes(**vote)

            if len(individual_votes) != 1:
                print 'Error finding votes'
                raise Exception

            individual_votes = individual_votes[0][
                'voter_ids']  # returns a dict with bioguide_ids for keys

            # Tweet for every applicable politician. Yes, this is suboptimal
            tweeted = {}  # Track successfully tweeted accounts...
            failed = {}  # and those that failed
            for p in Politician.objects():
                # Hierarchy of name choosing
                if p.twitter:
                    name = "@" + p.twitter
                elif len(p.brief_name()) <= 16:
                    name = p.brief_name()
                elif len(p.last_name) <= 16:
                    name = p.last_name
                elif p.title == 'Sen':
                    name = "Senator"
                else:
                    name = "Representative"

                # Find corresponding vote
                if p.bioguide_id in individual_votes:
                    choice = individual_votes[p.bioguide_id]
                    if choice == 'Yea':
                        choice = 'YES'
                    elif choice == 'Nay':
                        choice = 'NO'
                    elif choice == 'Not Voting':
                        choice = 'abstained'

                    # Turn template into actual tweet and tweet!
                    tweet_template = tweet_beginning + tweet_text  # Further down replace
                    tweet = tweet_template.replace(REPS_ACCOUNT_PLACEHOLDER,
                                                   name).replace(
                                                       CHOICE_PLACEHOLDER,
                                                       choice)
                    if choice == 'abstained':
                        tweet = tweet.replace(
                            'voted ',
                            '')  # get rid of voting verb if abstained

                    success = p.tweet(tweet)
                    # If successfully tweeted, save for entry to database
                    if success:
                        tweeted[p.bioguide_id] = choice
                    else:
                        failed[p.bioguide_id] = choice

                    logging.info(len(tweeted))
                    logging.info(len(failed))
                # endfor p in Politician.objects():

            # Save to database
            save_tweet = {
                'datetime': datetime.datetime.now(),
                'vote': vote,
                'tweeted':
                tweeted,  # Who actually had FTV accounts, i.e. actually tweeted 
                'tweet_template': tweet_template,
                'placeholders': {
                    'reps_account_placeholder': REPS_ACCOUNT_PLACEHOLDER,
                    'choice_placeholder': CHOICE_PLACEHOLDER
                },
                'tweet':
                tweet,  # A sample tweet (always from last rep in database to tweet)
                'individual_votes': individual_votes,
                'admin': self.current_user
            }
            tweetdb.save(save_tweet)
            logging.info('saved tweet')

            # Email admins
            subject = '%s tweeted!' % self.current_user
            text_body = tweet_template
            for sn in settings.get('staff'):
                admin = userdb.get_user_by_screen_name(sn)
                try:
                    self.send_email(admin['email_address'], subject, text_body)
                except:
                    print 'Failed to send email to admin %s' % admin['user'][
                        'username']
                    pass

            if len(failed) == 0:
                return self.redirect('/admin?msg=tweet_success')
            else:
                return self.redirect(
                    '/admin?msg=tweet_success&num_accounts_failed=%s' %
                    len(failed))
Example #37
    def loop_body(self):
        # because closing the GUI doesn't actually exit the program
        if not self.gui.alive:
            del self.log
            raise SystemExit

        loop_start_time: float = time.perf_counter()
        self.slow_sleep_time = False
        self.loop_iteration += 1
        self.log.debug(
            f"Main loop iteration this app session: {self.loop_iteration}")
        self.no_condebug = False  # this will be updated if need be
        self.fast_next_loop = False

        if self.custom_functions:
            self.custom_functions.before_loop(self)

        p_data: Dict[str, Dict[str,
                               Union[bool, str, int,
                                     None]]] = self.process_scanner.scan()

        if self.process_scanner.tf2_without_condebug:
            self.no_condebug = True

        if p_data['Steam']['running']:
            username_count: int = len(self.usernames)
            self.usernames.add(configs.get_steam_username())
            if len(self.usernames) != username_count:
                self.log.debug(f"Username(s) updated: {self.usernames}")

            if not p_data['TF2']['running']:
                # reads steam config files to find TF2 launch options (on first loop, and if any of them have been modified)
                config_scan_needed: bool = self.steam_config_mtimes == {} or not self.gui.tf2_launch_cmd

                for steam_config in self.steam_config_mtimes:
                    old_mtime: int = self.steam_config_mtimes[steam_config]
                    new_mtime: int = int(os.stat(steam_config).st_mtime)

                    if new_mtime > old_mtime:
                        self.log.debug(
                            f"Rescanning Steam config files ({new_mtime} > {old_mtime} for {steam_config})"
                        )
                        config_scan_needed = True

                if config_scan_needed:
                    # to be clear, this scan is always needed but doesn't need to be re-done every loop
                    tf2_exe_path: str = self.find_tf2_exe(
                        p_data['Steam']['path'])
                    need_condebug: bool = not self.gui.launched_tf2_with_button and self.process_scanner.tf2_without_condebug
                    tf2_launch_cmd: Optional[str] = self.steam_config_file(
                        p_data['Steam']['path'], need_condebug)

                    if tf2_exe_path and tf2_launch_cmd is not None:
                        self.gui.tf2_launch_cmd = (tf2_exe_path,
                                                   tf2_launch_cmd)
                        self.log.debug(
                            f"Set launch TF2 command to {self.gui.tf2_launch_cmd}"
                        )
                    elif self.process_scanner.tf2_without_condebug:
                        self.no_condebug = True
        else:
            if p_data['Steam']['pid'] is not None or p_data['Steam'][
                    'path'] is not None:
                self.log.error(
                    f"Steam isn't running but its process info is {p_data['Steam']}. WTF?"
                )

            if p_data['TF2']['running']:
                self.log.error("TF2 is running but Steam isn't. WTF?")

        if p_data['TF2']['running'] and p_data['Discord'][
                'running'] and p_data['Steam']['running']:
            # modifies a few tf2 config files
            if not self.has_checked_class_configs:
                configs.class_config_files(self.log, p_data['TF2']['path'])
                self.has_checked_class_configs = True

            self.game_state.game_start_time = p_data['TF2']['time']
            self.gui.set_console_log_button_states(True)
            self.gui.set_launch_tf2_button_state(False)
            self.gui.set_bottom_text('discord', False)
            self.reset_launched_with_button = True

            console_log_path: str = os.path.join(p_data['TF2']['path'], 'tf',
                                                 'console.log')
            self.gui.console_log_path = console_log_path
            console_log_parsed: Optional[Tuple[
                bool, str, str, str, str, bool]] = self.interpret_console_log(
                    console_log_path,
                    self.usernames,
                    tf2_start_time=p_data['TF2']['time'])
            self.old_console_log_mtime = self.console_log_mtime

            if console_log_parsed:
                self.game_state.set_bulk(console_log_parsed)

            base_window_title: str = self.loc.text(
                "TF2 Rich Presence ({0})").format(launcher.VERSION)
            window_title_format_menus: str = self.loc.text("{0} - {1} ({2})")
            window_title_format_main: str = self.loc.text("{0} - {1} on {2}")

            if self.game_state.in_menus:
                self.test_state = 'menus'
                window_title: str = window_title_format_menus.format(
                    base_window_title, "In menus",
                    self.loc.text(self.game_state.queued_state))
            else:
                self.test_state = 'in game'
                window_title = window_title_format_main.format(
                    base_window_title, self.game_state.tf2_class,
                    self.game_state.map_fancy)

                # get server data, if needed (game_state doesn't handle it itself)
                server_modes = []
                if settings.get('top_line') in ('Server name', 'Player count',
                                                'Kills'):
                    server_modes.append(settings.get('top_line'))
                if settings.get('bottom_line') in ('Server name',
                                                   'Player count', 'Kills'):
                    server_modes.append(settings.get('bottom_line'))
                self.game_state.update_server_data(server_modes,
                                                   self.usernames)

            if self.custom_functions:
                self.custom_functions.modify_game_state(self)

            self.set_gui_from_game_state(p_data['TF2']['time'])

            if self.custom_functions:
                self.custom_functions.modify_gui(self)

            if self.game_state.update_rpc:
                self.activity = self.game_state.activity()

                if self.custom_functions:
                    self.custom_functions.modify_rpc_activity(self)

                try:
                    self.send_rpc_activity()
                except KeyboardInterrupt:
                    self.handle_rpc_error("Timed out sending RPC activity")
            else:
                self.log.debug("Not updating RPC state")

            self.gui.master.title(window_title)
            self.log.debug(f"Set window title to \"{window_title}\"")

        elif not p_data['TF2']['running']:
            # there's probably a better way to do this
            if self.reset_launched_with_button:
                self.gui.launched_tf2_with_button = False
                self.reset_launched_with_button = False

            if self.gui.launched_tf2_with_button:
                self.log.debug(
                    "Skipping possibly resetting launch button due to game hopefully launching"
                )
            else:
                self.gui.set_launch_tf2_button_state(
                    p_data['Steam']['running'])

            self.last_console_log_size = None
            self.necessary_program_not_running('Team Fortress 2', 'TF2')
            self.should_mention_tf2 = False
        elif not p_data['Discord']['running']:
            self.necessary_program_not_running('Discord')
            self.should_mention_discord = False
            self.gui.set_launch_tf2_button_state(p_data['Steam']['running'])
            self.gui.launch_tf2_button['state'] = 'disabled'
        else:
            # last but not least, Steam
            self.necessary_program_not_running('Steam')
            self.should_mention_steam = False
            self.gui.set_launch_tf2_button_state(p_data['Steam']['running'])
            self.gui.launch_tf2_button['state'] = 'disabled'

        self.gui.safe_update()
        self.init_operations()

        if self.no_condebug and not self.gui.launched_tf2_with_button:
            self.gui.no_condebug_warning()
            self.fast_next_loop = True

        if self.gui.update_checker.update_check_ready():
            self.gui.handle_update_check(
                self.gui.update_checker.receive_update_check())

        if self.custom_functions:
            self.custom_functions.after_loop(self)

        if not self.has_set_process_priority:
            self_process: psutil.Process = psutil.Process()
            priorities_before: tuple = (self_process.nice(),
                                        self_process.ionice())
            self_process.nice(psutil.BELOW_NORMAL_PRIORITY_CLASS)
            self_process.ionice(psutil.IOPRIO_LOW)
            priorities_after: tuple = (self_process.nice(),
                                       self_process.ionice())
            self.log.debug(
                f"Set process priorities from {priorities_before} to {priorities_after}"
            )
            self.has_set_process_priority = True

        if not gc.isenabled():
            gc.enable()
            gc.collect()
            self.log.debug("Enabled GC and collected")

        self.gui.main_loop_body_times.append(
            round(time.perf_counter() - loop_start_time, 2))
        return self.client_connected, self.rpc_client
Example #38
    def v1(self, req):
        self._headers['Content-Type'] = 'application/xml'
        current_version = 1.2

        default_corp_list = settings.get('corpora', 'default_corpora', [])
        corpname = None
        if 0 == len(default_corp_list):
            _logger.critical('FCS cannot work properly without a default_corpora set')
        else:
            corpname = default_corp_list[0]

        pr = urllib.parse.urlparse(req.host_url)
        # None values should be filled in later
        data = {
            'corpname': corpname,
            'corppid': None,
            'version': current_version,
            'recordPacking': 'xml',
            'result': [],
            'operation': None,
            'numberOfRecords': 0,
            'server_name': pr.hostname,
            'server_port': pr.port or 80,
            'database': req.path,
            'maximumRecords': None,
            'maximumTerms': None,
            'startRecord': None,
            'responsePosition': None,
        }
        # supported parameters for all operations
        supported_args = ['operation', 'stylesheet', 'version', 'extraRequestData']

        try:
            # check operation
            operation = req.args.get('operation', 'explain')
            data['operation'] = operation

            # check version
            version = req.args.get('version', None)
            if version is not None and current_version < float(version):
                raise Exception(5, version, 'Unsupported version')

            # check integer parameters
            maximumRecords = req.args.get('maximumRecords', 250)
            if 'maximumRecords' in req.args:
                try:
                    maximumRecords = int(maximumRecords)
                    if maximumRecords <= 0:
                        raise Exception(6, 'maximumRecords', 'Unsupported parameter value')
                except:
                    raise Exception(6, 'maximumRecords', 'Unsupported parameter value')
            data['maximumRecords'] = maximumRecords

            maximumTerms = req.args.get('maximumTerms', 100)
            if 'maximumTerms' in req.args:
                try:
                    maximumTerms = int(maximumTerms)
                except:
                    raise Exception(6, 'maximumTerms', 'Unsupported parameter value')
            data['maximumTerms'] = maximumTerms

            startRecord = req.args.get('startRecord', 1)
            if 'startRecord' in req.args:
                try:
                    startRecord = int(startRecord)
                    if startRecord <= 0:
                        raise Exception(6, 'startRecord', 'Unsupported parameter value')
                except:
                    raise Exception(6, 'startRecord', 'Unsupported parameter value')
            data['startRecord'] = startRecord

            responsePosition = req.args.get('responsePosition', 0)
            if 'responsePosition' in req.args:
                try:
                    responsePosition = int(responsePosition)
                except:
                    raise Exception(6, 'responsePosition', 'Unsupported parameter value')
            data['responsePosition'] = responsePosition

            # set content-type in HTTP header
            recordPacking = req.args.get('recordPacking', 'xml')
            if recordPacking == 'xml':
                pass
            elif recordPacking == 'string':
                # TODO(jm)!!!
                self._headers['Content-Type'] = 'text/plain; charset=utf-8'
            else:
                raise Exception(71, 'recordPacking', 'Unsupported record packing')

            # provide info about service
            if operation == 'explain':
                self._check_args(
                    req, supported_args,
                    ['recordPacking', 'x-fcs-endpoint-description']
                )
                corpus = self.cm.get_Corpus(corpname)
                data['result'] = corpus.get_conf('ATTRLIST').split(',')
                data['numberOfRecords'] = len(data['result'])
                data['corpus_desc'] = 'Corpus {0} ({1} tokens)'.format(
                    corpus.get_conf('NAME'), l10n.simplify_num(corpus.size()))
                data['corpus_lang'] = Languages.get_iso_code(corpus.get_conf('LANGUAGE'))
                data['show_endpoint_desc'] = (True if req.args.get('x-fcs-endpoint-description', 'false') == 'true'
                                              else False)

            # wordlist for a given attribute
            elif operation == 'scan':
                self._check_args(
                    req, supported_args,
                    ['scanClause', 'responsePosition', 'maximumTerms', 'x-cmd-resource-info']
                )
                data['resourceInfoRequest'] = req.args.get('x-cmd-resource-info', '') == 'true'
                scanClause = req.args.get('scanClause', '')
                if scanClause.startswith('fcs.resource='):
                    value = scanClause.split('=')[1]
                    data['result'] = self._corpora_info(value, maximumTerms)
                else:
                    data['result'] = conclib.fcs_scan(
                        corpname, scanClause, maximumTerms, responsePosition)

            # simple concordancer
            elif operation == 'searchRetrieve':
                # TODO we should review the args here (especially x-cmd-context, resultSetTTL)
                self._check_args(
                    req, supported_args,
                    ['query', 'startRecord', 'maximumRecords', 'recordPacking',
                        'recordSchema', 'resultSetTTL', 'x-cmd-context', 'x-fcs-context']
                )
                if 'x-cmd-context' in req.args:
                    req_corpname = req.args['x-cmd-context']
                    user_corpora = plugins.runtime.AUTH.instance.permitted_corpora(
                        self.session_get('user'))
                    if req_corpname in user_corpora:
                        corpname = req_corpname
                    else:
                        _logger.warning(
                            'Requested unavailable corpus [%s], defaulting to [%s]', req_corpname, corpname)
                    data['corpname'] = corpname

                corp_conf_info = plugins.runtime.CORPARCH.instance.get_corpus_info('en_US',
                                                                                   corpname)
                data['corppid'] = corp_conf_info.get('web', '')
                query = req.args.get('query', '')
                corpus = self.cm.get_Corpus(corpname)
                if 0 == len(query):
                    raise Exception(7, 'fcs_query', 'Mandatory parameter not supplied')
                data['result'], data['numberOfRecords'] = self.fcs_search(
                    corpus, corpname, query, maximumRecords, startRecord)

            # unsupported operation
            else:
                # show within explain template
                data['operation'] = 'explain'
                raise Exception(4, '', 'Unsupported operation')

        # catch exception and amend diagnostics in template
        except Exception as e:
            data['message'] = ('error', repr(e))
            try:
                data['code'], data['details'], data['msg'] = e
            except (ValueError, TypeError):
                data['code'], data['details'] = 1, repr(e)
                data['msg'] = 'General system error'

        return data
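For orientation, a few hypothetical requests this handler would serve (the endpoint path and parameter values are assumed; only the parameter names come from the code above):

# .../v1?operation=explain&x-fcs-endpoint-description=true
# .../v1?operation=scan&scanClause=word&maximumTerms=50&responsePosition=0
# .../v1?operation=searchRetrieve&query=dog&maximumRecords=20&startRecord=1&recordPacking=xml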
Example #39
def test_no_settings_dir():
    assert settings.get('config') is None, settings.get('config')
    create_config_file('default_settings.py', default_config)
    settings.reload()
    assert settings.get('config') == 'default', settings.get('config')
Example #40
def _getSources():
    sources = settings.get('sources')
    if sources is None:
        sources = []
    return sources
Example #41
    ]
    sys.path[0:0] = CMIFPATH

    os.environ["CMIF"] = CMIFDIR
    #os.environ["CHANNELDEBUG"] = "1"
else:
    progdir = os.path.split(sys.argv[0])[0]
    os.environ["CMIF"] = progdir

# Next, show the splash screen
import splash

splash.splash('loadprog')
import settings

license = settings.get('license')
user = settings.get('license_user')
org = settings.get('license_organization')
splash.setuserinfo(user, org, license)

if len(sys.argv) > 1 and sys.argv[1] == '-p':
    profile = 1
    del sys.argv[1]
    print '** Profile **'
else:
    profile = 0

## import trace
## trace.set_trace()

## if len(sys.argv) < 2:
Example #42
    def object_command(self, object_type, object_action):
        """
        Process object-based commands such as "distro add" or "profile rename"
        """
        task_id = -1  # if assigned, we must tail the logfile

        fields = self.get_fields(object_type)
        if object_action in [
                "add", "edit", "copy", "rename", "find", "remove"
        ]:
            utils.add_options_from_fields(object_type, self.parser, fields,
                                          object_action)
        elif object_action in ["list"]:
            pass
        elif object_action not in ("reload", "update"):
            self.parser.add_option("--name",
                                   dest="name",
                                   help="name of object")
        elif object_action == "reload":
            self.parser.add_option("--filename",
                                   dest="filename",
                                   help="filename to load data from")
        (options, args) = self.parser.parse_args()

        # the first three don't require a name
        if object_action == "report":
            if options.name is not None:
                report_item(self.remote, object_type, None, options.name)
            else:
                report_items(self.remote, object_type)
        elif object_action == "list":
            list_items(self.remote, object_type)
        elif object_action == "find":
            items = self.remote.find_items(
                object_type, utils.strip_none(vars(options), omit_none=True),
                "name", False)
            for item in items:
                print item
        elif object_action in OBJECT_ACTIONS:
            if opt(options,
                   "name") == "" and object_action not in ("reload", "update"):
                print "--name is required"
                sys.exit(1)
            if object_action in ["add", "edit", "copy", "rename", "remove"]:
                try:
                    if object_type == "setting":
                        settings = self.remote.get_settings()
                        if not settings.get('allow_dynamic_settings', False):
                            raise RuntimeError(
                                "Dynamic settings changes are not enabled. Change the allow_dynamic_settings to 1 and restart cobblerd to enable dynamic settings changes"
                            )
                        elif options.name == 'allow_dynamic_settings':
                            raise RuntimeError(
                                "Cannot modify that setting live")
                        elif self.remote.modify_setting(
                                options.name, options.value, self.token):
                            raise RuntimeError("Changing the setting failed")
                    else:
                        self.remote.xapi_object_edit(
                            object_type, options.name, object_action,
                            utils.strip_none(vars(options), omit_none=True),
                            self.token)
                except xmlrpclib.Fault, (err):
                    (etype, emsg) = err.faultString.split(":", 1)
                    print emsg[1:-1]  # don't print the wrapping quotes
                    sys.exit(1)
                except RuntimeError, (err):
                    print err.args[0]
                    sys.exit(1)
Example #43
def test_22(finfo):
    conf = settings.get('plugins', 'menu_items')
    return _test_plugin_common('menu_items', conf)
Example #44
    def get(self):
        if self.current_user not in settings.get('staff'):
            return self.redirect('/')

        tweet_text = self.get_argument('tweet_text', '')
        return self.render('admin/tweet_no_vote.html', tweet_text=tweet_text)
Example #45
        return 2
    if estat == UNKNOWN:
        return 3

    # should not reach
    return -1


if __name__ == '__main__':
    a = None
    opts, args = getopt.getopt(sys.argv[1:], 'c:')
    for o, a in opts:
        if o == '-c':
            settings.load(a)

    logging.basicConfig(filename=settings.get("log_file"),
                        level=settings.get("log_level"),
                        format='%(asctime)s [%(process)d] %(name)-12s'
                        ' %(levelname)-8s %(message)s')

    buffer_logs = StringIO()
    logger.addHandler(logging.StreamHandler(buffer_logs))

    if a is not None:
        logger.info("Fitxer de configuracio [%s]", a)

    estat = UNKNOWN
    tractat = False
    try:
        logger.info("-----------------------------------------------------")
        logger.info("Llegeixo mail")
Example #46
    def post(self):
        if self.current_user not in settings.get('staff'):
            return self.redirect('/')

        tweet = self.get_argument('tweet_text', '')
        if not tweet:
            return self.redirect('')  # Goes back to GET

        # Check if re-POSTing. I did this once: nothing breaks, but the tweets fail,
        # so the saved tweet document ends up recording 0 accounts tweeted
        existing_tweet = tweetdb.find_one({'tweet': tweet})
        if existing_tweet:
            err = 'Already tweeted same text!'
            return self.render('admin/tweet_no_vote.html',
                               err=err,
                               tweet_text=tweet)

        if len(tweet) > 110:
            err = 'Tweet exceeds 110 characters'
            return self.render('admin/tweet_no_vote.html',
                               err=err,
                               tweet_text=tweet)

        # Get accounts to tweet for
        account = self.get_argument('account', '')
        chamber = self.get_argument('chamber', '')
        if account and chamber:
            err = 'Please choose EITHER a group of accounts or write in a single FTV account name'
            return self.render('admin/tweet_no_vote.html',
                               err=err,
                               tweet_text=tweet)

        # Single account takes precedence
        if account:
            try:
                politicians = Politician.objects(ftv__twitter=account)
            except:
                err = 'Could not find account'
                return self.render('admin/tweet_no_vote.html',
                                   err=err,
                                   tweet_text=tweet)
        else:
            if chamber == 'all':
                politicians = Politician.objects()
            elif chamber == 'house':
                politicians = Politician.objects(chamber='House')
            elif chamber == 'senate':
                politicians = Politician.objects(chamber='Senate')
            else:
                raise Exception

        tweeted = []  # Track successfully tweeted accounts...
        failed = []  # and those that failed
        for p in politicians:
            success = p.tweet(tweet)
            # If successfully tweeted, save for entry to database
            if success:
                tweeted.append(p.bioguide_id)
            else:
                failed.append(p.bioguide_id)

        # Save to database
        save_tweet = {
            'datetime': datetime.datetime.now(),
            'tweeted':
            tweeted,  # Who actually had FTV accounts, i.e. actually tweeted 
            'tweet':
            tweet,  # A sample tweet (always from last rep in database to tweet)
            'admin': self.current_user
        }
        tweetdb.save(save_tweet)
        logging.info('saved tweet')

        # Email admins
        subject = '%s tweeted!' % self.current_user
        text_body = tweet
        for sn in settings.get('staff'):
            admin = userdb.get_user_by_screen_name(sn)
            try:
                self.send_email(admin['email_address'], subject, text_body)
            except:
                print 'Failed to send email to admin %s' % admin['user'][
                    'username']
                pass

        if len(failed) == 0:
            return self.redirect('/admin?msg=tweet_success')
        else:
            return self.redirect(
                '/admin?msg=tweet_success&num_accounts_failed=%s' %
                len(failed))
Example #47
    def compute(self, config, budget, *args, **kwargs):
        """
        Get model with hyperparameters from config generated by get_configspace()
        """
        if not torch.cuda.is_available():
            logging.info('no gpu device available')
            sys.exit(1)

        logging.info(f'Running config for {budget} epochs')
        gpu = 'cuda:0'
        np.random.seed(self.seed)
        torch.cuda.set_device(gpu)
        cudnn.benchmark = True
        torch.manual_seed(self.seed)
        cudnn.enabled = True
        torch.cuda.manual_seed(self.seed)
        logging.info('gpu device = %s' % gpu)
        logging.info("config = %s", config)

        ensemble_model = EnsembleModel(self.trained_models,
                                       dense_units=config['dense_units'],
                                       out_size=self.train_dataset.n_classes)
        ensemble_model = ensemble_model.cuda()

        logging.info("param size = %fMB",
                     utils.count_parameters_in_MB(ensemble_model))

        criterion = nn.CrossEntropyLoss()
        criterion = criterion.cuda()

        if config['optimizer'] == 'sgd':
            optimizer = torch.optim.SGD(ensemble_model.parameters(),
                                        lr=config['initial_lr'],
                                        momentum=config['sgd_momentum'],
                                        weight_decay=config['weight_decay'],
                                        nesterov=config['nesterov'])
        else:
            optimizer = get('opti_dict')[config['optimizer']](
                ensemble_model.parameters(),
                lr=config['initial_lr'],
                weight_decay=config['weight_decay'])

        if config['lr_scheduler'] == 'Cosine':
            lr_scheduler = torch.optim.lr_scheduler.CosineAnnealingLR(
                optimizer, int(budget))
        elif config['lr_scheduler'] == 'Exponential':
            lr_scheduler = torch.optim.lr_scheduler.ExponentialLR(optimizer,
                                                                  gamma=0.1)

        indices = list(
            np.random.randint(
                0,
                2 * len(self.train_dataset) // 3,
                size=len(self.train_dataset) //
                3))  #list(range(int(self.split*len(self.train_dataset))))
        valid_indices = list(
            np.random.randint(2 * len(self.train_dataset) // 3,
                              len(self.train_dataset),
                              size=len(self.train_dataset) // 6)
        )  #list(range(int(self.split*len(self.train_dataset)), len(self.train_dataset)))
        print("Training size=", len(indices))
        training_sampler = SubsetRandomSampler(indices)
        valid_sampler = SubsetRandomSampler(valid_indices)
        train_queue = torch.utils.data.DataLoader(dataset=self.train_dataset,
                                                  batch_size=self.batch_size,
                                                  sampler=training_sampler)

        valid_queue = torch.utils.data.DataLoader(dataset=self.train_dataset,
                                                  batch_size=self.batch_size,
                                                  sampler=valid_sampler)

        for epoch in range(int(budget)):
            logging.info('epoch %d lr %e', epoch, lr_scheduler.get_lr()[0])
            ensemble_model.drop_path_prob = (
                config['drop_path_prob'] * epoch / int(budget))

            train_acc, train_obj = ensemble_train(
                train_queue,
                ensemble_model,
                criterion,
                optimizer,
                grad_clip=config['grad_clip_value'])
            logging.info('train_acc %f', train_acc)
            lr_scheduler.step()

            valid_acc, valid_obj = infer(valid_queue, ensemble_model,
                                         criterion)
            logging.info('valid_acc %f', valid_acc)

        return {
            # Hyperband always minimizes the reported 'loss'; here we return
            # the validation loss computed by infer() above.
            'loss': valid_obj,
            # mandatory; can be used to return additional information
            'info': {}
        }
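The compute() method above expects its hyperparameters ('dense_units', 'optimizer', 'initial_lr', 'sgd_momentum', 'nesterov', 'weight_decay', 'lr_scheduler', 'drop_path_prob', 'grad_clip_value') to come from a config produced by get_configspace(), which is not shown here. As a rough sketch only — the value ranges below are assumptions, not the project's actual search space — such a config space could be defined with the ConfigSpace package like this:

import ConfigSpace as CS
import ConfigSpace.hyperparameters as CSH


def get_configspace():
    # Hypothetical search space covering the keys read by compute() above;
    # the ranges are illustrative guesses, not the project's real ones.
    cs = CS.ConfigurationSpace()

    optimizer = CSH.CategoricalHyperparameter('optimizer', ['sgd', 'adam'])
    sgd_momentum = CSH.UniformFloatHyperparameter('sgd_momentum', 0.0, 0.99)
    nesterov = CSH.CategoricalHyperparameter('nesterov', [True, False])

    cs.add_hyperparameters([
        optimizer, sgd_momentum, nesterov,
        CSH.UniformIntegerHyperparameter('dense_units', 32, 512, log=True),
        CSH.UniformFloatHyperparameter('initial_lr', 1e-4, 1e-1, log=True),
        CSH.UniformFloatHyperparameter('weight_decay', 1e-5, 1e-2, log=True),
        CSH.CategoricalHyperparameter('lr_scheduler', ['Cosine', 'Exponential']),
        CSH.UniformFloatHyperparameter('drop_path_prob', 0.0, 0.3),
        CSH.UniformFloatHyperparameter('grad_clip_value', 1.0, 10.0),
    ])

    # SGD-specific hyperparameters are only active when 'sgd' is selected.
    cs.add_condition(CS.EqualsCondition(sgd_momentum, optimizer, 'sgd'))
    cs.add_condition(CS.EqualsCondition(nesterov, optimizer, 'sgd'))
    return cs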
Exemple #48
0
import os
import imp
import sys
import time
import cPickle

CURR_PATH = os.path.realpath(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, '%s/lib' % CURR_PATH)
import settings
import initializer
import plugins
import translation
from stderr2f import stderr_redirector

settings.load('%s/conf/config.xml' % CURR_PATH)
if settings.get('global', 'manatee_path', None):
    sys.path.insert(0, settings.get('global', 'manatee_path'))
import manatee

os.environ['MANATEE_REGISTRY'] = settings.get('corpora', 'manatee_registry')
initializer.init_plugin('db')
initializer.init_plugin('sessions')
initializer.init_plugin('auth')
initializer.init_plugin('locking')
initializer.init_plugin('conc_cache')
initializer.init_plugin('conc_persistence')
initializer.init_plugin('sessions')

translation.load_translations(settings.get('global', 'translations'))
translation.activate('en_US')  # background jobs do not need localization
Exemple #49
0
def test_19(finfo):
    conf = settings.get('plugins', 'conc_cache')
    return _test_plugin_common('conc_cache', conf)
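test_19, like test_14, test_15 and test_17 further down, just pulls a plugin section out of the settings and hands it to a shared _test_plugin_common helper that these snippets never show. Purely as an assumption about that helper's shape, it might do little more than confirm the section exists and the plugin module imports:

def _test_plugin_common(name, conf):
    # Hypothetical helper: verify the plugin is configured and importable.
    if not conf:
        return False, '%s plugin is not configured' % name
    module_name = conf.get('module', name)
    try:
        __import__('plugins.%s' % module_name)
    except ImportError as e:
        return False, 'cannot import plugin %s: %s' % (module_name, e)
    return True, None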
Exemple #50
0
        if output_type == dict:
            return d
        elif output_type == "CLI":
            spacer = 15
            print("\n\tWorld Daemon stats:\n")
            for k, v in sorted(d.iteritems()):
                utils.cli_dump(k, spacer, v)
            print("\n")


if __name__ == "__main__":

    # never ever do anything in this file as root.
    if os.getuid() == 0:
        sys.stderr.write("The API World Daemon may not be operated as root!\n")
        daemon_user = getpwnam(settings.get("world", "daemon_user"))
        uid = daemon_user.pw_uid
        gid = daemon_user.pw_gid
        sys.stderr.write("Changing UID to %s\n" % (uid))
        try:
            os.setuid(uid)
        except Exception:
            sys.stderr.write("Could not set UID!\n")
            raise

    # optparse
    parser = OptionParser()
    parser.add_option("-l",
                      dest="list",
                      action="store_true",
                      default=False,
Exemple #51
0
def test_17(finfo):
    conf = settings.get('plugins', 'settings_storage')
    return _test_plugin_common('settings_storage', conf)
Exemple #52
0
def test_14(finfo):
    conf = settings.get('plugins', 'db')
    return _test_plugin_common('db', conf)
Exemple #53
0
def test_15(finfo):
    conf = settings.get('plugins', 'auth')
    return _test_plugin_common('auth', conf)
Exemple #54
0
def delete_media():
    folder_path = settings.get("media_folder_path")
    if os.path.exists(folder_path):
        shutil.rmtree(folder_path)
        logger.info(f"Deleted {folder_path} folder")
    return jsonify(success=True)
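delete_media returns a Flask jsonify response, so the original application presumably registers it as a route. A speculative wiring sketch — the app object, URL rule and HTTP method are all assumptions, and the function plus its imports from the example above are taken as already in scope:

from flask import Flask

app = Flask(__name__)  # stand-in for the application object used in Exemple #58

# Hypothetical registration; the real URL rule is not shown in the snippet.
app.add_url_rule('/media', view_func=delete_media, methods=['DELETE'])

# Quick check with Flask's test client:
with app.test_client() as client:
    print(client.delete('/media').get_json())  # expected: {'success': True}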
Exemple #55
0
class WorldDaemon:
    """ The world daemon determines whether to update a given world asset (see
    assets/world.py) based on the default 'asset_max_age' in settings.cfg or
    based on the custom 'max_age' attribute of a given asset.

    Since the daemon does not always update all assets, it minimizes resource
    usage and can therefore be left running without whaling on CPU and/or
    physical memory.

    Finally, the world daemon DOES NOT actually refresh/update or otherwise
    gather any data or run any queries. Rather, it initializes a World object
    (see above) and then works with that object as necessary. """
    def __init__(self):

        self.logger = utils.get_logger(log_name="world_daemon",
                                       log_level=settings.get(
                                           "world", "log_level"))

        self.pid_dir = settings.get("application", "pid_root_dir")
        self.pid_file_path = os.path.join(self.pid_dir, "world_daemon.pid")
        self.set_pid()

    def check_pid_dir(self):
        """ Checks to see if the pid directory exists and is writable. Creates a
        a new dir if it needs to do so. Also logs a WARN if the user requesting
        the check is not the owner of the pid dir. """

        if not os.path.isdir(self.pid_dir):
            self.logger.error("PID dir '%s' does not exist!" % self.pid_dir)
            try:
                shutil.os.mkdir(self.pid_dir)
                self.logger.critical("Created PID dir '%s'!" % self.pid_dir)
            except Exception as e:
                self.logger.error("Could not create PID dir '%s'!" %
                                  self.pid_dir)
                self.logger.exception(e)
                sys.exit(255)

        # warn if we're going to start off by trying to write a lock/pid file to
        # some other user's directory, b/c that would be bad
        pid_dir_owner = getpwuid(os.stat(self.pid_dir).st_uid).pw_name
        cur_user = os.getlogin()
        if pid_dir_owner != cur_user:
            self.logger.warn("PID dir owner is not the current user (%s)!" %
                             cur_user)

    def set_pid(self):
        """ Updates 'self.pid' with the int in the daemon pid file. Returns None
        if there is no file or the file cannot be parsed. """
        self.pid = None

        if os.path.isfile(self.pid_file_path):
            try:
                with open(self.pid_file_path, "rb") as pid_file:
                    self.pid = int(pid_file.read().strip())
            except Exception as e:
                self.logger.exception(e)

            if self.pid is not None:
                try:
                    os.kill(self.pid, 0)
                except OSError as e:
                    self.logger.exception(e)
                    self.logger.error(
                        "PID %s does not exist! Removing PID file..." % self.pid)
                    shutil.os.remove(self.pid_file_path)
                    self.pid = None

    def command(self, command=None):
        """ Executes a daemon command. Think of this as the router for incoming
        daemon commands/operations. Register all commands here. """

        if command == "start":
            self.start()
        elif command == "stop":
            self.stop()
        elif command == "restart":
            self.stop()
            time.sleep(3)
            self.start()
        elif command == "status":
            pass
        else:
            self.logger.error("Unknown daemon command ('%s')!" % command)

        # sleep a second and dump a status, regardless of command
        time.sleep(1)
        self.dump_status()

    @retry(
        tries=6,
        delay=2,
        jitter=1,
        logger=utils.get_logger(settings.get("world", "log_level")),
    )
    def start(self):
        """ Starts the daemon. """

        self.set_pid()
        if self.pid is not None:
            try:
                os.kill(self.pid, 0)
                self.logger.warn(
                    "PID %s exists! Daemon is already running! Exiting..." %
                    self.pid)
                sys.exit(1)
            except OSError:
                self.logger.info("Starting World Daemon...")
        else:
            self.logger.info("Starting World Daemon...")

        # pre-flight sanity checks and initialization tasks
        self.check_pid_dir()

        context = daemon.DaemonContext(
            working_directory=(settings.get("api", "cwd")),
            detach_process=True,
            umask=0o002,
            pidfile=PIDLockFile(self.pid_file_path),
            files_preserve=[self.logger.handlers[0].stream],
        )

        with context:
            while True:
                try:
                    self.run()
                except Exception as e:
                    self.logger.error(
                        "An exception occurred during daemonization!")
                    self.logger.exception(e)
                    raise

    def run(self):
        """ A run involves checking all warehouse assets and, if they're older
        than their 'max_age' attrib (default to the world.asset_max_age value),
        it refreshes them.

        Once finished, it sleeps for world.refresh_interval, which is measured
        in minutes. """

        W = World()
        W.refresh_all_assets()
        self.logger.debug("World Daemon will sleep for %s minutes..." %
                          settings.get("world", "refresh_interval"))
        time.sleep(settings.get("world", "refresh_interval") * 60)

    def stop(self):
        """ Stops the daemon. """

        self.set_pid()
        if self.pid is not None:
            self.logger.warn("Preparing to kill PID %s..." % self.pid)
            os.kill(self.pid, 15)
            time.sleep(2)
            try:
                os.kill(self.pid, 0)
            except OSError:
                self.logger.warn("PID %s has been killed." % self.pid)
        else:
            self.logger.debug(
                "Daemon is not running. Ignoring stop command...")

    def get_uptime(self, return_type=None):
        """ Uses the pid file to determine how long the daemon has been active.
        Returns None if the daemon isn't active, a datetime when return_type is
        'date', and otherwise a formatted 'Xd Xh Xm' uptime string. """

        if os.path.isfile(self.pid_file_path):
            pid_file_age = time.time() - os.stat(
                self.pid_file_path)[stat.ST_MTIME]
            ut = timedelta(seconds=pid_file_age)
            uptime = "%sd %sh %sm" % (ut.days, ut.seconds // 3600,
                                      (ut.seconds // 60) % 60)
        else:
            return None

        if return_type == "date":
            return datetime.fromtimestamp(
                os.stat(self.pid_file_path)[stat.ST_MTIME])

        return uptime

    def dump_status(self, output_type="CLI"):
        """ Prints daemon status to stdout. """

        active = False
        d = {"active": active}
        if self.pid is not None and os.path.isfile(self.pid_file_path):
            active = True

        if active:
            owner_uid = os.stat(self.pid_file_path).st_uid
            owner_name = getpwuid(owner_uid).pw_name

            try:
                utils.mdb.world.find()
            except Exception as e:
                self.logger.error(
                    "Daemon is active, but MDB cannot be reached!")
                self.logger.exception(e)
                raise

            d = {}
            d["active"] = active
            d["up_since"] = self.get_uptime("date")
            d["uptime_hms"] = self.get_uptime()
            d["owner_uid"] = owner_uid
            d["owner_name"] = owner_name
            d["pid"] = self.pid
            d["pid_file"] = self.pid_file_path
            d["assets"] = utils.mdb.world.find().count()

        if output_type == dict:
            return d
        elif output_type == "CLI":
            spacer = 15
            print("\n\tWorld Daemon stats:\n")
            for k, v in sorted(d.iteritems()):
                utils.cli_dump(k, spacer, v)
            print("\n")
Exemple #56
0
def serve_media(username, date, filename):
    folder_path = settings.get("media_folder_path")
    media_folder = os.path.join(os.path.join(folder_path, username), date)
    return send_from_directory(media_folder, filename)
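serve_media joins the user-supplied username and date segments straight into a filesystem path; send_from_directory only protects the final filename component. A hedged hardening sketch, using werkzeug's secure_filename and keeping the same settings lookup, could look like this:

import os

from flask import abort, send_from_directory
from werkzeug.utils import secure_filename


def serve_media_safe(username, date, filename):
    # Sanitise every user-controlled path segment, not just the filename.
    folder_path = settings.get("media_folder_path")
    media_folder = os.path.join(folder_path,
                                secure_filename(username),
                                secure_filename(date))
    if not os.path.isdir(media_folder):
        abort(404)
    return send_from_directory(media_folder, filename)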
Exemple #57
0
def get_external_files():
    external_files = []
    for file_path in settings.get('load_external_files', []):
        external_files.extend(glob.glob(file_path))
    return external_files
Exemple #58
0
import os
import shutil
import settings
import logging
from flask import Flask
from thread_runner import ThreadRunner
from datetime import datetime, timedelta
import json

app = Flask(__name__)
app.config['TEMPLATES_AUTO_RELOAD'] = True

SKIP_EXTENSIONS = (".json", ".txt")
PAGINATE_EVERY_N_ROWS = 100

settings.setup_logger()
logger = logging.getLogger(__name__)
user_settings = settings.get()

scraper_runner = ThreadRunner(start_scrape,
                              user_settings["loop_delay_seconds"],
                              user_settings["loop_variation_percentage"])

################### UTIL FUNCTIONS ###################


def get_log_file_list():
    scraping_logs_path = settings.get('scraping_log_file_path')
    if not os.path.exists(scraping_logs_path):
        return []
    with open(scraping_logs_path, "r") as log_file:
        # newest entries first
        return log_file.readlines()[::-1]
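Note that this module calls settings.get() with no arguments to obtain the whole user-settings dict and settings.get('scraping_log_file_path') for a single value. A minimal sketch of a settings helper supporting both call styles in this single-key flavour — an assumption about the behaviour, not the real module — might be:

import json

_SETTINGS = {}


def load(path='settings.json'):
    # Hypothetical loader; the real project may read a different format.
    global _SETTINGS
    with open(path) as settings_file:
        _SETTINGS = json.load(settings_file)


def get(key=None, default=None):
    # No key: return the whole settings dict (as in user_settings = settings.get()).
    if key is None:
        return _SETTINGS
    return _SETTINGS.get(key, default)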
Exemple #59
0
def get_app_logs():
    log_file_path = settings.get('system_log_file_path')
    if not os.path.exists(log_file_path):
        return []
    with open(log_file_path, 'r') as log_file:
        # newest entries first
        return log_file.readlines()[::-1]
Exemple #60
0
def email(body):
    if settings.isset('mailgrouponto'):
        to = settings.get('mailgrouponto')
        subject = 'SkuTouch could not validate orders from Groupon'
        print('Sending email to ' + to)
        mail.sendmail(to, subject, body)
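This last example guards the lookup with settings.isset before calling settings.get. Building on the single-key get() sketch after Exemple #58, and again only as an assumption about the real module, isset could be as simple as:

def isset(key):
    # Hypothetical companion to the get() sketch above: true when the key
    # exists and has a non-empty value.
    value = get(key, None)
    return value is not None and value != ''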