Example #1
def saveWorkload(name, content):
    import tempfile
    wkld_dir = os.path.join(
        config.getSettings("global")["projdir"],
        config.getSettings("workloads")["dir"]
    )

    ensureDir(wkld_dir)

    temp_f = tempfile.mkstemp(
        suffix='.wkld',
        prefix='',
        dir=wkld_dir
    )

    temp = os.fdopen(temp_f[0], 'wb')
    try:
        temp.write(content)
    finally:
        temp.close()

    newWorkload = {
        "name" : name,
        "file" : temp_f[1],
        "actors" : []
    }

    if name in config.getSettings("workloads")["defs"]:
        os.remove(config.getSettings("workloads")["defs"][name]["file"])
    config.getSettings("workloads")["defs"][name] = newWorkload
    config.saveConfig()
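saveWorkload() (like the other save* helpers further down) relies on an ensureDir() helper that is not shown in these examples. A minimal sketch of what it might look like:

import os

def ensureDir(path):
    # Hypothetical helper: create the directory (and any missing parents)
    # if it does not already exist.
    if not os.path.isdir(path):
        os.makedirs(path)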
Example #2
    def GET(self, request):
        """
        Get a particular page.

        The handler will take anything after the base address and look for it
        in the pages folder. This folder is stored in the :mod:`config` module
        at::

            config.getSettings("server")["pagedir"]

        If a file is not found the handler will assume the address is a
        directory and try to serve the file index.html from it.

        :returns: The requested file.
        :raises: :exc:`restlite.Status` 400 if the path contains ".."

                 :exc:`restlite.Status` 404 if the file is not found.

        """

        authentication.login(request)

        if '..' in request['PATH_INFO']:
            raise restlite.Status, '400 Invalid Path'

        page_file = os.path.join(
            config.getSettings("global")["basedir"],
            config.getSettings("server")["pagedir"],
            request['PATH_INFO']
        )
        try:
            return request.response(self.getPage(page_file), self.getType(page_file))
        except restlite.Status:
            index_page = os.path.join(page_file, 'index.html')
            return request.response(self.getPage(index_page), 'text/html')
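The handler relies on getPage() and getType(), which are not part of this example. A rough sketch of what they might look like, written as plain functions as in Example #12, and assuming getPage signals a missing file with restlite.Status so the index.html fallback above kicks in:

import mimetypes
import os

import restlite

def getPage(page_file):
    # Hypothetical helper: read the requested file, or signal 404 so the
    # handler can fall back to index.html.
    if not os.path.isfile(page_file):
        raise restlite.Status, "404 Not Found"
    page = open(page_file, "rb")
    try:
        return page.read()
    finally:
        page.close()

def getType(page_file):
    # Hypothetical helper: guess a MIME type from the file extension,
    # defaulting to plain text.
    mime, _ = mimetypes.guess_type(page_file)
    return mime or "text/plain"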
Example #3
def setupConfig(proj_dir, base_dir):
    """
    Setup the :mod:`config` module using the config file that will be or is in
    the project directory.

    The project and base directories will only be stored if the config is new.

    :param proj_dir: The absolute path to the project directory.
    :type proj_dir: ``str``
    :param base_dir: The absolute path to the base (server installation) 
                     directory.
    :type base_dir: ``str``
    :returns: ``True`` if there was not an existing config file. (i.e. this was
              a new configuration.)
    :rtype: ``bool``

    """

    config_file = os.path.join(proj_dir, pb.appname_file+".config")
    print "Using " + config_file + " as the config file."
    config.setConfig(config_file)

    new_config = not config.onDisk()

    if new_config:
        print "Storing essential directories."
        config.getSettings("global")["basedir"] = base_dir
        config.getSettings("global")["projdir"] = proj_dir
    else:
        print "Config Loaded."

    return new_config
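A minimal usage sketch for setupConfig(); the two paths here are made-up placeholders:

import os

proj_dir = os.path.abspath("./my_project")    # hypothetical project directory
base_dir = os.path.abspath("./reef_install")  # hypothetical install directory

if setupConfig(proj_dir, base_dir):
    print "New project configuration created."
else:
    print "Existing configuration loaded."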
Example #4
def makeKey():
    """
    Generate a new :program:`ssh` key to be used by vazels.

    This will be very insecure as Vazels requires a passwordless key.
    If you wish to inspect the generated product, it will be saved to
    the path returned by :func:`keyPath()`.

    """

    ensureSecurityDir()
    try:
        os.remove(keyPath())
        os.remove(keyPath()+".pub")
    except: pass

    command = [
        "ssh-keygen",
        "-t", "rsa",
        "-f", keyPath(),
        "-q",
        "-C", "Generated by Reef for use with Vazels",
        "-N", "",
    ]
    subprocess.Popen(
        command,
        stdout=open('/dev/null','w'),
        stderr=subprocess.STDOUT
    ).wait()
    config.getSettings("security")["customkey"] = True
    config.getSettings("security")["certificate"] = keyPath()
Example #5
def saveActor(name, type, content):
    import tempfile
    actor_dir = os.path.join(
        config.getSettings("global")["projdir"],
        config.getSettings("actors")["dir"]
    )

    ensureDir(actor_dir)

    temp_f = tempfile.mkstemp(
        suffix='.tar.gz',
        prefix='',
        dir=actor_dir
    )

    temp = os.fdopen(temp_f[0], 'wb')
    try:
        temp.write(content)
    finally:
        temp.close()

    newActor = {
        "name" : name,
        "type" : type,
        "file" : temp_f[1]
    }

    if name in config.getSettings("actors")["defs"]:
        os.remove(config.getSettings("actors")["defs"][name]["file"])
    config.getSettings("actors")["defs"][name] = newActor
    config.saveConfig()
Example #6
def _extractSueComponents(group_number, sueComps):
    """
    Extract SUE components into the appropriate directories.
    
    :param group_number: The number of the group to extract into.
    :type group_number: ``int``
    :param sueComps: The set of names of SUE components to extract.
    :type sueComps: ``set(str)``
    
    """
    
    experiment_dir = config.getSettings('control_centre')['experiment_dir']
    
    for sueComp_name in sueComps:
        try:
            sueCompFile = config.getSettings('SUE')['defs'][sueComp_name]['file']
        except KeyError:
            print sueComp_name
            print config.getSettings('SUE')['defs']
            raise restlite.Status, "500, Failed to extract Sue component"
    
        extractpath = os.path.join(
            controlcentre.getExperimentPath(),
            "Group_"+str(group_number),
            "SUE"
        )
        
        sueCompTGZ = tarfile.open(sueCompFile,'r:gz')
        sueCompTGZ.extractall(path=extractpath)
Example #7
def restore():
    if config.getSettings("security")["backedup"]:
        ensureSecurityDir()
        try:
            shutil.move(backupFile(), authKeysFile())
        # Failure means there was no backup, so delete the file
        except IOError:
            try:
                os.remove(authKeysFile())
            except:
                pass
        config.getSettings("security")["backedup"] = True
Example #8
def getControlCentreOutPath():
    """
    Grab the control centre output file.

    :returns: Absolute path to the output file.
    :rtype: ``str``

    """

    return os.path.join(
        config.getSettings("global")["projdir"],
        config.getSettings("control_centre")["out_file"]
    )
Example #9
def backup():
    if not config.getSettings("security")["backedup"]:
        ensureSecurityDir()
        try:
            os.remove(backupFile())
        except:
            pass  # Means it probably didn't exist
        try:
            shutil.copy2(authKeysFile(), backupFile())
        # Ignore failure as it just means there was nothing to backup
        except IOError:
            pass
        config.getSettings("security")["backedup"] = True
Example #10
def getVazelsPath():
    """
    Grab the path to the vazels installation directory.

    :returns: Absolute path to the vazels install directory.
    :rtype: ``str``

    """

    return os.path.join(
        config.getSettings("global")['basedir'],
        config.getSettings("control_centre")["vazels_dir"],
    )
Example #11
def getExperimentPath():
    """
    Grab the experiment path.

    :returns: Absolute path to the vazels experiment for this project.
    :rtype: ``str``

    """

    return os.path.join(
        config.getSettings("global")['projdir'],
        config.getSettings("control_centre")["experiment_dir"]
    )
Example #12
    def GET(request):
        authentication.login(request)

        if ".." in request["PATH_INFO"]:
            raise restlite.Status, "400 Invalid Path"

        page_file = os.path.join(
            config.getSettings("global")["basedir"], config.getSettings("server")["pagedir"], request["PATH_INFO"]
        )
        try:
            return request.response(getPage(page_file), getType(page_file))
        except restlite.Status:
            index_page = os.path.join(page_file, "index.html")
            return request.response(getPage(index_page), "text/html")
Example #13
def __extractActors(workload_def, target_group):
  print("Extracting actors")
  group_number = target_group['group_number']
  experiment_dir = config.getSettings("global")['expdir']
  
  print("workload_def: " + str(workload_def))
  for actor_name in workload_def['actors']:
    print("Extracting actor: " + actor_name)
    actor = config.getSettings('actors')['defs'][actor_name]
    type = actor['type'].upper()
    actorTGZ = tarfile.open(actor['file'], 'r:gz')
    if type.find("SUE") != -1:
      actorTGZ.extractall(path=experiment_dir+'/Group_'+str(group_number)+'/SUE/')
    else:
      actorTGZ.extractall(path=experiment_dir+'/Group_'+str(group_number)+"/Vazels/"+type+"_launcher/")
Example #14
def __getCommandLineClientArgs():
    """
    Get the arguments needed to start the command line client.

    :returns: List of the necessary arguments.
    :rtype: ``list(str)``

    """

    return [
        '/bin/sh',
        'commandline_client.sh',
        '--rmi_host='+config.getSettings('control_centre')['rmi_host'],
        '--rmi_port='+config.getSettings('control_centre')['rmi_port']
    ]
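A usage sketch for the argument list, modelled on how Example #28 launches the command line client; it assumes the call happens in the same module, and the 'status' sub-command is only a placeholder:

import subprocess

args = __getCommandLineClientArgs()
args.append('status')  # placeholder sub-command, not confirmed by the source
subprocess.Popen(args, cwd=getVazelsPath() + '/client').wait()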
Example #15
    def __init__(self):
        """
        Create a basic server setup.

        This will set up the most basic possible settings for a working server.
        It also adds enough defaults in the :mod:`config` so that the server
        will run with no additional input.

        """

        self.authmodel = None
        self.routes = []
        config.getSettings("server").setdefault("port", 8000)
        # Paths from base dir
        config.getSettings("server").setdefault("pagedir", "./pages/")
Example #16
def updateFrequency():
    cron = CronTab(user=True)
    writeCron = False
    settings = getSettings()

    sidebarJob = getJob(cron, 'sidebar/main.py')
    # If the job exists, make sure the existing crontab matches
    # what the settings say it should be
    if sidebarJob:
        if sidebarJob.minute != '*/' + str(settings['sidebar']['update_interval_mins']):
            sidebarJob.minute.every(settings['sidebar']['update_interval_mins'])
            writeCron = True
    # If the job doesn't exist, create it with the settings given
    else:
        # This crontab entry will run the bot and redirect stderr and stdout to
        # an individual file for just this iteration as well as to the full
        # appended output log that can be `tail -f`'d
        base_path = os.path.dirname(os.path.abspath(sys.argv[0]))
        sidebarJob = cron.new(command='python ' + base_path \
            + '/main.py 2>&1 | tee ' + base_path \
            + '/logs/iterations/`date -u +\%s`.txt >> ' \
            + base_path + '/logs/output.txt',
            comment='GOB Sidebar module')
        sidebarJob.minute.every(settings['sidebar']['update_interval_mins'])
        writeCron = True

    # If changes were made, write them to the crontab
    if writeCron:
        cron.write()
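updateFrequency() uses a getJob() helper that is not shown. A minimal sketch, assuming it looks the job up by a substring of its command:

def getJob(cron, command_fragment):
    # Hypothetical helper: return the first cron job whose command contains
    # the given fragment, or None if no such job exists.
    for job in cron:
        if command_fragment in job.command:
            return job
    return None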
Example #17
  def POST(request,entity):
    authentication.login(request)

    existing_groups = config.getSettings("groups")
    group_name = request['PATH_INFO']

    args = urlparse.parse_qs(entity)

    g_data = _getGroupDataFromArgs(args)

    if group_name in existing_groups:
      if "size" in g_data and g_data["size"] == 0:
        # Delete the group
        del existing_groups[group_name]
      else:
        existing_groups[group_name].update(g_data)
    else:
      # New group
      n_group = __newGroup()
      n_group["name"] = group_name
      n_group.update(g_data)
      if n_group["size"] != 0:
        existing_groups[group_name] = n_group
    
    #print config.getSettings

    try:
      return request.response(existing_groups[group_name])
    except KeyError:
      return request.response({"name":group_name, "size":0})
Example #18
 def loadCategories(self):
     """
     Loads categories (task lists) from the net
     
     Returns a dictionary containing the names (key) and IDs (value) for the categories.
     """
     if not self.isConnected():
         raise ValueError('Not connected with RTM Net Service; cannot proceed. Please connect first.')
     categories = {}
     rspLists = self._connection.lists.getList()
     for cat in rspLists.lists.list:
         try:
             config.getSettings().getValue("hidden_groups",  list).index(cat.name)
         except:
             categories[cat.name] = cat.id
     return categories
Example #19
    def init_settings(self):
        self.settings = config.getSettings()
        self.settings.beginGroup('glclient')
        self.port = unicode(self.settings.value('port').toString())
        if not self.port:
            self.port = '9999'
            self.settings.setValue('port', self.port)
            print 'new settings'
        print unicode(self.settings.value('port').toString())
        self.colormap = unicode(self.settings.value('colormap').toString())
        if not self.colormap:
            self.colormap = '../../gl/colormaps/rainbow2'
        if not os.path.isfile(self.colormap):
            print 'Colormap file not found:', self.colormap
            self.colormap = unicode(QtGui.QFileDialog.getOpenFileName(self, 'Select Colormap File'))
        self.settings.setValue('colormap', self.colormap)
        self.executable = unicode(self.settings.value('executable').toString())
        if not self.executable:
            self.executable = '../../gl/src/glclient'
        if not os.access(self.executable, os.X_OK):        
            self.executable = QtGui.QFileDialog.getOpenFileName(self, 'Select GLClient Executable')
        self.settings.setValue('executable', self.executable)
        if not os.access(self.executable, os.X_OK):        
            QtGui.QMessageBox.critical(self, 'GLClient executable', 'Please select the correct glclient executable. Make sure you have execute permission on this file')
            
        self.settings.endGroup()
Example #20
def doWork():
    """
    where we can put our work
    :return:
    """
    logging.info("looking for applicable alerts")
    settings = config.getSettings()
    saltStackMaster = settings["saltStackMaster"]
    saltStackUser = settings["saltStackUser"]
    saltStackPass = settings["saltStackPass"]
    saltStackStateToRun = settings["saltStackStateToRun"]
    alertsRaw = alerts.getAlertExists(True)
    if alertsRaw["alertExists"]:
        logging.info("Firing off Salt State since I see an alert")
        session = requests.Session()
        session.post('https://' + saltStackMaster + ':8000/login',
                     verify=False,
                     json={
                         'username': saltStackUser,
                         'password': saltStackPass,
                         'eauth': 'auto',
                     })
        # The command itself goes to the salt-api root URL, reusing the
        # session authenticated by the login call above.
        resp = session.post('https://' + saltStackMaster + ':8000',
                            verify=False,
                            json=[{
                                'client': 'local',
                                'tgt': '*',
                                'fun': 'state.orchestrate',
                                'arg': [saltStackStateToRun],
                            }])
        logging.info(resp.content)
        return resp
    else:
        return {"status": "nothing to do"}
Example #21
def restore():
    """
    Restore the backed up version of the :program:`ssh` authorized_hosts file if
    a backup has been created by :func:`backup`.

    """

    if config.getSettings("security")["backedup"]:
        ensureSecurityDir()
        try:
            shutil.move(backupFile(), authKeysFile())
        # Failure means there was no backup, so delete the file
        except IOError:
            try: os.remove(authKeysFile())
            except: pass
        config.getSettings("security")["backedup"] = True
Example #22
 def GET(request):
     authentication.login(request)
     groups = config.getSettings("groups")
 
     #component_info = {}
 
     # For each component, build a list of groups to which it is assigned.
     #for component_name in config.getSettings("SUE")["defs"]:
     #    assigned_groups = []
     #    for group in groups:
     #        if component_name in groups[group]["sue_components"]:
     #          assigned_groups.append(group)
     #        component_info[component_name] = assigned_groups
 
     #return request.response(component_info)
     return request.response(config.getSettings("SUE")["defs"].keys(), 'text/html')
Example #23
    def GET(self, request):
        """
        Get setting data from the :mod:`config`.

        Parameters are expected to be part of the query string.

        If the base of this address handler is called:

        :returns: A list of sections in the :mod:`config`
        :rtype: ``list``

        If anything more than the base of this handler is called then the
        excess is assumed to be a section name and:

        :returns: A JSON representation of that particular section of the 
                  :mod:`config`.
        :rtype: ``json``
        :raises: :exc:`restlite.Status` 404 if the section does not exist.

        """

        authentication.login(request)
        section = getSection(request['PATH_INFO'])
        if section:
            settings = config.getSettings(section, False)
            if settings is None:
                raise restlite.Status, "404 Not Found"
            else:
                return request.response(settings)
        else:
            return request.response(config.getSections())
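getSection() is shared by this handler and the POST handler in Example #32 but is not shown. One possible sketch, assuming the section name is simply whatever follows the handler's base address:

def getSection(path_info):
    # Hypothetical helper: treat the remainder of the path as a section
    # name; an empty remainder means "list the sections".
    return path_info.strip('/')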
Example #24
def build(r):
    settings = config.getSettings()
    # Get the base stylesheet
    if 'disable' in settings['stylesheet'] and settings['stylesheet'][
            'disable']:
        return None
    stylesheet = config.getStylesheet()
    # Grab the "license" text
    header = config.getStylesheetHeader()
    # Handle the demonyms, if specified in the settings to do so
    if settings['stylesheet']['demonyms_enabled']:
        demonyms = getDemonymsCss()
    else:
        demonyms = ''
    # Handle the multiple-headers settings
    if settings['stylesheet']['num_of_headers'] > 1:
        stylesheet = stylesheet.replace('%%header1%%', getHeader())
    # Handle the new header cycle if enabled
    if settings['stylesheet']['new_header_cycle']['enabled']:
        getNextHeader(r)
    # Prepend the demonyms
    stylesheet = demonyms + "\n\n" + stylesheet
    # Handle the minify stylesheet setting without affecting the header
    if settings['stylesheet']['minify']:
        stylesheet = cssmin(stylesheet)
    # Add the header
    stylesheet = header + "\n\n" + stylesheet
    return stylesheet
Example #25
    def get(self):
        log.log('\tGetting %s streams...' % self.name)
        startTime = time()
        settings = getSettings()

        # Just use the cached copy in dev mode
        if settings['dev_mode'] == True:
            log.log('\t\t...done getting %s streams! (cached data)' %
                    self.name)
            cachedCopy = cache.readJson('%s.json' % self.name)
            return cachedCopy['data'] if cachedCopy != False else []

        req = self.makeApiCall(self.api_url)
        if req == False:
            return self.useCachedCopy()

        streams = req[self.streams_field]

        ret = []

        # Make sure we only have valid live streams, then add them to the
        # list of stream objects we're returning
        for stream in streams:
            streamObj = self.convertStream(stream)
            if streamObj != None:
                ret.append(streamObj)

        elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
        log.log('\t\t\GREEN...done! %s' % (elapsedTime))

        cache.saveJson("%s.json" % self.name, {'time': time(), 'data': ret})

        return ret
Example #26
 def POST(request,entity):
     authentication.login(request)
 
     groups = config.getSettings("groups")
 
     fields = parseMultipart(request, entity)
     if fields is None:
         raise restlite.Status, "400 Invalid SUE POST request - need parameters"
 
     # Try to find what we should label the component
     component_name = fields.getfirst("component_name")
     if not component_name:
         raise restlite.Status, "400 Must give the SUE component a name"
 
     # Try to get the SUE component
     try:
         component_f = fields['component_file']
     except KeyError:
         raise restlite.Status, "400 Must provide a file when specifying a SUE component"
 
     if component_f.file:
         component = component_f.file.read()
         saveSueComponent(component_name, component)
     else:
         raise restlite.Status, "400 The supplied \"component_file\" was not a file"
 
 
     return request.response("", "text/plain")
Example #27
def addUser(name, password):
    """
    Add a user to the current authentication model.

    This will also get update the configuration with the information
    needed to recreate itself.

    :param name: The name of the user to add.
    :type name: ``str``
    :param password: The password to use for this user.
    :type password: ``str``
    :raises: ``KeyError`` if the name exists

    """
    global authModel
    users = config.getSettings("auth")["users"]

    if name in users:
        raise KeyError, str(name)+" already exists"

    users[name] = password
    # Don't need a complete refresh to add one user
    if authModel is None:
        refresh()
    else:
        addUserToModel(name, password)
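A short usage sketch; the credentials are placeholders:

try:
    addUser("alice", "s3cret")  # made-up name and password
except KeyError:
    print "A user with that name already exists."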
Example #28
def __applyWorkload(workload_def, target_group):
  print("Applying workload")
  wkld_dir = os.path.join(
        config.getSettings("global")["projdir"],
        config.getSettings("workloads")["dir"]
    )
  
  args = ['/bin/sh', 'commandline_client.sh']
  args.append('--rmi_host='+config.getSettings('command_centre')['rmi_host'])
  args.append('--rmi_port='+config.getSettings('command_centre')['rmi_port'])
  args.append('--group_rank='+str(target_group['group_number']))
  args.append('addworkload')
  args.append(workload_def['file'])
  
  subprocess.Popen(args, cwd=vazelsmanager.getVazelsPath()+'/client')
  print("Workload dispatched")
Example #29
def securityDir():
    """
    Get the location of our security directory.

    This should be somewhere inside our project directory as described
    `here <https://github.com/jelford/reef/wiki/Directory-Structure>`_.

    :returns: Absolute path to the security directory.
    :rtype: ``str``

    """

    return os.path.join(
        config.getSettings("global")["projdir"],
        config.getSettings("security")["dir"],
    )    
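ensureSecurityDir(), called from Examples #4, #7 and #9, is not shown. A minimal sketch built on securityDir():

import os

def ensureSecurityDir():
    # Hypothetical helper: make sure the security directory (and any missing
    # parents) exists before keys or backups are written into it.
    path = securityDir()
    if not os.path.isdir(path):
        os.makedirs(path)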
Example #30
def _extractActors(group_number, actors):
    """
    Extract actors into group directories.

    :param group_number: The number of the group to extract into.
    :type group_number: ``int``
    :param actors: The set of actors to extract.
    :type actors: ``set(str)``

    """

    experiment_dir = controlcentre.getExperimentPath()

    for actor_name in actors:
        actor = config.getSettings('actors')['defs'][actor_name]
        type = actor['type'].upper()

        extractpath = os.path.join(
            controlcentre.getExperimentPath(),
            "Group_"+str(group_number)
        )

        actorTGZ = tarfile.open(actor['file'],'r:gz')
        extractpath = os.path.join(extractpath, "Vazels", type+"_launcher")

        actorTGZ.extractall(path=extractpath)
Example #31
    def POST(request, entity):
        authentication.login(request)

        import urlparse

        args = urlparse.parse_qs(entity, True)

        # Only accept a single value for any parameter
        for key in args:
            if len(args[key]) > 1:
                raise restlite.Status, "400 Duplicate Arguments"
            args[key] = args[key][0]

        # Check for action
        if "do" not in args:
            raise restlite.Status, "400 Missing Action"

        # Check for workload
        if "workload" not in args:
            raise restlite.Status, "400 Missing Workload"

        # Check for actor
        if "actor" not in args:
            raise restlite.Status, "400 Missing Actor"

        do = args["do"].lower()
        workload = args["workload"]
        actor = args["actor"]

        # Validate workload
        if workload not in config.getSettings("workloads")["defs"]:
            raise restlite.Status, "404 Workload Not Found"

        # Validate actor
        if actor not in config.getSettings("actors")["defs"]:
            raise restlite.Status, "404 Actor Not Found (" + actor + ")"

        workload = config.getSettings("workloads")["defs"][workload]

        try:
            {"add": addActor, "rem": remActor}[do](workload, actor)
        except KeyError:
            raise restlite.Status, "400 Invalid Action"

        config.saveConfig()

        return request.response(workload)
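addActor() and remActor() are dispatched by the handler above but not shown. A minimal sketch, assuming they only maintain the "actors" list that Example #1 attaches to each workload definition:

def addActor(workload, actor):
    # Hypothetical helper: record the actor on the workload definition,
    # ignoring duplicates.
    if actor not in workload["actors"]:
        workload["actors"].append(actor)

def remActor(workload, actor):
    # Hypothetical helper: drop the actor from the workload definition if
    # it is present.
    if actor in workload["actors"]:
        workload["actors"].remove(actor)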
Example #32
    def POST(request, entity):
        authentication.login(request)
        section = getSection(request['PATH_INFO'])
        # Uncomment below to disallow editing of new sections
        #if not section:
        #    raise restlite.Status, '403 New Section Forbidden'

        # Grab arguments
        import urlparse
        parsed = urlparse.parse_qs(entity, True) # Null value is kept as ''
        used_names = [] # Names used
        new_qs = {} # Values to add
        rem_qs = [] # Values to delete

        for key in parsed:
            # Grab name and extension
            name, ext = splitName(key)
            # If name already used, ignore
            if name in used_names:
                continue
            # If there's no extension, ignore
            if ext is None:
                continue
            # If there is no value at all, skip
            if parsed[key] == []:
                continue
            # Otherwise take first value
            q = parsed[key][0]
            # If up for deletion
            if ext == 'r':
                # Check used_names here as we use key when deleting
                # Or parsed name otherwise below
                rem_qs += [name]
                used_names += [name]
            # Adding value
            else:
                # Try to convert to the correct format
                func = funcFor(ext)
                if func is None:
                    raise restlite.Status, "400 Invalid Action"
                try:
                    q = func(q)
                except:
                    raise restlite.Status, "400 Invalid Format"
                new_qs[name] = q
                used_names += [name]

        # All settings are good to go
        settings = config.getSettings(section)
        # Add all new settings
        settings.update(new_qs)
        # Delete all new deletions
        for key in rem_qs:
            try:
                del settings[key]
            except KeyError: pass
        # Make it permanent
        config.saveConfig()
        return request.response(settings)
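splitName() and funcFor() implement the name/extension convention used above but are not shown. One possible sketch, assuming a key such as "port.i" means "store port as an int" and the "r" extension marks a deletion; the exact extension letters are a guess:

def splitName(key):
    # Hypothetical helper: split "name.ext" into its parts; keys without an
    # extension come back with ext set to None.
    if '.' not in key:
        return key, None
    name, ext = key.rsplit('.', 1)
    return name, ext

def funcFor(ext):
    # Hypothetical helper: map an extension to a conversion function, or
    # None for unknown extensions.
    return {'i': int, 'f': float, 's': str, 'b': bool}.get(ext)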
Example #33
 def __init__(self):
     self.settings = config.getSettings()
     self.state = self.settings["state"]
     if self.state == "":
         self.coord = self.settings["coord"]
         if self.coord == "":
             import geo
             self.coord = geo.getCoord()
Example #34
def saveSueComponent(component_name, component_file):
    import tempfile
    sue_dir = os.path.join(
        config.getSettings("global")["projdir"],
        config.getSettings("SUE")["dir"]
    )

    ensureDir(sue_dir)

    temp_f = tempfile.mkstemp(
        suffix='.tar.gz',
        prefix='',
        dir=sue_dir
    )

    temp = os.fdopen(temp_f[0], 'wb')
    try:
        temp.write(component_file)
    finally:
        temp.close()

    newSueComponent = {
        "name" : component_name,
        "file" : temp_f[1],
    }

    if component_name in config.getSettings("SUE")["defs"]:
        os.remove(config.getSettings("SUE")["defs"][component_name]["file"])
    config.getSettings("SUE")["defs"][component_name] = newSueComponent

    print config.getSettings("SUE")
    config.saveConfig()
Example #35
    def __init__(self):
        self.settings = config.getSettings()
        request_headers = {}

        # Variables to be used in the rest of the object, mainly inherited funcs
        self.name = 'Facebook Watch'
        self.api_url = ''
        self.streams_field = '[]'
        self.request_headers = request_headers
Example #36
def getVazelsPath():
    """
    Get the path to the base directory of the Vazels installation.

    :returns: Path to the Vazels installation directory.
    :rtype: ``str``

    """

    return os.path.join(config.getSettings("global")['basedir'],"vazels")
Example #37
    def __init__(self):
        settings = config.getSettings()
        game_ids = self.convertGames(settings['sidebar']['livestreams']['games'])['MLG']

        # The list of games. If there are none, just use CS:GO because CS:GO is awesome.
        game_id = ','.join(game_ids) if len(game_ids) > 0 else '13'

        self.name = 'MLG'
        self.streams_api = ''
        self.channels_api = '' % game_id
        self.request_headers = {}
Example #38
def getNextHeader(r):
    log.log('> New header cycle is \GREEN ENABLED')
    metadata = cache.getMetadata()
    settings = config.getSettings()
    interval_mins = settings['stylesheet']['new_header_cycle']['interval_mins']

    num_next_header = 0

    if int(time()) - metadata['new_header_cycle']['last_updated'] > (
            interval_mins * 60) - 120:
        log.log(
            '\t Interval exceeded for current header...grabbing next header')
        last_header_name = metadata['new_header_cycle']['last_header']
        num_next_header = int(last_header_name.split('.')[0]) + 1
        next_header = Path(
            '/var/www/globaloffensivebot/public/images/headers/{}.jpg'.format(
                str(num_next_header)))

        try:
            header_exists = next_header.resolve()
        except FileNotFoundError:
            num_next_header = 1
            next_header = '/var/www/globaloffensivebot/public/images/headers/{}.jpg'.format(
                str(num_next_header))

        with open(
                '/var/www/globaloffensivebot/public/images/headers/creators/creators.json',
                'r') as f:
            creators = json.load(f)
        creator = creators[str(num_next_header)]

        try:
            r.subreddit(settings['subreddit']).stylesheet.upload(
                'header', str(next_header))
            metadata['new_header_cycle']['last_updated'] = int(time())
            metadata['new_header_cycle']['last_header'] = '{}.jpg'.format(
                str(num_next_header))
            metadata['new_header_cycle']['creator'] = creator
            cache.setMetadata(metadata)
        except prawcore.TooLarge:
            log.error(
                '\t\tError uploading {}.jpg by {}. Image too large.'.format(
                    str(num_next_header), creator))
        except praw.exceptions.APIException as e:
            print(e)

        log.log('\t\t\GREEN...done! \R Now using header #{} by {}\n'.format(
            str(num_next_header), creator))
    else:
        current_num = metadata['new_header_cycle']['last_header'].split('.')[0]
        log.log(
            '\t Not time to change the current header yet. Using header #{} by {}. Moving on...\n'
            .format(current_num, metadata['new_header_cycle']['creator']))
Example #39
    def __init__(self):
        settings = config.getSettings()
        game_ids = self.convertGames(
            settings['sidebar']['livestreams']['games'])['Azubu']

        # Just use the first game, if there are none, just use CS:GO because CS:GO is awesome.
        # TODO: Implement multiple game searching on Azubu
        game_id = game_ids[0] if len(game_ids) > 0 else 'csgo'

        limit = str(settings['sidebar']['livestreams']['max_shown'] * 2)

        self.name = 'Azubu'
        self.api_url = 'http://api.azubu.tv/public/channel/live/list/game/' \
            + game_id + '?limit=' + limit
        self.streams_field = 'data'
        self.request_headers = {}
Example #40
    def __init__(self):
        settings = config.getSettings()
        game_ids = self.convertGames(
            settings['sidebar']['livestreams']['games'])['Hitbox']

        # Just use the first game, if there are none, just use CS:GO because CS:GO is awesome.
        # TODO: Implement multiple game searching on Hitbox
        game_id = game_ids[0] if len(game_ids) > 0 else '427'

        limit = str(settings['sidebar']['livestreams']['max_shown'] * 2)

        self.name = 'Hitbox'
        self.api_url = 'http://api.hitbox.tv/media/?game=' + game_id \
            + '&live=1&limit=' + limit
        self.streams_field = 'livestream'
        self.request_headers = {}
Example #41
def getHeader():
    metadata = cache.getMetadata()
    numOfHeaders = config.getSettings()['stylesheet']['num_of_headers']

    if metadata["header_cycle"]["current_index"] > numOfHeaders:
        metadata["header_cycle"]["current_index"] = 0
    if int(time()) - int(metadata["header_cycle"]["last_updated"]) > 60 * 60:
        # Grabs the new index for the active demonym
        metadata["header_cycle"]["current_index"] = (
            metadata["header_cycle"]["current_index"] + 1) % numOfHeaders
        metadata["header_cycle"]["last_updated"] = int(time())
        # Write the metadata object
    cache.setMetadata(metadata)
    # Returns the Reddit CSS %%variable%%
    return "%%header" + str(metadata["header_cycle"]["current_index"] +
                            1) + "%%"
Example #42
def buildMarkdown():
    log.log('\n> Retrieving community metrics...')
    startTime = time()

    settings = getSettings()
    markdown = ''

    if settings['dev_mode'] == True:
        log.log('\t... done! (using a cached copy)')
        return cache.read('community_metrics.txt')
    

    if settings['sidebar']['social']['ts_enabled']:
        teamspeakMd = getTeamspeakUsersMarkdown(settings)
    else:
        log.log('\t\REDTeamspeak metrics disabled.')
        teamspeakMd = None
    
    if settings['sidebar']['social']['irc_enabled']:
        ircMd = getIrcUsersMarkdown(settings)
    else:
        log.log('\t\REDIRC metrics disabled.')
        ircMd = None

    if settings['sidebar']['social']['discord_enabled']:
        disMd = getDiscordUsersMarkdown(settings)
    else:
        log.log('\t\REDDiscord metrics disabled.')
        disMd = None


    if teamspeakMd is not None:
        markdown += teamspeakMd + '\n'
    if ircMd is not None:
        markdown += ircMd + '\n'
    if disMd is not None:
        markdown += disMd

    # markdown = teamspeakMd + '\n' + ircMd + '\n' + disMd
    # markdown = teamspeakMd + '\n' + ircMd + '\n' + ' '
    cache.save('community_metrics.txt', markdown)

    elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
    log.log('\GREENDone retrieving community metrics. %s \n' % elapsedTime)

    return markdown
Example #43
def getAlertExists(verbose=False):
    """
    lets see if our alerts have any of the keywords we are looking for
    """
    settings = config.getSettings()
    keyWords = settings["alertEventTypeKeyWords"]
    alerts = getAlerts()
    flag = False
    for word in keyWords:
        if alerts:
            for alert in alerts:
                alertType = alerts[alert]["eventType"]
                if operator.contains(alertType, word):
                    flag = True
    if verbose:
        return {"alertExists": flag, "Raw Message": alerts}
    else:
        return {"alertExists": flag}
Example #44
def buildMarkdown():
    log.log("> Building calendar...")
    settings = getSettings()
    events = getEvents(settings['sidebar']['calendar'])

    if len(events) == 0:
        return ''

    markdown = '[*Upcoming Events*](#heading)\n\n. | .\n---|---\n'
    for event in events:
        date = datetime.datetime.strptime(event['start']['date'], '%Y-%m-%d')
        link = event['htmlLink']
        markdown += date.strftime('%b **%-d**') + ' | ' + '[{}]'.format(
            event['summary']) + '({})'.format(link) + '\n'

    markdown += '\n**[See all](http://goo.gl/q9iOZb#button#slim)**'

    return markdown
Example #45
def finish():
    settings = config.getSettings()

    if 'db' not in settings or 'mongo' not in settings['db']:
        return

    # Send the HTML version of the log to Mongo for webpanel access
    logContentsHtml = file_manager.readAndDelete('app-cache/log-html.txt')
    # Establish database connection
    client = MongoClient(settings['db']['mongo']['host'],
                         settings['db']['mongo']['port'])
    db = client[settings['db']['mongo']['database']]
    db.authenticate(settings['db']['mongo']['username'],
                    settings['db']['mongo']['password'],
                    mechanism='SCRAM-SHA-1')
    # Insert the log as a document, including a timestamp
    db['botlog'].insert_one({
        "body": logContentsHtml,
        "time_finished": str(int(time()))
    })
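For reference, a sketch of the settings block finish() expects; the key names come from the code above, the values are placeholders:

settings_example = {
    'db': {
        'mongo': {
            'host': 'localhost',      # placeholder values throughout
            'port': 27017,
            'database': 'botlogs',
            'username': 'bot',
            'password': 'changeme',
        }
    }
}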
Example #46
    def get(self):
        log.log('\tGetting %s streams...' % self.name)
        startTime = time()
        settings = config.getSettings()

        if settings['dev_mode'] == True:
            log.log('\t\t...done getting %s streams! (cached data)' % self.name)
            cachedCopy = cache.readJson('%s.json' % self.name)
            return cachedCopy['data'] if cachedCopy != False else []

        # Get all CS:GO streams (CS:GO's MLG game ID is 13)
        mlgCsgoStreams = self.makeApiCall(self.channels_api)
        if mlgCsgoStreams == False:
            return self.useCachedCopy()

        streams = {}

        # Convert streams to usable, more uniform format
        for stream in mlgCsgoStreams:
            streams[stream['stream_name']] = self.convertStream(stream)

        # Get viewer info on all CS:GO channels
        streamInfo = self.makeApiCall(self.streams_api)
        if streamInfo == False:
            return self.useCachedCopy()

        liveStreams = []

        for stream in streamInfo:
            if stream['stream_name'] in streams:
                if 'viewers' in stream:
                    streams[stream['stream_name']]['viewers'] = '{:,}'.format(stream['viewers'])
                    streams[stream['stream_name']]['viewers_raw'] = stream['viewers']
                    liveStreams.append(streams[stream['stream_name']])

        elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
        log.log('\t\t\GREEN...done! %s' % (elapsedTime))

        cache.saveJson("%s.json" % self.name, {'time': time(), 'data': liveStreams})

        return liveStreams
Example #47
def getStatus():
    log.log('Getting CS:GO Matchmaking status...')
    startTime = time()

    settings = getSettings()

    if settings['dev_mode'] == True:
        log.log('\t... done! (using a cached copy)')
        return cache.readJson('matchmaking.json')

    if 'api_key' not in settings or 'steam' not in settings['api_key']:
        log.error('No Steam API key -- cannot retrieve CS:GO server status.')
        return {'status': 'UNKNOWN', 'url': 'down'}

    offline = {'status': 'OFFLINE', 'url': 'down'}
    online = {'status': 'ONLINE', 'url': 'up'}

    try:
        api_url = 'https://api.steampowered.com/ICSGOServers_730/GetGameServersStatus/v1/?key=%s'
        req = requests.get(api_url % settings['api_key']['steam'])
    except Exception as e:
        elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
        log.error(
            'Could not retrieve CS:GO GC status. (%s) %s' %
            (str(e), elapsedTime), 1)
        return offline

    status = req.json()['result']['matchmaking']['scheduler']

    elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
    log.log('\t...done! %s' % elapsedTime)

    if status == 'offline':
        cache.saveJson('matchmaking.json', offline)
        return offline
    elif status == 'normal':
        cache.saveJson('matchmaking.json', online)
        return online
    else:
        return {'status': 'UNKNOWN (%s)' % status, 'url': 'down'}
Example #48
    def __init__(self):
        self.settings = config.getSettings()
        self.oauth_info = config.getOAuthInfo()
        self.is_token_valid = self.validateToken(self.oauth_info)
        self.token = self.getOauth()

        parameters = ''
        request_headers = {}
        game_id = 0
        max_shown = str(self.settings['sidebar']['livestreams']['max_shown'] *
                        2)

        if 'twitch' in self.settings and 'client_id' in self.settings['twitch']:
            request_headers = {
                'Authorization': 'Bearer {}'.format(self.token),
                'Client-ID': self.settings['twitch']['client_id']
            }

        # If the sidebar.livestreams.twitch_include_only field is set in
        # settings, then we only grab those channels from Twitch.
        if 'twitch_include_only' in self.settings['sidebar']['livestreams']:
            channels = self.settings['sidebar']['livestreams'][
                'twitch_include_only']
            if channels != []:
                parameters = '?channel=' + ','.join(channels)

        if 'game_ids' in self.settings['sidebar']['livestreams']:
            if 'CSGO' in self.settings['sidebar']['livestreams']['game_ids']:
                game_id = self.settings['sidebar']['livestreams']['game_ids'][
                    'CSGO']
                # Turn parameters list into a stringified URL parameter chain
                parameters = '?game_id={}&first={}'.format(game_id, max_shown)

        # Variables to be used in the rest of the object, mainly inherited funcs
        self.name = 'Twitch'
        self.api_url = 'https://api.twitch.tv/helix/streams/' + parameters
        self.streams_field = 'data'
        self.request_headers = request_headers
Example #49
def getShortUrl(longUrl):
    settings = getSettings()

    if 'api_key' not in settings or 'google' not in settings['api_key']:
        log.error('No Google API key -- cannot shorten URLs')
        return longUrl

    data = json.dumps({'longUrl': longUrl})
    headers = {'Content-Type': 'application/json'}
    endpoint = 'https://www.googleapis.com/urlshortener/v1/url?key=%s' % settings[
        'api_key']['google']
    shortUrl = 'http://example.org/'
    try:
        req = requests.post(endpoint, data=data, headers=headers)
    except Exception as e:
        log.error('Could not shorten URL (%s)\n- %s' % (str(e), longUrl), 1)
        return longUrl
    try:
        shortUrl = req.json()['id']
    except Exception as e:
        log.error('Error parsing Google API JSON: %s' % str(e), 1)
        return longUrl
    return shortUrl
Example #50
def handle_database(command):
    connection = None
    try:
        config_data = getDbConfig(config.getSettings())
        connect_str = "dbname='" + config_data[
            'db_name'] + "' user='******'user'] + "' host='" + config_data[
                    'host'] + "' password='******'password'] + "'"
        connection = psycopg2.connect(connect_str)
        connection.autocommit = True
        cursor = connection.cursor()
        cursor.execute(command)
        if "SELECT" in command:
            table = cursor.fetchall()
            return table
        cursor.close()
    except psycopg2.DatabaseError as exception:
        error_message = "Uh oh, can't connect. Invalid dbname, user or password? \n" + str(
            exception)
        print(error_message)
    finally:
        if connection:
            connection.close()
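A usage sketch for the helper above; the query is illustrative and reads the connection_check table created in Example #55:

rows = handle_database("SELECT name FROM connection_check;")
if rows is not None:
    for (name,) in rows:
        print(name)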
Example #51
def buildMarkdown():
    log.log('> Beginning to build the matchticker...')
    startTime = time()

    settings = getSettings()

    if settings['dev_mode'] == True:
        log.log('\t...done! (using a cached copy)')
        return cache.read('matchticker.txt')

    if 'api_key' not in settings or 'gosugamers' not in settings['api_key']:
        log.error('No GosuGamers API key -- cannot build matchticker.')
        return ''

    # Get the stream information
    try:
        api_url = ''
        req = requests.get(api_url % settings['api_key']['gosugamers'])
    except requests.exceptions.RequestException as e:
        elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
        log.error('From GosuGamers API: %s %s' % (str(e), elapsedTime), 1)
        return ''
    if req.status_code == 403 or not req.ok or 'IP Address Not Allowed' in str(
            req.content):
        elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
        log.error('GosuGamers rejected our IP ' + elapsedTime, 1)
        return blankTicker(startTime)
    try:
        upcomingMatches = req.json()['matches']
    except Exception as e:
        elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
        log.error(
            'Issue with GosuGamers API JSON: %s %s' % (str(e), elapsedTime), 1)
        return ''

    # Matches to display
    matches = []
    gamesToGrab = 0

    if len(upcomingMatches) == 0:
        return blankTicker(startTime)

    if len(upcomingMatches) < settings['sidebar']['matchticker']['max_shown']:
        gamesToGrab = len(upcomingMatches)
    else:
        gamesToGrab = settings['sidebar']['matchticker']['max_shown']
    for i in range(0, gamesToGrab):
        matches.append({
            'tourny':
            prepareTournyTitle(upcomingMatches[i]['tournament']['name']),
            'team1': {
                'name':
                str(upcomingMatches[i]['firstOpponent']['shortName']),
                'cc':
                str(upcomingMatches[i]['firstOpponent']['country']
                    ['countryCode']).lower()
            },
            'team2': {
                'name':
                str(upcomingMatches[i]['secondOpponent']['shortName']),
                'cc':
                str(upcomingMatches[i]['secondOpponent']['country']
                    ['countryCode']).lower()
            },
            'time':
            getMatchTime(upcomingMatches[i]['datetime']),
            'url':
            upcomingMatches[i]['pageUrl'],
            'is_live':
            bool(upcomingMatches[i]["isLive"])
        })
    # Build the markdown
    matchtickerMd = ''
    matchMdTemplate = ('>>>\n'
                       '[~~__TOURNY__~~\n'
                       '~~__TIME__~~\n'
                       '~~__TEAM1__~~\n'
                       '~~__TEAM2__~~](__URL__#info)\n'
                       '[ ](#lang-__LANG1__)\n'
                       '[ ](#lang-__LANG2__)\n\n'
                       '>>[](#separator)\n\n')
    matchtickerMd = '[*Match Ticker*](#heading)\n\n'
    i = 0
    for match in matches:
        matchMd = matchMdTemplate
        matchMd = (matchMd.replace('__TOURNY__', match['tourny']).replace(
            '__TIME__',
            match['time']).replace('__TEAM1__',
                                   match['team1']['name']).replace(
                                       '__TEAM2__',
                                       match['team2']['name']).replace(
                                           '__LANG1__',
                                           match['team1']['cc']).replace(
                                               '__LANG2__',
                                               match['team2']['cc']).replace(
                                                   '__URL__', match['url']))
        matchtickerMd += matchMd
        i += 1
    matchtickerMd += '>>**[See all](http://bit.ly/1xGEuiJ#button#slim)**'

    cache.save('matchticker.txt', matchtickerMd)

    characters = '\YELLOW(%d characters)' % len(matchtickerMd)
    elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
    log.log('\t\GREEN...done! %s %s \n' % (characters, elapsedTime))

    return matchtickerMd
Example #52
def build():
    log.log('> Beginning to build the livestream feed...')
    livestreamFeedStartTime = time()

    settings = config.getSettings()

    # Fetch all the livestream data
    livestreams = []
    services = (s.lower()
                for s in settings['sidebar']['livestreams']['services'])
    if settings['sidebar']['livestreams']['services'] != []:
        if 'twitch' in services:
            try:
                livestreams += twitch.Twitch().get()
            except KeyError as ke:
                log.log(
                    '\t\tFailed to retrieve Twitch streams. \n\t\tKeyError: {}'
                    .format(ke))
        if 'youtube' in services:
            try:
                livestreams += youtube.youtube_search()
            except KeyError as ke:
                log.log(
                    '\t\tFailed to retrieve YouTube Gaming streams. \n\t\tKeyError: {}'
                    .format(ke))
        if 'fbwatch' in services:
            try:
                livestreams += fbwatch.FBWatch().get()
            except KeyError as ke:
                log.log(
                    '\t\tFailed to retrieve Facebook Watch ESL stream. \n\t\tKeyError: {}'
                    .format(ke))
    else:
        livestreams = twitch.Twitch().get() \
                    + hitbox.Hitbox().get() \
                    + azubu.Azubu().get() \
                    + mlg.MLG().get() \
                    + youtube.youtube_search() \
                    + fbwatch.FBWatch().get()

    # Return a blank container's markdown if there are no streams
    if len(livestreams
           ) == 0 and settings['sidebar']['livestreams']['none_message'] != '':
        return {
            'markdown': '[*Live Streams*](#heading)\n\n    %s' \
                % settings['sidebar']['livestreams']['none_message'],
            'spritesheet_path': None
        }

    # Sort the livestreams by number of viewers, descending
    livestreams = sorted(livestreams,
                         key=lambda channel: channel['viewers_raw'],
                         reverse=True)
    # Trim off the remainder if there were more than the number we're supposed to use
    if len(livestreams) > settings['sidebar']['livestreams']['max_shown']:
        livestreams = livestreams[:settings['sidebar']['livestreams']
                                  ['max_shown']]

    # TODO: Clean up all this code, the naming is all horrid
    templates = config.getTemplates()

    # Goes in between items in the livestream list
    if 'livestreams' in templates and 'separator' in templates['livestreams']:
        separator = templates['livestreams']['separator']
    else:
        separator = '>>[](#separator)'
    separator = '\n\n' + separator + '\n\n'

    # Template for a stream in the livestream list
    if 'livestreams' in templates and 'stream' in templates['livestreams']:
        livestreamMdTemplate = templates['livestreams']['stream']
    else:
        # Default template
        livestreamMdTemplate = ('>>>#[__TITLE__](__URL__#profile-__INDEX__)\n'
                                '##\n'
                                '### __VIEWERS__ @ __STREAMER__\n')
    livestreamMdTemplate += separator

    # Template for the livestreams section heading
    if 'livestreams' in templates and 'heading' in templates['livestreams']:
        livestreamsMd = templates['livestreams']['heading'] + '\n\n'
    else:
        # Default heading
        livestreamsMd = '[*Live Streams*](#heading)\n\n'

    i = 0
    for livestream in livestreams:
        livestreamMd = livestreamMdTemplate
        livestreamMd = (livestreamMd.replace(
            '__TITLE__',
            livestream['title']).replace('__URL__', livestream['url']).replace(
                '__INDEX__',
                str(i)).replace('__VIEWERS__', livestream['viewers']).replace(
                    '__STREAMER__', livestream['streamer']))
        livestreamsMd += livestreamMd
        i += 1

    if 'see_all_link' in settings['sidebar']['livestreams']:
        if 'livestreams' in templates and 'see_all' in templates['livestreams']:
            see_all_template = templates['livestreams']['see_all']
        else:
            see_all_template = '>>**[See all](__LINK__#button#slim)**'
        see_all_template = see_all_template.replace(
            '__LINK__', settings['sidebar']['livestreams']['see_all_link'])
        livestreamsMd += see_all_template

    # This ignore bit is because any lines starting with a hashtag
    # are headers in Reddit Markdown and comments in YAML, so in the
    # YAML, these lines must be preceded with something unique so that
    # we can parse those lines properly.  In case this was pulled from the
    # wiki, we need to remove all the potential %%IGNORE%%s from the string
    livestreamsMd = livestreamsMd.replace('%%IGNORE%%', '')

    spritesheetPath = None

    if settings['sidebar']['livestreams']['show_thumbnails']:
        uploadThumbnailsStartTime = time()
        log.log('\n\tGenerating spritesheet...')
        spritesheetPath = generateSpritesheet(
            [x['thumbnail'] for x in livestreams])
        log.log('\t\t\GREEN...done! ' + '\BLUE(%s s)' %
                str(round(time() - uploadThumbnailsStartTime, 3)))

    characters = '\YELLOW(%d characters)' % len(livestreamsMd)
    elapsedTime = '\BLUE(%s s)' % str(
        round(time() - livestreamFeedStartTime, 3))
    log.log('\GREENDone building livestream feed. %s %s \n' %
            (characters, elapsedTime))

    return {'markdown': livestreamsMd, 'spritesheet_path': spritesheetPath}
Example #53
import config
import log
from time import time
from apiclient import discovery
from apiclient import errors
from oauth2client.tools import argparser
from pprint import pprint

settings = config.getSettings()

DEVELOPER_KEY = settings['api_key']['youtube']
YOUTUBE_API_SERVICE_NAME = "youtube"
YOUTUBE_API_VERSION = "v3"
SEARCH_PART = "snippet"
VIDEOS_PART = "liveStreamingDetails"
base_url = 'https://gaming.youtube.com/watch?v='
ret = []


def youtube_search():
    log.log('\tGetting YouTube Gaming streams...')
    startTime = time()

    youtube = discovery.build(YOUTUBE_API_SERVICE_NAME,
                              YOUTUBE_API_VERSION,
                              developerKey=DEVELOPER_KEY)

    for channel in settings['sidebar']['livestreams'][
            'youtube_channels'].values():
        try:
            # Call search.list to retrieve results matching the specified query term
Example #54
def runAutoposterAndBuildMarkdown(r):
    notices = config.getNotices()
    settings = config.getSettings()

    noticesToBuild = []
    potentialStickies = []

    # Extra \n just to add some whitespace
    log.log('> Running the auto-poster and building notices...')
    startTime = time()

    for notice in notices:
        if notice['notice_title'] != 'Default Notice Title':
            log.log("\t\MAGENTAFor \"%s\"" % notice['notice_title'])
        else:
            log.log("\t\MAGENTAFor \"%s\"" % notice['thread_title'])

        live = '\GREENlive'
        notLive = '\REDnot live'

        postIsLive = isPostLive(notice)
        log.log('\t\tThe post is ' + (live if postIsLive else notLive) + '.')
        noticeIsLive = isNoticeLive(notice)
        log.log('\t\tThe notice is ' + (live if noticeIsLive else notLive) +
                '.')

        if noticeIsLive:
            noticesToBuild.append(notice)

        # Determine eligibility for being posted
        # --- Subtracting 60 sec from the last_posted time is to account for the fact that no
        # --- notices/threads are ever posted at *exactly* the scheduled time, because it takes a few
        # --- seconds to post at the least. Therefore, without shaving off 60 seconds (which is a very
        # --- generous 'buffer' for how long it may take to post any given recurring notice/thread),
        # --- the 'last_posted' time will increase by a few seconds each week, making a scheduled item
        # --- not go up until the next 5 min update interval, and continues increasing until 'Reset Timing'
        # --- is issued from the webpanel. This is really just a bandaid for now and should be rewritten.
        timeSinceLastPosted = (int(time()) - notice['last_posted']) - 60

        eligibleForPosting = timeSinceLastPosted > frequency[
            notice['frequency']]
        if notice['frequency'] == 'once' and notice['last_posted'] != 0:
            eligibleForPosting = False

        # Determine if it should be posted, and get its ID
        if eligibleForPosting and postIsLive:
            post = createPostFromNotice(r, settings, notice)
            notice['last_posted'] = int(time())
            notice['last_posted_id'] = post.id
            post_id = post.id
        else:
            post_id = notice['last_posted_id']

        # Handle stickies later when we can ensure we're on the primary bot acct
        if post_id != None and post_id != "":
            potentialStickies.append({'postIsLive': postIsLive, 'id': post_id})

        log.log('')

    # Handle whether or not the posts should be stickied
    for potential in potentialStickies:
        stickied = isPostStickied(r, potential['id'])
        if stickied == False and potential['postIsLive']:
            stickyPost(r, potential['id'])
        elif stickied and potential['postIsLive'] == False:
            unstickyPost(r, potential['id'])

    # Write changes of the notices to the notices file
    config.saveNotices(notices)

    noticesMd = ''
    # Shuffle the notices if necessary
    if len(noticesToBuild) > 3:
        random.shuffle(noticesToBuild)
        noticesToBuild = noticesToBuild[:3]
    # Build the notices markdown string
    template = "1. [__TITLE__](__LINK__#__CATEGORY__)\n"
    for notice in noticesToBuild:
        if notice['type'] == 'autopost+notice':
            noticesMd += (template.replace(
                '__TITLE__', formatTitle(notice['notice_title'])).replace(
                    '__LINK__',
                    "http://redd.it/" + notice['last_posted_id']).replace(
                        '__CATEGORY__', notice['category']))
        else:
            noticesMd += (template.replace(
                '__TITLE__', formatTitle(notice['notice_title'])).replace(
                    '__LINK__',
                    notice['notice_link']).replace('__CATEGORY__',
                                                   notice['category']))
    elapsedTime = '\BLUE(%s s)' % str(round(time() - startTime, 3))
    log.log('\GREENDone running the auto-poster and building notices! %s \n' %
            elapsedTime)

    # Return the markdown
    return noticesMd
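The frequency lookup table used above is not shown; a sketch, assuming the values are in seconds (only the 'once' key is confirmed by the code, the others are guesses):

frequency = {
    'once': 0,                     # only ever posted once
    'daily': 24 * 60 * 60,         # hypothetical key
    'weekly': 7 * 24 * 60 * 60,    # hypothetical key
}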
Example #55
    # here comes a check if server is prod
    is_prod = False

    if is_dev:
        return db_settings['dev']
    elif is_test:
        return db_settings['test']
    elif is_prod:
        return db_settings['prod']

    return dict()


try:
    # get config data
    config_data = getDbConfig(config.getSettings())
    # setup connection string
    connect_str = "dbname='" + config_data[
        'db_name'] + "' user='******'user'] + "' host='" + config_data[
                'host'] + "' password='******'password'] + "'"
    # use our connection values to establish a connection
    conn = psycopg2.connect(connect_str)
    # set autocommit option, to do every query when we call it
    conn.autocommit = True
    # create a psycopg2 cursor that can execute queries
    cursor = conn.cursor()
    # removing the test table if it already exists
    cursor.execute("""DROP TABLE IF EXISTS connection_check;""")
    # create a new table with a single column called "name"
    cursor.execute("""CREATE TABLE connection_check (name varchar(40));""")
Example #56
 def __init__(self):
     self.settings = config.getSettings()
     self.weatherAPIKey = self.settings["openWeatherMapAPIKey"]
     self.zipCode = self.settings["zipCode"]
     self.units = self.settings["units"]