def _email_picture(self):
     """Take a photo and email it to the configured recipient.

     Announces the action via TTS, captures an image with
     self._take_picture() (saved to /home/pi/Desktop/image.jpg),
     then sends it as an email attachment and confirms aloud.
     """
     aiy.audio.say('Sure I can take a picture for you')
     self._take_picture()
     # email_user / email_password appear to be module-level credentials,
     # and the recipient address is redacted -- TODO confirm both.
     email('Here is the picture you asked for:',
           'Took Picture',
           email_user,
           email_password,
           '*****@*****.**',
           files=['/home/pi/Desktop/image.jpg'])
     aiy.audio.say("I emailed the picture to you")
Esempio n. 2
0
def register():
    """Register a new user account.

    GET (or invalid POST): render the registration form.
    POST with a valid form: require matching password fields and a
    well-formed email, reject duplicate accounts, then store a
    bcrypt-hashed credential document, send a welcome email, log the
    user in via the session, and redirect to the logged-in page.

    Returns:
        A rendered ``register.html`` response or a redirect to the
        ``logged_in`` endpoint.
    """
    # Already authenticated -> skip straight to the logged-in view.
    if "email" in session:
        emailval = session['email']
        return redirect(
            url_for("logged_in",
                    message=f"You are already logged in as {emailval}"))

    form = RegisterForm()

    if form.validate_on_submit():
        if form.password.data != form.password2.data:
            return render_template("register.html",
                                   form=form,
                                   invalid="passwords do not match")

        if not check_email(form.email.data):
            return render_template("register.html",
                                   form=form,
                                   invalid="invalid email")

        # Check for an existing account *before* the expensive bcrypt
        # hash, so duplicate submissions don't burn a full key-derivation.
        # NOTE(review): find_one + insert_one is not atomic; a unique
        # index on "email" should back this check -- TODO confirm.
        email_found = collection.find_one({"email": form.email.data})
        if email_found:
            return render_template("register.html",
                                   form=form,
                                   invalid="user already exists")

        hashed = bcrypt.hashpw(form.password.data.encode('utf-8'),
                               bcrypt.gensalt())
        auth_dict = {
            "email": str(form.email.data),
            "password": hashed,
            "register_date": datetime.datetime.now(),
        }
        collection.insert_one(auth_dict)

        emailer.email(form.email.data)
        session["email"] = form.email.data
        # Renamed from `email` to avoid shadowing the emailer import style
        # used elsewhere in this file.
        user_email = form.email.data
        return redirect(
            url_for('logged_in',
                    invalid=f"Account was created with the username {user_email}"))

    return render_template("register.html", form=form)
Esempio n. 3
0
    def send_email(self):
        """Collect the players registered under an email address and mail them.

        Reads the ``email`` request parameter; responds with 400 when it is
        missing/blank.  Otherwise gathers every player whose stored details
        carry that address and, if any were found, hands the list to the
        mailer.  Always returns an empty dict on success.
        """
        address = self.request.params.get("email", "").strip()
        logger.debug(self.request.params)

        # A blank address means the request did not validate.
        if not address:
            return HTTPBadRequest()

        # Every player whose details record this exact email address.
        matching = [
            {'name': entry.details['name'] or '',
             'id': pid}
            for pid, entry in self.data.players.items()
            if 'email' in entry.details and entry.details['email'] == address
        ]

        if matching:
            logger.info('Send players for email: ' + address)
            import emailer
            emailer.email(address, matching)
        else:
            logger.info('No players for email: ' + address)
        return {}
Esempio n. 4
0
def main():
    """Mirror new EPEL SRPMs into Launchpad bzr branches and file MF builds.

    Python 2 script (print statements, cPickle, urlopen).  For each package
    named in the configured package file: scan the cached EPEL index for a
    matching SRPM, import it into the package's bzr branch, push, then
    create the package / package-branch / build records in Monkey Farm.
    Per-package progress and errors accumulate via store_message() into the
    module-global ``pkgs`` dict and are emailed at the end when --email is
    given.  --forcegit ignores the version cache and force-pushes.
    """

    # Build my Parser with help for user input
    parser = argparse.ArgumentParser()
    parser.add_argument('--email', '-e', action='store_true',
                dest='email', default=None, help='Send output in email')
    parser.add_argument('--forcegit', '-f', action='store_true',
                dest='forcegit', default=None, help='Wipe away old Git and start with a fresh repo')
    args = parser.parse_args()

    # get our config
    config = get_config()

    # cache the contents of epel, so we can for loop, and seek on it
    # NOTE(review): none of these file handles are closed with a context
    # manager; fine for a short-lived py2 script, but worth tightening.
    u = urlopen(config['baseurl'])
    f = open(os.path.expanduser(config['baseurl_cache']), 'w')
    f.write(u.read())
    f.close()

    # if we have a cache we will link against it
    # this will verify we only process once per version
    if args.forcegit:
        cache = []
    else:
        if os.path.exists(config['cache_file']):
            cache = cPickle.load(open(config['cache_file'], 'r'))
        else:
            cache = []

    # get a list of all repos in launchpad
    repos = lp_repos()

    # a dict to hold all messages, we will use this to email later
    global pkgs
    pkgs = {}

    # read over our file list of package names
    for p in open(os.path.expanduser(config['pkgfile']), 'r').readlines():
        p = p.strip("\n")

        # empty list to hold our messages
        pkgs[p] = []

        content = open(os.path.expanduser(config['baseurl_cache']), 'r')
        for line in content.readlines():

            # regex to find all SRPM matching our package name
            # NOTE(review): '[\.el6]?' is a character class (one optional
            # char from '.', 'e', 'l', '6'), not the literal suffix; the
            # author almost certainly meant '(\.el6)?'.  Left unchanged
            # because fixing it would renumber the capture groups.
            m = match('.*>((%s-[\w.]*-[\w.]*)[\.el6]?\.src.rpm).*' % p, line)
            if m:
                full_p = m.group(1)
                pkg_name = m.group(2)
                pkg_name = pkg_name.replace('.el6', '')

                # if we have this version lets continue to the next package
                if full_p in cache:
                    store_message(p, '%s is in cache no need to go any further' % p)
                    continue

                # currently repos need to be created manually in LP
                # if a repo does not exist we will be forced to skip
                reponame = p
                print '\n== %s ==' % p
                if reponame not in repos:
                    store_message(p, '%s does not exist in launchpad' % reponame)
                    continue

                # create a tmp location for each package
                # this will end up being the git repo
                tmp = mkdtemp()
                store_message(p, 'working out of %s' % tmp)
                os.chdir(tmp)

                # all checks passed thus far, we can now start our bzr shell commands
                # we now need to worry about success and failures for commands.
                proceed = True

                if proceed:
                    store_message(p, 'pulling bzr repo')
                    rm = 'lp:~ius-coredev/ius/%s' % p.lower()
                    remote = run(['bzr', 'branch', rm])
                    os.chdir(p)
                    if remote.returncode > 0:
                        proceed = False
                        store_message(p, remote.communicate())

                if proceed:
                    # download the SRPM to current directory
                    store_message(p, 'fetching package for %s' %full_p)
                    f = open(full_p, 'w')
                    pkg = urlopen("%s/%s" % (config['baseurl'], full_p))
                    f.write(pkg.read())
                    f.close()

                if proceed:
                    # install the SRPM
                    store_message(p, 'installing %s' % full_p)
                    rpm = run(['rpm', '-i', full_p])
                    if rpm.returncode > 0:
                        proceed = False
                        store_message(p, rpm.communicate())
                    else:
                        # we do not want the SRPM in the Git repo
                        os.remove(full_p)

                if proceed:
                    store_message(p, 'adding all files to bzr')
                    add = run(['bzr', 'add', '.'])
                    if add.returncode > 0:
                        proceed = False
                        store_message(p, add.communicate())

                if proceed:
                    store_message(p, 'commmiting changes to git')
                    commit = run(['bzr', 'commit', '-m', '[commit] %s' % full_p])
                    if commit.returncode > 0:
                        proceed = False
                        store_message(p, commit.communicate())

                if proceed:
                    store_message(p, 'pushing changes to git')
                    if args.forcegit:
                        push = run(['git', 'push', '-f', 'origin', 'master'])
                    else:
                        push = run(['git', 'push', 'origin', 'master'])
                    if push.returncode > 0:
                        proceed = False
                        store_message(p, push.communicate())

                if proceed:
                    # if everything was successful we can add to our cache
                    cache.append(full_p)

                    # save all our work to the pickle cache
                    f = open(os.path.expanduser(config['cache_file']), 'wb')
                    cPickle.dump(cache, f)
                    f.close()

                # at this point the package source is in our repo
                # we can now start to work with Monkey Farm to get
                # a build submitted

                if proceed:
                    # lets first verify a package by this name exists in MF
                    from mymonkeyfarm import connect, createpackagebranch, createpackage, createbuild
                    hub = connect()

                    try:
                        hub.package.get_one(p, 'rpmdev')
                    except HTTPError:
                        # It does not appear a package exists, we should create it now
                        spec = open('SPECS/%s.spec' % p, 'r').read()

                        try:
                            summary = search('Summary:(.*)', spec)
                            summary = summary.group(1).lstrip()
                        except AttributeError:
                            store_message(p, 'failed to pull summary from spec')
                            proceed = False
                        else:
                            # We were able to grab the summary
                            # lets start by creating our package
                            store_message(p, 'creating package %s in MF' % p)
                            package = createpackage(hub, p, config['user_label'], summary)
                            for errors in package['errors']:
                                store_message(p, errors + ': ' + package['errors'][errors])
                                proceed = False

                            if proceed:
                                # We now need to create our package_branch
                                store_message(p, 'creating package_branch in MF')
                                branch = createpackagebranch(hub, p)
                                for errors in branch['errors']:
                                    store_message(p, errors + ': ' + branch['errors'][errors])
                                    proceed = False

                    if proceed:
                        # at this point we should have our package and branchs created
                        # lets go ahead and submit the build to MF
                        store_message(p, 'creating build %s in MF' % pkg_name)
                        build = createbuild(hub, p, config['user_label'], pkg_name)
                        for errors in build['errors']:
                            store_message(p, errors + ': ' + build['errors'][errors])
                            proceed = False

                # its now safe to delete the tmp location we were using
                shutil.rmtree(tmp)

        # set our URL cache back
        # this allows us to parse from top to bottom again
        content.seek(0)

    if args.email:
        # And finally we can use our stored message to email
        email(config['toaddr'], config['fromaddr'], pkgs)
Esempio n. 5
0
#!/usr/bin/env python2
Esempio n. 6
0
 def __init__(self, debug=False):
     """Initialize stream state and the email helper.

     debug: propagated to the mailer so it can run in debug mode.
     """
     self.DEBUG = debug
     # Stream assumed healthy until proven otherwise.
     self.streamStatus = True
     # NOTE(review): emailer.email(...) looks like it returns a reusable
     # mailer handle rather than sending immediately -- TODO confirm.
     self.email = emailer.email(self.DEBUG)
     self.fixStream = False
Esempio n. 7
0
def main():
    """Mirror new EPEL SRPMs into Launchpad bzr branches and file MF builds.

    Python 2 script (print statements, cPickle, urlopen).  For each package
    named in the configured package file: scan the cached EPEL index for a
    matching SRPM, import it into the package's bzr branch, push, then
    create the package / package-branch / build records in Monkey Farm.
    Per-package progress and errors accumulate via store_message() into the
    module-global ``pkgs`` dict and are emailed at the end when --email is
    given.  --forcegit ignores the version cache and force-pushes.
    """

    # Build my Parser with help for user input
    parser = argparse.ArgumentParser()
    parser.add_argument('--email',
                        '-e',
                        action='store_true',
                        dest='email',
                        default=None,
                        help='Send output in email')
    parser.add_argument('--forcegit',
                        '-f',
                        action='store_true',
                        dest='forcegit',
                        default=None,
                        help='Wipe away old Git and start with a fresh repo')
    args = parser.parse_args()

    # get our config
    config = get_config()

    # cache the contents of epel, so we can for loop, and seek on it
    # NOTE(review): none of these file handles are closed with a context
    # manager; fine for a short-lived py2 script, but worth tightening.
    u = urlopen(config['baseurl'])
    f = open(os.path.expanduser(config['baseurl_cache']), 'w')
    f.write(u.read())
    f.close()

    # if we have a cache we will link against it
    # this will verify we only process once per version
    if args.forcegit:
        cache = []
    else:
        if os.path.exists(config['cache_file']):
            cache = cPickle.load(open(config['cache_file'], 'r'))
        else:
            cache = []

    # get a list of all repos in launchpad
    repos = lp_repos()

    # a dict to hold all messages, we will use this to email later
    global pkgs
    pkgs = {}

    # read over our file list of package names
    for p in open(os.path.expanduser(config['pkgfile']), 'r').readlines():
        p = p.strip("\n")

        # empty list to hold our messages
        pkgs[p] = []

        content = open(os.path.expanduser(config['baseurl_cache']), 'r')
        for line in content.readlines():

            # regex to find all SRPM matching our package name
            # NOTE(review): '[\.el6]?' is a character class (one optional
            # char from '.', 'e', 'l', '6'), not the literal suffix; the
            # author almost certainly meant '(\.el6)?'.  Left unchanged
            # because fixing it would renumber the capture groups.
            m = match('.*>((%s-[\w.]*-[\w.]*)[\.el6]?\.src.rpm).*' % p, line)
            if m:
                full_p = m.group(1)
                pkg_name = m.group(2)
                pkg_name = pkg_name.replace('.el6', '')

                # if we have this version lets continue to the next package
                if full_p in cache:
                    store_message(
                        p, '%s is in cache no need to go any further' % p)
                    continue

                # currently repos need to be created manually in LP
                # if a repo does not exist we will be forced to skip
                reponame = p
                print '\n== %s ==' % p
                if reponame not in repos:
                    store_message(p,
                                  '%s does not exist in launchpad' % reponame)
                    continue

                # create a tmp location for each package
                # this will end up being the git repo
                tmp = mkdtemp()
                store_message(p, 'working out of %s' % tmp)
                os.chdir(tmp)

                # all checks passed thus far, we can now start our bzr shell commands
                # we now need to worry about success and failures for commands.
                proceed = True

                if proceed:
                    store_message(p, 'pulling bzr repo')
                    rm = 'lp:~ius-coredev/ius/%s' % p.lower()
                    remote = run(['bzr', 'branch', rm])
                    os.chdir(p)
                    if remote.returncode > 0:
                        proceed = False
                        store_message(p, remote.communicate())

                if proceed:
                    # download the SRPM to current directory
                    store_message(p, 'fetching package for %s' % full_p)
                    f = open(full_p, 'w')
                    pkg = urlopen("%s/%s" % (config['baseurl'], full_p))
                    f.write(pkg.read())
                    f.close()

                if proceed:
                    # install the SRPM
                    store_message(p, 'installing %s' % full_p)
                    rpm = run(['rpm', '-i', full_p])
                    if rpm.returncode > 0:
                        proceed = False
                        store_message(p, rpm.communicate())
                    else:
                        # we do not want the SRPM in the Git repo
                        os.remove(full_p)

                if proceed:
                    store_message(p, 'adding all files to bzr')
                    add = run(['bzr', 'add', '.'])
                    if add.returncode > 0:
                        proceed = False
                        store_message(p, add.communicate())

                if proceed:
                    store_message(p, 'commmiting changes to git')
                    commit = run(
                        ['bzr', 'commit', '-m',
                         '[commit] %s' % full_p])
                    if commit.returncode > 0:
                        proceed = False
                        store_message(p, commit.communicate())

                if proceed:
                    store_message(p, 'pushing changes to git')
                    if args.forcegit:
                        push = run(['git', 'push', '-f', 'origin', 'master'])
                    else:
                        push = run(['git', 'push', 'origin', 'master'])
                    if push.returncode > 0:
                        proceed = False
                        store_message(p, push.communicate())

                if proceed:
                    # if everything was successful we can add to our cache
                    cache.append(full_p)

                    # save all our work to the pickle cache
                    f = open(os.path.expanduser(config['cache_file']), 'wb')
                    cPickle.dump(cache, f)
                    f.close()

                # at this point the package source is in our repo
                # we can now start to work with Monkey Farm to get
                # a build submitted

                if proceed:
                    # lets first verify a package by this name exists in MF
                    from mymonkeyfarm import connect, createpackagebranch, createpackage, createbuild
                    hub = connect()

                    try:
                        hub.package.get_one(p, 'rpmdev')
                    except HTTPError:
                        # It does not appear a package exists, we should create it now
                        spec = open('SPECS/%s.spec' % p, 'r').read()

                        try:
                            summary = search('Summary:(.*)', spec)
                            summary = summary.group(1).lstrip()
                        except AttributeError:
                            store_message(p,
                                          'failed to pull summary from spec')
                            proceed = False
                        else:
                            # We were able to grab the summary
                            # lets start by creating our package
                            store_message(p, 'creating package %s in MF' % p)
                            package = createpackage(hub, p,
                                                    config['user_label'],
                                                    summary)
                            for errors in package['errors']:
                                store_message(
                                    p,
                                    errors + ': ' + package['errors'][errors])
                                proceed = False

                            if proceed:
                                # We now need to create our package_branch
                                store_message(p,
                                              'creating package_branch in MF')
                                branch = createpackagebranch(hub, p)
                                for errors in branch['errors']:
                                    store_message(
                                        p, errors + ': ' +
                                        branch['errors'][errors])
                                    proceed = False

                    if proceed:
                        # at this point we should have our package and branchs created
                        # lets go ahead and submit the build to MF
                        store_message(p, 'creating build %s in MF' % pkg_name)
                        build = createbuild(hub, p, config['user_label'],
                                            pkg_name)
                        for errors in build['errors']:
                            store_message(
                                p, errors + ': ' + build['errors'][errors])
                            proceed = False

                # its now safe to delete the tmp location we were using
                shutil.rmtree(tmp)

        # set our URL cache back
        # this allows us to parse from top to bottom again
        content.seek(0)

    if args.email:
        # And finally we can use our stored message to email
        email(config['toaddr'], config['fromaddr'], pkgs)
Esempio n. 8
0
 def __init__(self, debug=False):
     """Initialize stream state and the email helper.

     debug: propagated to the mailer so it can run in debug mode.
     """
     self.DEBUG = debug
     # Stream assumed healthy until proven otherwise.
     self.streamStatus = True
     # NOTE(review): emailer.email(...) looks like it returns a reusable
     # mailer handle rather than sending immediately -- TODO confirm.
     self.email = emailer.email(self.DEBUG)
     self.fixStream = False
Esempio n. 9
0
import praw
from pprint import pprint

from emailer import email

__author__ = 'Dan'

# Scratch/experimental script: the reddit-scraping section below is
# intentionally disabled; only the bare email() call runs.

#r = praw.Reddit(user_agent='Testing reddit API for Dan B.')

#submissions = r.get_subreddit('starcraft').get_hot(limit=10)

#links = {}
#for submission in submissions:
    #links[str(submission)] = str(submission.url)

#pprint(links)

# NOTE(review): email() is invoked with no arguments -- confirm emailer
# provides defaults, otherwise this raises TypeError at import time.
email()

##TODO: now build up email and send it out.
Esempio n. 10
0
#!/usr/bin/env python2