Example #1
import json
import os

from fs import opener  # pyfilesystem2; the pcloud:// opener is provided by the pcloud package
from pcloud import PyCloud


class CloudImageLoader(object):
    def __init__(self, uname, password):
        self.uname = uname
        self.password = password
        self.pc = PyCloud(uname, password)

    def add_file(self, json_body):
        number = self._get_progressive_number()
        testfile = os.path.join(os.path.dirname(__file__), 'file_to_upload', str(number) + '.json')
        with open(testfile, "w+") as f:
            f.write(json_body)
        result = self.pc.uploadfile(files=[testfile])
        os.remove(testfile)
        return result

    @property
    def get_dataset_list(self):
        return self._list_remote_folder()['metadata']['contents']

    def _get_progressive_number(self):
        # Next free filename: the highest existing "<n>.json" in the remote folder, plus one.
        try:
            lis = self._list_remote_folder()
            numbers = [int(x['name'].replace('.json', '')) for x in lis['metadata']['contents']]
            return max(numbers) + 1
        except (KeyError, IndexError, ValueError):
            # Empty or unreadable folder: start numbering from zero.
            return 0

    def _list_remote_folder(self):
        # listfolder() already returns the parsed JSON response as a dict,
        # so no string round-trip is needed.
        return self.pc.listfolder(folderid=0)

    def _read_from_file(self, pcloudfs, file_name, file_size):
        with pcloudfs.openbin("/" + file_name, mode="r") as f:
            # The stored payloads use single quotes, so normalise them before parsing.
            json_string = f.read(file_size).decode('utf-8').replace("'", '"')
        json_obj = json.loads(json_string)
        return json_obj['specie'], json_obj['img']

    def get_data_file(self, file_name, file_size):
        uname = self.uname.replace('@', '%40')  # URL-encode the '@' in the username
        link = 'pcloud://' + uname + ':' + self.password + '@/'
        with opener.open_fs(link) as pcloudfs:
            specie, img = self._read_from_file(pcloudfs, file_name, file_size)

        return {
            'name': file_name,
            'specie': specie,
            'img': img,
        }
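
A minimal round trip with this loader might look like the following sketch; the credentials, payload, and file size are placeholders, and file_size must cover at least the byte length of the stored JSON:

loader = CloudImageLoader('user@example.com', 'secret')
loader.add_file('{"specie": "cat", "img": "<base64 data>"}')
record = loader.get_data_file('0.json', file_size=4096)
print(record['specie'])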
Example #3
from pcloud import PyCloud

pc = None  # module-level singleton client


def pcloud() -> PyCloud:
    # Lazily build one shared PyCloud client; load_params() populates the
    # module-level `params` dict read below.
    global pc

    load_params()
    if pc is None:
        pc = PyCloud(params['pcloud']['user'], params['pcloud']['password'])
    return pc
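
Callers then share one authenticated session instead of logging in on every call:

files = pcloud().listfolder(folderid=0)
same = pcloud() is pcloud()  # True: repeated calls reuse the cached client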
Example #4
import os

from pcloud import PyCloud


class CloudInterface(object):
    def __init__(self, user, password):
        self.pc = PyCloud(user, password)

    def uploadFiles(self, localpath, folderid):
        results = []
        for dirpath, dirnames, filenames in os.walk(localpath):
            for n in filenames:
                # Build the full path from the directory being walked, and
                # upload each file exactly once.
                results.append(self.pc.uploadfile(files=[os.path.join(dirpath, n)],
                                                  folderid=folderid))
        return results

    def cloneFolders(self, localpath, folderid):
        subfolders = os.listdir(localpath)
        folderlist = self.pc.listfolder(folderid=folderid)
        current_folder = {}
        # Find the existing remote folders to decide which ones we still need.
        for value in folderlist['metadata']['contents']:
            if value.get('isfolder'):
                current_folder[value.get('name', '')] = value.get('folderid', 0)

        for folder in subfolders:
            if folder not in current_folder:
                # Create the missing folder and add it to the current folder map.
                results = self.pc.createfolder(folderid=folderid, name=folder)
                if results['result'] == 0:
                    current_folder[folder] = results['metadata']['folderid']
        return current_folder

    def getFiles(self, folder=True, folderid=0):
        # Return {name: id} for subfolders (folder=True) or plain files (folder=False).
        ls = self.pc.listfolder(folderid=folderid)
        entries = {}
        for item in ls['metadata']['contents']:
            if folder and item.get('isfolder'):
                entries[item.get('name', '')] = item.get('folderid', 0)
            elif not folder and not item.get('isfolder'):
                entries[item.get('name', '')] = item.get('fileid')
        return entries

    def getConfig(self, folderid=0):
        # Fetch the configuration file from pCloud (not yet implemented).
        pass
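
For instance, mirroring a local directory into the pCloud root could combine these methods (credentials and paths are hypothetical):

ci = CloudInterface('user@example.com', 'secret')
ci.cloneFolders('/data/photos/', 0)              # ensure remote subfolders exist
ci.uploadFiles('/data/photos/', folderid=0)      # upload the files themselves
print(ci.getFiles(folder=False, folderid=0))     # map of uploaded file names to ids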
Example #5
import os
import sys

from pcloud import PyCloud

# CloudConfig, LoggerFactory and Utils are project helpers omitted from this snippet.


def upload(localSyncFolderConfig, cloudSyncFolderConfig):
    cloudConfig = CloudConfig()

    try:
        pyCloud = PyCloud(cloudConfig.username, cloudConfig.password)

        pyCloud.createfolderifnotexists(path=cloudSyncFolderConfig.folder)
        pyCloud.createfolderifnotexists(path=cloudSyncFolderConfig.tmpfolder)

        for filename in os.listdir(localSyncFolderConfig.folder):
            LoggerFactory.getLogger().info("filename=" + filename + "," + "from=" + localSyncFolderConfig.folder)
            LoggerFactory.getLogger().info("-> upload to=" + cloudSyncFolderConfig.folder)
            # Upload into the temporary folder first, then rename into place,
            # so readers never see half-written files.
            pyCloud.uploadfile(files=[os.path.join(localSyncFolderConfig.folder, filename)], path=cloudSyncFolderConfig.tmpfolder, progresshash='0')
            LoggerFactory.getLogger().info("-> move to=" + localSyncFolderConfig.backupfolder)
            Utils.moveToFolder(filename, localSyncFolderConfig.folder, localSyncFolderConfig.backupfolder)
            LoggerFactory.getLogger().info("-> rename from=" + cloudSyncFolderConfig.tmpfolder + "," + "to=" + cloudSyncFolderConfig.folder)
            pyCloud.renamefile(path=cloudSyncFolderConfig.tmpfolder + '/' + filename,
                               topath=cloudSyncFolderConfig.folder + '/' + filename)
    except Exception:
        LoggerFactory.getLogger().error(sys.exc_info())
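
A caller might wire this up with two small config objects; the attribute names below mirror exactly what the function reads, everything else is hypothetical:

from types import SimpleNamespace

local_cfg = SimpleNamespace(folder='/data/outbox', backupfolder='/data/sent')
cloud_cfg = SimpleNamespace(folder='/sync/in', tmpfolder='/sync/tmp')
upload(local_cfg, cloud_cfg)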
Example #6
import os

from pcloud import PyCloud

# config, flags, paths, logger and the helper functions (get_section_value,
# get_site_object, get_database_command, ssh_cmd, do_rsync, set_archive,
# send_to_pcloud) are module-level context omitted from this snippet.


def main():
    global connection
    user = get_section_value(config, 'pcloud', 'user', '')
    pss = get_section_value(config, 'pcloud', 'pass', '')
    folderid = get_section_value(config, 'pcloud', 'folderid', 0)
    pc = PyCloud(user, pss)

    for section in config.sections():
        site_obj = get_site_object(config, section)

        if 'active' in site_obj and site_obj['active']:
            # If both paths are the same there is no need to move anything.
            if paths['tmp_path'] != paths['archive_root']:
                if 'dry-run' not in flags:
                    cmd_cleanup = "rm -rf %s*" % (paths['tmp_path'])
                    os.system(cmd_cleanup)
                    logger.debug('cleanup ' + cmd_cleanup)
            site_obj['ssh_key'] = paths['pem_path'] + site_obj['ssh_key']
            connection = True
            logger.info("***** Section: " + site_obj['section'])
            if os.path.isfile(site_obj['ssh_key']):
                if 'dry-run' in flags:
                    ssh_cmd(site_obj, ['uname -a', 'readlink -f .'])

                # Build the database dump commands and send them over SSH.
                cmds = get_database_command(config, site_obj)
                logger.debug(cmds)
                if 'dry-run' not in flags:
                    ssh_cmd(site_obj, cmds)
                # `connection` is set each time ssh_cmd runs; if it finished
                # without error, do all the rsync work.

                if connection:
                    logger.info('Successful connection to %s@%s' % (site_obj['ssh_user'], site_obj['ssh_host']))
                    for remote in site_obj['ssh_remote']:
                        cmd = do_rsync(site_obj, paths, remote)
                        logger.debug(cmd)
                        try:
                            if 'dry-run' not in flags:
                                os.system(cmd)
                        except Exception:
                            logger.error('rsync failed ' + cmd)

                    if 'no-archive' not in flags:
                        cmds = set_archive(site_obj, paths, 'restore-point' in flags)
                        logger.debug(cmds)
                        if 'dry-run' not in flags:
                            for cmd in cmds:
                                os.system(cmd)
                    if (paths['tmp_path'] != paths['archive_root']) and 'no-archive' not in flags:
                        send_to_pcloud(site_obj, paths['tmp_path'], pc, folderid)

            else:
                logger.error('Could not find SSH key')
Example #7
import os
import sys
import urllib.request

from pcloud import PyCloud

# CloudConfig, LoggerFactory and Utils are project helpers omitted from this snippet.


def download(localSyncFolderConfig, cloudSyncFolderConfig, backup):
    cloudConfig = CloudConfig()

    try:
        pyCloud = PyCloud(cloudConfig.username, cloudConfig.password)

        # Make sure the remote sync folder exists before listing it.
        pyCloud.createfolderifnotexists(path=cloudSyncFolderConfig.folder)
        listFolderinfo = pyCloud.listfolder(path=cloudSyncFolderConfig.folder)

        metadata = listFolderinfo["metadata"]
        contents = metadata["contents"]
        backupfolder = os.path.join(localSyncFolderConfig.backupfolder,
                                    Utils.timestamp())
        for file in contents:
            if not file["isfolder"]:
                filename = file["name"]
                LoggerFactory.getLogger().info("filename=" + filename + "," +
                                               "from=" +
                                               cloudSyncFolderConfig.folder)
                filelink = pyCloud.getfilelink(
                    path=cloudSyncFolderConfig.folder + "/" + filename)
                downloadlink = 'https://' + filelink["hosts"][0] + filelink[
                    "path"]
                LoggerFactory.getLogger().info("-> download to=" +
                                               localSyncFolderConfig.tmpfolder)
                urllib.request.urlretrieve(
                    downloadlink,
                    os.path.join(localSyncFolderConfig.tmpfolder, filename))
                if backup:
                    LoggerFactory.getLogger().info(
                        "-> backup from=" + localSyncFolderConfig.tmpfolder +
                        "," + "to=" + backupfolder)
                    Utils.copyToFolder(filename,
                                       localSyncFolderConfig.tmpfolder,
                                       backupfolder)
                LoggerFactory.getLogger().info(
                    "-> rename from=" + localSyncFolderConfig.tmpfolder + "," +
                    "to=" + localSyncFolderConfig.folder)
                os.rename(
                    os.path.join(localSyncFolderConfig.tmpfolder, filename),
                    os.path.join(localSyncFolderConfig.folder, filename))
                LoggerFactory.getLogger().info("-> delete from=" +
                                               cloudSyncFolderConfig.folder)
                pyCloud.deletefile(path=cloudSyncFolderConfig.folder + "/" +
                                   filename)
    except Exception:
        LoggerFactory.getLogger().error(sys.exc_info())
Example #8
import re
import sys

from pcloud import PyCloud

# Logger and cfg are module-level context omitted from this snippet.

new_event = sys.argv[1]

# Name validity check
if not re.match('^[a-z0-9_]+$', new_event):
    Logger.error(__name__, "Name Error, must contain only lower case letters, numbers, underscores")
    sys.exit(0)
else:
    Logger.success(__name__, "Valid Name Entered")


# Log in to pCloud and check whether a folder already exists for the new event.

with open(cfg.get("upload__password_file"), 'r') as f:
    usr = f.readline().rstrip()
    psswd = f.readline().rstrip()
pcloud = PyCloud(usr, psswd)

new_event_pc_path = cfg.get("upload__pcloud_path") + "/" + new_event

# Walk the remote path one component at a time, starting from the root folder (id 0).
folder_split = list(filter(None, new_event_pc_path.split("/")))
folder_walk = [0]
for idx, i in enumerate(folder_split):
    resp = pcloud.listfolder(folderid=folder_walk[-1])
    dir_list = resp['metadata']['contents']
    match = False
    last = idx == len(folder_split) - 1
    for j in dir_list:
        if j['name'] == i and j['isfolder']:
            folder_walk.append(j['folderid'])
            match = True
            if last:
Example #9
import json

from boto3.dynamodb.conditions import Attr
from feedgen.feed import FeedGenerator
from pcloud import PyCloud

# table, s3_client, create_summary, decimal_default and the LOGO_URL / RSS_URL /
# PCLOUD_* / BUCKET_NAME constants are module-level setup omitted from this snippet.


def lambda_handler(event, context):
    # obtain all entries in database
    response = table.scan(
        FilterExpression=Attr('episode_int').gte(1)
    )

    # save object with the items themselves
    items = response['Items']

    items_sorted = sorted(items, key=lambda i: i['episode_int'])

    # set up overall feed metadata
    fg = FeedGenerator()

    # general feed params
    fg.id('https://r-podcast.org')
    fg.title('Residual Snippets')
    fg.author( {'name':'Eric Nantz', 'email':'*****@*****.**'})
    fg.link(href='https://r-podcast.org', rel='alternate' )
    fg.logo(LOGO_URL)
    fg.subtitle('Musings on R, data science, linux, and life')
    fg.link( href=RSS_URL, rel='self')
    fg.language('en')

    fg.load_extension('podcast')

    # podcast-specific params
    fg.podcast.itunes_category('Technology')
    fg.podcast.itunes_author('Eric Nantz')
    fg.podcast.itunes_explicit('no')
    fg.podcast.itunes_owner('Eric Nantz', '*****@*****.**')
    fg.podcast.itunes_summary('Residual Snippets is an informal, unedited, and free-flowing audio podcast from Eric Nantz.  If you enjoy hearing quick takes from a data scientist on their journey to blend innovative uses of open-source technology, contributing back to their brilliant communities, and juggling the curveballs life throws at them, this podcast is for you!')
    
    for item in items_sorted:
        fe = fg.add_entry()
        fe.title(item['episode_title'])
        fe.author({'name': 'Eric Nantz', 'email': '*****@*****.**'})
        fe.enclosure(url=item['episode_url'], type='audio/mpeg')

        # process description before adding to feed
        ep_desc = create_summary(item['episode_summary'])
        fe.description(ep_desc)
 
    # populate xml file for RSS feed    
    feed_string = fg.rss_str(pretty=True)
    fg.rss_file('/tmp/residual_snippets.xml', pretty=True)
    
    # upload xml feed to pcloud and s3
    pc = PyCloud(PCLOUD_USERNAME, PCLOUD_PASS)
    pc.uploadfile(data=feed_string, filename='residual_snippets.xml', folderid=PCLOUD_FOLDER_ID)

    #upload_file("/tmp/residual_snippets.xml", BUCKET_NAME, object_name = 'residual_snippets.xml')
    s3_client.upload_file("/tmp/residual_snippets.xml", BUCKET_NAME, 'residual_snippets.xml')
    
    # create export of dynamodb and upload to s3
    # obtain all entries in database
    response2 = table.scan(
        FilterExpression=Attr('episode_int').gte(1)
    )

    # save object with the items themselves
    items2 = response2['Items']

    items2_sorted = sorted(items2, key = lambda i: i['episode_int'])

    db_export = "/tmp/dbexport.json"
    with open(db_export, "w") as f:
        f.write(json.dumps(items2_sorted, indent=2, default=decimal_default))
    
    # upload to s3 bucket (boto3's upload_file raises on failure rather than returning a status)
    s3_client.upload_file(db_export, BUCKET_NAME, 'dbexport.json')
    
    return {
        'statusCode': 200,
        'body': json.dumps('Hello from Lambda!')
    }
Example #11
import sys
import time
from collections import namedtuple
from multiprocessing import Manager, Process

import putiopy
from pcloud import PyCloud
from sh import aria2c, rm  # shell commands wrapped by the sh library

# PCLOUD_USER/PCLOUD_PASS, PUTIO_KEY, TMPDIR and the filename() path helper are
# module-level setup omitted here; FileEntry is reconstructed from its usage below.
FileEntry = namedtuple('FileEntry', ['name', 'download_link'])


class PutIOpCloudSyncer(object):
    def __init__(self):
        manager = Manager()
        self.pcloud = PyCloud(PCLOUD_USER, PCLOUD_PASS)
        self.putio = putiopy.Client(PUTIO_KEY)
        self.download_list = manager.list()
        self.upload_list = manager.list()
        self.files_left = manager.Value(1, 0)
        self.destination = None

    def download(self, file):
        print('Download of {} started'.format(file.name))
        aria2c('-d', TMPDIR, '-o', file.name, '--continue=true', '-x3',
               file.download_link)
        print('Download finished')

    def upload(self, file):
        print('Starting upload of {}'.format(file.name))
        self.pcloud.uploadfile(path=self.destination,
                               files=['{}/{}'.format(TMPDIR, file.name)])
        print('Finished upload')

    def cleanup(self, file):
        print('Removing local copy of {}'.format(file.name))
        rm(filename(file))
        print('Removed successfully')

    def process_folder(self, folder):
        files = folder.dir() if folder.file_type == 'FOLDER' else [folder]
        for file in files:
            self.enqueue_file(file)
        print("Files to sync: {}".format(self.files_left.get()))
        uploader = Process(target=self.file_uploader)
        downloader = Process(target=self.file_downloader)
        uploader.start()
        downloader.start()
        uploader.join()
        downloader.join()
        # fs = {
        #     executor.submit(self.file_uploader): "uploader",
        #     executor.submit(self.file_downloader): "downloader"
        # }
        # for future in concurrent.futures.as_completed(fs):
        #     tag = fs[future]
        #     print("Result of {}: ".format(tag, future.result()))

    def enqueue_file(self, file):
        self.download_list.append(
            FileEntry(file.name, file.get_download_link()))
        self.files_left.set(self.files_left.get() + 1)

    def list_paths(self):
        file_list = self.putio.File.list()
        folders = [x for x in file_list if x.name in ('Serien', 'Filme')]
        for path in [folder for fs in folders for folder in fs.dir()]:
            print(path.name, path.id)

    def filter_paths(self, name):
        file_list = self.putio.File.list()
        folders = [x for x in file_list if x.name in ('Serien', 'Filme')]
        files = [file for folder in folders for file in folder.dir()]
        file = list(filter(lambda x: x.name.startswith(name), files))
        if file:
            if len(file) > 1:
                print("More than 1 possible folder", file)
                sys.exit(1)
            return file[0]
        print('No Matching file')
        sys.exit(1)

    def file_downloader(self):
        print("File Downloader started")
        while self.download_list:
            file = self.download_list.pop()
            self.download(file)
            self.upload_list.append(file)
        print("File Downloader stopped")

    def file_uploader(self):
        print("File Uploader started")
        while self.files_left.get():
            print('Files left to upload: {}'.format(self.files_left.get()))
            print('Files to upload in queue: {}'.format(len(self.upload_list)))
            while not self.upload_list:
                print('Waiting for something to upload...')
                time.sleep(10)
            while self.upload_list:
                file = self.upload_list.pop()
                self.upload(file)
                self.cleanup(file)
                self.files_left.set(self.files_left.get() - 1)
        print("File Uploader stopped")

    def sync(self):
        if sys.argv[1] == 'list':
            self.list_paths()
        elif sys.argv[1] == 'filter':
            path = self.filter_paths(sys.argv[2])
            print('Selected Path: {}'.format(path.name))
        elif sys.argv[1] == 'sync':
            path = self.filter_paths(sys.argv[2])
            print('Selected Path: {}'.format(path.name))
            self.destination = sys.argv[3]
            self.process_folder(path)
            print("Started downloader and uploader")
Example #13
import urllib2

import pygame
import qrcode
import requests
import tinyurl  # URL-shortener helper used by register_photodb
from pcloud import PyCloud
from Queue import Queue
from threading import Thread

# State, Logger and the cfg/gd/gpio objects come from the surrounding
# photo-booth project and are omitted from this snippet.


class Upload(State):
    def __init__(self, cfg, gd, w, h, fps, gpio):
        State.__init__(self, cfg, gd, w, h, fps, gpio)
        self.pcloud_path = self.cfg.get(
            "upload__pcloud_path") + "/" + self.cfg.get("event_name")
        self.public_link = self.pcloud_path.replace(
            "/Public Folder", self.cfg.get("upload__pcloud_url"))
        self.timeout = self.cfg.get("upload__timeout")

        self.gen_upload_bar()
        self.gen_upload_menu()
        self.gen_upload_info()

        if self.cfg.get("upload__enabled"):
            # If upload is enabled, test the internet connection; if that fails, disable uploading.
            Logger.info(__name__,
                        "Upload Enabled, Testing Internet Connection...")
            if not self.test_internet():
                Logger.warning(
                    __name__,
                    "Internet Connection Failed! disabling upload...")
                cfg.set("upload__enabled", False)
                return
            else:
                Logger.success(__name__, "Internet Connection Success!")

            # The internet connection is good, so attempt to connect to pCloud; if that fails, disable uploading.
            Logger.info(__name__, "Connecting to pCloud...")
            if self.pcloud_test_conn():
                Logger.success(__name__, "pCloud Connection Success")
                self.pcloud_upload_folder = self.pcloud_get_uploadfolderid(
                    self.pcloud_path)
                Logger.info(
                    __name__,
                    "pCloud Folder ID - {0}".format(self.pcloud_upload_folder))
            else:
                Logger.error(__name__, "pCloud Connection Failed")
                self.cfg.set("upload__enabled", False)
            return

    def test_internet(self):
        # Try up to 120 times (1 s timeout each) before giving up.
        for _ in range(120):
            try:
                urllib2.urlopen('http://google.com', timeout=1)
                return True
            except urllib2.URLError:
                pass
        return False

    def pcloud_login(self):
        # The password file holds the username on line 1 and the password on line 2.
        with open(self.cfg.get("upload__password_file"), 'r') as f:
            usr = f.readline().rstrip()
            psswd = f.readline().rstrip()
        self.pCloud = PyCloud(usr, psswd, self.timeout)

    def pcloud_get_uploadfolderid(self, pPath):
        folder_split = filter(None, pPath.split("/"))
        folder_walk = [0]
        for idx, i in enumerate(folder_split):
            resp = self.pCloud.listfolder(folderid=folder_walk[-1])
            dir_list = resp['metadata']['contents']
            match = False
            for j in dir_list:
                if j['name'] == i and j['isfolder']:
                    folder_walk.append(j['folderid'])
                    match = True
                    break
            if not match:
                raise Exception(
                    "pCloud folder walk: Could not find \"{0}\"".format(i))
        return folder_walk[-1]

    def pcloud_uploadfiles(self, pc, fid, f, ret_q):
        failed = True
        slinks = []
        files = [f['primary']] + [f['primary_thumb']] + f['secondary']
        # Retry the upload up to three times, re-testing the connection between tries.
        for _ in range(3):
            try:
                resp = pc.uploadfile(files=files, folderid=fid)
                failed = False
                break
            except Exception:
                Logger.warning(__name__, "Upload Error, retrying connection")
                if not self.pcloud_test_conn():
                    break
        if failed:
            ret_q.put(None)
            return
        else:
            names = [
                str(i['name']) for i in resp['metadata']
                if "thumb" not in i['name']
            ]
            failed = len(names)
            for name in names[::-1]:  # reverse the list
                ll = self.public_link + '/' + name
                primary = name.count(".") == 1
                sl = self.register_photodb(name, ll, primary)
                if sl is not None:
                    failed -= 1
                    if primary:
                        slinks.append(sl)
            if failed == 0:
                ret_q.put(slinks[0])
            else:
                ret_q.put(None)
            return

    def pcloud_test_conn(self):
        for _ in range(3):
            try:
                self.pcloud_login()
                return True
            except Exception:
                Logger.warning(__name__,
                               "pCloud Connection Error, retrying connection")
        return False

    def register_photodb(self, photo_name, photo_link, photo_primary):
        primary = 1 if photo_primary else 0
        for _ in range(3):
            try:
                r = None
                url = "{0}/rest/{1}/{2}".format(
                    self.cfg.get("upload__photodb_url"),
                    self.cfg.get("event_name"), primary)
                req = {'photo_name': photo_name, 'photo_link': photo_link}
                r = requests.post(url, json=req, timeout=self.timeout)

                if r.status_code != 200:
                    raise Exception(
                        "photodb POST gave response code {0}".format(
                            r.status_code))

                link = "{0}/gallery/{1}/{2}".format(
                    self.cfg.get("upload__photodb_url"),
                    self.cfg.get("event_name"),
                    photo_name.split(".")[0])
                return tinyurl.shorten(link, "")
            except Exception:
                if r is None:
                    Logger.warning(__name__,
                                   "photodb register timeout, retrying...")
                else:
                    Logger.warning(
                        __name__,
                        "photodb register error {0}, retrying...".format(
                            r.status_code))
        return None

    def gen_upload_bar(self):
        img = pygame.image.load(self.cfg.get("display__upload_icon"))
        ratio = 0.40
        shrink = (int(img.get_size()[0] * ratio),
                  int(img.get_size()[1] * ratio))
        self.upload_img = pygame.transform.scale(img, shrink)

        film_h = 300
        surf = pygame.Surface((self.disp_w + 10, film_h), pygame.SRCALPHA)
        surf.fill((40, 40, 40))

        #Create Film strip Holes
        y1 = 15
        y2 = surf.get_size()[1] - 2 * y1
        x = 0
        while x < surf.get_size()[0]:
            for i in range(0, y1):
                for j in range(0, y1):
                    surf.set_at((x + j, y1 + i), (255, 255, 255, 0))
                    surf.set_at((x + j, y2 + i), (255, 255, 255, 0))
            x += 2 * y1

        self.upload_bar = surf
        self.upload_bar_pos = (0, ((self.disp_h - film_h) / 2))
        self.upload_bar_txt_pos = (50, ((self.disp_h - film_h) / 2) + 20)
        self.upload_bar_img_pos = (1200, (self.disp_h - film_h) / 2)

    def gen_upload_menu(self, next_str="Print"):
        #Film Strip Background
        surf = pygame.Surface((400, self.disp_h + 200), pygame.SRCALPHA)
        surf.fill((40, 40, 40))

        #Create Film strip Holes
        x1 = 15
        x2 = surf.get_rect().size[0] - 2 * x1
        y = 0
        while y < surf.get_rect().size[1]:
            for i in range(0, x1):
                for j in range(0, x1):
                    surf.set_at((x1 + j, y + i), (255, 255, 255, 0))
                    surf.set_at((x2 + j, y + i), (255, 255, 255, 0))
            y += 2 * x1

        #Rotate Film Strip
        surf = pygame.transform.rotozoom(surf, 10, 1)

        #Create Info Text
        font = pygame.font.Font(self.cfg.get("display__font"), 100)
        radius = 90
        l0 = font.render(next_str, 1, (255, 255, 255))
        surf.blit(l0, (150, 220))
        #Generate Gradient Button Image
        for i in [float(x) / 20 for x in range(10, 21)]:
            pygame.draw.circle(surf, (71 * i, 211 * i, 59 * i), (282, 425),
                               radius)
            radius -= 2

        self.upload_menu = surf
        self.upload_menu_pos = (-50, -100)

    def gen_upload_info(self):
        film_h = 200
        surf = pygame.Surface((self.disp_w + 10, film_h), pygame.SRCALPHA)
        surf.fill((40, 40, 40))

        #Create Film strip Holes
        y1 = 15
        y2 = surf.get_size()[1] - 2 * y1
        x = 0
        while x < surf.get_size()[0]:
            for i in range(0, y1):
                for j in range(0, y1):
                    surf.set_at((x + j, y1 + i), (255, 255, 255, 0))
                    surf.set_at((x + j, y2 + i), (255, 255, 255, 0))
            x += 2 * y1

        self.upload_info_pos = (0, self.disp_h - 250)
        self.upload_info = []
        info_bars_txt = self.cfg.get("display__upload_instrs")

        for txt in info_bars_txt:
            bar = surf.copy()
            font = pygame.font.Font(self.cfg.get("display__font"), 75)
            t = font.render(txt, 1, (255, 255, 255))
            bar.blit(t, (250, 50))
            self.upload_info.append(bar)

    def gen_qr(self, link):
        qr_path = self.cfg.get("tmp_dir") + '/tmpqr.png'
        qr = qrcode.QRCode(version=1,
                           error_correction=qrcode.constants.ERROR_CORRECT_L,
                           box_size=10,
                           border=4)
        qr.add_data(link)
        qr.make(fit=True)

        img = qr.make_image(fill_color="black", back_color="transparent")
        img.save(qr_path)
        return qr_path

    def start(self, photo_set):
        self.gpio.set('green_led', 0)
        self.gpio.set('red_led', 0)
        self.gameDisplay.fill((200, 200, 200))

        self.photo_set = photo_set

        self.ani_q_img_push(self.upload_bar, self.upload_bar_pos, 0.1, False,
                            False)
        self.ani_q_txt_push("Uploading....", (255, 255, 255), 200,
                            self.upload_bar_txt_pos, 0.1, False)
        self.ani_q_cmd_push("UPLOAD")
        self.next()

    def state_cmd(self, item):
        if item['cmd'] == 'UPLOAD':
            self.pcloud_upload_q = Queue()
            #print "pCloud Starting Upload"
            self.pcloud_upload = Thread(target=self.pcloud_uploadfiles,
                                        args=(self.pCloud,
                                              self.pcloud_upload_folder,
                                              self.photo_set,
                                              self.pcloud_upload_q))
            self.pcloud_upload.start()
            self.ani_q_cmd_push("UPLOADWAIT")

        elif item['cmd'] == 'UPLOADWAIT':
            if self.pcloud_upload.is_alive():  # while uploading, continue the animation
                self.ani_q_img_push(self.upload_img, self.upload_bar_img_pos,
                                    0.9, True, False, False)
                self.ani_q_img_push(self.upload_bar, self.upload_bar_pos, 0.1,
                                    False)
                self.ani_q_txt_push("Uploading....", (255, 255, 255), 200,
                                    self.upload_bar_txt_pos, 0.1, False)
                self.ani_q_cmd_push("UPLOADWAIT")
            else:
                result = self.pcloud_upload_q.get()
                if result is None:
                    self.upload_link = self.cfg.get("event_url")
                    Logger.info(__name__,
                                "pCloud Upload Failed, saving link as album")
                else:
                    self.upload_link = result
                    Logger.info(
                        __name__, "pCloud Upload Complete - {0}".format(
                            self.upload_link))

                self.ani_q_cmd_push("UPLOADQR")

        elif item['cmd'] == 'UPLOADQR':
            self.gpio.set('green_led', 1)
            self.gpio.set('red_led', 0)
            qr_path = self.gen_qr(self.upload_link)
            self.gameDisplay.fill((200, 200, 200))
            qr_img = pygame.image.load(qr_path)
            qr_pos = (((self.disp_w - qr_img.get_size()[0]) / 2) + 200,
                      ((self.disp_h - qr_img.get_size()[1]) / 2) - 175)

            if self.cfg.get("printer__enabled"):
                pass
            else:
                self.gen_upload_menu("Finish")

            link_pos = (((self.disp_w) / 2) - 100, ((self.disp_h) / 2))
            self.ani_q_img_push(qr_img, qr_pos, 0.1, False)
            self.ani_q_txt_push(self.upload_link, (40, 40, 40), 75, link_pos,
                                0.1, False)
            self.ani_q_img_push(self.upload_menu, self.upload_menu_pos, 0.1,
                                False)
            self.ani_q_cmd_push("COMPLETE")
            self.ani_q_cmd_push("UPLOADINFO")

        elif item['cmd'] == 'UPLOADINFO':
            for info in self.upload_info:
                self.ani_q_img_push(info,
                                    self.upload_info_pos,
                                    0.4,
                                    True,
                                    forceSurfaceAlpha=False)
                self.ani_q_pause_push(5)

            self.ani_q_cmd_push("UPLOADINFO")

    def reset(self):
        State.reset(self)
        self.photo_set = {}
        self.pcloud_upload = None
        self.upload_link = None
import json
import urllib.request

from boto3.dynamodb.conditions import Attr
from feedgen.feed import FeedGenerator
from pcloud import PyCloud

# transcribe, table, s3, upload_file, create_summary, decimal_default and the
# BUCKET_NAME / PCLOUD_* constants are module-level setup omitted from this snippet.


def lambda_handler(event, context):

    # job_name will be the same as the key column (episode_id) in database
    job_name = event['detail']['TranscriptionJobName']
    print(job_name)
    job = transcribe.get_transcription_job(TranscriptionJobName=job_name)
    uri = job['TranscriptionJob']['Transcript']['TranscriptFileUri']
    print(uri)

    content = urllib.request.urlopen(uri).read().decode('UTF-8')

    print(json.dumps(content))

    data = json.loads(content)

    text = data['results']['transcripts'][0]['transcript']

    # update episode_summary in database for this record
    response = table.update_item(Key={'episode_id': job_name},
                                 UpdateExpression="set episode_summary = :r",
                                 ExpressionAttributeValues={':r': text},
                                 ReturnValues="UPDATED_NEW")

    # add text file with transcript to s3 bucket
    transcript_obj = s3.Object(BUCKET_NAME, job_name + '-asrOutput.txt')
    transcript_obj.put(Body=text)

    # obtain all entries in database
    response = table.scan(FilterExpression=Attr('episode_int').gte(1))

    # save object with the items themselves
    items = response['Items']

    items_sorted = sorted(items, key=lambda i: i['episode_int'])

    # set up overall feed metadata
    fg = FeedGenerator()

    # general feed params
    fg.id('https://r-podcast.org')
    fg.title('Residual Snippets')
    fg.author({'name': 'Eric Nantz', 'email': '*****@*****.**'})
    fg.link(href='https://r-podcast.org', rel='alternate')
    fg.logo('http://rsnippets.show.s3.amazonaws.com/residual_snippets.png')
    fg.subtitle('Musings on R, data science, linux, and life')
    fg.link(
        href=
        'https://filedn.com/lXHQDOYF1yHVL1Tsc38wxx7/site/residual_snippets.xml',
        rel='self')
    fg.language('en')

    fg.load_extension('podcast')

    # podcast-specific params
    fg.podcast.itunes_category('Technology')
    fg.podcast.itunes_author('Eric Nantz')
    fg.podcast.itunes_explicit('no')
    fg.podcast.itunes_owner('Eric Nantz', '*****@*****.**')
    fg.podcast.itunes_summary(
        'Residual Snippets is an informal, unedited, and free-flowing audio podcast from Eric Nantz.  If you enjoy hearing quick takes from a data scientist on their journey to blend innovative uses of open-source technology, contributing back to their brilliant communities, and juggling the curveballs life throws at them, this podcast is for you!'
    )

    for item in items_sorted:
        fe = fg.add_entry()
        fe.title(item['episode_title'])
        fe.author({'name': 'Eric Nantz', 'email': '*****@*****.**'})
        fe.enclosure(url=item['episode_url'], type='audio/mpeg')

        # process description before adding to feed
        ep_desc = create_summary(item['episode_summary'])
        fe.description(ep_desc)

    # populate xml file for RSS feed
    feed_string = fg.rss_str(pretty=True)
    fg.rss_file('/tmp/residual_snippets.xml', pretty=True)

    # upload xml feed to pcloud and s3
    pc = PyCloud(PCLOUD_USERNAME, PCLOUD_PASS)
    pc.uploadfile(data=feed_string,
                  filename='residual_snippets.xml',
                  folderid=PCLOUD_FOLDER_ID)

    upload_file("/tmp/residual_snippets.xml",
                BUCKET_NAME,
                object_name='residual_snippets.xml')

    # create export of dynamodb and upload to s3
    # obtain all entries in database
    response2 = table.scan(FilterExpression=Attr('episode_int').gte(1))

    # save object with the items themselves
    items2 = response2['Items']

    items2_sorted = sorted(items2, key=lambda i: i['episode_int'])

    db_export = "/tmp/dbexport.json"
    with open(db_export, "w") as f:
        f.write(json.dumps(items2_sorted, indent=2, default=decimal_default))

    # upload to s3 bucket
    success = upload_file(db_export, BUCKET_NAME, object_name='dbexport.json')
Example #15
 def _get_api_object(self) -> PyCloud:
     return PyCloud(self.config["email"], self.config["password"])