# Esempio n. 1 (example separator from scrape)
def encrypt(filename, password, iv):
    # Encrypt *filename* with AES-CBC into "<filename>.aes".
    # Output layout: 8-byte little-endian plaintext size, the 16-byte IV,
    # then the ciphertext.  Python 2 code (print statements, str-as-bytes).
    # NOTE(review): `salt`, `KDF`, `AES`, `os`, `struct` and `progressbar`
    # come from module scope -- confirm against the full file.
    chucksize = 64 * 1024  # read/encrypt in 64 KiB chunks
    out_filename = filename + ".aes"
    iv = iv[:16]  # AES-CBC requires exactly a 16-byte IV

    # Derive a 16-byte AES key from the password (PBKDF1, 1000 iterations).
    secure_key = KDF.PBKDF1(password, salt, 16, count=1000)

    encryptor = AES.new(secure_key, AES.MODE_CBC, iv)

    filesize = os.path.getsize(filename)
    progress = progressbar.AnimatedProgressBar(end=filesize, width=50)

    with open(filename, 'rb') as infile:
        with open(out_filename, 'wb') as outfile:
            # Store the plaintext size so decrypt() can strip the padding.
            outfile.write(struct.pack('<Q', filesize))
            outfile.write(iv)

            while True:
                chunk = infile.read(chucksize)
                if len(chunk) == 0:
                    # we are finished
                    break
                elif len(chunk) % 16 != 0:
                    # Pad the last chunk to the AES block size with spaces.
                    # NOTE(review): a str pad only works on Python 2 where
                    # str is bytes; on Python 3 this would raise TypeError.
                    chunk += ' ' * (16 - len(chunk) % 16)

                outfile.write(encryptor.encrypt(chunk))
                # '+' advances the bar in place in this progressbar library
                progress + len(chunk)
                progress.show_progress()
            print ""
            print "Done!"
def get_all_image_links():
    """Retrieve all image links of images available on simpledesktops.com.

    Follows the "next page" link starting at /browse/1/ until no further
    page exists, collecting the download links found on each page.

    Returns:
        A flat list of image download URLs.
    """
    # The site had ~47 browse pages; the bar end is a best-effort guess.
    pbar = progressbar.AnimatedProgressBar(end=47, width=50)
    print("getting all links for image urls")
    pbar.show_progress()
    img_dl_links = []
    next_link = "/browse/1/"
    while next_link:
        fh = requests.get("%s%s" % (BASE_URL, next_link), allow_redirects=False)
        # image_links_from_page returns (links_on_page, next_page_link)
        result = image_links_from_page(fh.text)
        fh.close()
        # extend() keeps the list flat, so the original's separate
        # append-then-flatten pass is no longer needed.
        img_dl_links.extend(result[0])
        next_link = result[1]
        pbar += 1
        pbar.show_progress()
    # finish the progress-bar line
    print()
    return img_dl_links
# Esempio n. 3 (example separator from scrape)
def interface(input_file, output_file, specie):
    """
     the interface of dochap.
     input:
     input_file: path to input file
     output_file: path to the output file
     specie: string of the specie (must be one from conf.py)
     output:
     output_file: the path to the output file
    """
    print('parsing {}...'.format(input_file))
    transcripts = parse_gtf(input_file)
    print('assigning domains to exons...')
    bar = progressbar.AnimatedProgressBar(end=len(transcripts), width=10)
    for transcript_id, exons in transcripts.items():
        transcripts[transcript_id] = assign_gtf_domains_to_exons(
            transcript_id, exons, specie)
        bar += 1
        bar.show_progress()
    # one extra tick so the bar reaches 100% before moving on
    bar += 1
    bar.show_progress()
    # NOTE(review): the original built a filtered list of (name, data) pairs
    # with truthy data ("to_write") but then dumped the *unfiltered* dict, so
    # the filter was dead code and has been removed.  If filtering empty
    # transcripts was intended, dump the filtered dict instead -- TODO confirm.
    with open(output_file, 'w') as f:
        f.write(json.dumps(transcripts))
    return output_file
# Esempio n. 4 (example separator from scrape)
def main():
    """
     takes argv
     usage - will be printed upon calling the script
    """
    # argv layout: [script, specie, inputfile, outputfile]
    if len(sys.argv) < 4:
        print('inteface.py <specie> <inputfile> <outputfile>')
        sys.exit(2)
    specie = sys.argv[1]
    input_file = sys.argv[2]
    output_file = sys.argv[3]
    print('parsing {}...'.format(input_file))
    transcripts = parse_gtf(input_file)
    print('assigning domains to exons...')
    bar = progressbar.AnimatedProgressBar(end=len(transcripts), width=10)
    for transcript_id, exons in transcripts.items():
        transcripts[transcript_id] = assign_gtf_domains_to_exons(
            transcript_id, exons, specie)
        bar += 1
        bar.show_progress()
    # one extra tick so the bar reaches 100% before moving on
    bar += 1
    bar.show_progress()
    new_visualizer.visualize(transcripts)
    # maybe call visualizer here? instead of writing to json file
    print('writing transcripts to file...')
    with open(output_file, 'w') as f:
        f.write(json.dumps(transcripts))
    # NOTE(review): a stray unterminated ''' string literal followed the
    # return in the original (a scrape artifact) and has been removed,
    # along with dead commented-out debug prints.
    return
def ftpDownlaod(firmware, region):
    """Download *firmware* for *region* over the module-level FTP connection,
    extract it into ./firmware/, record it in config.ini, then return to the
    menu via wayChooser().

    NOTE(review): `ftp`, `ftpPath`, `config` and `wayChooser` are module-level
    names; the misspelled function name is kept for caller compatibility.
    """
    ftp.cwd(ftpPath + "/" + region)
    ftp.voidcmd('TYPE I')  # binary transfer mode
    print("Wait for Downloading")
    # start from a clean firmware/ directory
    if os.path.exists("firmware"):
        shutil.rmtree('firmware')
    os.makedirs("firmware")
    filesize = ftp.size(firmware)
    progress = progressbar.AnimatedProgressBar(end=filesize, width=50)
    with open("firmware/" + firmware, 'wb') as f:

        def callback(chunk):
            f.write(chunk)
            # '+' advances the bar in place in this progressbar library
            progress + len(chunk)
            progress.show_progress()

        ftp.retrbinary("RETR " + firmware, callback)
    print("Downloading finished")
    # context manager guarantees the archive handle is closed even if
    # extraction raises (the original open/close pair leaked on error)
    with zipfile.ZipFile('firmware/' + firmware, "r") as z:
        z.extractall("firmware")
    if os.path.isfile('firmware/' + firmware):
        os.remove('firmware/' + firmware)
    config['YotaPhoneFlasher']['currentFirmware'] = firmware
    with open('config.ini', 'w') as configfile:
        config.write(configfile)
    wayChooser()
def download_images(img_dl_links, PATH):
    """Download images and remove downloaded images from img_links.

    Each URL in *img_dl_links* is fetched and saved under *PATH* using the
    last path component as the file name; update_config_file() records each
    completed download.
    """
    end = len(img_dl_links)
    pbar = progressbar.AnimatedProgressBar(end=end, width=50)
    print('downloading wallpapers..')
    pbar.show_progress()
    for img_url in img_dl_links:
        tmp = requests.request('get', img_url)
        # BUGFIX: write the raw response bytes in binary mode.  The original
        # opened the file in text mode and wrote str(tmp.content), which
        # stored the Python repr of a bytes object (b'...') and corrupted
        # every downloaded image.
        with open(os.path.join(PATH, img_url.split('/')[-1]), 'wb') as f:
            f.write(tmp.content)
        update_config_file(img_url)
        time.sleep(1)  # be polite to the server
        pbar += 1
        pbar.show_progress()
    print()
# Esempio n. 7 (example separator from scrape)
def parse_proteins(specie):
    """Parse protein.gbk of *specie* with SeqIO and return its records.

    Equivalent to ``[r for r in SeqIO.parse(gbk_file, "genbank")]`` but
    kept as an explicit loop so the progress bar can tick per record.
    """
    print ("parsing protein.gbk of {}".format(specie))
    # length as of 2016 of mouse -- used only to size the progress bar
    expected_total = 76216
    progress = progressbar.AnimatedProgressBar(end=expected_total,width=10)
    progress+1
    parsed = []
    for entry in SeqIO.parse(gbk_file.format(specie),"genbank"):
        parsed.append(entry)
        # '+' advances the bar in place in this progressbar library
        progress+1
        progress.show_progress()
    print()
    print("done")
    return parsed
# Esempio n. 8 (example separator from scrape)
def load_db_data(user_data, specie):
    """Load the db exon lists for every transcript named in *user_data*.

    input:
     user_data: dictionary created from the output file by parse_output_file
     specie: name of the specie from which to take the transcripts

    Each transcript's exon list is fetched from the species database and
    appended in place to that transcript's record list.
    """
    print("loading db exons...")
    progress = progressbar.AnimatedProgressBar(end=len(user_data), width=10)
    progress += 1
    progress.show_progress()
    for transcript_id, record in user_data.items():
        record.append(
            domains_to_exons.get_exons_by_transcript_id(transcript_id, specie))
        progress += 1
        progress.show_progress()
    print("\ndone")
# Esempio n. 9 (example separator from scrape)
def download_ftp_data(address, username, password, files):
    """Download and unpack a list of gzipped files from an FTP server.

    input:
     address: FTP host name
     username, password: FTP credentials
     files: iterable of (remote_path, local_path) pairs; each remote file is
            saved as "<local_path>.gz", gunzipped to local_path, and then
            post-processed.

    NOTE(review): `ask_me_every_time`, `ftp_prompt`, `ftp_skipping_prompt`,
    `gunzip` and `progressbar` come from module scope -- confirm.
    """
    print('connecting to: ', address, '...')
    ftp = ftplib.FTP(address)
    print('logging in...')
    ftp.login(username, password)
    for file in files:
        os.makedirs(os.path.dirname(file[1]), exist_ok=True)
        if ask_me_every_time:
            user_input = input(ftp_prompt.format(file[0]))
            if user_input.lower() != 'y':
                print(ftp_skipping_prompt.format(file[0]))
                continue
        print('downloading: ', file[0], '...')
        ftp.sendcmd("TYPE i")  # binary mode so size()/RETR work on .gz files
        size = ftp.size(file[0])
        p_bar = progressbar.AnimatedProgressBar(end=size, width=10)
        with open(file[1] + '.gz', 'wb') as f:

            def callback(chunk):
                f.write(chunk)
                # '+' advances the bar in place in this progressbar library
                p_bar + len(chunk)
                p_bar.show_progress()

            ftp.retrbinary("RETR " + file[0], callback)
            # force the bar to 100% once the transfer is complete
            p_bar + size
            p_bar.show_progress()
        print()
        print('extracting...')
        gunzip(file[1] + '.gz', '-f')
        # add \ to \t because backward compatability is important
        with open(file[1], 'r') as f:
            content = f.read()
        # BUGFIX: str.replace returns a new string; the original discarded
        # the result, so the tab escaping never actually happened.
        content = content.replace('\t', '\\t')
        with open(file[1], 'w') as f:
            f.write(content)
        print('done')
# Esempio n. 10 (example separator from scrape)
def load_and_visualize(data, specie):
    """
     create svgs from already-parsed transcript data.
     input:
     data: dict of transcript data to draw (values are passed to
           visualizer.visualize_transcript)
     specie: specie to visualize
     output:
     target_folder: path to the folder containing the new svgs
    """
    # unique output folder per run: <specie>/<uuid4>
    target_folder = str(specie) + '/' + str(uuid.uuid4())
    print("creating svgs...")
    bar = progressbar.AnimatedProgressBar(end=len(data), width=10)
    bar += 1
    bar.show_progress()
    # only the values are used; the original enumerated .items() and
    # ignored both the index and the key
    for item in data.values():
        bar += 1
        bar.show_progress()
        visualizer.visualize_transcript(target_folder, item)
    print('\ndone.')
    return target_folder
# Esempio n. 11 (example separator from scrape)
def centre(gro_file, nojump_xtc_file, output_xtc, centre_type='CoM', skip=0):
    progressBar = progressbar.AnimatedProgressBar(end=100, width=75)
    progressBar.show_progress()

    u = MDAnalysis.Universe(gro_file, nojump_xtc_file)

    progressBar + 2
    progressBar.show_progress()

    allAtoms = u.selectAtoms("all")

    # open a stream to write the frames of the XTC file to
    numAtoms = allAtoms.numberOfAtoms()
    FileWriter = MDAnalysis.coordinates.core.writer(filename=output_xtc,
                                                    numatoms=numAtoms)

    progressBar + 3
    progressBar.show_progress()
    progressBar.show_progress()
    progressPerFrame = 95.0 / (u.trajectory.numframes)

    for ts in u.trajectory[::skip]:

        progressBar + progressPerFrame
        progressBar.show_progress()

        if centre_type == 'CoM':
            allAtoms.translate(-allAtoms.centerOfMass())
        elif centre_type == 'CoG':
            allAtoms.translate(-allAtoms.centerOfGeometry())
        FileWriter.write(allAtoms)

    FileWriter.close()
    print 'File writing finished'

    return
# Esempio n. 12 (example separator from scrape)
def decrypt(filename, password):
    # Decrypt a file produced by encrypt(): read the 8-byte little-endian
    # plaintext size, the 16-byte IV, then the AES-CBC ciphertext, and write
    # the plaintext (truncated back to its original size) to the input path
    # minus its extension.  Python 2 code (print statements).
    # NOTE(review): `salt`, `KDF`, `AES`, `os`, `struct` and `progressbar`
    # come from module scope; `salt` must match the one used by encrypt().
    chucksize = 64 * 1024  # decrypt in 64 KiB chunks
    secure_key = KDF.PBKDF1(password, salt, 16, count=1000)
    out_filename = os.path.splitext(filename)[0]

    with open(filename, 'rb') as infile:
        # header written by encrypt(): original size, then the IV
        origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]
        iv = infile.read(16)
        decryptor = AES.new(secure_key, AES.MODE_CBC, iv)
        filesize = os.path.getsize(filename)
        progress = progressbar.AnimatedProgressBar(end=filesize, width=50)

        with open(out_filename, 'wb') as outfile:
            while True:
                chunk = infile.read(chucksize)
                if len(chunk) == 0:
                    break
                outfile.write(decryptor.decrypt(chunk))
                # '+' advances the bar in place in this progressbar library
                progress + len(chunk)
                progress.show_progress()

            # drop the space padding added by encrypt()
            outfile.truncate(origsize)
        print ""
        print "Done!"
# Esempio n. 13 (example separator from scrape)
def main(specie):
    # Build the per-exon domain table for *specie*: read every distinct
    # transcript id from the aliases table, run assign_and_get over them in
    # a thread pool, flatten the variant/exon results into row tuples and
    # hand them to write_to_db.
    # NOTE(review): `lite`, `conf`, `ThreadingPool`, `num_threads`,
    # `assign_and_get`, `write_to_db`, `partial`, `json`, `time` and
    # `progressbar` come from module scope -- confirm against the full file.
    print("loading {} data...".format(specie))
    with lite.connect(conf.databases[specie]) as con:
        cursor = con.cursor()
        cursor.execute("SELECT DISTINCT transcript_id from aliases")
        result = cursor.fetchall()
    # TODO -  remve boundry fr names
    # fetchall returns 1-tuples; unwrap to plain ids
    names = [value[0] for value in result]
    print("creating transcript database for {}".format(specie))
    # give this thing a progress bar
    # `bar` is global so code elsewhere (presumably assign_and_get) can
    # tick it -- TODO confirm
    global bar
    bar = progressbar.AnimatedProgressBar(end=len(names) + 1, width=10)
    pool = ThreadingPool(num_threads)
    assign_and_get_with_specie = partial(assign_and_get, specie)
    # async map; poll once a second and redraw the bar until all done
    result = pool.amap(assign_and_get_with_specie, names)
    while True:
        if result.ready():
            break
        bar.show_progress()
        time.sleep(1)
    data = list(result.get())
    #print(data)
    # dark magic incoming
    # flatten the list
    # make it a list of tuples of id,index,domainlist
    # TODO Change to deal with new variant data coming from assign_and_get
    # variant has name,variant_index,domains,exons
    # TODO maybe no dark magic
    # TODO check that it is actually runs
    '''
    the following dark magic is one line of this:
    for variants in data:
        if not variants:
            continue
        for variant in variants:
            if not variant:
                continue
            for exon in variant['exons']:
                if not exon:
                    continue
            pass
    '''
    # one row per exon: (name, name_variantindex, transcript_id, index,
    # relative_start, relative_end, domains_states json, domains json),
    # skipping None variants-lists, variants and exons
    data = [
            (
            variant['name'],
            '_'.join([variant['name'],str(variant['variant_index'])]),\
            exon['transcript_id'],\
            exon['index'],\
            exon['relative_start'],\
            exon['relative_end'],\
            json.dumps(exon['domains_states']),\
            json.dumps(exon['domains'])\
            ) \
            for variants in data if variants != None \
            for variant in variants if variant != None \
            for exon in variant['exons'] if exon!= None
            ]
    print('new_data: \n', data)
    #    print("well that was fun, now exit")
    #    sys.exit(2)
    write_to_db(data, specie)
    print()
# Esempio n. 14 (example separator from scrape)
def download(time_point, ftp, remote_dir, work_dir):
    # Download a gene_association file from the uniprot-goa FTP mirror.
    #
    # time_point: 'current' for the newest file, or '<month>_<year>' (e.g.
    #             'jan_2016') to select a dated release
    # ftp: a connected ftplib.FTP handle
    # remote_dir: remote directory to cwd into
    # work_dir: local directory to save into
    #
    # Returns (download_status, filename):
    #   1  = downloaded, -1 = already present locally, 0 = nothing found.
    # On status 0 the user is prompted (recursively) for another time point.
    # Python 2 code (print statement, raw_input).

    filename = ''
    file_list = []
    download_status = 0
    filename = ''
    file_found = 0

    ftp.cwd(remote_dir)
    # collect the raw `dir` listing lines; parsed below by whitespace split
    ftp.dir('.', file_list.append)

    if time_point == 'current':
        for i in file_list:
            terms = i.split(' ')
            if not terms[-1].startswith('gene_association'):
                continue
            unzipped_fname = terms[-1].replace('.gz', '')
            # already present (zipped or unzipped, in work_dir or cwd):
            # remember the name and mark as "already downloaded"
            if os.path.exists(work_dir + '/' + terms[-1]):
                filename = terms[-1]
                download_status = -1
                continue
            elif os.path.exists('./' + terms[-1]):
                filename = terms[-1]
                download_status = -1
                continue
            elif os.path.exists(work_dir + '/' + unzipped_fname):
                filename = unzipped_fname
                download_status = -1
                continue
            elif os.path.exists('./' + unzipped_fname):
                filename = unzipped_fname
                download_status = -1
                continue

            filename = terms[-1]
            filesize = ftp.size(filename)
            file_found = 1

        if download_status == 0 and file_found == 1:
            # NOTE(review): on Python 2, filesize / 1024000000 is integer
            # division, so this is almost always 0.0; only used by the
            # commented-out size warning below.
            modified_filesize = float(filesize / 1024000000)
            #if modified_filesize > 1.0:
            #   print 'This is a ' + str(modified_filesize) + ' GB file. Downloading it might take a while. Please be patient!!!'
            local_filename = os.path.join(work_dir + '/' + filename)
            progress = progressbar.AnimatedProgressBar(start=0,
                                                       end=filesize,
                                                       width=50)
            print 'Downloading ' + filename
            with open(local_filename, 'w') as outfile:

                def callback(block):
                    outfile.write(block)
                    # '+' advances the bar in place
                    progress + len(block)
                    progress.show_progress()

                ftp.retrbinary('RETR ' + filename, callback, 81920)
            download_status = 1

    else:
        # dated release: match listing month/year columns against time_point
        for i in file_list:
            [month, year] = time_point.split('_')
            month = month.capitalize()
            terms = i.split(' ')
            if not terms[-1].startswith('gene_association'):
                continue
            # NOTE(review): terms[18]/terms[21] rely on the exact column
            # spacing of this server's `dir` output -- fragile; confirm.
            if (terms[18] == month) and (terms[21] == year):
                file_found = 1
                unzipped_fname = terms[-1].replace('.gz', '')
                if os.path.exists(work_dir + '/' + terms[-1]):
                    filename = terms[-1]
                    download_status = -1
                    continue
                elif os.path.exists(work_dir + '/' + unzipped_fname):
                    filename = unzipped_fname
                    download_status = -1
                    continue

                # several files may match the month/year; keep the one with
                # the highest version number (third dotted field)
                if filename:
                    version_num = filename.split('.')[2]
                    if terms[-1].split('.')[2] > version_num:
                        filename = terms[-1]
                        filesize = ftp.size(filename)
                else:
                    filename = terms[-1]
                    filesize = ftp.size(filename)

        if download_status == 0 and file_found == 1:
            # NOTE(review): same Python 2 integer-division issue as above.
            modified_filesize = float(filesize / 1024000000)
            #if modified_filesize > 1.0:
            #   print 'This is a ' + str(modified_filesize) + ' GB file. Downloading it might take a while. Please be patient!!!'

            local_filename = os.path.join(work_dir + '/' + filename)
            progress = progressbar.AnimatedProgressBar(start=0,
                                                       end=filesize,
                                                       width=50)

            print 'Downloading ' + filename
            with open(local_filename, 'w') as outfile:

                def callback(block):
                    outfile.write(block)
                    # '+' advances the bar in place
                    progress + len(block)
                    progress.show_progress()

                ftp.retrbinary('RETR ' + filename, callback, 81920)
            download_status = 1

    if download_status == 1:
        print '\nSuccesfully downloaded files from uniprot-goa'
    elif download_status == -1:
        print '\n' + time_point + ' file to be downloaded already present in working directory'
    else:
        # nothing matched: let the user retry with another time point
        print '\nThere is no file to be downloaded for time point ' + time_point
        time_point = raw_input(
            'Do you want to provide a different time point (either provide a time point or say n) : '
        )
        if (re.match('[a-zA-Z]+\_\d+', time_point)) or (re.match(
                'current', time_point)):
            [download_status, filename] = download(time_point, ftp, remote_dir,
                                                   work_dir)
        elif time_point == 'n':
            sys.exit(1)

    return download_status, filename
# Esempio n. 15 (example separator from scrape)
    # NOTE(review): orphaned function body -- the enclosing `def` line was
    # lost by the scrape.  Presumably takes file_name/bucket/source/
    # destination (or reads them from module scope) and uses a boto3 S3
    # `client` plus helpers FrameCapture and zip_dir -- TODO confirm against
    # the full file.

    #Download Video from AWS
    client.download_file(bucket, source + '{}.mp4'.format(file_name), '{}.mp4'.format(file_name))
    print('{}.mp4 downloaded to local'.format(file_name))

    #Video to Frames
    FrameCapture(file_name)
    print('{}.mp4 converted to frames'. format(file_name))

    #Create Zip
    zip_dir(file_name)

    #Upload to AWS w/ Progress Bar
    filesize = os.stat('{}.zip'.format(file_name)).st_size
    print("\nuploading {}.zip | size: {}".format(file_name, filesize))
    up_progress = progressbar.AnimatedProgressBar(end=filesize, width=50)
    def upload_progress(chunk):
        # '+' advances the bar in place in this progressbar library
        up_progress + chunk
        up_progress.show_progress()
    client.upload_file('{}.zip'.format(file_name), bucket, '{}.zip'.format(destination), Callback=upload_progress)
    print("\n{}.zip uploaded to S3 bucket:{} path:{}.zip".format(file_name, bucket, destination))

    #Clean Up Local Directory
    clean = input("Clean Up Directory? (y/n): ")
    if clean == 'y':
        os.remove('{}.mp4'.format(file_name))
        os.remove('{}.zip'.format(file_name))
        shutil.rmtree(file_name)
        shutil.rmtree('__pycache__')
    else:
        exit()
# Esempio n. 16 (example separator from scrape)
def main():
    """Demo: animate a progress bar over FILESIZE bytes in CHUNK-sized steps."""
    bar = progressbar.AnimatedProgressBar(end=FILESIZE, width=50)

    for _offset in range(0, FILESIZE, CHUNK):
        # '+' advances the bar in place in this progressbar library
        bar + CHUNK
        bar.show_progress()
# Esempio n. 17 (example separator from scrape)
# --- top-level script: interactively refresh per-specie data over FTP ---
# NOTE(review): `ftp`, `conf`, `extract_path`, `readme_file`, `os`, `pathlib`
# and `progressbar` are defined earlier in the file (not visible here).
print ("logging in...")
ftp.login()
prompt = 'Update {} data? (y/N): '
skipping_prompt = 'Skipping {}'
for specie in conf.species:
    # ask before touching each specie's data; anything but 'y' skips
    user_input = input(prompt.format(specie))
    if user_input.lower() != 'y':
        print(skipping_prompt.format(specie))
        continue
    formatted_extract_path = extract_path.format(specie)
    print ("Creating directory {} ".format(formatted_extract_path))
    pathlib.Path(formatted_extract_path).mkdir(parents=True, exist_ok=True)
    print ("downloading {} data".format(specie))
    ftp.sendcmd("TYPE i")  # binary mode so ftp.size() works
    readme_size = ftp.size(readme_file.format(specie))
    readme_progress = progressbar.AnimatedProgressBar(end=readme_size,width=10)
    # readme_old is the marker from the previous run used to detect changes
    if os.path.isfile(formatted_extract_path+ "readme_old"):
        print("readme_old found, checking againts existing database...")
    else:
        print("readme_old not found, downloading new database...")
    print ("downloading readme_file...")
    with open(formatted_extract_path+"readme_new",'w') as f:
        # NOTE(review): retrlines strips line endings before invoking the
        # callback, so readme_new is written without newlines between lines
        # -- TODO confirm this is intended.
        def callback(chunk):
            f.write(chunk)
            # '+' advances the bar in place in this progressbar library
            readme_progress + len(chunk)
            readme_progress.show_progress()
        ftp.retrlines("RETR " + readme_file.format(specie),callback)
        # push the bar to 100% once the transfer is complete
        readme_progress+readme_size
        readme_progress.show_progress()
    # empty line
    print()