Example #1
import os

# pr_dbg() and KEY_FILE come from the surrounding module.
def read_passwd_key_from_file(path=None):
    # Locate and read the key file.
    if path:
        if not os.path.exists(path):  # an explicit path must exist.
            pr_dbg('error: %s does not exist' % path)
            return None, None
    else:
        # Check the current folder for the key file first.
        if os.path.exists(os.path.join(os.getcwd(), KEY_FILE)):
            path = os.path.join(os.getcwd(), KEY_FILE)
        else:  # fall back to the KEY_FILE environment variable.
            path = os.getenv("KEY_FILE")
            if not path:
                pr_dbg('error: KEY_FILE is not set in the environment.')
                return None, None
            if not os.path.exists(path):  # env path does not exist.
                pr_dbg('error: env KEY_FILE path %s does not exist' % path)
                return None, None
    # Read the password and key digests from the file.
    with open(path) as fd:
        passwd_key = fd.read()
    if not passwd_key:
        pr_dbg('error: key file is empty.')
        return None, None
    # First 32 characters: password MD5; remainder: key MD5.
    passwd_md5 = passwd_key[:32]
    key_md5 = passwd_key[32:]
    return passwd_md5, key_md5
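For reference, a minimal sketch of a writer that produces the layout this reader expects; the helper name is hypothetical, not from the source:

import hashlib

def write_passwd_key_to_file(passwd, key, path):
    # Hypothetical helper: MD5 hex digests are exactly 32 characters,
    # so the reader above can split the file contents at index 32.
    passwd_md5 = hashlib.md5(passwd.encode()).hexdigest()
    key_md5 = hashlib.md5(key.encode()).hexdigest()
    with open(path, 'w') as fd:
        fd.write(passwd_md5 + key_md5)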
Example #2
import argparse
import os
import urllib.request

import tinys3


def main():
    parser = argparse.ArgumentParser(description='S3 File Downloader')
    parser.add_argument(
        '--s3-bucket',
        help=('S3 bucket name (defaults to $S3_UPLOAD_BUCKET)'),
        default=os.environ.get('S3_UPLOAD_BUCKET'))
    parser.add_argument(
        '--s3-region',
        help=('S3 region (defaults to $S3_UPLOAD_REGION)'),
        default=os.environ.get('S3_UPLOAD_REGION'))
    parser.add_argument(
        '--s3-username',
        help=('S3 username (defaults to $S3_UPLOAD_USERNAME)'),
        default=os.environ.get('S3_UPLOAD_USERNAME'))
    parser.add_argument(
        '--s3-key',
        help=('S3 access key (defaults to $S3_UPLOAD_ACCESSKEY)'),
        default=os.environ.get('S3_UPLOAD_ACCESSKEY'))
    parser.add_argument(
        '--s3-secret',
        help=('S3 secret (defaults to $S3_UPLOAD_SECRET)'),
        default=os.environ.get('S3_UPLOAD_SECRET'))
    parser.add_argument(
        '--destdir',
        help='Destination directory.')
    parser.add_argument(
        'package', metavar='PACKAGE',
        help='Package name and version to download.')

    args = parser.parse_args()

    if args.s3_region:
        endpoint = 's3-{}.amazonaws.com'.format(args.s3_region.lower())
    else:
        endpoint = 's3.amazonaws.com'

    conn = tinys3.Connection(
        access_key=args.s3_key,
        secret_key=args.s3_secret,
        default_bucket=args.s3_bucket,
        tls=True,
        endpoint=endpoint,
    )

    files = []

    for entry in conn.list(args.package):
        files.append(entry['key'])

    destdir = args.destdir or os.getcwd()

    for key in files:
        print('Downloading {}...'.format(key))
        url = 'https://{}/{}/{}'.format(endpoint, args.s3_bucket, key)
        target = os.path.join(destdir, key)
        urllib.request.urlretrieve(url, target)

    return 0
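A minimal entry point for running this as a script, in case one is needed (a sketch):

import sys

if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())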
Example #3
def img(self):
    # Locate the QR-code <img> element inside the qr-inner container.
    img_element = self.driver.find_element_by_xpath(
        "//div[@class='qr-inner']/img")
    if img_element is not None:
        # Save the image data.
        img_url = img_element.get_attribute('src')
        data = urllib.request.urlopen(img_url).read()
        filename = '%d.png' % int(time.time())
        img_dir = os.path.join(os.getcwd(), 'img')
        os.makedirs(img_dir, exist_ok=True)
        with open(os.path.join(img_dir, filename), 'wb') as f:
            f.write(data)
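Note that the find_element_by_* helpers were removed in Selenium 4; under Selenium 4 the same lookup would use a By locator (a sketch, with driver assumed to exist):

from selenium.webdriver.common.by import By

img_element = driver.find_element(By.XPATH, "//div[@class='qr-inner']/img")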
Example #4
# Arguments: 
#   * channel_group: the channel group index to process
#   * filename: the filename of the KWIK file
#   * params: a dictionary with all KK parameters

import os
import shutil
import tempfile
from spikedetekt2.dataio import Experiment

# get the basename (filename without the extension)
basename = os.path.splitext(filename)[0]

# Create a temporary working folder where we're going to run KK.
tmpdir = tempfile.mkdtemp()
curdir = os.getcwd()
os.chdir(tmpdir)

# Create the filenames of the .fet and .fmask files to create.
filename_fet = os.path.join(tmpdir, basename + '.fet')
filename_fmask = os.path.join(tmpdir, basename + '.fmask')
filename_clu = os.path.join(tmpdir, basename + '.clu')

with Experiment(filename) as exp:  # Open in read-only, close the file at the end of the block
    # Load all features and masks in memory.
    # WARNING: this might consume too much RAM ==> may need to be done in chunks.
    fm = exp.channel_groups[channel_group].spikes.features_masks[:]
    # fm is a Nspikes x Nfeatures x 2 array (features AND masks)
    fet = fm[:, :, 0]
    fmask = fm[:, :, 1]
    # Convert to .fet and .fmask.
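    # A minimal sketch of the conversion promised above, assuming the
    # classic KlustaKwik text formats: a header line giving the feature
    # count, then one space-separated row per spike. The integer scaling
    # of the features (x1000) is an assumption, not from the source.
    n_features = fet.shape[1]
    with open(filename_fet, 'w') as f:
        f.write('%d\n' % n_features)
        for row in fet:
            f.write(' '.join('%d' % round(x * 1000) for x in row) + '\n')
    with open(filename_fmask, 'w') as f:
        f.write('%d\n' % n_features)
        for row in fmask:
            f.write(' '.join('%g' % x for x in row) + '\n')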
Example #5
import os

print(os.getcwd())
Example #6
def save_time(self):
    print(os.getcwd())
    with open("pump_controller_last_time.txt", "w") as f:
        f.write(str(self.last_level_time))
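A matching loader would restore the value on startup; a minimal sketch (the method name and error handling are assumptions, not from the source):

def load_time(self):
    # Hypothetical counterpart to save_time(): restore last_level_time.
    try:
        with open("pump_controller_last_time.txt") as f:
            self.last_level_time = float(f.read().strip())
    except (OSError, ValueError):
        # No saved time yet, or unreadable contents; keep the current value.
        pass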
Example #7
# Import the os module.
import os

# Common operations on files and directories (names are placeholders)
os.getcwd()                     # get the current working directory
os.mkdir("newdir")              # create a new directory in the current path
os.listdir()                    # list files in the current directory
os.stat("newdir")               # get stats of a file or directory
os.rename("newdir", "renamed")  # rename a file or directory
os.rmdir("renamed")             # remove a directory

# Copying, moving, renaming, and deleting data
os.chdir("/tmp")                 # change directory
os.makedirs("/tmp/test1/test2")  # create nested directories ~ mkdir -p

import shutil
shutil.copytree("test", "test-copy")         # copy a tree recursively ~ cp -R
shutil.move("test-copy", "test-copy-moved")  # move/rename a directory
shutil.rmtree("test-copy-moved")             # remove a directory tree ~ rm -r
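For comparison, a sketch of the same kinds of operations using the standard-library pathlib module (directory names are placeholders):

from pathlib import Path

cwd = Path.cwd()             # current working directory
new = cwd / "newdir"
new.mkdir()                  # create a directory
list(cwd.iterdir())          # list entries in the directory
new.stat()                   # stat a file or directory
new.rename(cwd / "olddir")   # rename
(cwd / "olddir").rmdir()     # remove an empty directory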
Example #8
import os

path = os.getcwd()

print(path)  # print the path again
print("Update 2")

# Func1, Func2, and Func4 are assumed to be defined elsewhere.
Func1()
Func2()
Func4()
Example #9
# Just loads data from the Data folder.
# The data was created using an R script.

import os
os.getcwd()
fin = open("")