# --- Copy file listings from an S3 bucket folder into a new Dataloop dataset ---

# New dataset name defaults to today's date (MM-DD-YYYY).
new_dataset_name = date.today().strftime("%m-%d-%Y")
s3_bucket_name = args.s3_bucket_name  # e.g. "dataloop-annotations"
path_in_bucket = args.path_in_bucket  # e.g. "elyakim"
# BUG FIX: host was ".s3amazonaws.com" (missing dot), which is not a valid
# S3 endpoint — virtual-hosted-style URLs are "<bucket>.s3.amazonaws.com".
# The typo'd name "bucektURL" is kept in case later (unseen) code uses it.
bucektURL = "https://" + s3_bucket_name + ".s3.amazonaws.com/"
dl_dest = args.new_dataset_name

# Connect to S3.
s3 = boto3.client("s3")

# List every object under the prefix ONCE (the original issued the same
# list_objects call twice, doubling the network round-trips).
# NOTE(review): list_objects returns at most 1000 keys per call; switch to
# a paginator / list_objects_v2 if the folder can be larger — TODO confirm.
contents = s3.list_objects(Bucket=s3_bucket_name,
                           Prefix=path_in_bucket)["Contents"]
objectFiles = [obj["Key"] for obj in contents]
objectURLs = [bucektURL + key for key in objectFiles]

''' ======== upload to dataloop ============================ '''

# Connect to Dataloop and fetch the target project.
dl.login()
project = dl.projects.get(project_name="Elbit")

# create new project in dataloop, with today date:
# List all datasets already in the project.
datasets = project.datasets
datasets.list().print()
def maybe_login(env):
    """Point the Dataloop SDK at *env*, logging in first if needed.

    Tries ``dl.setenv(env)`` directly; if that raises (typically because
    there is no active session yet), performs an interactive ``dl.login()``
    and retries the environment switch.

    Args:
        env: Dataloop environment name, e.g. ``'dev'`` or ``'prod'``.
    """
    try:
        dl.setenv(env)
    except Exception:
        # Was a bare `except:` — that also swallowed SystemExit and
        # KeyboardInterrupt. Exception keeps the retry-after-login
        # behavior while letting interpreter-control exceptions through.
        dl.login()
        dl.setenv(env)
def maybe_login():
    """Point the Dataloop SDK at the 'dev' environment, logging in on demand.

    NOTE(review): this redefines ``maybe_login`` (declared earlier in the
    file with an ``env`` parameter) with a different signature — the later
    definition wins at import time, so callers can no longer choose an
    environment. Consider merging the two into
    ``def maybe_login(env='dev')`` and deleting one.
    """
    try:
        dl.setenv('dev')
    except Exception:
        # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
        # are not swallowed; any SDK failure still triggers a login retry.
        dl.login()
        dl.setenv('dev')