def list_outputs(self):
    """Return the names of the top-level directories in the output bucket.

    Runs ``rclone lsd`` against ``s3:/<bucket>/`` and parses one directory
    name per output line.

    :returns: list of directory names (str).
    :raises ValueError: if the rclone command exits with a non-zero code.
    """
    path = 's3:/{}/'.format(self.bucket)
    result = rclone.with_config(self.config).run_cmd('lsd', [path])
    if result['code'] != 0:
        raise ValueError('failed to list output bucket: {}'.format(result))
    out = result['out'].decode('utf8')
    # lsd prints "<size> <date> <time> <count> <name>"; the name is the last
    # whitespace-separated token.  Skip blank lines, which would otherwise
    # raise IndexError on the [-1] lookup.  (Names containing spaces would
    # still be truncated -- original behavior, unchanged.)
    return [line.split()[-1] for line in out.splitlines() if line.strip()]
Example #2
0
 def test_copy_lsjson_and_delete(self):
     """Round-trip a file: copy it in, list it with lsjson, delete it, re-list."""
     src = "local:" + os.getcwd() + "/README.md"
     with tempfile.TemporaryDirectory() as tmp:
         target = "local:" + tmp
         # copy README.md into the temporary directory
         copy_res = rclone.with_config(self.cfg).copy(src, target)
         self.assertEqual(copy_res.get('code'), 0)
         self.assertEqual(copy_res.get('out'), b'')
         # lsjson should now report exactly the copied file
         ls_res = rclone.with_config(self.cfg).lsjson(target)
         self.assertEqual(ls_res.get('code'), 0)
         listing = json.loads(ls_res.get('out').decode("utf-8"))
         self.assertGreater(len(listing), 0)
         self.assertEqual(listing[0].get('Path'), 'README.md')
         self.assertFalse(listing[0].get('IsDir'))
         # delete the copy again
         del_res = rclone.with_config(self.cfg).delete(target + "/README.md")
         self.assertEqual(del_res.get('code'), 0)
         # a second lsjson must show an empty directory
         ls_res = rclone.with_config(self.cfg).lsjson(target)
         self.assertEqual(ls_res.get('code'), 0)
         listing = json.loads(ls_res.get('out').decode("utf-8"))
         self.assertEqual(len(listing), 0)
Example #3
0
def sync(source: Path = source_dir, dest: str = OSS_DEST):
    """Sync the local *source* directory to the *dest* remote with rclone.

    The rclone config template is read from ``rclone.conf`` and filled in
    with the module-level credential/endpoint settings before use.
    """
    import rclone

    with open('rclone.conf') as cf:
        template = cf.read()
    config = template.format(
        access_key_id=access_key_id,
        secret_access_key=secret_access_key,
        endpoint=endpoint,
        provider=provider,
        name=name,
        type=_type,
    )
    logging.info(f'syncing {source.absolute()} {dest}')
    result = rclone.with_config(config).sync(source=str(source.absolute()), dest=dest)
    logging.info(result)
    def store_all(self, src_dir, ignore=None):
        """Sync every test-run directory under *src_dir* to the S3 bucket.

        :param src_dir: path to local dir containing multiple test run dirs, e.g. ./output
        :param ignore: optional list of subdirectory names to skip
        """
        # Avoid the mutable-default-argument pitfall (was ``ignore=[]``);
        # an empty tuple behaves identically for the ``in`` test below.
        if ignore is None:
            ignore = ()
        for f in os.listdir(src_dir):
            if f in ignore:
                continue
            src = os.path.join(src_dir, f)
            dest = 's3:/{}/{}'.format(self.bucket, f)

            print('syncing {} to {}'.format(src, dest))
            result = rclone.with_config(self.config).sync(src, dest)
            if result['code'] != 0:
                # Best-effort: report the failure and keep syncing the rest.
                print('error storing {}: {}'.format(f, result['error']),
                      file=sys.stderr)
Example #5
0
def move_to_drive_loop():
    """Background loop that evicts stale images from the local cache to Drive.

    Every 10 seconds, scan the image db:
      * 'cached' entries older than MAX_SECONDS_BEFORE_DRIVE lose their local
        file (a Drive copy is assumed to exist) and are re-tagged 'drive';
      * 'local' entries whose file vanished are dropped from the db; stale
        ones are copied to Drive first and only removed locally when the
        rclone copy succeeds.
    """
    while True:
        # Snapshot the keys: entries may be deleted below, and deleting from
        # a dict while iterating its keys view raises RuntimeError in Py3.
        for img_name in list(db.keys()):
            img_data = db[img_name]
            # total_seconds() gives the full age; the .seconds attribute only
            # holds the sub-day remainder and wraps every 24 hours.
            oldness = (datetime.now() - img_data['accessed_at']).total_seconds()

            if img_data['location'] == 'cached':
                if oldness > MAX_SECONDS_BEFORE_DRIVE:
                    if os.path.isfile(f'{IMAGES_PATH}/{img_name}'):
                        os.remove(f'{IMAGES_PATH}/{img_name}')
                    db[img_name] = {'location': 'drive', 'accessed_at': img_data['accessed_at']}
            elif img_data['location'] == 'local':
                if not os.path.isfile(f'{IMAGES_PATH}/{img_name}'):
                    # Source file disappeared out from under us; forget it.
                    del db[img_name]
                else:
                    if oldness > MAX_SECONDS_BEFORE_DRIVE:
                        res = rclone.with_config(RCLONE_CONFIG).run_cmd(command="copy", extra_args=[f'local:{IMAGES_PATH}/{img_name}', f'{RCLONE_DRIVE_NAME}:{DRIVE_IMAGES_PATH}'])
                        if res['code'] == 0:
                            db[img_name] = {'location': 'drive', 'accessed_at': img_data['accessed_at']}
                            os.remove(f'{IMAGES_PATH}/{img_name}')
        time.sleep(10)
Example #6
0
    def drive_backup(self, drive_dir=None):
        """Back up to Google Drive: dump the DB, tar the provision dir,
        rclone-copy the archive to the GGD1 remote, record the outcome,
        then delete the local archive."""
        self.append_log(self.log, '--- Backup to Google Drive')
        self.backup_db()
        archive = self.compress_provision_dir('/home/kusanagi/')

        # Read the host's rclone configuration.
        with open('/root/.config/rclone/rclone.conf', 'rt') as f:
            cfg = f.read()
        options = ['--buffer-size=64M', '--log-file=%s' % self.log]
        outcome = rclone.with_config(cfg).copy('%s.tar.gz' % archive,
                                               'GGD1:%s' % drive_dir,
                                               options)
        if int(outcome.get('code')) == 0:
            # Success: flag the Drive backup as completed.
            self.update_backup_record(2, 1)
        else:
            self.update_backup_record(2, 0)
        # Whether the upload worked or not, clean up the local archive.
        os.remove('%s.tar.gz' % archive)
Example #7
0
        # Run one at a time
        # Take the three most recent sfc files for day x.
        # NOTE(review): `fs` and the enclosing loop/try begin above this
        # chunk -- presumably a remote/S3 filesystem client; confirm there.
        files = fs.ls('hrrr/sfc/' + str(x))[-3:]
        #files2 = fs.ls('hrrr/prs/' + str(x))[-3:]
    # Catch in case counter tries to pull non-existant day
    except FileNotFoundError:
        continue
    # Import sfc files
    for file in files:
        #for file in files2:
        # Derive the bare filename (last path component) from the remote
        # path, then download into the local Staging tree.
        item = str(file)
        lst = item.split("/")
        idx = len(lst) - 1
        name = lst[idx]
        path = "Staging/sfc/" + dest + "/" + name
        #path = "Staging/prs/" + dest + "/" + name
        fs.download(file, path)

# Upload files from local dir using rclone
print("Uploading to AWS...")
base = os.getcwd()
# Sync the entire local Staging/sfc tree to the "AWS test" remote.
result = rclone.with_config(cfg).run_cmd(
    command="sync",
    extra_args=[base + "/Staging/sfc", "AWS test:transferfrompando/sfc"])
#result = rclone.with_config(cfg).run_cmd(command="sync", extra_args=[base + "/Staging/prs", "AWS test:transferfrompando/prs"])

# Change dir back to working directory
os.chdir("..")

# Delete medium directory upon completion
shutil.rmtree("Staging")
 def _ensure_rclone_exists(self):
     """Fail fast with a clear error if the rclone binary is not installed.

     python-rclone surfaces the subprocess FileNotFoundError as exit code
     -20, which is what listremotes() returns here in that case.
     """
     probe = rclone.with_config(self.config).listremotes()
     if probe['code'] == -20:
         raise EnvironmentError(
             "the 'rclone' command must be present on the $PATH")
import rclone

# Path to the user's rclone configuration file.
cfg_path = r'/Users/noelnathanieljimenofonseca/.config/rclone/rclone.conf'

with open(cfg_path) as f:
   cfg = f.read()

# Enumerate the remotes defined in the config (result currently unused).
result = rclone.with_config(cfg).listremotes()

#print(result)

#COPY Google drive to S3
# --ignore-checksum: presumably because Drive and S3 expose different hash
# types -- confirm before relying on transfer integrity checks.
rclone.with_config(cfg).run_cmd(command='copy', extra_args=["-v", "--ignore-checksum", "mygoogledrive1:/", "mys3_1:/noelbeerbucket"])

#SYNC Google drive to S3
# rclone.with_config(cfg).run_cmd(command='sync', extra_args=["-v", "--ignore-checksum", "mygoogledrive1:/", "mys3_1:/noelbeerbucket"])

# S3 to Google Drive
# rclone.with_config(cfg).run_cmd(command='sync', extra_args=["-v", "--ignore-checksum", "mys3_1:/noelbeerbucket", "mygoogledrive1:/"])

# print(rclone.with_config(cfg).run_cmd(command='ls', extra_args=["-v", "mys3_1:/noelbeerbucket"]))
Example #10
0
        # Only upload while the alarm system reports itself active.
        if alarm_active():
            if not alarm_logged:
                logging.info('Alarm Active!')
                alarm_logged = 1
            # Drain pending filesystem events.  NOTE(review): `i` is
            # presumably an inotify watcher -- confirm against the setup
            # code above this chunk.
            events = i.event_gen(yield_nones=False, timeout_s=1)
            events = list(events)
            if events:
                for event in events:
                    (_, type_names, path, filename) = event
                    # IN_CLOSE_WRITE means the file has been fully written.
                    if 'IN_CLOSE_WRITE' in type_names:
                        source = path + '/' + filename
                        dest = rclone_remote + path

                        logging.debug('Uploading: {}'.format(source))
                        logging.debug('    to {}'.format(dest))
                        result = rclone.with_config(rconfig).copy(source, dest)
                        if result.get('error'):
                            # Log everything rclone returned; keep watching.
                            logging.error('OUTPUT: {}'.format(
                                result.get('out')))
                            logging.error('CODE: {}'.format(
                                result.get('code')))
                            logging.error('ERROR: {}'.format(
                                result.get('error')))
                        else:
                            uploaded_files += 1
        else:
            # Alarm cleared: report the tally once, then reset the counters.
            if alarm_logged:
                logging.info(
                    "Alarm resolved, uploaded {} files".format(uploaded_files))
                alarm_logged = 0
                uploaded_files = 0
Example #11
0
 def test_execute_with_correct_command(self):
     """_execute on a valid shell command succeeds and captures stdout."""
     outcome = rclone.with_config(self.cfg)._execute(["echo", "123"])
     self.assertEqual(outcome.get('code'), 0)
     self.assertIsNotNone(outcome.get('out'))
Example #12
0
import os
import rclone
from os.path import expanduser
import logging

# Enabled verbose logging
logging.basicConfig(level=logging.DEBUG,
                    format="%(asctime)s %(name)s [%(levelname)s]: %(message)s")

#cfg = """[local]
#type = local
#nounc = true"""

# Load the rclone config from the current directory.
# NOTE(review): expanduser() is a no-op here since the path has no '~'.
with open(expanduser('./rclone.conf')) as config_file:
    config = config_file.read()

# List all top level folders with run_cmd
#result = rclone.with_config(config).run_cmd(command="lsd", extra_args=["remote:"])

# List top level folders using ls
#result = rclone.with_config(config).ls("remote:")

# Copy README.md to root to test
source = "./README.md"
result = rclone.with_config(config).copy(source, "remote:backup-test")
# Print rclone's captured stdout (bytes) from the copy.
print(result.get('out'))
# Perform sync
import rclone

# Path to the user's rclone configuration file.
cfg_path = r'/home/nfonseca/.config/rclone/rclone.conf'

with open(cfg_path) as f:
    cfg = f.read()

# Enumerate the remotes defined in the config (result currently unused).
result = rclone.with_config(cfg).listremotes()

#print(result)

#rclone.with_config(cfg).run_cmd(command='sync', extra_args=["-v", "--ignore-checksum", "mygoogledrive2:/", "mys3:/noel-fonseca-bucket-1"])

# List the bucket contents verbosely and print the raw result dict.
print(
    rclone.with_config(cfg).run_cmd(
        command='ls', extra_args=["-v", "mys3:/noel-fonseca-bucket-1"]))
Example #14
0
 def test_listremoted(self):
     """listremotes() on the test config reports the single 'local' remote."""
     outcome = rclone.with_config(self.cfg).listremotes()
     self.assertEqual(outcome.get('code'), 0)
     self.assertEqual(outcome.get('out'), b'local:\n')
Example #15
0
def upload(local_path: str, remote_uri: str):
    """Upload a local path to a remote URI via rclone copy."""
    target = _to_rclone(remote_uri)
    config = _load_rclone_cfg()
    # -P makes rclone show transfer progress.
    rclone.with_config(config).copy(local_path, target, flags=["-P"])
Example #16
0
def fetch_image_from_drive(img_name):
    """Copy *img_name* from the Drive images folder into the local cache.

    Returns True when the rclone copy succeeds, False on failure or when
    no rclone configuration is available.
    """
    if RCLONE_CONFIG is None:
        return False
    src = f'{RCLONE_DRIVE_NAME}:{DRIVE_IMAGES_PATH}/{img_name}'
    dst = f'local:{IMAGES_PATH}'
    outcome = rclone.with_config(RCLONE_CONFIG).run_cmd(command="copy", extra_args=[src, dst])
    return outcome['code'] == 0
Example #17
0
def download(remote_uri: str, local_path: str):
    """Download a remote URI to a local path via rclone copy."""
    source = _to_rclone(remote_uri)
    config = _load_rclone_cfg()
    # -P makes rclone show transfer progress.
    rclone.with_config(config).copy(source, local_path, flags=["-P"])
Example #18
0
 def test_execute_with_wrong_command(self):
     """_execute on a nonexistent binary yields code -20 and FileNotFoundError."""
     outcome = rclone.with_config(self.cfg)._execute(
         ["command_not_valid", "some", "args"])
     self.assertEqual(outcome.get('code'), -20)
     self.assertIsInstance(outcome.get('error'), FileNotFoundError)