Example #1
def backup(filename, destination="glacier", description=None, **kwargs):
    conf = kwargs.get("conf", None)
    storage_backend = storage_backends[destination](conf)

    log.info("Backing up " + filename)
    arcname = filename.strip('/').split('/')[-1]
    stored_filename = arcname + datetime.now().strftime("%Y%m%d%H%M%S") + ".tgz"
    
    password = kwargs.get("password")
    if not password:
        password = getpass("Password (blank to disable encryption): ")
        if password:
            password2 = getpass("Password confirmation: ")
            if password != password2:
                log.error("Password confirmation doesn't match")
                return

    log.info("Compressing...")
    with tempfile.NamedTemporaryFile(delete=False) as out:
        with tarfile.open(fileobj=out, mode="w:gz") as tar:
            tar.add(filename, arcname=arcname)
        outname = out.name

    if password:
        log.info("Encrypting...")
        encrypted_out = tempfile.NamedTemporaryFile(delete=False)
        encrypt_file(outname, encrypted_out.name, password)
        stored_filename += ".enc"
        os.remove(outname)  # remove non-encrypted tmp file
        outname = encrypted_out.name

    log.info("Uploading...")
    storage_backend.upload(stored_filename, outname)
    os.remove(outname)
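
Taken together, these snippets all follow the same pattern: pack the source into a temporary tar.gz, call encrypt_file(plaintext_path, encrypted_path, password), delete the plaintext temporary file, and only then hand the encrypted artifact to the storage backend. Below is a minimal, self-contained sketch of that pattern, assuming the beefish-style encrypt_file(src, dst, password) used in these examples; the helper name compress_and_encrypt is illustrative, not part of any example.

import os
import tarfile
import tempfile

from beefish import encrypt_file  # same path-based helper the examples rely on


def compress_and_encrypt(path, password):
    """Tar+gzip `path`, encrypt the archive, and return the encrypted file's name."""
    arcname = path.strip('/').split('/')[-1]

    # Write the tar.gz to a named temporary file so encrypt_file can read it by path.
    with tempfile.NamedTemporaryFile(delete=False, suffix=".tgz") as out:
        with tarfile.open(fileobj=out, mode="w:gz") as tar:
            tar.add(path, arcname=arcname)
        plaintext = out.name

    encrypted = plaintext + ".enc"
    encrypt_file(plaintext, encrypted, password)  # encrypt_file(src, dst, password)
    os.remove(plaintext)  # never leave the unencrypted archive behind
    return encrypted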
Example #2
def enc_archive():
  # Encrypt archives after they are written
  global archive
  enc_pass = random(args['encpass'])
  enc_archive = archive + '.enc' 
  c.execute('UPDATE files SET enc_pass=(?) WHERE archive=(?)', (enc_pass,enc_archive,))
  conn.commit()
  encrypt_file(archive, enc_archive, enc_pass)
  os.remove(archive)
Example #3
def enc_archive():
    # Encrypt archives after they are written
    global archive
    enc_pass = random(args['encpass'])
    enc_archive = archive + '.enc'
    c.execute('UPDATE files SET enc_pass=(?) WHERE archive=(?)', (
        enc_pass,
        enc_archive,
    ))
    conn.commit()
    encrypt_file(archive, enc_archive, enc_pass)
    os.remove(archive)
Example #4
 def save_editor(self, request, pk=None, *args, **kwargs):
     file_res = File.objects.get(id=pk)
     decrypt_file(
         os.path.join(settings.MEDIA_ROOT + str(
             crud.get(self.table, "*", 'where id=' +
                      pk)[0].get('owner_id')) + '/' +
                      base64.b16encode(file_res.modified_file_name)),
         os.path.join(settings.MEDIA_ROOT + file_res.modified_file_name),
         '123')
     self.org_data = convert(settings.MEDIA_ROOT +
                             FileSerializer(File.objects.get(
                                 id=pk)).data.get('modified_file_name'))
     os.remove(
         os.path.join(settings.MEDIA_ROOT + file_res.modified_file_name))
     if self.org_data != request.data:
         # Activity log
         request_data = {}
         param = {'field': 'file_id', 'file_id': pk, 'label': 'version'}
         track_fields = {
             c.can_read: c.read,
             c.can_write: c.write,
             c.can_delete: c.delete
         }
         request_data.update({'user_id': request.user.id})
         log_view = LogView()
         log_view.generate_log(request_data, param, "", track_fields)
         f = open(
             settings.MEDIA_ROOT + str(
                 crud.get(self.table, "*",
                          'where id=' + pk)[0].get('owner_id')) + '/' +
             file_res.modified_file_name.split('.')[0] + '.html', 'w')
         f.write(request.data['data'].encode())
         f.close()
         os.chdir(settings.MEDIA_ROOT + str(
             crud.get(self.table, "*", 'where id=' +
                      pk)[0].get('owner_id')))
         os.system('unoconv --format=' + file_res.name.split('.')[-1] +
                   ' ' + settings.MEDIA_ROOT + str(
                       crud.get(self.table, "*", 'where id=' +
                                pk)[0].get('owner_id')) + '/' +
                   file_res.modified_file_name.split('.')[0] + '.html')
         time.sleep(3)
         os.remove(settings.MEDIA_ROOT + str(
             crud.get(self.table, "*", 'where id=' +
                      pk)[0].get('owner_id')) + '/' +
                   file_res.modified_file_name.split('.')[0] + '.html')
         encrypt_file(
             os.getcwd() + '/' + file_res.modified_file_name,
             os.getcwd() + '/' +
             base64.b16encode(file_res.modified_file_name), '123')
         os.remove(os.getcwd() + '/' + file_res.modified_file_name)
     return Response({"hai": 'hai'})
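
Example #4 also exercises the inverse operation: decrypt_file(encrypted_path, plaintext_path, password) restores the plaintext before the document is edited and re-converted. Here is a hedged round-trip check, assuming decrypt_file is the beefish counterpart with the same (src, dst, password) argument order; the file names and password are illustrative.

import filecmp
import os

from beefish import decrypt_file, encrypt_file  # assumed beefish helpers, as in examples #4 and #5

password = "s3cret"
encrypt_file("report.tgz", "report.tgz.enc", password)
decrypt_file("report.tgz.enc", "report.tgz.dec", password)

# The decrypted copy should be byte-identical to the original archive.
assert filecmp.cmp("report.tgz", "report.tgz.dec", shallow=False)
os.remove("report.tgz.dec")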
Example #5
File: archiver.py  Project: jlewallen/ice
def upload_archive(vault, sha1, paths, upload=True, dryRun=False):
  files = paths.files()

  ice.log.info("Have archive (sha1=%s)" % (sha1))
  with tempfile.NamedTemporaryFile(delete=False) as temp_fh:
    ice.log.info("Making archive %s (%s)" % (sha1, temp_fh.name))
    if not dryRun:
      with closing(tarfile.open(fileobj=temp_fh, mode="w")) as tar:
        for file in files:
          tar.add(file, exclude=globally_excluded)
    encrypted_fh = tempfile.NamedTemporaryFile(delete=False)
    temp_fh.seek(0)
    ice.log.info("Encrypting archive %s (%s)" % (sha1, encrypted_fh.name))
    if not dryRun:
      beefish.encrypt_file(temp_fh.name, encrypted_fh.name, BACKUP_PASSWORD)
      encrypted_fh.seek(0)
    if upload:
      ice.log.info("Uploading archive %s (%s)" % (sha1, encrypted_fh.name))
      if not dryRun:
        archive_id = vault.concurrent_create_archive_from_file(encrypted_fh.name, sha1)
    os.remove(temp_fh.name)
    os.remove(encrypted_fh.name)
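
Example #5 wires the same compress/encrypt/upload flow into a Glacier vault and adds upload and dryRun switches so the expensive steps can be skipped. A hedged call sketch follows; the vault, sha1 and paths objects come from the surrounding jlewallen/ice project and are assumed to exist here.

# Rehearsal: log every step but skip creating, encrypting and uploading the archive.
upload_archive(vault, sha1, paths, upload=False, dryRun=True)

# Real run: build the tarball, encrypt it with BACKUP_PASSWORD, upload it to the vault,
# then delete both temporary files.
upload_archive(vault, sha1, paths)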
Example #6
def backup(filename=os.getcwd(),
           destination=None,
           prompt="yes",
           tags=[],
           profile="default",
           config=CONFIG_FILE,
           key=None,
           **kwargs):
    """Perform backup.

    :type filename: str
    :param filename: File/directory to backup.

    :type destination: str
    :param destination: s3|glacier|swift

    :type prompt: str
    :param prompt: Set to "no" to disable the password prompt (and thus
        encryption); only useful when using bakthat in command line mode.

    :type tags: str or list
    :param tags: Tags, either as a space-separated str or directly
        as a list of str (if calling from Python).

    :type password: str
    :keyword password: Password, empty string to disable encryption.

    :type conf: dict
    :keyword conf: Override/set AWS configuration.

    :type custom_filename: str
    :keyword custom_filename: Override the original filename (only in metadata)

    :rtype: dict
    :return: A dict containing the following keys: stored_filename, size, metadata, backend and filename.

    """
    storage_backend, destination, conf = _get_store_backend(
        config, destination, profile)
    backup_file_fmt = "{0}.{1}.tgz"

    # Check if compression is disabled on the configuration.
    if conf:
        compress = conf.get("compress", True)
    else:
        compress = config.get(profile).get("compress", True)

    if not compress:
        backup_file_fmt = "{0}.{1}"

    log.info("Backing up " + filename)
    arcname = filename.strip('/').split('/')[-1]
    now = datetime.utcnow()
    date_component = now.strftime("%Y%m%d%H%M%S")
    stored_filename = backup_file_fmt.format(arcname, date_component)

    backup_date = int(now.strftime("%s"))
    backup_data = dict(filename=kwargs.get("custom_filename", arcname),
                       backup_date=backup_date,
                       last_updated=backup_date,
                       backend=destination,
                       is_deleted=False)

    # Useful only when using bakmanager.io hook
    backup_key = key

    password = kwargs.get("password", os.environ.get("BAKTHAT_PASSWORD"))
    if password is None and prompt.lower() != "no":
        password = getpass("Password (blank to disable encryption): ")
        if password:
            password2 = getpass("Password confirmation: ")
            if password != password2:
                log.error("Password confirmation doesn't match")
                return

    if not compress:
        log.info("Compression disabled")
        outname = filename
        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size
        bakthat_compression = False

    # Check if the file is not already compressed
    elif mimetypes.guess_type(arcname) == ('application/x-tar', 'gzip'):
        log.info("File already compressed")
        outname = filename

        # removing extension to reformat filename
        new_arcname = re.sub(r'(\.t(ar\.)?gz)', '', arcname)
        stored_filename = backup_file_fmt.format(new_arcname, date_component)

        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size

        bakthat_compression = False
    else:
        # If not we compress it
        log.info("Compressing...")
        with tempfile.NamedTemporaryFile(delete=False) as out:
            with closing(tarfile.open(fileobj=out, mode="w:gz")) as tar:
                tar.add(filename, arcname=arcname)
            outname = out.name
            out.seek(0)
            backup_data["size"] = os.fstat(out.fileno()).st_size
        bakthat_compression = True

    bakthat_encryption = False
    if password:
        bakthat_encryption = True
        log.info("Encrypting...")
        encrypted_out = tempfile.NamedTemporaryFile(delete=False)
        encrypt_file(outname, encrypted_out.name, password)
        stored_filename += ".enc"

        # We only remove the file if the archive is created by bakthat
        if bakthat_compression:
            os.remove(outname)  # remove non-encrypted tmp file

        outname = encrypted_out.name

        encrypted_out.seek(0)
        backup_data["size"] = os.fstat(encrypted_out.fileno()).st_size

    # Handling tags metadata
    if isinstance(tags, list):
        tags = " ".join(tags)

    backup_data["tags"] = tags

    backup_data["metadata"] = dict(is_enc=bakthat_encryption,
                                   client=socket.gethostname())
    backup_data["stored_filename"] = stored_filename

    access_key = storage_backend.conf.get("access_key")
    container_key = storage_backend.conf.get(storage_backend.container_key)
    backup_data["backend_hash"] = hashlib.sha512(access_key +
                                                 container_key).hexdigest()

    log.info("Uploading...")
    storage_backend.upload(stored_filename, outname)

    # We only remove the file if the archive is created by bakthat
    if bakthat_compression or bakthat_encryption:
        os.remove(outname)

    log.debug(backup_data)

    # Insert backup metadata in SQLite
    Backups.create(**backup_data)

    BakSyncer(conf).sync_auto()

    # bakmanager.io hook, enable with -k/--key parameter
    if backup_key:
        bakmanager_hook(conf, backup_data, backup_key)

    return backup_data
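
The docstring above spells out the calling convention, so a short usage sketch may help when driving this bakthat backup() from Python rather than the command line; the paths, tags and password below are placeholders.

# Back up a directory to S3, passing the password explicitly so no prompt is shown.
result = backup("/var/www/mysite",
                destination="s3",
                tags=["www", "nightly"],
                password="s3cret")
print(result["stored_filename"], result["size"])

# An empty password (or prompt="no") disables encryption entirely.
backup("/var/backups/db.dump.tgz", destination="glacier", password="")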
Example #7
def backup(filename=os.getcwd(), destination=None, profile="default",
           config=CONFIG_FILE, prompt="yes", tags=[], key=None,
           exclude_file=None, s3_reduced_redundancy=False, **kwargs):
    """Perform backup.

    :type filename: str
    :param filename: File/directory to backup.

    :type destination: str
    :param destination: s3|glacier|swift

    :type prompt: str
    :param prompt: Set to "no" to disable the password prompt (and thus
        encryption); only useful when using bakthat in command line mode.

    :type tags: str or list
    :param tags: Tags, either as a space-separated str or directly
        as a list of str (if calling from Python).

    :type password: str
    :keyword password: Password, empty string to disable encryption.

    :type conf: dict
    :keyword conf: Override/set AWS configuration.

    :type custom_filename: str
    :keyword custom_filename: Override the original filename (only in metadata)

    :rtype: dict
    :return: A dict containing the following keys: stored_filename, size, metadata, backend and filename.

    """
    storage_backend, destination, conf = _get_store_backend(config, destination, profile)
    backup_file_fmt = "{0}.{1}.tgz"

    session_id = str(uuid.uuid4())
    events.before_backup(session_id)

    # Check if compression is disabled on the configuration.
    if conf:
        compress = conf.get("compress", True)
    else:
        compress = config.get(profile).get("compress", True)

    if not compress:
        backup_file_fmt = "{0}.{1}"

    log.info("Backing up " + filename)

    if exclude_file and os.path.isfile(exclude_file):
        EXCLUDE_FILES.insert(0, exclude_file)

    _exclude = lambda filename: False
    if os.path.isdir(filename):
        join = functools.partial(os.path.join, filename)
        for efile in EXCLUDE_FILES:
            efile = join(efile)
            if os.path.isfile(efile):
                _exclude = _get_exclude(efile)
                log.info("Using {0} to exclude files.".format(efile))

    arcname = filename.strip('/').split('/')[-1]
    now = datetime.utcnow()
    date_component = now.strftime("%Y%m%d%H%M%S")
    stored_filename = backup_file_fmt.format(arcname, date_component)

    backup_date = int(now.strftime("%s"))
    backup_data = dict(filename=kwargs.get("custom_filename", arcname),
                       backup_date=backup_date,
                       last_updated=backup_date,
                       backend=destination,
                       is_deleted=False)

    # Useful only when using bakmanager.io hook
    backup_key = key

    password = kwargs.get("password", os.environ.get("BAKTHAT_PASSWORD"))
    if password is None and prompt.lower() != "no":
        password = getpass("Password (blank to disable encryption): ")
        if password:
            password2 = getpass("Password confirmation: ")
            if password != password2:
                log.error("Password confirmation doesn't match")
                return

    if not compress:
        log.info("Compression disabled")
        outname = filename
        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size
        bakthat_compression = False

    # Check if the file is not already compressed
    elif mimetypes.guess_type(arcname) == ('application/x-tar', 'gzip'):
        log.info("File already compressed")
        outname = filename

        # removing extension to reformat filename
        new_arcname = re.sub(r'(\.t(ar\.)?gz)', '', arcname)
        stored_filename = backup_file_fmt.format(new_arcname, date_component)

        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size

        bakthat_compression = False
    else:
        # If not we compress it
        log.info("Compressing...")

        with tempfile.NamedTemporaryFile(delete=False) as out:
            with closing(tarfile.open(fileobj=out, mode="w:gz")) as tar:
                tar.add(filename, arcname=arcname, exclude=_exclude)
            outname = out.name
            out.seek(0)
            backup_data["size"] = os.fstat(out.fileno()).st_size
        bakthat_compression = True

    bakthat_encryption = False
    if password:
        bakthat_encryption = True
        log.info("Encrypting...")
        encrypted_out = tempfile.NamedTemporaryFile(delete=False)
        encrypt_file(outname, encrypted_out.name, password)
        stored_filename += ".enc"

        # We only remove the file if the archive is created by bakthat
        if bakthat_compression:
            os.remove(outname)  # remove non-encrypted tmp file

        outname = encrypted_out.name

        encrypted_out.seek(0)
        backup_data["size"] = os.fstat(encrypted_out.fileno()).st_size

    # Handling tags metadata
    if isinstance(tags, list):
        tags = " ".join(tags)

    backup_data["tags"] = tags

    backup_data["metadata"] = dict(is_enc=bakthat_encryption,
                                   client=socket.gethostname())
    stored_filename = os.path.join(os.path.dirname(kwargs.get("custom_filename", "")), stored_filename)
    backup_data["stored_filename"] = stored_filename

    access_key = storage_backend.conf.get("access_key")
    container_key = storage_backend.conf.get(storage_backend.container_key)
    backup_data["backend_hash"] = hashlib.sha512(access_key + container_key).hexdigest()

    log.info("Uploading...")
    storage_backend.upload(stored_filename, outname, s3_reduced_redundancy=s3_reduced_redundancy)

    # We only remove the file if the archive is created by bakthat
    if bakthat_compression or bakthat_encryption:
        os.remove(outname)

    log.debug(backup_data)

    # Insert backup metadata in SQLite
    backup = Backups.create(**backup_data)

    BakSyncer(conf).sync_auto()

    # bakmanager.io hook, enable with -k/--key parameter
    if backup_key:
        bakmanager_hook(conf, backup_data, backup_key)

    events.on_backup(session_id, backup)

    return backup
Example #8
def backup(filename=os.getcwd(), destination=None, prompt="yes", tags=[], profile="default", **kwargs):
    """Perform backup.

    :type filename: str
    :param filename: File/directory to backup.

    :type destination: str
    :param destination: s3|glacier

    :type prompt: str
    :param prompt: Set to "no" to disable the password prompt (and thus
        encryption); only useful when using bakthat in command line mode.

    :type tags: str or list
    :param tags: Tags, either as a space-separated str or directly
        as a list of str (if calling from Python).

    :type password: str
    :keyword password: Password, empty string to disable encryption.

    :type conf: dict
    :keyword conf: Override/set AWS configuration.

    :rtype: dict
    :return: A dict containing the following keys: stored_filename, size, metadata, backend and filename.

    """
    conf = kwargs.get("conf", None)
    storage_backend = _get_store_backend(conf, destination, profile)
    backup_file_fmt = "{0}.{1}.tgz"

    log.info("Backing up " + filename)
    arcname = filename.strip("/").split("/")[-1]
    now = datetime.utcnow()
    date_component = now.strftime("%Y%m%d%H%M%S")
    stored_filename = backup_file_fmt.format(arcname, date_component)

    backup_data = dict(filename=arcname, backup_date=int(now.strftime("%s")), backend=destination, is_deleted=False)

    password = kwargs.get("password")
    if password is None and prompt.lower() != "no":
        password = getpass("Password (blank to disable encryption): ")
        if password:
            password2 = getpass("Password confirmation: ")
            if password != password2:
                log.error("Password confirmation doesn't match")
                return

    # Check if the file is not already compressed
    if mimetypes.guess_type(arcname) == ("application/x-tar", "gzip"):
        log.info("File already compressed")
        outname = filename

        # removing extension to reformat filename
        new_arcname = re.sub(r"(\.t(ar\.)?gz)", "", arcname)
        stored_filename = backup_file_fmt.format(new_arcname, date_component)

        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size

        bakthat_compression = False
    else:
        # If not we compress it
        log.info("Compressing...")
        with tempfile.NamedTemporaryFile(delete=False) as out:
            with closing(tarfile.open(fileobj=out, mode="w:gz")) as tar:
                tar.add(filename, arcname=arcname)
            outname = out.name
            out.seek(0)
            backup_data["size"] = os.fstat(out.fileno()).st_size
        bakthat_compression = True

    bakthat_encryption = False
    if password:
        bakthat_encryption = True
        log.info("Encrypting...")
        encrypted_out = tempfile.NamedTemporaryFile(delete=False)
        encrypt_file(outname, encrypted_out.name, password)
        stored_filename += ".enc"

        # We only remove the file if the archive is created by bakthat
        if bakthat_compression:
            os.remove(outname)  # remove non-encrypted tmp file

        outname = encrypted_out.name

        encrypted_out.seek(0)
        backup_data["size"] = os.fstat(encrypted_out.fileno()).st_size

    # Handling tags metadata
    if isinstance(tags, (str, unicode)):
        tags = tags.split()

    backup_data["tags"] = tags

    backup_data["metadata"] = dict(is_enc=bakthat_encryption)
    backup_data["stored_filename"] = stored_filename
    backup_data["backend_hash"] = hashlib.sha512(
        storage_backend.conf.get("access_key") + storage_backend.conf.get(storage_backend.container_key)
    ).hexdigest()

    log.info("Uploading...")
    storage_backend.upload(stored_filename, outname)

    # We only remove the file if the archive is created by bakthat
    if bakthat_encryption:
        os.remove(outname)

    log.debug(backup_data)

    # Insert backup metadata in SQLite
    dump_truck_insert_backup(backup_data)

    return backup_data
Example #9
    def upload_file(self, request, *args, **kwargs):

        file = request.data.get('file')
        if request.data.get('directory_id') != 'null':
            modified_file_name = map(
                lambda x: x.replace("\'", "").strip()
                if x.find("'") != -1 else x.replace(' ', '_').strip(),
                file.name.split('.'))
            modified_file_name[-1] = '.' + modified_file_name[-1]
            modified_file_name.insert(-2, request.data.get('directory_id'))
        else:
            modified_file_name = map(
                lambda x: x.replace("\'", "").strip()
                if x.find("'") != -1 else x.replace(' ', '_').strip(),
                file.name.split('.'))
            modified_file_name[-1] = '.' + modified_file_name[-1]
        try:
            if not os.path.isdir(settings.MEDIA_ROOT + str(request.user.id)):
                os.mkdir(settings.MEDIA_ROOT + str(request.user.id))
            upload_dir = default_storage.save(''.join(modified_file_name),
                                              ContentFile(file.read()))
            user_name = request.user.first_name + request.user.last_name
            user_id = request.user.id
            tmp_file = os.path.join(settings.MEDIA_ROOT, upload_dir)
            encrypt_file(
                os.path.join(settings.MEDIA_ROOT, upload_dir),
                os.path.join(settings.MEDIA_ROOT + str(request.user.id),
                             base64.b16encode(upload_dir)), '123')
            os.remove(tmp_file)
            file_data = {
                c.name: file.name,
                "modified_file_name": ''.join(modified_file_name),
                "file_type": file.name.split('.')[-1] or 'n/a',
                "size": file.size,
                "file_content_type": file.content_type,
                "created_by_id": request.user.id,
                "owner_id": request.user.id,
                "created_date": datetime.now(),
                "modified_date": datetime.now()
            }
            connection_es.index(index='dms_test',
                                doc_type='post',
                                body={
                                    'name': file.name,
                                    'type': 'File',
                                    'content_type': file.content_type,
                                    'owner_id': request.user.id
                                })
            if request.data.get('directory_id') != 'null':
                file_data.update(
                    {c.directory_id: request.data.get('directory_id')})
            param = {
                'field': 'file_id',
                'label': 'File',
                'user_name': user_name
            }
            act_log_data = file_data
            response = crud.add(self.table, file_data)
            file_data.update({"id": response})
            act_log_data.update({'user_id': user_id})
            log_view = LogView()
            log_view.generate_log(act_log_data, param)
            return Response(response)
        except Exception as e:
            return Response({"error": e})
Example #10
def backup(filename, destination=None, prompt="yes", **kwargs):
    """Perform backup.

    :type filename: str
    :param filename: File/directory to backup.
            
    :type destination: str
    :param destination: s3|glacier

    :type prompt: str
    :param prompt: Set to "no" to disable the password prompt (and thus
        encryption); only useful when using bakthat in command line mode.

    :type password: str
    :keyword password: Password, empty string to disable encryption.

    :type conf: dict
    :keyword conf: Override/set AWS configuration.

    :rtype: dict
    :return: A dict containing the following keys: stored_filename, size, metadata and filename.

    """
    conf = kwargs.get("conf", None)
    storage_backend = _get_store_backend(conf, destination)
    backup_file_fmt = "{0}.{1}.tgz"

    log.info("Backing up " + filename)
    arcname = filename.strip('/').split('/')[-1]
    now = datetime.utcnow()
    date_component = now.strftime("%Y%m%d%H%M%S")
    stored_filename = backup_file_fmt.format(arcname, date_component)
    
    backup_data = dict(filename=arcname, backup_date=int(now.strftime("%s")))

    password = kwargs.get("password")
    if password is None and prompt.lower() != "no":
        password = getpass("Password (blank to disable encryption): ")
        if password:
            password2 = getpass("Password confirmation: ")
            if password != password2:
                log.error("Password confirmation doesn't match")
                return


    # Check if the file is not already compressed
    if mimetypes.guess_type(arcname) == ('application/x-tar', 'gzip'):
        log.info("File already compressed")
        outname = filename

        new_arcname = re.sub(r'(\.t(ar\.)?gz)', '', arcname)
        stored_filename = backup_file_fmt.format(new_arcname, date_component)
        
        with open(outname) as outfile:
            backup_data["size"] = os.fstat(outfile.fileno()).st_size

        bakthat_compression = False
    else:
        # If not we compress it
        log.info("Compressing...")
        with tempfile.NamedTemporaryFile(delete=False) as out:
            with closing(tarfile.open(fileobj=out, mode="w:gz")) as tar:
                tar.add(filename, arcname=arcname)
            outname = out.name
            out.seek(0)
            backup_data["size"] = os.fstat(out.fileno()).st_size
        bakthat_compression = True

    bakthat_encryption = False
    if password:
        bakthat_encryption = True
        log.info("Encrypting...")
        encrypted_out = tempfile.NamedTemporaryFile(delete=False)
        encrypt_file(outname, encrypted_out.name, password)
        stored_filename += ".enc"

        # We only remove the file if the archive is created by bakthat
        if bakthat_compression:
            os.remove(outname)  # remove non-encrypted tmp file

        outname = encrypted_out.name

        encrypted_out.seek(0)
        backup_data["size"] = os.fstat(encrypted_out.fileno()).st_size


    backup_data["metadata"] = dict(is_enc=bakthat_encryption)
    backup_data["stored_filename"] = stored_filename

    log.info("Uploading...")
    storage_backend.upload(stored_filename, outname)

    # We only remove the file if the archive is created by bakthat
    if bakthat_encryption:
        os.remove(outname)

    log.debug(backup_data)
    return backup_data