Code Example #1
def delete_resource_handler():
    id_token = request.cookies.get("token")
    claims = None
    error_message = None
    try:
        claims = google.oauth2.id_token.verify_firebase_token(
            id_token, firebase_request_adapter)
    except ValueError as exc:
        error_message = str(exc)
    # Bail out if the token could not be verified; claims is None in that case.
    if claims is None:
        return error_message or "Authentication failed"
    email = claims['email']

    json_data = request.get_json()
    name = json_data['name']
    response = database.deleteResource(email, name)
    storage.delete_file(email, name)
    if response:
        return "The resource was deleted"
    else:
        return "Something went wrong. Try again!"
Code Example #2
File: contact.py Project: jcnelson/syndicatemail
def delete_contact( pubkey_str, email_addr ):
   global STORAGE_DIR, CACHED_CONTACT_LIST
   
   contact_path = make_contact_path( pubkey_str, email_addr )
   
   rc = storage.delete_file( contact_path )
   if not rc:
      log.exception( Exception("Failed to delete contact") )
      return False
   
   else:
      storage.purge_cache( CACHED_CONTACT_LIST )
      
   return True
Code Example #3
File: account.py Project: jcnelson/syndicatemail
def delete_syndicate_user_id( volume ):
   uid_path = storage.volume_path( USER_STORAGE_DIR, "syndicate_user_id" )
   return storage.delete_file( uid_path, volume=volume )
Code Example #4
File: account.py Project: jcnelson/syndicatemail
def delete_gateway_port():
   name_path = storage.local_path( GATEWAY_STORAGE_DIR, "gateway_port" )
   return storage.delete_file( name_path, volume=None )
Code Example #5
File: account.py Project: jcnelson/syndicatemail
def delete_volume_pubkey( volume_name ):
   return storage.delete_file( volume_pubkey_path( volume_name ), volume=None )
Code Example #6
File: message.py Project: jcnelson/syndicatemail
def delete_message( pubkey_str, folder, msg_timestamp, msg_id ):
   rc = storage.delete_file( stored_message_path( pubkey_str, folder, msg_timestamp, msg_id ) )
   storage.purge_cache( folder_cache_name( folder ) )
   return rc
Code Example #7
File: message.py Project: jcnelson/syndicatemail
   """
   # FIXME: broken--send one attachment per sender, or one per receiver.
   for attachment_name in message.attachment_names:
      attachment_path = attachment_paths[attachment_name]
      attachment_data = attachment_data[attachment_name]
      rc = storage.write_encrypted_file( receiver_pubkey_str, attachment_path, attachment_data )
      if not rc:
         failed = True
         break
      
      stored.append( attachment_path )
   """
   if failed:
      # roll back
      for path in stored:
         storage.delete_file( path )
      
      return False
   
   else:
      storage.purge_cache( folder_cache_name( folder ) )
      return True


#-------------------------
def read_stored_message( privkey_str, folder, msg_timestamp, msg_id, volume=None, receiver_pubkey_pem=None ):
   if receiver_pubkey_pem is None:
      pkey = CryptoKey.importKey( privkey_str )
      receiver_pubkey_pem = pkey.publickey().exportKey()

   mpath = stored_message_path( receiver_pubkey_pem, folder, msg_timestamp, msg_id )
Code Example #8
File: keys.py Project: jcnelson/syndicatemail
def delete_public_key( key_name ):
   key_path = make_key_local_path( key_name + ".pub" )
   return storage.delete_file( key_path, volume=None )
Code Example #9
File: keys.py Project: jcnelson/syndicatemail
def delete_private_key( key_name ):
   key_path = make_key_local_path( key_name )
   rc = storage.delete_file( key_path, volume=None )
   return rc
Code Example #10
File: keys.py Project: jcnelson/syndicatemail
def delete_private_key_from_volume( key_name, volume=None ):
   key_path = make_key_volume_path( key_name )
   rc = storage.delete_file( key_path, volume=volume )
   return rc
Code Example #11
File: rpmrepo.py Project: notnarb/mkrepo
def update_repo(storage, sign, tempdir):
    filelists = {}
    primary = {}
    revision = "0"
    initial_filelists = None
    initial_primary = None

    if storage.exists('repodata/repomd.xml'):
        data = storage.read_file('repodata/repomd.xml')

        filelists, primary, revision = parse_repomd(data)

        initial_filelists = filelists['location']
        data = storage.read_file(initial_filelists)
        filelists = parse_filelists(gunzip_string(data))

        initial_primary = primary['location']
        data = storage.read_file(initial_primary)
        primary = parse_primary(gunzip_string(data))

    recorded_files = set()
    for package in primary.values():
        recorded_files.add((package['location'], float(package['file_time'])))

    existing_files = set()
    expr = r'^.*\.rpm$'
    for file_path in storage.files('.'):
        match = re.match(expr, file_path)

        if not match:
            continue

        mtime = storage.mtime(file_path)

        existing_files.add((file_path, mtime))

    files_to_add = existing_files - recorded_files

    for file_to_add in files_to_add:
        file_path = file_to_add[0]
        mtime = file_to_add[1]
        print("Adding: '%s'" % file_path)

        tmpdir = tempfile.mkdtemp('', 'tmp', tempdir)
        storage.download_file(file_path, os.path.join(tmpdir, 'package.rpm'))

        rpminfo = rpmfile.RpmInfo()
        header = rpminfo.parse_file(os.path.join(tmpdir, 'package.rpm'))
        sha256 = file_checksum(os.path.join(tmpdir, 'package.rpm'), "sha256")

        statinfo = os.stat(os.path.join(tmpdir, 'package.rpm'))
        size = statinfo.st_size

        shutil.rmtree(tmpdir)

        nerv, prim = header_to_primary(header, sha256, mtime, file_path,
                                       rpminfo.header_start,
                                       rpminfo.header_end, size)
        _, flist = header_to_filelists(header, sha256)

        primary[nerv] = prim
        filelists[nerv] = flist

    revision = str(int(revision) + 1)

    filelists_str = dump_filelists(filelists)
    primary_str = dump_primary(primary)
    filelists_gz = gzip_string(filelists_str)
    primary_gz = gzip_string(primary_str)

    repomd_str = generate_repomd(filelists_str, filelists_gz, primary_str,
                                 primary_gz, revision)

    filelists_gz_sha256 = string_checksum(filelists_gz, 'sha256')
    primary_gz_sha256 = string_checksum(primary_gz, 'sha256')
    filelists_name = 'repodata/%s-filelists.xml.gz' % filelists_gz_sha256
    primary_name = 'repodata/%s-primary.xml.gz' % primary_gz_sha256

    storage.write_file(filelists_name, filelists_gz)
    storage.write_file(primary_name, primary_gz)
    storage.write_file('repodata/repomd.xml', repomd_str)

    if initial_filelists:
        storage.delete_file(initial_filelists)
    if initial_primary:
        storage.delete_file(initial_primary)

    if sign:
        repomd_str_signed = gpg_sign_string(repomd_str)
        storage.write_file('repodata/repomd.xml.asc', repomd_str_signed)
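Across the examples, delete_file is one call among several on a broader storage layer: the syndicatemail snippets call functions from a storage module, the mkrepo example receives a storage object as an argument, and example #1 uses a two-argument delete_file(email, name) form of its own. The sketch below gathers the surface the snippets rely on into one place for orientation; the class name and exact signatures are assumptions, not any project's actual definition.

class Storage:
    """Hypothetical composite of the storage calls seen in the examples above."""

    # syndicatemail-style calls (examples #2-#10)
    def delete_file(self, path, volume=None): ...    # remove a stored object
    def purge_cache(self, cache_name): ...           # invalidate a cached listing
    def write_encrypted_file(self, pubkey_str, path, data): ...
    def volume_path(self, *parts): ...               # build a path inside the volume
    def local_path(self, *parts): ...                # build a path on local storage

    # mkrepo-style calls (example #11)
    def exists(self, path): ...                      # check whether an object exists
    def read_file(self, path): ...                   # return the object's contents
    def write_file(self, path, data): ...            # create or overwrite an object
    def files(self, root): ...                       # iterate stored file paths
    def mtime(self, path): ...                       # last-modified timestamp
    def download_file(self, path, local_path): ...   # copy an object to local disk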