Code example #1
def run_jobs(vault_name):
  """
  Fetch the output of a completed job.  If the job is not yet complete, refresh its status instead.
  """
  handler = get_handler()
  # Look up the vault in the local database, as always...
  region = handler.region.name
  vault = Vault.query.filter_by(name=vault_name, region=region).first()
  if vault is None:
    abort(401)
  # Get the job from our local db
  if 'job_id' not in request.args:
    abort(401)
  job = Job.query.filter_by(job_id=request.args['job_id']).first()
  # If we don't have the job, or our records show it's incomplete, check with Amazon
  if job is None or not job.completed:
    if vault.lock:
      abort(401)
    job = process_job(handler.describe_job(vault.name, request.args['job_id']), vault)
  # If it's still missing, incomplete, dead, or didn't succeed, something went wrong...
  if job is None or not job.completed or not job.live or job.status_code != "Succeeded":
    abort(401)
  # Now we have the job, get its output
  if job.action == 'list':
    dat = handler.get_job_output(vault.name, job.job_id)
    for archive in dat["ArchiveList"]:
      process_archive(archive, vault)
    # Inventory is in, so release the vault lock
    vault.lock = False
    WG.db.session.add(vault)
    WG.db.session.commit()
  elif job.action == 'download':
    # Archive-retrieval (download) output handling is not shown in this snippet
    pass
  return redirect(url_for("vault_view", vault_name=vault.name))
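
These view functions rely on Flask-SQLAlchemy models (Vault, Job, Archive), an application package WG that exposes the SQLAlchemy handle as WG.db, and helpers (get_handler, process_job, process_archive) that are not included in the snippets. Below is a minimal sketch of what the models might look like, inferred purely from the attributes the examples touch; every column name and type here is an assumption, not the project's actual schema.

# Hypothetical sketch of the models the snippets assume -- not the project's actual schema.
from flask import Flask
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
db = SQLAlchemy(app)  # the snippets reach this as WG.db

class Vault(db.Model):
  id = db.Column(db.Integer, primary_key=True)
  name = db.Column(db.String(255))              # vault name within a region
  region = db.Column(db.String(32))             # e.g. "us-east-1"
  lock = db.Column(db.Boolean, default=False)   # set while an inventory job is pending
  jobs = db.relationship('Job', backref='vault', lazy='dynamic')

class Job(db.Model):
  id = db.Column(db.Integer, primary_key=True)
  job_id = db.Column(db.String(255))            # Glacier job id
  action = db.Column(db.String(16))             # 'list' (inventory) or 'download' (archive retrieval)
  completed = db.Column(db.Boolean, default=False)
  live = db.Column(db.Boolean, default=True)    # still reported by Glacier's list_jobs
  status_code = db.Column(db.String(16))        # 'InProgress', 'Succeeded' or 'Failed'
  vault_id = db.Column(db.Integer, db.ForeignKey('vault.id'))

class Archive(db.Model):
  id = db.Column(db.Integer, primary_key=True)
  archive_id = db.Column(db.String(255))        # Glacier archive id
  description = db.Column(db.String(255))
  size = db.Column(db.BigInteger)
  vault_id = db.Column(db.Integer, db.ForeignKey('vault.id'))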
Code example #2
def check_job_status(vault_name):
  """
  Refresh the local job records for a vault against the jobs Glacier currently lists.
  """
  handler = get_handler()
  region = handler.region.name
  vault = Vault.query.filter_by(name=vault_name, region=region).first()
  if vault is None:
    abort(401)
  # Get the live jobs from Amazon
  live_jobs = handler.list_jobs(vault.name)
  # First update/add all of them to the db
  for job in live_jobs['JobList']:
    process_job(job, vault)
  # Then mark any jobs in our db that Glacier no longer lists as dead
  jobs = vault.jobs.filter_by(live=True).all()
  live_ids = [x["JobId"] for x in live_jobs['JobList']]
  for job in jobs:
    if job.job_id not in live_ids:
      job.live = False
      WG.db.session.add(job)
  WG.db.session.commit()
  return redirect(request.referrer)
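
All four examples funnel Glacier's job descriptions through a process_job helper that is not shown. A rough sketch of what it might do is below: upsert a Job row from the dict returned by describe_job or listed by list_jobs. The field names ('JobId', 'Action', 'Completed', 'StatusCode') follow Glacier's job description format, but the mapping of 'Action' values onto 'list'/'download' is only an inference from how run_jobs branches on job.action.

# Hypothetical sketch of process_job -- the real helper may differ.
def process_job(description, vault):
  job = Job.query.filter_by(job_id=description["JobId"]).first()
  if job is None:
    job = Job(job_id=description["JobId"], vault_id=vault.id)
  # Assumed mapping: inventory jobs become 'list', archive retrievals become 'download'
  job.action = 'list' if description["Action"] == "InventoryRetrieval" else 'download'
  job.completed = description["Completed"]
  job.status_code = description["StatusCode"]
  job.live = True
  WG.db.session.add(job)
  WG.db.session.commit()
  return job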
Code example #3
def request_archive(vault_name):
  """
  Ask Glacier to retrieve an archive.  The data is only available once the retrieval
  job completes, so expect to wait.
  """
  handler = get_handler()
  # Look up the vault, as always...
  region = handler.region.name
  vault = Vault.query.filter_by(name=vault_name, region=region).first()
  if vault is None:
    abort(401)
  # Need the archive too...
  if 'archive_id' not in request.args:
    abort(401)
  archive = Archive.query.filter_by(archive_id=request.args['archive_id']).first()
  if archive is None:
    abort(401)
  # OK, everything exists, go ahead...
  def_opts = {"Description": "Fetch archive.",
              "Type": "archive-retrieval",
              "ArchiveId": archive.archive_id}
  init_response = handler.initiate_job(vault.name, def_opts)
  # Record the new job in the local db
  process_job(handler.describe_job(vault.name, init_response["JobId"]), vault)
  return redirect(request.referrer)
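
The snippets read like Flask view functions, but their route decorators are not included and the project's URL scheme is unknown. The wiring below is purely illustrative, assuming vault_name comes from the path and job_id/archive_id arrive as query-string arguments; run_jobs additionally redirects to a vault_view endpoint, which would also need to exist.

# Illustrative registration only -- the actual routes are not shown in the snippets.
app.add_url_rule('/vault/<vault_name>/jobs/run', 'run_jobs', run_jobs)                # expects ?job_id=...
app.add_url_rule('/vault/<vault_name>/jobs/check', 'check_job_status', check_job_status)
app.add_url_rule('/vault/<vault_name>/retrieve', 'request_archive', request_archive)  # expects ?archive_id=...
app.add_url_rule('/vault/<vault_name>/inventory', 'get_inventory', get_inventory)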
Code example #4
def get_inventory(vault_name):
  """
  Initiate an inventory-retrieval job for the specified vault.
  The vault lock stops us from submitting another one while an inventory job is already pending.
  """
  handler = get_handler()
  # Look up the vault...
  region = handler.region.name
  vault = Vault.query.filter_by(name=vault_name, region=region).first()
  if vault is None:
    abort(401)
  # Already asked for one, don't need another...
  if vault.lock:
    abort(401)
  def_opts = {"Description": "Auto-made inventory job.",
              "Type": "inventory-retrieval",
              "Format": "JSON"}
  init_response = handler.initiate_job(vault.name, def_opts)
  # Lock the vault until the inventory comes back
  vault.lock = True
  WG.db.session.add(vault)
  WG.db.session.commit()
  # Add the job to the database
  process_job(handler.describe_job(vault.name, init_response["JobId"]), vault)
  return redirect(request.referrer)
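
Finally, run_jobs (example #1) hands every entry of the inventory's "ArchiveList" to a process_archive helper, which is also not shown. A hypothetical sketch follows, assuming the entries use Glacier's JSON inventory fields ('ArchiveId', 'ArchiveDescription', 'Size') and that the Archive model has matching columns; the real helper may record more or different fields.

# Hypothetical sketch of process_archive -- the real helper and Archive columns may differ.
def process_archive(entry, vault):
  archive = Archive.query.filter_by(archive_id=entry["ArchiveId"]).first()
  if archive is None:
    archive = Archive(archive_id=entry["ArchiveId"], vault_id=vault.id)
  archive.description = entry.get("ArchiveDescription", "")
  archive.size = entry["Size"]
  WG.db.session.add(archive)
  WG.db.session.commit()
  return archive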