def fetch_source(repo):
    """Retrieve a copy of source from storage.

    Downloads ``cache/<repo>.zip`` from storage, writes it to the working
    directory, unzips it with the ``unzip`` command, and deletes the
    archive on success.

    Args:
        repo: Name of the repository whose cached source to fetch.

    Returns:
        True on success, False if the download or the unzip step failed.
    """
    repo_zip = repo + '.zip'

    # A stale archive would make the exclusive 'xb' open below fail.
    if exists(repo_zip):
        os.remove(repo_zip)

    try:
        zipfile_data = qmk_storage.get('cache/%s.zip' % repo)
    except qmk_storage.exceptions.ClientError as e:
        logging.warning('Could not fetch %s.zip from S3: %s', repo, e.__class__.__name__)
        logging.warning(e)
        return False

    # 'xb' (exclusive create, binary) guarantees we never clobber a file
    # that appeared between the remove above and this write.
    with open(repo_zip, 'xb') as zip_fh:
        zip_fh.write(zipfile_data)

    zip_command = ['unzip', repo_zip]
    try:
        # BUG FIX: pass the lazy %-args separately. The original passed a
        # single tuple for two %s placeholders, which made the log record
        # fail to format.
        logging.debug('Unzipping %s Source: %s', repo, zip_command)
        check_output(zip_command)
        os.remove(repo_zip)
        return True
    except CalledProcessError as build_error:
        logging.error('Could not unzip source, Return Code %s, Command %s', build_error.returncode, build_error.cmd)
        logging.error(build_error.output)
        return False
def test_put_and_get():
    """Round-trip a plain string through S3 storage."""
    test_key = 'qmk_compiler_test_unique_key_name'

    # Precondition: the test key must be absent from S3.
    try:
        qmk_storage.get(test_key)
    except Exception as err:
        if err.__class__.__name__ != 'NoSuchKey':
            raise
    else:
        raise RuntimeError('%s exists on S3 when it should not!' % test_key)

    # Store a value, read it back, then clean up before asserting.
    qmk_storage.put(test_key, 'hello')
    fetched = qmk_storage.get(test_key)
    qmk_storage.delete(test_key)

    assert fetched == 'hello'
def GET_v1_compile_job_id_src(job_id):
    """Download a completed compile job.

    Streams the job's source archive as an attachment, or returns an
    error response when the job is missing or not finished.
    """
    job = get_job_metadata(job_id)

    # Guard clauses: unknown job, then job without a firmware result.
    if not job:
        return error("Compile job not found", 404)
    if not job['result']['firmware']:
        return error("Compile job not finished or other error.", 422)

    archive = qmk_storage.get('%(id)s/%(source_archive)s' % job['result'])
    return send_file(archive, mimetype='application/octet-stream', as_attachment=True, attachment_filename=job['result']['source_archive'])
def test_save_fd():
    """Stream a file-like object into S3 and read the value back."""
    test_key = 'qmk_compiler_test_unique_key_name'

    # Precondition: the test key must be absent from S3.
    try:
        qmk_storage.get(test_key)
    except Exception as err:
        if err.__class__.__name__ != 'NoSuchKey':
            raise
    else:
        raise RuntimeError('%s exists on S3 when it should not!' % test_key)

    # Upload from an in-memory stream.
    stream = BytesIO(b'hello')
    with stream:
        qmk_storage.save_fd(stream, test_key)

    # Fetch it back and clean up before asserting.
    fetched = qmk_storage.get(test_key)
    qmk_storage.delete(test_key)

    assert fetched == 'hello'
def test_get_fd():
    """Retrieve an object through the file-like ``get_fd`` interface."""
    test_key = 'qmk_compiler_test_unique_key_name'

    # Precondition: the test key must be absent from S3.
    try:
        qmk_storage.get(test_key)
    except Exception as err:
        if err.__class__.__name__ != 'NoSuchKey':
            raise
    else:
        raise RuntimeError('%s exists on S3 when it should not!' % test_key)

    # Seed the object we will read back.
    qmk_storage.put(test_key, 'hello')

    # Read through the file-like handle; note it yields bytes.
    stream = qmk_storage.get_fd(test_key)
    contents = stream.read()
    stream.close()
    qmk_storage.delete(test_key)

    assert contents == b'hello'
def test_save_file():
    """Upload an on-disk file to S3 and read the value back."""
    test_key = 'qmk_compiler_test_unique_key_name'

    # Precondition: the test key must be absent from S3.
    try:
        qmk_storage.get(test_key)
    except Exception as err:
        if err.__class__.__name__ != 'NoSuchKey':
            raise
    else:
        raise RuntimeError('%s exists on S3 when it should not!' % test_key)

    # Write a temp file and upload it by path; flush so the bytes are
    # on disk before save_file reopens the file.
    with NamedTemporaryFile(mode='w', encoding='utf-8') as scratch:
        scratch.write('hello')
        scratch.flush()
        qmk_storage.save_file(scratch.name, test_key)

    # Fetch it back and clean up before asserting.
    fetched = qmk_storage.get(test_key)
    qmk_storage.delete(test_key)

    assert fetched == 'hello'
def test_delete():
    """Create and then delete an object from s3, make sure we can't fetch it afterward."""
    test_key = 'qmk_compiler_test_unique_key_name'

    # Precondition: the test key must be absent from S3.
    try:
        qmk_storage.get(test_key)
    except Exception as err:
        if err.__class__.__name__ != 'NoSuchKey':
            raise
    else:
        raise RuntimeError('%s exists on S3 when it should not!' % test_key)

    # Create a key, confirm it is readable, then delete it.
    qmk_storage.put(test_key, 'hello')
    assert qmk_storage.get(test_key) == 'hello'
    qmk_storage.delete(test_key)

    # Postcondition: the key must be gone again.
    try:
        qmk_storage.get(test_key)
    except Exception as err:
        if err.__class__.__name__ != 'NoSuchKey':
            raise
    else:
        raise RuntimeError('%s exists on S3 when it should not!' % test_key)
def fetch_source(repo, uncompress=True):
    """Retrieve a copy of source from storage.

    Downloads ``cache/<repo>.zip`` to the working directory and, when
    ``uncompress`` is true, hands the archive to ``unzip_source``.

    Returns:
        False when the download fails; otherwise True, or the result of
        ``unzip_source`` when uncompressing.
    """
    archive_path = repo + '.zip'

    # A stale archive would make the exclusive 'xb' open below fail.
    if os.path.exists(archive_path):
        os.remove(archive_path)

    try:
        archive_bytes = qmk_storage.get('cache/%s.zip' % repo)
    except qmk_storage.exceptions.ClientError as e:
        logging.warning('Could not fetch %s.zip from S3: %s', repo, e.__class__.__name__)
        logging.warning(e)
        return False

    with open(archive_path, 'xb') as archive_fh:
        archive_fh.write(archive_bytes)

    if not uncompress:
        return True
    return unzip_source(archive_path)
def get_job_metadata(job_id):
    """Fetch a job's metadata from the file store.

    The metadata lives at ``<job_id>/<job_id>.json``; the parsed JSON
    object is returned.
    """
    raw_json = qmk_storage.get('%s/%s.json' % (job_id, job_id))
    return json.loads(raw_json)