def save_config(access_key, secret_key, region_id, bucket_name, domain_id,
                project_id, obs_endpoint, iam_endpoint, fis_endpoint,
                compute_hash=True):
    """Write the FIS configuration to CONFIG_FILE as ``KEY = value`` lines.

    :param access_key: OS access key
    :param secret_key: OS secret key
    :param region_id: region identifier
    :param bucket_name: OBS bucket name
    :param domain_id: domain identifier
    :param project_id: project identifier
    :param obs_endpoint: OBS service endpoint
    :param iam_endpoint: IAM service endpoint
    :param fis_endpoint: FIS service endpoint
    :param compute_hash: when True, append an OS_CONFIG_HASH line so a later
        read can detect a tampered/corrupted file (see read_config_and_verify)
    :return: None
    """
    # Ordered so the file layout is stable and matches the historical format.
    entries = [
        ('OS_ACCESS_KEY', access_key),
        ('OS_SECRET_KEY', secret_key),
        ('OS_REGION_ID', region_id),
        ('OS_BUCKET_NAME', bucket_name),
        ('OS_DOMAIN_ID', domain_id),
        ('OS_PROJECT_ID', project_id),
        ('OS_OBS_ENDPOINT', obs_endpoint),
        ('OS_IAM_ENDPOINT', iam_endpoint),
        ('OS_FIS_ENDPOINT', fis_endpoint),
    ]
    if compute_hash:
        # Only pay for the MD5 computation when the hash will be written;
        # the original computed it unconditionally and then discarded it.
        config_hash = utils.compute_md5(
            access_key, secret_key, region_id, bucket_name, domain_id,
            project_id, obs_endpoint, iam_endpoint, fis_endpoint)
        entries.append(('OS_CONFIG_HASH', config_hash))
    with open(CONFIG_FILE, 'w') as config_file:
        for key, value in entries:
            config_file.write('%s = %s\n' % (key, value))
def compare_md5(self, md5):
    """Check whether *md5* matches the digest of the current ad state.

    The fingerprint covers the ``expand`` parameter, the ads data, and
    the ``reduction_factor`` parameter.

    :param md5: previously stored MD5 digest
    :return: True when the stored digest equals the freshly computed one
    """
    fingerprint = compute_md5([
        self.params['expand'],
        self.ads,
        self.params['reduction_factor'],
    ])
    return fingerprint == md5
def read_config_and_verify():
    """Load CONFIG_FILE into the environment and verify its integrity hash.

    On any failure (unreadable file, corrupted hash) the process exits via
    utils.exit with a diagnostic message and the configuration tips.
    """
    # The keys mirror the order save_config writes them in; the hash is
    # computed over the same nine values.
    keys = ('OS_ACCESS_KEY', 'OS_SECRET_KEY', 'OS_REGION_ID',
            'OS_BUCKET_NAME', 'OS_DOMAIN_ID', 'OS_PROJECT_ID',
            'OS_OBS_ENDPOINT', 'OS_IAM_ENDPOINT', 'OS_FIS_ENDPOINT')
    try:
        with open(CONFIG_FILE, 'r') as config_file:
            _read_config_and_update(config_file, os.environ)
        expected = utils.compute_md5(*[os.getenv(k) for k in keys])
        # Raising here is deliberate: the except below turns it into the
        # same "read configuration failed" exit path as an I/O error.
        if expected != os.getenv('OS_CONFIG_HASH'):
            raise exception.FisException('%s is corrupted' % CONFIG_FILE)
    except Exception as e:
        utils.exit('Read configuration file failed: %s\n%s'
                   % (encode.exception_to_unicode(e), CONFIG_TIPS))
def save_metadata(self):
    """Persist the ad-metadata structures and refresh the stored MD5.

    Writes four pickled structures to META_FILE, stores a fingerprint of
    the current ad state in MD5_FILE, and rebuilds the zeroed
    ``all_companies_scalar`` counter map.

    :return: None
    """
    # Pickle streams are binary: 'wb' is required on Python 3 and is the
    # correct mode on Windows under Python 2 as well.
    with open(self.META_FILE, 'wb') as meta_file:
        pickle.dump(self.keyword_to_company, meta_file)
        pickle.dump(self.all_companies, meta_file)
        pickle.dump(self.ad_keywords, meta_file)
        pickle.dump(self.ad_negatives, meta_file)
    # The fingerprint must cover exactly the fields compare_md5() hashes
    # (expand, ads, reduction_factor). Previously reduction_factor was
    # missing here, so the stored digest could never match the comparison.
    # NOTE(review): this assumes compare_md5's field list is the intended
    # cache key — confirm against the caller of compare_md5.
    hash_data = [
        self.params['expand'],
        self.ads,
        self.params['reduction_factor'],
    ]
    with open(self.MD5_FILE, 'w') as md5_file:
        md5_file.write(compute_md5(hash_data))
    # Fresh per-company scalar map, every count reset to zero.
    self.all_companies_scalar = self.all_companies.copy()
    for company in self.all_companies_scalar:
        self.all_companies_scalar[company] = 0
except (HTTPError, URLError), e: print "Something went wrong.. Contact tha spodz: %s" % e sys.exit(1) ebooks = [] output = [] errors = [] # a relatively quick search for all ebooks for root, dirs, files in os.walk(ebook_home): for filename in files: fn, ext = os.path.splitext(filename) if ext == ".epub" or ext == ".mobi" or ext == ".azw" or ext == ".pdf": filepath = os.path.join(root, filename) md5_tup = compute_md5(filepath) ebooks.append( (filepath, fn, ext, md5_tup[2], md5_tup[0]) ) i = 0 total = len(ebooks) if total == 0: print "No ebooks found. Is $EBOOK_HOME set correctly?" sys.exit(1) print "You have %s ebooks full of tasty meta. Nom nom.." % total ebooks_dict = {} # now parse all book meta data; building a complete dataset for item in ebooks: