def setUp(self):
    """Create the database handle, a quiet logger, and two fixture songs
    that every test in this class relies on."""
    self.db = Database("localhost", "postgres", "postgres", "Ivan@1995")
    self.logger = setuplogger(False, "./log/test_log")
    # Two small fixtures: a WAV and an MP3 under ./test_audio.
    self.song1 = Song("sinetest", "unknown", "./test_audio/sinetest.wav", "./test_audio/", False)
    self.song2 = Song("sine", "unknown", "./test_audio/sine.mp3", "./test_audio/", False)
def identify(args):
    """Compute the snippet's signature and compare it with the signatures of
    songs in the database, logging the closest match.

    Parameters
    ----------
    args : Namespace
        The namespace that was parsed from the command line input.
    """
    logger = setuplogger(args.verbose, "./log/identify_log")
    filename = args.filename
    if filename is None:
        # No file supplied: capture a snippet from the microphone.
        # Duration is in seconds, sample rate in Hz.
        dur, fs = 30, 44100
        print("*recording")
        myrecording = sd.rec(int(dur * fs), fs, 1, blocking=True)
        print("*end")
        # Play the capture back so the user can verify it.
        print("*playing")
        sd.play(myrecording, fs, blocking=True)
        print("*end")
        # Collapse the (n, 1) recording to a 1-D mono signal.
        mono_signal = np.reshape(myrecording, (int(dur * fs), ))
        snippet = Song("recording", "user", "from recording", "None", True, mono_signal, fs)
    else:
        # A URL is used verbatim; a bare name is resolved under ./snippets.
        if filename.startswith("http"):
            filePath = filename
        else:
            filePath = os.path.join("./snippets", filename)
        libPath = './Library/'
        snippet = Song("recording", "user", filePath, libPath, True)
    db = Database("localhost", "postgres", "postgres", "Ivan@1995")
    matched_result = db.search(snippet)
    logger.info("find the matched song {}".format(matched_result))
def __init__(self, api_key):
    """Store the API token and prepare authenticated request headers."""
    self.token = api_key
    self.secure_url = end_point
    # Headers for authenticated JSON calls against the API endpoint.
    self.secure_headers = {
        'content-type': 'application/json',
        'Accept': 'application/json',
        'Authorization': 'Token token=' + self.token
    }
    self.database = Database()
def add_to_database(job_list: list, db: Database, session):
    """Persist every job dictionary in *job_list*, then commit once.

    :param job_list: list of job dictionaries
    :param db: database manager
    :param session: database session
    :return: void
    """
    for job in job_list:
        db.add_job(job['id'], job['title'], job['url'],
                   job['created_at'], job['location'], job['description'],
                   job['company'], job['type'])
    # Single commit after the whole batch has been queued.
    session.commit()
class TestDatabase(unittest.TestCase):
    """Integration tests for Database: saving, removing, and audio search."""

    def setUp(self):
        # Fresh connection, logger, and two audio fixtures for every test.
        self.db = Database("localhost", "postgres", "postgres", "Ivan@1995")
        self.logger = setuplogger(False, "./log/test_log")
        self.song1 = Song("sinetest", "unknown", "./test_audio/sinetest.wav", "./test_audio/", False)
        self.song2 = Song("sine", "unknown", "./test_audio/sine.mp3", "./test_audio/", False)

    def test_save_to_database(self):
        """Both fixture songs should be findable after being saved."""
        self.db.save_to_database([self.song1])
        self.db.save_to_database([self.song2])
        print("test save to db")
        self.assertTrue(self.db.is_in_database(self.song1))
        self.assertTrue(self.db.is_in_database(self.song2))

    def test_remove_from_database(self):
        """Neither fixture song should remain after removal."""
        self.db.remove_from_database([self.song1])
        self.db.remove_from_database([self.song2])
        print("test remove from db")
        self.assertFalse(self.db.is_in_database(self.song1))
        self.assertFalse(self.db.is_in_database(self.song2))

    def test_search(self):
        """Search must identify both a clean cut and a noisy real-world clip."""
        # A cut snippet (with high sound quality).
        snippet = Song("dvorak_mini_snippet", "dvorak",
                       "./snippets/dvorak_mini_snippet.wav", "./snippets/", False)
        # A real-world recording (with low sound quality).
        recording = Song("who_knew_noise", "pink", "./snippets/who_knew_noise.wav",
                         "./snippets/", False)
        self.assertEqual("dvorak_miniatures_opus_74a_3.mp3", self.db.search(snippet))
        self.assertEqual("who_knew.mp3", self.db.search(recording))
def __init__(self, api_key):
    """Store the API token and build both plain and authenticated headers."""
    self.token = api_key
    self.secure_url = end_point
    # Plain headers for unauthenticated calls.
    self.headers = {"content-type": "application/json"}
    # Token-bearing headers for authenticated calls.
    self.secure_headers = {
        "content-type": "application/json",
        "Accept": "application/json",
        "Authorization": "Token token=" + self.token,
    }
    self.database = Database()
def add(args):
    """Compute the song's signature and add the song's info and signature
    to the database.

    Fixes over the original: the Database connection was re-created inside
    the loop for every song (on top of an unused one created before the
    loop), a `songs` list was accumulated but never used, and `filePath`
    was derived from ``args.filename`` even in the 'all' branch where it is
    never used.

    Parameters
    ----------
    args : Namespace
        The namespace that was parsed from the command line input.
    """
    libPath = './Library/'
    if args.filename == 'all':
        # Add every audio file found in the Library folder.
        audio_paths = getListOfFiles(libPath)
        # One connection is enough for the whole batch.
        db = Database("localhost", "postgres", "postgres", "*********")
        for audio_path in audio_paths:
            title = os.path.basename(audio_path)
            artist = "various"
            song = Song(title, artist, audio_path, libPath, True)
            db.save_to_database([song])
            print("{} added".format(title))
    else:
        filename = args.filename
        # A URL is used verbatim; a bare name is resolved under ./Library.
        if filename.startswith("http"):
            filePath = filename
        else:
            filePath = os.path.join("./Library", filename)
        # Fall back to the file's basename when no title was supplied.
        if args.title is None:
            title = os.path.basename(filePath)
        else:
            title = args.title
        song = Song(title, args.artist, filePath, libPath, True)
        db = Database("localhost", "postgres", "postgres", "Ivan@1995")
        db.save_to_database([song])
class PivNetUploader:
    """Uploads product files from a folder to Ops Manager via its products API."""

    def __init__(self):
        # Local product db used to validate file names before upload.
        self.database = Database()

    def upload_files(self, config, folder_path, force=False):
        """Collect plain files under *folder_path*; upload only when the local
        db recognizes every one of them, or when *force* is set.  Otherwise
        return an explanatory error string."""
        file_names = []
        print(type(folder_path))  # NOTE(review): debug leftover — prints the type, not the path
        files = os.listdir(folder_path)
        for file in files:
            if os.path.isfile(os.path.join(folder_path, file)):
                file_names.append(file)
        files_from_db = self.database.check_file_exists(file_names)
        # Equal counts means every listed file was found in the db.
        if len(file_names) == len(files_from_db) or force:
            self.upload(config, folder_path)
        else:
            return "Found files in [pathname] that are not in product db. Either remove unknown files, or use --force to upload every file in [pathname] whether its in the db or not."

    def upload(self, config, dest_path):
        """POST every file in *dest_path* to each configured Ops Manager."""
        # One curl handle is reused across all targets and files.
        c = pycurl.Curl()
        print(config)
        for opsman in config["opsmanager"]:
            # for i in opsman:
            if "username" in opsman and "password" in opsman:
                username = opsman["username"]
                password = opsman["password"]
                c.setopt(pycurl.USERPWD, "%s:%s" % (username, password))
            if "url" in opsman:
                c.setopt(c.URL, "https://" + opsman["url"] + "/api/products")
                c.setopt(pycurl.VERBOSE, 0)
                # NOTE(review): TLS peer/host verification is disabled here.
                c.setopt(c.SSL_VERIFYPEER, 0)
                c.setopt(c.SSL_VERIFYHOST, 0)
                c.setopt(c.NOPROGRESS, 0)
                c.setopt(c.PROGRESSFUNCTION, self.progress)
                files = os.listdir(dest_path)
                for filename in files:
                    full_path = os.path.join(dest_path, filename)
                    # Multipart form upload field expected by Ops Manager.
                    c.setopt(c.HTTPPOST, [("product[file]", (c.FORM_FILE, full_path))])
                    c.perform()

    def progress(self, download_t, download_d, upload_t, upload_d):
        """pycurl progress callback: print percentage once total size is known."""
        if upload_t > 0:
            print(" Uploaded so far {per}%".format(per=int(upload_d * 100 / upload_t)))
def __init__(self):
    """Open the local product database handle."""
    self.database = Database()
def __init__(self):
    """Configure the Pivotal Network base URL, JSON headers, and local db."""
    self.url = "https://network.pivotal.io"
    self.headers = {"content-type": "application/json"}
    self.database = Database()
class PivNetUpdater:
    """Rebuilds the local Pivotal Network db (products, releases, files).

    Fixes over the original: a duplicated ``self.database.commit()`` at the
    end of ``update_db``, an unused ``reader = codecs.getreader("utf-8")``
    local, a redundant ``len(api_key) <= 0`` check after ``not api_key``,
    and a typo'd user-facing message ("confi.toml" -> "conf.toml").
    """

    def __init__(self):
        self.url = "https://network.pivotal.io"
        self.headers = {"content-type": "application/json"}
        self.database = Database()

    def update_db(self):
        """Wipe the local tables and re-populate them from the PivNet API.

        Returns an error string when no usable conf.toml/api key is found;
        otherwise commits the refreshed data and returns None.
        """
        self.database.clear_all_tables()
        conf = "conf.toml"
        if not os.path.exists(conf):
            return "no valid conf.toml with key"
        with open(conf) as conffile:
            config = toml.loads(conffile.read())
        # NOTE(review): api_key is validated but never used by the requests
        # below (self.headers carries no Authorization) — confirm intent.
        api_key = config.get("api_key")
        if not api_key:
            return "no valid conf.toml with key"
        products = self.getProducts()
        for product in products:
            product_id = product.get("id")
            slug = product.get("slug")
            pname = product.get("name")
            p = Product(id=product_id, slug=slug, name=pname)
            self.database.session.add(p)
            print("Found %s" % slug)
            releases = self.getReleases(slug)
            if releases:
                for release in releases:
                    rid = release.get("id")
                    version = release.get("version")
                    r = Release(id=rid, version=version, product_slug=p.slug)
                    self.database.session.add(r)
                    print("Found %s,%s" % (slug, version))
                    files = self.getProductFiles(product_id, rid)
                    if files:
                        for file in files:
                            links = file.get("_links")
                            url = links.get("download").get("href")
                            # The file name is the last path segment of the S3 key.
                            name = file.get("aws_object_key").split("/")[-1]
                            f = ProductFile(id=file.get("id"), release_id=r.id,
                                            filename=name, download_url=url)
                            self.database.session.add(f)
                            print("Found %s,%s,%s" % (slug, version, name))
        # A single commit persists the whole refresh.
        self.database.commit()
        print("Local Pivotal Network db has been updated.")

    def getProducts(self):
        """Fetch the full product list from the PivNet API."""
        url = self.url + "/api/v2/products/"
        r = requests.get(url, headers=self.headers, proxies=proxies)
        data = json.loads(r.content.decode("utf-8"))
        return data.get("products")

    def getReleases(self, slug):
        """Fetch all releases for the product identified by *slug*."""
        url = self.url + "/api/v2/products/" + slug + "/releases"
        r = requests.get(url, headers=self.headers, proxies=proxies)
        data = json.loads(r.content.decode("utf-8"))
        return data.get("releases")

    def getProductFiles(self, product_id, release_id):
        """Fetch the downloadable files for one product release."""
        url = self.url + "/api/v2/products/" + str(product_id) + "/releases/" + str(release_id) + "/product_files"
        r = requests.get(url, headers=self.headers, proxies=proxies)
        data = json.loads(r.content.decode("utf-8"))
        return data.get("product_files")
def __init__(self):
    """Wire up the backend's collaborators: db, query builder, controller."""
    self.database = Database()
    self.gera_query = Gera_query()
    self.controller = Controller()
class Backend():
    """Application backend: token management, user/vehicle CRUD, and queries.

    All results are stored in ``self.r`` as ``{'Message': ..., 'Status': ...}``
    and usually also returned.

    NOTE(review): queries throughout are built with f-string interpolation —
    SQL-injectable on any user-controlled field; passwords are hashed with MD5
    and salts come from `random`, not `secrets`. Flagging, not changing.
    """

    def __init__(self):
        # Project-level collaborators (declared elsewhere in the project).
        self.database = Database()
        self.gera_query = Gera_query()
        self.controller = Controller()

    def gera_token(self, data):
        """Generate and persist an auth token for the user described in *data*.

        Token layout: user-type digit followed by md5(username + user_id).
        Returns self.r ({'Message': token} on success, 404 payload on error).
        """
        try:
            user_type = data['UserType']
            username = data['Username']
            user_id = self.database.return_user_id(username)
            token = str(user_type)
            token += self.crypto(username, str(user_id))
            query = f"INSERT INTO `tokens` (`user`,`token`) VALUES ({user_id}, '{token}');"
            # database=2 selects the secondary (token/connection) database.
            self.database.commit_without_return(query, database=2)
            self.r = {'Message': token}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}
        return self.r

    def search_token(self, user_id):
        """Return the stored token for *user_id*; raise when none exists."""
        try:
            query = f'select token from tokens where user = {user_id}'
            token = self.database.commit_with_return(query)[0][0]
        except:
            # Any failure (no row, db error) is reported as "no token".
            raise Exception('User ID não possui token')
        return token

    def confirm_token(self, data):
        """Validate the presented token against a freshly recomputed one."""
        try:
            token = data['Token']
            username = data['Username']
            user_type = data['UserType']
            user_id = self.database.return_user_id(username)
            v_token = str(user_type)
            # NOTE(review): crypto() receives user_id here but str(user_id)
            # in gera_token — confirm both paths hash the same value.
            v_token += self.crypto(username, user_id)
            query = f'select user from tokens where token = {token}'
            user_token = self.database.commit_with_return(query, database=2)[0][0]
            if v_token == token and user_token == user_id:
                self.r = {'Message': 'OK', 'Status': 200}
            else:
                raise Exception('Invalid credencials')
        except Exception as e:
            self.r = {'Message': {'error': str(e)}, 'Status': 401}
        return self.r

    def active_token(self, token, user_id):
        """Mark the connection row for *token* as active (insert when missing)."""
        try:
            query = f'select id from tokens where token = {token} and user = {user_id}'
            token_id = self.database.commit_with_return(query, database=2)[0]
            if len(token_id) > 0:
                query = self.gera_query.alterar_dados_da_tabela(
                    'connections', ['active'], [1],
                    where=True,
                    valor_where=token_id[0],
                    coluna_verificacao='id')
                self.database.commit_without_return(query, database=2)
            else:
                # NOTE(review): token_id[0] on an empty sequence would raise
                # IndexError here — this insert branch looks unreachable.
                query = self.gera_query.inserir_na_tabela(
                    'connections', ['token'], [token_id[0]])
                self.database.commit_without_return(query, database=2)
            self.r = {
                'Message': {
                    'token': token,
                    'token_id': token_id[0]
                },
                'Status': 200
            }
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}

    def deactive_token(self, token):
        """Mark the connection row for *token* as inactive (insert when missing)."""
        try:
            query = f'select id from tokens where token = {token}'
            token_id = self.database.commit_with_return(query, database=2)[0]
            if len(token_id) > 0:
                query = self.gera_query.alterar_dados_da_tabela(
                    'connections', ['active'], [0],
                    where=True,
                    valor_where=token_id[0],
                    coluna_verificacao='id')
                self.database.commit_without_return(query, database=2)
            else:
                # NOTE(review): same unreachable-branch concern as active_token.
                query = self.gera_query.inserir_na_tabela(
                    'connections', ['token', 'active'], [token_id[0], 0])
                self.database.commit_without_return(query, database=2)
            self.r = {'Message': 'OK', 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}

    def gera_qrcode(self, license_plate):
        """Render a QR-code PNG for *license_plate* and return its path.

        On failure (e.g. missing folder) it creates qrCodes/ and retries once
        via recursion.
        """
        qr_code = pyqrcode.create(license_plate)
        path = f'qrCodes/{license_plate}.png'
        try:
            qr_code.png(path, scale=10)
        except Exception as e:
            print(e)
            system('mkdir qrCodes')
            self.gera_qrcode(license_plate)
        return path

    def gera_salt(self):
        """Return a 7-char salt: 3 letters, 1 digit, 3 letters.

        NOTE(review): the loop recomputes the salt 100 times and only the
        final value is used; `random` (not `secrets`) is used for material
        that feeds password hashing.
        """
        letras = string.ascii_uppercase
        for x in range(0, 100):
            salt = ''.join(random.choice(letras) for _ in range(3))
            salt += str(randint(0, 9))
            salt += ''.join(random.choice(letras) for _ in range(3))
        return salt

    def compare_password(self, username, password):
        """True when the stored password equals the (already hashed) input."""
        passw = self.database.return_password(username, password)
        if passw == password:
            return True
        else:
            return False

    def crypto(self, password, salt):
        """Return the hex md5 digest of password + salt.

        NOTE(review): MD5 is not suitable for password hashing — flagging only.
        """
        password = hashlib.md5(str(password + salt).encode())
        password = password.hexdigest()
        return password

    def novo_usuario(self, data):
        """Create a user row (salted + hashed password) and issue its token."""
        try:
            username = data['Username']
            password = data['Password']
            first_name = data['FirstName']
            second_name = data['SecondName']
            user_type = data['UserType']
            salt = self.gera_salt()
            password = self.crypto(password, salt)
            columns = self.database.return_columns('usuarios')
            columns.pop('id')  # id is auto-generated by the table
            dados = [
                username, first_name, second_name, '', password, salt,
                user_type
            ]
            query = self.gera_query.inserir_na_tabela('usuarios', columns,
                                                      dados, string=True)
            self.database.commit_without_return(query)
            # NOTE(review): gera_token returns the self.r dict, not the bare
            # token string — 'Token' below carries that whole dict.
            token = self.gera_token(data)
            self.r = {
                'Message': {
                    'Username': username,
                    'FirstName': first_name,
                    'SecondName': second_name,
                    'UserType': user_type,
                    'Token': token
                },
                'Status': 200
            }
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 401}
        return self.r

    def novo_veiculo(self, data):
        """Register a vehicle for the user and generate its QR code."""
        try:
            username = data['Username']
            user_type = data['UserType']
            license_plate = data['LicensePlate']
            user_id = self.database.return_user_id(username)
            columns = self.database.return_columns('carros')
            codigo_qr = self.gera_qrcode(license_plate)
            dados = ['Null', f'"{license_plate}"', f'"{codigo_qr}"', user_id]
            query = self.gera_query.inserir_na_tabela('carros', columns, dados)
            self.database.commit_without_return(query)
            self.r = {'Message': 'OK', 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 400}
        return self.r

    def nova_limpeza(self, data):
        """Record a cleaning for a vehicle on the given date."""
        try:
            username = data['Username']
            user_type = data['UserType']
            license_plate = data['LicensePlate']
            date = data['Date']
            car_id = self.database.return_car_id(license_plate)
            columns = self.database.return_columns('limpezas')
            values = ['Null', car_id, date]
            query = self.gera_query.inserir_na_tabela('limpezas',
                                                      list(columns.keys()),
                                                      values)
            self.database.commit_without_return(query)
            self.r = {'Message': 'OK', 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 400}
        return self.r

    def grava_envio_notificao(self, data):
        """Mark a notification as sent (enviada = TRUE)."""
        try:
            username = data['Username']
            user_type = data['UserType']
            notificacao_id = data['NotificationId']
            query = self.gera_query.alterar_dados_da_tabela(
                'notificacoes', ['enviada'], ['TRUE'],
                where=True,
                valor_where=notificacao_id,
                coluna_verificacao='id')
            self.database.commit_without_return(query)
            self.r = {'Message': 'OK', 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 400}
        return self.r

    def recusa_notificacao(self, data):
        """Record that the user refused a notification."""
        try:
            username = data['Username']
            user_type = data['UserType']
            license_plate = data['LicensePlate']
            date = data['Date']
            notificacao_id = data["NotificationId"]
            # NOTE(review): columns are read from 'notificacoes_recusas' but
            # the insert targets 'notificacoes_recusadas' — confirm table names.
            columns = self.database.return_columns('notificacoes_recusas')
            car_id = self.database.return_car_id(license_plate)
            values = ['Null', notificacao_id, 1, car_id, date]
            query = self.gera_query.inserir_na_tabela('notificacoes_recusadas',
                                                      columns, values)
            self.database.commit_without_return(query)
            self.r = {'Message': 'OK', 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 400}
        return self.r

    def nova_avaliacao(self, data):
        """Store a satisfaction rating + comment for a vehicle."""
        try:
            username = data['Username']
            user_type = data['UserType']
            license_plate = data['LicensePlate']
            rating = data['Rating']
            comment = data['Comment']
            car_id = self.database.return_car_id(license_plate)
            # NOTE(review): columns read from 'carros_satisfactions' but the
            # insert targets 'carros_satisfacao' — confirm table names.
            columns = self.database.return_columns('carros_satisfactions')
            values = ['Null', car_id, rating, comment]
            query = self.gera_query.inserir_na_tabela('carros_satisfacao',
                                                      columns, values)
            self.database.commit_without_return(query)
            self.r = {'Message': 'OK', 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 400}
        return self.r

    def solicitar_limpeza(self, data):
        """Create a cleaning-request notification for a vehicle."""
        try:
            username = data['Username']
            user_type = data['UserType']
            license_plate = data['LicensePlate']
            username = data['Username']  # NOTE(review): duplicate read of Username
            user_id = self.database.return_user_id(username)
            car_id = self.database.return_car_id(license_plate)
            query = self.gera_query.inserir_na_tabela(
                'notificacoes', ['carro', 'tipo', 'usuario'],
                [car_id, 0, username])
            self.database.commit_without_return(query)
            self.r = {'Message': 'OK', 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 400}
        return self.r

    def autenticar_usuario(self, data):
        """Authenticate a user; on success return profile data and the token."""
        try:
            username = data['Username']
            password = data['Password']
            user_id = self.database.return_user_id(username)
            # NOTE(review): user_type is fetched before the user_id check —
            # a missing user may fail here instead of hitting the raise below.
            user_type = self.database.return_user_type(user_id)
            if not user_id:
                raise Exception('Usuário não encontrado no banco de dados')
            token = self.search_token(user_id)
            if len(token) == 0:
                raise Exception('User ID não possui token')
            salt = self.database.return_salt(username)
            password = self.crypto(password, salt)
            response = self.compare_password(username, password)
            if response:
                query = f'select first_name, second_name, photo_path from usuarios where id = {user_id}'
                first_name, second_name, photo_path = self.database.commit_with_return(
                    query)[0]
                self.active_token(token, user_id)
                self.r = {
                    'Message': {
                        'Username': username,
                        'UserType': user_type,
                        'FirstName': first_name,
                        'SecondName': second_name,
                        'PhotoPath': photo_path,
                        'Token': token
                    },
                    'Status': 200
                }
            else:
                self.r = {
                    'Message': {
                        'Error': 'incorrect username or password'
                    },
                    'Status': 404
                }
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}
        return self.r

    def realizar_logoff(self, data):
        """Deactivate the session token.

        NOTE(review): the except branch reports Status 200 and nothing is
        returned on either path — confirm intent.
        """
        try:
            username = data['Username']
            user_type = data['UserType']
            token = data['Token']
            self.deactive_token(token)
            self.r = {'Message': 'OK'}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 200}

    def buscar_notificacoes(self, data):
        """Return the pending notifications for the requesting user."""
        try:
            username = data['Username']
            user_type = data['UserType']
            user_id = self.database.return_user_id(username)
            notifications = self.database.return_notifications(user_id)
            self.r = {'Message': notifications, 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}
        return self.r

    def buscar_limpezas_veiculo(self, data):
        """List every cleaning recorded for a vehicle, keyed 'Limpeza<id>'."""
        try:
            username = data['Username']
            user_type = data['UserType']
            license_plate = data['LicensePlate']
            car_id = self.database.return_car_id(license_plate)
            query = f'select * from limpezas where carro = {car_id}'
            response = self.database.commit_with_return(query)
            limpezas = {}
            for limpeza in response:
                limpezas[f'Limpeza{limpeza[0]}'] = {
                    'ID': limpeza[0],
                    'CarId': limpeza[1],
                    'LicensePlate': license_plate,
                    'DateTime': limpeza[2]
                }
            self.r = {'Message': limpezas, 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}
        return self.r

    def buscar_resumo_veiculo(self, data):
        """Return the aggregated summary for one vehicle."""
        try:
            username = data['Username']
            user_type = data['UserType']
            license_plate = data['LicensePlate']
            car_id = self.database.return_car_id(license_plate)
            informacoes = self.database.return_resume_of_vehicle(car_id)
            self.r = {'Message': informacoes, 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}
        return self.r

    def buscar_ultima_limpeza_veiculo(self, data):
        """Return the id and date of the most recent cleaning of a vehicle."""
        try:
            username = data['Username']
            user_type = data['UserType']
            license_plate = data['LicensePlate']
            car_id = self.database.return_car_id(license_plate)
            query = f'select limpezas.id, limpezas.data from limpezas where carro={car_id} order by limpezas.id desc limit 1'
            response = self.database.commit_with_return(query)[0]
            ultima_limpeza = {
                'LimpezaId': response[0],
                'LimpezaData': response[1]
            }
            self.r = {'Message': ultima_limpeza, 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}
        return self.r

    def buscar_limpeza(self, data):
        """Return the date of one cleaning identified by LimpezaId."""
        try:
            username = data['Username']
            user_type = data['UserType']
            limpeza_id = data['LimpezaId']
            query = f'select data from limpezas where id = {limpeza_id}'
            limpeza = self.database.commit_with_return(query)[0][0]
            self.r = {'Message': limpeza, 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}
        return self.r

    def buscar_resumo_usuario(self, data):
        """Return the profile summary (type + names) for the requesting user."""
        try:
            username = data['Username']
            user_type = data['UserType']
            user_id = self.database.return_user_id(username)
            query = 'select type, first_name, second_name from usuarios where id = ' + str(
                user_id)
            user_type, first_name, second_name = self.database.commit_with_return(
                query)[0]
            usuario = {
                'Username': username,
                'UserId': user_id,
                'UserType': user_type,
                'FirstName': first_name,
                'SecondName': second_name
            }
            self.r = {'Message': usuario, 'Status': 200}
        except Exception as e:
            self.r = {'Message': {'Error': str(e)}, 'Status': 404}
        return self.r
def manage_args(args):
    """Dispatch the parsed CLI arguments to the matching database action."""
    db = Database()
    db.start()
    db.create_tables()
    if args.new_job is not None:
        # Create a job; the second positional field is a numeric rate.
        print(args.new_job)
        args.new_job[1] = int(args.new_job[1])
        db.create_new_job(args.new_job)
    elif args.list:
        db.list_jobs()
    elif args.job is not None and (args.period is None and not args.earnings):
        # Track a work session for the given job, saving only on confirmation.
        today = date.today()
        time = start_stopwatch()
        session = Session(args.job, today, time)
        if want_to_save():
            db.store_session(session)
    elif args.earnings or args.period is not None:
        db.get_stats(args.job, args.earnings, args.period)
    else:
        print("Something went wrong somewhere")
    db.close()
log_to_file = False #https://realpython.com/python-logging/ if log_to_file: logging.basicConfig(filename='app.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s') print("The logs are logged in app.log") else: logging.basicConfig(level=1, format='%(name)s - %(levelname)s - %(message)s') logging.info('This will get logged to terminal') #Global Variables SUCCESS_STATUS = 200 GITHUB_URI = "https://jobs.github.com/positions.json?" PAGES = 6 DB = Database() SESSION = DB.get_session() def get_connection(url: str, method: str, **kargs) -> bytes: """ This method creates an Http client and makes a call based on the method specified (**kargs) -> lets the user of the method pass a args to the method for more on this method go through this documentation https://urllib3.readthedocs.io/en/latest/user-guide.html :param url: uri resource :param method: method to call on resource GET | POST etc :param kargs: any other configurations for the connection :return Response: Response to the call """ http = urllib3.PoolManager(cert_reqs='CERT_REQUIRED',ca_certs=certifi.where())
class PivNetDownloader:
    """Downloads PivNet product files listed in a space-separated manifest."""

    def __init__(self, api_key):
        self.token = api_key
        self.secure_url = end_point
        self.headers = {"content-type": "application/json"}
        # Token-bearing headers for authenticated API calls.
        self.secure_headers = {
            "content-type": "application/json",
            "Accept": "application/json",
            "Authorization": "Token token=" + self.token,
        }
        self.database = Database()

    def download_files(self, file_name, download_path):
        """For each manifest line (name, release version, file) accept the
        EULA and download the file into *download_path*.

        NOTE(review): the manifest handle is never closed, and the
        *file_name* parameter is rebound per line — confirm intent.
        """
        file = open(file_name)
        for line in file.readlines():
            # Split on single spaces, keeping only tokens longer than 1 char
            # (tolerates runs of padding spaces between columns).
            items = []
            for x in line.split(" "):
                if len(x) > 1:
                    items.append(x)
            name = items[0]
            release_version = items[1]
            file_name = items[2].strip()
            product_id, slug = self.database.get_product_details(name)
            data = self.database.get_release_id(slug, release_version)
            print(data)
            if data:
                release_id = data[0]
                file_data = self.database.get_file_id(release_id, file_name)
                if file_data:
                    file_id = file_data[0]
                    # EULA must be accepted before the download is permitted.
                    self.acceptEULA(product_id, release_id)
                    self.downloadFile(product_id, release_id, file_id, file_name, download_path)

    def acceptEULA(self, product_id, release_id):
        """POST the EULA acceptance required before a release can be downloaded."""
        url = (
            self.secure_url
            + "/api/v2/products/"
            + str(product_id)
            + "/releases/"
            + str(release_id)
            + "/eula_acceptance"
        )
        r = requests.post(url, headers=self.secure_headers, proxies=proxies)
        return r

    def downloadFile(self, product_id, release_id, file_id, file_name, download_path):
        """Stream one product file into download_path/file_name; return file_name."""
        if not os.path.exists("product_files"):
            os.makedirs("product_files")
        url = (
            self.secure_url
            + "/api/v2/products/"
            + str(product_id)
            + "/releases/"
            + str(release_id)
            + "/product_files/"
            + str(file_id)
            + "/download"
        )
        # Streamed response so large files are written chunk by chunk.
        r = requests.post(url, headers=self.secure_headers, stream=True, proxies=proxies)
        print("Going to download %s from %s" % (file_name, url))
        full_path = os.path.join(download_path, file_name)
        with open(full_path, "wb") as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    f.flush()
        return file_name

    def unzipper(self, file_name):
        """Extract a .pivotal zip under product_files/<name-without-suffix>."""
        path = "product_files"
        subdir = file_name[:-8]  # remove '.pivotal'
        if not os.path.exists("product_files/" + subdir):
            os.makedirs("product_files/" + subdir)
        with zipfile.ZipFile("product_files/" + file_name, "r") as z:
            z.extractall("product_files/" + subdir)
        return subdir
class PivNetUpdater:
    """Refreshes the local PivNet db with products, releases, and file details,
    using authenticated (token) API calls with per-request retries."""

    def __init__(self, api_key):
        self.token = api_key
        self.secure_url = end_point
        # Token-bearing headers for every API call made by this class.
        self.secure_headers = {
            'content-type': 'application/json',
            'Accept': 'application/json',
            'Authorization': 'Token token=' + self.token
        }
        self.database = Database()

    def update_db(self):
        """Clear the local tables, then walk products -> releases -> files,
        committing each row as it is discovered."""
        self.database.clear_all_tables()
        products = self.getProducts()
        for product in products:
            product_id = product.get('id')
            slug = product.get('slug')
            pname = product.get('name')
            # Follow-up URLs are embedded in the product's HAL _links.
            p_file_groups = product.get('_links').get('file_groups').get(
                'href')
            p_product_files = product.get('_links').get('product_files').get(
                'href')
            p = Product(id=product_id, slug=slug, name=pname,
                        file_groups_url=p_file_groups,
                        product_files_url=p_product_files)
            self.database.session.add(p)
            self.database.commit()
            releases = self.getReleases(slug)
            if releases:
                for release in releases:
                    rid = release.get('id')
                    version = release.get('version')
                    r_file_groups = release.get('_links').get(
                        'file_groups').get('href')
                    r_product_files = release.get('_links').get(
                        'product_files').get('href')
                    r = Release(id=rid, version=version, product_slug=p.slug,
                                file_groups_url=r_file_groups,
                                product_files_url=r_product_files)
                    self.database.session.add(r)
                    self.database.commit()
                    # Files can appear both inside file groups and directly
                    # on the release; both sources are ingested.
                    if r_file_groups:
                        groups = self.getFileGroups(r_file_groups)
                        if groups:
                            for group in groups:
                                self.addFiles(group.get('product_files'), product_id, rid)
                    self.addFiles(self.getProductFiles(r_product_files), product_id, rid)
        print("Local Pivotal Network db has been updated.")

    def addFiles(self, files, product_id, rid):
        """Insert a detail row per product file; duplicates are rolled back."""
        if files:
            for file in files:
                try:
                    file_id = file.get('id')
                    # Per-file detail fetch: download URL, S3 key, md5, date.
                    file_detail = self.getProductFile(product_id, rid, file_id)
                    url = file_detail.get('_links').get('download').get('href')
                    name = file_detail.get('aws_object_key').split('/')[-1]
                    md5 = file_detail.get('md5').lower()
                    released_at = file_detail.get('released_at')
                    f = ProductFile(id=file_id, release_id=rid, filename=name,
                                    download_url=url, md5=md5,
                                    release_date=released_at)
                    self.database.session.add(f)
                    self.database.commit()
                except exc.IntegrityError:
                    # Duplicate primary key: discard and continue.
                    self.database.session.rollback()
                    print('Duplicate: %s' % (file_detail))
                except:
                    # NOTE(review): bare except also hides programming errors.
                    print('addFile (%s, %s, %s) exception: %s' %
                          (product_id, rid, file_id, sys.exc_info()[0]))

    def getProducts(self):
        """GET the product list; retried up to 3 times on request errors."""
        url = self.secure_url + "/api/v2/products/"
        for i in range(0, 3):
            try:
                r = requests.get(url, headers=self.secure_headers, proxies=proxies)
                data = json.loads(r.content.decode('utf-8'))
                products = data.get('products')
                return products
            except requests.exceptions.RequestException as e:
                print('getProducts (i=%s) %s e=%s' % (i, url, e))
        print('getProducts giving up after %s tries' % (i))

    def getReleases(self, slug):
        """GET all releases for *slug*; retried up to 3 times."""
        url = self.secure_url + "/api/v2/products/" + slug + "/releases"
        for i in range(0, 3):
            try:
                r = requests.get(url, headers=self.secure_headers, proxies=proxies)
                data = json.loads(r.content.decode('utf-8'))
                releases = data.get('releases')
                return releases
            except requests.exceptions.RequestException as e:
                print('getReleases (i=%s) %s e=%s' % (i, url, e))
        print('getReleases giving up after %s tries' % (i))

    def getFileGroups(self, url):
        """GET the file groups at *url*; retried up to 3 times."""
        for i in range(0, 3):
            try:
                r = requests.get(url, headers=self.secure_headers, proxies=proxies)
                data = json.loads(r.content.decode('utf-8'))
                file_groups = data.get('file_groups')
                return file_groups
            except requests.exceptions.RequestException as e:
                print('getFileGroups (i=%s) %s e=%s' % (i, url, e))
        print('getFileGroups giving up after %s tries' % (i))

    def getProductFiles(self, url):
        """GET the product files at *url*; retried up to 3 times."""
        for i in range(0, 3):
            try:
                r = requests.get(url, headers=self.secure_headers, proxies=proxies)
                data = json.loads(r.content.decode('utf-8'))
                product_files = data.get('product_files')
                return product_files
            except requests.exceptions.RequestException as e:
                print('getProductFiles (i=%s) %s e=%s' % (i, url, e))
        print('getProductFiles giving up after %s tries' % (i))

    def getProductFile(self, product_id, release_id, file_id):
        """GET one product file's detail record; retried up to 3 times."""
        url = self.secure_url + '/api/v2/products/' + str(product_id) \
            + '/releases/' + str(release_id) + '/product_files/' \
            + str(file_id)
        for i in range(0, 3):
            try:
                r = requests.get(url, headers=self.secure_headers, proxies=proxies)
                data = json.loads(r.content.decode('utf-8'))
                product_file = data.get('product_file')
                return product_file
            except requests.exceptions.RequestException as e:
                print('getProductFile (i=%s) %s e=%s' % (i, url, e))
        print('getProductFile giving up after %s tries' % (i))
class PivNetUploader:
    """Uploads .pivotal tiles from a folder to every configured Ops Manager."""

    def __init__(self):
        # Local product db used to recognize known file names.
        self.database = Database()

    def upload_files(self, config, folder_path, force=False):
        """Filter the folder to known .pivotal files, then upload that set."""
        file_names = []
        print('folder path: %s, force=%s' % (folder_path, force))
        files = os.listdir(folder_path)
        for file in files:
            # if not file.endswith(('.pivotal', '.tgz')):
            if not file.endswith(('.pivotal')):
                print(
                    'Skipping %s - only tiles (.pivotal) and stemcells (.tgz) are uploaded.' %
                    (file))
            elif not os.path.isfile(os.path.join(folder_path, file)):
                print('Skipping %s - is not a file.' % (file))
            elif not self.database.check_file_exists(file) and not force:
                # Unknown to the product db and no --force: skip it.
                print(
                    'Skipping %s - is not in product db. Either remove unknown file, or use --force to upload every file in %s whether its in the db or not.' %
                    (file, folder_path))
            else:
                file_names.append(file)
        self.upload(config, folder_path, file_names)

    def upload(self, config, dest_path, file_names):
        """POST each file to /api/v0/available_products with a bearer token."""
        for opsman in config["opsmanager"]:
            if not "access_token" in opsman:
                print('No Ops Manager access_token')
                return
            elif not "url" in opsman:
                print('No Ops Manager URL')
                return
            else:
                access_token = opsman["access_token"]
                url = opsman["url"]
                for filename in file_names:
                    full_path = os.path.join(dest_path, filename)
                    try:
                        # Fresh handle per file, always closed in finally.
                        # NOTE(review): TLS verification is disabled below.
                        c = pycurl.Curl()
                        c.setopt(
                            c.URL,
                            "https://" + opsman["url"] + "/api/v0/available_products")
                        c.setopt(pycurl.HTTPHEADER,
                                 ['Authorization: bearer %s' % (access_token)])
                        c.setopt(pycurl.VERBOSE, 0)
                        c.setopt(c.SSL_VERIFYPEER, 0)
                        c.setopt(c.SSL_VERIFYHOST, 0)
                        c.setopt(c.NOPROGRESS, 0)
                        c.setopt(c.HTTPPOST, [
                            ('product[file]', (
                                c.FORM_FILE,
                                full_path,
                            )),
                        ])
                        print('Uploading %s' % (full_path))
                        result = c.perform()
                    finally:
                        c.close()
class PivNetDownloader:
    """Downloads PivNet files listed in a CSV manifest, verifying MD5 sums."""

    def __init__(self, api_key):
        self.token = api_key
        self.secure_url = end_point
        self.headers = {'content-type': 'application/json'}
        # Token-bearing headers for authenticated API calls.
        self.secure_headers = {
            'content-type': 'application/json',
            'Accept': 'application/json',
            'Authorization': 'Token token=' + self.token
        }
        self.database = Database()

    def download_files(self, file_name, download_path):
        """Each CSV row is (product name, release version, file name).

        Looks up product/release/file details in the local db, accepts the
        EULA, and downloads with up to 3 attempts until the MD5 of the
        downloaded bytes matches the db record.
        """
        with open(file_name) as infile:
            reader = csv.reader(infile, dialect='excel')
            for row in reader:
                name = row[0].strip()
                release_version = row[1].strip()
                file_name = row[2].strip()
                data = self.database.get_product_details(name)
                if data:
                    product_id = data[0]
                    slug = data[1]
                else:
                    print('Could not get product details for %s' % (name))
                    continue
                data = self.database.get_release_id(slug, release_version)
                if data:
                    release_id = data[0]
                    file_data = self.database.get_file_details(
                        release_id, file_name)
                    if file_data:
                        file_id = file_data[0]
                        file_name = file_data[2]
                        url = file_data[3]
                        md5 = file_data[4]
                        # EULA must be accepted before downloading.
                        self.acceptEULA(product_id, release_id)
                        md5_download = ""
                        i = 0
                        # Re-download until checksums agree, at most 3 tries.
                        while md5 != md5_download:
                            md5_download = self.downloadFile(
                                url, file_name, download_path)
                            i += 1
                            if i > 2:
                                break
                        if md5 != md5_download:
                            print(
                                'MD5 PivNet does not match download (%s != %s)' %
                                (md5, md5_download))
                    else:
                        print(
                            'Could not get release ID for release ID=%s, file name=%s' %
                            (release_id, file_name))
                else:
                    print(
                        'Could not get file details for slug=%s, version=%s' %
                        (slug, release_version))

    def acceptEULA(self, product_id, release_id):
        """POST the EULA acceptance required before a release can be downloaded."""
        url = self.secure_url + "/api/v2/products/" + \
            str(product_id) + "/releases/" + str(release_id) + "/eula_acceptance"
        r = requests.post(url,
                          headers=self.secure_headers,
                          proxies=proxies)
        return r

    def downloadFile(self, url, file_name, download_path):
        """Stream *url* to download_path/file_name, hashing as it writes.

        Returns the lowercase hex MD5 of the bytes written, which the caller
        compares against the db's recorded checksum.
        """
        sig = hashlib.md5()
        r = requests.post(url, headers=self.secure_headers, stream=True, proxies=proxies)
        print("Downloading %s from %s" % (file_name, url))
        full_path = os.path.join(download_path, file_name)
        with open(full_path, 'wb') as f:
            for chunk in r.iter_content(chunk_size=8192):
                if chunk:  # filter out keep-alive new chunks
                    f.write(chunk)
                    sig.update(chunk)
                    f.flush()
        return sig.hexdigest().lower()

    def unzipper(self, file_name):
        """Extract a .pivotal zip under product_files/<name-without-suffix>."""
        path = "product_files"
        subdir = file_name[:-8]  # remove '.pivotal'
        if not os.path.exists("product_files/" + subdir):
            os.makedirs("product_files/" + subdir)
        with zipfile.ZipFile("product_files/" + file_name, "r") as z:
            z.extractall("product_files/" + subdir)
        return subdir
# Make the sibling data_gathering package importable before importing from it.
parentdir = os.path.dirname(currentdir)
sys.path.append(parentdir + "/data_gathering")
from database_manager import Database

# Toggle between file-based and terminal logging.
log_to_file = False
if log_to_file:
    logging.basicConfig(filename='app.log', filemode='w', format='%(name)s - %(levelname)s - %(message)s')
    print("The logs are logged in app.log")
else:
    # level=1 is below DEBUG, so every record is emitted to the terminal.
    logging.basicConfig(level=1, format='%(name)s - %(levelname)s - %(message)s')
    logging.info('This will get logged to terminal')

# Shared database handle and Flask-RESTful application objects.
DB = Database()
app = Flask(__name__)
api = Api(app)


class Job(Resource):
    # Placeholder GET handler for a single job resource.
    def get(self, id):
        return {'new': 'job'}


class JobList(Resource):
    def __init__(self):
        # Request parser for the job fields accepted by this resource.
        self.parser = reqparse.RequestParser()
        self.parser.add_argument("title")
        self.parser.add_argument("company")
    # NOTE(review): JobList may continue beyond this chunk boundary.