def write_contents_to_file(contents):
    ch.find_and_create_dirs(os.path.dirname(cc.contents_path))
    print os.path.dirname(cc.contents_path)
    with open(cc.contents_path, 'w') as fp:
        for content in contents:
            fp.write(content)
            fp.write("\n")

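# A minimal usage sketch (an assumption, not part of the original module):
# `contents` can be any iterable of strings, e.g. docker names; each entry is
# written to cc.contents_path on its own line.
#
#     write_contents_to_file(["library/nginx", "library/redis"])
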
def execute_update_sqls_pulls_and_stars():
    conn = get_database_connection()
    cur = conn.cursor()
    sql_dir = ch.find_and_create_dirs(
        cc.incremental_update_sql_dir_for_stars_and_pulls)
    docker_names = ch.read_object_from_file(
        cc.available_docker_names_for_db_path_for_stars_and_pulls)
    try:
        for docker_name in docker_names:
            print docker_name
            sql_name = generate_update_sql_fname_for_stars_and_pulls(
                docker_name)
            update_sql_path = os.path.join(sql_dir, sql_name)
            if not os.path.exists(update_sql_path):
                logging.error(
                    "execute update sql pulls and stars file error, "
                    "update sql file does not exist: [%s]" % update_sql_path)
                continue
            docker_id = get_docker_id_database(docker_name)
            if docker_id is None:
                logging.error(
                    "execute update sql pulls and stars file error, "
                    "docker does not exist in database: [%s]" % docker_name)
                continue
            # each update file is committed on its own, so a failure only
            # rolls back the current docker's update
            try:
                execute_sql_with_path(update_sql_path, cur)
                conn.commit()
            except Exception as e:
                conn.rollback()
                logging.exception(e.message)
                print(e.message)
    finally:
        conn.close()
    return

def execute_insert_sqls():
    conn = get_database_connection()
    cur = conn.cursor()
    sql_dir = ch.find_and_create_dirs(cc.semi_tag_rec_sql_dir)
    docker_names = get_all_docker_names_database()
    try:
        for docker_name in docker_names:
            # print docker_name
            sql_name = generate_insert_sql_fname(docker_name)
            insert_sql_path = os.path.join(sql_dir, sql_name)
            if not os.path.exists(insert_sql_path):
                logging.error(
                    "execute insert sql file error, "
                    "insert sql file does not exist: [%s]" % insert_sql_path)
                continue
            docker_id = get_docker_id_database(docker_name)
            if docker_id is None:
                logging.error(
                    "execute insert sql file error, "
                    "docker does not exist in database: [%s]" % docker_name)
                continue
            try:
                execute_sql_with_path(insert_sql_path, cur)
                conn.commit()
            except Exception as e:
                conn.rollback()
                print(e.message)
    finally:
        conn.close()
    return

def generate_insert_sql():
    print "%s: start generate insert sql" % ch.get_time()
    sql_dir = ch.find_and_create_dirs(cc.incremental_insert_sql_dir)
    # docker_names = ch.get_new_docker_names()
    docker_names = ch.read_object_from_file(
        cc.available_new_docker_names_for_db_path)
    for docker_name in docker_names:
        sql_name = generate_insert_sql_fname(docker_name)
        insert_sql_path = os.path.join(sql_dir, sql_name)
        if os.path.exists(insert_sql_path):
            logging.warn("insert sql file exists: [%s]" % insert_sql_path)
            continue
        with open(insert_sql_path, 'w') as sql_file:
            sql_file.write("\nbegin;\n")
            sql_insert_docker = insert_docker_sql(docker_name)
            sql_insert_docker_versions = insert_docker_versions_sql(docker_name)
            sql_insert_tags = insert_docker_tags_sql(docker_name)
            sql = "%s\n%s\n%s" % (sql_insert_docker,
                                  sql_insert_docker_versions,
                                  sql_insert_tags)
            sql_file.write(sql)
            sql_file.write("commit;\n")
    print "%s: done generate insert sql" % ch.get_time()
    return None

def execute_sqls(sqls):
    conn = get_database_connection()
    cur = conn.cursor()
    temp_dir = "./.temp/"
    ch.find_and_create_dirs(temp_dir)
    for sql in sqls:
        # print("execute %s" % sql)
        try:
            # write each statement to its own temp file, then execute it
            temp_sql_path = os.path.join(temp_dir, "%s.sql" % id(sql))
            with open(temp_sql_path, "w") as sql_file:
                sql_file.write(sql)
            print("Write sql success")
            execute_sql_with_path(temp_sql_path, cur)
            conn.commit()
        except Exception as e:
            print("execute sql exception: %s" % sql)
            print("ExceptionMessage: %s" % e.message)
            conn.rollback()
    conn.close()
    import shutil
    shutil.rmtree(path=temp_dir)

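# A minimal usage sketch for execute_sqls (the statement below is a hypothetical
# example, not SQL produced elsewhere in this module); each string is written to
# a temp file and committed on its own, so one failing statement only rolls back
# itself:
#
#     execute_sqls([
#         "update docker set pulls = pulls + 1 where id = 1;",
#     ])
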
def generate_update_db_semi_tag_rec_sql():
    sql_dir = ch.find_and_create_dirs(cc.semi_tag_rec_sql_dir)
    # docker_names = ch.get_updated_docker_names()
    docker_names = get_all_docker_names_database()
    for docker_name in docker_names:
        sql_name = generate_insert_sql_fname(docker_name)
        semi_tag_sql_path = os.path.join(sql_dir, sql_name)
        if os.path.exists(semi_tag_sql_path):
            logging.warn("semi tag rec sql file exists: [%s]" %
                         semi_tag_sql_path)
            continue
        with open(semi_tag_sql_path, "w") as sql_file:
            sql_file.write("\nbegin;\n")
            # we never change the tags of dockers in the database
            # sql_update_tags = update_docker_tags_sql(docker_name)
            sql = insert_docker_tags_sql(docker_name)
            sql_file.write(sql)
            sql_file.write("commit;\n")
    print "%s: done generate update sql" % ch.get_time()
    return None

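# A sketch of the intended ordering (an assumption based on the shared directory
# cc.semi_tag_rec_sql_dir and the shared generate_insert_sql_fname naming):
# generate_update_db_semi_tag_rec_sql() writes one tag-insert .sql file per
# docker, and execute_insert_sqls() then reads the same directory and applies
# them.
#
#     generate_update_db_semi_tag_rec_sql()
#     execute_insert_sqls()
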
def generate_update_sqls_and_write_to_file_for_stars_and_pulls():
    print "%s: start generate update sql for stars and pulls" % ch.get_time()
    sql_dir = ch.find_and_create_dirs(
        cc.incremental_update_sql_dir_for_stars_and_pulls)
    # docker_names = ch.get_updated_docker_names()
    docker_names = ch.read_object_from_file(
        cc.available_docker_names_for_db_path_for_stars_and_pulls)
    for docker_name in docker_names:
        sql_name = generate_update_sql_fname_for_stars_and_pulls(docker_name)
        update_sql_path = os.path.join(sql_dir, sql_name)
        if os.path.exists(update_sql_path):
            logging.warn("update sql file exists: [%s]" % update_sql_path)
            continue
        with open(update_sql_path, "w") as sql_file:
            sql_file.write("\nbegin;\n")
            sql_update_docker = update_docker_sql_for_stars_and_pulls(docker_name)
            # we never change the tags of dockers in the database
            # sql_update_tags = update_docker_tags_sql(docker_name)
            sql_file.write(sql_update_docker)
            sql_file.write("commit;\n")
    print "%s: done generate update sql for stars and pulls" % ch.get_time()
    return None
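
# A sketch of the stars/pulls update flow (an assumption based on the shared
# directory cc.incremental_update_sql_dir_for_stars_and_pulls and the shared
# generate_update_sql_fname_for_stars_and_pulls naming helper): the generator
# above writes one update .sql file per docker, and
# execute_update_sqls_pulls_and_stars() then applies them.
#
#     generate_update_sqls_and_write_to_file_for_stars_and_pulls()
#     execute_update_sqls_pulls_and_stars()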