def run_unconfirmed_data_queries(self, query_name, file_dim, table_dim, years):
    # Filter for unconfirmed-data profiles: the profile wizard range, minus the
    # listed affinity IDs.
    filter = 'profile\\profilewizardid{irange,3,0,5,5} & !(affinityid\\12883618{match,1,0} | ' \
             'affinityid\\12883619{match,1,0} | affinityid\\12883620{match,1,0} | ' \
             'affinityid\\12883621{match,1,0} | affinityid\\12883625{match,1,0} | ' \
             'affinityid\\12883626{match,1,0} | affinityid\\12883627{match,1,0} | ' \
             'affinityid\\12883628{match,1,0} | affinityid\\12883629{match,1,0} | ' \
             'affinityid\\12883630{match,1,0} | affinityid\\12883631{match,1,0} | ' \
             'affinityid\\12883632{match,1,0} | affinityid\\12883633{match,1,0} | ' \
             'affinityid\\12883634{match,1,0} | affinityid\\12883636{match,1,0} | ' \
             'affinityid\\12883638{match,1,0} | affinityid\\12883639{match,1,0} | ' \
             'affinityid\\12883640{match,1,0}){999,d,0,0}'
    datage = dates.DateTimeFormats()
    end_date = str(datage.find_first_of_current_month().strftime("%Y-%m-%d"))
    start_date = str(datage.find_first_of_month_x_years_ago(years).strftime("%Y-%m-%d"))
    dash_reports = rq.AnonReports(self.in_files, self.out_files)
    dash_reports.post_analysis_tool_count_table_report(
        "psbuilder02a", query_name, start_date, end_date, filter,
        first_dimension_list=file_dim, second_dimension_list=table_dim)
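# Sketch only: the query runners in this module all derive their date window the
# same way -- first of the current month back to the first of the same month N
# years earlier. dates.DateTimeFormats is assumed to wrap logic equivalent to this
# stdlib-only version; the helper name below is hypothetical, not the project's API.
import datetime

def _month_window(years_back):
    """Return (start_date, end_date) strings spanning whole months, years_back years."""
    end = datetime.date.today().replace(day=1)          # first of the current month
    start = end.replace(year=end.year - years_back)     # first of that month, N years ago
    return start.strftime("%Y-%m-%d"), end.strftime("%Y-%m-%d")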
def run_iqr_queries(self, query_name, file_dim, table_dim, years):
    """
    :param query_name: Title of the file you're going to create
    :param file_dim: The file dimension you want to build
    :param table_dim: The table dimension you want to build
    :param years: The number of years back you want the query to start
    """
    # Backslashes must be escaped so \C and \U are not treated as string escapes.
    main_filter = 'Location\\Country\\United States{match,1,0} & ' \
                  'survey\\survey\\PayScale Main{match,1,0}'
    datage = dates.DateTimeFormats()
    end_date = str(datage.find_first_of_current_month().strftime("%Y-%m-%d"))
    start_date = str(datage.find_first_of_month_x_years_ago(years).strftime("%Y-%m-%d"))
    dash_reports = rq.AnonReports(self.in_files, self.out_files)
    dash_reports.post_analysis_tool_query_dim_list_create(
        'localhost', query_name, start_date, end_date, main_filter,
        "Combined TCC IQR Measure", 'Median', file_dim, table_dim)
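# Illustrative call only: assumes this method lives on the dashboard query-runner
# class whose in_files/out_files paths are already configured; the query title and
# dimension names below are made up for the example.
# runner.run_iqr_queries(
#     "US Pay IQR by Job Family",
#     file_dim="Job Family File Dimension",
#     table_dim="Years Experience Dimension",
#     years=3)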
def post_analysis_tool_query(self, index_server, in_file, start_date, end_date,
                             main_filter, report_measure, return_type,
                             first_dimension_file, second_dimension_file,
                             dimension=None, ranges="Fixed"):
    execute.AnonReports(self.temp_in_files, self.temp_out_files).\
        post_analysis_tool_query_pre_made_file_dims(
            index_server, in_file, start_date, end_date, main_filter,
            report_measure, return_type, first_dimension_file,
            second_dimension_file, dimension=dimension, ranges=ranges)
def run_model_data_queries(self):
    dash_reports = rq.AnonReports(self.in_files, self.out_files)
    dash_reports.post_analysis_tool_model_report(
        "localhost", "Ryan Data Dashboard Jobs with Model Canada", "CA")
    dash_reports.post_analysis_tool_model_report(
        "localhost", "Ryan Data Dashboard Jobs with Model US", "US")
def main_query_build(self):
    # Escape the backslashes so the filter path separators survive string parsing.
    main_filter = "(Profile\\ProfileType\\JobOffer{match,1,0} | " \
                  "Profile\\ProfileType\\CurrentJob{match,1,0}){999,d,0,0}"
    build.AnonReports(input_path, output_path)\
        .post_analysis_tool_query_dim_list_create(
            "localhost", "Kayla Question Activation Counts",
            self.start_date, self.end_date, main_filter,
            "Profile Age Measure", "Count",
            file_path + "All_Questions.csv", None,
            dimension="ONET 30 Dimension", string_builder_type="Question")
def run_analysis_tool(self):
    dates = di.DateTimeFormats()
    end_date = dates.find_first_of_current_month().strftime("%Y-%m-%d")
    start_date = dates.find_first_of_month_x_years_ago(2).strftime("%Y-%m-%d")
    query = rq.AnonReports(self.temp_in_files, self.temp_out_files)
    query.post_analysis_tool_query_dim_list_create(
        'localhost', 'Ryan Job Rollup Suggestor EAC', start_date, end_date,
        'Location\\Country\\United States{match,1,0}', 'EAC Measure', 'Medians',
        first_dimension_list=None,
        second_dimension_list=self.pull_jobs_no_rollups(),
        string_builder_type='Job')
def run_overall_table_queries(self, query_name, file_dim, table_dim, years, filter=""):
    """
    :param query_name: Title of the file you're going to create
    :param file_dim: The file dimension you want to build
    :param table_dim: The table dimension you want to build
    :param years: The number of years back you want the query to start
    :param filter: Optional query filter string; defaults to no filter
    """
    datage = dates.DateTimeFormats()
    end_date = str(datage.find_first_of_current_month().strftime("%Y-%m-%d"))
    start_date = str(datage.find_first_of_month_x_years_ago(years).strftime("%Y-%m-%d"))
    dash_reports = rq.AnonReports(self.in_files, self.out_files)
    dash_reports.post_analysis_tool_query_dim_list_create(
        'psstats03', query_name, start_date, end_date, filter,
        "Profile Age Measure", 'Count', file_dim, table_dim)
def fire_queries(self):
    anon = rq.AnonReports(const.anon_in, const.anon_out, self.update_string_progress)
    #alumni_files = os.listdir(const.alumni_analytics_in)
    for x in xrange(1, 11):
        self.update_string_progress.emit(
            "Starting %s... " % (self.query_base_name + 'Large' + str(x)))
        dimension_to_use = 'Custom'
        anon.post_analysis_tool_query_pre_made_file_dims(
            'localhost', self.query_base_name + 'Large ' + str(x),
            self.start_date, self.end_date, self.main_filter,
            report_measure, return_type,
            'Alumni Analytics File Dimension Majors',
            'Alumni Analytics Major Dimension %s' % x,
            dimension=dimension_to_use, ranges=ranges,
            custom_dimension_tuple=('school', 'educationallevelaffiliateschool'),
            min_per_row='0')
    #for i in xrange(1, 6):
    for i in xrange(2, 4):
        if isinstance(main_dimension[i], tuple):
            custom = main_dimension[i]
            dimension_to_use = 'Custom'
        else:
            custom = None
            dimension_to_use = main_dimension[i]
        self.update_string_progress.emit(
            "Starting %s... " % (self.query_base_name + str(i)))
        if i < 4:
            anon.post_analysis_tool_query_pre_made_file_dims(
                'localhost', self.query_base_name + str(i),
                self.start_date, self.end_date, self.main_filter,
                report_measure, return_type,
                file_dimension[i], table_dimension[i],
                dimension=dimension_to_use, ranges=ranges,
                custom_dimension_tuple=custom, min_per_row=min_per_row[i])
        else:
            sub_filter = 'ConfirmedOnly'
            if i == 4:
                report_setting = 'CountTableDefinition'
                sample_measures = [None, None, None, None, None]
            else:
                report_setting = 'SampleDefinition'
                sample_measures = ['1000', 'Top', 'false', 'ProfileID', 'false']
            anon.post_analysis_tool_count_table_report(
                'localhost', self.query_base_name + str(i),
                self.start_date, self.end_date, self.main_filter,
                first_dimension_file=file_dimension[i],
                second_dimension_file=table_dimension[i],
                custom_dimension_tuple=main_dimension[i],
                report_definition=report_setting,
                sub_filter=sub_filter,
                min_per_row=min_per_row[i],
                profile_count=sample_measures[0],
                sampling_method=sample_measures[1],
                group_by_first_dim=sample_measures[2],
                row_names=sample_measures[3],
                add_dim_definition=sample_measures[4])
class ProfileCounts:
    def __init__(self):
        self.job_offer = {}
        self.current_job = {}
        self.alumni = {}
        self.informational = {}


active_profiles = ProfileCounts()
all_profiles = ProfileCounts()

#-------------------------#
#Let's fire this thing off#
#-------------------------#
anon = rq.AnonReports(const.anon_in, const.anon_out)
anon.post_analysis_tool_query_pre_made_file_dims(
    'localhost', profiles_by_type, last_month_start, last_month_end,
    main_filter, report_measure, return_type,
    file_dimension, table_dimension,
    dimension=dimension, ranges=ranges)
anon.post_analysis_tool_query_pre_made_file_dims(