Example #1
def sharepoint_queries(self, handle_id):
    print ''
    inputs = {}
    inputs['START_DATE'] = dsz.ui.GetString('Enter the start date of your search (YYYY-MM-DD):')
    inputs['END_DATE'] = dsz.ui.GetString('Enter the end date of your search (YYYY-MM-DD):')
    base_query_dir = os.path.join(os.path.dirname(__file__), '..', '..', 'Data', 'database_plans', 'SQL Server')
    file_list_query = os.path.join(base_query_dir, 'Sharepoint File List.sql')
    # Pull the list of files that fall inside the requested date range.
    (status, list_id) = sql_utils.run_query_from_file(handle_id, file_list_query, echo=True, mapping=inputs)
    if not status:
        print ''
        dsz.ui.Echo("Couldn't get the current file list, try reconnecting.", dsz.ERROR)
        return None
    header = sql_xml_parser.header_from_id(list_id)
    data = [row for row in sql_xml_parser.data_from_id(list_id)]
    print_data_with_rownums(data, header)
    to_get = prompt_for_items('Would you like to pull any of the above files?')
    if not to_get:
        return None
    # The content query is a string.Template; its placeholders are filled per row below.
    input_handle = open(os.path.join(base_query_dir, 'Sharepoint Content Query.sql'), 'rb')
    content_query = Template(input_handle.read().strip())
    input_handle.close()
    output_dir = os.path.join(dsz.lp.GetLogsDirectory(), 'GetFiles', 'Sharepoint_Decrypted')
    for number in to_get:
        # Row numbers shown to the operator are 1-based; the data list is 0-based.
        row_data = dict(zip(header, data[int(number) - 1]))
        query = content_query.safe_substitute(row_data)
        (status, content_id) = sql_utils.run_query(handle_id, query, echo=True, max_col_size=self.max_col_size)
        if not status:
            print ''
            dsz.ui.Echo('Error getting file #%s\n' % number, dsz.ERROR)
            continue
        # Save the returned blob (column 0) under <command id>-<original file name>.
        output_path = os.path.join(output_dir, '%s-%s' % (content_id, row_data['LeafName']))
        sql_xml_parser.save_blob_from_file(content_id, output_path, column_index=0)
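The per-row substitution above is ordinary string.Template behavior: each selected row is zipped with the column headers into a dict and dropped into $-style placeholders. A minimal standalone sketch of that step, with a made-up template and column names (the real placeholders live in 'Sharepoint Content Query.sql'):

from string import Template

# Hypothetical header and row, standing in for one entry of the file list.
header = ['Id', 'LeafName', 'DirName']
row = ['42', 'budget.xlsx', 'Shared Documents']
row_data = dict(zip(header, row))

# Assumed $-style placeholder; the real query text comes from the .sql file.
content_query = Template('SELECT Content FROM AllDocStreams WHERE Id = $Id')

# safe_substitute leaves unknown placeholders untouched instead of raising KeyError.
query = content_query.safe_substitute(row_data)
# query == 'SELECT Content FROM AllDocStreams WHERE Id = 42'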
Example #2
def write_csv_output(command_id, query_folder, file_prefix):
    output_dir = os.path.join(dsz.lp.GetLogsDirectory(), 'GetFiles', 'sql_decrypted', os.path.basename(query_folder))
    file_name = ('%05d-%s.csv' % (command_id, file_prefix))
    output_file = os.path.join(output_dir, file_name)
    header = sql_xml_parser.header_from_id(command_id)
    data = sql_xml_parser.data_from_id(command_id)
    sql_xml_parser.write(data, header, output_file)
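write_csv_output builds its target path from the DSZ logs directory, the last component of the query folder, and a zero-padded command id. A quick standalone check of just the path and filename formatting, with a stand-in for dsz.lp.GetLogsDirectory() since that call only exists inside the dsz environment:

import os

logs_dir = '/tmp/logs'               # stand-in for dsz.lp.GetLogsDirectory()
command_id = 7                       # id returned by sql_utils.run_query*
query_folder = 'plans/TopTenSurvey'  # hypothetical query folder
file_prefix = 'Users'                # hypothetical file prefix (e.g. a table name)

output_dir = os.path.join(logs_dir, 'GetFiles', 'sql_decrypted',
                          os.path.basename(query_folder))
file_name = '%05d-%s.csv' % (command_id, file_prefix)
output_file = os.path.join(output_dir, file_name)
# e.g. '/tmp/logs/GetFiles/sql_decrypted/TopTenSurvey/00007-Users.csv'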
Example #3
def top_ten_query(self, handle_id, table_query_file=None, top_ten_query_template=None):
    print ''
    dsz.ui.Echo('Running query to find all user tables...', dsz.GOOD)
    # Prompt until the operator supplies a query file that actually exists.
    while not table_query_file:
        prompt = 'Please provide a query file that will pull the list of tables'
        table_query_file = dsz.ui.GetString(prompt)
        if not os.path.exists(table_query_file):
            dsz.ui.Echo('%s does not exist... try again.' % table_query_file, dsz.ERROR)
            table_query_file = None
    (status, command_id) = sql_utils.run_query_from_file(handle_id, table_query_file, echo=False)
    if not status:
        print ''
        dsz.ui.Echo("Couldn't get the table list, try reconnecting.", dsz.ERROR)
        return None
    header = sql_xml_parser.header_from_id(command_id)
    data = [row for row in sql_xml_parser.data_from_id(command_id)]
    if not data:
        print ''
        dsz.ui.Echo("Couldn't read the XML list of tables. There may be an error in sql_xml_parser. Go find a script dev!", dsz.ERROR)
        return None
    print ''
    if dsz.ui.Prompt('Found %s tables, would you like to see the names?' % len(data)):
        print ''
        pprint(data, header)
    print ''
    if not dsz.ui.Prompt('Would you like to pull the first 10 rows of each table?'):
        return None
    print ''
    for row in data:
        table_name = row[0]
        # Fall back to a plain "top 10" query when no template is supplied.
        if not top_ten_query_template:
            top_ten_query_text = 'select top 10 * from %s' % table_name
        else:
            top_ten_query_text = top_ten_query_template % table_name
        (status, command_id) = sql_utils.run_query(handle_id, top_ten_query_text, echo=True, max_col_size=self.max_col_size)
        dsz.ui.Echo('ID: %s Status: %s' % (command_id, status))
        if not status:
            if not dsz.ui.Prompt('Looks like a query failed, would you like to continue?'):
                return None
        # Optionally write each table's sample rows out to CSV.
        if self.csv_output and status:
            sql_utils.write_csv_output(command_id, 'TopTenSurvey', table_name)
    return None
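The per-table query text is plain %-formatting: a default 'select top 10 * from <table>' unless the caller passed a template containing a single %s slot. A minimal sketch of that branch as a standalone helper (the helper name and the table names are made up):

def build_top_ten_query(table_name, top_ten_query_template=None):
    # Same branch as the loop above: default query unless a template was given.
    if not top_ten_query_template:
        return 'select top 10 * from %s' % table_name
    return top_ten_query_template % table_name

queries = [build_top_ten_query(t) for t in ['dbo.Users', 'dbo.Orders']]
# -> ['select top 10 * from dbo.Users', 'select top 10 * from dbo.Orders']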