Example No. 1
 def test_line_delimited(self):
     with open('fixtures/line_delimited.json') as json_file:
         outline = make_outline(json_file, True, None)
         expected = {
             'map': [
                 ('message_Revised', 'message.Revised'),
                 ('message_original', 'message.original'),
                 ('source_author', 'source.author'),
             ]
         }
         self.assertEqual(outline, expected)
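The fixture file itself is not part of this excerpt. Judging from the expected map, fixtures/line_delimited.json holds one JSON object per line, all sharing the same nested keys. The sketch below reproduces the call outside the test class under that assumption; the sample values and the import path are invented for illustration.

import io
from gen_outline import make_outline  # assumed import path

# Hypothetical line-delimited input: each line is a standalone JSON object.
sample = '\n'.join([
    '{"message": {"Revised": "r1", "original": "o1"}, "source": {"author": "a1"}}',
    '{"message": {"Revised": "r2", "original": "o2"}, "source": {"author": "a2"}}',
])
# The True flag appears to select line-by-line parsing; None means no collection key.
outline = make_outline(io.StringIO(sample), True, None)
print(outline)
# {'map': [('message_Revised', 'message.Revised'),
#          ('message_original', 'message.original'),
#          ('source_author', 'source.author')]}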
Example No. 2
 def test_basic(self):
     with open('fixtures/data.json') as json_file:
         outline = make_outline(json_file, False, 'nodes')
         expected = {
             'collection': 'nodes',
             'map': [
                 ('message_Revised', 'message.Revised'),
                 ('message_original', 'message.original'),
                 ('source_author', 'source.author'),
             ]
         }
         self.assertEqual(outline, expected)
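Here the second argument is False and a collection key is given, so the whole file is presumably parsed as a single JSON document and the rows are read from its top-level "nodes" list, which is why the outline also records 'collection': 'nodes'. The fixture is not shown, but a fixtures/data.json shaped like the sketch below (values invented) would produce this result.

import io
from gen_outline import make_outline  # assumed import path

# Hypothetical whole-document input: the rows live under a top-level "nodes" key.
sample = '''{"nodes": [
    {"message": {"Revised": "r1", "original": "o1"}, "source": {"author": "a1"}},
    {"message": {"Revised": "r2", "original": "o2"}, "source": {"author": "a2"}}
]}'''
outline = make_outline(io.StringIO(sample), False, 'nodes')
print(outline['collection'])  # 'nodes'
print(outline['map'])         # same three (header, dotted path) pairs as above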
Example No. 3
 def test_deeply_nested(self):
     with open('fixtures/deeply_nested.json') as json_file:
         outline = make_outline(json_file, False, 'nodes')
         expected = {
             'collection': 'nodes',
             'map': [
                 ('one_0_two_0_three_0', 'one.0.two.0.three.0'),
                 ('one_0_two_0_three_1', 'one.0.two.0.three.1'),
                 ('one_0_two_0_three_2', 'one.0.two.0.three.2'),
                 ('one_0_two_1_three_0', 'one.0.two.1.three.0'),
                 ('one_0_two_1_three_1', 'one.0.two.1.three.1'),
                 ('one_0_two_1_three_2', 'one.0.two.1.three.2'),
             ]
         }
         self.assertEqual(outline, expected)
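The interesting part of this case is how nested lists are flattened: every list index becomes a segment of the dotted path, and the CSV header is the same path with the dots replaced by underscores. The fixture is not included here, but a node shaped like the sketch below (values invented) accounts for exactly the six columns in the expected map.

# Hypothetical shape of one element of the "nodes" collection in
# fixtures/deeply_nested.json.
node = {
    "one": [
        {"two": [
            {"three": ["a", "b", "c"]},
            {"three": ["d", "e", "f"]},
        ]},
    ],
}
# 'one.0.two.1.three.2' resolves to node["one"][0]["two"][1]["three"][2] == "f"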
Example No. 4
 def test_different_keys_per_row(self):
     "Outline should contain the union of the keys."
     with open('fixtures/different_keys_per_row.json') as json_file:
         outline = make_outline(json_file, False, 'nodes')
         expected = {
             'collection': 'nodes',
             'map': [
                 ('tags_0', 'tags.0'),
                 ('tags_1', 'tags.1'),
                 ('tags_2', 'tags.2'),
                 ('that', 'that'),
                 ('theother', 'theother'),
                 ('this', 'this'),
             ]
         }
         self.assertEqual(outline, expected)
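As the docstring says, the rows do not have to share a schema: the outline is built from the union of all keys seen across the collection, apparently sorted by header. The fixture is not shown, but two rows like the sketch below (values invented) would yield the six entries above, even though neither row contains every key.

# Hypothetical rows from fixtures/different_keys_per_row.json.
rows = [
    {"this": 1, "that": 2, "tags": ["a", "b", "c"]},
    {"theother": 3, "tags": ["a"]},
]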
# Modules referenced below (their imports are not shown in this excerpt):
import json
import os
import urllib.parse

import requests

import common
import gen_outline
import json2csv


def main():
    common.start()
    common.CONF['data_to_fetch'] = common.ask('Fetch Tweet Data or User Data? 1/Tweet 2/User',
                                answer=list, default='2', options=[1, 2])
    request_params = {}
    if common.CONF['data_to_fetch'] == '2':
        print("You requested User Data")
        common.CONF['query'] = common.ask('Search terms? ' +
                            'Found here: https://dev.twitter.com/rest/public/search',
                            answer=common.str_compat)
        request_params['q'] = common.CONF['query']
        url = 'https://api.twitter.com/1.1/users/search.json?'
    else:
        print("You requested Tweet Data")
        common.CONF['query'] = common.ask('Search terms? ' +
                            'Found here: https://dev.twitter.com/rest/public/search',
                            answer=common.str_compat)
        request_params['q'] = common.CONF['query']
        result_data_type = common.ask('Type of search results? 1/Popular 2/Recent 3/Mixed',
                               answer=list, default='1', options=[1, 2, 3])
        request_params['result_type'] = common.RESULT_MAP[result_data_type]
        location = common.ask('Location? Eg. 1600 Amphitheatre Parkway, Mountain View, CA',
                       answer=common.str_compat, default=" ")
        if location.strip():
            # Geocode the free-text location with the Google Maps Geocoding API.
            encode_location = urllib.parse.urlencode({'address': location})
            response_location = requests.get('https://maps.googleapis.com/maps/api/geocode/json?' +
                                             encode_location)
            try:
                location_json = response_location.json()
                location_data = location_json['results'][0]['geometry']['location']
                # Twitter's geocode parameter expects "latitude,longitude,radius".
                location_array = [str(location_data['lat']), str(location_data['lng'])]
                radius_mi = common.ask('Distance to search within in miles',
                                       answer=common.str_compat)
                location_array.append(radius_mi + 'mi')
                common.CONF['geocode'] = ",".join(location_array)
                request_params['geocode'] = common.CONF['geocode']
            except (KeyError, IndexError, ValueError):
                print('Unable to fetch lat and long for location')

        # date = common.ask('Include tweets before? eg. 2015-07-19', answer=dateObject, default=" ")
        # if date.strip():
        #     request_params['until'] = date
        url = 'https://api.twitter.com/1.1/search/tweets.json?'
    output_file_name = common.ask('Output file name',
                           answer=common.str_compat, default="output")
    print('Sending request to API...')
    json_search_data = get_json_data(url, request_params, common.CONF['consumer_key'],
                                     common.CONF['consumer_secret'],
                                     common.CONF['api_key'], common.CONF['api_secret'])
    if json_search_data['nodes']:
        print('API response received.')
        with open('json_dump.json', 'w') as outfile:
            json.dump(json_search_data, outfile)
        # make_outline expects an open file object (see the tests above).
        with open('json_dump.json') as f:
            outline = gen_outline.make_outline(f, False, 'nodes')
        print('Generating outline file..')
        outfile = 'outline.json'
        with open(outfile, 'w') as f:
            json.dump(outline, f, indent=2, sort_keys=True)
        print('Outline file generation done.')
        with open(outfile) as f:
            key_map = json.load(f)
        loader = json2csv.Json2Csv(key_map)
        outfile = output_file_name + '.csv'
        if os.path.isfile(outfile):
            os.remove(outfile)
        print('Writing to %s' % outfile)
        with open('json_dump.json') as f:
            loader.load(f)
        loader.write_csv(filename=outfile, make_strings=True)
        print('Output file generated.')
    else:
        print('Search yielded no results.')
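get_json_data is defined elsewhere in the script and is not part of this excerpt. From the call above it receives the endpoint URL, the query parameters and the four OAuth credentials, and it must return a dict whose results sit under a 'nodes' key. A minimal sketch, assuming OAuth1 signing via requests_oauthlib and assuming the function itself normalises Twitter's responses ('statuses' for tweet search, a bare list for user search) under 'nodes':

import requests
from requests_oauthlib import OAuth1  # assumed dependency


def get_json_data(url, params, consumer_key, consumer_secret, api_key, api_secret):
    """Sketch only: issue a signed GET against the Twitter REST API."""
    auth = OAuth1(consumer_key, consumer_secret, api_key, api_secret)
    response = requests.get(url, params=params, auth=auth)
    data = response.json()
    # Normalise both endpoints to the {'nodes': [...]} shape main() expects.
    if isinstance(data, list):  # users/search returns a bare list of user objects
        return {'nodes': data}
    return {'nodes': data.get('statuses', [])}  # search/tweets nests under 'statuses'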