Example #1
 def __init__(self):
     '''Access tokens and credentials are imported from the config.py module.
     That file must be present in the project root for the script to function.'''
     self.SCOPES = ['https://www.googleapis.com/auth/analytics.readonly']
     self.KEY_FILE_LOCATION = './auth.json'
     self.VIEW_ID = config.view_id
     self.analytics = Analytics()
Example #2
def main():
    '''Main function of the script'''
    parser = argparse.ArgumentParser()
    parser.add_argument('--all', '-a', action='store_true')
    parser.add_argument('--interactive', '-i', action='store_true')

    args = parser.parse_args()

    logger = CliLogger()
    screen = Screen()

    analytics = Analytics(logger)
    resources = Resources()
    analytics.ignore = ANALYTICS_IGNORE
    resources.load(analytics)

    if args.interactive:
        handle = wait_league_window(logger, (0, 0, 1024, 768))
        screen.d3d.capture(target_fps=10, region=find_rect(handle))
        while True:
            if keyboard.is_pressed('x'):
                cv2.destroyAllWindows()
                screen.d3d.stop()
                break
            img = screen.d3d.get_latest_frame()
            if img is None:
                continue
            try:
                img_bgr = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
                objs = tick(logger, analytics, resources, img)
                draw_objects(img_bgr,
                             objs,
                             wait=False,
                             title='League Vision - Interactive')
                logger.log('Press and hold x to exit bot.')
            except NoCharacterInMinimap:
                pass
            logger.log('-' * 50)
            time.sleep(1)
        return

    if args.all:
        files = glob.glob('screenshots/*.png')
    else:
        files = glob.glob('screenshots/*.png')[:1]

    for file in files:
        img_bgr = cv2.imread(file)
        img = cv2.cvtColor(img_bgr, cv2.COLOR_BGR2RGB)
        objs = []  # avoid a NameError below if NoCharacterInMinimap is raised
        try:
            objs = tick(logger, analytics, resources, img)
            logger.log('Press x to exit.')
        except NoCharacterInMinimap:
            pass
        logger.log('-' * 50)
        if draw_objects(img_bgr, objs, title=f'League Vision - {file}') == 120:
            break
Example #3
    def find(self, df, mode, param):
        self.available_columns = available_columns = list(df.columns)
        original_variables = [col for col in df.columns if '_impact' in col]
        self.impact_variables = [
            col for col in original_variables if '_rescaled' not in col
        ]
        self.y_variable = param["y_variable"]
        self.y_variable_predict = param["y_variable_predict"]
        self.param = param
        self.instance_id = self.random_string_generator()
        self.create_dir("data_storage")
        self.create_dir("data_storage/user")
        self.user_id = None
        self.df = df

        self.analytics = Analytics()
        self.analytics['ip'] = self.analytics.finding_ip()
        self.analytics['mac'] = self.analytics.finding_address()
        self.analytics['instance_id'] = self.instance_id
        self.analytics['time'] = str(datetime.datetime.now())
        self.analytics['total_columns'] = len(self.available_columns)
        self.analytics['total_rows'] = len(self.df)
        self.analytics['os'] = self.analytics.finding_system()
        self.analytics['model_name'] = self.param["model_name"]
        self.analytics["function"] = 'explainx.ai'
        self.analytics["query"] = "all"
        self.analytics['finish_time'] = ''

        self.callback_input = [
            Input(f + '_slider', 'value') for f in self.param["columns"]
        ]
        self.callback_input.append(Input('submit-button-state', 'n_clicks'))

        # self.callback_input_prototype = [Input(f + '-slider', 'value') for f in self.param["columns"]]
        # self.callback_input_prototype.append(Input('btn-nclicks-1', 'n_clicks'))

        self.prototype_array = []
        for f in self.param["columns"]:
            self.prototype_array.append([f + '_slider', 'value'])
        self.prototype_array.append(['btn-nclicks-1', 'n_clicks'])
        try:
            user_id = pd.read_csv("data_storage/user/user_id.csv")
            user_id.drop_duplicates(['id'], keep='first', inplace=True)
            user_id = user_id['id'].iloc[0]
            self.user_id = user_id
        except Exception as e:
            # print("inside user track" )
            user_id_val = self.random_string_generator()
            user_id_csv = pd.DataFrame(data={"id": [user_id_val]})
            user_id_csv.to_csv("data_storage/user/user_id.csv", index=False)
            self.user_id = user_id_val

        self.analytics['user_id'] = self.user_id
        self.analytics.insert_data()
        self.insights = insights(self.param)
        d = self.dash(df, mode)
        return True
Example #4
def main():
    global start_of_event
    global last_window
    global last_event
    global html_update_time

    analytic = Analytics()

    print("""
---------------------------------------
TRACK YOUR TIME - DON'T WASTE IT!
---------------------------------------

  TIME           CATEGORY""")

    while True:
        mouse_idle = is_mouse_idle()
        keyboard_idle = is_keyboard_idle(0.01)

        current_window = get_window_name()
        idle = mouse_idle and keyboard_idle

        if idle:
            current_event = 'idle'
        else:
            current_event = current_window

        if current_event != last_event:
            if last_event == 'idle':
                category = 'idle'
            else:
                category = analytic.get_cat(last_window)

            duration = time.time() - start_of_event
            if duration > 2:

                save_data([time.time(), category, int(duration)])
                try:
                    if sys.version_info.major > 2:
                        mins = int(np.floor(duration / 60))
                        secs = int(np.floor(duration - mins * 60))
                        print("{0: 3}:{1:02} min\t".format(mins, secs),
                              "{}\t".format(category),
                              "({})".format(last_event[:30]))
                except UnicodeDecodeError:
                    print("{0: 5.0f} s\t".format(duration),
                          "UNICODE DECODE ERROR")

            last_window = current_window
            start_of_event = time.time()
            last_event = current_event

        if time.time() > html_update_time:
            analytic.create_html()
            html_update_time = time.time() + 60
Example #5
 def initialize(self):
     self._uuid = self._get_setting([
         ["plugins", "discovery", "upnpUuid"],
     ], ["public", "uuid"]) or self._generate_uuid()
     self._search_id = self._settings.get(["public", "search_id"
                                           ]) or self._generate_search_id()
     self._analytics = Analytics(self)
     self._url = self._settings.get(["url"])
     self._logger.info("FindMyMrBeam enabled: %s", self.is_enabled())
     self._analytics.log_enabled(self.is_enabled())
     self.update_frontend()
Example #6
def request_analytics_by_date(analytics_date):

    # redirect to the date_error page if the date format is invalid
    if not isValidDate(analytics_date):
        return render_template('date_error.html')

    # create Analytics object and populate with analytics data.
    data = Analytics(analytics_date)
    data.get_analytics()

    # return the output dictionary in json format to the browser
    return jsonify(data.output)
Example #7
 def test_collect_event(self):
     config = {
         Configuration.POLICIES: {
             Configuration.ANALYTICS_POLICY: ["mock_analytics_provider"]
         },
         "option": "value"
     }
     with temp_config(config) as config:
         work = self._work(title="title", with_license_pool=True)
         [lp] = work.license_pools
         Analytics.collect_event(self._db, lp, CirculationEvent.CHECKIN, None)
         mock = Analytics.instance().providers[0]
         eq_(1, mock.count)
Example #8
    def __init__(self, _db, collection):
        if collection.protocol != ExternalIntegration.ODILO:
            raise ValueError(
                "Collection protocol is %s, but passed into OdiloAPI!" %
                collection.protocol)

        self._db = _db
        self.analytics = Analytics(self._db)

        self.collection_id = collection.id
        self.token = None
        self.client_key = collection.external_integration.username
        self.client_secret = collection.external_integration.password
        self.library_api_base_url = collection.external_integration.setting(
            self.LIBRARY_API_BASE_URL).value

        if not self.client_key or not self.client_secret or not self.library_api_base_url:
            raise CannotLoadConfiguration("Odilo configuration is incomplete.")

        # Use utf8 instead of unicode encoding
        settings = [
            self.client_key, self.client_secret, self.library_api_base_url
        ]
        self.client_key, self.client_secret, self.library_api_base_url = (
            setting.encode('utf8') for setting in settings)

        # Get set up with up-to-date credentials from the API.
        self.check_creds()
        if not self.token:
            raise CannotLoadConfiguration(
                "Invalid credentials for %s, cannot intialize API %s" %
                (self.client_key, self.library_api_base_url))
Example #9
    def __init__(self, collection, api_class=OdiloAPI, **kwargs):
        """Constructor.

        :param collection: Provide bibliographic coverage to all
            Odilo books in the given Collection.
        :param api_class: Instantiate this class with the given Collection,
            rather than instantiating OdiloAPI.
        """
        super(OdiloBibliographicCoverageProvider,
              self).__init__(collection, **kwargs)
        if isinstance(api_class, OdiloAPI):
            # Use a previously instantiated OdiloAPI instance
            # rather than creating a new one.
            self.api = api_class
        else:
            # A web application should not use this option because it
            # will put a non-scoped session in the mix.
            _db = Session.object_session(collection)
            self.api = api_class(_db, collection)

        self.replacement_policy = ReplacementPolicy(
            identifiers=True,
            subjects=True,
            contributions=True,
            links=True,
            formats=True,
            rights=True,
            link_content=True,
            # even_if_not_apparently_updated=False,
            analytics=Analytics(self._db))
Example #10
class TestAnalytics(unittest.TestCase):
    ''' Test Class to test Class Analytics '''
    test_data = Analytics("2019-08-01")
    test_data.get_analytics()

    # test the customer count for "2019-08-01"
    def testCustomers(self):
        self.assertEqual(TestAnalytics.test_data.output['customers'], 9)

    # test the total_discount_amount for "2019-08-01"
    def test_total_discount_amount(self):
        self.assertAlmostEqual(
            TestAnalytics.test_data.output['total_discount_amount'],
            130429980.24)

    # test the total items sold for "2019-08-01"
    def test_items(self):
        self.assertEqual(TestAnalytics.test_data.output['items'], 2895)

    # The average order total for "2019-08-01"
    def test_order_total_average(self):
        self.assertAlmostEqual(
            TestAnalytics.test_data.output['order_total_average'], 15895179.73)

    # The average discount rate applied to the items sold for "2019-08-01"
    def test_discount_rate_avg(self):
        self.assertAlmostEqual(
            TestAnalytics.test_data.output['discount_rate_avg'], 0.13)

    # The total amount of commissions generated for "2019-08-01"
    def test_commissions_total(self):
        self.assertAlmostEqual(TestAnalytics.test_data.commissions['total'],
                               20833236.94)
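
A minimal runner sketch for the test class above (an addition, not part of the original snippet): placed in the same module as TestAnalytics, unittest discovers the test_* methods automatically.

import unittest

if __name__ == '__main__':
    # Hypothetical entry point: run the TestAnalytics suite above.
    unittest.main()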
Example #11
 def __init__(self, label, params={}, **kwargs):
     self.label = label
     self.setup = Setup()
     self.ana = Analytics()
     if 'analysis_type' in kwargs:
         self.analysis_type = kwargs['analysis_type']
     else:
         self.analysis_type = 'dynamical'
     # set default analysis and circuit parameter
     self._set_up_circuit(params, kwargs)
     # set parameter derived from analysis and circuit parameter
     new_vars = self.setup.get_params_for_analysis(self)
     new_vars['label'] = self.label
     self._set_class_variables(new_vars)
     # set variables which require calculation in analytics class
     self._calc_variables()
Example #12
class SuburbanCollection:
    def __init__(self):
        '''Access tokens and credentials are imported from the config.py module.
        That file must be present in the project root for the script to function.'''
        self.SCOPES = ['https://www.googleapis.com/auth/analytics.readonly']
        self.KEY_FILE_LOCATION = './auth.json'
        self.VIEW_ID = config.view_id
        self.analytics = Analytics()

    def date_strftime(self):
        now = datetime.datetime.now()
        now = now.strftime("%m-%d-%y")
        return now

    def todays_date(self):
        now = datetime.datetime.now()
        now = now.strftime("%m/%d/%Y")
        return now

    def yesterdays_date(self):
        yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
        yesterday = yesterday.strftime('%d+%B+%Y')
        return yesterday

    def daily_activity_report(self):
        self.DAILY_REPORT = str(self.analytics.getDailyReport())
        return self.DAILY_REPORT

    def daily_activity_report_output(self):
        self.file_name = 'SuburbanDigitalAdReport_' + self.date_strftime(
        ) + '.txt'
        output = open(self.file_name, 'w')
        output.write('Date: ' + str(self.todays_date()) + '\n' +
                     str(self.daily_activity_report()))
        output.close()

    def monthly_activity_report(self):
        self.MONTHLY_REPORT = str(self.analytics.getMonthlyReport())
        return self.MONTHLY_REPORT

    def monthly_activity_report_output(self):
        self.file_name = 'MonthlyDealerBudgetTemplate_' + self.date_strftime(
        ) + '.txt'
        output = open(self.file_name, 'w')
        output.write('Date: ' + str(self.todays_date()) + '\n' +
                     str(self.monthly_activity_report()))
        output.close()
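
A minimal usage sketch for the class above, assuming config.py (with view_id), auth.json and the Analytics helper are present as the docstring requires; the driver block itself is hypothetical.

if __name__ == '__main__':
    # Hypothetical driver: write today's daily and monthly report files.
    collection = SuburbanCollection()
    collection.daily_activity_report_output()
    collection.monthly_activity_report_output()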
Example #13
def collection_and_analysis(data, is_analytic):
    entrants = []
    for fio in data:
        entrant = EntrantApplications(fio)
        entrants.append(entrant)
    result = []
    for entrant in entrants:
        result.append(entrant.fname)
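        # The Russian literal below translates roughly to
        # 'Enrollment consent was submitted for the programme: '.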
        result.append('Согласие о зачислении подано на направление: ' +
                      entrant.priority[1])
        for direction in entrant.apps:
            result.append(direction[1])
        result.append('')
    if is_analytic:
        analysis = Analytics(entrants)
        result.extend(analysis.get_analytics())
    return result
Example #14
def main():
    '''Main function of the script'''
    logger = CliLogger()
    analytics = Analytics(logger)
    analytics.ignore = [
        'screenshot', 'get_minimap_coor', 'get_minimap_areas', 'get_objects'
    ]
    screen = Screen()
    hwnd = wait_league_window(logger, (0, 0, 1024, 768))
    time.sleep(1)

    logger.log('Press and hold x to exit bot.')
    while True:
        if keyboard.is_pressed('x'):
            break
        img = screen.screenshot(find_rect(hwnd))
        time.sleep(1)
Example #15
def main():
    '''Main function of the script'''
    paused = False

    logger = CliLogger()
    screen = Screen()
    resources = Resources()
    analytics = Analytics(logger)
    cooldown = Cooldown(COOLDOWNS)
    analytics.ignore = ANALYTICS_IGNORE
    resources.load(analytics)
    utility = Utility(logger, screen, resources, analytics, cooldown)
    logic = Logic(utility)
    try:
        handle = wait_league_window(logger, (0, 0, 1024, 768))
    except CantForgroundWindowError:
        pass
    logger.log('Press and hold x to exit bot.')
    screen.d3d.capture(target_fps=10, region=find_rect(handle))
    while True:
        try:
            if keyboard.is_pressed('x'):
                raise BotExitException
            if keyboard.is_pressed('ctrl+u'):
                paused = False
            if paused:
                time.sleep(0.1)
                continue
            if keyboard.is_pressed('ctrl+p'):
                paused = True
                logger.log(
                    'Bot paused. Press ctrl+u to unpause. Press x to exit.')
                continue
            logic.tick()
            time.sleep(random.randint(*TICK_INTERVAL) / 1000)
        except BotContinueException as exp:
            time.sleep(random.randint(*exp.tick_interval) / 1000)
        except NoCharacterInMinimap:
            time.sleep(1)
        except BotExitException:
            screen.d3d.stop()
            break
        except Exception:  # pylint:disable=broad-except
            traceback.print_exc()
            screen.d3d.stop()
            break
Example #16
def get_analytics():
    try:
        args = json.dumps(request.json)
        args = json.loads(args)
        arg = args[0]
        analytic_object = Analytics(es_instance)
        result = []
        if (arg == "category"):
            result = analytic_object.get_category_analytics()
        elif (arg == "entity"):
            result = analytic_object.get_entities_analytics()
        elif (arg == "reporter"):
            result = analytic_object.get_reporter_analytics()
        elif (arg == "avg_leads_cat"):
            result = analytic_object.get_avg_category_analytics()
        elif (arg == "avg_leads_ent"):
            result = analytic_object.get_avg_entity_analytics()
        else:
            result = ["Invalid choice"]
            logging.error("Invalid Choice")
        return jsonify({"success": True, "data": result})

    except Exception as e:
        logging.error("In fetching analytics" + str(e))
        return jsonify({"success": False})
Example #17
 def test_load_providers_from_config(self):
     config = {
         Configuration.POLICIES: {
             Configuration.ANALYTICS_POLICY: ["mock_analytics_provider"]
         },
         "option": "value"
     }
     providers = Analytics.load_providers_from_config(config)
     eq_("mock_analytics_provider", providers[0])
Example #18
def get_weekly_plot():
    """
    Send weekly plot file via this route
    :return: none
    """
    ana = Analytics.get_instance()
    ana.weekly_plot()
    filename = 'images/weekly.png'
    return send_file(filename, mimetype='image/png')
Example #19
    def run_abstract_service(self, service_name, persist=False):
        #print "run run_abstract_service(Events, persist) from run_events_service(self, persist=False)"
        i = 1
        service_instance = None
        while True:
            try:
                if isinstance(self.__connection,
                              ONVIFCamera) and self.__connect_state:
                    #print 'Run ' + str(service_name) + ' service'
                    support = self.loop_check_status_abstract_service(
                        'support', service_name)
                    init = self.loop_check_status_abstract_service(
                        'init', service_name)
                    #run = self.loop_check_status_abstract_service('run', service_name)
                    if self.__loop_mode:
                        if support and init:
                            #print 'change state run True ' + str(service_name) + ' service'
                            #print "Call loop_edit_status_abstract_service(run, Events) from run_abstract_service(Events, persist)"
                            self.loop_edit_status_abstract_service(
                                'run', service_name)
                    else:
                        print 'Start ' + str(service_name) + ' service'
                        # self.__services['Event'] = EventService(self.__connection, persist)
                        self.loop_edit_status_abstract_service(
                            'run', service_name)
                        if service_name == 'Analytics':
                            service_instance = Analytics(self, persist)
                        if service_name == 'Device':
                            service_instance = Device(self, persist)
                        if service_name == 'Events':
                            service_instance = Events(self, persist)
                        if service_name == 'Imaging':
                            service_instance = Imaging(self, persist)
                        if service_name == 'Media':
                            service_instance = Media(self, persist)
                        if service_name == 'PTZ':
                            service_instance = PTZ(self, 0, 0, 0, 20, persist,
                                                   None)
                        if service_instance is not None:
                            self.loop_edit_status_abstract_service(
                                'instance', service_name, service_instance)
                            break
                        else:
                            print 'service_instance of ' + str(
                                service_name) + ' is None'

                else:
                    if not isinstance(self.__connection,
                                      ONVIFCamera) or not self.__connect_state:
                        # print 'The connection has not yet been established or connection has been lost'
                        self.try_connect(i)
            except ONVIFError as e:
                self.__connect_state = False
                print 'Exception inside get_support_device: ' + str(e.reason)
                self.try_connect(i)
                continue
            i += 1
Example #20
def _rand_event(instant):
    component = choice([
        _rand_first_button, _rand_success_button, _rand_range,
        _rand_first_name, _rand_last_name
    ])()
    component['timestamp'] = time_ns()
    component['session'] = choice(list(_sessions))
    component['instant'] = instant
    return Analytics(component)
Example #21
    def test_initialize(self):
        # supports multiple analytics providers, site-wide or with libraries

        # Two site-wide integrations
        mock_integration, ignore = create(
            self._db,
            ExternalIntegration,
            goal=ExternalIntegration.ANALYTICS_GOAL,
            protocol="mock_analytics_provider")
        mock_integration.url = self._str
        local_integration, ignore = create(
            self._db,
            ExternalIntegration,
            goal=ExternalIntegration.ANALYTICS_GOAL,
            protocol="local_analytics_provider")

        # A broken integration
        missing_integration, ignore = create(
            self._db,
            ExternalIntegration,
            goal=ExternalIntegration.ANALYTICS_GOAL,
            protocol="missing_provider")

        # Two library-specific integrations
        l1, ignore = create(self._db, Library, short_name="L1")
        l2, ignore = create(self._db, Library, short_name="L2")

        library_integration1, ignore = create(
            self._db,
            ExternalIntegration,
            goal=ExternalIntegration.ANALYTICS_GOAL,
            protocol="mock_analytics_provider")
        library_integration1.libraries += [l1, l2]

        library_integration2, ignore = create(
            self._db,
            ExternalIntegration,
            goal=ExternalIntegration.ANALYTICS_GOAL,
            protocol="mock_analytics_provider")
        library_integration2.libraries += [l2]

        analytics = Analytics(self._db)
        eq_(2, len(analytics.sitewide_providers))
        assert isinstance(analytics.sitewide_providers[0],
                          MockAnalyticsProvider)
        eq_(mock_integration.url, analytics.sitewide_providers[0].url)
        assert isinstance(analytics.sitewide_providers[1],
                          LocalAnalyticsProvider)
        assert missing_integration.id in analytics.initialization_exceptions

        eq_(1, len(analytics.library_providers[l1.id]))
        assert isinstance(analytics.library_providers[l1.id][0],
                          MockAnalyticsProvider)

        eq_(2, len(analytics.library_providers[l2.id]))
        for provider in analytics.library_providers[l2.id]:
            assert isinstance(provider, MockAnalyticsProvider)
Example #22
def click():
    if 'id' not in session:
        session['id'] = uuid.uuid4()
    session_id = session['id']
    data = request.get_json()
    app.logger.info('Event received: %s', data)
    data['session'] = session_id.hex
    app.logger.info('Event enriched: %s', data)
    analytics.set(uuid.uuid4(), Analytics(data)).result()
    return Response(status=202)
Example #23
def load_population_from_file(archive_path, extract_path="."):
    """Load a population from a snapshot.

    Load a population from a snapshot,
    which should be stored in a .tar archive.
    
    archive_path - path to archive
    extract_path - path to directory where files will be extracted
    """
    popn_archive = tarfile.open(archive_path)

    clone_fname = mut_fname = param_fname = anlt_fname = None

    # get filenames for extraction
    # NB this may break if filenames in save_population_to_file() are changed
    members_to_extract = []
    for member in popn_archive.getmembers():
        if member.name.startswith('clones_'):
            clone_fname = "{}/{}".format(extract_path, member.name)
            members_to_extract.append(member)
        elif member.name.startswith('mutations_'):
            mut_fname = "{}/{}".format(extract_path, member.name)
            members_to_extract.append(member)
        elif member.name.startswith('params_'):
            param_fname = "{}/{}".format(extract_path, member.name)
            members_to_extract.append(member)
        elif member.name.startswith('anlt_'):
            anlt_fname = "{}/{}".format(extract_path, member.name)
            members_to_extract.append(member)

    fnames = [clone_fname, mut_fname, param_fname, anlt_fname]
    if not all(fnames):
        raise Exception("population archive is missing required files")

    # extract snapshot files
    popn_archive.extractall(path=extract_path, members=members_to_extract)
    popn_archive.close()

    # load parameters, mutations and clones
    t_curr, opt, popn_params = load_parameters_from_file(param_fname)
    analytics = Analytics.init_from_file(anlt_fname)
    all_muts, mutation_map = load_muts_from_file(opt, mut_fname)
    root_clone = load_clones_from_file(opt, mutation_map, clone_fname)

    # construct population from parameter set,
    # clone tree and mutation dictionary
    new_popn = Population.init_from_file(opt, analytics, popn_params,
                                         root_clone, all_muts)

    # now delete the individual snapshot files,
    # as we will always load popn from an archive
    for fname in fnames:
        delete_local_file(fname)

    return t_curr, opt, new_popn
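
A minimal usage sketch, assuming a snapshot archive previously written by the matching save_population_to_file() routine; the archive path below is hypothetical.

# Hypothetical call: restore a population snapshot into a fresh Population.
t_curr, opt, popn = load_population_from_file('snapshots/popn.tar',
                                              extract_path='/tmp')
print(t_curr)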
Example #24
def main():
    a = Analytics()
    a.start_timer()
    img = cv2.imread('lvision/assets/images/locations/basing.png')
    points = numpy.where(numpy.all(img == [0, 255, 0], axis=-1))
    points = list(zip(*points[::-1]))
    print(points)
    a.end_timer()
Example #25
    def test_collect_event(self):
        sitewide_integration, ignore = create(
            self._db,
            ExternalIntegration,
            goal=ExternalIntegration.ANALYTICS_GOAL,
            protocol="mock_analytics_provider")

        library, ignore = create(self._db, Library, short_name="library")
        library_integration, ignore = create(
            self._db,
            ExternalIntegration,
            goal=ExternalIntegration.ANALYTICS_GOAL,
            protocol="mock_analytics_provider",
        )
        library_integration.libraries += [library]

        work = self._work(title="title", with_license_pool=True)
        [lp] = work.license_pools
        analytics = Analytics(self._db)
        sitewide_provider = analytics.sitewide_providers[0]
        library_provider = analytics.library_providers[library.id][0]

        analytics.collect_event(self._default_library, lp,
                                CirculationEvent.DISTRIBUTOR_CHECKIN, None)

        # The sitewide provider was called.
        eq_(1, sitewide_provider.count)
        eq_(CirculationEvent.DISTRIBUTOR_CHECKIN, sitewide_provider.event_type)

        # The library provider wasn't called, since the event was for a different library.
        eq_(0, library_provider.count)

        analytics.collect_event(library, lp,
                                CirculationEvent.DISTRIBUTOR_CHECKIN, None)

        # Now both providers were called, since the event was for the library provider's library.
        eq_(2, sitewide_provider.count)
        eq_(1, library_provider.count)
        eq_(CirculationEvent.DISTRIBUTOR_CHECKIN, library_provider.event_type)

        # Here's an event that we couldn't associate with any
        # particular library.
        analytics.collect_event(None, lp,
                                CirculationEvent.DISTRIBUTOR_CHECKOUT, None)

        # It's counted as a sitewide event, but not as a library event.
        eq_(3, sitewide_provider.count)
        eq_(1, library_provider.count)
Example #26
    def loop_check_edit_instance_service(self, name_service):
        run = self.loop_check_status_abstract_service('run', name_service)
        instance = self.loop_check_status_abstract_service(
            'instance', name_service)
        #print 'NAME PTZ SERVICE BEFORE IF:' + str(name_service)
        #print 'NAME PTZ SERVICE RUN:' + str(run)
        #print 'NAME PTZ SERVICE INSTANCE:' + str(instance)

        #instance_service = None
        #instance_flag = False
        if run and not instance:
            if name_service == 'Analytics':
                instance_service = Analytics(self, True)
                self.loop_edit_status_abstract_service('instance',
                                                       name_service,
                                                       instance_service)
                instance_flag = True
            if name_service == 'Device':
                instance_service = Device(self, True)
                self.loop_edit_status_abstract_service('instance',
                                                       name_service,
                                                       instance_service)
                instance_flag = True
            if name_service == 'Events':
                instance_service = Events(self, True)
                self.loop_edit_status_abstract_service('instance',
                                                       name_service,
                                                       instance_service)
                instance_flag = True
            if name_service == 'Imaging':
                instance_service = Imaging(self, True)
                self.loop_edit_status_abstract_service('instance',
                                                       name_service,
                                                       instance_service)
                instance_flag = True
            if name_service == 'Media':
                instance_service = Media(self, True)
                self.loop_edit_status_abstract_service('instance',
                                                       name_service,
                                                       instance_service)
                instance_flag = True
            if name_service == 'PTZ':
                #print 'MY<>NAME<>PTZ'
                instance_service = PTZ(self, 0, 0, 0, 20, True, None)
                #print 'Vse NORM'
                self.loop_edit_status_abstract_service('instance',
                                                       name_service,
                                                       instance_service)
                instance_flag = True
Example #27
 def __init__(self, root):
     builder = pygubu.Builder()
     builder.add_from_file('ldesigner/gui.ui')
     builder.get_object('main_frame', root)
     builder.connect_callbacks(self)
     root.title('League of Legends Tile Designer')
     root.geometry('640x480+0+480')
     logger = CliLogger('%H:%M:%S')
     keyboard.add_hotkey('a', self.set_true)
     keyboard.add_hotkey('s', self.set_false)
     self.analytics = Analytics(logger)
     self.builder = Builder(builder)
     self.tiles = numpy.zeros((183, 183, 3), numpy.uint8)
     self.coor = None
     threading.Thread(target=self.monitor_league, daemon=True).start()
Example #28
 def __init__(self, label, params={}, **kwargs):
     self.label = label
     self.setup = Setup()
     self.ana = Analytics()
     if 'analysis_type' in kwargs:
         self.analysis_type = kwargs['analysis_type']
     else:
         self.analysis_type = 'dynamical'
     # set default analysis and circuit parameter
     self._set_up_circuit(params, kwargs)
     # set parameter derived from analysis and circuit parameter
     new_vars = self.setup.get_params_for_analysis(self)
     new_vars['label'] = self.label
     self._set_class_variables(new_vars)
     # set variables which require calculation in analytics class
     self._calc_variables()
Example #29
 def __init__(self, radius, name, time_step, n_bodies):
     self._radius = radius
     self._name = name
     self._body_list = []
     self._f_matrix = np.zeros(shape=(3, n_bodies))
     self.a_matrix = np.zeros(shape=(3, n_bodies))
     self.dt = time_step
     self._n_bodies = n_bodies
     self._analytics = Analytics()
     self._acceleration_analytics = Analytics()
     self._velocity_analytics = Analytics()
     self._t = 0
Example #30
    def __init__(self):
        self.connection = psql_connection()
        self.analytics = Analytics(token=MIXPANEL_TOKEN)
        self.payment_processor = Payment(token=STRIPE_TOKEN)

        try:
            cpus = cpu_count()
        except NotImplementedError:
            cpus = 2

        self.queue = Queue()
        self.processes = list()

        for i in xrange(cpus):
            print "Worker Process #{} Initialized".format(i)

            p = Process(target=work_queue, args=(self.queue,))
            p.start()
            self.processes.append(p)
Example #31
    def post(self, opt):
        params = tornado.escape.json_decode(self.request.body)
        logging.info('load data', opt, params)
        res = {}
        if opt == 'upload':
            AnalyticsHandler.data = Dataset(
                pd.DataFrame.from_dict(params['data']))
        else:
            AnalyticsHandler.data = Dataset(pd.read_csv(params['url']))

        if '$select' in params:
            AnalyticsHandler.data.select(**params['$select'])
        if '$preprocess' in params:
            AnalyticsHandler.data.preprocess(**params['$preprocess'])
        if '$transform' in params:
            AnalyticsHandler.data.transform(**params['$transform'])

        AnalyticsHandler.program = Analytics(AnalyticsHandler.data,
                                             AnalyticsHandler.models)
        res = AnalyticsHandler.program.metadata()
        self.write(res)
Example #32
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 15:25:35 2015

@author: 
"""
from pprint import pprint
from datetime import datetime
from analytics import Analytics
from execution_engine import ExecutionEngine
from analytics_utils import plot

a = Analytics()
e = ExecutionEngine()

dimensions = {'X': {'range': (147.968, 148.032)}, 'Y': {'range': (-36.032, -35.968)}, 'T': {'range': (1262304000.0, 1325375999.999999)}, }
arrays = a.createArray('LS5TM', ['B40', 'B30'], dimensions, 'get_data')
ndvi = a.applyBandMath(arrays, '((array1 - array2) / (array1 + array2))', 'ndvi')
arrays2 = a.createArray('LS5TM', ['B40', 'B30'], dimensions, 'get_data2')
ndvi2 = a.applyBandMath(arrays2, '((array1 - array2) / (array1 + array2))', 'ndvi2')
average = a.applyBandMath([ndvi, ndvi2], '((array1 + array2) / 2)', 'average')
result = e.executePlan(a.plan)

plot(e.cache['average'])


Example #33
def analytics():
    user_id = int(request.form['id'])
    analytics = Analytics()
    result = analytics.run(user_id)
    return jsonify(**result)
Example #34
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 15:11:59 2015

@author: woo409
"""

from pprint import pprint
from datetime import datetime
from analytics import Analytics
from execution_engine import ExecutionEngine
from analytics_utils import plot

a = Analytics()
e = ExecutionEngine()

dimensions = {'X': {'range': (147.968, 148.032)}, 'Y': {'range': (-36.032, -35.968)}, 'T': {'range': (1262304000.0, 1325375999.999999)}, }
arrays = a.createArray('LS5TM', ['B40'], dimensions, 'get_data')
pq_data = a.createArray('LS5TMPQ', ['PQ'], dimensions, 'pq_data')
mask = a.applyCloudMask(arrays, pq_data, 'mask')

result = e.executePlan(a.plan)

plot(e.cache['mask'])
Example #35
"""
Created on Tue Jul  7 08:56:10 2015

@author: woo409
"""
# In[2]:
from glue.core import Data, DataCollection
from glue.qt.glue_application import GlueApplication
from datetime import datetime, date, timedelta
from gdf import GDF
from gdf import dt2secs, secs2dt
from analytics import Analytics
from execution_engine import ExecutionEngine

# In[4]:

a = Analytics()
e = ExecutionEngine()
g = GDF()
g.debug = False


# In[5]:

start_date = dt2secs(date(year=2010,month=1,day=1))
end_date = dt2secs(date(year=2010, month=1, day=18))
data_request_descriptor = {'storage_type': 'LS5TM',
                               'variables': ('B40',),
                               'dimensions': {'X': {'range': (149.0699, 149.152)},
                                              'Y': {'range': (-35.3117, -35.2842)},
                                              #'T': {'range': (start_date, end_date),
                                              #      'array_range': (0, 4)
Example #36
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 15:27:20 2015

@author: 
"""

from pprint import pprint
from datetime import datetime
from analytics import Analytics
from execution_engine import ExecutionEngine
from analytics_utils import plot

a = Analytics()
e = ExecutionEngine()

dimensions = {'X': {'range': (147.968, 148.032)}, 'Y': {'range': (-36.032, -35.968)}, 'T': {'range': (1262304000.0, 1325375999.999999)}, }
arrays = a.createArray('LS5TM', ['B40', 'B30'], dimensions, 'get_data')
ndvi = a.applyBandMath(arrays, '((array1 - array2) / (array1 + array2))', 'ndvi')
arrays2 = a.createArray('LS5TM', ['B40', 'B30'], dimensions, 'get_data2')
ndvi2 = a.applyBandMath(arrays2, '((array1 - array2) / (array1 + array2))', 'ndvi2')
average = a.applyBandMath([ndvi, ndvi2], '((array1 + array2) / 2)', 'average')
pq_data = a.createArray('LS5TMPQ', ['PQ'], dimensions, 'pq_data')
mask = a.applyCloudMask(average, pq_data, 'mask')
result = e.executePlan(a.plan)

plot(e.cache['mask'])
Example #37
from analytics import Analytics
from venture.venturemagics.ip_parallel import *
v=mk_p_ripl()
v.assume('x','(beta 1 1)')
v.observe('(flip x)','true')
ana = Analytics(v)
h=ana.sampleFromJoint(10)
Example #38
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 15:03:49 2015

@author: 
"""

from pprint import pprint
from datetime import datetime
from analytics import Analytics
from execution_engine import ExecutionEngine
from analytics_utils import plot

a = Analytics()
e = ExecutionEngine()

dimensions = {'X': {'range': (147.0, 148.0)}, 'Y': {'range': (-37.0, -36.0)}, 'T': {'range': (1262304000.0, 1325375999.999999)}, }
arrays = a.createArray('LS5TM', ['B40',], dimensions, 'get_data')
#ndvi = a.applyBandMath(arrays, '((array1 - array2) / (array1 + array2))', 'ndvi')

e.executePlan(a.plan)

plot(e.cache['get_data'])

Example #39
class Circuit(object):
    """Provides functions to calculate the stationary and dynamical
    properties of a given circuit.

    Arguments:
    label: string specifying circuit, options: 'microcircuit'

    Keyword Arguments:
    params: dictionary specifying parameter of the circuit, default
            parameter given in params_circuit.py will be overwritten
    analysis_type: string specifying level of analysis that is requested
                   default: 'dynamical'
                   options:
                   - None: only circuit and default analysis parameter
                     are set
                   - 'stationary': circuit and default analysis parameter
                      are set, mean and variance of input to each
                      populations as well as firing rates are calculated
                   - 'dynamical': circuit and default analysis parameter
                      are set, mean and variance of input to each
                      populations as well as firing rates are calculated,
                      variables for calculation of spectra are calculated
                      including the transfer function for all populations
    fmin: minimal frequency in Hz, default: 0.1 Hz
    fmax: maximal frequency in Hz, default: 150 Hz
    df: frequency spacing in Hz, default: 1.0/(2*np.pi) Hz
    to_file: boolean specifying whether firing rates and transfer
             functions are written to file, default: True
    from_file: boolean specifying whether firing rates and transfer
               functions are read from file, default: True
               if set to True and file is not found firing rates and
               transfer function are calculated
    """
    def __init__(self, label, params={}, **kwargs):
        self.label = label
        self.setup = Setup()
        self.ana = Analytics()
        if 'analysis_type' in kwargs:
            self.analysis_type = kwargs['analysis_type']
        else:
            self.analysis_type = 'dynamical'
        # set default analysis and circuit parameter
        self._set_up_circuit(params, kwargs)
        # set parameter derived from analysis and circuit parameter
        new_vars = self.setup.get_params_for_analysis(self)
        new_vars['label'] = self.label
        self._set_class_variables(new_vars)
        # set variables which require calculation in analytics class
        self._calc_variables()

    # updates variables of Circuit() and Analysis() classes, new variables
    # are specified in the dictionary new_vars
    def _set_class_variables(self, new_vars):
        for key, value in new_vars.items():
            setattr(self, key, value)
        if 'params' in new_vars:
            for key, value in new_vars['params'].items():
                setattr(self, key, value)
        self.ana.update_variables(new_vars)

    # updates class variables of variables of Circuit() and Analysis()
    # such that default analysis and circuit parameters are known
    def _set_up_circuit(self, params, args):
        # set default analysis parameter
        new_vars = self.setup.get_default_params(args)
        self._set_class_variables(new_vars)
        # set circuit parameter
        new_vars = self.setup.get_circuit_params(self, params)
        self._set_class_variables(new_vars)

    # quantities required for stationary analysis are calculated
    def _set_up_for_stationary_analysis(self):
        new_vars = self.setup.get_working_point(self)
        self._set_class_variables(new_vars)

    # quantities required for dynamical analysis are calculated
    def _set_up_for_dynamical_analysis(self):
        new_vars = self.setup.get_params_for_power_spectrum(self)
        self._set_class_variables(new_vars)

    # calculates quantities needed for analysis specified by analysis_type
    def _calc_variables(self):
        if self.analysis_type == 'dynamical':
            self._set_up_for_stationary_analysis()
            self._set_up_for_dynamical_analysis()
        elif self.analysis_type == 'stationary':
            self._set_up_for_stationary_analysis()

    def alter_params(self, params):
        """Parameter specified in dictionary params are changed.
        Changeable parameters are default analysis and circuit parameter,
        as well as label and analysis_type.

        Arguments:
        params: dictionary, specifying new parameters
        """
        self.params.update(params)
        new_vars = self.setup.get_altered_circuit_params(self, self.label)
        self._set_class_variables(new_vars)
        new_vars = self.setup.get_params_for_analysis(self)
        self._set_class_variables(new_vars)
        self._calc_variables()

    def create_power_spectra(self):
        """Returns frequencies and power spectra.
        See: Eq. 9 in Bos et al. (2015)
        Shape of output: (len(self.populations), len(self.omegas))

        Output:
        freqs: vector of frequencies in Hz
        power: power spectra for all populations,
               dimension len(self.populations) x len(freqs)
        """
        power = np.asarray(map(self.ana.spec, self.ana.omegas))
        return self.ana.omegas/(2.0*np.pi), np.transpose(power)

    def create_power_spectra_approx(self):
        """Returns frequencies and power spectra approximated by
        dominant eigenmode.
        See: Eq. 15 in Bos et al. (2015)
        Shape of output: (len(self.populations), len(self.omegas))

        Output:
        freqs: vector of frequencies in Hz
        power: power spectra for all populations,
               dimension len(self.populations) x len(freqs)
        """
        power = np.asarray(map(self.ana.spec_approx, self.ana.omegas))
        return self.ana.omegas/(2.0*np.pi), np.transpose(power)

    def create_eigenvalue_spectra(self, matrix):
        """Returns frequencies and frequency dependence of eigenvalues of
        matrix.

        Arguments:
        matrix: string specifying the matrix, options are the effective
                connectivity matrix ('MH'), the propagator ('prop') and
                the inverse of the propagator ('prop_inv')

        Output:
        freqs: vector of frequencies in Hz
        eigs: spectra of all eigenvalues,
              dimension len(self.populations) x len(freqs)
        """
        eigs = [self.ana.eigs_evecs(matrix, w)[0] for w in self.ana.omegas]
        eigs = np.transpose(np.asarray(eigs))
        return self.ana.omegas/(2.0*np.pi), eigs

    def create_eigenvector_spectra(self, matrix, label):
        """Returns frequencies and frequency dependence of
        eigenvectors of matrix.

        Arguments:
        matrix: string specifying the matrix, options are the effective
                connectivity matrix ('MH'), the propagator ('prop') and
                the inverse of the propagator ('prop_inv')
        label: string specifying whether spectra of left or right
               eigenvectors are returned, options: 'left', 'right'

        Output:
        freqs: vector of frequencies in Hz
        evecs: spectra of all eigenvectors,
               dimension len(self.populations) x len(freqs) x len(self.populations)
        """
        # one list entry for every eigenvector, evecs[i][j][k] is the
        # ith eigenvectors at the jth frequency for the kth component
        evecs = [np.zeros((len(self.ana.omegas), self.ana.dimension),
                          dtype=complex) for i in range(self.ana.dimension)]
        for i, w in enumerate(self.ana.omegas):
            eig, vr, vl = self.ana.eigs_evecs(matrix, w)
            if label == 'right':
                v = vr
            elif label == 'left':
                v = vl
            for j in range(self.ana.dimension):
                evecs[j][i] = v[j]
        evecs = np.asarray([np.transpose(evecs[i]) for i in range(self.ana.dimension)])
        return self.ana.omegas/(2.0*np.pi), evecs

    def reduce_connectivity(self, M_red):
        """Connectivity (indegree matrix) is reduced, while the working
        point is held constant.

        Arguments:
        M_red: matrix, with each element specifying how the corresponding
               connection is altered, e.g the in-degree from population
               j to population i is reduced by 30% with M_red[i][j]=0.7
        """
        M_original = self.M_full[:]
        M_original_fast = self.M_full_fast[:]
        M_original_slow = self.M_full_slow[:]
        if M_red.shape != M_original.shape:
            raise RuntimeError('Dimension of mask matrix has to be the '
                               + 'same as the original indegree matrix.')
        self.M = M_original*M_red
        self.M_fast = M_original_fast*M_red
        self.M_slow = M_original_slow*M_red
        self.ana.update_variables({'M': self.M, 'M_fast': self.M_fast,
                                   'M_slow': self.M_slow})

    def restore_full_connectivity(self):
        '''Restore connectivity to full connectivity.'''
        self.M = self.M_full
        self.M_fast = self.M_full_fast
        self.M_slow = self.M_full_slow
        self.ana.update_variables({'M': self.M, 'M_fast': self.M_fast,
                                   'M_slow': self.M_slow})

    def get_effective_connectivity(self, freq):
        """Returns effective connectivity matrix.

        Arguments:
        freq: frequency in Hz
        """
        return self.ana.create_MH(2*np.pi*freq)

    def get_sensitivity_measure(self, freq, index=None):
        """Returns sensitivity measure.
        see: Eq. 21 in Bos et al. (2015)

        Arguments:
        freq: frequency in Hz

        Keyword arguments:
        index: specifies index of eigenmode, default: None
               if set to None the dominant eigenmode is assumed
        """
        MH  = self.get_effective_connectivity(freq)
        e, U = np.linalg.eig(MH)
        U_inv = np.linalg.inv(U)
        if index is None:
            # find eigenvalue closest to one
            index = np.argmin(np.abs(e-1))
        T = np.outer(U_inv[index],U[:,index])
        T /= np.dot(U_inv[index],U[:,index])
        T *= MH
        return T

    def get_transfer_function(self):
        """Returns dynamical transfer function depending on frequency.
        Shape of output: (len(self.populations), len(self.omegas))

        Output:
        freqs: vector of frequencies in Hz
        dyn_trans_func: power spectra for all populations,
                        dimension len(self.populations) x len(freqs)
        """
        dyn_trans_func = np.asarray(map(self.ana.create_H, self.ana.omegas))
        return self.ana.omegas/(2.0*np.pi), np.transpose(dyn_trans_func)
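
A minimal usage sketch based on the docstring above, assuming the package ships default 'microcircuit' parameters; the call sequence follows the documented API, but the snippet itself is illustrative.

# Hypothetical usage: build the circuit and compute its power spectra (Eq. 9).
circ = Circuit('microcircuit', analysis_type='dynamical')
freqs, power = circ.create_power_spectra()
print(freqs.shape, power.shape)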
Example #40
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 15:03:49 2015

@author: 
"""

from pprint import pprint
from datetime import datetime
from analytics import Analytics
from execution_engine import ExecutionEngine
from analytics_utils import plot

a = Analytics()
e = ExecutionEngine()

dimensions = {'X': {'range': (147.968, 148.032)}, 'Y': {'range': (-36.032, -35.968)}, 'T': {'range': (1262304000.0, 1325375999.999999)}, }
ndvi = a.applySensorSpecificBandMath('LS5TM', 'ndvi', dimensions, 'step1_get_data', 'step2_ndvi')
result = e.executePlan(a.plan)

plot(e.cache['ndvi'])

Example #41
    return activity

import sys
args = sys.argv[1:]
if len(args) != 3:
    print 'Usage: cohort.py path/to/repo yyyy-mm month_count'
    sys.exit()
repo_path, first_month_string, month_count_string = args

repo = Repo(repo_path)
first_month = parse_month(first_month_string)
month_count = int(month_count_string)

periods = range(month_count)
activity = repo_activity(repo, first_month, month_count)
analytics = Analytics(periods, activity)

def format_counts(counts):
    return ','.join(str(count) for count in counts)

def format_counts_list(counts_list):
    return '\n'.join(format_counts(counts) for counts in counts_list)

analytics.acquisition()
print '''\
Acquisition analysis
{} contributors
{} contributors in original cohort
{} new contributors
New contributors in {} months
(excluding first month)
Example #42
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 15:28:43 2015

@author: 
"""

from pprint import pprint
from datetime import datetime
from analytics import Analytics
from execution_engine import ExecutionEngine
from analytics_utils import plot

a = Analytics()
e = ExecutionEngine()

dimensions = {'X': {'range': (147.968, 148.032)}, 'Y': {'range': (-36.032, -35.968)}, 'T': {'range': (1262304000.0, 1325375999.999999)}, }
arrays = a.createArray('LS5TM', ['B40', 'B30'], dimensions, 'get_data')
ndvi = a.applyBandMath(arrays, '((array1 - array2) / (array1 + array2))', 'ndvi')
arrays2 = a.createArray('LS5TM', ['B40', 'B30'], dimensions, 'get_data2')
ndvi2 = a.applyBandMath(arrays2, '((array1 - 2.0*array2) / (array2))', 'ndvi2')
average = a.applyBandMath([ndvi, ndvi2], '((array1 + array2) / 2)', 'average')
pq_data = a.createArray('LS5TMPQ', ['PQ'], dimensions, 'pq_data')
mask = a.applyCloudMask(average, pq_data, 'mask')
median_t = a.applyGenericReduction(mask, ['T'], 'median(array1)', 'medianT')
result = e.executePlan(a.plan)

plot(e.cache['medianT'])
Example #43
class HackerNewsApp:
    HN_URL_PREFIX = "https://news.ycombinator.com/item?id="
    UPDATE_URL = "https://github.com/captn3m0/hackertray#upgrade"
    ABOUT_URL = "https://github.com/captn3m0/hackertray"
    MIXPANEL_TOKEN = "51a04e37dad59393c7371407e84a8050"
    def __init__(self, args):
        #Load the database
        home = expanduser("~")
        with open(home + '/.hackertray.json', 'a+') as content_file:
            content_file.seek(0)
            content = content_file.read()
            try:
                self.db = set(json.loads(content))
            except ValueError:
                self.db = set()

        # Setup analytics
        self.tracker = Analytics(args.dnt, HackerNewsApp.MIXPANEL_TOKEN)

        # create an indicator applet
        self.ind = appindicator.Indicator("Hacker Tray", "hacker-tray", appindicator.CATEGORY_APPLICATION_STATUS)
        self.ind.set_status(appindicator.STATUS_ACTIVE)
        self.ind.set_icon(get_icon_filename("hacker-tray.png"))

        # create a menu
        self.menu = gtk.Menu()

        #The default state is false, and it toggles when you click on it
        self.commentState = args.comments

        # create items for the menu - refresh, quit and a separator
        menuSeparator = gtk.SeparatorMenuItem()
        menuSeparator.show()
        self.menu.append(menuSeparator)

        btnComments = gtk.CheckMenuItem("Show Comments")
        btnComments.show()
        btnComments.set_active(args.comments)
        btnComments.connect("activate", self.toggleComments)
        self.menu.append(btnComments)

        btnAbout = gtk.MenuItem("About")
        btnAbout.show()
        btnAbout.connect("activate", self.showAbout)
        self.menu.append(btnAbout)

        btnRefresh = gtk.MenuItem("Refresh")
        btnRefresh.show()
        #the last parameter is for not running the timer
        btnRefresh.connect("activate", self.refresh, True, args.chrome)
        self.menu.append(btnRefresh)

        if Version.new_available():
            btnUpdate = gtk.MenuItem("New Update Available")
            btnUpdate.show()
            btnUpdate.connect('activate',self.showUpdate)
            self.menu.append(btnUpdate)

        btnQuit = gtk.MenuItem("Quit")
        btnQuit.show()
        btnQuit.connect("activate", self.quit)
        self.menu.append(btnQuit)

        self.menu.show()

        self.ind.set_menu(self.menu)
        self.refresh(chrome_data_directory=args.chrome, firefox_data_directory=args.firefox)
        self.launch_analytics(args)

    def launch_analytics(self, args):
        # Now that we're all done with the boot, send a beacon home
        launch_data = vars(args)
        launch_data['version'] = Version.current()
        launch_data['platform'] = platform.linux_distribution()
        try:
            launch_data['browser'] = subprocess.check_output(["xdg-settings","get","default-web-browser"]).strip()
        except subprocess.CalledProcessError as e:
            launch_data['browser'] = "unknown"
        self.tracker.track('launch', launch_data)

    def toggleComments(self, widget):
        """Whether comments page is opened or not"""
        self.commentState = not self.commentState

    def showUpdate(self,widget):
        """Handle the update button"""
        webbrowser.open(HackerNewsApp.UPDATE_URL)
        # Remove the update button once clicked
        self.menu.remove(widget)
        self.tracker.visit(HackerNewsApp.UPDATE_URL)


    def showAbout(self, widget):
        """Handle the about btn"""
        webbrowser.open(HackerNewsApp.ABOUT_URL)
        self.tracker.visit(HackerNewsApp.ABOUT_URL)

    #ToDo: Handle keyboard interrupt properly
    def quit(self, widget, data=None):
        """ Handler for the quit button"""
        l = list(self.db)
        home = expanduser("~")

        #truncate the file
        with open(home + '/.hackertray.json', 'w+') as file:
            file.write(json.dumps(l))

        gtk.main_quit()
        self.tracker.track('quit')

    def run(self):
        signal.signal(signal.SIGINT, self.quit)
        gtk.main()
        return 0

    def open(self, widget, event=None, data=None):
        """Opens the link in the web browser"""
        #We disconnect and reconnect the event in case we have
        #to set it to active and we don't want the signal to be processed
        if not widget.get_active():
            widget.disconnect(widget.signal_id)
            widget.set_active(True)
            widget.signal_id = widget.connect('activate', self.open)

        self.db.add(widget.item_id)
        webbrowser.open(widget.url)

        if self.commentState:
            webbrowser.open(self.HN_URL_PREFIX + str(widget.hn_id))
        self.tracker.visit(widget.url)

    def addItem(self, item):
        """Adds an item to the menu"""
        #This is in the case of YC Job Postings, which we skip
        if item['points'] == 0 or item['points'] is None:
            return

        i = gtk.CheckMenuItem(
            "(" + str(item['points']).zfill(3) + "/" + str(item['comments_count']).zfill(3) + ")    " + item['title'])

        visited = item['history'] or item['id'] in self.db

        i.set_active(visited)
        i.url = item['url']
        tooltip = "{url}\nPosted by {user} {timeago}".format(url=item['url'], user=item['user'], timeago=item['time_ago'])
        i.set_tooltip_text(tooltip)
        i.signal_id = i.connect('activate', self.open)
        i.hn_id = item['id']
        i.item_id = item['id']
        self.menu.prepend(i)
        i.show()

    def refresh(self, widget=None, no_timer=False, chrome_data_directory=None, firefox_data_directory=None):

        """Refreshes the menu """
        try:
            # Create an array of 20 false to denote matches in History
            searchResults = [False]*20
            data = list(reversed(HackerNews.getHomePage()[0:20]))
            urls = [item['url'] for item in data]
            if(chrome_data_directory):
                searchResults = self.mergeBoolArray(searchResults, Chrome.search(urls, chrome_data_directory))

            if(firefox_data_directory):
                searchResults = self.mergeBoolArray(searchResults, Firefox.search(urls, firefox_data_directory))

            #Remove all the current stories
            for i in self.menu.get_children():
                if hasattr(i, 'url'):
                    self.menu.remove(i)

            #Add back all the refreshed news
            for index, item in enumerate(data):
                item['history'] = searchResults[index]
                if item['url'].startswith('item?id='):
                    item['url'] = "https://news.ycombinator.com/" + item['url']

                self.addItem(item)
        # Catch network errors
        except requests.exceptions.RequestException as e:
            print "[+] There was an error in fetching news items"
        finally:
            # Call every 10 minutes
            if not no_timer:
                gtk.timeout_add(10 * 30 * 1000, self.refresh, widget, no_timer, chrome_data_directory)

    # Merges two boolean arrays, using OR operation against each pair
    def mergeBoolArray(self, original, patch):
        for index, var in enumerate(original):
            original[index] = original[index] or patch[index]
        return original
Example #44
    def __init__(self, args):
        #Load the database
        home = expanduser("~")
        with open(home + '/.hackertray.json', 'a+') as content_file:
            content_file.seek(0)
            content = content_file.read()
            try:
                self.db = set(json.loads(content))
            except ValueError:
                self.db = set()

        # Setup analytics
        self.tracker = Analytics(args.dnt, HackerNewsApp.MIXPANEL_TOKEN)

        # create an indicator applet
        self.ind = appindicator.Indicator("Hacker Tray", "hacker-tray", appindicator.CATEGORY_APPLICATION_STATUS)
        self.ind.set_status(appindicator.STATUS_ACTIVE)
        self.ind.set_icon(get_icon_filename("hacker-tray.png"))

        # create a menu
        self.menu = gtk.Menu()

        #The default state is false, and it toggles when you click on it
        self.commentState = args.comments

        # create items for the menu - refresh, quit and a separator
        menuSeparator = gtk.SeparatorMenuItem()
        menuSeparator.show()
        self.menu.append(menuSeparator)

        btnComments = gtk.CheckMenuItem("Show Comments")
        btnComments.show()
        btnComments.set_active(args.comments)
        btnComments.connect("activate", self.toggleComments)
        self.menu.append(btnComments)

        btnAbout = gtk.MenuItem("About")
        btnAbout.show()
        btnAbout.connect("activate", self.showAbout)
        self.menu.append(btnAbout)

        btnRefresh = gtk.MenuItem("Refresh")
        btnRefresh.show()
        #the last parameter is for not running the timer
        btnRefresh.connect("activate", self.refresh, True, args.chrome)
        self.menu.append(btnRefresh)

        if Version.new_available():
            btnUpdate = gtk.MenuItem("New Update Available")
            btnUpdate.show()
            btnUpdate.connect('activate',self.showUpdate)
            self.menu.append(btnUpdate)

        btnQuit = gtk.MenuItem("Quit")
        btnQuit.show()
        btnQuit.connect("activate", self.quit)
        self.menu.append(btnQuit)

        self.menu.show()

        self.ind.set_menu(self.menu)
        self.refresh(chrome_data_directory=args.chrome, firefox_data_directory=args.firefox)
        self.launch_analytics(args)
Example #45
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 14 15:08:14 2015

@author: 
"""

from pprint import pprint
from datetime import datetime
from analytics import Analytics
from execution_engine import ExecutionEngine
from analytics_utils import plot

a = Analytics()
e = ExecutionEngine()

dimensions = {'X': {'range': (147.968, 148.032)}, 'Y': {'range': (-36.032, -35.968)}, 'T': {'range': (1262304000.0, 1325375999.999999)}, }
arrays = a.createArray('LS5TM', ['B40'], dimensions, 'get_data')
median_t = a.applyGenericReduction(arrays, ['T'], 'median(array1)', 'medianT')

result = e.executePlan(a.plan)

plot(e.cache['medianT'])