Example no. 1
def warden_metadata():
    from utils import pickle_it
    meta = {}
    meta['full_df'] = specter_df()
    meta['wallet_list'] = current_app.specter.wallet_alias_list()

    # Load pickle with previous checkpoint df
    df_pkl = 'txs_pf.pkl'
    data = pickle_it(action='load', filename=df_pkl)
    if isinstance(data, pd.DataFrame):
        meta['df_old'] = data
    else:
        # covers the 'file not found' sentinel and any other non-DataFrame
        # payload, so meta['df_old'] is always defined
        meta['df_old'] = None

    # load difference / changes in addresses from file
    ack_file = 'txs_diff.pkl'
    data = pickle_it(action='load', filename=ack_file)
    if data == 'file not found':
        meta['ack_file'] = None
    else:
        meta['ack_file'] = data
        meta['old_new_df_old'] = data['deleted']
        meta['old_new_df_new'] = data['added']

    return (meta)
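Most of the WARden snippets in this collection assume the helper "from utils import pickle_it" with the keyword signature pickle_it(action='save'|'load', filename=..., data=None), where a failed load returns the string 'file not found'; several examples also call it positionally as pickle_it('save', filename, data). A minimal sketch of that contract, assuming a home-directory storage folder — the path and the save return value are illustrative guesses, not the project's actual code:

import os
import pickle

PICKLE_DIR = os.path.join(os.path.expanduser('~'), '.warden')  # assumed location


def pickle_it(action='load', filename=None, data=None):
    # Save or load a pickle; a failed load returns the 'file not found'
    # sentinel that the callers in these examples compare against.
    path = os.path.join(PICKLE_DIR, filename)
    if action == 'save':
        os.makedirs(PICKLE_DIR, exist_ok=True)
        with open(path, 'wb') as f:
            pickle.dump(data, f)
        return 'saved'
    try:
        with open(path, 'rb') as f:
            return pickle.load(f)
    except (FileNotFoundError, EOFError, pickle.UnpicklingError):
        return 'file not found'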
Example no. 2
def get_local_ip():
    from utils import pickle_it
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    s.connect(('8.8.8.8', 1))  # connect() for UDP doesn't send packets
    local_ip_address = s.getsockname()[0]
    s.close()
    pickle_it('save', 'local_ip_address.pkl', local_ip_address)
    return (local_ip_address)
Example no. 3
def run_experiment(directory, dataset_path, params, nproc=None):
    global A, y
    # ray.init(address="auto")
    if not os.path.exists(directory):
        os.makedirs(directory)
    pickle_it(params, 'params', directory)

    print('load dataset')
    with open(dataset_path, 'rb') as f:
        A, y = joblib.load(f)
        # A_obj = put(A)
        # y_obj = put(y)
        # for i in range(0, n_repeat):
        # params[i].A_obj = A_obj
        # params[i].y_obj = y_obj
        # print("A.shape= ", A.shape, params[i].A_obj, params[i].y_obj)

    print('start experiment')
    # pool = Pool(ray_address='auto')
    # results = pool.map(run_logistic, params)
    results = run_logistic(params[0])
    print(results)

    pickle_it(results, 'results', directory)
    print('results saved in "{}"'.format(directory))
Example no. 4
    def refresh_txs(self, load=True):
        # Returns a dictionary with keys
        # dict_keys(['pageCount', 'txlist', 'last_update'])
        try:
            if load:
                data = pickle_it(action='load', filename='specter_txs.pkl')
                if data != 'file not found':
                    return (data)

            session = self.init_session()
            response = session.post(self.tx_url, data=self.tx_payload)
            specter_data = response.json()
            # Include last update_time
            specter_data['last_update'] = datetime.now().strftime(
                '%m/%d/%Y, %H:%M:%S')
            specter_data['txlist'] = json.loads(specter_data['txlist'])
            # Save to pickle file
            pickle_it(action='save',
                      filename='specter_txs.pkl',
                      data=specter_data)
            return (specter_data)

        except requests.exceptions.Timeout as e:
            return ('[Specter Error] [refresh] Could not login to ' +
                    f'{self.base_url} <> Check address <> Error: {e}')
        except Exception as e:
            return ('[Specter Error] [refresh] {0}'.format(e))
Example no. 5
def test_RealTimeBTC():
    from pricing_engine.engine import realtime_price
    ticker = 'BTC'
    results = realtime_price(ticker)
    run_time = datetime.now()

    if results is None:
        health = False
        price = None
        error_message = 'Realtime Price returned None'
    else:
        # only probe the result when it is not None, so the error message
        # above is not clobbered by the TypeError a None lookup would raise
        try:
            price = results['price']
            health = True
            error_message = None
        except Exception as e:
            health = False
            price = None
            error_message = f"Realtime Price returned an error: {e}"

    data = {
        'health': health,
        'price': price,
        'error_message': error_message,
        'run_time': run_time
    }

    filename = 'status_realtime_btc.pkl'
    pickle_it(action='save', filename=filename, data=data)

    return (data)
Example no. 6
    def refresh_txs(self, load=True):
        # Returns a dictionary with keys
        # dict_keys(['pageCount', 'txlist', 'last_update'])
        try:
            if load:
                data = pickle_it(action='load', filename='specter_txs.pkl')
                if data != 'file not found':
                    return (data)

            session = self.init_session()
            response = session.post(self.tx_url,
                                    data=self.tx_payload,
                                    verify=False)
            specter_data = response.json()
            session.close()

            # Include last update_time
            specter_data['last_update'] = datetime.now().strftime(
                '%m/%d/%Y, %H:%M:%S')
            specter_data['txlist'] = json.loads(specter_data['txlist'])
            '''
              Handling for multiple outputs, CoinJoin transactions
            '''
            for tx in specter_data["txlist"]:
                # sum outputs when more than one exists
                if isinstance(tx["address"], list):
                    tx["amount"] = np.sum(np.array(tx["amount"], dtype=float))

                # fees are only relevant for spends
                if tx["category"] == "send":
                    # save a copy, as the original is still needed if it gets updated
                    try:
                        fee = tx["fee"]
                        # accounting for CoinJoins is unique because there's no realized fee for re-mixes
                        if tx["amount"] == fee:
                            tx["fee"] = 0

                        tx["amount"] -= fee

                        # ensure positivity for WARden (this needs to happen last)
                        tx["fee"] = abs(tx["fee"])

                    except Exception:
                        tx['fee'] = 0

            # Save to pickle file
            pickle_it(action='save',
                      filename='specter_txs.pkl',
                      data=specter_data)
            return (specter_data)

        except requests.exceptions.Timeout as e:
            return ('[Specter Error] [refresh] Could not login to ' +
                    f'{self.base_url} <> Check address <> Error: {e}')
        except Exception as e:
            return ('[Specter Error] [refresh] {0}'.format(e))
Example no. 7
def host_list():
    services = pickle_it('load', 'services_found.pkl')
    hosts = pickle_it('load', 'hosts_found.pkl')
    if request.method == "GET":
        delete = request.args.get("delete")
        if delete is not None:
            try:
                del services[delete]
            except Exception:
                pass
            try:
                host = delete.strip("http://")
                host = host.strip("https://")
                host = host.strip("/")
                if ':' in host:
                    host = host.split(":")[0]
                for key, item in hosts.items():
                    if item['host'] == host:
                        del hosts[key]
                        break

            except Exception:
                pass

            pickle_it('save', 'services_found.pkl', services)
            pickle_it('save', 'hosts_found.pkl', hosts)
            return redirect(url_for("warden.running_services"))

        return (json.dumps(hosts))
    if request.method == "POST":
        url = request.form.get("new_url")
        url = url_parser(url)
        hosts[url] = {'ip': url, 'host': url, 'last_time': None}

        pickle_it('save', 'hosts_found.pkl', hosts)

        try:
            if '.onion' not in url:
                host_ip = socket.gethostbyname(url)
            else:
                host_ip = url
            services[host_ip] = {
                'url': url,
                'status': 'Loading...',
                'port': None,
                'service': 'Checking Status'
            }
            pickle_it('save', 'services_found.pkl', services)

        except Exception:
            pass

        return redirect(url_for("warden.running_services"))
Example no. 8
    def wallet_info(self, wallet_alias, load=True):
        if wallet_alias is None:
            return None

        if load:
            data = pickle_it(action='load',
                             filename=f'wallet_info_{wallet_alias}.pkl')
            if data != 'file not found':
                return (data)
        url = self.base_url + f'wallets/wallet/{wallet_alias}/settings/'
        metadata = {}
        session = self.init_session()
        page = session.get(url, verify=False)
        soup = BeautifulSoup(page.text, 'html.parser')
        metadata['url'] = url
        # Get device list
        div_id = 'wallet_info_settings_tab'
        data = soup.find("div", {"id": div_id})
        if data is None:
            metadata = {'error': True}
            return metadata
        metadata['title'] = data.find('h2').get_text()
        if metadata['title'] == 'Devices':
            metadata['subtitle'] = data.find_all('p')[0].get_text()
        else:
            metadata['subtitle'] = 'Single key'
        # Get list of devices for this wallet
        data = data.find_all('a', href=True)
        metadata['devices'] = {}
        metadata['error'] = False
        for element in data:
            device_info = {}
            link = element['href']
            alias = list(filter(None, link.split('/')))[-1]
            device_info['url'] = self.base_url[:-1] + link
            device_info['image'] = self.base_url[:-1] + element.find(
                'img').get('src')
            tmp = element.get_text().split('\n')
            tmp = list(filter(None, tmp))
            device_info['name'] = tmp[0].lstrip()
            metadata['devices'][alias] = device_info

        metadata['rescan'] = self.rescan_progress(wallet_alias=wallet_alias,
                                                  load=load,
                                                  session=session)
        # close the session only after rescan_progress is done reusing it
        session.close()

        # Save to pickle file
        pickle_it(action='save',
                  filename=f'wallet_info_{wallet_alias}.pkl',
                  data=metadata)
        return metadata
Example no. 9
    def estimate_parameters(self, docs, tokenized_docs, feature_names):
        """
        Estimates the probability of each class and of each feature within each class, then saves the trained parameters.

        :param list docs: Documents with defined classes
        :param tokenized_docs: Tokenized documents
        :param feature_names: Feature vector which is computed with scikit-learn TfidfVectorizer module
        :return:
        """
        self.class_probability(docs)
        self.feature_probability(feature_names, tokenized_docs)
        pickle_it(self, "naive_bayes_model")
        print("Parameter estimation complete. Predicting trained documents...")
Example no. 10
def trim_graph(graph, reduce_sample=True, pickle=True, from_scratch=True):
    if not graph and not from_scratch:
        graph = reload_json(USER_GRAPH_FNAME, transform=nx.node_link_graph)
        return graph

    rng_state = reload_object(RNG_FNAME, random.getstate)
    random.setstate(rng_state)
    print("Trimming graph...")
    significant_id_set = set()

    for direct in (Direct.IN, Direct.OUT):
        sample = []
        ids = []
        for user_id in graph:
            ids.append(user_id)
            num_neighb = direct.deg_view(graph)[user_id]
            sample.append(num_neighb)
        sample_mean = mean(sample)
        pop_stdev = stdev(sample)
        for i, degree in enumerate(sample):
            if abs(degree - sample_mean) > STDEV_MOD * pop_stdev:
                user_id = ids[i]
                significant_id_set.add((user_id, degree))

    by_asc_degree = sorted(list(significant_id_set), key=lambda x: x[1])
    significant_ids = [i[0] for i in by_asc_degree]

    to_subgraph = set()
    for user_id in significant_ids:
        try:
            others = set(graph.neighbors(user_id))
        except KeyError:
            continue
        if reduce_sample and len(others) != 0:
            # random.sample() needs a sequence; set support was removed in Python 3.11
            others = random.sample(list(others), int(len(others) * OTHERS_MOD))

        if len(others) == 0:
            continue

        to_subgraph.add(user_id)
        for other in others:
            to_subgraph.add(other)

    pickle_it(rng_state, RNG_FNAME)

    user_graph = graph.subgraph(to_subgraph)

    if pickle:
        json_it(user_graph, USER_GRAPH_FNAME, nx.node_link_data)

    return user_graph
Example no. 11
def onion_string():
    from utils import pickle_it
    if app.settings['SERVER'].getboolean('onion_server'):
        try:
            pickle_it('save', 'onion_address.pkl',
                      app.tor_service_id + '.onion')
            return (f"""
    {emoji.emojize(':onion:')} Tor Onion server running at:
    {yellow(app.tor_service_id + '.onion')}
                """)
        except Exception:
            return (yellow("[!] Tor Onion Server Not Running"))
    else:
        return ('')
Example no. 12
def start_hidden_service(app):
    if app.controller is None:
        return
    app.controller.reconnect()
    key_path = os.path.abspath(os.path.join(home_path(), ".tor_service_key"))

    app.tor_service_id = None

    if not os.path.exists(key_path):
        service = app.controller.create_ephemeral_hidden_service(
            {app.tor_port: app.port}, await_publication=True)
        app.tor_service_id = service.service_id
        print("* Started a new hidden service with the address of %s.onion" %
              app.tor_service_id)

        with open(key_path, "w") as key_file:
            key_file.write("%s:%s" %
                           (service.private_key_type, service.private_key))
    else:
        with open(key_path) as key_file:
            key_type, key_content = key_file.read().split(":", 1)

        try:
            service = app.controller.create_ephemeral_hidden_service(
                {app.tor_port: app.port},
                key_type=key_type,
                key_content=key_content,
                await_publication=True,
            )
            app.tor_service_id = service.service_id
            print("")
            print(success("✅ Resumed %s.onion" % app.tor_service_id))
            from utils import pickle_it
            pickle_it('save', 'onion_address.pkl',
                      app.tor_service_id + '.onion')
            print("")
        except Exception:
            pass

    try:
        # save address to file
        if app.save_tor_address_to is not None:
            with open(app.save_tor_address_to, "w") as f:
                f.write("%s.onion" % app.tor_service_id)
        from utils import pickle_it
        pickle_it('save', 'onion_address.pkl', app.tor_service_id + '.onion')
        app.tor_enabled = True
    except Exception:
        pass
Example no. 13
def internet_connected(host="8.8.8.8", port=53, timeout=3):
    """
    Host: 8.8.8.8 (google-public-dns-a.google.com)
    OpenPort: 53/tcp
    Service: domain (DNS/TCP)
    """
    connected = False
    try:
        socket.setdefaulttimeout(timeout)
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.connect((host, port))
        sock.close()  # close the probe socket instead of leaking it
        connected = True
    except socket.error:
        connected = False
    pickle_it('save', 'diags_internet_connected.pkl', connected)
    return (connected)
Example no. 14
def background_specter_health():
    health = app.specter.is_healthy()
    if health is False:
        message = Message(
            category='Specter Server',
            message_txt=
            f"<span class='text-danger'>Health Check Returned False at {app.specter.base_url}</span>"
        )
        app.message_handler.add_message(message)
        health_text = pickle_it('load', 'specter_health.pkl')
        message = Message(
            category='Specter Server',
            message_txt=
            f"<span class='text-danger'>Health Check Result: {health_text}</span>"
        )
        app.message_handler.add_message(message)
        # Check for other URLs where Specter could be located
        seeker = app.specter.seek_specter()
        if seeker is not None:
            # Open Config and update
            config_file = Config.config_file
            config_settings = configparser.ConfigParser()
            config_settings.read(config_file)
            config_settings['SPECTER']['url'] = seeker['url']
            with open(config_file, 'w') as file:
                config_settings.write(file)

            message = Message(
                category='Specter Server',
                message_txt=
                f"<span class='text-success'>Found an alternative server at {app.specter.base_url}.<br>Updated to use this server.</span>"
            )
            app.message_handler.add_message(message)
            return
        else:
            message = Message(
                category='Specter Server',
                message_txt=
                f"<span class='text-danger'>Could not find any alternative servers running</span>"
            )
            app.message_handler.add_message(message)

    health_text = pickle_it('load', 'specter_health.pkl')
    message = Message(
        category='Specter Server',
        message_txt=
        f"<span class='text-info'>Health Check Result: {health_text}</span>")
    app.message_handler.add_message(message)
Example no. 15
def get_pickle():
    filename = request.args.get("filename")
    if not filename:
        # a Flask view must return a response, not None
        return json.dumps(None)
    filename += ".pkl"
    data_loader = pickle_it(action='load', filename=filename)
    return (json.dumps(data_loader, default=lambda o: '<not serializable>'))
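A quick way to exercise a route like get_pickle, assuming it is registered at /get_pickle on a locally running instance — host, port, and route path here are illustrative, not taken from the source:

import requests

# Hypothetical call: fetch the saved status_realtime_btc.pkl
# (see Example no. 5) back as JSON.
resp = requests.get('http://localhost:5000/get_pickle',
                    params={'filename': 'status_realtime_btc'})
print(resp.json())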
Example no. 16
def run_experiment(directory, dataset_pickle, params, nproc=None):
    global X, y
    if not os.path.exists(directory):
        os.makedirs(directory)
    pickle_it(params, 'params', directory)

    print('load dataset')
    with open(dataset_pickle, 'rb') as f:
        X, y = pickle.load(f)

    print('start experiment')
    with mp.Pool(nproc) as pool:
        results = pool.map(run_logistic, params)

    pickle_it(results, 'results', directory)
    print('results saved in "{}"'.format(directory))
Example no. 17
    def word_to_vec(self, data, is_test=True):
        """
        Converts documents to tf-idf vectors.

        :param list data: Text documents to be converted
        :param bool is_test: If True, uses the trained tf-idf model; otherwise trains and saves a new one
        :return:
        """
        if is_test:
            vectorizer = unpickle_it("tfidf_model")
            self.tfidf_vectors = vectorizer.transform(data)
        else:
            vectorizer = TfidfVectorizer()
            vectorizer.fit(data)
            self.tfidf_vectors = vectorizer.transform(data)
            pickle_it(vectorizer, "tfidf_model")
            self.feature_names = vectorizer.get_feature_names()
Example no. 18
def run_experiment(directory, dataset_path, params, nproc=None):
    global A, y
    if not os.path.exists(directory):
        os.makedirs(directory)
    pickle_it(params, 'params', directory)

    print('load dataset')
    with open(dataset_path, 'rb') as f:
        A, y = pickle.load(f)
        print("A.shape= ", A.shape)

    print('start experiment')
    # with mp.Pool() as pool:
        # results = pool.map(run_logistic, params)
    results = run_logistic(params[0])
    print(results)

    pickle_it(results, 'results', directory)
    print('results saved in "{}"'.format(directory))
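Several snippets in this collection (e.g., Examples no. 3, 16, and 18 above, and no. 26, 27, and 32 below, plus the NLP code in Examples no. 9 and 17) call a positional variant instead: pickle_it(data, name[, directory]), with an unpickle_it counterpart. A minimal sketch consistent with those call sites — the '.pkl' suffix handling and the default directory are assumptions, not the original implementation:

import os
import pickle


def pickle_it(data, name, directory='.'):
    # Serialize `data` to <directory>/<name>.pkl.
    with open(os.path.join(directory, name + '.pkl'), 'wb') as f:
        pickle.dump(data, f)


def unpickle_it(name, directory='.'):
    # Load a previously pickled object back into memory.
    with open(os.path.join(directory, name + '.pkl'), 'rb') as f:
        return pickle.load(f)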
Example no. 19
    def rescan_progress(self, wallet_alias, load=True, session=None):
        if load:
            data = pickle_it(action='load',
                             filename=f'specter_rescan_{wallet_alias}.pkl')
            if data != 'file not found':
                return (data)
        try:
            url = self.base_url + f"wallets/wallet/{wallet_alias}/rescan_progress"
            if not session:
                session = self.init_session()
            response = session.get(url)
            data = response.json()
            # Save to pickle file
            pickle_it(action='save',
                      filename=f'specter_rescan_{wallet_alias}.pkl',
                      data=data)
            return (data)
        except Exception as e:
            return ('[Specter Error] [rescan] {0}'.format(e))
Example no. 20
def build_graph(pickle=False, from_scratch=True):
    user_graph = nx.DiGraph()
    if not from_scratch:
        print("Loading graph data.")
        user_graph = reload_object(USER_GRAPH_FNAME, nx.DiGraph)
        if user_graph:
            return user_graph
    print("Building graph...")
    if VALID_USER_FRAME.empty:
        sys.stderr.write("ERROR:  A user or tweet dictionary is empty.")
        sys.exit(NO_DATA_EXIT_CODE)

    for i, user_id in enumerate(USER_LIST):
        new_edges = {Direct.IN: [], Direct.OUT: []}
        for direct in (Direct.IN, Direct.OUT):
            num, expected = get_expected_connection_bounds(user_graph,
                                                           user_id,
                                                           direct=direct)
            if num == 0:
                others = (VALID_USER_FRAME.loc[user_id])[direct.twit_key()]
                for ident in others:
                    edge = direct.make_edge(int(user_id), int(ident))
                    new_edges[direct].append(edge)
            elif num != expected:
                print(
                    f"followers mismatch for node {user_id}: {num}, {expected}, {direct}"
                )
                print("Skipping")
                print()
        new_in = new_edges[Direct.IN]
        new_out = new_edges[Direct.OUT]
        if len(new_in) != 0 or len(new_out) != 0:
            user_graph.add_edges_from(new_in)
            user_graph.add_edges_from(new_out)

        if (i + 1) % 100 == 0:
            print(f"Analyzed %d users" % (i + 1))
    if pickle:
        pickle_it(user_graph, FULL_GRAPH_FNAME)
    return user_graph
Example no. 21
    def is_healthy(self):
        reach = self.is_reachable()
        if reach is False:
            pickle_it("save", "specter_health.pkl", "Unreacheable")
            return False
        auth = self.is_auth()
        if auth is False:
            pickle_it("save", "specter_health.pkl", "Could not Authenticate")
            return False

        # Check Node Health
        try:
            if self.home_parser()['bitcoin_core_is_syncing'] is True:
                pickle_it("save", "specter_health.pkl",
                          "Bitcoin Node Not Fully Synched")
                return False
            else:
                pickle_it("save", "specter_health.pkl", "Running")
                return True
        except Exception as e:
            pickle_it("save", "specter_health.pkl", f"Error: {e}")
            return False
Example no. 22
def is_service_running(service, expiry=None):
    # Expiry in seconds since last time reached
    # usage: is_service_running('WARden Server', 10)
    services_found = pickle_it('load', 'services_found.pkl')
    if services_found == 'file not found':
        return (False, None)
    for key, val in services_found.items():
        if val['service'] == service:
            if expiry is not None:
                utc_time = datetime.utcnow()
                epoch_time = (utc_time - datetime(1970, 1, 1)).total_seconds()
                if epoch_time - val['last_update'] > expiry:
                    continue
            return (True, val)
    return (False, None)
Example no. 23
def alert_activity():
    alerts = False
    # Don't send any alerts as activity still being downloaded
    if current_app.downloading:
        return alerts
    ack_file = 'txs_diff.pkl'
    try:
        data = pickle_it(action='load', filename=ack_file)
        if data == 'file not found':
            raise FileNotFoundError
        if data['changes_detected_on'] is not None:
            return (True)
        else:
            return (False)
    except Exception:
        return (False)
Example no. 24
def apikey(source, required=True):
    # Check if a cryptocompare API key is stored in the home directory
    if source == 'cryptocompare':
        from utils import pickle_it
        API_KEY = pickle_it('load', 'cryptocompare_api.pkl')
        if "\n" in API_KEY:
            API_KEY = API_KEY.strip("\n")
        if API_KEY != 'file not found':
            return API_KEY

    # GET API_KEY
    if load_config().has_option('API', source):
        API_KEY = load_config()['API'][source]
    else:
        API_KEY = None
    if required and API_KEY is None:
        raise Exception(f'{source} requires an API KEY and none was found.')
    return API_KEY
Example no. 25
    def init_session(self):
        # Create the session directly: a `with` block would close it on
        # return, but callers reuse the session and close it themselves.
        session = requests.session()
        if "onion" in self.base_url:
            TOR = pickle_it('load', 'tor.pkl')
            port = TOR['port']
            if port is None or port == 'failed':
                port = 9050

            # route requests through the local Tor SOCKS proxy;
            # cast the port since the proxy URL must be a string
            session.proxies = {
                "http": "socks5h://0.0.0.0:" + str(port),
                "https": "socks5h://0.0.0.0:" + str(port),
            }
        response = session.post(self.login_url,
                                data=self.login_payload,
                                timeout=60,
                                verify=False)
        # Check if authorized
        if response.status_code == 401:
            raise Exception(
                'Unauthorized Login to Specter. Check Username and/or Password.'
            )
        return (session)
Example no. 26
                                [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                                [20, 20, 20, 20],
                                [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20]],
                          'Thursday': [[20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                               [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                               [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                               [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                               [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20]],
                          "Friday": [[20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                             [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                             [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                             [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                             [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20]],
                          "Saturday": [[20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                               [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                               [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                               [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                               [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20]],
                          "Sunday": [[20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                             [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                             [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                             [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20],
                             [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20], [20, 20, 20, 20]]
                          }


if __name__ == '__main__':
    from utils import pickle_it
    pickle_it(DEFAULT_TIMER_SETTINGS, "timer")

Example no. 27
def run_parallel_experiment(directory, dataset_pickle, models, cores, baseline, repeat=3):
    if not os.path.exists(directory):
        os.makedirs(directory)
    pickle_it([m(1) for m in models], 'models', directory)
    pickle_it(cores, 'cores', directory)

    print('load dataset')
    with open(dataset_pickle, 'rb') as f:
        X, y = pickle.load(f)

    print('start experiment')

    chronos = np.zeros((len(models), len(cores), repeat))
    stop_times = np.zeros((len(models), len(cores), repeat), dtype=int)

    for r in range(repeat):
        for c_idx, core in enumerate(cores):
            for m_idx, model in enumerate(models):
                p = model(core)
                print("{} - cores {} - repeat {}".format(p, core, r))
                m = LogisticParallelSGD(p)
                timing, epoch, iteration, losses = m.fit_until(X, y, num_features=X.shape[1], num_samples=X.shape[0],
                                                               baseline=baseline)
                chronos[m_idx, c_idx, r] = timing
                stop_times[m_idx, c_idx, r] = epoch * X.shape[0] + iteration

                pickle_it(chronos, 'chronos', directory)
                pickle_it(stop_times, 'stop_times', directory)

    pickle_it(chronos, 'chronos', directory)
    pickle_it(stop_times, 'stop_times', directory)
    print('results saved in "{}"'.format(directory))
Example no. 28
def main(args):

    with open(args.data_dir+'/ptb.vocab.json', 'r') as file:
        vocab = json.load(file)

    # required to map between integer-value sentences and real sentences
    w2i, i2w = vocab['w2i'], vocab['i2w']

    # make sure our models for the VAE and Actor exist
    if not os.path.exists(args.load_vae):
        raise FileNotFoundError(args.load_vae)

    model = SentenceVAE(
        vocab_size=len(w2i),
        sos_idx=w2i['<sos>'],
        eos_idx=w2i['<eos>'],
        pad_idx=w2i['<pad>'],
        unk_idx=w2i['<unk>'],
        max_sequence_length=args.max_sequence_length,
        embedding_size=args.embedding_size,
        rnn_type=args.rnn_type,
        hidden_size=args.hidden_size,
        word_dropout=args.word_dropout,
        embedding_dropout=args.embedding_dropout,
        latent_size=args.latent_size,
        num_layers=args.num_layers,
        bidirectional=args.bidirectional
    )

    model.load_state_dict(
        torch.load(args.load_vae, map_location=lambda storage, loc: storage))
    model.eval()
    print("vae model loaded from %s"%(args.load_vae))

    # to run in constraint mode, we need the trained generator
    if args.constraint_mode:
        if not os.path.exists(args.load_actor):
            raise FileNotFoundError(args.load_actor)

        actor = Actor(
            dim_z=args.latent_size, dim_model=2048, num_labels=args.n_tags)
        actor.load_state_dict(
            torch.load(args.load_actor, map_location=lambda storage, loc:storage))
        actor.eval()
        print("actor model loaded from %s"%(args.load_actor))

    if torch.cuda.is_available():
        model = model.cuda()
        if args.constraint_mode:
            actor = actor.cuda() # TODO: to(self.devices)

    if args.sample:
        print('*** SAMPLE Z: ***')
        # get samples from the prior
        sample_sents, z = model.inference(n=args.num_samples)
        sample_sents, sample_tags = get_sents_and_tags(sample_sents, i2w, w2i)
        pickle_it(z.cpu().numpy(), 'samples/z_sample_n{}.pkl'.format(args.num_samples))
        pickle_it(sample_sents, 'samples/sents_sample_n{}.pkl'.format(args.num_samples))
        pickle_it(sample_tags, 'samples/tags_sample_n{}.pkl'.format(args.num_samples))
        print(*sample_sents, sep='\n')

        if args.constraint_mode:

            print('*** SAMPLE Z_PRIME: ***')
            # get samples from the prior, conditioned via the actor
            all_tags_sample_prime = []
            all_sents_sample_prime = {}
            all_z_sample_prime = {}
            for i, condition in enumerate(LABELS):

                # binary vector denoting each of the PHRASE_TAGS
                labels = torch.Tensor(condition).repeat(args.num_samples, 1).cuda()

                # take z and manipulate using the actor to generate z_prime
                z_prime = actor.forward(z, labels)

                sample_sents_prime, z_prime = model.inference(
                    z=z_prime, n=args.num_samples)
                sample_sents_prime, sample_tags_prime = get_sents_and_tags(
                    sample_sents_prime, i2w, w2i)
                print('conditioned on: {}'.format(condition))
                print(*sample_sents_prime, sep='\n')
                all_tags_sample_prime.append(sample_tags_prime)
                all_sents_sample_prime[LABEL_NAMES[i]] = sample_sents_prime
                all_z_sample_prime[LABEL_NAMES[i]] = z_prime.data.cpu().numpy()
            pickle_it(all_tags_sample_prime, 'samples/tags_sample_prime_n{}.pkl'.format(args.num_samples))
            pickle_it(all_sents_sample_prime, 'samples/sents_sample_prime_n{}.pkl'.format(args.num_samples))
            pickle_it(all_z_sample_prime, 'samples/z_sample_prime_n{}.pkl'.format(args.num_samples))

    if args.interpolate:
        # get random samples from the latent space
        z1 = torch.randn([args.latent_size]).numpy()
        z2 = torch.randn([args.latent_size]).numpy()
        z = to_var(torch.from_numpy(interpolate(start=z1, end=z2, steps=args.num_samples-2)).float())

        print('*** INTERP Z: ***')
        interp_sents, _ = model.inference(z=z)
        interp_sents, interp_tags = get_sents_and_tags(interp_sents, i2w, w2i)
        pickle_it(z.cpu().numpy(), 'samples/z_interp_n{}.pkl'.format(args.num_samples))
        pickle_it(interp_sents, 'samples/sents_interp_n{}.pkl'.format(args.num_samples))
        pickle_it(interp_tags, 'samples/tags_interp_n{}.pkl'.format(args.num_samples))
        print(*interp_sents, sep='\n')

        if args.constraint_mode:
            print('*** INTERP Z_PRIME: ***')
            all_tags_interp_prime = []
            all_sents_interp_prime = {}
            all_z_interp_prime = {}

            for i, condition in enumerate(LABELS):

                # binary vector denoting each of the PHRASE_TAGS
                labels = torch.Tensor(condition).repeat(args.num_samples, 1).cuda()

                # z prime conditioned on this particular binary variable
                z_prime = actor.forward(z, labels)

                interp_sents_prime, z_prime = model.inference(
                    z=z_prime, n=args.num_samples)
                interp_sents_prime, interp_tags_prime = get_sents_and_tags(
                    interp_sents_prime, i2w, w2i)
                print('conditioned on: {}'.format(condition))
                print(*interp_sents_prime, sep='\n')
                all_tags_interp_prime.append(interp_tags_prime)
                all_sents_interp_prime[LABEL_NAMES[i]] = interp_sents_prime
                all_z_interp_prime[LABEL_NAMES[i]] = z_prime.data.cpu().numpy()

            pickle_it(all_tags_interp_prime, 'samples/tags_interp_prime_n{}.pkl'.format(args.num_samples))
            pickle_it(all_sents_interp_prime, 'samples/sents_interp_prime_n{}.pkl'.format(args.num_samples))
            pickle_it(all_z_interp_prime, 'samples/z_interp_prime_n{}.pkl'.format(args.num_samples))

    import IPython; IPython.embed()
Example no. 29
    def home_parser(self, load=True):
        if load:
            data = pickle_it(action='load', filename='specter_home.pkl')
            if data != 'file not found':
                return (data)
        url = self.base_url + 'about'

        metadata = {}
        try:
            session = self.init_session()
            page = session.get(url, verify=False)
            session.close()
        except Exception as e:
            metadata['error'] = str(e)
            return (metadata)

        metadata['specter_health'] = pickle_it('load', 'specter_health.pkl')
        if metadata['specter_health'] == 'file not found':
            metadata['specter_health'] = 'Failed Check'

        soup = BeautifulSoup(page.text, 'html.parser')
        # Get Specter Version
        try:
            metadata['version'] = (soup.find(
                text=re.compile('Specter Version')).parent()[0].get_text())
        except Exception as e:
            metadata['version'] = f'Error: {e}'
        # Get Bitcoin Core Data

        # Check Sync Status
        metadata['bitcoin_core_is_syncing'] = False
        # This is the text that will be searched on Specter
        # page to signal that Core is still synching
        txt_sc = 'Bitcoin Core is still syncing'
        if txt_sc in page.text:
            metadata['bitcoin_core_is_syncing'] = True
        # Same but for updating Wallets Check
        txt_sc = 'Updating wallets data'
        metadata['wallets_is_syncing'] = False
        if txt_sc in page.text:
            metadata['wallets_is_syncing'] = True

        try:
            div_id = 'bitcoin_core_info'
            data = soup.find("div", {"id": div_id})
            data = data.find('table')
            data = data.find_all('tr')
            bitcoin_core_data = {}
            for element in data:
                cols = element.find_all('td')
                bitcoin_core_data[cols[0].get_text().split(':')
                                  [0]] = cols[1].get_text()
            metadata['bitcoin_core_data'] = bitcoin_core_data
            # Format blocks count
            try:
                metadata['bitcoin_core_data']['Blocks count'] = (
                    "{0:,.0f}".format(
                        float(metadata['bitcoin_core_data']['Blocks count'])))
                metadata['bitcoin_core_data']['Difficulty'] = (
                    "{0:,.0f}".format(
                        float(metadata['bitcoin_core_data']['Difficulty'])))
            except Exception:
                pass
        except Exception as e:
            metadata['bitcoin_core_html'] = (
                f"<span class='text-warning'>Error: {str(e)}</span>")
            metadata['bitcoin_core_data'] = {'error': str(e)}
        # Get Wallet Names
        wallet_dict = {}
        wallet_alias = []
        try:
            div_id = "wallets_list"
            data = soup.find("div", {"id": div_id})
            data = data.find_all('a', href=True)

            for element in data:
                try:
                    link = element['href']
                    if 'wallets/new_wallet/' in link:
                        continue
                    alias = list(filter(None, link.split('/')))[-1]
                    wallet_alias.append(alias)
                    wallet_dict[alias] = {}
                    wallet_dict[alias]['url'] = self.base_url[:-1] + link
                    find_class = 'grow'
                    wallet_info = element.findAll("div",
                                                  {"class": find_class})[0]
                    wallet_info = wallet_info.get_text().split('\n')
                    wallet_info = list(filter(None, wallet_info))
                    wallet_dict[alias]['name'] = wallet_info[0].lstrip()
                    wallet_dict[alias]['keys'] = wallet_info[1]
                except Exception:
                    pass

            metadata['alias_list'] = wallet_alias
            metadata['wallet_dict'] = wallet_dict
        except Exception as e:
            metadata['alias_list'] = None
            metadata['wallet_dict'] = {'error': str(e)}
        # Get Device Names
        device_dict = {}
        device_list = []
        try:
            div_id = "devices_list"
            data = soup.find("div", {"id": div_id})
            data = data.find_all('a', href=True)
            for element in data:
                try:
                    link = element['href']
                    if 'devices/new_device_type' in link:
                        continue
                    alias = list(filter(None, link.split('/')))[-1]
                    device_list.append(alias)
                    device_dict[alias] = {}
                    device_dict[alias]['url'] = self.base_url[:-1] + link
                    device_dict[alias][
                        'image'] = self.base_url[:-1] + element.find(
                            'img').get('src')
                    device_info = element.get_text().split('\n')
                    device_dict[alias]['name'] = list(filter(
                        None, device_info))[0].lstrip()
                    device_dict[alias]['keys'] = list(filter(
                        None, device_info))[1]
                except Exception:
                    pass

            metadata['device_list'] = device_list
            metadata['device_dict'] = device_dict

            metadata['last_update'] = datetime.now().strftime(
                '%m/%d/%Y, %H:%M:%S')
        except Exception as e:
            metadata['device_list'] = None
            metadata['device_dict'] = {'error': str(e)}

        # Save to pickle file
        pickle_it(action='save', filename='specter_home.pkl', data=metadata)
        return (metadata)
Example no. 30
               regularizer=1 / n,
               estimate='mean',
               qsgd_s=2**4))
params.append(
    Parameters(name="qsgd-2bit",
               num_epoch=NUM_EPOCH,
               lr_type='bottou',
               initial_lr=10.,
               regularizer=1 / n,
               estimate='mean',
               qsgd_s=2**2))


def run_logistic(param):
    m = LogisticSGD(param)
    res = m.fit(X, y)
    print('{} - score: {}'.format(param, m.score(X, y)))
    return res


if not os.path.exists(RESULT_DIR):
    os.makedirs(RESULT_DIR)
pickle_it(params, 'params', RESULT_DIR)

print('start experiment')
with mp.Pool(len(params)) as pool:
    results = pool.map(run_logistic, params)

pickle_it(results, 'results', RESULT_DIR)
print('results saved in "{}"'.format(RESULT_DIR))
Example no. 31
print('load dataset')
with open(args.dataset_file, 'rb') as f:
    X, y = pickle.load(f)

n, d = X.shape

model = Parameters(num_epoch=args.num_epochs,
                   lr_type=args.lr,
                   initial_lr=args.initial_lr,
                   n_cores=args.num_cores,
                   regularizer=1. / n,
                   take_k=args.k,
                   take_top=args.model == 'top',
                   with_memory=with_memory,
                   estimate='final')

print('start experiment')

m = LogisticParallelSGD(model)
iters, timers, losses = m.fit(X,
                              y,
                              num_features=X.shape[1],
                              num_samples=X.shape[0])

pickle_it(iters, 'iters', directory)
pickle_it(timers, 'timers', directory)
pickle_it(losses, 'losses', directory)

print('results saved in "{}"'.format(directory))
Example no. 32
MAIN_SETTINGS = {'HEAT_THRESHOLD': 0.8,
                 'TEMP_TOLERANCE': 1,
                 'HUM_TOLERANCE': 10,
                 'MAIN_SENSOR': 'mock_location1'}

if __name__ == '__main__':
    from utils import pickle_it
    pickle_it(MAIN_SETTINGS, "main_settings")
    print("pickled: " + str(MAIN_SETTINGS))