Example 1
def claim(modem):
    try:
        datastore = Datastore()
        modem.extractInfo()
        isp = ISP.acquire_isp(
            operator_code=modem.details["modem.3gpp.operator-code"])
        # print("[+] Deduced ISP:", isp)
        router = False
        if "isp" in CONFIGS["ROUTER"] and CONFIGS["ROUTER"]["isp"] == isp:
            router = True
        # print("[+] ROUTER SET TO: ", router)
        new_message = datastore.acquire_message(
            modem_index=modem.index,
            modem_imei=modem.details["modem.3gpp.imei"],
            isp=isp,
            router=router)
    except Exception:
        # Re-raise unchanged so the original traceback is preserved
        raise
    else:
        if new_message is not None:
            sms = SMS(messageID=new_message["id"])
            sms.create_sms(phonenumber=new_message["phonenumber"],
                           text=new_message["text"])

            return sms
        else:
            return None
Example 2
 def get_datastore(self, dsid=None):
     if dsid is not None:
         req = self.get_request(
             'v1/datastores/{0}'.format(dsid))
         return Datastore(**req.json())
     req = self.get_request('v1/datastores')
     return [Datastore(**res) for res in req.json()]
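A quick usage sketch: with an id the method returns a single Datastore, without one it returns the full list (the client object here is hypothetical, standing in for whatever class defines get_datastore):

one = client.get_datastore('ds-123')   # -> Datastore
all_ds = client.get_datastore()        # -> [Datastore, ...]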
Example 3
def build_app(**kwargs):

    print "Initializing app with flags: {}".format(kwargs)

    Datastore.initialize()

    return app
Example 4
def snapshot(ds: Datastore, newRst: dict) -> None:
    q = "INSERT INTO sphinxbuilder (owner, name, path, hash) values "
    for owner, attr in newRst.items():
        for path, v in attr['rst'].items():
            q += f"('{owner}', '{v['file']}', '{path}', '{v['hash']}'), "

    ds.execute("DELETE FROM sphinxbuilder")
    ds.execute(q[:-2])  # drop the trailing ", "
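Because the statement above interpolates values directly into the SQL string, a quote character in a path or filename would break it. A minimal parameterized sketch, assuming ds.execute accepts qmark-style bind parameters the way sqlite3 does (not confirmed by this excerpt):

def snapshot_parameterized(ds: Datastore, newRst: dict) -> None:
    # Flatten the nested dict into (owner, name, path, hash) rows
    rows = [(owner, v['file'], path, v['hash'])
            for owner, attr in newRst.items()
            for path, v in attr['rst'].items()]
    ds.execute("DELETE FROM sphinxbuilder")
    for row in rows:
        ds.execute(
            "INSERT INTO sphinxbuilder (owner, name, path, hash) VALUES (?, ?, ?, ?)",
            row)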
Example 5
def build():
    ds = Datastore(f'{homeDir}lootnika_tasks_journal.db')
    try:
        newRst = check_rst(ds)
    except Exception:
        log.fatal(f"Failed to check docs rst: {traceback.format_exc()}")
        raise SystemExit(1)  # newRst is undefined here; the calls below would fail

    sphinxecutor(newRst)
    snapshot(ds, newRst)
    ds.close()
Example 6
def cu_make_time_serieses(time_resolution, data_store, param_store):
    """ Generate cumulative Dash bar charts for all root accounts """
    preventupdate_if_empty(data_store)
    params: Params = Params.from_json(param_store)
    if not time_resolution:
        time_resolution = params.init_time_res
    data_store: Datastore = Datastore.from_json(data_store, params.cu_roots)
    trans: pd.DataFrame = data_store.trans
    account_tree: ATree = data_store.account_tree
    if len(params.cu_roots) > 0:
        account_list = params.cu_roots
    else:
        account_list = [account_tree.root]
    unit: str = params.unit
    data_title = params.ds_data_title
    result: list = []
    # make one chart for each item in the Cumulative account filter

    if not isinstance(account_list, list):
        app.logger.warning(
            f"Account list should be a list but isn't: {account_list}")
        raise PreventUpdate
    for account in account_list:
        fig: go.Figure = go.Figure(layout=layouts['base'])
        fig.update_layout(
            title={"text": f"{data_title} {account}: Cumulative {unit}"},
            xaxis={
                "showgrid": True,
                "nticks": 20
            },
            yaxis={"showgrid": True},
            legend={
                "xanchor": "left",
                "x": 0,
                "yanchor": "bottom",
                "y": 0,
                "bgcolor": "rgba(0, 0, 0, 0)",
            },
            barmode="relative",
        )
        subaccounts = account_tree.get_children_ids(account)
        j = 0  # keep j defined for the Graph id even if there are no subaccounts
        for j, subaccount in enumerate(subaccounts):
            sub_desc = account_tree.get_descendent_ids(subaccount)
            sub_desc.append(subaccount)
            tba = trans[trans["account"].isin(sub_desc)]
            if len(tba) > 0:
                fig.add_trace(
                    make_cum_area(tba, subaccount, j, time_resolution))
        output = dcc.Graph(id=f"{account}{j}", figure=fig)
        result.append(output)
    return [result]
Example 7
def get_logs():
    return_json = {"status": ""}
    try:
        # TODO: Determine ISP before sending messages
        # datastore = Datastore(configs_filepath="libs/configs/config.ini")
        datastore = Datastore()
        logs = datastore.get_logs()
        return_json["status"] = 200
        return_json["logs"] = logs
        return_json["size"] = len(logs)
    except Exception:
        print(traceback.format_exc())
        return_json["status"] = 500

    return jsonify(return_json)
Example 8
	def __init__(self):
		self.r = Datastore.factory()
		self.DIGRAM = "DIGRAM"
		self.WORD2ID = "WORD2ID"
		self.TOP_N = 5
		self.bgm = nltk.collocations.BigramAssocMeasures
		self.SCORER_FN = self.bgm.likelihood_ratio
Example 9
def init(num):
    """ Initialization function. Must be explicitly called: globes.init(n) """

    global server_num       # this server's number (i.e. this instance)
    global total_servers    # the total number of servers in the system (usually 4)
    global command_sock     # this server's UDP socket for sending/receiving commands
    global reply_sock       # this server's UDP socket for receiving reply values/successes
    global db               # this server's Datastore object
    global delays           # delays[i] returns the avg delay from this server to server i
    global addresses        # addresses[i] returns the 'localhost:1500#' of server i
    global num_replicas     # number of replicas for each key
    global command_counter  # counter for the number of commands sent
    global port_offset      # offset between a server's command port and reply port

    server_num = num

    command_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    reply_sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

    db = Datastore()

    # read in the config.json file to initialize delays and addresses
    with open('config.json') as config_json:
        data = json.load(config_json)
    delays = data['avg_delays'][server_num] # delays is a list of avg delay times
    addresses = data['addresses']

    total_servers = len(addresses)

    num_replicas = 3
    command_counter = 0
    port_offset = 100
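Per the docstring, callers must initialize this module explicitly before touching any of the globals (the module name globes is taken from the docstring; the argument is this instance's server number):

import globes

globes.init(0)  # this process now acts as server 0; sockets, db, and config are set up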
Example 10
    def __init__(self):
        """Creates an instance of a Bouncer.

        Initializes an empty set and an empty dictionary for later use, then
        reloads previously connected networks from the Datastore.
        """

        self.clients = set()
        self.networks = dict()

        try: # Attempt a Datastore Connection

            self.datastore = Datastore(hostname="localhost", port="6379")
            self.datastore.database.ping()

        except redis.ConnectionError: # Continue Without Persistence

            self.datastore = None
            print("Failed to Connect to a Redis Instance.\n"
                  "Continuing Without Persistence.")

        if self.datastore:

            if not self.datastore.get_password():
                self.datastore.set_password()

            history = self.datastore.get_networks()
            for credentials in history.values():
                self.add_network(**credentials)
Example 11
def update_status_on_ds_tab_content(data_store: str, param_store: str):
    """When the loaded files change, and the data source tab is open,
    then presumably the files changed because of user input to the
    tab controls. So, show feedback.  If the loaded files change
    through the URL mechanism, and the data source tab isn't open,
    then this callback is ignored."""
    preventupdate_if_empty(data_store)
    data_store: Datastore = Datastore.from_json(data_store)
    params = Params.from_json(param_store)
    trans: pd.DataFrame = data_store.trans
    preventupdate_if_empty(trans)
    trans_filename = params.ds_data_title
    c1: pd.Series = trans.iloc[0]  # .iloc[0] returns the first row as a Series
    r1: list = [
        c1.account,
        c1.amount,
        c1.date,
        c1.get("desc"),
        c1.get("full account name"),
        c1.get("parent account"),
    ]

    # As a quick hack to get line breaks in Dash for pre-formatted text,
    # generate status info as lists, then render the lists into Divs

    earliest_trans: datetime64 = trans["date"].min()
    latest_trans: datetime64 = trans["date"].max()

    trans_summary: list = [
        f"{trans_filename}: {len(trans)} records loaded, between {pretty_date(earliest_trans)} and {pretty_date(latest_trans)}"
    ]  # NOQA

    atree = data_store.account_tree
    atree_summary = None
    atree_display = None
    if atree and len(atree) > 0:
        atree_summary = (
            f"{len(atree)} accounts loaded, {atree.depth()} levels deep")
        atree_display = atree.show_to_string()

    eras = data_store.eras
    eras_summary = None
    if len(eras) > 0:
        eras_summary = f"{len(eras)} reporting eras"

    return r1 + [trans_summary, atree_summary, atree_display, eras_summary]
Example 12
def apply_burst_click(
    burst_clickData,
    burst_figure,
    pe_selection_store,
    data_store: str,
    param_store: str,
    time_resolution: int,
    time_span: str,
):
    """Clicking on a slice in the Sunburst updates the transaction list
    with matching transactions burst_figure Input is used only to
    guarantee a trigger on initial page load.

    """
    preventupdate_if_empty(data_store)
    preventupdate_if_empty(pe_selection_store)
    trans, atree, eras, dstore = Datastore.get_parts(data_store)
    preventupdate_if_empty(trans)
    earliest_trans = dstore.earliest_trans
    latest_trans = dstore.latest_trans

    # get the selection parameters from the master time series, via an intermediary store.
    date_start: np.datetime64 = pd.to_datetime(
        pe_selection_store.get("start", earliest_trans))
    date_end: np.datetime64 = pd.to_datetime(
        pe_selection_store.get("end", latest_trans))
    max_trans_count: int = pe_selection_store.get("count", 0)
    click_accounts: list = pe_selection_store.get("accounts", [])

    # Figure out which accounts to use to filter transactions.  If any
    # account(s) were selected in the sunburst click, they override
    # the selection passed through from master_time_series
    if burst_clickData:
        raw_click_account = burst_clickData["points"][0]["id"]
        # strip any SUFFIXes from the label that were added in the sunburst hack
        if CONST["leaf_suffix"] in raw_click_account:
            click_accounts = [
                raw_click_account.replace(CONST["leaf_suffix"], "")
            ]
        elif CONST["subtotal_suffix"] in raw_click_account:
            click_accounts = [
                raw_click_account.replace(CONST["subtotal_suffix"], "")
            ]
        else:
            click_accounts = [raw_click_account]

    if len(click_accounts) > 0:
        sub_accounts: list = []
        for account in click_accounts:
            sub_accounts += atree.get_descendent_ids(account)  # accumulate, don't overwrite
        sel_accounts = click_accounts + sub_accounts
        sel_trans = trans[trans["account"].isin(sel_accounts)
                          & (trans["date"] >= date_start)
                          & (trans["date"] <= date_end)]
        num_trans = len(sel_trans)
        account_text = f"{num_trans} selected for {', '.join(click_accounts)}"
        if (len_sub := len(sub_accounts)) > 0:
            account_text = account_text + f" and {len_sub} sub-accounts"
Example 13
class DuplicatesFilter(object):
	'''
	Filters duplicate urls
	Assigns each url a unique id
	Sets url -> id and id -> url in redis
	Indexes inlinks and outlinks
	'''
	def __init__(self):
		self.r = Datastore()
		self.URL2ID = "URL2ID"
		self.ID2URL = "ID2URL"
		self.URL_SET = "URL_SET"
		self.URL_CTR = "URL_CTR"
		self.MEM_THRESHOLD = 10 * (10 ** 9)
		self.redis_process = None
		self.scrapy_process = None
		#self.r.set(self.URL_CTR, -1)

		for i in psutil.process_iter():
			if i.name.find("redis-server") >= 0:
				self.redis_process = i
			if i.name.find("scrapy") >= 0:
				self.scrapy_process = i

	def process_item(self, item, spider):
		if not item:
			raise DropItem

		print "DuplicatesFilter:", item['url']

		if self.redis_process.get_memory_info().rss + self.scrapy_process.get_memory_info().rss > self.MEM_THRESHOLD:
			self.r.set("POWER_SWITCH", "OFF")
			item['shutdown'] = True

		if item['shutdown']:
			return item

		if not item['link_set']:
			raise DropItem
		self.buildURLIndex(item)
		return item

	def buildURLIndex(self, item):
		'''
Example 14
def send_sms(modem, sms):
    datastore = Datastore()
    send_status = None
    try:
        messageLogID = datastore.new_log(messageID=sms.messageID)
    except Exception:
        raise
    else:
        try:
            send_status = modem.send_sms(modem.set_sms(sms))
            if send_status is None:
                logging.warning("[-] Send status failed with sys error")

            elif not send_status["state"]:
                logging.warning("[-] Failed to send...")
                datastore.release_message(messageID=sms.messageID,
                                          status="failed")
                modem.remove_sms(sms)
            else:
                logging.info("[+] Message sent!")
        except Exception as error:
            print("[-] Exception:", error)
            raise
        else:
            datastore.update_log(messageLogID=messageLogID,
                                 status=send_status["status"],
                                 message=send_status["message"])
            print(">>send status:", send_status)
            return send_status["state"]
Example 15
	def __init__(self):
		self.r = Datastore()
		self.URL2ID = "URL2ID"
		self.WORD_SET = "WORD_SET"
		self.WORD2ID = "WORD2ID"
		self.WORD_IN = "WORD_IN"
		self.WORD_CTR = "WORD_CTR"
		#self.r.set(self.WORD_CTR, -1)
		self.stemmer = nltk.stem.PorterStemmer()
		self.stopwords = set([self.clean(x) for x in nltk.corpus.stopwords.words('english')])
Example 16
class KeywordExtractor(object):
	'''
	Extracts keywords from title, extracted_text, meta_description
	'''
	def __init__(self):
		self.r = Datastore()
		self.URL2ID = "URL2ID"
		self.WORD_SET = "WORD_SET"
		self.WORD2ID = "WORD2ID"
		self.WORD_IN = "WORD_IN"
		self.WORD_CTR = "WORD_CTR"
		#self.r.set(self.WORD_CTR, -1)
		self.stemmer = nltk.stem.PorterStemmer()
		self.stopwords = set([self.clean(x) for x in nltk.corpus.stopwords.words('english')])

	def process_item(self, item, spider):
		if item['shutdown']:
			return item

		print(item['url'])

		text = item['title'] + " . " + item['extracted_text'] + " . " + item['meta_description']
		words = [self.clean(x) for x in nltk.wordpunct_tokenize(text)]
		item['ordered_words'] = words
		cleaned_words = set(words) - self.stopwords
		cleaned_words = [self.clean(w) for w in cleaned_words if w.isalnum() and len(w) > 1 and not w.isdigit()]
		item['words'] = cleaned_words
		if not item['words']:
			raise DropItem

		self.buildWordIndex(item)

		return item

	def buildWordIndex(self, item):
		'''
		Get current url id
		For each word in current url's text,
			add the url to the set of urls which contain that word
		'''
		url_id = self.r.get("%s:%s" % (self.URL2ID, hashxx(item['url'])))
		word_id = ""
		for word in item['words']:
			if self.r.sadd(self.WORD_SET, word):
				word_id = str(self.r.incr(self.WORD_CTR, 1))
				self.r.set("%s:%s" % (self.WORD2ID, word), word_id)
			else:
				word_id = self.r.get("%s:%s" % (self.WORD2ID, word))
			self.r.sadd("%s:%s" % (self.WORD_IN, word_id), url_id)

	def clean(self, s):
		return self.stemmer.stem(s.lower())
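For completeness, the inverted index built here can be read back with the same key scheme; a minimal retrieval sketch, assuming the Datastore wrapper exposes the redis-style get/smembers calls used above:

def urls_containing(r, stemmer, word):
	# Normalize the query term exactly as buildWordIndex stored it
	word_id = r.get("WORD2ID:%s" % stemmer.stem(word.lower()))
	if word_id is None:
		return set()
	# WORD_IN:<word_id> holds the ids of every url containing that word
	return r.smembers("WORD_IN:%s" % word_id)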
Example 17
def pe_load_params(trigger: str, data_store: str, param_store: str):
    """ When the param store changes and this tab is visible, update the top params"""
    preventupdate_if_empty(param_store)
    params = Params(**json.loads(param_store))
    tr_options = CONST["time_res_options"]
    if data_store and len(Datastore.from_json(data_store).eras) > 0:
        tr_options = [CONST["time_res_era_option"]] + tr_options
    return [
        params.init_time_res, tr_options, params.init_time_span,
        params.start_date, params.end_date
    ]
Example 18
	def __init__(self):
		self.f_url = open("data/url.txt", "a+")
		self.f_key = open("data/keywords.txt", "a+")
		self.f_mat = open("data/matrix.mtx", "a+")
		self.f_cla = open("data/classes.txt", "a+")
		self.r = Datastore()

		self.URL2ID = "URL2ID"
		self.ID2URL = "ID2URL"
		self.PROCESSED_CTR = "PROCESSED_CTR"

		'''l = enumerate(os.listdir("/home/nvdia/kernel_panic/core/config_data/classes_odp"))
Example 19
class RequestsLimiter(object):
	def __init__(self):
		self.r = Datastore()
		#self.r.flushdb()
		self.DOMAIN = "DOMAIN"
		self.LIMIT = 200
		self.DOMAIN_SET = "DOMAIN_SET"

	def process_request(self, request, spider):
		try:
			domain = urlparse(request.url).hostname
			if int(self.r.get(self.DOMAIN + ":" + domain) or 0) < self.LIMIT:
				self.r.sadd(self.DOMAIN_SET, domain)
				self.r.incr(self.DOMAIN + ":" + domain, 1)
				return None
			else:
				log.msg("DOMAIN limit Crossed:%s" % request.url, level=log.CRITICAL)
				raise IgnoreRequest
		except TypeError:
			raise IgnoreRequest


	def process_response(self, request, response, spider):
		try:

			if 'text/html' not in response.headers['Content-Type'] and 'text/plain' not in response.headers['Content-Type']:
				log.msg("Non-HTML/Plain:%s" % request.url, level=log.CRITICAL)
				raise IgnoreRequest

			if langid.classify(response.body)[0] != 'en':
				log.msg("Non-English:%s" % request.url, level=log.CRITICAL)
				raise IgnoreRequest
		except KeyError:
			log.msg("KeyError(Content-Type):%s" % request.url, level=log.CRITICAL)
			raise IgnoreRequest

		return response
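As a Scrapy downloader middleware, this class is enabled through settings; a sketch (the dotted path is hypothetical, adjust it to wherever RequestsLimiter actually lives):

# settings.py
DOWNLOADER_MIDDLEWARES = {
    'myproject.middlewares.RequestsLimiter': 543,
}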
Example 20
def get_locality(county_id, state_fips):
  ds = Datastore('/tmp/ansi.db')
  c = ds.connect()
  
  c.execute(
    """SELECT state_id, county_id, county_name
      FROM
        Ansi
      WHERE
        state_id=? AND
        county_id=?""",
    (
      state_fips,
      county_id,
    )
  )
  
  row = dict(c.fetchone())
  locality_id = "".join([str(row['state_id']), str(row['county_id']).zfill(3)])
  name = row['county_name']
  ds.close()

  return (locality_id, name)
Example 21
def list_users():
    filters = []
    ds = Datastore()
    res = []
    territory = request.args.get('territory', default='', type=str)
    los = request.args.get('line_of_service', default='', type=str)
    last_name = request.args.get('last_name', default='', type=str)
    ids = request.args.getlist('id', type=None)
    if territory:
        filters.append(('territory', '=', territory))
    if los:
        filters.append(('line_of_service', '=', los))
    if last_name:
        filters.append(('last_name', '=', last_name))
    if ids:
        for every in ids[0].split(','):
            for user in ds.list_users('test', [('id', '=', every)]):
                res.append(dict(user))
        return Response(json.dumps(res), mimetype='application/json')

    users = ds.list_users('test', filters)
    for user in users:
        res.append(dict(user))
    return Response(json.dumps(res), mimetype='application/json')
Example 22
def start():
    global DATASTORE, URL, KEY

    default_tmp = "/tmp/itzod"
    if sys.platform.startswith('win'):
        default_tmp = "C:\\temp\\itzod"

    parser = argparse.ArgumentParser()
    parser.add_argument("-H",
                        "--host",
                        help="Web server Host address to bind to",
                        default="0.0.0.0",
                        action="store",
                        required=False)
    parser.add_argument("-p",
                        "--port",
                        help="Web server Port to bind to",
                        default=8000,
                        action="store",
                        required=False)
    parser.add_argument("-k",
                        "--key",
                        help="Itzod User APIKey for accessing json urls",
                        action="store",
                        required=True)
    parser.add_argument("-u",
                        "--url",
                        help="Base itzod URL for accessing api",
                        default="https://pool.itzod.ru/apiex.php",
                        action="store",
                        required=False)
    parser.add_argument("-d",
                        "--datadir",
                        help="Data directory to store state",
                        default=default_tmp,
                        action="store",
                        required=False)
    args = parser.parse_args()

    logging.basicConfig()
    DATASTORE = Datastore(args.datadir)
    URL = args.url
    KEY = args.key

    t = Timer(60, poll, ())
    t.start()

    run(host=args.host, port=args.port, reloader=True)
Example 23
	def __init__(self):
		self.r = Datastore()
		self.URL2ID = "URL2ID"
		self.ID2URL = "ID2URL"
		self.URL_SET = "URL_SET"
		self.URL_CTR = "URL_CTR"
		self.MEM_THRESHOLD = 10 * (10 ** 9)
		self.redis_process = None
		self.scrapy_process = None
		#self.r.set(self.URL_CTR, -1)

		for i in psutil.process_iter():
			if i.name.find("redis-server") >= 0:
				self.redis_process = i
			if i.name.find("scrapy") >= 0:
				self.scrapy_process = i
Example 24
class DataWriter(object):
	def __init__(self):
		self.f_url = open("data/url.txt", "a+")
		self.f_key = open("data/keywords.txt", "a+")
		self.f_mat = open("data/matrix.mtx", "a+")
		self.f_cla = open("data/classes.txt", "a+")
		self.r = Datastore()

		self.URL2ID = "URL2ID"
		self.ID2URL = "ID2URL"
		self.PROCESSED_CTR = "PROCESSED_CTR"

		'''l = enumerate(os.listdir("/home/nvdia/kernel_panic/core/config_data/classes_odp"))
		l = [(x[0] + 1, x[1]) for x in l]
		self.classes = dict(l)'''

	def process_item(self, item, spider):
		if item['shutdown']:
			self.f_url.close()
			self.f_key.close()
			self.f_mat.close()
			self.f_cla.close()
			self.r.set("POWER_SWITCH", "KILL")
			return item

		self.writeURL(item)
		self.writeKeywords(item)
		self.writeWebMatrix(item)
		#self.writeClasses(item)
		self.r.incr(self.PROCESSED_CTR, 1)
		return item

	def writeURL(self, item):
		self.f_url.write(item['url'] + "\n")

	def writeKeywords(self, item):
		for k in item['words']:
			self.f_key.write("%s," % k)
		self.f_key.write("\n")

	def writeWebMatrix(self, item):
		'''
		Builds web graph in matrix market format file
		'''
		u = self.r.get("%s:%s" % (self.URL2ID, hashxx(item['url'])))
		v = 0
		for link in set(item['link_set']):
			v = self.r.get("%s:%s" % (self.URL2ID, hashxx(link)))
			self.f_mat.write("%s\t%s\t1\n" % (u, v))

	def writeClasses(self, item):
		self.f_cla.write("%s:%s\n" % (item['title'], self.classes[item['predict'][0]]))
Example 25
    def __init__(self):
        self.datastore = getattr(g, '_datastore', None)
        if not self.datastore:
            info("Creating connection to datastore")
            self.minio_host = "127.0.0.1:9000"
            self.minio_key = "test_key"
            self.minio_secret = "test_secret"
            self.minio_bucket = "seismic"
            self.datastore = g._datastore = Datastore(self.minio_host,
                                                      self.minio_key,
                                                      self.minio_secret,
                                                      self.minio_bucket)

        self.metadata = getattr(g, '_metadata', None)
        if not self.metadata:
            info("Creating connection to Metadata DB")
            self.crate_endpoints = ["localhost:4200"]
            self.metadata = g._metadata = Metadata(self.crate_endpoints)
            self.metadata.create_tables()
Example 26
def new_messages():
    if request.method == 'POST':
        request_body = request.json
        if 'text' not in request_body:
            return jsonify({"status": 400, "message": "missing text"})

        if 'phonenumber' not in request_body:
            return jsonify({"status": 400, "message": "missing phonenumber"})

        text = request_body["text"]
        phonenumber = isp.rm_country_code(request_body["phonenumber"])
        dec_isp = isp.deduce_isp(phonenumber)

        # TODO: validate minimum length
        # TODO: put logger in here to log everything
        print(
            f"[+] New sending message...\n\t-text: {text}\n\t-phonenumber: {phonenumber},\n\t-isp: {dec_isp}"
        )

        return_json = {"status": ""}
        try:
            # TODO: Determine ISP before sending messages
            # datastore = Datastore(configs_filepath="libs/configs/config.ini")
            datastore = Datastore()
            messageID = datastore.new_message(text=text,
                                              phonenumber=phonenumber,
                                              isp=dec_isp,
                                              _type="sending")
            return_json["status"] = 200
            return_json["messageID"] = messageID
        except Exception:
            print(traceback.format_exc())
            return_json["status"] = 500

    elif request.method == 'GET':
        print("[?] Fetching messages....")
        return_json = {"status": "", "tstate": ""}
        try:
            # datastore = Datastore(configs_filepath="libs/configs/config.ini")
            datastore = Datastore()
            messages = datastore.get_all_received_messages()
            return_json["status"] = 200
            return_json["messages"] = messages
            return_json["size"] = len(messages)
        except Exception:
            print(traceback.format_exc())
            return_json["status"] = 500

    return jsonify(return_json)
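A hypothetical invocation of the POST branch (the route path and port are assumptions; this excerpt does not show the routing):

# curl -X POST -H 'Content-Type: application/json' \
#      -d '{"text": "hello", "phonenumber": "+15551234567"}' \
#      http://localhost:5000/messages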
Example 27
class NoFilter(BaseDupeFilter):
	"""Request Fingerprint duplicates filter"""

	def __init__(self, path=None):
		self.r = Datastore()
		self.URL2ID = "URL2ID"

	@classmethod
	def from_settings(cls, settings):
		return cls(job_dir(settings))

	def request_seen(self, request):
		print("filter:", request.url)
		uid = self.r.get("%s:%s" % (self.URL2ID, hashxx(request.url)))
		# Only a known url with a non-positive id counts as already seen
		if uid and int(uid) <= 0:
			log.msg("FILTER SEEN:%s" % request.url, level=log.CRITICAL)
			return True

	def close(self, reason):
		pass
Example 28
def pe_make_master_time_series(time_resolution: int, time_span: str,
                               start_date: str, end_date: str, data_store: str,
                               param_store: str):
    """ Generate a Dash bar chart figure from transactional data """
    preventupdate_if_empty(data_store)
    params: Params = Params.from_json(param_store)
    if not time_resolution:
        time_resolution = params.init_time_res
    if not time_span:
        time_span = params.init_time_span
    trans, atree, eras, dstore = Datastore.get_parts(data_store,
                                                     params.pe_roots)
    unit = params.unit
    chart_fig: go.Figure = go.Figure(layout=layouts["periodic"])
    # get everything, remembering that it's already been pre-filtered by pe_roots
    root_account_id: str = atree.root
    selected_accounts = atree.get_children_ids(root_account_id)
    factor = Ledger.prorate_factor(time_span, ts_resolution=time_resolution)
    for i, account in enumerate(selected_accounts):
        try:
            bar = periodic_bar(trans,
                               atree,
                               account,
                               time_resolution,
                               time_span,
                               factor,
                               eras,
                               i,
                               deep=True,
                               positize=True,
                               unit=unit,
                               sel_start_date=start_date,
                               sel_end_date=end_date)
            if bar:
                chart_fig.add_trace(bar)
        except LError:
            pass
    return [chart_fig]
Example 29
	def __init__(self):
		self.r = Datastore()
		#self.r.flushdb()
		self.DOMAIN = "DOMAIN"
		self.LIMIT = 200
		self.DOMAIN_SET = "DOMAIN_SET"
Example 30
"""

import yaml

from flask import Flask, jsonify, make_response, request, url_for, abort
from flask_httpauth import HTTPBasicAuth
from datastore import Datastore

API_SERVER_VERSION = "0.0.1"
LISTEN_PORT = 5000

api_app = Flask(__name__)  # pylint: disable=invalid-name
http_auth = HTTPBasicAuth()  # pylint: disable=invalid-name
app_users = {}  # pylint: disable=invalid-name
db = Datastore()  # pylint: disable=invalid-name

#############################################################################
# Error handlers
#############################################################################


@api_app.errorhandler(404)
def not_found(error):
    """Error handler for 404 (pastie not found) errors."""
    return make_response(jsonify({
        'error': 'Not Found',
        'details': str(error)  # the exception object itself isn't JSON-serializable
    }), 404)

Example 31
def pe_time_series_selection_to_sunburst_and_transaction_table(
        figure, selectedData, time_resolution, time_span, data_store,
        param_store):
    """Selecting specific points from the time series chart updates the
    account burst and the detail labels.  Reminder to self: When you
    think selectedData input is broken, remember that unaltered
    default action in the graph is to zoom, not to select.

    Note: all of the necessary information is in figure but that
    doesn't seem to trigger reliably.  Adding selectedData as a second
    Input causes reliable triggering.

    """
    params: Params = Params.from_json(param_store)
    preventupdate_if_empty(data_store)
    trans, atree, eras, dstore = Datastore.get_parts(data_store,
                                                     params.pe_roots)
    if not time_resolution:
        time_resolution = params.init_time_res
    if not time_span:
        time_span = params.init_time_span
    if len(trans) == 0:
        app.logger.error(
            "Tried to make burst figure from transactions, but no transactions provided."
        )
        raise PreventUpdate()
    unit = params.unit

    ts_label = CONST["time_span_lookup"].get(time_span)["label"]
    min_period_start = None
    max_period_end = None
    selected_accounts = []
    selected_trans = pd.DataFrame()
    desc_account_count = 0
    colormap = {}

    # Get the names and colors of all accounts in the Input figure.
    # If anything is clicked, set the selection dates, accounts, and transactions.
    if figure:
        for trace in figure.get("data"):
            account = trace.get("name")
            points = trace.get("selectedpoints")
            colormap[account] = trace.get("marker").get("color")
            if not points:
                continue
            selected_accounts.append(account)
            for point in points:
                point_x = trace["x"][point]
                period_start, period_end = period_to_date_range(
                    time_resolution, point_x, eras)
                if min_period_start is None:
                    min_period_start = period_start
                else:
                    min_period_start = min(min_period_start, period_start)
                if max_period_end is None:
                    max_period_end = period_end
                else:
                    max_period_end = max(max_period_end, period_end)
                desc_accounts = atree.get_descendent_ids(account)
                desc_account_count = desc_account_count + len(desc_accounts)
                subtree_accounts = [account] + desc_accounts
                new_trans = (trans.loc[trans["account"].isin(
                    subtree_accounts)].loc[trans["date"] >= period_start].loc[
                        trans["date"] <= period_end])
                new_trans = Ledger.positize(
                    new_trans)  # each top-level account should net positive
                if len(selected_trans) > 0:
                    selected_trans = selected_trans.append(new_trans)
                else:
                    selected_trans = new_trans
    selected_count = len(selected_trans)

    if selected_count > 0 and len(selected_accounts) > 0:
        # If there are selected data, describe the contents of the sunburst
        # TODO: desc_account_count is still wrong.
        description = Burst.pretty_account_label(
            selected_accounts,
            desc_account_count,
            selected_count,
        )
    else:
        # If no trans are selected, show everything.  We could logically
        # get here even with valid accounts selected, which would be
        # confusing, but in practice any clickable bar has transactions
        # behind it, so this case should not occur.
        description = f"Click a bar in the graph to filter from {len(trans):,d} records"
        selected_trans = trans
        min_period_start = trans["date"].min()
        max_period_end = trans["date"].max()

    title = f"{ts_label} {unit} from {pretty_date(min_period_start)} to {pretty_date(max_period_end)}"
    pe_selection_store = {
        "start": min_period_start,
        "end": max_period_end,
        "count": len(selected_trans),
        "accounts": selected_accounts,
    }

    duration = round(
        pd.to_timedelta((max_period_end - min_period_start), unit="ms") /
        np.timedelta64(1, "M"))
    factor = Ledger.prorate_factor(time_span, duration=duration)
    try:
        sun_fig = Burst.from_trans(atree, selected_trans, time_span, unit,
                                   factor, colormap, title)
    except LError as E:
        text = f"Failed to generate sunburst.  Error: {E}"
        app.logger.warning(text)
        description = text
        sun_fig = go.Figure()  # fall back to an empty figure so the return below still works

    return (sun_fig, title, description, pe_selection_store)
Example 32
	def __init__(self, path=None):
		self.r = Datastore()
		self.URL2ID = "URL2ID"
Example 33
def setupdb(filepath, config):
  print "Creating database..."
  datastore = Datastore(filepath)
  cursor = datastore.connect()
  
  cleanup(cursor)
  datastore.commit()
  
  create_tables(cursor)
  datastore.commit()
    
  ansi = Datastore('/tmp/ansi.db')
  c = ansi.connect()
  cleanup(c)
  load_ansi(c, 'national.txt')
  ansi.commit()
  ansi.close()
  
  load_data(cursor, config)
  datastore.commit()
  
  update_data(cursor)
  datastore.commit()
  
  datastore.close()
Example 34
import csv
import pandas as pd
from flask import Flask, render_template, request, redirect, url_for,\
                  make_response, jsonify
import predictor
from werkzeug.contrib.cache import SimpleCache
from config_handler import ConfigHandler
from pathlib import Path
from datastore import Datastore, PickledDatastore

config = ConfigHandler()

if config.has('redis') and config.get('redis'):
    cache = Datastore()
    config = ConfigHandler(PickledDatastore(cache))
else:
    cache = SimpleCache()
    config = ConfigHandler(cache)

app = Flask(__name__)

data = {}
if config.has('input', 'ieee'):
    data['ieee'] = pd.read_csv(config.get('input', 'ieee'))

if config.has('input', 'acm'):
    acm_data = pd.read_csv(config.get('input', 'acm'))
    acm_data['url'] = acm_data['id'].apply(
        "https://dl.acm.org/citation.cfm?id={}&preflayout=flat#abstract".format
    )
    acm_data['title'] = acm_data['title'].fillna(acm_data['booktitle'])
Example 35
 def setUp(self):
     self.datastore = Datastore()
Example 36
 def test_index_already_exists(self):
     message = 'INDEX|cloog|gmp,isl,pkg-config\n'
     dependency_map = {'cloog':[], 'ceylon':['cloog']}
     datastore = Datastore(dependency_map)
     self.assertEqual(str(Message.OK), datastore.process_message(message))
Example 37
from task_worker import TaskWorker
from datastore import Datastore
from cloud_storage import CloudStorage
from cloudwatch import CloudWatch
import threading
import pika

ENTITY_KIND = 'job'
QUEUE = 'queue'
AMQP_URL = ''

DATASTORE = Datastore()
STORAGE = CloudStorage()
CLOUDWATCH = CloudWatch()


def publish_queue_length(channel):
    try:
        count = channel.queue_declare(queue=QUEUE,
                                      durable=True).method.message_count
        CLOUDWATCH.publish_queue_length(count)
    finally:
        threading.Timer(5, publish_queue_length, [channel]).start()


def callback(ch, method, properties, body):
    try:
        entity = DATASTORE.get(ENTITY_KIND, int(body))  # int covers Python 2's long
        worker = TaskWorker(DATASTORE, STORAGE, entity)
        worker.start()
    finally:
Example 38
 def post(self, test, id):
     query_dict = dict((param, request.args.get(param)) for param in Echo.query_params)
     datastore = Datastore(test, id, query_dict.get('digits'))
     return Response(datastore.read(), mimetype='text/xml')
Example 39
 def put(self, test, id):
     query_dict = dict((param, request.args.get(param)) for param in Echo.query_params)
     datastore = Datastore(test, id, query_dict.get('digits'), request.data)
     datastore.save()
     return {"status": True}
Example 40
from flask import Flask, render_template, Response, redirect, session
from datastore import Datastore
from datetime import datetime
import wakeword as wa
import squat as sq
import cv2
import time

# Initialize the Flask server
app = Flask(__name__)
# Initialize the datastore
datastore = Datastore('data.json')
# Initialize wake-word detection
wakeword_detector = wa.Detector('./ai_models/wakeword-detection.h5')
# Initialize person detection
squat_detector = sq.Detector('./ai_models/person_detection.tflite')
squat_detector.start()
# Create the counter
counter = sq.Counter()


# Handle requests to the root path (/)
@app.route('/')
def index():
    # Fetch the last two weeks of squat records
    items = datastore.get_items(days=14)
    # Return index.html to the client
    return render_template('index.html', items=items)


# Handle requests to /wakeword
Example 41
def ex_apply_selection(dummy, selectedData, figure, data_store, param_store):
    """Take the selected account from the main explorer chart
    and show it in a series of drill-down charts
    """
    preventupdate_if_empty(data_store)
    data_store: Datastore() = Datastore.from_json(data_store)
    tree: ATree = data_store.account_tree
    params: Params() = Params.from_json(param_store)
    unit: str = params.unit

    account = None  # stays None unless a chart point resolves to an account
    if not selectedData or len(selectedData) == 0:
        account = tree.root
    else:
        for i, indexed_fig in enumerate(figure):
            try:
                account = indexed_fig["data"][0]["customdata"][
                    selectedData[i]["points"][0]["pointNumber"]]
                if account and len(account) > 0:
                    break
            except TypeError:
                # happens when clicking on the second or later chart, because
                # the corresponding selectedData will be empty
                pass
    if not account:
        raise PreventUpdate
    lineage = tree.get_lineage_ids(account) + [account]
    charts: list = []
    trans: pd.DataFrame = data_store.trans
    tree: ATree = data_store.account_tree
    tree = tree.append_sums_from_trans(trans)
    tree = tree.roll_up_subtotals()
    palette = cb.Set3
    selection_color = None
    color_data = pd.DataFrame(columns=["account", "color"])

    # iterate through the lineage and make a new stacked bar chart for each level.
    for i, node in enumerate(lineage):
        palette_mod = 12 - i  # compensate for shrinking palette
        drill_data = pd.DataFrame(
            columns=["account", "child_id", "child_tag", "color", "amount"])
        children = tree.children(node)
        level_selection = []
        if len(children) > 0:
            try:
                level_selection = [
                    x.identifier for x in children
                    if x.identifier == lineage[i + 1]
                ]
            except IndexError:
                pass
            for j, point in enumerate(children):
                point_id = point.identifier
                color = palette[j % palette_mod]
                color_data = color_data.append(dict(account=point_id,
                                                    color=color),
                                               ignore_index=True)
                if len(level_selection) > 0:  # If there is a selection …
                    if point_id == level_selection[0]:
                        selection_color = color
                    else:
                        color = "rgba(100, 100, 100, .5)"
                drill_data = drill_data.append(
                    dict(
                        account=node,
                        child_id=point.identifier,
                        child_tag=point.tag,
                        color=color,
                        amount=point.data["total"],
                    ),
                    ignore_index=True,
                )
        else:
            continue
        try:
            drill_data = drill_data.sort_values("amount")
            node_bar: go.Bar = go.Bar(
                y=drill_data["account"],
                x=drill_data["amount"],
                marker_color=drill_data["color"],
                textposition="inside",
                text=drill_data["child_tag"],
                texttemplate="%{text}<br>" + unit + "%{value:,.0f}",
                hovertemplate="%{text}<br>" + unit +
                "%{value:,.0f}<extra></extra>",
                customdata=drill_data["child_id"],
                orientation="h",
            )
            fig: go.Figure = go.Figure(data=node_bar)
            fig.update_layout(layouts["drill"])
            fig.update_traces(traces["drill"])
            if selection_color and len(selection_color) > 0:
                # Don't reuse selected colors in later bars.
                palette = list(set(cb.Set3) - set([selection_color]))
                if i > 0:
                    fig.update_layout(title_text=node, title_x=0, title_y=0.98)
            charts = charts + [
                dcc.Graph(figure=fig, id={
                    "type": "ex_chart",
                    "index": i
                })
            ]
        except Exception as E:
            charts = charts + [html.Div(f"Error making {node}: {E}")]

    if len(lineage) > 1:
        selected_accounts = tree.get_descendent_ids(lineage[-1]) + [lineage[-1]]
        sel_trans = trans[trans["account"].isin(selected_accounts)]
        color_data = color_data.set_index("account")
        sel_trans["color"] = sel_trans.account.map(color_data.color)
        sel_trans["color"] = sel_trans["color"].fillna("darkslategray")
    else:
        sel_trans = trans
        sel_trans["color"] = "darkslategray"
    wrapper = textwrap.TextWrapper(width=40)

    def brfill(text, TW):
        return "<br>".join(TW.wrap(text))

    sel_trans["wrap"] = sel_trans["description"].apply(brfill, TW=wrapper)
    sel_trans["pretty_value"] = sel_trans["amount"].apply("{:,.0f}".format)

    sel_trans["customdata"] = (sel_trans["account"] + "<br>" +
                               sel_trans["date"].astype(str) + "<br>" +
                               sel_trans["pretty_value"] + "<br>" +
                               sel_trans["wrap"])
    dot_fig = px.scatter(
        sel_trans,
        x="date",
        y="amount",
        color="color",
        color_discrete_map="identity",
    )
    dot_fig.update_layout(layouts["dot_fig"])
    dot_fig.update_traces(traces["dot_fig"])
    dot_fig.update_traces(customdata=sel_trans["customdata"],
                          hovertemplate="%{customdata}<extra></extra>")

    charts = charts + [dcc.Graph(figure=dot_fig, id="ex_dot_chart")]
    return [charts]
Example 42
import sys
sys.path.append("../")
from datastore import Datastore

r = Datastore.factory()

DEFAULT = "../cusp/b_cpu"
RANK = "RANK"
SORTED_WORD_IN = "SORTED_WORD_IN"
WORD2ID = "WORD2ID"
FOUND_IN = "FOUND_IN"

try:
	f = open(sys.argv[1])
except IndexError:
	f = open(DEFAULT)

for i, l in enumerate(f):
	r.set(RANK + ":" + str(i + 1), l.strip())

f.close()

for word in r.smembers("WORD_SET"):
	word_id = r.get(WORD2ID + ":" + word)
	for url_id in r.smembers(FOUND_IN + ":" + word_id):
		rank = r.get(RANK + ":" + url_id)
		r.zadd(SORTED_WORD_IN + ":" + word_id, url_id, float(rank))

# to be commented if everything screws up
for word in r.smembers("WORD_SET"):
	word_id = r.get(WORD2ID + ":" + word)
Example 43
 def test_index_with_all_indexed_dependencies(self):
     message = 'INDEX|cloog|gmp,isl,pkg-config\n'
     dependency_map = {'gmp':[], 'isl':['cmake'], 'pkg-config':[]}
     datastore = Datastore(dependency_map)
     self.assertEqual(str(Message.OK), datastore.process_message(message))
Example 44
class TestDataStore(unittest.TestCase):
    """Tests for the Datastore class."""
    def setUp(self):
        self.datastore = Datastore()

    def test_can_connect_to_db(self):
        """Verify that we can connect to the database."""
        self.assertTrue(self.datastore.is_connected)

    def test_can_get_pastie_count(self):
        """Test that we can get the pastie count and it's a number."""
        try:
            pastie_count = int(self.datastore.get_pasties_count())
        except ValueError:
            self.fail("get_pasties_count() returned something non-numeric")
        self.assertIsNotNone(pastie_count)

    def test_can_look_up_pasties(self):
        "Verify that get_pastie works."
        test_record = self.datastore.get_pastie(1)
        self.assertIsNotNone(test_record)
        self.assertEqual(test_record["id"], 1)

    def test_look_up_undefined_pastie_returns_none(self):
        """Make sure we get None back when we look up a nonexistent pastie."""
        test_record = self.datastore.get_pastie(0)
        self.assertIsNone(test_record)

    def test_can_create_pasties(self):
        """Verify that we can create a new pastie."""
        pass

    def test_new_pasties_fill_in_missing_attrs(self):
        """Check that missing attributes are filled in on creation."""
        pass

    def test_create_pastie_with_duplicate_id_throws_error(self):
        """Check that creation enforces unique ids"""
        pass

    def test_update_pastie(self):
        """Verify that we can update a pastie."""
        pass

    def test_update_does_not_change_id(self):
        """Verify that the ID field is immutable once created."""
        pass

    def test_delete_pastie(self):
        """Verify that we can delete a pastie"""
        pass

    def test_delete_invalid_pastie_returns_error(self):
        """Verify that we get an error on deleting an invalid pastie ID"""
        pass

    def test_pastie_exists(self):
        """Verify that the pastie_exists method works"""
        pass

    def test_list_pasties(self):
        """Verify that list_pasties works"""
        pass

    def test_list_pasties_with_filter(self):
        """Verify that list_pasties works with a filter expression"""
        pass

    def test_list_pasties_with_limit(self):
        """Verify that list_pasties works with a limit value"""
        pass

    def test_list_pasties_with_filter_and_limit(self):
        """Verify that list_pasties works with a limit and filter"""
        pass
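Most of the remaining cases are stubs. As one illustration, a hedged sketch of how test_pastie_exists might be filled in; the pastie_exists method name is inferred from the test name, and the ids follow the lookup tests above (1 exists, 0 does not), so treat both as assumptions:

    def test_pastie_exists(self):
        # Sketch implementation: relies on the assumed pastie_exists() method
        # and on ids from the lookup tests above (1 exists, 0 does not).
        self.assertTrue(self.datastore.pastie_exists(1))
        self.assertFalse(self.datastore.pastie_exists(0))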
Example 45
 def test_index_with_no_dependencies(self):
     message = 'INDEX|ceylon|\n'
     dependency_map = {}
     datastore = Datastore(dependency_map)
     self.assertEqual(str(Message.OK), datastore.process_message(message))
Example 46
#!/usr/bin/env python
import json
import time
import datetime
import os
import logging
from config import Config
from datastore import Datastore

config = Config()

datastore = Datastore(db=2)


def write_event(event_type, message, app_name='global'):
    try:
        # Pipeline adding to app event set and then global
        epoch_timestamp = time.time()
        message = "Event: {}, Message: {}".format(event_type, message)
        datastore.writeEvent(app_name, message)
    except Exception as e:
        logging.error("Unable to write log event: %s", e)


def get_events(app_name='all'):
    events_data = datastore.getEvent(app_name)

    events_formatted = list()
    for event in events_data:
        print(event)
        message = event[0]
Example 47
 def test_remove_already_doesnt_exist(self):
     message = 'REMOVE|cloog|\n'
     dependency_map = {}
     datastore = Datastore(dependency_map)
     self.assertEqual(str(Message.OK), datastore.process_message(message))
Example 48
    except ModuleNotFoundError as e:
        log.fatal(f"Can't initialize picker {pickerType}: {e}")
        raise SystemExit(1)
    except AttributeError as e:
        log.fatal(f'Incorrect picker initialization: {e}')
        raise SystemExit(1)
    except Exception as e:
        log.fatal(f'Failed to initialize picker: {e}')
        raise SystemExit(1)


if __name__ != "__main__":
    log.debug("Starting main thread")

    selfControl = SelfControl()
    ds = Datastore(f'{homeDir}lootnika_tasks_journal.db')

    sphinxbuilder.check_rst(ds)

    from scheduler import Scheduler, first_start_calc
    startTime, taskCycles, repeatMin = first_start_calc(cfg['schedule'])

    # Scheduler and Picker must be able to see each other
    scheduler = Scheduler(cfg['schedule']['tasks'], taskCycles, repeatMin,
                          startTime)
    Picker = load_picker()
    Thread(name='Scheduler', target=scheduler.run, args=(Picker, )).start()

    import restserv
    Thread(
        name='RestServer',
Example 49
 def test_remove_exists(self):
     message = 'REMOVE|cloog|\n'
     dependency_map = {'gmp':['cmake'], 'cloog':[]}
     dependents_count = {'cmake':1}
     datastore = Datastore(dependency_map, dependents_count)
     self.assertEqual(str(Message.OK), datastore.process_message(message))
Example 50
def check_rst(ds: Datastore) -> dict:
    # TODO: needs refactoring
    log.debug("Check documentation sources")
    changed = False
    try:
        rows = ds.select('SELECT * FROM sphinxbuilder', )
        oldRst = {
            'lootnika': {
                'path': "docs/rst/",
                'type': 'lootnika',
                'rst': {}
            }
        }
        for row in rows:
            if row[1] not in oldRst:
                oldRst[row[1]] = {'rst': {}}

            oldRst[row[1]]['rst'][row[3]] = {'file': row[2], 'hash': row[4]}

        newRst = {
            'lootnika': {
                'path': "docs/rst/",
                'type': 'lootnika',
                'rst': {}
            }
        }
        for exporter in os.listdir(f'{homeDir}exporters'):
            path = f"exporters/{exporter}/docs/rst/"
            ls = os.listdir(f"{homeDir}{path}")
            if not ls:
                log.warning(f"No documentation sources found for {exporter}")
                continue

            if exporter not in oldRst:
                log.info(f"Found new exporter docs: {exporter}")
                oldRst[exporter] = {
                    'path': path,
                    'type': 'exporter',
                    'rst': {}
                }

            newRst[exporter] = {'path': path, 'type': 'exporter', 'rst': {}}
            for file in ls:
                rst = f"{path}{file}"
                with open(f"{homeDir}{rst}", encoding='utf-8',
                          mode='r') as cnt:
                    hsh = f"{cityhash.CityHash64(cnt.read())}"

                newRst[exporter]['rst'][rst] = {'file': file, 'hash': hsh}
                if rst in oldRst[exporter]['rst']:
                    if oldRst[exporter]['rst'][rst]['hash'] != hsh:
                        changed = True
                else:
                    changed = True

        for picker in os.listdir(f'{homeDir}pickers'):
            path = f"pickers/{picker}/docs/rst/"
            ls = os.listdir(f"{homeDir}{path}")
            if not ls:
                log.warning(f"No documentation sources found for {picker}")
                continue

            if picker not in oldRst:
                log.info(f"Found new picker docs: {picker}")
                oldRst[picker] = {'path': path, 'type': 'picker', 'rst': {}}

            newRst[picker] = {'path': path, 'type': 'picker', 'rst': {}}
            for file in ls:
                rst = f"{path}{file}"
                with open(f"{homeDir}{rst}", encoding='utf-8',
                          mode='r') as cnt:
                    hsh = f"{cityhash.CityHash64(cnt.read())}"

                newRst[picker]['rst'][rst] = {'file': file, 'hash': hsh}
                if rst in oldRst[picker]['rst']:
                    if oldRst[picker]['rst'][rst]['hash'] != hsh:
                        changed = True
                else:
                    changed = True

        exporter = "lootnika"
        path = newRst[exporter]['path']
        ls = os.listdir(f"{homeDir}{path}")
        for file in ls:
            rst = f"{path}{file}"
            with open(f"{homeDir}{rst}", encoding='utf-8', mode='r') as cnt:
                hsh = f"{cityhash.CityHash64(cnt.read())}"

            newRst[exporter]['rst'][rst] = {'file': file, 'hash': hsh}
            if rst in oldRst[exporter]['rst']:
                if oldRst[exporter]['rst'][rst]['hash'] != hsh:
                    changed = True
            else:
                changed = True

        if changed:
            log.warning(
                "Found changes in the documentation. Start me with the <make-doc> key."
            )

        return newRst
    except Exception:
        raise Exception(
            f"Failed to check sources for help documentation: {traceback.format_exc()}"
        )
Example 51
 def test_query_doesnt_exist(self):
     message = 'QUERY|cloog|\n'
     dependency_map = {'gmp':['cmake']}
     datastore = Datastore(dependency_map)
     self.assertEqual(str(Message.Fail), datastore.process_message(message))
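These tests exercise a simple line protocol, COMMAND|package|comma-separated-deps\n, where COMMAND is INDEX, REMOVE, or QUERY and process_message returns str(Message.OK) or str(Message.Fail). By symmetry with the failing QUERY above, a hedged sketch of the success case (the expected OK outcome is an assumption):

 def test_query_exists(self):
     message = 'QUERY|gmp|\n'
     dependency_map = {'gmp': ['cmake']}
     datastore = Datastore(dependency_map)
     self.assertEqual(str(Message.OK), datastore.process_message(message))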
Example 52
# Spotless is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.


import os

from datastore import Datastore
from node import Node
from utils import generate_hash, walk_directory

DB_NAME = 'test.db'
if os.path.exists(DB_NAME):
    os.remove(DB_NAME)
ds = Datastore(DB_NAME)
ds.clear()

print('Can a Node be added to the Datastore and increase the row count?')
node = Node('testdata/hello.txt')
ds.add_or_update_node(node)
assert(ds.num_items() == 1)
print('PASSED\n')

print('Does a file Node generate an expected hash?')
assert(generate_hash(node) == '60fde9c2310b0d4cad4dab8d126b04387efba289')
print('PASSED\n')

print("If we run the Node's update_info() method do we see valid attributes set on the Node?")
node.update_info(ds)
assert(node.hash == '60fde9c2310b0d4cad4dab8d126b04387efba289')
Example 53
class Bouncer(object):
    """A singleton that manages connected devices.

    The Bouncer provides the base functionality needed to instantiate a new
    Client or Network. It also acts as a bridge between connected Clients and
    Networks by maintaining an authoritative record of each connected device.

    Attributes:
        clients (set of sputnik.Client): A set of connected Clients.
        datastore (sputnik.Datastore): A Redis interface.
        networks (dict of sputnik.Network): A dictionary of connected Networks.
    """

    def __init__(self):
        """Creates an instance of a Bouncer.

        Initializes an empty set and an empty dictionary for later use, then
        reloads previously connected networks from the Datastore.
        """

        self.clients = set()
        self.networks = dict()

        try: # Attempt a Datastore Connection

            self.datastore = Datastore(hostname="localhost", port="6379")
            self.datastore.database.ping()

        except redis.ConnectionError: # Continue Without Persistence

            self.datastore = None
            print("Failed to Connect to a Redis Instance.\n"
                  "Continuing Without Persistence.")

        if self.datastore:

            if not self.datastore.get_password():
                self.datastore.set_password()

            history = self.datastore.get_networks()
            for credentials in history.values():
                self.add_network(**credentials)

    def start(self, hostname="", port=6667):
        """Starts the IRC and HTTP listen servers.

        This creates the IRC server-portion of the Bouncer, allowing it to
        accept connections from IRC clients. It also starts the HTTP server,
        enabling browsers to connect to the web interface.

        Note:
            This is a blocking call.

        Args:
            hostname (str, optional): Hostname to use. Defaults to ``""``.
            port (int, optional): The port to listen on. Defaults to 6667.
        """

        hport = os.getenv("RUPPELLS_SOCKETS_LOCAL_PORT")
        if hport: port = int(hport)

        loop = asyncio.get_event_loop()
        coro = loop.create_server(lambda: Client(self), hostname, port)
        loop.run_until_complete(coro)
        HTTPServer(self).start()

        try: loop.run_forever()
        except KeyboardInterrupt: pass
        finally: loop.close()

    def add_network(self, network, hostname, port,
                    nickname, username, realname,
                    password=None, usermode=0):
        """Connects the Bouncer to an IRC network.

        This forms the credentials into a dictionary. It then registers the
        network in the datastore, and connects to the indicated IRC network.

        Args:
            network (str): The name of the IRC network to connect to.
            hostname (str): The hostname of the IRC network to connect to.
            port (int): The port to connect using.
            nickname (str): The IRC nickname to use when connecting.
            username (str): The IRC ident to use when connecting.
            realname (str): The real name of the user.
            password (str, optional): Bouncer password. Defaults to ``None``.
            usermode (int, optional): The IRC usermode. Defaults to ``0``.
        """

        credentials = { "network"  : network,
                        "nickname" : nickname,
                        "username" : username,
                        "realname" : realname,
                        "hostname" : hostname,
                        "port"     : port,
                        "password" : password }

        if self.datastore: self.datastore.add_network(**credentials)
        loop = asyncio.get_event_loop()
        coro = loop.create_connection(lambda: Network(self, **credentials),
                                      hostname, port)
        asyncio.ensure_future(coro)  # asyncio.async() is removed in modern Python

    def remove_network(self, network):
        """Removes a network from the Bouncer.

        This disconnects the Bouncer from the indicated network and unregisters
        the network from the datastore.

        Args:
            network (str): the name of a network.
        """

        if network in self.networks:
            self.networks[network].connected = False
            self.networks[network].transport.close()
        if self.datastore: self.datastore.remove_network(network)
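A short usage sketch based on the docstrings above; the network credentials are placeholders:

# Stand up a bouncer, register one IRC network, then listen (blocking call).
bouncer = Bouncer()
bouncer.add_network(network="example", hostname="irc.example.org", port=6667,
                    nickname="sputnik", username="sputnik", realname="Sputnik User")
bouncer.start(port=6667)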