Example #1
0
        def wrapper(*args, **kwargs):
            """Run *func*, translating known Evernote error codes into
            user-facing messages; re-authenticates and retries once on an
            auth-token error (code 9)."""
            try:
                return func(*args, **kwargs)
            # "except Exception, e" is Python-2-only syntax; "as" is valid
            # on Python 2.6+ and Python 3.
            except Exception as e:
                logging.error("Error: %s : %s", func.__name__, str(e))

                if not hasattr(e, 'errorCode'):
                    out.failureMessage("Sorry, operation has failed!!!.")
                    tools.exitErr()

                errorCode = int(e.errorCode)

                # auth-token error, re-auth and retry the wrapped call once
                if errorCode == 9:
                    storage = Storage()
                    storage.removeUser()
                    GeekNote()
                    return func(*args, **kwargs)

                elif errorCode == 3:
                    out.failureMessage("Sorry, you do not have permissions "
                                       "to do this operation.")

                # Rate limited
                # Patched because otherwise if you get rate limited you still keep
                # hammering the server on scripts
                elif errorCode == 19:
                    print("\nRate Limit Hit: Please wait %s seconds before continuing" %
                          str(e.rateLimitDuration))
                    tools.exitErr()

                else:
                    return False

                tools.exitErr()
Example #2
0
    def textToENML(content, raise_ex=False, format='markdown'):
        """
        Create an ENML format of note.

        :param content: note text; non-strings are coerced to "".
        :param raise_ex: when True, re-raise conversion failures instead of
                         logging, printing a message and exiting.
        :param format: 'markdown' renders via markdown2 with the user's
                       configured extras; anything else is HTML-escaped.
        """
        if not isinstance(content, str):
            content = ""
        try:
            content = unicode(content, "utf-8")
            # add 2 spaces before a newline inside a paragraph so the
            # markdown renderer emits <br> tags
            content = re.sub(r'([^\r\n])([\r\n])([^\r\n])', r'\1  \n\3', content)
            if format == 'markdown':
                storage = Storage()
                extras = storage.getUserprop('markdown2_extras')
                contentHTML = markdown.markdown(
                        content, extras=extras).encode("utf-8")
                # Non-Pretty HTML output
                contentHTML = str(BeautifulSoup(contentHTML, 'html.parser'))
            else:
                contentHTML = Editor.HTMLEscape(content)
            return Editor.wrapENML(contentHTML)
        # was a bare "except:", which also swallowed KeyboardInterrupt and
        # SystemExit; only real errors should be handled here
        except Exception:
            if raise_ex:
                raise Exception("Error while parsing text to html."
                                " Content must be an UTF-8 encode.")

            logging.error("Error while parsing text to html. "
                          "Content must be an UTF-8 encode.")
            out.failureMessage("Error while parsing text to html. "
                               "Content must be an UTF-8 encode.")
            return tools.exitErr()
Example #3
0
def parse_chars(e, otus):
    """Parse nexml <characters> elements of *e* into Storage records.

    Each record carries its state definitions, typed metadata, and the
    matrices of per-otu sequences (with ambiguity sets resolved to symbols).
    """
    result = []
    for chars in e.findall(NEXML+"characters"):
        block = Storage(chars.attrib)
        block.states = parse_states(chars)
        block.meta = Storage()
        # collect typed metadata annotations keyed by their property name
        for meta in chars.findall(NEXML+"meta"):
            attrs = meta.attrib
            if attrs.get("content"):
                block.meta[attrs["property"]] = (
                    META_DATATYPE[attrs["datatype"]](attrs["content"]))
        block.matrices = []
        for matrix in chars.findall(NEXML+"matrix"):
            mat = Storage()
            mat.rows = []
            for row in matrix.findall(NEXML+"row"):
                rec = Storage(row.attrib)
                rec.otu = otus[rec.otu]
                raw = row.findall(NEXML+"seq")[0].text
                pieces = []
                for piece in AMBIG_RE.split(raw):
                    if piece.startswith("{"):
                        # ambiguity set like "{AC}": map to its symbol,
                        # falling back to "?" and warning when unmapped
                        key = frozenset(piece[1:-1])
                        symb = block.states.states2symb.get(key)
                        if key and not symb:
                            sys.stderr.write("missing ambig symbol for %s\n" %
                                             "".join(sorted(key)))
                        piece = symb or "?"
                    pieces.append(piece)
                rec.seq = "".join(pieces)
                mat.rows.append(rec)
            block.matrices.append(mat)
        result.append(block)
    return result
Example #4
0
    def edit(self):
        """
        Call the system editor, that types as a default in the system.
        Editing goes in markdown format, and then the markdown
        converts into HTML, before uploading to Evernote.

        Returns the edited file content as a string.
        """

        # Try to find default editor in the system.
        storage = Storage()
        editor = storage.getUserprop('editor')

        if not editor:
            editor = os.environ.get("editor")

        if not editor:
            editor = os.environ.get("EDITOR")

        if not editor:
            # If no default editor is found, fall back to a per-platform one.
            if sys.platform == 'win32':
                editor = config.DEF_WIN_EDITOR
            else:
                editor = config.DEF_UNIX_EDITOR

        # Make a system call to open file for editing.
        logging.debug("launch system editor: %s %s" % (editor, self.tempfile))

        out.preloader.stop()
        os.system(editor + " " + self.tempfile)
        out.preloader.launch()
        # context manager closes the handle deterministically; the original
        # leaked it until garbage collection
        with open(self.tempfile, 'r') as f:
            newContent = f.read()

        return newContent
Example #5
0
	def rooms_occupied_by(self, people):
		"""Print (sorted, comma-separated) the rooms that at some point
		contained exactly the wanted set of people simultaneously."""
		s = Storage(self.name, self.token, True)
		room_numbers, rooms, person_names, persons, current_timestamp = s.get_stats()
		log = s.get_log()

		people_wanted = { (t[1]+name):True for t, name in people }
		rooms = {}
		result = {}

		for event in log:
			if not event.who in people_wanted or event.room == -1:
				continue

			if event.action == 'A':
				if event.room in rooms:
					# BUG FIX: this was assigned to a misspelled name
					# ("people_preset"), so re-entering a known room kept
					# using the previous room's occupant dict.
					people_present = rooms[event.room]
				else:
					people_present = {}
					rooms[event.room] = people_present

				people_present[event.who] = True
				if people_present == people_wanted:
					result[event.room] = True

			elif event.action == 'L':
				if event.who in rooms[event.room]:
					rooms[event.room].pop(event.who)

		if len(result) == 0:
			return

		result = [e for e in result.keys()]
		result.sort()
		result2 = [str(e) for e in result]
		print(','.join(result2))
Example #6
0
    def __init__(self, section, cfgfile="tor2web.conf"):
        """Bind this Storage-backed config to one INI *section* of *cfgfile*."""
        Storage.__init__(self)

        self._section = section
        self._cfgfile = cfgfile
        # parse the configuration file up front
        self._cfgparser = ConfigParser.ConfigParser()
        self._cfgparser.read([self._cfgfile])
Example #7
0
 def test_routes_absolute(self):
     """
     Test absolute URL generation across scheme/host/port combinations.
     """
     load(data="")
     r = Storage()
     r.env = Storage()
     r.env.http_host = "domain.com"
     r.env.wsgi_url_scheme = "httpx"  # distinguish incoming scheme
     # (extra URL kwargs, expected rendering) pairs, checked in order
     cases = [
         (dict(), "/a/c/f"),
         (dict(host=True), "httpx://domain.com/a/c/f"),
         (dict(host="host.com"), "httpx://host.com/a/c/f"),
         (dict(scheme=True), "httpx://domain.com/a/c/f"),
         (dict(scheme=False), "/a/c/f"),
         (dict(scheme="https"), "https://domain.com/a/c/f"),
         (dict(scheme="wss"), "wss://domain.com/a/c/f"),
         (dict(scheme=True, host=True), "httpx://domain.com/a/c/f"),
         (dict(scheme="https", host=True), "https://domain.com/a/c/f"),
         (dict(scheme=False, host=True), "httpx://domain.com/a/c/f"),
         (dict(scheme=True, host="host.com"), "httpx://host.com/a/c/f"),
         (dict(scheme=False, host="host.com"), "httpx://host.com/a/c/f"),
         (dict(port=1234), "httpx://domain.com:1234/a/c/f"),
         (dict(scheme=True, port=1234), "httpx://domain.com:1234/a/c/f"),
         (dict(host="host.com", port=1234), "httpx://host.com:1234/a/c/f"),
         (dict(scheme="wss", host="host.com", port=1234), "wss://host.com:1234/a/c/f"),
     ]
     for kwargs, expected in cases:
         self.assertEqual(str(URL(r=r, a="a", c="c", f="f", **kwargs)), expected)
Example #8
0
def parse_trees(e, otus):
    """Parse nexml <trees> elements into Storage records with linked Nodes.

    *e* is a nexml document parsed by etree; *otus* maps otu ids to their
    parsed otu records.  Returns a list of Storage objects, each with
    .attrib, .nodes (id -> Node) and .root set.
    """
    from ivy.tree import Node
    #from tree import Node
    v = []
    for tb in e.findall(NEXML+"trees"):
        for te in tb.findall(NEXML+"tree"):
            t = Storage()
            t.attrib = Storage(te.attrib)
            t.nodes = {}
            # first pass: create a Node per <node>; leaves carry an otu ref
            for n in te.findall(NEXML+"node"):
                node = Node()
                if n.attrib.get("otu"):
                    node.isleaf = True
                    node.otu = otus[n.attrib["otu"]]
                    node.label = node.otu.label
                t.nodes[n.attrib["id"]] = node
            # second pass: wire up parent/child edges and branch lengths
            for edge in te.findall(NEXML+"edge"):
                d = edge.attrib
                n = t.nodes[d["target"]]
                p = t.nodes[d["source"]]
                length = d.get("length")
                if length:
                    n.length = float(length)
                p.add_child(n)
            # exactly one node must be parentless: that is the root
            r = [ n for n in t.nodes.values() if not n.parent ]
            assert len(r)==1
            r = r[0]
            r.isroot = True
            # number nodes in traversal order starting from 1
            for i, n in enumerate(r): n.id = i+1
            t.root = r
            v.append(t)
    return v
Example #9
0
class EvernoteController(object):
    """Thin wrapper around the Evernote API client plus a local Storage cache."""

    def __init__(self, token, isSpecialToken = False, sandbox = False, isInternational = False, notebooks = None):
        self.token = token
        # pick the service host: sandbox default, international
        # (www.evernote.com) or the Chinese service (app.yinxiang.com)
        if sandbox:
            self.client = EvernoteClient(token=self.token)
        elif isInternational:
            self.client = EvernoteClient(token=self.token, service_host='www.evernote.com')
        else:
            self.client = EvernoteClient(token=self.token, service_host='app.yinxiang.com')
        self.isSpecialToken = isSpecialToken
        self.userStore = self.client.get_user_store()
        self.noteStore = self.client.get_note_store()
        self.storage = Storage(notebooks)

    def get_upload_limit(self):
        """Return the monthly upload quota in bytes for the user's privilege
        level (0 for unknown levels)."""
        return {
            1: 25 * 1024 * 1024,
            3: 100 * 1024 * 1024,
            5: 200 * 1024 * 1024,
        }.get(self.userStore.getUser().privilege, 0)

    def create_notebook(self, noteFullPath):
        """Create a notebook named *noteFullPath*.

        Returns True when the notebook exists afterwards, False when it was
        already present locally."""
        if self.get(noteFullPath): return False
        notebook = Types.Notebook()
        notebook.name = noteFullPath
        try:
            notebook = self.noteStore.createNotebook(notebook)
        # was Python-2-only "except EDAMUserException, e" syntax
        except EDAMUserException as e:
            if e.errorCode == 10 and e.parameter == 'Notebook.name':
                # name collision on the server: refresh the local cache
                self.storage.update(self.token, self.noteStore)
                return True
            else:
                raise e
        self.storage.create_notebook(notebook)
        return True
Example #10
0
	def total_time_spent_by(self, who, is_employee):
		"""Print the total gallery time for one person (0 if unknown)."""
		prefix = 'E' if is_employee else 'G'
		who = prefix + who

		s = Storage(self.name, self.token, True, True)
		if s.error:
			print(0)
			return

		s.verify_log()

		room_numbers, rooms, person_names, persons, current_timestamp = s.get_stats()

		if who not in person_names:
			print(0)
			return

		person = person_names[who]
		if person.is_employee != is_employee:
			libgeneric.print_error_and_exit("invalid")

		total_time = person.time_in_gallery
		# still inside the gallery: count the open interval up to "now"
		if person.gallery_timestamp >= 0:
			total_time += current_timestamp - person.gallery_timestamp
		print(total_time)
    def add(self, item):
        """Add *item* unless an equal bookmark already exists, then persist."""
        if self.find(item):
            return
        Storage.add(self, item)
        self.save()
Example #12
0
	def rooms_entered_by(self, who, is_employee):
		"""Print a comma-separated list of rooms *who* entered, in log order."""
		who = ('E' if is_employee else 'G') + who

		s = Storage(self.name, self.token, True)
		room_numbers, rooms, person_names, persons, _ = s.get_stats()
		log = s.get_log()
		# arrivals ('A') into real rooms only; -1 marks the gallery itself
		entered = [str(e.room) for e in log
		           if e.who == who and e.action == 'A' and e.room != -1]
		print(','.join(entered))
Example #13
0
	def state(self):
		"""Print employees and guests currently in the gallery (one sorted
		line each), then the sorted occupants of every non-empty room."""
		s = Storage(self.name, self.token, True)
		s.verify_log()
		room_numbers, rooms, person_names, persons, _ = s.get_stats()
		if persons:
			inside = [p for p in persons if p.gallery_timestamp >= 0]
			employees = sorted(p.name[1:] for p in inside if p.is_employee)
			print(','.join(employees))
			guests = sorted(p.name[1:] for p in inside if not p.is_employee)
			print(','.join(guests))
		else:
			print()

		room_dir = {}
		for room in rooms:
			room_dir[room.number] = []

		for person in persons:
			if person.room >= 0:
				room_dir[rooms[person.room].number].append(person.name[1:])

		occupied = []
		for number, people in room_dir.items():
			people.sort()
			occupied.append((number, ','.join(people)))

		# report rooms in ascending room-number order
		occupied.sort(key=lambda entry: entry[0])

		for number, names in occupied:
			if names != '':
				print(str(number) + ": " + names)
Example #14
0
        def wrapper(*args, **kwargs):
            """Run *func*, mapping known Evernote error codes to messages;
            re-authenticates and retries once on an auth-token error."""
            try:
                return func(*args, **kwargs)
            # "except Exception, e" is Python-2-only syntax; "as" is valid
            # on Python 2.6+ and Python 3.
            except Exception as e:
                logging.error("Error: %s : %s", func.__name__, str(e))

                if not hasattr(e, 'errorCode'):
                    out.failureMessage("Sorry, operation has failed!!!.")
                    tools.exit()

                errorCode = int(e.errorCode)

                # auth-token error, re-auth
                if errorCode == 9:
                    storage = Storage()
                    storage.removeUser()
                    GeekNote()
                    return func(*args, **kwargs)

                elif errorCode == 3:
                    out.failureMessage("Sorry, you do not have permissions to do this operation.")

                else:
                    return False

                tools.exit()
Example #15
0
def doit():
    """Probe the system's storage devices with libstorage-ng and print the result."""

    set_logger(get_logfile_logger())

    my_remote_callbacks = MyRemoteCallbacks()

    set_remote_callbacks(my_remote_callbacks)

    # when save_mockup is set, record all probe I/O so runs can be replayed
    environment = Environment(True, ProbeMode_STANDARD_WRITE_MOCKUP if save_mockup else ProbeMode_STANDARD, TargetMode_DIRECT)
    if save_mockup:
        environment.set_mockup_filename("mockup.xml")

    storage = Storage(environment)

    try:
        storage.probe()
    except Exception as exception:
        # libstorage-ng exceptions expose their message via .what()
        print(exception.what())
        exit(1)

    print()

    probed = storage.get_probed()

    print(probed)

    if save_devicegraph:
        probed.save("devicegraph.xml")
Example #16
0
def read_config(config_file):
    '''Reads the ~/.goobookrc and ~/.netrc.
    returns the configuration as a dictionary.

    Falls back to decrypting <config_file>.gpg via gpg when the plain file
    does not exist.  Exits the process on read/parse failure.
    '''
    config = Storage({ # Default values
        'email': '',
        'password': '',
        'max_results': '9999',
        'cache_filename': '~/.goobook_cache',
        'cache_expiry_hours': '24',
        })
    config_file = os.path.expanduser(config_file)
    if os.path.lexists(config_file) or os.path.lexists(config_file + '.gpg'):
        try:
            parser = ConfigParser.SafeConfigParser()
            if os.path.lexists(config_file):
                log.info('Reading config: %s', config_file)
                f = open(config_file)
            else:
                log.info('Reading config: %s', config_file + '.gpg')
                sp = subprocess.Popen(['gpg', '--no-tty', '-q', '-d', config_file + ".gpg"], stdout=subprocess.PIPE)
                f = sp.stdout
            parser.readfp(f)
            config.update(dict(parser.items('DEFAULT', raw=True)))
        # "except (...), e" is Python-2-only syntax; "as" works on 2.6+/3
        except (IOError, ConfigParser.ParsingError) as e:
            # write() replaces the Python-2-only "print >>" statement
            sys.stderr.write("Failed to read configuration %s\n%s\n" % (config_file, e))
            sys.exit(1)
    # the docstring promises the configuration dict; the original fell off
    # the end and returned None
    return config
Example #17
0
class Updater(object):
	"""Pushes new database rows to the web API and deletes rows older than
	``days_befor_delete`` days, tracking its progress in Storage."""

	# age (in days) after which data is removed; name kept for compatibility
	days_befor_delete = 3

	def __init__(self):
		self.__logger = logging.getLogger(__name__)
		self.__load_config()
		self.__database = Database(self.__host, self.__db, self.__user, self.__password)
		self.__web = Web(self.__api_key, self.__url)
		self.__storage = Storage()
		# date(1980, 01, 01) used leading-zero literals, a SyntaxError on Python 3
		self.last_update = self.__storage.get('last_update', date(1980, 1, 1))
		self.last_delete = self.__storage.get('last_delete', date.today())
		self.days = timedelta(days=self.days_befor_delete)
		self.to_delete = date.today() - self.days

	def __delete(self):
		"""Remove data that left the retention window from the web API."""
		self.__logger.info('Removing old data from the database...')
		data = self.__database.get_delete_data(str(self.last_delete), str(self.to_delete))
		self.__web.delete_geodata(data)
		self.__logger.info('finished')
		self.__storage['last_delete'] = self.to_delete

	def __add(self):
		"""Upload rows added since the last run."""
		self.__logger.info('Adding new data to the database...')
		# inside the retention window only the delta is needed
		if (self.last_update > self.to_delete):
			data = self.__database.get_new_data(self.last_update)
		else:
			data = self.__database.get_data(self.last_update, self.to_delete)
		self.__web.add_geodata(data)
		self.__logger.info('finished')
		self.__storage['last_update'] = date.today()

	def __close(self):
		"""Persist the bookkeeping dates."""
		self.__logger.debug("Updateing stored dates. last_update: " + str(date.today()) + ", last_delete: " + str(self.to_delete))
		self.__storage.close()

	def run(self):
		"""Entry point: run add/delete at most once per day."""
		self.__logger.info('Starting updater...')
		self.__logger.debug('Last update: ' + str(self.last_update) + ' with delete date: ' + str(self.last_delete) + '. Delete data which is ' + str(self.days_befor_delete) + ' old.')
		if (self.last_update < date.today()):
			self.__add()
			self.__delete()
		else:
			self.__logger.info('Not run. Data is uptodate.')
		self.__close()
		self.__logger.info('Finish updater')

	def __load_config(self):
		"""Read database and web-API settings from config.cfg."""
		config = RawConfigParser()
		config.readfp(open('config.cfg'))
		# Database config
		self.__host = config.get('Database', 'host')
		self.__db = config.get('Database', 'database')
		self.__user = config.get('Database', 'user')
		self.__password = config.get('Database', 'password')
		# Web config
		self.__api_key = config.get('web', 'api_key')
		self.__url = config.get('web', 'url')
Example #18
0
    def storeIndex(self):
        """Persist ``self.dictionary`` via Storage, timing the operation."""
        self.timer = Timer()
        self.timer.start()

        Storage().saveIndex(self.dictionary)

        self.timer.stop()
Example #19
0
	def append(self, timestamp, action, person, is_employee, room=-1):
		"""Append one log event; a missing room is normalised to -1."""
		# identity test replaces the old "room == None" equality check
		room = -1 if room is None else int(room)

		s = Storage(self.name, self.token, readonly=False)
		s.append(person, is_employee, action, int(timestamp), room)
Example #20
0
def buildform(_table_name, *fields):
    """Assemble a Storage acting as a form: one entry per field object plus
    the _tablename and fields bookkeeping attributes."""
    form = Storage()
    form._tablename = _table_name
    for fld in fields:
        fld._tablename = _table_name
        form[fld.name] = fld
    form.fields = [fld.name for fld in fields]
    return form
Example #21
0
 def test_getlast(self):
     # getlast() returns the final element of list values, the value itself
     # for scalars, and None for missing keys (as with request.vars)
     store = Storage()
     store.x = 'abc'
     store.y = ['abc', 'def']
     self.assertEqual(store.getlast('x'), 'abc')
     self.assertEqual(store.getlast('y'), 'def')
     self.assertEqual(store.getlast('z'), None)
Example #22
0
 def get(self, mode):
     """Handle GET: 'clear' empties the Storage cache; anything else is rejected."""
     if mode != 'clear':
         self.write('unknown mode')
         return
     Storage.clear_cache()
     self.write('cleared')
Example #23
0
 def test_getlist(self):
     # getlist() wraps scalar values in a list, returns list values as-is,
     # and yields [] for missing keys (as with request.vars)
     store = Storage()
     store.x = 'abc'
     store.y = ['abc', 'def']
     self.assertEqual(store.getlist('x'), ['abc'])
     self.assertEqual(store.getlist('y'), ['abc', 'def'])
     self.assertEqual(store.getlist('z'), [])
Example #24
0
 def __init__(self, db=None, tablename='web2py_ticket'):
     """Ticket storage backed by *db*; rows are written to *tablename*."""
     Storage.__init__(self)
     self.db = db
     self.tablename = tablename
Example #25
0
  def test(self, model_class_mock):
    """populate() must save exactly one model and return the save result."""
    save_mock = Mock(return_value='mocked')
    model_class_mock.return_value.save = save_mock

    result = Storage().populate()
    self.assertEqual(save_mock.call_count, 1)
    self.assertEqual(result, 'mocked')
Example #26
0
    def testURL(self):
        """Exercise URL(): positional args, request defaults, hmac signatures,
        hash_vars selection and CRLF rejection."""
        self.assertEqual(URL('a', 'c', 'f', args='1'), '/a/c/f/1')
        self.assertEqual(URL('a', 'c', 'f', args=('1', '2')), '/a/c/f/1/2')
        self.assertEqual(URL('a', 'c', 'f', args=['1', '2']), '/a/c/f/1/2')
        self.assertEqual(URL('a', 'c', '/f'), '/a/c/f')
        self.assertEqual(URL('a', 'c', 'f.json'), '/a/c/f.json')
        self.assertRaises(SyntaxError, URL, *['a'])

        # fake a current request so URL() can fill in missing a/c/f parts
        request = Storage()
        request.application = 'a'
        request.controller = 'c'
        request.function = 'f'
        request.env = {}

        from globals import current  # Can't be moved with other import
        current.request = request

        must_return = '/a/c/f'
        self.assertEqual(URL(), must_return)
        self.assertEqual(URL('f'), must_return)
        self.assertEqual(URL('c', 'f'), must_return)
        self.assertEqual(URL('a', 'c', 'f'), must_return)
        self.assertEqual(URL('a', 'c', 'f', extension='json'), '/a/c/f.json')

        # a callable is rendered by its __name__
        def weird():
            pass
        self.assertEqual(URL('a', 'c', weird), '/a/c/weird')
        self.assertRaises(SyntaxError, URL, *['a', 'c', 1])
        # test signature
        rtn = URL(
            a='a', c='c', f='f', args=['x', 'y', 'z'],
            vars={'p': (1, 3), 'q': 2}, anchor='1', hmac_key='key'
            )
        self.assertEqual(rtn, '/a/c/f/x/y/z?p=1&p=3&q=2&_signature=a32530f0d0caa80964bb92aad2bedf8a4486a31f#1')
        # test _signature exclusion
        rtn = URL(
            a='a', c='c', f='f', args=['x', 'y', 'z'],
            vars={'p': (1, 3), 'q': 2, '_signature': 'abc'},
            anchor='1', hmac_key='key'
            )
        self.assertEqual(rtn, '/a/c/f/x/y/z?p=1&p=3&q=2&_signature=a32530f0d0caa80964bb92aad2bedf8a4486a31f#1')
        # emulate user_signature
        current.session = Storage(auth=Storage(hmac_key='key'))
        self.assertEqual(URL(user_signature=True), '/a/c/f?_signature=c4aed53c08cff08f369dbf8b5ba51889430cf2c2')
        # hash_vars combination: True/default hashes all vars, False none,
        # a list (or single name) hashes only the named vars
        rtn = URL('a', 'c', 'f', args=['x', 'y', 'z'], vars={'p': (1, 3), 'q': 2}, hmac_key='key')
        self.assertEqual(rtn, '/a/c/f/x/y/z?p=1&p=3&q=2&_signature=a32530f0d0caa80964bb92aad2bedf8a4486a31f')
        rtn = URL('a', 'c', 'f', args=['x', 'y', 'z'], vars={'p': (1, 3), 'q': 2}, hmac_key='key', hash_vars=True)
        self.assertEqual(rtn, '/a/c/f/x/y/z?p=1&p=3&q=2&_signature=a32530f0d0caa80964bb92aad2bedf8a4486a31f')
        rtn = URL('a', 'c', 'f', args=['x', 'y', 'z'], vars={'p': (1, 3), 'q': 2}, hmac_key='key', hash_vars=False)
        self.assertEqual(rtn, '/a/c/f/x/y/z?p=1&p=3&q=2&_signature=0b5a0702039992aad23c82794b8496e5dcd59a5b')
        rtn = URL('a', 'c', 'f', args=['x', 'y', 'z'], vars={'p': (1, 3), 'q': 2}, hmac_key='key', hash_vars=['p'])
        self.assertEqual(rtn, '/a/c/f/x/y/z?p=1&p=3&q=2&_signature=5d01b982fd72b39674b012e0288071034e156d7a')
        rtn = URL('a', 'c', 'f', args=['x', 'y', 'z'], vars={'p': (1, 3), 'q': 2}, hmac_key='key', hash_vars='p')
        self.assertEqual(rtn, '/a/c/f/x/y/z?p=1&p=3&q=2&_signature=5d01b982fd72b39674b012e0288071034e156d7a')
        # test CRLF detection
        self.assertRaises(SyntaxError, URL, *['a\n', 'c', 'f'])
        self.assertRaises(SyntaxError, URL, *['a\r', 'c', 'f'])
Example #27
0
 def user_agent(self):
     """Return the parsed user agent as a Storage, cached in the session."""
     from gluon.contrib import user_agent_parser
     session = current.session
     cached = session._user_agent
     if not cached:
         cached = user_agent_parser.detect(self.env.http_user_agent)
     session._user_agent = cached
     result = Storage(cached)
     # promote nested dicts to Storage so attribute access works throughout
     for key, value in result.items():
         if isinstance(value, dict):
             result[key] = Storage(value)
     return result
Example #28
0
 def __init__(self, config={}):
     '''
     config["path"] should contain the directory of where to read and write
     data to; falls back to /tmp/pickle.pickle when absent.
     '''
     # NOTE(review): the mutable default {} is kept for interface
     # compatibility; it is never mutated here.
     Storage.__init__(self, config)
     # .get() avoids the KeyError the old config["path"] lookup raised when
     # a dict without "path" was supplied; isinstance replaces type(...) is dict
     path = config.get("path") if isinstance(config, dict) else None
     self.pickle_filepath = path if path is not None else "/tmp/pickle.pickle"
 def test_StaticURL(self):
     # test response.static_version coupled with response.static_version_urls
     self.assertEqual(URL('a', 'c', 'f'), '/a/c/f')
     self.assertEqual(URL('a', 'static', 'design.css'), '/a/static/design.css')
     response = Storage()
     response.static_version = '1.2.3'
     from globals import current
     current.response = response
     # version set but feature flag still off: URL must stay unversioned
     self.assertEqual(URL('a', 'static', 'design.css'), '/a/static/design.css')
     response.static_version_urls = True
     # flag on: version is injected as a "_1.2.3" path segment
     self.assertEqual(URL('a', 'static', 'design.css'), '/a/static/_1.2.3/design.css')
Example #30
0
class Project:
    """Facade over Storage for reading project records."""

    def __init__(self, database):
        self.project = {}
        self.storage = Storage(database)

    def get_project_info(self, projectname):
        """Load and cache the project row whose 'uri' equals *projectname*."""
        self.project = self.storage.readstorage('projects', 'uri', projectname)
        # the original indented this return with a tab inside space-indented
        # code, which is a TabError on Python 3
        return self.project

    def get_all_projects(self):
        """Return every stored project record."""
        return self.storage.get_all_data()
Example #31
0
    def __init__(self):
        """Build the tor2web configuration: hard-coded defaults, command-line
        options, then the values loaded from the config file."""
        Storage.__init__(self)
        self._section = 'main'
        self._parser = ConfigParser.ConfigParser()

        parser = OptionParser()
        parser.add_option("-c",
                          "--configfile",
                          dest="configfile",
                          default="/etc/tor2web.conf")
        parser.add_option("-p",
                          "--pidfile",
                          dest="pidfile",
                          default='/var/run/tor2web/t2w.pid')
        parser.add_option("-u", "--uid", dest="uid", default='')
        parser.add_option("-g", "--gid", dest="gid", default='')
        parser.add_option("-n",
                          "--nodaemon",
                          dest="nodaemon",
                          default=False,
                          action="store_true")
        parser.add_option("-d",
                          "--rundir",
                          dest="rundir",
                          default='/var/run/tor2web/')
        parser.add_option("-x", "--command", dest="command", default='start')
        options, _ = parser.parse_args()

        self._file = options.configfile

        # Defaults, collapsed from one self.__dict__[...] assignment per key.
        # Stored via __dict__ to bypass Storage's attribute machinery;
        # self.load() below overrides them from the config file.
        defaults = {
            'configfile': options.configfile,
            'pidfile': options.pidfile,
            'uid': options.uid,
            'gid': options.gid,
            'nodaemon': options.nodaemon,
            'command': options.command,
            'nodename': 'tor2web',
            'datadir': '/home/tor2web',
            'sysdatadir': '/usr/share/tor2web/data',
            'ssl_key': None,
            'ssl_cert': None,
            'ssl_intermediate': None,
            'ssl_dh': None,
            'rundir': options.rundir,
            'logreqs': False,
            'debugmode': False,
            'debugtostdout': False,
            'processes': 1,
            'requests_per_process': 1000000,
            'transport': 'BOTH',
            'listen_ipv4': '127.0.0.1',
            'listen_ipv6': None,
            'listen_port_http': 80,
            'listen_port_https': 443,
            'basehost': 'tor2web.org',
            'sockshost': '127.0.0.1',
            'socksport': 9050,
            'socksoptimisticdata': True,
            'sockmaxpersistentperhost': 5,
            'sockcachedconnectiontimeout': 240,
            'sockretryautomatically': True,
            'cipher_list': 'ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES256-SHA384:'
                           'ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES128-SHA256:'
                           'ECDHE-RSA-AES256-SHA:DHE-DSS-AES256-SHA:DHE-RSA-AES128-SHA:',
            'ssl_tofu_cache_size': 100,
            'mode': 'BLOCKLIST',
            'onion': None,
            'blockcrawl': True,
            'overriderobotstxt': True,
            'blockhotlinking': True,
            'blockhotlinking_exts': ['jpg', 'png', 'gif'],
            'deny_caching': True,
            'extra_http_response_headers': None,
            'disable_disclaimer': False,
            'disable_banner': False,
            'disable_tor_redirection': False,
            'disable_gettor': False,
            'avoid_rewriting_visible_content': False,
            'smtpuser': '******',
            'smtppass': '******',
            'smtpmail': '*****@*****.**',
            'smtpmailto_exceptions': '*****@*****.**',
            'smtpmailto_notifications': '*****@*****.**',
            'smtpdomain': 'demo.globaleaks.org',
            'smtpport': 9267,
            'smtpsecurity': 'TLS',
            'exit_node_list_refresh': 600,
            'automatic_blocklist_updates_source': '',
            'automatic_blocklist_updates_refresh': 600,
            'automatic_blocklist_updates_mode': "MERGE",
            'publish_lists': False,
            'mirror': [],
            'dummyproxy': None,
            'bufsize': 4096,
        }
        self.__dict__.update(defaults)
        # derived default, kept after the bulk update because it reads
        # the (possibly overridden) transport value
        self.__dict__['proto'] = 'http://' if self.__dict__[
            'transport'] == 'HTTP' else 'https://'

        # Development VS. Production
        localpath = os.path.abspath(
            os.path.join(os.path.dirname(sys.argv[0]), "..", "data"))
        if os.path.exists(localpath):
            self.__dict__['sysdatadir'] = localpath

        self.load()

        # fill in SSL material paths relative to datadir unless configured
        if self.__dict__['ssl_key'] is None:
            self.__dict__['ssl_key'] = os.path.join(self.__dict__['datadir'],
                                                    "certs/tor2web-key.pem")

        if self.__dict__['ssl_cert'] is None:
            self.__dict__['ssl_cert'] = os.path.join(self.__dict__['datadir'],
                                                     "certs/tor2web-cert.pem")

        if self.__dict__['ssl_intermediate'] is None:
            self.__dict__['ssl_intermediate'] = os.path.join(
                self.__dict__['datadir'], "certs/tor2web-intermediate.pem")

        if self.__dict__['ssl_dh'] is None:
            self.__dict__['ssl_dh'] = os.path.join(self.__dict__['datadir'],
                                                   "certs/tor2web-dh.pem")

        # legacy config value accepted as an alias
        if self.__dict__['mode'] == 'BLACKLIST':
            self.__dict__['mode'] = 'BLOCKLIST'
Example #32
0
            start = int(start)
            end = int(end)
        except ValueError:
            return "ValueError"

        if start > end and start > 0:
            return "Start should be smaller than end"

        track_list = s.get_tracks(start, end)
        return jsonify(track_list)

    app.run(host="0.0.0.0")


if __name__ == "__main__":
    # shared Storage instance: the poller thread writes, the server reads
    s = Storage()

    config_persist = ConfigPersist(CONFIG_FILE)
    oauth2 = OAuth2(config_persist)

    # try:
    stop_event = Event()

    # background poller fills the storage until stop_event is set
    t = Thread(target=do_poll, args=(stop_event, s, oauth2))
    t.start()

    # blocks until the web server exits, then signal the poller to stop
    start_server(s)
    stop_event.set()

    #t.join(1)
Example #33
0
 def __init__(self):
     # storage_file is expected to be provided by the class / a subclass
     self.storage = Storage(self.storage_file)
Example #34
0
class Grid(object):
    """Emulation of a distributed compute grid.

    Builds a storage hierarchy (origin 'Data Lake' -> optional cloud-level
    caches -> per-CE caches) plus one Compute per CE, expands tasks into
    jobs, and drives job processing in fixed time steps of conf.STEP.
    """

    def __init__(self):
        self.comp_sites = []
        self.dfCEs = None
        self.storage = Storage()
        self.total_cores = 0
        self.status_in_time = []  # ts, running, queued, finished
        self.ds_assignements = {}
        self.all_jobs = []

        self.core_seconds = 0
        self.queue_seconds = 0

        self.cloud_weights = None
        self.site_weights = {}
        self.create_infrastructure()
        self.samples = multiprocessing.Queue()
        self.init()

    def init(self):
        """Start the background placement sampler; open the step log if configured."""
        sampler = Sampler(self.cloud_weights, self.site_weights, self.samples)
        sampler.daemon = True
        sampler.start()
        if conf.STEPS_TO_FILE:
            # line-buffered so the log can be tailed while the emulation runs
            self.logfile = open(conf.BASE_DIR + conf.TITLE + ".log", "w", buffering=1)

    def create_infrastructure(self):
        """Create storages and compute sites from the CE table loaded via `ou`."""
        self.dfCEs = ou.load_compute()
        self.total_cores = self.dfCEs.cores.sum()
        print('total cores:', self.total_cores)

        # create origin server
        self.storage.add_storage('Data Lake', parent_name='', servers=1, level=2, origin=True)

        # create cloud level cache servers
        self.cloud_weights = self.dfCEs.groupby('cloud').sum()['cores']
        print(self.cloud_weights)

        if conf.CLOUD_LEVEL_CACHE:
            for cloud, sum_cores in self.cloud_weights.items():
                # one cache server per 2000 cores, rounded up
                servers = sum_cores // 2000 + 1
                self.storage.add_storage(cloud, parent_name='Data Lake', servers=servers, level=1, origin=False)

        # create CEs. CEs have local caches.
        for ce in self.dfCEs.itertuples():
            servers = ce.cores // 1000 + 1
            if conf.CLOUD_LEVEL_CACHE:
                p_name = ce.cloud
            else:
                p_name = 'Data Lake'
            self.storage.add_storage(ce.name, parent_name=p_name, servers=servers, level=0, origin=False)
            self.comp_sites.append(Compute(ce.name, ce.tier, ce.cloud, ce.cores, self.storage, ce.name))

        # calculate site weights, normalized within each cloud
        self.cloud_weights /= self.cloud_weights.sum()
        for cl, clv in self.dfCEs.groupby('cloud'):
            self.site_weights[cl] = clv['cores']
            self.site_weights[cl] /= self.site_weights[cl].sum()
        # print(self.cloud_weights,  self.site_weights)

    def get_dataset_vp(self, name):
        """Return (and memoize) the list of candidate sites for dataset `name`."""
        if name in self.ds_assignements:
            return self.ds_assignements[name]

        if name is None:  # We have a lot of tasks without dataset.
            if conf.NO_INPUT_JOBS_FILL_UP:
                return []  # Leave it empty and the grid will give job to a site with lowest occupancy
            # Throw them randomly.
            return self.samples.get()

        site_samples = self.samples.get()
        # print(site_samples)

        self.ds_assignements[name] = site_samples
        return site_samples

    def add_task(self, task):
        """Expand `task` into per-job tuples appended to self.all_jobs.

        Each tuple is (start_ts, cores, duration, files, per_file_bytes,
        candidate_sites, taskid) — the order process_jobs() relies on.
        """
        # find sites to execute task at
        sites = self.get_dataset_vp(task.dataset)
        # print('adding task:\n', task, sites)

        files_per_job = 0
        per_file_bytes = 0
        if task.ds_files > 0 and task.Sinputfiles > 0:
            files_per_job = round(task.Sinputfiles / task.jobs)
            per_file_bytes = round(task.ds_size / task.ds_files)

        job_duration = int(task.Swall_time / task.jobs)
        # cores = conf.CORE_NUMBERS[bisect(conf.CORE_NUMBERS, task.cores / task.jobs)]
        cores = int(round(task.Scores / task.jobs))

        # print('jobs:', task.jobs, '\tcores:', cores, '\tfiles per job', files_per_job, '\tduration:', job_duration)

        file_counter = 0
        for job_number in range(task.jobs):
            files = []
            for _ in range(files_per_job):
                # deterministic per-file name, hashed so cache keys spread evenly
                fn = task.dataset + str(file_counter % task.ds_files)
                fn = hashlib.md5(fn.encode('utf-8')).hexdigest()
                files.append(fn)
                file_counter += 1

            self.all_jobs.append((
                task.created_at + conf.JOB_START_DELAY * job_number,
                cores,
                job_duration,
                files,
                per_file_bytes,
                sites,
                task.taskid
            ))

    def process_jobs(self, until=None):
        """ gives jobs to CEs to process.
        If until is given only jobs starting up to that time will be processed.
        This is to spare memory.
        """

        print('sort jobs')
        self.all_jobs.sort(key=lambda x: x[0])  # chronological by start time
        total_jobs = len(self.all_jobs)
        print('jobs to do:', total_jobs)

        # loop over times
        print('start time:', time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(self.all_jobs[0][0])))
        print('end   time:', time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(self.all_jobs[-1][0])))
        # align the clock to a conf.STEP boundary just before the first job
        ts = (self.all_jobs[0][0] - conf.STEP) // conf.STEP * conf.STEP
        while True:
            if until and ts >= until:
                print('remaining jobs:', len(self.all_jobs))
                return
            ts += conf.STEP
            if not ts % conf.BINS:
                # periodic bookkeeping: storage stats + run/queue counters
                self.storage.stats(ts)
                remain = self.stats(ts)
                print('remaining jobs: ', remain)
                if remain == 0 and until is None:
                    print('All DONE.')
                    break

            # loop over sites process events get ce statuses
            for ce in self.comp_sites:
                ncores, nqueued = ce.process_events(ts)
                self.core_seconds += ncores * conf.STEP
                self.queue_seconds += nqueued * conf.STEP

            jobs_added = 0
            for job in self.all_jobs:
                if job[0] > ts + conf.STEP:
                    break
                sites = job[5]

                if not sites:  # no input dataset can go to any site
                    sites = set(range(self.dfCEs.shape[0]))

                # the first site not having anything in the queue gets the job
                found_empty = False
                minocc = 99999999.0
                minsite = 999
                for site in sites:
                    occ = len(self.comp_sites[site].queue) / self.comp_sites[site].cores
                    if not occ:
                        self.comp_sites[site].add_job(job[0], job[1], job[2], job[3], job[4], job[6])
                        found_empty = True
                        break
                    if occ < minocc:
                        minocc = occ
                        minsite = site

                # if job unassigned give it to the one with the smallest ratio
                if not found_empty:
                    self.comp_sites[minsite].add_job(job[0], job[1], job[2], job[3], job[4], job[6])

                jobs_added += 1

            # print('deleting:', jobs_added, 'from:', len(self.all_jobs))
            del self.all_jobs[:jobs_added]

    def stats(self, ts):
        """ returns sum of running and queued jobs so we can say when the emulation is done """
        srunning = susedcores = squeued = sfinished = 0
        for ce in self.comp_sites:
            (runn, queu, fini, core) = ce.collect_stats(ts)
            srunning += runn
            squeued += queu
            sfinished += fini
            susedcores += core
        self.status_in_time.append([ts, srunning, squeued, sfinished, susedcores])
        print('time:', time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(ts)),
              '\trunning:', srunning, '\t cores used:',
              susedcores, '\tqueued:', squeued, '\tfinished:', sfinished)
        if conf.STEPS_TO_FILE:
            self.logfile.write(time.strftime('%Y/%m/%d %H:%M:%S', time.gmtime(ts)) +
                               '\trunning:' + str(srunning) + '\t cores used:' +
                               str(susedcores) + '\tqueued:' + str(squeued) + '\tfinished:' + str(sfinished) + '\n')
        return srunning + squeued

    def save_stats(self):
        """Write compute/summary/config/task-finish-time tables to the results HDF5 file."""
        print('core hours:', self.core_seconds / 3600)
        print('queue job hours:', self.queue_seconds / 3600)
        h5path = conf.BASE_DIR + 'results/' + conf.TITLE + '.h5'
        stats = pd.DataFrame(self.status_in_time)
        stats.columns = ['time', 'running', 'queued', 'finished', 'cores used']
        stats['cores used'] /= self.total_cores
        stats['finished'] /= stats.finished.max()
        stats = stats.set_index('time', drop=True)
        stats.index = pd.to_datetime(stats.index, unit='s')
        stats.to_hdf(h5path, key='compute', mode='a', complevel=1)
        # print(stats)

        summary = pd.DataFrame.from_dict({
            'core hours': self.core_seconds / 3600,
            'queue hours': self.queue_seconds / 3600
        }, orient='index')
        summary.to_hdf(h5path, key='summary', mode='a')

        dic = globals().get('conf', None).__dict__
        # Fix: iterate over a snapshot of the keys — popping while iterating
        # dict.keys() raises "RuntimeError: dictionary changed size during
        # iteration" in Python 3.
        for k in list(dic.keys()):
            if k.startswith('s_'):
                dic.pop(k)
        conf_df = pd.DataFrame.from_dict(dic, orient='index')
        conf_df.to_hdf(h5path, key='config', mode='a')

        task_finish_times = {}
        for ce in self.comp_sites:
            ce.save_stats()
            for tid, ftime in ce.task_finish_times.items():
                # keep the latest finish time per task across all CEs
                if tid not in task_finish_times or task_finish_times[tid] < ftime:
                    task_finish_times[tid] = ftime

        self.storage.save_stats()
        tft = pd.DataFrame.from_dict(task_finish_times, orient='index')
        tft.columns = ['model_finish']
        tft.to_hdf(h5path, key='tft', mode='a', complevel=1)
Example #35
0
class Menu(object):
    """Console front-end for the NetEase cloud-music client.

    Holds the current browse state (datatype/title/datalist/offset/index),
    the Player, the Storage-backed user session and the NetEase API wrapper,
    and dispatches menu selections to API calls.
    """

    def __init__(self):
        self.config = Config()
        self.datatype = "main"
        self.title = "网易云音乐"
        # Top-level menu entries (charts, artists, new albums, playlists, ...).
        self.datalist = [
            "排行榜",
            "艺术家",
            "新碟上架",
            "精选歌单",
            "我的歌单",
            "主播电台",
            "每日推荐歌曲",
            "每日推荐歌单",
            "私人FM",
            "搜索",
            "帮助",
        ]
        self.offset = 0
        self.index = 0
        self.storage = Storage()
        self.storage.load()
        self.collection = self.storage.database["collections"]
        self.player = Player()
        self.player.playing_song_changed_callback = self.song_changed_callback
        self.cache = Cache()
        # self.ui = Ui()
        self.api = NetEase()
        # self.screen = curses.initscr()
        # self.screen.keypad(1)
        self.step = 10  # entries per menu page
        self.stack = []
        self.djstack = []
        self.at_playing_list = False
        self.enter_flag = True
        # signal.signal(signal.SIGWINCH, self.change_term)
        # signal.signal(signal.SIGINT, self.send_kill)
        self.menu_starts = time.time()
        self.countdown_start = time.time()
        self.countdown = -1
        self.is_in_countdown = False

        self.keyword = ''

    @property
    def user(self):
        """Current user record from local storage."""
        return self.storage.database["user"]

    @property
    def account(self):
        """Stored login name."""
        return self.user["username"]

    @property
    def md5pass(self):
        """Stored (hashed) password."""
        return self.user["password"]

    @property
    def userid(self):
        """NetEase user id of the logged-in account."""
        return self.user["user_id"]

    @property
    def username(self):
        """Display nickname of the logged-in account."""
        return self.user["nickname"]

    def login(self):
        """Log in with stored credentials, else prompt interactively.

        NOTE(review): the body below is garbled — a password-masking filter
        replaced part of the source with ``******`` and the success branch's
        ``if`` is missing, leaving an orphaned ``else``.  Kept byte-identical.
        """
        if self.account and self.md5pass:
            account, md5pass = self.account, self.md5pass
        else:
            #modified
            account = str(input('name:'))
            password = str(input('password:'******'that is right........')
            return True
        else:
            self.storage.logout()
            # x = self.ui.build_login_error()
            # if x != ord("1"):
            #     return False

            return self.login()

    def to_login(self, username, passwd):
        """Non-interactive login variant; returns False instead of retrying.

        NOTE(review): garbled by the same masking filter as login() — the
        success-branch ``if`` is missing.  Kept byte-identical.
        """
        if self.account and self.md5pass:
            account, md5pass = self.account, self.md5pass
        else:
            # modified
            # account = str(input('name:'))
            # password = str(input('password:'******'that is right........')
            return True
        else:
            self.storage.logout()
            # x = self.ui.build_login_error()
            # if x != ord("1"):
            #     return False

            # return self.login()
            return False

    def search(self, category):
        """Search self.keyword in the given category and return dug-up info."""
        # self.ui.screen.timeout(-1)
        SearchArg = namedtuple("SearchArg",
                               ["prompt", "api_type", "post_process"])
        category_map = {
            "songs": SearchArg("搜索歌曲:", 1, lambda datalist: datalist),
            "albums": SearchArg("搜索专辑:", 10, lambda datalist: datalist),
            "artists": SearchArg("搜索艺术家:", 100, lambda datalist: datalist),
            "playlists": SearchArg("搜索网易精选集:", 1000,
                                   lambda datalist: datalist),
        }

        prompt, api_type, post_process = category_map[category]
        # keyword = self.ui.get_param(prompt)
        # keyword = str(input('Input the song\'s name:'))
        keyword = self.keyword
        if not keyword:
            return []

        data = self.api.search(keyword, api_type)
        if not data:
            return data

        datalist = post_process(data.get(category, []))
        return self.api.dig_info(datalist, category)

    def change_term(self, signum, frame):
        """SIGWINCH handler: redraw the curses screen (unused in console mode)."""
        self.ui.screen.clear()
        self.ui.screen.refresh()

    def send_kill(self, signum, fram):
        """SIGINT handler: stop playback, persist state and exit."""
        self.player.stop()
        self.cache.quit()
        self.storage.save()
        # curses.endwin()
        sys.exit()

    def update_alert(self, version):
        """Notify the user if a newer release than `version` is available."""
        latest = Menu().check_version()
        if latest != version and latest != 0:
            notify("MusicBox Update is available", 1)
            time.sleep(0.5)
            notify(
                "NetEase-MusicBox installed version:" + version +
                "\nNetEase-MusicBox latest version:" + latest,
                0,
            )

    def check_version(self):
        # Check for updates && daily sign-in
        try:
            mobile = self.api.daily_task(is_mobile=True)
            pc = self.api.daily_task(is_mobile=False)

            if mobile["code"] == 200:
                notify("移动端签到成功", 1)
            if pc["code"] == 200:
                notify("PC端签到成功", 1)

            data = self.api.get_version()
            return data["info"]["version"]
        except KeyError as e:
            return 0

    def start_fork(self, version):
        """Fork: child checks for updates, parent runs the interactive menu."""
        pid = os.fork()
        if pid == 0:
            Menu().update_alert(version)
        else:
            Menu().start()

    def play_pause(self):
        """Toggle between playing and paused (no-op on an empty playlist)."""
        if self.player.is_empty:
            return
        if not self.player.playing_flag:
            self.player.resume()
        else:
            self.player.pause()

    def next_song(self):
        """Skip to the next song (no-op on an empty playlist)."""
        if self.player.is_empty:
            return
        self.player.next()

    def previous_song(self):
        """Skip to the previous song (no-op on an empty playlist)."""
        if self.player.is_empty:
            return
        self.player.prev()

    def start(self):
        """Run the interactive console loop (numeric-choice replacement UI)."""

        # while True:

        #     print('input 1:login,2:search,100:break')
        #     num = int(input('Please input your choice:'))
        #     print('you input {}'.format(num))

        #     if (num == 1):
        #         print('username before: {}'.format(self.user))
        #         myplaylist = self.request_api(self.api.user_playlist, self.userid)
        #         print(myplaylist)
        #         print('username: {}'.format(self.user))
        #     elif num == 2:
        #         datalist = self.search('songs')
        #         print('search result:')
        #         for idxx,val in enumerate(datalist):
        #             print('{}:{}-{}'.format(idxx,val['song_name'],val['artist']))
        #             if idxx > 10:
        #                 break;

        #     elif num == 100:
        #         break

        # ---- after: interactive text-menu loop ----

        def print_info():
            # Print the numeric command menu (quit/up/down/search/play/login/...).
            print('----------------------------')
            print('1:清空信息并退出')
            print('2:上移')
            print('3:下移')
            print('4:搜索')
            print('5:播放')
            print('6:登录')
            print('7:个人歌单')
            print('100:直接退出')
            print('----------------------------')

        while True:
            datatype = self.datatype
            title = self.title
            datalist = self.datalist
            offset = self.offset
            idx = self.index
            step = self.step

            print_info()

            key = int(input('请输入你的选择:'))

            if key == 100:
                print('正在退出....')
                self.player.stop()
                self.storage.save()
                break

            elif key == 1:
                self.api.logout()
                print('正在退出....')
                self.player.stop()
                break

            elif key == 2:
                if idx == offset:
                    if offset == 0:
                        continue
                    self.offset -= step
                    # move the cursor to the last entry of the previous page
                    self.index = offset - 1
                else:
                    self.index = carousel(
                        offset,
                        min(len(datalist), offset + step) - 1, idx - 1)
                self.menu_starts = time.time()
            elif key == 3:
                if idx == min(len(datalist), offset + step) - 1:
                    if offset + step >= len(datalist):
                        continue
                    self.offset += step
                    # move the cursor to the first entry of the next page
                    self.index = offset + step
                else:
                    self.index = carousel(
                        offset,
                        min(len(datalist), offset + step) - 1, idx + 1)
                self.menu_starts = time.time()
            elif key == 4:
                self.index = 0
                self.offset = 0
                # idx is hard-coded to 1 here: always search in the songs category
                idx = 1
                SearchCategory = namedtuple("SearchCategory",
                                            ["type", "title"])
                idx_map = {
                    0: SearchCategory("playlists", "精选歌单搜索列表"),
                    1: SearchCategory("songs", "歌曲搜索列表"),
                    2: SearchCategory("artists", "艺术家搜索列表"),
                    3: SearchCategory("albums", "专辑搜索列表"),
                }
                self.datatype, self.title = idx_map[idx]
                self.datalist = self.search(self.datatype)

                print('search result:')
                for idxx, val in enumerate(self.datalist):
                    print('{}:{}-{}'.format(idxx, val['song_name'],
                                            val['artist']))
                    if idxx > 10:
                        break

                which_one = int(input('输入想要播放的序号:'))

                while which_one > 10 or which_one < 0:
                    which_one = int(input('序号不合理,重新输入:'))

                self.player.new_player_list('songs', self.title, self.datalist,
                                            -1)
                self.idx = which_one
                self.player.play_or_pause(self.idx, self.at_playing_list)

            elif key == 5:
                print('当前的歌单:')
                cnt = 0
                # NOTE(review): `key` here shadows the menu-choice variable;
                # harmless because it is re-read from input() next iteration.
                for key in self.player.songs.keys():
                    print('{}.{}----{}'.format(
                        cnt, self.player.songs[key]['song_name'],
                        self.player.songs[key]['artist']))
                    cnt += 1
                    if cnt > 10:
                        break

                which_one = int(input('输入想要播放的序号:'))
                while which_one > 10 or which_one < 0:
                    which_one = int(input('序号不合理,重新输入:'))
                self.idx = which_one
                self.player.play_or_pause(self.idx, self.at_playing_list)
            elif key == 6:
                myplaylist = self.request_api(self.api.user_playlist,
                                              self.userid)
                self.datatype = 'top_playlists'
                myplaylist = self.api.dig_info(myplaylist, self.datatype)
                notify('登录成功')
            elif key == 7:
                myplaylist = self.request_api(self.api.user_playlist,
                                              self.userid)
                self.datatype = 'top_playlists'
                myplaylist = self.api.dig_info(myplaylist, self.datatype)
                print('{}的歌单:'.format(self.username))
                for x, y in enumerate(myplaylist):
                    print('{}.{}'.format(x, y['playlist_name']))

    def get_songs_info(self, search_info, choice):
        """Search `search_info` in category `choice` (1 songs, 2 artists, 3 albums)
        and return a list of human-readable result strings."""
        self.keyword = search_info

        if choice < 0 or choice > 3:
            notify('选择有误')
            return

        idx = choice
        SearchCategory = namedtuple("SearchCategory", ["type", "title"])
        idx_map = {
            0: SearchCategory("playlists", "精选歌单搜索列表"),
            1: SearchCategory("songs", "歌曲搜索列表"),
            2: SearchCategory("artists", "艺术家搜索列表"),
            3: SearchCategory("albums", "专辑搜索列表"),
        }
        self.datatype, self.title = idx_map[idx]
        self.datalist = self.search(self.datatype)
        res = []
        if choice == 1:
            for idxx, val in enumerate(self.datalist):
                res.append('{}(歌曲名)-{}(艺术家))'.format(val['song_name'],
                                                     val['artist']))
                # if idxx > 10:
                #     break;
        elif choice == 2:
            for idxx, val in enumerate(self.datalist):
                res.append('艺术家:{}'.format(val['artists_name']))
                # if idxx > 10:
                #     break;

        elif choice == 3:
            # print(self.datalist)
            for idxx, val in enumerate(self.datalist):
                res.append('{}(专辑)-{}(艺术家)'.format(val['albums_name'],
                                                   val['artists_name']))
                # if idxx > 10:
                #     break;
        else:
            pass

        return res

    def play_which_song(self, which):
        """Load the current datalist into the player and play entry `which`."""
        # self.player.new_player_list('songs', self.title, self.datalist, -1)
        #         #  self.idx = which
        # self.player.play_or_pause(self.idx, self.at_playing_list)

        # print('self.at...',self.at_playing_list)

        self.player.new_player_list("songs", self.title, self.datalist, -1)

        # self.player.end_callback = None
        self.player.play_or_pause(which, self.at_playing_list)
        # self.at_playing_list = True

    def now_total_time(self):
        """Return (current position, total length) of the playing song."""
        return self.player.process_location, self.player.process_length

    def dispatch_enter(self, idx):
        """Drill into datalist entry `idx`, pushing the current view on the stack."""
        # The end of stack
        netease = self.api
        datatype = self.datatype
        title = self.title
        datalist = self.datalist
        offset = self.offset
        index = self.index
        self.stack.append([datatype, title, datalist, offset, index])

        if idx >= len(self.datalist):
            return False

        if datatype == "main":
            self.choice_channel(idx)

        # hot songs of this artist
        elif datatype == "artists":
            artist_name = datalist[idx]["artists_name"]
            artist_id = datalist[idx]["artist_id"]

            self.datatype = "artist_info"
            self.title += " > " + artist_name
            self.datalist = [
                {
                    "item": "{}的热门歌曲".format(artist_name),
                    "id": artist_id
                },
                {
                    "item": "{}的所有专辑".format(artist_name),
                    "id": artist_id
                },
            ]

        elif datatype == "artist_info":
            self.title += " > " + datalist[idx]["item"]
            artist_id = datalist[0]["id"]
            if idx == 0:
                self.datatype = "songs"
                songs = netease.artists(artist_id)
                self.datalist = netease.dig_info(songs, "songs")

            elif idx == 1:
                albums = netease.get_artist_album(artist_id)
                self.datatype = "albums"
                self.datalist = netease.dig_info(albums, "albums")

        elif datatype == "djchannels":
            radio_id = datalist[idx]["id"]
            programs = netease.djprograms(radio_id)
            self.title += " > " + datalist[idx]["name"]
            self.datatype = "songs"
            self.datalist = netease.dig_info(programs, "songs")

        # songs contained in this album
        elif datatype == "albums":
            album_id = datalist[idx]["album_id"]
            songs = netease.album(album_id)
            self.datatype = "songs"
            self.datalist = netease.dig_info(songs, "songs")
            self.title += " > " + datalist[idx]["albums_name"]

        # featured-playlist options
        elif datatype == "recommend_lists":
            data = self.datalist[idx]
            self.datatype = data["datatype"]
            self.datalist = netease.dig_info(data["callback"](), self.datatype)
            self.title += " > " + data["title"]

        # songs in site-wide top playlists
        elif datatype in ["top_playlists", "playlists"]:
            playlist_id = datalist[idx]["playlist_id"]
            songs = netease.playlist_detail(playlist_id)
            self.datatype = "songs"
            self.datalist = netease.dig_info(songs, "songs")
            self.title += " > " + datalist[idx]["playlist_name"]

        # featured by category
        elif datatype == "playlist_classes":
            # category name
            data = self.datalist[idx]
            self.datatype = "playlist_class_detail"
            self.datalist = netease.dig_info(data, self.datatype)
            self.title += " > " + data

        # details of one category
        elif datatype == "playlist_class_detail":
            # sub-category
            data = self.datalist[idx]
            self.datatype = "top_playlists"
            log.error(data)
            self.datalist = netease.dig_info(netease.top_playlists(data),
                                             self.datatype)
            self.title += " > " + data

        # song comments
        elif datatype in ["songs", "fmsongs"]:
            song_id = datalist[idx]["song_id"]
            comments = self.api.song_comments(song_id, limit=100)
            try:
                hotcomments = comments["hotComments"]
                comcomments = comments["comments"]
            except KeyError:
                hotcomments = comcomments = []
            self.datalist = []
            for one_comment in hotcomments:
                self.datalist.append("(热评 %s❤️ ️)%s:%s" % (
                    one_comment["likedCount"],
                    one_comment["user"]["nickname"],
                    one_comment["content"],
                ))
            for one_comment in comcomments:
                self.datalist.append(one_comment["content"])
            self.datatype = "comments"
            self.title = "网易云音乐 > 评论:%s" % datalist[idx]["song_name"]
            self.offset = 0
            self.index = 0

        # song charts
        elif datatype == "toplists":
            songs = netease.top_songlist(idx)
            self.title += " > " + self.datalist[idx]
            self.datalist = netease.dig_info(songs, "songs")
            self.datatype = "songs"

        # search menu
        elif datatype == "search":
            self.index = 0
            self.offset = 0
            SearchCategory = namedtuple("SearchCategory", ["type", "title"])
            idx_map = {
                0: SearchCategory("playlists", "精选歌单搜索列表"),
                1: SearchCategory("songs", "歌曲搜索列表"),
                2: SearchCategory("artists", "艺术家搜索列表"),
                3: SearchCategory("albums", "专辑搜索列表"),
            }
            self.datatype, self.title = idx_map[idx]
            self.datalist = self.search(self.datatype)
        else:
            self.enter_flag = False

    def show_playing_song(self):
        """Switch the view to the playing list and focus the current song."""
        if self.player.is_empty:
            return

        if not self.at_playing_list:
            self.stack.append([
                self.datatype, self.title, self.datalist, self.offset,
                self.index
            ])
            self.at_playing_list = True

        self.datatype = self.player.info["player_list_type"]
        self.title = self.player.info["player_list_title"]
        self.datalist = [
            self.player.songs[i] for i in self.player.info["player_list"]
        ]
        self.index = self.player.info["idx"]
        self.offset = self.index // self.step * self.step

    def song_changed_callback(self):
        """Player callback: refresh the view when the playing song changes."""
        if self.at_playing_list:
            self.show_playing_song()

    def fm_callback(self):
        """Append freshly fetched personal-FM songs and resume if paused."""
        # log.debug('FM CallBack.')
        data = self.get_new_fm()
        self.player.append_songs(data)
        if self.datatype == "fmsongs":
            if self.player.is_empty:
                return
            self.datatype = self.player.info["player_list_type"]
            self.title = self.player.info["player_list_title"]
            self.datalist = []
            for i in self.player.info["player_list"]:
                self.datalist.append(self.player.songs[i])
            self.index = self.player.info["idx"]
            self.offset = self.index // self.step * self.step
            if not self.player.playing_flag:
                switch_flag = False
                self.player.play_or_pause(self.index, switch_flag)

    def request_api(self, func, *args):
        """Call an API function; on empty result, log in once and retry."""
        result = func(*args)
        if result:
            return result
        if not self.login():
            print('you really need to login')
            notify("You need to log in")
            return False
        return func(*args)

    def get_new_fm(self):
        """Fetch the next batch of personal-FM songs (empty list on failure)."""
        data = self.request_api(self.api.personal_fm)
        if not data:
            return []
        return self.api.dig_info(data, "fmsongs")

    def choice_channel(self, idx):
        """Populate datatype/title/datalist for main-menu entry `idx`."""
        self.offset = 0
        self.index = 0

        if idx == 0:
            self.datalist = self.api.toplists
            self.title += " > 排行榜"
            self.datatype = "toplists"
        elif idx == 1:
            artists = self.api.top_artists()
            self.datalist = self.api.dig_info(artists, "artists")
            self.title += " > 艺术家"
            self.datatype = "artists"
        elif idx == 2:
            albums = self.api.new_albums()
            self.datalist = self.api.dig_info(albums, "albums")
            self.title += " > 新碟上架"
            self.datatype = "albums"
        elif idx == 3:
            self.datalist = [
                {
                    "title": "全站置顶",
                    "datatype": "top_playlists",
                    "callback": self.api.top_playlists,
                },
                {
                    "title": "分类精选",
                    "datatype": "playlist_classes",
                    "callback": lambda: [],
                },
            ]
            self.title += " > 精选歌单"
            self.datatype = "recommend_lists"
        elif idx == 4:
            myplaylist = self.request_api(self.api.user_playlist, self.userid)
            self.datatype = "top_playlists"
            self.datalist = self.api.dig_info(myplaylist, self.datatype)
            self.title += " > " + self.username + " 的歌单"
        elif idx == 5:
            self.datatype = "djchannels"
            self.title += " > 主播电台"
            self.datalist = self.api.djchannels()
        elif idx == 6:
            self.datatype = "songs"
            self.title += " > 每日推荐歌曲"
            myplaylist = self.request_api(self.api.recommend_playlist)
            if myplaylist == -1:
                return
            self.datalist = self.api.dig_info(myplaylist, self.datatype)
        elif idx == 7:
            myplaylist = self.request_api(self.api.recommend_resource)
            self.datatype = "top_playlists"
            self.title += " > 每日推荐歌单"
            self.datalist = self.api.dig_info(myplaylist, self.datatype)
        elif idx == 8:
            self.datatype = "fmsongs"
            self.title += " > 私人FM"
            self.datalist = self.get_new_fm()
        elif idx == 9:
            self.datatype = "search"
            self.title += " > 搜索"
            self.datalist = ["歌曲", "艺术家", "专辑", "网易精选集"]
        elif idx == 10:
            self.datatype = "help"
            self.title += " > 帮助"
            self.datalist = shortcut
Example #36
0
    def getStorage(self):
        """Return the class-wide Storage singleton, creating it on first use."""
        if not GeekNote.storage:
            GeekNote.storage = Storage()
        return GeekNote.storage
Example #37
0
 def setUp(self):
     """Build a fresh Category/Topic/Document/Storage fixture for each test."""
     self.c = Category(1, "C")
     self.t = Topic(1, "T", "C:\\user")
     self.d = Document(1, 1, 1, "D")
     self.s = Storage()
Example #38
0
class TestDocumentManagement(unittest.TestCase):
    """Unit tests for the Category/Topic/Document models and the Storage
    container: construction, editing, tagging, repr formatting, and the
    add/edit/delete operations on Storage collections.
    """

    def setUp(self):
        """Create one fresh fixture of each model class before every test."""
        self.c = Category(1, "C")
        self.t = Topic(1, "T", "C:\\user")
        self.d = Document(1, 1, 1, "D")
        self.s = Storage()

    def test_category_init(self):
        self.assertEqual(self.c.id, 1)
        self.assertEqual(self.c.name, "C")

    def test_category_edit(self):
        self.c.edit("new")
        self.assertEqual(self.c.name, "new")

    def test_category_repr(self):
        self.assertEqual(str(self.c), "Category 1: C")

    def test_document_init(self):
        self.assertEqual(self.d.id, 1)
        self.assertEqual(self.d.category_id, 1)
        self.assertEqual(self.d.topic_id, 1)
        self.assertEqual(self.d.file_name, "D")
        self.assertEqual(self.d.tags, [])

    # NOTE(review): method name has a typo ("insances"); kept unchanged so
    # any external test selection by name keeps working.
    def test_document_from_insances(self):
        doc = Document.from_instances(1, self.c, self.t, "Doc")
        self.assertEqual(doc.id, 1)
        self.assertEqual(doc.category_id, 1)
        self.assertEqual(doc.topic_id, 1)
        self.assertEqual(doc.file_name, "Doc")
        self.assertEqual(doc.tags, [])

    def test_document_add_tag(self):
        # Adding the same tag twice must not create a duplicate.
        self.d.add_tag("tag")
        self.d.add_tag("tag")
        self.assertEqual(self.d.tags, ["tag"])

    def test_document_remove_tag(self):
        self.d.add_tag("tag")
        self.d.add_tag("tag")
        self.d.remove_tag("tag")
        self.assertEqual(self.d.tags, [])

    def test_document_edit(self):
        self.d.edit("new")
        self.assertEqual(self.d.file_name, "new")

    def test_document_repr(self):
        self.d.add_tag("tag")
        self.assertEqual(str(self.d),
                         'Document 1: D; category 1, topic 1, tags: tag')

    def test_topic_init(self):
        self.assertEqual(self.t.id, 1)
        # Fixed: previously this line re-checked `self.t.id` a second time,
        # leaving the topic name untested.
        self.assertEqual(self.t.topic, "T")
        self.assertEqual(self.t.storage_folder, "C:\\user")

    def test_topic_edit(self):
        self.t.edit("new topic", "new folder")
        self.assertEqual(self.t.topic, "new topic")
        self.assertEqual(self.t.storage_folder, "new folder")

    def test_topic_repr(self):
        self.assertEqual(str(self.t), "Topic 1: T in C:\\user")

    def test_storage_init(self):
        self.assertEqual(self.s.categories, [])
        self.assertEqual(self.s.topics, [])
        self.assertEqual(self.s.documents, [])

    def test_storage_add_category(self):
        # Adding the same object twice must not create a duplicate entry.
        self.s.add_category(self.c)
        self.s.add_category(self.c)
        self.assertEqual(self.s.categories, [self.c])

    def test_storage_add_topic(self):
        self.s.add_topic(self.t)
        self.s.add_topic(self.t)
        self.assertEqual(self.s.topics, [self.t])

    def test_storage_add_document(self):
        self.s.add_document(self.d)
        self.s.add_document(self.d)
        self.assertEqual(self.s.documents, [self.d])

    def test_storage_edit_category(self):
        self.s.add_category(self.c)
        self.s.edit_category(1, "new")
        self.assertEqual(self.s.categories[0].name, "new")

    def test_storage_edit_topic(self):
        self.s.add_topic(self.t)
        self.s.edit_topic(1, "new", "new storage")
        self.assertEqual(self.s.topics[0].topic, "new")
        self.assertEqual(self.s.topics[0].storage_folder, "new storage")

    def test_storage_edit_document(self):
        self.s.add_document(self.d)
        self.s.edit_document(1, "new")
        self.assertEqual(self.s.documents[0].file_name, "new")

    def test_storage_delete_category(self):
        self.s.add_category(self.c)
        self.s.delete_category(1)
        self.assertEqual(self.s.categories, [])

    def test_storage_delete_topic(self):
        self.s.add_topic(self.t)
        self.s.delete_topic(1)
        self.assertEqual(self.s.topics, [])

    def test_storage_delete_document(self):
        self.s.add_document(self.d)
        self.s.delete_document(1)
        self.assertEqual(self.s.documents, [])

    def test_storage_repr(self):
        self.s.add_category(self.c)
        self.s.add_topic(self.t)
        self.s.add_document(self.d)
        expected = str(self.s).strip('\n')
        self.assertEqual(expected,
                         "Document 1: D; category 1, topic 1, tags: ")
Example #39
0
    def __call__(self,
                 c=None,
                 f='index',
                 args=None,
                 vars=None,
                 extension=None,
                 target=None,
                 ajax=False,
                 ajax_trap=False,
                 url=None,
                 user_signature=False,
                 content='loading...',
                 **attr):
        """Embed another controller/function ("component") in the current page.

        Two modes:
        * ajax/url mode: emit a placeholder DIV plus a script that loads the
          component client-side via $.web2py.component.
        * inline mode: run the target controller/view server-side in a cloned
          request/response environment and splice the rendered page in.

        The returned value is an html.TAG[''] wrapper in both modes.
        NOTE(review): appears to be web2py's LoadFactory; confirm against the
        enclosing class, which is not visible in this chunk.
        """
        if args is None:
            args = []
        vars = Storage(vars or {})
        import globals
        # Generate a random target id when the caller did not supply one.
        target = target or 'c' + str(random.random())[2:]
        attr['_id'] = target
        request = self.environment['request']
        # "f.ext" notation overrides the extension argument.
        if '.' in f:
            f, extension = f.rsplit('.', 1)
        if url or ajax:
            # Client-side mode: the component is fetched by the browser.
            url = url or html.URL(request.application,
                                  c,
                                  f,
                                  r=request,
                                  args=args,
                                  vars=vars,
                                  extension=extension,
                                  user_signature=user_signature)
            script = html.SCRIPT('$.web2py.component("%s","%s")' %
                                 (url, target),
                                 _type="text/javascript")
            return html.TAG[''](script, html.DIV(content, **attr))
        else:
            # Server-side mode: clone the request so the embedded action runs
            # as if it were the primary one, without mutating the original.
            if not isinstance(args, (list, tuple)):
                args = [args]
            c = c or request.controller

            other_request = Storage(request)
            other_request['env'] = Storage(request.env)
            other_request.controller = c
            other_request.function = f
            other_request.extension = extension or request.extension
            other_request.args = List(args)
            other_request.vars = vars
            other_request.get_vars = vars
            other_request.post_vars = Storage()
            other_response = globals.Response()
            other_request.env.path_info = '/' + \
                '/'.join([request.application, c, f] +
                         map(str, other_request.args))
            other_request.env.query_string = \
                vars and html.URL(vars=vars).split('?')[1] or ''
            # Let the embedded action know where it is mounted.
            other_request.env.http_web2py_component_location = \
                request.env.path_info
            other_request.cid = target
            other_request.env.http_web2py_component_element = target
            other_response.view = '%s/%s.%s' % (c, f, other_request.extension)
            other_environment = copy.copy(self.environment)
            other_response._view_environment = other_environment
            other_response.generic_patterns = \
                copy.copy(current.response.generic_patterns)
            other_environment['request'] = other_request
            other_environment['response'] = other_response

            ## some magic here because current are thread-locals:
            ## swap the globals, run the controller/view, then restore.

            original_request, current.request = current.request, other_request
            original_response, current.response = current.response, other_response
            page = run_controller_in(c, f, other_environment)
            if isinstance(page, dict):
                # Controller returned a dict: render it through the view.
                other_response._vars = page
                other_response._view_environment.update(page)
                run_view_in(other_response._view_environment)
                page = other_response.body.getvalue()
            current.request, current.response = original_request, original_response
            js = None
            if ajax_trap:
                # Trap forms/links inside the component so they reload only
                # the component, not the whole page.
                link = html.URL(request.application,
                                c,
                                f,
                                r=request,
                                args=args,
                                vars=vars,
                                extension=extension,
                                user_signature=user_signature)
                js = "$.web2py.trap_form('%s','%s');" % (link, target)
            script = js and html.SCRIPT(js, _type="text/javascript") or ''
            return html.TAG[''](html.DIV(html.XML(page), **attr), script)
Example #40
0
import time
from flask import Flask, request, jsonify, render_template, url_for, redirect, session
from flask_cors import CORS

sys.path.append('..')
from model.labels import categories
from save_labels import store_info
from serve import get_model_api
from storage import Storage

# Flask application wiring for the labeling UI.
app = Flask(__name__)
# Allow cross-origin requests (front-end may be served from another origin).
CORS(app)

# NOTE(review): secret key is hard-coded in source; consider loading it from
# the environment or a config file outside version control.
app.config['SECRET_KEY'] = b'J\x9dU\x0c\xde\xa2aE\x9b\x0b\xc0W\x17\xcfX\xea'

# Two storage buckets: model-labeled images and raw, unlabeled ones.
images_after_inference = Storage('inference')
ulabeled_images = Storage('raw')
# Handle to the inference model (loaded once at startup).
model_api = get_model_api()


def get_v():
    """Return the current Unix timestamp as an int (used as a cache-busting
    version tag for templates)."""
    now = time.time()
    return int(now)


# Pages
@app.route('/')
def index():
    """Serve the single-page UI, passing a cache-busting version value."""
    version = get_v()
    return render_template('index.html', v=version)


@app.route('/label', methods=['GET'])
Example #41
0
# Command-line option catalogue for the CLI.
# Each entry maps a canonical option name to an optparse-style Option whose
# attributes (flags, default, action, dest, help) drive both argument parsing
# and the generated documentation (the :opt:`...`/:kw:`...`/:cmd:`...` roles).
OPT = Storage({
    "add":
    Option("--add",
           default=None,
           action="store",
           help="A list member to add to the value pointed by :opt:`--param`. "
           "If :opt:`--index` is set, insert the new element at the "
           "specified position in the list."),
    "backlog":
    Option("--backlog",
           default=None,
           action="store",
           dest="backlog",
           help="A size expression limiting the volume of data fetched "
           "from the log file tail. Default is 10k."),
    "color":
    Option("--color",
           default="auto",
           action="store",
           dest="color",
           help="Colorize output. Possible values are:\n\n"
           "* auto: guess based on tty presence\n"
           "* always|yes: always colorize\n"
           "* never|no: never colorize"),
    "config":
    Option(
        "--config",
        default=None,
        action="store",
        dest="parm_config",
        help="The configuration to use as template when creating or "
        "installing a service. The value can be ``-`` or ``/dev/stdin`` "
        "to read the json-formatted configuration from stdin, or a file "
        "path, or uri pointing to a ini-formatted configuration, or a "
        "service selector expression (ATTENTION with cloning existing live "
        "services that include more than containers, volumes and backend "
        "ip addresses ... this could cause disruption on the cloned service)."
    ),
    "debug":
    Option(
        "--debug",
        default=False,
        action="store_true",
        dest="debug",
        help="Increase stream and file log verbosity up to the debug level."),
    "daemon":
    Option("--daemon",
           default=False,
           action="store_true",
           dest="daemon",
           help="A flag inhibiting the command daemonization. Set by the "
           "daemonization routine."),
    "disable_rollback":
    Option("--disable-rollback",
           default=False,
           action="store_true",
           dest="disable_rollback",
           help="If set, don't try to rollback resources activated before a "
           "start action interrupts on error."),
    "discard":
    Option("--discard",
           default=False,
           action="store_true",
           dest="discard",
           help="Discard the stashed, invalid, configuration file."),
    "dry_run":
    Option("--dry-run",
           default=False,
           action="store_true",
           dest="dry_run",
           help="Show the action execution plan."),
    "env":
    Option("--env",
           default=[],
           action="append",
           dest="env",
           help="Export the uppercased variable in the os environment.\n\n"
           "With the create action only, set a env section parameter in "
           "the service configuration file. Multiple ``--env <key>=<val>`` "
           "can be specified. For all other actions."),
    "eval":
    Option(
        "--eval",
        default=False,
        action="store_true",
        dest="eval",
        help="If set with the :cmd:`svcmgr get` action, the printed value of "
        ":opt:`--param` is evaluated, scoped and dereferenced. If set "
        "with the :cmd:`svcmgr set` action, the current value is "
        "evaluated before mangling."),
    "filter":
    Option("--filter",
           default="",
           action="store",
           dest="jsonpath_filter",
           help="A JSONPath expression to filter a JSON output."),
    "follow":
    Option("--follow",
           default=False,
           action="store_true",
           dest="follow",
           help="Follow the logs as they come. Use crtl-c to interrupt."),
    "force":
    Option("-f",
           "--force",
           default=False,
           action="store_true",
           dest="force",
           help="Force action, ignore sanity checks."),
    "format":
    Option(
        "--format",
        default=None,
        action="store",
        dest="format",
        help="Specify a data formatter. Possible values are json, flat_json, "
        "csv or table. csv and table formatters are available only for "
        "commands returning tabular data."),
    "help":
    Option("-h",
           "--help",
           default=None,
           action="store_true",
           dest="parm_help",
           help="Show this help message and exit."),
    "hide_disabled":
    Option(
        "--hide-disabled",
        default=None,
        action="store_false",
        dest="show_disabled",
        help="Do not include the disabled resources. This option supersedes "
        "the :kw:`show_disabled` value in the service configuration."),
    "impersonate":
    Option("--impersonate",
           default=None,
           action="store",
           help="Impersonate a peer node when evaluating keywords."),
    "index":
    Option("--index",
           default=None,
           action="store",
           type="int",
           help="The position in the list pointed by --param where to add "
           "the new list element on a set action"),
    "interactive":
    Option("-i",
           "--interactive",
           default=False,
           action="store_true",
           dest="interactive",
           help="Prompt the user for a choice instead of using defaults, "
           "or failing if no default is defined."),
    "interval":
    Option("--interval",
           default=0,
           action="store",
           dest="interval",
           type="int",
           help="with --watch, set the refresh interval. defaults "
           "to 0, to refresh on event only."),
    "kw":
    Option(
        "--kw",
        action="append",
        dest="kw",
        help=
        "An expression like ``[<section>.]<keyword>[@<scope>][[<index>]]<op><value>`` where\n\n"
        "* <section> can be:\n\n"
        "  * a resource id\n"
        "  * a resource driver group name (fs, ip, ...). For the set and unset actions only, set the keyword for all matching resources.\n"
        "* <op> can be:\n\n"
        "  * ``=``\n"
        "  * ``+=``\n"
        "  * ``-=``\n\n"
        "Multiple --kw can be set to apply multiple configuration change "
        "in a file with a single write.\n\n"
        "Examples:\n\n"
        "* app.start=false\n"
        "  Turn off app start for all app resources\n"
        "* app#1.start=true\n"
        "  Turn on app start for app#1\n"
        "* nodes+=node3\n"
        "  Append node3 to nodes\n"
        "* nodes[0]+=node3\n"
        "  Preprend node3 to nodes\n"),
    "leader":
    Option(
        "--leader",
        default=None,
        action="store_true",
        dest="leader",
        help=
        "Switch the provision action behaviour to leader, ie provision shared resources that are not provisionned by default."
    ),
    "local":
    Option("--local",
           default=False,
           action="store_true",
           dest="local",
           help="Execute the service action on the local service "
           "instances only, ignoring cluster-wide considerations."),
    "master":
    Option("--master",
           default=False,
           action="store_true",
           dest="master",
           help="Limit the action scope to the master service resources."),
    "namespace":
    Option(
        "--namespace",
        action="store",
        dest="namespace",
        help=
        "The namespace to switch to for the action. Namespaces are cluster partitions. A default namespace can be set for the session setting the OSVC_NAMESPACE environment variable."
    ),
    "node":
    Option(
        "--node",
        default="",
        action="store",
        dest="node",
        help=
        "The node to send a request to. If not specified the local node is targeted."
    ),
    "nolock":
    Option(
        "--nolock",
        default=False,
        action="store_true",
        dest="nolock",
        help=
        "Don't acquire the action lock. Dangerous, but can be useful to set parameters from an action trigger."
    ),
    "nopager":
    Option("--no-pager",
           default=False,
           action="store_true",
           dest="nopager",
           help="Do not display the command result in a pager."),
    "parallel":
    Option(
        "-p",
        "--parallel",
        default=False,
        action="store_true",
        dest="parallel",
        help=
        "Start actions on specified services in parallel. :kw:`max_parallel` "
        "in node.conf limits the number of parallel running subprocesses."),
    "param":
    Option(
        "--param",
        default=None,
        action="store",
        dest="param",
        help="An expression like ``[<section>.]<keyword>`` where\n\n"
        "* <section> can be:\n\n"
        "  * a resource id\n"
        "  * a resource driver group name (fs, ip, ...). For the set and unset actions only, set the keyword for all matching resources."
    ),
    "provision":
    Option("--provision",
           default=False,
           action="store_true",
           dest="provision",
           help="Provision the service resources after config file creation. "
           "Defaults to False."),
    "purge_collector":
    Option("--purge-collector",
           default=False,
           action="store_true",
           dest="purge_collector",
           help="On service delete, also remove the service collector-side"),
    "recover":
    Option("--recover",
           default=False,
           action="store_true",
           dest="recover",
           help="Recover the stashed erroneous configuration file "
           "in a :cmd:`svcmgr edit config` command"),
    "refresh":
    Option("-r",
           "--refresh",
           default=False,
           action="store_true",
           dest="refresh",
           help="Drop status caches and re-evaluate before printing."),
    "remove":
    Option(
        "--remove",
        default=None,
        action="store",
        help="A list member to drop from the value pointed by :kw:`--param`."),
    "resource":
    Option(
        "--resource",
        default=[],
        action="append",
        help="A resource definition in json dictionary format fed to create "
        "or update. The ``rtype`` key point the driver group name, and "
        "the ``type`` key the driver name (translated to type in the "
        "configuration file section)."),
    "restore":
    Option(
        "--restore",
        default=False,
        action="store_true",
        dest="restore",
        help=
        "Keep the same service id as the template or config file referenced by the create action. The default behaviour is to generate a new id."
    ),
    "rid":
    Option(
        "--rid",
        default=None,
        action="store",
        dest="parm_rid",
        help=
        "A resource specifier expression like ``<spec>[,<spec>]``, where ``<spec>`` can be:\n\n"
        "* A resource id\n"
        "* A driver group name (app, fs, disk, ...)\n\n"
        "Examples:\n\n"
        "* ``app``\n"
        "  all app resources\n"
        "* ``container#1,ip#1``\n"
        "  only container#1 and ip#1\n"),
    "sections":
    Option("--sections",
           action="store",
           dest="sections",
           help="the comma-separated list of sections to display. "
           "if not set, all sections are displayed. sections "
           "names are: threads,arbitrators,nodes,services."),
    "service":
    Option(
        "-s",
        "--service",
        default=None,
        action="store",
        dest="parm_svcs",
        help=
        "A service selector expression ``[!]<expr>[<sep>[!]<expr>]`` where:\n\n"
        "- ``!`` is the expression negation operator\n\n"
        "- ``<sep>`` can be:\n\n"
        "  - ``,`` OR expressions\n\n"
        "  - ``+`` AND expressions\n\n"
        "- ``<expr>`` can be:\n\n"
        "  - a shell glob on service names\n\n"
        "  - ``<param><op><value>`` where:\n\n"
        "    - ``<param>`` can be:\n\n"
        "      - ``<rid>:``\n\n"
        "      - ``<group>:``\n\n"
        "      - ``<rid>.<key>``\n\n"
        "      - ``<group>.<key>``\n\n"
        "      - ``<single value jsonpath expression on the $.monitor.services.<path> dictionary extended under the 'nodes' key by each instance 'status' and 'config' data>``\n\n"
        "    - ``<op>`` can be:\n\n"
        "      - ``<``  ``>``  ``<=``  ``>=``  ``=``\n\n"
        "      - ``~`` with regexp value\n\n"
        "Examples:\n\n"
        "- ``*dns,ha*+app.timeout>1``\n\n"
        "- ``ip:+task:``\n\n"
        "- ``!*excluded``\n\n"
        "- ``$.avail=warn``\n\n"
        "- ``$.nodes.*.status.avail=warn``\n\n"
        "Note:\n\n"
        "- ``!`` usage requires single quoting the expression to prevent "
        "shell history expansion"),
    "show_disabled":
    Option("--show-disabled",
           default=None,
           action="store_true",
           dest="show_disabled",
           help="Include the disabled resources. This option supersedes "
           "the :kw:`show_disabled` value in the service configuration."),
    "slave":
    Option(
        "--slave",
        default=None,
        action="store",
        dest="slave",
        help="Limit the action to the service resources in the specified, comma-"
        "separated, slaves."),
    "slaves":
    Option("--slaves",
           default=False,
           action="store_true",
           dest="slaves",
           help="Limit the action scope to service resources in all slaves."),
    "status":
    Option(
        "--status",
        default=None,
        action="store",
        dest="parm_status",
        help=
        "Operate only on service with a local instance in the specified availability status "
        "(up, down, warn, ...)."),
    "subsets":
    Option(
        "--subsets",
        default=None,
        action="store",
        dest="parm_subsets",
        help=
        "Limit the action to the resources in the specified, comma-separated, list of subsets."
    ),
    "stats":
    Option(
        "--stats",
        default=False,
        action="store_true",
        dest="stats",
        help=
        "Show system resources usage metrics and refresh the information every --interval."
    ),
    "tags":
    Option("--tags",
           default=None,
           action="store",
           dest="parm_tags",
           help="A comma-separated list of resource tags to limit "
           "action to. The ``+`` separator can be used to impose "
           "multiple tag conditions. For example, ``tag1+tag2,tag3`` "
           "limits the action to resources with both tag1 and"
           " tag2, or tag3."),
    "template":
    Option("--template",
           default=None,
           action="store",
           dest="parm_template",
           help="The configuration file template name or id, "
           "served by the collector, to use when creating or "
           "installing a service."),
    "time":
    Option(
        "--time",
        default="300",
        action="store",
        dest="time",
        help="A duration expression like ``1m5s``. The maximum wait time for an "
        "async action to finish. Default is 300 seconds."),
    "unprovision":
    Option(
        "--unprovision",
        default=False,
        action="store_true",
        dest="unprovision",
        help=
        "Unprovision the service resources before config files file deletion. "
        "Defaults to False."),
    "value":
    Option("--value",
           default=None,
           action="store",
           dest="value",
           help="The value to set for the keyword pointed by :opt:`--param`"),
    "wait":
    Option("--wait",
           default=False,
           action="store_true",
           dest="wait",
           help="Wait for asynchronous action termination."),
    "waitlock":
    Option(
        "--waitlock",
        default="-1",
        action="store",
        dest="parm_waitlock",
        help=
        "A duration expression like ``5s``. The maximum wait time when acquiring "
        "the service action lock."),
    "watch":
    Option("-w",
           "--watch",
           default=False,
           action="store_true",
           dest="watch",
           help="refresh the information every --interval."),
})
Example #42
0
 def __init__(self, filename, max_items=10):
     """Open the backing store at *filename*, keeping at most *max_items*
     entries (default 10). Delegates to the Storage base class, translating
     our keyword name to its ``max_item_count`` parameter.
     """
     # The trailing `pass` after this call was dead code and has been removed.
     Storage.__init__(self, filename, max_item_count=max_items)
Example #43
0
 def __del__(self):
     """Run base-class cleanup when the instance is garbage-collected."""
     # The trailing `pass` after this call was dead code and has been removed.
     Storage.__del__(self)
 def __init__(self):
     """Initialise an idle master server: no transport yet, empty storage
     and protocol registry."""
     logging.debug(
         f"{self.__class__.__name__ } - Initialising master server.")
     # Transport is attached later (presumably by connection_made — confirm
     # against the enclosing class, which is not visible here).
     self.transport = None
     self.storage = Storage()
     self.protocols = Protocols()
Example #45
0
        if arm.connect():
            arm_device = s.device
            log.info("Detected UArm on device {}".format(s.device))
            display.msg("ARM OK")

    if arm is not None:
        break

    log.info("No UArm detected on any of the serial ports, retrying in {} seconds".format(config.serial_search_delay))
    display.msg("NO ARM, RETRY IN {} SECONDS".format(config.serial_search_delay))
    time.sleep(config.serial_search_delay)

# Detect the storage backend, retrying forever until one is found.
log.info("Getting list of storage")
display.msg("DETECT STORAGE")

storage = Storage(config)
while True:
    if storage.detect() is True:
        break

    # No storage found: tell the operator and retry after the configured delay.
    display.msg("NO STORAGE, RETRY IN {} SECONDS".format(config.storage_search_delay))
    time.sleep(config.storage_search_delay)

storage_path = storage.path

log.info("Detected storage '{}'".format(storage_path))
display.msg("STORAGE OK")

# Per-invocation working directory on the detected storage.
# NOTE(review): os.makedirs raises if the directory already exists — assumes
# invocation_id is unique per run.
invocation_dir = "{}/brain-invocations/{}".format(storage_path, invocation_id)
os.makedirs(invocation_dir)
Example #46
0
# Register custom URL converters for repository path segments.
app.url_map.converters["repopath"] = RepositoryPathConverter
app.url_map.converters["apirepopath"] = APIRepositoryPathConverter

Principal(app, use_sessions=False)

# Database transaction factory shared by queue and storage components.
tf = app.config["DB_TRANSACTION_FACTORY"]

# Application-wide service singletons, each bound to the Flask app.
model_cache = get_model_cache(app.config)
avatar = Avatar(app)
login_manager = LoginManager(app)
mail = Mail(app)
prometheus = PrometheusPlugin(app)
chunk_cleanup_queue = WorkQueue(app.config["CHUNK_CLEANUP_QUEUE_NAME"], tf)
instance_keys = InstanceKeys(app)
ip_resolver = IPResolver(app)
storage = Storage(app, chunk_cleanup_queue, instance_keys, config_provider,
                  ip_resolver)
userfiles = Userfiles(app, storage)
log_archive = LogArchive(app, storage)
analytics = Analytics(app)
billing = Billing(app)
sentry = Sentry(app)
build_logs = BuildLogs(app)
authentication = UserAuthentication(app, config_provider,
                                    OVERRIDE_CONFIG_DIRECTORY)
userevents = UserEventsBuilderModule(app)
superusers = SuperUserManager(app)
# NOTE(review): instance_keys is assigned a second time here, discarding the
# instance created above (which `storage` already captured) — confirm intent.
instance_keys = InstanceKeys(app)
label_validator = LabelValidator(app)
build_canceller = BuildCanceller(app)

github_trigger = GithubOAuthService(app.config, "GITHUB_TRIGGER_CONFIG")
Example #47
0
    def __init__(self, filename, max_file_size_kb=-1):
        """Open the backing file, capping it at *max_file_size_kb* kilobytes
        (-1 means unlimited). Delegates to the Storage base class.
        """
        Storage.__init__(self, filename, max_file_size_kb=max_file_size_kb)

        # Start enabled; callers can toggle this flag later.
        # (The trailing `pass` after this assignment was dead code.)
        self._enabled = True
class MasterServer:
    """UDP master server: game servers register themselves via heartbeats and
    clients ("browsers") query it for the list of known server addresses.
    """

    def __init__(self):
        """Initialise an idle server: no transport yet, empty storage and
        protocol registry."""
        logging.debug(
            f"{self.__class__.__name__ } - Initialising master server.")
        self.transport = None
        self.storage = Storage()
        self.protocols = Protocols()

    # NOTE: the datagram-handling methods were annotated ``-> NoReturn``;
    # they do return normally, so the correct annotation is ``-> None``.
    def connection_made(self, transport) -> None:
        """Store the asyncio transport once the UDP endpoint is ready."""
        self.transport = transport

    def datagram_received(self, data: bytes, address: Tuple[str,
                                                            int]) -> None:
        """Parse an incoming datagram and dispatch by header class:
        B2M (browser-to-master) or S2M (server-to-master)."""
        response = None
        logging.debug(
            f"{self.__class__.__name__ } - Received {data} from {address}")
        result: Dict = self.protocols.parse_data(data)

        if result.get("class", None) == "B2M":
            response = self.handle_client(result)
        elif result.get("class", None) == "S2M":
            response = self.handle_server(result, address)
        else:
            # Unknown header class: silently ignore the datagram.
            pass

        if response:
            self.send_response(response, address)

    def send_response(self, response: bytes, address: Tuple[str,
                                                            int]) -> None:
        """Send *response* back to *address* over the UDP transport."""
        logging.debug(
            f"{self.__class__.__name__ } - Sending {response} to {address}")
        self.transport.sendto(response, address)

    def handle_client(self, result: Dict) -> bytes:
        """Build the reply to a client query: the protocol response header
        followed by the packed addresses of all servers for the game."""
        logging.debug(f"{self.__class__.__name__ } - Header belongs to client")
        response_header = result.get("resp", None)
        server_list = self.storage.list_server_addresses(result.get("game"))
        processed_server_list = [self.pack_address(_) for _ in server_list]
        return self.create_response(response_header, processed_server_list)

    def handle_server(self, result, address: Tuple[str, int]) -> bytes:
        """Create or refresh a game-server record from a heartbeat; drop it
        when the server reports itself inactive. Returns the ack header."""
        logging.debug(f"{self.__class__.__name__ } - Header belongs to server")
        server = GameServer(address, result)
        if self.storage.get_server(server):
            self.storage.update_server(server)
        else:
            self.storage.create_server(server)

        if not server.active:
            self.storage.server_shutdown(server)

        return result.get("resp", None)

    @staticmethod
    def create_response(header: bytes, response: List[bytes]) -> bytes:
        """Concatenate the optional *header* and payload chunks into one
        datagram. NOTE: mutates *response* in place when header is truthy."""
        separator = b""
        if header:
            response.insert(0, header)

        return separator.join(response)

    @staticmethod
    def pack_address(address: str) -> bytes:
        """
        Takes string formatted address;
        eg, '192.168.0.1:27910'
        Converts to 6 byte binary string.
        H = unsigned short
        """
        port_format = ">H"
        ip, port = address.split(":")
        ip = ip_address(ip).packed
        port = struct.pack(port_format, int(port))
        return ip + port
Example #49
0
"""
This file is part of the web2py Web Framework
Copyrighted by Massimo Di Pierro <*****@*****.**>
License: LGPLv3 (http://www.gnu.org/licenses/lgpl.html)
"""

import os
import sys
import socket
import platform
from storage import Storage

# Process-wide settings container for the framework.
global_settings = Storage()
settings = global_settings  # legacy compatibility

# Platforms without os.mkdir (e.g. restricted runtimes) must keep sessions
# in the database instead of on the filesystem.
if not hasattr(os, 'mkdir'):
    global_settings.db_sessions = True

# Unless forced on above, db_sessions becomes the set of apps using DB sessions.
if global_settings.db_sessions is not True:
    global_settings.db_sessions = set()

# Root folder of the framework install; overridable via the web2py_path env var.
global_settings.gluon_parent = \
    os.environ.get('web2py_path', os.getcwd())

global_settings.applications_parent = global_settings.gluon_parent

global_settings.app_folders = set()

global_settings.debugging = False

global_settings.is_pypy = \
 def __init__(self):
     """Wire up speech-to-text, storage, and the sentiment thresholds."""
     self.stt = STT()
     self.db = Storage()
     # Score cut-offs: presumably scores below sadThreshold count as sad and
     # above happyThreshold as happy — confirm against the scoring code.
     self.sadThreshold = 0.33
     self.happyThreshold = 0.66
Example #51
0
def main():
    """Script entry point: create the Storage backend."""
    storage = Storage()
Example #52
0
    def textToENML(content, raise_ex=False, format='markdown', rawmd=False):
        """
        Transform formatted text to ENML.

        content  -- note body; anything that is not a str is coerced to "".
        raise_ex -- when True, re-raise parse failures as Exception instead
                    of exiting through tools.exitErr().
        format   -- 'pre', 'markdown', 'html', or anything else (plain text).
        rawmd    -- markdown only: skip user-configured extras and HTML
                    tag escaping, rendering the markdown as-is.

        NOTE(review): this is Python 2 code (uses the `unicode` builtin and
        byte-string `.replace` after `.encode`).
        """

        if not isinstance(content, str):
            content = ""
        try:
            # Python 2: decode the byte string to unicode for processing.
            content = unicode(content, "utf-8")
            # add 2 space before new line in paragraph for creating br tags
            content = re.sub(r'([^\r\n])([\r\n])([^\r\n])', r'\1  \n\3',
                             content)
            # content = re.sub(r'\r\n', '\n', content)

            if format == 'pre':
                # For the 'pre' format, simply wrap the content with a 'pre' tag.
                # Do not perform any further parsing/mutation.
                contentHTML = u''.join(
                    ('<pre>', content, '</pre>')).encode("utf-8")
            elif format == 'markdown':
                # Markdown format https://daringfireball.net/projects/markdown/basics
                extras = None

                if not rawmd:
                    # Pull the user's configured markdown2 extras and escape
                    # raw HTML tags before rendering.
                    storage = Storage()
                    extras = storage.getUserprop('markdown2_extras')
                    content = Editor.HTMLEscapeTag(content)

                contentHTML = markdown.markdown(content, extras=extras)

                # Convert checklist markup in the rendered HTML into
                # Evernote <en-todo> elements.
                soup = BeautifulSoup(contentHTML, 'html.parser')
                Editor.checklistInSoupToENML(soup)
                contentHTML = str(soup)
            elif format == 'html':
                # Html to ENML http://dev.evernote.com/doc/articles/enml.php
                # Strip attributes ENML forbids, plus all on* event handlers.
                soup = BeautifulSoup(content, 'html.parser')
                ATTR_2_REMOVE = [
                    "id",
                    "class",
                    # "on*",
                    "accesskey",
                    "data",
                    "dynsrc",
                    "tabindex"
                ]

                for tag in soup.findAll():
                    if hasattr(tag, 'attrs'):
                        # NOTE(review): map() is eager on Python 2; under
                        # Python 3 this lazy map would remove nothing.
                        map(lambda x: tag.attrs.pop(x, None), [
                            k for k in tag.attrs.keys()
                            if k in ATTR_2_REMOVE or k.find('on') == 0
                        ])
                contentHTML = str(soup)
            else:
                # Plain text format
                contentHTML = Editor.HTMLEscape(content)

                # Wrap each line in a div; blank lines become <div><br/></div>.
                tmpstr = ''
                for l in contentHTML.split('\n'):
                    if l == '':
                        tmpstr = tmpstr + u'<div><br/></div>'
                    else:
                        tmpstr = tmpstr + u'<div>' + l + u'</div>'

                contentHTML = tmpstr.encode("utf-8")
                # Checkbox markers become Evernote to-do elements.
                contentHTML = contentHTML.replace(
                    '[x]', '<en-todo checked="true"></en-todo>')
                contentHTML = contentHTML.replace('[ ]', '<en-todo></en-todo>')

            return Editor.wrapENML(contentHTML)

        except:
            # Deliberately broad: any parse failure is reported the same way.
            import traceback
            traceback.print_exc()
            if raise_ex:
                raise Exception("Error while parsing text to html.")
            logging.error("Error while parsing text to html.")
            out.failureMessage("Error while parsing text to html.")
            return tools.exitErr()
Example #53
0
class Menu:
    """Curses-based interactive menu and main event loop for the NetEase
    MusicBox client.

    NOTE(review): Python 2 code (`reload(sys)` / `setdefaultencoding`,
    integer `/` division in the offset math).
    """

    def __init__(self):
        # Python 2 idiom: force UTF-8 as the default string encoding.
        reload(sys)
        sys.setdefaultencoding('UTF-8')
        self.config = Config()
        self.datatype = 'main'
        self.title = '网易云音乐'
        self.datalist = [
            '排行榜', '艺术家', '新碟上架', '精选歌单', '我的歌单', 'DJ节目', '每日推荐', '私人FM', '搜索',
            '帮助'
        ]
        self.offset = 0  # first visible item of the current page
        self.index = 0  # currently highlighted item
        self.storage = Storage()
        self.storage.load()
        self.collection = self.storage.database['collections'][0]
        self.player = Player()
        self.player.playing_song_changed_callback = self.song_changed_callback
        self.cache = Cache()
        self.ui = Ui()
        self.netease = NetEase()
        self.screen = curses.initscr()
        self.screen.keypad(1)
        self.step = 10  # items per page
        self.stack = []  # view history: [datatype, title, datalist, offset, index]
        self.djstack = []  # ad-hoc "DJ" playlist built with the 'a' key
        self.userid = self.storage.database["user"]["user_id"]
        self.username = self.storage.database["user"]["nickname"]
        self.resume_play = True
        self.at_playing_list = False
        signal.signal(signal.SIGWINCH, self.change_term)
        signal.signal(signal.SIGINT, self.send_kill)
        self.START = time.time()

    def change_term(self, signum, frame):
        """SIGWINCH handler: redraw after a terminal resize."""
        self.ui.screen.clear()
        self.ui.screen.refresh()

    def send_kill(self, signum, fram):
        """SIGINT handler: persist state, restore the terminal, exit."""
        self.player.stop()
        self.cache.quit()
        self.storage.save()
        curses.endwin()
        sys.exit()

    def update_alert(self, version):
        """Pop a desktop notification when a newer release is available."""
        latest = Menu().check_version()
        if latest != version and latest != 0:
            if platform.system() == 'Darwin':
                os.system(
                    '/usr/bin/osascript -e \'display notification "MusicBox Update is available"sound name "/System/Library/Sounds/Ping.aiff"\''
                )
                time.sleep(0.5)
                os.system(
                    '/usr/bin/osascript -e \'display notification "NetEase-MusicBox installed version:'
                    + version + '\nNetEase-MusicBox latest version:' + latest +
                    '"\'')
            else:
                os.system(
                    '/usr/bin/notify-send "MusicBox Update is available"')

    def signin_alert(self, type):
        """Notify about a successful daily sign-in (0 = mobile, else PC)."""
        if type == 0:
            if platform.system() == 'Darwin':
                os.system(
                    '/usr/bin/osascript -e \'display notification "Mobile signin success"sound name "/System/Library/Sounds/Ping.aiff"\''
                )
            else:
                os.system('/usr/bin/notify-send "Mobile signin success"')
        else:
            if platform.system() == 'Darwin':
                os.system(
                    '/usr/bin/osascript -e \'display notification "PC signin success"sound name "/System/Library/Sounds/Ping.aiff"\''
                )
            else:
                os.system('/usr/bin/notify-send "PC signin success"')

    def check_version(self):
        """Return the latest published version string, or 0 on any failure."""
        # Check for updates && do the daily sign-in.
        try:
            mobilesignin = self.netease.daily_signin(0)
            if mobilesignin != -1 and mobilesignin['code'] != -2:
                self.signin_alert(0)
            time.sleep(0.5)
            pcsignin = self.netease.daily_signin(1)
            if pcsignin != -1 and pcsignin['code'] != -2:
                self.signin_alert(1)
            tree = ET.ElementTree(
                ET.fromstring(str(self.netease.get_version())))
            root = tree.getroot()
            # Fixed position of the version element in the returned XML.
            return root[0][4][0][0].text
        except:
            return 0

    def start_fork(self, version):
        """Fork: the child shows the update alert, the parent runs the UI."""
        pid = os.fork()
        if pid == 0:
            Menu().update_alert(version)
        else:
            Menu().start()

    def play_pause(self):
        """Toggle between pause and resume for the current playlist."""
        if len(self.storage.database["player_info"]["player_list"]) == 0:
            return
        if self.player.pause_flag:
            self.player.resume()
        else:
            self.player.pause()
        time.sleep(0.1)

    def next_song(self):
        """Skip to the next song in the current playlist."""
        if len(self.storage.database["player_info"]["player_list"]) == 0:
            return
        self.player.next()
        time.sleep(0.1)

    def previous_song(self):
        """Skip back to the previous song in the current playlist."""
        if len(self.storage.database["player_info"]["player_list"]) == 0:
            return
        self.player.prev()
        time.sleep(0.1)

    def start(self):
        """Main key-handling loop; runs until 'q' (or 'w') quits."""
        self.START = time.time() // 1
        self.ui.build_menu(self.datatype, self.title, self.datalist,
                           self.offset, self.index, self.step, self.START)
        self.ui.build_process_bar(
            self.player.process_location, self.player.process_length,
            self.player.playing_flag, self.player.pause_flag,
            self.storage.database['player_info']['playing_mode'])
        self.stack.append([
            self.datatype, self.title, self.datalist, self.offset, self.index
        ])
        if bind_global:
            keybinder.bind(self.config.get_item("global_play_pause"),
                           self.play_pause)
            keybinder.bind(self.config.get_item("global_next"), self.next_song)
            keybinder.bind(self.config.get_item("global_previous"),
                           self.previous_song)
        while True:
            datatype = self.datatype
            title = self.title
            datalist = self.datalist
            offset = self.offset
            idx = index = self.index
            step = self.step
            stack = self.stack
            djstack = self.djstack
            self.screen.timeout(500)
            key = self.screen.getch()
            if bind_global:
                keybinder.gtk.main_iteration(False)
            self.ui.screen.refresh()

            # term resize
            if key == -1:
                self.ui.update_size()
                self.player.update_size()

            # quit
            if key == ord('q'):
                break

            # quit and wipe the stored user info
            if key == ord('w'):
                self.storage.database['user'] = {
                    "username": "",
                    "password": "",
                    "user_id": "",
                    "nickname": "",
                }
                try:
                    os.remove(self.storage.cookie_path)
                except:
                    break
                break

            # move up
            elif key == ord('k'):
                # turn page if at beginning
                if idx == offset:
                    if offset == 0:
                        continue
                    self.offset -= step
                    # move the cursor to the last item of the previous page
                    self.index = offset - 1
                else:
                    self.index = carousel(
                        offset,
                        min(len(datalist), offset + step) - 1, idx - 1)
                self.START = time.time()

            # move down
            elif key == ord('j'):
                # turn page if at end
                if idx == min(len(datalist), offset + step) - 1:
                    if offset + step >= len(datalist):
                        continue
                    self.offset += step
                    # move the cursor to the first item of the next page
                    self.index = offset + step
                else:
                    self.index = carousel(
                        offset,
                        min(len(datalist), offset + step) - 1, idx + 1)
                self.START = time.time()

            # numeric shortcut keys
            elif ord('0') <= key <= ord('9'):
                if self.datatype == 'songs' or self.datatype == 'djchannels' or self.datatype == 'help':
                    continue
                idx = key - ord('0')
                self.ui.build_menu(self.datatype, self.title, self.datalist,
                                   self.offset, idx, self.step, self.START)
                self.ui.build_loading()
                self.dispatch_enter(idx)
                self.index = 0
                self.offset = 0

            # page up
            elif key == ord('u'):
                if offset == 0:
                    continue
                self.START = time.time()
                self.offset -= step

                # e.g. 23 - 10 = 13 --> 10
                self.index = (index - step) // step * step

            # page down
            elif key == ord('d'):
                if offset + step >= len(datalist):
                    continue
                self.START = time.time()
                self.offset += step

                # e.g. 23 + 10 = 33 --> 30
                self.index = (index + step) // step * step

            # enter / go forward
            elif key == ord('l') or key == 10:
                if self.datatype == 'songs' or self.datatype == 'djchannels' or self.datatype == 'help' or len(
                        self.datalist) <= 0:
                    continue
                self.START = time.time()
                self.ui.build_loading()
                self.dispatch_enter(idx)
                self.index = 0
                self.offset = 0

            # go back
            elif key == ord('h'):
                # if not main menu
                if len(self.stack) == 1:
                    continue
                self.START = time.time()
                up = stack.pop()
                self.datatype = up[0]
                self.title = up[1]
                self.datalist = up[2]
                self.offset = up[3]
                self.index = up[4]
                self.at_playing_list = False

            # search
            elif key == ord('f'):
                # 8 is the 'search' menu
                self.dispatch_enter(8)

            # play next song
            elif key == ord(']'):
                self.next_song()

            # play previous song
            elif key == ord('['):
                self.previous_song()

            # volume up
            elif key == ord('='):
                self.player.volume_up()

            # volume down
            elif key == ord('-'):
                self.player.volume_down()

            # shuffle
            elif key == ord('?'):
                if len(self.storage.database["player_info"]
                       ["player_list"]) == 0:
                    continue
                self.player.shuffle()
                time.sleep(0.1)

            # like the playing song
            elif key == ord(','):
                return_data = self.request_api(self.netease.fm_like,
                                               self.player.get_playing_id())
                if return_data != -1:
                    if platform.system() == 'Darwin':
                        os.system(
                            '/usr/bin/osascript -e \'display notification "Added successfully"\''
                        )
                    else:
                        os.system('/usr/bin/notify-send "Added successfully"')

            # trash the current FM song
            elif key == ord('.'):
                if self.datatype == 'fmsongs':
                    if len(self.storage.database["player_info"]
                           ["player_list"]) == 0:
                        continue
                    self.player.next()
                    return_data = self.request_api(
                        self.netease.fm_trash, self.player.get_playing_id())
                    if return_data != -1:
                        if platform.system() == 'Darwin':
                            os.system(
                                '/usr/bin/osascript -e \'display notification "Deleted successfully"\''
                            )
                        else:
                            os.system(
                                '/usr/bin/notify-send "Deleted successfully"')
                    time.sleep(0.1)

            # next FM song
            elif key == ord('/'):
                if self.datatype == 'fmsongs':
                    if len(self.storage.database["player_info"]
                           ["player_list"]) == 0:
                        continue
                    self.player.next()
                    time.sleep(0.1)

            # play / pause
            elif key == ord(' '):
                # If not open a new playing list, just play and pause.
                try:
                    if self.datalist[idx]['song_id'] == self.player.playing_id:
                        self.player.play_and_pause(
                            self.storage.database['player_info']['idx'])
                        time.sleep(0.1)
                        continue
                except:
                    pass
                # If change to a new playing list. Add playing list and play.
                if datatype == 'songs':
                    self.resume_play = False
                    self.player.new_player_list('songs', self.title,
                                                self.datalist, -1)
                    self.player.end_callback = None
                    self.player.play_and_pause(idx)
                    self.at_playing_list = True
                elif datatype == 'djchannels':
                    self.resume_play = False
                    self.player.new_player_list('djchannels', self.title,
                                                self.datalist, -1)
                    self.player.end_callback = None
                    self.player.play_and_pause(idx)
                    self.at_playing_list = True
                elif datatype == 'fmsongs':
                    self.resume_play = False
                    self.storage.database['player_info']['playing_mode'] = 0
                    self.player.new_player_list('fmsongs', self.title,
                                                self.datalist, -1)
                    self.player.end_callback = self.fm_callback
                    self.player.play_and_pause(idx)
                    self.at_playing_list = True
                else:
                    self.player.play_and_pause(
                        self.storage.database['player_info']['idx'])
                time.sleep(0.1)

            # show the current playing list
            elif key == ord('p'):
                self.show_playing_song()

            # cycle the playing mode (5 modes)
            elif key == ord('P'):
                self.storage.database['player_info']['playing_mode'] = \
                    (self.storage.database['player_info']['playing_mode'] + 1) % 5

            # add to the DJ list
            elif key == ord('a'):
                if datatype == 'songs' and len(datalist) != 0:
                    self.djstack.append(datalist[idx])
                elif datatype == 'artists':
                    pass

            # load the DJ list
            elif key == ord('z'):
                self.stack.append([datatype, title, datalist, offset, index])
                self.datatype = 'songs'
                self.title = '网易云音乐 > 打碟'
                self.datalist = self.djstack
                self.offset = 0
                self.index = 0

            # add to the collection
            elif key == ord('s'):
                if (datatype == 'songs'
                        or datatype == 'djchannels') and len(datalist) != 0:
                    self.collection.append(datalist[idx])
                    if platform.system() == 'Darwin':
                        os.system(
                            '/usr/bin/osascript -e \'display notification "Added successfully"\''
                        )
                    else:
                        os.system('/usr/bin/notify-send "Added successfully"')

            # load the collection
            elif key == ord('c'):
                self.stack.append([datatype, title, datalist, offset, index])
                self.datatype = 'songs'
                self.title = '网易云音乐 > 收藏'
                self.datalist = self.collection
                self.offset = 0
                self.index = 0

            # remove from the current list
            elif key == ord('r'):
                if (datatype == 'songs'
                        or datatype == 'djchannels') and len(datalist) != 0:
                    self.datalist.pop(idx)
                    self.index = carousel(
                        offset,
                        min(len(datalist), offset + step) - 1, idx)

            # move the current item down
            elif key == ord("J"):
                if datatype != 'main' and len(
                        datalist) != 0 and idx + 1 != len(self.datalist):
                    self.START = time.time()
                    song = self.datalist.pop(idx)
                    self.datalist.insert(idx + 1, song)
                    self.index = idx + 1
                    # turn the page
                    if self.index >= offset + step:
                        self.offset = offset + step

            # move the current item up
            elif key == ord("K"):
                if datatype != 'main' and len(datalist) != 0 and idx != 0:
                    self.START = time.time()
                    song = self.datalist.pop(idx)
                    self.datalist.insert(idx - 1, song)
                    self.index = idx - 1
                    # turn the page
                    if self.index < offset:
                        self.offset = offset - step

            # jump back to the main menu, keeping history
            elif key == ord('m'):
                if datatype != 'main':
                    self.stack.append(
                        [datatype, title, datalist, offset, index])
                    self.datatype = self.stack[0][0]
                    self.title = self.stack[0][1]
                    self.datalist = self.stack[0][2]
                    self.offset = 0
                    self.index = 0

            # open the project page from the help screen
            elif key == ord('g'):
                if datatype == 'help':
                    webbrowser.open_new_tab(
                        'https://github.com/darknessomi/musicbox')

            # start caching/downloading the selected song
            elif key == ord("C"):
                s = self.datalist[idx]
                cache_thread = threading.Thread(
                    target=self.player.cacheSong1time,
                    args=(s['song_id'], s['song_name'], s['artist'],
                          s['mp3_url']))
                cache_thread.start()

            # open the playing song's web page
            elif key == ord('i'):
                if self.player.playing_id != -1:
                    webbrowser.open_new_tab('http://music.163.com/#/song?id=' +
                                            str(self.player.playing_id))

            self.ui.build_process_bar(
                self.player.process_location, self.player.process_length,
                self.player.playing_flag, self.player.pause_flag,
                self.storage.database['player_info']['playing_mode'])
            self.ui.build_menu(self.datatype, self.title, self.datalist,
                               self.offset, self.index, self.step, self.START)

        self.player.stop()
        self.cache.quit()
        self.storage.save()
        curses.endwin()

    def dispatch_enter(self, idx):
        """Open item *idx* for the current datatype, pushing the current
        view onto the history stack first."""
        # The end of stack
        netease = self.netease
        datatype = self.datatype
        title = self.title
        datalist = self.datalist
        offset = self.offset
        index = self.index
        self.stack.append([datatype, title, datalist, offset, index])

        if idx > len(self.datalist):
            return False

        if datatype == 'main':
            self.choice_channel(idx)

        # hot songs of this artist
        elif datatype == 'artists':
            artist_id = datalist[idx]['artist_id']
            songs = netease.artists(artist_id)
            self.datatype = 'songs'
            self.datalist = netease.dig_info(songs, 'songs')
            self.title += ' > ' + datalist[idx]['artists_name']

        # songs on this album
        elif datatype == 'albums':
            album_id = datalist[idx]['album_id']
            songs = netease.album(album_id)
            self.datatype = 'songs'
            self.datalist = netease.dig_info(songs, 'songs')
            self.title += ' > ' + datalist[idx]['albums_name']

        # featured-playlist options
        elif datatype == 'playlists':
            data = self.datalist[idx]
            self.datatype = data['datatype']
            self.datalist = netease.dig_info(data['callback'](), self.datatype)
            self.title += ' > ' + data['title']

        # songs of a site-wide top playlist
        elif datatype == 'top_playlists':
            log.debug(datalist)
            playlist_id = datalist[idx]['playlist_id']
            songs = netease.playlist_detail(playlist_id)
            self.datatype = 'songs'
            self.datalist = netease.dig_info(songs, 'songs')
            self.title += ' > ' + datalist[idx]['playlists_name']

        # featured playlists by category
        elif datatype == 'playlist_classes':
            # the category name
            data = self.datalist[idx]
            self.datatype = 'playlist_class_detail'
            self.datalist = netease.dig_info(data, self.datatype)
            self.title += ' > ' + data
            log.debug(self.datalist)

        # detail of one category
        elif datatype == 'playlist_class_detail':
            # the sub-category
            data = self.datalist[idx]
            self.datatype = 'top_playlists'
            self.datalist = netease.dig_info(netease.top_playlists(data),
                                             self.datatype)
            log.debug(self.datalist)
            self.title += ' > ' + data

        # song charts
        elif datatype == 'toplists':
            songs = netease.top_songlist(idx)
            self.title += ' > ' + self.datalist[idx]
            self.datalist = netease.dig_info(songs, 'songs')
            self.datatype = 'songs'

        # search menu
        elif datatype == 'search':
            ui = self.ui
            # no need to do stack.append, Otherwise there will be a bug when you input key 'h' to return
            # if idx in range(1, 5):
            # self.stack.append([self.datatype, self.title, self.datalist, self.offset, self.index])
            self.index = 0
            self.offset = 0
            if idx == 0:
                # search results reuse the top_playlists handling
                self.datatype = 'top_playlists'
                self.datalist = ui.build_search('search_playlist')
                self.title = '精选歌单搜索列表'

            elif idx == 1:
                self.datatype = 'songs'
                self.datalist = ui.build_search('songs')
                self.title = '歌曲搜索列表'

            elif idx == 2:
                self.datatype = 'artists'
                self.datalist = ui.build_search('artists')
                self.title = '艺术家搜索列表'

            elif idx == 3:
                self.datatype = 'albums'
                self.datalist = ui.build_search('albums')
                self.title = '专辑搜索列表'

    def show_playing_song(self):
        """Switch the view to the playlist that is currently playing."""
        if len(self.storage.database['player_info']['player_list']) == 0:
            return
        if not self.at_playing_list:
            self.stack.append([
                self.datatype, self.title, self.datalist, self.offset,
                self.index
            ])
            self.at_playing_list = True
        self.datatype = self.storage.database['player_info'][
            'player_list_type']
        self.title = self.storage.database['player_info']['player_list_title']
        self.datalist = []
        for i in self.storage.database['player_info']['player_list']:
            self.datalist.append(self.storage.database['songs'][i])
        self.index = self.storage.database['player_info']['idx']
        # NOTE(review): Py2 integer division; under Py3 '/' would yield a
        # float offset here.
        self.offset = self.storage.database['player_info'][
            'idx'] / self.step * self.step
        if self.resume_play:
            if self.datatype == "fmsongs":
                self.player.end_callback = self.fm_callback
            else:
                self.player.end_callback = None
            self.storage.database['player_info']['idx'] = -1
            self.player.play_and_pause(self.index)
            self.resume_play = False

    def song_changed_callback(self):
        """Player callback: refresh the view when the song changes."""
        if self.at_playing_list:
            self.show_playing_song()

    def fm_callback(self):
        """Player end-callback for FM mode: fetch and append more FM songs."""
        log.debug("FM CallBack.")
        data = self.get_new_fm()
        self.player.append_songs(data)
        if self.datatype == 'fmsongs':
            if len(self.storage.database['player_info']['player_list']) == 0:
                return
            self.datatype = self.storage.database['player_info'][
                'player_list_type']
            self.title = self.storage.database['player_info'][
                'player_list_title']
            self.datalist = []
            for i in self.storage.database['player_info']['player_list']:
                self.datalist.append(self.storage.database['songs'][i])
            self.index = self.storage.database['player_info']['idx']
            # NOTE(review): Py2 integer division, as in show_playing_song.
            self.offset = self.storage.database['player_info'][
                'idx'] / self.step * self.step

    def request_api(self, func, *args):
        """Call *func(*args)*, re-logging in (prompting if necessary) when
        the session has expired; returns -1 when login is cancelled."""
        if self.storage.database['user']['user_id'] != "":
            result = func(*args)
            if result != -1:
                return result
        log.debug("Re Login.")
        user_info = {}
        if self.storage.database['user']['username'] != "":
            user_info = self.netease.login(
                self.storage.database['user']['username'],
                self.storage.database['user']['password'])
        if self.storage.database['user'][
                'username'] == "" or user_info['code'] != 200:
            data = self.ui.build_login()
            # login cancelled
            if data == -1:
                return -1
            user_info = data[0]
            self.storage.database['user']['username'] = data[1][0]
            self.storage.database['user']['password'] = data[1][1]
            self.storage.database['user']['user_id'] = user_info['account'][
                'id']
            self.storage.database['user']['nickname'] = user_info['profile'][
                'nickname']
        self.userid = self.storage.database["user"]["user_id"]
        self.username = self.storage.database["user"]["nickname"]
        return func(*args)

    def get_new_fm(self):
        """Fetch one batch of personal-FM songs (empty on failure)."""
        myplaylist = []
        for count in range(0, 1):
            data = self.request_api(self.netease.personal_fm)
            if data == -1:
                break
            myplaylist += data
            time.sleep(0.2)
        return self.netease.dig_info(myplaylist, "fmsongs")

    def choice_channel(self, idx):
        """Handle a selection made on the main menu."""
        # charts
        netease = self.netease
        if idx == 0:
            self.datalist = netease.return_toplists()
            self.title += ' > 排行榜'
            self.datatype = 'toplists'

        # artists
        elif idx == 1:
            artists = netease.top_artists()
            self.datalist = netease.dig_info(artists, 'artists')
            self.title += ' > 艺术家'
            self.datatype = 'artists'

        # new albums
        elif idx == 2:
            albums = netease.new_albums()
            self.datalist = netease.dig_info(albums, 'albums')
            self.title += ' > 新碟上架'
            self.datatype = 'albums'

        # featured playlists
        elif idx == 3:
            self.datalist = [{
                'title': '全站置顶',
                'datatype': 'top_playlists',
                'callback': netease.top_playlists
            }, {
                'title': '分类精选',
                'datatype': 'playlist_classes',
                'callback': netease.playlist_classes
            }]
            self.title += ' > 精选歌单'
            self.datatype = 'playlists'

        # my playlists
        elif idx == 4:
            myplaylist = self.request_api(self.netease.user_playlist,
                                          self.userid)
            if myplaylist == -1:
                return
            self.datatype = 'top_playlists'
            self.datalist = netease.dig_info(myplaylist, self.datatype)
            self.title += ' > ' + self.username + ' 的歌单'

        # DJ programs
        elif idx == 5:
            self.datatype = 'djchannels'
            self.title += ' > DJ节目'
            self.datalist = netease.djchannels()

        # daily recommendations
        elif idx == 6:
            self.datatype = 'songs'
            self.title += ' > 每日推荐'
            myplaylist = self.request_api(self.netease.recommend_playlist)
            if myplaylist == -1:
                return
            self.datalist = self.netease.dig_info(myplaylist, self.datatype)

        # personal FM
        elif idx == 7:
            self.datatype = 'fmsongs'
            self.title += ' > 私人FM'
            self.datalist = self.get_new_fm()

        # search
        elif idx == 8:
            self.datatype = 'search'
            self.title += ' > 搜索'
            self.datalist = ['歌曲', '艺术家', '专辑', '网易精选集']

        # help
        elif idx == 9:
            self.datatype = 'help'
            self.title += ' > 帮助'
            self.datalist = shortcut

        self.offset = 0
        self.index = 0
Example #54
0
 def setUp(self):
     """Build the Storage under test from the shared config file."""
     parser = configparser.ConfigParser()
     parser.read('../config.ini')
     self.storage = Storage(parser)
Example #55
0
import string
from datetime import datetime

from fastapi import FastAPI, HTTPException, Response
from pydantic import AnyHttpUrl, BaseModel, typing, validator

from storage import DuplicateKey, Storage

# Shortcodes are SHORTCODE_LENGTH characters drawn from ALLOWED_CHARS.
SHORTCODE_LENGTH = 6
ALLOWED_CHARS = set(string.ascii_lowercase + string.digits + "_")

app = FastAPI(
    title="URL Shortener", description="A code for an interview.", version="1.0.0"
)


# Module-level storage instance shared by all request handlers.
storage = Storage()


class UnknownShortcode(HTTPException):
    """Raised when a requested shortcode does not exist in storage."""

    # NOTE(review): 412 (Precondition Failed) is unusual for a lookup miss;
    # 404 may have been intended — confirm the API contract.
    def __init__(self, detail):
        super().__init__(status_code=412, detail=detail)


class InvalidShortcode(HTTPException):
    """Raised when a supplied shortcode fails validation (HTTP 412)."""

    def __init__(self, detail):
        super().__init__(status_code=412, detail=detail)


class AlreadyInUse(HTTPException):
    """Raised when a shortcode is already taken (HTTP 409 Conflict)."""

    def __init__(self, detail):
        super().__init__(status_code=409, detail=detail)
Example #56
0
 def __init__(self):
     """Load persisted state through a project Storage instance."""
     self.storage_instance = Storage()
     # Snapshot of the backing store; not refreshed automatically.
     self.storage = self.storage_instance.read_storage()
Example #57
0
async def start_redis(app):
    """Connect Redis and attach shared services to *app*.

    Looks like an aiohttp on-startup hook (takes the app, stores services
    in its mapping) — confirm against the caller.
    """
    redis = await aioredis.create_redis_pool(REDIS_DSN)

    app['redis'] = redis
    app['storage'] = Storage(STORAGE_NAME, redis=redis)
    app['converter'] = Converter(storage=app['storage'])
class BlockchainProcessor(Processor):
    def __init__(self, config, shared):
        """Set up caches, mempool state, storage, the viacoind RPC endpoint,
        the headers file, and start the catch-up thread."""
        Processor.__init__(self)

        self.mtimes = {}  # monitoring: cumulative seconds per named stage (see mtime())
        self.shared = shared
        self.config = config
        self.up_to_date = False

        # Subscription bookkeeping, guarded by watch_lock.
        self.watch_lock = threading.Lock()
        self.watch_blocks = []
        self.watch_headers = []
        self.watched_addresses = {}

        # History / chunk caches, guarded by cache_lock.
        self.history_cache = {}
        self.max_cache_size = 100000
        self.chunk_cache = {}
        self.cache_lock = threading.Lock()
        self.headers_data = ''  # pending header bytes not yet flushed to disk
        self.headers_path = config.get('leveldb', 'path')

        # Mempool state, guarded by mempool_lock.
        self.mempool_values = {}
        self.mempool_addresses = {}
        self.mempool_hist = {}
        self.mempool_hashes = set([])
        self.mempool_lock = threading.Lock()

        self.address_queue = Queue()

        try:
            self.test_reorgs = config.getboolean(
                'leveldb', 'test_reorgs')  # simulate random blockchain reorgs
        except:
            # Option absent or malformed: disable reorg simulation.
            self.test_reorgs = False
        self.storage = Storage(config, shared, self.test_reorgs)

        # Serializes storage access during history lookups.
        self.dblock = threading.Lock()

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('viacoind', 'viacoind_user'),
            config.get('viacoind', 'viacoind_password'),
            config.get('viacoind', 'viacoind_host'),
            config.get('viacoind', 'viacoind_port'))

        self.sent_height = 0
        self.sent_header = None

        # catch_up headers: bring the flat headers file up to storage height.
        self.init_headers(self.storage.height)

        self.blockchain_thread = threading.Thread(target=self.do_catch_up)
        self.blockchain_thread.start()

    def do_catch_up(self):
        """Background thread: sync to the daemon tip, then poll every 10s."""

        self.header = self.block2header(
            self.bitcoind('getblock', [self.storage.last_hash]))
        self.header['utxo_root'] = self.storage.get_root_hash().encode('hex')
        self.catch_up(sync=False)
        print_log("Blockchain is up to date.")
        self.memorypool_update()
        print_log("Memory pool initialized.")

        while not self.shared.stopped():
            self.main_iteration()
            if self.shared.paused():
                print_log("viacoind is responding")
                self.shared.unpause()
            time.sleep(10)

    def mtime(self, name):
        """Charge the elapsed time since the last call to stage *name*.

        Calling with name='' resets the reference time without charging.
        """
        now = time.time()
        if name != '':
            # NOTE(review): assumes an earlier call already set self.now
            # (first call must use name='') — confirm call sites.
            delta = now - self.now
            t = self.mtimes.get(name, 0)
            self.mtimes[name] = t + delta
        self.now = now

    def print_mtime(self):
        """Log the cumulative per-stage timings recorded by mtime()."""
        parts = ['%s:%.2f ' % (name, secs) for name, secs in self.mtimes.items()]
        print_log(''.join(parts))

    def bitcoind(self, method, params=[]):
        """JSON-RPC call to viacoind; blocks and retries until it answers.

        Raises BaseException on an RPC-level error; re-raises the transport
        error when the server is stopping while the daemon is unreachable.
        """
        # NOTE(review): mutable default [] is shared across calls; safe only
        # while no caller mutates it.
        postdata = dumps({"method": method, 'params': params, 'id': 'jsonrpc'})
        while True:
            try:
                respdata = urllib.urlopen(self.bitcoind_url, postdata).read()
                break
            except:
                print_log("cannot reach viacoind...")
                self.shared.pause()
                time.sleep(10)
                if self.shared.stopped():
                    # this will end the thread
                    raise
                continue

        r = loads(respdata)
        if r['error'] is not None:
            raise BaseException(r['error'])
        return r.get('result')

    def block2header(self, b):
        """Project a getblock RPC result onto the electrum header fields."""
        header = {}
        header["block_height"] = b.get('height')
        header["version"] = b.get('version')
        header["prev_block_hash"] = b.get('previousblockhash')
        header["merkle_root"] = b.get('merkleroot')
        header["timestamp"] = b.get('time')
        header["bits"] = int(b.get('bits'), 16)  # compact target is hex in RPC
        header["nonce"] = b.get('nonce')
        return header

    def get_header(self, height):
        """Fetch and convert the block header at *height* from the daemon."""
        h = self.bitcoind('getblockhash', [height])
        return self.block2header(self.bitcoind('getblock', [h]))

    def init_headers(self, db_height):
        """(Re)build the flat headers file up to *db_height*.

        Walks forward fetching headers from the daemon; on a prev-hash
        mismatch it backs up two heights to step past an orphaned block.
        """
        self.chunk_cache = {}
        self.headers_filename = os.path.join(self.headers_path,
                                             'blockchain_headers')

        if os.path.exists(self.headers_filename):
            height = os.path.getsize(
                self.headers_filename) / 80 - 1  # the current height
            if height > 0:
                prev_hash = self.hash_header(self.read_header(height))
            else:
                prev_hash = None
        else:
            # No file yet: create it empty and start from scratch.
            open(self.headers_filename, 'wb').close()
            prev_hash = None
            height = -1

        if height < db_height:
            print_log("catching up missing headers:", height, db_height)

        try:
            while height < db_height:
                height += 1
                header = self.get_header(height)
                if height > 1:
                    if prev_hash != header.get('prev_block_hash'):
                        # The prev_hash block is orphaned, go back
                        print_log("reorganizing, a block in file is orphaned:",
                                  prev_hash)
                        # Go to the parent of the orphaned block
                        height -= 2
                        prev_hash = self.hash_header(self.read_header(height))
                        continue

                self.write_header(header, sync=False)
                prev_hash = self.hash_header(header)
                if (height % 1000) == 0:
                    print_log("headers file:", height)
        except KeyboardInterrupt:
            # Persist whatever was buffered before exiting.
            self.flush_headers()
            sys.exit()

        self.flush_headers()

    def hash_header(self, header):
        """Hash the serialized header and return it as reversed hex."""
        raw = header_to_string(header).decode('hex')
        return rev_hex(Hash(raw).encode('hex'))

    def read_header(self, block_height):
        """Read the 80-byte record at *block_height* from the headers file.

        Returns the parsed header, or None when the file or record is absent.
        """
        if not os.path.exists(self.headers_filename):
            return
        with open(self.headers_filename, 'rb') as f:
            f.seek(block_height * 80)
            raw = f.read(80)
        if len(raw) == 80:
            return header_from_string(raw)

    def read_chunk(self, index):
        """Return chunk *index* (2016 headers) from disk, hex-encoded."""
        offset = index * 2016 * 80
        with open(self.headers_filename, 'rb') as f:
            f.seek(offset)
            raw = f.read(2016 * 80)
        return raw.encode('hex')

    def write_header(self, header, sync=True):
        """Buffer *header*; flush when *sync* or the buffer is large.

        Also drops the now-stale chunk-cache entry covering this height.
        """
        if not self.headers_data:
            # Remember where the buffered run starts so flush can seek there.
            self.headers_offset = header.get('block_height')

        self.headers_data += header_to_string(header).decode('hex')
        if sync or len(self.headers_data) > 40 * 100:
            self.flush_headers()

        with self.cache_lock:
            # Integer division: one cache entry per 2016-header chunk.
            chunk_index = header.get('block_height') / 2016
            if self.chunk_cache.get(chunk_index):
                self.chunk_cache.pop(chunk_index)

    def pop_header(self):
        """Drop the most recently buffered header record (pre-flush only)."""
        # we need to do this only if we have not flushed
        if not self.headers_data:
            return
        self.headers_data = self.headers_data[:-40]

    def flush_headers(self):
        """Write buffered header bytes to the headers file at their offset."""
        if not self.headers_data:
            return
        # rb+ keeps existing records; seek to the buffered run's start.
        with open(self.headers_filename, 'rb+') as f:
            f.seek(self.headers_offset * 80)
            f.write(self.headers_data)
        self.headers_data = ''

    def get_chunk(self, i):
        """Return chunk *i* of headers (hex), caching successful reads.

        Chunks live on disk; only non-empty reads are cached, so an empty
        read (e.g. past the end of the file) is retried on the next call
        instead of leaving a useless cache entry.  Matches the behaviour of
        the later revision of this class.
        """
        with self.cache_lock:
            chunk = self.chunk_cache.get(i)
            if not chunk:
                chunk = self.read_chunk(i)
                # Guard: the original cached '' as well, which the falsy
                # check above would never reuse anyway.
                if chunk:
                    self.chunk_cache[i] = chunk

        return chunk

    def get_mempool_transaction(self, txid):
        """Fetch and deserialize a mempool transaction; None on any failure."""
        try:
            raw_tx = self.bitcoind('getrawtransaction', [txid, 0])
        except:
            # Unknown txid (or daemon error): treat as not available.
            return None

        vds = deserialize.BCDataStream()
        vds.write(raw_tx.decode('hex'))
        try:
            return deserialize.parse_Transaction(vds, is_coinbase=False)
        except:
            print_log("ERROR: cannot parse", txid)
            return None

    def get_history(self, addr, cache_only=False):
        """Confirmed history for *addr* plus mempool entries (height 0).

        Returns the cached list when present, -1 when *cache_only* and
        nothing is cached, otherwise reads storage under dblock.
        """
        with self.cache_lock:
            hist = self.history_cache.get(addr)
        if hist is not None:
            return hist
        if cache_only:
            return -1

        with self.dblock:
            hist = self.storage.get_history(addr)

        # add memory pool
        with self.mempool_lock:
            for txid, delta in self.mempool_hist.get(addr, []):
                hist.append({'tx_hash': txid, 'height': 0})

        with self.cache_lock:
            # Crude eviction: wipe everything once the cache grows too big.
            if len(self.history_cache) > self.max_cache_size:
                logger.info("clearing cache")
                self.history_cache.clear()
            self.history_cache[addr] = hist
        return hist

    def get_unconfirmed_value(self, addr):
        """Net mempool value delta for *addr*."""
        with self.mempool_lock:
            return sum(delta for _txid, delta in self.mempool_hist.get(addr, []))

    def get_status(self, addr, cache_only=False):
        """Electrum status hash for *addr*.

        None when there is no history, '*' passed through, -1 when
        *cache_only* and the history is not cached.
        """
        tx_points = self.get_history(addr, cache_only)
        if cache_only and tx_points == -1:
            return -1

        if not tx_points:
            return None
        if tx_points == ['*']:
            return '*'
        status = ''.join('%s:%d:' % (tx.get('tx_hash'), tx.get('height'))
                         for tx in tx_points)
        return hashlib.sha256(status).digest().encode('hex')

    def get_merkle(self, tx_hash, height):
        """Merkle branch for *tx_hash* in the block at *height*.

        Returns {'block_height', 'merkle': sibling hashes leaf-to-root,
        'pos': index of the tx in the block}.
        """

        block_hash = self.bitcoind('getblockhash', [height])
        b = self.bitcoind('getblock', [block_hash])
        tx_list = b.get('tx')
        tx_pos = tx_list.index(tx_hash)

        merkle = map(hash_decode, tx_list)
        target_hash = hash_decode(tx_hash)
        s = []
        while len(merkle) != 1:
            if len(merkle) % 2:
                # Odd level: duplicate the last hash (bitcoin convention).
                merkle.append(merkle[-1])
            n = []
            while merkle:
                new_hash = Hash(merkle[0] + merkle[1])
                # Record the sibling whenever our target is in this pair.
                if merkle[0] == target_hash:
                    s.append(hash_encode(merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append(hash_encode(merkle[0]))
                    target_hash = new_hash
                n.append(new_hash)
                merkle = merkle[2:]
            merkle = n

        return {"block_height": height, "merkle": s, "pos": tx_pos}

    def add_to_history(self, addr, tx_hash, tx_pos, tx_height):
        """Insert a tx into addr's serialized history, kept sorted by height."""
        # keep it sorted
        s = self.serialize_item(tx_hash, tx_pos, tx_height) + 40 * chr(0)
        assert len(s) == 80

        serialized_hist = self.batch_list[addr]

        # Scan the 80-byte records from the end for the first height <= tx_height.
        l = len(serialized_hist) / 80
        for i in range(l - 1, -1, -1):
            item = serialized_hist[80 * i:80 * (i + 1)]
            # Height lives in bytes 36..38 of the record, little-endian hex.
            item_height = int(rev_hex(item[36:39].encode('hex')), 16)
            if item_height <= tx_height:
                serialized_hist = serialized_hist[0:80 * (
                    i + 1)] + s + serialized_hist[80 * (i + 1):]
                break
        else:
            # Nothing older found: the new item goes first.
            serialized_hist = s + serialized_hist

        self.batch_list[addr] = serialized_hist

        # backlink
        txo = (tx_hash + int_to_hex(tx_pos, 4)).decode('hex')
        self.batch_txio[txo] = addr

    def deserialize_block(self, block):
        """Parse block['tx'] into (ordered txid list, txid -> parsed tx).

        The first transaction is treated as coinbase; unparseable
        transactions are logged and skipped.
        """
        txlist = block.get('tx')
        tx_hashes = []  # ordered txids
        txdict = {}  # deserialized tx
        is_coinbase = True
        for raw_tx in txlist:
            tx_hash = hash_encode(Hash(raw_tx.decode('hex')))
            vds = deserialize.BCDataStream()
            vds.write(raw_tx.decode('hex'))
            try:
                tx = deserialize.parse_Transaction(vds, is_coinbase)
            except:
                print_log("ERROR: cannot parse", tx_hash)
                continue
            tx_hashes.append(tx_hash)
            txdict[tx_hash] = tx
            is_coinbase = False
        return tx_hashes, txdict

    def import_block(self,
                     block,
                     block_hash,
                     block_height,
                     sync,
                     revert=False):
        """Apply (or, with revert=True, undo) a block against storage.

        Records per-tx undo info when applying, consumes it when reverting,
        and invalidates the history cache of every touched address.
        """

        touched_addr = set([])

        # deserialize transactions
        tx_hashes, txdict = self.deserialize_block(block)

        # undo info
        if revert:
            undo_info = self.storage.get_undo_info(block_height)
            tx_hashes.reverse()  # undo in reverse order
        else:
            undo_info = {}

        for txid in tx_hashes:  # must be ordered
            tx = txdict[txid]
            if not revert:
                undo = self.storage.import_transaction(txid, tx, block_height,
                                                       touched_addr)
                undo_info[txid] = undo
            else:
                undo = undo_info.pop(txid)
                self.storage.revert_transaction(txid, tx, block_height,
                                                touched_addr, undo)

        if revert:
            # Every recorded undo entry must have been consumed.
            assert undo_info == {}

        # add undo info
        if not revert:
            self.storage.write_undo_info(block_height, self.bitcoind_height,
                                         undo_info)

        # add the max
        self.storage.db_undo.put(
            'height', repr(
                (block_hash, block_height, self.storage.db_version)))

        for addr in touched_addr:
            self.invalidate_cache(addr)

        self.storage.update_hashes()

    def add_request(self, session, request):
        """Answer from cache when possible, else enqueue for a worker."""
        message_id = request.get('id')
        try:
            result = self.process(request, cache_only=True)
        except BaseException as exc:
            self.push_response(session, {'id': message_id, 'error': str(exc)})
            return

        if result != -1:
            self.push_response(session, {'id': message_id, 'result': result})
        else:
            # Cache miss: defer to the processing queue.
            self.queue.put((session, request))

    def do_subscribe(self, method, params, session):
        """Register *session* for the notification stream named by *method*."""
        with self.watch_lock:
            if method == 'blockchain.numblocks.subscribe':
                if session not in self.watch_blocks:
                    self.watch_blocks.append(session)

            elif method == 'blockchain.headers.subscribe':
                if session not in self.watch_headers:
                    self.watch_headers.append(session)

            elif method == 'blockchain.address.subscribe':
                address = params[0]
                sessions = self.watched_addresses.setdefault(address, [])
                if session not in sessions:
                    sessions.append(session)

    def do_unsubscribe(self, method, params, session):
        """Remove *session* from the subscription lists for *method*."""
        with self.watch_lock:
            if method == 'blockchain.numblocks.subscribe':
                if session in self.watch_blocks:
                    self.watch_blocks.remove(session)
            elif method == 'blockchain.headers.subscribe':
                if session in self.watch_headers:
                    self.watch_headers.remove(session)
            elif method == "blockchain.address.subscribe":
                addr = params[0]
                l = self.watched_addresses.get(addr)
                if not l:
                    return
                if session in l:
                    l.remove(session)
                # A second hit means the session was registered twice —
                # treated as a fatal bookkeeping error.
                if session in l:
                    print_log("error rc!!")
                    self.shared.stop()
                if l == []:
                    self.watched_addresses.pop(addr)

    def process(self, request, cache_only=False):

        message_id = request['id']
        method = request['method']
        params = request.get('params', [])
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.storage.height

        elif method == 'blockchain.headers.subscribe':
            result = self.header

        elif method == 'blockchain.address.subscribe':
            address = str(params[0])
            result = self.get_status(address, cache_only)

        elif method == 'blockchain.address.get_history':
            address = str(params[0])
            result = self.get_history(address, cache_only)

        elif method == 'blockchain.address.get_mempool':
            address = str(params[0])
            result = self.get_unconfirmed_history(address, cache_only)

        elif method == 'blockchain.address.get_balance':
            address = str(params[0])
            confirmed = self.storage.get_balance(address)
            unconfirmed = self.get_unconfirmed_value(address)
            result = {'confirmed': confirmed, 'unconfirmed': unconfirmed}

        elif method == 'blockchain.address.get_proof':
            address = str(params[0])
            result = self.storage.get_proof(address)

        elif method == 'blockchain.address.listunspent':
            address = str(params[0])
            result = self.storage.listunspent(address)

        elif method == 'blockchain.utxo.get_address':
            txid = str(params[0])
            pos = int(params[1])
            txi = (txid + int_to_hex(pos, 4)).decode('hex')
            result = self.storage.get_address(txi)

        elif method == 'blockchain.block.get_header':
            if cache_only:
                result = -1
            else:
                height = int(params[0])
                result = self.get_header(height)

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                index = int(params[0])
                result = self.get_chunk(index)

        elif method == 'blockchain.transaction.broadcast':
            try:
                txo = self.bitcoind('sendrawtransaction', params)
                print_log("sent tx:", txo)
                result = txo
            except BaseException, e:
                result = str(e)  # do not send an error
                print_log("error:", result, params)

        elif method == 'blockchain.transaction.get_merkle':
            if cache_only:
                result = -1
            else:
                tx_hash = params[0]
                tx_height = params[1]
                result = self.get_merkle(tx_hash, tx_height)
Example #59
0
class BlockchainProcessor(Processor):

    def __init__(self, config, shared):
        """Set up caches, mempool state, storage, the bitcoind RPC URL and
        the (optionally profiled) catch-up thread."""
        Processor.__init__(self)

        # monitoring
        self.avg_time = 0,0,0  # (seconds/block, tx/s, sample count) — see print_time()
        self.time_ref = time.time()

        self.shared = shared
        self.config = config
        self.up_to_date = False

        # Subscription bookkeeping, guarded by watch_lock.
        self.watch_lock = threading.Lock()
        self.watch_blocks = []
        self.watch_headers = []
        self.watched_addresses = {}

        # History / merkle / chunk caches, guarded by cache_lock.
        self.history_cache = {}
        self.merkle_cache = {}
        self.max_cache_size = 100000
        self.chunk_cache = {}
        self.cache_lock = threading.Lock()
        self.headers_data = ''  # pending header bytes not yet flushed to disk
        self.headers_path = config.get('leveldb', 'path')

        # Mempool state, guarded by mempool_lock.
        self.mempool_fees = {}
        self.mempool_values = {}
        self.mempool_addresses = {}
        self.mempool_hist = {} # addr -> (txid, delta)
        self.mempool_unconfirmed = {} # txid -> set of unconfirmed inputs
        self.mempool_hashes = set()
        self.mempool_lock = threading.Lock()

        self.address_queue = Queue()

        try:
            self.test_reorgs = config.getboolean('leveldb', 'test_reorgs')   # simulate random blockchain reorgs
        except:
            # Option absent or malformed: disable reorg simulation.
            self.test_reorgs = False
        self.storage = Storage(config, shared, self.test_reorgs)

        self.bitcoind_url = 'http://%s:%s@%s:%s/' % (
            config.get('bitcoind', 'bitcoind_user'),
            config.get('bitcoind', 'bitcoind_password'),
            config.get('bitcoind', 'bitcoind_host'),
            config.get('bitcoind', 'bitcoind_port'))

        self.sent_height = 0
        self.sent_header = None

        # catch_up headers
        self.init_headers(self.storage.height)
        # start catch_up thread
        if config.getboolean('leveldb', 'profiler'):
            filename = os.path.join(config.get('leveldb', 'path'), 'profile')
            print_log('profiled thread', filename)
            self.blockchain_thread = ProfiledThread(filename, target = self.do_catch_up)
        else:
            self.blockchain_thread = threading.Thread(target = self.do_catch_up)
        self.blockchain_thread.start()


    def do_catch_up(self):
        """Background thread: sync to the daemon tip, then poll every 10s."""
        self.header = self.block2header(self.bitcoind('getblock', (self.storage.last_hash,)))
        self.header['utxo_root'] = self.storage.get_root_hash().encode('hex')
        self.catch_up(sync=False)
        if not self.shared.stopped():
            print_log("Blockchain is up to date.")
            self.memorypool_update()
            print_log("Memory pool initialized.")

        while not self.shared.stopped():
            self.main_iteration()
            if self.shared.paused():
                print_log("yacoind is responding")
                self.shared.unpause()
            time.sleep(10)


    def set_time(self):
        """Reset the per-block timing reference used by print_time()."""
        self.time_ref = time.time()

    def print_time(self, num_tx):
        """Update leaky averages of block time and tx throughput, and log
        progress (with an ETA while catching up)."""
        delta = time.time() - self.time_ref
        # leaky averages
        seconds_per_block, tx_per_second, n = self.avg_time
        alpha = (1. + 0.01 * n)/(n+1)  # decays toward 0.01 as n grows
        seconds_per_block = (1-alpha) * seconds_per_block + alpha * delta
        alpha2 = alpha * delta / seconds_per_block
        tx_per_second = (1-alpha2) * tx_per_second + alpha2 * num_tx / delta
        self.avg_time = seconds_per_block, tx_per_second, n+1
        # Log every 100 blocks, every 10 past height 300k, every block past 1M.
        if self.storage.height%100 == 0 \
            or (self.storage.height%10 == 0 and self.storage.height >= 300000)\
            or self.storage.height >= 1000000:
            msg = "block %d (%d %.2fs) %s" %(self.storage.height, num_tx, delta, self.storage.get_root_hash().encode('hex'))
            msg += " (%.2ftx/s, %.2fs/block)" % (tx_per_second, seconds_per_block)
            run_blocks = self.storage.height - self.start_catchup_height
            remaining_blocks = self.bitcoind_height - self.storage.height
            if run_blocks>0 and remaining_blocks>0:
                remaining_minutes = remaining_blocks * seconds_per_block / 60
                new_blocks = int(remaining_minutes / 10) # number of new blocks expected during catchup
                blocks_to_process = remaining_blocks + new_blocks
                minutes = blocks_to_process * seconds_per_block / 60
                rt = "%.0fmin"%minutes if minutes < 300 else "%.1f hours"%(minutes/60)
                msg += " (eta %s, %d blocks)" % (rt, remaining_blocks)
            print_log(msg)

    def wait_on_bitcoind(self):
        """Pause the server for 10s; raise to kill the thread on shutdown."""
        self.shared.pause()
        time.sleep(10)
        if self.shared.stopped():
            # this will end the thread
            raise BaseException()

    def bitcoind(self, method, params=()):
        """JSON-RPC call to yacoind; retries while unreachable or warming up.

        Raises BaseException for RPC errors other than -28 (still starting).
        """
        postdata = dumps({"method": method, 'params': params, 'id': 'jsonrpc'})
        while True:
            try:
                response = urllib.urlopen(self.bitcoind_url, postdata)
                r = load(response)
                response.close()
            except:
                print_log("cannot reach yacoind...")
                self.wait_on_bitcoind()
            else:
                if r['error'] is not None:
                    if r['error'].get('code') == -28:
                        # Daemon alive but not ready yet; keep waiting.
                        print_log("yacoind still warming up...")
                        self.wait_on_bitcoind()
                        continue
                    raise BaseException(r['error'])
                break
        return r.get('result')

    @staticmethod
    def block2header(b):
        """Map a getblock RPC result onto the electrum header fields."""
        return dict(
            block_height=b.get('height'),
            version=b.get('version'),
            prev_block_hash=b.get('previousblockhash'),
            merkle_root=b.get('merkleroot'),
            timestamp=b.get('time'),
            bits=int(b.get('bits'), 16),  # compact target is hex in RPC
            nonce=b.get('nonce'),
        )

    def get_header(self, height):
        """Fetch and convert the block header at *height* from the daemon."""
        block_hash = self.bitcoind('getblockhash', (height,))
        return self.block2header(self.bitcoind('getblock', (block_hash,)))

    def init_headers(self, db_height):
        """(Re)build the flat headers file up to *db_height*.

        Walks forward fetching headers from the daemon; on a prev-hash
        mismatch it backs up two heights to step past an orphaned block.
        """
        self.headers_filename = os.path.join(self.headers_path, 'blockchain_headers')

        if os.path.exists(self.headers_filename):
            height = os.path.getsize(self.headers_filename)/80 - 1   # the current height
            if height > 0:
                prev_hash = self.hash_header(self.read_header(height))
            else:
                prev_hash = None
        else:
            # No file yet: create it empty and start from scratch.
            open(self.headers_filename, 'wb').close()
            prev_hash = None
            height = -1

        if height < db_height:
            print_log("catching up missing headers:", height, db_height)

        try:
            while height < db_height:
                height += 1
                header = self.get_header(height)
                if height > 1:
                    if prev_hash != header.get('prev_block_hash'):
                        # The prev_hash block is orphaned, go back
                        print_log("reorganizing, a block in file is orphaned:", prev_hash)
                        # Go to the parent of the orphaned block
                        height -= 2
                        prev_hash = self.hash_header(self.read_header(height))
                        continue

                self.write_header(header, sync=False)
                prev_hash = self.hash_header(header)
                if (height % 1000) == 0:
                    print_log("headers file:", height)
        except KeyboardInterrupt:
            # Persist whatever was buffered before exiting.
            self.flush_headers()
            sys.exit()

        self.flush_headers()

    @staticmethod
    def hash_header(header):
        """Hash the serialized header and return it as reversed hex."""
        serialized = header_to_string(header).decode('hex')
        return rev_hex(Hash(serialized).encode('hex'))

    def read_header(self, block_height):
        """Read the 80-byte record at *block_height* from the headers file.

        Returns the parsed header, or None when the file or record is absent.
        """
        if not os.path.exists(self.headers_filename):
            return
        with open(self.headers_filename, 'rb') as f:
            f.seek(block_height * 80)
            raw = f.read(80)
        if len(raw) == 80:
            return header_from_string(raw)

    def read_chunk(self, index):
        """Return chunk *index* (2016 headers) from disk, hex-encoded."""
        offset = index * 2016 * 80
        with open(self.headers_filename, 'rb') as f:
            f.seek(offset)
            raw = f.read(2016 * 80)
        return raw.encode('hex')

    def write_header(self, header, sync=True):
        """Buffer *header*; flush when *sync* or the buffer is large.

        Also drops the now-stale chunk-cache entry covering this height.
        """
        if not self.headers_data:
            # Remember where the buffered run starts so flush can seek there.
            self.headers_offset = header.get('block_height')

        self.headers_data += header_to_string(header).decode('hex')
        if sync or len(self.headers_data) > 40*100:
            self.flush_headers()

        with self.cache_lock:
            # Integer division: one cache entry per 2016-header chunk.
            chunk_index = header.get('block_height')/2016
            if chunk_index in self.chunk_cache:
                del self.chunk_cache[chunk_index]

    def pop_header(self):
        """Drop the most recently buffered header record (pre-flush only)."""
        # we need to do this only if we have not flushed
        if not self.headers_data:
            return
        self.headers_data = self.headers_data[:-40]

    def flush_headers(self):
        """Write buffered header bytes to the headers file at their offset."""
        if not self.headers_data:
            return
        # rb+ keeps existing records; seek to the buffered run's start.
        with open(self.headers_filename, 'rb+') as f:
            f.seek(self.headers_offset*80)
            f.write(self.headers_data)
        self.headers_data = ''

    def get_chunk(self, i):
        """Return chunk *i* of headers (hex), caching successful reads."""
        # Chunks are stored on disk; only the chunks actually read back
        # are kept in memory.
        with self.cache_lock:
            cached = self.chunk_cache.get(i)
            if cached:
                return cached
            data = self.read_chunk(i)
            if data:
                self.chunk_cache[i] = data
            return data

    def get_mempool_transaction(self, txid):
        """Fetch and deserialize a mempool transaction; None on any failure."""
        try:
            raw_tx = self.bitcoind('getrawtransaction', (txid, 0))
        except:
            # Unknown txid (or daemon error): treat as not available.
            return None
        vds = deserialize.BCDataStream()
        vds.write(raw_tx.decode('hex'))
        try:
            return deserialize.parse_Transaction(vds, is_coinbase=False)
        except:
            print_log("ERROR: cannot parse", txid)
            return None

    def get_unconfirmed_history(self, addr):
        """Mempool history entries for *addr*.

        Height is -1 when the tx spends unconfirmed inputs, else 0; the
        recorded fee (possibly None) is attached to each entry.
        """
        with self.mempool_lock:
            entries = self.mempool_hist.get(addr, ())
            return [{'tx_hash': tx_hash,
                     'height': -1 if self.mempool_unconfirmed.get(tx_hash) else 0,
                     'fee': self.mempool_fees.get(tx_hash)}
                    for tx_hash, _delta in entries]

    def get_history(self, addr, cache_only=False):
        """Confirmed history for *addr* plus mempool entries.

        Returns the cached list when present, -1 when *cache_only* and
        nothing is cached.
        """
        with self.cache_lock:
            hist = self.history_cache.get(addr)
        if hist is not None:
            return hist
        if cache_only:
            return -1
        hist = self.storage.get_history(addr)
        hist.extend(self.get_unconfirmed_history(addr))
        with self.cache_lock:
            # Crude eviction: wipe everything once the cache grows too big.
            if len(self.history_cache) > self.max_cache_size:
                logger.info("clearing cache")
                self.history_cache.clear()
            self.history_cache[addr] = hist
        return hist

    def get_unconfirmed_value(self, addr):
        """Net mempool value delta for *addr*."""
        with self.mempool_lock:
            return sum(delta for _txid, delta in self.mempool_hist.get(addr, ()))

    def get_status(self, addr, cache_only=False):
        """Electrum status hash for *addr*.

        None when there is no history, '*' passed through, -1 when
        *cache_only* and the history is not cached.
        """
        tx_points = self.get_history(addr, cache_only)
        if cache_only and tx_points == -1:
            return -1

        if not tx_points:
            return None
        if tx_points == ['*']:
            return '*'
        status = ''
        for tx in tx_points:
            status += tx.get('tx_hash') + ':%d:' % tx.get('height')
        return hashlib.sha256(status).digest().encode('hex')

    def get_merkle(self, tx_hash, height, cache_only):
        """Merkle branch for *tx_hash* in the block at *height*, cached.

        Returns the cached result when present, -1 when *cache_only* and
        nothing is cached, else {'block_height', 'merkle', 'pos'}.
        """
        with self.cache_lock:
            out = self.merkle_cache.get(tx_hash)
        if out is not None:
            return out
        if cache_only:
            return -1

        block_hash = self.bitcoind('getblockhash', (height,))
        b = self.bitcoind('getblock', (block_hash,))
        tx_list = b.get('tx')
        tx_pos = tx_list.index(tx_hash)

        merkle = map(hash_decode, tx_list)
        target_hash = hash_decode(tx_hash)
        s = []
        while len(merkle) != 1:
            if len(merkle) % 2:
                # Odd level: duplicate the last hash (bitcoin convention).
                merkle.append(merkle[-1])
            n = []
            while merkle:
                new_hash = Hash(merkle[0] + merkle[1])
                # Record the sibling whenever our target is in this pair.
                if merkle[0] == target_hash:
                    s.append(hash_encode(merkle[1]))
                    target_hash = new_hash
                elif merkle[1] == target_hash:
                    s.append(hash_encode(merkle[0]))
                    target_hash = new_hash
                n.append(new_hash)
                merkle = merkle[2:]
            merkle = n

        out = {"block_height": height, "merkle": s, "pos": tx_pos}
        with self.cache_lock:
            # Crude eviction: wipe everything once the cache grows too big.
            if len(self.merkle_cache) > self.max_cache_size:
                logger.info("clearing merkle cache")
                self.merkle_cache.clear()
            self.merkle_cache[tx_hash] = out
        return out

    @staticmethod
    def deserialize_block(block):
        """Parse every raw transaction of a bitcoind 'getblock' dict.

        Returns (tx_hashes, txdict): the ordered list of txids and a map
        from txid to parsed transaction. Transactions that fail to parse
        are logged and skipped. Only the first transaction is treated as
        the coinbase.
        """
        txlist = block.get('tx')
        tx_hashes = []  # ordered txids
        txdict = {}     # deserialized tx
        is_coinbase = True
        for raw_tx in txlist:
            tx_hash = hash_encode(Hash(raw_tx.decode('hex')))
            vds = deserialize.BCDataStream()
            vds.write(raw_tx.decode('hex'))
            try:
                tx = deserialize.parse_Transaction(vds, is_coinbase)
            except Exception:
                # was a bare `except:`, which also swallowed
                # KeyboardInterrupt/SystemExit; only parse failures
                # should be skipped
                print_log("ERROR: cannot parse", tx_hash)
                continue
            tx_hashes.append(tx_hash)
            txdict[tx_hash] = tx
            is_coinbase = False  # only the first tx of a block is coinbase
        return tx_hashes, txdict



    def import_block(self, block, block_hash, block_height, revert=False):
        """Apply a block's transactions to storage, or undo them.

        With revert=False: import each transaction in order, collect the
        per-transaction undo data, and persist it via write_undo_info.
        With revert=True (reorg): replay the block's transactions in
        reverse order, consuming the stored undo data.
        Saves the new chain tip, invalidates caches for every touched
        address, and returns the number of transactions processed.
        """

        # addresses whose history changed; their caches are invalidated below
        touched_addr = set()

        # deserialize transactions
        tx_hashes, txdict = self.deserialize_block(block)

        # undo info
        if revert:
            # reorg: fetch previously stored undo data and walk backwards
            undo_info = self.storage.get_undo_info(block_height)
            tx_hashes.reverse()
        else:
            undo_info = {}

        for txid in tx_hashes:  # must be ordered
            tx = txdict[txid]
            if not revert:
                undo = self.storage.import_transaction(txid, tx, block_height, touched_addr)
                undo_info[txid] = undo
            else:
                undo = undo_info.pop(txid)
                self.storage.revert_transaction(txid, tx, block_height, touched_addr, undo)

        if revert: 
            # every transaction's undo entry must have been consumed
            assert undo_info == {}

        # add undo info
        if not revert:
            self.storage.write_undo_info(block_height, self.bitcoind_height, undo_info)

        # add the max
        self.storage.save_height(block_hash, block_height)

        for addr in touched_addr:
            self.invalidate_cache(addr)

        self.storage.update_hashes()
        # batch write modified nodes 
        self.storage.batch_write()
        # return length for monitoring
        return len(tx_hashes)


    def add_request(self, session, request):
        """Answer a request from cache when possible, else queue it.

        A cache miss is signalled by process() returning -1; any exception
        is converted into an error response for the session.
        """
        message_id = request.get('id')
        try:
            cached = self.process(request, cache_only=True)
        except BaseException as exc:
            self.push_response(session, {'id': message_id, 'error': str(exc)})
            return

        if cached != -1:
            self.push_response(session, {'id': message_id, 'result': cached})
        else:
            self.queue.put((session, request))


    def do_subscribe(self, method, params, session):
        """Register a session for block, header, or address notifications.

        Idempotent: a session already subscribed is not added twice.
        """
        with self.watch_lock:
            if method == 'blockchain.numblocks.subscribe':
                watchers = self.watch_blocks
                if session not in watchers:
                    watchers.append(session)

            elif method == 'blockchain.headers.subscribe':
                watchers = self.watch_headers
                if session not in watchers:
                    watchers.append(session)

            elif method == 'blockchain.address.subscribe':
                sessions = self.watched_addresses.setdefault(params[0], [])
                if session not in sessions:
                    sessions.append(session)


    def do_unsubscribe(self, method, params, session):
        """Remove a session's block, header, or address subscription.

        Drops the address entry entirely once its last watcher is gone.
        """
        with self.watch_lock:
            if method == 'blockchain.numblocks.subscribe':
                if session in self.watch_blocks:
                    self.watch_blocks.remove(session)
            elif method == 'blockchain.headers.subscribe':
                if session in self.watch_headers:
                    self.watch_headers.remove(session)
            elif method == "blockchain.address.subscribe":
                addr = params[0]
                sessions = self.watched_addresses.get(addr)
                if not sessions:
                    return
                try:
                    sessions.remove(session)
                except ValueError:
                    pass
                # a second occurrence means the watcher list is corrupted
                if session in sessions:
                    print_log("error rc!!")
                    self.shared.stop()
                if sessions == []:
                    del self.watched_addresses[addr]


    def process(self, request, cache_only=False):
        
        message_id = request['id']
        method = request['method']
        params = request.get('params', ())
        result = None
        error = None

        if method == 'blockchain.numblocks.subscribe':
            result = self.storage.height

        elif method == 'blockchain.headers.subscribe':
            result = self.header

        elif method == 'blockchain.address.subscribe':
            address = str(params[0])
            result = self.get_status(address, cache_only)

        elif method == 'blockchain.address.get_history':
            address = str(params[0])
            result = self.get_history(address, cache_only)

        elif method == 'blockchain.address.get_mempool':
            address = str(params[0])
            result = self.get_unconfirmed_history(address)

        elif method == 'blockchain.address.get_balance':
            address = str(params[0])
            confirmed = self.storage.get_balance(address)
            unconfirmed = self.get_unconfirmed_value(address)
            result = { 'confirmed':confirmed, 'unconfirmed':unconfirmed }

        elif method == 'blockchain.address.get_proof':
            address = str(params[0])
            result = self.storage.get_proof(address)

        elif method == 'blockchain.address.listunspent':
            address = str(params[0])
            result = self.storage.listunspent(address)

        elif method == 'blockchain.utxo.get_address':
            txid = str(params[0])
            pos = int(params[1])
            txi = (txid + int_to_hex4(pos)).decode('hex')
            result = self.storage.get_address(txi)

        elif method == 'blockchain.block.get_header':
            if cache_only:
                result = -1
            else:
                height = int(params[0])
                result = self.get_header(height)

        elif method == 'blockchain.block.get_chunk':
            if cache_only:
                result = -1
            else:
                index = int(params[0])
                result = self.get_chunk(index)

        elif method == 'blockchain.transaction.broadcast':
            try:
                txo = self.bitcoind('sendrawtransaction', params)
                print_log("sent tx:", txo)
                result = txo
            except BaseException, e:
                error = e.args[0]
                if error["code"] == -26:
                    # If we return anything that's not the transaction hash,
                    #  it's considered an error message
                    message = error["message"]
                    if "non-mandatory-script-verify-flag" in message:
                        result = "Your client produced a transaction that is not accepted by the Litecoin network any more. Please upgrade to Electrum 2.5.1 or newer\n"
                    else:
                        result = "The transaction was rejected by network rules.(" + message + ")\n" \
                            "[" + params[0] + "]"
                else:
                    result = error["message"]  # do send an error
                print_log("error:", result)

        elif method == 'blockchain.transaction.get_merkle':
            tx_hash = params[0]
            tx_height = params[1]
            result = self.get_merkle(tx_hash, tx_height, cache_only)
Example #60
0
def LOAD(c=None,
         f='index',
         args=None,
         vars=None,
         extension=None,
         target=None,
         ajax=False,
         ajax_trap=False,
         url=None,
         user_signature=False,
         timeout=None,
         times=1,
         content='loading...',
         **attr):
    """  LOAD a component into the action's document

    Timing options:
    -times: An integer or string ("infinity"/"continuous")
    specifies how many times the component is requested
    -timeout (milliseconds): specifies the time to wait before
    starting the request or the frequency if times is greater than
    1 or "infinity".
    Timing options default to the normal behavior. The component
    is added on page loading without delay.

    Returns a DIV placeholder (ajax/url mode, loaded client-side) or a
    DIV with the component rendered in-process (server-side mode).
    Raises TypeError/ValueError for unsupported times/timeout values.
    """
    from html import TAG, DIV, URL, SCRIPT, XML
    if args is None:
        args = []
    vars = Storage(vars or {})
    # every component needs a unique DOM id the ajax machinery can target
    target = target or 'c' + str(random.random())[2:]
    attr['_id'] = target
    request = current.request
    if '.' in f:
        f, extension = f.rsplit('.', 1)
    if url or ajax:
        # ajax component: emit a placeholder DIV; the browser fetches `url`
        url = url or URL(request.application,
                         c,
                         f,
                         r=request,
                         args=args,
                         vars=vars,
                         extension=extension,
                         user_signature=user_signature)
        # timing options
        if isinstance(times, basestring):
            if times.upper() in ("INFINITY", "CONTINUOUS"):
                times = "Infinity"
            else:
                raise TypeError("Unsupported times argument %s" % times)
        elif isinstance(times, int):
            if times <= 0:
                raise ValueError(
                    "Times argument must be greater than zero, 'Infinity' or None"
                )
        else:
            raise TypeError("Unsupported times argument type %s" % type(times))
        if timeout is not None:
            if not isinstance(timeout, (int, long)):
                raise ValueError("Timeout argument must be an integer or None")
            elif timeout <= 0:
                raise ValueError(
                    "Timeout argument must be greater than zero or None")
            # NOTE(review): `statement` is built but never used in this
            # branch; the w2p_* data attributes carry the timing info
            statement = "$.web2py.component('%s','%s', %s, %s);" \
                % (url, target, timeout, times)
            attr['_data-w2p_timeout'] = timeout
            attr['_data-w2p_times'] = times
        else:
            statement = "$.web2py.component('%s','%s');" % (url, target)
        attr['_data-w2p_remote'] = url
        # PEP8 E714: `is not None` (was `not target is None`); target is
        # always set above, so the ajax branch always returns here
        if target is not None:
            return DIV(content, **attr)

    else:
        # server-side component: run the controller in-process and embed
        # the rendered page directly
        if not isinstance(args, (list, tuple)):
            args = [args]
        c = c or request.controller
        other_request = Storage(request)
        other_request['env'] = Storage(request.env)
        other_request.controller = c
        other_request.function = f
        other_request.extension = extension or request.extension
        other_request.args = List(args)
        other_request.vars = vars
        other_request.get_vars = vars
        other_request.post_vars = Storage()
        other_response = Response()
        other_request.env.path_info = '/' + \
            '/'.join([request.application, c, f] +
                     map(str, other_request.args))
        other_request.env.query_string = \
            vars and URL(vars=vars).split('?')[1] or ''
        other_request.env.http_web2py_component_location = \
            request.env.path_info
        other_request.cid = target
        other_request.env.http_web2py_component_element = target
        other_response.view = '%s/%s.%s' % (c, f, other_request.extension)

        other_environment = copy.copy(current.globalenv)  # NASTY

        other_response._view_environment = other_environment
        other_response.generic_patterns = \
            copy.copy(current.response.generic_patterns)
        other_environment['request'] = other_request
        other_environment['response'] = other_response

        ## some magic here because current are thread-locals

        original_request, current.request = current.request, other_request
        original_response, current.response = current.response, other_response
        try:
            page = run_controller_in(c, f, other_environment)
            if isinstance(page, dict):
                other_response._vars = page
                other_response._view_environment.update(page)
                run_view_in(other_response._view_environment)
                page = other_response.body.getvalue()
        finally:
            # always restore the thread-locals, even if the embedded
            # controller/view raises; otherwise the worker thread keeps
            # serving with the component's request/response objects
            current.request, current.response = \
                original_request, original_response
        js = None
        if ajax_trap:
            link = URL(request.application,
                       c,
                       f,
                       r=request,
                       args=args,
                       vars=vars,
                       extension=extension,
                       user_signature=user_signature)
            js = "$.web2py.trap_form('%s','%s');" % (link, target)
        script = js and SCRIPT(js, _type="text/javascript") or ''
        return TAG[''](DIV(XML(page), **attr), script)