Code Example #1
File: main.py Project: russellnakamura/cameraobscura
def enable_debugging():
    try:
        import pudb
        pudb.set_trace()
    except ImportError as error:
        print(error)
        raise ArgumentError("`pudb` argument given but unable to import `pudb`")
Code Example #2
File: debugger.py Project: Mause/pyalp
def debugger(parser, token):
    """
    Activates a debugger session in both passes of the template renderer
    """
    pudb.set_trace()

    return DebuggerNode()
Code Example #3
File: bookrank.py Project: stenin/pld
def getRanking(isbn):
    import pudb
    pudb.set_trace()
    page = urlopen('%s%s' % (AMZN, isbn))
    data = page.read()
    page.close()
    return REGEX.findall(data)[0]
Code Example #4
File: directive_test.py Project: campaul/RefactorLib
def test_find_end_directive(example, output):
	text = open(example).read()

	from refactorlib.cheetah.parse import parse
	lxmlnode = parse(text)
	tree = lxmlnode.getroottree()

	new_output = []
	for directive in lxmlnode.xpath('//Directive'):
		new_output.append(
			'Directive: %s' % tree.getpath(directive),
		)
		if directive.is_multiline_directive:
			try:
				new_output.append(
					'End: %s' % tree.getpath(directive.get_end_directive()),
				)
			except:
				import pudb; pudb.set_trace()
				raise
		else:
			new_output.append(
				'Single-line: %s' % directive.totext()
			)
		new_output.append('')

	new_output = '\n'.join(new_output)
	assert_same_content(output, new_output)
Code Example #5
File: pyobj.py Project: Ryan311/byterun
    def __call__(self, *args, **kwargs):
        if PY2 and self.func_name in ["<setcomp>", "<dictcomp>", "<genexpr>"]:
            # D'oh! http://bugs.python.org/issue19611 Py2 doesn't know how to
            # inspect set comprehensions, dict comprehensions, or generator
            # expressions properly.  They are always functions of one argument,
            # so just do the right thing.
            assert len(args) == 1 and not kwargs, "Surprising comprehension!"
            callargs = {".0": args[0]}
        else:
            try:
                callargs = inspect.getcallargs(self._func, *args, **kwargs)
            except Exception as e:
                import pudb

                pudb.set_trace()  # -={XX}=-={XX}=-={XX}=-
                raise
        frame = self._vm.make_frame(self.func_code, callargs, self.func_globals, self.func_locals)
        CO_GENERATOR = 32  # flag for "this code uses yield"
        if self.func_code.co_flags & CO_GENERATOR:
            gen = Generator(frame, self._vm)
            frame.generator = gen
            retval = gen
        else:
            retval = self._vm.run_frame(frame)
        return retval
Code Example #6
def _handle_command_line(args):
    """Handle options and arguments from the command line"""
    parser = optparse.OptionParser()
    parser.add_option('-b', '--basename', action='store_true',
                      help='only find basenames')
    parser.add_option('-d', '--directories', action='store_true',
                      help='only locate directories')
    parser.add_option('-e', '--executables', action='store_true',
                      help='only locate executable files')
    parser.add_option('-f', '--files', action='store_true',
                      help='only locate files')
    parser.add_option('-g', '--globs', action='store_true',
                      help='match on globs')
    parser.add_option('-i', '--ignore-case', action='store_true',
                      help='ignore case in searches')
    parser.add_option('-l', '--lsld', action='store_true',
                      help='run "ls -ld" on locations')
    parser.add_option('-x', '--exclude', type='str', action='append',
                      help='exclude paths which match regexp(s)')
    parser.add_option('-U', '--Use_debugger', action='store_true',
                      help='debug with pudb')
    options, args = parser.parse_args(args)
    if options.Use_debugger:
        import pudb
        pudb.set_trace()
    return options, args
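A note on the pattern: the `-U`/`--Use_debugger` flag makes the breakpoint opt-in from the command line. The same idea with the non-deprecated argparse module might look like the sketch below (flag and function names are illustrative, not taken from the project above):

import argparse

def parse_args(argv=None):
    parser = argparse.ArgumentParser()
    parser.add_argument('-U', '--use-debugger', action='store_true',
                        help='debug with pudb')
    args = parser.parse_args(argv)
    if args.use_debugger:
        import pudb
        pudb.set_trace()  # drop into the debugger as soon as options are parsed
    return args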
Code Example #7
File: custom-site.py Project: maggietong/dot
def execusercustomize():
    """Run custom user specific code, if available."""
    try:
        import usercustomize
    except ImportError:
        import pudb ; pudb.set_trace()
        pass
Code Example #8
File: generate_list.py Project: Superman8218/emerald
def generate_email_csv(csv_input):
    """Takes a csv of qualified (already won a contract) companies from usaspending.gov and uses their duns numbers to get their email addresses"""
    # Get a pandas dataframe column with all of the relevant duns numbers

    df = pd.read_csv(csv_input)
    duns_numbers = df.dunsnumber.tolist()

    # Gets the file number for the current file by taking the max of all of the other numbers in the lists directory and adding one to the highest number

    non_decimal = re.compile(r'[^\d]+')
    file_number_list = [int(non_decimal.sub('', file)) for file in listdir('mail/lists')]
    file_number = max(file_number_list)+1 if file_number_list else 1

    file_name = 'mail/lists/email_{0}.csv'.format(file_number)

    # Actually get the emails

    sam_qs = SamRecord.objects.all().filter(duns__in=duns_numbers)[:100]

    results = set([])

    pudb.set_trace()

    for sam in sam_qs:
        email = sam.email_address
        if email:
            results.add(email)

    with open(file_name, 'w') as f:
        for email in results:
            f.write(email+"\n")
Code Example #9
File: test.py Project: marcwebbie/subp
 def test_input(self):
     import pudb; pudb.set_trace()
     test_string = 'asdfQWER'
     r = subp.connect("cat | tr [:lower:] [:upper:]")
     r.send(test_string)
     self.assertEqual(r.std_out, test_string.upper())
     self.assertEqual(r.status_code, 0)
Code Example #10
File: peers.py Project: alkimist/Katastrophe
 def chunkToSixBytes(self, peerString):
     for i in xrange(0, len(peerString), 6):
         chunk = peerString[i:i+6]
         if len(chunk) < 6:
             import pudb; pudb.set_trace()
             raise IndexError("Size of the chunk was not six bytes.")
         yield chunk
Code Example #11
def handle_issue(header, text, issue_id):
    if not issue_id:
        pudb.set_trace()
    if is_boring_issue(issue_id):
        return "Handled: boring issue %s" % issue_id
    output = read_issue(issue_id)
    if output:
        issue = parse_issue(output)
        if personal.personal_name() in issue["assigned to"]:
            show_heading("Assigned to me")
            print output
            show_date(header)
            show_some(text)
            return confirm_reading()
        if issue["status"] == "Closed":
            return "Handled: closed issue %s" % issue_id
    if assigned_to_someone_else(text):
        return "Handled: Issue assigned to someone else %s" % issue_id
    if issue_id not in shown_issues:
        if output:
            print output
        else:
            show_heading("Issue email")
        shown_issues.add(issue_id)
    show_subject(header)
    show_from_name(header)
    show_date(header)
    show_some(text)
    reply = confirm_boring()
    if reply == "boring":
        add_boring_issue(issue_id)
    return reply
Code Example #12
File: what.py Project: io41/what
def read_command_line():
    """Look for options from user on the command line for this script"""
    parser = optparse.OptionParser(
        'Usage: what [options] command\n\n%s' % __doc__)
    parser.add_option('-e', '--hide_errors', action='store_true',
                      help='hide error messages from successful commands')
    parser.add_option('-f', '--file', action='store_true',
                      help='show real path to file (if it is a file)')
    parser.add_option('-q', '--quiet', action='store_true',
                      help='do not show any output')
    parser.add_option('-v', '--verbose', action='store_true',
                      help='whether to show more info, such as file contents')
    parser.add_option('-A', '--aliases', default='/tmp/aliases',
                      help='path to file which holds aliases')
    parser.add_option('-F', '--functions', default='/tmp/functions',
                      help='path to file which holds functions')
    parser.add_option('-U', '--debugging', action='store_true',
                      help='debug with pudb (or pdb if pudb is not available)')
    options, arguments = parser.parse_args()
    if options.debugging:
        try:
            import pudb as pdb
        except ImportError:
            import pdb
        pdb.set_trace()
    # pylint does not seem to notice that methods are globals
    # pylint: disable=global-variable-undefined
    global get_options
    get_options = lambda: options
    return arguments
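The try/except import at the end is a common fallback idiom: use pudb when it is installed, otherwise degrade to the stdlib pdb, which shares the set_trace API. Distilled into a reusable helper (a sketch; the helper name is illustrative):

def set_trace():
    """Break into pudb if available, otherwise fall back to pdb."""
    try:
        import pudb as debugger
    except ImportError:
        import pdb as debugger
    debugger.set_trace()

Note that the debugger initially stops inside the helper's own frame; one step out returns to the caller.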
Code Example #13
File: tests.py Project: adw0rd/django-sphinxsearch
    def setUp(self):
        from django.db import connection
        from django.db.models.base import ModelBase
        from django.core.management.color import no_style
        from django_sphinxsearch.managers import SearchManager

        # Create a dummy model which extends the mixin
        import pudb; pudb.set_trace()
        self.model = ModelBase('__TestModel__{}'.format(self.mixin.__name__), (self.mixin, ),
                { '__module__': self.mixin.__module__ })
        # Create the schema for our test model
        self._style = no_style()
        sql, _ = connection.creation.sql_create_model(self.model, self._style)
        self._cursor = connection.cursor()
        for statement in sql:
            self._cursor.execute(statement)

        self.model.search = SearchManager(index="test_index", fields={'data': 100}, limit=10)
        self.model.search.contribute_to_class(model=self.model, name="search")

        source_data = (
            "Python is a programming language that lets you work more quickly and integrate your systems more effectively.",
            "You can learn to use Python and see almost immediate gains in productivity and lower maintenance costs.",
            "Python runs on Windows, Linux/Unix, Mac OS X, and has been ported to the Java and .NET virtual machines.",
            "Python is free to use, even for commercial products, because of its OSI-approved open source license.",
            "New to Python or choosing between Python 2 and Python 3? Read Python 2 or Python 3.",
            "The Python Software Foundation holds the intellectual property rights behind Python, underwrites the PyCon conference, and funds other projects in the Python community."
        )
        for pk, data in enumerate(source_data, start=1):
            instance = self.model(pk=pk, data=data)
            instance.save()
Code Example #14
File: mappingDb.py Project: scottyob/infoblox
    def fetch(self):
        """Attempt to fetch the object from the Infoblox device. If successful
        the object will be updated and the method will return True.

        :rtype: bool
        :raises: infoblox.exceptions.ProtocolError

        """
        from pudb import set_trace; set_trace()
        
        self._search_values = self._build_search_values({})
        
        LOGGER.debug('Fetching %s, %s', self._path, self._search_values)
        response = self._session.get(self._path, self._search_values,
                                     {'_return_fields': self._return_fields})
        if response.status_code == 200:
            values = response.json()
            self._assign(values)
            return bool(values)
        elif response.status_code >= 400:
            try:
                error = response.json()
                raise exceptions.ProtocolError(error['text'])
            except ValueError:
                raise exceptions.ProtocolError(response.content)
        return False
Code Example #15
    def test_003_square3_ff(self):
        src_data0 = (0, 1, 0, 1, 0)
        src_data1 = (-3.0, 4.0, -5.5, 2.0, 3.0)
        src_data2 = (2.0, 2.0, 2.0, 2.0, 2.0)
        src_data3 = (2.7, 2.7, 2.1, 2.3, 2.5)
        expected_result0 = (-3.0, 2.0, -5.5, 2.0, 3.0)
        expected_result1 = (-3.0, 2.7, -5.5, 2.3, 3.0)
        src0 = blocks.vector_source_f(src_data0)
        src1 = blocks.vector_source_f(src_data1)
        src2 = blocks.vector_source_f(src_data2)
        src3 = blocks.vector_source_f(src_data3)
        sqr = square3_ff()
        dst0 = blocks.vector_sink_f()
        dst1 = blocks.vector_sink_f()
        self.tb.connect(src0, (sqr, 0))
        self.tb.connect(src1, (sqr, 1))
        self.tb.connect(src2, (sqr, 2))
        self.tb.connect(src3, (sqr, 3))
        self.tb.connect((sqr, 0), dst0)
        self.tb.connect((sqr, 1), dst1)
        self.tb.run()
        result_data0 = dst0.data()
        result_data1 = dst1.data()
        from pudb import set_trace

        set_trace()
        self.assertFloatTuplesAlmostEqual(expected_result0, result_data0, 6)
        self.assertFloatTuplesAlmostEqual(expected_result1, result_data1, 6)
Code Example #16
File: demo.py Project: thomasballinger/raycasting
def test():
    webbrowser.open_new_tab('http://i.imgur.com/GIdn4.png')
    time.sleep(1)
    webbrowser.open_new_tab('http://imgur.com/a/EMy4e')

    w = World()
    w.add_object(Sphere((0,0,0), 1))
    w.add_object(Sphere((3,0,0), 1))
    w.add_object(Sphere((0,4,0), 2))
    w.add_object(Sphere((3,0,2), 2))
    w.add_object(Sphere((-3,-3,-3), 2, 1))

    raw_input()

    # imitation light
    #w.add_object(Sphere((100,100,0), 80, 0, .95))

    w.add_light(Light((100, 100, 0)))

    w.add_object(Checkerboard(((0,-5,0), (0,-5, 5)), ((0,-5,0),(5,-5,0))))

    #w.add_view(View(((0,0,-5), (2,0,-4)), ((0,0,-5), (0,2,-5)), -4))
    #w.add_view(View(((0,0,-3), (2,0,-3)), ((0,0,-3), (0,2,-3)), -4))
    w.add_view(View(((0,0,-5), (2,0,-6)), ((0,0,-5), (0,2,-5)), -4))
    #w.add_view(View(((0,0,-100), (2,0,-100)), ((0,0,-100), (0,2,-100)), -4))

    print w

    #w.render_images(10, 10, 7, 7)
    #w.render_images(1680, 1050, 7, 7)
    #w.render_asciis(220, 100, 5, 5)
    import pudb; pudb.set_trace();
    w.debug_render_view(w.views[0], 10, 10, 5, 5)
Code Example #17
File: map.py Project: airena/airena
 def get_gid(self, gid):
     first = self._first_gid
     last = self._first_gid
     if gid >= self._first_gid and gid <= self.last_gid:
         for sheet in self._sheets:
             if gid >= sheet.first_gid and gid <= sheet.last_gid:
                 pudb.set_trace()
                 return sheet.get_gid(gid)
Code Example #18
File: Game.py Project: arcticshores/Myrmidon
    def app_loop_callback(cls, dt):
        cls.engine['window'].app_loop_tick()

        # If we need to register something
        if cls.first_registered_entity:
            cls.entity_register(cls.first_registered_entity)
            cls.first_registered_entity = None

        # Reorder Entities by execution priority if necessary
        if cls.entity_priority_dirty == True:
            cls.entity_list.sort(
                reverse=True,
                key=lambda object:
                object.priority if hasattr(object, "priority") else 0
                )
            cls.entity_priority_dirty = False

        # If we have an input engine enabled we pass off to it
        # to manage and process input events.
        if cls.engine['input']:
            cls.engine['input'].process_input()

        if cls.debug and cls.keyboard_key_released(K_F11):
            from pudb import set_trace; set_trace()

        # For each entity in priority order we iterate their
        # generators executing their code
        if not cls.disable_entity_execution:
            for entity in cls.entity_list:
                cls.current_entity_executing = entity
                entity._iterate_generator()
                if cls.disable_entity_execution:
                    if not cls.screen_overlay is None:
                        cls.current_entity_executing = cls.screen_overlay
                        cls.screen_overlay._iterate_generator()
                    break
        else:
            if not cls.screen_overlay is None:
                cls.current_entity_executing = cls.screen_overlay
                cls.screen_overlay._iterate_generator()

        # If we have marked any entities for removal we do that here
        for x in cls.entities_to_remove:
            if x in cls.entity_list:
                cls.engine['gfx'].remove_entity(x)
                cls.entity_list.remove(x)
        cls.entities_to_remove = []

        # Pass off to the gfx engine to display entities
        cls.engine['gfx'].update_screen_pre()
        cls.engine['gfx'].draw_entities(cls.entity_list)
        cls.engine['gfx'].update_screen_post()

        # Wait for next frame, hitting a particular fps
        cls.fps = int(cls.clock.get_fps())
        cls.clock.tick(cls.current_fps)
Code Example #19
File: pig.py Project: fpischedda/yaff
    def __init__(self, *args, **kwargs):

        print(args, kwargs)
        from pudb import set_trace
        set_trace()
        super(Pig, self).__init__(direction=(0, 0),
                                  speed=0,
                                  gravity=9.8,
                                  *args, **kwargs)

        self.status = self.STATUS_ALIVE
Code Example #20
File: test_lists.py Project: jalanb/pym
 def test_lists(self):
     line_compare = lists.attribute_comparison('line', 44)
     lines = [lists.Line(i, l) for i, l in enumerate(
         open(__file__).read().splitlines(), 1)]
     actual = lists._search(lines, line_compare)
     expected = None
     try:
         self.assertEqual(actual, expected)
     except:  # pylint: disable=bare-except
         import pudb
         pudb.set_trace()
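When the breakpoint only matters after an exception, pudb's post-mortem entry point is often a better fit than set_trace: it opens the debugger on the traceback of the exception currently being handled, so you land in the frame that failed. A self-contained sketch of the same test pattern (assumes pudb is installed; the test itself is illustrative):

import unittest

class ExampleTest(unittest.TestCase):
    def test_numbers(self):
        try:
            self.assertEqual(1 + 1, 3)  # deliberately failing assertion
        except AssertionError:
            import pudb
            pudb.post_mortem()  # inspect the frame where the assertion failed
            raise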
Code Example #21
File: phonebook.py Project: mlauter/hsphonebook
def add(name, number, phonebook):
    from pudb import set_trace; set_trace()
    pb_id = db.get_phonebook_id(phonebook)
    if pb_id:
        status = db.add_entry(pb_id[0], (name, number))
        if status:
            print "%s added to %s with number %s" % (name, phonebook, number)
        else:
            print "Error: name: %s or number: %s already present in %s" % (name,number , phonebook)
    else:
        print "Error: phonebook does not exist"
Code Example #22
def _use_debugger(args):
    if not args.Use_debugger:
        return
    try:
        import pudb as pdb
    except ImportError:
        import pdb
    pprint(args)
    pdb.set_trace()
    import inspect
    inspect.currentframe().f_back.f_locals
Code Example #23
File: peers.py Project: AchillesA/bittorrent
 def chunkToSixBytes(self, peerString):
     """
     Helper function to convert the string to 6 byte chunks.
     4 bytes for the IP address and 2 for the port.
     """
     for i in xrange(0, len(peerString), 6):
         chunk = peerString[i:i+6]
         if len(chunk) < 6:
             import pudb; pudb.set_trace()
             raise IndexError("Size of the chunk was not six bytes.")
         yield chunk
Code Example #24
File: vimPyTest.py Project: MisterUser/pySpace
def main():
    import pudb; pudb.set_trace()  # XXX BREAKPOINT
    print("neat")

    x = 1
    for i in range(1, 100):
        print('x={}'.format(x))
        x += 1
    obj = ClassExample(np.arange(10).reshape(2, 5))
    print(obj.ar)

    print('\nEnd\n')
Code Example #25
File: responders.py Project: jmkogut/winnie
    def trace_handler(self, connection, event):
        """
        Opens a trace in the console. Do not use unless you have access to winnie's terminal.
        """
        message = event.arguments()[0].split(' ')

        if len(message) is not 1:
            return "Usage is %strace" % self.handler_prefix
        else:
            from pudb import set_trace; set_trace()

            return "done with trace"
Code Example #26
File: main.py Project: arne-cl/fernbus
def parse_result(session, result):
	# WTF? The site has two different 'no results' errors in different locations
	if NO_RESULTS_REGEX.search(result.text):
		raise NoResultsError("Busliniensuche.de didn't find any matching bus connections.")
	if result.xpath('div[1]/div[2]'):
		if NO_RESULTS_REGEX.search(result.xpath('div[1]/div[2]')[0].text):
			raise NoResultsError("Busliniensuche.de didn't find any bus connections.")

	try:
		departure = result.xpath('div[1]/div/div[1]/div[1]')[0]
		departure_date = departure.xpath('div[1]/span[1]')[0].text
		departure_time = departure.xpath('div[1]/span[3]')[0].text
		departure_stop = departure.xpath('div[2]/span')[0].text

		duration = result.xpath('div[1]/div/div[1]/div[2]')[0]
		trip_duration = duration.xpath('div[1]')[0].text

		# how often do we have to switch buses?
		changeovers_span = duration.xpath('div[2]/span/span')
		if changeovers_span:
			number_of_changeovers = int(changeovers_span[0].text)
		else:
			number_of_changeovers = 0

		arrival = result.xpath('div[1]/div/div[1]/div[3]')[0]
		arrival_date = arrival.xpath('div[1]/span[1]')[0].text
		arrival_time = arrival.xpath('div[1]/span[3]')[0].text
		arrival_stop = arrival.xpath('div[2]/span')[0].text

		price_str = result.xpath('div[1]/div/div[2]/div[2]/div[1]/span[2]/strong')[0].text.split()[0]
		price = u"{0:.2f} €".format(float(COMMA_REGEX.sub('.', price_str)))

		try:
			company = result.xpath('div[1]/div/div[2]/div[1]/div[1]/div[1]/span[2]')[0].text
		except: # if the connection has changeovers, there might be more than one company
			company = result.xpath('div[1]/div/div[2]/div[1]/div[2]/div[2]')[0].text

		return Connection(
			departure_date=departure_date, departure_time=departure_time,
			departure_stop=departure_stop, trip_duration=trip_duration,
			number_of_changeovers=number_of_changeovers, arrival_date=arrival_date,
			arrival_time=arrival_time, arrival_stop=arrival_stop, price=price,
			company=company)
	except:
		error_msg = "Can't parse results. See error.png/htm for details.\n{}".format(traceback.format_exc())
		LOGGER.debug(error_msg)
		session.driver.render('error.png')
		with open('error.htm', 'w') as html_file:
			html_file.write(etree.tostring(session.driver.document()))
		if DEBUG:
			pudb.set_trace()
		else:
			raise BusliniensucheParsingError(error_msg)
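Gating the breakpoint on a DEBUG flag, as this handler does, keeps the interactive trace out of unattended runs while still failing loudly. A minimal sketch of the same guard driven by an environment variable (all names are illustrative):

import os

DEBUG = bool(os.environ.get('DEBUG'))

def parse(text):
    return int(text)  # stand-in for the real parsing work

def handle(text):
    try:
        return parse(text)
    except Exception:
        if DEBUG:
            import pudb
            pudb.set_trace()  # break only when DEBUG is set in the environment
        raise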
Code Example #27
def add_sub_dirs(paths):
    """Add all sub-directories for the directories of the paths"""
    dirs = {p.directory() for p in paths}
    result = dirs.copy()
    for path_to_dir in dirs:
        for sub_dir in path_to_dir.walkdirs():
            import pudb
            pudb.set_trace()
            if sub_dir.in_git_repo():
                continue
            result.add(sub_dir)
    return result
Code Example #28
def phone_number_to_alphanumber(number_string):
    try:
        m = re.match(r'\d-\d{3}-([A-Z0-9]{3}[A-Z0-9]{4})', number_string)
        if not m:
            raise ValueError
    except:
        print "Ooops, the phone number format is unrecognized"
        import pudb; pudb.set_trace()
        raise ValueError

    last_five_digits = m.group(1)
    result = ''.join([phone_literal_to_number(x) for x in number_string])
    return result
Code Example #29
            def activate_debugger(parent):
                try:
                    plugin = get_service_instance_by_name(
                        'wheelerlab.dmf_control_board')
                    control_board = plugin.control_board
                except KeyError:
                    plugin = None
                    control_board = None

                if PUDB_AVAILABLE:
                    pudb.set_trace()
                else:
                    pdb.set_trace()
Code Example #30
File: convert.py Project: vilcans/ardour-import
def create_object(parent, type_name, attributes):
    function_name = 'new_' + type_name
    try:
        constructor = globals()[function_name]
    except KeyError:
        raise RuntimeError('No creator function with name ' + function_name)

    try:
        return constructor(parent=parent, **attributes)
    except Exception as e:
        print str(constructor) + ' failed:' + str(e)
        import pudb; pudb.set_trace()
        raise e
Code Example #31
def test_fractionToDecimal():
    numerator = 1
    denominator = 2
    # pudb.set_trace()
    res = fractionToDecimal(numerator, denominator)
    assert res == '0.5'

    numerator = 1
    denominator = 3
    # pudb.set_trace()
    res = fractionToDecimal(numerator, denominator)
    assert res == '0.(3)'

    numerator = -1
    denominator = 3
    # pudb.set_trace()
    res = fractionToDecimal(numerator, denominator)
    assert res == '-0.(3)'

    numerator = 4
    denominator = 333
    pudb.set_trace()
    res = fractionToDecimal(numerator, denominator)
    assert res == '0.(012)'
Code Example #32
def test_find_end_directive(example, output):
    text = open(example).read()

    from refactorlib.cheetah.parse import parse
    lxmlnode = parse(text)
    tree = lxmlnode.getroottree()

    new_output = []
    for directive in lxmlnode.xpath('//Directive'):
        new_output.append('Directive: %s' % tree.getpath(directive), )
        if directive.is_multiline_directive:
            try:
                new_output.append(
                    'End: %s' % tree.getpath(directive.get_end_directive()), )
            except:
                import pudb
                pudb.set_trace()
                raise
        else:
            new_output.append('Single-line: %s' % directive.totext())
        new_output.append('')

    new_output = '\n'.join(new_output)
    assert_same_content(output, new_output)
Code Example #33
def main():
    # pylint: disable=attribute-defined-outside-init
    # pylint: disable=protected-access
    import sys

    import pudb  # pylint: disable=import-error

    pudb.set_trace()

    if len(sys.argv) < 2:
        print("No test json specified as input")
        exit(0)

    with open(sys.argv[1]) as fin:
        in_json = fin.read()
        in_json = json.loads(in_json)
        print(json.dumps(in_json, indent=4))

        connector = VMRayConnector()
        connector.print_progress_message = True
        ret_val = connector._handle_action(json.dumps(in_json), None)
        print(json.dumps(json.loads(ret_val), indent=4))

    exit(0)
Code Example #34
File: global_defines.py Project: avedensky/crawlers
def bp():  # set a breakpoint for pudb (debugger)
    from pudb import set_trace; set_trace()
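Such a helper makes the call site a one-liner: drop bp() wherever you want to stop. Keep in mind the debugger first stops inside bp() itself, so one step out returns you to the code under inspection. A usage sketch (the function is illustrative):

def slow_sum(items):
    total = 0
    for item in items:
        bp()  # opens pudb on each iteration
        total += item
    return total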
Code Example #35
File: term_search.py Project: syllogy/GitHub-Audit
def ag_call(func,
            *args,
            expected_rc=None,
            new_only=True,
            headers=None,
            no_cache=False,
            **kwargs):
    """
    Wrap AGitHub calls with basic error detection

    Not smart, and hides any error information from caller.
    But very convenient. :)
    """
    def query_string():
        return urllib.parse.quote_plus(kwargs["q"])

    if not headers:
        headers = {}
    url = func.keywords["url"]
    # Insert our (possibly modified) headers
    real_headers = kwargs.setdefault("headers", {})
    real_headers.update(headers)

    if expected_rc is None:
        expected_rc = [200, 304]
    rc, body = func(*args, **kwargs)
    # If we have new information, we want to use it (and store it unless
    # no_cache is true)
    # If we are told our existing info is ok, or there's an error, use the
    # stored info
    # Handle repo rename/removal corner cases
    if rc == 301:
        logger.error("Permanent Redirect for '{}'".format(url))
        # TODO: do something better, like switch to using id's
        # for now, act like nothing is there
        body = []
    elif rc == 403 and rc not in expected_rc:
        # don't throw on this one, but do show query string
        # for search, there is a separate rate limit we don't yet take into
        # account:
        #  https://developer.github.com/v3/search/#rate-limit
        logger.error("403 for query string '{}'".format(query_string()))
        logger.error("response: '{}'".format(repr(body)))
        expected_rc.append(rc)
    elif rc == 404 and rc not in expected_rc:
        logger.error("No longer available or access denied: {}".format(url))
        # TODO: Figure out what to do here. Maybe it's just that message, but
        # maybe need to delete from DB before next run
        body = []
        # don't throw on this one
        expected_rc.append(rc)
    logger.debug("{} for {}".format(rc, url))

    if rc not in expected_rc:
        if DEBUG:
            import pudb

            pudb.set_trace()  # noqa: E702
        else:
            logger.error("{} for {}".format(rc, url))
            raise AG_Exception
    return body
Code Example #36
File: parser.py Project: kashenfelter/emerald
def parse_file(file_path):
    index = 0
    current_tag = ''
    complement = ''
    previous_line = ''
    last_field_tag = ''
    master = None

    with open(file_path, 'r') as f:
        set_trace()
        lines = [line.strip('\n') for line in list(f)]
        while index < len(lines):
            line = lines[index]
            tag = extract_tag(line)
            if not line:
                index += 1
                continue

            # 4 cases:

            # 1) Current tag not set

            if not current_tag:
                current_tag = tag
                complement = get_tag_complement(current_tag)
                master = FboMaster()
                master.save()  # Necessary to add contacts later

            # 2) Complement tag

            elif line == complement:
                current_tag = ''
                try:
                    master.save()
                except Exception as ex:
                    logger.error(
                        'Unable to save FboMaster record.\nFile: {0}\nSolnbr: {1}'
                        .format(file_path, master.solnbr))

            # 3) Field tag

            elif contains_field_tag(line):
                if is_complex_tag(tag):
                    handler_function = getattr(parse_helpers, tag)

                    # Tags that eat the next 2 lines:
                    if is_compound_tag(tag):
                        input_lines = [lines[index + 1], lines[index + 2]]
                        cleaned_lines = [
                            clean_line(line) for line in input_lines
                        ]
                        index += 2
                    # Normal tags, one line
                    else:
                        cleaned_lines = clean_line(line)

                    handler_function(master, cleaned_lines)

                else:
                    set_field(master, line, tag)
                last_field_tag = tag

            # 4) Text tag

            else:
                set_field(master, line, last_field_tag)
                pass

            # Increment the index

            index += 1
Code Example #37
File: pos_restaurant.py Project: bala07123/odoo
 def _check_pos_sessions(self, error_msg):
     import pudb; pudb.set_trace()
Code Example #38
	def weird_method(self, some_class):
		# NOTE: using super improperly!
		import pudb; pudb.set_trace()
		super(some_class, self).weird_method()
		print("weird_method: D")
Code Example #39
File: prmlp.py Project: caglar/prmlp
    def pretest(self,
            dataset=None,
            save_costs_to_file=False,
            presences=None):

        if dataset is None or presences is None:
            raise Exception("Dataset or presences for training can't be None.")

        test_set_patches, test_set_pre = dataset, presences
        # compute number of minibatches for training, validation and testing
        n_test_batches = test_set_patches.get_value(borrow=True).shape[0] / self.batch_size

        pre_minitest_probs = numpy.zeros((test_set_patches.get_value(borrow=True).shape[0], self.n_out))

        ######################
        # Testing  the MODEL #
        ######################
        print '... pre-testing the model'

        # allocate symbolic variables for the data
        index = T.lscalar()    # index to a [mini]batch
        y = T.ivector('y')  # the labels are presented as 1D vector of presences

        p_y_given_x = self.class_memberships

        test_model = theano.function(
            inputs=[index],
            outputs=[self.errors(y), p_y_given_x],
            givens={
                self.input: test_set_patches[index * self.batch_size : (index + 1) * self.batch_size],
                y: test_set_pre[index * self.batch_size : (index + 1) * self.batch_size]
            }
        )

        #TODO this is wrong, inputs should be the output of hidden layer, fix it.
        """
        class_memberships = theano.function(inputs=[index], outputs=p_y_given_x,
                givens={
                    self.input: test_set_patches[index * self.batch_size : (index + 1) * self.batch_size]})
        """

        test_losses = []
        test_score = 0

        for minibatch_index in xrange(n_test_batches):
            test_losses, membership_probs = test_model(minibatch_index)
            pre_minitest_probs[minibatch_index * self.batch_size: (minibatch_index + 1) * self.batch_size] = membership_probs
            test_score = numpy.mean(test_losses)
            print("Minibatch %i, mean test error %f" % (minibatch_index, test_score))

        import pudb; pudb.set_trace()

        self.test_scores.append(test_score)

#        class_memberships = theano.function(inputs=[index], outputs=p_y_given_x,
#                givens={
#                    self.input: self.hiddenLayer.output[index * self.batch_size : (index + 1) * self.batch_size]},
#                mode="DEBUG_MODE")
#        data = T.matrix('data')
#        p_y_given_x = self.class_memberships(self.input)
#        class_memberships = theano.function(inputs=[data], outputs=p_y_given_x)
#        pre_minitest_probs = class_memberships(self.hiddenLayer.output)
#        for minibatch_index in xrange(n_test_batches):
#            membership_probs = numpy.array(class_memberships(minibatch_index))
#            pre_minitest_probs[minibatch_index * self.batch_size: (minibatch_index + 1) * self.batch_size] = membership_probs

        self.pretrain_test_probs = pre_minitest_probs
#        if save_costs_to_file:
#            numpy.save(cost_file, test_losses)
        return self.pretrain_test_probs
Code Example #40
 def __init__(self):
     import pudb
     pudb.set_trace()
     super(self.__class__, self).__init__()
     print("init C")
Code Example #41
def configure(repl):
    """Configure pym"""
    repl.confirm_exit = False
    pudb.set_trace()
Code Example #42
File: prmlp.py Project: caglar/prmlp
    def train(self,
            data=None,
            labels=None,
            save_costs_to_file=False,
            cost_type=Costs.Crossentropy,
            presences=None):

        train_set_patches = self._shared_dataset(data, name="training_set")
        train_set_pre = T.cast(self._shared_dataset(labels, name="train_labels"), 'int32')


        # compute number of minibatches for training, validation and testing
        n_train_batches = train_set_patches.get_value(borrow=True).shape[0] / self.batch_size

        pre_minitrain_probs = numpy.zeros((train_set_patches.get_value(borrow=True).shape[0], self.n_out))

        ######################
        # train the MODEL #
        ######################
        print '... training the model'

        # allocate symbolic variables for the data
        index = T.lscalar()    # index to a [mini]batch
        y = T.ivector('y')  # the labels are presented as 1D vector of presences

        # construct the MLP class
        # the cost we minimize during training is the negative log likelihood of
        # the model plus the regularization terms (L1 and L2); cost is expressed
        # here symbolically.

        cost = None
        if cost_type == Costs.Crossentropy:
            cost = self.crossentropy_categorical(y) \
                    + self.L1_reg * self.L1 \
                    + self.L2_reg * self.L2_sqr
        elif cost_type == Costs.NegativeLikelihood:
            cost = self.negative_log_likelihood(y) \
                    + self.L1_reg * self.L1 \
                    + self.L2_reg * self.L2_sqr

        p_y_given_x = self.class_memberships

        gparams = []
        for param in self.params:
            gparam = T.grad(cost, param)
            gparams.append(gparam)

        # specify how to update the parameters of the model as a dictionary
        updates = {}

        # given two list the zip A = [a1, a2, a3, a4] and B = [b1, b2, b3, b4] of
        # same length, zip generates a list C of same size, where each element
        # is a pair formed from the two lists :
        #    C = [(a1, b1), (a2, b2), (a3, b3) , (a4, b4)]
        for param, gparam in zip(self.params, gparams):
            updates[param] = param - self.learning_rate * gparam

        # compiling a Theano function `train_model` that returns the cost, but
        # in the same time updates the parameter of the model based on the rules
        # defined in `updates`
        train_model = theano.function(inputs=[index], outputs=[cost, p_y_given_x],
                updates=updates,
                givens={
                    self.input: train_set_patches[index * self.batch_size:(index + 1) * self.batch_size],
                    y: train_set_pre[index * self.batch_size:(index + 1) * self.batch_size]})

        epoch = 0
        costs = []
        Ws = []
        while (epoch < self.n_epochs):
            for minibatch_index in xrange(n_train_batches):
                minibatch_avg_cost, membership_probs = train_model(minibatch_index)
                costs.append(minibatch_avg_cost)
                Ws.append(self.logRegressionLayer.W.get_value())
                pre_minitrain_probs[minibatch_index * self.batch_size: (minibatch_index + 1) * self.batch_size] = membership_probs
            epoch += 1

        import pudb; pudb.set_trace()

        self.train_train_probs = pre_minitrain_probs
        return costs
Code Example #43
File: Pwiz.py Project: MathiasDeWeerdt/jumpscaleX
    def codeModel(self):
        database = self.introspector.introspect(table_names=None)
        out = ""

        out += TEMPLATE % (self.introspector.get_additional_imports(),
                           self.introspector.get_database_class().__name__,
                           self.introspector.get_database_name(),
                           repr(self.introspector.get_database_kwargs()))

        self._log_debug("INTROSPECTION DONE")

        def _process_table(out, table):
            self._log_debug("Process table:%s" % table)
            # accum = accum or []
            # foreign_keys = database.foreign_keys[table]
            # for foreign_key in foreign_keys:
            #     dest = foreign_key.dest_table
            #
            #     # In the event the destination table has already been pushed
            #     # for printing, then we have a reference cycle.
            #     if dest in accum and table not in accum:
            #         out += '# Possible reference cycle: %s\n' % dest
            #
            #     # If this is not a self-referential foreign key, and we have
            #     # not already processed the destination table, do so now.
            #     if dest not in seen and dest not in accum:
            #         seen.add(dest)
            #         if dest != table:
            #             out += _process_table(out, dest, accum + [table])

            out += 'class %s(BaseModel):\n' % database.model_names[table]
            columns = database.columns[table].items()
            columns = sorted(columns)
            primary_keys = database.primary_keys[table]
            for name, column in columns:
                skip = all([
                    name in primary_keys, name == 'id',
                    len(primary_keys) == 1, column.field_class
                    in self.introspector.pk_classes
                ])
                if skip:
                    continue
                if column.primary_key and len(primary_keys) > 1:
                    # If we have a CompositeKey, then we do not want to explicitly
                    # mark the columns as being primary keys.
                    column.primary_key = False

                out += '    %s\n' % column.get_field()

            out += '\n'
            out += '    class Meta:\n'
            out += '        db_table = \'%s\'\n' % table
            multi_column_indexes = database.multi_column_indexes(table)
            if multi_column_indexes:
                out += '        indexes = (\n'
                for fields, unique in sorted(multi_column_indexes):
                    out += '            ((%s), %s),\n' % (
                        ', '.join("'%s'" % field for field in fields),
                        unique,
                    )
                out += '        )\n'

            if self.introspector.schema:
                out += '        schema = \'%s\'\n' % self.introspector.schema
            if len(primary_keys) > 1:
                pk_field_names = sorted([
                    field.name for col, field in columns if col in primary_keys
                ])
                pk_list = ', '.join("'%s'" % pk for pk in pk_field_names)
                out += '        primary_key = CompositeKey(%s)\n' % pk_list
            out += '\n'

            self._log_info("OK")
            return out

        seen = set()
        for table in sorted(database.model_names.keys()):
            if table not in seen:
                from pudb import set_trace
                set_trace()
                out += _process_table(out, table)
                seen.add(table)
        return out
Code Example #44
def train_frame_classifier(workfolder, cfg_dict, add_args):
    out, = vst.exp.get_subfolders(workfolder, ['out'])
    cfg = vst.exp.YConfig(cfg_dict)
    Ncfg_daly.set_defcfg_v2(cfg)
    cfg.set_defaults_yaml("""
    seed: 42
    inputs:
        tubes_dwein: ~
    split_assignment: !def ['train/val',
        ['train/val', 'trainval/test']]
    CN:
        SOLVER:
          BASE_LR: 0.0375
          LR_POLICY: steps_with_relative_lrs
          LRS: [1, 0.1, 0.01, 0.001, 0.0001, 0.00001]
          STEPS: [0, 41, 49]
          MAX_EPOCH: 57
          MOMENTUM: 0.9
          WEIGHT_DECAY: 1e-4
          WARMUP_EPOCHS: 4.0
          WARMUP_START_LR: 0.0001
          OPTIMIZING_METHOD: sgd
    period:
        i_batch:
            loss_log: '0::10'
            eval_krgb: '::'
        i_epoch:
            eval_krgb: '0::1'
    train:
        num_workers: 8
        augment:
            scale: False
            hflip: False
    """)
    cf = cfg.parse()
    cn = _config_preparations_c2d_1x1(cfg.without_prefix('CN.'))

    initial_seed = cf['seed']
    enforce_all_seeds(initial_seed)

    # prepare data
    dataset: Dataset_daly_ocv = Ncfg_daly.get_dataset(cf)
    vgroup = Ncfg_daly.get_vids(cf, dataset)
    sset_train, sset_eval = cf['split_assignment'].split('/')
    vids_train, vids_eval = vgroup[sset_train], vgroup[sset_eval]
    # wein tubes
    tubes_dwein_d, tubes_dgt_d = load_gt_and_wein_tubes(
        cf['inputs.tubes_dwein'], dataset, vgroup)
    tubes_dgt_train = tubes_dgt_d[sset_train]
    # Means
    norm_mean_cu = np_to_gpu(cn.DATA.MEAN)
    norm_std_cu = np_to_gpu(cn.DATA.STD)

    # Model
    model = C2D_1x1_fullframe(cn, 11, 0.5, False)
    optimizer = tsf_optim.construct_optimizer(model, cn)
    loss_fn = torch.nn.CrossEntropyLoss(reduction='mean')
    model.init_weights(0.01)
    device = get_device()
    model.to(device)

    # Training setup
    max_epoch = cn.SOLVER.MAX_EPOCH
    NUM_WORKERS = cf['train.num_workers']
    man_ckpt = Manager_model_checkpoints(model, optimizer, 'c2d_1x1')

    # Restore previous run
    rundir = vst.mkdir(out / 'rundir')
    checkpoint_path = (Manager_checkpoint_name.find_last_checkpoint(rundir))
    start_epoch = (man_ckpt.restore_model_magic(checkpoint_path))

    # Positives (from training videos)
    stride = 1
    max_distance = np.inf
    labeled_frames: List[Frame_labeled] = \
        prepare_label_fullframes_for_training(
            tubes_dgt_train, dataset, stride, max_distance)

    # Get all negative frames from training videos
    negative_frames = []
    for vid in vids_train:
        v = dataset.videos_ocv[vid]
        instance_franges = get_keyframe_ranges(v, include_diff=True)
        good_frames = np.arange(0, v['nframes'], stride)
        for s, e, kf in instance_franges:
            bad_frames = np.arange(s, e)
            good_frames = np.setdiff1d(good_frames, bad_frames)
            for frame_ind in good_frames:
                negative_frame = {
                    'vid': vid,
                    'frame_ind': frame_ind,
                    'label': 10
                }
                negative_frames.append(negative_frame)

    import pudb
    pudb.set_trace()  # XXX BREAKPOINT
    # Kinda sparsely sampled frames from all videos
    sparse_samples = []
    for vid in vids_eval:
        v = dataset.videos_ocv[vid]
        instance_franges = get_keyframe_ranges(v, include_diff=True)

    # Training
    for i_epoch in range(start_epoch, max_epoch):
        rgen = enforce_all_seeds(initial_seed + i_epoch)

        # Sample negative frames
        sample_ids = rgen.choice(len(negative_frames),
                                 size=len(labeled_frames))
        sampled_negative_frames = [negative_frames[i] for i in sample_ids]
        all_frames = labeled_frames + sampled_negative_frames
        random.shuffle(all_frames)

        tdataset = TDataset_over_frames(cf, cn, labeled_frames, dataset)

        train_loader = torch.utils.data.DataLoader(
            tdataset,
            num_workers=NUM_WORKERS,
            collate_fn=sequence_batch_collate_v2)

        pbar = tqdm(train_loader, total=len(tdataset))
        total_batches = len(tdataset)

        avg_loss = vst.Averager()

        for i_batch, data_input in enumerate(pbar):
            model.train()
            # Update learning rate
            lr = tsf_optim.get_lr_at_epoch(
                cn, i_epoch + float(i_batch) / total_batches)
            set_lr(optimizer, lr)

            frame_list, metas, = data_input
            labels_np = np.array([m['label'] for m in metas])
            labels_t = torch.from_numpy(labels_np)
            labels_c = labels_t.cuda()

            inputs = [x.type(torch.cuda.FloatTensor) for x in frame_list]

            result = model(inputs, None)
            preds = result['x_final']

            # Compute loss
            loss = loss_fn(preds, labels_c)
            # check nan Loss.
            sf_misc.check_nan_losses(loss)

            # Perform the backward pass.
            optimizer.zero_grad()
            loss.backward()
            # Update the parameters.
            optimizer.step()
            # Loss update
            avg_loss.update(loss.item())

            if vst.check_step(i_batch, cf['period.i_batch.loss_log']):
                log.info(f'[{i_epoch}, {i_batch}/{total_batches}]'
                         f' {lr=} loss={avg_loss}')
Code Example #45
"""
TREE
Implementations of various tree things

Stefan Wong 2019
"""

# debug
from pudb import set_trace
set_trace()


class BinaryTreeNode(object):
    def __init__(self,
                 val=None,
                 left: 'BinaryTreeNode' = None,
                 right: 'BinaryTreeNode' = None) -> None:
        self.left: 'BinaryTreeNode' = left
        self.right: 'BinaryTreeNode' = right
        self.val = val

    def __repr__(self) -> str:
        return 'BinaryTreeNode [%s]' % str(self.val)


# ==== Create new Binary Tree objects ==== #
def repr_to_tree(rstring: str) -> BinaryTreeNode:
    rtokens = rstring.split(',')

    node_q = list()
    root = None
Code Example #46
class MyClass(object):
    def __init__(self, a, b):
        self.a = a
        self.b = b
        self._b = [b]

mc = MyClass(15, MyClass(12, None))


from pudb import set_trace; set_trace()

def simple_func(x):
    x += 1

    s = range(20)
    z = None
    w = ()

    y = dict((i, i**2) for i in s)

    k = set(range(5, 99))

    try:
        x.invalid
    except AttributeError:
        pass

    #import sys
    #sys.exit(1)

    return 2*x
Code Example #47
            rewards.append(r)
            log_probs.append(p.log_prob(a))

            s = succ

        discounted_rewards = [DISCOUNT**t * r for t, r in enumerate(rewards)]
        cumulative_returns = [
            G(discounted_rewards, t) for t, _ in enumerate(discounted_rewards)
        ]

        states = torch.stack(states)
        state_values = critic(states).reshape(-1)

        cumulative_returns = tensor(cumulative_returns)
        Adv = cumulative_returns - state_values

        log_probs = torch.stack(log_probs).reshape(-1)

        loss = -(Adv @ log_probs) / len(rewards)
        if episode > 500 and loss.item() < -1000:
            # TODO(alok): XXX rm
            from pudb import set_trace

            set_trace(paused=True)

        loss.backward()
        opt.step()
        opt.zero_grad()

        print(f"E: {episode+1}, R: {sum(rewards)}")
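pudb.set_trace() accepts a paused keyword argument: with paused=True (the default) execution stops immediately, while paused=False only installs the debugger, so the program keeps running until it hits a breakpoint set in the pudb UI. A sketch of the conditional-breakpoint idiom used above (threshold values are illustrative):

from pudb import set_trace

def check_loss(episode, loss):
    # Stop only when training goes off the rails, not on every step
    if episode > 500 and loss < -1000:
        set_trace(paused=True)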
Code Example #48
def main():
    args = parse_args()
    logdir = os.path.join(MODEL_LOGDIR, args.folder, 'seed{:02}'.format(args.seed))
    args, policy, agent, stats_global, stats_local, exp_vars = utils.init_training(args, logdir)
    envs = exp_vars.envs
    action_space_skills = Box(-np.inf, np.inf, (args.bc_args['dim_action'],), dtype=np.float)
    rollouts, obs = utils.create_rollout_storage(
        args, envs, policy, exp_vars.action_space, action_space_skills, exp_vars.device)
    start = time.time()

    if args.pudb:
        import pudb; pudb.set_trace()
    epoch, env_steps = exp_vars.start_epoch, exp_vars.start_step
    reward = torch.zeros((args.num_processes, 1)).type_as(obs[0])
    need_master_action, policy_values_cache = np.ones((args.num_processes,)), None
    while True:
        print('Starting epoch {}'.format(epoch))
        master_steps_done = 0
        pbar = tqdm(total=args.num_master_steps_per_update * args.num_processes)
        while master_steps_done < args.num_master_steps_per_update * args.num_processes:
            value, action, action_log_prob = utils.get_policy_values(
                policy,
                rollouts.get_last(rollouts.obs),
                rollouts.get_last(rollouts.actions),
                policy_values_cache,
                need_master_action)
            policy_values_cache = value, action, action_log_prob

            # Observe reward and next obs
            obs, reward, done, infos, need_master_action = utils.do_master_step(
                action, obs, reward, policy, envs, args)
            master_steps_done += np.sum(need_master_action)
            pbar.update(np.sum(need_master_action))

            stats_global, stats_local = stats.update(
                stats_global, stats_local, reward, done, infos, args)

            # If done then clean the history of observations.
            masks = {i: torch.FloatTensor([0.0] if done_ else [1.0]) for i, done_ in enumerate(done)}
            # check that the obs dictionary contains obs from all envs that will be stored in rollouts
            # we only store observations from envs which need a master action
            assert len(set(np.where(need_master_action)[0]).difference(obs.keys())) == 0
            rollouts.insert(
                obs,
                action,
                action_log_prob,
                value,
                reward,
                masks,
                indices=np.where(need_master_action)[0])
            reward[np.where(done)] = 0
            env_steps += sum([info['length_after_new_action']
                              for info in np.array(infos)[np.where(need_master_action)[0]]])
        pbar.close()

        # master policy training
        with torch.no_grad():
            next_value = policy.get_value_detached(
                rollouts.get_last(rollouts.obs),
                rollouts.get_last(rollouts.actions))
        rollouts.compute_returns(next_value, args.gamma)
        value_loss, action_loss, dist_entropy = agent.update(rollouts)
        rollouts.after_update()

        # saving the model
        if epoch % args.save_interval == 0:
            print('Saving the model after epoch {} for offline evaluation'.format(epoch))
            log.save_model(
                logdir, policy, agent.optimizer, epoch, env_steps, exp_vars.device, envs, args,
                stats_global, stats_local)

        # logging
        if epoch % args.log_interval == 0 and len(stats_global['length']) > 1:
            log.log_train(
                env_steps, start, stats_global, action_loss, value_loss, dist_entropy, epoch)
        epoch += 1
        if env_steps > args.num_train_timesteps:
            print('Number of env steps reached the maximum number of frames')
            break
Code Example #49
def test():
    board = [[1, 2, 1], [1, 1, 1]]
    pudb.set_trace()
    assert candy(board) == [[0, 0, 0], [1, 2, 1]]
Code Example #50
def ag_call(func,
            *args,
            expected_rc=None,
            new_only=True,
            headers=None,
            no_cache=False,
            **kwargs):
    """
    Wrap AGitHub calls with basic error detection and caching in TinyDB

    Not smart, and hides any error information from caller.
    But very convenient. :)
    """
    if not headers:
        headers = {}
    add_media_types(headers)
    last = {}
    url = func.keywords["url"]
    doc = {"url": url}
    if new_only and last_table is not None:
        try:
            last = last_table.search(tinydb.where("url") == url)[0]["when"]
        except IndexError:
            pass
        # prefer last modified, as more readable, but neither guaranteed
        # https://developer.github.com/v3/#conditional-requests
        if "last-modified" in last:
            headers["If-Modified-Since"] = last["last-modified"]
        elif "etag" in last:
            headers["If-None-Match"] = last["etag"]
    # Insert our (possibly modified) headers
    real_headers = kwargs.setdefault("headers", {})
    real_headers.update(headers)

    if expected_rc is None:
        expected_rc = [200, 304]
    rc, body = retry_call(func, *args, **kwargs)
    # If we have new information, we want to use it (and store it unless
    # no_cache is true)
    # If we are told our existing info is ok, or there's an error, use the
    # stored info
    if rc == 200:
        doc["rc"] = rc
        doc["body"] = body
    elif rc in (202, 204, 304):
        logger.warn("can't handle {} for {}, using older data".format(rc, url))
        body = doc.get("body", [])
    # Handle repo rename/removal corner cases
    elif rc == 301:
        logger.error("Permanent Redirect for '{}'".format(url))
        # TODO: do something better, like switch to using id's
        # for now, act like nothing is there
        body = doc.get("body", [])
    elif rc == 404 and rc not in expected_rc:
        logger.error("No longer available or access denied: {}".format(url))
        # TODO: Figure out what to do here. Maybe it's just that message, but
        # maybe need to delete from DB before next run
        body = doc.get("body", [])
        # don't throw on this one
        expected_rc.append(404)
    logger.debug("{} for {}".format(rc, url))
    if (not no_cache) and new_only and last_table is not None:
        h = {k.lower(): v for k, v in gh.getheaders()}
        for x in "etag", "last-modified":
            if x in h:
                last[x] = h[x]
        doc.update({"body": body, "rc": rc, "when": last})
        last_table.upsert(doc, tinydb.where("url") == url)

    if rc not in expected_rc:
        if DEBUG:
            import pudb

            pudb.set_trace()  # noqa: E702
        else:
            logger.error("{} for {}".format(rc, url))
            raise AG_Exception
    return body
Code Example #51
File: routes.py Project: woshiange/metabase_api
def update_card(card_id):
    query = flask.request.args.get("query")
    set_trace()
    models.card(card_id).update_query(query)
    return json.dumps(query)
Code Example #52
def rec (n):
	if n<=1:
		return 1
	pudb.set_trace()
	return n * rec(n-1)
Code Example #53
def main():
    import pudb  # pylint: disable=import-error
    import argparse
    import json      # used below; missing from the original snippet
    import requests  # used below; missing from the original snippet

    pudb.set_trace()

    argparser = argparse.ArgumentParser()

    argparser.add_argument('input_test_json', help='Input Test JSON file')
    argparser.add_argument('-u', '--username', help='username', required=False)
    argparser.add_argument('-p', '--password', help='password', required=False)

    args = argparser.parse_args()
    session_id = None

    username = args.username
    password = args.password

    if username is not None and password is None:

        # User specified a username but not a password, so ask
        import getpass
        password = getpass.getpass("Password: ")

    if username and password:
        try:
            # login_url is defined elsewhere in the original module; the
            # scrape masked this span along with the password prompt.
            print("Accessing the Login page")
            r = requests.get(login_url, verify=False)
            csrftoken = r.cookies['csrftoken']

            data = dict()
            data['username'] = username
            data['password'] = password
            data['csrfmiddlewaretoken'] = csrftoken

            headers = dict()
            headers['Cookie'] = 'csrftoken=' + csrftoken
            headers['Referer'] = login_url

            print("Logging into Platform to get the session id")
            r2 = requests.post(login_url,
                               verify=False,
                               data=data,
                               headers=headers)
            session_id = r2.cookies['sessionid']
        except Exception as e:
            print("Unable to get session id from the platform. Error: " +
                  str(e))
            exit(1)

    with open(args.input_test_json) as f:
        in_json = f.read()
        in_json = json.loads(in_json)
        print(json.dumps(in_json, indent=4))

        connector = RedCanaryConnector()  # defined in this connector's module
        connector.print_progress_message = True

        if session_id is not None:
            in_json['user_session_token'] = session_id
            connector._set_csrf_info(csrftoken, headers['Referer'])

        ret_val = connector._handle_action(json.dumps(in_json), None)
        print(json.dumps(json.loads(ret_val), indent=4))

    exit(0)
Code example #55
File: debug.py Project: nchuhrina/pytest
import sys

def _debug(func):  # enclosing decorator implied by the original fragment
    def _wrapper(*args, **kwargs):
        import pudb  # pylint: disable=import-error
        sys.stdout = sys.__stdout__  # undo pytest's stdout capture
        pudb.set_trace()
        return func(*args, **kwargs)
    return _wrapper
Code example #56
        return phantom.APP_SUCCESS

    def finalize(self):

        # Save the state, this data is saved across actions and app upgrades
        self.save_state(self._state)
        return phantom.APP_SUCCESS


if __name__ == '__main__':

    import pudb
    import argparse

    pudb.set_trace()

    argparser = argparse.ArgumentParser()

    argparser.add_argument('input_test_json', help='Input Test JSON file')
    argparser.add_argument('-u', '--username', help='username', required=False)
    argparser.add_argument('-p', '--password', help='password', required=False)

    args = argparser.parse_args()
    session_id = None

    username = args.username
    password = args.password

    if (username is not None and password is None):
Code example #57
                    help='Whether to load the autoencoder (default: False)')

parser.add_argument('--batchsize_l', type=int, default=128,
                    help='input batch size for the latent space model (default: 128)')
parser.add_argument('--latent_distr', default='uniform',
                    help='distribution to sample the latent space from (default: uniform)')
# NB: argparse's type=bool treats any non-empty string as True
parser.add_argument('--load_l', type=bool, default=False,
                    help='Whether to load the latent space model (default: False)')
parser.add_argument('--steps_l', type=int, default=15000,
                    help='number of steps your latent space model takes (default: 15000)')
parser.add_argument('--model', default='generator',
                    help='Is your latent space model a transporter or a generator (default: generator)')

args = parser.parse_args()

import pudb; pudb.set_trace()

DATASET = args.dataset
FOLDER = args.folder

AE_STEPS = args.steps_a
BATCH_SIZE = args.batchsize_a
CONV = args.conv
DIM = args.dim
AE_LOAD = args.load_a

BATCH_SIZE_GEN = args.batchsize_l
DISTR = args.latent_distr
GEN_LOAD = args.load_l
STEPS = args.steps_l
MODEL = args.model
Code example #58
# def relprop(self, R, rule):
#     '''
#     Layer type specific propagation of relevance
#     '''
#     R = R.view(self.output.shape)  # stitching ".view" step, frequently in classifiers
#     layer_name = self.module._get_name()
#     Ri = globals()[layer_name].relprop(utils.copy_module(self.module),
#                                        self.input, R, self.num, rule)
#     Ri = Ri.clone().detach()
#     if self.input.grad is not None:
#         self.input.grad.zero_()
#     return Ri
import torch

class SimpleNet(torch.nn.Module):
    def __init__(self):
        super(SimpleNet, self).__init__()
        self.layers = torch.nn.Sequential(torch.nn.Linear(2, 2),
                                          torch.nn.ReLU(),
                                          torch.nn.Linear(2, 1))

    def forward(self, x):
        return self.layers(x)


model = LRP(SimpleNet())  # LRP wrapper defined in this project's module
input = torch.tensor([1, 0], dtype=torch.float32).view(1, 1, -1)
output = model(input)
print(output)
import pudb
pudb.set_trace()  # BREAKPOINT
output.backward()
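The LRP wrapper itself is not shown here, but the commented-out relprop suggests per-layer relevance propagation. A minimal sketch of the epsilon-rule for a single linear layer, using the standard autograd trick (the lrp_linear name and the rule choice are assumptions; torch is imported above):

def lrp_linear(layer, a, R, eps=1e-6):
    # Epsilon-rule: redistribute output relevance R onto inputs a in
    # proportion to each input's contribution to the pre-activations.
    a = a.clone().detach().requires_grad_(True)
    z = layer(a) + eps            # stabilized forward pass
    s = (R / z).detach()          # element-wise relevance ratio
    (z * s).sum().backward()      # a.grad now holds w^T s
    return (a * a.grad).detach()  # R_in = a * (w^T s)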
Code example #59
import logging
import time
from multiprocessing.pool import ThreadPool

# xmlparser, save, readFileDigests, run_execution, run_verification_global
# and setRunningClusters come from the surrounding medusa modules.

def test_run():
    # read wordcount xml
    # cluster1: job1 --> aggregation: job3
    # cluster2: job2 -----^

    path = "/root/Programs/medusa-1.0/submit/job.xml"
    from pudb import set_trace
    set_trace()
    format = "%(asctime)s [%(levelname)s] %(message)s"
    logging.basicConfig(format=format, level=logging.DEBUG)

    faults_tolerate = 1
    job_list = xmlparser.parser(path, faults_tolerate, "job")
    aggregator = xmlparser.parser(path, faults_tolerate, "aggregator")

    save("job", job_list)
    save("aggregator", aggregator)

    sequence = [job_list, aggregator]

    pool = ThreadPool(processes=4)
    step = 0
    while step < len(sequence):
        jobs = sequence[step]
        save("step", step)

        if len(jobs) == 0:
            step += 1
            continue

        logging.info("Step %s starting" % step)
        if step == 0:
            logging.info("Checking clusters that are running...")
            setRunningClusters()

        # prepare environment for the test
        logging.info("Generating reference digests...")
        ss = time.time()

        reference_digests = []
        plist = []
        for job in jobs:
            plist.append(
                pool.apply_async(readFileDigests,
                                 args=(job.input_path, step == 1)))

        for p in plist:
            while not p.ready():
                logging.debug("Still waiting for reference digests...")
                time.sleep(5)

            _output = p.get()

            if len(_output) > 0:
                if not step == 1:
                    reference_digests += _output
                else:
                    reference_digests = _output
        ee = time.time()
        logging.info("Reference digests created in %s sec." % (int(ee - ss)))

        if step == 0:
            gstart = time.time()

        # start the test
        mstart = time.time()
        # CPU_CORES
        digests_matrix = run_execution(faults_tolerate, jobs, step == 1,
                                       reference_digests)
        mend = time.time()
        span = mend - mstart
        logging.info("Execution time (start: %s, end: %s): %s" %
                     (mstart, mend, str(span)))

        logging.info("Return digests: %s" % digests_matrix)

        res = run_verification_global(digests_matrix)

        if res is True:
            logging.info("Step %s completed" % step)
            step += 1

    gend = time.time()
    gspan = str(gend - gstart)
    print("Full execution (start: %s, end: %s): %s" % (gstart, gend, gspan))
Code example #60
def hook_fnB(self, module, input, output):
    # PyTorch forward hook (method fragment from a larger class): capture
    # the module's input and output for later inspection.
    import pudb
    pudb.set_trace()  # BREAKPOINT
    self.input = input
    self.output = output
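For context, a hook with this signature is attached via PyTorch's register_forward_hook; a minimal, self-contained sketch (the Recorder class and the layer are illustrative):

import torch

class Recorder:
    def hook_fnB(self, module, input, output):
        self.input, self.output = input, output

rec = Recorder()
layer = torch.nn.Linear(4, 2)
handle = layer.register_forward_hook(rec.hook_fnB)
layer(torch.randn(1, 4))  # the hook fires during this forward pass
handle.remove()           # detach the hook when done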