Example #1
def post_to_slack(slack_webhook, assignment):
    """
    Posts a message to slack
    :param slack_webhook: (string) The url of the slack webhook
    :param assignment: (Feature) The feature to use
    :return:
    """
    # create the message to send
    message = """
    *Assignment Completed*:
    *Assignment Type:* {}
    *Location*: {}
    *Description*: {}
    *Notes*: {}
    *Worker*: {}
    *Time*: {}
    *Link*: {}
    """.format(
        assignment.assignment_type.name,
        assignment.location,
        assignment.description,
        assignment.notes,
        assignment.worker.name,
        assignment.completed_date.strftime("%Y-%m-%d %H:%M"),
        "https://workforce.arcgis.com/projects/{}/dispatch/assignments/{}".format(
            assignment.project.id,
            assignment.object_id
        )
    )
    logging.getLogger().info("Posting: {} to slack".format(inspect.cleandoc(message)))
    response = requests.post(slack_webhook, json={"text": inspect.cleandoc(message)})
    logging.getLogger().info("Status code: {}".format(response.status_code))
Example #2
File: roost.py Project: kcr/snipe
    def new_registration(self):
        msg = inspect.cleandoc("""
        You don't seem to be registered with the roost server.  Select
        this message and hit Y to begin the registration process.
        You'll be asked to confirm again with a message somewhat more
        alarmist than necessary because I couldn't resist the
        reference to the Matrix.
        """)
        f = asyncio.Future()
        self.add_message(RoostRegistrationMessage(self, msg, f))
        yield from f

        f = asyncio.Future()
        msg = inspect.cleandoc("""
        This is your last chance.  After this, there is no turning
        back.  The roost server will be receiving your messages and
        storing them forever.  Press Y here if you're okay that the
        people who run the server might accidentally see some of your
        messages.
        """)
        self.add_message(RoostRegistrationMessage(self, msg, f))
        yield from f
        try:
            yield from self.r.auth(create_user=True)
            self.add_message(messages.SnipeMessage(self, 'Registered.'))
            self.load_subs(self._zephyr_subs)
        except asyncio.CancelledError:
            pass
        except Exception as e:
            self.log.exception('registering')
            self.add_message(messages.SnipeErrorMessage(
                self, str(e), traceback.format_exc()))
Example #3
File: euler.py Project: jdp/euler
    def print_solution(id, func):
        from inspect import cleandoc

        print "-- Problem", id
        print cleandoc(func.__doc__)
        print func()
        print
Example #4
    def close(self, view, is_ok):
        model = view.instr
        connection_form = model.connection_form
        if is_ok:
            if (model.name != '' and model.driver_type and model.driver != ''
                and connection_form.check()):
                connection_dict = connection_form.connection_dict()
                try:
                    instr = DRIVERS[model.driver](connection_dict)
                    instr.close_connection()
                except InstrIOError:
                    message = cleandoc(u"""The software failed to
                                establish the connection with the instrument
                                please check all parameters and instrument state
                                and try again""")

                    critical(parent = view,
                             text = fill(message, 80),
                             title = 'Connection failure')

                view.result = True
                view.close()

            else:
                message = cleandoc(u"""You must fill the fields : name,
                       driver type, driver, {} before
                       validating""".format(connection_form.required_fields())
                                       )
                information(parent = view, text = fill(message, 80),
                          title = 'Missing information')

        else:
            view.close()
Example #5
def main():
    env = Environment(loader=PackageLoader('kvm', 'doc/templates'))
    docstrings = {'module': kvm.__doc__ or '*NOT DOCUMENTED*'}

    # Add methods.
    docstrings.update(**{method: params(kvm, method) for method in METHODS})

    # Add exceptions.
    docstrings.update(KvmError=cleandoc(getattr(kvm, 'KvmError').__doc__))

    hypervisor = kvm.Hypervisor(unix.Local())

    # Add Hypervisor methods.
    hypervisor_docstrings = {'doc': cleandoc(hypervisor.__doc__)}
    hypervisor_docstrings.update(**{method: params(hypervisor, method, PREFIX)
                                    for method in OBJ_METHODS})

    # Add child objects.
    for child in CHILDS:
        prefix = '%s.%s' % (PREFIX, 'generic')

        child_obj = getattr(kvm, '_%s' % child.capitalize())
        for method in dir(child_obj):
            if method.startswith('_'):
                continue
            (hypervisor_docstrings.setdefault('childs', {})
                                  .setdefault(child, [])
                                  .append(params(child_obj, method, prefix)))

    docstrings.update(hypervisor = hypervisor_docstrings)

    api = env.get_template('api.rst')
    print(api.render(**docstrings))
Example #6
def help(*args):
	"""Crying for help is nice, but I don't think this is the way you would
	rather use this command. Try 'help install' or whatever, but screaming for
	help like this brings you nowhere.

	"""
	if len(args) > 0:
		if args[0] in COMMANDS:
			print cleandoc(eval(COMMANDS[args[0]]).__doc__)
		elif args[0].lower() in ('sex', 'girls'):
			print "You shuld try asking her out some time..."
		elif args[0].lower() in ('boys', 'lads', 'men'):
			print "Men, or 'male humans' are often thought of as much "\
				"simpler then their female counterparts. Many women see men "\
				"as giant, strong and half-brained ogers. Still, there is "\
				"just something about those creatures that women tend to "\
				"'fall for'."
		elif args[0].lower() in ('robots',):
			print "Robots are very sexy indeed. Their reproduction is "\
				"however often controlled by the much more simple-minded "\
				"humans."
		else:
			print "I realy can't help you with '%s'." % " ".join(args)
	else:
		print cleandoc(__doc__)
Example #7
def main():
    """
    A generic Kafka producer for use as a Cylc event handler.

    USAGE:
       cylc_kafka_producer.py <HOST:PORT> <TOPIC> key1=val1 key2=val2 ...
    serializes {key1: val1, key2: val2, ...} to TOPIC at Kafka on HOST:PORT.

    This is generic in that a JSON message schema is defined by the received
    command line keyword arguments. To enforce compliance to a particular
    schema, copy and modify as needed.

    Can be partnered with the generic cylc_kafka_consumer external trigger
    function, for triggering downstream suites.

    """

    if 'help' in sys.argv[1]:
        print cleandoc(main.__doc__)
        sys.exit(0)

    # TODO exception handling for bad inputs etc.
    kafka_server = sys.argv[1]
    kafka_topic = sys.argv[2]
    # Construct a message dict from kwargs.
    dmsg = dict([k.split('=') for k in sys.argv[3:]])

    producer = KafkaProducer(
        bootstrap_servers=kafka_server,
        value_serializer=lambda msg: json.dumps(msg).encode('utf-8'))

    producer.send(kafka_topic, dmsg)
    producer.flush()
Example #8
    def span_frequency(self, value):
        """span frequency setter method
        """
        if self.mode == 'SA':
            self.write('FREQ:SPAN {} GHz'.format(value))
            result = self.ask_for_values('FREQ:SPAN?')
            if result:
                if abs(result[0]/1e9 - value)/value > 10**-12:
                    raise InstrIOError(cleandoc('''PSA did not set correctly
                    the span frequency'''))
            else:
                raise InstrIOError(cleandoc('''PSA did not set correctly the
                    span frequency'''))

        elif self.mode == 'SPEC':
            self.write('SENS:SPEC:FREQ:SPAN {} GHz'.format(value))
            result = self.ask_for_values('SENS:SPEC:FREQ:SPAN?')
            if result:
                if abs(result[0]/1e9 - value)/value > 10**-12:
                    raise InstrIOError(cleandoc('''PSA did not set correctly
                    the span frequency'''))
            else:
                raise InstrIOError(cleandoc('''PSA did not set correctly the
                    span frequency'''))

        else:
            raise InstrIOError(cleandoc('''PSA is not in the appropriate mode
                to set correctly the span frequency'''))
Example #9
def main(argv):
    parser = argparse.ArgumentParser('commcare-export-utils')
    subparsers = parser.add_subparsers(dest='command')
    for command_type in COMMANDS:
        sub = subparsers.add_parser(
            command_type.slug,
            help=inspect.cleandoc(command_type.help).splitlines()[0],
            description=inspect.cleandoc(command_type.help),
            formatter_class=argparse.RawDescriptionHelpFormatter
        )
        command_type.add_arguments(sub)

    try:
        args = parser.parse_args(argv)
    except UnicodeDecodeError:
        for arg in argv:
            try:
                arg.encode('utf-8')
            except UnicodeDecodeError:
                sys.stderr.write(u"ERROR: Argument '%s' contains unicode characters. "
                                 u"Only ASCII characters are supported.\n" % unicode(arg, 'utf-8'))
        sys.exit(1)

    logging.basicConfig(level=logging.WARN,
                        format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s')

    exit(main_with_args(args))
Example #10
 def test_run_without_args(self, mock_check_output):
     with TestIO(combined=True) as io:
         with self.assertRaises(SystemExit) as call:
             with patch('plainbox.impl.commands.run.authenticate_warmup') as mock_warmup:
                 mock_warmup.return_value = 0
                 main(['run'])
         self.assertEqual(call.exception.args, (0,))
     expected1 = """
     ===============================[ Analyzing Jobs ]===============================
     Estimated duration cannot be determined for automated jobs.
     Estimated duration cannot be determined for manual jobs.
     ==============================[ Running All Jobs ]==============================
     ==================================[ Results ]===================================
     """
     expected2 = """
     ===============================[ Authentication ]===============================
     ===============================[ Analyzing Jobs ]===============================
     Estimated duration cannot be determined for automated jobs.
     Estimated duration cannot be determined for manual jobs.
     ==============================[ Running All Jobs ]==============================
     ==================================[ Results ]===================================
     """
     self.assertIn(io.combined, [
         cleandoc(expected1) + "\n",
         cleandoc(expected2) + "\n"])
Example #11
    def output_state(self, value):
        """ Output setter method. 'ON', 'OFF'

        """
        with self.secure():
            on = re.compile('on', re.IGNORECASE)
            off = re.compile('off', re.IGNORECASE)
            if on.match(value) or value == 1:

                self._AWG.write('INST {}'.format(self._channel))
                self._AWG.write('SOUR:FUNC:MODE USER')
                self._AWG.write('OUTP ON')
                if self._AWG.ask('OUTP?') != 'ON':
                    raise InstrIOError(cleandoc('''Instrument did not set
                                                correctly the output'''))
            elif off.match(value) or value == 0:
                self._AWG.write('INST {}'.format(self._channel))
                self._AWG.write('SOUR:FUNC:MODE USER')
                self._AWG.write('OUTP OFF')
                if self._AWG.ask('OUTP?') != 'OFF':
                    raise InstrIOError(cleandoc('''Instrument did not set
                                                correctly the output'''))
            else:
                mess = fill(cleandoc('''The invalid value {} was sent to
                            switch_on_off method''').format(value), 80)
                raise VisaTypeError(mess)
Example #12
 def run_mode(self, value):
     """Run mode setter method
     """
     if value in ('CONT', 'CONTINUOUS', 'continuous'):
         self.write('AWGControl:RMODe CONT')
         if self.ask('AWGControl:RMODe?') != 'CONT':
             raise InstrIOError(cleandoc('''Instrument did not set
                                         correctly the run mode'''))
     elif value in ('TRIG', 'TRIGGERED', 'triggered'):
         self.write('AWGControl:RMODe TRIG')
         if self.ask('AWGControl:RMODe?') != 'TRIG':
             raise InstrIOError(cleandoc('''Instrument did not set
                                         correctly the run mode'''))
     elif value in ('GAT', 'GATED', 'gated'):
         self.write('AWGControl:RMODe GAT')
         if self.ask('AWGControl:RMODe?') != 'GAT':
             raise InstrIOError(cleandoc('''Instrument did not set
                                         correctly the run mode'''))
     elif value in ('SEQ', 'SEQUENCE', 'sequence'):
         self.write('AWGControl:RMODe SEQ')
         if self.ask('AWGControl:RMODe?') != 'SEQ':
             raise InstrIOError(cleandoc('''Instrument did not set
                                         correctly the run mode'''))
     else:
         mess = fill(cleandoc('''The invalid value {} was sent to
                              run mode method''').format(value), 80)
         raise VisaTypeError(mess)
Example #13
    def run_averaging(self, aver_count=''):
        """ Restart averaging on the channel and wait until it is over

        Parameters
        ----------
        aver_count : str, optional
            Number of averages to perform. Default value is the current one
        """
        self._pna.trigger_source = 'Immediate'
        self.sweep_mode = 'Hold'
        self._pna.clear_averaging()
        self._pna.timeout = 10

        if aver_count:
            self.average_count = aver_count

        self.average_state = 1

        for i in range(0, int(self.average_count)):
            self._pna.write('sense{}:sweep:mode gro'.format(self._channel))

            while True:
                try:
                    done = self._pna.ask_for_values('*OPC?')[0]
                    break
                except Exception:
                    self._pna.timeout = self._pna.timeout*2
                    logger = logging.getLogger(__name__)
                    msg = cleandoc('''PNA timeout increased to {} s
                        This will make the PNA display 420 error w/o issue''')
                    logger.info(msg.format(self._pna.timeout))

            if done != 1:
                raise InstrError(cleandoc('''Agilent PNA could not perform
                the average on channel {} '''.format(self._channel)))
Example #14
 def clock_source(self, value):
     """clock source setter method
     """
     if value in ("EXT", 1, "True"):
         self.write("AWGControl:CLOCk:SOURce EXT")
         if self.ask("AWGControl:CLOCk:SOURce?") != "EXT":
             raise InstrIOError(
                 cleandoc(
                     """Instrument did not set
                                         correctly the clock source"""
                 )
             )
     elif value in ("INT", 0, "False"):
         self.write("AWGControl:CLOCk:SOURce INT")
         if self.ask("AWGControl:CLOCk:SOURce?") != "INT":
             raise InstrIOError(
                 cleandoc(
                     """Instrument did not set
                                         correctly the clock source"""
                 )
             )
     else:
         mess = fill(
             cleandoc(
                 """The invalid value {} was sent to
                              clock_source_external
                              method"""
             ).format(value),
             80,
         )
         raise VisaTypeError(mess)
Example #15
 def oscillator_reference_external(self, value):
     """oscillator reference external setter method
     """
     if value in ("EXT", 1, "True"):
         self.write("SOUR:ROSC:SOUR EXT")
         if self.ask("SOUR:ROSC:SOUR?") != "EXT":
             raise InstrIOError(
                 cleandoc(
                     """Instrument did not set
                                         correctly the oscillator
                                         reference"""
                 )
             )
     elif value in ("INT", 0, "False"):
         self.write("SOUR:ROSC:SOUR INT")
         if self.ask("SOUR:ROSC:SOUR?") != "INT":
             raise InstrIOError(
                 cleandoc(
                     """Instrument did not set
                                         correctly the oscillator
                                         reference"""
                 )
             )
     else:
         mess = fill(
             cleandoc(
                 """The invalid value {} was sent to
                              oscillator_reference_external
                              method"""
             ).format(value),
             80,
         )
         raise VisaTypeError(mess)
Example #16
 def running(self, value):
     """Run state setter method
     """
     if value in ("RUN", 1, "True"):
         self.write("AWGC:RUN:IMM")
         if self.ask_for_values("AWGC:RST?")[0] not in (1, 2):
             raise InstrIOError(
                 cleandoc(
                     """Instrument did not set
                                         correctly the run state"""
                 )
             )
     elif value in ("STOP", 0, "False"):
         self.write("AWGC:STOP:IMM")
         if self.ask_for_values("AWGC:RST?")[0] != 0:
             raise InstrIOError(
                 cleandoc(
                     """Instrument did not set
                                         correctly the run state"""
                 )
             )
     else:
         mess = fill(
             cleandoc(
                 """The invalid value {} was sent to
                              running method"""
             ).format(value),
             80,
         )
         raise VisaTypeError(mess)
Example #17
    def output_state(self, value):
        """ Output setter method. 'ON', 'OFF'

        """
        with self.secure():
            on = re.compile("on", re.IGNORECASE)
            off = re.compile("off", re.IGNORECASE)
            if on.match(value) or value == 1:

                self._AWG.write("OUTP{}:STAT ON".format(self._channel))
                if self._AWG.ask_for_values("OUTP{}:STAT?".format(self._channel))[0] != 1:
                    raise InstrIOError(
                        cleandoc(
                            """Instrument did not set
                                                correctly the output"""
                        )
                    )
            elif off.match(value) or value == 0:
                self._AWG.write("OUTP{}:STAT OFF".format(self._channel))
                if self._AWG.ask_for_values("OUTP{}:STAT?".format(self._channel))[0] != 0:
                    raise InstrIOError(
                        cleandoc(
                            """Instrument did not set
                                                correctly the output"""
                        )
                    )
            else:
                mess = fill(
                    cleandoc(
                        """The invalid value {} was sent to
                            switch_on_off method"""
                    ).format(value),
                    80,
                )
                raise VisaTypeError(mess)
Example #18
 def test_frequency(self):
     """
     Frequency wrappers get replaced correctly
     """
     test = inspect.cleandoc(u"""<span content="At will" p:property="frequencyStart" />
         At will-<i p:property="spellName">detect evil</i> <span
         p:property="qualifier">(as a free\naction)</span> <span
         p:property="casterLevel">(caster level 9th)</span> <span
         p:property="dc">(DC 19)</span> <span p:property="sep"/>""")
     ret = self.applyRule(test, 'fGroup')
     actual = ret.documentElement.childNodes[0].toprettyxml(indent=u"").splitlines()
     expected = inspect.cleandoc(u"""<span content="At will" p:property="frequencyGroup">
          
                      At will-
          
          <span content="detect evil" p:property="spell">
          <i p:property="spellName">
          detect evil
          </i>
           
          <span p:property="qualifier">
          (as a free
          action)
          </span>
           
          <span p:property="casterLevel">
          (caster level 9th)
          </span>
           
          <span p:property="dc">
          (DC 19)
          </span>
          </span>
          </span>""").splitlines()
     self.failIfDiff(actual, expected, 'actual', 'expected')
Example #19
    def start_processing_measures(self):
        """ Starts to perform the measurement in the queue.

        Measure will be processed in their order of appearance in the queue.

        """
        logger = logging.getLogger(__name__)
        if not self.plugin.selected_engine:
            dial = EngineSelector(plugin=self.plugin)
            dial.exec_()
            if dial.selected_id:
                self.plugin.selected_engine = dial.selected_id
            else:
                msg = cleandoc('''The user did not select an engine to run the
                               measure''')
                logger.warn(msg)
                return

        self.plugin.flags = []

        measure = self.plugin.find_next_measure()
        if measure is not None:
            self.plugin.start_measure(measure)
        else:
            msg = cleandoc('''No currently enqueued measure can be run.''')
            logger.info(msg)
Example #20
    def sweep_time(self, value):
        """sweep time setter method
        """

        if self.mode == 'WAV':
            self.write('SENS:WAV:SWEEP:TIME {}'.format(value))
            result = self.ask_for_values('SENS:WAV:SWEEP:TIME?')
            if result:
                if abs(result[0] - value)/value > 10**-12:
                    raise InstrIOError(cleandoc('''PSA did not set correctly
                    the sweep time'''))
            else:
                raise InstrIOError(cleandoc('''PSA did not set correctly the
                    sweep time'''))
        elif self.mode == 'SA':
            self.write('SWEEP:TIME {}'.format(value))
            result = self.ask_for_values('SWEEP:TIME?')
            if result:
                if abs(result[0] - value)/value > 10**-12:
                    raise InstrIOError(cleandoc('''PSA did not set correctly
                    the sweep time'''))
            else:
                raise InstrIOError(cleandoc('''PSA did not set correctly the
                    sweep time'''))
        else:
            raise InstrIOError(cleandoc('''PSA is not in the appropriate mode
                to set correctly the sweep time'''))
Example #21
 def big_volt_range(self, value):
     """Voltage range method. Two values possible :
     big range = 12V = 'True' or 1 and small range = 1.2V = 'False' or 0
     TinyBilt need to be turned off to change the voltage range
     """
     with self.secure():
         outp = self._TB.ask_for_values('OUTP?')[0]
         if outp == 1:
             raise InstrIOError(cleandoc('''TinyBilt need to be turned
                                              off to change the voltage
                                             range'''))
         if value in ('True', 1):
             self._TB.write('i{};volt:rang 12'.format(self._channel))
             result = self._TB.ask_for_values('i{};volt:rang?'
                                              .format(self._channel))[0]
             if abs(result - 12) > 10**-12:
                 mess = 'Instrument did not set correctly the range voltage'
                 raise InstrIOError(mess)
         elif value in ('False', 0):
             self._TB.write('i{};volt:rang 1.2'.format(self._channel))
             result = self._TB.ask_for_values('i{};volt:rang?'
                                              .format(self._channel))[0]
             if abs(result - 1.2) > 10**-12:
                 raise InstrIOError(cleandoc('''Instrument did not set
                                             correctly the range
                                             voltage'''))
         else:
             raise ValueError(cleandoc('''Big range 12V = "True"
                                       or 1, small range 1.2V
                                       = "False" or 0'''))
Example #22
def test_lazy_func(tmpdir):
    package = tmpdir.mkdir('package')
    package.join('__init__.py').write('')

    package.join('func.py').write(cleandoc('''
        def func():
            return 10
    '''))

    package.join('test.py').write(cleandoc('''
        from uxie.utils import lazy_func
        import sys

        def test():
            func = lazy_func('.func.func')
            old_code = func.__code__

            result = func()
            assert result == 10

            del sys.modules['package.test']
            del sys.modules['package']

            result = func()
            assert result == 10
    '''))

    old_path = sys.path
    sys.path = [str(tmpdir)] + old_path
    __import__('package.test')
    sys.path = old_path

    sys.modules['package.test'].test()
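
A minimal sketch of why cleandoc matters here: it dedents the triple-quoted source so the file written into the temporary package is valid module-level Python (the snippet below is illustrative, not part of the test).

import inspect

src = '''
    def func():
        return 10
'''
# Without cleandoc the written module would start with an unexpected indent
# and fail to import; after cleandoc it compiles cleanly.
compile(inspect.cleandoc(src), '<demo>', 'exec')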
Example #23
    def auto_calibrate(self, value):
        """ Method to set the trigger mode

        Input:
        {'ON', 'Yes', 'OFF', 'No'}
        """
        if value in ("ON", "Yes"):
            self.write("ACAL ON")
            result = self.ask("ACAL?")
            result = result.replace("ACAL ", "")
            if result != "ON":
                raise InstrIOError(
                    cleandoc(
                        """Instrument did not set correctly
                                            the auto calibrate mode"""
                    )
                )
        elif value in ("OFF", "No"):
            self.write("ACAL OFF")
            result = self.ask("ACAL?")
            result = result.replace("ACAL ", "")
            if result != "OFF":
                raise InstrIOError(
                    cleandoc(
                        """Instrument did not set correctly
                                            the auto calibrate mode"""
                    )
                )
        else:
            mes = "{} is not an allowed value".format(value)
            raise InstrIOError(mes)
Example #24
    def render_item_info(self, item, renderer):
        """Render information about the specific item."""
        cls_doc = inspect.cleandoc(item.__doc__ or " ")
        init_doc = inspect.cleandoc(
            (item.__init__.__doc__ or " ").split("Args:")[0])

        if isinstance(item, registry.MetaclassRegistry):
            # show the args it takes. Relies on the docstring to be formatted
            # properly.
            doc_string = cls_doc + init_doc
            doc_string += (
                "\n\nLink:\n"
                "http://www.rekall-forensic.com/epydoc/%s.%s-class.html"
                "\n\n" % (item.__module__, item.__name__))

            renderer.write(doc_string)

            renderer.table_header([('Parameter', 'parameter', '30'),
                                   (' Documentation', 'doc', '70')])
            for parameter, doc in self.get_default_args(item):
                renderer.table_row(parameter, doc)

            # Add the standard help options.
            for parameter, descriptor in self.standard_options:
                renderer.table_row(parameter, self._clean_up_doc(descriptor))

        else:
            # For normal objects just write their docstrings.
            renderer.write(item.__doc__ or " ")

        renderer.write("\n")
Example #25
def getDocstring(targetFunc, targetClass=None):
    """Fetches the docstring of the given function/method."""
    try:
        if targetClass is None:
            return inspect.cleandoc(inspect.getdoc(targetFunc))
        return inspect.cleandoc(inspect.getdoc(getattr(targetClass, targetFunc)))
    except AttributeError:
        return "A docstring couldn't be found!"
Example #26
def test_completion_docstring():
    """
    Jedi should follow imports in certain conditions
    """
    c = Script('import jedi\njed').completions()[0]
    assert c.docstring(fast=False) == cleandoc(jedi_doc)

    c = Script('import jedi\njedi.Scr').completions()[0]
    assert c.docstring(raw=True, fast=False) == cleandoc(Script.__doc__)
Example #27
    def _check(self, input_text, slicing, output_text):

        input_file = cleandoc(input_text).splitlines()
        output_file = StringIO()

        node_transform = madseq.SequenceTransform(slicing or [])

        madseq.Document.parse(input_file).transform(node_transform).dump(output_file, "madx")

        self.assertEqual(output_file.getvalue().splitlines(), cleandoc(output_text).splitlines())
Example #28
 def average_count_SA(self, value):
     """
     """
     self.write('AVERage:COUNt {}'.format(value))
     result = self.ask_for_values('AVERage:COUNt?')
     if result:
         if result[0] != value:
             raise InstrIOError(cleandoc('''PSA did not set correctly the
                  average count'''))
     else:
         raise InstrIOError(cleandoc('''PSA did not set correctly the
                  average count'''))
Example #29
def member_to_pref(obj, member, val):
    """ Provide the value that will be stored in the preferences for a member.

    Parameters
    ----------
    obj : Atom
        Object who owns the member.

    member : Member
        Member for which the preferences should be retrieved

    val : Value
        Value of the member to be stored in the preferences

    Returns
    -------
    pref_value : str
        The serialized value/string that will be stored in the pref.

    """
    meta_value = member.metadata[PREF_KEY]

    # If 'pref=True' then we rely on the standard save mechanism
    if meta_value is True:
        # If val is string-like, then we can simply cast it and rely on
        # python/Atom default methods.
        if isinstance(val, str):
            pref_value = val
        else:
            pref_value = repr(val)

    # If the user provided a custom "to_pref" function, then we check
    # that it has the correct signature and use it to obtain the value
    elif (isinstance(meta_value, (tuple, list)) and
            len(getfullargspec(meta_value[TO_PREF_ID])[0]) == 3):
        pref_value = meta_value[TO_PREF_ID](obj, member, val)

    elif meta_value is False:
        raise NotImplementedError(
            fill(cleandoc('''you set 'pref=False' for this member. If you did
            not want to save it you should simply not declare this tag.''')))
    else:
        raise NotImplementedError(
            fill(cleandoc('''the 'pref' tag of this member was not set to true,
            therefore the program expects you to declare two functions,
             'member_to_pref(obj,member,val)' and 'member_from_pref(obj,member,
             val)' that will handle the serialization and deserialization of
             the value. Those should be passed as a list or a tuple, where
             the first element is member_to and the second is member_from.
             It is possible that you failed to properly declare the signature
             of those two functions.''')))

    return pref_value
Example #30
 def sweep_points_SA(self, value):
     """
     """
     self.write('SENSe:SWEep:POINts {}'.format(value))
     result = self.ask_for_values('SENSe:SWEep:POINts?')
     if result:
         if result[0] != value:
             raise InstrIOError(cleandoc('''PSA did not set correctly the
                 sweep point number'''))
     else:
         raise InstrIOError(cleandoc('''PSA did not set correctly the
                 sweep point number'''))
Example #31
    def __do_mount(self) -> None:
        sshfs_options = "-o slave,allow_other"
        sshfs_command = (
            f"sudo -E sshfs {sshfs_options} :{self.local_dir} {self.remote_dir}"
        )

        # Because sshfs monopolizes stdout, the progress markers go to stderr
        ssh_command = inspect.cleandoc(
            f"""
            set -e
            sudo mkdir -p {self.remote_dir}

            echo TRANSIENT_SSHFS_STARTING >&2
            {sshfs_command}
            """
        )

        sshfs_config = self.ssh_config.override(
            args=["-T", "-o", "LogLevel=ERROR"] + self.ssh_config.args
        )
        client = ssh.SshClient(sshfs_config, command=ssh_command)

        sftp_proc = subprocess.Popen(
            [get_sftp_server(name=sshfs_config.sftp_server_bin_name), "-e"],
            stdin=subprocess.PIPE,
            stdout=subprocess.PIPE,
            preexec_fn=lambda: linux.set_death_signal(signal.SIGTERM),
        )

        with self.sshfs_sem:
            logging.info(f"Sending sshfs mount command '{sshfs_command}'")
            ssh_proc = client.connect(
                timeout=self.ssh_timeout,
                stdin=sftp_proc.stdout,
                stdout=sftp_proc.stdin,
                stderr=subprocess.PIPE,
            )

            # Everything from here on out simply verifies that nothing went wrong.
            assert ssh_proc.stderr is not None

            try:
                buff = utils.read_until(
                    cast(io.BufferedReader, ssh_proc.stderr),
                    b"TRANSIENT_SSHFS_STARTING",
                    self.ssh_timeout,
                )
            except TimeoutError as e:
                ssh_proc.kill()
                raise RuntimeError(f"Timeout while waiting for SSHFS. Output: {e}")

        try:
            # Now that the SSH connection is established, the SSHFS timeout can be very short
            stderr = ssh_proc.communicate(timeout=_SSHFS_MAX_RUN_TIME)[1].decode("utf-8")
        except subprocess.TimeoutExpired:
            # Because SSHFS is communicating over stdin/out of the ssh connection, SSH
            # needs to run for as long as the mount exists. Instead of waiting until the
            # connection is closed, we wait a short time so SSHFS has a chance to fail.
            # Timing out is expected.
            pass
        else:
            sftp_proc.kill()
            raise RuntimeError(f"SSHFS mount failed with: {stderr}")

        # Verify that the server didn't die while communicate() waited for the client
        assert not sftp_proc.poll()

        # Wake up the main thread
        self.is_complete.set()

        if ssh_proc.wait():
            stderr = ssh_proc.communicate()[1].decode("utf-8")
            if "closed by remote host" not in stderr:
                raise RuntimeError(f"SSHFS mount died with: {stderr}")
Example #32
def make_readme_file(
    dataset_name,
    doi_or_link,
    url,
    citation,
    description,
    variables,
    manipulation,
    download_logging="None",
    email=None,
):
    """
    Adheres to the UP group's (ETHZ) readme prerequisites.

    Parameters
    ----------
    dataset_name: str
        The name of the dataset that will be at the top of the file
    doi_or_link: str
        The link to where the dataset can be downloaded or more info
        can be fetched
    url: str
        The url used to download the data - may be useful for other
        downloaders. May contain wildcards and placeholders.
    citation: str
        Just making it easier for people to cite the dataset. If there
        is more than one doi, please cite that too. This may be the case
        where a dataset is published alongside a paper.
    description: str
        The description of the data - can be copied directly from the website
        of the data.
    manipulation: str
        Any manipulation or changes you make to the data before saving.
    variables: list
        A list of the names of the variables that are downloaded
    download_logging: str
        The path to the download logging. Defaults to None
    email: str
        Defaults to <username>@ethz.ch if not provided.

    """
    import inspect
    import os
    import pwd
    from textwrap import wrap

    import pandas as pd

    if email is None:
        username = pwd.getpwuid(os.getuid())[0]
        email = f"{username}@ethz.ch"
    today = pd.Timestamp.today().strftime("%Y-%m-%d")

    w = "\n" + " " * 4
    if variables == []:
        variables = ""
    elif isinstance(variables, list):
        variables = f"{w}Variables:{w}" + f"{w}".join(
            ["- " + v for v in variables])
    else:
        variables = ""

    citation = w.join(wrap(citation.replace("\n", " "), 80))
    description = w.join(wrap(description.replace("\n", " "), 80))
    manipulation = w.join(wrap(manipulation.replace("\n", " "), 80))

    readme_text = inspect.cleandoc(f"""
    {'='*len(dataset_name)}
    {dataset_name}
    {'='*len(dataset_name)}

    Contact: {email}
    Date:    {today}
    Source:  {doi_or_link}
    URL:     {url}
    Logging: {download_logging}


    ------------
    Dataset info
    ------------
    {citation}

    {description}
    {variables}

    ------------------
    Dataset processing
    ------------------
    {manipulation}



    readme file was automatically created using netCDFdownloader tool
    https://github.com/lukegre/netCDF-Downloader

    """)

    return readme_text
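
A usage sketch for the helper above; every argument value below is invented purely for illustration.

readme = make_readme_file(
    dataset_name="Example SST dataset",
    doi_or_link="https://doi.org/10.0000/example",
    url="https://example.org/data/sst_{year}.nc",
    citation="Example Author (2020). Example SST dataset. Example Journal.",
    description="Monthly sea-surface temperature fields on a regular grid.",
    variables=["sst", "sst_anomaly"],
    manipulation="Regridded to 1x1 degree and stored as netCDF4.",
    download_logging="logs/sst_download.log",
    email="user@example.com",
)
# inspect.cleandoc strips the function-level indentation from the f-string
# template, so the printed readme starts at column zero.
print(readme)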
Example #33
 def help(self):
     doc = self.__doc__
     translator = getattr(self, "__translator__", lambda s: s)
     if doc:
         return inspect.cleandoc(translator(doc))
Example #34
 def format_field(value, format_spec):
     # Indent all lines, except the first one (which reuses the indent of
     # the format string).
     return textwrap.indent(inspect.cleandoc(value),
                            format_spec)[len(format_spec):]
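
A self-contained sketch of the same idea; the helper is restated as a plain function so it can be run directly, and the sample text is invented.

import inspect
import textwrap

def format_field(value, format_spec):
    # Indent every line by format_spec, then drop that prefix from the first
    # line so it reuses the indentation already present in the format string.
    return textwrap.indent(inspect.cleandoc(value), format_spec)[len(format_spec):]

note = """
    first line
    second line
"""
print("    note: " + format_field(note, "    "))
#     note: first line
#     second line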
Example #35
def get_doc(obj):
    """Return a two-tuple of object's first line and rest of docstring."""
    docstring = obj.__doc__
    if not docstring:
        return "", ""
    return inspect.cleandoc(docstring).partition("\n\n")[::2]
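
A quick sketch of the return value, assuming the get_doc helper above is in scope; the Widget class and undocumented function are made up.

class Widget:
    """Render a widget.

    Widgets are rendered lazily and cached between calls.
    """

def undocumented():
    pass

summary, body = get_doc(Widget)
assert summary == "Render a widget."
assert body == "Widgets are rendered lazily and cached between calls."
assert get_doc(undocumented) == ("", "")  # no docstring -> two empty strings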
Example #36
File: lib.py Project: ro-i/tumcsbot
class Response:
    """Some useful methods for building a response message."""

    admin_err_msg: str = cleandoc("""
        Hi {}!
        You need to be administrator of this organization in order to execute \
        this command.
        """)
    command_not_found_msg: str = cleandoc("""
        Hi {}!
        Unfortunately, I currently cannot understand what you wrote to me.
        Try "help" to get a glimpse of what I am capable of. :-)
        """)
    exception_msg: str = cleandoc("""
        Hi {}!
        An exception occurred while executing your request.
        Did you try to hack me? ;-)
        """)
    error_msg: str = cleandoc("""
        Sorry, {}, an error occurred while executing your request.
        """)
    greet_msg: str = 'Hi {}! :-)'
    ok_emoji: str = 'ok'
    no_emoji: str = 'cross_mark'

    def __init__(self, message_type: MessageType, response: Dict[str,
                                                                 Any]) -> None:
        self.message_type: MessageType = message_type
        self.response: Dict[str, Any] = response

    def __repr__(self) -> str:
        return self.__str__()

    def __str__(self) -> str:
        return '({}, {})'.format(self.message_type, str(self.response))

    def is_none(self) -> bool:
        """Check whether this response has the MessageType 'None'."""
        return self.message_type == MessageType.NONE

    @classmethod
    def build_message(cls,
                      message: Optional[Dict[str, Any]],
                      content: str,
                      msg_type: Optional[str] = None,
                      to: Optional[Union[str, int, List[int],
                                         List[str]]] = None,
                      subject: Optional[str] = None) -> 'Response':
        """Build a message.

        Arguments:
        ----------
        message    The message to respond to.
                       May be explicitly set to None. In this case,
                       'msg_type', 'to' (and 'subject' if 'msg_type'
                       is 'stream') have to be specified.
        content    The content of the response.
        msg_type   Determine if the response should be a stream or a
                   private message. ('stream', 'private')
                   [optional]
        to         If it is a private message:
                       Either a list containing integer user IDs
                       or a list containing string email addresses.
                   If it is a stream message:
                       Either the name or the integer ID of a stream.
                   [optional]
        subject    The topic the message should be added to (only for
                   stream messages).
                   [optional]

        The optional arguments are inferred from 'message' if provided.

        Return a Response object.
        """
        if message is None and (msg_type is None or to is None or
                                (msg_type == 'stream' and subject is None)):
            return cls.none()

        if message is not None:
            if msg_type is None:
                msg_type = message['type']
            private: bool = msg_type == 'private'

            if to is None:
                to = message['sender_email'] if private else message[
                    'stream_id']

            if subject is None:
                subject = message['subject'] if not private else ''

        # 'subject' field is ignored for private messages
        # see https://zulip.com/api/send-message#parameter-topic
        return cls(
            MessageType.MESSAGE,
            dict(**{
                'type': msg_type,
                'to': to,
                'subject': subject,
                'content': content
            }))

    @classmethod
    def build_reaction(cls, message: Dict[str, Any], emoji: str) -> 'Response':
        """Build a reaction response.

        Arguments:
        ----------
        message   The message to react on.
        emoji     The emoji to react with.
        """
        return cls(MessageType.EMOJI,
                   dict(message_id=message['id'], emoji_name=emoji))

    @classmethod
    def build_reaction_from_id(cls, message_id: int, emoji: str) -> 'Response':
        """Build a reaction response.

        Arguments:
        ----------
        message_id   The id of the message to react on.
        emoji        The emoji to react with.
        """
        return cls(MessageType.EMOJI,
                   dict(message_id=message_id, emoji_name=emoji))

    @classmethod
    def admin_err(cls, message: Dict[str, Any]) -> 'Response':
        """The user has not sufficient rights.

        Tell the user that they have not administrator rights. Relevant
        for some commands intended to be exclusively used by admins.
        """
        return cls.build_message(
            message, cls.admin_err_msg.format(message['sender_full_name']))

    @classmethod
    def command_not_found(cls, message: Dict[str, Any]) -> 'Response':
        """Tell the user that his command could not be found."""
        return cls.build_reaction(message, 'question')

    @classmethod
    def error(cls, message: Dict[str, Any]) -> 'Response':
        """Tell the user that an error occurred."""
        return cls.build_message(
            message, cls.error_msg.format(message['sender_full_name']))

    @classmethod
    def exception(cls, message: Dict[str, Any]) -> 'Response':
        """Tell the user that an exception occurred."""
        return cls.build_message(
            message, cls.exception_msg.format(message['sender_full_name']))

    @classmethod
    def greet(cls, message: Dict[str, Any]) -> 'Response':
        """Greet the user."""
        return cls.build_message(
            message, cls.greet_msg.format(message['sender_full_name']))

    @classmethod
    def ok(cls, message: Dict[str, Any]) -> 'Response':
        """Return an "ok"-reaction."""
        return cls.build_reaction(message, cls.ok_emoji)

    @classmethod
    def no(cls, message: Dict[str, Any]) -> 'Response':
        """Return a "no"-reaction."""
        return cls.build_reaction(message, cls.no_emoji)

    @classmethod
    def none(cls) -> 'Response':
        """No response."""
        return cls(MessageType.NONE, {})
Example #37
    def __init__(
        self,
        path: str,
        endpoint: Callable,
        *,
        response_model: Type[Any] = None,
        status_code: int = 200,
        tags: List[str] = None,
        dependencies: Sequence[params.Depends] = None,
        summary: str = None,
        description: str = None,
        response_description: str = "Successful Response",
        responses: Dict[Union[int, str], Dict[str, Any]] = None,
        deprecated: bool = None,
        name: str = None,
        methods: Optional[Union[Set[str], List[str]]] = None,
        operation_id: str = None,
        response_model_include: Union[SetIntStr, DictIntStrAny] = None,
        response_model_exclude: Union[SetIntStr, DictIntStrAny] = set(),
        response_model_by_alias: bool = False,
        response_model_exclude_unset: bool = False,
        response_model_exclude_defaults: bool = False,
        response_model_exclude_none: bool = True,
        include_in_schema: bool = True,
        response_class: Optional[Type[Response]] = None,
        dependency_overrides_provider: Any = None,
        callbacks: Optional[List["APIRoute"]] = None,
    ) -> None:
        self.path = path
        self.endpoint = endpoint
        self.name = get_name(endpoint) if name is None else name
        self.path_regex, self.path_format, self.param_convertors = compile_path(path)
        if methods is None:
            methods = ["GET"]
        self.methods = set([method.upper() for method in methods])
        self.unique_id = generate_operation_id_for_path(
            name=self.name, path=self.path_format, method=list(methods)[0]
        )
        self.response_model = response_model
        if self.response_model:
            assert (
                status_code not in STATUS_CODES_WITH_NO_BODY
            ), f"Status code {status_code} must not have a response body"
            response_name = "Response_" + self.unique_id
            self.response_field = create_response_field(
                name=response_name, type_=self.response_model
            )
            # Create a clone of the field, so that a Pydantic submodel is not returned
            # as is just because it's an instance of a subclass of a more limited class
            # e.g. UserInDB (containing hashed_password) could be a subclass of User
            # that doesn't have the hashed_password. But because it's a subclass, it
            # would pass the validation and be returned as is.
            # By being a new field, no inheritance will be passed as is. A new model
            # will be always created.
            self.secure_cloned_response_field: Optional[
                ModelField
            ] = self.response_field #create_cloned_field(self.response_field)
        else:
            self.response_field = None  # type: ignore
            self.secure_cloned_response_field = None
        self.status_code = status_code
        self.tags = tags or []
        if dependencies:
            self.dependencies = list(dependencies)
        else:
            self.dependencies = []
        self.summary = summary
        self.description = description or inspect.cleandoc(self.endpoint.__doc__ or "")
        # if a "form feed" character (page break) is found in the description text,
        # truncate description text to the content preceding the first "form feed"
        self.description = self.description.split("\f")[0]
        self.response_description = response_description
        self.responses = responses or {}
        response_fields = {}
        for additional_status_code, response in self.responses.items():
            assert isinstance(response, dict), "An additional response must be a dict"
            model = response.get("model")
            if model:
                assert (
                    additional_status_code not in STATUS_CODES_WITH_NO_BODY
                ), f"Status code {additional_status_code} must not have a response body"
                response_name = f"Response_{additional_status_code}_{self.unique_id}"
                response_field = create_response_field(name=response_name, type_=model)
                response_fields[additional_status_code] = response_field
        if response_fields:
            self.response_fields: Dict[Union[int, str], ModelField] = response_fields
        else:
            self.response_fields = {}
        self.deprecated = deprecated
        self.operation_id = operation_id
        self.response_model_include = response_model_include
        self.response_model_exclude = response_model_exclude
        self.response_model_by_alias = response_model_by_alias
        self.response_model_exclude_unset = response_model_exclude_unset
        self.response_model_exclude_defaults = response_model_exclude_defaults
        self.response_model_exclude_none = response_model_exclude_none
        self.include_in_schema = include_in_schema
        self.response_class = response_class

        assert callable(endpoint), f"An endpoint must be a callable"
        self.dependant = get_dependant(path=self.path_format, call=self.endpoint)
        for depends in self.dependencies[::-1]:
            self.dependant.dependencies.insert(
                0,
                get_parameterless_sub_dependant(depends=depends, path=self.path_format),
            )
        self.body_field = get_body_field(dependant=self.dependant, name=self.unique_id)
        self.dependency_overrides_provider = dependency_overrides_provider
        self.callbacks = callbacks
        self.app = request_response(self.get_route_handler())
Example #38
    def __init__(self, name="Trim"):

        GafferImage.ImageProcessor.__init__(self, name)

        self["flipHorizontal"] = Gaffer.BoolPlug()
        self["flipVertical"] = Gaffer.BoolPlug()
        self["rotate"] = Gaffer.FloatPlug()
        self["applyCrop"] = Gaffer.BoolPlug(defaultValue=False)
        self["crop"] = Gaffer.Box2fPlug(
            defaultValue=imath.Box2f(imath.V2f(0), imath.V2f(1)))

        mirror = GafferImage.Mirror()
        self["__Mirror"] = mirror
        mirror["horizontal"].setInput(self["flipHorizontal"])
        mirror["vertical"].setInput(self["flipVertical"])
        mirror["in"].setInput(self["in"])

        transform = GafferImage.ImageTransform()
        self["__Transform"] = transform
        transform["transform"]["rotate"].setInput(self["rotate"])
        transform["filter"].setValue("sharp-gaussian")
        transform["in"].setInput(mirror["out"])

        # Reset the display window to the data window, required for rotation without
        # the crop region being applied.
        transformCorrectionCrop = GafferImage.Crop()
        self["__TransformCorrectionCrop"] = transformCorrectionCrop
        transformCorrectionCrop["in"].setInput(transform["out"])
        transformCorrectionCrop["areaSource"].setValue(1)  # dataWindow

        transformCenterExpr = Gaffer.Expression()
        self["__Expression_Transform"] = transformCenterExpr
        transformCenterExpr.setExpression(
            inspect.cleandoc("""
				import imath
				f = parent["__Transform"]["in"]["format"]
				parent["__Transform"]["transform"]["pivot"] = imath.V2f( f.width() / 2.0, f.height() / 2.0 )
			"""), "python")

        crop = GafferImage.Crop()
        self["__Crop"] = crop
        crop["in"].setInput(transformCorrectionCrop["out"])

        options = GafferScene.StandardOptions()
        self["__CropWindow"] = options
        options["options"]["renderCropWindow"]["value"].setInput(self["crop"])

        cropExpr = Gaffer.Expression()
        self["__Expression_Crop"] = cropExpr
        cropExpr.setExpression(
            inspect.cleandoc("""
			import imath

			f = parent["__Crop"]["in"]["format"]
			w = parent["__CropWindow"]["options"]["renderCropWindow"]["value"]

			parent["__Crop"]["area"] = imath.Box2i(
				imath.V2i( f.width() * w.min().x, f.height() * ( 1 - w.max().y ) ),
				imath.V2i( f.width() * w.max().x, f.height() * ( 1 - w.min().y ) )
			)
			"""), "python")

        parentPath = GafferAstro.ParentPath()
        self["__ParentPath"] = parentPath

        imageMeta = GafferImage.ImageMetadata()
        self["__ImageMetadata"] = imageMeta
        imageMeta["metadata"].addChild(
            Gaffer.NameValuePlug("gaffer:sourceScene",
                                 Gaffer.StringPlug("value"), True, "member1"))
        imageMeta["user"].addChild(Gaffer.StringPlug("parentPath"))
        imageMeta["user"]["parentPath"].setInput(parentPath["parentPath"])
        imageMeta["in"].setInput(transformCorrectionCrop["out"])

        self["__Expression_Meta"] = Gaffer.Expression()
        expr = 'parent["__ImageMetadata"]["metadata"]["member1"]["value"] = parent["__ImageMetadata"]["user"]["parentPath"] + ".__CropWindow.out"'
        self["__Expression_Meta"].setExpression(expr, "python")

        switch = Gaffer.Switch()
        self["__Switch"] = switch
        switch.setup(crop["out"])
        switch["in"][0].setInput(imageMeta["out"])
        switch["in"][1].setInput(crop["out"])
        switch["index"].setInput(self["applyCrop"])

        self["out"].setInput(switch["out"])
Example #39
        else:
            return parens_fmt.format('{},'.format(first))


dict_ = lambda pairs: '{' + str.join(', ', ('{}: {}'.format(key, val)
                                            for (key, val) in pairs)) + '}'
list_ = lambda exprs: '[' + str.join(', ', exprs) + ']'
set_ = lambda exprs: ('{' + str.join(', ', exprs) + '}') if exprs else 'set()'

escaped_literal = lambda x: escape_brackets_for_format(repr(x))
esc_lit = escaped_literal

literal = lambda x: repr(x)
lit = literal

doc = lambda s: cleandoc(s).split('\n')

default_if_blank = lambda expr, default: expr if expr else default
join = lambda *strs, sep='': str.join(sep, strs)
join_with_op = lambda exprs, op, zero: default_if_blank(str.join(
    ' ' + op + ' ', ('({})'.format(expr) for expr in exprs)),
                                                        default=zero)

# TODO: add util to embed double-bracketed format strings like "{{x}}" - `lit` will butcher them
# Maybe a regex replace "(\{+)" -> "$1\{",   "(\}+)" -> "$1\}" ?
# It'd be inefficient to do on every line, but can work for format strings

##########################
# Higher level functions #
##########################
Example #40
    raise RuntimeError('`sys.argv` must be an instance of class `list`.')

# Check if all the command line arguments are in the right format.
for arg in sys.argv:
    if not (isinstance(arg, str) and arg.__class__ is str):
        raise RuntimeError(
            'Command line arguments must be instances of class `str`')
try:
    del arg
except (NameError, UnboundLocalError):
    pass

# If no command line arguments were given, print the documentation and exit.
if len(sys.argv) < 2:
    # Print the documentation.
    print(inspect.cleandoc(__doc__))

    # Exit.
    sys.exit(0)

# Extract the indices of pluses (characters, actually strings `'+'`) among
# command line arguments.  Assume pluses are also on indices 0 and
# `len(sys.argv)`.
pluses = [0]
pluses += [i for i, arg in enumerate(sys.argv) if arg == '+']
try:
    del i
except (NameError, UnboundLocalError):
    pass
try:
    del arg
Example #41
 def docstr(src, result):
     c = Script(src, sys_path=[jedi_path]).complete()[0]
     assert c.docstring(raw=True, fast=False) == cleandoc(result)
class Migration(migrations.Migration):

    dependencies = [
        ('timeside_server', '0007_auto_20191204_1146'),
    ]

    operations = [
        migrations.AlterField(
            model_name='experience',
            name='experiences',
            field=models.ManyToManyField(
                blank=True,
                help_text=_("Include other experiences in an experience."),
                related_name='other_experiences',
                to='timeside_server.Experience',
                verbose_name='other experiences'),
        ),
        migrations.AlterField(
            model_name='item',
            name='audio_duration',
            field=models.FloatField(
                blank=True,
                help_text=_("Duration of audio track."),
                null=True,
                verbose_name='duration'),
        ),
        migrations.AlterField(
            model_name='item',
            name='external_id',
            field=models.CharField(
                blank=True,
                help_text=_(cleandoc("""
                    Provider's id of the audio source.\n
                    e.g. for Deezer preview: 4763165\n
                    e.g. for YouTube: oRdxUFDoQe0
                    """)),
                max_length=256,
                verbose_name='external_id'),
        ),
        migrations.AlterField(
            model_name='item',
            name='external_uri',
            field=models.CharField(
                blank=True,
                help_text=_(cleandoc("""
                    Provider's URI of the audio source.\n
                    e.g. for Deezer preview: http://www.deezer.com/track/4763165\n
                    e.g. for YouTube: https://www.youtube.com/watch?v=oRdxUFDoQe0
                    """)),
                max_length=1024,
                verbose_name='external_uri'),
        ),
        migrations.AlterField(
            model_name='item',
            name='provider',
            field=models.ForeignKey(
                blank=True,
                help_text=_("Audio provider (e.g. Deezer, Youtube, etc.)"),
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                to='timeside_server.Provider',
                verbose_name='provider'),
        ),
        migrations.AlterField(
            model_name='item',
            name='source_file',
            field=models.FileField(
                blank=True,
                help_text=_("Audio file to process."),
                max_length=1024,
                upload_to='items/%Y/%m/%d',
                verbose_name='file'),
        ),
        migrations.AlterField(
            model_name='item',
            name='source_url',
            field=models.URLField(
                blank=True,
                help_text=_("URL of a streamable audio source to process."),
                max_length=1024,
                verbose_name='URL'),
        ),
        migrations.AlterField(
            model_name='provider',
            name='source_access',
            field=models.BooleanField(
                default=False,
                help_text=_(
                    "Whether or not the audio is "
                    "freely available from the provider."
                    ),
            ),
        ),
        migrations.AlterField(
            model_name='result',
            name='file',
            field=models.FileField(
                blank=True,
                help_text=_(cleandoc("""
                    Non numerical result stored in a file
                    (image, transcoded audio, etc.)
                    """)),
                max_length=1024,
                upload_to='results/%Y/%m/%d',
                verbose_name='Output file'),
        ),
        migrations.AlterField(
            model_name='result',
            name='hdf5',
            field=models.FileField(
                blank=True,
                help_text=_(
                    "Numerical result of the processing "
                    "stored in an hdf5 file."
                    ),
                max_length=1024,
                upload_to='results/%Y/%m/%d',
                verbose_name='HDF5 result file'),
        ),
        migrations.AlterField(
            model_name='result',
            name='item',
            field=models.ForeignKey(
                blank=True,
                help_text=_("Item on which a preset has been applied."),
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name='results',
                to='timeside_server.Item',
                verbose_name='item'),
        ),
        migrations.AlterField(
            model_name='result',
            name='preset',
            field=models.ForeignKey(
                blank=True,
                help_text=_("Preset applied on an item."),
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name='results',
                to='timeside_server.Preset',
                verbose_name='preset'),
        ),
        migrations.AlterField(
            model_name='result',
            name='run_time',
            field=models.DurationField(
                blank=True,
                help_text=_("Duration of the result computing."),
                null=True,
                verbose_name='Run time'),
        ),
        migrations.AlterField(
            model_name='result',
            name='status',
            field=models.IntegerField(
                choices=[
                    (0,
                     'failed'),
                    (1,
                     'draft'),
                    (2,
                     'pending'),
                    (3,
                     'running'),
                    (4,
                     'done')],
                default=1,
                help_text=_(cleandoc(f"""
                    Status of the task giving the result:\n
                    failed: {_FAILED}\n
                    draft: {_DRAFT}\n
                    pending: {_PENDING}\n
                    running: {_RUNNING}\n
                    done: {_DONE}
                    """)),
                verbose_name='status'),
        ),
        migrations.AlterField(
            model_name='selection',
            name='selections',
            field=models.ManyToManyField(
                blank=True,
                help_text=_("Include other selections in an selection."),
                related_name='other_selections',
                to='timeside_server.Selection',
                verbose_name='other selections'),
        ),
        migrations.AlterField(
            model_name='task',
            name='experience',
            field=models.ForeignKey(
                blank=True,
                help_text=_("Experience prossessed in the task."),
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name='task',
                to='timeside_server.Experience',
                verbose_name='experience'),
        ),
        migrations.AlterField(
            model_name='task',
            name='item',
            field=models.ForeignKey(
                blank=True,
                help_text=_("Single item prossessed in the task."),
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name='task',
                to='timeside_server.Item',
                verbose_name='item'),
        ),
        migrations.AlterField(
            model_name='task',
            name='selection',
            field=models.ForeignKey(
                blank=True,
                help_text=_("Selection prossessed in the task."),
                null=True,
                on_delete=django.db.models.deletion.SET_NULL,
                related_name='task',
                to='timeside_server.Selection',
                verbose_name='selection'),
        ),
        migrations.AlterField(
            model_name='task',
            name='status',
            field=models.IntegerField(
                choices=[
                    (0,
                     'failed'),
                    (1,
                     'draft'),
                    (2,
                     'pending'),
                    (3,
                     'running'),
                    (4,
                     'done')],
                default=1,
                help_text=_(cleandoc(f"""
                    Task's status:\n
                    failed: {_FAILED}\n
                    draft: {_DRAFT}\n
                    pending: {_PENDING}\n
                    running: {_RUNNING}\n
                    done: {_DONE}
                    """)),
                verbose_name='status'),
        ),
    ]
Exemplo n.º 43
0
    def generate_c_code(self, **kwargs):
        res = ''
        res += '\n'.join([c_helper.generate_local_include(h) for h in self.get_c_op_include_header()])
        res += '\n\n'

        # param type
        res += self.get_c_param_type()
        res += '\n\n'

        ndim = self.output_tensor_ndims[0]
        if ndim not in (3, 4, 5):
            raise ValueError('unsupported output tensor rank: {} (expected 3, 4 or 5)'.format(ndim))

        kernel_shape = self.attrs['kernel_shape']
        pads = self.attrs['pads']
        storage_order = self.attrs['storage_order']
        strides = self.attrs['strides']
        count_include_pad = self.attrs['count_include_pad']

        if (ndim == 3):
            TemplateStatements = '''
                const int  X_n = {X_d0};
                const int  X_c = {X_d1};
                const int  X_w = {X_d2};
                const int  Y_n = {Y_d0};
                const int  Y_c = {Y_d1};
                const int  Y_w = {Y_d2};
                const int  kernel_shape_w = {kernel_shape_w};
                const int  pad_w_begin = {pad_w_begin};
                const int  pad_w_end = {pad_w_end};
                const int  stride_w = {stride_w};
                const int  storage_order = {storage_order};
                const int  count_include_pad = {count_include_pad};

                const int kernel_shape_w_min = -pad_w_begin;
                const int kernel_shape_w_max = (kernel_shape_w - pad_w_begin);

                memset( (void *)Y, 0.0, sizeof(Y[0][0][0]) * Y_n * Y_c * Y_w );

                for (int n=0; n<Y_n; n++) {{
                    for (int c=0; c<Y_c; c++) {{
                        for (int w=0; w<Y_w; w++) {{
                            {t} pool;
                            int  data_cnt;

                            pool = 0.0;
                            data_cnt = 0;
                            for (int kw=kernel_shape_w_min; kw<kernel_shape_w_max; kw++) {{
                                if ((w*stride_w+kw < 0) || (w*stride_w+kw >= X_w)) {{
                                    if (count_include_pad != 0) {{
                                        data_cnt++;
                                    }}
                                }} else {{
                                    pool += X[n][c][w*stride_w+kw];
                                    data_cnt++;
                                }}
                            }}
                            if (data_cnt > 0) {{
                                Y[n][c][w] = pool / data_cnt;
                            }}
                        }}
                    }}
                }}
            '''
            mapping = {}
            mapping.update({'X_d0': self.input_tensor_shapes[0][0]})
            mapping.update({'X_d1': self.input_tensor_shapes[0][1]})
            mapping.update({'X_d2': self.input_tensor_shapes[0][2]})
            mapping.update({'Y_d0': self.output_tensor_shapes[0][0]})
            mapping.update({'Y_d1': self.output_tensor_shapes[0][1]})
            mapping.update({'Y_d2': self.output_tensor_shapes[0][2]})
            mapping.update({'kernel_shape_w': kernel_shape[0]})
            mapping.update({'pad_w_begin': pads[0]})
            mapping.update({'pad_w_end':   pads[1]})
            mapping.update({'stride_w': strides[0]})
            mapping.update({'storage_order': storage_order})
            mapping.update({'count_include_pad': count_include_pad})
            mapping.update({'t': data_type.np2c(self.output_tensor_dtypes[0])})

        elif (ndim == 4):
            TemplateStatements = '''
                const int  X_n = {X_d0};
                const int  X_c = {X_d1};
                const int  X_h = {X_d2};
                const int  X_w = {X_d3};
                const int  Y_n = {Y_d0};
                const int  Y_c = {Y_d1};
                const int  Y_h = {Y_d2};
                const int  Y_w = {Y_d3};
                const int  kernel_shape_h = {kernel_shape_h};
                const int  kernel_shape_w = {kernel_shape_w};
                const int  pad_h_begin = {pad_h_begin};
                const int  pad_w_begin = {pad_w_begin};
                const int  pad_h_end = {pad_h_end};
                const int  pad_w_end = {pad_w_end};
                const int  stride_h = {stride_h};
                const int  stride_w = {stride_w};
                const int  storage_order = {storage_order};
                const int  count_include_pad = {count_include_pad};

                const int kernel_shape_h_min = -pad_h_begin;
                const int kernel_shape_h_max = (kernel_shape_h - pad_h_begin);
                const int kernel_shape_w_min = -pad_w_begin;
                const int kernel_shape_w_max = (kernel_shape_w - pad_w_begin);

                memset( (void *)Y, 0.0, sizeof(Y[0][0][0][0]) * Y_n * Y_c * Y_h * Y_w );

                for (int n=0; n<Y_n; n++) {{
                    for (int c=0; c<Y_c; c++) {{
                        if (storage_order == 0) {{
                            for (int h=0; h<Y_h; h++) {{
                                for (int w=0; w<Y_w; w++) {{
                                    {t} pool;
                                    int  data_cnt;

                                    pool = 0.0;
                                    data_cnt = 0;
                                    for (int kh=kernel_shape_h_min; kh<kernel_shape_h_max; kh++) {{
                                        if ((h*stride_h+kh < 0) || (h*stride_h+kh >= X_h)) {{
                                            if (count_include_pad != 0) {{
                                                data_cnt += kernel_shape_w;
                                            }}
                                            continue;
                                        }}
                                        for (int kw=kernel_shape_w_min; kw<kernel_shape_w_max; kw++) {{
                                            if ((w*stride_w+kw < 0) || (w*stride_w+kw >= X_w)) {{ 
                                                if (count_include_pad != 0) {{
                                                    data_cnt++;
                                                }}
                                            }} else {{
                                                pool += X[n][c][h*stride_h+kh][w*stride_w+kw];
                                                data_cnt++;
                                            }}
                                        }}
                                    }}
                                    if (data_cnt > 0) {{
                                        Y[n][c][h][w] = pool / data_cnt;
                                    }}
                                }}
                            }}
                        }} else {{
                            for (int w=0; w<Y_w; w++) {{
                                for (int h=0; h<Y_h; h++) {{
                                    {t} pool;
                                    int  data_cnt;

                                    pool = 0.0;
                                    data_cnt = 0;
                                    for (int kh=kernel_shape_h_min; kh<kernel_shape_h_max; kh++) {{
                                        if ((h*stride_h+kh < 0) || (h*stride_h+kh >= X_h)) {{
                                            if (count_include_pad != 0) {{
                                                data_cnt += kernel_shape_w;
                                            }}
                                            continue;
                                        }}
                                        for (int kw=kernel_shape_w_min; kw<kernel_shape_w_max; kw++) {{
                                            if ((w*stride_w+kw < 0) || (w*stride_w+kw >= X_w)) {{
                                                if (count_include_pad != 0) {{
                                                    data_cnt++;
                                                }}
                                            }} else {{
                                                pool += X[n][c][h*stride_h+kh][w*stride_w+kw];
                                                data_cnt++;
                                            }}
                                        }}
                                    }}
                                    if (data_cnt > 0) {{
                                        Y[n][c][h][w] = pool / data_cnt;
                                    }}
                                }}
                            }}
                        }}
                    }}
                }}
            '''
            mapping = {}
            mapping.update({'X_d0': self.input_tensor_shapes[0][0]})
            mapping.update({'X_d1': self.input_tensor_shapes[0][1]})
            mapping.update({'X_d2': self.input_tensor_shapes[0][2]})
            mapping.update({'X_d3': self.input_tensor_shapes[0][3]})
            mapping.update({'Y_d0': self.output_tensor_shapes[0][0]})
            mapping.update({'Y_d1': self.output_tensor_shapes[0][1]})
            mapping.update({'Y_d2': self.output_tensor_shapes[0][2]})
            mapping.update({'Y_d3': self.output_tensor_shapes[0][3]})
            mapping.update({'kernel_shape_h': kernel_shape[0]})
            mapping.update({'kernel_shape_w': kernel_shape[1]})
            mapping.update({'pad_h_begin': pads[0]})
            mapping.update({'pad_h_end':   pads[2]})
            mapping.update({'pad_w_begin': pads[1]})
            mapping.update({'pad_w_end':   pads[3]})
            mapping.update({'stride_h': strides[0]})
            mapping.update({'stride_w': strides[1]})
            mapping.update({'storage_order': storage_order})
            mapping.update({'count_include_pad': count_include_pad})
            mapping.update({'t': data_type.np2c(self.output_tensor_dtypes[0])})

        elif (ndim == 5):
            TemplateStatements = '''
                const int  X_n = {X_d0};
                const int  X_c = {X_d1};
                const int  X_d = {X_d2};
                const int  X_h = {X_d3};
                const int  X_w = {X_d4};
                const int  Y_n = {Y_d0};
                const int  Y_c = {Y_d1};
                const int  Y_d = {Y_d2};
                const int  Y_h = {Y_d3};
                const int  Y_w = {Y_d4};
                const int  kernel_shape_d = {kernel_shape_d};
                const int  kernel_shape_h = {kernel_shape_h};
                const int  kernel_shape_w = {kernel_shape_w};
                const int  pad_d_begin = {pad_d_begin};
                const int  pad_h_begin = {pad_h_begin};
                const int  pad_w_begin = {pad_w_begin};
                const int  pad_d_end = {pad_d_end};
                const int  pad_h_end = {pad_h_end};
                const int  pad_w_end = {pad_w_end};
                const int  stride_d = {stride_d};
                const int  stride_h = {stride_h};
                const int  stride_w = {stride_w};
                const int  storage_order = {storage_order};
                const int  count_include_pad = {count_include_pad};

                const int kernel_shape_d_min = -pad_d_begin;
                const int kernel_shape_d_max = (kernel_shape_d - pad_d_begin);
                const int kernel_shape_h_min = -pad_h_begin;
                const int kernel_shape_h_max = (kernel_shape_h - pad_h_begin);
                const int kernel_shape_w_min = -pad_w_begin;
                const int kernel_shape_w_max = (kernel_shape_w - pad_w_begin);

                memset( (void *)Y, 0.0, sizeof(Y[0][0][0][0][0]) * Y_n * Y_c * Y_d * Y_h * Y_w );

                for (int n=0; n<Y_n; n++) {{
                    for (int c=0; c<Y_c; c++) {{
                        for (int d=0; d<Y_d; d++) {{
                            for (int h=0; h<Y_h; h++) {{
                                for (int w=0; w<Y_w; w++) {{
                                    {t} pool;
                                    int  data_cnt;

                                    pool = 0.0;
                                    data_cnt = 0;
                                    for (int kd=kernel_shape_d_min; kd<kernel_shape_d_max; kd++) {{
                                        if ((d*stride_d+kd < 0) || (d*stride_d+kd >= X_d)) {{
                                            if (count_include_pad != 0) {{
                                                data_cnt += kernel_shape_h * kernel_shape_w;
                                            }}
                                            continue;
                                        }}
                                        for (int kh=kernel_shape_h_min; kh<kernel_shape_h_max; kh++) {{
                                            if ((h*stride_h+kh < 0) || (h*stride_h+kh >= X_h)) {{
                                                if (count_include_pad != 0) {{
                                                    data_cnt += kernel_shape_w;
                                                }}
                                                continue;
                                            }}
                                            for (int kw=kernel_shape_w_min; kw<kernel_shape_w_max; kw++) {{
                                                if ((w*stride_w+kw < 0) || (w*stride_w+kw >= X_w)) {{
                                                    if (count_include_pad != 0) {{
                                                        data_cnt++;
                                                    }}
                                                }} else {{
                                                    pool += X[n][c][d*stride_d+kd][h*stride_h+kh][w*stride_w+kw];
                                                    data_cnt++;
                                                }}
                                            }}
                                        }}
                                    }}
                                    if (data_cnt > 0) {{
                                        Y[n][c][d][h][w] = pool / data_cnt;
                                    }}
                                }}
                            }}
                        }}
                    }}
                }}
            '''
            mapping = {}
            mapping.update({'X_d0': self.input_tensor_shapes[0][0]})
            mapping.update({'X_d1': self.input_tensor_shapes[0][1]})
            mapping.update({'X_d2': self.input_tensor_shapes[0][2]})
            mapping.update({'X_d3': self.input_tensor_shapes[0][3]})
            mapping.update({'X_d4': self.input_tensor_shapes[0][4]})
            mapping.update({'Y_d0': self.output_tensor_shapes[0][0]})
            mapping.update({'Y_d1': self.output_tensor_shapes[0][1]})
            mapping.update({'Y_d2': self.output_tensor_shapes[0][2]})
            mapping.update({'Y_d3': self.output_tensor_shapes[0][3]})
            mapping.update({'Y_d4': self.output_tensor_shapes[0][4]})
            mapping.update({'kernel_shape_d': kernel_shape[0]})
            mapping.update({'kernel_shape_h': kernel_shape[1]})
            mapping.update({'kernel_shape_w': kernel_shape[2]})
            mapping.update({'pad_d_begin': pads[0]})
            mapping.update({'pad_d_end':   pads[3]})
            mapping.update({'pad_h_begin': pads[1]})
            mapping.update({'pad_h_end':   pads[4]})
            mapping.update({'pad_w_begin': pads[2]})
            mapping.update({'pad_w_end':   pads[5]})
            mapping.update({'stride_d': strides[0]})
            mapping.update({'stride_h': strides[1]})
            mapping.update({'stride_w': strides[2]})
            mapping.update({'storage_order': storage_order})
            mapping.update({'count_include_pad': count_include_pad})
            mapping.update({'t': data_type.np2c(self.output_tensor_dtypes[0])})

        # wrap the generated statements in the operator's C function signature
        TemplateFunction = cleandoc('''
        void {op_func_name}(void *op_param, {t} X{dims_X}, {t} Y{dims}, void *inputs_params, void* outputs_params) {{
            {statements}
        }}
        ''')

        mappingf = {}
        mappingf.update({'op_func_name': self.get_func_name()})
        mappingf.update({'X': self.input_tensor_names[0]})
        mappingf.update({'dims_X': c_helper.generate_dim_bracket(self.input_tensor_shapes[0])}) 
        mappingf.update({'Y': self.output_tensor_names[0]})
        mappingf.update({'dims': c_helper.generate_dim_bracket(self.output_tensor_shapes[0])}) 
        mappingf.update({'t': data_type.np2c(self.output_tensor_dtypes[0])})
        mappingf.update({'statements': TemplateStatements.format(**mapping)})
        res += '\n\n'
        res += TemplateFunction.format(**mappingf)

        return res
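
The C templates above rely on str.format's brace escaping: doubled braces ({{ and }}) survive the substitution pass as literal braces in the emitted C, while named fields such as {X_d0} or {t} are filled from the mapping dict. A minimal, self-contained illustration of that pattern (not taken from the generator above):

    from inspect import cleandoc

    template = cleandoc('''
        for (int i = 0; i < {n}; i++) {{
            Y[i] = X[i] * {scale};
        }}
    ''')

    # The doubled braces collapse to single ones; {n} and {scale} are filled in.
    print(template.format(n=8, scale=2))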
Exemplo n.º 44
0
def get_new_node_label(node):
    label = cleandoc("""
            {} (L{})
            {}
            """.format(node.operator_type.value, node.code_reference.lineno, node.description or ""))
    return label
Exemplo n.º 45
0
def make_mess(mess):
    return cleandoc(mess).replace('\n', ' ')
Exemplo n.º 46
0
def get_doc(self):
    return cleandoc(self.__doc__)
Exemplo n.º 47
0
from __future__ import annotations

from inspect import cleandoc
from typing import TYPE_CHECKING

import discord
from discord.ext import commands

import utils

if TYPE_CHECKING:
    from main import TTSBot

WELCOME_MESSAGE = cleandoc("""
    Hello! Someone invited me to your server `{guild}`!
    TTS Bot is a text-to-speech bot: it reads messages from a text channel and speaks them into a voice channel

    **Most commands need to be done on your server, such as `{prefix}setup` and `{prefix}join`**

    I need someone with the administrator permission to do `{prefix}setup #channel`
    You can then do `{prefix}join` in that channel and I will join your voice channel!
    Then, you can just type normal messages and I will say them, like magic!

    You can view all the commands with `{prefix}help`
    Ask questions by either responding here or asking on the support server!
""")


def setup(bot: TTSBot):
    bot.add_cog(OtherEvents(bot))


class OtherEvents(utils.CommonCog):
    @commands.Cog.listener()
    async def on_message(self, message: utils.TypedGuildMessage):
Exemplo n.º 48
0
def collapse(text):
    # type: (Text) -> Text
    return inspect.cleandoc(str(text)).replace("\n", " ")
Exemplo n.º 49
0
def testBernoulliDoc(self):
    self.assertGreater(len(ed.Bernoulli.__doc__), 0)
    self.assertTrue(
        inspect.cleandoc(tfd.Bernoulli.__init__.__doc__) in
        ed.Bernoulli.__doc__)
    self.assertEqual(ed.Bernoulli.__name__, "Bernoulli")
Exemplo n.º 50
0
from inspect import cleandoc


def new_print(val):
    print('Hello', val)


code_inner = cleandoc("""
    def bar():
        print(' Inner')
""")

with open('outer_test.py', 'w') as f:
    f.write(code_inner)

code_outer = cleandoc("""
    # import inner
    # print('Outer')
    # inner.bar()
    
    def foo():
        print('xD')
        
    if __name__ == '__main__':
        print('Main')
""")

_glob = {}
_loc = {}

exec(code_outer, _glob, _loc)
Exemplo n.º 51
0
def example_usage(self, usage):
    self._example_usage = inspect.cleandoc(usage)
Exemplo n.º 52
0
def _get_doc(self, doc):
    text = doc.getRawCommentText()
    return cleandoc(text).rstrip()
Exemplo n.º 53
0
    def output_dio_calibration_data(self,
                                    dio_mode: str,
                                    port: int = 0) -> Tuple[int, List]:
        # default return values
        expected_sequence = []
        dio_mask = 0x00000000

        if dio_mode == "awg8-mw-vsm" or dio_mode == 'microwave':  # 'new' QWG compatible microwave mode
            # based on ElecPrj_CC:src/q1asm/qwg_staircase.q1asm
            # FIXME: tests 5 of 8 bits only
            cc_prog = """
            ### DIO protocol definition:
            # DIO           QWG             AWG8        note
            # ------------- --------------- ----------- ------------------
            # DIO[31]       TRIG_2          TRIG
            # DIO[30]       TOGGLE_DS_2     TOGGLE_DS   hardware generated
            # DIO[23:16]    CW_2            CW_2
            # DIO[15]       TRIG_1          unused
            # DIO[14]       TOGGLE_DS_1     unused
            # DIO[7:0]      CW_1            CW_1
            #

            .DEF 		cw_31_01	0x801F8001          # TRIG_2, CW_2=31, TRIG_1, CW_1=1
            .DEF 		incr		0xFFFF0001          # CW_2++, CW_1--: -0x00010000 + 0x00000001
            .DEF		duration	4			        # 20 ns periods
            .DEF 		loopCnt		31               	#

            repeat:
                    move		$cw_31_01,R0
                    move		$loopCnt,R1               	# loop counter
            inner:	seq_out		R0,$duration
                    add		    R0,$incr,R0
                    loop		R1,@inner
                    jmp		    @repeat
            """
            sequence_length = 32
            staircase_sequence = range(0, sequence_length)
            expected_sequence = [(0, list(staircase_sequence)),
                                 (1, list(staircase_sequence)),
                                 (2, list(staircase_sequence)),
                                 (3, list(staircase_sequence))]
            dio_mask = 0x80FF80FF  # TRIG=0x8000000, TRIG_1=0x00008000, CWs=0x00FF00FF

        elif dio_mode == "awg8-mw-direct-iq" or dio_mode == "novsm_microwave":

            cc_prog = """
            ### DIO protocol definition:
            # DIO           QWG             AWG8        note
            # ------------- --------------- ----------- ------------------
            # DIO[31]       TRIG_2          TRIG
            # DIO[30]       TOGGLE_DS_2     TOGGLE_DS   hardware generated
            # DIO[29:23]    CW_4            CW_4
            # DIO[22:16]    CW_3            CW_3
            # DIO[15]       TRIG_1          unused
            # DIO[14]       TOGGLE_DS_1     unused
            # DIO[13:7]     CW_2            CW_2
            # DIO[6:0]      CW_1            CW_1
            #
            # cw:
            #           incr            mask
            # CW_1=1    0x0000 0001     0000 001F
            # CW_2=31   0x0000 0080     0000 0F80
            # CW_3=1    0x0001 0000     001F 0000
            # CW_4=31   0x0080 0000     0F80 0000
            # TRIG_1    0x0000 8000
            # TRIG_2    0x8000 0000
            # sum       0x8081 8081

            .DEF        cw          0x80008000         # see above
            .DEF        incr        0x00810081
            .DEF        duration    4                  # 20 ns periods
            .DEF        loopCnt     128                #

            repeat:
                    move        $cw,R0
                    move        $loopCnt,R1                 # loop counter
            inner:  seq_out     R0,$duration
                    add         R0,$incr,R0
                    loop        R1,@inner
                    jmp         @repeat
            """
            sequence_length = 128
            staircase_sequence = range(0, sequence_length)
            expected_sequence = [(0, list(staircase_sequence)),
                                 (1, list(staircase_sequence)),
                                 (2, list(staircase_sequence)),
                                 (3, list(staircase_sequence))]
            dio_mask = 0x8F9F8F9F  # TRIG=0x8000000, TRIG_2=0x00008000, CWs=0x0F9F0F9F

        elif dio_mode == "awg8-flux" or dio_mode == "flux":
            # based on ZI_HDAWG8.py::_prepare_CC_dio_calibration_hdawg and examples/CC_examples/flux_calibration.vq1asm
            # FIXME: hardcoded slots, this is OpenQL output
            cc_prog = """
            mainLoop:
                        seq_out         0x00000000,20           # 00000000000000000000000000000000
                        seq_out         0x82498249,2            # 10000010010010011000001001001001
                        seq_out         0x84928492,2            # 10000100100100101000010010010010
                        seq_out         0x86DB86DB,2            # 10000110110110111000011011011011
                        seq_out         0x89248924,2            # 10001001001001001000100100100100
                        seq_out         0x8B6D8B6D,2            # 10001011011011011000101101101101
                        seq_out         0x8DB68DB6,2            # 10001101101101101000110110110110
                        seq_out         0x8FFF8FFF,2            # 10001111111111111000111111111111
                        jmp             @mainLoop               # loop indefinitely
            """

            sequence_length = 8
            staircase_sequence = np.arange(1, sequence_length)
            # expected sequence should be ([9, 18, 27, 36, 45, 54, 63])
            expected_sequence = [
                (0, list(staircase_sequence + (staircase_sequence << 3))),
                (1, list(staircase_sequence + (staircase_sequence << 3))),
                (2, list(staircase_sequence + (staircase_sequence << 3))),
                (3, list(staircase_sequence + (staircase_sequence << 3)))
            ]
            dio_mask = 0x8FFF8FFF
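
            # Worked check of the expected values above (added for clarity):
            # np.arange(1, 8) is [1..7] and (np.arange(1, 8) << 3) is
            # [8, 16, ..., 56], so their sum is [9, 18, 27, 36, 45, 54, 63].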

        elif dio_mode == "uhfqa":  # FIXME: no official mode yet
            # Based on UHFQuantumController.py::_prepare_CC_dio_calibration_uhfqa and examples/CC_examples/uhfqc_calibration.vq1asm
            cc_prog = inspect.cleandoc("""
            mainLoop:   seq_out         0x03FF0000,1        # TRIG=0x00010000, CW[8:0]=0x03FE0000
                        seq_out         0x00000000,10
                        jmp             @mainLoop
            """)

            dio_mask = 0x03ff0000

        else:
            raise ValueError(f"unsupported DIO mode '{dio_mode}'")

        log.debug(
            f"uploading DIO calibration program for mode '{dio_mode}' to CC")
        self.assemble_and_start(cc_prog)

        return dio_mask, expected_sequence
Exemplo n.º 54
0
    def check(self, *args, **kwargs):
        """
        """
        test, traceback = super(DemodAlazarTask, self).check(*args, **kwargs)

        if (self.format_and_eval_string(self.tracesnumber) %
                self.format_and_eval_string(self.tracesbuffer) != 0):
            test = False
            traceback[self.task_path + '/' + self.task_name + '-get_demod'] = \
                cleandoc('''The number of traces must be an integer multiple of the number of traces per buffer.''')

        if not (self.format_and_eval_string(self.tracesnumber) >= 1000):
            test = False
            traceback[self.task_path + '/' + self.task_name + '-get_demod'] = \
                cleandoc('''At least 1000 traces must be recorded to obtain a meaningful averaged measurement.''')

        time = self.format_string(self.timeaftertrig, 10**-9, 1)
        duration = self.format_string(self.duration, 10**-9, 1)
        timeB = self.format_string(self.timeaftertrigB, 10**-9, 1)
        durationB = self.format_string(self.durationB, 10**-9, 1)
        tracetime = self.format_string(self.tracetimeaftertrig, 10**-9, 1)
        traceduration = self.format_string(self.traceduration, 10**-9, 1)
        tracetimeB = self.format_string(self.tracetimeaftertrigB, 10**-9, 1)
        tracedurationB = self.format_string(self.tracedurationB, 10**-9, 1)

        for t, d in ((time, duration), (timeB, durationB),
                     (tracetime, traceduration), (tracetimeB, tracedurationB)):
            if len(t) != len(d):
                test = False
                traceback[self.task_path + '/' + self.task_name + '-get_demod'] = \
                    cleandoc('''An equal number of "Start time after trig" and "Duration" should be given.''')
            else:
                for tt, dd in zip(t, d):
                    if not (tt >= 0 and dd >= 0):
                        test = False
                        traceback[self.task_path + '/' + self.task_name + '-get_demod'] = \
                            cleandoc('''Both "Start time after trig" and "Duration" must be >= 0.''')

        if ((0 in duration) and (0 in durationB) and (0 in traceduration)
                and (0 in tracedurationB)):
            test = False
            traceback[self.task_path + '/' + self.task_name + '-get_demod'] = \
                           cleandoc('''All measurements are disabled.''')

        timestep = self.format_string(self.timestep, 10**-9, len(time))
        timestepB = self.format_string(self.timestepB, 10**-9, len(timeB))
        freq = self.format_string(self.freq, 10**6, len(time))
        freqB = self.format_string(self.freqB, 10**6, len(timeB))
        samplesPerSec = 500000000.0

        if 0 in duration:
            duration = []
            timestep = []
            freq = []
        if 0 in durationB:
            durationB = []
            timestepB = []
            freqB = []

        for d, ts in zip(duration + durationB, timestep + timestepB):
            if ts and np.mod(int(samplesPerSec * d), int(samplesPerSec * ts)):
                test = False
                traceback[self.task_path + '/' + self.task_name + '-get_demod'] = \
                   cleandoc('''The number of samples in "IQ time step" must divide the number of samples in "Duration".''')

        for f, ts in zip(freq + freqB, timestep + timestepB):
            if ts and np.mod(f * int(samplesPerSec * ts), samplesPerSec):
                test = False
                traceback[self.task_path + '/' + self.task_name + '-get_demod'] = \
                   cleandoc('''The "IQ time step" does not cover an integer number of demodulation periods.''')

        demodFormFile = self.format_and_eval_string(self.demodFormFile)

        if demodFormFile != []:
            duration = duration + durationB
            for d in duration:
                if len(demodFormFile[0]) > samplesPerSec * d:
                    test = False
                    traceback[self.task_path + '/' + self.task_name + '-get_demod'] = \
                       cleandoc('''The acquisition duration must be longer than the demodulation function's duration.''')

        return test, traceback
Exemplo n.º 55
0
def write_executable(path, contents):
    with open(str(path), "w") as f:
        f.write(inspect.cleandoc(contents))
        os.chmod(str(path), 0o775)
        click.secho(f"Wrote {path}", bold=True)
Exemplo n.º 56
0
def example_usage(self, usage):
    """Sets example usage"""
    self._example_usage = inspect.cleandoc(usage)
Exemplo n.º 57
0
def extract_kwargs(docstring):
    """Extract keyword argument documentation from a function's docstring.

    Parameters
    ----------
    docstring: str
        The docstring to extract keyword arguments from.

    Returns
    -------
    list of (str, str, list str)

    str
        The name of the keyword argument.
    str
        Its type.
    str
        Its documentation as a list of lines.

    Notes
    -----
    The implementation is rather fragile.  It expects the following:

    1. The parameters are under an underlined Parameters section
    2. Keyword parameters have the literal ", optional" after the type
    3. Names and types are not indented
    4. Descriptions are indented with 4 spaces
    5. The Parameters section ends with an empty line.

    Examples
    --------

    >>> docstring = '''The foo function.
    ... Parameters
    ... ----------
    ... bar: str, optional
    ...     This parameter is the bar.
    ... baz: int, optional
    ...     This parameter is the baz.
    ...
    ... '''
    >>> kwargs = extract_kwargs(docstring)
    >>> kwargs[0]
    ('bar', 'str, optional', ['This parameter is the bar.'])

    """
    if not docstring:
        return []

    lines = inspect.cleandoc(docstring).split('\n')
    retval = []

    #
    # 1. Find the underlined 'Parameters' section
    # 2. Once there, continue parsing parameters until we hit an empty line
    #
    while lines and lines[0] != 'Parameters':
        lines.pop(0)

    if not lines:
        return []

    lines.pop(0)
    lines.pop(0)

    while lines and lines[0]:
        name, type_ = lines.pop(0).split(':', 1)
        description = []
        while lines and lines[0].startswith('    '):
            description.append(lines.pop(0).strip())
        if 'optional' in type_:
            retval.append((name.strip(), type_.strip(), description))

    return retval
Exemplo n.º 58
0
def decorator(f):
    if 'help' in attrs:
        attrs['help'] = inspect.cleandoc(attrs['help'])
    _param_memo(f, HiddenOption(param_decls, **attrs))
    return f
Exemplo n.º 59
0
def run_module():
    # available arguments/parameters that a user can pass
    module_args = dict(pools=dict(type='list'),
                       volumes=dict(type='list'),
                       packages_only=dict(type='bool',
                                          required=False,
                                          default=False),
                       disklabel_type=dict(type='str',
                                           required=False,
                                           default=None),
                       safe_mode=dict(type='bool',
                                      required=False,
                                      default=True),
                       use_partitions=dict(type='bool',
                                           required=False,
                                           default=True))

    # seed the result dict in the object
    result = dict(
        changed=False,
        actions=list(),
        leaves=list(),
        mounts=list(),
        crypts=list(),
        pools=list(),
        volumes=list(),
        packages=list(),
    )

    module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
    if not BLIVET_PACKAGE:
        module.fail_json(
            msg="Failed to import the blivet or blivet3 Python modules",
            exception=inspect.cleandoc("""
                         blivet3 exception:
                         {}
                         blivet exception:
                         {}""").format(LIB_IMP_ERR3, LIB_IMP_ERR))

    if not module.params['pools'] and not module.params['volumes']:
        module.exit_json(**result)

    global disklabel_type
    disklabel_type = module.params['disklabel_type']

    global use_partitions
    use_partitions = module.params['use_partitions']

    global safe_mode
    safe_mode = module.params['safe_mode']

    b = Blivet()
    b.reset()
    fstab = FSTab(b)
    actions = list()

    if module.params['packages_only']:
        result['packages'] = get_required_packages(b, module.params['pools'],
                                                   module.params['volumes'])
        module.exit_json(**result)

    def record_action(action):
        if action.is_format and action.format.type is None:
            return

        actions.append(action)

    def action_dict(action):
        return dict(action=action.type_desc_str,
                    fs_type=action.format.type if action.is_format else None,
                    device=action.device.path)

    duplicates = find_duplicate_names(module.params['pools'])
    if duplicates:
        module.fail_json(msg="multiple pools with the same name: {0}".format(
            ",".join(duplicates)),
                         **result)
    for pool in module.params['pools']:
        duplicates = find_duplicate_names(pool['volumes'])
        if duplicates:
            module.fail_json(msg="multiple volumes in pool '{0}' with the "
                             "same name: {1}".format(pool['name'],
                                                     ",".join(duplicates)),
                             **result)
        try:
            manage_pool(b, pool)
        except BlivetAnsibleError as e:
            module.fail_json(msg=str(e), **result)

    duplicates = find_duplicate_names(module.params['volumes'])
    if duplicates:
        module.fail_json(msg="multiple volumes with the same name: {0}".format(
            ",".join(duplicates)),
                         **result)
    for volume in module.params['volumes']:
        try:
            manage_volume(b, volume)
        except BlivetAnsibleError as e:
            module.fail_json(msg=str(e), **result)

    scheduled = b.devicetree.actions.find()
    result['packages'] = b.packages[:]

    for action in scheduled:
        if action.is_destroy and action.is_format and action.format.exists and \
           (action.format.mountable or action.format.type == "swap"):
            action.format.teardown()

    if scheduled:
        # execute the scheduled actions, committing changes to disk
        callbacks.action_executed.add(record_action)
        try:
            b.devicetree.actions.process(devices=b.devicetree.devices,
                                         dry_run=module.check_mode)
        except Exception as e:
            module.fail_json(msg="Failed to commit changes to disk: %s" %
                             str(e),
                             **result)
        finally:
            result['changed'] = True
            result['actions'] = [action_dict(a) for a in actions]

    update_fstab_identifiers(b, module.params['pools'],
                             module.params['volumes'])
    activate_swaps(b, module.params['pools'], module.params['volumes'])

    result['mounts'] = get_mount_info(module.params['pools'],
                                      module.params['volumes'], actions, fstab)
    result['crypts'] = get_crypt_info(actions)
    result['leaves'] = [d.path for d in b.devicetree.leaves]
    result['pools'] = module.params['pools']
    result['volumes'] = module.params['volumes']

    # success - return result
    module.exit_json(**result)
Exemplo n.º 60
0
    def generate_c_code(self, **kwargs):
        res = ''

        # include header
        res += '\n'.join([
            c_helper.generate_local_include(h)
            for h in self.get_c_op_include_header()
        ])
        res += '\n\n'

        # param type
        res += self.get_c_param_type()
        res += '\n\n'

        # 1: nested C for-loop skeleton covering every element of the output tensor
        TemplateArrayAddLoop = c_helper.generate_ndim_for_loop(np.ones(
            self.output_tensor_shapes[0]),
                                                               indent=0)

        # 2: C element-access expressions for each input and for the output
        input_vals = OrderedDict({
            k: self._gen_array_element_val(self.output_tensor_ndims[0], v)
            for k, v in self.input_tensor_dict.items()
        })
        output_vals = {
            self.output_tensor_names[0]:
            self._gen_array_element_val(self.output_tensor_ndims[0],
                                        self.output_tensor_values[0])
        }

        # Despite the "Max" naming below, the emitted C computes an element-wise
        # minimum: m starts at DBL_MAX and is lowered by each comparison.
        Conditions = ''
        MaxStatement = ''
        TemplateCondition = cleandoc('''
        {t} m = DBL_MAX;
        {conditions}
        {indent}{outputVal} = m;
        ''')

        TemplateCompare = cleandoc('''
        {indent}if (m > {input}){{
        {indent}    m = {input};
        {indent}}}
        ''')

        for k, v in input_vals.items():
            Conditions += TemplateCompare.format(
                **{
                    'input': k + v,
                    'indent': ' ' * 4 * (self.input_tensor_ndims[0] + 1)
                })
            Conditions += '\n'
        else:
            mapping_cond = {'t': data_type.np2c(self.input_tensor_dtypes[0])}
            mapping_cond.update({'conditions': Conditions})
            mapping_cond.update({
                'outputVal':
                list(output_vals.keys())[0] + list(output_vals.values())[0]
            })
            mapping_cond.update(
                {'indent': ' ' * 4 * (self.output_tensor_ndims[0] + 1)})
            MaxStatement += TemplateCondition.format(**mapping_cond)

        TemplateFunction = cleandoc('''
        void {op_func_name}(void *op_param,{InputsParamSignature}, {OutputsParamSignature}, void *inputs_params, void* outputs_params)
        {{
        {statements}
        }}
        ''')
        mappingf = {}
        mappingf.update({'op_func_name': self.get_func_name()})

        input_sigs = []
        for name, value in self.input_tensor_dict.items():
            input_sigs.append(self.gen_param_signature(name, value))

        mappingf.update({'InputsParamSignature': ','.join(input_sigs)})
        mappingf.update({
            'OutputsParamSignature':
            self.gen_param_signature(self.output_tensor_names[0],
                                     self.output_tensor_values[0])
        })

        mappingf.update({
            'statements':
            TemplateArrayAddLoop.replace('[statements]', MaxStatement)
        })
        res += '\n\n'
        res += TemplateFunction.format(**mappingf)

        return res