Exemple #1
0
def debug(port=1090):
    """Attach this process to a PyCharm remote debug server on localhost.

    :param port: TCP port the PyCharm debug server is listening on.
    """
    # Locate the PyCharm installation via the project's link helpers.
    pycharm_fol = get_link_dir(get_links_dir(), 'pycharm', throw_exception=True)
    eggpath = op.join(pycharm_fol, 'debug-eggs', 'pycharm-debug-py3k.egg')
    # Add the debug egg only once: any sys.path entry containing
    # 'pycharm-debug' counts as already present.
    if not any('pycharm-debug' in p for p in sys.path):
        sys.path.append(eggpath)
    import pydevd
    pydevd.settrace('localhost', port=port, stdoutToServer=True, stderrToServer=True)
Exemple #2
0
def start_debug():
    """Attach the Blender add-on to a waiting pydevd server, at most once."""
    if DEBUGGING is not True:
        return
    if PYDEV_SRC_DIR in sys.path:
        # Already attached on a previous call; nothing to do.
        return
    sys.path.append(PYDEV_SRC_DIR)
    import pydevd
    pydevd.settrace()
    print("started blender add-on debugging...")
    def _start_debug_client(self, config):
        """Optionally attach to a PyCharm/pydevd remote debug server.

        All settings come from ``config`` under ``debug.remote.*`` keys;
        nothing happens unless ``debug.remote`` evaluates truthy.
        """
        egg_dir = config.get('debug.remote.egg_dir', None)
        # If we have an egg directory, add the egg to the system path
        # If not set, user is expected to have made pycharm egg findable
        if egg_dir:
            sys.path.append(os.path.join(egg_dir, 'pycharm-debug.egg'))

        # If this import fails, pydevd stays unbound and the settrace() call
        # below raises NameError, which is caught and logged instead of
        # crashing the application.
        try:
            import pydevd
        except ImportError:
            pass

        debug = asbool(config.get('debug.remote', 'False'))
        host_ip = config.get('debug.remote.host.ip', '10.0.2.2')
        host_port = config.get('debug.remote.host.port', '63342')
        stdout = asbool(config.get('debug.remote.stdout_to_server', 'True'))
        stderr = asbool(config.get('debug.remote.stderr_to_server', 'True'))
        suspend = asbool(config.get('debug.remote.suspend', 'False'))
        if debug:
            # We don't yet have a translator, so messages will be in english only.
            log.info("Initiating remote debugging session to {}:{}".format(host_ip, host_port))
            try:
                pydevd.settrace(host_ip, port=int(host_port), stdoutToServer=stdout, stderrToServer=stderr, suspend=suspend)
            except NameError:
                log.warning("debug.enabled set to True, but pydevd is missing.")
            except SystemExit:
                log.warning("Failed to connect to debug server; is it started?")
 def attachPyDev(self):
     """Attach to a local pydevd server on port 9999 unless already connected."""
     import pydevd
     if pydevd.connected:
         return
     pydevd.settrace(
         'localhost', port=9999, stdoutToServer=True, stderrToServer=True)
Exemple #5
0
def svd_agg(m_rna, mi_rna, targets_matrix, c=1):
    """Aggregate mRNA and miRNA expression via per-gene rank-1 SVD updates.

    @param m_rna: mRNA expression DataFrame (samples x genes)
    @param mi_rna: miRNA expression DataFrame (samples x miRNAs)
    @param targets_matrix: binary interaction DataFrame (miRNA index x gene columns)
    @param c: unused here; kept for interface compatibility with other agg functions
    @return: DataFrame where each targeted gene column is replaced by an
        SVD merge of its profile with its targeting miRNAs' first component

    NOTE(review): relies on the long-removed DataFrame.ix accessor, so this
    only runs against an old pandas version.
    """
    if settings.CELERY_DEBUG:
        import sys
        sys.path.append('/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg')
        import pydevd
        pydevd.settrace('localhost', port=6901, stdoutToServer=True, stderrToServer=True)

    # Scale every mRNA column to [0, 1]; miRNA columns are scaled and inverted.
    mRNA_data = m_rna.apply(lambda x: 1.0*x/max(x), axis=0)
    miRNA_data = mi_rna.apply(lambda x: 1-1.0*x/max(x), axis=0)
    # Aggregation mutates this frame in place, column by column.
    aggregate_data = mRNA_data
    # Restrict the work to genes/miRNAs present in the interaction matrix.
    common_mRNAs =  Index(set(mRNA_data.columns) & set(targets_matrix.columns))
    common_miRNAs = Index(set(miRNA_data.columns) & set(targets_matrix.index))
    #
    for mRNA in common_mRNAs:
        # Wrap the single gene name in an Index so .ix returns a frame.
        mRNA = Index([mRNA])
        # miRNAs flagged (==1) as targeting this gene.
        targetting_miRNAs = targets_matrix.ix[targets_matrix[mRNA[0]]==1, mRNA].index
        #
        selected_miRNA = miRNA_data.ix[:, targetting_miRNAs].T
        # A first singular component only makes sense with >= 2 targeting miRNAs.
        if len(selected_miRNA.index) > 1:
            first_comp = DataFrame(np.linalg.svd(selected_miRNA)[2]).ix[0, :]
            first_comp.index = selected_miRNA.columns
        else:
            continue
        # Merge the gene's profile with the miRNA first component via another SVD.
        new_rep = DataFrame(np.linalg.svd(DataFrame([aggregate_data.ix[:, mRNA[0]], first_comp ]))[2]).ix[0, :]
        new_rep.index = aggregate_data.index
        aggregate_data.ix[:, mRNA[0]] = new_rep
    return aggregate_data
Exemple #6
0
def pca_agg_task_cv(exp, block,
                     train_es, test_es, gene_sets,
                     base_filename,
    ):
    """
        PCA-aggregate train/test expression sets over gene sets
        (cross-validation variant) and return two new expression sets.

        @type train_es, test_es: ExpressionSet
        @type gene_sets: GeneSets
        @return: ([result_train, result_test], {})

    """
    df_train = train_es.get_assay_data_frame()
    df_test = test_es.get_assay_data_frame()
    src_gs = gene_sets.get_gs()
    if settings.CELERY_DEBUG:
        import sys
        sys.path.append('/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg')
        import pydevd
        pydevd.settrace('localhost', port=6901, stdoutToServer=True, stderrToServer=True)
    # Presumably aligns each frame with the gene sets before aggregation —
    # TODO confirm against preprocess_df_gs.
    df_train, src_gs_train = preprocess_df_gs(df_train, src_gs)
    df_test, src_gs_test = preprocess_df_gs(df_test, src_gs)

    result_df_train, result_df_test = pca_agg_cv(df_train, df_test, src_gs_train.genes)

    # Clone the inputs (preserving metadata), then attach the new matrices.
    result_train = train_es.clone(base_filename + "_train")
    result_train.store_assay_data_frame(result_df_train)
    result_train.store_pheno_data_frame(train_es.get_pheno_data_frame())

    result_test = test_es.clone(base_filename + "_test")
    result_test.store_assay_data_frame(result_df_test)
    result_test.store_pheno_data_frame(test_es.get_pheno_data_frame())

    return [result_train, result_test], {}
Exemple #7
0
    def _xmlrpchandler_common(self, destination, args, debug=False, suspend=False):
        """Dispatch an XML-RPC call to the handler registered for *destination*.

        @param destination: name the handler was registered under
        @param args: flattened call arguments; unflattened before dispatch
        @param debug: if True, connect to a running PyDev debug server first
        @param suspend: if True (with debug), suspend at the handler's first line
        @return: the flattened handler result; any raised exception is
            flattened and returned instead of propagating
        """
        try:
            handler = self._handlers.get(destination)
            if handler is None:
                ret = Exception("No handler registered for " + destination)
            else:
                unflattened = map(_flatten.unflatten, args)
                if debug:
                    # do the import here, pydevd must already be in the sys.
                    # it is important that the import is within the outer try/except
                    # so that import error gets propagated back up as
                    # an AnalysisRpcException
                    import pydevd  # @UnresolvedImport
                    # Initialise debug, without the suspend, next settrace sets the
                    # suspend if needed. This split makes it easier to detect cause
                    # of failure if an exception is thrown. This line covers the
                    # connection to the debug server, the next settrace then only
                    # does the suspend.
                    pydevd.settrace(suspend=False, **self.pydev_settrace_params) # Needs PyDev Debug Server Running


                    # These two statements must be on same line so that suspend happens
                    # on next executable line, being the first line of the handler

                    # Run the registered Analysis RPC Handler. (If you are browsing
                    # a stack trace, this is probably as high up the stack as you
                    # want to go).
                    pydevd.settrace(suspend=suspend); ret = handler(*unflattened)
                else:
                    ret = handler(*unflattened)
            flatret = _flatten.flatten(ret)
        except Exception as e:
            # Errors travel back to the caller as flattened exceptions.
            flatret = _flatten.flatten(e)
        return flatret
Exemple #8
0
    def startDebug(self,**kargs):
        """Starts remote debugging for PyDev.

        Keyword args:
            debug:   enable debug tracing of this class
            verbose: enable verbose tracing of this class

        Raises PyDevRDCException if pydevd was not initialised
        (``self.runningInPyDevDbg`` false) or if settrace fails.
        """
        for k,v in kargs.items():
            if k == 'debug':
                self._dbg_self = v
            elif k == 'verbose':
                # FIX: previously assigned self._dbg_self here as well,
                # clobbering the 'debug' flag instead of setting verbosity.
                self._verbose = v

        if self._dbg_self or self._verbose:
            print >>sys.stderr,"RDBG:debug starting "

        # already loaded
        if self.runningInPyDevDbg:
            if self._dbg_self or self._verbose:
                print >>sys.stderr,"RDBG:dbgargs="+str(self.dbgargs)
            try:
                pydevd.settrace(
                    host=self.dbgargs['host'],
                    port=self.dbgargs['port'],
                    stdoutToServer=self.dbgargs['stdoutToServer'],
                    stderrToServer=self.dbgargs['stderrToServer'],
                    suspend=self.dbgargs['suspend'],
                    trace_only_current_thread=self.dbgargs['trace_only_current_thread']
                    )
                #OK-ref: pydevd.settrace(host=None,port=5678,stdoutToServer=False,stderrToServer=False,suspend=False,trace_only_current_thread=True)
            except Exception as e:
                raise PyDevRDCException(e)
        else:
            raise PyDevRDCException("ERROR:Requires init:self.runningInPyDevDbg="+str(self.runningInPyDevDbg))
        # FIX: referenced undefined module-level names _dbg_self/_dbg_unit,
        # which raised NameError right after a successful attach.
        if self._dbg_self or self._verbose:
            print >>sys.stderr,"RDBG:debug started"
Exemple #9
0
 def test(self):
     i = 10
     while i > 0:
         print 'setting trace'
         # When pydevd.settrace() executes, the debug server will display the execution state.
         pydevd.settrace('192.168.56.1', port=5678, stdoutToServer=True, stderrToServer=True)
         i = i - 1
Exemple #10
0
def main(argv=None):
    """Entry point: parse CLI arguments, optionally attach pydevd, run predictor.

    @param argv: argument list to parse; defaults to sys.argv[1:]
    """
    log.info('started application')

    log.warning('This script is obsolete. It will not be updated anymore and ' +
        'will be deleted in the future. Use use_model.py instead.')

    if argv is None:
        argv = sys.argv[1:]

    # FIX: argv was computed above but never used; parse_args() silently fell
    # back to sys.argv, making the argv parameter dead.
    args = parser.parse_args(argv)
    log.info('start parameters: ' + str(args))

    if args.debug_host:
        import pydevd
        pydevd.settrace(host=args.debug_host, stdoutToServer=True,
                stderrToServer=True)

    # Route uncaught exceptions into the post-mortem debugger when verbose.
    if log.level == logging.DEBUG:
        sys.excepthook = debug

    log.info('creating predictor')
    predictor = vLblNCEPredictor()
    predictor.prepare_usage(args)
    log.info('starting prediction')
    predictor.run()
    log.info('finished')
Exemple #11
0
	def slot_connect(self):
		""" slot_connect to winch. Returns true on success and false otherwise. """

		# Best-effort pydevd attach; silently ignored when no debugger listens.
		try:
			pydevd.settrace(suspend=False)
		except:
			pass

		# Short timeout while establishing the connection.
		ConWinch.sock.settimeout(ConWinch.conn_timeout)
		try:
			ConWinch.sock.connect()
		except (TimeoutConnection):
			self.sigConnectionTimeout.emit();
			self.sigDisconnected.emit("Connection timeout")
			self._set_state(ConStates.STATE_DISCONNECTED)
			return (time.time(), False)
		except (ErrorConnection) as e:
			self.sigDisconnected.emit(str(e))
			self._set_state(ConStates.STATE_DISCONNECTED)
			return (time.time(), False)

		print("Connection opened.")
		# Switch to the normal operating timeout once connected.
		ConWinch.sock.settimeout(ConWinch.timeout)
		self.sigConnected.emit()
		self._set_state(ConStates.STATE_STOPPED)
		return (time.time(), True)
Exemple #12
0
	def slot_stop(self):
		"""Stop the winch: enter STOPPED state and emit sigStopped."""
		# Best-effort pydevd attach; silently ignored when no debugger listens.
		try:
			pydevd.settrace(suspend=False)
		except:
			pass
		self._set_state(ConStates.STATE_STOPPED)
		self.sigStopped.emit()
Exemple #13
0
    async def debug(self, engine, options):
        """
        Setup middlewared for remote debugging.

        engines:
          - PTVS: Python Visual Studio
          - PYDEV: Python Dev (Eclipse/PyCharm)

        options:
          - secret: password for PTVS
          - host: required for PYDEV, hostname of local computer (developer workstation)
          - local_path: required for PYDEV, path for middlewared source in local computer (e.g. /home/user/freenas/src/middlewared/middlewared

        Raises ValidationError when a required option for the chosen
        engine is missing.
        """
        if engine == 'PTVS':
            import ptvsd
            if 'secret' not in options:
                raise ValidationError('secret', 'secret is required for PTVS')
            ptvsd.enable_attach(
                options['secret'],
                address=(options['bind_address'], options['bind_port']),
            )
            if options['wait_attach']:
                ptvsd.wait_for_attach()
        elif engine == 'PYDEV':
            for i in ('host', 'local_path'):
                if i not in options:
                    raise ValidationError(i, f'{i} is required for PYDEV')
            # Map the developer's local source tree onto the installed package
            # path so breakpoints set locally resolve on this machine.
            os.environ['PATHS_FROM_ECLIPSE_TO_PYTHON'] = json.dumps([
                [options['local_path'], '/usr/local/lib/python3.6/site-packages/middlewared'],
            ])
            import pydevd
            # Drop any previous trace before (re)connecting to the server.
            pydevd.stoptrace()
            pydevd.settrace(host=options['host'])
  def connect(self):
    """Connect Slicer to a PyDev remote debug server on localhost.

    Shows an informational dialog first (Slicer blocks until the server
    accepts), then calls pydevd.settrace. On failure the dialog is hidden,
    a warning is shown and the completion callback is invoked with False.
    """
    self.updatePydevdPath()
    import pydevd

    # Return if already connected
    if pydevd.connected:
      qt.QMessageBox.warning(slicer.util.mainWindow(),
      "Connect to PyDev remote debug server", 'You are already connected to the remote debugger. If the connection is broken (e.g., because the server terminated the connection) then you need to restart Slicer to be able to connect again.')
      return

    # Show a dialog that explains that Slicer will hang
    self.info = qt.QDialog()
    self.info.setModal(False)
    self.infoLayout = qt.QVBoxLayout()
    self.info.setLayout(self.infoLayout)
    self.label = qt.QLabel("Connecting to remote debug server at port {0}...\nSlicer is paused until {1} accepts the connection.".format(self.portNumber,self.getDebugger()),self.info)
    self.infoLayout.addWidget(self.label)
    self.info.show()
    self.info.repaint()
    # Completion handler fires after the (blocking) settrace below returns.
    qt.QTimer.singleShot(2000, self.onConnectionComplete)

    # Connect to the debugger
    try:
      pydevd.settrace('localhost', port=self.portNumber, stdoutToServer=True, stderrToServer=True, suspend=False)
    except Exception, e:
      self.info.hide()
      import traceback
      traceback.print_exc()
      qt.QMessageBox.warning(slicer.util.mainWindow(),
          "Connect to PyDev remote debug server", 'An error occurred while trying to connect to PyDev remote debugger. Make sure he pydev server is started.\n\n' + str(e))
      if self.connectionCompleteCallback:
        self.connectionCompleteCallback(False)
      return
Exemple #15
0
def main():
    """Rhizi server entry point: load config, init logging, DB, webapp and
    websocket interface, then serve forever.

    Exits with -1 on configuration or initialization failure.
    """
    global log
    global webapp

    try:  # enable pydev remote debugging
        import pydevd

        pydevd.settrace()
    except ImportError:
        pass

    p = argparse.ArgumentParser(description="rhizi-server")
    p.add_argument("--config-dir", help="path to Rhizi config dir", default="res/etc")
    args = p.parse_args()

    log = logging.getLogger("rhizi")  # init config-unaware log, used until we call init_log

    try:
        cfg = init_config(args.config_dir)
        log = init_log(cfg)
    except Exception as e:
        log.error("failed to initialize server: {}".format(e.args))
        traceback.print_exc()
        exit(-1)

    try:
        cfg_indent_str = "   " + str(cfg).replace("\n", "\n   ")
        log.info("loaded configuration:\n%s" % cfg_indent_str)  # print indented
        if False == cfg.access_control:
            log.warn("[!] access control disabled, all-granted access set on all URLs")

        init_signal_handlers()
        init_user_db(cfg)
    except Exception as e:
        log.exception("failed to initialize server")
        traceback.print_exc()
        exit(-1)

    #
    # init kernel
    #
    kernel = RZ_Kernel()
    kernel.db_ctl = DB_Controller(cfg.db_base_url)

    #
    # init webapp
    #
    webapp = init_webapp(cfg, kernel)
    webapp.user_db = user_db
    kernel.db_op_factory = webapp  # assist kernel with DB initialization
    ws_srv = init_ws_interface(cfg, kernel, webapp)

    try:
        kernel.start()
        ws_srv.serve_forever()
    except Exception as e:
        log.exception(e)

    shutdown()
Exemple #16
0
def aggregation_task(exp, block,
                     mode, c,
                     m_rna_es, mi_rna_es, interaction_matrix,
                     base_filename,
    ):
    """
        Aggregate mRNA and miRNA expression with the selected algorithm.

        @type m_rna_es: ExpressionSet
        @type mi_rna_es: ExpressionSet
        @type interaction_matrix: BinaryInteraction
        @param mode: "SVD" or "SUB"; anything else falls back to svd_agg
        @param c: weighting constant forwarded to the aggregation function
        @return: ([aggregated ExpressionSet], {})

    """
    if settings.CELERY_DEBUG:
        import sys
        sys.path.append('/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg')
        import pydevd
        pydevd.settrace('localhost', port=6901, stdoutToServer=True, stderrToServer=True)

    agg_func = svd_agg
    if mode == "SVD":
        agg_func = svd_agg
    elif mode == "SUB":
        agg_func = sub_agg

    inter_units = None
    m_rna = None
    # For RefSeq-keyed matrices, restrict the mRNA frame to interacting units.
    if interaction_matrix.x1_unit == 'RefSeq':
        inter_units = interaction_matrix.load_pairs().iloc[:, 0].tolist()

    if inter_units:
        m_rna = m_rna_es.get_assay_data_frame_for_platform(exp, inter_units)
    else:
        m_rna = m_rna_es.get_assay_data_frame()

    mi_rna = mi_rna_es.get_assay_data_frame()
    gene_platform = list(m_rna.columns)
    mi_rna_platform = list(mi_rna)
    # Notify the experiment UI that the (slow) matrix transform started.
    AllUpdated(
        exp.pk,
        comment=u"Transforming interaction matrix",
        silent=False,
        mode=NotifyMode.INFO
    ).send()

    targets_matrix = interaction_matrix.get_matrix_for_platform(exp, gene_platform, mi_rna_platform, symmetrize=False, identifiers=True)

    AllUpdated(
        exp.pk,
        comment=u"Transforming interaction matrix done",
        silent=False,
        mode=NotifyMode.INFO
    ).send()

    # targets_matrix = interaction_matrix.load_matrix()

    result_df = agg_func(m_rna, mi_rna, targets_matrix, c)
    result = m_rna_es.clone(base_filename)
    result.store_assay_data_frame(result_df)
    result.store_pheno_data_frame(mi_rna_es.get_pheno_data_frame())

    return [result], {}
def feature_selection_by_cut(
        exp, block,
        src_es, base_filename,
        rank_table,
        cut_property, threshold, cut_direction
):
    """
        Keep only the features whose ranking passes a threshold comparison.

        @type src_es: ExpressionSet
        @type rank_table: TableResult

        @param cut_property: column of the rank table to compare
        @param cut_direction: either {"<", "<=", ">=", ">"}
        @return: ([filtered ExpressionSet], {})
    """
    if settings.CELERY_DEBUG:
        import sys
        sys.path.append('/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg')
        import pydevd
        pydevd.settrace('localhost', port=6901, stdoutToServer=True, stderrToServer=True)

    df = src_es.get_assay_data_frame()
    es = src_es.clone(base_filename)
    es.store_pheno_data_frame(src_es.get_pheno_data_frame())

    rank_df = rank_table.get_table()

    # Boolean mask of features passing the comparison; cmp_func maps the
    # direction string to the corresponding operator.
    selection = rank_df[cut_property]
    mask = cmp_func(cut_direction)(selection, threshold)
    new_df = df[list(mask.select(lambda x: mask[x]).index)]

    es.store_assay_data_frame(new_df)

    return [es], {}
 def run(self, config, year):
     """Running MATSim.  A lot of paths are relative; the base path is ${OPUS_HOME}/opus_matsim.  As long as ${OPUS_HOME}
     is correctly set and the matsim tarfile was unpacked in OPUS_HOME, this should work out of the box.  There may eventually
     be problems with the java version.
     """
     # Best-effort pydevd attach; ignored when no debug server is running.
     try:
         import pydevd
         pydevd.settrace()
     except: pass
     
     logger.start_block("Starting RunTravelModel.run(...)")
     
     self.setUp( config )
     
     # Write the MATSim XML config for this simulation year.
     config_obj = MATSimConfigObject(config, year, self.matsim_config_full)
     config_obj.marschall()
     
     cmd = """cd %(opus_home)s/opus_matsim ; java %(vmargs)s -cp %(classpath)s %(javaclass)s %(matsim_config_file)s""" % {
             'opus_home': os.environ['OPUS_HOME'],
             'vmargs': "-Xmx2000m",
             'classpath': "libs/log4j/log4j/1.2.15/log4j-1.2.15.jar:libs/jfree/jfreechart/1.0.7/jfreechart-1.0.7.jar:libs/jfree/jcommon/1.0.9/jcommon-1.0.9.jar:classesMATSim:classesToronto:classesTNicolai:classesKai:classesEntry", #  'classpath': "classes:jar/MATSim.jar",
             'javaclass': "playground.run.Matsim4Urbansim",
             'matsim_config_file': self.matsim_config_full } 
     
     logger.log_status('Running command %s' % cmd ) 
     
     # Shell out to the Java MATSim runner; non-zero exit is fatal.
     cmd_result = os.system(cmd)
     if cmd_result != 0:
         error_msg = "Matsim Run failed. Code returned by cmd was %d" % (cmd_result)
         logger.log_error(error_msg)
         logger.log_error("Note that currently (dec/08), paths in the matsim config files are relative to the opus_matsim root,")
         logger.log_error("  which is one level 'down' from OPUS_HOME.")
         raise StandardError(error_msg)        
     
     logger.end_block()
        def run(self):
            """Play the configured song repeatedly until stopped or the
            repeat count is exhausted, then unregister this thread."""
            # Best-effort attach to an Eclipse/pydev debugger; fails
            # harmlessly when running outside of Eclipse.
            try:
                import pydevd
                pydevd.connected = True
                pydevd.settrace(suspend=False)
            except:
                pass

            self.stopped = False
            while True:
                if self.stopped:
                    break
                if self.numRepeats <= 0 and not self.playForever:
                    break
                # time.sleep() (unlike rospy.sleep()) appears to release the
                # GIL, so other threads keep running between repeats.
                time.sleep(self.repeatPeriod)
                self.roboComm.playMusic(self.songName,
                                        volume=self.volume,
                                        playhead=self.playhead,
                                        timeReference=self.timeReference)
                if not self.playForever:
                    self.numRepeats -= 1

            with self.roboComm.musicLock:
                self.roboComm.unregisterRepeatThread(self, self.roboComm.musicThreads)
 def run(self):
     """Wait for text-to-speech to become idle, then repeat the configured
     utterance until stopped or the repeat count is exhausted; finally
     unregister this thread."""
     # To enable breakpoint processing in Eclipse plugin
     # pydev, we need the following. The Try/Except will
     # fail harmlessly if running outside of Eclipse:
     try:
         import pydevd
         pydevd.connected = True
         pydevd.settrace(suspend=False)
     except:
         pass;
     
     self.stopped = False;
     # Poll until the TTS engine is free before starting our own speech.
     while self.roboComm.getTextToSpeechBusy():
         # Note: rospy.sleep() does not seem to release the Global Interpreter Lock.
         #       => Thread would not release control. Must use time.sleep():
         time.sleep(0.5);
     while not self.stopped and ((self.numRepeats > 0) or self.playForever):
         # Note: rospy.sleep() does not seem to release the Global Interpreter Lock.
         #       => Thread would not release control. Must use time.sleep():
         time.sleep(self.repeatPeriod);
         self.roboComm.say(self.text, voice=self.voiceName, ttsEngine=self.ttsEngine, blockTillDone=True);
         if not self.playForever:
             self.numRepeats -= 1;
             
     with self.roboComm.textToSpeechLock:
         self.roboComm.unregisterRepeatThread(self, self.roboComm.speechThreads);                     
Exemple #21
0
def zscore_task(exp, block,
                     es,
                     base_filename,
    ):
    """
        Z-score normalize an expression set's assay matrix.

        @type es: ExpressionSet
        @return: ([normalized ExpressionSet], {})

        NOTE(review): uses the deprecated DataFrame.as_matrix() and .ix
        accessors, so this requires an old pandas version.
    """
    if settings.CELERY_DEBUG:
        import sys
        sys.path.append('/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg')
        import pydevd
        pydevd.settrace('localhost', port=6901, stdoutToServer=True, stderrToServer=True)

    m = es.get_assay_data_frame()
    result_arr = stats.zscore(m.as_matrix())
    # Rebuild a DataFrame with the original labels, row by row.
    result_df = pd.DataFrame(columns=m.columns, index=m.index)
    for val, item in enumerate(result_arr):
        result_df.ix[val] = item

    result = es.clone(base_filename)
    result.store_assay_data_frame(result_df)
#    result.store_pheno_data_frame(es.get_pheno_data_frame())

    return [result], {}
 def run(self):
     """Run the numerical integration on this thread, attaching pydevd and
     initializing COM when available; report cancellation and errors."""
     haveCOM = False
     try:
         '''
         Do the numerical integration in a try branch
         to avoid losing the thread when an intended exception is raised
         '''
         try:
             import pydevd
             pydevd.connected = True
             pydevd.settrace(suspend=False)
         except:
             # do nothing, since error message only indicates we are not in debug mode
             pass
         try:
             import pythoncom
             pythoncom.CoInitialize()  # Initialize the COM library on the current thread
             haveCOM = True
         except:
             pass
         self.model.simulate()
     except SimulatorBase.Stopping:
         # Intended cancellation raised from within the simulation.
         print("solver canceled ... ")
     except Exception, e:
         print("unexpected error ... ")
         print e
def ask_for_stop(use_back):
    """Trigger a pydevd breakpoint.

    With use_back True the debugger stops in the caller's frame; otherwise
    it stops at the next line executed in this function.
    """
    import pydevd
    if not use_back:
        pydevd.settrace()
    else:
        pydevd.settrace(stop_at_frame=sys._getframe().f_back)
    print('Will stop here if use_back==False.')
def merge_two_es(exp, block, es_1, es_2, con, base_filename):
    """
        Concatenate two expression sets into a new one.

        @type es_1: ExpressionSet
        @type es_2: ExpressionSet
        @param con: "CC" concatenates by columns (axis=1), otherwise by rows
        @return: ([merged ExpressionSet], {})
    """
    if settings.CELERY_DEBUG:
        import sys

        sys.path.append("/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg")
        import pydevd

        pydevd.settrace("localhost", port=6901, stdoutToServer=True, stderrToServer=True)

    merged_es = es_1.clone(base_filename)
    try:
        merged_es.store_pheno_data_frame(es_1.get_pheno_data_frame())
    except RuntimeError:
        # Pheno data may be unavailable on the source set; proceed without it.
        pass

    concat_axis = 1 if con == "CC" else 0
    merged_assay_df = pd.concat(
        [es_1.get_assay_data_frame(), es_2.get_assay_data_frame()],
        axis=concat_axis,
    )
    merged_es.store_assay_data_frame(merged_assay_df)

    return [merged_es], {}
def create_app(db_url):
    """Create and configure the Flask application.

    :param db_url: connection url to the database being used
    :returns: the initialized and created app instance
    """
    app = Flask(__name__)
    (app.db_session, app.db_metadata, app.db_engine) = init_db(db_url)

    @app.teardown_request
    def shutdown_session(exception=None):
        # Release the scoped DB session after every request.
        app.db_session.remove()

    create_api(app, API_VERSION)

    # support for remote debugging in Intellij and pycharm
    #
    # Set IDEA_ORGANISATIONS_REMOTE_DEBUG_ON to True in your environment
    # prior to starting the application to get remote debugging.
    #
    # Set IDEA_REMOTE_DEBUG_SERVER to the ip/hostname of the machine running the
    # debug server.
    #
    # Set IDEA_ORGANISATIONS_REMOTE_DEBUG_SERVER to the port of the debug server prosess
    #
    # For the remote debugging to work you will also have to make sure
    # the pycharm-debug.egg is on your path (check your environment file).
    if os.environ.get('IDEA_ORGANISATIONS_REMOTE_DEBUG_ON') == 'True':
        server = os.environ.get('IDEA_REMOTE_DEBUG_SERVER')
        port = os.environ.get('IDEA_ORGANISATIONS_REMOTE_DEBUG_PORT')
        app.logger.info("Idea remote debugging is on! Will connect to debug server running on %s:%s" % (server, port))
        import pydevd
        pydevd.settrace(server, port=int(port), stdoutToServer=True, stderrToServer=True)

    return app
 def run(self):
     """Verify unlink features against the axial map, updating the unlinks
     layer's line1/line2 attributes and emitting progress/result signals.

     Uses DB-side checks for spatialite/postgis layers and a QGIS fallback
     otherwise.
     """
     if has_pydevd and is_debug:
         pydevd.settrace('localhost', port=53100, stdoutToServer=True, stderrToServer=True)
     self.running = True
     # reset all the errors
     self.problem_nodes = []
     for k, v in self.unlink_errors.iteritems():
         self.unlink_errors[k]=[]
     datastore = self.unlinks_layer.storageType().lower()
     if 'spatialite' in datastore or 'postgresql' in datastore:
         # get the relevant layers names
         start_time = time.time()
         unlinkname = uf.getDBLayerTableName(self.unlinks_layer)
         axialname = uf.getDBLayerTableName(self.axial_layer)
         # Both layers must live in one database for SQL-side verification.
         if not uf.testSameDatabase([self.unlinks_layer, self.axial_layer]):
             self.verificationError.emit("The map layer must be in the same database as the unlinks layer.")
             return
         connection = uf.getDBLayerConnection(self.unlinks_layer)
         # get the geometry column name and other properties
         if 'spatialite' in datastore:
             unlinkgeom = uf.getSpatialiteGeometryColumn(connection, unlinkname)
             axialgeom = uf.getSpatialiteGeometryColumn(connection, axialname)
         else:
             unlinkinfo = uf.getPostgisLayerInfo(self.unlinks_layer)
             unlinkgeom = uf.getPostgisGeometryColumn(connection, unlinkinfo['schema'], unlinkname)
             axialinfo = uf.getPostgisLayerInfo(self.axial_layer)
             axialgeom = uf.getPostgisGeometryColumn(connection, axialinfo['schema'], axialname)
             # todo: ensure that it has a spatial index
             #uf.createPostgisSpatialIndex(self.connection, unlinkinfo['schema'], unlinkname, unlinkgeom)
         print "Preparing the map: %s" % str(time.time()-start_time)
         self.verificationProgress.emit(5)
         # update the unlinks
         start_time = time.time()
         if 'spatialite' in datastore:
             added = uf.addSpatialiteColumns(connection, unlinkname, ['line1','line2'], [QVariant.Int,QVariant.Int])
         else:
             added = uf.addPostgisColumns(connection, unlinkinfo['schema'], unlinkname, ['line1','line2'], [QVariant.Int,QVariant.Int])
         print "Updating unlinks: %s" % str(time.time()-start_time)
         self.verificationProgress.emit(10)
         # analyse the unlinks
         start_time = time.time()
         if 'spatialite' in datastore:
             self.spatialiteTestUnlinks(connection, unlinkname, unlinkgeom, axialname, axialgeom)
         else:
             self.postgisTestUnlinks(connection, unlinkinfo['schema'], unlinkname, unlinkgeom, axialinfo['schema'], axialname, axialgeom)
         print "Analysing unlinks: %s" % str(time.time()-start_time)
         self.verificationProgress.emit(100)
         connection.close()
     else:
         # add attributes if necessary
         uf.addFields(self.unlinks_layer,['line1','line2'], [QVariant.Int,QVariant.Int])
         # analyse the unlinks
         start_time = time.time()
         self.qgisTestUnlinks()
         print "Analysing unlinks: %s" % str(time.time()-start_time)
     self.verificationProgress.emit(100)
     # return the results
     self.problem_nodes = list(set(self.problem_nodes))
     self.verificationFinished.emit(self.unlink_errors, self.problem_nodes)
     return
Exemple #27
0
def merge_comodules_task(exp, block, cs_1, cs_2, cs_1_name, cs_2_name,
                         base_filename):
    """Outer-join two comodule sets on their index into a new ComoduleSet.

    @return: ([ComoduleSet], {})
    """
    if settings.CELERY_DEBUG:
        import sys
        sys.path.append('/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg')
        import pydevd
        pydevd.settrace('localhost', port=6901, stdoutToServer=True, stderrToServer=True)

    left_set = cs_1.load_set()
    right_set = cs_2.load_set()
    joined = merge(left_set, right_set, left_index=True, right_index=True, how='outer')
    joined.columns = [cs_1_name, cs_2_name]
    print(joined.info())

    result_set = ComoduleSet(exp.get_data_folder(), base_filename)
    result_set.store_set(joined)
    return [result_set], {}
Exemple #28
0
def filter_task(exp, block, filter_type, q, es, base_filename):
    """
        Apply a gene filter to an expression set.

        @type es: ExpressionSet
        @param filter_type: "LOW_VAL" or "VAR"; any other value falls back
            to the low-value filter
        @param q: threshold/quantile forwarded to the filter function
        @return: ([filtered ExpressionSet], {})
    """
    if settings.CELERY_DEBUG:
        import sys
        sys.path.append('/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg')
        import pydevd
        pydevd.settrace('localhost', port=6901, stdoutToServer=True, stderrToServer=True)

    dispatch = {
        "LOW_VAL": gene_low_val_filter,
        "VAR": gene_var_filter,
    }
    filter_func = dispatch.get(filter_type, gene_low_val_filter)

    source_df = es.get_assay_data_frame()
    filtered_df = filter_func(source_df, q)

    result = es.clone(base_filename)
    result.store_assay_data_frame(filtered_df)
    # Note: the pheno data frame is not attached to the filtered result.
    return [result], {}
 def run(self):
     """Update the unlinks layer's id attributes to match the axial map,
     choosing a DB-side update for spatialite/postgis layers and a QGIS
     fallback otherwise; emits progress and finished signals."""
     if has_pydevd and is_debug:
         pydevd.settrace('localhost', port=53100, stdoutToServer=True, stderrToServer=True)
     self.running = True
     # get line ids (to match the object ids in the map)
     unlinktype = self.unlinks_layer.geometryType()
     datastore = self.unlinks_layer.storageType().lower()
     if 'spatialite' in datastore or 'postgresql' in datastore:
         # test the relevant layers
         if not uf.testSameDatabase([self.unlinks_layer, self.axial_layer]):
             self.verificationError.emit("The map layer must be in the same database as the unlinks layer.")
             return
         connection = uf.getDBLayerConnection(self.unlinks_layer)
         if 'spatialite' in datastore:
             self.spatialiteUpdateIDs(connection, unlinktype)
         else:
             # get the layer id columns, required in postgis
             if self.user_id == '' or self.axial_id == '':
                 self.verificationError.emit("The unlinks layer needs an id attribute or primary key.")
             else:
                 self.postgisUpdateIDs(connection, unlinktype)
         connection.close()
     else:
         self.qgisUpdateIDs(unlinktype)
     self.verificationProgress.emit(100)
     self.verificationFinished.emit()
     return
Exemple #30
0
def threshold_task(exp, block,
                     es,
                     T,
                     base_filename,
    ):
    """Compute co-comodules (gene sets) from matrix H by the given threshold T.

    Arguments:
        exp: experiment object; supplies the data folder for stored results.
        block: workflow block owning this task (unused here; part of the
            shared task-function interface).
        es: expression set whose assay data frame is the input matrix H.
        T: threshold handed to EnrichmentInGeneSets.getGeneSet().
        base_filename: base file name for the resulting ComoduleSet.

    Returns:
        ([ComoduleSet], {}) -- the standard (results, extra-options) pair
        returned by the other *_task functions in this module.
    """
    # Optional PyCharm remote-debug attach for Celery workers.
    if settings.CELERY_DEBUG:
        import sys
        sys.path.append('/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg')
        import pydevd
        pydevd.settrace('localhost', port=6901, stdoutToServer=True, stderrToServer=True)

    H = es.get_assay_data_frame()

    # Imported locally so the module can be loaded without this optional
    # wrapper package being importable.
    from wrappers.snmnmf.evaluation import EnrichmentInGeneSets
    z = 1
    x = EnrichmentInGeneSets(z)
    result = x.getGeneSet(H, T)

    cs = ComoduleSet(exp.get_data_folder(), base_filename)
    cs.store_set(result)
    return [cs], {}
# Attach to a PyCharm/PyDev remote-debug server on localhost:51234 before
# anything else in this script runs; stdout/stderr are mirrored to the IDE.
import pydevd
pydevd.settrace('localhost',
                port=51234,
                stdoutToServer=True,
                stderrToServer=True)
import os
import random
from PIL import Image


def getCoords(filename, requestedNumPairs, maxIter, GOODLABEL, BADLABEL):
    random.seed(1337)

    im = Image.open(filename)
    pix = im.load()

    offset = 13
    i = 0
    width = im.size[0]
    height = im.size[1]

    # Set the starting x and ycoord to ensure that each patch is complete
    numPairsGood = 0
    numPairsBad = 0

    result = []

    while ((numPairsGood < requestedNumPairs
            or numPairsBad < requestedNumPairs) and i < maxIter):
        i += 1
        xcoord = random.randint(0, width - 1)
Exemple #32
0
import sys

#try to import PyDev module
try:
    import pydevd
    pydevd.settrace(stdoutToServer=True, stderrToServer=True)
except:
    print "Can't import PyDev module: ", sys.exc_info()

#import xfx module (package is exposed in xfx_python library)
import xfx


def failed(hr):
    """Return True when *hr* is a failed HRESULT (i.e. a negative value)."""
    code = int(hr)
    return code < 0


def succeeded(hr):
    """Return True when *hr* is a successful (non-negative) HRESULT."""
    if failed(hr):
        return False
    return True


class Application(xfx.Application):
    """
	Application - base class for entire application. Provides methods for
	initialization, deinitialization, update-render loop.
	"""

    instance = None
Exemple #33
0
    # the final gevent 1.1 release (https://github.com/gevent/gevent/issues/349)
    # unicode('foo').encode('idna')  # noqa

    from psycogreen.gevent import patch_psycopg  # noqa
    patch_psycopg()

import os  # noqa
from django.core.wsgi import get_wsgi_application  # noqa
from website.app import init_app  # noqa

# Attach pydevd when API_REMOTE_DEBUG is set to "host:port". suspend=False
# attaches without pausing, and trace_only_current_thread=False makes the
# debugger follow all threads of the WSGI process.
if os.environ.get('API_REMOTE_DEBUG', None):
    import pydevd
    remote_parts = os.environ.get('API_REMOTE_DEBUG').split(':')
    pydevd.settrace(remote_parts[0],
                    port=int(remote_parts[1]),
                    suspend=False,
                    stdoutToServer=True,
                    stderrToServer=True,
                    trace_only_current_thread=False)

# Default settings module; a pre-set environment value wins.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'api.base.settings')

#### WARNING: Here be monkeys ###############
import six
import sys
from rest_framework.fields import Field
from rest_framework.request import Request


# Cached properties break internal caching
# 792005806b50f8aad086a76ff5a742c66a98428e
@property
Exemple #34
0
                       '--verbose',
                       action='store_true',
                       default=False,
                       help='verbose output')
     parser.add_option('-d',
                       '--debug',
                       action='store_true',
                       default=False,
                       help='allows remote debugging')
     (options, args) = parser.parse_args()
     #if len(args) < 1:
     #    parser.error ('missing argument')
     if options.debug:
         user_ip = os.environ['USERIP']
         pydevd.settrace(user_ip,
                         port=58484,
                         stdoutToServer=True,
                         stderrToServer=True)
     if options.verbose:
         print time.asctime()
     main()
     if options.verbose:
         print time.asctime()
         print 'Duration of script run:',
         print(time.time() - start_time) / 60.0
     sys.exit(0)
 except KeyboardInterrupt, e:  # Ctrl-C
     raise e
 except SystemExit, e:  # sys.exit()
     raise e
 except Exception, e:
     print 'ERROR, UNEXPECTED EXCEPTION'
Exemple #35
0
    # Best-effort attach to a PyDev/PyCharm debugger from inside Kodi.
    # Every failure mode is logged and swallowed so the add-on still starts.
    try:
        import pydevd

        # Note, besides having script.module.pydevd installed, pydevd
        # must also be on path of IDE runtime. Should be same versions!
        try:
            xbmc.log('back_end_service trying to attach to debugger',
                     xbmc.LOGDEBUG)
            addons_path = os.path.join(pydevd_addon_path, 'lib')
            sys.path.append(addons_path)
            # xbmc.log('sys.path appended to', xbmc.LOGDEBUG)
            # stdoutToServer and stderrToServer redirect stdout and stderr to eclipse
            # console
            try:
                pydevd.settrace('localhost', stdoutToServer=True,
                                stderrToServer=True, suspend=False,
                                wait_for_ready_to_run=True)
            except Exception as e:
                # Debug server not listening; continue without debugging.
                xbmc.log(
                    ' Looks like remote debugger was not started prior to plugin start',
                    xbmc.LOGDEBUG)
        except BaseException:
            xbmc.log('Waiting on Debug connection', xbmc.LOGDEBUG)
    except ImportError:
        REMOTE_DEBUG = False
        msg = 'Error:  You must add org.python.pydev.debug.pysrc to your PYTHONPATH.'
        xbmc.log(msg, xbmc.LOGDEBUG)
        sys.stderr.write(msg)
        # Sentinel so later `pydevd` references do not raise NameError.
        pydevd = 1
    except BaseException:
        xbmc.log('Waiting on Debug connection', xbmc.LOGERROR)
Exemple #36
0
from pathlib import Path

versionString = "0.6.5"

#Start Debug

# Presence of hairNetDeb.txt next to this file toggles pydevd attachment
# (file-based switch so no code change is needed to enable debugging).
hnDebFile = os.path.join(os.path.dirname(__file__), 'hairNetDeb.txt')
if Path(hnDebFile).is_file():
    print("HN Debug File Exists")
    import sys
    pydev_path = '/Users/rhett/.p2/pool/plugins/org.python.pydev.core_7.7.0.202008021154/pysrc'
    if sys.path.count(pydev_path) < 1: sys.path.append(pydev_path) 
    
    import pydevd
    
    # Attach without suspending so Blender keeps running.
    pydevd.settrace(stdoutToServer=True, stderrToServer=True, suspend=False)
else:
    print("HN Debug File Doesn't exist")
    
#End Debug

# It is always good to use wrapper prop when attacking to common data block such as Object to reduce blend junk
class HairNetConfig(PropertyGroup):
    masterHairSystem: StringProperty(
        name="hnMasterHairSystem",
        description="Name of the hair system to be copied by this proxy object.",
        default="")
    
    isHairProxy: BoolProperty(
            name="hnIsHairProxy",
            description="Is this object a hair proxy object?",
import sys, traceback

# REMOTE DEBUGGING
# REMOTE DEBUGGING
REMOTE_DBG = False

# append pydev remote debugger
if REMOTE_DBG:
    # Make pydev debugger work for auto reload.
    # Note pydevd module needs to be copied in XBMC\system\python\Lib\pysrc
    try:
        import pydevd
        # stdoutToServer and stderrToServer redirect stdout and stderr to eclipse console
        pydevd.settrace('localhost',
                        stdoutToServer=True,
                        stderrToServer=True,
                        suspend=False)
    except ImportError:
        sys.stderr.write(
            "Error: " +
            "You must add org.python.pydev.debug.pysrc to your PYTHONPATH.")
        sys.exit(1)
    # NOTE(review): bare except silently ignores every other settrace
    # failure (e.g. debug server not listening) — deliberate best-effort.
    except:
        sys.stderr.write('Remote Debugger is not started')

# ACTUAL ADDON
from lib import main

try:
    myAddon = main.Main()
    myAddon.run(sys.argv)
except:
Exemple #38
0
_logger = xlogging.getLogger(__name__)

# pydevd egg shipped alongside the service; debugging activates only when
# BOTH the egg and the JSON config file below exist on disk.
PYCHARM_DEBUG_FILE = r'/var/aio/LogicService/pycharm-debug-py3k.egg'
# Expected config shape: {"address":"172.16.6.80", "cfg":{"port":21000}}
PYCHARM_DEBUG_CONFIG = r'/var/aio/LogicService/pycharm-debug-py3k.json'

if os.path.isfile(PYCHARM_DEBUG_FILE) and os.path.isfile(PYCHARM_DEBUG_CONFIG):
    sys.path.append(PYCHARM_DEBUG_FILE)
    import pydevd

    with open(PYCHARM_DEBUG_CONFIG) as f:
        pycharm_debug_cfg = json.load(f)
    _logger.info(r'pycharm_debug_cfg : {}'.format(pycharm_debug_cfg))

    # "cfg" entries (e.g. port) are forwarded verbatim to settrace.
    pydevd.settrace(pycharm_debug_cfg['address'], **pycharm_debug_cfg['cfg'])


class XDebugHelper(threading.Thread):
    TIMER_INTERVAL_SECS = 10

    DUMP_ALL_THREAD_STACK_FILE = r'/var/aio/LogicService/dump_stack'

    def __init__(self):
        """Initialise the helper thread with remote debugging disabled."""
        threading.Thread.__init__(self)
        # Whether a PyCharm debug session is active — presumably toggled
        # later by the run loop; verify against do_run().
        self.pycharm_debug = False

    def run(self):
        while True:
            try:
                self.do_run()
Exemple #39
0
#!/usr/bin/python

# Minimal Raspberry Pi remote-debug demo: pysrc must be copied to the Pi,
# and the settrace target is the machine running the Eclipse debug server.
import sys
sys.path.append(r'/home/pi/pysrc')
import pydevd
pydevd.settrace('192.168.1.xxx')  # replace IP with address
# of Eclipse host machine

i = 3
p = 'Hello!' * i
print p
Exemple #40
0
def debug_this_thread():
    """Register the calling thread with an already-attached pydevd debugger.

    pydevd only traces threads it knows about; call this from any worker
    thread that should be debuggable. suspend=False attaches without
    pausing execution.
    """
    log.info("debug_this_thread()")
    pydevd.settrace(suspend=False)
Exemple #41
0
import pydevd

# variables
data_folder = "/root/NMR_DATA"
# data_folder = "D:"
en_fig = 0            # enable figure output
en_remote_dbg = 0     # enable remote debugging (debug server must be running)

# remote debug setup: map the IDE-side source tree onto the device-side
# tree so breakpoints resolve to the right files.
if en_remote_dbg:
    from pydevd_file_utils import setup_client_server_paths
    server_path = '/root/nmr_pcb20_hdl10_2018/MAIN_nmr_code/'
    client_path = 'D:\\GDrive\\WORKSPACES\\Eclipse_Python_2018\\RemoteSystemsTempFiles\\129.22.143.88\\root\\nmr_pcb20_hdl10_2018\\MAIN_nmr_code\\'
    PATH_TRANSLATION = [(client_path, server_path)]
    setup_client_server_paths(PATH_TRANSLATION)
    pydevd.settrace("dajo-compaqsff")

# system setup
nmrObj = tunable_nmr_system_2018(data_folder)

nmrObj.initNmrSystem()
nmrObj.turnOnPower()
nmrObj.setPreampTuning(-3.35, -1.4)
nmrObj.setMatchingNetwork(19, 66)
nmrObj.setSignalPath()

# optimization parameters
t2_opt_mult = 1
'''-----------Start T2 Optimization-------------------'''

while True:
Exemple #42
0
if __name__ == '__main__':
    # Test driver: attach to a local pydevd server, then launch the
    # companion script as a subprocess so patch_multiprocessing is
    # exercised across the process boundary.
    import subprocess
    import sys
    import os
    # Imported before the sys.path fix below, so it must already be
    # importable from the current working directory.
    import _debugger_case_remote_2
    root_dirname = os.path.dirname(os.path.dirname(__file__))

    if root_dirname not in sys.path:
        sys.path.append(root_dirname)

    import pydevd

    # Flushed prints bracket settrace so the harness can see exactly
    # where the attach happened in the output stream.
    print('before pydevd.settrace')
    sys.stdout.flush()
    pydevd.settrace(host='127.0.0.1', port=8787, patch_multiprocessing=True)
    print('after pydevd.settrace')
    sys.stdout.flush()
    # Normalise compiled-file paths back to the .py source file.
    f = _debugger_case_remote_2.__file__
    if f.endswith('.pyc'):
        f = f[:-1]
    elif f.endswith('$py.class'):
        f = f[:-len('$py.class')] + '.py'
    print('before call')
    sys.stdout.flush()
    subprocess.check_call([sys.executable, '-u', f])
    print('after call')
    sys.stdout.flush()
Exemple #43
0
            kwargs)
    print(data)
    data = data[2:]
    transactions = []
    for i in range(count):
        args = {'from':'eosio', 'to':'evm', 'amount':i, 'data':data}
        args = eosapi.pack_args('evm', 'transfer', args)
        action = ['evm', 'transfer', args, {'eosio':'active'}]
        transactions.append([action,])
    ret, cost = eosapi.push_transactions(transactions)
    assert ret
    print('total cost time:%.3f s, cost per action: %.3f ms, actions per second: %.3f'%(cost/1e6, cost/count/1000, 1*1e6/(cost/count)))

@init
def test4():
    """Compile the CryptoKitties Solidity source, deploy it and run the kitty tests."""
    main_class = '<stdin>:KittyCore'
    source_path = '../../programs/pyeos/contracts/evm/cryptokitties.sol'
    with open(source_path, 'r') as source_file:
        source_code = source_file.read()
        interface = compile(source_code, main_class)
        deploy(interface)
        kitties_test(interface)


if __name__ == '__main__':
    # Attach to an already-running pydevd server without pausing, then
    # run the transfer benchmark.
    import pydevd
    pydevd.settrace(suspend=False)
    test2()



Exemple #44
0
def aria_start():
    # Start reading _config file
    _config = ConfigParser.SafeConfigParser(allow_no_value=True)
    _config.read('./configuration/main.conf')
    # Setting up debug session
    try:
        # Parse configuration options
        if _config.getboolean('Debug', 'Debug'):
            print 'Trying to start debug session.'
            _debug_host = _config.get('Debug', 'host').strip()
            _debug_port = _config.getint('Debug', 'port')
            print 'Remote host - %s  on port %i' % (_debug_host, _debug_port)
            pydevd.settrace(_debug_host, port=_debug_port, stdoutToServer=True, stderrToServer=True, suspend=False)
            print '#################################################'
            print '########  Remote debug session started ##########'
            print '#################################################'
        else:
            print 'Start in normal mode.'
    except ConfigParser.NoSectionError:
        print 'No debug section found.Starting in normal mode'
        print 'Missing debug parameters.Please refer manual.Starting in normal mode'
    # setting up logger
    try:
        logging.config.fileConfig('main.logger')
        _logger = logging.getLogger('root')
    except ConfigParser.NoSectionError as e:
        print 'Fatal error  - fail to set _logger.Error: %s ' % e.message
        exit(-1)
    _logger.debug('Logger started')
    # Loading modules
    # Storing loaded modules
    active_modules = list()
    try:
        # Search all files in plugin folder
        plugin_dir = _config.get('Modules', 'Path').strip()
        _logger.info('Searching modules in: %s' % plugin_dir)
    except IOError:
        # Incorrect folder - Switching to default
        _logger.info('Error getting plugin dir using default - plugins')
        plugin_dir = 'plugins'
    try:
        # Create list of disables modules and classes
        disable_modules = _config.get('Modules', 'Disabled')
        disable_modules = disable_modules.strip().split(',')
        disable_classes = _config.get('Classes', 'Disabled')
        disable_classes = disable_classes.strip().split(',')
    except ConfigParser as e:
        _logger.fatal('Fail to read config file with error %s' % e)
        exit(-1)
    _logger.info('Disabled modules : %s' % disable_modules)
    _logger.info('Disabled classes : %s' % disable_classes)

    if not os.path.exists(plugin_dir):
        _logger.critical('Plugins folder not exist')
        exit(-1)
    # Searching .py files in folder 'plugins'
    for fname in os.listdir(plugin_dir):
        # Look only for py files
        if (fname.endswith('.py')) and ('plugin' in fname.lower()):
            # Cut .py from path
            module_name = fname[: -3]
            # Skip base,__init__  and disabled files
            if module_name != 'base' and module_name != '__init__' and not (module_name in disable_modules):
                _logger.info('Found module %s' % module_name)
                # Load module and add it to list of loaded modules
                package_obj = __import__(plugin_dir + '.' + module_name)
                active_modules.append(module_name)
            else:
                _logger.info('Skipping %s' % fname)

    # Retrieving modules
    _loaded_modules = []
    for modulename in active_modules:
        module_obj = getattr(package_obj, modulename)
        # Looking for classes in file
        for elem in dir(module_obj):
            obj = getattr(module_obj, elem)
            # If this a class ?
            if inspect.isclass(obj):
                if elem in disable_classes:
                    _logger.info('Skipping %s' % obj)
                    continue
                # Creating object
                try:
                    _logger.info('Loading module %s from %s' % (elem, modulename))
                    try:
                        _module = obj()
                    except (ImportError, TypeError) as e:
                        # Some error while creating module instance
                        _logger.fatal('Incorrect module. Error %s' % e)
                except ImportWarning:
                    _logger.warning('Failed to load %s from %s' % (elem, modulename))
                    del _module
                    pass
                else:
                    # Store module instance
                    _loaded_modules.append(_module)
                    _logger.info('Module %s (version: %s) loaded' % (elem, _module.version))
    sleep(5)  # Init time
    _logger.info('All modules loaded')
    # Create event for shutdown of main thread
    dispatcher.connect(emergency_shutdown, signal='EmergencyShutdown')
    dispatcher.send(signal='SayResponse', response='Welcome')
    try:
        while True:
            #  We will wait here until shutdown
            sleep(1)
            if shutdown_flag.isSet():
                break
    except KeyboardInterrupt:
        _logger.warning("Keyboard Interrupt received")
    except SystemExit:
        _logger.warning("System shutdown")

    for _module in _loaded_modules:
        try:
            _logger.info('Unloading module %s' % _module)
            # Calling destructor will unload module
            del _module
        except:
            # Ignore all error while shutdown
            _logger.warning('Fail to unload module %s' % _module)
    _logger.info("All module unloaded")
Exemple #45
0
        "--config",
        help="additional configuration to load (e.g. for testing)")
    args = parser.parse_args()

    if args.config:
        add_configuration(app, args.config)

    pycharm_debug = app.config.get('DEBUG_PYCHARM', False)
    if args.debug:
        pycharm_debug = True

    if pycharm_debug:
        app.config['DEBUG'] = False
        import pydevd
        pydevd.settrace(app.config.get('DEBUG_SERVER_HOST', 'localhost'),
                        port=app.config.get('DEBUG_SERVER_PORT', 51234),
                        stdoutToServer=True,
                        stderrToServer=True)
        print "STARTED IN REMOTE DEBUG MODE"

    initialise()

# most of the imports should be done here, after initialise()
from flask import render_template
from octopus.lib.webapp import custom_static


@app.route("/")
def root():
    """Render the landing page."""
    return render_template("index.html")

Exemple #46
0
def filter_by_bi(exp, block, m_rna_es, mi_rna_es, interaction_matrix,
                 base_filename):
    """Filter mRNA and miRNA expression sets down to the features covered
    by the given interaction matrix.

    Arguments:
        exp: experiment object (used for notifications, logging and
            platform-specific data access).
        block: workflow block owning this task (used for logging).
        m_rna_es: mRNA expression set.
        mi_rna_es: miRNA expression set.
        interaction_matrix: mRNA/miRNA interaction matrix object.
        base_filename: base file name for the cloned result sets.

    Returns:
        ([filtered mRNA set, filtered miRNA set], {}) -- the standard
        (results, extra-options) pair returned by *_task functions.
    """
    # Optional PyCharm remote-debug attach for Celery workers.
    if settings.CELERY_DEBUG:
        import sys
        sys.path.append(
            '/Migration/skola/phd/projects/miXGENE/mixgene_project/wrappers/pycharm-debug.egg'
        )
        import pydevd
        pydevd.settrace('localhost',
                        port=6901,
                        stdoutToServer=True,
                        stderrToServer=True)

    inter_units = None

    # RefSeq-keyed matrices restrict the mRNA frame to the matrix's units.
    if interaction_matrix.x1_unit == 'RefSeq':
        inter_units = interaction_matrix.load_pairs().iloc[:, 0].tolist()

    if inter_units:
        m_rna_df = m_rna_es.get_assay_data_frame_for_platform(exp, inter_units)
    else:
        m_rna_df = m_rna_es.get_assay_data_frame()

    mi_rna_df = mi_rna_es.get_assay_data_frame()
    gene_platform = list(m_rna_df.columns)
    mi_rna_platform = list(mi_rna_df)
    AllUpdated(exp.pk,
               comment=u"Transforming interaction matrix",
               silent=False,
               mode=NotifyMode.INFO).send()

    targets_matrix = interaction_matrix.get_matrix_for_platform(
        exp,
        gene_platform,
        mi_rna_platform,
        symmetrize=False,
        identifiers=True)

    AllUpdated(exp.pk,
               comment=u"Transforming interaction matrix done",
               silent=False,
               mode=NotifyMode.INFO).send()

    # Align matrix axes with the expression frames: columns = genes,
    # rows = miRNAs.
    targets_matrix.columns = m_rna_df.columns
    targets_matrix.index = mi_rna_df.columns

    # Keep only the gene columns present in both the matrix and the frame.
    allowed_m_rna_index_set = set(targets_matrix.columns) & set(
        m_rna_df.columns)

    m_rna_df_filtered = m_rna_df.loc[:, allowed_m_rna_index_set]

    # Same for miRNA columns.
    allowed_mi_rna_index_set = set(targets_matrix.index) & set(
        mi_rna_df.columns)

    mi_rna_df_filtered = mi_rna_df.loc[:, allowed_mi_rna_index_set]

    m_rna_result = m_rna_es.clone(base_filename + "_mRNA")
    m_rna_result.store_assay_data_frame(m_rna_df_filtered)
    try:
        m_rna_result.store_pheno_data_frame(m_rna_es.get_pheno_data_frame())
    # Renamed the bound name from `re` to `err`: `re` shadowed the stdlib
    # regular-expression module for the rest of the function body.
    except RuntimeError as err:
        exp.log(block.uuid, "Phenotype not set")
        log.debug("Phenotype not set")
    mi_rna_result = mi_rna_es.clone(base_filename + "_miRNA")
    mi_rna_result.store_assay_data_frame(mi_rna_df_filtered)
    try:
        mi_rna_result.store_pheno_data_frame(mi_rna_es.get_pheno_data_frame())
    except RuntimeError as err:
        exp.log(block.uuid, "Phenotype not set")
        log.debug("Phenotype not set")
    return [m_rna_result, mi_rna_result], {}
Exemple #47
0
import pydevd

# settings
en_remote_dbg = 0  # enable remote debugging. Enable debug server first!
channel_read = 7  # select channel, from 1 to 7

# remote debug setup: translate client (Windows/samba) paths to the paths
# on the device so breakpoints resolve to the right files.
server_ip = '192.168.100.5'
client_ip = '192.168.100.2'
if en_remote_dbg:
    from pydevd_file_utils import setup_client_server_paths
    server_path = '/root/ultrasound_python/'
    client_path = 'V:\\ultrasound_python\\'  # client path with samba
    PATH_TRANSLATION = [(client_path, server_path)]
    setup_client_server_paths(PATH_TRANSLATION)
    pydevd.settrace(client_ip)

plt.figure(1)
fig = plt.gcf()
fig.show()

while True:

    process = Popen(['../c_exec/ultrasound_2019_pcb_v2 100 20'],
                    stdout=PIPE,
                    stderr=PIPE,
                    shell=True)
    stdout, stderr = process.communicate()
    stdchar = stdout.split()
    sleep(0.1)
    I = np.genfromtxt('./databank.txt', delimiter=',', dtype=int)
 def run(self):
     """Thread entry point: verify the axial map's geometry and topology.

     Chooses a backend-specific path (spatialite / postgis SQL checks vs.
     generic QGIS provider checks), optionally builds the topology graph
     with networkx, and reports progress/results through the
     verification* signals. Checks self.running between stages so the
     thread can be cancelled.
     """
     # Optional remote-debug attach for this worker thread.
     if has_pydevd and is_debug:
         pydevd.settrace('localhost',
                         port=53100,
                         stdoutToServer=True,
                         stderrToServer=True)
     self.running = True
     # reset all the errors
     self.problem_nodes = []
     for k, v in self.axial_errors.iteritems():
         self.axial_errors[k] = []
     provider = self.axial_layer.storageType()
     #caps = self.axial_layer.dataProvider().capabilities()
     graph_links = []
     datastore = provider.lower()
     if 'spatialite' in datastore or 'postgis' in datastore:
         # get the relevant layers
         unlinkname = ''
         unlinkschema = ''
         if self.unlinks_layer:
             unlinkname = uf.getDBLayerTableName(self.unlinks_layer)
             if not uf.testSameDatabase(
                 [self.unlinks_layer, self.axial_layer]):
                 self.verificationError.emit(
                     "The map layer must be in the same database as the unlinks layer."
                 )
                 return
             if 'postgresql' in datastore:
                 unlinkschema = uf.getPostgisLayerInfo(
                     self.unlinks_layer)['schema']
         axialname = uf.getDBLayerTableName(self.axial_layer)
         if self.user_id == '':
             self.user_id = uf.getDBLayerPrimaryKey(self.axial_layer)
         # get the geometry column name and other properties
         # always check if the operation has been cancelled before proceeding.
         # this would come up only once if the thread was based on a loop, to break it.
         if not self.running:
             return
         start_time = time.time()
         # could use this generic but I want to force a spatial index
         #geomname = uf.getDBLayerGeometryColumn(self.axial_layer)
         connection = uf.getDBLayerConnection(self.axial_layer)
         if 'spatialite' in datastore:
             geomname = uf.getSpatialiteGeometryColumn(
                 connection, axialname)
         else:
             layerinfo = uf.getPostgisLayerInfo(self.axial_layer)
             geomname = uf.getPostgisGeometryColumn(connection,
                                                    layerinfo['schema'],
                                                    axialname)
             # todo: ensure that it has a spatial index
             #uf.createPostgisSpatialIndex(onnection, layerinfo['schema'], axialname, geomname)
         if is_debug:
             print "Preparing the map: %s" % str(time.time() - start_time)
         self.verificationProgress.emit(5)
         # analyse the geometry
         if not self.running or not geomname:
             return
         start_time = time.time()
         if 'spatialite' in datastore:
             self.spatialiteTestGeometry(connection, axialname, geomname)
         else:
             self.postgisTestGeometry(connection, layerinfo['schema'],
                                      axialname, geomname)
         if is_debug:
             print "Analysing geometry: %s" % str(time.time() - start_time)
         self.verificationProgress.emit(80)
         # build the topology
         if not self.running:
             return
         if has_networkx:
             start_time = time.time()
             # NOTE(review): `linkname` and `linkschema` are not defined
             # anywhere in this method — if this branch runs it raises
             # NameError unless they are globals; verify against the
             # module this class lives in.
             if 'spatialite' in datastore:
                 graph_links = self.spatialiteBuildTopology(
                     connection, axialname, geomname, unlinkname, linkname)
             else:
                 graph_links = self.postgisBuildTopology(
                     connection, layerinfo['schema'], axialname, geomname,
                     unlinkschema, unlinkname, linkschema, linkname)
             if is_debug:
                 print "Building topology: %s" % str(time.time() -
                                                     start_time)
         self.verificationProgress.emit(90)
         connection.close()
     else:
         # create spatial index
         if not self.running:
             return
         start_time = time.time()
         index = uf.createIndex(self.axial_layer)
         if is_debug:
             print "Creating spatial index: %s" % str(time.time() -
                                                      start_time)
         self.verificationProgress.emit(5)
         # analyse the geometry and topology
         if not self.running:
             return
         start_time = time.time()
         graph_links = self.qgisGeometryTopologyTest(
             self.axial_layer, index, self.unlinks_layer)
         if is_debug:
             print "Analysing geometry and topology: %s" % str(time.time() -
                                                               start_time)
     # analyse the topology with igraph or networkx
     if not self.running:
         return
     if len(graph_links) > 0 and has_networkx:
         start_time = time.time()
         # get axial node ids
         if self.user_id == '':
             axialids = self.axial_layer.allFeatureIds()
         else:
             axialids, ids = uf.getFieldValues(self.axial_layer,
                                               self.user_id)
         # uses networkx to test islands. looks for orphans with the geometry test
         self.networkxTestTopology(graph_links, axialids)
         if is_debug:
             print "Analysing topology: %s" % str(time.time() - start_time)
     self.verificationProgress.emit(100)
     # return the results (deduplicated)
     self.problem_nodes = list(set(self.problem_nodes))
     self.verificationFinished.emit(self.axial_errors, self.problem_nodes)
     return
def debug(switch=True, port=59003):
    """Attach this process to a waiting pydevd (PyCharm/Eclipse) debug server.

    Args:
        switch: when False the call is a no-op, so callers can leave the
            call site in place and toggle debugging with a single flag.
        port: TCP port the debug server listens on.  Defaults to 59003,
            the value that was previously hard-coded.
    """
    if switch:
        # Imported lazily so pydevd is only required when debugging is on.
        import pydevd
        pydevd.settrace('localhost', port=port, stdoutToServer=True, stderrToServer=True)
Example #50
0
    def analysis(self):
        """Run the catchment analysis pipeline and report results via signals.

        Reads all inputs from ``self.settings``, emits progress milestones on
        ``self.progress``, the result dict on ``self.finished``, and any
        exception (plus traceback text) on ``self.error``.  Checks
        ``self.killed`` between stages so the worker can be cancelled.
        """
        if has_pydevd and is_debug:
            # Attach to a listening remote debugger without suspending.
            pydevd.settrace('localhost',
                            port=53100,
                            stdoutToServer=True,
                            stderrToServer=True,
                            suspend=False)
        if not self.settings:
            return
        try:
            # Stage 1: prepare the origin features.
            prepared_origins = self.origin_preparation(
                self.settings['origins'], self.settings['name'])
            self.progress.emit(10)
            if self.killed:
                return

            # Stage 2: build the network graph and tie origins to it.
            graph, tied_origins = self.graph_builder(
                self.settings['network'], self.settings['cost'],
                prepared_origins, self.settings['network tolerance'],
                self.settings['crs'], self.settings['epsg'])
            self.progress.emit(20)
            if self.killed:
                return

            # Stage 3: run the catchment analysis itself.
            catchment_network, catchment_points = self.graph_analysis(
                graph, tied_origins, self.settings['distances'])
            self.progress.emit(40)
            if self.killed:
                return

            # Result payload handed to the finished signal.
            output = {
                'output network features': None,
                'output polygon features': None,
                'distances': self.settings['distances']
            }

            network = self.settings['network']

            # Stage 4 (optional): write the catchment polygons.
            if self.settings['output polygon check']:
                polygon_fields = QgsFields()
                polygon_fields.append(QgsField('id', QVariant.Int))
                polygon_fields.append(QgsField('origin', QVariant.String))
                polygon_fields.append(QgsField('distance', QVariant.Int))

                output['output polygon features'] = self.polygon_writer(
                    catchment_points,
                    self.settings['distances'],
                    polygon_fields,
                    self.settings['polygon tolerance'],
                )

            self.progress.emit(70)
            if self.killed:
                return

            # Attribute fields for the network output layer.
            network_fields = self.get_fields(prepared_origins,
                                             self.settings['name'])

            # Stage 5: write the catchment network.
            output['output network features'] = self.network_writer(
                catchment_network, network_fields, self.settings['name'])

            if self.killed is False:
                self.progress.emit(100)
                self.finished.emit(output)

        except Exception as e:
            self.error.emit(e, traceback.format_exc())
Example #51
0
    def execute(self, context):
        """Export the current scene to X-Plane file(s).

        Exports either per X-Plane layer or per root object depending on the
        scene's exportMode.  When the plugin-development breakpoint option is
        enabled it first tries to attach a pydevd debugger.  Returns a
        standard Blender operator status set: {'FINISHED'} or {'CANCELLED'}.
        """
        initConfig()
        log = getLog()
        # prepare logging
        self._startLogging()

        debug = getDebug()
        export_directory = self.properties.filepath

        # An absolute filepath points at a file; the export goes into its
        # containing directory.  A relative export requires a saved .blend
        # file to be relative *to*.
        if not self.properties.export_is_relative:
            export_directory = os.path.dirname(export_directory)
        else:
            if bpy.context.blend_data.filepath == '':
                #We can't just save files relative to nothing somewhere on a users HDD (bad usability!) so we say there is an error.
                logger.error(
                    "Save your blend file before using the '%s' button" %
                    io_xplane2blender.xplane_ops.
                    SCENE_OT_export_to_relative_dir.bl_label)
                self._endLogging()
                showLogDialog()
                return {'CANCELLED'}

        # Optional remote-debug hook, gated behind two dev-only scene flags.
        if bpy.context.scene.xplane.plugin_development and \
            bpy.context.scene.xplane.dev_enable_breakpoints:
            try:
                #If you do not have your interpreter set up to include pydev by default, do so, or manually fill
                #in the path. Likely something like ~\.p2\pool\plugins\org.python.pydev_5.7.0.201704111357\pysrc
                #import sys;sys.path.append(r'YOUR_PYDEVPATH')
                import pydevd
                #Port must be set to 5678 for Blender to connect!
                pydevd.settrace(
                    stdoutToServer=
                    False,  #Enable to have logger and print statements sent to 
                    #the Eclipse console, as well as Blender's console.
                    #Only logger statements will show in xplane2blender.log
                    stderrToServer=False,  #Same as stdoutToServer
                    suspend=True
                )  #Seems to only work having suspend be set to true.
                #Get used to immediately pressing continue unfortunately.
            except:
                logger.info(
                    "Pydevd could not be imported, breakpoints not enabled. Ensure PyDev is installed and configured properly"
                )

        exportMode = bpy.context.scene.xplane.exportMode

        if exportMode == 'layers':
            # check if X-Plane layers have been created
            # TODO: only check if user selected the export from layers option, instead the export from root objects
            if len(bpy.context.scene.xplane.layers) == 0:
                logger.error('You must create X-Plane layers first.')
                self._endLogging()
                showLogDialog()
                return {'CANCELLED'}

        # store current frame as we will go back to it
        currentFrame = bpy.context.scene.frame_current

        # goto first frame so everything is in inital state
        bpy.context.scene.frame_set(frame=1)
        bpy.context.scene.update()

        xplaneFiles = []

        if exportMode == 'layers':
            xplaneFiles = xplane_file.createFilesFromBlenderLayers()

        elif exportMode == 'root_objects':
            xplaneFiles = xplane_file.createFilesFromBlenderRootObjects(
                bpy.context.scene)

        # Write every collected file; on failure either abort or continue,
        # depending on the dev_continue_export_on_error scene flag.
        for xplaneFile in xplaneFiles:
            if self._writeXPlaneFile(xplaneFile, export_directory) == False:
                if logger.hasErrors():
                    self._endLogging()
                    showLogDialog()

                if bpy.context.scene.xplane.plugin_development and \
                    bpy.context.scene.xplane.dev_continue_export_on_error:
                    logger.info("Continuing export despite error in %s" %
                                xplaneFile.filename)
                    logger.clearMessages()
                    continue
                else:
                    return {'CANCELLED'}

        # return to stored frame
        bpy.context.scene.frame_set(frame=currentFrame)
        bpy.context.scene.update()

        self._endLogging()

        if logger.hasErrors() or logger.hasWarnings():
            showLogDialog()

        return {'FINISHED'}
    def run(self):
        """Write a dummy MATSim travel-data matrix instead of running MATSim.

        Produces $OPUS_HOME/opus_matsim/tmp/travel_data.csv containing a
        from_zone/to_zone cost matrix over a fixed set of zone ids: zero cost
        on the diagonal, a large constant cost everywhere else.  The output
        is byte-identical to the previous hand-written version.
        """
        try:
            # Attach to a remote pydevd debugger when one is listening;
            # silently continue otherwise (debugging is strictly optional).
            import pydevd
            pydevd.settrace()
        except:
            pass
        logger.start_block("Starting RunDummyTravelModel.run(...)")

        print >> sys.stderr, "\nThis should also check if get_cache_data_into_matsim did something reasonable"

        logger.log_status('would normally run MATSim')

        out_file_name = os.path.join(os.environ['OPUS_HOME'], "opus_matsim",
                                     "tmp", "travel_data.csv")
        logger.log_note("open file : %s" % out_file_name)
        file_out = open(out_file_name, 'w')

        file_out.write(
            "from_zone_id:i4,to_zone_id:i4,single_vehicle_to_work_travel_cost:f4\n"
        )

        # Dummy cost matrix.  The zone order matters: it reproduces the
        # original hand-written file line for line.
        zones = ("1", "102", "109", "126", "128", "134", "139", "140", "2")
        for from_zone in zones:
            for to_zone in zones:
                cost = "0.0" if from_zone == to_zone else "999.9999999999999"
                file_out.write("%s,%s,%s\n" % (from_zone, to_zone, cost))

        try:
            file_out.close()
        except:
            logger.log_warning("file not closed")

        logger.end_block()
Example #53
0
 def execute(self, context):
     """Operator entry point: attach to a pydevd debug server, then finish."""
     import pydevd
     pydevd.settrace()
     status = {'FINISHED'}
     return status
import pydevd

# variables
data_folder = "/root/NMR_DATA"  # where acquisition data is stored on the device
en_fig = 1         # enable figure output (flag, 1/0)
en_remote_dbg = 1  # enable remote debugging via pydevd (flag, 1/0)

# remote debug setup: map the Eclipse-side (Windows client) source path onto
# the on-device path so breakpoints resolve to the right files.
if en_remote_dbg:
    from pydevd_file_utils import setup_client_server_paths
    server_path = '/root/nmr_pcb20_hdl10_2018/MAIN_nmr_code/'
    client_path = 'D:\\GDrive\\WORKSPACES\\Eclipse_Python_2018\\RemoteSystemsTempFiles\\129.22.143.88\\root\\nmr_pcb20_hdl10_2018\\MAIN_nmr_code\\'
    PATH_TRANSLATION = [(client_path, server_path)]
    setup_client_server_paths(PATH_TRANSLATION)
    # pydevd.settrace("dajo-compaqsff")
    # Connect back to the IDE host at this address (blocks until attached).
    pydevd.settrace("129.22.143.39")

# system setup: bring up the NMR hardware object and its power rails.
nmrObj = tunable_nmr_system_2018(data_folder)

nmrObj.initNmrSystem()
# nmrObj.turnOnPower()
# Enable the TX/ADC/analog supply rails in one combined bitmask write.
nmrObj.assertControlSignal(nmrObj.PSU_15V_TX_P_EN_msk
                           | nmrObj.PSU_15V_TX_N_EN_msk
                           | nmrObj.PSU_5V_TX_N_EN_msk
                           | nmrObj.PSU_5V_ADC_EN_msk
                           | nmrObj.PSU_5V_ANA_P_EN_msk
                           | nmrObj.PSU_5V_ANA_N_EN_msk)
# Tuning values appear board-specific — TODO confirm against hardware docs.
nmrObj.setPreampTuning(-3.35, -1.4)
nmrObj.setMatchingNetwork(19, 66)
# nmrObj.setSignalPath()
    def analysis(self):
        """Run the catchment analysis pipeline and report results via Qt signals.

        Reads all inputs from self.settings; emits progress milestones on
        self.progress, the result dict on self.finished, and any exception
        (with traceback text) on self.error.  Checks self.killed between
        stages so the worker can be cancelled mid-run.
        """
        if has_pydevd and is_debug:
            # Attach to a listening remote debugger without suspending.
            pydevd.settrace('localhost',
                            port=53100,
                            stdoutToServer=True,
                            stderrToServer=True,
                            suspend=False)
        if self.settings:
            try:
                # Prepare the origins
                origins = self.origin_preparation(self.settings['origins'],
                                                  self.settings['name'])
                self.progress.emit(10)
                # Truthiness test instead of '== True', consistent with the
                # sibling analysis method's cancellation checks.
                if self.killed: return

                # Build the graph
                graph, tied_origins = self.graph_builder(
                    self.settings['network'], self.settings['cost'], origins,
                    self.settings['network tolerance'], self.settings['crs'],
                    self.settings['epsg'])
                self.progress.emit(20)
                if self.killed: return

                # Run the analysis
                catchment_network, catchment_points = self.graph_analysis(
                    graph, tied_origins, self.settings['distances'])
                self.progress.emit(40)
                if self.killed: return

                # Create output signal
                output = {
                    'output network': None,
                    'output polygon': None,
                    'distances': self.settings['distances']
                }

                # Write and render the catchment polygons
                if self.settings['output polygon check']:
                    output_polygon = self.polygon_writer(
                        catchment_points, self.settings['distances'],
                        self.settings['temp polygon'],
                        self.settings['polygon tolerance'])
                    if self.settings['output polygon']:
                        uf.createShapeFile(output_polygon,
                                           self.settings['output polygon'],
                                           self.settings['crs'])
                        output_polygon = QgsVectorLayer(
                            self.settings['output polygon'], 'catchment_areas',
                            'ogr')
                    output['output polygon'] = output_polygon

                self.progress.emit(70)
                if self.killed: return

                # Write and render the catchment network
                if self.settings['output network check']:
                    output_network = self.network_writer(
                        origins, catchment_network,
                        self.settings['temp network'])
                    if self.settings['output network']:
                        uf.createShapeFile(output_network,
                                           self.settings['output network'],
                                           self.settings['crs'])
                        output_network = QgsVectorLayer(
                            self.settings['output network'],
                            'catchment_network', 'ogr')
                    output['output network'] = output_network

                if self.killed is False:
                    self.progress.emit(100)
                    self.finished.emit(output)

            # 'except Exception, e' is Python-2-only syntax; 'as' is valid on
            # Python 2.6+ AND Python 3, keeping the module importable on both.
            except Exception as e:
                self.error.emit(e, traceback.format_exc())
Example #56
0
import pydevd
import tensorflow as tf
import numpy as np

import util
from s3_image_batches_generator import ImageBatchGenerator
from s4_pre_trained_vgg16 import vgg_16, load_npy_weights
# from s4_pre_trained_mobilenet_v1 import mobilenet_v1_base
from s6_tf_LSTM import tf_lstm

# Attach to the remote IDE debug server before anything else runs
# (blocks until the debugger at this address/port accepts the connection).
pydevd.settrace('192.168.0.167',
                port=18236,
                stdoutToServer=True,
                stderrToServer=True)

# Hyperparameters and dataset constants, all centralized in util.
categories = util.CATEGORIES
lstm_layers = util.LSTM_LAYER
frame_num = util.FRAME_NUM
hidden_layer_nodes = util.HIDDEN_LAYER_NODES
base_lr = util.LEARNING_RATE
batch_size = util.BATCH_SIZE

lr_decay_steps = util.LR_DECAY_STEPS
lr_decay_rate = util.LR_DECAY_RATE
weight_decay = util.WEIGHT_DECAY
dropout_keep_prob = util.DROPOUT_KEEP_PROB

# Input data.
# Presumably (height, width) in pixels — TODO confirm against the batch generator.
spatial_size = [240, 320]
down_sampling_factor = 2
# one_img_squeeze_length = train_batch.spatial_size[0] * train_batch.spatial_size[1] // (
Example #57
0
import os
import json


# `sls offline` or `sls invoke local`
# Only attach the debugger when running locally via the Serverless framework;
# these env vars are never set in a real Lambda deployment.
if os.getenv('IS_OFFLINE') == 'true' or os.getenv('IS_LOCAL'):
    import pydevd
    # pydevd.settrace('172.25.0.1', port=54321, stdoutToServer=True, stderrToServer=True)  # NG (`ip route` in docker)
    # host.docker.internal resolves to the host machine from inside Docker.
    pydevd.settrace('host.docker.internal', port=54321, stdoutToServer=True, stderrToServer=True)


def hello(event, context):
    """Lambda handler: echo the incoming event back in a 200 JSON response."""
    payload = {
        "message": "Go Serverless v1.0! Your function executed successfully!",
        "input": event
    }

    return {
        "statusCode": 200,
        "body": json.dumps(payload)
    }

    # Use this code if you don't use the http event with the LAMBDA-PROXY
    # integration
    """
    return {
        "message": "Go Serverless v1.0! Your function executed successfully!",
        "event": event
    }
    if status == ERROR:
        module.fail_json(msg=adm_data)

    result = {'ansible_facts': {'adm': {}}}
    result['ansible_facts']['adm']['raw'] = adm_data
    result['ansible_facts']['adm']['policy'] = create_list_of_policies(
        adm_data)

    #
    # Return to the playbook
    #
    module.exit_json(changed=False, **result)


if __name__ == '__main__':
    """ Logic for remote debugging with Pycharm Pro, use SSH_CLIENT to derive the IP address of the laptop
    """
    # Probe whether a PYCHARM flag was defined (e.g. injected by the dev
    # environment).  NameError means no flag -> run without a debugger.
    try:
        PYCHARM
    except NameError:
        pass
    else:
        import pydevd
        import os
        # SSH_CLIENT is "client_ip client_port server_port"; the first token
        # is the laptop's address where the debug server listens.
        pydevd.settrace(os.getenv("SSH_CLIENT").split(" ")[0],
                        stdoutToServer=True,
                        stderrToServer=True)

    main()
Example #59
0
}

# Record this process's PID so external scripts can manage the bot process.
with open('/home/orangepi/electronotifybot/notify.pid', 'w+') as f:
    f.write('%s\n' % str(os.getpid()))
# Replace the logger *module* reference with a configured logger instance.
logger = logger.logger()

# Single-threaded polling bot (threaded=False keeps handlers sequential).
bot = telebot.TeleBot(config.token, threaded=False)

# Per-user conversation state, keyed presumably by chat/user id — TODO confirm.
user_dict = {}

# Optional remote debugging via the PyCharm debug egg, driven by config.
if config.heroku_debug:
    logger.debug('remote debug')
    sys.path.append('pycharm-debug.egg')
    import pydevd
    pydevd.settrace(config.server_debug,
                    port=config.port_debug,
                    stdoutToServer=True,
                    stderrToServer=True)


def do_command(message):
    """Stub command dispatcher: currently a no-op placeholder.

    The commented line shows the intended dynamic dispatch by message text.
    """
    # getattr(sys.modules[__name__], cmds[message.text])(message)
    pass


def check_break(func):
    '''Decorator: intercept command messages before running the handler.

    If the incoming message text is a known command, dispatch it through
    do_command instead of calling the wrapped handler — this lets a user
    break out of a multi-step operation by issuing a new command.
    '''
    def decorate(message):
        # Membership test directly on the dict (no .keys() needed).
        if message.text in cmds:
            do_command(message)
        else:
            func(message)
    # Bug fix: the decorator previously fell off the end and returned None,
    # which would replace every decorated handler with None.
    return decorate
Example #60
0
import logging
logging.info("IDE debugging")

import hlt
import socketnetworking

# Make PyCharm's bundled debug egg importable, then attach to the IDE's
# debug server on the port shared via socketnetworking.
import sys
sys.path.append(
    '/Applications/PyCharm.app/Contents/debug-eggs/pycharm-debug-py3k.egg')
import pydevd
pydevd.settrace('localhost', port=socketnetworking.PORT_)

# GAME START
# Initialize the game as "okoma_bot", including communication with the Halite engine.
game = socketnetworking.Game("okoma_bot")
# Then we print our start message to the logs
logging.info("Starting my Settler bot!")

while True:
    game_map = game.update_map()
    game_map.update_custom_info()

    command_queue = []
    # For every ship that I control
    for ship in game_map.get_me().all_ships():
        # If the ship is docked
        if ship.docking_status != ship.DockingStatus.UNDOCKED:
            # Skip this ship
            continue

        # For each planet in the game (only non-destroyed planets are included)