Example #1
0
def generate_random_graph():
    """
    Generates random graphs: the outer loop controls the number of repeated generations; the inner loop sweeps over different edge probabilities.

    :return: saves generated graphs to json and as networkx adjlist
    """
    for i in range(1, 100):
        for prob in range(1, 10):
            prob = float(prob / float(nodes * 10))  # den=nodes*10
            G = nx.fast_gnp_random_graph(nodes, prob).to_undirected()
            res = {}
            try:
                res = nx.to_dict_of_lists(G)

            except TypeError:  # Python 3.x
                sys.stdout.write("Error\n")

            size = len(list(nx.bridges(G)))  # number of bridges
            file_name_adj = "random_{}_{}_{}.adj_list".format(
                nodes, prob, size)
            file_name_json = "random_{}_{}_{}.json".format(nodes, prob, size)

            file_path_adj = os.path.join(os.getcwd(), '..', 'res',
                                         file_name_adj)
            file_path_json = os.path.join(os.getcwd(), '..', 'res',
                                          file_name_json)

            # writing to .json
            with open(file_path_json, "w") as f:
                json.dump(res, f, indent=4)

            # writing to .adjlist
            with open(file_path_adj, 'wb') as fh:
                nx.write_adjlist(G, fh)
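A minimal driver for the example above (a sketch, not from the original source): it assumes the snippet's module-level `nodes` constant and the usual imports, and that a ../res directory already exists.

import json
import os
import sys

import networkx as nx

nodes = 50  # assumed module-level constant used by generate_random_graph()

if __name__ == "__main__":
    generate_random_graph()  # writes random_<nodes>_<prob>_<bridges>.json / .adj_list into ../res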
Example #2
0
    def add_arguments(self, parser):
        # parser.add_argument('keypath', nargs=1, type=str)
        style = color_style()
        try:
            default_path = settings.KEY_PATH
        except AttributeError:
            default_path = default_key_path
            sys.stdout.write(style.INFO(
                f'settings.KEY_PATH not found. Using path=\'{default_path}\'\n'))
        try:
            default_prefix = settings.KEY_PREFIX
        except AttributeError:
            default_prefix = 'user'
            sys.stdout.write(style.INFO(
                f'settings.KEY_PREFIX not found. Using prefix=\'{default_prefix}\'\n'))

        parser.add_argument(
            '--keypath',
            action='store',
            dest='keypath',
            default=default_path,
            help=f'Set key path to something other than the \'{default_path}\'')
        parser.add_argument(
            '--keyprefix',
            action='store',
            dest='keyprefix',
            default=default_prefix,
            help=f'Set key prefix to something other than \'{default_prefix}\'')
 def _do_mapper(self):
     alt_stdin = open(0,'rb')
     try:
          for key,value in self.mapper(alt_stdin):
              sys.stdout.buffer.write(b''.join([key,self.key_value_delimiter,value,b'\n']))
     finally:
         alt_stdin.close()
Example #4
0
def init_camera():
    cam = cv.CaptureFromCAM(0)
    if not cam:
        sys.stdout("Error Initializing Camera! Aborting.")
        sys.exit(1)

    return cam
Example #5
0
def main(args=None):
    if not args:
        parser = argparse.ArgumentParser()
        #TODO: ADD ACTUAL ARGUMENTS.
        parser.add_argument('-a',
                            '--all',
                            action='store_true',
                            default=True,
                            help="run all functions & connect")
        parser.add_argument(
            '-p',
            '--passfile',
            nargs="?",
            default=None,
            help=
            "pass in a txt file containing your credentials to connect with.")

        # activates the `...action='store_true...'` logic.
        arrgs = parser.parse_args()

        # the only time ridiculous if-else trees are acceptable:
        if arrgs.all:
            from speedyvpn.core import check_latency
            if arrgs.passfile:
                check_latency.main(arrgs.passfile, connect=True)
            else:
                check_latency.main()
    else:
        sys.stdout.write(sysencode('Good job you broke it.'))
        sys.exit(-1)
Example #6
0
def get_events(f5,f5type="minknow",metrichor=None):
    if f5type == "metrichor":
        path = "Analyses/EventDetection_000/Reads/Read_0/Events"
    elif f5type == "minknow":
        path = "Analyses/EventDetection_000/Reads/Read_0/Events"
    for event in f5[path]:
        sys.stdout(event+"\n")
Example #7
0
def get_events(f5, f5type="minknow", metrichor=None):
    if f5type == "metrichor":
        path = "Analyses/EventDetection_000/Reads/Read_0/Events"
    elif f5type == "minknow":
        path = "Analyses/EventDetection_000/Reads/Read_0/Events"
    for event in f5[path]:
        sys.stdout(event + "\n")
Example #8
0
def gmt2regions(gmt_fname, db_fname, delineation_code, fraction):
    db = RegionRankingDatabase(fname=db_fname, name=os.path.basename(db_fname))
    signatures = GeneSignature.from_gmt(gmt_fname)
    delineation = CODE2DELINEATION[delineation_code]
    for signature in signatures:
        sys.stdout.write(
            signature.name + ',' +
            ','.join(convert(signature, db, delineation, fraction).genes) + '\n')
Example #9
0
def get_num_events(f5,f5type="minknow"):
    if f5type == "metrichor":
        path = "Analyses/EventDetection_000/Reads/Read_0/Events"
    elif f5type == "minknow":
        path = "Analyses/EventDetection_000/Reads/Read_0/Events"
##    for event in f5[path]:
##        sys.stdout(event+"\n")
    sys.stdout(f5[path].len()+"\n")
Example #10
0
 def write(self, output):
     if SYSLOG:
         if output == "\n": return
         syslog(output)
     elif self.logfile:
         self.logfile.write(output)
     else:
          sys.stdout.write(output)
         sys.stdout.flush()
Example #11
0
def get_db_param():
    if len(sys.argv) != 3:
        sys.stdout("Usage: collection name, db name")
        return -1

    db_name = str(sys.argv[1])
    collection_name = str(sys.argv[2])

    return collection_name, db_name
Example #12
0
 def sendToWidget(self):
     if not self.isSend:
         self.isSend = True
         while not self.queue.empty():
             if self.widget:
                 self.widget.append(self.queue.get())
             else:
                  sys.stdout.write(self.queue.get())
         self.isSend = False
def main():

    if len(sys.argv) < 2:
        sys.stdout.write(
            "Incorrect args. \nUsage: python server_start.py <path to script>\n")
        exit()

    handler = ServerHandler(sys.argv[1])
    handler.instantiate()
    handler.listen()
Example #14
0
def get_num_events(f5, f5type="minknow"):
    if f5type == "metrichor":
        path = "Analyses/EventDetection_000/Reads/Read_0/Events"
    elif f5type == "minknow":
        path = "Analyses/EventDetection_000/Reads/Read_0/Events"


##    for event in f5[path]:
##        sys.stdout(event+"\n")
    sys.stdout.write(str(f5[path].len()) + "\n")
Example #15
0
 def __dumptable(self, table):
     """ Dumps table on screen
     for debugging purposes
     """
     for x in table.table.keys():
         sys.stdout.write("{0}\t<--- {1} {2}".format(
             x, table[x], type(table[x])))
         if isinstance(table[x], ID):
             sys.stdout(" {0}".format(table[x].value)),
         sys.stdout.write("\n")
def get_graph(image_id):
    graph_file = 'app/data/' + str(image_id)
    if not os.path.isfile(graph_file):
        print("not found {}".format())
        sys.stdout('Status: 404 Not Found\r\n\r\n')
    with open(graph_file) as json_file:
        graph = json.load(json_file)
    if graph is not None:
        return jsonify(graph)
    else:
        sys.stdout.write('Status: 404 Not Found\r\n\r\n')
Example #17
0
 def svmProgram(self):
     choice = UserIn.getInputByNumber("Linear", "Linear+", "Poly", "RBF")
     while choice not in range(1, 5):
         sys.stdout("Try again")
         choice = UserIn.getInputByNumber("Linear", "Linear+", "Poly",
                                          "RBF")
      if choice == 1: self._SVM.Linear(X=self.dataX, y=self.dataY)
      elif choice == 2: self._SVM.LinLinear(X=self.dataX, y=self.dataY)
      elif choice == 3: self._SVM.Poly(X=self.dataX, y=self.dataY)
      elif choice == 4: self._SVM.RBF(X=self.dataX, y=self.dataY)
     else: print("Error")
    def handle(self, *args, **options):

        try:
            git_cmd = sh.git.bake('--no-pager')
            WILHELM_VERSION\
                = git_cmd(['log', '--pretty=format:%H']).stdout.strip().split('\n')[0]

            with open(version_filename, 'w') as f:
                f.write(WILHELM_VERSION)

        except:
            sys.stdout.write('Could not write wilhelm version to file.\n')
    def _do_reducer(self):
        alt_stdin = open(0,'rb')
        def get_kvp(self=self,alt_stdin=alt_stdin):
            for line in alt_stdin:
                key,value = line.split(self.key_value_delimiter)
                yield (key,value)

        mapped = groupby(get_kvp(),itemgetter(0))
        try:
            for out_key,out_value in self.reducer(mapped):
                sys.stdout.buffer.write(b''.join([out_key,self.key_value_delimiter,out_value,b'\n']))
        finally:
            alt_stdin.close()
Example #20
0
 def wait_for_output(output_queue, __done__):
     while (1):
         try:
             msg = output_queue.get(timeout=5)
             if (msg):
                  sys.stdout.write('{}\n'.format(msg))
             if (__done__.get('is_done', False)
                     and (output_queue.qsize() == 0)):
                 sys.stderr.write('Nothing more to do.\n')
                 break
         except:
             __done__['is_done'] = True
         time.sleep(1)
Example #21
0
def script(ssh, cf):
    if not cf['script']:
        sys.stdout.write(
            "You must provide --script for the script action! Use --help to see more.\n"
        )
        sys.stdout.flush()
        sys.exit(1)
    # if ssh.info[2] == 'root':
    #     homePath = '/root'
    # else:
    #     homePath = os.path.join('/home', ssh.info[2])
    if cf['script'][-1] == '/':
        sys.stdout.write(
            "Error: --script option, you should provide a file path instead of a directory path.\n"
        )
        sys.stdout.flush()
        sys.exit(2)
    if '/' in cf['script']:
        srcPath = cf['script']
        dstfile = os.path.split(srcPath)[-1]
    else:
        if os.path.exists('./%s' % cf['script']):
            srcPath = './' + cf['script']
            dstfile = os.path.split(srcPath)[-1]
        else:
            srcPath = os.path.join(cf['scripts_dir'], cf['script'])
            dstfile = cf['script']
    homePath = cf['tmp_dir']
    destPath = os.path.join(homePath, '.' + ssh.info[2],
                            dstfile + Utils.random())
    try:
        ssh.sftp.stat(os.path.dirname(destPath))
    except FileNotFoundError:
        try:
            ssh.sftp.mkdir(os.path.dirname(destPath))
        except PermissionError as e:
            sys.stdout.write('Host: %s %s\n' % (ssh.info[0], str(e)))
            sys.exit(2)
    cf['src'], cf['dest'] = srcPath, destPath
    if cf['append'] is None:
        cf['append'] = destPath
    else:
        if not cf['sep'] is None:
            cf['append'] = destPath + ' ' + ' '.join(cf['append'].split(
                cf['sep']))
        else:
            cf['append'] = destPath + ' ' + cf['append']
    put(ssh, cf)
    recode = cmd(ssh, cf)
    ssh.execute("rm -rf {cmd}".format(cmd=destPath), get_pty=False)
    return recode
Example #22
0
 def update_token(self, social_app, social_token):
     request_url = 'https://graph.facebook.com/oauth/access_token?client_id=%s&client_secret=%s&grant_type=fb_exchange_token&fb_exchange_token=%s' % (
         social_app.key, social_app.secret, social_token.token)
     try:
         response = urlparse.parse_qs(urllib2.urlopen(request_url).read())
         new_token = response.get('access_token', [None])[0]
         timeleft = response.get('expires', [0])[0]
         if new_token and timeleft:
             expiry_date = datetime.datetime.now() + datetime.timedelta(seconds=int(timeleft)+60)
             social_token.token = new_token
             social_token.save(expiry_date=expiry_date, updated=True)
             
     except urllib2.HTTPError, e:
          sys.stdout.write('Request URL:\n%s\n\nError:\n%s\n' % (request_url, e.read() or 'Unknown error'))
Example #23
0
def reload_services():
    """This function reloads configurations of PHP-FPM and NGINX services"""
    res = subprocess.run(['/etc/init.d/php7.0-fpm', 'reload'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    if res.stderr != b'':
        sys.stdout.write('Unable to reload PHP: %s\n' % res.stderr)
        sys.exit(1)
    res = subprocess.run(['/usr/sbin/nginx', '-s', 'reload'],
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE)
    if res.stderr != b'':
        sys.stdout.write('Unable to reload NGINX: %s\n' % res.stderr)
        sys.exit(1)
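A variation on the example above (a sketch under assumed behaviour, not taken from the original project): keying the failure check off the exit status instead of captured stderr avoids treating harmless warnings on stderr as fatal.

import subprocess
import sys

def reload_service(cmd):
    # hypothetical helper: run a reload command and abort on a non-zero exit code
    res = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if res.returncode != 0:
        sys.stdout.write('Unable to run %s: %s\n' % (' '.join(cmd), res.stderr.decode()))
        sys.exit(1)

reload_service(['/etc/init.d/php7.0-fpm', 'reload'])
reload_service(['/usr/sbin/nginx', '-s', 'reload'])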
Example #24
0
 def resource_path(self):
     conf = ConfigParser()
     conf.read(self.fh)
     resource_loc = conf.get('RESOURCE_DIR', 'sv2_resource')
     if not resource_loc.endswith('/'): resource_loc = resource_loc + '/'
     if not os.path.isdir(resource_loc):
          sys.stdout.write(
              'FATAL ERROR: {} not found. Install resource files < sv2 -download >\n'
              .format(resource_loc))
          sys.stderr.write(
              'FATAL ERROR: {} not found. Install resource files < sv2 -download >\n'
              .format(resource_loc))
         sys.exit(1)
     return resource_loc
Example #25
0
 def __exit__(self, typ, val, trace):
     # write recorded messages to a file on any error
     if typ is None:
         self.writelines(Logger.instance.history())
         Logger.instance.setWriter(self)
     else:
         # setting trace to false removes these lines from the traceback.
         Logger.instance.enableTraceback(False)
         sys.stdout("\n")
         for line in traceback.format_exception(typ, val, trace):
             sys.stdout(line)
         with codecs.open("error.log", "w", "utf-8") as wf:
             for line in Logger.instance.history():
                 wf.write(line)
     return
def reload_services():
    """This function reloads configurations of PHP-FPM and NGINX services"""
    res = subprocess.run([
        '/etc/init.d/php7.0-fpm',
        'reload'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if res.stderr != b'':
        sys.stdout.write('Unable to reload PHP: %s\n' % res.stderr)
        sys.exit(1)
    res = subprocess.run([
        '/usr/sbin/nginx',
        '-s',
        'reload'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    if res.stderr != b'':
        sys.stdout.write('Unable to reload NGINX: %s\n' % res.stderr)
        sys.exit(1)
Example #27
0
def check_revision_dates(tsd_file, main_file):

    tsd_dates = tsd_file.revision

    tsd_key = list(tsd_dates.keys())
    tsd_key = tsd_key[0]
    tsd_date = tsd_dates[tsd_key]
    for file in main_file:
        if tsd_key in file.revision.keys():
            if tsd_date == file.revision[tsd_key]:
                pass
            elif tsd_date > file.revision[tsd_key]:
                file.revision[tsd_key] = tsd_date
            else:
                # FIXME this needs to be handled
                sys.stdout.write('error please fix me\n')
def task_initiation(sender,instance,created):
   # if created:
        import sys
        sys.stdout.write('[sys out print] : task ' + instance.taskname + ' is created and initialized to stage ' + instance.taskcurrentstage + '\n')
        instance.taskname =  instance.taskname + 'av'
#def task_deletion(sender,instance,created,**kwargs):
#    logger.info('[sys out print] : task ' + instance.taskname + ' is deleted')
Example #29
0
def init_log(log_dir=None):
    if log_dir and not path.exists(log_dir):
        msg = 'Specified path does not exist: %s' % log_dir
        sys.stdout.write(msg + '\n')
        log_dir = None

    config = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'default': {
                'format':
                '%(levelname)s %(asctime)s %(module)s:%(funcName)s:%(lineno)d %(message)s'
            },
            'simple': {
                'format': '%(levelname)s %(message)s'
            }
        },
        'handlers': {
            'console': {
                'level': 'DEBUG',
                'class': 'logging.StreamHandler',
                'formatter': 'default',
            },
        },
        'loggers': {
            '': {
                'handlers': ['console'],
                'level': 'INFO',
                'propagate': True,
            },
        }
    }

    if log_dir:
        config['handlers']['file'] = {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': path.join(log_dir, 'essay.log'),
            'maxBytes': 1024 * 1024 * 50,
            'backupCount': 5,
            'formatter': 'default',
        }
        config['loggers']['']['handlers'] = ['console', 'file']

    logging.config.dictConfig(config)
Example #30
0
def getconn(dbfile):
    # we force the directory to be db_dir()
    dbpath = db_path(dbfile)

    if not os.path.exists(dbpath):
        print "dbfile '" + dbfile + "' does not exist"
        sys.exit(45)

    try:
        conn = sqlite.connect(dbpath)
        conn.row_factory = sqlite.Row
        conn.create_function("regexp", 2, regexp)
    except:
        sys.stdout.write('Could not connect to dbfile: "%s"<br>' % dbfile)
        sys.exit(45)

    return conn
Example #31
0
def init_log(log_dir=None):
    if log_dir and not path.exists(log_dir):
        msg = 'Specified path does not exist: %s' % log_dir
        sys.stdout.write(msg + '\n')
        log_dir = None

    config = {
        'version': 1,
        'disable_existing_loggers': False,
        'formatters': {
            'default': {
                'format': '%(levelname)s %(asctime)s %(module)s:%(funcName)s:%(lineno)d %(message)s'
            },
            'simple': {
                'format': '%(levelname)s %(message)s'
            }
        },
        'handlers': {
            'console': {
                'level': 'DEBUG',
                'class': 'logging.StreamHandler',
                'formatter': 'default',
            },
        },
        'loggers': {
            '': {
                'handlers': ['console'],
                'level': 'INFO',
                'propagate': True,
            },
        }
    }

    if log_dir:
        config['handlers']['file'] = {
            'level': 'DEBUG',
            'class': 'logging.handlers.RotatingFileHandler',
            'filename': path.join(log_dir, 'essay.log'),
            'maxBytes': 1024 * 1024 * 50,
            'backupCount': 5,
            'formatter': 'default',
        }
        config['loggers']['']['handlers'] = ['console', 'file']

    logging.config.dictConfig(config)
Example #32
0
def main():
    gui_sock = create_socket((HOST_GUI, PORT_GUI))
    idp = gui_sock.recv(1024)

    if idp != "MyMainServer":
        sys.stdout("Try again with MyMainServer")
        sys.exit(1)

    idp_sock = create_socket((HOST_MAIN, PORT_MAIN))
    main_id = idp_sock.recv(1024)
    main_id = int(main_id)
    #main_id = 11

    user_info = get_user_info(main_id)
    gui_sock.send(user_info)

    gui_sock.close()
    idp_sock.close()
    async def port_match(self, port_list):
        if not port_list:
            sys.stdout("No open ports.")
            sys.exit(SUCCESS)

        assert hasattr(self, 'port_finder'), ('Need `port_finder`')
        print("Service" + " " * 30 + "Port" + " " * 30 + "Protocol" +
              " " * 30 + "Type" + "\n\n")
        s = Services()
        running_services = s.processes()

        all_services = []
        for service in running_services:
            for port in port_list:
                if service.get('port') == port:
                    all_services.append(service)

        for service in all_services:
            self.port_finder(service)
Example #34
0
    def ioHandler(self, records = None):

        self.records = records

        if not os.path.exists(self.record):
            with open(self.record, "w") as f:
                cPickle.dump([], f)
        if self.action == "w":
            with open(self.record, "w") as f:
                cPickle.dump(records, f)
            return True
        elif self.action == "r":
            with open(self.record) as f:
                records_old = cPickle.load(f)
            return records_old
        elif self.action == "s":
            # Nothing will happen in this method.
            sys.stdout("If you want to search something, pls use FakeDB.search().")
        else:
            raise ValueError("No such DB operation.")
Example #35
0
def open_pcap(interface_name, snap_len=0xffff, promisc=1, timeout=1000):
    dev_name = ""
    err_buf = ctypes.create_string_buffer(pcap.PCAP_ERRBUF_SIZE)
    try:
        dev_name = pcap.get_pcap_dev_name(interface_name)
    except pcap.PcapError as pe:
        sys.stdout("error occurred getting pcap dev name for interface name, {}".format(pe))
        sys.exit(-1)
    if dev_name == "":
        sys.stderr("device name not found for interface name \"{}\"".format(interface_name))
        sys.exit(-1)
        
    h_pcap = pcap.pcap_open_live(dev_name, snap_len, promisc, timeout, err_buf)
    #if h_pcap == None:
    #    sys.stderr("error occurred opening device for packet capture, {}".format(err_buf.value))
    #    sys.exit(-1)
    if h_pcap is None:
        raise PcapError("failed to open device for capturing, {}".format(err_buf.value))

    return h_pcap
Example #36
0
def main():
    csv_file = sys.stdin

    df = pd.read_csv(csv_file)

    if len(sys.argv) != 4:
        sys.stdout("Usage: min_value, max_value, point_where_cut_outlier")
        return -1
    try:
        val_min = float(sys.argv[1])
        val_max = float(sys.argv[2])
        dot_position = int(sys.argv[3])
    except ValueError:
        sys.stdout("Usage: float min_value, float max_value, int point_where_cut_outlier")
        return -2

    df.columns = [x.strip().replace(' ', '_') for x in df.columns]

    ''' outlier handling '''
    for i in range(1, len(df.columns)):
        colum_name = df.columns[i]
        df[colum_name] = df[colum_name].apply(lambda val: fix_point(val, val_min, val_max, dot_position))


    ''' missing value handling '''
    for i in range(1, len(df.columns)):

        colum_name = df.columns[i]
        # print(colum_name+" len: "+str(len(df[colum_name])))

        nan_index = df[colum_name].index[df[colum_name].apply(np.isnan)]
        # print(nan_index)

        df[colum_name] = (df[colum_name].ffill() + df[colum_name].bfill()) / 2
        df[colum_name] = df[colum_name].bfill().ffill()
        nan_index2 = df[colum_name].index[df[colum_name].apply(np.isnan)]
        # print(nan_index2)

        # print("\n******************")

    df.to_csv(sys.stdout, index=None, header=True)
Example #37
0
def dump_metrics(dump_file, experiment_file, start_time, elapsed_time,
                 backend_name, metrics, field_sep="\t"):
    """
    Write or append collected metric values to the specified flat file.

    Arguments:
        dump_file (str): path to file to write. Will be created if doesn't
                         exist, or appended to (without header if it does)
        experiment_file (str): path to yaml file used to run this experiment
        start_time (str): date and time at which experiment was started.
        elapsed_time (float): time taken to run the experiment.
        backend_name (str): name of the backend on which the experiment was run.
        metrics (dict): Collection of metric values, as returned from
                        FitPredictErrorExperiment.run() call.
        field_sep (str, optional): string used to separate each field in
                                   dump_file.  Defaults to tab character.
    """
    if dump_file is None or dump_file == '':
        df = sys.stdout
    elif not os.path.exists(dump_file) or os.path.getsize(dump_file) == 0:
        ensure_dirs_exist(dump_file)
        df = open(dump_file, 'w')
        metric_names = []
        if isinstance(metrics, dict):
            metric_names = ["%s-%s" % (metric.lower(), dset.lower())
                            for metric in sorted(metrics.keys())
                            for dset in sorted(metrics[metric].keys())]
        df.write(field_sep.join(["host", "architecture", "os",
                                 "os_kernel_release", "neon_version",
                                 "backend",
                                 "yaml_name", "yaml_sha1", "start_time",
                                 "elapsed_time"] + metric_names) + "\n")
    else:
        df = open(dump_file, 'a')
    info = os.uname()
    trunc_exp_name = ("..." + os.path.sep +
                      os.path.dirname(experiment_file).split(os.path.sep)[-1] +
                      os.path.sep +
                      os.path.basename(experiment_file))
    # TODO: better handle situation where metrics recorded differ from those
    # already in file
    metric_vals = []
    if isinstance(metrics, dict):
        metric_vals = ["%.5f" % metrics[metric][dset] for metric in
                       sorted(metrics.keys()) for dset in
                       sorted(metrics[metric].keys())]
    df.write(field_sep.join([x.replace("\t", " ") for x in
                             [info[1], info[4], info[0], info[2],
                              neon.__version__, backend_name, trunc_exp_name,
                              hashlib.sha1(open(experiment_file,
                                                'rb').read()).hexdigest(),
                              start_time, "%.3f" % elapsed_time] +
                             metric_vals]) + "\n")
    if df is not sys.stdout:
        df.close()
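A hedged usage sketch for dump_metrics; the paths, metric names and backend label below are illustrative only and are not taken from the original experiment.

metrics = {"misclassification": {"train": 0.01234, "test": 0.04321}}
dump_metrics("results/metrics.tsv", "experiments/example.yaml",
             start_time="2015-01-01 12:00:00", elapsed_time=42.0,
             backend_name="cpu", metrics=metrics)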
Example #38
0
def run(interface_name, output_url, snap_len=0xffff, timeout=1000):
    """

    """
    output = parse_output_url(output_url)

    try:
        h_pcap = pcap.open_pcap(interface_name, snap_len, PROMISC_MODE,
                                timeout)
        if h_pcap is None:
            sys.stderr("failed to open deivce for packet capture")
            sys.exit(-1)
        while True:
            pkt_ts, pkt_len, pkt_str = pcap.get_next_pkt(h_pcap)
            if pkt_len <= 0:
                sys.stdout("no packet retrieved")
            else:
                on_packet_captured(pkt_ts, pkt_str, pkt_len, output)
    except pcap.PcapError as pe:
        sys.stderr("pcap error occurred, {}".format(pe))
        sys.exit(-1)
Example #39
0
def get_next_pkt(h_pcap):
    pcap_hdr = POINTER(PcapPktHdr)()
    pcap_data = POINTER(c_ubyte)()
    err_buf = ctypes.create_string_buffer(PCAP_ERRBUF_SIZE)
    pkt_ts = -1
    pkt_str = ""
    pkt_len = -1
    result = pcap_next_ex(h_pcap, pcap_hdr, pcap_data)
    if result == 1:
        pkt_len = pcap_hdr.contents.len
        raw_ts = pcap_hdr.contents.ts
        secs = int(raw_ts.tv_sec.value)
        usecs = int(raw_ts.tv_usec.value)
        pkt_ts = usecs + secs * 1000000
    elif result == 0:
        sys.stdout.write("timeout occurred\n")
    elif result == -1:
        raise PcapError("failed to get packet, {}".format(err_buf))
    elif result == -2:
        sys.stdout.write("EOF\n")
    return pkt_ts, pkt_len, pkt_str
def get_jokes_page(after):
    '''Requests jokes through reddit's online API, sidestepping PRAW's limit
    on the history of the instance
    '''
    # See if old authorization works
    headers = {'Authorization': AUTH, 'User-Agent': user_agent}
    page = requests.get(f'https://oauth.reddit.com/r/dadjokes/new.json?\
limit=100&after={after}',
                        headers=headers).json()
    try:
        if page['error'] == 401:
            # Get new authorization
            authorization = get_auth()
            headers = {
                'Authorization': authorization,
                'User-Agent': user_agent
            }
            page = requests.get(
                f'https://oauth.reddit.com/r/dadjokes/new.json?\
limit=100&after={after}',
                headers=headers).json()
        elif page['error'] == 429:
            sys.stdout.write('Too Many Requests. Waiting...\n')
            sys.stdout.flush()
            for t in range(75):
                if t % 5 == 0:
                    sys.stdout.write(f'{15 + (t - 75) // 15} seconds')
                    sys.stdout.write('\r')
                    sys.stdout.flush()
                time.sleep(.2)
            print('Resuming')
            return get_jokes_page(after)
    except:
        pass
    return page
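A sketch of paging through the listing with the function above; it assumes the standard reddit listing shape (`data.children`, `data.after`), which the original snippet does not show.

after = ''
for _ in range(5):
    page = get_jokes_page(after)
    for child in page['data']['children']:
        print(child['data']['title'])
    after = page['data']['after']
    if after is None:
        break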
def decode_folder(session,source,level,folder):
    global g_cnt
    try:
        xml = lxml.etree.fromstring(source)
    except:
        sys.stdout.write('#')
        sys.stdout.flush()
        print source  
    nsmap = {'atom': 'http://www.w3.org/2005/Atom','d': 'http://schemas.microsoft.com/ado/2007/08/dataservices','m': 'http://schemas.microsoft.com/ado/2007/08/dataservices/metadata'}
    cnt=0
    for m in xml.xpath("//atom:entry/atom:content/m:properties", namespaces=nsmap):
        cnt = cnt + 1
        g_cnt = g_cnt + 1
        N = m.find('d:Name', namespaces=nsmap)
        L = m.find('d:ServerRelativeUrl', namespaces=nsmap)
        I = m.find('d:ItemCount', namespaces=nsmap)
        U = m.find('d:UniqueId', namespaces=nsmap)
        rep = urllib.quote(L.text.encode('windows-1252'))
        ligne = 'd;'+urllib.unquote(folder.encode("windows-1252"))+';"'+N.text.encode('windows-1252')+'";"'+args.site+rep+'";'+I.text+';'+U.text+';'+rep+'\n'
        fo.write(ligne)
        recurse_dir(session,rep,level)
    return
Example #42
0
def main():

    global THINGSPEAKKEY
    global THINGSPEAKURL
    global CFGFILE
    global TEMPOUT
    global AIRPRESSURE

    try:

        while True:
            readDataERIK()
            sendDataERIK(THINGSPEAKURL, THINGSPEAKKEY, "field1", "field2", TEMPOUT, AIRPRESSURE)
            sys.stdout.flush()

            # Toggle LED while we wait for next reading
            for i in range(0, INTERVAL * 60):
                time.sleep(1)

    except:
        # Reset GPIO settings
        # GPIO.cleanup()
        sys.stdout("exception!")
Example #43
0

# @example
# def example18():
#     widgets = [Percentage(),
#                ' ', Bar(),
#                ' ', ETA(),
#                ' ', AdaptiveETA()]
#     pbar = ProgressBar(widgets=widgets, maxval=500)
#     pbar.start()
#     for i in range(500):
#         time.sleep(0.01 + (i < 100) * 0.01 + (i > 400) * 0.9)
#         pbar.update(i + 1)
#     pbar.finish()


@example
def example19():
    pbar = ProgressBar()
    for i in pbar([]):
        pass
    pbar.finish()


if __name__ == "__main__":
    try:
        for example in examples:
            example()
    except KeyboardInterrupt:
        sys.stdout("\nQuitting examples.\n")
Example #44
0
inp = os.fdopen(fd, "r", 0)
out = os.fdopen(fd, "w", 0)

while 1 :
    sys.stdout.write("> ")
    line = sys.stdin.readline()
    out.write(line.strip() + "\0")

    while 1 :
        resp = inp.readline().strip()
        sys.stdout.write(resp)

        entry = int(resp.strip(), 0)

        continue

        # This will be the symbol we found
        symbol = None;

        # Go through the symbols and find the one that is the nearest
        for x in symbols :
            # Check if the current symbol better than what we already have
            if symbol is None or (x[0] <= entry and x[0] > symbol[0]) : symbol = x

        # Check if we found something
        if not symbol is None :
            sys.stdout.write(" %08X\t%s\n" % symbol)
        else :
            sys.stdout(" Nothing was found!\n")

        if resp.strip() == "" : break
Example #45
0
def server(addr="127.0.0.1"):

	data_in_prog = False

	try:
		sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
		server_addr = ((addr, BGP_PORT))
		sock.bind(server_addr)
		sys.stdout.write("Started listening at port %s at server %s" % server_addr)
	except:
		sys.stderr.write("Error binding to local socket. Probably taken or not root.\n")
		return -1

	sock.listen(TRUE)

	while True:
		sys.stdout.write("Listening for connections.")
		connection, client_address = sock.accept()
		try:
			sys.stdout.write("Incoming connection from %s\n" % client_address)
			while True:
				data = connection.recv()
				if data:
					if data.find(INITIATOR) != -1:
						# Found initiation packet!

						sys.stdout.write("Got incoming initator from: %s \n" %client_address)
						# Extract filename
						file_name_offset = data.find(DELIMITER) + len(DELIMITER)
						data = data[file_name_offset:]
						file_name = data[:data.find(DELIMITER)]

						# Extract CRC:
						crc = data[data.find(DELIMITER) + len(DELIMITER):data.find(DATA_TERM)]

						sys.stdout("Getting file:\n\tFile Name:\t" + file_name + "\n\tCRC:\t" + str(crc) + "\n")

						i = 0                                   # Zeroing chunks counter
						entire_raw_file = ""                    # Creating file space
						data_in_prog = True                     # Progress of file


					elif data.find(DATA_TERM) and data_in_prog is True and data.find(CONN_TERM) == -1:
						# Found regular data packet!
						i += 1
						entire_raw_file = data[data.find(PACK_INIT) + len(PACK_INIT):data.find(DATA_TERM)]

					elif data.find(CONN_TERM) != -1:
						# Got connection termination

						# Check for CRC matching:
						try:
							entire_raw_file = base64.b64decode(entire_raw_file)
						except:
							sys.stderr.write("Error BASE64 decoding file.\nFile dropped.\n")
							return -1

						try:
							entire_raw_file = zlib.decompress(entire_raw_file, COMP_LVL)
						except:
							sys.stderr.write("Error decompressing file!\nFile Dropped!\n")
							return -1

						if crc == str(zlib.crc32(entire_raw_file)):
							sys.stdout.write("CRC matched!\n")

							fh = open(crc + "_" + file_name, BINARY_WRITE)
							fh.write(entire_raw_file)
							fh.close()

						else:
							sys.stderr.write("CRC match failed!\n")

				else:
					sys.stdout.write("No incoming data.\n")
		finally:
			connection.close()
    return [stdout.getvalue()]


def make_server(
    host, port, app, server_class=WSGIServer, handler_class=WSGIRequestHandler
):
    """Create a new WSGI server listening on `host` and `port` for `app`"""
    server = server_class((host, port), handler_class)
    server.set_app(app)
    return server


if __name__ == '__main__':
    httpd = make_server('', 8000, demo_app)
    sa = httpd.socket.getsockname()
    sys.stdout("Serving HTTP on %s port %s...\n" % (sa[0], sa[1]))
    import webbrowser
    webbrowser.open('http://localhost:8000/xyz?abc')
    httpd.handle_request()  # serve one request, then exit
Example #47
0
def stdoutMP(lock, msg):
    import sys
    lock.acquire()
    sys.stdout.write(msg)
    lock.release()
Example #48
0
import os
import shutil
import sys
import argparse
import uuid
import fnmatch

parser = argparse.ArgumentParser()

parser.add_argument("--dir", help="Source Path")

args = parser.parse_args()

tilefiles = []

for root, dirs, files in os.walk(args.dir):
    for name in fnmatch.filter(files, "*title*"):
        tilefiles.append(os.path.join(root, name))

if len(tilefiles) == 0:
    sys.stdout("Error no title file!")

for titlefile_path in tilefiles:

    unsortedfile = os.path.join(os.path.dirname(titlefile_path),str(uuid.uuid1())+".pdf")
  
    try:
        shutil.move(titlefile_path, unsortedfile)
    except:
        print("can not move file %s -> %s" % (titlefile_path, unsortedfile))
Example #49
0
                         index = s
                         a = in_hand.pop(s)
                         break
                 else:
                     a = in_hand.pop(0)
                     b = []
                     break
                     
         sys.stdout.write("%d %d\n" % (a, len(b)))
         for i in xrange(len(b)):
             sys.stdout.write("%d " % (b[i]))
         sys.stdout.write("\n")
     else:
         a = in_hand.pop(0)
         b = 0
         sys.stdout("%d %d\n" % (a, b))
 else:
     index = 0
     while True:
         a = in_hand.pop(index)
         in_table.append(a)
         in_table.sort()
         b = closestSum(in_table, a)
         if sum(b) >= a:
             in_hand.insert(index, a)
             for s in xrange(index, len(in_hand)+1):
                 if in_hand[s] > sum(b):
                     index = s
                     a = in_hand.pop(s)
                     break
             else:
Example #50
0
 def outFunction(*args, **kwargs):
      sys.stdout.write('Arguments:\n' + str(args) + '\n')
      sys.stdout.write('Arguments with keyword:\n' + str(kwargs) + '\n')
     pass
Example #51
0
            foundLiterals[t.value] = foundLiterals.get(t.value, 0) + 1
            t.value = ''.join(('\\href{', t.value, '}{',
                            source[t.start:t.end], '}'))
        timings.append('prepareReplacements: %f' % (time.time()-tm,))
        tm = time.time()
        replaced[f] = replaceLiterals(source, literalTags)
        timings.append('replaceLiterals: %f' % (time.time()-tm,))

    # Save results
    for f, s in replaced.iteritems():
        assert f is not None and isLocalFile(f, workdir)
        fname = os.path.join(opt.output, f)
        try:
            outfile = open(fname, 'w')
        except IOError, e:
            sys.stdout.write('Cannot open output file (\'%s\'): %s\n' % \
                                (fname, str(e)))
            continue
        tm = time.time()
        outfile.write(s)
        outfile.close()
        timings.append('writeSourceFile: %f' % (time.time()-tm,))

    # Stats
    if opt.stats:
        for w,n in foundLiterals.iteritems():
            print 'Concept <%s> replaced %d times' % (w, n)

    if opt.timings:
        for l in timings:
            print l
    def get_mail(host, user, password, search_key, port=110, ssl=False):
        """
        :param host: mail server ip address
        :param user: mail login user name
        :param password: mail login user's password
        :param search_key: search keys; in practice the match is determined jointly by the sender [email protected] and search_key
        :param port: mail server connect port
        :param ssl: use ssl connection or not
        :return: return a tuple (a, b, c),
                    'a': a logic value, telling user search success or not
                    'b': a mail data or None
                    'c': a description for this searching
        """

        # helper: get the character encoding of a message part
        def get_charset(message, default="ascii"):
            charset = message.get_content_charset()
            return charset if charset else default

        # helper: parse the mail body
        def parse_email(msg):
            # content_list = []
            for part in msg.walk():
                if not part.is_multipart():
                    #content_type = part.get_content_type()
                    charset = get_charset(part)

                    #if content_type in ['text/plain']:
                    #    suffix = '.txt'
                    #if content_type in ['text/html']:
                    #    suffix = '.htm'
                    if charset == None:
                        data = part.get_payload(decode=True).encode('utf-8')
                    else:
                        data = part.get_payload(decode=True).decode(charset).encode('utf-8')
                        # content_list.append((suffix, data))
            # return  content_list
            return data

        # if no search_key was passed in, exit immediately
        if not search_key:
            sys.stdout.write('No search keyword argument was provided!' + '\n')
            sys.exit(1)
            # raise Exception, 'error function call: search_key var is null'

        # open a plain or SSL POP3 connection depending on the ssl flag
        if ssl == 1:
            pop3_conn = poplib.POP3_SSL(host, port)
        else:
            pop3_conn = poplib.POP3(host, port)

        pop3_conn.user(user)
        pop3_conn.pass_(password)

        total_number_mail = pop3_conn.stat()[0]
        search_flag = False

        for item in iter(pop3_conn.list()[1][-1::-1]):
            # get the mail sequence number and size (size is in bytes)
            number, size = item.split()
            sys.stdout.write('\r---------------- %d/%d ---------------' % (
                total_number_mail - int(number) + 1, total_number_mail))
            # if a mail is larger than 10 KB it cannot be the update-package mail from the OA system, skip it
            if int(size) > 1024 * 10: continue

            lines = pop3_conn.retr(number)[1]
            # get the encrypted mail content
            msg = email.message_from_string('\n'.join(lines))

            # msg.get('From')
            # the first element is the sender's alias, the second is the real sender address
            # '"*****@*****.**" <*****@*****.**>'
            # from_info_tuple =  email.utils.parseaddr(msg.get('from'))
            # ('jiwei.qian', '*****@*****.**')
            from_address = email.utils.parseaddr(msg.get('from'))[1]

            if from_address in '*****@*****.**':
                # str_date = 'Date : ' + msg["Date"]
                # subject = email.Header.decode_header(msg["Subject"])
                # sub = my_unicode(subject[0][0], subject[0][1])
                # str_sub = 'Subject : ' + sub

                # process the mail body content
                data = parse_email(msg)
                if ":" + search_key + "<" in data:
                    # once the matching mail is found, set the flag and stop searching
                    search_flag = True
                    receive_time = msg.get('Received').split(';')[1].strip()
                    break
            else:
                continue

        # close the POP3 connection
        pop3_conn.quit()

        sys.stdout.write('\n')
        if search_flag:
            return data, receive_time
        else:
            # raise Exception, 'cat not find any matched mail'
            sys.stdout.write('Could not find a matching mail in the inbox!' + '\n')
            sys.exit(1)
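A hedged call sketch for get_mail; the host, credentials and search key are placeholders, and it assumes the function is exposed as a plain function or staticmethod. Note that on success the body actually returns `(data, receive_time)` rather than the three-tuple described in its docstring.

data, receive_time = get_mail('pop.example.com', 'oa_user', 'secret',
                              search_key='update-package-id', port=995, ssl=1)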
Example #53
0
# I added these functions to make it easier to print with different colors
# The are shorthand for the write, printf, and make functions above
# e.g. colortext.pcyan('test') prints in cyan, colortext.wlightpurple('test') writes in light purple, colortext.mblue('test') returns a blue string
# Note: Python does not close the scope in for-loops so the closure definition looks odd - we need to explicitly capture the state of the c variable ("c=c")
# Note: we should handle kwargs here in the future
def xprint(*args): print(args)
def xjoin(*args): return ''.join(map(str, args))
for c in colors:
    allow_colors = False
    try:
        from sys import platform as _platform
        if _platform == "linux" or _platform == "linux2" or _platform == "darwin":
            allow_colors = True
    except: pass
    if allow_colors:
        setattr(sys.modules[__name__], 'w'  + c, lambda s, c=c : write(s, color = c))
        setattr(sys.modules[__name__], 'p'  + c, lambda s, c=c : printf(s, color = c))
        setattr(sys.modules[__name__], 'm'  + c, lambda s, c=c : make(s, color = c))
    else:
        setattr(sys.modules[__name__], 'w'  + c, lambda s, c=c : sys.stdout.write(s))
        setattr(sys.modules[__name__], 'p'  + c, lambda s, c=c : xprint(s))
        setattr(sys.modules[__name__], 'm'  + c, lambda s, c=c : xjoin(s))
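A minimal standalone demonstration of the late-binding pitfall the comment block above mentions, and of the `c=c` default-argument capture it uses as the fix (not part of colortext itself):

colors_demo = ("red", "green", "blue")
late = [lambda: c for c in colors_demo]        # every lambda sees the final value of c
bound = [lambda c=c: c for c in colors_demo]   # each lambda captures its own c

print([f() for f in late])    # ['blue', 'blue', 'blue']
print([f() for f in bound])   # ['red', 'green', 'blue']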


class Exception(Exception):
    def __init__(self, msg):
        self.message = make_error(msg)
    def __str__(self):
        return self.message
    return s,values,dt


if __name__=="__main__":

    

    import sys
    import os

    import numpy as np



    if len(sys.argv)<3:
        sys.stdout("ERROR: execute with two parameters:\n\t1. path to the output folder, which needs to contain one template folder named 'template'\n\t2. list of movement speeds for the first stimulus-object.")

    else:
        temp_path=sys.argv[1]+'template'
        path=sys.argv[1]
        

        exec("v="+sys.argv[2])


        


        def single_process(template,v_i,stim_indices=[0],neuron_save_indices=[],neuron_save_labels=[],sensor_save_indices=[],intensities_save_interval=None,skip_relax=True):

            
Example #55
0
import sys
import hashlib
data = sys.stdin.readlines()
datastr= ''.join(data)
hash = hashlib.sha1(datastr.encode("UTF-8")).hexdigest()
sys.stdout.write(hash[:10])
Example #56
0
    >>> example29()
    True
    """
    widgets = ['Test: ', Percentage(), ' | ', ETA(), ' | ', AbsoluteETA()]
    pbar = ProgressBar(widgets=widgets, maxval=500).start()
    for i in range(500):
        sleep(0.01)
        pbar.update(i + 1)
    pbar.finish()
    return True


def run():
    """Display progress bar examples."""
    for example in examples:
        example()
    return True


def test():
    """Run tests."""
    assert run() is True


if __name__ == '__main__':
    try:
        run()
    except KeyboardInterrupt:
        sys.stdout.write('\nQuitting examples.\n')
Example #57
0
def printf(*args):
    if PRINT_TEST_RESULTS:
        for a in args:
            sys.stdout.write(str(a))
        print('')
Example #58
0
 def time_factorial(self):
     x = 1
     for i in xrange(100):
         x *= i
     # This is to generate invalid output
     sys.stdout("X")
Example #59
-1
def download_progress_hook(count, blockSize, totalSize):
	global last_percent_reported
	percent = int(count * blockSize * 100 / totalSize)
	
	if last_percent_reported != percent :
		if percent % 5 == 0:
			sys.stdout("%s%%" % percent)
			sys.stdout.flush()
		else:
			sys.stdout.write(".")
			sys.stdout.flush()
		
		last_percent_reported = percent