Ejemplo n.º 1
0
def remote(cluster, command, merge_key=None):
    """Runs C{command} on every node of C{cluster}, in parallel.

    When C{merge_key} is given, each thread's input is assumed to be
    ordered by that key, and the per-thread sequences are merged into
    one sequence using it. This is the same operation as C{fork},
    except that the first argument must identify a cluster.
    """
    operation = fork.fork(cluster, command, merge_key)
    # Mark the op so downstream validation insists on a cluster target.
    operation._set_cluster_required(True)
    return operation
Ejemplo n.º 2
0
def remote(cluster, command, merge_key=None):
    """Executes C{command} remotely on each node of C{cluster}. Execution on all nodes is
    done in parallel. If C{merge_key} is specified, then
    the inputs of each thread are expected to be ordered by the C{merge_key}. The sequences
    from the threads
    are then merged into a single sequence using the C{merge_key}.
    (This function is identical to C{fork}, except that the first argument is required to
    identify a cluster.)
    """
    # PEP 8: no spaces around '=' in a keyword default ('merge_key = None'
    # in the original); now consistent with the other definition of remote().
    op = fork.fork(cluster, command, merge_key)
    op._set_cluster_required(True)
    return op
Ejemplo n.º 3
0
Archivo: bash.py Proyecto: es92/strwm
def run(cmd):
    """Execute the shell command string *cmd* via ``/bin/bash -c``.

    Bug fix: the original passed ``'"' + cmd + '"'`` as the ``-c``
    argument. Because the argv list is handed to bash directly (no outer
    shell re-parses the list), bash received the quotes as part of the
    command string and treated the whole quoted text as a single word —
    i.e. it tried to run a command literally named ``cmd with args``.
    Passing *cmd* unmodified lets bash parse it normally.
    """
    fork(['/bin/bash', '-c', cmd])
Ejemplo n.º 4
0
soup = connect(directoryUrl)

csvUrl = soup.xpath('//div[@id="directory"]//a/@href')[0]

response = urllib.urlretrieve(csvUrl)

with open(response[0], 'rb') as csvfile:
    csv_file = csv.reader(csvfile, delimiter=',')
    next(csv_file)
    next(csv_file)
    next(csv_file)
    next(csv_file)
    next(csv_file)
    for res in csv_file:
        results = fork(parse_data, res)

        for key, val in results.iteritems():
            for v in val:
                print v[0]
                scraperwiki.sqlite.save(
                    unique_keys=['location_url'],
                    data={
                        "location_url":
                        v[0],
                        "name":
                        unicode(v[1]),
                        "add1":
                        unicode(v[2]),
                        "add2":
                        unicode(v[3]),
Ejemplo n.º 5
0
# Python 2 scraper: fetch the CQC "directory" page, follow the first link in
# the #directory div to a CSV download, and store each location row in the
# local scraperwiki SQLite table keyed on location_url.
# NOTE(review): connect(), fork(), parse_data and scraperwiki are defined
# elsewhere — behavior of fork(parse_data, res) is assumed to be a parallel
# map returning a dict; confirm against their definitions.
directoryUrl = "http://www.cqc.org.uk/content/how-get-and-re-use-cqc-information-and-data#directory"

soup = connect(directoryUrl)

# First href inside the directory div is presumed to be the CSV download link.
csvUrl = soup.xpath('//div[@id="directory"]//a/@href')[0]

# urlretrieve returns (local_filename, headers); we only use the filename.
response = urllib.urlretrieve(csvUrl)

with open(response[0], 'rb') as csvfile:
    csv_file = csv.reader(csvfile, delimiter=',')
    # Skip five preamble rows before the data starts — TODO confirm the
    # CSV header layout still has exactly five non-data rows.
    next(csv_file)
    next(csv_file)
    next(csv_file)
    next(csv_file)
    next(csv_file)
    for res in csv_file:
        results = fork(parse_data, res)

        for key, val in results.iteritems():
           for v in val:
                print v[0]
                # Columns v[0]..v[34] are mapped positionally onto named
                # fields; unicode() coerces the Python 2 byte strings.
                scraperwiki.sqlite.save(unique_keys=['location_url'], data={"location_url": v[0], "name": unicode(v[1]), "add1": unicode(v[2]), "add2": unicode(v[3]), "add3": unicode(v[4]), "add4": unicode(v[5]),
                                                                                            "postal_code": unicode(v[6]), "telephone": unicode(v[7]), "CQC_ID": v[8], "type_of_service": unicode(v[9]), "services": unicode(v[10]), "local_authority": unicode(v[11]), "latest_report": unicode(v[12]), "reports_url": unicode(v[13]),
                                                                             "report_date": unicode(v[14]), "overview": unicode(v[15]), "overview_description": unicode(v[16]), "overview_safe": unicode(v[17]), "overview_effective": unicode(v[18]),
                                                                             "overview_caring": unicode(v[19]), "overview_responsive": unicode(v[20]), "overview_well_led": unicode(v[21]), "run_by": unicode(v[22]), "run_by_url": unicode(v[23]),
                                                                             "overview_summary": unicode(v[24]), "summary_safe": unicode(v[25]), "summary_effective": unicode(v[26]), "summary_caring": unicode(v[27]), "summary_responsive": unicode(v[28]),
                                                                             "summary_well_led": unicode(v[29]), 'summary_treating_people_with_respect': unicode(v[30]), 'summary_providing_care': unicode(v[31]), 'summary_caring_for_people_safely': unicode(v[32]), 'summary_staffing': unicode(v[33]), 'summary_quality_and_suitability_of_management': unicode(v[34])
                                                                             })
                

                    print "found : "
                    print "\n".join(frames)
                if opts.upload:
                    gdb.writeLog( opts.graceid, message=message, tagname=tagname )

            if not opts.force: ### we aren't forcing this scan, so we stop looking up data and move to the next chanset
                break

        if opts.verbose:
            print "  copying frames into local directory"
        newframes = ["%s/%s"%(frmdir, os.path.basename(frame)) for frame in frames]
        for frame, newframe in zip(frames, newframes):
            if opts.Verbose:
                print "    %s -> %s"%(frame, newframe)
            if not os.path.exists( newframe ): ### only copy if frame doesn't already exists
                fork.fork(['cp', frame, newframe]).wait() ### delegates to subprocess.Popen

    else: ### we did not break from the iteration over frameTypes, so we want to actually run the scan

        #-----------------------------------------
        # SUBMIT COMMANDS
        #-----------------------------------------

        ### set up output directory and url
        this_outdir = os.path.join(outdir, opts.graceid, "scans", chanset.replace(" ",""))
        this_outurl = os.path.join(outurl, opts.graceid, "scans", chanset.replace(" ",""))
        if opts.verbose:
            print "  writing into : %s -> %s"%(this_outdir, this_outurl)

        if os.path.exists(this_outdir):
            if not opts.force: ### ignore existing data if --force was supplied
Ejemplo n.º 7
0
  temp = temperature.get()
  if temp > c['temp'] :
    if c['debug'] == 1 :
      print("temp: %.3f, over %.3f"%(temp, c['temp']))
    syslog.syslog(syslog.LOG_WARNING, "temp: %.3f, over %.3f"%(temp, c['temp']))
    led.control(1, c['gpio'])
  else :
    led.control(0, c['gpio'])
  time.sleep(c['time'])

def parentfunc():
  """Parent-side hook handed to fork: just idle for ten seconds."""
  delay_seconds = 10
  time.sleep(delay_seconds)

def atexit_func() :
  """atexit hook: close the connection to the system logger on exit."""
  syslog.closelog()

# Entry point: daemonize via the project fork module, load config, hook up
# syslog and the status LED, then run the temperature-monitor loop.
if __name__ == '__main__' :
  # NOTE(review): 'global' at module top level is a no-op — confirm this was
  # intended (c and f are already module globals when assigned here).
  global c, f
  atexit.register(atexit_func)
  # Project fork.fork(): presumably a daemonizer keyed on this pidfile —
  # TODO confirm against the fork module.
  f = fork.fork(pidfile="/var/run/cputempmon-py.pid")
  c = configs.configs(name=sys.argv[0], pidfile="/var/run/cputempmon-py.pid", stop=f.stop)
  c.getopt(sys.argv[1:])
  # Register the parent/child callbacks on the fork object.
  f['parent'] = parentfunc
  f['child'] = childfunc
  syslog.openlog("%s"%(os.path.basename(sys.argv[0])), syslog.LOG_LOCAL0 | syslog.LOG_PID)
  led.init(c['gpio'])
  # c['bg'] presumably selects foreground vs background execution — verify.
  f.run(c['bg'])


Ejemplo n.º 8
0
    bt.scan(update_list)
    while True:
        cmd = child.readobj()
        if cmd == "q":
            break
        elif cmd == "connect":
            name = child.readobj()
            device = devicelist[name]
            scribe = bt.connect_to(device)
        elif cmd == "light":
            scribe.write_packet("L" + chr(13) + "\x01\x00\xff\x00")
            scribe.read()
    bt.end()

# Split into two processes; fork() is project-defined and appears to return
# a pair of pipe-like endpoints — TODO confirm its contract.
child, parent = fork()
if child:
    # This branch runs the BTLE side: main() consumes commands from the
    # 'child' endpoint in a background thread while bt.launch() blocks.
    bt = BTLE()
    threading.Thread(target=main, args=(bt, child)).start()
    bt.launch()
    child.close()
    sys.exit()

# Child Process
import Tkinter

class Application(Tkinter.Frame):
    def __init__(self, parent, master=None):
        Tkinter.Frame.__init__(self, master=master)
        self.parent = parent