Example #1
 def connect(self, IPname, family, hostaddr):
     global bg_redis_write, bg_submit_wf
     global TASK_TYPE, QSIZE_REDIS, QSIZE_SUBMIT
     print()
     # (self, 'ip068.subnet71.example.com', AF_INET, ('215.183.71.68', 4720) )
     # (self, 'ip6.mxout.example.com', AF_INET6,
     #   ('3ffe:80e8:d8::1', 4720, 1, 0) )
     if family == AF_INET6:
         self.flow = hostaddr[2]
         self.scope = hostaddr[3]
     else:
         pass
     self.fp = None
     self.IP = hostaddr[0]
     self.port = hostaddr[1]
     self.IPname = IPname  # Name from a reverse IP lookup
     log = logging.getLogger(wildlib.loggerName)
     log.info('action=<connect> milter_id=<%d> orig_client_ip=<%s> orig_client=<%s> client_ip=<%s> client=<%s> server_ip=<%s> server=<%s>' %
              (self.id, self.getsymval('{client_addr}'), self.getsymval('{client_name}'), self.IP, IPname,
               self.getsymval('{daemon_addr}'), self.getsymval('{daemon_name}')))
     if TASK_TYPE != 'single':
         if not bg_redis_write.is_alive():
             log.critical('action=<redis_add> milter_id=<%d> error=<The %s to write into Redis is dead. Try to restart...>',
                          self.id, TASK_TYPE)
             redisq = Queue(maxsize=QSIZE_REDIS)
             bg_redis_write = Thread(target=redis_background_write, args=(redisq,))
             bg_redis_write.start()
         if not bg_submit_wf.is_alive():
             log.critical('action=<wildfire_submit> milter_id=<%d> error=<The %s to submit sample for Wildfire is dead. Try to restart...>',
                          self.id, TASK_TYPE)
             submitq = Queue(maxsize=QSIZE_SUBMIT)
             bg_submit_wf = Thread(target=submit_wildfire_background, args=(submitq,))
             bg_submit_wf.start()
     return Milter.CONTINUE
Example #2
def start(func=None,
          args=None,
          localization={},
          gui=None,
          debug=False,
          http_server=False,
          user_agent=None):
    global guilib, _debug, _multiprocessing, _http_server, _user_agent

    def _create_children(other_windows):
        if not windows[0].shown.wait(10):
            raise WebViewException('Main window failed to load')

        for window in other_windows:
            guilib.create_window(window)

    _debug = debug
    _user_agent = user_agent
    #_multiprocessing = multiprocessing
    multiprocessing = False  # TODO
    _http_server = http_server

    if multiprocessing:
        from multiprocessing import Process as Thread
    else:
        from threading import Thread

    original_localization.update(localization)

    if threading.current_thread().name != 'MainThread':
        raise WebViewException('This function must be run from a main thread.')

    if len(windows) == 0:
        raise WebViewException(
            'You must create a window first before calling this function.')

    guilib = initialize(gui)

    # thanks to the buggy EdgeHTML, http server must be used for local urls
    if guilib.renderer == 'edgehtml':
        http_server = True

    for window in windows:
        window._initialize(guilib, multiprocessing, http_server)

    if len(windows) > 1:
        t = Thread(target=_create_children, args=(windows[1:], ))
        t.start()

    if func:
        if args is not None:
            if not hasattr(args, '__iter__'):
                args = (args, )
            t = Thread(target=func, args=args)
        else:
            t = Thread(target=func)
        t.start()

    guilib.create_window(windows[0])
Example #3
def main():
    data = [i for i in range(10)]
    threads = []
    for num in data:
        thread = Thread(target=get_data, args=(num, ))
        thread.start()
        threads.append(thread)
    for thread in threads:
        thread.join()
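The worker `get_data` is not part of this snippet; a minimal sketch of the kind of I/O-bound worker it might stand for (the name is kept from the call site, the body is an assumption):

from random import random
from time import sleep

def get_data(num):
    # Hypothetical worker: simulate fetching item `num` from a slow source.
    sleep(random())
    print('got data for item', num)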
Example #4
def main1():
    start = time()

    for _ in range(5):

        t = Thread(target=counter)

        t.start()
        t.join()

    print('Single-threaded:', time() - start)
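Examples #4 and #7 time the same `counter` workload, first sequentially and then with concurrent threads, but `counter` itself is not shown. A minimal CPU-bound sketch (the body is an assumption):

def counter(n=10_000_000):
    # Hypothetical CPU-bound workload: count down from n to zero.
    while n > 0:
        n -= 1

Because of the GIL, a pure-Python workload like this gains little wall-clock time from the threaded version in Example #7; the comparison mainly illustrates thread start/join mechanics.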
Example #5
def main():
    bt = Thread(target=background)
    bt.start()
    socketname = "/var/run/pagure/paguresock"
    timeout = 600
    # Register to have the Milter factory create instances of your class:
    Milter.factory = PagureMilter
    print "%s pagure milter startup" % time.strftime('%Y%b%d %H:%M:%S')
    sys.stdout.flush()
    Milter.runmilter("paguremilter", socketname, timeout)
    logq.put(None)
    bt.join()
    print "%s pagure milter shutdown" % time.strftime('%Y%b%d %H:%M:%S')
Example #6
def main():
    bt = Thread( target = background )
    bt.start()
    socketname = "/var/spool/EARS/EARSmilter.sock"
    timeout = 600
    Milter.factory = EARS.milter
    flags = Milter.CHGBODY + Milter.CHGHDRS + Milter.ADDHDRS
    flags += Milter.ADDRCPT
    flags += Milter.DELRCPT
    Milter.set_flags( flags )     # tell Sendmail/Postfix which features we use
    Milter.runmilter( "EARSmilter", socketname, timeout )
    logq.put( None )
    bt.join()
Example #7
def main2():
    start = time()

    t_list = []
    for _ in range(5):
        t = Thread(target=counter)

        t.start()
        t_list.append(t)

    for t in t_list:
        t.join()

    print('Multithreaded (concurrent):', time() - start)
Example #8
def main():
    """Main entry point, run the milter and start the background logging daemon"""

    try:
        debug = configParser.getboolean('main', 'debug')

        # Exit if the milter has already been started (pid file exists)
        if os.path.exists(PID_FILE_PATH):
            print("pid file {} already exists, exiting".format(PID_FILE_PATH))
            sys.exit(-1)

        lgThread = Thread(target=loggingThread)
        lgThread.start()
        timeout = 600

        # Register to have the Milter factory create new instances
        Milter.factory = MarkAddressBookMilter

        # For this milter, we only add headers
        flags = Milter.ADDHDRS
        Milter.set_flags(flags)

        # Get the parent process ID and remember it
        pid = os.getpid()
        with open(PID_FILE_PATH, "w") as pidFile:
            pidFile.write(str(pid))
            pidFile.close()

        print("Started RoundCube address book search and tag milter (pid={}, debug={})".format(pid, debug))
        sys.stdout.flush()

        # Start the background thread
        Milter.runmilter("milter-rc-abook", SOCKET_PATH, timeout)
        GlobalLogQueue.put(None)

        #  Wait until the logging thread terminates
        lgThread.join()

        # Log the end of process
        print("Stopped RoundCube address book search and tag milter (pid={})".format(pid))

    except Exception as error:
        print("Exception when running the milter: {}".format(error.message))

    # Make sure to remove the pid file even if an error occurs
    # And close the database connection if opened
    finally:
        if os.path.exists(PID_FILE_PATH):
            os.remove(PID_FILE_PATH)
Example #9
def main():
  bt = Thread(target=background)
  bt.start()
  socketname = "/home/stuart/pythonsock"
  timeout = 600
  # Register to have the Milter factory create instances of your class:
  Milter.factory = myMilter
  flags = Milter.CHGBODY + Milter.CHGHDRS + Milter.ADDHDRS
  flags += Milter.ADDRCPT
  flags += Milter.DELRCPT
  Milter.set_flags(flags)       # tell Sendmail which features we use
  print "%s milter startup" % time.strftime('%Y%b%d %H:%M:%S')
  sys.stdout.flush()
  Milter.runmilter("pythonfilter",socketname,timeout)
  logq.put(None)
  bt.join()
  print "%s bms milter shutdown" % time.strftime('%Y%b%d %H:%M:%S')
Example #10
def Compress():
    manager = Manager()
    from HEXSmash import main
    try:
        Filename = argv[2]
    except IndexError:
        Filename = input("What file would you like to compress? : ")
    OpenFile = open(Filename, "rb")
    srtstr = OpenFile.read()
    srtstrlen = len(srtstr)
    srtstr = [srtstr[i : i + 7] for i in range(0, len(srtstr), 7)]
    a3 = len(srtstr)
    Threads = cpu_count(logical=False)
    try:
        x = int(argv[3])
    except IndexError:
        x = 0
    Threadsnm = 1
    ANS = manager.dict()
    CUR = manager.dict()
    ANS[1] = ""
    CUR[1] = 0 + x
    Start = time()
    while (Threadsnm <= Threads):
        Thread(target=CompressMT, args=(Threadsnm, srtstr, a3, Threads, ANS, CUR, x), daemon=True).start()
        print("Thread " + str(Threadsnm) + " started.")
        Threadsnm += 1
    while (ANS[1] == ""):
        CURTIME = time() - Start
        print(str(CUR[1]/256**7*100) + '% Complete,', f'{CUR[1]:,}' + ' Checked, ' + f'{int(int(CUR[1]-x)//CURTIME):,}' + ' Checked per Second.', end="\r")
        if CUR[1] == 256**7:
            print('File not compressible.')
            exit()
        pass
    print()
    z = int(ANS[1])
    OpenFile.close()
    remove(Filename)
    Filename = Filename + ".CUDARAND"
    z = hex(z)
    srtstrlen = hex(srtstrlen)
    OpenFile = open(Filename, "w")
    OpenFile.write(str("(\"" + str(z)[2:] + "\"C \"" + str(srtstrlen)[2:] + "\")"))
    OpenFile.close()
    main('0', Filename)
    remove(Filename)
Example #11
def main(args):
    token = ''
    credentials = do_login(args)
    if credentials is None:
        return
    token = credentials['Token']
    makedirs(args.out, exist_ok=True)
    search_params = factory.make_search_body(search=args.search,
                                             player=args.player)
    robots = factory.factory_list(token, search_params)

    calls = 1
    threads = list()
    already_seen = list()
    for bot in robots:
        if bot['itemId'] not in already_seen:  # ignore duplicates returned by API
            already_seen.append(bot['itemId'])
            if calls % CALLS_BEFORE_REFRESH == 0 and args.batch:
                print('Refreshing token...')
                token = do_login(args)['Token']
            save_path = join(
                args.out,
                remove_bad_chars(bot['itemName'] + '-' + str(bot['itemId'])) +
                '.' + args.extension.lstrip('.'))
            print('Downloading %s to %s...' % (bot['itemName'], save_path))
            bot_info = factory.factory_bot(token, bot['itemId'])
            with open(save_path, 'w') as f:
                json.dump(bot_info, f, indent=4)
            if args.thumbnail is True:
                # this is an AWS CDN; not worried about spamming it
                threads.append(
                    Thread(target=save_thumbnail,
                           args=(bot_info['name'] + '-' + str(bot['itemId']),
                                 bot_info['thumbnail'], args)))
                threads[-1].start()
            if calls == args.max and args.max >= 0:
                break
            calls += 1
            if args.batch:
                time.sleep(SLOWDOWN)
        else:
            pass
            # print('Ignoring %s duplicate (already downloaded)' % (bot['itemName']))
    for t in threads:
        t.join()
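`save_thumbnail` is started in its own thread but is not included in the snippet; one possible implementation using `requests` (the signature is taken from the call site, everything else is an assumption):

import requests
from os.path import join

def save_thumbnail(name, url, args):
    # Hypothetical helper: download the thumbnail and store it next to the bot JSON.
    resp = requests.get(url, timeout=30)
    resp.raise_for_status()
    with open(join(args.out, name + '.png'), 'wb') as f:
        f.write(resp.content)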
Example #12
def main_threaded(iniconfig):
    semaphore = BoundedSemaphore(CONCURRENCY_LIMIT)
    tasks = []
    for appid in iniconfig:
        section = iniconfig[appid]
        task = Thread(target=checker, args=(section, appid, semaphore))
        tasks.append(task)
        task.start()

    try:
        for t in tasks:
            t.join()
    except KeyboardInterrupt:
        for t in tasks:
            if hasattr(t, 'terminate'):  # multiprocessing
                t.terminate()
        print 'Validation aborted.'
        sys.exit(1)
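The `checker` target is not included in Example #12; a sketch of how it might use the shared `BoundedSemaphore` to cap concurrent checks (the body is an assumption):

def checker(section, appid, semaphore):
    # Hypothetical validator: the semaphore limits how many checks run at once.
    with semaphore:
        print('validating', appid)
        # ... perform the real per-app validation using `section` here ...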
Example #13
def main():
    bt = Thread(target=background)
    bt.start()
    # This is NOT a good socket location for production, it is for
    # playing around.  I suggest /var/run/milter/myappnamesock for production.
    #  socketname = os.path.expanduser('~/pythonsock')
    socketname = "inet:8800"
    timeout = 600
    # Register to have the Milter factory create instances of your class:
    Milter.factory = myMilter
    flags = Milter.CHGBODY + Milter.CHGHDRS + Milter.ADDHDRS
    flags += Milter.ADDRCPT
    flags += Milter.DELRCPT
    Milter.set_flags(flags)  # tell Sendmail which features we use
    print("%s milter startup" % time.strftime('%Y%b%d %H:%M:%S'))
    sys.stdout.flush()
    Milter.runmilter("pythonfilter", socketname, timeout)
    logq.put(None)
    bt.join()
    print("%s bms milter shutdown" % time.strftime('%Y%b%d %H:%M:%S'))
Example #14
    def state_cmd(self, item):
        if item['cmd'] == 'PRINT':
            self.print_q = Queue()
            self.print_t = Thread(target=self.print_file,
                                  args=(self.printer,
                                        self.photo_set['primary'],
                                        self.link,
                                        self.print_q))
            self.print_t.start()
            self.ani_q_cmd_push("PRINTWAIT")

        elif item['cmd'] == 'PRINTWAIT':
            if self.print_t.is_alive():  # while printing, continue the animation
                self.ani_q_img_push(self.print_img, self.print_bar_img_pos, 0.9, True, False, False)
                self.ani_q_img_push(self.print_bar, self.print_bar_pos, 0.1, False)
                self.ani_q_txt_push("Printing....", (255, 255, 255), 200, self.print_bar_txt_pos, 0.1, False)
                self.ani_q_cmd_push("PRINTWAIT")
            else:
                self.status = self.print_q.get()
                Logger.info(__name__, "Print Complete with status .. {0}".format(self.status))
                self.ani_q_cmd_push("COMPLETE")
Example #15
def server(args):
    from multiprocessing import Process as Thread
    print('Server using multiprocessing')
    context = ssl.create_default_context(ssl.Purpose.CLIENT_AUTH)
    context.load_cert_chain(certfile=args.certfile, keyfile=args.keyfile)
    s = socket.socket()
    s.bind((args.host, args.port))
    s.listen(args.n_clients)
    processes = []
    try:
        while True:
            conn, address = s.accept()
            p = Thread(target=ServerTask(args.host, args.port, args.dbconfig,
                                         args.certfile, args.keyfile,
                                         args.n_clients),
                       args=(conn, address))
            p.start()
            processes += [p]
    finally:
        for p in processes:
            p.kill()
            p.join()
        s.shutdown(socket.SHUT_RDWR)
        s.close()  # close the connection
Example #16
from random import random
from threading import Thread, Lock
from time import sleep

data_lock = Lock()


def foo(data_lock):
    while True:
        print("Foo trying to acquire data_lock...")
        if data_lock.acquire(timeout=2):  # wait up to 2 seconds for the lock
            print("Foo acquired lock successfully.")
            sleep(random() * 5)
            data_lock.release()
            print("Foo released data_lock...")


def bar(data_lock):
    while True:
        print("Bar trying to acquire data_lock...")
        if data_lock.acquire(timeout=2):  # wait up to 2 seconds for the lock
            print("Bar acquired lock successfully.")
            sleep(random() * 5)
            data_lock.release()
            print("Bar released data_lock...")


t1 = Thread(target=foo, args=(data_lock, ))
t2 = Thread(target=bar, args=(data_lock, ))

t1.start()
t2.start()
Example #17
    cfm = channel.confirm_delivery()  # confirm mode: a failed publish raises NackError
    message = ' '.join(sys.argv[1:]) or "Hello World! has confirm"

    for _ in range(500):
        try:
            push_rst = channel.basic_publish(
                exchange='',
                routing_key='task_queue',
                body=message,
                properties=pika.BasicProperties(
                    delivery_mode=2,  # make the message persistent
                ))
            # print(" [x] Sent %r, push_rst = %s" % (message, push_rst))
        except StreamLostError:
            print('stream lost!')
        except NackError:
            print('Nack Error!')
    connection.close()


p_list = []
for i in range(2):
    p = Thread(target=test, name='sub_%s' % i, args=(i, ))
    print('sub process %s' % p.name)
    p_list.append(p)
    p.start()
for p in p_list:
    p.join()  # wait for each worker to finish before the main thread continues

print(time.time() - s)
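Example #17 assumes a pika connection, a channel, the start timestamp `s`, and the `test` wrapper that each thread runs; a minimal setup sketch under those assumptions (host and queue name are guesses):

import time
import pika
from threading import Thread
from pika.exceptions import StreamLostError, NackError

s = time.time()

def test(i):
    # Hypothetical per-thread wrapper: open a connection and channel, then run
    # the confirm_delivery()/basic_publish() loop shown above and close the connection.
    connection = pika.BlockingConnection(pika.ConnectionParameters(host='localhost'))
    channel = connection.channel()
    channel.queue_declare(queue='task_queue', durable=True)
    ...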
Example #18
#from threading import Thread
from multiprocessing import Process as Thread
from time import sleep


def foo(t):
    for i in range(10):
        print t, "counting", i
        sleep(1)


threads = {}
for i in range(10):
    threads[i] = Thread(target=foo, args=(i, ))
    threads[i].start()

print "Waiting for threads to complete..."
for t in threads.values():
    t.join()
Example #19
def foo():
    for i in range(len(a)):
        lock.acquire()
        a[i] = a[i] * a[i] 
        print "foo: updated", i
        lock.release()
        sleep(0.01)

def bar():
    for i in range(len(a)):
        lock.acquire()
        a[i] = a[i] * a[i] 
        print "bar: updated", i
        lock.release()
        sleep(0.01)


t1 = Thread(target=foo)
t2 = Thread(target=bar)

t1.start()
t2.start()

t1.join()
t2.join()
print "a =", a
Example #20
#from threading import Thread
from multiprocessing import Process as Thread


def isprime(x):
    for i in range(2, x - 1):
        if x % i == 0: return False
    else:
        return True


def genprimes(x):
    primes = []
    for i in range(2, x):
        if isprime(i): primes.append(i)
    print primes
    #return primes


threads = []
for i in range(10):
    t = Thread(target=genprimes, args=(20000, ))
    threads.append(t)
    t.start()

for i in range(10):
    threads[i].join()
Example #21
#from threading import Thread
from multiprocessing import Process as Thread

from time import sleep


def foo():
    import os
    print("foo function running on: {}".format(os.getpid()))
    sleep(10)
    print("foo function ({}) complete.".format(os.getpid()))


if __name__ == '__main__':
    workers = {}

    for i in range(10):
        workers[i] = Thread(target=foo)
        workers[i].start()

    for i in range(10):
        workers[i].join()
Example #22
    print args.__dict__

    lr = 0.001
    maxkeylen = 32
    slen = maxkeylen * 2

    print "Loading data"
    with open("enwik8", "r") as f:
        dataset = f.read()

    tr_dataset = dataset[:9000000]
    val_dataset = dataset[9000000:]

    print "Starting vectorization threads"
    for _ in xrange(4):
        train_thread = Thread(target=generate_data,
                              args=(True, tr_dataset, slen, maxkeylen))
        train_thread.daemon = True
        train_thread.start()

    for _ in xrange(4):
        test_thread = Thread(target=generate_data,
                             args=(False, val_dataset, slen, maxkeylen))
        test_thread.daemon = True
        test_thread.start()

    do_conv = args.conv
    do_lstm = not do_conv

    input_layer = Input(shape=(slen, 8))
    prev_layer = input_layer
Example #23
    def state_cmd(self, item):
        if item['cmd'] == 'UPLOAD':
            self.pcloud_upload_q = Queue()
            #print "pCloud Starting Upload"
            self.pcloud_upload = Thread(target=self.pcloud_uploadfiles,
                                        args=(self.pCloud,
                                              self.pcloud_upload_folder,
                                              self.photo_set,
                                              self.pcloud_upload_q))
            self.pcloud_upload.start()
            self.ani_q_cmd_push("UPLOADWAIT")

        elif item['cmd'] == 'UPLOADWAIT':
            if self.pcloud_upload.is_alive():  # while uploading, continue the animation
                self.ani_q_img_push(self.upload_img, self.upload_bar_img_pos,
                                    0.9, True, False, False)
                self.ani_q_img_push(self.upload_bar, self.upload_bar_pos, 0.1,
                                    False)
                self.ani_q_txt_push("Uploading....", (255, 255, 255), 200,
                                    self.upload_bar_txt_pos, 0.1, False)
                self.ani_q_cmd_push("UPLOADWAIT")
            else:
                result = self.pcloud_upload_q.get()
                if result is None:
                    self.upload_link = self.cfg.get("event_url")
                    Logger.info(__name__,
                                "pCloud Upload Failed, saving link as album")
                else:
                    self.upload_link = result
                    Logger.info(
                        __name__, "pCloud Upload Complete - {0}".format(
                            self.upload_link))

                self.ani_q_cmd_push("UPLOADQR")

        elif item['cmd'] == 'UPLOADQR':
            self.gpio.set('green_led', 1)
            self.gpio.set('red_led', 0)
            qr_path = self.gen_qr(self.upload_link)
            self.gameDisplay.fill((200, 200, 200))
            qr_img = pygame.image.load(qr_path)
            qr_pos = (((self.disp_w - qr_img.get_size()[0]) / 2) + 200,
                      ((self.disp_h - qr_img.get_size()[1]) / 2) - 175)

            if self.cfg.get("printer__enabled"):
                pass
            else:
                self.gen_upload_menu("Finish")

            link_pos = (((self.disp_w) / 2) - 100, ((self.disp_h) / 2))
            self.ani_q_img_push(qr_img, qr_pos, 0.1, False)
            self.ani_q_txt_push(self.upload_link, (40, 40, 40), 75, link_pos,
                                0.1, False)
            self.ani_q_img_push(self.upload_menu, self.upload_menu_pos, 0.1,
                                False)
            self.ani_q_cmd_push("COMPLETE")
            self.ani_q_cmd_push("UPLOADINFO")

        elif item['cmd'] == 'UPLOADINFO':
            for info in self.upload_info:
                self.ani_q_img_push(info,
                                    self.upload_info_pos,
                                    0.4,
                                    True,
                                    forceSurfaceAlpha=False)
                self.ani_q_pause_push(5)

            self.ani_q_cmd_push("UPLOADINFO")
Example #24
#from threading import Thread
from multiprocessing import Process as Thread
from time import time


def foo():
    a = range(5000)
    for i, v in enumerate(a):
        a[i] **= a[i]


threads = {}

start = time()

for i in range(16):
    threads[i] = Thread(target=foo)
    threads[i].start()

for t in threads.values():
    t.join()

end = time()

print "Duration:", end - start
Example #25
def start(func=None, args=None, localization={}, gui=None, debug=False, http_server=False, user_agent=None):
    """
    Start a GUI loop and display previously created windows. This function must
    be called from a main thread.

    :param func: Function to invoke upon starting the GUI loop.
    :param args: Function arguments. Can be either a single value or a tuple of
        values.
    :param localization: A dictionary with localized strings. Default strings
        and their keys are defined in localization.py.
    :param gui: Force a specific GUI. Allowed values are ``cef``, ``qt``, or
        ``gtk``, depending on the platform.
    :param debug: Enable debug mode. Default is False.
    :param http_server: Enable built-in HTTP server. If enabled, local files
        will be served using a local HTTP server on a random port. For each
        window, a separate HTTP server is spawned. This option is ignored for
        non-local URLs.
    :param user_agent: Change user agent string. Not supported in EdgeHTML.
    """
    global guilib, _debug, _multiprocessing, _http_server, _user_agent

    def _create_children(other_windows):
        if not windows[0].shown.wait(10):
            raise WebViewException('Main window failed to load')

        for window in other_windows:
            guilib.create_window(window)

    _debug = debug
    _user_agent = user_agent
    #_multiprocessing = multiprocessing
    multiprocessing = False # TODO
    _http_server = http_server

    if multiprocessing:
        from multiprocessing import Process as Thread
    else:
        from threading import Thread

    original_localization.update(localization)

    if threading.current_thread().name != 'MainThread':
        raise WebViewException('This function must be run from a main thread.')

    if len(windows) == 0:
        raise WebViewException('You must create a window first before calling this function.')

    guilib = initialize(gui)

    for window in windows:
        window._initialize(guilib, multiprocessing, http_server)

    if len(windows) > 1:
        t = Thread(target=_create_children, args=(windows[1:],))
        t.start()

    if func:
        if args is not None:
            if not hasattr(args, '__iter__'):
                args = (args,)
            t = Thread(target=func, args=args)
        else:
            t = Thread(target=func)
        t.start()

    guilib.create_window(windows[0])
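Assuming this is pywebview's top-level `webview` package, a typical call sequence for the `start()` function above looks like this (window title, URL, and the callback are placeholders):

import webview

def on_start(label):
    # Runs in a worker thread once the GUI loop is up.
    print('GUI loop running for', label)

window = webview.create_window('Example', 'https://example.com')
webview.start(on_start, args=('Example',), debug=True)

Note that `args` is passed as a tuple: a bare string would be handed to the Thread call inside `start()` unchanged and unpacked into one argument per character.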
Example #26
import os
from time import sleep
#from threading import Thread
from multiprocessing import Process as Thread


def foo():
    for i in range(10):
        print "In foo: counting", i
        sleep(0.5)


t1 = Thread(target=foo)
t1.start()

print "Back to main program..."
for i in range(5):
    print "Main: counting:", i
    sleep(0.5)

t1.join()
Example #27
def main():
    if LOGHANDLER == 'file':
        endstr = '**'
        # make sure the log directory exists:
        try:
            os.makedirs(LOGFILE_DIR, 0o0027)
        except:
            pass
    else:
        endstr = ''

    # Load the whitelist into memory
    global WhiteList, Hash_Whitelist
    global r
    global rsub
    global wfp
    global bg_redis_write
    global bg_submit_wf

    WhiteList, Hash_Whitelist = wildlib.whiteListLoad(CONFIG)

    # Register to have the Milter factory create instances of your class:
    Milter.factory = WildfireMilter
    flags = Milter.ADDHDRS
    Milter.set_flags(flags)  # tell Sendmail which features we use
    print("\n*************************************************************")
    print("*********** %s startup - Version %s ***********" % ('Wildfire Milter', __version__))
    print('*************  logging to %s' % LOGHANDLER, end='')
    if LOGSTDOUT:
        print(' and to stdout ', end=endstr)
    else:
        print(' **************', end=endstr)
    print("**************\n*************************************************************\n")
    log.info('* Starting %s v%s - listening on %s' % ('Wildfire Milter', __version__, SOCKET))
    log.debug('* Python version: %s' % str(sys.version).replace('\n', ' - '))
    log.debug('* Config file: %s' % CONFIG)
    print()
    sys.stdout.flush()

    # Initialize Wildfire API
    wfp = wildlib.wildfireConnect(WILDHOST, WILDKEY)

    # Initialize main Redis Cache
    r = wildlib.redisConnect(REDISHOST, REDISPORT, REDISDB, REDISAUTH)
    # Initialize Redis Cache for Wildfire submit
    if 'threading' in sys.modules and submitq is not None:
        # This is done in another process/thread
        rsub = None
    else:
        rsub = wildlib.redisConnect(REDISHOST, REDISPORT, DBSUB, REDISAUTH)

    # ensure desired permissions on unix socket
    os.umask(UMASK)

    # set the "last" fall back to ACCEPT if exception occur
    Milter.set_exception_policy(Milter.ACCEPT)

    if TASK_TYPE != 'single':
        bg_redis_write = Thread(target=redis_background_write, args=(redisq,))
        bg_submit_wf   = Thread(target=submit_wildfire_background, args=(submitq,))
        bg_redis_write.start()
        bg_submit_wf.start()

    # Notify systemd that the milter is ready to serve and to protect
    if systemd.daemon.booted():
        systemd.daemon.notify('READY=1')
        systemd.daemon.notify('STATUS=Initialization of WildFire API and Redis completed. Ready to work.')
    # start the milter #################################
    Milter.runmilter('WildfireMilter', SOCKET, TIMEOUT)
    ####################################################

    if TASK_TYPE != 'single':
        # Terminate the running threads.
        redisq.put(None)
        submitq.put(None)
        bg_redis_write.join()
        bg_submit_wf.join()

    if systemd.daemon.booted():
        systemd.daemon.notify('STOPPING=1')
        systemd.daemon.notify('STATUS=Wildfire Milter ready to stop.')

    log.info('Wildfire Milter shutdown')
    print("\n*********** %s shutdown ***********\n" % 'WildfireMilter')
Example #28
def main():
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    icon_path = os.path.realpath(os.path.join(os.path.dirname(__file__), "pushbullet.png"))

    icon = Gtk.StatusIcon()
    icon.set_from_file(icon_path)
    icon.set_tooltip_text("PushBullet")
    icon.set_visible(True)
    icon.connect('activate', open_browser)

    Notify.init("PushBullet")

    gtk_thread = Thread(target=Gtk.main)

    try:
        pb = pushybullet.PushBullet(pushybullet.get_apikey_from_config() or sys.argv[1])
    except IndexError:
        from textwrap import dedent
        print(dedent('''
            Either pass your API key as first command line argument,
            or put it into your ~/.config/pushbullet/config.ini file:

            [pushbullet]
            apikey = YOUR_API_KEY_HERE
        '''))

        return

    def pb_watch():
        for ev in pb.stream(use_server_time=True):
            for push in ev.pushes(skip_empty=True):
                if push.type in ('dismissal',):
                    continue

                try:
                    print(str(type(push)), push.json())

                    title = push.get('title') or get_play_app_name(push.get('package_name')) or "PushBullet"
                    body = push.get('body') or push.get('url') or '\n'.join('— %s' % i for i in push.get('items')) or push.get('file_name')

                    if 'icon' in push:
                        loader = GdkPixbuf.PixbufLoader.new_with_type('jpeg')
                        loader.write(push.icon)
                        loader.close()

                        notify = Notify.Notification.new(title, body)
                        notify.set_icon_from_pixbuf(loader.get_pixbuf())

                    else:
                        notify = Notify.Notification.new(title, body, icon_path)

                    notify.show()

                except Exception as e:
                    print(e)

    pb_thread = Thread(target=pb_watch, daemon=True)  # daemon: threading.Thread has no terminate()

    gtk_thread.start()
    pb_thread.start()

    gtk_thread.join()  # the daemonized pb_thread ends when the process exits