Python asyncio run forever and inter-process communication

What is the most efficient way to achieve the following: the main process collects and dispatches events (event loop with run_forever) to subprocesses. These subprocesses stay alive and either collect signals from the outside world, or consume signals and perform CPU-bound operations. So far I have come up with something like this:

import os 
import time 
import signal 
import asyncio 
from asyncio import PriorityQueue 
from multiprocessing import Process, Pipe 

class Event(object): 
    __slots__ = ['type','priority','payload','timestamp'] 
    def __init__(self, _type = None, _priority = None, _payload = None): 
     self.type, self.priority, self.payload, self.timestamp = _type, _priority, _payload, time.time() 
    def __str__(self): 
     return "%s(%s,%s,%s)" % (self.__class__.__name__, self.type, self.priority, self.payload) 
    def __lt__(self, other): 
     return (self.priority, self.timestamp) > (other.priority, other.timestamp) 

class EventQueue(PriorityQueue): 
    def _put(self, event): 
     super()._put((event.priority, event)) 

@asyncio.coroutine 
def consumeAnyEvent(eq, acon_write_p = None): ## more args with write_conn pipes 
    while True: 
     priority, event = yield from eq.get() 
     print("consumed",event) 
     if event.type == 'sig_a': 
      acon_write_p.send(event) 
     if event.type == 'sig_b': 
      pass 
     if event.type == 'sig_c': 
      pass 
     ## and so on - broadcast events to relevant sub-processes 
     yield from asyncio.sleep(0) 

@asyncio.coroutine 
def produceSignalA(eq,read_p): 
    while True: 
     yield from asyncio.sleep(0) 
     row = read_p.recv() 
     if row: 
       yield from eq.put(Event('sig_a', _payload = row))

class someSource(object): 
    """db, http, file watch or other io""" 
    def fetch(self): 
     pass 

def someSlowMethod(a=None): 
    """cpu-bound operations""" 
    pass 

def signalAPublisher(pipe): 
    read_p, write_p = pipe 
    read_p.close() 
    s = someSource() 
    while True: 
     result = s.fetch() 
     if result: 
      write_p.send(result) 

def signalAConsumer(pipe): 
    read_p, write_p = pipe 
    while True: 
     inp = read_p.recv()   
     if inp: 
      result = someSlowMethod(inp) 
      write_p.send(result) 

def main(): 
    ## main process is responsible for handling events: 
    ## collecting from all signal publisher subprocesses
    ## and broadcasting to all interested consumer subprocesses 
    eventQueue = EventQueue()  
    apub_read_p, apub_write_p = Pipe() 
    acon_read_p, acon_write_p = Pipe() 
    ## more pipes for Signal B, ... Signal Z 
    signalAPublisher_p = Process(target=signalAPublisher, args=((apub_read_p, apub_write_p),))  
    signalAConsumer_p = Process(target=signalAConsumer, args=((acon_read_p, acon_write_p),))
    signalAPublisher_p.start() 
    signalAConsumer_p.start() 
    ## and so on for Signal B, Signal C, ... Signal Z 
    loop = asyncio.get_event_loop() 
    try: 
     tasks = asyncio.gather(
      loop.create_task(produceSignalA(eventQueue,apub_read_p)), 
      loop.create_task(consumeAnyEvent(eventQueue,acon_write_p))  
     ) 
     loop.run_forever() 
    except KeyboardInterrupt: 
     print("Caught keyboard interrupt. Canceling tasks...") 
     tasks.cancel() 
    finally: 
     loop.close() 
     os.kill(signalAPublisher_p.pid, signal.SIGTERM) 
     os.kill(signalAConsumer_p.pid, signal.SIGTERM) 
     ## kill for Signal B, ... Signal Z 

if __name__ == '__main__': 
    main() 

However, I have the feeling that the above is not efficient/elegant enough and that I am missing something. Any ideas or suggestions?


http://zeromq.org/ – Padraic Cunningham


@PadraicCunningham thanks for the insight - off to study it! How painful would it be to integrate it into Python? Ideally I would prefer not to go beyond the standard Python libraries. – Nicholas
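
For reference, ZeroMQ is usually used from Python through the third-party pyzmq package, so it does go beyond the standard library. A minimal PUB/SUB sketch, with the address, topic and payload chosen purely for illustration:

import time
import zmq

ctx = zmq.Context()

# "publisher" end - in the question's setup this would live in a signal publisher process
pub = ctx.socket(zmq.PUB)
pub.bind("tcp://127.0.0.1:5555")

# "subscriber" end - this would live in the main process (or a consumer process)
sub = ctx.socket(zmq.SUB)
sub.connect("tcp://127.0.0.1:5555")
sub.setsockopt_string(zmq.SUBSCRIBE, "sig_a")  # only receive messages whose prefix is "sig_a"

time.sleep(0.5)                                # give the subscription time to propagate
pub.send_string("sig_a hello")
print(sub.recv_string())                       # -> "sig_a hello"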

Answer


As a start, try a ProcessPoolExecutor and run_in_executor() to distribute any CPU-bound methods; beyond that, just use plain async def/async for/await from asyncio, without a queue.

import asyncio
import random
import time
from concurrent.futures.process import ProcessPoolExecutor

async def coro_a(n): 
    print("> a", n) 
    await asyncio.sleep(random.uniform(0.1, 1)) 
    result = await asyncio.gather(coro_b(n), 
            loop.run_in_executor(None, slow_method_c, n)) 
    print("< a", n, result) 


async def coro_b(n): 
    print("> b", n) 
    await asyncio.sleep(random.uniform(0.1, 1)) 
    result = await loop.run_in_executor(None, slow_method_d, n) 
    print("< b", n, result) 
    return ("B", result) 


def slow_method_c(n): 
    print("> c", n) 
    time.sleep(random.uniform(0.5, 5)) 
    print("< c", n) 
    return ("C", n) 


def slow_method_d(n): 
    print("> d", n) 
    time.sleep(random.uniform(0.5, 5)) 
    print("< d", n) 
    return ("D", n) 


async def main_producer(): 
    tasks = [] 
    for i in range(10): 
     tasks.append(asyncio.ensure_future(coro_a(i + 1))) 
     await asyncio.sleep(1) 
    await asyncio.wait(tasks) 


loop = asyncio.get_event_loop()
loop.set_default_executor(ProcessPoolExecutor())  # run_in_executor(None, ...) now uses worker processes
loop.run_until_complete(main_producer())
loop.close()
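
Applied to the question's setup, the same pattern can replace the hand-rolled pipes and the explicit priority queue: run the blocking fetch in the default thread pool so the loop stays responsive, and hand the CPU-bound step to a ProcessPoolExecutor. A rough sketch, where fetch_signal_a and some_slow_method are hypothetical stand-ins for the question's someSource.fetch and someSlowMethod:

import asyncio
from concurrent.futures.process import ProcessPoolExecutor


def fetch_signal_a():
    """Blocking I/O: db, http, file watch or other source (hypothetical stand-in)."""
    ...


def some_slow_method(row):
    """CPU-bound processing of one fetched row (hypothetical stand-in)."""
    ...


async def handle_signal_a(loop, pool):
    while True:
        # the blocking fetch runs in the default thread pool, keeping the loop responsive
        row = await loop.run_in_executor(None, fetch_signal_a)
        if row:
            # the CPU-bound part is offloaded to a worker process
            result = await loop.run_in_executor(pool, some_slow_method, row)
            print("sig_a processed:", result)


loop = asyncio.get_event_loop()
pool = ProcessPoolExecutor()
loop.run_until_complete(handle_signal_a(loop, pool))

Each signal type would get its own handle_signal_* coroutine scheduled on the same loop, so no per-signal pipes or SIGTERM cleanup are needed.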