python – imap on a generator object without a monkey patch?

I am processing a generator object of unknown “length”.
I have to keep things “lazy” because of memory management.
The processing is compute heavy, so doing it multiprocessing-style seems to be the solution (or at least it seems that way to me).

I have solved this problem of multiprocessing over a generator object with a combination of a monkey patch and a bounded Queue.
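
To make those terms concrete, the pattern boils down to something like this (a stripped-down sketch of the full code below; worker and worker_init are just illustrative names):

import multiprocessing as mp

def worker(item):
    # the queue was attached to this function by the initializer (the "monkey patch")
    worker.q.put(item, block=True, timeout=2)
    return item

def worker_init(q: mp.Queue) -> None:
    worker.q = q  # attach the queue as an attribute of the worker function

if __name__ == "__main__":
    q = mp.Queue(maxsize=8000)                  # bounded queue, used for backpressure
    with mp.Pool(2, worker_init, (q,)) as pool:
        print(list(pool.imap(worker, iter(range(5)))))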

What really itches me is the monkey patch…
Do you think it is fine to apply imap to a generator object? How would you do this?
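
In other words, the shape I would ideally like is something like the following, reusing get_unique_words and yield_n_line from the full listing below, with no queue and no attribute patching. I am just not sure it would keep memory usage as bounded as the queue version does:

with mp.Pool() as pool:
    # imap iterates over the generator instead of materialising it up front
    for unique_words in pool.imap(get_unique_words, yield_n_line(1000)):
        print(unique_words)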

I would like to underline that the focus is computing the outputs of a generator in parallel.
Within this “minimal example”:

process_line, process_line_init, process_first_n_line

are the functions I am most interested in hearing your opinion on.

import multiprocessing as mp
import psutil
import queue
from typing import Any, Dict, Iterable, Set

def yield_n_line(n: int) -> Iterable[Dict[str, str]]:
    # stand-in for the real data source: lazily yields one dict per line
    for i in range(n):
        yield {'body': "Never try to 'put' without a timeout sec declared"}

def get_unique_words(x: Dict[str, str]) -> Set[str]:
    return set(x['body'].split())

def process_line(x: Dict[str, str]) -> Set[str]:
    try:
        # push the raw line into the bounded queue; block so the consumer can throttle us
        process_line.q.put(x, block=True, timeout=2)
    except queue.Full:
        pass
    return get_unique_words(x)

def process_line_init(q: mp.Queue) -> None:
    # the "monkey patch": attach the queue to the worker function in every pool process
    process_line.q = q

def process_first_n_line(number_of_lines: int) -> Any:
    n_line = yield_n_line(number_of_lines)

    # leave two physical cores free on bigger machines
    if psutil.cpu_count(logical=False) > 4:
        cpu_count = psutil.cpu_count(logical=False) - 2
    else:
        cpu_count = psutil.cpu_count(logical=False)
    q = mp.Queue(maxsize=8000)
    p = mp.Pool(cpu_count, process_line_init, (q,))  # initargs must be a tuple
    results = p.imap(process_line, n_line)
    for _ in range(number_of_lines):
        try:
            # drain the bounded queue so the workers' put() calls do not stay blocked
            q.get(timeout=2)
        except queue.Empty:
            q.close()
            q.join_thread()
        yield next(results)  # Python 3: imap results are consumed with next()
    p.close()
    p.terminate()
    p.join()

def yield_uniqueword_chunks(
    n_line: int = 10_000_000,
    chunksize: int = 1_787_000) -> Iterable[Set[str]]:
    chunk = set()
    for result in process_first_n_line(n_line):
        chunk.update(result)
        if len(chunk) > chunksize:
            yield chunk
            chunk = set()
    yield chunk

def main() -> None:
    for chunk in yield_uniqueword_chunks(
        n_line=1000, #Number of total comments to process
        chunksize=200 #number of unique words in a chunk (around 32MB)
        ):
        print(chunk)
        #export(chunk)
    
if __name__ == "__main__":
    main()