多处理等待每个作业的多个执行器

2024-03-28 14:56:14 发布

您现在位置:Python中文网/ 问答频道 /正文

我有两个任务-加载url和一些任务。需要同时提交和使用python执行器

看起来执行器在完成加载url之前不会启动某些\u作业

当我运行此作业时-某些作业仅在加载url过程完成后启动

import sys
from concurrent import futures
import urllib.request
import multiprocessing
import time

URLS = ['http://www.foxnews.com/',
        'http://www.cnn.com/',
        'http://europe.wsj.com/',
        'http://www.bbc.co.uk/',
        'http://some-made-up-domain.com/']


def load_url(url, e, timeout=0):
    # it will take 10 seconds to process a URL
    time.sleep(10)
    return None

def some_job(url, e, timeout=0):
    print('some_job')
    time.sleep(10)
    return None


def check_futures(livefutures):
    runningfutures = True
    while runningfutures:
            runningfutures = [f for f in livefutures if f.running()]
            print('runningfutures=', runningfutures)


def main():
    print('start')
    manager = multiprocessing.Manager()
    e = manager.Event()
    t = '1'
    with futures.ProcessPoolExecutor(max_workers=5) as executor:
        livefutures = dict(
            (executor.submit(load_url, url, e), url)
            for url in URLS)

        test = executor.submit(some_job, t, e)
        print('check_futures')
        # runningfutures = True
        check_futures(livefutures)


if __name__ == '__main__':
    main()

Tags: importcomhttpurltimedefcheckwww