擅长:python、mysql、java
<p>这里有一个<em>多处理</em>解决方案:</p>
<pre><code>from multiprocessing.pool import Pool
from multiprocessing import JoinableQueue as Queue
import os
def explore_path(path):
    """Scan a single directory, write its file entries to a text file,
    and return the subdirectories found.

    The output filename is derived from *path* by replacing the OS path
    separator with underscores (e.g. ``a/b`` -> ``a_b.txt``), so each
    scanned directory produces a distinct file in the current working
    directory.

    Returns: list of full paths of the subdirectories of *path*.
    """
    directories = []
    nondirectories = []
    for filename in os.listdir(path):
        fullname = os.path.join(path, filename)
        if os.path.isdir(fullname):
            directories.append(fullname)
        else:
            # NOTE: only the bare entry name is recorded, not the full path
            nondirectories.append(filename)
    outputfile = path.replace(os.sep, '_') + '.txt'
    with open(outputfile, 'w') as f:
        for filename in nondirectories:
            # Python 3 form; original `print >> f, filename` is Python 2-only
            print(filename, file=f)
    return directories
def parallel_worker():
    """Worker loop: drain paths from the shared `unsearched` queue.

    Each dequeued path is scanned with explore_path(); any
    subdirectories found are put back on the queue for other workers.
    Loops forever — shutdown relies on the parent process calling
    `unsearched.join()` and then exiting (the snippet's fix was only
    the reconstruction of the flattened indentation).
    """
    while True:
        path = unsearched.get()
        dirs = explore_path(path)
        for newdir in dirs:
            unsearched.put(newdir)
        # signal that this queue item is fully processed so join() can return
        unsearched.task_done()
# --- driver: seed the work queue and fan out to a process pool ---
# Acquire the list of paths to scan (whitespace-separated in paths.txt).
with open('paths.txt') as f:
    # bug fix: file objects have no .split(); read the contents first
    paths = f.read().split()

unsearched = Queue()
for path in paths:
    unsearched.put(path)

# Five workers pull from the shared queue until all directories are done.
pool = Pool(5)
for i in range(5):
    pool.apply_async(parallel_worker)

# Block until every queued directory has been processed
# (each get() is matched by a task_done() in parallel_worker).
unsearched.join()
print('Done')
</code></pre>