Vote count:
0
I have a problem writing logs into the same file from multiple processes. Below is my pseudo code. The program will run a long time and output many logs, and I hope I can write these logs in realtime instead of outputting them after the program finishes. So I spawn a thread that opens the log file and waits for data to arrive on a queue, but it does not work, and I think it is not a good way to do this — yet I have no other idea how to modify it. Can anyone help me modify my program to meet my requirement?
import multiprocessing
import random
import threading
def cat(time, dist, log_queue):
    """Worker: compute a random 'cat' distance and enqueue one log line.

    Args:
        time: Multiplier for the random speed component.
        dist: Multiplier for the random distance component.
        log_queue: Queue the formatted log string is put on.
    """
    # Build a single string (the original put a tuple, which the
    # log writer could not write to a file).
    msg = f"cat run {random.randint(1, 400) * time + random.randint(1, 5) * dist}"
    log_queue.put(msg)
def dog(time, dist, log_queue):
    """Worker: compute a random 'dog' distance and enqueue one log line.

    Args:
        time: Multiplier for the random speed component.
        dist: Multiplier for the random distance component.
        log_queue: Queue the formatted log string is put on.
    """
    # Fixed copy-paste bug: the original message said "cat run".
    msg = f"dog run {random.randint(1, 300) * time + random.randint(1, 7) * dist}"
    log_queue.put(msg)
def rabbit(time, dist, log_queue):
    """Worker: compute a random 'rabbit' distance and enqueue one log line.

    Args:
        time: Multiplier for the random speed component.
        dist: Multiplier for the random distance component.
        log_queue: Queue the formatted log string is put on.
    """
    # Fixed copy-paste bug: the original message said "cat run".
    msg = f"rabbit run {random.randint(1, 200) * time + random.randint(1, 3) * dist}"
    log_queue.put(msg)
def turtle(time, dist, log_queue):
    """Worker: compute a random 'turtle' distance and enqueue one log line.

    Args:
        time: Multiplier for the random speed component.
        dist: Multiplier for the random distance component.
        log_queue: Queue the formatted log string is put on.
    """
    # Fixed: the original was missing the closing parenthesis on put(...).
    # NOTE: randint(1, 1) is always 1; kept to preserve the original formula.
    msg = f"turtle run {random.randint(1, 100) * time + random.randint(1, 1) * dist}"
    log_queue.put(msg)
def logrecv(log_queue, OutputFile):
    """Drain log messages from a queue and append them to a file in realtime.

    Runs until a ``None`` sentinel is received on the queue, so the caller
    can shut the writer down cleanly after all workers have finished.

    Args:
        log_queue: Queue carrying log strings (``None`` terminates the loop).
        OutputFile: Path of the log file; opened in append mode.
    """
    with open(OutputFile, 'a') as f:
        while True:
            msg = log_queue.get()
            if msg is None:
                # Sentinel: stop draining and let the file close.
                break
            f.write(f"{msg}\n")
            # Flush per message so the log is readable while running.
            f.flush()
if __name__ == '__main__':
    log_q = multiprocessing.Queue()

    # Single writer thread owns the file; workers only put() onto the queue,
    # so there is no concurrent-write contention on the log file.
    # (Original referenced undefined names `Thread` and `log_recv`.)
    th = threading.Thread(target=logrecv, args=(log_q, "output.txt"))
    th.daemon = True
    th.start()

    funcs = [cat, dog, rabbit, turtle]
    pool = multiprocessing.Pool(processes=2)
    dist = 10
    # Fixed: the original loop lacked a colon and never unpacked `i`.
    for i, func in enumerate(funcs):
        pool.apply_async(func, args=(i, dist, log_q))
    pool.close()
    pool.join()

    # All workers are done: send the sentinel and wait for the writer to
    # flush everything — otherwise the daemon thread could be killed with
    # messages still queued.
    log_q.put(None)
    th.join()
asked 31 secs ago
Aucun commentaire:
Enregistrer un commentaire