
Python process synchronization


Threads share memory with each other, while processes are independent of one another. So how do we make processes talk to each other?
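To see that difference concretely, here is a minimal sketch (the shared list and the append_item helper are invented for this illustration): threads started in one interpreter all see the same list, while child processes only ever touch their own copies.

# Illustrative sketch: threads share memory, processes do not.
from multiprocessing import Process
from threading import Thread

def append_item(container, n):
    container.append(n)

if __name__ == "__main__":
    shared = []
    threads = [Thread(target=append_item, args=(shared, i)) for i in range(5)]
    for t in threads:
        t.start()
    for t in threads:
        t.join()
    print(shared)   # threads see one list: all five appends are visible

    shared = []
    procs = [Process(target=append_item, args=(shared, i)) for i in range(5)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    print(shared)   # processes got copies: the parent's list is still []

The Queue example below is the first way to actually get data across: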

#!/usr/bin/python
# -*- coding: UTF-8 -*-
from multiprocessing import Process, Queue

'''
# An ordinary list does not work across processes:
# each child appends to its own copy only.
def run(info_list, n):
    info_list.append(n)
    print(info_list)

info_list = []
for i in range(10):
    p = Process(target=run, args=[info_list, i])
    p.start()
'''

def f(q, n):
    q.put(str(n) + "  hello")   # n is an int, so convert it before concatenating

if __name__ == "__main__":
    # the Queue that ships with multiprocessing
    q = Queue()
    for i in range(10):
        p = Process(target=f, args=[q, i])
        p.start()
    for _ in range(10):         # read exactly as many messages as were sent
        print(q.get())

Output:

1  hello
2  hello
3  hello
4  hello
5  hello
6  hello

All of the processes put their messages into the same queue.
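Because q.get() blocks once the queue is empty, a common way to end the reading loop is to send a sentinel value; the sketch below (producer is an invented name for this illustration) puts None on the same queue to signal that no more messages are coming.

# Sentinel-based termination: the producer puts None when it is done,
# so the consumer's loop can exit instead of blocking forever on q.get().
from multiprocessing import Process, Queue

def producer(q):
    for i in range(10):
        q.put("%s hello" % i)
    q.put(None)                 # sentinel: no more messages

if __name__ == "__main__":
    q = Queue()
    p = Process(target=producer, args=(q,))
    p.start()
    while True:
        msg = q.get()
        if msg is None:         # sentinel received, stop reading
            break
        print(msg)
    p.join()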

Exchanging data through Value and Array:

#!/usr/bin/python
# -*- coding: UTF-8 -*-
from multiprocessing import Process, Value, Array

def f(n, a, raw_list):
    n.value = 3.1415926
    for i in range(5):
        a[i] = -a[i]
    raw_list.append(100)
    print(raw_list)

if __name__ == "__main__":
    # the parent process creates the shared objects
    num = Value('d', 0.0)
    array = Array('i', range(10))
    raw_list = [1, 2, 3, 4, 5]
    print(num.value)
    print(array[:])
    print("_____________________")
    print(raw_list)
    # the child process spawned here gets a clone of the parent's data
    p = Process(target=f, args=[num, array, raw_list])
    p.start()
    p.join()
    # the parent's prints show that Value and Array were modified by the child,
    # while an ordinary object such as raw_list is not shared by default
    print(num.value)
    print(array[:])
    print(raw_list)

Output:

# data in the parent process before the child runs
0.0
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
_____________________
# the ordinary Python list
[1, 2, 3, 4, 5]
# printed inside the child: only the child's copy of the list was changed,
# so this change is not visible to the parent
[1, 2, 3, 4, 5, 100]
# data in the parent after the child modified it
3.1415926
[0, -1, -2, -3, -4, 5, 6, 7, 8, 9]
# the ordinary list: the child did not change the parent's copy
[1, 2, 3, 4, 5]
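Value and Array are also useful for synchronization, not just for sharing: a synchronized Value carries a lock that get_lock() exposes. Below is a minimal sketch (the counter name and the increment helper are assumptions made for this demo) in which several processes update one shared counter safely.

# Synchronized updates to a shared Value: get_lock() returns the lock
# that protects the underlying ctypes object.
from multiprocessing import Process, Value

def increment(counter):
    for _ in range(1000):
        with counter.get_lock():    # serialize the read-modify-write
            counter.value += 1

if __name__ == "__main__":
    counter = Value('i', 0)
    procs = [Process(target=increment, args=(counter,)) for _ in range(4)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    print(counter.value)            # 4000 when the increments are locked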

Manager

The object returned by Manager() controls a server process that holds Python objects and allows other processes to manipulate them through proxies.

Supported types:

list, dict, Namespace, Lock, RLock, Semaphore, BoundedSemaphore, Condition, Event, Barrier, Queue, Value and Array.

from multiprocessing import Process, Manager

def f(d, l):
    d[1] = '1'
    d['2'] = 2
    d[0.25] = None
    l.reverse()

if __name__ == '__main__':
    with Manager() as manager:
        d = manager.dict()
        l = manager.list(range(10))
        p = Process(target=f, args=(d, l))
        p.start()
        p.join()
        print(d)
        print(l)

Output:

{1: '1', '2': 2, 0.25: None}
[9, 8, 7, 6, 5, 4, 3, 2, 1, 0]
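The same manager can hand out any of the other types listed above. Here is a minimal sketch (the worker function and the count attribute are invented for this illustration) that combines a managed Namespace with a managed Lock so several processes can update one attribute safely.

# A managed Namespace holds arbitrary attributes; a managed Lock
# serializes access to it across processes.
from multiprocessing import Process, Manager

def worker(ns, lock):
    with lock:
        ns.count += 1               # read-modify-write guarded by the lock

if __name__ == "__main__":
    with Manager() as manager:
        ns = manager.Namespace()
        ns.count = 0
        lock = manager.Lock()
        procs = [Process(target=worker, args=(ns, lock)) for _ in range(5)]
        for p in procs:
            p.start()
        for p in procs:
            p.join()
        print(ns.count)             # 5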

Pool

from multiprocessing import Pool, TimeoutError
import time
import os

def f(x):
    return x*x

if __name__ == '__main__':
    # start 4 worker processes
    with Pool(processes=4) as pool:

        # print "[0, 1, 4,..., 81]"
        print(pool.map(f, range(10)))

        # print same numbers in arbitrary order
        for i in pool.imap_unordered(f, range(10)):
            print(i)

        # evaluate "f(20)" asynchronously
        res = pool.apply_async(f, (20,))      # runs in *only* one process
        print(res.get(timeout=1))             # prints "400"

        # evaluate "os.getpid()" asynchronously
        res = pool.apply_async(os.getpid, ()) # runs in *only* one process
        print(res.get(timeout=1))             # prints the PID of that process

        # launching multiple evaluations asynchronously *may* use more processes
        multiple_results = [pool.apply_async(os.getpid, ()) for i in range(4)]
        print([res.get(timeout=1) for res in multiple_results])

        # make a single worker sleep for 10 secs
        res = pool.apply_async(time.sleep, (10,))
        try:
            print(res.get(timeout=1))
        except TimeoutError:
            print("We lacked patience and got a multiprocessing.TimeoutError")

        print("For the moment, the pool remains available for more work")

    # exiting the 'with'-block has stopped the pool
    print("Now the pool is closed and no longer available")
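One caveat when combining Pool with the queues from the beginning of this post: a plain multiprocessing.Queue passed as an argument to pool workers raises a RuntimeError, while a queue created by Manager() can be passed around freely. A minimal sketch (square_into is an invented helper for this illustration):

# Passing a queue into pool workers: use Manager().Queue() here, because a
# plain multiprocessing.Queue() handed over as an argument raises a RuntimeError.
from multiprocessing import Pool, Manager

def square_into(q, x):
    q.put(x * x)

if __name__ == "__main__":
    with Manager() as manager:
        q = manager.Queue()
        with Pool(processes=4) as pool:
            pool.starmap(square_into, [(q, i) for i in range(10)])
        results = [q.get() for _ in range(10)]
        print(sorted(results))      # [0, 1, 4, ..., 81]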