1. Python multiprocessing: data cannot be put into the queue
2. Code:
from urllib import request
from bs4 import BeautifulSoup
from multiprocessing import Manager
from multiprocessing import Pool
import os

def hand_url(url, name, queue):
    room_message = {}  # one entry: {room name: list of table rows}
    print(name, os.getpid())
    one_roomlist = []
    req = request.Request(url)
    req.add_header("User-Agent", "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36")
    with request.urlopen(req) as f:
        data = f.read().decode()
    soup = BeautifulSoup(data, "html.parser")
    clear_1 = soup.find_all(class_="table_type_7 responsive_table full_width t_align_l")
    clear_1 = clear_1[0].tbody.contents
    for i in clear_1:
        if i != "\n":
            message_small = []
            for x in i.strings:
                if x != "\n":
                    message_small.append(x)
            one_roomlist.append(message_small)
    room_message[name] = one_roomlist
    print(room_message)
    queue.put(room_message)  # the program hangs here
    print("ok")

if __name__ == "__main__":
    queue = Manager().Queue()
    pool = Pool(4)
    pool.apply_async(hand_url, ("http://202.115.129.139:81/trainingroomnote?roomid=91", "A405", queue))
    pool.close()
    pool.join()
    print(queue.get())
3. Screenshot of the problem
4. If I copy the output of print(room_message) and put that literal value into the queue by hand, it works fine, but as soon as I call queue.put(room_message) with the actual object, the program gets stuck at that line (see the sketch below).
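A minimal sketch of a possible workaround, assuming the hang comes from putting BeautifulSoup NavigableString objects into the Manager queue (they are not plain str and carry a reference back to the parse tree); converting each value with str(x) before queuing makes the dict contain only ordinary Python strings. The worker function and its sample data here are hypothetical stand-ins for hand_url above:

# Sketch only: same Pool/Manager pattern as the question,
# but the values placed on the queue are plain str objects.
from multiprocessing import Manager, Pool

def worker(name, queue):
    # message_small built the same way as in hand_url, but with str(x)
    # so no bs4 objects end up in the queued dict (sample data used here)
    raw_strings = ["A405", "\n", "free"]
    message_small = [str(x) for x in raw_strings if x != "\n"]
    queue.put({name: [message_small]})  # plain strings pickle cleanly
    print("ok")

if __name__ == "__main__":
    queue = Manager().Queue()
    pool = Pool(4)
    pool.apply_async(worker, ("A405", queue))
    pool.close()
    pool.join()
    print(queue.get())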