The multiprocessing module provides a Process class to create and run processes.
import multiprocessing
def print_numbers():
    """Print the integers 1 through 5, one per line."""
    for i in range(1, 6):
        print(i)


if __name__ == '__main__':
    # Create a child process that will execute print_numbers.
    process = multiprocessing.Process(target=print_numbers)
    # Start the process (runs print_numbers in a separate interpreter).
    process.start()
    # Block until the child process terminates.
    process.join()
    print("Process finished execution")
You can also create processes by subclassing multiprocessing.Process and overriding the run method.
import multiprocessing
class PrintNumbersProcess(multiprocessing.Process):
    """A Process subclass whose run() prints the integers 1 through 5."""

    def run(self):
        # run() is invoked in the child process when start() is called.
        for i in range(1, 6):
            print(i)


if __name__ == '__main__':
    # Create and start the process.
    process = PrintNumbersProcess()
    process.start()
    # Block until the child process terminates.
    process.join()
    print("Process finished execution")
The multiprocessing module provides several ways for processes to communicate with each other:
1. Queue: a thread- and process-safe FIFO queue for passing objects between processes.
def producer(queue):
    """Put five string items on *queue*, reporting each one."""
    for i in range(5):
        queue.put(f'item {i}')
        print(f'Produced item {i}')


def consumer(queue):
    """Consume items from *queue* until the None sentinel is received."""
    while True:
        item = queue.get()
        if item is None:
            break  # sentinel: producer is done
        print(f'Consumed {item}')


if __name__ == '__main__':
    # Create the queue shared by both processes.
    queue = multiprocessing.Queue()
    # Create and start producer and consumer processes.
    producer_process = multiprocessing.Process(target=producer, args=(queue,))
    consumer_process = multiprocessing.Process(target=consumer, args=(queue,))
    producer_process.start()
    consumer_process.start()
    # Wait for the producer process to finish.
    producer_process.join()
    # Send the sentinel so the consumer's loop exits, then wait for it.
    queue.put(None)
    consumer_process.join()
2. Pipe: a way to create a pair of connected Connection objects for two-way messaging between processes.
def sender(conn):
    """Send a greeting message over *conn*, then close the connection."""
    conn.send("Hello from sender")
    conn.close()


def receiver(conn):
    """Receive a single message from *conn* and print it."""
    msg = conn.recv()
    print(f"Received: {msg}")


if __name__ == '__main__':
    # Create a pipe; each end is a Connection object.
    parent_conn, child_conn = multiprocessing.Pipe()
    # Create and start sender and receiver processes, one per pipe end.
    sender_process = multiprocessing.Process(target=sender, args=(child_conn,))
    receiver_process = multiprocessing.Process(target=receiver, args=(parent_conn,))
    sender_process.start()
    receiver_process.start()
    # Wait for both processes to finish.
    sender_process.join()
    receiver_process.join()
Here’s an example workflow demonstrating multiprocessing in Python:
import multiprocessing
import time
def worker(num):
    """Process worker function: report *num*, then simulate work."""
    print(f'Worker: {num}')
    time.sleep(1)  # simulate one second of work


if __name__ == '__main__':
    jobs = []
    for i in range(5):
        p = multiprocessing.Process(target=worker, args=(i,))
        jobs.append(p)
        p.start()
    # Wait for every worker process to terminate.
    for job in jobs:
        job.join()
    print("All processes finished execution")