I want to write a script that launches two processes, process1.exe and process2.exe, and then runs some checks and additional code. If process2.exe terminates prematurely, the script must kill process1.exe and exit, stopping everything that is still running, including the check code started after both processes were launched. If the checks finish first, the script should instead wait for process2.exe to close and then close process1.exe.
The code that I am running is as follows:
import psutil
import subprocess
import multiprocessing
import os
import time

def findPID():
    process_pid = {}
    process_list = [p.info for p in psutil.process_iter(['name', 'pid'])]
    for proc in process_list:
        if proc['name'] == "process2.exe" or proc['name'] == "process1.exe":
            process_name = proc['name']
            process_pid[f"{process_name}"] = proc["pid"]
    return process_pid

def killSimunteneously(event, queue, running_process, process_pid):
    while True:
        if not psutil.pid_exists(process_pid['process2.exe']):
            try:
                process_to_kill = psutil.Process(process_pid['process1.exe'])
                process_to_kill.terminate()
                break
            except:
                break
        else:
            time.sleep(2)
    queue.put("killSimunteneously Completed!")
    event.set()
    if running_process.is_alive():
        print("Terminating checkStatus")
        running_process.terminate()

def checkStatus(event, queue):
    # Some Code
    queue.put("checkStatus Completed!")
    if not event.is_set():
        print("Waiting for killSimunteneously")
        event.wait()

def main():
    os.startfile(".\process2.exe")
    subprocess.Popen([".\process1.exe"], creationflags=subprocess.CREATE_NEW_CONSOLE)
    bg_process_PID = findPID()
    queue = multiprocessing.Queue()
    event = multiprocessing.Event()
    p1 = multiprocessing.Process(target=checkStatus, args=(event, queue))
    p1.start()
    p2 = multiprocessing.Process(target=killSimunteneously, args=(event, queue, p1, bg_process_PID,))
    p2.start()
    p1.join()
    p2.join()
    while not queue.empty():
        print(queue.get())

if __name__ == "__main__":
    main()
The error when executing the script:
Traceback (most recent call last):
File "D:\codes\script.py", line 71, in <module>
main()
File "D:\codes\script.py", line 62, in main
p2.start()
File "C:\Users\ADMIN\AppData\Local\Programs\Python\Python310\lib\multiprocessing\process.py", line 121, in start
self._popen = self._Popen(self)
File "C:\Users\ADMIN\AppData\Local\Programs\Python\Python310\lib\multiprocessing\context.py", line 224, in _Popen
return _default_context.get_context().Process._Popen(process_obj)
File "C:\Users\ADMIN\AppData\Local\Programs\Python\Python310\lib\multiprocessing\context.py", line 336, in _Popen
return Popen(process_obj)
File "C:\Users\ADMIN\AppData\Local\Programs\Python\Python310\lib\multiprocessing\popen_spawn_win32.py", line 93, in __init__
reduction.dump(process_obj, to_child)
File "C:\Users\ADMIN\AppData\Local\Programs\Python\Python310\lib\multiprocessing\reduction.py", line 60, in dump
ForkingPickler(file, protocol).dump(obj)
TypeError: cannot pickle 'weakref.ReferenceType' object
Traceback (most recent call last):
File "<string>", line 1, in <module>
File "C:\Users\ADMIN\AppData\Local\Programs\Python\Python310\lib\multiprocessing\spawn.py", line 116, in spawn_main
exitcode = _main(fd, parent_sentinel)
File "C:\Users\ADMIN\AppData\Local\Programs\Python\Python310\lib\multiprocessing\spawn.py", line 126, in _main
self = reduction.pickle.load(from_parent)
EOFError: Ran out of input
Running checkStatus() and killSimunteneously() sequentially, without multiprocessing.Process(), worked correctly. However, if process2.exe terminated before the checks finished, process1.exe was not terminated.
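The error seems to be reproducible with just the part where the started Process object p1 is passed as an argument to the second Process: with the spawn start method on Windows every argument is pickled, and a started Process apparently cannot be pickled. A minimal sketch (the child function is only a placeholder):

import multiprocessing

def child(other_process):
    # Placeholder: never runs, because pickling the argument already fails.
    pass

if __name__ == "__main__":
    p1 = multiprocessing.Process(target=print, args=("hello",))
    p1.start()
    # Arguments are pickled for the spawned child; a started Process object
    # holds weakrefs internally, so this raises
    # TypeError: cannot pickle 'weakref.ReferenceType' object
    p2 = multiprocessing.Process(target=child, args=(p1,))
    p2.start()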
Initially I tried this without a queue or event, with the rest of the code unchanged. In that case process2.exe was launched three times instead of once, but only when running the .exe built with PyInstaller; the .py file behaved as expected.
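My guess is that this is related to the multiprocessing.freeze_support() call that the multiprocessing docs require for executables frozen for Windows: without it, a spawned child of the PyInstaller .exe re-runs the main module from the top, which would match process2.exe being launched once per extra process. A sketch of where the call would go (a guess on my part, untested with my build):

import multiprocessing

if __name__ == "__main__":
    # Must be the first statement under this guard in a frozen Windows exe;
    # in a spawned child this call takes over instead of re-running main().
    multiprocessing.freeze_support()
    main()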
I removed part of your code; I hope I didn't misunderstand your needs. The pickling error comes from handing the started Process object p1 to the second Process: with the spawn start method on Windows every argument is pickled, and a started Process cannot be pickled. If you keep the Popen handles returned by subprocess.Popen, killSimunteneously and the psutil PID lookup are no longer needed, because main() can wait on process2 and terminate process1 directly.
import subprocess
import multiprocessing

def checkStatus(event, queue):
    # Some Code
    queue.put("checkStatus Completed!")
    if not event.is_set():
        print("Waiting for killSimunteneously")
        event.wait()

def main():
    process1 = subprocess.Popen(["./process1.exe"])
    process2 = subprocess.Popen(["./process2.exe"], creationflags=subprocess.CREATE_NEW_CONSOLE)
    queue = multiprocessing.Queue()
    event = multiprocessing.Event()
    p1 = multiprocessing.Process(target=checkStatus, args=(event, queue))
    p1.start()
    # Wait for process 2 to finish
    process2.wait()
    # Terminate process 1 after process 2 has finished.
    process1.terminate()
    event.set()
    p1.join()
    while not queue.empty():
        print(queue.get())

if __name__ == "__main__":
    main()
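In this version the checks keep running if process2.exe exits early; main() only sets the event once process2 has finished. If the checks themselves must also stop the moment process2.exe exits prematurely, a possible variant (assuming the check code is safe to kill abruptly) is to terminate p1 as well when it is still alive at that point:

# Reuses checkStatus() and the imports from the snippet above; only main() changes.
def main():
    process1 = subprocess.Popen(["./process1.exe"])
    process2 = subprocess.Popen(["./process2.exe"], creationflags=subprocess.CREATE_NEW_CONSOLE)
    queue = multiprocessing.Queue()
    event = multiprocessing.Event()
    p1 = multiprocessing.Process(target=checkStatus, args=(event, queue))
    p1.start()
    process2.wait()          # returns as soon as process2.exe exits, early or not
    process1.terminate()
    event.set()              # lets checkStatus return if it is already waiting
    p1.join(timeout=5)       # give finished checks a moment to exit cleanly
    if p1.is_alive():
        p1.terminate()       # checks still running -> stop them too
    p1.join()
    while not queue.empty():
        print(queue.get())

if __name__ == "__main__":
    main()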