Fix Windows multiprocessing and shared-memory errors

This commit is contained in:
xiefangqi 2021-10-20 14:41:47 +08:00
parent a97ed64880
commit ae5e7847f2
2 changed files with 14 additions and 6 deletions

View File

@ -17,6 +17,7 @@ The configuration module provides various functions to set and get the supported
configuration parameters, and read a configuration file.
"""
import os
import platform
import random
import time
import numpy
@ -430,6 +431,10 @@ def get_enable_shared_mem():
>>> # Get the flag of shared memory feature.
>>> shared_mem_flag = ds.config.get_enable_shared_mem()
"""
# For windows we forbid shared mem function temporarily
if platform.system().lower() == 'windows':
logger.warning("For windows we forbid shared mem function temporarily.")
return False
return _config.get_enable_shared_mem()

View File

@ -2149,8 +2149,9 @@ class BatchDataset(Dataset):
arg_q_list = []
res_q_list = []
# Register clean zombie subprocesses signal here
signal.signal(signal.SIGCHLD, wait_child_processes)
if platform.system().lower() != 'windows':
# Register clean zombie subprocesses signal here
signal.signal(signal.SIGCHLD, wait_child_processes)
# If user didn't specify num_parallel_workers, set it to default
if self.num_parallel_workers is not None:
@ -2647,8 +2648,9 @@ class MapDataset(Dataset):
callable_list.append(op)
if callable_list:
# Register clean zombie subprocesses signal here
signal.signal(signal.SIGCHLD, wait_child_processes)
if platform.system().lower() != 'windows':
# Register clean zombie subprocesses signal here
signal.signal(signal.SIGCHLD, wait_child_processes)
# Construct pool with the callable list
# The callable list and _pyfunc_worker_init are used to pass lambda function in to subprocesses
@ -3593,8 +3595,9 @@ class SamplerFn:
self.pid = []
# Event for end of epoch
if multi_process is True:
# Register clean zombie subprocesses signal here
signal.signal(signal.SIGCHLD, wait_child_processes)
if platform.system().lower() != 'windows':
# Register clean zombie subprocesses signal here
signal.signal(signal.SIGCHLD, wait_child_processes)
try:
self.eof = multiprocessing.Event()