1.Traceback (most recent call last):
  File "C:/Users/momomo/Desktop/Swin-Unet-main/train.py", line 101, in <module>
    trainer[dataset_name](args, net, args.output_dir)
  File "C:\Users\momomo\Desktop\Swin-Unet-main\trainer.py", line 53, in trainer_synapse
    for i_batch, sampled_batch in enumerate(trainloader):
  File "G:\anaconda\envs\pytorch3.7\lib\site-packages\torch\utils\data\dataloader.py", line 435, in __iter__
    return self._get_iterator()
  File "G:\anaconda\envs\pytorch3.7\lib\site-packages\torch\utils\data\dataloader.py", line 381, in _get_iterator
    return _MultiProcessingDataLoaderIter(self)
  File "G:\anaconda\envs\pytorch3.7\lib\site-packages\torch\utils\data\dataloader.py", line 1034, in __init__
    w.start()
  File "G:\anaconda\envs\pytorch3.7\lib\multiprocessing\process.py", line 112, in start
    self._popen = self._Popen(self)
  File "G:\anaconda\envs\pytorch3.7\lib\multiprocessing\context.py", line 223, in _Popen
    return _default_context.get_context().Process._Popen(process_obj)
  File "G:\anaconda\envs\pytorch3.7\lib\multiprocessing\context.py", line 322, in _Popen
    return Popen(process_obj)
  File "G:\anaconda\envs\pytorch3.7\lib\multiprocessing\popen_spawn_win32.py", line 65, in __init__
    reduction.dump(process_obj, to_child)
  File "G:\anaconda\envs\pytorch3.7\lib\multiprocessing\reduction.py", line 60, in dump
    ForkingPickler(file, protocol).dump(obj)
AttributeError: Can't pickle local object 'trainer_synapse.<locals>.worker_init_fn'

错误原因:Can't pickle local object 'trainer_synapse.<locals>.worker_init_fn'。Windows 下 multiprocessing 以 spawn 方式启动子进程,DataLoader 的多进程 worker 需要把 worker_init_fn 序列化(pickle)后传给子进程,而定义在 trainer_synapse 函数内部的局部函数无法被 pickle。

解决办法:把 trainer.py 里面 trainloader 当中的 num_workers 改成 0(即不启用多进程加载,从而绕过序列化);或者把 worker_init_fn 移到模块顶层定义,这样既能被 pickle,又能保留多进程加载。

改后即可正常进入训练循环。

2.

报错信息:FileNotFoundError: [Errno 2] No such file or directory: './data/Synapse/train_npz\\train_npz\\case0005_slice002.npz'

错误原因:数据集加载代码会在 root_path 后自动拼接 train_npz 子目录;如果 root_path 本身已经写到 train_npz,路径中就会出现两层 train_npz,导致找不到文件。

解决办法:把train.py文件当中的

parser.add_argument('--root_path', type=str,
                    default='./data/Synapse/train_npz', help='root dir for data')

改成

parser.add_argument('--root_path', type=str,
                    default='./data/Synapse', help='root dir for data')
Logo

有“AI”的1024 = 2048,欢迎大家加入2048 AI社区

更多推荐