
Some very tricky errors occurred when running test_bscdfsl.py, hoping for help. #34

@aaawork

Description

G:\awh\pmf_cvpr22-main\engine.py:115: UserWarning: The structure of <datasets.get_bscd_loader.<locals>._Loader object at 0x000002117F54D2E0> is not recognizable.
  warnings.warn(f'The structure of {data_loaders} is not recognizable.')
Traceback (most recent call last):
  File "test_bscdfsl.py", line 116, in <module>
    main(args)
  File "test_bscdfsl.py", line 62, in main
    test_stats = evaluate(data_loader_val, model, criterion, device, seed=1234, ep=5)
  File "G:\awh\pmf_cvpr22-main\engine.py", line 116, in evaluate
    return _evaluate(data_loaders, model, criterion, device, seed)
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\site-packages\torch\autograd\grad_mode.py", line 28, in decorate_context
    return func(*args, **kwargs)
  File "G:\awh\pmf_cvpr22-main\engine.py", line 134, in _evaluate
    for ii, batch in enumerate(metric_logger.log_every(data_loader, 10, header)):
  File "G:\awh\pmf_cvpr22-main\utils\deit_util.py", line 141, in log_every
    for obj in iterable:
  File "G:\awh\pmf_cvpr22-main\datasets\__init__.py", line 178, in _loader_wrap
    for x, y in novel_loader:
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\site-packages\torch\utils\data\dataloader.py", line 359, in __iter__
    return self._get_iterator()
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\site-packages\torch\utils\data\dataloader.py", line 305, in _get_iterator
    return _MultiProcessingDataLoaderIter(self)
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\site-packages\torch\utils\data\dataloader.py", line 918, in __init__
    w.start()
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\multiprocessing\process.py", line 121, in start
    self._popen = self._Popen(self)
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\multiprocessing\context.py", line 224, in _Popen
    return _default_context.get_context().Process._Popen(process_obj)
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\multiprocessing\context.py", line 326, in _Popen
    return Popen(process_obj)
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\multiprocessing\popen_spawn_win32.py", line 93, in __init__
    reduction.dump(process_obj, to_child)
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\multiprocessing\reduction.py", line 60, in dump
    ForkingPickler(file, protocol).dump(obj)
_pickle.PicklingError: Can't pickle <function <lambda> at 0x0000020E0E743040>: attribute lookup <lambda> on datasets.cdfsl.CropDisease_few_shot failed
Traceback (most recent call last):
  File "<string>", line 1, in <module>
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\multiprocessing\spawn.py", line 116, in spawn_main
    exitcode = _main(fd, parent_sentinel)
  File "C:\Users\dhs\.conda\envs\Hlbl\lib\multiprocessing\spawn.py", line 126, in _main
    self = reduction.pickle.load(from_parent)
EOFError: Ran out of input
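
Looking at the last error, the crash happens when the DataLoader tries to start its worker processes: on Windows, multiprocessing uses the spawn start method, so everything handed to a worker has to be picklable, and the message points at a lambda in datasets.cdfsl.CropDisease_few_shot (presumably a module-level transform such as an identity lambda) that cannot be pickled. The second traceback (EOFError: Ran out of input) appears to be just the spawned worker exiting because the parent never managed to send it its pickled arguments. Below is a minimal sketch of the two usual ways around this; the names identity and get_loader are placeholders for illustration, not the repository's actual code.

```python
# Sketch only: shows why the pickling fails on Windows and two common workarounds.
# `identity` and `get_loader` are illustrative names, not the repo's real functions.
from torch.utils.data import DataLoader, Dataset


# A module-level lambda such as `identity = lambda x: x` cannot be pickled,
# which breaks DataLoader workers under the Windows spawn start method.
# Workaround 1: replace the lambda with a named top-level function.
def identity(x):
    return x


def get_loader(dataset: Dataset, batch_size: int) -> DataLoader:
    # Workaround 2: num_workers=0 loads data in the main process and avoids
    # pickling the dataset (and its transforms) altogether, at some speed cost.
    return DataLoader(dataset, batch_size=batch_size, num_workers=0, shuffle=False)
```

Either change (a picklable named function instead of the lambda, or num_workers=0 on the BSCD-FSL loaders) seems worth trying first, assuming the lambda really is what the pickler is choking on.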
