Skip to content

ValueError: need at least one array to concatenate #145

@sangyuyyyyy

Description

@sangyuyyyyy

I encountered the following error when running test.py:
Image

Here’s what I found:
1: With the following two commands, the first one triggers the error, while the second one correctly outputs the test accuracy:
Image

2: The test dataset consists of five paired images, organized according to the LEVIR dataset structure. The test, val, and train folders each contain three subfolders: A, B, and label. I have already modified the image format in the test_dataloader configuration to match the actual data.

Here’s some basic information:
The error message:

05/30 19:55:41 - mmengine - WARNING - The prefix is not set in metric class IoUMetric.
Loads checkpoint by local backend from path: fc-ef_second-dataset_workdir/best_mIoU_epoch_70.pth
05/30 19:55:42 - mmengine - INFO - Load checkpoint from fc-ef_second-dataset_workdir/best_mIoU_epoch_70.pth
Traceback (most recent call last):
File "tools/test.py", line 161, in <module>
main()
File "tools/test.py", line 154, in main
runner.test()
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/runner.py", line 1823, in test
metrics = self.test_loop.run() # type: ignore
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/loops.py", line 463, in run
self.run_iter(idx, data_batch)
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/torch/utils/_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/loops.py", line 492, in run_iter
self.runner.call_hook(
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/runner.py", line 1839, in call_hook
getattr(hook, fn_name)(self, **kwargs)
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/hooks/hook.py", line 277, in after_test_iter
self._after_iter(
File "/data1/lin/open-cd/opencd/engine/hooks/visualization_hook.py", line 113, in _after_iter
step=runner.iter,
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/runner.py", line 540, in iter
if isinstance(self.train_loop, BaseLoop):
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/runner.py", line 591, in train_loop
self._train_loop = self.build_train_loop(self._train_loop)
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/runner.py", line 1520, in build_train_loop
loop = LOOPS.build(
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/registry/registry.py", line 570, in build
return self.build_func(cfg, *args, **kwargs, registry=self)
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/registry/build_functions.py", line 123, in build_from_cfg
obj = obj_cls(**args) # type: ignore
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/loops.py", line 46, in __init__
super().__init__(runner, dataloader)
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/base_loop.py", line 26, in init
self.dataloader = runner.build_dataloader(
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/runner/runner.py", line 1370, in build_dataloader
dataset = DATASETS.build(dataset_cfg)
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/registry/registry.py", line 570, in build
return self.build_func(cfg, *args, **kwargs, registry=self)
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/registry/build_functions.py", line 123, in build_from_cfg
obj = obj_cls(**args) # type: ignore
File "/data1/lin/open-cd/opencd/datasets/levir_cd.py", line 18, in __init__
super().__init__(
File "/data1/lin/open-cd/opencd/datasets/basecddataset.py", line 141, in __init__
self.full_init()
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/dataset/base_dataset.py", line 307, in full_init
self.data_bytes, self.data_address = self._serialize_data()
File "/home/root123/anaconda3/envs/opencd/lib/python3.8/site-packages/mmengine/dataset/base_dataset.py", line 768, in _serialize_data
data_bytes = np.concatenate(data_list)
File "<__array_function__ internals>", line 200, in concatenate
ValueError: need at least one array to concatenate

#The data configuration file:
# Change-detection config for a LEVIR-CD-structured dataset (mmengine style).
# Reconstructed from a markdown-garbled paste: markdown italics ate the
# underscores in `_base_` and the `#` on section headers, which made the
# pasted text invalid Python. All option values are unchanged.
# NOTE(review): the reported "need at least one array to concatenate" means a
# dataset scan found zero samples — verify that `img_suffix`/`seg_map_suffix`
# match the actual files under data_root for EVERY split (the test-time
# visualization hook builds the *train* dataloader too, per the traceback).
_base_ = '../_base_/default_runtime.py'  # presumably '_base_' dir — confirm against repo layout

dataset_type = 'LEVIR_CD_Dataset'
data_root = '/data1/lin/data/'

crop_size = (256, 256)
train_pipeline = [
    dict(type='MultiImgLoadImageFromFile'),
    dict(type='MultiImgLoadAnnotations'),
    dict(type='MultiImgRandomRotate', prob=0.5, degree=180),
    dict(type='MultiImgRandomCrop', crop_size=crop_size, cat_max_ratio=0.75),
    dict(type='MultiImgRandomFlip', prob=0.5, direction='horizontal'),
    dict(type='MultiImgRandomFlip', prob=0.5, direction='vertical'),
    dict(
        type='MultiImgPhotoMetricDistortion',
        brightness_delta=10,
        contrast_range=(0.8, 1.2),
        saturation_range=(0.8, 1.2),
        hue_delta=10),
    dict(type='MultiImgPackSegInputs')
]
test_pipeline = [
    dict(type='MultiImgLoadImageFromFile'),
    dict(type='MultiImgResize', scale=(512,512), keep_ratio=True),
    dict(type='MultiImgLoadAnnotations'),
    dict(type='MultiImgPackSegInputs')
]
img_ratios = [0.75, 1.0, 1.25]
tta_pipeline = [
    dict(type='MultiImgLoadImageFromFile', backend_args=None),
    dict(
        type='TestTimeAug',
        transforms=[
            [
                dict(type='MultiImgResize', scale_factor=r, keep_ratio=True)
                for r in img_ratios
            ],
            [
                dict(type='MultiImgRandomFlip', prob=0., direction='horizontal'),
                dict(type='MultiImgRandomFlip', prob=1., direction='horizontal')
            ],
            [dict(type='MultiImgLoadAnnotations')],
            [dict(type='MultiImgPackSegInputs')]
        ])
]
train_dataloader = dict(
    batch_size=8,
    num_workers=4,
    persistent_workers=True,
    sampler=dict(type='DefaultSampler', shuffle=True),
    dataset=dict(
        type=dataset_type,
        data_root=data_root,
        data_prefix=dict(
            seg_map_path='train/label',
            img_path_from='train/A',
            img_path_to='train/B'),
        img_suffix='.png',
        seg_map_suffix='.png',
        pipeline=train_pipeline))
val_dataloader = dict(
    batch_size=1,
    num_workers=4,
    persistent_workers=True,
    sampler=dict(type='DefaultSampler', shuffle=False),
    dataset=dict(
        type=dataset_type,
        data_root=data_root,
        data_prefix=dict(
            seg_map_path='val/label',
            img_path_from='val/A',
            img_path_to='val/B'),
        img_suffix='.png',
        seg_map_suffix='.png',
        pipeline=test_pipeline))
test_dataloader = dict(
    batch_size=1,
    num_workers=4,
    persistent_workers=True,
    sampler=dict(type='DefaultSampler', shuffle=False),
    dataset=dict(
        type=dataset_type,
        data_root=data_root,
        data_prefix=dict(
            seg_map_path='test/label',
            img_path_from='test/A',
            img_path_to='test/B'),
        img_suffix='.png',
        seg_map_suffix='.png',
        pipeline=test_pipeline))

val_evaluator = dict(type='mmseg.IoUMetric', iou_metrics=['mFscore', 'mIoU'])
test_evaluator = dict(
    type='mmseg.IoUMetric',
    iou_metrics=['mFscore', 'mIoU'])

# optimizer
optimizer = dict(
    type='AdamW', lr=0.001, betas=(0.9, 0.999), weight_decay=0.05)
optim_wrapper = dict(type='OptimWrapper', optimizer=optimizer)

# learning policy
param_scheduler = [
    # linear warmup over the first 5 epochs
    dict(
        type='LinearLR', start_factor=1e-6, by_epoch=True, begin=0, end=5, convert_to_iter_based=True),
    # then linear (power=1.0) poly decay down to 0 until epoch 100
    dict(
        type='PolyLR',
        power=1.0,
        begin=5,
        end=100,
        eta_min=0.0,
        by_epoch=True,
        convert_to_iter_based=True
    )
]

# training schedule for 100 epochs
train_cfg = dict(type='EpochBasedTrainLoop', max_epochs=100, val_interval=10)
val_cfg = dict(type='ValLoop')
test_cfg = dict(type='TestLoop')
default_hooks = dict(
    timer=dict(type='IterTimerHook'),
    logger=dict(type='LoggerHook', interval=50, log_metric_by_epoch=True),
    param_scheduler=dict(type='ParamSchedulerHook'),
    checkpoint=dict(type='CheckpointHook', by_epoch=True, interval=10,
                    save_best='mIoU'),
    sampler_seed=dict(type='DistSamplerSeedHook'),
    visualization=dict(type='CDVisualizationHook', interval=1,
                       img_shape=(512, 512, 3)))

log_processor = dict(type='LogProcessor', window_size=50, by_epoch=True)

Metadata

Metadata

Assignees

No one assigned

    Labels

    No labels
    No labels

    Projects

    No projects

    Milestone

    No milestone

    Relationships

    None yet

    Development

    No branches or pull requests

    Issue actions