Skip to content

Commit ada4435

Browse files
committed
fix: use LinkedHashSet to prevent child directories from being stored in the buffer in the wrong order
1 parent 8874001 commit ada4435

File tree

2 files changed

+14
-16
lines changed

2 files changed

+14
-16
lines changed

hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/api/types/NSSummary.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
import static org.apache.hadoop.ozone.om.helpers.OzoneFSUtils.removeTrailingSlashIfNeeded;
2121

2222
import java.util.Arrays;
23-
import java.util.HashSet;
23+
import java.util.LinkedHashSet;
2424
import java.util.Set;
2525
import org.apache.hadoop.ozone.recon.ReconConstants;
2626

@@ -44,7 +44,7 @@ public class NSSummary {
4444

4545
public NSSummary() {
4646
this(0, 0L, new int[ReconConstants.NUM_OF_FILE_SIZE_BINS],
47-
new HashSet<>(), "", 0);
47+
new LinkedHashSet<>(), "", 0);
4848
}
4949

5050
public NSSummary(int numOfFiles,

hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/codec/NSSummaryCodec.java

Lines changed: 12 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,7 @@
2424
import java.io.IOException;
2525
import java.nio.ByteBuffer;
2626
import java.util.HashSet;
27+
import java.util.LinkedHashSet;
2728
import java.util.Set;
2829
import org.apache.hadoop.hdds.utils.db.Codec;
2930
import org.apache.hadoop.hdds.utils.db.CodecBuffer;
@@ -76,7 +77,7 @@ public CodecBuffer toCodecBuffer(@Nonnull NSSummary object, CodecBuffer.Allocato
7677
int dirNameSize = dirNameBuffer.readableBytes();
7778

7879
// total size: primitives + childDirs + dirName length + dirName data
79-
final int totalSize = Integer.BYTES * (3 + ReconConstants.NUM_OF_FILE_SIZE_BINS) // numFiles + sizes + buckets
80+
final int totalSize = Integer.BYTES * (3 + ReconConstants.NUM_OF_FILE_SIZE_BINS) // numFiles + numOfChildDirs + dirNameSize + fileSizeBucket
8081
+ Long.BYTES * (numOfChildDirs + 2) // childDirs + sizeOfFiles + parentId
8182
+ Short.BYTES // fileSizeBucket length
8283
+ dirNameSize; // actual dirName bytes
@@ -101,7 +102,6 @@ public CodecBuffer toCodecBuffer(@Nonnull NSSummary object, CodecBuffer.Allocato
101102
if (dirNameSize > 0) {
102103
buffer.put(dirNameBuffer.asReadOnlyByteBuffer());
103104
}
104-
105105
buffer.putLong(object.getParentId());
106106

107107
return buffer;
@@ -129,25 +129,23 @@ public NSSummary fromCodecBuffer(@Nonnull CodecBuffer buffer) throws CodecExcept
129129
result.setFileSizeBucket(fileSizeBucket);
130130

131131
int numChildDirs = byteBuffer.getInt();
132-
Set<Long> childDirs = new HashSet<>(numChildDirs);
132+
Set<Long> childDirs = new LinkedHashSet<>(numChildDirs);
133133
for (int i = 0; i < numChildDirs; ++i) {
134134
childDirs.add(byteBuffer.getLong());
135135
}
136136
result.setChildDir(childDirs);
137137

138138
int dirNameSize = byteBuffer.getInt();
139-
if (dirNameSize > 0) {
140-
byte[] dirNameBytes = new byte[dirNameSize];
141-
byteBuffer.get(dirNameBytes);
142-
String dirName = stringCodec.fromPersistedFormat(dirNameBytes);
143-
result.setDirName(dirName);
139+
if (dirNameSize == 0) {
140+
return result;
144141
}
145142

146-
if (byteBuffer.remaining() >= Long.BYTES) {
147-
result.setParentId(byteBuffer.getLong());
148-
} else {
149-
result.setParentId(-1);
150-
}
143+
byte[] dirNameBytes = new byte[dirNameSize];
144+
byteBuffer.get(dirNameBytes);
145+
CodecBuffer dirNameBuffer = CodecBuffer.wrap(dirNameBytes);
146+
String dirName = stringCodec.fromCodecBuffer(dirNameBuffer);
147+
result.setDirName(dirName);
148+
result.setParentId(byteBuffer.getLong());
151149

152150
return result;
153151
} catch (Exception e) {
@@ -203,7 +201,7 @@ public NSSummary fromPersistedFormatImpl(byte[] rawData) throws IOException {
203201
res.setFileSizeBucket(fileSizeBucket);
204202

205203
int listSize = in.readInt();
206-
Set<Long> childDir = new HashSet<>();
204+
Set<Long> childDir = new LinkedHashSet<>();
207205
for (int i = 0; i < listSize; ++i) {
208206
childDir.add(in.readLong());
209207
}

0 commit comments

Comments (0)