@@ -134,15 +134,28 @@ class ChunkColumn180 extends ChunkColumn13 {
   networkDecodeNoCache (buffer, sectionCount) {
     const stream = buffer instanceof Buffer ? new Stream(buffer) : buffer
 
-    if (sectionCount === -1 || sectionCount === -2) { // In 1.18+, with sectionCount as -1/-2 we only get the biomes here
-      this.loadBiomes(stream, StorageType.Runtime)
-      const borderblocks = stream.readBuffer(stream.readZigZagVarInt())
-      if (borderblocks.length) {
-        throw new Error(`Can't handle border blocks (length: ${borderblocks.length})`)
+    if (sectionCount !== -1 && sectionCount !== -2) { // In 1.18+, with sectionCount as -1/-2 we only get the biomes here
+      this.sections = []
+      for (let i = 0; i < sectionCount; i++) {
+        // in 1.17.30+, chunk index is sent in payload
+        const section = new SubChunk(this.registry, this.Block, { y: i, subChunkVersion: this.subChunkVersion })
+        section.decode(StorageType.Runtime, stream)
+        this.setSection(i, section)
       }
-    } else {
-      // Possible some servers may send us a 1.17 chunk with 1.18 server version
-      super.networkDecodeNoCache(stream, sectionCount)
+    }
+
+    this.loadBiomes(stream, StorageType.Runtime)
+    const borderBlocks = stream.readBuffer(stream.readZigZagVarInt())
+    if (borderBlocks.length) {
+      throw new Error(`Can't handle border blocks (length: ${borderBlocks.length})`)
+    }
+
+    let startOffset = stream.readOffset
+    while (stream.peek() === 0x0A) {
+      const { data, metadata } = nbt.protos.littleVarint.parsePacketBuffer('nbt', stream.buffer, startOffset)
+      stream.readOffset += metadata.size
+      startOffset += metadata.size
+      this.addBlockEntity(data)
     }
   }
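For context, the rewritten decode path now reads the payload in a fixed order: sub-chunk sections (skipped when sectionCount is -1/-2, since those values mean only biomes follow), then the biome palette, then a border-block buffer, and finally any trailing block entities, which Bedrock serializes as consecutive little-endian varint NBT compounds, each starting with the TAG_Compound ID byte 0x0A. Below is a minimal standalone sketch of that trailing loop, assuming prismarine-nbt and a plain Buffer; readBlockEntities is a hypothetical helper written for illustration, not an API from this commit.

const nbt = require('prismarine-nbt')

// Hypothetical helper: parse consecutive NBT compounds starting at `offset`.
// Bedrock appends block entities back-to-back with no count prefix, each
// beginning with the TAG_Compound ID byte (0x0A), so we loop while that
// byte is next.
function readBlockEntities (buffer, offset) {
  const blockEntities = []
  while (offset < buffer.length && buffer[offset] === 0x0A) {
    const { data, metadata } = nbt.protos.littleVarint.parsePacketBuffer('nbt', buffer, offset)
    offset += metadata.size // advance past the compound we just parsed
    blockEntities.push(data)
  }
  return { blockEntities, offset }
}

Because no block-entity count is sent, the decoder in the diff likewise has to scan ahead: it peeks at the next byte and keeps parsing compounds until something other than 0x0A (or the end of the buffer) is reached.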