@@ -465,14 +465,12 @@ class NonEmptySubSectIterator : public SparseIterator {
   NonEmptySubSectIterator(OpBuilder &b, Location l,
                           const SparseIterator *parent,
                           std::unique_ptr<SparseIterator> &&delegate,
-                          Value subSectSz, unsigned stride)
+                          Value subSectSz)
       : SparseIterator(IterKind::kNonEmptySubSect, delegate->tid, delegate->lvl,
                        /*itVals=*/subSectMeta),
-        subSectSz(subSectSz), stride(stride), parent(parent),
-        delegate(std::move(delegate)) {
-
+        parent(parent), delegate(std::move(delegate)),
+        tupleSz(this->delegate->serialize().size()), subSectSz(subSectSz) {
     auto *p = dyn_cast_or_null<NonEmptySubSectIterator>(parent);
-    assert(stride == 1);
     if (p == nullptr) {
       // Extract subsections along the root level.
       maxTupleCnt = C_IDX(1);
@@ -488,8 +486,6 @@ class NonEmptySubSectIterator : public SparseIterator {
     // We don't need an extra buffer to find subsections on dense levels.
     if (randomAccessible())
       return;
-    // The number of values we need to store to serialize the wrapped iterator.
-    tupleSz = this->delegate->serialize().size();
     subSectPosBuf = allocSubSectPosBuf(b, l);
   }

@@ -574,7 +570,6 @@
   }

   Value toSubSectCrd(OpBuilder &b, Location l, Value wrapCrd) const {
-    assert(stride == 1);
     return SUBI(wrapCrd, getAbsOff());
   }

@@ -598,18 +593,17 @@
   Value getAbsOff() const { return subSectMeta[1]; }
   Value getNotEnd() const { return subSectMeta[2]; }

+  const SparseIterator *parent;
+  std::unique_ptr<SparseIterator> delegate;
+
   // Number of values required to serialize the wrapped iterator.
-  unsigned tupleSz;
+  const unsigned tupleSz;
   // Max number of tuples, and the actual number of tuples.
   Value maxTupleCnt, tupleCnt;
   // The memory used to cache the tuple serialized from the wrapped iterator.
   Value subSectPosBuf;

   const Value subSectSz;
-  const unsigned stride;
-
-  const SparseIterator *parent;
-  std::unique_ptr<SparseIterator> delegate;

   Value subSectMeta[3]; // minCrd, absolute offset, notEnd
 };
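A note on the member reordering above (this is commentary, not part of the patch): C++ initializes non-static data members in declaration order, not in the order they appear in the constructor's initializer list. Since tupleSz is now computed from the delegate inside the member initializer list, delegate must be declared before tupleSz, which is why both pointer members moved to the top of the block. A minimal self-contained sketch of the pitfall, using hypothetical stand-in types (Wrapped and SubSect are illustrative, not the real classes):

#include <cassert>
#include <memory>
#include <vector>

// Hypothetical stand-in for the wrapped iterator; only serialize() matters.
struct Wrapped {
  std::vector<int> serialize() const { return {7, 8, 9}; }
};

struct SubSect {
  // Members are initialized in this declaration order. If `tupleSz` were
  // declared first, tupleSz(this->delegate->serialize().size()) would run
  // before `delegate` is initialized and dereference a garbage pointer.
  std::unique_ptr<Wrapped> delegate;
  const unsigned tupleSz;

  explicit SubSect(std::unique_ptr<Wrapped> &&d)
      : delegate(std::move(d)),
        tupleSz(this->delegate->serialize().size()) {}
};

int main() {
  SubSect s(std::make_unique<Wrapped>());
  assert(s.tupleSz == 3); // three serialized values
  return 0;
}

Making tupleSz a const member (rather than assigning it later in the constructor body, as the old code did) also lets the compiler enforce that it is set exactly once.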
@@ -1189,8 +1183,6 @@ ValueRange NonEmptySubSectIterator::forward(OpBuilder &b, Location l) {
   Value minAbsOff = ADDI(getAbsOff(), c1);
   nxAbsOff = b.create<arith::MaxUIOp>(l, minAbsOff, nxAbsOff);

-  assert(stride == 1 && "Not yet implemented");
-
   seek(ValueRange{nxMinCrd, nxAbsOff, nxNotEnd});
   // The coordinate should not exceed the space upper bound.
   Value crd = deref(b, l);
@@ -1286,7 +1278,7 @@ std::unique_ptr<SparseIterator> sparse_tensor::makeNonEmptySubSectIterator(
   // Try to unwrap the NonEmptySubSectIterator from a filter parent.
   parent = tryUnwrapFilter<NonEmptySubSectIterator>(parent);
   auto it = std::make_unique<NonEmptySubSectIterator>(
-      b, l, parent, std::move(delegate), size, 1);
+      b, l, parent, std::move(delegate), size);

   if (stride != 1)
     return std::make_unique<FilterIterator>(std::move(it), /*offset=*/C_IDX(0),
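The last hunk captures the design choice behind the whole patch: NonEmptySubSectIterator no longer knows about strides at all; a non-unit stride is handled by composing a FilterIterator around the finished iterator at construction time. Below is a minimal sketch of that decorator-style composition under a hypothetical, much-simplified interface (Iter, RangeIter, StrideFilter, and makeIter are illustrative names only, not the actual SparseIterator API):

#include <cstdio>
#include <memory>

// Hypothetical simplified iterator interface.
struct Iter {
  virtual ~Iter() = default;
  virtual bool notEnd() const = 0;
  virtual int deref() const = 0;
  virtual void forward() = 0;
};

// Base iterator over [0, ub): knows nothing about strides.
struct RangeIter : Iter {
  int crd = 0, ub;
  explicit RangeIter(int ub) : ub(ub) {}
  bool notEnd() const override { return crd < ub; }
  int deref() const override { return crd; }
  void forward() override { ++crd; }
};

// Decorator that skips wrapped coordinates not on the stride grid, in the
// spirit of how a FilterIterator can wrap any other iterator.
struct StrideFilter : Iter {
  std::unique_ptr<Iter> wrap;
  int stride;
  StrideFilter(std::unique_ptr<Iter> w, int s)
      : wrap(std::move(w)), stride(s) { skip(); }
  void skip() {
    while (wrap->notEnd() && wrap->deref() % stride != 0)
      wrap->forward();
  }
  bool notEnd() const override { return wrap->notEnd(); }
  int deref() const override { return wrap->deref() / stride; } // filtered crd
  void forward() override { wrap->forward(); skip(); }
};

std::unique_ptr<Iter> makeIter(int ub, int stride) {
  auto it = std::make_unique<RangeIter>(ub);
  if (stride == 1)
    return it; // fast path: no wrapper needed
  return std::make_unique<StrideFilter>(std::move(it), stride);
}

int main() {
  for (auto it = makeIter(10, 3); it->notEnd(); it->forward())
    std::printf("%d ", it->deref()); // prints: 0 1 2 3
  return 0;
}

The payoff is that stride handling lives in exactly one wrapper instead of being reimplemented (or merely asserted away) in every iterator, which is what all the removed assert(stride == 1) statements had been guarding against.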