|
@@ -683,8 +683,6 @@ xfs_alloc_ag_vextent_near(
|
|
xfs_agblock_t ltbno; /* start bno of left side entry */
|
|
xfs_agblock_t ltbno; /* start bno of left side entry */
|
|
xfs_agblock_t ltbnoa; /* aligned ... */
|
|
xfs_agblock_t ltbnoa; /* aligned ... */
|
|
xfs_extlen_t ltdiff; /* difference to left side entry */
|
|
xfs_extlen_t ltdiff; /* difference to left side entry */
|
|
- /*REFERENCED*/
|
|
|
|
- xfs_agblock_t ltend; /* end bno of left side entry */
|
|
|
|
xfs_extlen_t ltlen; /* length of left side entry */
|
|
xfs_extlen_t ltlen; /* length of left side entry */
|
|
xfs_extlen_t ltlena; /* aligned ... */
|
|
xfs_extlen_t ltlena; /* aligned ... */
|
|
xfs_agblock_t ltnew; /* useful start bno of left side */
|
|
xfs_agblock_t ltnew; /* useful start bno of left side */
|
|
@@ -809,8 +807,7 @@ xfs_alloc_ag_vextent_near(
|
|
if ((error = xfs_alloc_get_rec(cnt_cur, &ltbno, &ltlen, &i)))
|
|
if ((error = xfs_alloc_get_rec(cnt_cur, &ltbno, &ltlen, &i)))
|
|
goto error0;
|
|
goto error0;
|
|
XFS_WANT_CORRUPTED_GOTO(i == 1, error0);
|
|
XFS_WANT_CORRUPTED_GOTO(i == 1, error0);
|
|
- ltend = ltbno + ltlen;
|
|
|
|
- ASSERT(ltend <= be32_to_cpu(XFS_BUF_TO_AGF(args->agbp)->agf_length));
|
|
|
|
|
|
+ ASSERT(ltbno + ltlen <= be32_to_cpu(XFS_BUF_TO_AGF(args->agbp)->agf_length));
|
|
args->len = blen;
|
|
args->len = blen;
|
|
if (!xfs_alloc_fix_minleft(args)) {
|
|
if (!xfs_alloc_fix_minleft(args)) {
|
|
xfs_btree_del_cursor(cnt_cur, XFS_BTREE_NOERROR);
|
|
xfs_btree_del_cursor(cnt_cur, XFS_BTREE_NOERROR);
|
|
@@ -823,7 +820,7 @@ xfs_alloc_ag_vextent_near(
|
|
*/
|
|
*/
|
|
args->agbno = bnew;
|
|
args->agbno = bnew;
|
|
ASSERT(bnew >= ltbno);
|
|
ASSERT(bnew >= ltbno);
|
|
- ASSERT(bnew + blen <= ltend);
|
|
|
|
|
|
+ ASSERT(bnew + blen <= ltbno + ltlen);
|
|
/*
|
|
/*
|
|
* Set up a cursor for the by-bno tree.
|
|
* Set up a cursor for the by-bno tree.
|
|
*/
|
|
*/
|
|
@@ -1152,7 +1149,6 @@ xfs_alloc_ag_vextent_near(
|
|
/*
|
|
/*
|
|
* Fix up the length and compute the useful address.
|
|
* Fix up the length and compute the useful address.
|
|
*/
|
|
*/
|
|
- ltend = ltbno + ltlen;
|
|
|
|
args->len = XFS_EXTLEN_MIN(ltlena, args->maxlen);
|
|
args->len = XFS_EXTLEN_MIN(ltlena, args->maxlen);
|
|
xfs_alloc_fix_len(args);
|
|
xfs_alloc_fix_len(args);
|
|
if (!xfs_alloc_fix_minleft(args)) {
|
|
if (!xfs_alloc_fix_minleft(args)) {
|
|
@@ -1165,7 +1161,7 @@ xfs_alloc_ag_vextent_near(
|
|
(void)xfs_alloc_compute_diff(args->agbno, rlen, args->alignment, ltbno,
|
|
(void)xfs_alloc_compute_diff(args->agbno, rlen, args->alignment, ltbno,
|
|
ltlen, &ltnew);
|
|
ltlen, &ltnew);
|
|
ASSERT(ltnew >= ltbno);
|
|
ASSERT(ltnew >= ltbno);
|
|
- ASSERT(ltnew + rlen <= ltend);
|
|
|
|
|
|
+ ASSERT(ltnew + rlen <= ltbno + ltlen);
|
|
ASSERT(ltnew + rlen <= be32_to_cpu(XFS_BUF_TO_AGF(args->agbp)->agf_length));
|
|
ASSERT(ltnew + rlen <= be32_to_cpu(XFS_BUF_TO_AGF(args->agbp)->agf_length));
|
|
args->agbno = ltnew;
|
|
args->agbno = ltnew;
|
|
if ((error = xfs_alloc_fixup_trees(cnt_cur, bno_cur_lt, ltbno, ltlen,
|
|
if ((error = xfs_alloc_fixup_trees(cnt_cur, bno_cur_lt, ltbno, ltlen,
|