#!/bin/ksh -p
#
# CDDL HEADER START
#
# This file and its contents are supplied under the terms of the
# Common Development and Distribution License ("CDDL"), version 1.0.
# You may only use this file in accordance with the terms of version
# 1.0 of the CDDL.
#
# A full copy of the text of the CDDL should have accompanied this
# source.  A copy of the CDDL is also available via the Internet at
# http://www.illumos.org/license/CDDL.
#
# CDDL HEADER END
#

#
# Copyright (c) 2019 by Tim Chase. All rights reserved.
# Copyright (c) 2019 Lawrence Livermore National Security, LLC.
# Copyright 2019 Joyent, Inc.
#

. $STF_SUITE/include/libtest.shlib
. $STF_SUITE/tests/functional/cli_root/zpool_initialize/zpool_initialize.kshlib
. $STF_SUITE/tests/functional/cli_root/zpool_trim/zpool_trim.kshlib

#
# DESCRIPTION:
# After trimming, the disk is actually trimmed.
#
# STRATEGY:
# 1. Create a one-disk pool using a sparse file.
# 2. Initialize the pool and verify the file vdev is no longer sparse.
# 3. Trim the pool and verify the file vdev is again sparse.
#

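# Clean up on exit: destroy the test pool, remove the working directory, and
# restore the original zfs_trim_extent_bytes_min value.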
function cleanup
{
	if poolexists $TESTPOOL; then
		destroy_pool $TESTPOOL
	fi

	if [[ -d "$TESTDIR" ]]; then
		rm -rf "$TESTDIR"
	fi

	log_must set_tunable32 zfs_trim_extent_bytes_min $trim_extent_bytes_min
}
log_onexit cleanup

LARGESIZE=$((MINVDEVSIZE * 4))
LARGEFILE="$TESTDIR/largefile"

# Reduce the minimum trim extent size to allow for tighter tolerance below
# when checking.
typeset trim_extent_bytes_min=$(get_tunable zfs_trim_extent_bytes_min)
log_must set_tunable32 zfs_trim_extent_bytes_min 4096

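# Back the pool with a single sparse file vdev so allocated space can be
# measured directly from the file's on-disk size.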
log_must mkdir "$TESTDIR"
log_must truncate -s $LARGESIZE "$LARGEFILE"
log_must zpool create $TESTPOOL "$LARGEFILE"

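# Record the initial allocation of the backing file, converting du's
# 512-byte block count to bytes.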
original_size=$(du "$LARGEFILE" | cut -f1)
original_size=$((original_size * 512))

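# Initializing writes to the pool's unallocated space, so the file vdev
# should no longer be sparse afterwards.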
log_must zpool initialize $TESTPOOL

while [[ "$(initialize_progress $TESTPOOL $LARGEFILE)" -lt "100" ]]; do
	sleep 0.5
done

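# Verify the file vdev grew well beyond its original sparse allocation.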
new_size=$(du "$LARGEFILE" | cut -f1)
new_size=$((new_size * 512))
log_must test $new_size -gt $((8 * 1024 * 1024))

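# Trimming should punch holes in the backing file, making it sparse again.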
log_must zpool trim $TESTPOOL

while [[ "$(trim_progress $TESTPOOL $LARGEFILE)" -lt "100" ]]; do
	sleep 0.5
done

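# After trimming, the allocated size should be back within tolerance of the
# original sparse allocation.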
new_size=$(du "$LARGEFILE" | cut -f1)
new_size=$((new_size * 512))
log_must within_tolerance $new_size $original_size $((128 * 1024 * 1024))

log_pass "Trimmed appropriate amount of disk space"