about summary refs log tree commit diff stats
diff options
context:
space:
mode:
authorDavid Disseldorp <ddiss@suse.de>2022-04-13 18:44:20 +0200
committerEryu Guan <guaneryu@gmail.com>2022-04-17 20:08:23 +0800
commitdc76e4ec19a8e9da6366cf75d86e063409a385c1 (patch)
tree4ecfe4ea4ea16102bce92722ebce59e1d7aee8e1
parent73aa648ce176f159f41538765be3392d3941d8e8 (diff)
downloadxfstests-dev-dc76e4ec19a8e9da6366cf75d86e063409a385c1.tar.gz
generic/020: add and use _attr_get_max()
In preparation for taking into account the attr name length when calculating $MAX_ATTRVAL_SIZE, put the current logic in a _attr_get_max() helper function and set local $max_attrval_size / $max_attrs variables instead of using export.

Signed-off-by: David Disseldorp <ddiss@suse.de>
Reviewed-by: Eryu Guan <guaneryu@gmail.com>
Signed-off-by: Eryu Guan <guaneryu@gmail.com>
-rwxr-xr-x tests/generic/020 178
1 file changed, 95 insertions, 83 deletions
diff --git a/tests/generic/020 b/tests/generic/020
index c2c285f60d..cbd3f22739 100755
--- a/tests/generic/020
+++ b/tests/generic/020
@@ -51,81 +51,89 @@ _attr_list()
fi
}
-# set maximum total attr space based on fs type
-case "$FSTYP" in
-xfs|udf|pvfs2|9p|ceph|nfs)
- MAX_ATTRS=1000
- ;;
-ext2|ext3|ext4)
- # For 4k blocksizes, most of the attributes have an attr_name of
- # "attribute_NN" which is 12, and "value_NN" which is 8.
- # But for larger block sizes, we start having extended attributes of the
- # form "attribute_NNN" or "attribute_NNNN", and "value_NNN" and
- # "value_NNNN", which causes the round(len(..), 4) to jump up by 4
- # bytes. So round_up(len(attr_name, 4)) becomes 16 instead of 12, and
- # round_up(len(value, 4)) becomes 12 instead of 8.
- #
- # For 64K blocksize the calculation becomes
- # max_attrs = (block_size - 32) / (16 + 12 + 16)
- # or
- # max_attrs = (block_size - 32) / 44
- #
- # For 4K blocksize:-
- # max_attrs = (block_size - 32) / (16 + 8 + 12)
- # or
- # max_attrs = (block_size - 32) / 36
- #
- # Note (for 4K bs) above are exact calculations for attrs of type
- # attribute_NN with values of type value_NN.
- # With above calculations, for 4k blocksize max_attrs becomes 112.
- # This means we can have few attrs of type attribute_NNN with values of
- # type value_NNN. To avoid/handle this we need to add extra 4 bytes of
- # headroom.
- #
- # So for 4K, the calculations becomes:-
- # max_attrs = (block_size - 32) / (16 + 8 + 12 + 4)
- # or
- # max_attrs = (block_size - 32) / 40
- #
- # Assume max ~1 block of attrs
- BLOCK_SIZE=`_get_block_size $TEST_DIR`
- if [ $BLOCK_SIZE -le 4096 ]; then
- let MAX_ATTRS=$((($BLOCK_SIZE - 32) / (16 + 8 + 12 + 4)))
- else
- let MAX_ATTRS=$((($BLOCK_SIZE - 32) / (16 + 12 + 16 )))
- fi
- ;;
-*)
- # Assume max ~1 block of attrs
- BLOCK_SIZE=`_get_block_size $TEST_DIR`
- # user.attribute_XXX="value.XXX" is about 32 bytes; leave some overhead
- let MAX_ATTRS=$BLOCK_SIZE/40
-esac
-
-export MAX_ATTRS
-
-# Set max attr value size based on fs type
-case "$FSTYP" in
-xfs|udf|btrfs)
- MAX_ATTRVAL_SIZE=64
- ;;
-pvfs2)
- MAX_ATTRVAL_SIZE=8192
- ;;
-9p|ceph|nfs)
- MAX_ATTRVAL_SIZE=65536
- ;;
-bcachefs)
- MAX_ATTRVAL_SIZE=1024
- ;;
-*)
- # Assume max ~1 block of attrs
- BLOCK_SIZE=`_get_block_size $TEST_DIR`
- # leave a little overhead
- let MAX_ATTRVAL_SIZE=$BLOCK_SIZE-256
-esac
-
-export MAX_ATTRVAL_SIZE
+# set fs-specific max_attrs and max_attrval_size values. The parameter
+# @max_attrval_namelen is required for filesystems which take into account attr
+# name lengths (including namespace prefix) when determining limits.
+_attr_get_max()
+{
+ local max_attrval_namelen="$1"
+
+ # set maximum total attr space based on fs type
+ case "$FSTYP" in
+ xfs|udf|pvfs2|9p|ceph|nfs)
+ max_attrs=1000
+ ;;
+ ext2|ext3|ext4)
+ # For 4k blocksizes, most of the attributes have an attr_name of
+ # "attribute_NN" which is 12, and "value_NN" which is 8.
+ # But for larger block sizes, we start having extended
+ # attributes of the
+ # form "attribute_NNN" or "attribute_NNNN", and "value_NNN" and
+ # "value_NNNN", which causes the round(len(..), 4) to jump up by
+ # 4 bytes. So round_up(len(attr_name, 4)) becomes 16 instead of
+ # 12, and round_up(len(value, 4)) becomes 12 instead of 8.
+ #
+ # For 64K blocksize the calculation becomes
+ # max_attrs = (block_size - 32) / (16 + 12 + 16)
+ # or
+ # max_attrs = (block_size - 32) / 44
+ #
+ # For 4K blocksize:-
+ # max_attrs = (block_size - 32) / (16 + 8 + 12)
+ # or
+ # max_attrs = (block_size - 32) / 36
+ #
+ # Note (for 4K bs) above are exact calculations for attrs of
+ # type attribute_NN with values of type value_NN.
+ # With above calculations, for 4k blocksize max_attrs becomes
+ # 112.
+ # This means we can have few attrs of type attribute_NNN with
+ # values of
+ # type value_NNN. To avoid/handle this we need to add extra 4
+ # bytes of headroom.
+ #
+ # So for 4K, the calculations becomes:-
+ # max_attrs = (block_size - 32) / (16 + 8 + 12 + 4)
+ # or
+ # max_attrs = (block_size - 32) / 40
+ #
+ # Assume max ~1 block of attrs
+ BLOCK_SIZE=`_get_block_size $TEST_DIR`
+ if [ $BLOCK_SIZE -le 4096 ]; then
+ let max_attrs=$((($BLOCK_SIZE - 32) / (16 + 8 + 12 + 4)))
+ else
+ let max_attrs=$((($BLOCK_SIZE - 32) / (16 + 12 + 16 )))
+ fi
+ ;;
+ *)
+ # Assume max ~1 block of attrs
+ BLOCK_SIZE=`_get_block_size $TEST_DIR`
+ # user.attribute_XXX="value.XXX" is about 32 bytes; leave some
+ # overhead
+ let max_attrs=$BLOCK_SIZE/40
+ esac
+
+ # Set max attr value size based on fs type
+ case "$FSTYP" in
+ xfs|udf|btrfs)
+ max_attrval_size=64
+ ;;
+ pvfs2)
+ max_attrval_size=8192
+ ;;
+ 9p|ceph|nfs)
+ max_attrval_size=65536
+ ;;
+ bcachefs)
+ max_attrval_size=1024
+ ;;
+ *)
+ # Assume max ~1 block of attrs
+ BLOCK_SIZE=`_get_block_size $TEST_DIR`
+ # leave a little overhead
+ let max_attrval_size=$BLOCK_SIZE-256
+ esac
+}
# real QA test starts here
_supported_fs generic
@@ -161,10 +169,13 @@ echo "*** remove attribute"
_attr -r fish $testfile
_attr_list $testfile
+max_attrval_name="long_attr" # add 5 for "user." prefix
+_attr_get_max "$(( 5 + ${#max_attrval_name} ))"
+
echo "*** add lots of attributes"
v=0
-while [ $v -lt $MAX_ATTRS ]
+while [ $v -lt $max_attrs ]
do
echo -n "value_$v" | attr -s "attribute_$v" $testfile >>$seqres.full
if [ $? -ne 0 ]
@@ -185,11 +196,11 @@ _getfattr --absolute-names $testfile \
/^[ ]*$/ { next }
{ l++ }
END {print " *** " (l - 1) " attribute(s)" }' \
- | sed s/$MAX_ATTRS/MAX_ATTRS/
+ | sed s/$max_attrs/MAX_ATTRS/
echo "*** remove lots of attributes"
v=0
-while [ $v -lt $MAX_ATTRS ]
+while [ $v -lt $max_attrs ]
do
if ! $ATTR_PROG -r "attribute_$v" $testfile >>$seqres.full
then
@@ -203,12 +214,13 @@ done
_attr_list $testfile
echo "*** really long value"
-dd if=/dev/zero bs=1 count=$MAX_ATTRVAL_SIZE 2>/dev/null \
- | _attr -s "long_attr" $testfile >/dev/null
+dd if=/dev/zero bs=1 count=$max_attrval_size 2>/dev/null \
+ | _attr -s "$max_attrval_name" $testfile >/dev/null
-OCTAL_SIZE=`echo "obase=8; $MAX_ATTRVAL_SIZE" | bc`
-_attr -q -g "long_attr" $testfile | od -t x1 | sed -e "s/^0*$OCTAL_SIZE$/ATTRSIZE/"
-_attr -r "long_attr" $testfile >/dev/null
+OCTAL_SIZE=`echo "obase=8; $max_attrval_size" | bc`
+_attr -q -g "$max_attrval_name" $testfile | od -t x1 \
+ | sed -e "s/^0*$OCTAL_SIZE$/ATTRSIZE/"
+_attr -r "$max_attrval_name" $testfile >/dev/null
echo "*** set/get/remove really long names (expect failure)"
short="XXXXXXXXXX"