summaryrefslogtreecommitdiff
path: root/ltp
diff options
context:
space:
mode:
authorFilipe Manana <fdmanana@suse.com>2020-04-20 18:09:05 +0100
committerEryu Guan <guaneryu@gmail.com>2020-05-10 20:33:43 +0800
commitc8b5a737509b386790ca712b175229dc77863747 (patch)
tree227879070e7b9421ece576400c519b74736cf283 /ltp
parent15898a13d6d290740329455e6248722f5c5c6288 (diff)
fsx: fix infinite/too long loops when generating ranges for clone operations
While running generic/457 I've had fsx taking a lot of CPU time and not making any progress for over an hour. Attaching gdb to the fsx process revealed that fsx was in the loop that generates the ranges for a clone operation, in particular the loop seemed to never end because the range defined by 'offset2' kept overlapping with the range defined by 'offset'. So far this happened two times in one of my test VMs with generic/457. Fix this by breaking out of the loop after trying 30 times, like we currently do for dedupe operations, which results in logging the operation as skipped. Signed-off-by: Filipe Manana <fdmanana@suse.com> Reviewed-by: Brian Foster <bfoster@redhat.com> Signed-off-by: Eryu Guan <guaneryu@gmail.com>
Diffstat (limited to 'ltp')
-rw-r--r--ltp/fsx.c28
1 file changed, 18 insertions, 10 deletions
diff --git a/ltp/fsx.c b/ltp/fsx.c
index 56479eda..ab64b50a 100644
--- a/ltp/fsx.c
+++ b/ltp/fsx.c
@@ -2013,16 +2013,24 @@ test(void)
keep_size = random() % 2;
break;
case OP_CLONE_RANGE:
- TRIM_OFF_LEN(offset, size, file_size);
- offset = offset & ~(block_size - 1);
- size = size & ~(block_size - 1);
- do {
- offset2 = random();
- TRIM_OFF(offset2, maxfilelen);
- offset2 = offset2 & ~(block_size - 1);
- } while (range_overlaps(offset, offset2, size) ||
- offset2 + size > maxfilelen);
- break;
+ {
+ int tries = 0;
+
+ TRIM_OFF_LEN(offset, size, file_size);
+ offset = offset & ~(block_size - 1);
+ size = size & ~(block_size - 1);
+ do {
+ if (tries++ >= 30) {
+ size = 0;
+ break;
+ }
+ offset2 = random();
+ TRIM_OFF(offset2, maxfilelen);
+ offset2 = offset2 & ~(block_size - 1);
+ } while (range_overlaps(offset, offset2, size) ||
+ offset2 + size > maxfilelen);
+ break;
+ }
case OP_DEDUPE_RANGE:
{
int tries = 0;