diff --git a/dlls/msvcrt/scanf.h b/dlls/msvcrt/scanf.h
index 2558890997..4c27c68d15 100644
--- a/dlls/msvcrt/scanf.h
+++ b/dlls/msvcrt/scanf.h
@@ -427,6 +427,19 @@ _FUNCTION_ {
             int*n = va_arg(ap, int*);
             *n = consumed - (nch!=_EOF_);
         }
+        /* This is an odd one: according to the standard,
+         * "Execution of a %n directive does not increment the
+         * assignment count returned at the completion of
+         * execution" even if it wasn't suppressed with the
+         * '*' flag. The Corrigendum to the standard seems
+         * to contradict this (comment out the assignment to
+         * suppress below if you want to implement these
+         * alternate semantics) but the Windows program I'm
+         * looking at expects the behavior I've coded here
+         * (which happens to be what glibc does as well).
+         */
+        suppress = 1;
+        st = 1;
     }
     break;
     case '[': {
diff --git a/dlls/msvcrt/tests/scanf.c b/dlls/msvcrt/tests/scanf.c
index bc121a8ec2..2482825e1f 100644
--- a/dlls/msvcrt/tests/scanf.c
+++ b/dlls/msvcrt/tests/scanf.c
@@ -117,6 +117,14 @@ static void test_sscanf( void )
     ok(hour == 18, "Field 1 incorrect: %d\n", hour);
     ok(min == 59, "Field 2 incorrect: %d\n", min);
     ok(c == 0x55, "Field 3 incorrect: 0x%02x\n", c);
+
+    /* Check %n (also whitespace in format strings and %s) */
+    buffer[0]=0; buffer1[0]=0;
+    ret = sscanf("abc   def", "%s %n%s", buffer, &number_so_far, buffer1);
+    ok(strcmp(buffer, "abc")==0, "First %%s read incorrectly: %s\n", buffer);
+    ok(strcmp(buffer1,"def")==0, "Second %%s read incorrectly: %s\n", buffer1);
+    ok(number_so_far==6, "%%n yielded wrong result: %d\n", number_so_far);
+    ok(ret == 2, "%%n shouldn't count as a conversion: %d\n", ret);
 }
 
 START_TEST(scanf)
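
For reference, here is a minimal standalone sketch (not part of the patch; the
variable names are invented for illustration) of the semantics the hunks above
implement and test: %n stores the number of characters consumed so far, but the
directive itself is not counted in the value sscanf returns. Built against
glibc, it should print the same results the Wine test expects.

#include <stdio.h>

int main(void)
{
    char first[8], second[8];
    int so_far = -1;

    /* "%7s" reads "abc" (3 chars), the space in the format skips the
     * three blanks (3 more chars), so %n should store 6. Only the two
     * string conversions count toward the return value, so ret should
     * be 2; under the alternate Corrigendum reading it would be 3. */
    int ret = sscanf("abc   def", "%7s %n%7s", first, &so_far, second);

    printf("ret=%d so_far=%d first=%s second=%s\n", ret, so_far, first, second);
    return 0;
}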