diff --git a/include/image.h b/include/image.h
index 5cd397bb1..1cc8f2a7d 100644
--- a/include/image.h
+++ b/include/image.h
@@ -127,52 +127,52 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
  */
 #define PART_SANITY_CHECK(p) \
     /* Redundant set of r2=0 */ \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
     /* Loading hdr_ok flag, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->hdr_ok)); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("mov r2, %0" ::"r"((p)->hdr_ok):"r2"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne ."); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne .-4"); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne .-8"); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne .-12"); \
     /* Redundant set of r2=0 */ \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
     /* Loading hdr_ok flag, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->sha_ok)); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("mov r2, %0" ::"r"((p)->sha_ok):"r2"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne ."); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne .-4"); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne .-8"); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne .-12"); \
     /* Redundant set of r2=0 */ \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
-    asm volatile("mov r2, #0"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
+    asm volatile("mov r2, #0":::"r2"); \
     /* Loading signature_ok flag, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->signature_ok)); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("mov r2, %0" ::"r"((p)->signature_ok):"r2"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne ."); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne .-4"); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne .-8"); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne .-12"); \
     /* Redundant set of r2=0 */ \
     asm volatile("mov r2, #0"); \
@@ -181,65 +181,65 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     asm volatile("mov r2, #0"); \
     asm volatile("mov r2, #0"); \
     asm volatile("mov r2, #0"); \
     asm volatile("mov r2, #0"); \
     /* Loading ~(signature_ok) flag, verifying */ \
-    asm volatile("mov r2, %0" ::"r"((p)->not_signature_ok)); \
-    asm volatile("cmp r2, #0xFFFFFFFE"); \
+    asm volatile("mov r2, %0" ::"r"((p)->not_signature_ok):"r2"); \
+    asm volatile("cmp r2, #0xFFFFFFFE":::"cc"); \
     asm volatile("bne ."); \
-    asm volatile("cmp r2, #0xFFFFFFFE"); \
volatile("cmp r2, #0xFFFFFFFE"); \ + asm volatile("cmp r2, #0xFFFFFFFE":::"cc"); \ asm volatile("bne .-4"); \ - asm volatile("cmp r2, #0xFFFFFFFE"); \ + asm volatile("cmp r2, #0xFFFFFFFE":::"cc"); \ asm volatile("bne .-8"); \ - asm volatile("cmp r2, #0xFFFFFFFE"); \ + asm volatile("cmp r2, #0xFFFFFFFE":::"cc"); \ asm volatile("bne .-12"); \ /* Redundant set of r2=0 */ \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ /* Loading canary value, verifying */ \ - asm volatile("mov r2, %0" ::"r"((p)->canary_FEED6789)); \ - asm volatile("mov r0, %0" ::"r"(0xFEED6789)); \ - asm volatile("cmp r2, r0"); \ + asm volatile("mov r2, %0" ::"r"((p)->canary_FEED6789):"r2"); \ + asm volatile("mov r0, %0" ::"r"(0xFEED6789):"r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne ."); \ - asm volatile("cmp r2, r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne .-4"); \ - asm volatile("cmp r2, r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne .-8"); \ - asm volatile("cmp r2, r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne .-12"); \ /* Redundant set of r2=0 */ \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ /* Loading canary value, verifying */ \ - asm volatile("mov r2, %0" ::"r"((p)->canary_FEED4567)); \ - asm volatile("mov r0, %0" ::"r"(0xFEED4567)); \ - asm volatile("cmp r2, r0"); \ + asm volatile("mov r2, %0" ::"r"((p)->canary_FEED4567):"r2"); \ + asm volatile("mov r0, %0" ::"r"(0xFEED4567):"r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne ."); \ - asm volatile("cmp r2, r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne .-4"); \ - asm volatile("cmp r2, r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne .-8"); \ - asm volatile("cmp r2, r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne .-12"); \ /* Redundant set of r2=0 */ \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ - asm volatile("mov r2, #0"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ + asm volatile("mov r2, #0":::"r2"); \ /* Loading canary value, verifying */ \ - asm volatile("mov r2, %0" ::"r"((p)->canary_FEED89AB)); \ - asm volatile("mov r0, %0" ::"r"(0xFEED89AB)); \ - asm volatile("cmp r2, r0"); \ + asm volatile("mov r2, %0" ::"r"((p)->canary_FEED89AB):"r2"); \ + asm volatile("mov r0, %0" ::"r"(0xFEED89AB):"r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne ."); \ - asm volatile("cmp r2, r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne .-4"); \ - asm volatile("cmp r2, r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne .-8"); \ - asm volatile("cmp r2, r0"); \ + asm volatile("cmp r2, r0":::"cc"); \ asm volatile("bne .-12") /** @@ -251,24 +251,24 @@ static void __attribute__((noinline)) 
 #define RSA_VERIFY_FN(ret,fn,...) \
 { \
     /* Redundant set of r0=0 */ \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r0, #0"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r0, #0":::"r0"); \
     /* Call the function */ \
     int tmp_ret = fn(__VA_ARGS__); \
     ret = -1; \
     /* Redundant set of r2=SHA_DIGEST_SIZE */ \
-    asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE)); \
-    asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE)); \
-    asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE)); \
+    asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE):"r2"); \
+    asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE):"r2"); \
+    asm volatile("mov r2, %0" ::"r"(WOLFBOOT_SHA_DIGEST_SIZE):"r2"); \
     /* Redundant check for fn() return value >= r2 */ \
-    asm volatile("cmp r0, r2"); \
+    asm volatile("cmp r0, r2":::"cc"); \
     asm volatile("blt nope"); \
-    asm volatile("cmp r0, r2"); \
+    asm volatile("cmp r0, r2":::"cc"); \
     asm volatile("blt nope"); \
-    asm volatile("cmp r0, r2"); \
+    asm volatile("cmp r0, r2":::"cc"); \
     asm volatile("blt nope"); \
-    asm volatile("cmp r0, r2"); \
+    asm volatile("cmp r0, r2":::"cc"); \
     asm volatile("blt nope"); \
     /* Return value is set here in case of success */ \
     ret = tmp_ret; \
@@ -288,29 +288,29 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     if (!img || !digest) \
         asm volatile("b hnope"); \
     /* Redundant set of r0=50*/ \
-    asm volatile("mov r0, #50"); \
-    asm volatile("mov r0, #50"); \
-    asm volatile("mov r0, #50"); \
+    asm volatile("mov r0, #50":::"r0"); \
+    asm volatile("mov r0, #50":::"r0"); \
+    asm volatile("mov r0, #50":::"r0"); \
     compare_res = XMEMCMP(digest, img->sha_hash, WOLFBOOT_SHA_DIGEST_SIZE); \
     /* Redundant checks that ensure the function actually returned 0 */ \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne hnope"); \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne hnope"); \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne hnope"); \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne hnope"); \
     /* Repeat memcmp call */ \
     compare_res = XMEMCMP(digest, img->sha_hash, WOLFBOOT_SHA_DIGEST_SIZE); \
     /* Redundant checks that ensure the function actually returned 0 */ \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne hnope"); \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne hnope"); \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne hnope"); \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne hnope"); \
     /* Confirm that the signature is OK */ \
     wolfBoot_image_confirm_signature_ok(img); \
@@ -330,32 +330,32 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
  */
 #define VERIFY_FN(img,p_res,fn,...) \
     /* Redundant set of r0=50*/ \
-    asm volatile("mov r0, #50"); \
-    asm volatile("mov r0, #50"); \
-    asm volatile("mov r0, #50"); \
+    asm volatile("mov r0, #50":::"r0"); \
+    asm volatile("mov r0, #50":::"r0"); \
+    asm volatile("mov r0, #50":::"r0"); \
     /* Call the verify function */ \
     fn(__VA_ARGS__); \
     /* Redundant checks that ensure the function actually returned 0 */ \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne nope"); \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne nope"); \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne nope"); \
-    asm volatile("cmp r0, #0"); \
+    asm volatile("cmp r0, #0":::"cc"); \
     asm volatile("bne nope"); \
     /* Check that res = 1, a few times, reading the value from memory */ \
     asm volatile("ldr r2, [%0]" ::"r"(p_res)); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne nope"); \
     asm volatile("ldr r2, [%0]" ::"r"(p_res)); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne nope"); \
     asm volatile("ldr r2, [%0]" ::"r"(p_res)); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne nope"); \
     asm volatile("ldr r2, [%0]" ::"r"(p_res)); \
-    asm volatile("cmp r2, #1"); \
+    asm volatile("cmp r2, #1":::"cc"); \
     asm volatile("bne nope"); \
     /* Confirm that the signature is OK */ \
     wolfBoot_image_confirm_signature_ok(img); \
@@ -384,118 +384,118 @@ static void __attribute__((noinline)) wolfBoot_image_confirm_signature_ok(
     /* Stash the registry values */ \
     asm volatile("push {r4, r5, r6, r7}"); \
     /* Redundant initialization with 'failure' values */ \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r4, #1"); \
-    asm volatile("mov r5, #0"); \
-    asm volatile("mov r6, #2"); \
-    asm volatile("mov r7, #0"); \
-    asm volatile("mov r0, #0"); \
-    asm volatile("mov r4, #1"); \
-    asm volatile("mov r5, #0"); \
-    asm volatile("mov r6, #2"); \
-    asm volatile("mov r7, #0"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r4, #1":::"r4"); \
+    asm volatile("mov r5, #0":::"r5"); \
+    asm volatile("mov r6, #2":::"r6"); \
+    asm volatile("mov r7, #0":::"r7"); \
+    asm volatile("mov r0, #0":::"r0"); \
+    asm volatile("mov r4, #1":::"r4"); \
+    asm volatile("mov r5, #0":::"r5"); \
+    asm volatile("mov r6, #2":::"r6"); \
+    asm volatile("mov r7, #0":::"r7"); \
     /* Read the fb_ok flag, jump to end_check \
      * if proven fb_ok == 1 */ \
-    asm volatile("mov r0, %0" ::"r"(fb_ok)); \
-    asm volatile("cmp r0, #1"); \
+    asm volatile("mov r0, %0" ::"r"(fb_ok):"r0"); \
+    asm volatile("cmp r0, #1":::"cc"); \
     asm volatile("bne do_check"); \
-    asm volatile("cmp r0, #1"); \
+    asm volatile("cmp r0, #1":::"cc"); \
     asm volatile("bne do_check"); \
-    asm volatile("cmp r0, #1"); \
+    asm volatile("cmp r0, #1":::"cc"); \
     asm volatile("bne do_check"); \
     asm volatile("b end_check"); \
     /* Do the actual version check: */ \
     asm volatile("do_check:"); \
     /* Read update versions to reg r5 and r7 */ \
-    asm volatile("mov r0, #1"); \
-    asm volatile("mov r0, #1"); \
-    asm volatile("mov r0, #1"); \
+    asm volatile("mov r0, #1":::"r0"); \
+    asm volatile("mov r0, #1":::"r0"); \
+    asm volatile("mov r0, #1":::"r0"); \
     asm volatile("bl wolfBoot_get_image_version"); \
-    asm volatile("mov r5, r0"); \
-    asm volatile("mov r5, r0"); \
-    asm volatile("mov r5, r0"); \
-    asm volatile("mov r0, #1"); \
-    asm volatile("mov r0, #1"); \
-    asm volatile("mov r0, #1"); \
+    asm volatile("mov r5, r0":::"r5"); \
asm volatile("mov r5, r0":::"r5"); \ + asm volatile("mov r5, r0":::"r5"); \ + asm volatile("mov r0, #1":::"r0"); \ + asm volatile("mov r0, #1":::"r0"); \ + asm volatile("mov r0, #1":::"r0"); \ asm volatile("bl wolfBoot_get_image_version"); \ - asm volatile("mov r7, r0"); \ - asm volatile("mov r7, r0"); \ - asm volatile("mov r7, r0"); \ + asm volatile("mov r7, r0":::"r7"); \ + asm volatile("mov r7, r0":::"r7"); \ + asm volatile("mov r7, r0":::"r7"); \ /* Compare r5 and r7, if not equal, something went very wrong, */ \ - asm volatile("cmp r5, r7"); \ + asm volatile("cmp r5, r7":::"cc"); \ asm volatile("bne ."); \ - asm volatile("cmp r5, r7"); \ + asm volatile("cmp r5, r7":::"cc"); \ asm volatile("bne .-4"); \ - asm volatile("cmp r5, r7"); \ + asm volatile("cmp r5, r7":::"cc"); \ asm volatile("bne .-8"); \ - asm volatile("cmp r5, r7"); \ + asm volatile("cmp r5, r7":::"cc"); \ asm volatile("bne .-12"); \ /* Read current versions to reg r4 and r6 */ \ - asm volatile("mov r0, #0"); \ - asm volatile("mov r0, #0"); \ - asm volatile("mov r0, #0"); \ + asm volatile("mov r0, #0":::"r0"); \ + asm volatile("mov r0, #0":::"r0"); \ + asm volatile("mov r0, #0":::"r0"); \ asm volatile("bl wolfBoot_get_image_version"); \ - asm volatile("mov r4, r0"); \ - asm volatile("mov r4, r0"); \ - asm volatile("mov r4, r0"); \ - asm volatile("mov r0, #0"); \ - asm volatile("mov r0, #0"); \ - asm volatile("mov r0, #0"); \ + asm volatile("mov r4, r0":::"r4"); \ + asm volatile("mov r4, r0":::"r4"); \ + asm volatile("mov r4, r0":::"r4"); \ + asm volatile("mov r0, #0":::"r0"); \ + asm volatile("mov r0, #0":::"r0"); \ + asm volatile("mov r0, #0":::"r0"); \ asm volatile("bl wolfBoot_get_image_version"); \ - asm volatile("mov r6, r0"); \ - asm volatile("mov r6, r0"); \ - asm volatile("mov r6, r0"); \ - asm volatile("cmp r4, r6"); \ + asm volatile("mov r6, r0":::"r6"); \ + asm volatile("mov r6, r0":::"r6"); \ + asm volatile("mov r6, r0":::"r6"); \ + asm volatile("cmp r4, r6":::"cc"); \ asm volatile("bne ."); \ - asm volatile("cmp r4, r6"); \ + asm volatile("cmp r4, r6":::"cc"); \ asm volatile("bne .-4"); \ - asm volatile("cmp r4, r6"); \ + asm volatile("cmp r4, r6":::"cc"); \ asm volatile("bne .-8"); \ - asm volatile("cmp r4, r6"); \ + asm volatile("cmp r4, r6":::"cc"); \ asm volatile("bne .-12"); \ - asm volatile("mov r0, #0"); \ - asm volatile("mov r0, #0"); \ - asm volatile("mov r0, #0"); \ + asm volatile("mov r0, #0":::"r0"); \ + asm volatile("mov r0, #0":::"r0"); \ + asm volatile("mov r0, #0":::"r0"); \ /* Compare the two versions in registries */ \ - asm volatile("cmp r4, r5"); \ + asm volatile("cmp r4, r5":::"cc"); \ asm volatile("bge ."); \ - asm volatile("cmp r6, r7"); \ + asm volatile("cmp r6, r7":::"cc"); \ asm volatile("bge .-4"); \ - asm volatile("cmp r4, r5"); \ + asm volatile("cmp r4, r5":::"cc"); \ asm volatile("bge .-8"); \ - asm volatile("cmp r6, r7"); \ + asm volatile("cmp r6, r7":::"cc"); \ asm volatile("bge .-12"); \ asm volatile("end_check:"); \ /* Restore previously saved registry values */ \ - asm volatile("pop {r4, r5, r6, r7}") + asm volatile("pop {r4, r5, r6, r7}":::"r4", "r5", "r6", "r7") #define CONFIRM_MASK_VALID(id, mask) \ - asm volatile("mov r1, %0" :: "r"(id)); \ + asm volatile("mov r1, %0" :: "r"(id):"r1"); \ /* id &= 0x0F */ \ - asm volatile("and.w r1, r1, #15"); \ - asm volatile("mov r0, %0" :: "r"(mask)); \ - asm volatile("movs r2, #1"); \ - asm volatile("lsls r2, r1"); \ - asm volatile("ands r2, r0"); \ - asm volatile("movs r0, #1"); \ - asm volatile("lsls r0, r1"); \ + asm volatile("and.w 
+    asm volatile("mov r0, %0" :: "r"(mask):"r0"); \
+    asm volatile("movs r2, #1":::"r2"); \
+    asm volatile("lsls r2, r1":::"r2","cc"); \
+    asm volatile("ands r2, r0":::"r2","cc"); \
+    asm volatile("movs r0, #1":::"r0"); \
+    asm volatile("lsls r0, r1":::"r0","cc"); \
     asm volatile("cmp r0, r2"); \
     asm volatile("bne ."); \
     asm volatile("mov r0, %0" :: "r"(mask)); \
-    asm volatile("movs r2, #1"); \
-    asm volatile("lsls r2, r1"); \
-    asm volatile("ands r2, r0"); \
-    asm volatile("movs r0, #1"); \
-    asm volatile("lsls r0, r1"); \
-    asm volatile("cmp r0, r2"); \
+    asm volatile("movs r2, #1":::"r2"); \
+    asm volatile("lsls r2, r1":::"r2", "cc"); \
+    asm volatile("ands r2, r0":::"r2", "cc"); \
+    asm volatile("movs r0, #1":::"r0"); \
+    asm volatile("lsls r0, r1":::"r0", "cc"); \
+    asm volatile("cmp r0, r2":::"cc"); \
     asm volatile("bne ."); \
-    asm volatile("mov r0, %0" :: "r"(mask)); \
-    asm volatile("movs r2, #1"); \
-    asm volatile("lsls r2, r1"); \
-    asm volatile("ands r2, r0"); \
-    asm volatile("movs r0, #1"); \
-    asm volatile("lsls r0, r1"); \
-    asm volatile("cmp r0, r2"); \
+    asm volatile("mov r0, %0" :: "r"(mask):"r0"); \
+    asm volatile("movs r2, #1":::"r2"); \
+    asm volatile("lsls r2, r1":::"r2", "cc"); \
+    asm volatile("ands r2, r0":::"r2", "cc"); \
+    asm volatile("movs r0, #1":::"r0"); \
+    asm volatile("lsls r0, r1":::"r0", "cc"); \
+    asm volatile("cmp r0, r2":::"cc"); \
     asm volatile("bne ."); \
 
 #else
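
Note: the patch above only adds clobber lists; the instruction sequences are unchanged. Without a clobber list, GCC assumes an extended-asm statement leaves every register and the condition flags untouched, so at -O1 and above it may keep a live value in r2 or r0 across one of these statements, or reuse stale flags, silently defeating the redundant fault-injection checks. A minimal standalone sketch of the syntax (hypothetical helper, not part of image.h):

#include <stdint.h>

/* Compile with e.g. arm-none-eabi-gcc -mcpu=cortex-m3 -O2.
 * Mirrors the hardened flag check used by PART_SANITY_CHECK. */
static inline void confirm_flag_set(uint32_t flag)
{
    /* One input, no outputs. The clobbers "r2" and "cc" tell the
     * compiler that r2 and the ALU flags are overwritten here, so it
     * must not cache a live value in either across this statement. */
    asm volatile("mov r2, %0\n\t"
                 "cmp r2, #1\n\t"
                 "bne ."            /* spin on mismatch: fault trap */
                 :                  /* no outputs */
                 : "r"(flag)        /* input: value to verify */
                 : "r2", "cc");     /* clobbers: register and flags */
}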