forked from riscvarchive/riscv-gcc
RISC-V: Add intrinsics testcases for SiFive Xsfvqmaccqoq/dod extensions.
Showing 9 changed files with 1,706 additions and 0 deletions.
gcc/testsuite/gcc.target/riscv/rvv/xsfvector/sf_vqmacc_2x8x2.c (213 additions, 0 deletions)
@@ -0,0 +1,213 @@
/* { dg-do compile } */
/* { dg-options "-march=rv64gcv_xsfvqmaccdod -mabi=lp64d -O3" } */
/* { dg-final { check-function-bodies "**" "" } } */

#include "riscv_vector.h"

/*
** test_sf_vqmacc_2x8x2_i32m1_vint32m1_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m1_t
test_sf_vqmacc_2x8x2_i32m1_vint32m1_t (vint32m1_t vd, vint8m1_t vs1,
                                       vint8m1_t vs2, size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_i32m1 (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_i32m2_vint32m2_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m2_t
test_sf_vqmacc_2x8x2_i32m2_vint32m2_t (vint32m2_t vd, vint8m1_t vs1,
                                       vint8m2_t vs2, size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_i32m2 (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_i32m4_vint32m4_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m4_t
test_sf_vqmacc_2x8x2_i32m4_vint32m4_t (vint32m4_t vd, vint8m1_t vs1,
                                       vint8m4_t vs2, size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_i32m4 (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_i32m8_vint32m8_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m8_t
test_sf_vqmacc_2x8x2_i32m8_vint32m8_t (vint32m8_t vd, vint8m1_t vs1,
                                       vint8m8_t vs2, size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_i32m8 (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_vint32m1_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m1_t
test_sf_vqmacc_2x8x2_vint32m1_t (vint32m1_t vd, vint8m1_t vs1, vint8m1_t vs2,
                                 size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2 (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_vint32m2_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m2_t
test_sf_vqmacc_2x8x2_vint32m2_t (vint32m2_t vd, vint8m1_t vs1, vint8m2_t vs2,
                                 size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2 (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_vint32m4_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m4_t
test_sf_vqmacc_2x8x2_vint32m4_t (vint32m4_t vd, vint8m1_t vs1, vint8m4_t vs2,
                                 size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2 (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_vint32m8_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m8_t
test_sf_vqmacc_2x8x2_vint32m8_t (vint32m8_t vd, vint8m1_t vs1, vint8m8_t vs2,
                                 size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2 (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_i32m1_tu_vint32m1_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m1_t
test_sf_vqmacc_2x8x2_i32m1_tu_vint32m1_t (vint32m1_t vd, vint8m1_t vs1,
                                          vint8m1_t vs2, size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_i32m1_tu (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_i32m2_tu_vint32m2_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m2_t
test_sf_vqmacc_2x8x2_i32m2_tu_vint32m2_t (vint32m2_t vd, vint8m1_t vs1,
                                          vint8m2_t vs2, size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_i32m2_tu (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_i32m4_tu_vint32m4_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m4_t
test_sf_vqmacc_2x8x2_i32m4_tu_vint32m4_t (vint32m4_t vd, vint8m1_t vs1,
                                          vint8m4_t vs2, size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_i32m4_tu (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_i32m8_tu_vint32m8_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m8_t
test_sf_vqmacc_2x8x2_i32m8_tu_vint32m8_t (vint32m8_t vd, vint8m1_t vs1,
                                          vint8m8_t vs2, size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_i32m8_tu (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_tu_vint32m1_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m1_t
test_sf_vqmacc_2x8x2_tu_vint32m1_t (vint32m1_t vd, vint8m1_t vs1, vint8m1_t vs2,
                                    size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_tu (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_tu_vint32m2_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m2_t
test_sf_vqmacc_2x8x2_tu_vint32m2_t (vint32m2_t vd, vint8m1_t vs1, vint8m2_t vs2,
                                    size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_tu (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_tu_vint32m4_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m4_t
test_sf_vqmacc_2x8x2_tu_vint32m4_t (vint32m4_t vd, vint8m1_t vs1, vint8m4_t vs2,
                                    size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_tu (vd, vs1, vs2, vl);
}

/*
** test_sf_vqmacc_2x8x2_tu_vint32m8_t:
** ...
** sf\.vqmacc\.2x8x2\tv[0-9]+,v[0-9]+,v[0-9]+
** ...
*/
vint32m8_t
test_sf_vqmacc_2x8x2_tu_vint32m8_t (vint32m8_t vd, vint8m1_t vs1, vint8m8_t vs2,
                                    size_t vl)
{
  return __riscv_sf_vqmacc_2x8x2_tu (vd, vs1, vs2, vl);
}
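
For context, here is a minimal usage sketch (not part of this commit) of the non-overloaded m1 intrinsic exercised above, fed from memory through the standard RVV unit-stride load/store intrinsics. The function and buffer names and the way vl is chosen are illustrative assumptions; the actual operand tiling and accumulation semantics of sf.vqmacc.2x8x2 are defined by the SiFive Xsfvqmaccdod specification and are not restated here, so treat this purely as a call-shape example.

#include <stdint.h>
#include <stddef.h>
#include "riscv_vector.h"

/* Hypothetical driver: load an int32 accumulator tile and two int8 operand
   tiles, run one sf.vqmacc.2x8x2 step, and store the accumulator back.
   Only the call shape is meaningful; how vl maps onto the 2x8x2 tiling
   follows the extension spec, not this sketch.  */
void
qmacc_2x8x2_step (int32_t *acc, const int8_t *a, const int8_t *b, size_t n)
{
  size_t vl = __riscv_vsetvl_e32m1 (n);   /* assumed element count */

  vint32m1_t vd = __riscv_vle32_v_i32m1 (acc, vl);
  vint8m1_t vs1 = __riscv_vle8_v_i8m1 (a, vl);
  vint8m1_t vs2 = __riscv_vle8_v_i8m1 (b, vl);

  vd = __riscv_sf_vqmacc_2x8x2_i32m1 (vd, vs1, vs2, vl);

  __riscv_vse32_v_i32m1 (acc, vd, vl);
}

Built with the same -march=rv64gcv_xsfvqmaccdod flag used in the dg-options above, this should emit a single sf.vqmacc.2x8x2 between the loads and the store, which is exactly what the check-function-bodies patterns in the test file assert. The _tu variants tested above differ only in their tail policy: destination elements past vl are left undisturbed rather than being treated as agnostic.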