Browse Source

LoongArch64: Fixed utest kernel_regress:skx_avx

tags/v0.3.27
gxw 2 years ago
parent
commit
8dea25ffff
2 changed files with 4 additions and 5 deletions
  1. +1
    -1
      kernel/loongarch64/rot_lasx.S
  2. +3
    -4
      kernel/loongarch64/rot_lsx.S

+ 1
- 1
kernel/loongarch64/rot_lasx.S View File

@@ -1036,7 +1036,7 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     add.d      Y, Y, INCY
     xvfmul.d   VT0, VX1, VXC
     xvfmadd.d  VT0, VX3, VXS, VT0
-    xvfmul.d   VT1, VX0, VXS
+    xvfmul.d   VT1, VX1, VXS
     xvfmsub.d  VT1, VX3, VXC, VT1
     xvstelm.d  VT0, XX, 0, 0
     add.d      XX, XX, INCX


+ 3
- 4
kernel/loongarch64/rot_lsx.S View File

@@ -1142,7 +1142,6 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 #ifdef DOUBLE
     vinsgr2vr.d  VX0, t1, 0
     vinsgr2vr.d  VX0, t2, 1
-    add.d        X, X, INCX
     ld.d         t1, Y, 0 * SIZE
     add.d        Y, Y, INCY
     ld.d         t2, Y, 0 * SIZE
@@ -1199,7 +1198,7 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     add.d     Y, Y, INCY
     VMUL      VT0, VX1, VXC
     VFMADD    VT0, VX3, VXS, VT0
-    VMUL      VT1, VX0, VXS
+    VMUL      VT1, VX1, VXS
     VMSUB     VT1, VX3, VXC, VT1
     vstelm.d  VT0, XX, 0, 0
     add.d     XX, XX, INCX
@@ -1223,7 +1222,7 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     add.d     Y, Y, INCY
     VMUL      VT0, VX1, VXC
     VFMADD    VT0, VX3, VXS, VT0
-    VMUL      VT1, VX0, VXS
+    VMUL      VT1, VX1, VXS
     VMSUB     VT1, VX3, VXC, VT1
     vstelm.d  VT0, XX, 0, 0
     add.d     XX, XX, INCX
@@ -1296,7 +1295,7 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     add.d     Y, Y, INCY
     VMUL      VT0, VX1, VXC
     VFMADD    VT0, VX3, VXS, VT0
-    VMUL      VT1, VX0, VXS
+    VMUL      VT1, VX1, VXS
     VMSUB     VT1, VX3, VXC, VT1
     vstelm.w  VT0, XX, 0, 0
     add.d     XX, XX, INCX


Loading…
Cancel
Save