
Merge pull request #845 from wernsaar/develop

optimized sgemm for power8
tags/v0.2.19^2
wernsaar 9 years ago
parent commit 6fbca2a4a1
11 changed files with 1058 additions and 116 deletions
  1. +0    -1    Makefile.power
  2. +14   -1    benchmark/Makefile
  3. +4    -0    kernel/arm/scal.c
  4. +4    -0    kernel/arm/zscal.c
  5. +1    -1    kernel/power/KERNEL.POWER8
  6. +47   -72   kernel/power/sgemm_logic_16x8_power8.S
  7. +142  -35   kernel/power/sgemm_macros_16x8_power8.S
  8. +176  -0    kernel/power/sgemm_tcopy_16_power8.S
  9. +288  -0    kernel/power/sgemm_tcopy_logic_16_power8.S
  10. +381 -0    kernel/power/sgemm_tcopy_macros_16_power8.S
  11. +1   -6    param.h

+0 -1  Makefile.power

@@ -1,4 +1,3 @@
# CCOMMON_OPT += -DALLOC_SHM


FLAMEPATH = $(HOME)/flame/lib




+14 -1  benchmark/Makefile

@@ -261,7 +261,8 @@ endif


essl :: sgemm.essl strmm.essl dgemm.essl dtrmm.essl \
cgemm.essl ctrmm.essl zgemm.essl ztrmm.essl \
slinpack.essl clinpack.essl dlinpack.essl zlinpack.essl
slinpack.essl clinpack.essl dlinpack.essl zlinpack.essl \
scholesky.essl ccholesky.essl dcholesky.essl zcholesky.essl


veclib :: slinpack.veclib dlinpack.veclib clinpack.veclib zlinpack.veclib \
scholesky.veclib dcholesky.veclib ccholesky.veclib zcholesky.veclib \
@@ -393,6 +394,9 @@ scholesky.mkl : scholesky.$(SUFFIX)
scholesky.veclib : scholesky.$(SUFFIX)
-$(CC) $(CFLAGS) -o $(@F) $^ $(LIBVECLIB) $(CEXTRALIB) $(EXTRALIB) $(FEXTRALIB)


scholesky.essl : scholesky.$(SUFFIX)
-$(CC) $(CFLAGS) -o $(@F) $^ $(LIBESSL) $(CEXTRALIB) $(EXTRALIB) $(FEXTRALIB)

##################################### Dcholesky ###################################################


dcholesky.goto : dcholesky.$(SUFFIX) ../$(LIBNAME)
@@ -410,6 +414,9 @@ dcholesky.mkl : dcholesky.$(SUFFIX)
dcholesky.veclib : dcholesky.$(SUFFIX)
-$(CC) $(CFLAGS) -o $(@F) $^ $(LIBVECLIB) $(CEXTRALIB) $(EXTRALIB) $(FEXTRALIB)


dcholesky.essl : dcholesky.$(SUFFIX)
-$(CC) $(CFLAGS) -o $(@F) $^ $(LIBESSL) $(CEXTRALIB) $(EXTRALIB) $(FEXTRALIB)

##################################### Ccholesky ###################################################


ccholesky.goto : ccholesky.$(SUFFIX) ../$(LIBNAME)
@@ -427,6 +434,9 @@ ccholesky.mkl : ccholesky.$(SUFFIX)
ccholesky.veclib : ccholesky.$(SUFFIX)
-$(CC) $(CFLAGS) -o $(@F) $^ $(LIBVECLIB) $(CEXTRALIB) $(EXTRALIB) $(FEXTRALIB)


ccholesky.essl : ccholesky.$(SUFFIX)
-$(CC) $(CFLAGS) -o $(@F) $^ $(LIBESSL) $(CEXTRALIB) $(EXTRALIB) $(FEXTRALIB)



##################################### Zcholesky ###################################################


@@ -445,6 +455,9 @@ zcholesky.mkl : zcholesky.$(SUFFIX)
zcholesky.veclib : zcholesky.$(SUFFIX)
-$(CC) $(CFLAGS) -o $(@F) $^ $(LIBVECLIB) $(CEXTRALIB) $(EXTRALIB) $(FEXTRALIB)


zcholesky.essl : zcholesky.$(SUFFIX)
-$(CC) $(CFLAGS) -o $(@F) $^ $(LIBESSL) $(CEXTRALIB) $(EXTRALIB) $(FEXTRALIB)

##################################### Sgemm ####################################################
sgemm.goto : sgemm.$(SUFFIX) ../$(LIBNAME)
$(CC) $(CFLAGS) -o $(@F) $^ $(CEXTRALIB) $(EXTRALIB) $(FEXTRALIB) -lm


+4 -0  kernel/arm/scal.c

@@ -40,6 +40,10 @@ int CNAME(BLASLONG n, BLASLONG dummy0, BLASLONG dummy1, FLOAT da, FLOAT *x, BLAS
{
	BLASLONG i=0,j=0;

	if ( (n <= 0) || (inc_x <= 0))
		return(0);

	while(j < n)
	{




+4 -0  kernel/arm/zscal.c

@@ -43,6 +43,10 @@ int CNAME(BLASLONG n, BLASLONG dummy0, BLASLONG dummy1, FLOAT da_r,FLOAT da_i, F
	BLASLONG ip = 0;
	FLOAT temp;

	if ( (n <= 0) || (inc_x <= 0))
		return(0);

	inc_x2 = 2 * inc_x;
	for ( i=0; i<n; i++ )
	{
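The four added lines in each of scal.c and zscal.c are the same early-exit guard: a non-positive length or stride now returns immediately instead of falling into the scaling loop. A minimal C sketch of the pattern (simplified name, signature and body; the real kernels use the BLASLONG/FLOAT types and the CNAME prototype shown above):

    /* Sketch of the guard added to kernel/arm/scal.c; illustrative only. */
    static int sscal_sketch(long n, float da, float *x, long inc_x)
    {
        long i = 0, j = 0;

        if ((n <= 0) || (inc_x <= 0))   /* the new guard: nothing to scale */
            return 0;

        while (j < n)
        {
            x[i] = da * x[i];           /* simplified loop body */
            i += inc_x;
            j++;
        }
        return 0;
    }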


+1 -1  kernel/power/KERNEL.POWER8

@@ -10,7 +10,7 @@ ZTRMMKERNEL = ztrmm_kernel_8x2_power8.S


SGEMMKERNEL = sgemm_kernel_16x8_power8.S
SGEMMINCOPY = ../generic/gemm_ncopy_16.c
SGEMMITCOPY = ../generic/gemm_tcopy_16.c
SGEMMITCOPY = sgemm_tcopy_16_power8.S
SGEMMONCOPY = ../generic/gemm_ncopy_8.c
SGEMMOTCOPY = ../generic/gemm_tcopy_8.c
SGEMMINCOPYOBJ = sgemm_incopy.o


+47 -72  kernel/power/sgemm_logic_16x8_power8.S

@@ -1,38 +1,3 @@
/***************************************************************************
Copyright (c) 2013-2016, The OpenBLAS Project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
3. Neither the name of the OpenBLAS project nor the names of
its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE OPENBLAS PROJECT OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*****************************************************************************/

/**************************************************************************************
* 2016/04/02 Werner Saar (wernsaar@googlemail.com)
* BLASTEST : OK
* CTEST : OK
* TEST : OK
* LAPACK-TEST : OK
**************************************************************************************/

srawi. J, N, 3
ble SGEMM_L8_END


@@ -40,35 +5,48 @@ SGEMM_L8_BEGIN:


mr BO, B
mr BBO, BBUFFER
slwi T1, K, 3
srawi. T1, K, 2
ble SGEMM_L8_COPYB1


SGEMM_L8_COPYB4:


SGEMM_L8_COPYB:
dcbt BO, PRE
dcbtst BBO, PRE
COPYB_4x8
addic. T1, T1, -1
ble SGEMM_L8_COPYB1


lxvw4x vs3, o0, BO
lxvw4x vs11, o16, BO
xxspltw vs4, vs3, 0
xxspltw vs5, vs3, 1
xxspltw vs6, vs3, 2
xxspltw vs7, vs3, 3
xxspltw vs12, vs11, 0
xxspltw vs13, vs11, 1
xxspltw vs14, vs11, 2
xxspltw vs15, vs11, 3
stxvw4x vs4, o0, BBO
stxvw4x vs5, o16, BBO
stxvw4x vs6, o32, BBO
stxvw4x vs7, o48, BBO
addi BO, BO, 32
addi BBO, BBO, 64
stxvw4x vs12, o0, BBO
stxvw4x vs13, o16, BBO
stxvw4x vs14, o32, BBO
stxvw4x vs15, o48, BBO
addic. T1, T1, -8
addi BBO, BBO, 64
dcbtst BBO, PRE
COPYB_4x8
addic. T1, T1, -1
ble SGEMM_L8_COPYB1

dcbtst BBO, PRE
COPYB_4x8
addic. T1, T1, -1
ble SGEMM_L8_COPYB1


bge SGEMM_L8_COPYB
dcbtst BBO, PRE
COPYB_4x8
addic. T1, T1, -1

bgt SGEMM_L8_COPYB4

SGEMM_L8_COPYB1:

andi. T1, K, 3
ble SGEMM_L8_COPYB_END

SGEMM_L8_COPYB1_LOOP:


COPYB_1x8
addic. T1, T1, -1

bgt SGEMM_L8_COPYB1_LOOP

SGEMM_L8_COPYB_END:


mr CO, C
mr AO, A
@@ -93,24 +71,24 @@ SGEMM_L8x16_LOOP_START:
LOAD8x16_1
dcbt BO, PRE
KERNEL8x16_I1
dcbt BO, PRE
dcbt AO, PRE
dcbt BO, PRE
KERNEL8x16_2
dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
dcbt BO, PRE
KERNEL8x16_2


dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
dcbt BO, PRE
KERNEL8x16_2
dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
dcbt BO, PRE
KERNEL8x16_2


addic. L, L, -2
@@ -122,24 +100,24 @@ SGEMM_L8x16_LOOP:


dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
dcbt BO, PRE
KERNEL8x16_2
dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
dcbt BO, PRE
KERNEL8x16_2


dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
dcbt BO, PRE
KERNEL8x16_2
dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
dcbt BO, PRE
KERNEL8x16_2


addic. L, L, -1
@@ -149,18 +127,15 @@ SGEMM_L8x16_LOOP_END:


dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
dcbt BO, PRE
KERNEL8x16_2
dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
KERNEL8x16_2


dcbt BO, PRE
KERNEL8x16_1
dcbt BO, PRE
dcbt AO, PRE
KERNEL8x16_2
KERNEL8x16_1


+142 -35  kernel/power/sgemm_macros_16x8_power8.S

@@ -1,38 +1,3 @@
/***************************************************************************
Copyright (c) 2013-2016, The OpenBLAS Project
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
3. Neither the name of the OpenBLAS project nor the names of
its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE OPENBLAS PROJECT OR CONTRIBUTORS BE
LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*****************************************************************************/

/**************************************************************************************
* 2016/04/02 Werner Saar (wernsaar@googlemail.com)
* BLASTEST : OK
* CTEST : OK
* TEST : OK
* LAPACK-TEST : OK
**************************************************************************************/



/**********************************************************************************************
* Macros for N=8 and M=16
@@ -5886,3 +5851,145 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


.endm






.macro COPYB_4x8


lxvw4x vs5, o0, BO
xxspltw vs6, vs5, 0
xxspltw vs7, vs5, 1
xxspltw vs8, vs5, 2
xxspltw vs9, vs5, 3

lxvw4x vs10, o16, BO
xxspltw vs11, vs10, 0
xxspltw vs12, vs10, 1
xxspltw vs13, vs10, 2
xxspltw vs14, vs10, 3

lxvw4x vs15, o32, BO
xxspltw vs16, vs15, 0
xxspltw vs17, vs15, 1
xxspltw vs18, vs15, 2
xxspltw vs19, vs15, 3

lxvw4x vs20, o48, BO
xxspltw vs21, vs20, 0
xxspltw vs22, vs20, 1
xxspltw vs23, vs20, 2
xxspltw vs24, vs20, 3

addi BO, BO, 64
lxvw4x vs35, o0, BO
xxspltw vs36, vs35, 0
xxspltw vs37, vs35, 1
xxspltw vs38, vs35, 2
xxspltw vs39, vs35, 3

lxvw4x vs40, o16, BO
xxspltw vs41, vs40, 0
xxspltw vs42, vs40, 1
xxspltw vs43, vs40, 2
xxspltw vs44, vs40, 3

lxvw4x vs45, o32, BO
xxspltw vs46, vs45, 0
xxspltw vs47, vs45, 1
xxspltw vs48, vs45, 2
xxspltw vs49, vs45, 3

lxvw4x vs50, o48, BO
xxspltw vs51, vs50, 0
xxspltw vs52, vs50, 1
xxspltw vs53, vs50, 2
xxspltw vs54, vs50, 3

addi BO, BO, 64


stxvw4x vs6, o0, BBO
stxvw4x vs7, o16, BBO
stxvw4x vs8, o32, BBO
stxvw4x vs9, o48, BBO

addi BBO, BBO, 64
stxvw4x vs11, o0, BBO
stxvw4x vs12, o16, BBO
stxvw4x vs13, o32, BBO
stxvw4x vs14, o48, BBO

addi BBO, BBO, 64
stxvw4x vs16, o0, BBO
stxvw4x vs17, o16, BBO
stxvw4x vs18, o32, BBO
stxvw4x vs19, o48, BBO

addi BBO, BBO, 64
stxvw4x vs21, o0, BBO
stxvw4x vs22, o16, BBO
stxvw4x vs23, o32, BBO
stxvw4x vs24, o48, BBO

addi BBO, BBO, 64
stxvw4x vs36, o0, BBO
stxvw4x vs37, o16, BBO
stxvw4x vs38, o32, BBO
stxvw4x vs39, o48, BBO

addi BBO, BBO, 64
stxvw4x vs41, o0, BBO
stxvw4x vs42, o16, BBO
stxvw4x vs43, o32, BBO
stxvw4x vs44, o48, BBO

addi BBO, BBO, 64
stxvw4x vs46, o0, BBO
stxvw4x vs47, o16, BBO
stxvw4x vs48, o32, BBO
stxvw4x vs49, o48, BBO

addi BBO, BBO, 64
stxvw4x vs51, o0, BBO
stxvw4x vs52, o16, BBO
stxvw4x vs53, o32, BBO
stxvw4x vs54, o48, BBO

addi BBO, BBO, 64
.endm


.macro COPYB_1x8


lxvw4x vs5, o0, BO
xxspltw vs6, vs5, 0
xxspltw vs7, vs5, 1
xxspltw vs8, vs5, 2
xxspltw vs9, vs5, 3

lxvw4x vs10, o16, BO
xxspltw vs11, vs10, 0
xxspltw vs12, vs10, 1
xxspltw vs13, vs10, 2
xxspltw vs14, vs10, 3


addi BO, BO, 32

stxvw4x vs6, o0, BBO
stxvw4x vs7, o16, BBO
stxvw4x vs8, o32, BBO
stxvw4x vs9, o48, BBO

addi BBO, BBO, 64
stxvw4x vs11, o0, BBO
stxvw4x vs12, o16, BBO
stxvw4x vs13, o32, BBO
stxvw4x vs14, o48, BBO

addi BBO, BBO, 64
.endm
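Taken together with the copy loop added in sgemm_logic_16x8_power8.S above, these two macros pre-splat the packed B panel: every B value of the N=8 block is replicated across a full 4-float vector in BBUFFER, so the 16x8 kernel can consume B with plain lxvw4x loads. A rough, self-contained C sketch of that behaviour (illustrative names, not part of the kernel):

    #include <stddef.h>

    /* COPYB_1x8 sketch: expand one k-iteration (8 consecutive B values, 32 bytes
       read) into 8 splatted 4-float vectors (128 bytes written), mirroring the
       lxvw4x / xxspltw / stxvw4x sequence above. */
    static void copyb_1x8_sketch(const float **bo, float **bbo)
    {
        for (size_t j = 0; j < 8; j++)
            for (size_t v = 0; v < 4; v++)
                (*bbo)[4 * j + v] = (*bo)[j];
        *bo  += 8;    /* addi BO,  BO,  32 (bytes)            */
        *bbo += 32;   /* addi BBO, BBO, 64 (bytes), done twice */
    }

    /* Driver sketch matching the SGEMM_L8_COPYB4 / SGEMM_L8_COPYB1 loops:
       K/4 iterations handled by COPYB_4x8 (four of the above per call),
       the K mod 4 remainder by COPYB_1x8. */
    static void copy_b_sketch(long k, const float *b, float *bbuffer)
    {
        const float *bo  = b;
        float       *bbo = bbuffer;

        for (long t = k >> 2; t > 0; t--)     /* srawi. T1, K, 2 */
            for (int r = 0; r < 4; r++)       /* COPYB_4x8       */
                copyb_1x8_sketch(&bo, &bbo);

        for (long t = k & 3; t > 0; t--)      /* andi. T1, K, 3  */
            copyb_1x8_sketch(&bo, &bbo);      /* COPYB_1x8       */
    }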


+176 -0  kernel/power/sgemm_tcopy_16_power8.S

@@ -0,0 +1,176 @@
/*********************************************************************/
/* Copyright 2009, 2010 The University of Texas at Austin. */
/* All rights reserved. */
/* */
/* Redistribution and use in source and binary forms, with or */
/* without modification, are permitted provided that the following */
/* conditions are met: */
/* */
/* 1. Redistributions of source code must retain the above */
/* copyright notice, this list of conditions and the following */
/* disclaimer. */
/* */
/* 2. Redistributions in binary form must reproduce the above */
/* copyright notice, this list of conditions and the following */
/* disclaimer in the documentation and/or other materials */
/* provided with the distribution. */
/* */
/* THIS SOFTWARE IS PROVIDED BY THE UNIVERSITY OF TEXAS AT */
/* AUSTIN ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, */
/* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF */
/* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE */
/* DISCLAIMED. IN NO EVENT SHALL THE UNIVERSITY OF TEXAS AT */
/* AUSTIN OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, */
/* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES */
/* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE */
/* GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR */
/* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF */
/* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT */
/* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT */
/* OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE */
/* POSSIBILITY OF SUCH DAMAGE. */
/* */
/* The views and conclusions contained in the software and */
/* documentation are those of the authors and should not be */
/* interpreted as representing official policies, either expressed */
/* or implied, of The University of Texas at Austin. */
/*********************************************************************/

#define ASSEMBLER
#include "common.h"
#include "def_vsx.h"

#define M r3
#define N r4
#define A r5
#define LDA r6
#define B r7

#define A0 r8
#define A1 r9
#define A2 r10
#define A3 r11

#define J r12

#define PREA r14
#define PREB r15
#define BO r16
#define B8 r17
#define B4 r18
#define B2 r19
#define B1 r20
#define o4 r21
#define T2 r22
#define I r23
#define o16 r24
#define o32 r25
#define o48 r26
#define B16 r29
#define M16 r30
#define T1 r31

#define o0 0

#include "sgemm_tcopy_macros_16_power8.S"

#define STACKSIZE 384


PROLOGUE
PROFCODE

addi SP, SP, -STACKSIZE
li r0, 0

std r31, 144(SP)
std r30, 152(SP)
std r29, 160(SP)
std r28, 168(SP)
std r27, 176(SP)
std r26, 184(SP)
std r25, 192(SP)
std r24, 200(SP)
std r23, 208(SP)
std r22, 216(SP)
std r21, 224(SP)
std r20, 232(SP)
std r19, 240(SP)
std r18, 248(SP)
std r17, 256(SP)
std r16, 264(SP)
std r15, 272(SP)
std r14, 280(SP)

cmpwi cr0, M, 0
ble- L999
cmpwi cr0, N, 0
ble- L999

slwi LDA, LDA, BASE_SHIFT
slwi M16, M, 4 + BASE_SHIFT

li T1, -16
li T2, -8
li PREA, -4
li PREB, -2

and B8, N, T1
and B4, N, T2
and B2, N, PREA
and B1, N, PREB
mullw B8, B8, M
mullw B4, B4, M
mullw B2, B2, M
mullw B1, B1, M

slwi B8, B8, BASE_SHIFT
slwi B4, B4, BASE_SHIFT
slwi B2, B2, BASE_SHIFT
slwi B1, B1, BASE_SHIFT

add B8, B8, B
add B4, B4, B
add B2, B2, B
add B1, B1, B

li PREA, 384
addi PREB, M16, 128

li o4, 4
li o16, 16
li o32, 32
li o48, 48

#include "sgemm_tcopy_logic_16_power8.S"

L999:

li r3, 0

ld r31, 144(SP)
ld r30, 152(SP)
ld r29, 160(SP)
ld r28, 168(SP)
ld r27, 176(SP)
ld r26, 184(SP)
ld r25, 192(SP)
ld r24, 200(SP)
ld r23, 208(SP)
ld r22, 216(SP)
ld r21, 224(SP)
ld r20, 232(SP)
ld r19, 240(SP)
ld r18, 248(SP)
ld r17, 256(SP)
ld r16, 264(SP)
ld r15, 272(SP)
ld r14, 280(SP)

addi SP, SP, STACKSIZE

blr
EPILOGUE



+288 -0  kernel/power/sgemm_tcopy_logic_16_power8.S

@@ -0,0 +1,288 @@
srawi. I, M, 2
ble SCOPYT_L2_BEGIN


SCOPYT_L4_BEGIN:

mr A0, A
add A1, A0, LDA
add A2, A1, LDA
add A3, A2, LDA
add A, A3, LDA
mr B16, B
addi B, B, 64*SIZE

sradi. J, N, 4
ble SCOPYT_L4x8_BEGIN

mr BO, B16

SCOPYT_L4x16_LOOP:

dcbtst BO, M16
dcbtst BO, PREB
dcbt A0, PREA
dcbt A1, PREA
dcbt A2, PREA
dcbt A3, PREA
COPY_4x16

addi A0, A0, 16*SIZE
addi A1, A1, 16*SIZE
addi A2, A2, 16*SIZE
addi A3, A3, 16*SIZE
add BO, BO, M16

addic. J, J, -1
ble SCOPYT_L4x8_BEGIN


dcbtst BO, M16
dcbtst BO, PREB
COPY_4x16

addi A0, A0, 16*SIZE
addi A1, A1, 16*SIZE
addi A2, A2, 16*SIZE
addi A3, A3, 16*SIZE
add BO, BO, M16

addic. J, J, -1
bgt SCOPYT_L4x16_LOOP

SCOPYT_L4x8_BEGIN:

andi. T1, N, 8
ble SCOPYT_L4x4_BEGIN

mr BO, B8

COPY_4x8

addi A0, A0, 8*SIZE
addi A1, A1, 8*SIZE
addi A2, A2, 8*SIZE
addi A3, A3, 8*SIZE

addi B8, B8, 32*SIZE

SCOPYT_L4x4_BEGIN:

andi. T1, N, 4
ble SCOPYT_L4x2_BEGIN

mr BO, B4

COPY_4x4

addi A0, A0, 4*SIZE
addi A1, A1, 4*SIZE
addi A2, A2, 4*SIZE
addi A3, A3, 4*SIZE

addi B4, B4, 16*SIZE

SCOPYT_L4x2_BEGIN:

andi. T1, N, 2
ble SCOPYT_L4x1_BEGIN

mr BO, B2

COPY_4x2

addi A0, A0, 2*SIZE
addi A1, A1, 2*SIZE
addi A2, A2, 2*SIZE
addi A3, A3, 2*SIZE

addi B2, B2, 8*SIZE

SCOPYT_L4x1_BEGIN:

andi. T1, N, 1
ble SCOPYT_L4_END

mr BO, B1

COPY_4x1

addi A0, A0, 1*SIZE
addi A1, A1, 1*SIZE
addi A2, A2, 1*SIZE
addi A3, A3, 1*SIZE

addi B1, B1, 4*SIZE

SCOPYT_L4_END:

addic. I, I, -1
bgt SCOPYT_L4_BEGIN



SCOPYT_L2_BEGIN:

andi. T1, M, 2
ble SCOPYT_L1_BEGIN

mr A0, A
add A1, A0, LDA
add A, A1, LDA
mr B16, B
addi B, B, 32*SIZE

sradi. J, N, 4
ble SCOPYT_L2x8_BEGIN

mr BO, B16

SCOPYT_L2x16_LOOP:

COPY_2x16

addi A0, A0, 16*SIZE
addi A1, A1, 16*SIZE
add BO, BO, M16

addic. J, J, -1
bgt SCOPYT_L2x16_LOOP

SCOPYT_L2x8_BEGIN:

andi. T1, N, 8
ble SCOPYT_L2x4_BEGIN

mr BO, B8

COPY_2x8

addi A0, A0, 8*SIZE
addi A1, A1, 8*SIZE

addi B8, B8, 16*SIZE

SCOPYT_L2x4_BEGIN:

andi. T1, N, 4
ble SCOPYT_L2x2_BEGIN

mr BO, B4

COPY_2x4

addi A0, A0, 4*SIZE
addi A1, A1, 4*SIZE

addi B4, B4, 8*SIZE

SCOPYT_L2x2_BEGIN:

andi. T1, N, 2
ble SCOPYT_L2x1_BEGIN

mr BO, B2

COPY_2x2

addi A0, A0, 2*SIZE
addi A1, A1, 2*SIZE

addi B2, B2, 4*SIZE

SCOPYT_L2x1_BEGIN:

andi. T1, N, 1
ble SCOPYT_L2_END

mr BO, B1

COPY_2x1

addi A0, A0, 1*SIZE
addi A1, A1, 1*SIZE

addi B1, B1, 2*SIZE

SCOPYT_L2_END:


SCOPYT_L1_BEGIN:

andi. T1, M, 1
ble L999

mr A0, A
add A, A0, LDA
mr B16, B
addi B, B, 16*SIZE

sradi. J, N, 4
ble SCOPYT_L1x8_BEGIN

mr BO, B16

SCOPYT_L1x16_LOOP:

COPY_1x16

addi A0, A0, 16*SIZE
add BO, BO, M16

addic. J, J, -1
bgt SCOPYT_L1x16_LOOP

SCOPYT_L1x8_BEGIN:

andi. T1, N, 8
ble SCOPYT_L1x4_BEGIN

mr BO, B8

COPY_1x8

addi A0, A0, 8*SIZE

addi B8, B8, 8*SIZE

SCOPYT_L1x4_BEGIN:

andi. T1, N, 4
ble SCOPYT_L1x2_BEGIN

mr BO, B4

COPY_1x4

addi A0, A0, 4*SIZE

addi B4, B4, 4*SIZE

SCOPYT_L1x2_BEGIN:

andi. T1, N, 2
ble SCOPYT_L1x1_BEGIN

mr BO, B2

COPY_1x2

addi A0, A0, 2*SIZE

addi B2, B2, 2*SIZE

SCOPYT_L1x1_BEGIN:

andi. T1, N, 1
ble SCOPYT_L1_END

mr BO, B1

COPY_1x1

addi A0, A0, 1*SIZE

addi B1, B1, 1*SIZE

SCOPYT_L1_END:
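For reference, the panel layout this copy logic produces (together with the macros in the next file) can be sketched in C. This is a reading of the B8/B4/B2/B1 pointer setup in the prologue and the 16/8/4/2/1 column blocks above, and is believed to match the generic gemm_tcopy_16.c that the assembly replaces; illustrative only:

    /* Sketch of the packing order of sgemm_tcopy_16: A (m x n, leading
       dimension lda) is written to B as column panels of width 16, followed by
       at most one remainder panel each of width 8, 4, 2 and 1; every panel is
       stored row by row with 'width' contiguous floats per row. */
    static void pack_panel(long m, long width, const float *a, long lda, float *b)
    {
        for (long i = 0; i < m; i++)
            for (long j = 0; j < width; j++)
                b[i * width + j] = a[i * lda + j];
    }

    static void sgemm_tcopy_16_sketch(long m, long n, const float *a, long lda, float *b)
    {
        long col = 0;

        for (; col + 16 <= n; col += 16, b += 16 * m)   /* full 16-column panels     */
            pack_panel(m, 16, a + col, lda, b);          /* (SCOPYT_*x16 loops above) */

        for (long width = 8; width >= 1; width /= 2)     /* remainder panels at the   */
            if (n & width) {                             /* B8, B4, B2, B1 pointers   */
                pack_panel(m, width, a + col, lda, b);
                col += width;
                b   += width * m;
            }
    }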


+381 -0  kernel/power/sgemm_tcopy_macros_16_power8.S

@@ -0,0 +1,381 @@

/**********************************************************************************************
* Macros for N=4 and M=16
**********************************************************************************************/

.macro COPY_4x16

lxvw4x vs32, o0, A0
lxvw4x vs33, o16, A0
lxvw4x vs34, o32, A0
lxvw4x vs35, o48, A0

lxvw4x vs36, o0, A1
lxvw4x vs37, o16, A1
lxvw4x vs38, o32, A1
lxvw4x vs39, o48, A1

lxvw4x vs40, o0, A2
lxvw4x vs41, o16, A2
lxvw4x vs42, o32, A2
lxvw4x vs43, o48, A2

lxvw4x vs44, o0, A3
lxvw4x vs45, o16, A3
lxvw4x vs46, o32, A3
lxvw4x vs47, o48, A3

mr T1, BO

stxvw4x vs32, o0, T1
stxvw4x vs33, o16, T1
stxvw4x vs34, o32, T1
stxvw4x vs35, o48, T1

addi T1, T1, 64

stxvw4x vs36, o0, T1
stxvw4x vs37, o16, T1
stxvw4x vs38, o32, T1
stxvw4x vs39, o48, T1

addi T1, T1, 64

stxvw4x vs40, o0, T1
stxvw4x vs41, o16, T1
stxvw4x vs42, o32, T1
stxvw4x vs43, o48, T1

addi T1, T1, 64

stxvw4x vs44, o0, T1
stxvw4x vs45, o16, T1
stxvw4x vs46, o32, T1
stxvw4x vs47, o48, T1

.endm

/**********************************************************************************************
* Macros for N=4 and M=8
**********************************************************************************************/

.macro COPY_4x8

lxvw4x vs32, o0, A0
lxvw4x vs33, o16, A0

lxvw4x vs34, o0, A1
lxvw4x vs35, o16, A1

lxvw4x vs36, o0, A2
lxvw4x vs37, o16, A2

lxvw4x vs38, o0, A3
lxvw4x vs39, o16, A3

mr T1, BO

stxvw4x vs32, o0, T1
stxvw4x vs33, o16, T1

stxvw4x vs34, o32, T1
stxvw4x vs35, o48, T1

addi T1, T1, 64

stxvw4x vs36, o0, T1
stxvw4x vs37, o16, T1

stxvw4x vs38, o32, T1
stxvw4x vs39, o48, T1

.endm

/**********************************************************************************************
* Macros for N=4 and M=4
**********************************************************************************************/

.macro COPY_4x4

lxvw4x vs32, o0, A0

lxvw4x vs33, o0, A1

lxvw4x vs34, o0, A2

lxvw4x vs35, o0, A3

mr T1, BO

stxvw4x vs32, o0, T1

stxvw4x vs33, o16, T1

stxvw4x vs34, o32, T1

stxvw4x vs35, o48, T1

.endm

/**********************************************************************************************
* Macros for N=4 and M=2
**********************************************************************************************/

.macro COPY_4x2

lxsspx vs32, o0, A0
lxsspx vs33, o4, A0

lxsspx vs34, o0, A1
lxsspx vs35, o4, A1

lxsspx vs36, o0, A2
lxsspx vs37, o4, A2

lxsspx vs38, o0, A3
lxsspx vs39, o4, A3

mr T1, BO

stxsspx vs32, o0, T1
stxsspx vs33, o4, T1

addi T1, T1, 8

stxsspx vs34, o0, T1
stxsspx vs35, o4, T1

addi T1, T1, 8

stxsspx vs36, o0, T1
stxsspx vs37, o4, T1

addi T1, T1, 8

stxsspx vs38, o0, T1
stxsspx vs39, o4, T1

.endm

/**********************************************************************************************
* Macros for N=4 and M=1
**********************************************************************************************/

.macro COPY_4x1

lxsspx vs32, o0, A0

lxsspx vs33, o0, A1

lxsspx vs34, o0, A2

lxsspx vs35, o0, A3

mr T1, BO

stxsspx vs32, o0, T1

stxsspx vs33, o4, T1

addi T1, T1, 8

stxsspx vs34, o0, T1

stxsspx vs35, o4, T1

.endm

/**********************************************************************************************
* Macros for N=2 and M=16
**********************************************************************************************/

.macro COPY_2x16

lxvw4x vs32, o0, A0
lxvw4x vs33, o16, A0
lxvw4x vs34, o32, A0
lxvw4x vs35, o48, A0

lxvw4x vs36, o0, A1
lxvw4x vs37, o16, A1
lxvw4x vs38, o32, A1
lxvw4x vs39, o48, A1

mr T1, BO

stxvw4x vs32, o0, T1
stxvw4x vs33, o16, T1
stxvw4x vs34, o32, T1
stxvw4x vs35, o48, T1

addi T1, T1, 64

stxvw4x vs36, o0, T1
stxvw4x vs37, o16, T1
stxvw4x vs38, o32, T1
stxvw4x vs39, o48, T1

.endm

/**********************************************************************************************
* Macros for N=2 and M=8
**********************************************************************************************/

.macro COPY_2x8

lxvw4x vs32, o0, A0
lxvw4x vs33, o16, A0

lxvw4x vs34, o0, A1
lxvw4x vs35, o16, A1

mr T1, BO

stxvw4x vs32, o0, T1
stxvw4x vs33, o16, T1

stxvw4x vs34, o32, T1
stxvw4x vs35, o48, T1

.endm

/**********************************************************************************************
* Macros for N=2 and M=4
**********************************************************************************************/

.macro COPY_2x4

lxvw4x vs32, o0, A0

lxvw4x vs33, o0, A1

mr T1, BO

stxvw4x vs32, o0, T1

stxvw4x vs33, o16, T1

.endm

/**********************************************************************************************
* Macros for N=2 and M=2
**********************************************************************************************/

.macro COPY_2x2

lxsspx vs32, o0, A0
lxsspx vs33, o4, A0

lxsspx vs34, o0, A1
lxsspx vs35, o4, A1

mr T1, BO

stxsspx vs32, o0, T1
stxsspx vs33, o4, T1

addi T1, T1, 8

stxsspx vs34, o0, T1
stxsspx vs35, o4, T1

.endm

/**********************************************************************************************
* Macros for N=2 and M=1
**********************************************************************************************/

.macro COPY_2x1

lxsspx vs32, o0, A0

lxsspx vs33, o0, A1

mr T1, BO

stxsspx vs32, o0, T1

stxsspx vs33, o4, T1

.endm

/**********************************************************************************************
* Macros for N=1 and M=16
**********************************************************************************************/

.macro COPY_1x16

lxvw4x vs32, o0, A0
lxvw4x vs33, o16, A0
lxvw4x vs34, o32, A0
lxvw4x vs35, o48, A0

mr T1, BO

stxvw4x vs32, o0, T1
stxvw4x vs33, o16, T1
stxvw4x vs34, o32, T1
stxvw4x vs35, o48, T1

.endm

/**********************************************************************************************
* Macros for N=1 and M=8
**********************************************************************************************/

.macro COPY_1x8

lxvw4x vs32, o0, A0
lxvw4x vs33, o16, A0

mr T1, BO

stxvw4x vs32, o0, T1
stxvw4x vs33, o16, T1

.endm

/**********************************************************************************************
* Macros for N=1 and M=4
**********************************************************************************************/

.macro COPY_1x4

lxvw4x vs32, o0, A0

mr T1, BO

stxvw4x vs32, o0, T1

.endm

/**********************************************************************************************
* Macros for N=1 and M=2
**********************************************************************************************/

.macro COPY_1x2

lxsspx vs32, o0, A0
lxsspx vs33, o4, A0

mr T1, BO

stxsspx vs32, o0, T1
stxsspx vs33, o4, T1

.endm

/**********************************************************************************************
* Macros for N=1 and M=1
**********************************************************************************************/

.macro COPY_1x1

lxsspx vs32, o0, A0

mr T1, BO

stxsspx vs32, o0, T1

.endm


+1 -6  param.h

@@ -1964,7 +1964,7 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#define SNUMOPT 16
#define DNUMOPT 8


#define GEMM_DEFAULT_OFFSET_A 4096
#define GEMM_DEFAULT_OFFSET_A 0
#define GEMM_DEFAULT_OFFSET_B 4096
#define GEMM_DEFAULT_ALIGN 0x03fffUL


@@ -1987,11 +1987,6 @@ USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#define CGEMM_DEFAULT_Q 720
#define ZGEMM_DEFAULT_Q 720


#define SGEMM_DEFAULT_R 21600
#define DGEMM_DEFAULT_R 14400
#define CGEMM_DEFAULT_R 16200
#define ZGEMM_DEFAULT_R 21600

#define SYMV_P 8


#endif

