
!5201 serving st time optimization

Merge pull request !5201 from hexia/st_optimization_time
tags/v1.0.0
mindspore-ci-bot, 5 years ago
commit 042f6a8d97
3 changed files with 2 additions and 53 deletions
  1. +1 -27  tests/st/serving/client_example.py
  2. +0 -17  tests/st/serving/generate_model.py
  3. +1 -9   tests/st/serving/serving.sh

+1 -27  tests/st/serving/client_example.py

@@ -24,7 +24,7 @@ import mindspore.dataset as de
 from mindspore import Tensor, context
 from mindspore import log as logger
 from tests.st.networks.models.bert.src.bert_model import BertModel
-from .generate_model import AddNet, bert_net_cfg
+from .generate_model import bert_net_cfg
 
 context.set_context(mode=context.GRAPH_MODE, device_target="Ascend")

@@ -32,32 +32,6 @@ random.seed(1)
 np.random.seed(1)
 de.config.set_seed(1)
 
-def test_add():
-    channel = grpc.insecure_channel('localhost:5500')
-    stub = ms_service_pb2_grpc.MSServiceStub(channel)
-    request = ms_service_pb2.PredictRequest()
-
-    x = request.data.add()
-    x.tensor_shape.dims.extend([4])
-    x.tensor_type = ms_service_pb2.MS_FLOAT32
-    x.data = (np.ones([4]).astype(np.float32)).tobytes()
-
-    y = request.data.add()
-    y.tensor_shape.dims.extend([4])
-    y.tensor_type = ms_service_pb2.MS_FLOAT32
-    y.data = (np.ones([4]).astype(np.float32)).tobytes()
-
-    result = stub.Predict(request)
-    result_np = np.frombuffer(result.result[0].data, dtype=np.float32).reshape(result.result[0].tensor_shape.dims)
-    print("ms client received: ")
-    print(result_np)
-
-    net = AddNet()
-    net_out = net(Tensor(np.ones([4]).astype(np.float32)), Tensor(np.ones([4]).astype(np.float32)))
-    print("add net out: ")
-    print(net_out)
-    assert np.allclose(net_out.asnumpy(), result_np, 0.001, 0.001, equal_nan=True)
-
 def test_bert():
     MAX_MESSAGE_LENGTH = 0x7fffffff
     input_ids = np.random.randint(0, 1000, size=(2, 32), dtype=np.int32)

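For reference, the request flow that the deleted test_add exercised can be reproduced standalone. The sketch below is only an illustration: MSServiceStub, PredictRequest, MS_FLOAT32 and the localhost:5500 endpoint come from the removed test, while the import lines and the predict_add wrapper are assumptions.

# Minimal sketch of a MindSpore Serving gRPC client, mirroring the removed
# test_add; assumes the generated ms_service_pb2 / ms_service_pb2_grpc stubs
# are importable and a serving process listens on localhost:5500.
import grpc
import numpy as np

import ms_service_pb2
import ms_service_pb2_grpc

def predict_add(endpoint='localhost:5500'):
    channel = grpc.insecure_channel(endpoint)
    stub = ms_service_pb2_grpc.MSServiceStub(channel)
    request = ms_service_pb2.PredictRequest()

    # Two float32 inputs of shape [4], serialized as raw bytes.
    for _ in range(2):
        tensor = request.data.add()
        tensor.tensor_shape.dims.extend([4])
        tensor.tensor_type = ms_service_pb2.MS_FLOAT32
        tensor.data = np.ones([4], dtype=np.float32).tobytes()

    # Decode the first output tensor back into a numpy array.
    result = stub.Predict(request)
    return np.frombuffer(result.result[0].data, dtype=np.float32).reshape(
        result.result[0].tensor_shape.dims)

if __name__ == '__main__':
    print('ms client received:', predict_add())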

+0 -17  tests/st/serving/generate_model.py

@@ -15,11 +15,9 @@
 
 import random
 import numpy as np
-import mindspore.nn as nn
 import mindspore.common.dtype as mstype
 import mindspore.dataset as de
 from mindspore import Tensor, context
-from mindspore.ops import operations as P
 from mindspore.train.serialization import export
 from tests.st.networks.models.bert.src.bert_model import BertModel, BertConfig

@@ -50,20 +48,6 @@ random.seed(1)
 np.random.seed(1)
 de.config.set_seed(1)
 
-class AddNet(nn.Cell):
-    def __init__(self):
-        super(AddNet, self).__init__()
-        self.add = P.TensorAdd()
-
-    def construct(self, x_, y_):
-        return self.add(x_, y_)
-
-def export_add_model():
-    net = AddNet()
-    x = np.ones(4).astype(np.float32)
-    y = np.ones(4).astype(np.float32)
-    export(net, Tensor(x), Tensor(y), file_name='add.mindir', file_format='MINDIR')
-
 def export_bert_model():
     input_ids = np.random.randint(0, 1000, size=(2, 32), dtype=np.int32)
     segment_ids = np.zeros((2, 32), dtype=np.int32)
@@ -73,5 +57,4 @@ def export_bert_model():
            file_name='bert.mindir', file_format='MINDIR')
 
 if __name__ == '__main__':
-    export_add_model()
     export_bert_model()

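The removed export helper remains a handy reference for producing a small MINDIR file. A standalone sketch, assuming a configured MindSpore environment; AddNet and the export() call mirror the deleted code, only the comments are added:

# Export a tiny element-wise add network to MINDIR, mirroring the removed
# export_add_model(); assumes MindSpore is installed and a device context is set.
import numpy as np
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
from mindspore.train.serialization import export

class AddNet(nn.Cell):
    """Element-wise addition of two input tensors."""
    def __init__(self):
        super(AddNet, self).__init__()
        self.add = P.TensorAdd()

    def construct(self, x_, y_):
        return self.add(x_, y_)

def export_add_model():
    # Trace the cell with example inputs and serialize it as add.mindir.
    net = AddNet()
    x = np.ones(4).astype(np.float32)
    y = np.ones(4).astype(np.float32)
    export(net, Tensor(x), Tensor(y), file_name='add.mindir', file_format='MINDIR')
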
+1 -9  tests/st/serving/serving.sh

@@ -41,7 +41,7 @@ prepare_model()
     python3 generate_model.py &> generate_model_serving.log
     echo "### end to generate mode for serving test ###"
     result=`ls -l | grep -E '*mindir' | grep -v ".log" | wc -l`
-    if [ ${result} -ne 2 ]
+    if [ ${result} -ne 1 ]
     then
         cat generate_model_serving.log
         echo "### generate model for serving test failed ###" && exit 1
@@ -98,13 +98,6 @@ pytest_serving()
     echo "### $1 client end ###"
 }
 
-test_add_model()
-{
-    start_service 5500 add.mindir ${ENV_DEVICE_ID}
-    pytest_serving test_add
-    clean_pid
-}
-
 test_bert_model()
 {
     start_service 5500 bert.mindir ${ENV_DEVICE_ID}
@@ -115,5 +108,4 @@ test_bert_model()
 echo "-----serving start-----"
 rm -rf ms_serving *.log *.mindir *.dat ${CURRPATH}/model ${CURRPATH}/kernel_meta
 prepare_model
-test_add_model
 test_bert_model

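The prepare_model() guard above simply counts the generated *.mindir files (now one instead of two). A rough Python equivalent of that check, assuming the models are written to the current directory; check_generated_models is a hypothetical helper, not part of the test suite:

# Count *.mindir files and fail if the expected number was not generated,
# matching the `ls | grep | wc -l` check in serving.sh.
from pathlib import Path

def check_generated_models(expected=1):
    mindir_files = sorted(Path('.').glob('*.mindir'))
    if len(mindir_files) != expected:
        raise RuntimeError('generate model for serving test failed')
    return mindir_files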