网络构建
网络构建通过继承nn.Cell基类,在__init__方法中进行Cell的实例化和状态管理,并在construct方法中实现Tensor操作;construct即为神经网络的前向构建过程
class Network(nn.Cell):
    """Simple MLP classifier for 28x28 images (e.g. MNIST).

    Flattens each input image and passes it through three Dense layers
    (784 -> 512 -> 512 -> 10) with ReLU activations in between.
    Fix: the pasted source had lost all indentation, making the class
    body invalid Python; structure restored here.
    """

    def __init__(self):
        super().__init__()
        # Collapse (N, 28, 28) inputs into (N, 784) feature vectors.
        self.flatten = nn.Flatten()
        # Dense stack: weights drawn from a normal init, biases start at zero.
        self.dense_relu_sequential = nn.SequentialCell(
            nn.Dense(28*28, 512, weight_init="normal", bias_init="zeros"),
            nn.ReLU(),
            nn.Dense(512, 512, weight_init="normal", bias_init="zeros"),
            nn.ReLU(),
            nn.Dense(512, 10, weight_init="normal", bias_init="zeros")
        )

    def construct(self, x):
        """Forward pass: return unnormalized class logits, shape (N, 10)."""
        x = self.flatten(x)
        logits = self.dense_relu_sequential(x)
        return logits
# Inspect the network: printing a Cell shows its layer hierarchy.
model = Network()
print(model)
直接输入数据,调用模型,得到二维的Tensor输出;再利用nn.Softmax,获得各类别的预测概率
# Dummy batch: one 28x28 image filled with ones.
X = ops.ones((1, 28, 28), mindspore.float32)
# Calling the model invokes construct(); never call construct() directly.
logits = model(X)
# Notebook-style bare expression: displays the logits value.
logits
# Softmax over axis 1 turns the (1, 10) logits into class probabilities.
pred_probab = nn.Softmax(axis=1)(logits)
# argmax along the class axis gives the predicted class index.
y_pred = pred_probab.argmax(1)
print(f"Predicted class: {y_pred}")
模型层
对上面的神经网络分解讲解
输入数据
# Dummy input: a batch of 3 images of size 28x28, filled with ones.
input_image = ops.ones((3, 28, 28), mindspore.float32)
展开nn.Flatten
# nn.Flatten keeps the batch dimension and flattens the rest:
# (3, 28, 28) -> (3, 784).
flatten = nn.Flatten()
flat_image = flatten(input_image)
全连接层nn.Dense
# Fully connected layer: nn.Dense applies a linear transform (3, 784) -> (3, 20).
# Fix: the keyword was misspelled "in_channles", which raises a TypeError.
layer1 = nn.Dense(in_channels=28*28, out_channels=20)
hidden1 = layer1(flat_image)
非线性激活nn.ReLU
# Apply ReLU elementwise to introduce non-linearity.
hidden1 = nn.ReLU()(hidden1)
有序容器nn.SequentialCell
# nn.SequentialCell chains cells; input flows through them in order:
# (3, 28, 28) -> flatten -> (3, 784) -> layer1 -> (3, 20) -> ReLU -> Dense -> (3, 10)
seq_modules = nn.SequentialCell(
flatten,
layer1,
nn.ReLU(),
nn.Dense(20,10)
)
logits = seq_modules(input_image)
预测概率nn.Softmax
# Softmax along axis 1 (the class axis) normalizes logits to probabilities.
softmax = nn.Softmax(axis = 1)
pred_probab = softmax(logits)
查看模型参数
# Print the overall structure, then each parameter's name, shape and first
# two values; parameters_and_names() yields (name, Parameter) pairs.
# Fix: the loop body had lost its indentation (IndentationError as pasted).
print(f"Model structure: {model}\n\n")
for name, param in model.parameters_and_names():
    print(f"Layer: {name}\nSize: {param.shape}\nValues : {param[:2]} \n")