#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 10 16:13:29 2018
@author: myhaspl
"""
from mxnet import nd
from mxnet.gluon import nn

class MixMLP(nn.Block):
    def __init__(self, **kwargs):
        # Run `nn.Block`'s init method
        super(MixMLP, self).__init__(**kwargs)
        # A Sequential sub-block holding the two hidden layers.
        self.blk = nn.Sequential()
        self.blk.add(nn.Dense(3, activation='relu'),
                     nn.Dense(4, activation='relu'))
        # Output layer: 5 units, no activation.
        self.dense = nn.Dense(5)

    def forward(self, x):
        # Custom forward pass: run the sub-block, apply relu, then the output layer.
        y = nd.relu(self.blk(x))
        print(y)
        return self.dense(y)

net = MixMLP()
print(net)
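An aside that is not in the original post: at this point net is only constructed. Its parameter shapes are still unknown and its weights are uninitialized, so calling it directly would fail; net.initialize() must run first, as the second script below does. A minimal sketch of that behavior:

net = MixMLP()
x = nd.random.uniform(shape=(7, 2))
try:
    net(x)          # fails: parameters have not been initialized yet
except Exception as e:
    print(e)
net.initialize()    # allocate and initialize the parameters
net(x)              # now the forward pass runs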
1. With nn.Sequential, MXNet constructs the forward function automatically: it simply runs the added layers in order. With nn.Block you can instead define a flexible, custom forward function (see the sketch after this list).
2. nn.Sequential and nn.Dense are both subclasses of nn.Block.
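To make note 1 concrete, here is a short sketch (mine, not from the original post) of the same layer stack built purely with nn.Sequential, where MXNet supplies the forward function, followed by a check of note 2:

from mxnet.gluon import nn

# Pure-Sequential version: the generated forward simply runs the layers in order.
seq = nn.Sequential()
seq.add(nn.Dense(3, activation='relu'),
        nn.Dense(4, activation='relu'),
        nn.Dense(5))

# Note 2: both the container and the layer type derive from nn.Block.
print(issubclass(nn.Sequential, nn.Block))  # True
print(issubclass(nn.Dense, nn.Block))       # True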
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Aug 10 16:13:29 2018
@author: myhaspl
"""
from mxnet import nd
from mxnet.gluon import nn

class MixMLP(nn.Block):
    def __init__(self, **kwargs):
        # Run `nn.Block`'s init method
        super(MixMLP, self).__init__(**kwargs)
        self.blk = nn.Sequential()
        self.blk.add(nn.Dense(3, activation='relu'),
                     nn.Dense(4, activation='relu'))
        self.dense = nn.Dense(5)

    def forward(self, x):
        y = nd.relu(self.blk(x))
        print(y)
        return self.dense(y)

net = MixMLP()
print(net)
net.initialize()
x = nd.random.uniform(shape=(7, 2))
net(x)
print(net.blk[0].weight.data())
The run prints the network structure (from print(net)), then the intermediate y printed inside forward, and finally the first hidden layer's weights:

MixMLP(
  (dense): Dense(None -> 5, linear)
  (blk): Sequential(
    (0): Dense(None -> 3, Activation(relu))
    (1): Dense(None -> 4, Activation(relu))
  )
)

[[9.6452924e-05 0.0000000e+00 2.7557719e-04 0.0000000e+00]
 [6.0751504e-04 0.0000000e+00 1.7357409e-03 0.0000000e+00]
 [5.6857511e-04 0.0000000e+00 1.6244850e-03 0.0000000e+00]
 ...
 [1.7680142e-04 0.0000000e+00 3.3347241e-03 0.0000000e+00]
 [9.5664361e-04 0.0000000e+00 4.8063148e-04 0.0000000e+00]
 [1.8764728e-04 0.0000000e+00 1.7001196e-03 0.0000000e+00]]
<NDArray 7x4 @CPU(0)>

[[ 0.01617834 -0.04664135]
 [-0.0526652   0.03906714]
 [ 0.04872115  0.05109067]]
<NDArray 3x2 @CPU(0)>
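A closing note that is not in the original post: the None in Dense(None -> 3, ...) reflects Gluon's deferred shape inference. Input dimensions are only filled in on the first forward pass, so after net(x) with x of shape (7, 2), blk[0] maps 2 -> 3 and its weight is a 3x2 NDArray, as printed above. Assuming the script above has just run, the remaining shapes can be checked the same way:

# Inferred chain of shapes: 2 -> 3 -> 4 -> 5.
print(net.blk[1].weight.data().shape)  # (4, 3)
print(net.dense.weight.data().shape)   # (5, 4)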
Source: http://www.bubuko.com/infodetail-2846206.html