This repository has been archived by the owner on Nov 17, 2023. It is now read-only.
Replies: 1 comment
-
@mxnet-label-bot add [gluon, question] |
Beta Was this translation helpful? Give feedback.
0 replies
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
-
Why can't I initialize the weight of a symbol with its `init` attribute?
"""Reproduction script: does a symbol's `init` attribute control its weights?

Builds a SymbolBlock whose convolution weight/bias variables each carry
``init=init.Zero()``, chains it with a Dense layer whose initializers are
also Zero, then calls ``net.initialize(init.One())`` and prints every
parameter so the actual winning initializer can be inspected.
"""
import gluonbook as gb
import mxnet as mx
from mxnet import autograd, gluon, init, nd
from mxnet.gluon import loss as gloss, nn

batch_size = 256
train_iter, test_iter = gb.load_data_fashion_mnist(batch_size)

net = nn.Sequential()
# Renamed from `input`: that name shadows the Python builtin.
data_sym = mx.sym.var('data')
weight = mx.symbol.Variable(name="{}_weight".format('111'), init=init.Zero())
bias = mx.symbol.Variable(name="{}_bias".format('111'), init=init.Zero())
sym = mx.sym.Convolution(data_sym, weight=weight, bias=bias, kernel=(3, 3),
                         num_filter=10, name='111', attr={'init': init.Zero()})
net.add(nn.SymbolBlock(outputs=sym, inputs=data_sym),
        nn.Dense(10, weight_initializer=init.Zero(), bias_initializer=init.Zero()))
# NOTE(review): whether the per-variable `init` attrs survive SymbolBlock
# or are overridden by this global One() initializer is exactly what this
# script is probing — the printout below shows the outcome.
net.initialize(init.One())
# One forward pass triggers deferred shape inference / initialization.
net(nd.empty(shape=(1, 1, 10, 10)))

print(net.collect_params())
co_params = net.collect_params()
params = {}
for name in co_params:
    print('===================', name)
    param = co_params[name]  # type: gluon.Parameter
    params[name] = param.data().copy()
    print(param.data())
Beta Was this translation helpful? Give feedback.
All reactions