tensor([[ 0.6222,  0.1250, -0.6566,  0.5412],
        [ 0.2936,  0.7253, -0.9345,  0.4647],
        [-0.5043,  0.5410, -0.5203, -0.3301]], grad_fn=<AddmmBackward0>)
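An output like the one above can come straight from a plain nn.Linear layer applied to a small batch. A minimal sketch of such a call, assuming in_features=7 and out_features=4 (read off the torch.Size([7]) and the four columns printed in this section) and a batch of 3 samples:

import torch
import torch.nn as nn

torch.manual_seed(0)

# assumed sizes: 7 input features, 4 output neurons, batch of 3 samples
linear = nn.Linear(in_features=7, out_features=4)
x = torch.randn(3, 7)

out = linear(x)   # shape (3, 4)
print(out)        # grad_fn=<AddmmBackward0>, since nn.Linear uses a fused addmm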
4 perceptrons ==> shape torch.Size([7])
[0.7342, -0.838, 6.8438, 2.8546]
[-0.2729, 3.1664, 3.7544, 0.3837]
[-2.8693, 3.1136, 2.5533, 0.2767]
[0.6222, 0.125, -0.6566, 0.5412]
[0.2936, 0.7253, -0.9345, 0.4647]
[-0.5043, 0.541, -0.5203, -0.3301]

###########################
## same result, nice!!!
###########################
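One way to reproduce those rows by hand is to treat each output neuron as its own perceptron: one 7-element row of linear.weight plus the matching bias, applied as a dot product per input sample. A sketch under that assumption (the setup mirrors the one above; names like weights and bias are only illustrative):

import torch
import torch.nn as nn

# assumed setup, as in the sketch above: 7 inputs, 4 perceptrons, batch of 3
linear = nn.Linear(7, 4)
x = torch.randn(3, 7)

weights = linear.weight.detach()   # (4, 7): one 7-element weight vector per perceptron
bias = linear.bias.detach()        # (4,)
print(f"{weights.shape[0]} perceptrons ==> shape {weights[0].shape}")

# each output row is four dot products (one per perceptron) plus the bias
for sample in x:
    row = [round(float(w @ sample + b), 4) for w, b in zip(weights, bias)]
    print(row)   # matches the corresponding row of linear(x), up to rounding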
tensor([[ 0.6222,  0.1250, -0.6566,  0.5412]], grad_fn=<AddBackward0>)
tensor([[ 0.2936,  0.7253, -0.9345,  0.4647]], grad_fn=<AddBackward0>)
tensor([[-0.5043,  0.5410, -0.5203, -0.3301]], grad_fn=<AddBackward0>)
tensor([[ 0.6222,  0.1250, -0.6566,  0.5412],
        [ 0.2936,  0.7253, -0.9345,  0.4647],
        [-0.5043,  0.5410, -0.5203, -0.3301]], grad_fn=<AddBackward0>)

###########################
## again same result!!!
###########################
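The same numbers also fall out of the matrix form x @ W.T + b, which is presumably what produced the grad_fn=<AddBackward0> outputs above (in contrast to the AddmmBackward0 of the fused nn.Linear call). A sketch, reusing the assumed sizes from before:

import torch
import torch.nn as nn

# assumed setup, as before: 7 inputs, 4 outputs, batch of 3
linear = nn.Linear(7, 4)
x = torch.randn(3, 7)
W, b = linear.weight, linear.bias

# one sample at a time: (1, 7) @ (7, 4) + (4,) -> (1, 4), grad_fn=<AddBackward0>
for sample in x:
    print(sample.unsqueeze(0) @ W.T + b)

# the whole batch at once: (3, 7) @ (7, 4) + (4,) -> (3, 4)
print(x @ W.T + b)
print(torch.allclose(x @ W.T + b, linear(x)))   # True: same values, different grad_fn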
tensor([[0.1164, 0.0934, 0.1123, 0.1057, 0.0991, 0.0944, 0.0966, 0.0794, 0.1028, 0.0999]], grad_fn=<SoftmaxBackward0>)
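The near-uniform probabilities (every class close to 0.1) are what an untrained 10-class head gives once its logits go through softmax; a minimal sketch (the head's input size here is made up, only the 10 classes come from the output above):

import torch
import torch.nn as nn
import torch.nn.functional as F

torch.manual_seed(0)

head = nn.Linear(7, 10)            # hypothetical input size; 10 classes as in the output above
logits = head(torch.randn(1, 7))

probs = F.softmax(logits, dim=1)   # grad_fn=<SoftmaxBackward0>
print(probs)
print(probs.sum())                 # the 10 probabilities sum to 1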