Commit

update notebook
zhuwq0 authored Nov 25, 2024
1 parent fd4577e commit 20d99d1
Showing 1 changed file with 4 additions and 14 deletions.
18 changes: 4 additions & 14 deletions docs/exercises/09_neural_networks1.ipynb
@@ -285,19 +285,14 @@
"class SimpleNN(nn.Module):\n",
" def __init__(self, input_size, output_size, hidden_size):\n",
" super(SimpleNN, self).__init__()\n",
" ## Define a linear layer with input size, hidden size\n",
" ## Define the neural network layers\n",
" self.fc1 = \n",
" ## Define an activation function using ReLU\n",
" self.relu = \n",
" ## Define a linear layer with hidden size, output size\n",
" self.fc2 =\n",
" \n",
" def forward(self, x):\n",
" ## Apply the first linear layer\n",
" ## Apply the neural network layers\n",
" x = \n",
" ## Apply the activation function\n",
" x = \n",
" ## Apply the second linear layer\n",
" x = \n",
" return x\n",
"\n",
@@ -552,19 +547,14 @@
"class SimpleNN(nn.Module):\n",
" def __init__(self, input_size, output_size, hidden_size):\n",
" super(SimpleNN, self).__init__()\n",
" ## Define a linear layer with input size, hidden size\n",
" ## Define the neural network layers\n",
" self.fc1 = \n",
" ## Define an activation function using ReLU\n",
" self.relu = \n",
" ## Define a linear layer with hidden size, output size\n",
" self.fc2 = \n",
" \n",
" def forward(self, x):\n",
" ## Apply the first linear layer\n",
" ## Apply the neural network layers\n",
" x = \n",
" ## Apply the activation function\n",
" x = \n",
" ## Apply the second linear layer\n",
" x = \n",
" return x\n",
"\n",
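For reference, below is a minimal sketch of one way the blanked-out lines in this exercise skeleton could be completed, assuming the standard torch.nn API (nn.Linear and nn.ReLU) and the argument order (input_size, output_size, hidden_size) used in the notebook. It is one possible completion, not the notebook's official solution, and the sizes in the shape check at the end are arbitrary illustration values.

import torch
import torch.nn as nn

class SimpleNN(nn.Module):
    def __init__(self, input_size, output_size, hidden_size):
        super(SimpleNN, self).__init__()
        ## Define the neural network layers
        self.fc1 = nn.Linear(input_size, hidden_size)   # input layer -> hidden layer
        self.relu = nn.ReLU()                            # element-wise non-linearity
        self.fc2 = nn.Linear(hidden_size, output_size)   # hidden layer -> output layer

    def forward(self, x):
        ## Apply the neural network layers
        x = self.fc1(x)
        x = self.relu(x)
        x = self.fc2(x)
        return x

# Quick shape check with arbitrary sizes (hypothetical values, not from the notebook)
model = SimpleNN(input_size=3, output_size=2, hidden_size=16)
out = model(torch.randn(8, 3))   # batch of 8 samples with 3 features each
print(out.shape)                 # torch.Size([8, 2])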
