solution for the affine and relu layers
bvahdat committed Apr 5, 2021
1 parent 7a583f7 commit 242b757
Showing 2 changed files with 66 additions and 19 deletions.
69 changes: 54 additions & 15 deletions assignment2/FullyConnectedNets.ipynb
@@ -62,7 +62,7 @@
}
},
{
"execution_count": null,
"execution_count": 1,
"cell_type": "code",
"source": [
"# As usual, a bit of setup\n",
@@ -97,7 +97,7 @@
}
},
{
"execution_count": null,
"execution_count": 2,
"cell_type": "code",
"source": [
"# Load the (preprocessed) CIFAR10 data.\n",
@@ -106,7 +106,15 @@
"for k, v in list(data.items()):\n",
" print(('%s: ' % k, v.shape))"
],
"outputs": [],
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"('X_train: ', (49000, 3, 32, 32))\n('y_train: ', (49000,))\n('X_val: ', (1000, 3, 32, 32))\n('y_val: ', (1000,))\n('X_test: ', (1000, 3, 32, 32))\n('y_test: ', (1000,))\n"
]
}
],
"metadata": {
"tags": [
"pdf-ignore"
@@ -124,7 +132,7 @@
"metadata": {}
},
{
"execution_count": null,
"execution_count": 3,
"cell_type": "code",
"source": [
"# Test the affine_forward function\n",
@@ -148,7 +156,15 @@
"print('Testing affine_forward function:')\n",
"print('difference: ', rel_error(out, correct_out))"
],
"outputs": [],
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Testing affine_forward function:\ndifference: 9.7698500479884e-10\n"
]
}
],
"metadata": {}
},
{
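The difference values printed by this and the following test cells come from the rel_error helper defined in the notebook's setup cell (collapsed in this diff). Its exact source is not shown here; a minimal sketch of the usual definition, under that assumption:

import numpy as np

def rel_error(x, y):
    # Maximum relative error between two arrays, with a small floor to avoid division by zero.
    return np.max(np.abs(x - y) / (np.maximum(1e-8, np.abs(x) + np.abs(y))))

Relative errors around 1e-9 or below, as in the output above, mean the analytic result matches the reference values to numerical precision.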
@@ -160,7 +176,7 @@
"metadata": {}
},
{
"execution_count": null,
"execution_count": 4,
"cell_type": "code",
"source": [
"# Test the affine_backward function\n",
@@ -183,7 +199,15 @@
"print('dw error: ', rel_error(dw_num, dw))\n",
"print('db error: ', rel_error(db_num, db))"
],
"outputs": [],
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Testing affine_backward function:\ndx error: 1.0908199508708189e-10\ndw error: 2.1752635504596857e-10\ndb error: 7.736978834487815e-12\n"
]
}
],
"metadata": {}
},
{
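The numeric reference gradients compared against here (dx_num, dw_num, db_num) come from a centered-difference helper, typically eval_numerical_gradient_array in cs231n/gradient_check.py, whose source is not part of this diff. A sketch of that standard approach, shown as an assumption rather than the exact assignment code:

import numpy as np

def eval_numerical_gradient_array(f, x, df, h=1e-5):
    # Estimate d(sum(f(x) * df)) / dx one element at a time with centered differences.
    grad = np.zeros_like(x)
    it = np.nditer(x, flags=['multi_index'], op_flags=['readwrite'])
    while not it.finished:
        ix = it.multi_index
        oldval = x[ix]
        x[ix] = oldval + h
        pos = f(x).copy()
        x[ix] = oldval - h
        neg = f(x).copy()
        x[ix] = oldval
        grad[ix] = np.sum((pos - neg) * df) / (2 * h)
        it.iternext()
    return grad

Analytic gradients that agree with this estimate to roughly 1e-10, as in the output above, can be considered correct.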
@@ -195,7 +219,7 @@
"metadata": {}
},
{
"execution_count": null,
"execution_count": 5,
"cell_type": "code",
"source": [
"# Test the relu_forward function\n",
@@ -211,7 +235,15 @@
"print('Testing relu_forward function:')\n",
"print('difference: ', rel_error(out, correct_out))"
],
"outputs": [],
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Testing relu_forward function:\ndifference: 4.999999798022158e-08\n"
]
}
],
"metadata": {}
},
{
@@ -223,7 +255,7 @@
"metadata": {}
},
{
"execution_count": null,
"execution_count": 6,
"cell_type": "code",
"source": [
"np.random.seed(231)\n",
@@ -239,7 +271,15 @@
"print('Testing relu_backward function:')\n",
"print('dx error: ', rel_error(dx_num, dx))"
],
"outputs": [],
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Testing relu_backward function:\ndx error: 3.2756349136310288e-12\n"
]
}
],
"metadata": {}
},
{
@@ -969,16 +1009,15 @@
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"name": "python3",
"language": "python"
"name": "python379jvsc74a57bd08bb14242992722f1f7be42bdaa541153855d10d2d8f9e79543206685130f6a1b",
"display_name": "Python 3.7.9 64-bit ('cs231n': conda)"
},
"language_info": {
"mimetype": "text/x-python",
"nbconvert_exporter": "python",
"name": "python",
"file_extension": ".py",
"version": "3.7.1",
"version": "3.7.9-final",
"pygments_lexer": "ipython3",
"codemirror_mode": {
"version": 3,
16 changes: 12 additions & 4 deletions assignment2/cs231n/layers.py
@@ -27,7 +27,9 @@ def affine_forward(x, w, b):
###########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****

pass
N = x.shape[0]
x_flattened = x.reshape(N, -1) # (N, D)
out = x_flattened.dot(w) + b # (N, D) . (D, M) + (M,) = (N, M)

# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
###########################################################################
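A quick standalone shape check of the flattening step, using hypothetical sizes chosen purely for illustration:

import numpy as np

x = np.random.randn(2, 3, 4)   # N = 2, per-example dims 3 x 4, so D = 12
w = np.random.randn(12, 5)     # (D, M)
b = np.random.randn(5)         # (M,)

out = x.reshape(x.shape[0], -1).dot(w) + b
print(out.shape)               # (2, 5), i.e. (N, M)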
@@ -60,7 +62,11 @@ def affine_backward(dout, cache):
###########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****

pass
dx = dout.dot(w.T).reshape(x.shape) # (N, M) . (M, D) = (N, D) => (N, d1, ..., d_k)
N = x.shape[0]
x_flattened = x.reshape(N, -1) # (N, D)
dw = x_flattened.T.dot(dout) # (D, N) . (N, M) = (D, M)
db = np.sum(dout, axis=0) # (N, M) summed over N => (M,)

# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
###########################################################################
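For reference, the standard chain-rule derivation behind these three lines, with \hat{X} = \mathrm{reshape}(x, (N, D)) and \mathrm{out} = \hat{X} W + b:

\frac{\partial L}{\partial \hat{X}} = \frac{\partial L}{\partial \mathrm{out}} W^{\top}, \qquad
\frac{\partial L}{\partial W} = \hat{X}^{\top} \frac{\partial L}{\partial \mathrm{out}}, \qquad
\frac{\partial L}{\partial b} = \sum_{n=1}^{N} \left( \frac{\partial L}{\partial \mathrm{out}} \right)_{n,:}

dx is then \partial L / \partial \hat{X} reshaped back to the original shape of x, which is exactly what the reshape on the first line above does.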
@@ -86,7 +92,7 @@ def relu_forward(x):
###########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****

pass
out = np.maximum(x, 0.) # (N, d1, ..., d_k)

# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
###########################################################################
@@ -113,7 +119,9 @@ def relu_backward(dout, cache):
###########################################################################
# *****START OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****

pass
dx = dout * np.where(x > 0., 1., 0.) # gradient flows only where the input was positive, shape (N, d1, ..., d_k); no flattening needed since dout and x share a shape

# *****END OF YOUR CODE (DO NOT DELETE/MODIFY THIS LINE)*****
###########################################################################
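A minimal self-contained sanity check of the ReLU backward rule on a multi-dimensional input; shapes here are hypothetical and the check is independent of the assignment's gradient_check helpers:

import numpy as np

np.random.seed(0)
x = np.random.randn(4, 3, 5)          # arbitrary (N, d1, d2) input
dout = np.random.randn(*x.shape)
h = 1e-5

dx = dout * np.where(x > 0., 1., 0.)  # analytic gradient, same rule as above

# Centered-difference estimate of d(sum(relu(x) * dout)) / dx.
dx_num = np.zeros_like(x)
for ix in np.ndindex(x.shape):
    old = x[ix]
    x[ix] = old + h
    pos = np.sum(np.maximum(x, 0.) * dout)
    x[ix] = old - h
    neg = np.sum(np.maximum(x, 0.) * dout)
    x[ix] = old
    dx_num[ix] = (pos - neg) / (2 * h)

print(np.max(np.abs(dx - dx_num)))    # ~1e-11 when no input sits exactly at the kink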
