
Commit

codes for alexnet
LouiValley committed Sep 21, 2017
1 parent f15489c commit 309678e
Showing 2 changed files with 234 additions and 0 deletions.
6 changes: 6 additions & 0 deletions alexnet/.ipynb_checkpoints/Untitled-checkpoint.ipynb
@@ -0,0 +1,6 @@
{
"cells": [],
"metadata": {},
"nbformat": 4,
"nbformat_minor": 2
}
228 changes: 228 additions & 0 deletions alexnet/Untitled.ipynb
@@ -0,0 +1,228 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 26,
"metadata": {},
"outputs": [],
"source": [
"from datetime import datetime\n",
"import math\n",
"import time\n",
"import tensorflow as tf"
]
},
{
"cell_type": "code",
"execution_count": 27,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"batch_size =32\n",
"num_batches=100"
]
},
{
"cell_type": "code",
"execution_count": 28,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def print_activations(t):\n",
" print(t.op.name,'',t.get_shape().as_list())"
]
},
{
"cell_type": "code",
"execution_count": 29,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def inference(images):\n",
" parameters = []\n",
" with tf.name_scope('conv1') as scope:\n",
" kernel = tf.Variable(tf.truncated_normal([11,11,3,64],dtype=tf.float32,stddev=1e-1),name='weights')\n",
" conv = tf.nn.conv2d(images,kernel,[1,4,4,1],padding='SAME')\n",
" biases=tf.Variable(tf.constant(0.0,shape=[64],dtype=tf.float32),trainable=True,name='biases')\n",
" bias = tf.nn.bias_add(conv,biases)\n",
" conv1=tf.nn.relu(bias,name=scope)\n",
" print_activations(conv1)\n",
" parameters+=[kernel,biases]\n",
" lrn1=tf.nn.lrn(conv1,4,bias=1.0,alpha=0.001/9,beta=0.75,name='lrn1')\n",
" pool1=tf.nn.max_pool(lrn1,ksize=[1,3,3,1],strides=[1,2,2,1],padding='VALID',name='pool1')\n",
" print_activations(pool1)\n",
" with tf.name_scope('conv2') as scope:\n",
" kernel =tf.Variable(tf.truncated_normal([5,5,64,192],dtype=tf.float32,stddev=1e-1),name='weights')\n",
" conv = tf.nn.conv2d(pool1,kernel,[1,1,1,1],padding='SAME')\n",
" biases = tf.Variable(tf.constant(0.0,shape=[192],dtype=tf.float32),trainable=True,name='biases')\n",
" bias = tf.nn.bias_add(conv,biases)\n",
" conv2=tf.nn.relu(bias,name=scope)\n",
" parameters +=[kernel,biases]\n",
" print_activations(conv2)\n",
" lrn2=tf.nn.lrn(conv2,4,bias=1.0,alpha=0.001/9,beta=0.75,name='lrn2')\n",
" pool2=tf.nn.max_pool(lrn2,ksize=[1,3,3,1],strides=[1,2,2,1],padding='VALID',name='pool2')\n",
" print_activations(pool2)\n",
" with tf.name_scope('conv3') as scope:\n",
" kernel = tf.Variable(tf.truncated_normal([3,3,192,384],dtype=tf.float32,stddev=1e-1),name='weights')\n",
" conv = tf.nn.conv2d(pool2,kernel,[1,1,1,1],padding='SAME')\n",
" biases = tf.Variable(tf.constant(0.0,shape=[384],dtype=tf.float32),trainable=True,name='biases')\n",
" bias=tf.nn.bias_add(conv,biases)\n",
" conv3=tf.nn.relu(bias,name=scope)\n",
" parameters += [kernel,biases]\n",
" print_activations(conv3)\n",
" with tf.name_scope('conv4') as scope:\n",
" kernel = tf.Variable(tf.truncated_normal([3,3,384,256],dtype=tf.float32,stddev=1e-1),name='weights')\n",
" conv = tf.nn.conv2d(conv3,kernel,[1,1,1,1],padding='SAME')\n",
" biases = tf.Variable(tf.constant(0.0,shape=[256],dtype=tf.float32),trainable=True,name='biases')\n",
" bias=tf.nn.bias_add(conv,biases)\n",
" conv4=tf.nn.relu(bias,name=scope)\n",
" parameters += [kernel,biases]\n",
" print_activations(conv4)\n",
" with tf.name_scope('conv5') as scope:\n",
" kernel = tf.Variable(tf.truncated_normal([3,3,256,256],dtype=tf.float32,stddev=1e-1),name='weights')\n",
" conv = tf.nn.conv2d(conv4,kernel,[1,1,1,1],padding='SAME')\n",
" biases = tf.Variable(tf.constant(0.0,shape=[256],dtype=tf.float32),trainable=True,name='biases')\n",
" bias=tf.nn.bias_add(conv,biases)\n",
" conv5=tf.nn.relu(bias,name=scope)\n",
" parameters += [kernel,biases]\n",
" print_activations(conv5)\n",
" pool5 = tf.nn.max_pool(conv5,ksize=[1,3,3,1],strides=[1,2,2,1],padding='VALID',name='pool5')\n",
" print_activations(pool5)\n",
" return pool5,parameters"
]
},
{
"cell_type": "code",
"execution_count": 30,
"metadata": {},
"outputs": [],
"source": [
"def time_tensorflow_run(session,target,info_string): \n",
" num_steps_burn_in = 10\n",
" total_duration = 0.0\n",
" total_duration_squared = 0.0\n",
" for i in range(num_batches + num_steps_burn_in):\n",
" start_time = time.time()\n",
" _ = session.run(target)\n",
" duration = time.time() - start_time\n",
" if i >= num_steps_burn_in:\n",
" if not i %10:\n",
" print('%s : step %d , duration = %.3f'%(datetime.now(),i-num_steps_burn_in,duration))\n",
" total_duration += duration\n",
" total_duration_squared +=duration * duration\n",
" mn = total_duration/num_batches\n",
" vr = total_duration_squared/num_batches-mn*mn\n",
" sd= math.sqrt(vr)\n",
" print('%s: %s across %d steps, %.3f +/- %.3f sec / batch' % (datetime.now(),info_string, num_batches,mn,sd))"
]
},
{
"cell_type": "code",
"execution_count": 31,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": [
"def run_benchmark():\n",
" with tf.Graph().as_default():\n",
" image_size = 224\n",
" images= tf.Variable(tf.random_normal([batch_size,\n",
" image_size,\n",
" image_size,3],\n",
" dtype=tf.float32,\n",
" stddev=1e-1))\n",
" pool5,parameters = inference (images)\n",
" init = tf.global_variables_initializer()\n",
" sess=tf.Session()\n",
" sess.run(init)\n",
" time_tensorflow_run(sess,pool5,'Forward')\n",
" objective = tf.nn.l2_loss(pool5)\n",
" grad = tf.gradients(objective,parameters)\n",
" time_tensorflow_run(sess,grad,'Forward-backward')"
]
},
{
"cell_type": "code",
"execution_count": 32,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"(u'conv1', '', [32, 56, 56, 64])\n",
"(u'conv1/pool1', '', [32, 27, 27, 64])\n",
"(u'conv2', '', [32, 27, 27, 192])\n",
"(u'conv2/pool2', '', [32, 13, 13, 192])\n",
"(u'conv3', '', [32, 13, 13, 384])\n",
"(u'conv4', '', [32, 13, 13, 256])\n",
"(u'conv5', '', [32, 13, 13, 256])\n",
"(u'conv5/pool5', '', [32, 6, 6, 256])\n",
"2017-09-08 21:50:19.342650 : step 0 , duration = 0.045\n",
"2017-09-08 21:50:19.797985 : step 10 , duration = 0.046\n",
"2017-09-08 21:50:20.253396 : step 20 , duration = 0.046\n",
"2017-09-08 21:50:20.708530 : step 30 , duration = 0.046\n",
"2017-09-08 21:50:21.163351 : step 40 , duration = 0.046\n",
"2017-09-08 21:50:21.618411 : step 50 , duration = 0.046\n",
"2017-09-08 21:50:22.073655 : step 60 , duration = 0.045\n",
"2017-09-08 21:50:22.529132 : step 70 , duration = 0.045\n",
"2017-09-08 21:50:22.984809 : step 80 , duration = 0.045\n",
"2017-09-08 21:50:23.439855 : step 90 , duration = 0.045\n",
"2017-09-08 21:50:23.849613: Forward across 100 steps, 0.005 +/- 0.014 sec / batch\n",
"2017-09-08 21:50:25.707332 : step 0 , duration = 0.140\n",
"2017-09-08 21:50:27.112300 : step 10 , duration = 0.140\n",
"2017-09-08 21:50:28.516821 : step 20 , duration = 0.140\n",
"2017-09-08 21:50:29.921629 : step 30 , duration = 0.140\n",
"2017-09-08 21:50:31.325796 : step 40 , duration = 0.140\n",
"2017-09-08 21:50:32.731325 : step 50 , duration = 0.140\n",
"2017-09-08 21:50:34.136953 : step 60 , duration = 0.140\n",
"2017-09-08 21:50:35.541574 : step 70 , duration = 0.140\n",
"2017-09-08 21:50:36.944787 : step 80 , duration = 0.140\n",
"2017-09-08 21:50:38.348818 : step 90 , duration = 0.140\n",
"2017-09-08 21:50:39.613760: Forward-backward across 100 steps, 0.014 +/- 0.042 sec / batch\n"
]
}
],
"source": [
"run_benchmark()"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
