-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathindex.html
1239 lines (1206 loc) · 70.6 KB
/
index.html
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- <meta http-equiv="refresh" content="5"> -->
<title>11-785 Deep Learning</title>
<link href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.5/css/bootstrap.min.css" rel="stylesheet" integrity="sha256-MfvZlkHCEqatNoGiOXveE8FIwMzZg4W85qfrfIFBfYc= sha512-dTfge/zgoMYpP7QbHy4gWMEGsbsdZeCXz7irItjcC3sPUFtf0kuFbDz/ixG7ArTxmDjLXDmezHubeNikyKGVyQ==" crossorigin="anonymous">
<link href='https://fonts.googleapis.com/css?family=Open+Sans:400,300,400italic,300italic' rel='stylesheet' type='text/css'>
<link href="main.css" rel="stylesheet" type="text/css">
</head>
<body>
<div class="container titlebar">
<div class="row">
<!--div class="titlebar-img title-col vcenter">
<img id="logo" src="./img/brain.png">
</div-->
<div class="title-col vcenter" style="color:#A80000;font-weight:bold;text-align:center;width:100%">
<div class="title"><b>11-785</b> Introduction to Deep Learning</div>
<div class="subtitle"><i>Fall 2019</i></div>
</div>
</div>
</div>
<div class=" container">
<div class="row">
<h2> Bulletin and Active Deadlines </h2>
<table class="table table-striped table-bordered">
<thead>
<tr>
<th>Assignment</th>
<th>Deadline</th>
<th>Description</th>
<th>Links</th>
</tr>
</thead>
<tbody>
<tr>
<td>Homework 3 part 1</td>
<td>November 9th, 2019</td>
<td>Recurrent Neural Networks</td>
<td>
<a href="document/homework/homework3.tar.gz">Handout (*.targ.gz)</a>
</td>
</tr>
<tr>
<td>Homework 3 part 2</td>
<td>November 9th, 2019</td>
<td>Connectionist Temporal Classification</td>
<td>
<a href="https://www.kaggle.com/c/homework-3-part-2-11-785-fall-2019/overview">Kaggle</a><br>
<a href="#">Code Submission Form</a>
</td>
</tr>
<tr>
<td>Homework 4 Part 1</td> <!--Assignment-->
<td>December 5th, 2019</td> <!--Deadline-->
<td>Word-Level Neural Language Models</td> <!--Description-->
<td>
<a href="document/homework/handout_hw4p1.tar.gz">Handout (*.targ.gz)</a>
</td>
</tr>
<tr>
<td>Homework 4 Part 2</td> <!--Assignment-->
<td>December 5th, 2019</td> <!--Deadline-->
<td>Attention Mechanisms and Memory Networks</td> <!--Description-->
<td> <!--Links-->
<a href="https://www.kaggle.com/c/11785-fall19-hw4p2/overview">Kaggle</a> <br>
</td>
</tr>
</tbody>
</table>
<p style="margin-top:75px;">
“Deep Learning” systems, typified by deep neural networks, are increasingly taking over all AI tasks, ranging from language understanding, and speech and image recognition, to machine translation, planning, and even game playing and autonomous driving. As a result, expertise in deep learning is fast changing from an esoteric desirable to a mandatory prerequisite in many advanced academic settings, and a large advantage in the industrial job market.
</p>
<p> In this course we will learn about the basics of deep neural networks, and their applications to various AI tasks. By the end of the course, it is expected that students will have significant familiarity with the subject, and be able to apply Deep Learning to a variety of tasks. They will also be positioned to understand much of the current literature on the topic and extend their knowledge through further study.</p>
<p>If you are only interested in the lectures, you can watch them on the YouTube channel listed below.</p>
<h3>Course description from student point of view</h3>
<p>The course is well rounded in terms of concepts. It helps us understand the fundamentals of Deep Learning. The course starts off gradually with MLPs and it progresses into the more complicated concepts such as attention and sequence-to-sequence models. We get a complete hands-on with PyTorch which is very important to implement Deep Learning models. As a student, you will learn the tools required for building Deep Learning models. The homeworks usually have 2 components, which are Autolab and Kaggle. The Kaggle components allow us to explore multiple architectures and understand how to fine-tune and continuously improve models. The task for all the homeworks was similar and it was interesting to learn how the same task can be solved using multiple Deep Learning approaches. Overall, at the end of this course you will be confident enough to build and tune Deep Learning models.</p>
<p><a href="http://deeplearning.cs.cmu.edu/document/extra/TAs.html"> Acknowledgments</a></p>
<h3>Your Supporters</h3>
<p><b style="color:#A80000;">Instructor:</b></p>
<ul style="margin-top:-10px;">
<li><b>Bhiksha Raj</b> : [email protected]</li>
</ul>
<p>
<b style="color:#A80000;">TAs:</b>
</p>
<ul>
<li><b>Aishwarya Reganti</b>: [email protected]</li>
<li><b>Amit Chahar</b>: [email protected]</li>
<li><b>Ethan Xuanyue Yang</b>: [email protected]</li>
<li><b>Hanna Moazam</b>: [email protected]</li>
<li><b>Hariharan Muralidharan</b>: [email protected]</li>
<li><b>Kangrui Ruan (Darren)</b>: [email protected]</li>
<li><b>Liwei Cai</b>: [email protected]</li>
<li><b>Pallavi Sharma</b>: [email protected]</li>
<li><b>Parth Shah</b>: [email protected]</li>
<li><b>Wendy Ebanks</b>: [email protected]</li>
<li><b>(Kigali) Aime Musangamfura</b>: [email protected]</li>
<li><b>(Kigali) Natnael Daba</b>: [email protected]</li>
<li><b>(Silicon Valley) Bonan Jin</b>: [email protected]</li>
<li><b>(Silicon Valley) Joseph Konan</b>: [email protected]</li>
</ul>
<h3>Pittsburgh Schedule (Eastern Time)</h3>
<p><b style="color:#A80000;">Lecture:</b> Monday and Wednesday, 9:00 a.m. - 10:20 a.m. @ DH A302</p>
<p><b style="color:#A80000;">Recitation:</b> Friday, 9.00am-10.20am @ DH A302 </p>
<b style="color:#A80000;">Office hours:</b>
<!--<table style="height: 123px; width: 540.333px;" border="0">-->
<table class="table table-striped table-bordered">
<tbody>
<tr>
<td style="width: 100px; text-align: center;"><strong>Day</strong></td>
<td style="width: 136px; text-align: center;"><strong>Time</strong></td>
<td style="width: 110px; text-align: center;"><strong>Location</strong></td>
<td style="width: 173.333px; text-align: center;"><strong>TA</strong></td>
</tr>
<tr>
<td style="width: 100px;" rowspan="3">Monday</td>
<td style="width: 136px;">1-3 pm</td>
<td style="width: 110px;">GHC 6708</td>
<td style="width: 173.333px;">Ethan Xuanyue Yang</td>
</tr>
<tr>
<td style="width: 136px;">4-5 pm</td>
<td style="width: 110px;">GHC 6708</td>
<td style="width: 173.333px;">Kangrui Ruan (Darren)</td>
</tr>
<tr>
<td style="width: 136px;">5-6 pm</td>
<td style="width: 110px;">LTI Commons</td>
<td style="width: 173.333px;">Liwei Cai</td>
</tr>
<tr>
<td style="width: 100px;" rowspan="2">Tuesday</td>
<td style="width: 136px;">12-2 pm</td>
<td style="width: 110px;">GHC 6404</td>
<td style="width: 173.333px;">Pallavi Sharma</td>
</tr>
<tr>
<td style="width: 136px;">5-6 pm</td>
<td style="width: 110px;">LTI Commons</td>
<td style="width: 173.333px;">Liwei Cai</td>
</tr>
<tr>
<td style="width: 100px;" rowspan="2">Wednesday</td>
<td style="width: 136px;">1-3 pm</td>
<td style="width: 110px;">GHC 6708</td>
<td style="width: 173.333px;">Hanna Moazam</td>
</tr>
<tr>
<td style="width: 136px;">3-4 pm</td>
<td style="width: 110px;">GHC 6404</td>
<td style="width: 173.333px;">Hariharan Muralidharan & Wendy Ebanks</td>
</tr>
<tr>
<td style="width: 100px;">Thursday</td>
<td style="width: 136px;">1-3 pm</td>
<td style="width: 110px;">LTI Commons</td>
<td style="width: 173.333px;">Aishwarya Reganti</td>
</tr>
<tr>
<td style="width: 100px;" rowspan="2">Friday</td>
<td style="width: 136px;">10.30-11.30 am</td>
<td style="width: 110px;">GHC 5417</td>
<td style="width: 173.333px;">Kangrui Ruan (Darren)</td>
</tr>
<tr>
<td style="width: 136px;">3-4 pm</td>
<td style="width: 110px;">GHC 6404</td>
<td style="width: 173.333px;">Hariharan Muralidharan & Wendy Ebanks</td>
</tr>
<tr>
<td style="width: 100px;">Saturday</td>
<td style="width: 136px;">4-6 pm</td>
<td style="width: 110px;">GHC 5417</td>
<td style="width: 173.333px;">Amit Chahar & Parth Shah</td>
</tr>
</tbody>
</table>
<h3>Kigali Schedule (Central Africa Time)</h3>
<p><b style="color:#A80000;">Lecture:</b> Monday and Wednesday, 3:00 p.m. – 4:20 p.m. @ F305 DLR</p>
<b style="color:#A80000;">Office hours:</b>
<ul>
<li><b>Aime Musangamfura</b>: Monday and Wednesday, 6:00 p.m. – 8:00 p.m. @ TA Office B209</li>
<li><b>Natnael Daba</b>: Tuesday, 6:00 p.m. – 8:30 p.m. and Thursday 6:00 p.m. - 8:00 p.m. @ TA Office B209</li>
</ul>
<h3>Silicon Valley Schedule (Pacific Time)</h3>
<b style="color:#A80000;">Office hours:</b>
<ul>
<li><b>Bonan Jin</b>: Wednesday, 1:00 p.m. – 3:00 p.m. @ B23, Room 107</li>
<li><b>Joseph Konan</b>: Monday-Sunday, 6:00 p.m. – 7:00 p.m. @ B23, Room 107</li>
</ul>
<h3>Prerequisites</h3>
<ol>
<li>We will be using one of several toolkits (the primary toolkit for recitations/instruction is PyTorch). The toolkits are largely programmed in Python. You will need to be able to program in at least one of these languages. Alternately, you will be responsible for finding and learning a toolkit that requires programming in a language you are comfortable with, </li>
<li>You will need familiarity with basic calculus (differentiation, chain rule), linear algebra and basic probability. </li>
</ol>
<h3>Units</h3>
<p>11-785 is a graduate course worth 12 units. 11-485 is an undergraduate course worth 9 units.</p>
</div>
</div>
<div class="container">
<div class="row">
<h2>Course Work</h2>
<h3>Grading</h3>
<p>Grading will be based on weekly quizzes (24%), homeworks (51%) and a course project (25%).</p>
<div>
<table class="rules-table">
<tr class="rules-table-header">
<td></td>
<td></td>
<td><b>Policy</b></td>
</tr>
<tr>
<td><b>Quizzes</b></td>
<td> </td>
<td>
There will be weekly quizzes.
<ul>
<li> There are 14 quizzes in all. We will retain your best 12 scores.
<li> Quizzes will generally (but not always) be released on Friday and due 48 hours later.
<li> Quizzes are scored by the number of correct answers.
<li>
<b>Quizzes will be worth 24% of your overall score.</b>
</li>
</ul>
</td>
</tr>
<tr> <td><b>Assignments</b></td> <td></td><td>There will be five assignments in all. Assignments will include <i>autolab</i> components, where you must complete designated tasks, and a <i>kaggle</i> component where you compete with your colleagues.
<ul>
<li> Autolab components are scored according to the number of correctly completed parts.
<li> We will post performance cutoffs for A, B, C, D and F for Kaggle competitions. These will translate to scores of 100, 80, 60, 40 and 0 respectively. Scores will be interpolated linearly between these cutoffs.
<li> Assignments will have a “preliminary submission deadline”, an “on-time submission deadline” and a “late-submission deadline.”
<ul>
<li> <b>Early submission deadline:</b> You are required to make at least one submission to Kaggle by this deadline. People who miss this deadline will automatically lose 10% of subsequent marks they may get on the homework. This is intended to encourage students to begin working on their assignments early.
<li> <b>On-time deadline:</b> People who submit by this deadline are eligible for up to five bonus points. These points will be computed by interpolation between the A cutoff and the highest performance obtained for the HW. The highest performance will get 105.
<li> <b>Late deadline:</b> People who submit after the on-time deadline can still submit until the late deadline. There is a 10% penalty applied to your final score, for submitting late.
<li> <b>Slack days:</b> Everyone gets up to 7 slack days, which they can distribute across all their homeworks. Once you use up your slack days you will fall into the late-submission category by default. Slack days are accumulated over <i>all</i> parts of <i>all</i> homeworks, except HW0, to which no slack applies.
<li> <b>Kaggle scoring:</b> We will use <i>max(max(on-time score), max(slack-day score), 0.9*max(late-submission score))</i> as your final score for the HW. If this happens to be a slack-days submission, slack days corresponding to the selected submission will be counted.
</ul>
<li> <b>Assignments carry 51% of your total score</b>. HW0 is worth 1%, while each of the subsequent four are worth 12.5%.
</ul></td></tr>
<tr><td><b>Project</b></td><td></td><td>All students are required to do a course project. <b>The project is worth 25% of your grade</b></td></tr>
<tr> <td><b>Final grade</b></td><td></td> <td>The end-of-term grade is curved. Your overall grade will depend on your performance relative to your classmates.</td></tr>
<tr> <td><b>Pass/Fail</b></td><td></td> <td>Students registered for pass/fail must complete all quizzes, HWs and the project. A grade equivalent to B- is required to pass the course.</td></tr>
<tr> <td><b>Auditing</b></td><td></td> <td>Auditors are not required to complete the course project, but must complete all quizzes and homeworks. We encourage doing a course project regardless.</td></tr>
<tr class="rules-table-bottom">
<td></td>
<td></td>
<td><b>End Policy</b></td>
</tr>
</table>
</div>
<h3>Piazza: Discussion Board</h3>
<p><a href="http://piazza.com/cmu/fall2019/1178511485">Piazza</a> is what we use for discussions. You should be automatically signed up if you're enrolled at the start of the semester. If not, please sign up.</p>
<h3>AutoLab: Software Engineering</h3>
<p><a href="https://autolab.andrew.cmu.edu/courses/11785-f19">AutoLab</a> is what we use to test your understanding of low-level concepts, such as engineering your own libraries, implementing important algorithms, and developing optimization methods from scratch.</p>
<h3>Kaggle: Data Science</h3>
<p><a href="https://kaggle.com">Kaggle</a> is where we test your understanding and ability to extend neural network architectures discussed in lecture. Similar to how AutoLab shows scores, Kaggle also shows scores, so don't feel intimidated -- we're here to help. We work on hot AI topics, like speech recognition, face recognition, and neural machine translation.</p>
<h3>YouTube: Lecture and Recitation Recordings</h3>
<p><a href="https://www.YouTube.com/channel/UC8hYZGEkI2dDO8scT8C5UQA?view_as=subscriber">YouTube</a> is where all lecture and recitation recordings will be uploaded. Links to individual lectures and recitations will also be posted below as they are uploaded. Videos marked “Old” are not current, so please be aware of the video title.</p>
<p> CMU students can also access the videos <a href="https://mediaservices.cmu.edu/media/Introduction+to+Deep+Learning/1_8eaf2eny">Live from Media Services</a> or <a href="https://mediaservices.cmu.edu/channel/Introduction%2Bto%2BDeep%2BLearning_Fall%2B2019/129472801">Recorded from Media Services.</a></p>
<h3>Books and Other Resources</h3>
<p>The course will not follow a specific book, but will draw from a number of sources. We list relevant books at the end of this page. We will also put up links to relevant reading material for each class. Students are expected to familiarize themselves with the material before the class. The readings will sometimes be arcane and difficult to understand; if so, do not worry, we will present simpler explanations in class.</p>
<p>You can also find a nice catalog of models that are current in the literature <a href="http://www.datasciencecentral.com/profiles/blogs/concise-visual-summary-of-deep-learning-architectures">here</a>. We expect that you will be in a position to interpret, if not fully understand many of the architectures on the wiki and the catalog by the end of the course.</p>
<h3>Academic Integrity</h3>
<div>
You are expected to comply with the <a href="http://www.cmu.edu/policies/documents/Cheating.html">University Policy on Academic Integrity and Plagiarism</a>.
<ul>
<li>You are allowed to talk with and work with other students on homework assignments.</li>
<li>You can share ideas but not code. You should submit your own code.</li>
</ul>
Your course instructor reserves the right to determine an appropriate penalty based on the violation of academic dishonesty that occurs. Violations of the university policy can result in severe penalties including failing this course and possible expulsion from Carnegie Mellon University. If you have any questions about this policy and any work you are doing in the course, please feel free to contact your instructor for help.
</div>
</div>
</div>
<div class="container">
<div class="row">
<h2>Tentative Schedule of Lectures</h2>
<div class="">
<table class="table table-striped table-bordered">
<thead>
<tr>
<th>Lecture</th>
<th>Date</th>
<th>Topics</th>
<th>Lecture Slides</th>
<th>Additional Readings (if any)</th>
<th>Homework & Assignments</th>
</tr>
</thead>
<tbody>
<tr>
<td>0</td> <!--Lecture Number-->
<td>-</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Course Logistics</li>
<li>Learning Objectives</li>
<li>Grading</li>
<li>Deadlines</li>
</ul></td>
<td>
<a href="document/lecture/lecture-0.pdf">Slides (*.pdf)</a><br>
<a href="https://www.YouTube.com/watch?v=LmIjgmijyiI&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=2&t=0s">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td>
<span style="color: red; ">Homework 0 Released</span>
</td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>1</td> <!--Lecture Number-->
<td>August 28</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Learning Objectives</li>
<li>History and cognitive basis of neural computation</li>
<li>Connectionist Machines</li>
<li>McCullough and Pitt model</li>
<li>Hebb’s learning rule</li>
<li>Rosenblatt’s perceptron</li>
<li>Multilayer Perceptrons</li>
</ul></td>
<td> <a href="document/lecture/lecture-1.pdf"> Slides (*.pdf) </a>
<a href="https://www.YouTube.com/watch?v=VO5vKowfMOQ&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=3&t=0s"> YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>2</td> <!--Lecture Number-->
<td>August 30</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>The neural net as a universal approximator</li>
</ul></td>
<td>
<a href="document/lecture/lecture-2.pdf">Slides (*.pdf)</a><br>
<a href="https://www.YouTube.com/watch?v=lkha188L4Gs&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=4&t=0s">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--Readings-->
<a href="document/reading/article1.pdf">Hornik et al. (*.pdf)</a><br>
<a href="document/reading/article2.pdf">Shannon (*.pdf)</a><br>
<a href="document/reading/article3.pdf">Koiran and Sontag (*.pdf)</a><br>
</td>
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>—</td> <!--Lecture Number-->
<td>September 2</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Labor Day, no class</li>
</ul></td>
<td> <!--*.ppt--> </td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>3</td> <!--Lecture Number-->
<td>September 4</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Training a neural network</li>
<li>Perceptron learning rule</li>
<li>Empirical Risk Minimization</li>
<li>Optimization by gradient descent</li>
</ul></td>
<td> <!--Lecture Slides-->
<a href="document/lecture/lecture-3.pdf">Slides (*.pdf)</a><br>
<a href="https://www.YouTube.com/watch?v=9kAQ8Em7SdM&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=5&t=0s">YouTube (url)</a>
</td>
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>—</td> <!--Lecture Number-->
<td>September 8</td> <!--Date-->
<td></td><!--Topic List-->
<td> <!--*.ppt--> </td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td>
<span style="color: blue; ">Homework 0 Due</span><br>
<span style="color: red; ">Homework 1 Released</span>
</td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>4</td> <!--Lecture Number-->
<td>September 9</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Back propagation</li>
<li>Calculus of back propagation</li>
</ul></td>
<td> <a href="document/lecture/lecture-4.pdf">Slides (*.pdf)</a>
<br> <a href="https://www.YouTube.com/watch?v=lTPg1hhd5Rs&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=6&t=0s">YouTube (url)</a> </td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>5</td> <!--Lecture Number-->
<td>September 11</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Back propagation Continued</li>
</ul></td>
<td> <!--Lecture Slides-->
<a href="document/lecture/lecture-5-6.pdf">Slides(*.pdf)</a>
<a href="https://www.YouTube.com/watch?v=cK_CK5u2p78&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=7&t=0s">YouTube (url)</a>
</td>
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>—</td> <!--Lecture Number-->
<td>September 16</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Cognitive and Brain Science</li>
<li>Neural Basis of Cognition</li>
</ul></td>
<td>
<a href="document/lecture/guest-1.pdf">Slides (*pdf)</a> <br>
<a href="https://www.YouTube.com/watch?v=VabvPpI8ZbQ&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=8&t=0s">YouTube (url)</a> </td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>6</td> <!--Lecture Number-->
<td>September 18</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Convergence in neural networks</li>
<li>Rates of convergence</li>
<li>Loss surfaces</li>
<li>Learning rates, and optimization methods</li>
<li>RMSProp, Adagrad, Momentum</li>
</ul></td>
<td>
<a href="document/lecture/lecture-5-6.pdf">Slides (*.pdf)</a>
<br> <a href="https://www.YouTube.com/watch?v=sd7qhTKIi4Y&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=9&t=0s">YouTube (url)</a>
</td>
<!--Lecture Slides-->
<td><a href="https://arxiv.org/abs/1711.05101">Decoupled Weight Decay Regularization</a> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>7</td> <!--Lecture Number-->
<td>September 23</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Stochastic gradient descent</li>
<li>Acceleration</li>
<li>Overfitting and regularization</li>
<li>Tricks of the trade:</li>
<ul>
<li>Choosing a divergence (loss) function</li>
<li>Batch normalization</li>
<li>Dropout</li>
</ul>
</ul></td>
<td><a href="document/lecture/lecture-7-8.pdf">Slides (*.pdf)</a><!--Lecture Slides--><br>
<a href="https://www.YouTube.com/watch?v=fChBkJ_UjRw&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=10&t=0s">YouTube (url)</a></td>
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>8</td> <!--Lecture Number-->
<td>—</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Stochastic gradient descent</li>
<li>Acceleration</li>
<li>Overfitting and regularization</li>
<li>Tricks of the trade:</li>
<ul>
<li>Choosing a divergence (loss) function</li>
<li>Batch normalization</li>
<li>Dropout</li>
</ul>
</ul></td>
<td><a href="document/lecture/lecture-7-8.pdf">Slides (*.pdf)</a><!--Lecture Slides--><br>
<a href="https://www.YouTube.com/watch?v=fChBkJ_UjRw&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=10&t=0s">YouTube (url)</a></td>
<td> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>9</td> <!--Lecture Number-->
<td>September 25</td> <!--Date-->
<td><ul> <!--Topic List/-->
<li>Convolutional Neural Networks (CNNs)</li>
<li>Weights as templates</li>
<li>Translation invariance</li>
<li>Training with shared parameters</li>
<li>Arriving at the convolutional model</li>
</ul></td>
<td> <a href="document/lecture/lecture-9.pdf">Slides (*.pdf)</a> <br>
<a href="https://www.YouTube.com/watch?v=2XbZ03D0Sf4&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=12&t=0s">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>10</td> <!--Lecture Number-->
<td>September 30</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Models of vision</li>
<li>Neocognitron</li>
<li>Mathematical details of CNNs</li>
</ul></td>
<td>
<a href="document/lecture/lecture-10.pdf">Slides (*.pdf)</a> <br>
<a href="https://www.YouTube.com/watch?v=Zp9_s0JAXCI&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=13&t=0s">YouTube (url) </a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td>
<span style="color: blue; ">Homework 1 Due</span><br>
<span style="color: red; ">Homework 2 Released</span>
</td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>11</td> <!--Lecture Number-->
<td>October 2</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Backpropagation in CNNs</li>
<li>Variations in the basic model</li>
<li>Alexnet, Inception, VGG</li>
</ul></td>
<td>
<a href="document/lecture/lecture-11.cnn-3.pdf">Slides (*.pdf)</a> <br>
<a href="https://www.YouTube.com/watch?v=w3ZD3VXExpU&list=PLp-0K3kfddPwz13VqV1PaMXF6V6dYdEsj&index=14&t=0s">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td>
</td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>12</td> <!--Lecture Number-->
<td>October 7</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Recurrent Neural Networks (RNNs)</li>
<li>Modeling series</li>
<li>Back propagation through time</li>
<li>Bidirectional RNNs</li>
</ul></td>
<td> <a href="document/lecture/lec11.recurrent.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=YYNNTrSROa4">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>13</td> <!--Lecture Number-->
<td>October 9</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Stability</li>
<li>Exploding/vanishing gradients</li>
<li>Long Short-Term Memory Units (LSTMs) and variants</li>
<li>Resnets</li>
</ul></td>
<td> <a href="document/lecture/lec13.recurrent2.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=jaw5W0bCgUQ&t=1s">YouTube (url)</a>
</td>
<td> <a href="document/reading/How to compute a derivative.pdf">How to compute a derivative</a><br>
</td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>14</td> <!--Lecture Number-->
<td>October 14</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Loss functions for recurrent networks</li>
<li>Sequence prediction</li>
</ul></td>
<td> <a href="document/lecture/lec13.recurrent.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=ItYyu3KQvOQ">YouTube (url)</a>
</td><!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>15</td> <!--Lecture Number-->
<td>October 16</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Sequence To Sequence Methods</li>
<li>Connectionist Temporal Classification (CTC)</li>
</ul></td>
<td> <a href="document/lecture/lec14.recurrent.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=h7lnKCX2ve0">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf-->
<a href="document/reading/XLNet.pdf">XLNet (*.pdf)</a><br>
<a href="document/reading/Earnie2.0.pdf">ERNIE 2.0 (*.pdf)</a>
</td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>16</td> <!--Lecture Number-->
<td>October 21</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Sequence-to-sequence models Attention models examples from speech and language</li>
</ul></td>
<td><a href="document/lecture/lec14.recurrent.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=JAD1Uek6LuM">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <a href="document/reading/Improving_transformed-based_speech_recognition_using_unsupervised_pretraining.pdf">Improving Transformed-Based Speech Recognition Using Unsupervised Pretraining (*.pdf)</a> </td> <!--Readings-->
<td>
<span style="color: blue; ">Homework 2 Due (on 20th)</span>
</td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>17</td> <!--Lecture Number-->
<td>October 23</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Cascade-Correlation</li>
<li>Faster Learning Variations</li>
</ul></td>
<td><a href="document/lecture/Cascor_Deep_Learning_v5.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=k2mPEUZH978&feature=youtu.be">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td>
</td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>18</td> <!--Lecture Number-->
<td>October 25</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Sequence To Sequence Methods</li>
<li>Attention Models</li>
</ul></td>
<td> <a href="document/lecture/lec15.attention.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=HWdzSQ_ccdM">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>19</td> <!--Lecture Number-->
<td>October 28</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Representations and Autoencoders</li>
</ul></td>
<td> <a href="document/lecture/lec16.representations.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=gs8vT4pdbUs&feature=youtu.be">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>20</td> <!--Lecture Number-->
<td>October 30</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Hopfield Nets and Auto Associators</li>
</ul></td>
<td> <!--*.ppt-->
<a href="document/lecture/lec17.hopfield.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=3Cp_pjPRmt8&feature=youtu.be">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>21</td> <!--Lecture Number-->
<td>November 4</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Hopfield Nets and Boltzmann Machines (Part 1)</li>
</ul></td>
<td> <a href="document/lecture/lec18.hopfield2.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=ZnB8MMjg1mA&feature=youtu.be">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>22</td> <!--Lecture Number-->
<td>November 6</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Hopfield Nets and Boltzmann Machines (Part 2)</li>
</ul></td>
<td> <a href="document/lecture/lec19.BM.pdf">Slides (*.pdf)</a> <br>
<a href="https://www.youtube.com/watch?v=R2sFABfmlXQ&feature=youtu.be">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>23</td> <!--Lecture Number-->
<td>November 11</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Generative Adversarial Networks (GANs) (Part 1)</li>
<!-- <li></li> -->
</ul></td>
<td> <a href="document/lecture/GANs__1_.pdf">Slides (*.pdf)</a><br>
<a href="https://www.youtube.com/watch?v=lXliALnsNzQ&feature=youtu.be">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>24</td> <!--Lecture Number-->
<td>November 13</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Generative Adversarial Networks (GANs) (Part 2)</li>
<!--<li> </li> -->
</ul></td>
<td> <a href="document/lecture/GANs__2_.pdf">Slides (*.pdf)</a> <br>
<a href="https://www.youtube.com/watch?v=AeETawQKvPo&feature=youtu.be">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>25</td> <!--Lecture Number-->
<td>November 18</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Reinforcement Learning 1</li>
<!-- <li>Deep Boltzman Machines</li> -->
</ul></td>
<td> <a href="document/lecture/RL_1,2.pdf">Slides (*.pdf)</a> <br>
<a href="https://www.youtube.com/watch?v=SegeqjIwM7Y&feature=youtu.be">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>26</td> <!--Lecture Number-->
<td>November 20</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Reinforcement Learning 2</li>
</ul></td>
<td> <a href="document/lecture/RL_1,2.pdf">Slides (*.pdf)</a> <br>
<a href="https://www.youtube.com/watch?v=qdFXHxQGu70&list=PLp-0K3kfddPzNdZPX4p0lVi6AcDXBofuf&index=25">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>27</td> <!--Lecture Number-->
<td>November 25</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Guest Lecture</li>
</ul></td>
<td><a href="document/lecture/neubig19dl.pdf">Slides (*.pdf)</a> <br>
<a href="https://www.youtube.com/watch?v=sMs5fEPB1yU&feature=youtu.be">YouTube (url)</a> </td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>-</td> <!--Lecture Number-->
<td>November 27</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Thanksgiving Break, No Class</li>
</ul></td>
<td> <!--*.ppt--> </td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>28</td> <!--Lecture Number-->
<td>December 2</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Reinforcement Learning 3</li>
</ul></td>
<td> <a href="document/lecture/RL_5.pdf">Slides (*.pdf)</a> <br>
<a href="https://www.youtube.com/watch?v=-aXLwTgUEXg&feature=youtu.be">YouTube (url)</a>
</td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>29</td> <!--Lecture Number-->
<td>December 4</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Last day of classes</li>
</ul></td>
<td> <!--*.ppt--> </td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
<tr>
<td>30</td> <!--Lecture Number-->
<td>December 9</td> <!--Date-->
<td><ul> <!--Topic List-->
<li>Semester ends</li>
</ul></td>
<td> <!--*.ppt--> </td> <!--Lecture Slides-->
<td> <!--*.pdf--> </td> <!--Readings-->
<td> <!--HWX URL--> </td> <!--Quizzes & Assignments-->
</tr>
</tbody>
</table>
<h2>Tentative Schedule of Recitations</h2>
<table class="table table-striped table-bordered">
<thead>
<tr>
<th>Recitation</th>
<th>Date</th>
<th>Topics</th>
<th>Notebook</th>
<th>Videos</th>
<th>Instructor </th>
</tr>
</thead>
<tbody>
<tr>
<td>0 - Part A</td> <!--Recitation Number-->
<td>August 16</td> <!--Date-->
<td>Fundamentals of Python</td> <!--Topics-->
<td><a href="document/recitation/recitation0a.tar.gz"> Notebook (*.tar.gz) </a><br></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=YdDgflXiIpc&list=PLp-0K3kfddPxf4T59JEQKv5UanLPVsxzz&index=2&t=0s"> YouTube (url) </a><br></td> <!--Videos-->
<td>Hanna</td> <!--Instructor-->
</tr>
<tr>
<td>0 - Part B</td> <!--Recitation Number-->
<td>August 17</td> <!--Date-->
<td>Fundamentals of NumPy</td> <!--Topics-->
<td> <a href="document/recitation/recitation0b.tar.gz"> Notebook (*.tar.gz) </a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=rNFkONjyL7A&list=PLp-0K3kfddPxf4T59JEQKv5UanLPVsxzz&index=3&t=0s"> YouTube (url) </a></td> <!--Videos-->
<td>Joseph</td> <!--Instructor-->
</tr>
<tr>
<td>0 - Part C</td> <!--Recitation Number-->
<td>August 17</td> <!--Date-->
<td>Fundamentals of Jupyter Notebook</td> <!--Topics-->
<td><a href="document/recitation/recitation0c.tar.gz"> Notebook (*.tar.gz) </a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=ZZoJFGPbRI0&list=PLp-0K3kfddPxf4T59JEQKv5UanLPVsxzz&index=4&t=0s"> YouTube (url) </a></td> <!--Videos-->
<td>Joseph</td> <!--Instructor-->
</tr>
<tr>
<td>1</td> <!--Recitation Number-->
<td>August 26</td> <!--Date-->
<td>Amazon Web Service (AWS) and EC2</td> <!--Topics-->
<td><a href="document/recitation/recitation1.tar.gz"> Notebook (*.tar.gz) </a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=g9D2BfYnkmw&list=PLp-0K3kfddPxf4T59JEQKv5UanLPVsxzz&index=5&t=0s"> YouTube (url) </a></td> <!--Videos-->
<td>Kangrui, Parth, Wendy</td> <!--Instructor-->
</tr>
<tr>
<td>2</td> <!--Recitation Number-->
<td>September 6</td> <!--Date-->
<td>Your First Deep Learning Code</td> <!--Topics-->
<td><a href="document/recitation/Recitation2.tar.gz">Notebook (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=s9KAFrOCw54&list=PLp-0K3kfddPxf4T59JEQKv5UanLPVsxzz&index=6&t=0s">YouTube (url)</a></td> <!--Videos-->
<td>Pallavi, Wendy</td> <!--Instructor-->
</tr>
<tr>
<td>3</td> <!--Recitation Number-->
<td>September 13</td> <!--Date-->
<td>Efficient Deep Learning and Optimization Methods</td> <!--Topics-->
<td><a href="document/recitation/recitation3.tar.gz">Notebook (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=6ncMQc_Nsu0&list=PLp-0K3kfddPxf4T59JEQKv5UanLPVsxzz&index=7&t=0s">YouTube (url)</a></td> <!--Videos-->
<td>Aishwarya, Bonan, Hanna</td> <!--Instructor-->
</tr>
<tr>
<td>4</td> <!--Recitation Number-->
<td>September 20</td> <!--Date-->
<td>Debugging and Visualization</td> <!--Topics-->
<td><a href="document/recitation/recitation4.tar.gz">Notebook (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=q9580dkzU7k&list=PLp-0K3kfddPxf4T59JEQKv5UanLPVsxzz&index=8&t=0s">YouTube (url)</a></td> <!--Videos-->
<td>Liwei, Natnael</td> <!--Instructor-->
</tr>
<tr>
<td>5</td> <!--Recitation Number-->
<td>September 27</td> <!--Date-->
<td>Convolutional Neural Networks</td> <!--Topics-->
<td><a href="document/recitation/recitation5.tar.gz">Notebook (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=4m-lc3hdY30&list=PLp-0K3kfddPxf4T59JEQKv5UanLPVsxzz&index=9&t=0s">YouTube (url)</a></td> <!--Videos-->
<td>Kangrui, Bonan</td> <!--Instructor-->
</tr>
<tr>
<td>6</td> <!--Recitation Number-->
<td>October 4</td> <!--Date-->
<td>Convolutional Neural Networks (CNNs) and HW2</td> <!--Topics-->
<td><a href="document/recitation/recitation6.tar.gz">Notebook (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=W7PHoPg2Ke4&list=PLp-0K3kfddPxf4T59JEQKv5UanLPVsxzz&index=10&t=0s">YouTube (url)</a></td> <!--Videos-->
<td>Bonan, Parth, Wendy</td> <!--Instructor-->
</tr>
<tr>
<td>7</td> <!--Recitation Number-->
<td>October 11</td> <!--Date-->
<td>Recurrent Neural Networks (RNNs)</td> <!--Topics-->
<td><a href="document/recitation/recitation_7.zip">Notebook (*.zip)</a> </td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=6JORiirBPh8&t=1s">YouTube (url)</a></td> <!--Videos-->
<td>Hanna, Kangrui, Natnael</td> <!--Instructor-->
</tr>
<tr>
<td>8</td> <!--Recitation Number-->
<td>October 18</td> <!--Date-->
<td>Connectionist Temporal Classification (CTC) in Recurrent Neural Networks (RNNs)</td> <!--Topics-->
<td><a href="document/recitation/recitation8.tar.gz">Notebook (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=MEh4pwdxp9w&feature=youtu.be">YouTube (url)</a></td> <!--Videos-->
<td>Liwei, Natnael, Pallavi</td> <!--Instructor-->
</tr>
<tr>
<td>9</td> <!--Recitation Number-->
<td>October 25</td> <!--Date-->
<td>Attention Mechanisms and Memory Networks</td> <!--Topics-->
<td><a href="document/recitation/recitation9.tar.gz">Notebook (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=50jHdK0zQgo&feature=youtu.be">YouTube (url)</a></td> <!--Videos-->
<td>Ethan, Liwei</td> <!--Instructor-->
</tr>
<tr>
<td>10</td> <!--Recitation Number-->
<td>November 1</td> <!--Date-->
<td>Variational Autoencoders</td> <!--Topics-->
<td><a href="document/recitation/recitation10.tar.gz">Slides (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=ORWYwMfQwag&feature=youtu.be">YouTube (url)</a></td> <!--Videos-->
<td>Ethan</td> <!--Instructor-->
</tr>
<tr>
<td>11</td> <!--Recitation Number-->
<td>November 8</td> <!--Date-->
<td>Attention - Homework 4</td> <!--Topics-->
<td><a href="document/recitation/recitation11.tar.gz">Notebook (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=pRr_emLUdrI&feature=youtu.be">YouTube (url)</a></td> <!--Videos-->
<td>Parth, Amit</td> <!--Instructor-->
</tr>
<tr>
<td>12</td> <!--Recitation Number-->
<td>November 15</td> <!--Date-->
<td>Generative Adversarial Networks (GANs)</td> <!--Topics-->
<td><a href="document/recitation/recitation12.tar.gz">Notebook (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=Cqh8-ZuXOg0">YouTube (url)</a></td> <!--Videos-->
<td>Hari, Parth, Amit</td> <!--Instructor-->
</tr>
<tr>
<td>13</td> <!--Recitation Number-->
<td>November 22</td> <!--Date-->
<td>Reinforcement Learning</td> <!--Topics-->
<td><a href="document/recitation/recitation13.tar.gz">Slides (*.tar.gz)</a></td> <!--Notebook-->
<td><a href="https://www.youtube.com/watch?v=GD9AFc8CRtk&feature=youtu.be">YouTube (url)</a></td> <!--Videos-->
<td>Hari, Aishwarya</td> <!--Instructor-->
</tr>
</tbody>
</table>
<h2> Homework Schedule </h2>
<table class="table table-striped table-bordered">
<thead>
<tr>
<th>Number</th>
<th>Part</th>
<th>Topics</th>
<th>Release Date</th>
<th>Early-submission Deadline</th>
<th>On-time Deadline</th>
<th>Links </th>
</tr>
</thead>
<tbody>
<tr>
<td>HW0</td> <!--Number-->
<td>—</td> <!--Part-->
<td></td> <!--Topics-->
<td>August 12</td> <!--Release Date-->
<td><!--Month Day--></td> <!--Early-submission Deadline-->
<td>September 8</td> <!--On-time Deadline-->
<td> <!--Links-->
<a href="document/homework/homework0.tar.gz"> Handout (*.tar.gz) </a><br>
<!--<a href=""> *.pdf </a> <br/>-->
<!--<a href=""> *.tar.gz </a> <br/>-->
</td>
</tr>
<tr>
<td>HW1</td> <!--Number-->
<td>P1</td> <!--Part-->
<td>Engineering Automatic Differentiation Libraries</td> <!--Topics-->
<td>Sunday, Sept. 9th, 2019</td> <!--Release Date-->
<td>Wednesday, Sept. 18th, 2019</td> <!--Early-submission Deadline-->
<td>Saturday, Sept. 28th, 2019</td> <!--On-time Deadline-->
<td>
<a href="document/homework/homework1.tar.gz">Handout (*.tar.gz)</a>
</td>
</tr>
<tr>
<td></td> <!--Number-->
<td>P2</td> <!--Part-->
<td>Frame-level Speech Classification</td> <!--Topics-->
<td>Sunday, Sept. 9th, 2019</td> <!--Release Date-->
<td>Wednesday, Sept. 18th, 2019 </td> <!--Early-submission Deadline-->
<td>Saturday Sept. 28th, 2019</td> <!--On-time Deadline-->
<td>
<a href="https://www.kaggle.com/c/11-785-homework-1-part-2-fall-19-slack">Slack Kaggle</a><br>
<a href="https://forms.gle/zQVUAURgW48SoznLA">Code Submission Form</a>
</td>
</tr>
<tr>
<td>HW2</td> <!--Number-->
<td>P1</td> <!--Part-->
<td>Convolutional Neural Networks</td> <!--Topics-->