In [1]:
# By Justin Johnson https://github.com/jcjohnson/pytorch-examples/blob/master/nn/dynamic_net.py

import random
import torch
from torch.autograd import Variable

"""
To showcase the power of PyTorch dynamic graphs, we will implement a very strange
model: a fully-connected ReLU network that on each forward pass randomly chooses
a number between 1 and 4 and has that many hidden layers, reusing the same
weights multiple times to compute the innermost hidden layers.
"""

class DynamicNet(torch.nn.Module):
  def __init__(self, D_in, H, D_out):
    """
    In the constructor we construct three nn.Linear instances that we will use
    in the forward pass.
    """
    super(DynamicNet, self).__init__()
    self.input_linear = torch.nn.Linear(D_in, H)
    self.middle_linear = torch.nn.Linear(H, H)
    self.output_linear = torch.nn.Linear(H, D_out)

  def forward(self, x, verbose = False):
    """
    For the forward pass of the model, we randomly choose either 0, 1, 2, or 3
    and reuse the middle_linear Module that many times to compute hidden layer
    representations.
    Since each forward pass builds a dynamic computation graph, we can use normal
    Python control-flow operators like loops or conditional statements when
    defining the forward pass of the model.
    Here we also see that it is perfectly safe to reuse the same Module many
    times when defining a computational graph. This is a big improvement from Lua
    Torch, where each Module could be used only once.
    """
    h_relu = self.input_linear(x).clamp(min=0)
    n_layers = random.randint(0, 3)
    if verbose:
        print("The number of layers for this run is", n_layers)
        # print(h_relu)
    for _ in range(n_layers):
        h_relu = self.middle_linear(h_relu).clamp(min=0)
        if verbose:
            pass
            # print(h_relu)
    y_pred = self.output_linear(h_relu)
    return y_pred




# N is batch size; D_in is input dimension;
# H is hidden dimension; D_out is output dimension.
N, D_in, H, D_out = 64, 1000, 10, 1

# Create random Tensors to hold inputs and outputs. Plain tensors replace the
# deprecated torch.autograd.Variable wrapper (a no-op since PyTorch 0.4);
# requires_grad already defaults to False, matching the original intent.
x = torch.randn(N, D_in)
y = torch.randn(N, D_out)

# Construct our model by instantiating the class defined above
model = DynamicNet(D_in, H, D_out)

# Construct our loss function and an Optimizer. Training this strange model with
# vanilla stochastic gradient descent is tough, so we use momentum.
# reduction='sum' replaces the removed size_average=False argument.
criterion = torch.nn.MSELoss(reduction='sum')
optimizer = torch.optim.SGD(model.parameters(), lr=1e-4, momentum=0.9)
for t in range(500):
  # Forward pass: Compute predicted y by passing x to the model
  y_pred = model(x)

  # Compute and print loss. loss.item() replaces loss.data[0], which raises
  # IndexError on the 0-dim loss tensor returned by modern PyTorch.
  loss = criterion(y_pred, y)
  print(t, loss.item())

  # Zero gradients, perform a backward pass, and update the weights.
  optimizer.zero_grad()
  loss.backward()
  optimizer.step()
(0, 68.55696105957031)
(1, 64.40031433105469)
(2, 57.4582405090332)
(3, 68.0625228881836)
(4, 67.96131896972656)
(5, 67.4065933227539)
(6, 67.52629089355469)
(7, 64.49079132080078)
(8, 31.04802131652832)
(9, 27.771982192993164)
(10, 66.30720520019531)
(11, 61.11796951293945)
(12, 59.83041763305664)
(13, 16.875253677368164)
(14, 14.834412574768066)
(15, 63.62442398071289)
(16, 53.64186096191406)
(17, 51.711700439453125)
(18, 65.10582733154297)
(19, 59.95439147949219)
(20, 45.11320495605469)
(21, 42.68074417114258)
(22, 40.06452941894531)
(23, 12.281163215637207)
(24, 51.818416595458984)
(25, 32.959800720214844)
(26, 47.32932662963867)
(27, 12.875962257385254)
(28, 11.506110191345215)
(29, 40.70511245727539)
(30, 55.15180969238281)
(31, 36.384178161621094)
(32, 5.563450336456299)
(33, 4.731485366821289)
(34, 3.8788135051727295)
(35, 21.744544982910156)
(36, 43.13826370239258)
(37, 41.036827087402344)
(38, 26.631256103515625)
(39, 18.915164947509766)
(40, 24.17453384399414)
(41, 22.682395935058594)
(42, 5.924004554748535)
(43, 12.793107032775879)
(44, 11.265641212463379)
(45, 25.35838508605957)
(46, 8.316976547241211)
(47, 9.34359073638916)
(48, 6.3707427978515625)
(49, 18.57836151123047)
(50, 9.802971839904785)
(51, 4.787074089050293)
(52, 7.430152893066406)
(53, 11.951053619384766)
(54, 4.6526384353637695)
(55, 4.457189559936523)
(56, 13.944796562194824)
(57, 12.829875946044922)
(58, 6.246438026428223)
(59, 8.314807891845703)
(60, 6.005230903625488)
(61, 4.1611785888671875)
(62, 2.683854103088379)
(63, 1.9949036836624146)
(64, 3.956789970397949)
(65, 3.8643577098846436)
(66, 5.213559150695801)
(67, 4.70450496673584)
(68, 4.135525703430176)
(69, 2.677821159362793)
(70, 3.4874045848846436)
(71, 4.013214111328125)
(72, 3.8714847564697266)
(73, 3.1262691020965576)
(74, 2.9015796184539795)
(75, 2.5408952236175537)
(76, 3.7164642810821533)
(77, 1.8671462535858154)
(78, 1.4853354692459106)
(79, 1.7841973304748535)
(80, 1.6563113927841187)
(81, 1.6134264469146729)
(82, 1.637150526046753)
(83, 1.5477235317230225)
(84, 2.615108013153076)
(85, 2.4389235973358154)
(86, 3.3148367404937744)
(87, 1.9828495979309082)
(88, 2.5575108528137207)
(89, 1.115099310874939)
(90, 1.967616081237793)
(91, 0.9035945534706116)
(92, 1.528825044631958)
(93, 1.9212816953659058)
(94, 1.6734600067138672)
(95, 1.9662768840789795)
(96, 1.2190967798233032)
(97, 1.1121183633804321)
(98, 0.9721309542655945)
(99, 1.3496925830841064)
(100, 1.275407314300537)
(101, 1.1732516288757324)
(102, 1.0556175708770752)
(103, 0.8736497163772583)
(104, 0.9875146746635437)
(105, 0.779724657535553)
(106, 1.0355879068374634)
(107, 1.0262908935546875)
(108, 0.9177124500274658)
(109, 0.6687732338905334)
(110, 0.6355648040771484)
(111, 0.872856855392456)
(112, 1.0000821352005005)
(113, 0.8117831349372864)
(114, 0.5724679827690125)
(115, 0.5678190588951111)
(116, 1.0041606426239014)
(117, 0.9632062911987305)
(118, 0.5397735834121704)
(119, 0.7208158373832703)
(120, 0.6970574259757996)
(121, 0.6612656116485596)
(122, 0.709017276763916)
(123, 0.6068961024284363)
(124, 0.645119845867157)
(125, 0.6113700866699219)
(126, 0.6768298149108887)
(127, 0.9779959917068481)
(128, 0.947910487651825)
(129, 0.8819578886032104)
(130, 0.5037627816200256)
(131, 0.7381807565689087)
(132, 0.6747440695762634)
(133, 0.5401455760002136)
(134, 0.5178136825561523)
(135, 0.5221168398857117)
(136, 0.5922502875328064)
(137, 0.5246888399124146)
(138, 0.5197998881340027)
(139, 0.49833646416664124)
(140, 0.4888276159763336)
(141, 0.47929927706718445)
(142, 0.46996524930000305)
(143, 0.7739089131355286)
(144, 0.4912509620189667)
(145, 0.7105062007904053)
(146, 0.46849894523620605)
(147, 0.4655649662017822)
(148, 0.5453749895095825)
(149, 0.5545292496681213)
(150, 0.4896514117717743)
(151, 0.42733514308929443)
(152, 0.5014117956161499)
(153, 0.47792378067970276)
(154, 0.46558523178100586)
(155, 0.5120238661766052)
(156, 0.4063217341899872)
(157, 0.4029099643230438)
(158, 0.5256522297859192)
(159, 0.49766504764556885)
(160, 0.5061463713645935)
(161, 0.36976805329322815)
(162, 0.361371785402298)
(163, 0.35383865237236023)
(164, 0.48000991344451904)
(165, 0.4729548692703247)
(166, 0.45626890659332275)
(167, 0.3436278700828552)
(168, 0.4444941282272339)
(169, 0.44163429737091064)
(170, 0.4329681098461151)
(171, 0.4238564372062683)
(172, 0.4540226459503174)
(173, 0.33956506848335266)
(174, 0.4038330912590027)
(175, 0.4363686740398407)
(176, 0.4052790105342865)
(177, 0.3987938463687897)
(178, 0.4027005434036255)
(179, 0.4081977605819702)
(180, 0.4103405475616455)
(181, 0.3385487198829651)
(182, 0.3983153998851776)
(183, 0.3276411294937134)
(184, 0.3211943209171295)
(185, 0.3676176965236664)
(186, 0.311101496219635)
(187, 0.30751675367355347)
(188, 0.3037410080432892)
(189, 0.34307417273521423)
(190, 0.4095078110694885)
(191, 0.4383903443813324)
(192, 0.43005260825157166)
(193, 0.37473925948143005)
(194, 0.3479633629322052)
(195, 0.28907033801078796)
(196, 0.3444131016731262)
(197, 0.3588586747646332)
(198, 0.3710741102695465)
(199, 0.35806804895401)
(200, 0.28172191977500916)
(201, 0.356166273355484)
(202, 0.3468308448791504)
(203, 0.34101855754852295)
(204, 0.33306947350502014)
(205, 0.327303946018219)
(206, 0.3318725824356079)
(207, 0.27150821685791016)
(208, 0.26712480187416077)
(209, 0.35348501801490784)
(210, 0.3028584420681)
(211, 0.35047098994255066)
(212, 0.3194188177585602)
(213, 0.2888026237487793)
(214, 0.3217729330062866)
(215, 0.25554585456848145)
(216, 0.2737066447734833)
(217, 0.33965685963630676)
(218, 0.3119201958179474)
(219, 0.30841928720474243)
(220, 0.26870688796043396)
(221, 0.24156109988689423)
(222, 0.2963334023952484)
(223, 0.26518529653549194)
(224, 0.2596729099750519)
(225, 0.23210294544696808)
(226, 0.2897563874721527)
(227, 0.24181413650512695)
(228, 0.28821009397506714)
(229, 0.28541210293769836)
(230, 0.3561027944087982)
(231, 0.22277674078941345)
(232, 0.22018134593963623)
(233, 0.23635956645011902)
(234, 0.2660958170890808)
(235, 0.21164263784885406)
(236, 0.35016292333602905)
(237, 0.3432270884513855)
(238, 0.33036449551582336)
(239, 0.2609386444091797)
(240, 0.21042726933956146)
(241, 0.29339978098869324)
(242, 0.25382956862449646)
(243, 0.2702391445636749)
(244, 0.27168363332748413)
(245, 0.2542628049850464)
(246, 0.2741631865501404)
(247, 0.27158069610595703)
(248, 0.26528438925743103)
(249, 0.2634747326374054)
(250, 0.21286015212535858)
(251, 0.24849604070186615)
(252, 0.24130694568157196)
(253, 0.22907347977161407)
(254, 0.24783077836036682)
(255, 0.27959293127059937)
(256, 0.2810845673084259)
(257, 0.2553032636642456)
(258, 0.25394392013549805)
(259, 0.27616187930107117)
(260, 0.2071346938610077)
(261, 0.20431429147720337)
(262, 0.19926172494888306)
(263, 0.19370363652706146)
(264, 0.24323329329490662)
(265, 0.27779248356819153)
(266, 0.24487744271755219)
(267, 0.19182880222797394)
(268, 0.2725074291229248)
(269, 0.26599863171577454)
(270, 0.18325014412403107)
(271, 0.23508618772029877)
(272, 0.17880971729755402)
(273, 0.1965717375278473)
(274, 0.24349476397037506)
(275, 0.17337054014205933)
(276, 0.23355308175086975)
(277, 0.23031426966190338)
(278, 0.24171490967273712)
(279, 0.24033179879188538)
(280, 0.21710331737995148)
(281, 0.17005561292171478)
(282, 0.23412609100341797)
(283, 0.21937750279903412)
(284, 0.22837063670158386)
(285, 0.22383251786231995)
(286, 0.17224064469337463)
(287, 0.21675363183021545)
(288, 0.21248140931129456)
(289, 0.1735827475786209)
(290, 0.17269858717918396)
(291, 0.19940997660160065)
(292, 0.1930703967809677)
(293, 0.22116978466510773)
(294, 0.21441172063350677)
(295, 0.1582336127758026)
(296, 0.17002302408218384)
(297, 0.21833032369613647)
(298, 0.21564117074012756)
(299, 0.16036421060562134)
(300, 0.2355506271123886)
(301, 0.15494127571582794)
(302, 0.1504264622926712)
(303, 0.1447974145412445)
(304, 0.1391996145248413)
(305, 0.24922794103622437)
(306, 0.1317485272884369)
(307, 0.15501748025417328)
(308, 0.1543901115655899)
(309, 0.2531008720397949)
(310, 0.23143082857131958)
(311, 0.2259288728237152)
(312, 0.1236080452799797)
(313, 0.1383976936340332)
(314, 0.13590985536575317)
(315, 0.23786720633506775)
(316, 0.1342218518257141)
(317, 0.128822460770607)
(318, 0.22927974164485931)
(319, 0.13596434891223907)
(320, 0.1356055587530136)
(321, 0.20218972861766815)
(322, 0.13194669783115387)
(323, 0.20023377239704132)
(324, 0.22316579520702362)
(325, 0.12576039135456085)
(326, 0.12319409847259521)
(327, 0.22161145508289337)
(328, 0.21872025728225708)
(329, 0.12386011332273483)
(330, 0.1251932978630066)
(331, 0.12441207468509674)
(332, 0.12163221836090088)
(333, 0.207656130194664)
(334, 0.11560095101594925)
(335, 0.1126704216003418)
(336, 0.20972943305969238)
(337, 0.20940743386745453)
(338, 0.21648453176021576)
(339, 0.2134958952665329)
(340, 0.12404625117778778)
(341, 0.19841043651103973)
(342, 0.11608558893203735)
(343, 0.18287567794322968)
(344, 0.11926289647817612)
(345, 0.11822305619716644)
(346, 0.12639136612415314)
(347, 0.11333231627941132)
(348, 0.16370822489261627)
(349, 0.21516446769237518)
(350, 0.1095762625336647)
(351, 0.16164565086364746)
(352, 0.1265915483236313)
(353, 0.21453697979450226)
(354, 0.12366698682308197)
(355, 0.15764771401882172)
(356, 0.20774151384830475)
(357, 0.15566101670265198)
(358, 0.2001258283853531)
(359, 0.15322664380073547)
(360, 0.12454575300216675)
(361, 0.12505511939525604)
(362, 0.12213917821645737)
(363, 0.1546889692544937)
(364, 0.18771693110466003)
(365, 0.11050982773303986)
(366, 0.18495656549930573)
(367, 0.10939125716686249)
(368, 0.1789781153202057)
(369, 0.16344432532787323)
(370, 0.1720968782901764)
(371, 0.11583121865987778)
(372, 0.16257531940937042)
(373, 0.10569147765636444)
(374, 0.16535578668117523)
(375, 0.155373215675354)
(376, 0.16380517184734344)
(377, 0.16193357110023499)
(378, 0.13198710978031158)
(379, 0.1080806702375412)
(380, 0.10643331706523895)
(381, 0.15422247350215912)
(382, 0.10351108014583588)
(383, 0.159107968211174)
(384, 0.10121788084506989)
(385, 0.15437807142734528)
(386, 0.15408433973789215)
(387, 0.15222929418087006)
(388, 0.09942464530467987)
(389, 0.12104000151157379)
(390, 0.1467503309249878)
(391, 0.1176227480173111)
(392, 0.1447749137878418)
(393, 0.14344052970409393)
(394, 0.16800083220005035)
(395, 0.13989242911338806)
(396, 0.10323711484670639)
(397, 0.1639036238193512)
(398, 0.11775948107242584)
(399, 0.10277019441127777)
(400, 0.1395304799079895)
(401, 0.11336822062730789)
(402, 0.14085538685321808)
(403, 0.10773314535617828)
(404, 0.1045645922422409)
(405, 0.09576775878667831)
(406, 0.09745685756206512)
(407, 0.14981737732887268)
(408, 0.15139585733413696)
(409, 0.16841773688793182)
(410, 0.1648685336112976)
(411, 0.09006407111883163)
(412, 0.08898231387138367)
(413, 0.15127642452716827)
(414, 0.15033632516860962)
(415, 0.08939759433269501)
(416, 0.13902264833450317)
(417, 0.13609160482883453)
(418, 0.131153404712677)
(419, 0.1452413648366928)
(420, 0.1441461145877838)
(421, 0.09547346085309982)
(422, 0.13794708251953125)
(423, 0.09373621642589569)
(424, 0.12661921977996826)
(425, 0.12073299288749695)
(426, 0.08634498715400696)
(427, 0.08436927944421768)
(428, 0.10239376127719879)
(429, 0.13720491528511047)
(430, 0.07629312574863434)
(431, 0.13233499228954315)
(432, 0.12638579308986664)
(433, 0.09170448780059814)
(434, 0.11654886603355408)
(435, 0.1127137690782547)
(436, 0.15267729759216309)
(437, 0.1534366011619568)
(438, 0.10242568701505661)
(439, 0.08859740197658539)
(440, 0.0863703265786171)
(441, 0.10885582864284515)
(442, 0.07811437547206879)
(443, 0.13710792362689972)
(444, 0.07927151024341583)
(445, 0.13289040327072144)
(446, 0.12434195727109909)
(447, 0.12703195214271545)
(448, 0.1240917220711708)
(449, 0.1061406284570694)
(450, 0.11289721727371216)
(451, 0.09960922598838806)
(452, 0.12546002864837646)
(453, 0.1273072063922882)
(454, 0.10553498566150665)
(455, 0.08074663579463959)
(456, 0.10914693027734756)
(457, 0.09237901866436005)
(458, 0.07316620647907257)
(459, 0.0843999981880188)
(460, 0.12285209447145462)
(461, 0.12292888015508652)
(462, 0.12158036977052689)
(463, 0.11919305473566055)
(464, 0.11610429733991623)
(465, 0.10542454570531845)
(466, 0.11009935289621353)
(467, 0.1450529247522354)
(468, 0.08189161866903305)
(469, 0.0779481828212738)
(470, 0.08704110234975815)
(471, 0.08935488760471344)
(472, 0.11014942079782486)
(473, 0.11121664196252823)
(474, 0.07844926416873932)
(475, 0.10869673639535904)
(476, 0.08681631088256836)
(477, 0.12598153948783875)
(478, 0.08129295706748962)
(479, 0.0795239582657814)
(480, 0.0775766670703888)
(481, 0.07117141783237457)
(482, 0.07070926576852798)
(483, 0.07033644616603851)
(484, 0.1194288432598114)
(485, 0.13345208764076233)
(486, 0.1284346580505371)
(487, 0.12085364758968353)
(488, 0.12480311095714569)
(489, 0.08764630556106567)
(490, 0.10486908257007599)
(491, 0.07703143358230591)
(492, 0.09862735867500305)
(493, 0.0956120565533638)
(494, 0.09047470986843109)
(495, 0.12990880012512207)
(496, 0.0889328271150589)
(497, 0.0938691794872284)
(498, 0.12585215270519257)
(499, 0.08975556492805481)
In [2]:
model(x)[1:5]
Out[2]:
Variable containing:
 2.7784
-0.5659
-1.1524
-0.6715
[torch.FloatTensor of size 4x1]
In [3]:
model(x)[1:5] # another run
Out[3]:
Variable containing:
 2.7171
-0.5757
-1.1496
-0.6636
[torch.FloatTensor of size 4x1]
In [4]:
model(x)[1:5]
Out[4]:
Variable containing:
 2.7219
-0.6037
-1.1652
-0.7280
[torch.FloatTensor of size 4x1]

Looks consistent! Let's now try to see what's happening inside

In [5]:
model(x, verbose = True)[1:5]
('The number of layers for this run is', 0)
Out[5]:
Variable containing:
 2.7171
-0.5757
-1.1496
-0.6636
[torch.FloatTensor of size 4x1]
In [6]:
model(x, verbose = True)[1:5]
('The number of layers for this run is', 1)
Out[6]:
Variable containing:
 2.7784
-0.5659
-1.1524
-0.6715
[torch.FloatTensor of size 4x1]
In [7]:
model(x, verbose = True)[1:5]
('The number of layers for this run is', 3)
Out[7]:
Variable containing:
 2.7219
-0.6037
-1.1652
-0.7280
[torch.FloatTensor of size 4x1]
In [8]:
model(x, verbose = True)[1:5]
('The number of layers for this run is', 0)
Out[8]:
Variable containing:
 2.7171
-0.5757
-1.1496
-0.6636
[torch.FloatTensor of size 4x1]
In [9]:
model(x, verbose = True)[1:5]
('The number of layers for this run is', 0)
Out[9]:
Variable containing:
 2.7171
-0.5757
-1.1496
-0.6636
[torch.FloatTensor of size 4x1]

So what's the target?

In [10]:
y[1:5]
Out[10]:
Variable containing:
 2.7307
-0.5801
-1.1544
-0.6970
[torch.FloatTensor of size 4x1]
In [ ]: