import numpy as np
# Read the training data, build a vocabulary, and map chars to indices
class DataReader:
def __init__(self, path, seq_length):
        # uncomment the line below (and comment out the next two) to test without a file
        #self.data = "some really long text to test this. maybe not perfect but should get you going."
self.fp = open(path, "r")
self.data = self.fp.read()
#find unique chars
chars = list(set(self.data))
#create dictionary mapping for each char
self.char_to_ix = {ch:i for (i,ch) in enumerate(chars)}
self.ix_to_char = {i:ch for (i,ch) in enumerate(chars)}
        # total number of characters
        self.data_size = len(self.data)
        # number of unique characters
        self.vocab_size = len(chars)
self.pointer = 0
self.seq_length = seq_length
def next_batch(self):
input_start = self.pointer
input_end = self.pointer + self.seq_length
inputs = [self.char_to_ix[ch] for ch in self.data[input_start:input_end]]
targets = [self.char_to_ix[ch] for ch in self.data[input_start+1:input_end+1]]
self.pointer += self.seq_length
if self.pointer + self.seq_length + 1 >= self.data_size:
# reset pointer
self.pointer = 0
return inputs, targets
def just_started(self):
return self.pointer == 0
def close(self):
self.fp.close()
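As a quick sanity check of the batch layout, here is a minimal sketch (tiny.txt and its contents are made up purely for illustration):

with open("tiny.txt", "w") as f:
    f.write("hello world")
reader = DataReader("tiny.txt", seq_length=4)
inputs, targets = reader.next_batch()
# targets is inputs shifted right by one character:
# inputs  -> indices of "hell"
# targets -> indices of "ello"
reader.close()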
import json
# keep the text of the first 100 posts, stripping quote markers and newlines
data = ' '.join([p['text'].replace('>>','').replace('\n',' ') for p in json.load(open('posts.json'))[:100] if 'text' in p])
# drop URLs and bare numbers, and lowercase everything
data2 = ' '.join([x for x in data.split(' ') if 'http' not in x and not x.isdigit()]).lower()
open('posts.txt','w').write(data2)
len(data2)
56395
seq_length = 25
#read text from the "posts.txt" file we just wrote
data_reader = DataReader("posts.txt", seq_length)
data_reader.vocab_size
68
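At each time step t the network below computes a hidden state h_t = tanh(U·x_t + W·h_{t-1} + b) and output probabilities ŷ_t = softmax(V·h_t + c), where x_t is the one-hot encoding of the current character. U (input-to-hidden), W (hidden-to-hidden), V (hidden-to-output) and the two biases are the only parameters, shared across all time steps; training nudges ŷ_t towards the one-hot encoding of the next character.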
class RNN:
def __init__(self, hidden_size, vocab_size, seq_length, learning_rate):
# hyper parameters
self.hidden_size = hidden_size
self.vocab_size = vocab_size
self.seq_length = seq_length
self.learning_rate = learning_rate
# model parameters
self.U = np.random.uniform(-np.sqrt(1./vocab_size), np.sqrt(1./vocab_size), (hidden_size, vocab_size))
self.V = np.random.uniform(-np.sqrt(1./hidden_size), np.sqrt(1./hidden_size), (vocab_size, hidden_size))
self.W = np.random.uniform(-np.sqrt(1./hidden_size), np.sqrt(1./hidden_size), (hidden_size, hidden_size))
self.b = np.zeros((hidden_size, 1)) # bias for hidden layer
self.c = np.zeros((vocab_size, 1)) # bias for output
        # memory variables for Adagrad (ignore these if you use another optimizer)
self.mU = np.zeros_like(self.U)
self.mW = np.zeros_like(self.W)
self.mV = np.zeros_like(self.V)
self.mb = np.zeros_like(self.b)
self.mc = np.zeros_like(self.c)
    def softmax(self, x):
        # subtract the max before exponentiating for numerical stability
        p = np.exp(x - np.max(x))
        return p / np.sum(p)
def forward(self, inputs, hprev):
xs, hs, os, ycap = {}, {}, {}, {}
hs[-1] = np.copy(hprev)
for t in range(len(inputs)):
xs[t] = np.zeros((self.vocab_size,1))
            xs[t][inputs[t]] = 1 # one-hot encoding (1-of-k)
hs[t] = np.tanh(np.dot(self.U,xs[t]) + np.dot(self.W,hs[t-1]) + self.b) # hidden state
os[t] = np.dot(self.V,hs[t]) + self.c # unnormalised log probs for next char
ycap[t] = self.softmax(os[t]) # probs for next char
return xs, hs, ycap
def loss(self, ps, targets):
"""loss for a sequence"""
        # calculate the cross-entropy loss
        return sum(-np.log(ps[t][targets[t], 0]) for t in range(self.seq_length))
def backward(self, xs, hs, ps, targets):
# backward pass: compute gradients going backwards
dU, dW, dV = np.zeros_like(self.U), np.zeros_like(self.W), np.zeros_like(self.V)
db, dc = np.zeros_like(self.b), np.zeros_like(self.c)
dhnext = np.zeros_like(hs[0])
for t in reversed(range(self.seq_length)):
dy = np.copy(ps[t])
#through softmax
dy[targets[t]] -= 1 # backprop into y
#calculate dV, dc
dV += np.dot(dy, hs[t].T)
            dc += dy # gradient of the output bias is just dy
#dh includes gradient from two sides, next cell and current output
dh = np.dot(self.V.T, dy) + dhnext # backprop into h
            # backprop through the tanh non-linearity: tanh'(z) = 1 - tanh(z)^2
            dhrec = (1 - hs[t] * hs[t]) * dh
db += dhrec
#calculate dU and dW
dU += np.dot(dhrec, xs[t].T)
dW += np.dot(dhrec, hs[t-1].T)
            # carry the gradient back through W to the previous time step
            dhnext = np.dot(self.W.T, dhrec)
# clip to mitigate exploding gradients
for dparam in [dU, dW, dV, db, dc]:
np.clip(dparam, -5, 5, out=dparam)
return dU, dW, dV, db, dc
def update_model(self, dU, dW, dV, db, dc):
# parameter update with adagrad
for param, dparam, mem in zip([self.U, self.W, self.V, self.b, self.c],
[dU, dW, dV, db, dc],
[self.mU, self.mW, self.mV, self.mb, self.mc]):
mem += dparam*dparam
param += -self.learning_rate*dparam/np.sqrt(mem+1e-8) # adagrad update
def sample(self, h, seed_ix, n):
"""
sample a sequence of integers from the model
h is memory state, seed_ix is seed letter from the first time step
"""
x = np.zeros((self.vocab_size, 1))
x[seed_ix] = 1
ixes = []
for t in range(n):
h = np.tanh(np.dot(self.U, x) + np.dot(self.W, h) + self.b)
y = np.dot(self.V, h) + self.c
            p = self.softmax(y)  # reuse the numerically stable softmax
            ix = np.random.choice(range(self.vocab_size), p=p.ravel())
x = np.zeros((self.vocab_size,1))
x[ix] = 1
ixes.append(ix)
return ixes
def train(self, data_reader):
iter_num = 0
threshold = 0.01
smooth_loss = -np.log(1.0/data_reader.vocab_size)*self.seq_length
        hprev = np.zeros((self.hidden_size, 1))
        while smooth_loss > threshold:
            # reset the hidden state whenever we wrap around to the start of the data
            if data_reader.just_started():
                hprev = np.zeros((self.hidden_size, 1))
inputs, targets = data_reader.next_batch()
xs, hs, ps = self.forward(inputs, hprev)
dU, dW, dV, db, dc = self.backward(xs, hs, ps, targets)
loss = self.loss(ps, targets)
self.update_model(dU, dW, dV, db, dc)
smooth_loss = smooth_loss*0.999 + loss*0.001
hprev = hs[self.seq_length-1]
            if iter_num % 500 == 0:
sample_ix = self.sample(hprev, inputs[0], 200)
print( ''.join(data_reader.ix_to_char[ix] for ix in sample_ix))
print( "\n\niter :%d, loss:%f"%(iter_num, smooth_loss))
iter_num += 1
def predict(self, data_reader, start, n=256):
        h = np.zeros((self.hidden_size, 1))
        ixes = [data_reader.char_to_ix[ch] for ch in start]
        # run the seed through the network so the hidden state reflects it;
        # the last seed char stays in x as input to the generation loop below
        for ix in ixes[:-1]:
            x = np.zeros((self.vocab_size, 1))
            x[ix] = 1
            h = np.tanh(np.dot(self.U, x) + np.dot(self.W, h) + self.b)
        x = np.zeros((self.vocab_size, 1))
        x[ixes[-1]] = 1
# predict next n chars
for t in range(n):
h = np.tanh(np.dot(self.U, x) + np.dot(self.W, h) + self.b)
y = np.dot(self.V, h) + self.c
            p = self.softmax(y)  # reuse the numerically stable softmax
            ix = np.random.choice(range(self.vocab_size), p=p.ravel())
            x = np.zeros((self.vocab_size, 1))  # (zero_init was undefined)
            x[ix] = 1
ixes.append(ix)
txt = ''.join(data_reader.ix_to_char[i] for i in ixes)
return txt
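Before trusting the loss curve, it's worth numerically checking the analytic gradients from backward(). Below is a minimal sketch of a centered-difference check; grad_check, num_checks and eps are names and values picked for illustration, not part of the original code. Run it early in training on a single batch, and note that backward() clips gradients to ±5, so any component at the clip boundary will disagree with the numerical estimate.

def grad_check(rnn, inputs, targets, hprev, num_checks=5, eps=1e-5):
    # analytic gradients from one forward/backward pass
    xs, hs, ps = rnn.forward(inputs, hprev)
    grads = rnn.backward(xs, hs, ps, targets)
    for param, dparam, name in zip([rnn.U, rnn.W, rnn.V, rnn.b, rnn.c],
                                   grads, ['U', 'W', 'V', 'b', 'c']):
        for _ in range(num_checks):
            i = np.random.randint(param.size)
            old = param.flat[i]
            # centered finite difference of the loss w.r.t. this one weight
            param.flat[i] = old + eps
            loss_plus = rnn.loss(rnn.forward(inputs, hprev)[2], targets)
            param.flat[i] = old - eps
            loss_minus = rnn.loss(rnn.forward(inputs, hprev)[2], targets)
            param.flat[i] = old  # restore
            num_grad = (loss_plus - loss_minus) / (2 * eps)
            rel_err = abs(num_grad - dparam.flat[i]) / max(1e-12, abs(num_grad) + abs(dparam.flat[i]))
            print("%s: relative error %e" % (name, rel_err))  # expect roughly 1e-7 or smaller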
rnn = RNN(hidden_size=128, vocab_size=data_reader.vocab_size, seq_length=seq_length, learning_rate=1e-1)
rnn.train(data_reader)
–8( 6.i;o33z…&7myvozi_i?o1$ki&26i&””j 'n&8t4a2&a fd/p%h–‘c"d3e?y@$_6ap2da/%;r_yadccded%45’’dg…dbd2d5’d_dp’]d20x'dd0“9'dab9dod ca,s‘69dd2…d.a]adx8(6%ddu…&?"kb2!$6!743”t da81…zdpdts/?8?d>xhd""7jdz9dvd

iter :0, loss:105.487899

ttm aotf-6feeys oe le rioh od ?fo cea pev ee be loeisbde uro nsknuewtdecnin ole fldsmae aee ckhir hi nacemsot.eeyteot/ecy me l.e(mtitfeertegsneoamre as?ttugtfe r etuoe tug nu lii uogegipfeek h

iter :500, loss:98.517133

[... sampled text and losses for iterations 1000-30500 elided; the loss falls steadily while the samples gradually pick up word- and sentence-like structure ...]

what offibiney.. veate 'acer verec, srtem iticaatt? why in whevace iriantacten aled. dodeidg upo? wheuir, is iageenstist icfa

iter :31000, loss:55.563921
(training interrupted manually with a KeyboardInterrupt after about 31,000 iterations)
# call predict on the trained instance (not the class), and use a lowercase
# seed -- the corpus was lowercased, so 'H' is not in the vocabulary
rnn.predict(data_reader, 'hillary', 200)
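A common tweak worth experimenting with (not part of the class above) is temperature-scaled sampling: dividing the logits by a temperature below 1 sharpens the distribution and gives more conservative text, while a temperature above 1 flattens it and gives more surprising text. A minimal sketch, with the function name and default value being my own choices:

def sample_with_temperature(rnn, data_reader, seed_ch, n, temperature=0.8):
    h = np.zeros((rnn.hidden_size, 1))
    x = np.zeros((rnn.vocab_size, 1))
    x[data_reader.char_to_ix[seed_ch]] = 1
    out = []
    for _ in range(n):
        h = np.tanh(np.dot(rnn.U, x) + np.dot(rnn.W, h) + rnn.b)
        y = (np.dot(rnn.V, h) + rnn.c) / temperature  # scale the logits
        p = rnn.softmax(y)
        ix = np.random.choice(range(rnn.vocab_size), p=p.ravel())
        x = np.zeros((rnn.vocab_size, 1))
        x[ix] = 1
        out.append(data_reader.ix_to_char[ix])
    return ''.join(out)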