import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, SimpleRNN, Dense

# Build a character-level vocabulary from the training text
text = "This is a sample text for language modeling using RNN."
chars = sorted(set(text))
char_to_index = {char: index for index, char in enumerate(chars)}
index_to_char = {index: char for index, char in enumerate(chars)}
text_indices = [char_to_index[char] for char in text]

# Slice the text into fixed-length input sequences and their next-character targets
seq_length, sequences, next_chars = 20, [], []
for i in range(0, len(text_indices) - seq_length):
    sequences.append(text_indices[i:i + seq_length])
    next_chars.append(text_indices[i + seq_length])
X, y = np.array(sequences), np.array(next_chars)

# Embedding -> SimpleRNN -> softmax over the character vocabulary
model = Sequential([
    Embedding(input_dim=len(chars), output_dim=50, input_length=seq_length),
    SimpleRNN(100, return_sequences=False),
    Dense(len(chars), activation="softmax"),
])
model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")
model.fit(X, y, batch_size=64, epochs=20)

# Generate text greedily: encode the current seed, predict the next character,
# append it, and slide the seed window forward by one character
seed_text = "This is a sample te"
generated_text = seed_text
num_chars_to_generate = 100
for _ in range(num_chars_to_generate):
    seed_indices = [char_to_index[char] for char in seed_text]
    if len(seed_indices) < seq_length:
        # Left-pad short seeds with index 0 so the input always has seq_length steps
        diff = seq_length - len(seed_indices)
        seed_indices = [0] * diff + seed_indices
    seed_indices = np.array(seed_indices).reshape(1, -1)
    next_index = model.predict(seed_indices).argmax()
    next_char = index_to_char[next_index]
    generated_text += next_char
    seed_text = seed_text[1:] + next_char
print(generated_text)
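The argmax decoding above is greedy: it always emits the single most likely character, which is why the generated text in the stdout below quickly collapses into repeated fragments. A common alternative is to sample from the softmax distribution with a temperature parameter. Below is a minimal sketch of that idea; it assumes the model, char_to_index, index_to_char, and seq_length defined above, and the sample_next_char helper and temperature value are illustrative additions, not part of the original run.

def sample_next_char(model, seed_text, temperature=0.5):
    # Encode and left-pad the seed, as in the main script
    seed_indices = [char_to_index[c] for c in seed_text]
    seed_indices = [0] * (seq_length - len(seed_indices)) + seed_indices
    probs = model.predict(np.array(seed_indices).reshape(1, -1))[0]
    # Rescale the distribution by the temperature and renormalize;
    # lower temperature -> closer to argmax, higher -> more random
    logits = np.log(probs + 1e-8) / temperature
    probs = np.exp(logits) / np.sum(np.exp(logits))
    next_index = np.random.choice(len(probs), p=probs)
    return index_to_char[next_index]

Replacing the argmax line in the generation loop with a call to this helper trades some accuracy on the tiny training text for less repetitive output.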
Success #stdin #stdout #stderr 1.92s 240044KB
stdin
Standard input is empty
stdout
Epoch 1/20
34/34 [==============================] - 0s 8ms/sample - loss: 3.0916
Epoch 2/20
34/34 [==============================] - 0s 190us/sample - loss: 2.9989
Epoch 3/20
34/34 [==============================] - 0s 190us/sample - loss: 2.9072
Epoch 4/20
34/34 [==============================] - 0s 195us/sample - loss: 2.8136
Epoch 5/20
34/34 [==============================] - 0s 198us/sample - loss: 2.7214
Epoch 6/20
34/34 [==============================] - 0s 182us/sample - loss: 2.6383
Epoch 7/20
34/34 [==============================] - 0s 182us/sample - loss: 2.5603
Epoch 8/20
34/34 [==============================] - 0s 188us/sample - loss: 2.4736
Epoch 9/20
34/34 [==============================] - 0s 203us/sample - loss: 2.3750
Epoch 10/20
34/34 [==============================] - 0s 214us/sample - loss: 2.2723
Epoch 11/20
34/34 [==============================] - 0s 204us/sample - loss: 2.1704
Epoch 12/20
34/34 [==============================] - 0s 193us/sample - loss: 2.0683
Epoch 13/20
34/34 [==============================] - 0s 191us/sample - loss: 1.9671
Epoch 14/20
34/34 [==============================] - 0s 202us/sample - loss: 1.8690
Epoch 15/20
34/34 [==============================] - 0s 194us/sample - loss: 1.7740
Epoch 16/20
34/34 [==============================] - 0s 182us/sample - loss: 1.6811
Epoch 17/20
34/34 [==============================] - 0s 177us/sample - loss: 1.5892
Epoch 18/20
34/34 [==============================] - 0s 178us/sample - loss: 1.4982
Epoch 19/20
34/34 [==============================] - 0s 172us/sample - loss: 1.4085
Epoch 20/20
34/34 [==============================] - 0s 173us/sample - loss: 1.3209
This is a sample teat for langugge modeling ugingmRNN.nggngrnging ngnguggnginging ngnggngnggngingingnggngnggngingingfgg
stderr
WARNING:tensorflow:From /usr/local/lib/python2.7/dist-packages/tensorflow/python/ops/resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.