import numpy as np
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, SimpleRNN, Dense

# Toy corpus for character-level language modeling.
text = "This is a sample text for language modeling using RNN."

# Build character <-> index lookup tables over the vocabulary.
chars = sorted(set(text))
char_to_index = {char: index for index, char in enumerate(chars)}
index_to_char = {index: char for index, char in enumerate(chars)}
text_indices = [char_to_index[char] for char in text]

# Slice the text into fixed-length input windows, each paired with
# the character that immediately follows it.
seq_length = 20
sequences, next_chars = [], []
for i in range(len(text_indices) - seq_length):
    sequences.append(text_indices[i : i + seq_length])
    next_chars.append(text_indices[i + seq_length])
X, y = np.array(sequences), np.array(next_chars)

# Embedding -> single SimpleRNN layer -> softmax over the vocabulary.
model = Sequential([
    Embedding(input_dim=len(chars), output_dim=50, input_length=seq_length),
    SimpleRNN(100, return_sequences=False),
    Dense(len(chars), activation="softmax"),
])
model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")
model.fit(X, y, batch_size=64, epochs=50)

# Generate text one character at a time, feeding each predicted
# character back in as the tail of the next seed window.
seed_text = "This is a sample te"
generated_text = seed_text
num_chars_to_generate = 100
for _ in range(num_chars_to_generate):
    seed_indices = [char_to_index[char] for char in seed_text]
    # Left-pad short seeds with index 0 so the window is full length.
    if len(seed_indices) < seq_length:
        diff = seq_length - len(seed_indices)
        seed_indices = [0] * diff + seed_indices
    seed_indices = np.array(seed_indices).reshape(1, -1)
    next_index = model.predict(seed_indices).argmax()
    next_char = index_to_char[next_index]
    generated_text += next_char
    # Slide the window: drop the oldest character, append the new one.
    seed_text = seed_text[1:] + next_char
print(generated_text)
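A note on decoding (not part of the run recorded below): the generation loop always picks the argmax character, i.e. greedy decoding, which on a corpus this small tends to lock into repetitive loops, as the stdout below shows. A common alternative is to sample the next character from the softmax distribution with a temperature. The sketch below is a minimal illustration of that idea; the helper name and the 0.8 default temperature are illustrative assumptions, not part of the original paste.

# Hypothetical variant of the decoding step above: sample from the
# softmax output instead of taking the argmax. The helper name and
# the temperature value are illustrative assumptions.
def sample_with_temperature(probs, temperature=0.8):
    # Sharpen (temperature < 1) or flatten (> 1) the distribution,
    # renormalize, then draw a single index from it.
    logits = np.log(probs + 1e-9) / temperature
    exp = np.exp(logits - logits.max())
    p = exp / exp.sum()
    return np.random.choice(len(p), p=p)

# Inside the generation loop, this would replace the argmax line:
#   probs = model.predict(seed_indices)[0]
#   next_index = sample_with_temperature(probs)

Lower temperatures stay close to greedy decoding; higher values trade coherence for variety.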
Success  3.25s  239628KB
stdin
Standard input is empty
stdout
Epoch 1/50
34/34 [==============================] - 0s 13ms/sample - loss: 3.0862
Epoch 2/50
34/34 [==============================] - 0s 378us/sample - loss: 2.9927
Epoch 3/50
34/34 [==============================] - 0s 371us/sample - loss: 2.9024
Epoch 4/50
34/34 [==============================] - 0s 357us/sample - loss: 2.8109
Epoch 5/50
34/34 [==============================] - 0s 228us/sample - loss: 2.7156
Epoch 6/50
34/34 [==============================] - 0s 220us/sample - loss: 2.6154
Epoch 7/50
34/34 [==============================] - 0s 270us/sample - loss: 2.5106
Epoch 8/50
34/34 [==============================] - 0s 367us/sample - loss: 2.4019
Epoch 9/50
34/34 [==============================] - 0s 253us/sample - loss: 2.2913
Epoch 10/50
34/34 [==============================] - 0s 376us/sample - loss: 2.1817
Epoch 11/50
34/34 [==============================] - 0s 390us/sample - loss: 2.0760
Epoch 12/50
34/34 [==============================] - 0s 225us/sample - loss: 1.9745
Epoch 13/50
34/34 [==============================] - 0s 228us/sample - loss: 1.8746
Epoch 14/50
34/34 [==============================] - 0s 242us/sample - loss: 1.7738
Epoch 15/50
34/34 [==============================] - 0s 241us/sample - loss: 1.6726
Epoch 16/50
34/34 [==============================] - 0s 224us/sample - loss: 1.5734
Epoch 17/50
34/34 [==============================] - 0s 218us/sample - loss: 1.4772
Epoch 18/50
34/34 [==============================] - 0s 216us/sample - loss: 1.3840
Epoch 19/50
34/34 [==============================] - 0s 214us/sample - loss: 1.2935
Epoch 20/50
34/34 [==============================] - 0s 216us/sample - loss: 1.2066
Epoch 21/50
34/34 [==============================] - 0s 224us/sample - loss: 1.1241
Epoch 22/50
34/34 [==============================] - 0s 250us/sample - loss: 1.0462
Epoch 23/50
34/34 [==============================] - 0s 244us/sample - loss: 0.9728
Epoch 24/50
34/34 [==============================] - 0s 312us/sample - loss: 0.9041
Epoch 25/50
34/34 [==============================] - 0s 380us/sample - loss: 0.8391
Epoch 26/50
34/34 [==============================] - 0s 372us/sample - loss: 0.7761
Epoch 27/50
34/34 [==============================] - 0s 372us/sample - loss: 0.7147
Epoch 28/50
34/34 [==============================] - 0s 377us/sample - loss: 0.6564
Epoch 29/50
34/34 [==============================] - 0s 378us/sample - loss: 0.6029
Epoch 30/50
34/34 [==============================] - 0s 375us/sample - loss: 0.5539
Epoch 31/50
34/34 [==============================] - 0s 379us/sample - loss: 0.5078
Epoch 32/50
34/34 [==============================] - 0s 384us/sample - loss: 0.4644
Epoch 33/50
34/34 [==============================] - 0s 382us/sample - loss: 0.4244
Epoch 34/50
34/34 [==============================] - 0s 380us/sample - loss: 0.3883
Epoch 35/50
34/34 [==============================] - 0s 332us/sample - loss: 0.3553
Epoch 36/50
34/34 [==============================] - 0s 241us/sample - loss: 0.3250
Epoch 37/50
34/34 [==============================] - 0s 268us/sample - loss: 0.2972
Epoch 38/50
34/34 [==============================] - 0s 378us/sample - loss: 0.2719
Epoch 39/50
34/34 [==============================] - 0s 252us/sample - loss: 0.2488
Epoch 40/50
34/34 [==============================] - 0s 343us/sample - loss: 0.2278
Epoch 41/50
34/34 [==============================] - 0s 403us/sample - loss: 0.2088
Epoch 42/50
34/34 [==============================] - 0s 270us/sample - loss: 0.1919
Epoch 43/50
34/34 [==============================] - 0s 253us/sample - loss: 0.1769
Epoch 44/50
34/34 [==============================] - 0s 236us/sample - loss: 0.1635
Epoch 45/50
34/34 [==============================] - 0s 221us/sample - loss: 0.1515
Epoch 46/50
34/34 [==============================] - 0s 217us/sample - loss: 0.1407
Epoch 47/50
34/34 [==============================] - 0s 2ms/sample - loss: 0.1308
Epoch 48/50
34/34 [==============================] - 0s 236us/sample - loss: 0.1218
Epoch 49/50
34/34 [==============================] - 0s 372us/sample - loss: 0.1135
Epoch 50/50
34/34 [==============================] - 0s 379us/sample - loss: 0.1060
This is a sample teatrmnreiangnanedmodnonnngdsingf.dN.dmoneningogongnN.amodgodnonnndnngnaanngonmonnodnsingiliinngnds.ng
stderr
WARNING:tensorflow:From /usr/local/lib/python2.7/dist-packages/tensorflow/python/ops/resource_variable_ops.py:435: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.
Instructions for updating:
Colocations handled automatically by placer.