import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Embedding, SimpleRNN, Dense

# Sample text data
text = "This is a sample text for language modeling using RNN."

# Create a set of unique characters in the text
chars = sorted(set(text))
char_to_index = {char: index for index, char in enumerate(chars)}
index_to_char = {index: char for index, char in enumerate(chars)}

# Convert text to a sequence of character indices
text_indices = [char_to_index[char] for char in text]

# Create input-output pairs for training: each window of seq_length
# characters is paired with the character that follows it
seq_length = 20
sequences = []
next_chars = []
for i in range(0, len(text_indices) - seq_length):
    sequences.append(text_indices[i : i + seq_length])
    next_chars.append(text_indices[i + seq_length])

# Convert sequences and next_chars to numpy arrays
X = np.array(sequences)
y = np.array(next_chars)

# Build the RNN model
model = Sequential([
    Embedding(input_dim=len(chars), output_dim=50, input_length=seq_length),
    SimpleRNN(100, return_sequences=False),
    Dense(len(chars), activation="softmax"),
])
model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")

# Train the model
model.fit(X, y, batch_size=64, epochs=50)

# Generate text using the trained model
seed_text = "This is a sample te"
generated_text = seed_text
num_chars_to_generate = 100

for _ in range(num_chars_to_generate):
    seed_indices = [char_to_index[char] for char in seed_text]
    # Left-pad with index 0 if the seed is shorter than the model's input length
    if len(seed_indices) < seq_length:
        diff = seq_length - len(seed_indices)
        seed_indices = [0] * diff + seed_indices
    seed_indices = np.array(seed_indices).reshape(1, -1)
    # Greedy decode: always take the single most probable next character
    next_index = model.predict(seed_indices).argmax()
    next_char = index_to_char[next_index]
    generated_text += next_char
    # Slide the window forward by one character
    seed_text = seed_text[1:] + next_char

print(generated_text)
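The greedy argmax decode in the generation loop always picks the single most probable character, which on a corpus this small tends to lock the model into repeating loops. A minimal sketch of temperature-based sampling as an alternative; the sample_next_index helper and its temperature parameter are assumptions for illustration, not part of the original example:

# Hypothetical helper, not part of the original listing: sample the next
# character index from the softmax output instead of taking the argmax.
def sample_next_index(probs, temperature=0.8):
    probs = np.asarray(probs, dtype=np.float64)
    # Rescale the distribution: temperature < 1 sharpens it, > 1 flattens it
    logits = np.log(probs + 1e-9) / temperature
    exp_logits = np.exp(logits - logits.max())
    probs = exp_logits / exp_logits.sum()
    return np.random.choice(len(probs), p=probs)

# Inside the generation loop this would replace the argmax line:
# next_index = sample_next_index(model.predict(seed_indices)[0])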
Epoch 1/50  34/34 [==============================] - 1s 26ms/sample - loss: 3.1168
Epoch 2/50  34/34 [==============================] - 0s 245us/sample - loss: 3.0151
Epoch 3/50  34/34 [==============================] - 0s 236us/sample - loss: 2.9173
Epoch 4/50  34/34 [==============================] - 0s 237us/sample - loss: 2.8181
Epoch 5/50  34/34 [==============================] - 0s 225us/sample - loss: 2.7166
Epoch 6/50  34/34 [==============================] - 0s 222us/sample - loss: 2.6148
Epoch 7/50  34/34 [==============================] - 0s 255us/sample - loss: 2.5132
Epoch 8/50  34/34 [==============================] - 0s 368us/sample - loss: 2.4056
Epoch 9/50  34/34 [==============================] - 0s 377us/sample - loss: 2.2887
Epoch 10/50 34/34 [==============================] - 0s 370us/sample - loss: 2.1725
Epoch 11/50 34/34 [==============================] - 0s 390us/sample - loss: 2.0693
Epoch 12/50 34/34 [==============================] - 0s 391us/sample - loss: 1.9791
Epoch 13/50 34/34 [==============================] - 0s 378us/sample - loss: 1.8919
Epoch 14/50 34/34 [==============================] - 0s 387us/sample - loss: 1.8005
Epoch 15/50 34/34 [==============================] - 0s 398us/sample - loss: 1.7045
Epoch 16/50 34/34 [==============================] - 0s 376us/sample - loss: 1.6080
Epoch 17/50 34/34 [==============================] - 0s 371us/sample - loss: 1.5150
Epoch 18/50 34/34 [==============================] - 0s 388us/sample - loss: 1.4253
Epoch 19/50 34/34 [==============================] - 0s 308us/sample - loss: 1.3377
Epoch 20/50 34/34 [==============================] - 0s 236us/sample - loss: 1.2534
Epoch 21/50 34/34 [==============================] - 0s 271us/sample - loss: 1.1744
Epoch 22/50 34/34 [==============================] - 0s 366us/sample - loss: 1.1019
Epoch 23/50 34/34 [==============================] - 0s 261us/sample - loss: 1.0335
Epoch 24/50 34/34 [==============================] - 0s 393us/sample - loss: 0.9659
Epoch 25/50 34/34 [==============================] - 0s 340us/sample - loss: 0.8989
Epoch 26/50 34/34 [==============================] - 0s 241us/sample - loss: 0.8339
Epoch 27/50 34/34 [==============================] - 0s 240us/sample - loss: 0.7720
Epoch 28/50 34/34 [==============================] - 0s 224us/sample - loss: 0.7140
Epoch 29/50 34/34 [==============================] - 0s 223us/sample - loss: 0.6600
Epoch 30/50 34/34 [==============================] - 0s 224us/sample - loss: 0.6099
Epoch 31/50 34/34 [==============================] - 0s 225us/sample - loss: 0.5642
Epoch 32/50 34/34 [==============================] - 0s 233us/sample - loss: 0.5227
Epoch 33/50 34/34 [==============================] - 0s 242us/sample - loss: 0.4836
Epoch 34/50 34/34 [==============================] - 0s 231us/sample - loss: 0.4461
Epoch 35/50 34/34 [==============================] - 0s 223us/sample - loss: 0.4106
Epoch 36/50 34/34 [==============================] - 0s 225us/sample - loss: 0.3778
Epoch 37/50 34/34 [==============================] - 0s 223us/sample - loss: 0.3479
Epoch 38/50 34/34 [==============================] - 0s 332us/sample - loss: 0.3207
Epoch 39/50 34/34 [==============================] - 0s 377us/sample - loss: 0.2960
Epoch 40/50 34/34 [==============================] - 0s 373us/sample - loss: 0.2732
Epoch 41/50 34/34 [==============================] - 0s 370us/sample - loss: 0.2520
Epoch 42/50 34/34 [==============================] - 0s 387us/sample - loss: 0.2326
Epoch 43/50 34/34 [==============================] - 0s 395us/sample - loss: 0.2151
Epoch 44/50 34/34 [==============================] - 0s 394us/sample - loss: 0.1994
Epoch 45/50 34/34 [==============================] - 0s 392us/sample - loss: 0.1852
Epoch 46/50 34/34 [==============================] - 0s 376us/sample - loss: 0.1721
Epoch 47/50 34/34 [==============================] - 0s 2ms/sample - loss: 0.1601
Epoch 48/50 34/34 [==============================] - 0s 359us/sample - loss: 0.1490
Epoch 49/50 34/34 [==============================] - 0s 287us/sample - loss: 0.1390
Epoch 50/50 34/34 [==============================] - 0s 288us/sample - loss: 0.1299

This is a sample teNt aor aang asNaaoanlinggaaangoangangansNnssRgaaRNansansargang nsiaagangangansansanggaasansansangang
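The generated text above degenerates into repeated fragments, which is expected from a SimpleRNN trained on a 54-character corpus and decoded greedily. One common variation, sketched below as an assumption rather than part of the original run, is to swap the SimpleRNN layer for an LSTM, which typically carries longer character context:

from tensorflow.keras.layers import LSTM

# Hypothetical variant: identical to the original model except that the
# SimpleRNN layer is replaced by an LSTM of the same width.
model = Sequential([
    Embedding(input_dim=len(chars), output_dim=50, input_length=seq_length),
    LSTM(100),
    Dense(len(chars), activation="softmax"),
])
model.compile(loss="sparse_categorical_crossentropy", optimizer="adam")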