From 3a579e61e4c33bff9f3d8a4b4588f1f84d37b9c9 Mon Sep 17 00:00:00 2001
From: Crista Lopes
Date: Sat, 28 Dec 2019 18:24:25 -0800
Subject: [PATCH] Added 36

---
 .../tf-36-visualize-errors.py                 | 90 +++++++++++++++++++
 36-dense-shallow-out-of-control/tf-36.py      | 77 ++++++++++++++++
 2 files changed, 167 insertions(+)
 create mode 100644 36-dense-shallow-out-of-control/tf-36-visualize-errors.py
 create mode 100644 36-dense-shallow-out-of-control/tf-36.py

diff --git a/36-dense-shallow-out-of-control/tf-36-visualize-errors.py b/36-dense-shallow-out-of-control/tf-36-visualize-errors.py
new file mode 100644
index 0000000..d9000a7
--- /dev/null
+++ b/36-dense-shallow-out-of-control/tf-36-visualize-errors.py
@@ -0,0 +1,90 @@
+from keras.models import Sequential
+from keras.layers import Dense
+import numpy as np
+import sys, os, string, random
+
+characters = string.printable
+char_indices = dict((c, i) for i, c in enumerate(characters))
+indices_char = dict((i, c) for i, c in enumerate(characters))
+
+INPUT_VOCAB_SIZE = len(characters)
+BATCH_SIZE = 200
+
+def encode_one_hot(line):
+    x = np.zeros((len(line), INPUT_VOCAB_SIZE))
+    for i, c in enumerate(line):
+        if c in characters:
+            index = char_indices[c]
+        else:
+            index = char_indices[' ']
+        x[i][index] = 1
+    return x
+
+def decode_one_hot(x):
+    s = []
+    for onehot in x:
+        one_index = np.argmax(onehot)
+        s.append(indices_char[one_index])
+    return ''.join(s)
+
+def input_generator(nsamples):
+    def generate_line():
+        inline = []; outline = []
+        for _ in range(nsamples):
+            c = random.choice(characters)
+            expected = c.lower() if c in string.ascii_letters else ' '
+            inline.append(c); outline.append(expected)
+        return ''.join(inline), ''.join(outline)
+
+    while True:
+        input_data, expected = generate_line()
+        data_in = encode_one_hot(input_data)
+        data_out = encode_one_hot(expected)
+        yield data_in, data_out
+
+def build_model():
+    # Normalize characters using a dense layer
+    model = Sequential()
+    dense_layer = Dense(INPUT_VOCAB_SIZE,
+                        input_shape=(INPUT_VOCAB_SIZE,),
+                        activation='softmax')
+    model.add(dense_layer)
+    return model
+
+def train_model(model):
+    model.compile(loss='categorical_crossentropy',
+                  optimizer='adam',
+                  metrics=['accuracy'])
+    # Train the model each generation and show predictions
+    val_gen2 = input_generator(20)
+    for iteration in range(1, 50):
+        print()
+        print('Iteration', iteration, '-' * 50)
+        input_gen = input_generator(BATCH_SIZE)
+        val_gen = input_generator(BATCH_SIZE)
+        model.fit_generator(input_gen,
+                            epochs = 1, workers=1,
+                            steps_per_epoch = 20,
+                            validation_data = val_gen,
+                            validation_steps = 10)
+        # Visualize errors
+        batch_x, batch_y = next(val_gen2)
+        preds = model.predict(batch_x)
+        original = decode_one_hot(batch_x)
+        correct = decode_one_hot(batch_y)
+        guess = decode_one_hot(preds)
+        print('Original :', original)
+        print('True output:', correct)
+        print('Prediction :', guess)
+
+model = build_model()
+train_model(model)
+input("Network has been trained. Press to run program.")
+
+with open(sys.argv[1]) as f:
+    for line in f:
+        if line.isspace(): continue
+        batch = encode_one_hot(line)
+        preds = model.predict(batch)
+        normal = decode_one_hot(preds)
+        print(normal)
\ No newline at end of file
diff --git a/36-dense-shallow-out-of-control/tf-36.py b/36-dense-shallow-out-of-control/tf-36.py
new file mode 100644
index 0000000..6b66860
--- /dev/null
+++ b/36-dense-shallow-out-of-control/tf-36.py
@@ -0,0 +1,77 @@
+from keras.models import Sequential
+from keras.layers import Dense
+import numpy as np
+import sys, os, string, random
+
+characters = string.printable
+char_indices = dict((c, i) for i, c in enumerate(characters))
+indices_char = dict((i, c) for i, c in enumerate(characters))
+
+INPUT_VOCAB_SIZE = len(characters)
+BATCH_SIZE = 200
+
+def encode_one_hot(line):
+    x = np.zeros((len(line), INPUT_VOCAB_SIZE))
+    for i, c in enumerate(line):
+        if c in characters:
+            index = char_indices[c]
+        else:
+            index = char_indices[' ']
+        x[i][index] = 1
+    return x
+
+def decode_one_hot(x):
+    s = []
+    for onehot in x:
+        one_index = np.argmax(onehot)
+        s.append(indices_char[one_index])
+    return ''.join(s)
+
+def build_model():
+    # Normalize characters using a dense layer
+    model = Sequential()
+    dense_layer = Dense(INPUT_VOCAB_SIZE,
+                        input_shape=(INPUT_VOCAB_SIZE,),
+                        activation='softmax')
+    model.add(dense_layer)
+    return model
+
+def input_generator(nsamples):
+    def generate_line():
+        inline = []; outline = []
+        for _ in range(nsamples):
+            c = random.choice(characters)
+            expected = c.lower() if c in string.ascii_letters else ' '
+            inline.append(c); outline.append(expected)
+        return ''.join(inline), ''.join(outline)
+
+    while True:
+        input_data, expected = generate_line()
+        data_in = encode_one_hot(input_data)
+        data_out = encode_one_hot(expected)
+        yield data_in, data_out
+
+def train(model):
+    model.compile(loss='categorical_crossentropy',
+                  optimizer='adam',
+                  metrics=['accuracy'])
+    input_gen = input_generator(BATCH_SIZE)
+    validation_gen = input_generator(BATCH_SIZE)
+    model.fit_generator(input_gen,
+                        epochs = 50, workers=1,
+                        steps_per_epoch = 20,
+                        validation_data = validation_gen,
+                        validation_steps = 10)
+
+model = build_model()
+model.summary()
+train(model)
+
+input("Network has been trained. Press to run program.")
+with open(sys.argv[1]) as f:
+    for line in f:
+        if line.isspace(): continue
+        batch = encode_one_hot(line)
+        preds = model.predict(batch)
+        normal = decode_one_hot(preds)
+        print(normal)
\ No newline at end of file