-rw-r--r--  dlstuff/.two.py.swp  bin  0 -> 12288 bytes
-rwxr-xr-x  dlstuff/four.py      106
-rwxr-xr-x  dlstuff/one.py        78
-rwxr-xr-x  dlstuff/three.py     119
-rwxr-xr-x  dlstuff/two.py       109
5 files changed, 412 insertions, 0 deletions
diff --git a/dlstuff/.two.py.swp b/dlstuff/.two.py.swp
new file mode 100644
index 0000000..3e75bd6
--- /dev/null
+++ b/dlstuff/.two.py.swp
Binary files differ
diff --git a/dlstuff/four.py b/dlstuff/four.py
new file mode 100755
index 0000000..983386a
--- /dev/null
+++ b/dlstuff/four.py
@@ -0,0 +1,106 @@
+#!/usr/bin/python3
+# _*_ coding=utf-8 _*_
+
+import argparse
+import code
+import readline
+import signal
+import sys
+import numpy as np
+from keras.datasets import boston_housing
+from keras import models
+from keras import layers
+import matplotlib.pyplot as plt
+from keras.utils.np_utils import to_categorical
+
+def SigHandler_SIGINT(signum, frame):
+    print()
+    sys.exit(0)
+
+class Argparser(object):
+    def __init__(self):
+        parser = argparse.ArgumentParser()
+        parser.add_argument("--string", type=str, help="string")
+        parser.add_argument("--bool", action="store_true", help="bool", default=False)
+        parser.add_argument("--dbg", action="store_true", help="debug", default=False)
+        self.args = parser.parse_args()
+
+def build_model(train_data):
+    model = models.Sequential()
+    model.add(layers.Dense(64, activation="relu", input_shape=(train_data.shape[1],)))
+    model.add(layers.Dense(64, activation="relu"))
+    model.add(layers.Dense(1))
+    model.compile(optimizer="rmsprop", loss="mse", metrics=["mae"])
+    return model
+
+def smooth_curve(points, factor=0.9):
+    smoothed_points = []
+    for point in points:
+        if smoothed_points:
+            previous = smoothed_points[-1]
+            smoothed_points.append(previous*factor+point*(1-factor))
+        else:
+            smoothed_points.append(point)
+    return smoothed_points
+
+# write code here
+def premain(argparser):
+    signal.signal(signal.SIGINT, SigHandler_SIGINT)
+    #here
+    (train_data, train_targets), (test_data, test_targets) = boston_housing.load_data()
+    mean = train_data.mean(axis=0)
+    train_data -= mean
+    std = train_data.std(axis=0)
+    train_data /= std
+
+    test_data -= mean
+    test_data /= std
+
+    k = 4  # number of folds for k-fold cross-validation
+    num_epochs = 500
+    num_val_samples = len(train_data) // k
+    num_epochs = 100  # overrides the 500 set above
+    all_scores = []
+    all_mae_histories = []
+
+    for i in range(k):
+        print("processing fold #", i)
+        val_data = train_data[i*num_val_samples:(i+1)*num_val_samples]
+        val_targets = train_targets[i*num_val_samples:(i+1)*num_val_samples]
+        partial_train_data = np.concatenate(
+                [train_data[:i*num_val_samples],
+                    train_data[(i+1)*num_val_samples:]], axis=0)
+        partial_train_targets = np.concatenate(
+                [train_targets[:i*num_val_samples],
+                    train_targets[(i+1)*num_val_samples:]], axis=0)
+        model = build_model(train_data)
+        history = model.fit(partial_train_data, partial_train_targets, validation_data=(val_data, val_targets), epochs=num_epochs, batch_size=1, verbose=0)
+        val_mse, val_mae = model.evaluate(val_data, val_targets, verbose=0)
+        mae_history = history.history["val_mean_absolute_error"]
+        all_mae_histories.append(mae_history)
+        all_scores.append(val_mae)
+
+    average_mae_history = [np.mean([x[i] for x in all_mae_histories]) for i in range(num_epochs)]
+    smoothed_mae_history = smooth_curve(average_mae_history[10:])
+    plt.plot(range(1, len(smoothed_mae_history) + 1), smoothed_mae_history)
+    plt.xlabel("Epochs")
+    plt.ylabel("Validation MAE")
+    plt.show()
+
+def main():
+    argparser = Argparser()
+    if argparser.args.dbg:
+        try:
+            premain(argparser)
+        except Exception as e:
+            print(e.__doc__)
+            if hasattr(e, "message"): print(e.message)  # Python 3 exceptions have no .message attribute
+            variables = globals().copy()
+            variables.update(locals())
+            shell = code.InteractiveConsole(variables)
+            shell.interact(banner="DEBUG REPL")
+    else:
+        premain(argparser)
+
+if __name__ == "__main__":
+    main()
diff --git a/dlstuff/one.py b/dlstuff/one.py
new file mode 100755
index 0000000..4012b89
--- /dev/null
+++ b/dlstuff/one.py
@@ -0,0 +1,78 @@
+#!/usr/bin/python3
+# _*_ coding=utf-8 _*_
+
+import argparse
+import code
+import readline
+import signal
+import sys
+from keras.datasets import mnist
+from keras import models
+from keras import layers
+from keras.utils import to_categorical
+import matplotlib.pyplot as plt
+
+def SigHandler_SIGINT(signum, frame):
+    print()
+    sys.exit(0)
+
+class Argparser(object):
+    def __init__(self):
+        parser = argparse.ArgumentParser()
+        parser.add_argument("--string", type=str, help="string")
+        parser.add_argument("--bool", action="store_true", help="bool", default=False)
+        parser.add_argument("--dbg", action="store_true", help="debug", default=False)
+        self.args = parser.parse_args()
+
+# write code here
+def premain(argparser):
+    signal.signal(signal.SIGINT, SigHandler_SIGINT)
+    #here
+    (train_images, train_labels), (test_images, test_labels) = mnist.load_data()
+    '''
+    print(train_images.shape)
+    print(len(train_labels))
+    print(train_labels)
+    print(test_images.shape)
+    print(len(test_labels))
+    print(test_labels)
+    digit = train_images[4]
+    plt.imshow(digit, cmap=plt.cm.binary)
+    plt.show()
+    '''
+
+    network = models.Sequential()
+    network.add(layers.Dense(512, activation="relu", input_shape=(28*28,)))
+    network.add(layers.Dense(10, activation="softmax"))
+    #network.compile(optimizer="rmsprop", loss="categorical_crossentropy", metrics=["accuracy"])
+    network.compile(optimizer="rmsprop", loss="mse", metrics=["accuracy"])
+
+    train_images = train_images.reshape((60000, 28 * 28))
+    train_images = train_images.astype("float32") / 255
+    test_images = test_images.reshape((10000, 28 * 28))
+    test_images = test_images.astype("float32") / 255
+    train_labels = to_categorical(train_labels)
+    test_labels = to_categorical(test_labels)
+
+    network.fit(train_images, train_labels, epochs=5, batch_size=128)
+
+    test_loss, test_acc = network.evaluate(test_images, test_labels)
+    print("test_acc:", test_acc)
+
+def main():
+    argparser = Argparser()
+    if argparser.args.dbg:
+        try:
+            premain(argparser)
+        except Exception as e:
+            print(e.__doc__)
+            if hasattr(e, "message"): print(e.message)
+            variables = globals().copy()
+            variables.update(locals())
+            shell = code.InteractiveConsole(variables)
+            shell.interact(banner="DEBUG REPL")
+    else:
+        premain(argparser)
+
+if __name__ == "__main__":
+    main()
diff --git a/dlstuff/three.py b/dlstuff/three.py
new file mode 100755
index 0000000..fe6e2ee
--- /dev/null
+++ b/dlstuff/three.py
@@ -0,0 +1,119 @@
+#!/usr/bin/python3
+# _*_ coding=utf-8 _*_
+
+import argparse
+import code
+import readline
+import signal
+import sys
+import numpy as np
+from keras.datasets import reuters
+from keras import models
+from keras import layers
+import matplotlib.pyplot as plt
+from keras.utils.np_utils import to_categorical
+
+def SigHandler_SIGINT(signum, frame):
+    print()
+    sys.exit(0)
+
+class Argparser(object):
+    def __init__(self):
+        parser = argparse.ArgumentParser()
+        parser.add_argument("--string", type=str, help="string")
+        parser.add_argument("--bool", action="store_true", help="bool", default=False)
+        parser.add_argument("--dbg", action="store_true", help="debug", default=False)
+        self.args = parser.parse_args()
+
+def vectorize_sequences(sequences, dimension=10000):
+    results = np.zeros((len(sequences), dimension))
+    for i, sequence in enumerate(sequences):
+        results[i, sequence] = 1.
+    return results
+
+def to_one_hot(labels, dimension=46):
+    results = np.zeros((len(labels), dimension))  # len(labels), not the undefined "sequences"
+    for i, label in enumerate(labels):
+        results[i, label] = 1.
+    return results
+
+def plot_loss(history):
+    history_dic = history.history
+    loss_values = history_dic["loss"]
+    val_loss_values = history_dic["val_loss"]
+    epochs = range(1, len(history_dic["loss"]) + 1)
+    plt.plot(epochs, loss_values, "bo", label="Training Loss")
+    plt.plot(epochs, val_loss_values, "b", label="Validation Loss")
+    plt.title("training and validation loss")
+    plt.xlabel("Epochs")
+    plt.ylabel("Loss")
+    plt.legend()
+    plt.show()
+
+def plot_acc(history):
+    history_dic = history.history
+    acc_values = history_dic["acc"]
+    val_acc_values = history_dic["val_acc"]
+    epochs = range(1, len(history_dic["acc"]) + 1)
+    plt.plot(epochs, acc_values, "bo", label="Training Acc")
+    plt.plot(epochs, val_acc_values, "b", label="Validation Acc")
+    plt.title("training and validation acc")
+    plt.xlabel("Epochs")
+    plt.ylabel("Acc")
+    plt.legend()
+    plt.show()
+
+# write code here
+def premain(argparser):
+    signal.signal(signal.SIGINT, SigHandler_SIGINT)
+    #here
+    (train_data, train_labels), (test_data, test_labels) = reuters.load_data(num_words=10000)
+    #print(len(train_data))
+    #print(len(test_data))
+    x_train = vectorize_sequences(train_data)
+    x_test = vectorize_sequences(test_data)
+    #one_hot_train_labels = to_one_hot(train_labels)
+    #one_hot_test_labels = to_one_hot(test_labels)
+    one_hot_train_labels = to_categorical(train_labels)
+    one_hot_test_labels = to_categorical(test_labels)
+
+    model = models.Sequential()
+    model.add(layers.Dense(64, activation="relu", input_shape=(10000,)))
+    #model.add(layers.Dense(64, activation="relu"))
+    model.add(layers.Dense(46, activation="softmax"))
+    model.compile(optimizer="rmsprop", loss="categorical_crossentropy", metrics=["accuracy"])
+
+    x_val = x_train[:1000]
+    partial_x_train = x_train[1000:]
+    y_val = one_hot_train_labels[:1000]
+    partial_y_train = one_hot_train_labels[1000:]
+    history = model.fit(partial_x_train, partial_y_train, epochs=9, batch_size=512, validation_data=(x_val, y_val))
+    '''
+    plot_loss(history)
+    plt.clf()
+    plot_acc(history)
+    '''
+    results = model.evaluate(x_test, one_hot_test_labels)
+    print(results)
+
+    predictions = model.predict(x_test)
+
+
+
+def main():
+    argparser = Argparser()
+    if argparser.args.dbg:
+        try:
+            premain(argparser)
+        except Exception as e:
+            print(e.__doc__)
+            if hasattr(e, "message"): print(e.message)
+            variables = globals().copy()
+            variables.update(locals())
+            shell = code.InteractiveConsole(variables)
+            shell.interact(banner="DEBUG REPL")
+    else:
+        premain(argparser)
+
+if __name__ == "__main__":
+    main()
diff --git a/dlstuff/two.py b/dlstuff/two.py
new file mode 100755
index 0000000..9eab134
--- /dev/null
+++ b/dlstuff/two.py
@@ -0,0 +1,109 @@
+#!/usr/bin/python3
+# _*_ coding=utf-8 _*_
+
+import argparse
+import code
+import readline
+import signal
+import sys
+from keras.datasets import imdb
+import numpy as np
+from keras import models
+from keras import layers
+import matplotlib.pyplot as plt
+
+def SigHandler_SIGINT(signum, frame):
+    print()
+    sys.exit(0)
+
+class Argparser(object):
+    def __init__(self):
+        parser = argparse.ArgumentParser()
+        parser.add_argument("--string", type=str, help="string")
+        parser.add_argument("--bool", action="store_true", help="bool", default=False)
+        parser.add_argument("--dbg", action="store_true", help="debug", default=False)
+        self.args = parser.parse_args()
+
+def vectorize_sequences(sequences, dimension=10000):
+    results = np.zeros((len(sequences), dimension))
+    for i, sequence in enumerate(sequences):
+        results[i, sequence] = 1.
+    return results
+
+def plot_loss(history):
+    history_dic = history.history
+    loss_values = history_dic["loss"]
+    val_loss_values = history_dic["val_loss"]
+    epochs = range(1, len(history_dic["loss"]) + 1)
+    plt.plot(epochs, loss_values, "bo", label="Training Loss")
+    plt.plot(epochs, val_loss_values, "b", label="Validation Loss")
+    plt.title("training and validation loss")
+    plt.xlabel("Epochs")
+    plt.ylabel("Loss")
+    plt.legend()
+    plt.show()
+
+def plot_acc(history):
+    history_dic = history.history
+    acc_values = history_dic["acc"]
+    val_acc_values = history_dic["val_acc"]
+    epochs = range(1, len(history_dic["acc"]) + 1)
+    plt.plot(epochs, acc_values, "bo", label="Training Acc")
+    plt.plot(epochs, val_acc_values, "b", label="Validation Acc")
+    plt.title("training and validation acc")
+    plt.xlabel("Epochs")
+    plt.ylabel("Acc")
+    plt.legend()
+    plt.show()
+
+# write code here
+def premain(argparser):
+    signal.signal(signal.SIGINT, SigHandler_SIGINT)
+    #here
+    (train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)
+    x_train = vectorize_sequences(train_data)
+    x_test = vectorize_sequences(test_data)
+    y_train = np.asarray(train_labels).astype("float32")
+    y_test = np.asarray(test_labels).astype("float32")
+
+    model = models.Sequential()
+    model.add(layers.Dense(16, activation="relu", input_shape=(10000,)))
+    model.add(layers.Dense(16, activation="relu"))
+    model.add(layers.Dense(1, activation="sigmoid"))
+
+    x_val = x_train[:10000]
+    partial_x_train = x_train[10000:]
+    y_val = y_train[:10000]
+    partial_y_train = y_train[10000:]
+
+    model.compile(optimizer="rmsprop", loss="binary_crossentropy", metrics=["acc"])
+
+    '''
+    history = model.fit(partial_x_train, partial_y_train, epochs=20, batch_size=512, validation_data=(x_val, y_val))
+    plot_loss(history)
+    plt.clf()
+    plot_acc(history)
+    '''
+
+    model.fit(x_train, y_train, epochs=4, batch_size=512)
+    results = model.evaluate(x_test, y_test)
+    print(results)
+
+
+def main():
+    argparser = Argparser()
+    if argparser.args.dbg:
+        try:
+            premain(argparser)
+        except Exception as e:
+            print(e.__doc__)
+            if hasattr(e, "message"): print(e.message)
+            variables = globals().copy()
+            variables.update(locals())
+            shell = code.InteractiveConsole(variables)
+            shell.interact(banner="DEBUG REPL")
+    else:
+        premain(argparser)
+
+if __name__ == "__main__":
+    main()
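
Note: all four scripts share the same --dbg scaffolding: when the flag is given and premain() raises, they print the exception and drop into a code.InteractiveConsole seeded with the merged globals and locals; without the flag the exception propagates normally. A minimal standalone sketch of that pattern (might_fail is a hypothetical stand-in for premain):

import code

def might_fail():
    raise ValueError("boom")  # hypothetical stand-in for premain()

try:
    might_fail()
except Exception as e:
    print(e)  # Python 3 keeps the message in str(e), not e.message
    variables = globals().copy()
    variables.update(locals())  # expose both scopes to the REPL
    code.InteractiveConsole(variables).interact(banner="DEBUG REPL")

Since the scripts are committed with mode 100755 and a python3 shebang, running e.g. ./dlstuff/two.py --dbg enables this debug path.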
