import tensorflow as tf
import numpy as np
import pandas as pd
from pylab import rcParams
import matplotlib.pyplot as plt
import warnings
from mlxtend.plotting import plot_decision_regions
from matplotlib.colors import ListedColormap
from tensorflow.keras.models import Sequential, model_from_json
from tensorflow.keras.layers import Dense
from sklearn.model_selection import train_test_split
from group_lasso import BaseGroupLasso, GroupLasso
from tensorflow import keras
import math
# using https://github.com/yngvem/group-lasso/
warnings.filterwarnings('ignore')

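# load_dataset is not defined in this listing; the helper below is a hypothetical sketch
# of what it might do (not the original code): each path in `paths` is assumed to be a
# whitespace-delimited numeric feature matrix, with cases labelled 1 and controls 0.
def load_dataset(paths):
    case_path, control_path = paths
    X_case = np.loadtxt(case_path)
    X_control = np.loadtxt(control_path)
    X = np.vstack([X_case, X_control])
    y = np.concatenate([np.ones(len(X_case)), np.zeros(len(X_control))])
    return X, y
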
nd_path = ['nds/allnds_case.txt', 'nds/allnds_control.txt']

X, y = load_dataset(nd_path)
# https://github.com/bhattbhavesh91/regularization-neural-networks/blob/master/regularization-notebook.ipynb
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                    test_size=0.33,
                                                    random_state=42)
reg_model = Sequential()
# Dense expects an integer unit count, so cast the sqrt(n_features) heuristic to int
reg_model.add(Dense(int(math.sqrt(X_train.shape[1])), input_dim=X_train.shape[1], activation='relu'))
reg_model.add(Dense(1, activation='sigmoid'))
reg_model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
reg_history = reg_model.fit(X_train, y_train,
                            validation_data=(X_test, y_test),
                            epochs=4000, verbose=1)

# serialize model to JSON (the model defined above is reg_model, not model)
model_json = reg_model.to_json()
with open('model.json', 'w') as json_file:
    json_file.write(model_json)
# serialize weights to HDF5
reg_model.save_weights('model.h5')
print('Saved model to disk')

# load json and create model
json_file = open('model.json', 'r')
loaded_model_json = json_file.read()
json_file.close()
loaded_model = model_from_json(loaded_model_json)
# load weights into new model
loaded_model.load_weights('model.h5')
print('Loaded model from disk')

# evaluate loaded model on test data
# loaded_model.compile(loss='binary_crossentropy', optimizer='rmsprop', metrics=['accuracy'])

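# A minimal sketch of the evaluation step the comment above refers to (an addition, not the
# original code): the loaded model must be re-compiled before evaluate() can report metrics.
loaded_model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
score = loaded_model.evaluate(X_test, y_test, verbose=0)
print('test loss: %.4f, test accuracy: %.4f' % (score[0], score[1]))
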
asd_path = ['nds/asd_case.txt', 'nds/asd_control.txt']

X, y = load_dataset(asd_path)
# https://github.com/bhattbhavesh91/regularization-neural-networks/blob/master/regularization-notebook.ipynb
X_train, X_test, y_train, y_test = train_test_split(X, y,
                                                    test_size=0.33,
                                                    random_state=42)
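
# The yngvem/group-lasso package imported above is a scikit-learn style estimator, not a Keras
# regularizer, so the string 'group_lasso' will not resolve on its own. The class below is a
# hypothetical sketch of a row-wise (L2,1) group-lasso penalty registered under that name; the
# class name, the 'strength' parameter and its default are assumptions, not the original code.
class GroupLassoRegularizer(tf.keras.regularizers.Regularizer):
    def __init__(self, strength=0.01):
        self.strength = strength

    def __call__(self, weights):
        # kernel has shape (n_inputs, n_units); penalise the L2 norm of each input's row
        # so that whole input features are driven to zero (feature selection)
        return self.strength * tf.reduce_sum(tf.norm(weights, axis=1))

    def get_config(self):
        return {'strength': self.strength}

# Registering the class lets the plain string 'group_lasso' resolve; Keras instantiates a
# class registered by name with no arguments, i.e. the default strength.
tf.keras.utils.get_custom_objects()['group_lasso'] = GroupLassoRegularizer
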
reg_model = Sequential()
# Keras has no built-in 'group_lasso' regularizer; the string only resolves because a custom
# regularizer is registered under that name (see the sketch above this block).
reg_model.add(Dense(int(math.sqrt(X_train.shape[1])), input_dim=X_train.shape[1],
                    activation='relu', name='input',
                    kernel_regularizer='group_lasso'))
# transfer the pretrained first-layer weights; shapes must match the new input dimension
reg_model.layers[0].set_weights(loaded_model.layers[0].get_weights())
reg_model.add(Dense(1, activation='relu'))
reg_model.add(Dense(1, activation='sigmoid'))
reg_model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
reg_history = reg_model.fit(X_train, y_train,
                            validation_data=(X_test, y_test),
                            epochs=4000, verbose=1)


model = keras.models.load_model('path/to/saved/model')
# assumes the first Dense layer was created with name='input', as above
weights = model.get_layer('input').get_weights()

values = weights[0]  # kernel matrix, shape (n_inputs, n_units)
for indx, v in enumerate(values):
    # each row holds one input feature's outgoing weights; group lasso zeroes whole rows,
    # so any surviving nonzero entry means the feature was kept
    if np.any(v != 0):
        print('nonzero input', indx)