📑 Deep Learning. Practice Project 7_0:
Simple Neural Networks for Image Classification
✒️ Code Modules, Helpful Functions, & Settings
!python3 -m pip install --upgrade pip \
--user --quiet --no-warn-script-location
#!python3 -m pip install torch==1.8.0 \
#--user --quiet --no-warn-script-location
!python3 -m pip install torchvision==0.9.0 \
--user --quiet --no-warn-script-location
#!python3 -m pip install tensorflow==2.6.0 \
#--user --quiet --no-warn-script-location
path='/home/sc_work/.sage/local/lib/python3.9/site-packages'
import sys,warnings; sys.path.append(path)
warnings.filterwarnings('ignore')
import os,h5py,urllib,torch,pandas as pd,numpy as np
import tensorflow as tf,pylab as pl
import tensorflow.keras.layers as tkl
import tensorflow.keras.callbacks as tkc
from torch.utils.data import DataLoader as tdl
from torch.utils.data import Dataset as tds
from torchvision import transforms,utils,models
import torch.nn.functional as tnnf,torch.nn as tnn
dev=torch.device(
'cuda:0' if torch.cuda.is_available() else 'cpu')
from IPython.core.magic import register_line_magic
from IPython.display import HTML
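A quick environment check (not part of the original cell) can confirm the installed library versions and the selected device:
print('torch:',torch.__version__,
      '| tensorflow:',tf.__version__)
print('computing device:',dev)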
✒️ Data Loading
file_path='https://raw.githubusercontent.com/'+\
'OlgaBelitskaya/data_kitchen/main/'
file_name='WhiteFlowers128.h5'; img_size=int(48)
def get_data(file_path,file_name,img_size):
    # download the HDF5 file & save a local copy
    input_file=urllib.request.urlopen(file_path+file_name)
    output_file=open(file_name,'wb')
    output_file.write(input_file.read())
    output_file.close(); input_file.close()
    # read images, labels, & class names
    # (pretty_print & html are SageMathCell built-ins)
    with h5py.File(file_name,'r') as f:
        keys=list(f.keys())
        pretty_print(html(
            '<p>file keys: '+', '.join(keys)+'</p>'))
        images=np.array(f[keys[0]])
        images=tf.image.resize(
            images,[img_size,img_size]).numpy()
        labels=np.array(f[keys[1]])
        names=[el.decode('utf-8') for el in f[keys[2]]]
    pretty_print(html('<p>%s'%names+'</p>'))
    return images,labels,names
images,labels,names=get_data(file_path,file_name,img_size)
✒️ Data Processing
N=labels.shape[0]; n=int(.1*N)
num_classes=len(names); start=int(100)
# shuffle & split: 10% test, 10% valid, 80% train
shuffle_ids=np.arange(N)
np.random.RandomState(12).shuffle(shuffle_ids)
images=images[shuffle_ids]; labels=labels[shuffle_ids]
x_test,x_valid,x_train=images[:n],images[n:2*n],images[2*n:]
y_test,y_valid,y_train=labels[:n],labels[n:2*n],labels[2*n:]
df=pd.DataFrame(
[[x_train.shape,x_valid.shape,x_test.shape],
[x_train.dtype,x_valid.dtype,x_test.dtype],
[y_train.shape,y_valid.shape,y_test.shape],
[y_train.dtype,y_valid.dtype,y_test.dtype]],
columns=['train','valid','test'],
index=['image shape','image type',
'label shape','label type'])
def display_imgs(images,labels,names,start):
    fig=pl.figure(figsize=(6,3))
    # pick a random window of 6 images among the first `start` ones
    n=np.random.randint(0,start-1)
    for i in range(n,n+6):
        ax=fig.add_subplot(2,3,i-n+1,xticks=[],yticks=[])
        ax.set_title(
            names[labels[i]],color='slategray',
            fontdict={'fontsize':'large'})
        ax.imshow(images[i])
    pl.tight_layout(); pl.show()
display_imgs(images,labels,names,start); display(df)
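The torch data utilities imported above (tds, tdl) are not used at this point; as a minimal sketch of how the same split arrays could be wrapped for a PyTorch model later on (the class name FlowerData and the loader variables are illustrative assumptions, not part of the original cells):
class FlowerData(tds):
    # assumed helper: wraps the numpy arrays as a torch Dataset
    def __init__(self,images,labels):
        # channels-last float images -> channels-first float tensors
        x=torch.tensor(images,dtype=torch.float32)
        self.x=x.permute(int(0),int(3),int(1),int(2))
        self.y=torch.tensor(labels,dtype=torch.long)
    def __len__(self): return len(self.y)
    def __getitem__(self,idx): return self.x[idx],self.y[idx]
train_loader=tdl(FlowerData(x_train,y_train),
                 batch_size=int(16),shuffle=True)
valid_loader=tdl(FlowerData(x_valid,y_valid),
                 batch_size=int(16),shuffle=False)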
✒️ Keras Models
def keras_history_plot(fit_history,fig_size,color):
    keys=list(fit_history.history.keys())
    list_history=[fit_history.history[keys[i]]
                  for i in range(len(keys))]
    dfkeys=pd.DataFrame(list_history).T
    dfkeys.columns=keys
    fig=pl.figure(figsize=(fig_size,fig_size//2))
    # loss & val_loss
    ax1=fig.add_subplot(2,1,1)
    dfkeys.iloc[:,[int(0),int(2)]].plot(
        ax=ax1,color=['slategray',color])
    pl.grid()
    # accuracy & val_accuracy
    ax2=fig.add_subplot(2,1,2)
    dfkeys.iloc[:,[int(1),int(3)]].plot(
        ax=ax2,color=['slategray',color])
    pl.grid(); pl.show()
def cnn_model(num_classes):
    model=tf.keras.Sequential(); dr=float(.25)
    # convolutional block
    model.add(tkl.Conv2D(int(32),(int(3),int(3)),padding='same',
                         input_shape=x_train.shape[int(1):]))
    model.add(tkl.LeakyReLU(alpha=.02))
    model.add(tkl.MaxPooling2D(pool_size=(int(2),int(2))))
    model.add(tkl.Dropout(dr))
    model.add(tkl.GlobalMaxPooling2D())
    # dense classification head
    model.add(tkl.Dense(int(512)))
    model.add(tkl.LeakyReLU(alpha=.02))
    model.add(tkl.Dropout(2*dr))
    model.add(tkl.Dense(num_classes))
    model.add(tkl.Activation('softmax'))
    model.compile(loss='sparse_categorical_crossentropy',
                  optimizer='nadam',metrics=['accuracy'])
    return model
model=cnn_model(num_classes)
checkpointer=tkc.ModelCheckpoint(
filepath='/tmp/checkpoint',verbose=int(2),save_weights_only=True,
monitor='val_accuracy',mode='max',save_best_only=True)
history=model.fit(
x_train,y_train,batch_size=int(16),epochs=int(5),verbose=int(2),
validation_data=(x_valid,y_valid),callbacks=[checkpointer])
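For longer training runs than the five demonstration epochs above, additional Keras callbacks can be combined with the checkpointer; a possible sketch (the patience and factor values are assumptions):
early_stopping=tkc.EarlyStopping(
    monitor='val_loss',patience=int(10),verbose=int(2))
lr_reduction=tkc.ReduceLROnPlateau(
    monitor='val_loss',patience=int(5),factor=float(.5),verbose=int(2))
# e.g., callbacks=[checkpointer,early_stopping,lr_reduction]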
model.load_weights('/tmp/checkpoint')
print('[loss, accuracy] => %s'%\
str(model.evaluate(x_test,y_test,verbose=int(0))))
keras_history_plot(history,6,'#348ABD')
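To look at individual predictions rather than aggregate metrics, the restored weights can be applied to a few test images; a minimal sketch (variable names are assumptions):
y_test_pred=np.argmax(model.predict(x_test[:int(6)]),axis=int(-1))
for i in range(int(6)):
    print('true: %s => predicted: %s'%(
        names[y_test[i]],names[y_test_pred[i]]))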