
An Example of Implementing a Convolutional Neural Network (CNN) with NumPy

import numpy as np
import sys


def conv_(img,conv_filter):
  filter_size = conv_filter.shape[1]
  result = np.zeros((img.shape))
  # Slide the filter over the image and apply the convolution at every valid position.
  for r in np.uint16(np.arange(filter_size/2.0, img.shape[0]-filter_size/2.0+1)):
    for c in np.uint16(np.arange(filter_size/2.0, img.shape[1]-filter_size/2.0+1)):
      # Region of the image currently covered by the filter.
      curr_region = img[r-np.uint16(np.floor(filter_size/2.0)):r+np.uint16(np.ceil(filter_size/2.0)),
                        c-np.uint16(np.floor(filter_size/2.0)):c+np.uint16(np.ceil(filter_size/2.0))]
      # Element-wise multiplication followed by a sum gives one output value.
      curr_result = curr_region * conv_filter
      conv_sum = np.sum(curr_result)
      # Store the sum in the feature map.
      result[r, c] = conv_sum

  # Trim the zero border so only the valid part of the convolution remains.
  final_result = result[np.uint16(filter_size/2.0):result.shape[0]-np.uint16(filter_size/2.0),
                        np.uint16(filter_size/2.0):result.shape[1]-np.uint16(filter_size/2.0)]
  return final_result
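As a quick sanity check (not part of the module itself), conv_ performs a valid convolution, so a hypothetical 5x5 input convolved with a single 3x3 filter should come back as a 3x3 feature map once the untouched border is trimmed:

# Hypothetical sanity check: a 5x5 image and a 3x3 averaging filter.
test_img = np.arange(25, dtype=np.float64).reshape(5, 5)
test_filter = np.ones((3, 3)) / 9.0
print(conv_(test_img, test_filter).shape)  # expected: (3, 3)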


def conv(img,conv_filter):
  # Check that the number of image channels matches the filter depth.
  if len(img.shape) > 2 or len(conv_filter.shape) > 3:
    if img.shape[-1] != conv_filter.shape[-1]:
      print("Error: the number of channels in the image and in the filter must match.")
      sys.exit()

  # Check that the filter is square.
  if conv_filter.shape[1] != conv_filter.shape[2]:
    print('Error: the filter must be square.')
    sys.exit()

  # Check that the filter size is odd.
  if conv_filter.shape[1] % 2 == 0:
    print('Error: the filter size must be an odd number.')
    sys.exit()

  # Empty feature map that will hold the output of convolving each filter with the image.
  feature_maps = np.zeros((img.shape[0] - conv_filter.shape[1] + 1,
                           img.shape[1] - conv_filter.shape[1] + 1,
                           conv_filter.shape[0]))

  # Convolve every filter with the image.
  for filter_num in range(conv_filter.shape[0]):
    print("Filter ",filter_num + 1)
    curr_filter = conv_filter[filter_num,:]

    # If the filter has more than one channel, convolve each channel with the image
    # separately and sum the per-channel results into a single feature map.
    if len(curr_filter.shape) > 2:
      conv_map = conv_(img[:, :, 0], curr_filter[:, :, 0])
      for ch_num in range(1, curr_filter.shape[-1]):
        conv_map = conv_map + conv_(img[:, :, ch_num], curr_filter[:, :, ch_num])
    else:
      conv_map = conv_(img,curr_filter)
    feature_maps[:, :, filter_num] = conv_map
  return feature_maps
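conv simply loops over the filter bank and stacks the resulting maps along the last axis, so a bank of two 3x3 filters applied to a single-channel image of shape (H, W) should return a tensor of shape (H-2, W-2, 2). A small, hypothetical check (again, not part of the module):

# Hypothetical check: two random 3x3 filters applied to a 10x10 grayscale image.
test_img = np.random.rand(10, 10)
test_filters = np.random.rand(2, 3, 3)
print(conv(test_img, test_filters).shape)  # expected: (8, 8, 2)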


def pooling(feature_map, size=2, stride=2):
  # Output of the max-pooling operation.
  pool_out = np.zeros((np.uint16((feature_map.shape[0] - size + 1) / stride + 1),
                       np.uint16((feature_map.shape[1] - size + 1) / stride + 1),
                       feature_map.shape[-1]))

  for map_num in range(feature_map.shape[-1]):
    r2 = 0
    for r in np.arange(0,feature_map.shape[0] - size + 1,stride):
      c2 = 0
      for c in np.arange(0,feature_map.shape[1] - size + 1,stride):
        pool_out[r2, c2, map_num] = np.max(feature_map[r:r+size, c:c+size, map_num])
        c2 = c2 + 1
      r2 = r2 + 1
  return pool_out
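The functions above make up the NumPyCNN module that the driver script below imports as numpycnn. That script also calls numpycnn.relu, which is missing from the listing; a minimal element-wise ReLU in the same style, assuming the (height, width, num_maps) layout used by pooling, could look like this:

def relu(feature_map):
  # Element-wise ReLU: keep positive activations, zero out the rest.
  relu_out = np.zeros(feature_map.shape)
  for map_num in range(feature_map.shape[-1]):
    for r in np.arange(0, feature_map.shape[0]):
      for c in np.arange(0, feature_map.shape[1]):
        relu_out[r, c, map_num] = np.max([feature_map[r, c, map_num], 0])
  return relu_out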
import skimage.data
import skimage.color
import numpy
import matplotlib.pyplot as plt
import NumPyCNN as numpycnn

# Read the sample image
img = skimage.data.chelsea()
# Convert it to grayscale
img = skimage.color.rgb2gray(img)

# Initialize the layer-1 filters
l1_filter = numpy.zeros((2, 3, 3))
# Vertical edge detector
l1_filter[0, :, :] = numpy.array([[[-1, 0, 1], [-1, 0, 1], [-1, 0, 1]]])
# Horizontal edge detector
l1_filter[1, :, :] = numpy.array([[[1, 1, 1], [0, 0, 0], [-1, -1, -1]]])

"""
第一個卷積層
"""
# 卷積操作
l1_feature_map = numpycnn.conv(img,l1_filter)
# ReLU
l1_feature_map_relu = numpycnn.relu(l1_feature_map)
# Pooling
l1_feature_map_relu_pool = numpycnn.pooling(l1_feature_map_relu,2,2)
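To see how the spatial dimensions shrink through the first layer, the intermediate shapes can be printed; this is an optional check rather than part of the original script:

# Optional shape check (not part of the original script).
print(l1_feature_map.shape)            # two maps, two pixels smaller than the input in each dimension
print(l1_feature_map_relu_pool.shape)  # roughly halved by the 2x2, stride-2 max pooling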

"""
第二個卷積層
"""
# 初始化卷積核
l2_filter = numpy.random.rand(3,5,l1_feature_map_relu_pool.shape[-1])
# 卷積操作
l2_feature_map = numpycnn.conv(l1_feature_map_relu_pool,l2_filter)
# ReLU
l2_feature_map_relu = numpycnn.relu(l2_feature_map)
# Pooling
l2_feature_map_relu_pool = numpycnn.pooling(l2_feature_map_relu,2)

"""
第三個卷積層
"""
# 初始化卷積核
l3_filter = numpy.random.rand(1,7,l2_feature_map_relu_pool.shape[-1])
# 卷積操作
l3_feature_map = numpycnn.conv(l2_feature_map_relu_pool,l3_filter)
# ReLU
l3_feature_map_relu = numpycnn.relu(l3_feature_map)
# Pooling
l3_feature_map_relu_pool = numpycnn.pooling(l3_feature_map_relu,2)

"""
結果視覺化
"""
fig0,ax0 = plt.subplots(nrows=1,ncols=1)
ax0.imshow(img).set_cmap("gray")
ax0.set_title("Input Image")
ax0.get_xaxis().set_ticks([])
ax0.get_yaxis().set_ticks([])
plt.savefig("in_img1.png",bbox_inches="tight")
plt.close(fig0)

# Layer 1
fig1,ax1 = plt.subplots(nrows=3,ncols=2)
ax1[0,0].imshow(l1_feature_map[:,:,0]).set_cmap("gray")
ax1[0,0].get_xaxis().set_ticks([])
ax1[0,0].get_yaxis().set_ticks([])
ax1[0,0].set_title("L1-Map1")

ax1[0,1].imshow(l1_feature_map[:,:,1]).set_cmap("gray")
ax1[0,1].get_xaxis().set_ticks([])
ax1[0,1].get_yaxis().set_ticks([])
ax1[0,1].set_title("L1-Map2")

ax1[1,0].imshow(l1_feature_map_relu[:,:,0]).set_cmap("gray")
ax1[1,0].get_xaxis().set_ticks([])
ax1[1,0].get_yaxis().set_ticks([])
ax1[1,0].set_title("L1-Map1ReLU")

ax1[1,1].imshow(l1_feature_map_relu[:,:,1]).set_cmap("gray")
ax1[1,1].get_xaxis().set_ticks([])
ax1[1,1].get_yaxis().set_ticks([])
ax1[1,1].set_title("L1-Map2ReLU")

ax1[2,0].imshow(l1_feature_map_relu_pool[:,:,0]).set_cmap("gray")
ax1[2,0].get_xaxis().set_ticks([])
ax1[2,0].get_yaxis().set_ticks([])
ax1[2,0].set_title("L1-Map1ReLUPool")

ax1[2,1].imshow(l1_feature_map_relu_pool[:,:,1]).set_cmap("gray")
ax1[2,1].get_xaxis().set_ticks([])
ax1[2,1].get_yaxis().set_ticks([])
ax1[2,1].set_title("L1-Map2ReLUPool")

plt.savefig("L1.png",bbox_inches="tight")
plt.close(fig1)

# Layer 2
fig2,ax2 = plt.subplots(nrows=3,ncols=3)
ax2[0,0].imshow(l2_feature_map[:,:,0]).set_cmap("gray")
ax2[0,0].get_xaxis().set_ticks([])
ax2[0,0].get_yaxis().set_ticks([])
ax2[0,0].set_title("L2-Map1")

ax2[0,1].imshow(l2_feature_map[:,:,1]).set_cmap("gray")
ax2[0,1].get_xaxis().set_ticks([])
ax2[0,1].get_yaxis().set_ticks([])
ax2[0,1].set_title("L2-Map2")

ax2[0,2].imshow(l2_feature_map[:,:,2]).set_cmap("gray")
ax2[0,2].get_xaxis().set_ticks([])
ax2[0,2].get_yaxis().set_ticks([])
ax2[0,2].set_title("L2-Map3")

ax2[1,0].imshow(l2_feature_map_relu[:,:,0]).set_cmap("gray")
ax2[1,0].get_xaxis().set_ticks([])
ax2[1,0].get_yaxis().set_ticks([])
ax2[1,0].set_title("L2-Map1ReLU")

ax2[1,1].imshow(l2_feature_map_relu[:,:,1]).set_cmap("gray")
ax2[1,1].get_xaxis().set_ticks([])
ax2[1,1].get_yaxis().set_ticks([])
ax2[1,1].set_title("L2-Map2ReLU")

ax2[1,2].imshow(l2_feature_map_relu[:,:,2]).set_cmap("gray")
ax2[1,2].get_xaxis().set_ticks([])
ax2[1,2].get_yaxis().set_ticks([])
ax2[1,2].set_title("L2-Map3ReLU")

ax2[2,0].imshow(l2_feature_map_relu_pool[:,:,0]).set_cmap("gray")
ax2[2,0].get_xaxis().set_ticks([])
ax2[2,0].get_yaxis().set_ticks([])
ax2[2,0].set_title("L2-Map1ReLUPool")

ax2[2,1].imshow(l2_feature_map_relu_pool[:,:,1]).set_cmap("gray")
ax2[2,1].get_xaxis().set_ticks([])
ax2[2,1].get_yaxis().set_ticks([])
ax2[2,1].set_title("L2-Map2ReLUPool")

ax2[2,2].imshow(l2_feature_map_relu_pool[:,:,2]).set_cmap("gray")
ax2[2,2].get_xaxis().set_ticks([])
ax2[2,2].get_yaxis().set_ticks([])
ax2[2,2].set_title("L2-Map3ReLUPool")

plt.savefig("L2.png",bbox_inches="tight")
plt.close(fig2)

# Layer 3
fig3,ax3 = plt.subplots(nrows=1,ncols=3)
ax3[0].imshow(l3_feature_map[:,:,0]).set_cmap("gray")
ax3[0].get_xaxis().set_ticks([])
ax3[0].get_yaxis().set_ticks([])
ax3[0].set_title("L3-Map1")

ax3[1].imshow(l3_feature_map_relu[:,:,0]).set_cmap("gray")
ax3[1].get_xaxis().set_ticks([])
ax3[1].get_yaxis().set_ticks([])
ax3[1].set_title("L3-Map1ReLU")

ax3[2].imshow(l3_feature_map_relu_pool[:,:,0]).set_cmap("gray")
ax3[2].get_xaxis().set_ticks([])
ax3[2].get_yaxis().set_ticks([])
ax3[2].set_title("L3-Map1ReLUPool")

plt.savefig("L3.png",bbox_inches="tight")
plt.close(fig3)

That concludes this worked example of implementing a convolutional neural network (CNN) with NumPy.