I created a deep neural network to solve a classification problem on a spiral dataset. However, when I measure its performance, the accuracy fluctuates up and down but always stays at around 50%, which is of course very bad.
The figure below shows the loss and accuracy over 100 training epochs.
How can I fix this? I have done some research, but I cannot find the bug in my code. Is there a mistake in my network architecture?
My code:
# Make sure we have the required libraries loaded
library(keras)
library(tensorflow)
library(ggplot2)
# Load the data
spiralData <- read.table("spiral.data", header = TRUE)
# Visualize the data
qplot(x, y, data = spiralData, colour = label)
# Store the data in features and labels.
x <- spiralData$x
y <- spiralData$y
features <- matrix(c(x, y), nrow = length(x))
labels <- matrix(spiralData$label)
# Create model.
model <- keras_model_sequential()
# Add layers and compile the model.
# Our model consists of 4 hidden layers, each with 6 neurons.
model %>%
  layer_dense(units = 6, activation = 'tanh', input_shape = c(2)) %>%
  layer_dense(units = 6, activation = 'tanh') %>%
  layer_dense(units = 6, activation = 'tanh') %>%
  layer_dense(units = 6, activation = 'tanh') %>%
  layer_dense(units = 1, activation = 'sigmoid') %>%
  compile(
    optimizer = 'rmsprop',
    loss = 'binary_crossentropy',
    metrics = c('accuracy')
  )
# Train the model, iterating on the data in batches of 32 samples.
# Also, visualize the training process.
model %>% fit(features, labels, epochs = 100, batch_size = 32)
# Evaluate the model.
score <- model %>% evaluate(features, labels, batch_size = 32)
print(score)
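For reference, here is a minimal check (not part of my original script) of what the trained model actually outputs; `predict` returns the raw sigmoid probabilities, so if the network has collapsed to a near-constant output, they should all cluster around a single value, which would match an accuracy stuck at chance level:
# Inspect the raw sigmoid outputs of the trained model.
probs <- model %>% predict(features)
summary(probs)
# Compare hard 0/1 predictions against the true labels.
predicted <- as.integer(probs > 0.5)
table(predicted, actual = labels)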
My "spiral.data" dataset:
x y label
1 0 1
-1 0 0
0.971354 0.209317 1
-0.971354 -0.209317 0
0.906112 0.406602 1
-0.906112 -0.406602 0
0.807485 0.584507 1
-0.807485 -0.584507 0
0.679909 0.736572 1
-0.679909 -0.736572 0
0.528858 0.857455 1
-0.528858 -0.857455 0
0.360603 0.943128 1
-0.360603 -0.943128 0
0.181957 0.991002 1
-0.181957 -0.991002 0
-3.07692e-06 1 1
3.07692e-06 -1 0
-0.178211 0.970568 1
0.178211 -0.970568 0
-0.345891 0.90463 1
0.345891 -0.90463 0
-0.496812 0.805483 1
0.496812 -0.805483 0
-0.625522 0.67764 1
0.625522 -0.67764 0
-0.727538 0.52663 1
0.727538 -0.52663 0
-0.799514 0.35876 1
0.799514 -0.35876 0
-0.839328 0.180858 1
0.839328 -0.180858 0
-0.846154 -6.66667e-06 1
0.846154 6.66667e-06 0
-0.820463 -0.176808 1
0.820463 0.176808 0
-0.763975 -0.342827 1
0.763975 0.342827 0
-0.679563 -0.491918 1
0.679563 0.491918 0
-0.57112 -0.618723 1
0.57112 0.618723 0
-0.443382 -0.71888 1
0.443382 0.71888 0
-0.301723 -0.78915 1
0.301723 0.78915 0
-0.151937 -0.82754 1
0.151937 0.82754 0
9.23077e-06 -0.833333 1
-9.23077e-06 0.833333 0
0.148202 -0.807103 1
-0.148202 0.807103 0
0.287022 -0.750648 1
-0.287022 0.750648 0
0.411343 -0.666902 1
-0.411343 0.666902 0
0.516738 -0.559785 1
-0.516738 0.559785 0
0.599623 -0.43403 1
-0.599623 0.43403 0
0.65738 -0.294975 1
-0.65738 0.294975 0
0.688438 -0.14834 1
-0.688438 0.14834 0
0.692308 1.16667e-05 1
-0.692308 -1.16667e-05 0
0.669572 0.144297 1
-0.669572 -0.144297 0
0.621838 0.27905 1
-0.621838 -0.27905 0
0.551642 0.399325 1
-0.551642 -0.399325 0
0.462331 0.500875 1
-0.462331 -0.500875 0
0.357906 0.580303 1
-0.357906 -0.580303 0
0.242846 0.635172 1
-0.242846 -0.635172 0
0.12192 0.664075 1
-0.12192 -0.664075 0
-1.07692e-05 0.666667 1
1.07692e-05 -0.666667 0
-0.118191 0.643638 1
0.118191 -0.643638 0
-0.228149 0.596667 1
0.228149 -0.596667 0
-0.325872 0.528323 1
0.325872 -0.528323 0
-0.407954 0.441933 1
0.407954 -0.441933 0
-0.471706 0.341433 1
0.471706 -0.341433 0
-0.515245 0.231193 1
0.515245 -0.231193 0
-0.537548 0.115822 1
0.537548 -0.115822 0
-0.538462 -1.33333e-05 1
0.538462 1.33333e-05 0
-0.518682 -0.111783 1
0.518682 0.111783 0
-0.479702 -0.215272 1
0.479702 0.215272 0
-0.423723 -0.306732 1
0.423723 0.306732 0
-0.353545 -0.383025 1
0.353545 0.383025 0
-0.272434 -0.441725 1
0.272434 0.441725 0
-0.183971 -0.481192 1
0.183971 0.481192 0
-0.0919062 -0.500612 1
0.0919062 0.500612 0
1.23077e-05 -0.5 1
-1.23077e-05 0.5 0
0.0881769 -0.480173 1
-0.0881769 0.480173 0
0.169275 -0.442687 1
-0.169275 0.442687 0
0.2404 -0.389745 1
-0.2404 0.389745 0
0.299169 -0.324082 1
-0.299169 0.324082 0
0.343788 -0.248838 1
-0.343788 0.248838 0
0.373109 -0.167412 1
-0.373109 0.167412 0
0.386658 -0.0833083 1
-0.386658 0.0833083 0
0.384615 1.16667e-05 1
-0.384615 -1.16667e-05 0
0.367792 0.0792667 1
-0.367792 -0.0792667 0
0.337568 0.15149 1
-0.337568 -0.15149 0
0.295805 0.214137 1
-0.295805 -0.214137 0
0.24476 0.265173 1
-0.24476 -0.265173 0
0.186962 0.303147 1
-0.186962 -0.303147 0
0.125098 0.327212 1
-0.125098 -0.327212 0
0.0618938 0.337147 1
-0.0618938 -0.337147 0
-1.07692e-05 0.333333 1
1.07692e-05 -0.333333 0
-0.0581615 0.31671 1
0.0581615 -0.31671 0
-0.110398 0.288708 1
0.110398 -0.288708 0
-0.154926 0.251167 1
0.154926 -0.251167 0
-0.190382 0.206232 1
0.190382 -0.206232 0
-0.215868 0.156247 1
0.215868 -0.156247 0
-0.230974 0.103635 1
0.230974 -0.103635 0
-0.235768 0.050795 1
0.235768 -0.050795 0
-0.230769 -1e-05 1
0.230769 1e-05 0
-0.216903 -0.0467483 1
0.216903 0.0467483 0
-0.195432 -0.0877067 1
0.195432 0.0877067 0
-0.167889 -0.121538 1
0.167889 0.121538 0
-0.135977 -0.14732 1
0.135977 0.14732 0
-0.101492 -0.164567 1
0.101492 0.164567 0
-0.0662277 -0.17323 1
0.0662277 0.17323 0
-0.0318831 -0.173682 1
0.0318831 0.173682 0
6.15385e-06 -0.166667 1
-6.15385e-06 0.166667 0
0.0281431 -0.153247 1
-0.0281431 0.153247 0
0.05152 -0.13473 1
-0.05152 0.13473 0
0.0694508 -0.112592 1
-0.0694508 0.112592 0
0.0815923 -0.088385 1
-0.0815923 0.088385 0
0.0879462 -0.063655 1
-0.0879462 0.063655 0
0.0888369 -0.0398583 1
-0.0888369 0.0398583 0
0.0848769 -0.018285 1
-0.0848769 0.018285 0
0.0769231 3.33333e-06 1
-0.0769231 -3.33333e-06 0
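To rule out a parsing problem, a quick base-R sanity check of the loaded file (again, not part of the script above) would be the following; note that the classes are perfectly balanced, one mirrored point per spiral, so a constant classifier would score about 50%:
# Verify that read.table parsed two numeric feature columns
# plus an integer label column.
str(spiralData)
# Confirm the 50/50 class balance.
table(spiralData$label)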
After visualization, the dataset looks like this:
[Plot: the two classes form interleaved spirals, with label 1 and label 0 in different colours]