
17_Combining RNN and CNN

chuuvelop 2025. 4. 29. 17:31
Combining an RNN with a CNN

The idea is to put a Conv1D layer in front of the LSTM: the convolution slides 5-token windows over the embedded review and extracts local, phrase-level features, and the LSTM then models the order of those features across the whole review to produce the sentiment prediction.

 

 

from tensorflow.keras.datasets import imdb
from tensorflow.keras.preprocessing.sequence import pad_sequences
from tensorflow import keras
from sklearn.model_selection import train_test_split
import numpy as np
import matplotlib.pyplot as plt

 

Data preparation

# Load the IMDB reviews, keeping only the 5,000 most frequent words
(x_train, y_train), (x_test, y_test) = imdb.load_data(num_words = 5000)

# Hold out 20% of the training reviews as a validation set, stratified by label
x_train, x_val, y_train, y_val = train_test_split(x_train, y_train, test_size = 0.2,
                                                  stratify = y_train, random_state = 26)

# Pad (or truncate) every review to exactly 100 tokens
train_seq = pad_sequences(x_train, maxlen = 100)
val_seq = pad_sequences(x_val, maxlen = 100)
test_seq = pad_sequences(x_test, maxlen = 100)
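
To sanity-check what these integer sequences encode, they can be mapped back to words with the dataset's word index (a quick sketch, not part of the original run; by default Keras shifts the indices by 3 to reserve slots for padding, start, and out-of-vocabulary markers):

# Decode the first padded training review back into words
word_index = imdb.get_word_index()
index_to_word = {idx + 3: word for word, idx in word_index.items()}
print(" ".join(index_to_word.get(i, "?") for i in train_seq[0]))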

 

 

Model construction

model = keras.Sequential()
model.add(keras.Input(shape = (100,)))
model.add(keras.layers.Embedding(5000, 128))  # map each of the 5,000 word indices to a 128-dim vector
model.add(keras.layers.Dropout(0.5))
model.add(keras.layers.Conv1D(64, 5, activation = "relu"))  # 64 filters over 5-token windows
model.add(keras.layers.LSTM(64))  # model the order of the convolved features across the review
model.add(keras.layers.Dense(1, activation = "sigmoid"))  # binary sentiment probability

 

model.summary()
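
For reference, with this configuration the summary should look roughly like the table below (layer names and formatting vary by Keras version; the parameter counts follow from the layer sizes above). Note that Conv1D uses "valid" padding, so it shortens the sequence from 100 to 96 steps before it reaches the LSTM.

Layer (type)             Output Shape          Param #
embedding (Embedding)    (None, 100, 128)      640,000
dropout (Dropout)        (None, 100, 128)      0
conv1d (Conv1D)          (None, 96, 64)        41,024
lstm (LSTM)              (None, 64)            33,024
dense (Dense)            (None, 1)             65

Total params: 714,113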

 

model.compile(loss = "binary_crossentropy", optimizer = "adam", metrics = ["accuracy"])
# Stop if validation loss does not improve for 4 epochs and roll back to the best weights
es_cb = keras.callbacks.EarlyStopping(patience = 4, restore_best_weights = True)
# Save only the checkpoint with the best validation loss
cp_cb = keras.callbacks.ModelCheckpoint("./model/cnnrnn-model.keras", save_best_only = True)
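
One caveat (an assumption about the environment, not shown in the original run): ModelCheckpoint writes into ./model/, so that directory should exist before training starts; creating it up front avoids a failed save on some Keras versions.

import os
os.makedirs("model", exist_ok = True)  # make sure the checkpoint directory exists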

 

# Train for up to 100 epochs; with patience = 4, early stopping usually halts training much sooner
history = model.fit(train_seq, y_train, batch_size = 128, epochs = 100,
                    validation_data = (val_seq, y_val), callbacks = [cp_cb, es_cb])
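
Since matplotlib is already imported, the returned history object can be used to see how quickly early stopping kicked in; a minimal sketch:

# Plot training vs. validation loss for each epoch that actually ran
plt.plot(history.history["loss"], label = "train")
plt.plot(history.history["val_loss"], label = "val")
plt.xlabel("epoch")
plt.ylabel("loss")
plt.legend()
plt.show()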

 

model.evaluate(test_seq, y_test)

[0.3350280523300171, 0.8543199896812439]

The first value is the test loss and the second is the test accuracy: the combined CNN-RNN model classifies roughly 85.4% of the held-out reviews correctly.
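
Because restore_best_weights = True leaves the best weights in memory, the evaluation above already uses them; the saved checkpoint can also be reloaded later (a sketch, assuming the training run above completed and wrote the file):

# Reload the best checkpoint and confirm it reproduces the test score
best_model = keras.models.load_model("./model/cnnrnn-model.keras")
best_model.evaluate(test_seq, y_test)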