TensorFlowでkeras その11
2165 ワード
概要
TensorFlowでkerasやってみた。
fizzbuzz学習してみた。
写真
環境
windows 7 sp1 64bit
anaconda3
tensorflow 1.2
サンプルコード
import numpy as np
from tensorflow.contrib.keras.python.keras.models import Sequential
from tensorflow.contrib.keras.python.keras.layers import Dense, Activation
from tensorflow.contrib.keras.python.keras.models import Model
def binary_encode(i, num_digits):
    """Encode integer *i* as a little-endian binary vector of length *num_digits*."""
    bits = [(i >> shift) & 1 for shift in range(num_digits)]
    return np.array(bits)
def fizz_buzz_encode(i):
    """One-hot fizzbuzz class for *i*: [number, fizz, buzz, fizzbuzz]."""
    label = np.zeros(4, dtype=int)
    if i % 15 == 0:
        label[3] = 1      # divisible by both 3 and 5
    elif i % 5 == 0:
        label[2] = 1      # buzz
    elif i % 3 == 0:
        label[1] = 1      # fizz
    else:
        label[0] = 1      # plain number
    return label
def fizz_buzz(i, prediction):
    """Map a predicted class index (0-3) to its fizzbuzz output string."""
    outputs = (str(i), "fizz", "buzz", "fizzbuzz")
    return outputs[prediction]
# Train a small MLP to learn fizzbuzz from the binary encoding of 101..1023,
# then evaluate on 1..100 (the classic "fizzbuzz by neural network" setup).
NUM_DIGITS = 10

trX = np.array([binary_encode(i, NUM_DIGITS) for i in range(101, 2 ** NUM_DIGITS)])
trY = np.array([fizz_buzz_encode(i) for i in range(101, 2 ** NUM_DIGITS)])

model = Sequential()
model.add(Dense(128, input_dim=NUM_DIGITS))
model.add(Activation('tanh'))
# NOTE: the original passed input_dim=128 here; Keras ignores input_dim on
# non-first layers, so it is dropped.
model.add(Dense(4))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(trX, trY, epochs=3600, batch_size=128)

# BUG FIX: the original opened the file without ever closing it; use a
# context manager so the JSON architecture file is flushed and closed.
with open('fizzbuzz2.json', 'w') as f:
    f.write(model.to_json())
model.save('fizzbuzz2.hdf5')
print("save model")

numbers = np.arange(1, 101)
teX = np.array([binary_encode(i, NUM_DIGITS) for i in range(1, 101)])
teY = np.array([fizz_buzz_encode(i) for i in range(1, 101)])
score = model.evaluate(teX, teY, verbose=0)
print('Test loss : {:>.4f}'.format(score[0]))
print('Test accuracy: {:>.4f}'.format(score[1]))
import numpy as np
from tensorflow.contrib.keras.python.keras.models import Sequential
from tensorflow.contrib.keras.python.keras.layers import Dense, Activation
from tensorflow.contrib.keras.python.keras.models import Model
def binary_encode(i, num_digits):
    """Return *i* as a little-endian array of *num_digits* binary digits."""
    return np.array([1 if i & (1 << d) else 0 for d in range(num_digits)])
def fizz_buzz_encode(i):
    """One-hot fizzbuzz class for *i*: [number, fizz, buzz, fizzbuzz]."""
    if i % 15 == 0:
        index = 3          # divisible by both 3 and 5
    elif i % 5 == 0:
        index = 2          # buzz
    elif i % 3 == 0:
        index = 1          # fizz
    else:
        index = 0          # plain number
    return np.array([1 if k == index else 0 for k in range(4)])
def fizz_buzz(i, prediction):
    """Translate a predicted class index into the fizzbuzz string for *i*."""
    if prediction == 0:
        return str(i)
    return ["fizz", "buzz", "fizzbuzz"][prediction - 1]
# Train a small MLP to learn fizzbuzz from the binary encoding of 101..1023,
# then evaluate on 1..100 (the classic "fizzbuzz by neural network" setup).
NUM_DIGITS = 10

trX = np.array([binary_encode(i, NUM_DIGITS) for i in range(101, 2 ** NUM_DIGITS)])
trY = np.array([fizz_buzz_encode(i) for i in range(101, 2 ** NUM_DIGITS)])

model = Sequential()
model.add(Dense(128, input_dim=NUM_DIGITS))
model.add(Activation('tanh'))
# NOTE: the original passed input_dim=128 here; Keras ignores input_dim on
# non-first layers, so it is dropped.
model.add(Dense(4))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(trX, trY, epochs=3600, batch_size=128)

# BUG FIX: the original opened the file without ever closing it; use a
# context manager so the JSON architecture file is flushed and closed.
with open('fizzbuzz2.json', 'w') as f:
    f.write(model.to_json())
model.save('fizzbuzz2.hdf5')
print("save model")

numbers = np.arange(1, 101)
teX = np.array([binary_encode(i, NUM_DIGITS) for i in range(1, 101)])
teY = np.array([fizz_buzz_encode(i) for i in range(1, 101)])
score = model.evaluate(teX, teY, verbose=0)
print('Test loss : {:>.4f}'.format(score[0]))
print('Test accuracy: {:>.4f}'.format(score[1]))
Author And Source
この問題について(TensorFlowでkeras その11)、我々はより多くの情報をここで見つけました: https://qiita.com/ohisama@github/items/9f5d0a76bdfb533d478f 。著者帰属: 元の著者の情報は元のURLに含まれています。著作権は原作者に属します。
Content is automatically searched and collected through network algorithms . If there is a violation . Please contact us . We will adjust (correct author information ,or delete content ) as soon as possible .