Building the Model    Created: 2021-04-23
Updated: 2021-04-23


    Steps so far:
      $ source [venv path]/venv/bin/activate
      (venv) $ python
      >>> import tensorflow as tf
      >>> tf.enable_eager_execution()
      >>> train_file_path = "[path from /]/.keras/datasets/train.csv"
      >>> test_file_path = "[path from /]/.keras/datasets/eval.csv"
      >>> LABEL_COLUMN = 'survived'
      >>> def get_dataset(file_path, **kwargs):
      ...     dataset = tf.data.experimental.make_csv_dataset(
      ...         file_path,
      ...         batch_size=5,
      ...         label_name=LABEL_COLUMN,
      ...         na_value="?",
      ...         num_epochs=1,
      ...         ignore_errors=True,
      ...         **kwargs)
      ...     return dataset
      ...
      >>> raw_train_data = get_dataset(train_file_path)
      >>> raw_test_data = get_dataset(test_file_path)
      >>> import numpy as np
      >>> np.set_printoptions(precision=3, suppress=True)
      >>> def show_batch(dataset):
      ...     for batch, label in dataset.take(1):
      ...         for key, value in batch.items():
      ...             print("{:20s}: {}".format(key, value.numpy()))
      ...
      ------------------------------------------------------------------
      >>> class PackNumericFeatures(object):
      ...     def __init__(self, names):
      ...         self.names = names
      ...
      ...     def __call__(self, features, labels):
      ...         numeric_features = [features.pop(name) for name in self.names]
      ...         numeric_features = [tf.cast(feat, tf.float32) for feat in numeric_features]
      ...         numeric_features = tf.stack(numeric_features, axis=-1)
      ...         features['numeric'] = numeric_features
      ...         return features, labels
      ...
      >>> NUMERIC_FEATURES = ['age', 'n_siblings_spouses', 'parch', 'fare']
      >>> packed_train_data = raw_train_data.map(PackNumericFeatures(NUMERIC_FEATURES))
      >>> packed_test_data = raw_test_data.map(PackNumericFeatures(NUMERIC_FEATURES))
      >>> import pandas as pd
      >>> desc = pd.read_csv(train_file_path)[NUMERIC_FEATURES].describe()
      >>> MEAN = np.array(desc.T['mean'])
      >>> STD = np.array(desc.T['std'])
      >>> def normalize_numeric_data(data, mean, std):
      ...     return (data - mean) / std
      ...
      >>> import functools
      >>> normalizer = functools.partial(normalize_numeric_data, mean=MEAN, std=STD)
      >>> numeric_column = tf.feature_column.numeric_column(
      ...     'numeric', normalizer_fn=normalizer, shape=[len(NUMERIC_FEATURES)])
      >>> numeric_columns = [numeric_column]
      >>> CATEGORIES = {
      ...     'sex': ['male', 'female'],
      ...     'class': ['First', 'Second', 'Third'],
      ...     'deck': ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J'],
      ...     'embark_town': ['Cherbourg', 'Southhampton', 'Queenstown'],
      ...     'alone': ['y', 'n']
      ... }
      >>> categorical_columns = []
      >>> for feature, vocab in CATEGORIES.items():
      ...     cat_col = tf.feature_column.categorical_column_with_vocabulary_list(
      ...         key=feature, vocabulary_list=vocab)
      ...     categorical_columns.append(tf.feature_column.indicator_column(cat_col))
      ...
      >>> preprocessing_layer = tf.keras.layers.DenseFeatures(categorical_columns + numeric_columns)
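
    A quick check of the pipeline above (an illustrative addition, not part of the original session): show_batch() prints one raw batch, and applying preprocessing_layer to the same kind of batch should yield one dense float vector per row. The names example_batch / labels_batch exist only in this sketch:
      >>> show_batch(packed_train_data)
      >>> for example_batch, labels_batch in packed_train_data.take(1):
      ...     # first row of the 5-row batch, after one-hot encoding and normalization
      ...     print(preprocessing_layer(example_batch).numpy()[0])
      ...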


    Structure of the model, starting from preprocessing_layer:
    1. preprocessing_layer
    2. tf.keras.layers.Dense(128, activation='relu')
        128 fully connected nodes (neurons); see the sketch after this list.
    3. tf.keras.layers.Dense(128, activation='relu')
    4. tf.keras.layers.Dense(1)
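
    A minimal sketch of what one Dense(128, activation='relu') layer computes: relu(xW + b), with a learned weight matrix W and bias b. The numbers below are placeholders only; 24 stands in for the width of the preprocessed feature vector (4 numeric values plus the one-hot categorical columns):
      >>> x = tf.random.normal([5, 24])        # stand-in for one preprocessed batch of 5 rows
      >>> W = tf.random.normal([24, 128])      # the layer's learned weights (random here)
      >>> b = tf.zeros([128])                  # the layer's learned bias
      >>> y = tf.nn.relu(tf.matmul(x, W) + b)  # same computation as Dense(128, activation='relu')
      >>> print(y.shape)                       # (5, 128): 128 values per row of the batch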


    Building this model:
      >>> model = tf.keras.Sequential([
      ...     preprocessing_layer,
      ...     tf.keras.layers.Dense(128, activation='relu'),
      ...     tf.keras.layers.Dense(128, activation='relu'),
      ...     tf.keras.layers.Dense(1),
      ... ])
      >>> model.compile(
      ...     loss=tf.keras.losses.BinaryCrossentropy(from_logits=True),
      ...     optimizer='adam',
      ...     metrics=['accuracy'])
      >>>
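
    Because the final Dense(1) layer returns a raw logit per passenger, the loss is given from_logits=True and applies the sigmoid internally. An illustrative check with the still-untrained model (not part of the session above; the values are meaningless, only the logit-to-probability relation matters):
      >>> logits = model.predict(packed_test_data.take(1))  # one logit per row of the batch
      >>> print(tf.sigmoid(logits).numpy())                 # sigmoid maps each logit to a probability in [0, 1]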