TensorFlow 2.0 Tutorial 2


# -*- coding: utf-8 -*-
# Created by 'Zhou Bingbing' on 2019/7/6
import os

import tensorflow as tf

# t = tf.constant(['cafe', 'coffee', '咖啡'])  # the last element is a multi-byte UTF-8 string
# print(tf.strings.length(t, unit='UTF8_CHAR'))  # counts characters, not bytes
# s = tf.SparseTensor(indices=[[0, 1], [1, 0], [2, 3]], values=[1, 2, 3], dense_shape=[3, 4])
# print(s)
# print(tf.sparse.to_dense(s))  # materialize the sparse tensor as a dense 3x4 matrix
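# hedged aside (not in the original snippet): SparseTensor supports scalar
# multiplication directly, though plain addition is not defined for it:
# s2 = s * 2  # scales the stored values; the sparse structure is unchanged
# print(tf.sparse.to_dense(s2))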

# v = tf.Variable([[1, 2, 3], [4, 5, 6]])
# print(v)          # the Variable wrapper
# print(v.value())  # the underlying tf.Tensor
# print(v.numpy())  # as a NumPy array
# def scaled_elu(z, scale=1.0, alpha=1.0):
#     # SELU-style activation: z where z >= 0, else alpha * scale * elu(z)
#     is_positive = tf.greater_equal(z, 0.0)
#     return scale * tf.where(is_positive, z, alpha * scale * tf.nn.elu(z))
# print(scaled_elu(tf.constant(-3.0)))
# print(scaled_elu(tf.constant([-3.0, -2.5])))
#
# print('tf.function')
# # convert the eager function to a graph function; leaving scaled_elu
# # undecorated keeps the eager-vs-graph timing comparison below meaningful
# scaled_elu_tf = tf.function(scaled_elu)
# print(scaled_elu_tf(tf.constant(-3.0)))
# print(scaled_elu_tf(tf.constant([-3.0, -2.5])))
#
# # in IPython, compare eager vs. graph execution (note the shape tuple):
# # %timeit scaled_elu(tf.random.normal((1000, 1000)))
# # %timeit scaled_elu_tf(tf.random.normal((1000, 1000)))
# a tf.Variable used inside a tf.function must be created outside of it
# var = tf.Variable(0.)
# @tf.function
# def add_21():
#     return var.assign_add(21)
# print(add_21())
#
# input_signature pins the accepted dtype/shape, so mismatched calls raise early
# @tf.function(input_signature=[tf.TensorSpec([None], tf.int32, name='X')])
# def cube(z):
#     return tf.pow(z, 3)
# cube_func_int32 = cube.get_concrete_function(tf.TensorSpec([None], tf.int32))
# print(cube_func_int32)
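# hedged aside (not in the original snippet): a ConcreteFunction exposes its
# traced FuncGraph, which can be inspected like a TF1-style graph:
# print(cube_func_int32.graph.get_operations())  # ops recorded during tracing
# print(cube_func_int32.graph.as_graph_def())    # serialized GraphDef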
# from tensorflow import keras

'''approximate derivatives & GradientTape'''
# def f(x):
#     return 3.0 * x ** 2 + 2.0 * x + 1
#
# def approximate_derivative(f, x, eps=1e-3):
#     # central difference: (f(x+eps) - f(x-eps)) / (2 * eps)
#     return (f(x + eps) - f(x - eps)) / (2 * eps)
# print(approximate_derivative(f, 1))
#
# def g(x1, x2):
#     return (x1 + 5) * (x2 ** 2)
#
# def approximate_gradient(g, x1, x2, eps=1e-3):
#     # partial derivatives, holding the other argument fixed
#     dg_x1 = approximate_derivative(lambda x: g(x, x2), x1, eps)
#     dg_x2 = approximate_derivative(lambda x: g(x1, x), x2, eps)
#     return dg_x1, dg_x2
# print(approximate_gradient(g, 2, 3))
#
# learning_rate = 0.1
# optimizer = keras.optimizers.SGD(learning_rate=learning_rate)
# x = tf.Variable(0.0)
# for _ in range(100):
#     with tf.GradientTape() as tape:
#         z = f(x)
#     dz_dx = tape.gradient(z, x)
#     optimizer.apply_gradients([(dz_dx, x)])  # expects (gradient, variable) pairs
# print(x)  # converges toward the minimum of f at x = -1/3
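# hedged aside (not in the original): TF2 Keras optimizers also accept a
# zero-argument loss callable, folding the tape and apply_gradients steps:
# optimizer.minimize(lambda: f(x), var_list=[x])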
import numpy as np
'''tf data'''
# dataset = tf.data.Dataset.from_tensor_slices(np.arange(10))
# print(dataset)
# dataset = dataset.repeat(3).batch(7)  # 30 elements total, emitted in batches of 7
# for item in dataset:
#     print(item)

# x = np.array([[1, 2], [3, 4], [5, 6]])
# y = np.array(['cat', 'dog', 'fox'])
# dataset3 = tf.data.Dataset.from_tensor_slices((x, y))  # pairs features with labels
# for item_x, item_y in dataset3:
#     print(item_x.numpy(), item_y.numpy())

# dataset4 = tf.data.Dataset.from_tensor_slices({'feature': x, 'label': y})  # dict structure is kept per element
# for item in dataset4:
#     print(item['feature'].numpy(), item['label'].numpy())
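# hedged aside (not in the original): per-element transforms chain onto the
# slicing APIs above, e.g. squaring each element with map:
# squares = tf.data.Dataset.from_tensor_slices(np.arange(10)).map(lambda v: v * v)
# for item in squares.take(3):
#     print(item.numpy())  # 0, 1, 4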
from sklearn.datasets import fetch_california_housing
housing = fetch_california_housing()
from sklearn.model_selection import train_test_split
x_train_all, x_test, y_train_all, y_test = train_test_split(housing.data, housing.target, random_state=7)
x_train, x_valid, y_train, y_valid = train_test_split(x_train_all, y_train_all, random_state=11)
print(x_train.shape, y_train.shape)
print(x_valid.shape, y_valid.shape)
print(x_test.shape, y_test.shape)
from sklearn.preprocessing import StandardScaler
scaler = StandardScaler()
x_train_scaled = scaler.fit_transform(x_train)  # fit the scaler on training data only
x_valid_scaled = scaler.transform(x_valid)
x_test_scaled = scaler.transform(x_test)
output_dir ='generate_csv'
if not os.path.exists(output_dir):
    os.mkdir(output_dir)
def save_to_csv(output_dir, data, name_prefix, header=None, n_parts=10):
    path_format = os.path.join(output_dir, '{}_{:02d}.csv')
    filenames = []
    # split the row indices into n_parts groups and write one CSV file per group
    for file_idx, row_indices in enumerate(np.array_split(np.arange(len(data)), n_parts)):
        part_csv = path_format.format(name_prefix, file_idx)
        filenames.append(part_csv)
        with open(part_csv, "wt", encoding="utf-8") as f:
            if header is not None:
                f.write(header + "\n")
            for row_index in row_indices:
                f.write(",".join([repr(col) for col in data[row_index]]))
                f.write("\n")
    return filenames

train_data = np.c_[x_train_scaled, y_train]
valid_data = np.c_[x_valid_scaled, y_valid]
test_data = np.c_[x_test_scaled, y_test]
header_cols = housing.feature_names + ["MedianHouseValue"]
header_str = ",".join(header_cols)

train_filenames = save_to_csv(output_dir, train_data, "train", header_str, n_parts=20)
valid_filenames = save_to_csv(output_dir, valid_data, "valid", header_str, n_parts=10)
test_filenames = save_to_csv(output_dir, test_data, "test", header_str, n_parts=10)

filename_dataset = tf.data.Dataset.list_files(train_filenames)
# for filename in filename_dataset:
#     print(filename)

# read from n_readers files in parallel, skipping each file's header line
n_readers = 5
dataset = filename_dataset.interleave(
    lambda filename: tf.data.TextLineDataset(filename).skip(1),
    cycle_length=n_readers)

def parse_csv_line(line, n_fields):
    # parse one CSV line into n_fields float columns; NaN is the default for missing values
    defs = [tf.constant(np.nan)] * n_fields
    parsed_fields = tf.io.decode_csv(line, record_defaults=defs)
    x = tf.stack(parsed_fields[0:-1])  # all columns but the last are features
    y = tf.stack(parsed_fields[-1:])   # the last column is the label
    return x, y
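
# A hedged sketch of how these pieces typically compose into one input
# pipeline (not in the original; the name csv_reader_dataset and its
# parameters are assumptions following the usual tf.data pattern):
def csv_reader_dataset(filenames, n_fields=9, n_readers=5,
                       n_parse_threads=5, shuffle_buffer_size=10000,
                       batch_size=32):
    dataset = tf.data.Dataset.list_files(filenames)
    dataset = dataset.repeat()  # cycle through the file list indefinitely
    dataset = dataset.interleave(
        lambda filename: tf.data.TextLineDataset(filename).skip(1),
        cycle_length=n_readers)
    dataset = dataset.shuffle(shuffle_buffer_size)
    dataset = dataset.map(lambda line: parse_csv_line(line, n_fields),
                          num_parallel_calls=n_parse_threads)
    return dataset.batch(batch_size)

# usage: train_set = csv_reader_dataset(train_filenames, batch_size=32)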