TensorFlow Learning Notes #1
阿新 • Published: 2018-12-14
Basics
1.Tensors are the primary data structure that TensorFlow uses to operate on the computational graph.
import tensorflow as tf
#Create fixed tensors
zero_tsr = tf.zeros([row_dim, col_dim])
ones_tsr = tf.ones([row_dim, col_dim])
filled_tsr = tf.fill([row_dim, col_dim], 42)
constant_tsr = tf.constant([1,2,3])
#Create similar shape of tensors
zeros_sim = tf.zeros_like(constant_tsr)
ones_sim = tf.ones_like(constant_tsr)
#Create sequence tensors
linear_tsr = tf.linspace(start=0.0, stop=1.0, num=3)
integer_seq_tsr = tf.range(start=6, limit=15, delta=3)
#Create random tensors
randunif_tsr = tf.random_uniform([row_dim, col_dim], minval=0, maxval=1)
randnorm_tsr = tf.random_normal([row_dim, col_dim], mean=0.0, stddev=1.0)
truncnorm_tsr = tf.truncated_normal([row_dim, col_dim], mean=0.0, stddev=1.0)
shuffled_output = tf.random_shuffle(input_tensor)
cropped_output = tf.random_crop(input_tensor, crop_size)
cropped_image = tf.random_crop(my_image, [height//2, width//2, 3])
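The snippets above assume `row_dim` and `col_dim` are already defined. A minimal sketch of actually evaluating one of these tensors in a session (assuming the TF 1.x API used throughout these notes, with example dimensions chosen arbitrarily):
import tensorflow as tf
row_dim, col_dim = 2, 3                           # example dimensions for illustration
sess = tf.Session()
zero_tsr = tf.zeros([row_dim, col_dim])
print(sess.run(zero_tsr))                         # [[0. 0. 0.] [0. 0. 0.]]
print(sess.run(tf.fill([row_dim, col_dim], 42)))  # a 2x3 matrix filled with 42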
2.Variables are the parameters of the algorithm, and TensorFlow keeps track of how to change them to optimize the algorithm. Placeholders are objects that let you feed in data of a specific type and shape; they supply the inputs that the computational graph depends on, such as the expected outcome of a computation.
#Create and initialize a variable
my_var = tf.Variable(tf.zeros([2, 3]))
sess = tf.Session()
initialize_op = tf.global_variables_initializer()
sess.run(initialize_op)
#Declare a placeholder and perform operations on it
import numpy as np
sess = tf.Session()
x = tf.placeholder(tf.float32, shape=[2,2])
y = tf.identity(x)
x_vals = np.random.rand(2,2)
sess.run(y, feed_dict={x: x_vals})
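When one variable's initial value is built from another variable, it can be useful to initialize them one at a time via their individual initializer ops instead of a single global initializer. A minimal sketch, assuming the same TF 1.x session style:
sess = tf.Session()
first_var = tf.Variable(tf.zeros([2, 3]))
sess.run(first_var.initializer)                     # initialize first_var on its own
second_var = tf.Variable(tf.zeros_like(first_var))  # shape taken from first_var
sess.run(second_var.initializer)                    # then initialize the dependent variable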
3.Matrices
#Create matrices
import tensorflow as tf
import numpy as np
sess = tf.Session()
identity_matrix = tf.diag([1.0, 1.0, 1.0])
A = tf.truncated_normal([2, 3])
B = tf.fill([2,3], 5.0)
C = tf.random_uniform([3,2])
D = tf.convert_to_tensor(np.array([[1., 2., 3.], [-3., -7., -1.], [0., 5., -2.]]))
print(sess.run(identity_matrix))
#Addition, subtraction and multiplication
print(sess.run(A+B))
print(sess.run(B-B))
print(sess.run(tf.matmul(B, identity_matrix)))
#Transpose, inverse and determinant
print(sess.run(tf.transpose(C)))
print(sess.run(tf.matrix_inverse(D)))
print(sess.run(tf.matrix_determinant(D)))
#Decompositions
print(sess.run(tf.cholesky(identity_matrix)))
#Eigenvalues and eigenvectors
print(sess.run(tf.self_adjoint_eig(D)))
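A quick sketch contrasting element-wise multiplication with matrix multiplication, reusing the matrices defined above:
print(sess.run(tf.multiply(B, B)))              # element-wise product: every entry is 25.0
print(sess.run(tf.matmul(A, tf.transpose(B))))  # (2x3) x (3x2) -> a 2x2 matrix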
4.Operations
Operations | Description |
---|---|
div() | division returns the same type as the inputs |
truediv() | division returns a float |
floordiv() | division returns an integer |
mod() | returns the remainder after the division |
cross() | returns the cross-product between two tensors |
abs() | absolute value of one input tensor |
ceil() | ceiling function of one input tensor |
cos() | cosine function of one input tensor |
exp() | base e exponential of one input tensor |
floor() | floor function of one input tensor |
inv() | multiplicative inverse (1/x) of one input tensor |
log() | natural logarithm of one input tensor |
maximum() | element-wise max of two tensors |
minimum() | element-wise min of two tensors |
neg() | negative of one input tensor |
pow() | the first tensor raised to the second tensor element-wise |
round() | rounds one input tensor |
rsqrt() | one over the square root of one tensor |
sign() | returns -1, 0 or 1, depending on the sign of the tensor |
sin() | sine function of one input tensor |
sqrt() | square root of one input tensor |
square() | square of one input tensor |
digamma() | psi function, the derivative of the lgamma() function |
erf() | gaussian error function, element-wise, of one tensor |
erfc() | complementary error function of one tensor |
igamma() | lower regularized incomplete gamma function |
igammac() | upper regularized incomplete gamma function |
lbeta() | natural logarithm of the absolute value of the beta function |
lgamma() | natural logarithm of the absolute value of the gamma function |
squared_difference() | computes the square of the differences between two tensors |
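A quick check of the division-related entries in the table (TF 1.x names), sketched under the same session setup as before:
import tensorflow as tf
sess = tf.Session()
print(sess.run(tf.div(3, 4)))           # 0    -- integer inputs give an integer result
print(sess.run(tf.truediv(3, 4)))       # 0.75 -- always returns a float
print(sess.run(tf.floordiv(3.0, 4.0)))  # 0.0  -- floor of the division
print(sess.run(tf.mod(22.0, 5.0)))      # 2.0  -- remainder after division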
5.Activation Functions
#Create rectifier linear unit, ReLU
import tensorflow as tf
sess = tf.Session()
print(sess.run(tf.nn.relu([-3., 3., 10.])))
#Create ReLU6 = min(max(0,x),6)
print(sess.run(tf.nn.relu6([-3., 3., 10.])))
#Create sigmoid function = 1/(1+exp(-x))
print(sess.run(tf.nn.sigmoid([-1., 0., 1.])))
#Create tanh function = (exp(x)-exp(-x))/(exp(x)+exp(-x))
print(sess.run(tf.nn.tanh([-1., 0., 1.])))
#Create softsign function = x/(abs(x)+1)
print(sess.run(tf.nn.softsign([-1., 0., -1.])))
#Create softplus function = log(exp(x)+1)
print(sess.run(tf.nn.softplus([-1., 0., -1.])))
#Create Exponential Linear Unit, ELU function = (exp(x)-1) if x<0 else x
print(sess.run(tf.nn.elu([-1., 0., -1.])))
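A minimal sketch (toy values, assuming the same TF 1.x API) of using one of these activations inside a small graph, y = relu(a*x + b):
import tensorflow as tf
sess = tf.Session()
x = tf.placeholder(tf.float32)
a = tf.constant(2.0)
b = tf.constant(-1.0)
y = tf.nn.relu(a * x + b)
print(sess.run(y, feed_dict={x: [-1.0, 0.0, 2.0]}))  # [0. 0. 3.]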
6.Data Sources
How to access datasets through TensorFlow and Python
#Iris data
from sklearn import datasets
iris = datasets.load_iris()
print(len(iris.data))
print(len(iris.target))
print(iris.target[0])
print(set(iris.target))
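A minimal sketch (assuming the placeholder pattern from section 2) of feeding the Iris features into a TensorFlow graph:
import numpy as np
import tensorflow as tf
x_data = np.array(iris.data, dtype=np.float32)       # shape (150, 4)
sess = tf.Session()
x = tf.placeholder(tf.float32, shape=[None, 4])      # None allows any batch size
doubled = tf.multiply(x, 2.0)
print(sess.run(doubled, feed_dict={x: x_data[:5]}))  # run on the first 5 rows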
#Birth weight data
import requests
birthdata_url = 'http://faculty.washington.edu/heagerty/Courses/b513/WEB2002/datasets/lowbwt.dat'
birth_file = requests.get(birthdata_url)
birth_header = ['ID','LOW','AGE','LWT','RACE','SMOKE','PTL','HT','UI','FTV','BWT']
birth_data = [[float(x) for x in y.split(' ') if len(x) >=1] for y in birth_file.text.split('\n') if len(y) >=1]
print(len(birth_data))
print(len(birth_data[0]))
#Boston Housing data
import requests
housing_url = 'https://archive.ics.uci.edu/ml/machine-learning-databases/housing/housing.data'
housing_header = ['CRIM', 'ZN', 'INDUS', 'CHAS', 'NOX', 'RM', 'AGE', 'DIS', 'RAD', 'TAX', 'PTRATIO', 'B', 'LSTAT', 'MEDV']
housing_file = requests.get(housing_url)
housing_data = [[float(x) for x in y.split(' ') if len(x)>=1] for y in housing_file.text.split('\n') if len(y)>=1]
print(len(housing_data))
print(len(housing_data[0]))
#MNIST handwriting data (Modified National Institute of Standards and Technology)
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("MNIST_data/", one_hot=True)
print(len(mnist.train.images))
print(len(mnist.test.images))
print(len(mnist.validation.images))
print(mnist.train.labels[1,:])
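The same mnist object also yields mini-batches for training; a small sketch using the input_data helper loaded above:
batch_xs, batch_ys = mnist.train.next_batch(100)  # pull a batch of 100 examples
print(batch_xs.shape)  # (100, 784) -- each 28x28 image flattened to 784 pixels
print(batch_ys.shape)  # (100, 10)  -- one-hot labels because one_hot=True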
#Spam-ham text data
import requests
import io
from zipfile import ZipFile
zip_url = 'http://archive.ics.uci.edu/ml/machine-learning-databases/00228/smsspamcollection.zip'
r = requests.get(zip_url)
z = ZipFile(io.BytesIO(r.content))
file = z.read('SMSSpamCollection')
text_data = file.decode()
text_data = text_data.encode('ascii', errors='ignore')
text_data = text_data.decode().split('\n')
text_data = [x.split('\t') for x in text_data if len(x) >=1]
[text_data_target, text_data_train] = [list(x) for x in zip(*text_data)]
print(len(text_data_train))
print(set(text_data_target))
print(text_data_train[1])
#Movie review data
import requests
import io
import tarfile
movie_data_url = 'http://www.cs.cornell.edu/people/pabo/movie-review-data/rt-polaritydata.tar.gz'
r = requests.get(movie_data_url)
stream_data = io.BytesIO(r.content)
tmp = io.BytesIO()
while True:
    s = stream_data.read(16384)
    if not s:
        break
    tmp.write(s)
stream_data.close()
tmp.seek(0)
#Extract tar file
tar_file = tarfile.open(fileobj=tmp, mode='r:gz')
pos = tar_file.extractfile('rt-polaritydata/rt-polarity.pos')
neg = tar_file.extractfile('rt-polaritydata/rt-polarity.neg')
#Save pos/neg reviews (Also deal with encoding)
pos_data = []
for line in pos:
    pos_data.append(line.decode("ISO-8859-1").encode('ascii', errors='ignore').decode())
neg_data = []
for line in neg:
    neg_data.append(line.decode("ISO-8859-1").encode('ascii', errors='ignore').decode())
tar_file.close()
print(len(pos_data))
print(len(neg_data))
print(neg_data[0])
#The works of Shakespeare text data
import requests
shakespeare_url = 'http://www.gutenberg.org/cache/epub/100/pg100.txt'
response = requests.get(shakespeare_url)
shakespeare_file = response.content
shakespeare_text = shakespeare_file.decode('utf-8')
shakespeare_text = shakespeare_text[7675:]  # drop the opening header text before the plays
print(len(shakespeare_text))
#English-German sentence translation data
import requests
import io
from zipfile import ZipFile
sentence_url = 'http://www.manythings.org/anki/deu-eng.zip'
r = requests.get(sentence_url)
z = ZipFile(io.BytesIO(r.content))
file = z.read('deu.txt')
eng_ger_data = file.decode()
eng_ger_data = eng_ger_data.encode('ascii', errors='ignore')
eng_ger_data = eng_ger_data.decode().split('\n')
eng_ger_data = [x.split('\t') for x in eng_ger_data if len(x) >=1]
[english_sentence, german_sentence] = [list(x) for x in zip(*eng_ger_data)]
print(len(english_sentence))
print(len(german_sentence))
print(eng_ger_data[10])
7.References
- *TensorFlow Machine Learning Cookbook*: https://github.com/PacktPublishing/TensorFlow-Machine-Learning-Cookbook
- TensorFlow API: https://www.tensorflow.org/api_docs/python/tf
- TensorFlow test environment provided by Google (Colab): https://colab.research.google.com