@@ -24,20 +24,42 @@ def count_column_num(fname, field_delim):
     # the last column is the class number --> -1
     return len(line)
 
+
 def dense_to_one_hot(labels_dense, num_classes=10):
     """Convert class labels from scalars to one-hot vectors."""
     num_labels = labels_dense.shape[0]
     index_offset = np.arange(num_labels) * num_classes
     labels_one_hot = np.zeros((num_labels, num_classes))
     labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1
     return labels_one_hot
+
+
+def read_and_decode(filename):
+    filename_queue = tf.train.string_input_producer([filename])
+
+    reader = tf.TFRecordReader()
+    _, serialized_example = reader.read(filename_queue)
+    features = tf.parse_single_example(serialized_example,
+                                       features={
+                                           'label': tf.FixedLenFeature([], tf.int64),
+                                           # We know the length of both fields. If not, the
+                                           # tf.VarLenFeature could be used
+                                           'features': tf.FixedLenFeature([8660], tf.float32),
+                                       })
+
+    X = tf.cast(features['features'], tf.float32)
+    y = tf.cast(features['label'], tf.int32)
+
+    return X, y
+
+
 # Parameters
 learning_rate = 0.001
 training_epochs = 10000
 display_step = 1
 num_threads = 4
-csv_file_path = "data/merge/scat_data.txt"
-training_file_path = "data/merge/scat_data.tfrecords"
+csv_file_path = "data/tvtsets/training_scat_data.txt"
+training_file_path = "data/tvtsets/training_scat_data.tfrecords"
 column_num = count_column_num(csv_file_path, " ")
 # file_length = file_len(csv_file_path)
 # Network Parameters
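The dense_to_one_hot helper fills a flat view of a zero matrix at offsets row * num_classes + label. A minimal sanity check of the function defined above (illustrative values, not part of the commit; assumes numpy is imported as np):

import numpy as np

labels = np.array([0, 2, 3])                       # three scalar class labels
one_hot = dense_to_one_hot(labels, num_classes=4)
# one_hot ->
# [[1. 0. 0. 0.]
#  [0. 0. 1. 0.]
#  [0. 0. 0. 1.]]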
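read_and_decode only defines graph ops that parse a single serialized example; with the queue-based TF 1.x readers used here (tf.train.string_input_producer, tf.TFRecordReader), the returned tensors still have to be batched and driven by queue runners inside a session. A rough sketch of that wiring, assuming the TF 1.x API; the batch size and capacity values are illustrative and not taken from this commit:

import tensorflow as tf

X, y = read_and_decode(training_file_path)
# Group single parsed records into shuffled mini-batches.
X_batch, y_batch = tf.train.shuffle_batch(
    [X, y], batch_size=32, capacity=1000, min_after_dequeue=100,
    num_threads=num_threads)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # The readers above are fed by queue runners, which must be started.
    coord = tf.train.Coordinator()
    threads = tf.train.start_queue_runners(sess=sess, coord=coord)
    features_batch, labels_batch = sess.run([X_batch, y_batch])
    coord.request_stop()
    coord.join(threads)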
@@ -106,8 +128,6 @@ def multilayer_perceptron(x, weights, biases):
         features_array = np.reshape(features_array, (1, n_input))
         label_array = dense_to_one_hot(np.array([label]), num_classes=n_classes)
 
-        with open("0504_log.txt", "w") as f:
-            f.write("features: {}, label: {}".format(features_array, label_array))
         _, c = sess.run([optimizer, cost], feed_dict={x: features_array, y: label_array})
         # Display logs per epoch step
         if epoch % display_step == 0:
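The training step above feeds one example at a time, so the features are reshaped to (1, n_input) and the single label is one-hot encoded, giving both feed_dict arrays a leading batch dimension of 1. A small shape check (illustrative; the n_input of 8660 mirrors the FixedLenFeature length above, and n_classes is an assumed value):

import numpy as np

n_input, n_classes = 8660, 10
features_array = np.random.rand(n_input)
features_array = np.reshape(features_array, (1, n_input))              # shape (1, 8660)
label_array = dense_to_one_hot(np.array([3]), num_classes=n_classes)   # shape (1, 10)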