diff --git a/paddle/test_recognize_digits_conv.py b/paddle/test_recognize_digits_conv.py
index dc34568..6436e43 100644
--- a/paddle/test_recognize_digits_conv.py
+++ b/paddle/test_recognize_digits_conv.py
@@ -8,6 +8,7 @@
 from paddle.v2.fluid.executor import Executor
 from paddle.v2.fluid.optimizer import AdamOptimizer
 from paddle.v2.fluid.initializer import NormalInitializer
+from paddle.v2.fluid.param_attr import ParamAttr
 import numpy as np
 import time
@@ -42,8 +43,8 @@
 predict = layers.fc(input=conv_pool_2,
                     size=SIZE,
                     act="softmax",
-                    param_initializer=NormalInitializer(
-                        loc=0.0, scale=scale, seed=SEED))
+                    param_attr=ParamAttr(initializer=NormalInitializer(
+                        loc=0.0, scale=scale, seed=SEED)))
 cost = layers.cross_entropy(input=predict, label=label)
 avg_cost = layers.mean(x=cost)
 
diff --git a/tensorflow/refactor_mnist.py b/tensorflow/refactor_mnist.py
index 75c8da0..6371932 100644
--- a/tensorflow/refactor_mnist.py
+++ b/tensorflow/refactor_mnist.py
@@ -104,7 +104,9 @@ def paddle_random_normal(shape, loc=.0, scale=1., seed=1, dtype="float32"):
 
 optimizer = tf.train.AdamOptimizer(learning_rate=0.001, beta1=0.9, beta2=0.999)
 train_op = optimizer.minimize(avg_cost)
-with tf.Session() as sess:
+config = tf.ConfigProto(
+    intra_op_parallelism_threads=1, inter_op_parallelism_threads=1)
+with tf.Session(config=config) as sess:
     init_g = tf.global_variables_initializer()
     init_l = tf.local_variables_initializer()
     sess.run(init_g)
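
Notes on the two changes (illustrative sketches, not part of the patch):

1) PaddlePaddle side: the fc layer call switches from a raw `param_initializer=` keyword to an initializer wrapped in `ParamAttr` and passed via `param_attr=`. A minimal sketch of the updated call, assuming the same paddle.v2.fluid imports used in the file above; the helper name `fc_with_softmax` and the `feature` argument are illustrative, not from the diff:

    import paddle.v2.fluid.layers as layers
    from paddle.v2.fluid.param_attr import ParamAttr
    from paddle.v2.fluid.initializer import NormalInitializer

    def fc_with_softmax(feature, size, scale, seed):
        # The initializer is now attached through a ParamAttr object
        # instead of the old param_initializer keyword.
        return layers.fc(input=feature,
                         size=size,
                         act="softmax",
                         param_attr=ParamAttr(initializer=NormalInitializer(
                             loc=0.0, scale=scale, seed=seed)))

2) TensorFlow side: the session is pinned to single-threaded execution, presumably so CPU timings stay comparable across frameworks (an assumption about intent). A self-contained TF 1.x sketch of the same ConfigProto usage on a trivial graph:

    import tensorflow as tf

    # Limit both the intra-op and inter-op thread pools to a single thread.
    config = tf.ConfigProto(
        intra_op_parallelism_threads=1, inter_op_parallelism_threads=1)

    a = tf.constant([[1.0, 2.0], [3.0, 4.0]])
    b = tf.matmul(a, a)

    with tf.Session(config=config) as sess:
        print(sess.run(b))  # runs the matmul in the single-threaded session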