Why does it take so long? #74

Open
NileZhou opened this issue Oct 8, 2019 · 2 comments

NileZhou commented Oct 8, 2019

For 1 second of audio, does it really need 8 seconds to clean? I built the same generator network and it only takes 0.2 seconds for 1 second of data.

NileZhou (Author) commented Oct 8, 2019

```python
import time

import tensorflow as tf
from tensorflow.contrib.layers import xavier_initializer


def leakyrelu(x, alpha=0.3, name='lrelu'):
    return tf.maximum(x, alpha * x, name=name)


def downconv(x, output_dim, kwidth=5, pool=2, init=None, uniform=False,
             bias_init=None, name='downconv'):
    """Downsampled 1-d convolution, done as a 2-d conv on an expanded tensor."""
    x2d = tf.expand_dims(x, 2)
    w_init = init
    if w_init is None:
        w_init = xavier_initializer(uniform=uniform)
    with tf.variable_scope(name, reuse=None):
        W = tf.get_variable('W', [kwidth, 1, x.get_shape()[-1], output_dim],
                            initializer=w_init)
        conv = tf.nn.conv2d(x2d, W, strides=[1, pool, 1, 1], padding='SAME')
        if bias_init is not None:
            b = tf.get_variable('b', [output_dim], initializer=bias_init)
            conv = tf.reshape(tf.nn.bias_add(conv, b), conv.get_shape())
        else:
            conv = tf.reshape(conv, conv.get_shape())
        # reshape back to 1d
        conv = tf.reshape(conv, conv.get_shape().as_list()[:2] +
                          [conv.get_shape().as_list()[-1]])
        return conv


def deconv(x, output_shape, kwidth=5, dilation=2, init=None, uniform=False,
           bias_init=None, name='deconv1d'):
    """Fractionally strided (transposed) 1-d convolution via conv2d_transpose."""
    input_shape = x.get_shape()
    in_channels = input_shape[-1]
    out_channels = output_shape[-1]
    assert len(input_shape) >= 3
    # reshape the tensor to use 2d operators
    x2d = tf.expand_dims(x, 2)
    o2d = output_shape[:2] + [1] + [output_shape[-1]]
    w_init = init
    if w_init is None:
        w_init = xavier_initializer(uniform=uniform)
    with tf.variable_scope(name, reuse=None):
        # filter shape: [kwidth, 1, output_channels, in_channels]
        W = tf.get_variable('W', [kwidth, 1, out_channels, in_channels],
                            initializer=w_init)
        deconv = tf.nn.conv2d_transpose(x2d, W, output_shape=o2d,
                                        strides=[1, dilation, 1, 1])
        if bias_init is not None:
            b = tf.get_variable('b', [out_channels], initializer=bias_init)
            deconv = tf.reshape(tf.nn.bias_add(deconv, b), deconv.get_shape())
        # reshape back to 1d
        deconv = tf.reshape(deconv, output_shape)
        return deconv


batchsize = 100


def G(noise):
    noise_w = tf.random_normal([batchsize, 16384], mean=0.0, stddev=0.01,
                               dtype=tf.float32)
    h_i = tf.expand_dims(noise_w, -1)
    skips = []
    g_enc_depths = [16, 32, 32, 64, 64, 128, 128, 256, 256, 512, 1024]
    # encoder: strided convolutions, keeping every pre-activation as a skip
    for index in range(10):
        h_i = downconv(h_i, g_enc_depths[index], 31,
                       init=tf.truncated_normal_initializer(stddev=0.02),
                       name='enc_{}'.format(index))
        skips.append(h_i)
        h_i = leakyrelu(h_i)
    h_i = downconv(h_i, g_enc_depths[10], 31,
                   init=tf.truncated_normal_initializer(stddev=0.02),
                   name='enc_{}'.format(10))
    h_i = leakyrelu(h_i)
    # concatenate the latent noise z with the encoder output
    z = tf.random_normal([batchsize, h_i.get_shape().as_list()[1], g_enc_depths[-1]],
                         mean=0.0, stddev=1, name='z', dtype=tf.float32)
    h_i = tf.concat([z, h_i], 2)
    # decoder: mirror of the encoder, fusing the matching skip after each layer
    g_dec_depths = g_enc_depths[:-1][::-1] + [1]
    for index in range(10):
        h_i_dim = h_i.get_shape().as_list()
        out_shape = [h_i_dim[0], h_i_dim[1] * 2, g_dec_depths[index]]
        h_i = deconv(h_i, out_shape, kwidth=31, dilation=2,
                     init=tf.truncated_normal_initializer(stddev=0.02),
                     name='dec_{}'.format(index))
        h_i = leakyrelu(h_i)
        skip_ = skips[-(index + 1)]
        h_i = tf.concat([h_i, skip_], 2)
    h_i_dim = h_i.get_shape().as_list()
    out_shape = [h_i_dim[0], h_i_dim[1] * 2, g_dec_depths[10]]
    h_i = deconv(h_i, out_shape, kwidth=31, dilation=2,
                 init=tf.truncated_normal_initializer(stddev=0.02),
                 name='dec_{}'.format(10))
    h_i = tf.tanh(h_i)
    return h_i


a = G(1)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # two warm-up runs so graph/kernel setup is excluded from the timing
    sess.run([a])
    sess.run([a])
    print('start timer')
    TIME1 = time.time()
    b, c = sess.run([a, a])
    print(time.time() - TIME1)
```
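
One thing worth noting about this benchmark: `a = G(1)` builds the generator over a batch of 100 segments of 16384 samples each, so a single `sess.run` already processes on the order of 100 seconds of audio at 16 kHz, and fetching `[a, a]` in one call should execute the graph only once. Below is a minimal timing sketch under those assumptions (TF 1.x, an assumed 16 kHz sample rate, and reusing the `a` and `batchsize` defined in the snippet above) that reports compute time per second of audio:

```python
# Minimal timing sketch (assumptions: TF 1.x, 16 kHz audio, and the `a` and
# `batchsize` defined in the snippet above).
import time

import tensorflow as tf

n_runs = 5
samples_per_segment = 16384
sample_rate = 16000  # assumed sample rate; each segment is ~1.02 s of audio

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    sess.run(a)  # warm-up so one-off graph/kernel setup is not timed

    start = time.time()
    for _ in range(n_runs):
        sess.run(a)  # each call runs the generator on a batch of `batchsize` segments
    elapsed = time.time() - start

    audio_seconds = n_runs * batchsize * samples_per_segment / float(sample_rate)
    print('compute seconds per second of audio: %.4f' % (elapsed / audio_seconds))
```

If that per-audio-second figure comes out well below one, the generator graph itself is probably not the bottleneck, and the 8 seconds per second reported for cleaning may be dominated by per-file preprocessing, session startup, or I/O rather than the forward pass.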

AoyuQC commented May 22, 2020

Hi, does your version achieve good results? @NileZhou
