Skip to content

Commit

Permalink
avoid Pylint 'torch' has no 'from_numpy' member error
Browse files Browse the repository at this point in the history
torch has no member function from_numpy because it is actually _C.from_numpy. This causes a false-positive 'no-member' error when running lint.
See pytorch/pytorch#701
  • Loading branch information
Zoufalc committed Aug 9, 2019
1 parent 4f5ec89 commit ba17e45
Show file tree
Hide file tree
Showing 4 changed files with 10 additions and 10 deletions.
9 changes: 4 additions & 5 deletions qiskit/aqua/components/neural_networks/numpy_discriminator.py
Expand Up @@ -39,10 +39,9 @@ def __init__(self, n_features=1, n_out=1):
n_out: int, output dimension
"""
self.architecture = [
{"input_dim": n_features, "output_dim": 4, "activation": "leaky_relu"},
{"input_dim": 4, "output_dim": 156, "activation": "leaky_relu"},
{"input_dim": 156, "output_dim": 52, "activation": "leaky_relu"},
{"input_dim": 52, "output_dim": n_out, "activation": "sigmoid"},
{"input_dim": n_features, "output_dim":50, "activation": "leaky_relu"},
{"input_dim": 50, "output_dim": 20, "activation": "leaky_relu"},
{"input_dim": 20, "output_dim": n_out, "activation": "sigmoid"},
]

self.parameters = []
Expand Down Expand Up @@ -229,7 +228,7 @@ def __init__(self, n_features=1, n_out=1):
self._n_features = n_features
self._n_out = n_out
self._discriminator = DiscriminatorNet(self._n_features, self._n_out)
self._optimizer = ADAM(maxiter=1, tol=1e-6, lr=1e-5, beta_1=0.7, beta_2=0.99, noise_factor=1e-4,
self._optimizer = ADAM(maxiter=1, tol=1e-6, lr=1e-3, beta_1=0.7, beta_2=0.99, noise_factor=1e-4,
eps=1e-6, amsgrad=True)

self._ret = {}
Expand Down
Expand Up @@ -278,7 +278,8 @@ def train(self, data, weights, penalty=True, quantum_instance=None, shots=None):
Returns: dict, with Discriminator loss (torch.Tensor) and updated parameters (array).
"""

# pylint: disable=E1101
# pylint: disable=E1102
# Reset gradients
self._optimizer.zero_grad()
real_batch = data[0]
Expand Down Expand Up @@ -311,7 +312,8 @@ def train(self, data, weights, penalty=True, quantum_instance=None, shots=None):

if penalty:
self.gradient_penalty(real_batch).backward()

# pylint: enable=E1101
# pylint: enable=E1102
# Update weights with gradients
self._optimizer.step()

Expand Down
4 changes: 2 additions & 2 deletions qiskit/aqua/components/neural_networks/quantum_generator.py
Expand Up @@ -126,8 +126,8 @@ def __init__(self, bounds, num_qubits, generator_circuit=None, init_params=None,
else:
raise AquaError('Set univariate variational distribution to represent univariate data')
# Set optimizer for updating the generator network
self._optimizer = ADAM(maxiter=1, tol=1e-6, lr=1e-5, beta_1=0.9, beta_2=0.99, noise_factor=1e-8,
eps=1e-10, amsgrad=True, snapshot_dir=snapshot_dir)
self._optimizer = ADAM(maxiter=1, tol=1e-6, lr=1e-3, beta_1=0.7, beta_2=0.99, noise_factor=1e-6,
eps=1e-6, amsgrad=True, snapshot_dir=snapshot_dir)

if np.ndim(self._bounds) == 1:
bounds = np.reshape(self._bounds, (1, len(self._bounds)))
Expand Down
1 change: 0 additions & 1 deletion test/aqua/test_qgan.py
Expand Up @@ -107,7 +107,6 @@ def setUp(self):
g_circuit = UnivariateVariationalDistribution(sum(num_qubits), var_form, init_params,
low=self._bounds[0],
high=self._bounds[1])
# initial_distribution=init_distribution,
# Set quantum generator
self.qgan.set_generator(generator_circuit=g_circuit)

Expand Down

0 comments on commit ba17e45

Please sign in to comment.