Skip to content

Commit

Permalink
Some presumably now unneeded reshapes?
Browse files Browse the repository at this point in the history
  • Loading branch information
Esben Jannik Bjerrum committed Mar 7, 2024
1 parent 84d1315 commit ba3d190
Show file tree
Hide file tree
Showing 2 changed files with 2 additions and 2 deletions.
2 changes: 1 addition & 1 deletion scikit_mol/descriptors.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ def transform(self, x: List[Mol], y=None) -> np.ndarray:
with get_context(self.start_method).Pool(processes=n_processes) as pool:
params = self.get_params()
x_chunks = np.array_split(x, n_chunks)
x_chunks = [x.reshape(-1, 1) for x in x_chunks]
#x_chunks = [x.reshape(-1, 1) for x in x_chunks]
arrays = pool.map(parallel_helper, [(params, x) for x in x_chunks]) #is the helper function a safer way of handling the pickling and child process communication?
arr = np.concatenate(arrays)
return arr
Expand Down
2 changes: 1 addition & 1 deletion scikit_mol/fingerprints.py
Original file line number Diff line number Diff line change
Expand Up @@ -118,7 +118,7 @@ def transform(self, X, y=None):
parameters = self.get_params()
# TODO: create "transform_parallel" function in the core module,
# and use it here and in the descriptors transformer
x_chunks = [np.array(x).reshape(-1, 1) for x in x_chunks]
#x_chunks = [np.array(x).reshape(-1, 1) for x in x_chunks]
arrays = pool.map(parallel_helper, [(self.__class__.__name__, parameters, x_chunk) for x_chunk in x_chunks])

arr = np.concatenate(arrays)
Expand Down

0 comments on commit ba3d190

Please sign in to comment.