Updated dynet to use initialization
Former-commit-id: b0041d7
neubig committed Oct 11, 2016
1 parent 865f4d1 commit d111837
Showing 2 changed files with 7 additions and 4 deletions.
dynet/model.cc (7 changes: 5 additions & 2 deletions)
@@ -111,7 +111,8 @@ LookupParameterStorage::LookupParameterStorage(unsigned n, const Dim& d) : dim(d
   all_grads.device = all_values.device = default_device;
   default_device->allocate_tensor(DeviceMempool::PS, all_values);
   default_device->allocate_tensor(DeviceMempool::PS, all_grads);
-  TensorTools::Zero(all_values);
+  ParameterInitGlorot init(true);
+  init.initialize_params(all_values);
   initialize_lookups();
 }
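For context, this replaces zero-initialization of the lookup parameter values with Glorot (Xavier) uniform initialization. With the lookup flag set, the scale is computed from the per-entry dimensions only, as implemented in the second hunk below. As a worked equation (notation mine; d_i are the dimensions of all_values, n_d their count, and the last dimension is the number of lookup entries):

$w \sim \mathcal{U}(-s, s), \qquad s = \dfrac{\sqrt{6}}{\sqrt{\sum_{i=0}^{n_d - 2} d_i}}$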

@@ -192,7 +193,9 @@ void ParameterInitConst::initialize_params(Tensor & values) const {
 }

 void ParameterInitGlorot::initialize_params(Tensor & values) const {
-  float my_scale = sqrt(6) / sqrt(values.d.sum_dims());
+  int dims = 0, dim_len = values.d.nd-(lookup?1:0);
+  for(int i = 0; i < dim_len; ++i) dims += values.d[i];
+  float my_scale = sqrt(6) / sqrt(dims);
   TensorTools::RandomizeUniform(values, -my_scale, my_scale);
 }
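A minimal standalone sketch (not DyNet code; the function name and the toy dimensions are mine) of the fan computation the new branch performs, assuming the lookup table is stored with its entry count as the last dimension:

#include <cmath>
#include <cstdio>
#include <vector>

// Illustrative stand-in for the new Glorot scale logic: `dims` plays the role
// of values.d, `lookup` the new flag; the last dimension (the number of
// lookup entries) is skipped when lookup is true.
float glorot_scale(const std::vector<unsigned>& dims, bool lookup) {
  unsigned fan = 0;
  size_t dim_len = dims.size() - (lookup ? 1 : 0);
  for (size_t i = 0; i < dim_len; ++i) fan += dims[i];
  return std::sqrt(6.0f) / std::sqrt((float)fan);
}

int main() {
  // e.g. a 128-dimensional embedding table with 10000 entries (toy numbers)
  std::printf("%f\n", glorot_scale({128, 10000}, true));   // sqrt(6/128)   ~ 0.2165
  std::printf("%f\n", glorot_scale({128, 10000}, false));  // sqrt(6/10128) ~ 0.0243
  return 0;
}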

dynet/model.h (4 changes: 2 additions & 2 deletions)
@@ -196,10 +196,10 @@ struct ParameterInitConst : public ParameterInit {
 };

 struct ParameterInitGlorot : public ParameterInit {
-  ParameterInitGlorot() {}
+  ParameterInitGlorot(bool is_lookup = false) : lookup(is_lookup) {}
   virtual void initialize_params(Tensor & values) const override;
 private:
-  float cnst;
+  bool lookup;
 };

 struct ParameterInitSaxe : public ParameterInit {
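For a sense of the API change in model.h, a hypothetical call-site sketch (assumes the DyNet headers are included and `values` is an already-allocated Tensor):

// Regular parameters: lookup defaults to false, so the fan sums all dimensions.
ParameterInitGlorot dense_init;
dense_init.initialize_params(values);

// Lookup parameters: lookup = true excludes the trailing entry-count dimension,
// as done in LookupParameterStorage above.
ParameterInitGlorot lookup_init(true);
lookup_init.initialize_params(values);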
