This repository was archived by the owner on Jul 7, 2023. It is now read-only.
Merged
Commits
24 commits
ec45cd5  Added new initializers (joaogui1, Aug 20, 2019)
29821d8  Fixed glorot uniform (joaogui1, Aug 20, 2019)
431b268  Fixed order of init arguments (joaogui1, Aug 20, 2019)
eaacbb3  Added uniform random and documentation (joaogui1, Aug 21, 2019)
ff08e30  Merge https://github.com/tensorflow/tensor2tensor into initializers (joaogui1, Aug 22, 2019)
f30f579  Added requested changes to initializers.py (joaogui1, Aug 22, 2019)
a833246  Added requested changes to initializers_test.py (joaogui1, Aug 22, 2019)
531b52a  Added choice of input axis and output axis (joaogui1, Aug 22, 2019)
6b2a12b  Added choice of input axis and output axis (joaogui1, Aug 22, 2019)
3a772d3  Merge https://github.com/tensorflow/tensor2tensor into initializers (joaogui1, Aug 22, 2019)
30df555  fixed errors and implemented requested changes (joaogui1, Aug 24, 2019)
22d57b6  fixed typo (joaogui1, Aug 24, 2019)
021a094  tests passing (joaogui1, Aug 24, 2019)
7d1a409  Merge https://github.com/tensorflow/tensor2tensor into initializers (joaogui1, Aug 24, 2019)
212307a  fixed get fans (joaogui1, Aug 25, 2019)
5536aed  Merge https://github.com/tensorflow/tensor2tensor into initializers (joaogui1, Aug 26, 2019)
50823b3  Fixed numpy weird behavior (joaogui1, Aug 26, 2019)
ca507ec  Fixed typo (joaogui1, Aug 26, 2019)
ca967b0  Fixed merge conflict (joaogui1, Aug 28, 2019)
427f1cb  Use scipy.special.expit for sigmoid (joaogui1, Aug 28, 2019)
51dfe52  Use clip instead of maximum, see colab on pull request (joaogui1, Aug 28, 2019)
5af21d2  Added rectifiers (joaogui1, Aug 28, 2019)
7fd566b  Added Gaussian rectifier (joaogui1, Aug 28, 2019)
22820b0  Added Gaussian rectifier (joaogui1, Aug 28, 2019)
Use scipy.special.expit for sigmoid
joaogui1 committed Aug 28, 2019
commit 427f1cbc7d9e465f445457bfa45329b1c7f81ea5
1 change: 1 addition & 0 deletions tensor2tensor/trax/backend.py
@@ -158,6 +158,7 @@ def jax_randint(key, shape, minval, maxval, dtype=onp.int32):
     "name": "jax",
     "np": jnp,
     "logsumexp": jax_special.logsumexp,
+    "expit": jax_special.expit,
     "conv": jax_conv,
     "avg_pool": jax_avg_pool,
     "max_pool": jax_max_pool,
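For context, the line added above registers expit, the numerically stable logistic sigmoid from jax.scipy.special, in trax's dictionary-based backend, so layer code can call backend.expit without importing JAX directly. Below is a minimal sketch of that dispatch pattern, assuming only the dict structure visible in the hunk; everything except the "expit" entry is illustrative, not copied from trax.

import jax.scipy.special as jax_special

# Hypothetical miniature of the trax backend dict; only the "expit"
# entry comes from the diff above.
_JAX_BACKEND = {
    "name": "jax",
    "expit": jax_special.expit,
}

def expit(x):
  # Resolve the op through the active backend, as backend.expit does
  # once the JAX backend is selected.
  return _JAX_BACKEND["expit"](x)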
2 changes: 1 addition & 1 deletion tensor2tensor/trax/layers/core.py
@@ -35,7 +35,7 @@ def Relu(x, **unused_kwargs):

 @base.layer()
 def Sigmoid(x, **unused_kwargs):
-  return 1. / (1. + np.exp(-x))
+  return backend.expit(x)


 @base.layer()
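The payoff of this one-line change is numerical stability: the old expression 1. / (1. + np.exp(-x)) overflows inside exp for large negative x, while expit computes the same function without blowing up. A small demonstration using plain NumPy/SciPy (scipy.special.expit is what jax.scipy.special.expit mirrors):

import numpy as np
from scipy.special import expit

x = np.float32(-100.)
naive = 1. / (1. + np.exp(-x))  # np.exp(100.) overflows float32 to inf,
                                # emitting a RuntimeWarning; result is 0.0
stable = expit(x)               # ~3.8e-44, the correct subnormal float32
print(naive, stable)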
19 changes: 0 additions & 19 deletions tensor2tensor/trax/layers/initializers.py
@@ -22,14 +22,9 @@
 from tensor2tensor.trax import backend


-<<<<<<< HEAD
-def _get_fans(shape, out_dim=-1, in_dim=-2):
-  #temporary fix until numpy.delete supports negative indices
-=======
 def _GetFans(shape, out_dim=-1, in_dim=-2):
   """Get the fan-in and fan-out sizes for the given shape and dims."""
   # Temporary fix until numpy.delete supports negative indices.
->>>>>>> 8edb68ca31a02fe96a591e2ca4bd38a0e447277b
   if out_dim < 0:
     out_dim += len(shape)
   if in_dim < 0:
@@ -39,17 +34,11 @@ def _GetFans(shape, out_dim=-1, in_dim=-2):
   if len(shape) >= 2:
     fan_in, fan_out = shape[in_dim], shape[out_dim]
   elif len(shape) == 1:
-<<<<<<< HEAD
-    fan_in, fan_out = shape[0]
-  else:
-    fan_in, fan_out = 1.
-=======
     fan_in = shape[0]
     fan_out = shape[0]
   else:
     fan_in = 1.
     fan_out = 1.
->>>>>>> 8edb68ca31a02fe96a591e2ca4bd38a0e447277b
   fan_in *= receptive_field
   fan_out *= receptive_field
   return fan_in, fan_out
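To make the surviving _GetFans logic concrete: in_dim and out_dim select the channel axes, every other axis contributes to receptive_field, and both fans get multiplied by it. The receptive_field computation itself sits in lines the diff view elides; the numpy.delete-based product below is an assumption based on the "temporary fix" comment, and the whole function is a hypothetical re-creation for illustration only.

import numpy as onp  # trax convention: original numpy as onp

def get_fans_sketch(shape, out_dim=-1, in_dim=-2):
  """Hypothetical re-creation of _GetFans, for illustration only."""
  if out_dim < 0:
    out_dim += len(shape)
  if in_dim < 0:
    in_dim += len(shape)  # make indices non-negative before numpy.delete
  receptive_field = onp.prod(onp.delete(shape, [in_dim, out_dim]))
  if len(shape) >= 2:
    fan_in, fan_out = shape[in_dim], shape[out_dim]
  elif len(shape) == 1:
    fan_in = fan_out = shape[0]
  else:
    fan_in = fan_out = 1.
  return fan_in * receptive_field, fan_out * receptive_field

# A 3x3 convolution kernel with 64 input and 128 output channels:
print(get_fans_sketch((3, 3, 64, 128)))  # (576, 1152), i.e. fans times 3*3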
@@ -75,11 +64,7 @@ def Init(shape, rng):


 def VarianceScalingInitializer(out_dim, in_dim, scale, mode, distribution):
-<<<<<<< HEAD
-  """Initializer capable of adapting its scale to the shape of weights tensors."""
-=======
   """Initializer capable of adapting its scale to the shape of weights."""
->>>>>>> 8edb68ca31a02fe96a591e2ca4bd38a0e447277b
   if scale <= 0.:
     raise ValueError('scale must be positive float, {} given'.format(scale))
   if mode not in {'fan_in', 'fan_out', 'fan_avg'}:
@@ -88,12 +73,8 @@ def VarianceScalingInitializer(out_dim, in_dim, scale, mode, distribution):
                      .format(mode))

   def Init(shape, rng):
-<<<<<<< HEAD
-    fan_in, fan_out = _get_fans(shape, out_dim, in_dim)
-=======
     """The initializer function."""
     fan_in, fan_out = _GetFans(shape, out_dim, in_dim)
->>>>>>> 8edb68ca31a02fe96a591e2ca4bd38a0e447277b
     gain = scale
     if mode == 'fan_in':
       gain /= fan_in
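The branch structure in the final hunk is the heart of variance scaling: scale is divided by fan_in, fan_out, or their average, so one factory covers He initialization (mode='fan_in', scale=2.) and Glorot (mode='fan_avg', scale=1.). A worked sketch of that arithmetic follows; the fan_out and fan_avg branches and the sqrt that turns the gain into a standard deviation are elided by the diff, so they are written here per standard variance-scaling convention, not copied from the source.

def gain_to_std(scale, mode, fan_in, fan_out):
  # Mirrors the branches shown (and elided) in the hunk above.
  gain = scale
  if mode == 'fan_in':
    gain /= fan_in
  elif mode == 'fan_out':
    gain /= fan_out
  else:  # 'fan_avg'
    gain /= (fan_in + fan_out) / 2.
  return gain ** 0.5  # assuming sampled weights get variance == gain

# Glorot for the (576, 1152) fans computed earlier:
print(gain_to_std(1., 'fan_avg', 576, 1152))  # sqrt(2/1728) ~= 0.034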