Skip to content
This repository was archived by the owner on Jul 7, 2023. It is now read-only.
Merged
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Prev Previous commit
Next Next commit
Get rid of ae_input, since do_compress_attend is doing the same thing
PiperOrigin-RevId: 187102333
  • Loading branch information
T2T Team authored and Ryan Sepassi committed Mar 2, 2018
commit 4e068fc4b84a3de642db0d8fd7d87f44df409113
7 changes: 1 addition & 6 deletions tensor2tensor/models/research/transformer_vae.py
Original file line number Diff line number Diff line change
Expand Up @@ -338,10 +338,7 @@ def ae_transformer_internal(inputs,
targets, _ = common_layers.pad_to_same_length(
targets, max_targets_len_from_inputs,
final_length_divisible_by=2**hparams.num_compress_steps)
if hparams.ae_input:
targets_c = compress(targets, inputs, False, hparams, "compress")
else:
targets_c = compress(targets, None, False, hparams, "compress")
targets_c = compress(targets, inputs, False, hparams, "compress")
if hparams.mode != tf.estimator.ModeKeys.PREDICT:
# Compress and bottleneck.
latents_dense, latents_discrete, extra_loss, embed = hparams.bottleneck(
Expand Down Expand Up @@ -638,8 +635,6 @@ def transformer_ae_small():
# Reshape method for DVQ: slice, project
hparams.add_hparam("reshape_method", "slice")
hparams.add_hparam("trainable_projections", False)
# Add option to pass the input to the autoencoder
hparams.add_hparam("ae_input", True)
# Hparams for the Dirichlet process
hparams.add_hparam("dp_alpha", 0.5)
hparams.add_hparam("dp_strength", 0.25)
Expand Down