Commit 8180cba (1 parent: befde29)
Showing 5 changed files with 143 additions and 51 deletions.
File renamed without changes.
@@ -1,19 +1,19 @@
-python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 3 --head_transformer 4 --num_lstm 3 --not_train_embeddin --examples 300000 > tmp/log_1.txt 2>&1
-sleep 60
-python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 6 --head_transformer 4 --num_lstm 3 --not_train_embeddin --examples 300000 > tmp/log_2.txt 2>&1
-sleep 60
-python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 9 --head_transformer 4 --num_lstm 3 --not_train_embeddin --examples 300000 > tmp/log_3.txt 2>&1
-sleep 60
-# python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 12 --head_transformer 4 --num_lstm 3 --not_train_embeddin --examples 300000 > tmp/log_4.txt 2>&1
-sleep 60
-python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 3 --head_transformer 4 --num_lstm 6 --not_train_embeddin --examples 300000 > tmp/log_5.txt 2>&1
-sleep 60
-python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 3 --head_transformer 4 --num_lstm 9 --not_train_embeddin --examples 300000 > tmp/log_6.txt 2>&1
-sleep 60
-# python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 3 --head_transformer 4 --num_lstm 12 --not_train_embeddin --examples 300000 > tmp/log_7.txt 2>&1
-sleep 60
-python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 3 --head_transformer 4 --num_lstm 3 --examples 300000 > tmp/log_8.txt 2>&1
-sleep 60
-# python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 12 --head_transformer 4 --num_lstm 3 --examples 300000 > tmp/log_9.txt 2>&1
-sleep 60
-# python Transformer_multi_input.py --load_from_npy --epoch 10 --batch_size 256 --num_transformer 3 --head_transformer 4 --num_lstm 12 --examples 300000 > tmp/log_10.txt 2>&1
+# python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 3 --head_transformer 4 --num_lstm 3 --not_train_embedding --examples 300000 > tmp/log_1.txt 2>&1
+# sleep 60
+# python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 6 --head_transformer 4 --num_lstm 3 --not_train_embedding --examples 300000 > tmp/log_2.txt 2>&1
+# sleep 60
+# python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 9 --head_transformer 4 --num_lstm 3 --not_train_embedding --examples 300000 > tmp/log_3.txt 2>&1
+# sleep 60
+# python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 12 --head_transformer 4 --num_lstm 3 --not_train_embedding --examples 300000 > tmp/log_4.txt 2>&1
+# sleep 60
+python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 1 --head_transformer 4 --num_lstm 6 --not_train_embedding --examples 100000 > tmp/log_5.txt 2>&1
+sleep 60
+python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 1 --head_transformer 4 --num_lstm 9 --not_train_embedding --examples 100000 > tmp/log_6.txt 2>&1
+sleep 60
+# python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 1 --head_transformer 4 --num_lstm 12 --not_train_embedding --examples 300000 > tmp/log_7.txt 2>&1
+# sleep 60
+python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 1 --head_transformer 4 --num_lstm 3 --examples 100000 > tmp/log_8.txt 2>&1
+sleep 60
+# python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 12 --head_transformer 4 --num_lstm 3 --examples 300000 > tmp/log_9.txt 2>&1
+# sleep 60
+# python Transformer_multi_input.py --load_from_npy --epoch 3 --batch_size 256 --num_transformer 1 --head_transformer 4 --num_lstm 12 --examples 300000 > tmp/log_10.txt 2>&1
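For reference only, the three runs left uncommented above could also be driven by a small loop instead of copy-pasted shell lines. The sketch below is not part of this commit; it reuses the same CLI flags, tmp/log_N.txt paths and 60-second pause that appear in the script, and assumes it is launched from the same working directory.

# Hypothetical sweep driver (not in the commit): replays the uncommented runs above.
import subprocess
import time

# (log index, num_transformer, num_lstm, freeze pretrained embeddings?)
RUNS = [
    (5, 1, 6, True),
    (6, 1, 9, True),
    (8, 1, 3, False),
]

for log_idx, num_transformer, num_lstm, freeze_embedding in RUNS:
    cmd = [
        'python', 'Transformer_multi_input.py', '--load_from_npy',
        '--epoch', '3', '--batch_size', '256',
        '--num_transformer', str(num_transformer),
        '--head_transformer', '4',
        '--num_lstm', str(num_lstm),
        '--examples', '100000',
    ]
    if freeze_embedding:
        cmd.append('--not_train_embedding')
    with open(f'tmp/log_{log_idx}.txt', 'w') as log_file:
        subprocess.run(cmd, stdout=log_file, stderr=subprocess.STDOUT)
    time.sleep(60)  # same pause between runs as in the shell script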
@@ -0,0 +1,48 @@
def get_age_model(creative_id_emb, ad_id_emb, product_id_emb):
    """Build a three-input age classifier: each id sequence passes through a
    token-and-position embedding, a stack of Transformer blocks, a stack of
    bidirectional LSTMs and global max pooling; the three branches are then
    concatenated and fed to a 10-way softmax."""
    embed_dim = 128  # Embedding size for each token
    num_heads = 1  # Number of attention heads
    ff_dim = 256  # Hidden layer size in feed forward network inside transformer

    # shape: (sequence length, )
    # first input
    input_creative_id = Input(shape=(None,), name='creative_id')
    x1 = TokenAndPositionEmbedding(
        maxlen, NUM_creative_id, embed_dim, creative_id_emb)(input_creative_id)
    for _ in range(args.num_transformer):
        x1 = TransformerBlock(embed_dim, num_heads, ff_dim)(x1)
    for _ in range(args.num_lstm):
        x1 = Bidirectional(LSTM(256, return_sequences=True))(x1)
    x1 = layers.GlobalMaxPooling1D()(x1)

    # second input
    input_ad_id = Input(shape=(None,), name='ad_id')
    x2 = TokenAndPositionEmbedding(
        maxlen, NUM_ad_id, embed_dim, ad_id_emb)(input_ad_id)
    for _ in range(args.num_transformer):
        x2 = TransformerBlock(embed_dim, num_heads, ff_dim)(x2)
    for _ in range(args.num_lstm):
        x2 = Bidirectional(LSTM(256, return_sequences=True))(x2)
    x2 = layers.GlobalMaxPooling1D()(x2)

    # third input
    input_product_id = Input(shape=(None,), name='product_id')
    x3 = TokenAndPositionEmbedding(
        maxlen, NUM_product_id, embed_dim, product_id_emb)(input_product_id)
    for _ in range(args.num_transformer):
        x3 = TransformerBlock(embed_dim, num_heads, ff_dim)(x3)
    for _ in range(args.num_lstm):
        x3 = Bidirectional(LSTM(256, return_sequences=True))(x3)
    x3 = layers.GlobalMaxPooling1D()(x3)

    # concat x1 x2 x3
    x = concatenate([x1, x2, x3])
    # x = x1 + x2 + x3
    x = Dense(20)(x)
    output_y = Dense(10, activation='softmax')(x)

    model = Model([input_creative_id, input_ad_id, input_product_id], output_y)
    model.compile(loss='categorical_crossentropy',
                  optimizer='adam', metrics=['accuracy'])
    model.summary()

    return model
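For orientation, a hypothetical call site might look like the sketch below. It is not taken from the commit: like the function body itself, it assumes maxlen, NUM_creative_id, NUM_ad_id, NUM_product_id and args are defined elsewhere in the file, and that the pretrained embedding matrices are loaded from .npy files. The file paths and the random dummy data are illustrative only and stand in for the arrays produced by the --load_from_npy preprocessing.

# Hypothetical usage sketch (not in the commit).
import numpy as np
from tensorflow.keras.utils import to_categorical

# Placeholder paths: pretrained embedding matrices whose width matches the
# embed_dim (128) used inside get_age_model.
creative_id_emb = np.load('word2vec/creative_id_emb.npy')
ad_id_emb = np.load('word2vec/ad_id_emb.npy')
product_id_emb = np.load('word2vec/product_id_emb.npy')

model = get_age_model(creative_id_emb, ad_id_emb, product_id_emb)

# Dummy inputs purely to show the expected shapes: three padded id sequences
# per user, and a one-hot age label over 10 classes.
num_users = 1000
X = {
    'creative_id': np.random.randint(1, NUM_creative_id, size=(num_users, maxlen)),
    'ad_id': np.random.randint(1, NUM_ad_id, size=(num_users, maxlen)),
    'product_id': np.random.randint(1, NUM_product_id, size=(num_users, maxlen)),
}
y = to_categorical(np.random.randint(0, 10, size=num_users), num_classes=10)

model.fit(X, y, batch_size=args.batch_size, epochs=args.epoch, validation_split=0.1)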