blob: 683e1afbc7c0a54e2ad36ddb1079b800c7b668fc [file] [log] [blame]
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from keras.layers import Input, Embedding, LSTM, Dense, concatenate\n",
"from keras.models import Model\n",
"\n",
"# Headline input: meant to receive sequences of 100 integers, between 1 and 10000.\n",
"# Note that we can name any layer by passing it a \"name\" argument.\n",
"main_input = Input(shape=(100,), dtype='int32', name='main_input')\n",
"\n",
"# This embedding layer will encode the input sequence\n",
"# into a sequence of dense 512-dimensional vectors.\n",
"x = Embedding(output_dim=512, input_dim=10000, input_length=100)(main_input)\n",
"\n",
"# A LSTM will transform the vector sequence into a single vector,\n",
"# containing information about the entire sequence\n",
"lstm_out = LSTM(32)(x)\n",
"\n",
"# Auxiliary output: lets the LSTM/Embedding be trained smoothly even when\n",
"# the main loss is much further up in the model.\n",
"auxiliary_output = Dense(1, activation='sigmoid', name='aux_output')(lstm_out)\n",
"\n",
"# Auxiliary input: extra 5-dimensional features fed in alongside the LSTM summary.\n",
"auxiliary_input = Input(shape=(5,), name='aux_input')\n",
"\n",
"# Merge the LSTM summary vector with the auxiliary input features.\n",
"# Without this, the dense stack below would operate on the 3-D embedding\n",
"# sequence instead of the merged 2-D vector, and aux_input would be unused.\n",
"x = concatenate([lstm_out, auxiliary_input])\n",
"\n",
"# We stack a deep densely-connected network on top\n",
"x = Dense(64, activation='relu')(x)\n",
"x = Dense(64, activation='relu')(x)\n",
"x = Dense(64, activation='relu')(x)\n",
"\n",
"# And finally we add the main logistic regression layer\n",
"main_output = Dense(1, activation='sigmoid', name='main_output')(x)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}