import tensorflow as tf


class LSTNet(tf.keras.Model):
    def __init__(self, input_dim, output_dim, num_time_steps, hidden_size, num_layers, num_heads):
        super().__init__()
        # `input_shape` and `output_shape` are read-only properties on
        # tf.keras.Model, so the dimensions are stored under different names.
        self.input_dim = input_dim
        self.output_dim = output_dim
        self.num_time_steps = num_time_steps
        self.hidden_size = hidden_size
        self.num_layers = num_layers
        self.num_heads = num_heads
        # Convolutional layer: extracts short-term local patterns along the time axis.
        self.conv1d = tf.keras.layers.Conv1D(filters=32, kernel_size=5, activation='relu', padding='same')
        # Stacked BiGRU layers: `num_layers` was accepted but never used in the
        # original; here it is interpreted as the depth of the recurrent stack.
        self.bigru_layers = [
            tf.keras.layers.Bidirectional(tf.keras.layers.GRU(units=self.hidden_size, return_sequences=True))
            for _ in range(self.num_layers)
        ]
        # Multi-head self-attention layer: lets every time step attend to all others.
        self.multihead_attention = tf.keras.layers.MultiHeadAttention(num_heads=self.num_heads, key_dim=self.hidden_size)
        # Fully connected output head.
        self.fc1 = tf.keras.layers.Dense(units=64, activation='relu')
        self.fc2 = tf.keras.layers.Dense(units=self.output_dim)
    def call(self, inputs):
        # Convolutional layer
        x = self.conv1d(inputs)
        # BiGRU stack (bidirectional concatenation doubles the feature dim to 2 * hidden_size)
        for bigru in self.bigru_layers:
            x = bigru(x)
        # Multi-head self-attention (query = value = x); the output is projected
        # back to the query's feature dim, i.e. 2 * hidden_size.
        x = self.multihead_attention(x, x)
        # Flatten the attention output; the factor of 2 accounts for the BiGRU's
        # concatenated forward and backward states.
        x = tf.reshape(x, shape=(-1, self.num_time_steps * 2 * self.hidden_size))
        # Fully connected layers
        x = self.fc1(x)
        x = self.fc2(x)
        return x
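

# A minimal smoke test of the model above. The input layout assumed here is
# (batch, num_time_steps, input_dim) for a multivariate series; all
# hyperparameter values below are illustrative assumptions, not taken from
# the original source.
if __name__ == "__main__":
    batch_size, num_time_steps, input_dim = 8, 24, 10
    model = LSTNet(
        input_dim=input_dim,
        output_dim=1,  # e.g. a one-step-ahead forecast target (assumed)
        num_time_steps=num_time_steps,
        hidden_size=64,
        num_layers=2,
        num_heads=4,
    )
    dummy = tf.random.normal((batch_size, num_time_steps, input_dim))
    preds = model(dummy)
    print(preds.shape)  # expected: (8, 1)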