Skip to content

Commit

Permalink
fix: anno
Browse files Browse the repository at this point in the history
  • Loading branch information
LongxingTan committed Jul 25, 2024
1 parent c2cfe32 commit 1595274
Show file tree
Hide file tree
Showing 13 changed files with 10 additions and 60 deletions.
6 changes: 3 additions & 3 deletions examples/README.md
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
# TFTS examples

[time series prediction](./run_prediction_simple.py)

## kaggle datasets
- [web traffic prediction]()
- [ventilator]()


## more time series examples
- [classification]()
- [anomaly detection]()
- [uncertainty]()

- [time series anomaly detection](./run_anomaly.py)
Empty file removed examples/__init__.py
Empty file.
32 changes: 0 additions & 32 deletions examples/run_classification.py

This file was deleted.

14 changes: 5 additions & 9 deletions tfts/layers/attention_layer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
# @author: Longxing Tan
"""Layer for :py:class:`~tfts.models.transformer` :py:class:`~tfts.models.autoformer`"""

import math
Expand Down Expand Up @@ -30,9 +28,7 @@ def __init__(self, hidden_size: int, num_attention_heads: int, attention_probs_d
super(Attention, self).__init__()
if hidden_size % num_attention_heads:
raise ValueError(
-                "Hidden size ({}) must be divisible by the number of heads ({}).".format(
-                    hidden_size, num_attention_heads
-                )
+                f"Hidden size {hidden_size} must be divisible by the number of heads {num_attention_heads}."
)
self.hidden_size = hidden_size
self.num_attention_heads = num_attention_heads
Expand Down Expand Up @@ -105,7 +101,7 @@ def __init__(
hidden_size: int,
num_attention_heads: int,
attention_probs_dropout_prob: float = 0.0,
-        **kwargs: Dict[str, Any]
+        **kwargs: Dict[str, Any],
) -> None:
super(SelfAttention, self).__init__()
self.attention = Attention(
Expand All @@ -116,19 +112,19 @@ def build(self, input_shape: Tuple[Optional[int], ...]) -> None:
super(SelfAttention, self).build(input_shape)

def call(self, x: tf.Tensor, mask: Optional[tf.Tensor] = None):
-        """Self attention
+        """Self attention layer
Parameters
----------
x : tf.Tensor
-            input tensor for self-attention
+            3D input tensor for self-attention, (batch_size, sequence_length, feature_size)
mask : tf.Tensor, optional
masked, by default None
Returns
-------
tf.Tensor
-            self attention output
+            3D self attention output, (batch_size, sequence_length, attention_hidden_size)
"""
return self.attention(x, x, x, mask)

Expand Down
2 changes: 0 additions & 2 deletions tfts/layers/autoformer_layer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
# @author: Longxing Tan, [email protected]
"""Layer for :py:class:`~tfts.models.autoformer`"""

import math
Expand Down
2 changes: 0 additions & 2 deletions tfts/layers/cnn_layer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
# @author: Longxing Tan, [email protected]
"""Layer for :py:class:`~tfts.models.wavenet`"""

from typing import Any, Callable, Dict, List, Optional, Tuple, Type, Union
Expand Down
2 changes: 0 additions & 2 deletions tfts/layers/deepar_layer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
# @author: Longxing Tan, [email protected]
"""Layer for :py:class:`~tfts.models.deepar`"""

from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
Expand Down
2 changes: 0 additions & 2 deletions tfts/layers/dense_layer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
# @author: Longxing Tan, [email protected]
"""Layer for :py:class:`~tfts.models.wavenet` :py:class:`~tfts.models.transformer`"""

from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
Expand Down
2 changes: 0 additions & 2 deletions tfts/layers/embed_layer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
# @author: Longxing Tan, [email protected]
"""Layer for :py:class:`~tfts.models.transformer`"""

from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
Expand Down
2 changes: 0 additions & 2 deletions tfts/layers/mask_layer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
# @author: Longxing Tan, [email protected]
"""Layer for :py:class:`~tfts.models.transformer`"""

from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
Expand Down
2 changes: 0 additions & 2 deletions tfts/layers/nbeats_layer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
# @author: Longxing Tan, [email protected]
"""Layer for :py:class:`~tfts.models.nbeats`"""

from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
Expand Down
2 changes: 2 additions & 0 deletions tfts/layers/position_layer.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
"""Layer for :py:class:`~tfts.models.transformer`"""

from typing import Any, Callable, Dict, Optional, Tuple, Type, Union

import numpy as np
Expand Down
2 changes: 0 additions & 2 deletions tfts/layers/unet_layer.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,3 @@
# -*- coding: utf-8 -*-
# @author: Longxing Tan, [email protected]
"""Layer for :py:class:`~tfts.models.unet`"""

from typing import Any, Callable, Dict, Optional, Tuple, Type, Union
Expand Down

0 comments on commit 1595274

Please sign in to comment.