
Commit 4bfa307

Removed unused dt

1 parent 89a255b · commit 4bfa307

4 files changed: +2 -6 lines

src/nf/nf_dense_layer_submodule.f90

Lines changed: 0 additions & 1 deletion
@@ -1,7 +1,6 @@
 submodule(nf_dense_layer) nf_dense_layer_submodule
 
   use nf_activation, only: activation_function
-  use nf_base_layer, only: base_layer
   use nf_random, only: random_normal
 
   implicit none
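
The dropped `use nf_base_layer, only: base_layer` line is redundant rather than load-bearing: a Fortran submodule already sees every entity of its parent module, including use-associated ones, through host association. A minimal sketch of that rule, with hypothetical module names not taken from this repository:

module shapes_m
  ! The parent module use-associates real64 from the intrinsic module.
  use iso_fortran_env, only: real64
  implicit none
  interface
    module function circle_area(r) result(a)
      real(real64), intent(in) :: r
      real(real64) :: a
    end function circle_area
  end interface
end module shapes_m

submodule(shapes_m) shapes_impl
  ! No local "use iso_fortran_env" needed here: real64 is already
  ! visible by host association with the parent module shapes_m.
  implicit none
contains
  module function circle_area(r) result(a)
    real(real64), intent(in) :: r
    real(real64) :: a
    a = 3.141592653589793_real64 * r**2
  end function circle_area
end submodule shapes_impl

The same reasoning applies to the embedding, flatten, and multihead attention submodules below: each either inherits base_layer from its parent module or never references it, so the explicit import was dead code.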

src/nf/nf_embedding_layer_submodule.f90

Lines changed: 0 additions & 1 deletion
@@ -3,7 +3,6 @@
 #define ABSOLUTE 2
 
 submodule(nf_embedding_layer) nf_embedding_layer_submodule
-  use nf_base_layer, only: base_layer
   implicit none
 contains
   module function embedding_layer_cons(vocab_size, model_dimension, positional) result(res)

src/nf/nf_flatten_layer_submodule.f90

Lines changed: 0 additions & 1 deletion
@@ -4,7 +4,6 @@
   !! It is used internally by the layer type.
   !! It is not intended to be used directly by the user.
 
-  use nf_base_layer, only: base_layer
 
   implicit none
 

src/nf/nf_multihead_attention_layer_submodule.f90

Lines changed: 2 additions & 3 deletions
@@ -1,6 +1,5 @@
 submodule(nf_multihead_attention_layer) nf_multihead_attention_layer_submodule
   use nf_activation, only: softmax
-  use nf_base_layer, only: base_layer
   use nf_linear2d_layer, only: linear2d_layer
 
   implicit none
@@ -288,7 +287,7 @@ module subroutine init_base(self, input_shape)
     self % model_dimension = input_shape(2)
 
     if (mod(self % model_dimension, self % n_heads) /= 0) then
-      write(stderr, '(a)'), 'Number of heads must be divisible by model dimension'
+      write(stderr, '(a)') 'Number of heads must be divisible by model dimension'
       error stop
     end if
     self % head_size = self % model_dimension / self % n_heads
@@ -335,4 +334,4 @@ module subroutine init_base(self, input_shape)
     allocate(self % jacobian, mold=self % d_sdpa)
     allocate(self % d_normalize, mold=self % attention_matrix)
   end subroutine init_base
-end submodule nf_multihead_attention_layer_submodule
+end submodule nf_multihead_attention_layer_submodule
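
Besides dropping the unused import, this file fixes the write statement in init_base: standard Fortran puts no comma between the format specifier and the output list, so `write(stderr, '(a)'), '...'` is a legacy extension that gfortran only tolerates with a warning and stricter compilers reject. A minimal standalone sketch of the corrected form, assuming stderr is aliased to error_unit (this file's actual binding is not shown in the diff):

program write_fmt_demo
  use iso_fortran_env, only: stderr => error_unit
  implicit none

  ! Correct: the output list follows the format spec directly,
  ! with no comma after the closing parenthesis.
  write(stderr, '(a)') 'Number of heads must be divisible by model dimension'
end program write_fmt_demo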
