diff --git a/src/common.jl b/src/common.jl
index 8144a3d7cfc8f9633ad168952cfc7e70ab360284..e5fe03059e2b5ce8d0b8325090f1f0f86e3c3a4c 100644
--- a/src/common.jl
+++ b/src/common.jl
@@ -98,8 +98,8 @@ function divergence end
     end
 end
 
-@generated function divergence(w::StaticKernels.Window{S,N}) where {S<:SArray,N}
-    T = eltype(S)
+@generated function divergence(w::StaticKernels.Window{S,N}) where {M,N,S<:SMatrix{M,N}}
+    T = SVector{M,eltype(S)}
     sz = size(S)
     sz[end] == N || throw(ArgumentError("last eltype dimension does not match array dimensionality"))
 
diff --git a/test/runtests.jl b/test/runtests.jl
index ffd5184885915ed0204084044d5a2c04c8b5aa4f..46d7c7bde98b27f73cd01ec80d0060951c2d7ce1 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -98,3 +98,9 @@ end
     end
     @test fetch_u(st) ≈ fetch_u(stref)
 end
+
+@testset "divergence on extended arrays" begin
+    # Regression test: divergence on an SMatrix-eltype array with a Nothing
+    # extension must not throw, and must produce an array (of SVector values).
+    k = Kernel{(-1:1,-1:1)}(DualTVDD.divergence)
+    a = rand(SMatrix{2, 2, Float64}, 3, 3)
+    @test map(k, extend(a, StaticKernels.ExtensionNothing())) isa AbstractArray
+end