@@ -2,7 +2,7 @@ module Loess
 
 import Distances.euclidean
 
-using Statistics
+using Statistics, LinearAlgebra
 
 export loess, predict
 
@@ -44,6 +44,9 @@ function loess(xs::AbstractMatrix{T}, ys::AbstractVector{T};
 
     n, m = size(xs)
     q = ceil(Int, (span * n))
+    if q < degree + 1
+        throw(ArgumentError("neighborhood size must be larger than degree+1=$(degree + 1) but was $q. Try increasing the value of span."))
+    end
 
     # TODO: We need to keep track of how we are normalizing so we can
     # correctly apply predict to unnormalized data. We should have a normalize
@@ -53,7 +56,6 @@ function loess(xs::AbstractMatrix{T}, ys::AbstractVector{T};
     end
 
     kdtree = KDTree(xs, 0.05 * span)
-    verts = Array{T}(undef, length(kdtree.verts), m)
 
     # map vertices to their index in the bs coefficient matrix
     verts = Dict{Vector{T}, Int}()
@@ -69,6 +71,7 @@ function loess(xs::AbstractMatrix{T}, ys::AbstractVector{T};
     # TODO: higher degree fitting
     us = Array{T}(undef, q, 1 + degree * m)
     vs = Array{T}(undef, q)
+
     for (vert, k) in verts
         # reset perm
         for i in 1:n
@@ -85,20 +88,22 @@ function loess(xs::AbstractMatrix{T}, ys::AbstractVector{T};
         dmax = maximum([ds[perm[i]] for i = 1:q])
 
         for i in 1:q
-            pi = perm[i]
-            w = tricubic(ds[pi] / dmax)
+            pᵢ = perm[i]
+            w = tricubic(ds[pᵢ] / dmax)
             us[i,1] = w
             for j in 1:m
-                x = xs[pi, j]
+                x = xs[pᵢ, j]
                 wxl = w
                 for l in 1:degree
                     wxl *= x
-                    us[i, 1 + (j-1)*degree + l] = wxl # w*x^l
+                    us[i, 1 + (j - 1)*degree + l] = wxl # w*x^l
                 end
             end
-            vs[i] = ys[pi] * w
+            vs[i] = ys[pᵢ] * w
         end
-        bs[k,:] = us \ vs
+
+        F = qr(us, Val(true))
+        bs[k,:] = F \ vs
     end
 
     LoessModel{T}(xs, ys, bs, verts, kdtree)
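For reference, both the replaced `us \ vs` and the new `qr(us, Val(true)) \ vs` solve the same local weighted least-squares problem, the explicit factorization being the column-pivoted, rank-revealing route. A minimal standalone sketch, not part of the diff, with made-up values for the weighted design matrix and response:

```julia
using LinearAlgebra

# Stand-ins for the local weighted design matrix and response; values invented for illustration.
us = [1.0 0.1; 1.0 0.2; 1.0 0.3; 1.0 0.4]
vs = [0.11, 0.19, 0.32, 0.38]

b_backslash = us \ vs      # generic least-squares solve

F = qr(us, Val(true))      # column-pivoted QR, as spelled in the diff
                           # (newer Julia prefers qr(us, ColumnNorm()))
b_qr = F \ vs              # least-squares solution from the factorization

@assert isapprox(b_backslash, b_qr; atol=1e-8)
```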
@@ -149,11 +154,16 @@ function predict(model::LoessModel{T}, zs::AbstractVector{T}) where T <: Abstrac
     if m == 1
         @assert(length(adjacent_verts) == 2)
         z = zs[1]
-        u = (z - adjacent_verts[1][1]) /
-            (adjacent_verts[2][1] - adjacent_verts[1][1])
+        v₁, v₂ = adjacent_verts[1][1], adjacent_verts[2][1]
+
+        if z == v₁ || z == v₂
+            return evalpoly(zs, model.bs[model.verts[[z]],:])
+        end
+
+        u = (z - v₁) / (v₂ - v₁)
 
-        y1 = evalpoly(zs, model.bs[model.verts[[adjacent_verts[1][1]]],:])
-        y2 = evalpoly(zs, model.bs[model.verts[[adjacent_verts[2][1]]],:])
+        y1 = evalpoly(zs, model.bs[model.verts[[v₁]],:])
+        y2 = evalpoly(zs, model.bs[model.verts[[v₂]],:])
 
         return (1.0 - u) * y1 + u * y2
     else
         error("Multivariate blending not yet implemented")