Skip to content

Commit b98ae19

Browse files
authored
Merge pull request #30 from JuliaStats/aa/stuff
A few updates
2 parents df352ae + 6c23e1a commit b98ae19

File tree

6 files changed

+148
-117
lines changed

6 files changed

+148
-117
lines changed

.gitignore

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,7 @@
1+
*.jl.cov
2+
*.jl.*.cov
3+
*.jl.mem
4+
Manifest.toml
5+
docs/build/
6+
docs/site/
7+
docs/Manifest.toml

.travis.yml

Lines changed: 8 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,12 @@
11
language: julia
22
os:
3-
- linux
4-
- osx
3+
- linux
54
julia:
6-
- 0.7
7-
- 1.0
8-
- nightly
5+
- 0.7
6+
- 1.0
7+
- 1.1
8+
- nightly
99
notifications:
10-
email: false
11-
sudo: false
10+
email: false
11+
after_success:
12+
- julia -e 'using Pkg; Pkg.add("Coverage"); using Coverage; Codecov.submit(process_folder())';

Project.toml

Lines changed: 13 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,13 @@
1+
name = "Loess"
2+
uuid = "4345ca2d-374a-55d4-8d30-97f9976e7612"
3+
4+
[deps]
5+
Distances = "b4f34e82-e78d-54a5-968a-f98e89d6e8f7"
6+
Statistics = "10745b16-79ce-11e8-11f9-7d13ad32a3b2"
7+
8+
[extras]
9+
Random = "9a3f8284-a2c9-5f02-9a11-845980a1fd5c"
10+
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
11+
12+
[targets]
13+
test = ["Random", "Test"]

src/Loess.jl

Lines changed: 61 additions & 60 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,3 @@
1-
__precompile__()
2-
3-
41
module Loess
52

63
import Distances.euclidean
@@ -38,9 +35,11 @@ Returns:
3835
3936
"""
4037
function loess(xs::AbstractMatrix{T}, ys::AbstractVector{T};
41-
normalize::Bool=true, span::T=0.75, degree::Int=2) where T <: AbstractFloat
38+
normalize::Bool=true,
39+
span::AbstractFloat=0.75,
40+
degree::Integer=2) where T<:AbstractFloat
4241
if size(xs, 1) != size(ys, 1)
43-
error("Predictor and response arrays must of the same length")
42+
throw(DimensionMismatch("Predictor and response arrays must of the same length"))
4443
end
4544

4645
n, m = size(xs)
@@ -50,7 +49,7 @@ function loess(xs::AbstractMatrix{T}, ys::AbstractVector{T};
5049
# correctly apply predict to unnormalized data. We should have a normalize
5150
# function that just returns a vector of scaling factors.
5251
if normalize && m > 1
53-
xs = tnormalize!(copy(xs))
52+
xs = tnormalize!(copy(xs))
5453
end
5554

5655
kdtree = KDTree(xs, 0.05 * span)
@@ -59,7 +58,7 @@ function loess(xs::AbstractMatrix{T}, ys::AbstractVector{T};
5958
# map verticies to their index in the bs coefficient matrix
6059
verts = Dict{Vector{T}, Int}()
6160
for (k, vert) in enumerate(kdtree.verts)
62-
verts[vert] = k
61+
verts[vert] = k
6362
end
6463

6564
# Fit each vertex
@@ -72,44 +71,46 @@ function loess(xs::AbstractMatrix{T}, ys::AbstractVector{T};
7271
vs = Array{T}(undef, q)
7372
for (vert, k) in verts
7473
# reset perm
75-
for i in 1:n
76-
perm[i] = i
77-
end
74+
for i in 1:n
75+
perm[i] = i
76+
end
7877

7978
# distance to each point
80-
for i in 1:n
81-
ds[i] = euclidean(vec(vert), vec(xs[i,:]))
82-
end
79+
for i in 1:n
80+
ds[i] = euclidean(vec(vert), vec(xs[i,:]))
81+
end
8382

84-
# copy the q nearest points to vert into X
85-
partialsort!(perm, q, by=i -> ds[i])
86-
dmax = maximum([ds[perm[i]] for i = 1:q])
83+
# copy the q nearest points to vert into X
84+
partialsort!(perm, q, by=i -> ds[i])
85+
dmax = maximum([ds[perm[i]] for i = 1:q])
8786

88-
for i in 1:q
87+
for i in 1:q
8988
pi = perm[i]
90-
w = tricubic(ds[pi] / dmax)
91-
us[i,1] = w
92-
for j in 1:m
93-
x = xs[pi, j]
94-
wxl = w
95-
for l in 1:degree
89+
w = tricubic(ds[pi] / dmax)
90+
us[i,1] = w
91+
for j in 1:m
92+
x = xs[pi, j]
93+
wxl = w
94+
for l in 1:degree
9695
wxl *= x
97-
us[i, 1 + (j-1)*degree + l] = wxl # w*x^l
98-
end
99-
end
100-
vs[i] = ys[pi] * w
101-
end
102-
bs[k,:] = us \ vs
96+
us[i, 1 + (j-1)*degree + l] = wxl # w*x^l
97+
end
98+
end
99+
vs[i] = ys[pi] * w
100+
end
101+
bs[k,:] = us \ vs
103102
end
104103

105104
LoessModel{T}(xs, ys, bs, verts, kdtree)
106105
end
107106

108-
function loess(xs::AbstractVector{T}, ys::AbstractVector{T};
109-
normalize::Bool=true, span::T=0.75, degree::Int=2) where T <: AbstractFloat
110-
loess(reshape(xs, (length(xs), 1)), ys, normalize=normalize, span=span, degree=degree)
111-
end
107+
loess(xs::AbstractVector{T}, ys::AbstractVector{T}; kwargs...) where {T<:AbstractFloat} =
108+
loess(reshape(xs, (length(xs), 1)), ys; kwargs...)
112109

110+
function loess(xs::AbstractArray{T,N}, ys::AbstractVector{S}; kwargs...) where {T,N,S}
111+
R = float(promote_type(T, S))
112+
loess(convert(AbstractArray{R,N}, xs), convert(AbstractVector{R}, ys); kwargs...)
113+
end
113114

114115

115116
# Predict response values from a trained loess model and predictor observations.
@@ -126,7 +127,7 @@ end
126127
# A length n' vector of predicted response values.
127128
#
128129
function predict(model::LoessModel{T}, z::T) where T <: AbstractFloat
129-
predict(model, T[z])
130+
predict(model, T[z])
130131
end
131132

132133

@@ -136,40 +137,40 @@ function predict(model::LoessModel{T}, zs::AbstractVector{T}) where T <: Abstrac
136137
# in the univariate case, interpret a non-singleton zs as vector of
137138
# ponits, not one point
138139
if m == 1 && length(zs) > 1
139-
return predict(model, reshape(zs, (length(zs), 1)))
140+
return predict(model, reshape(zs, (length(zs), 1)))
140141
end
141142

142143
if length(zs) != m
143-
error("$(m)-dimensional model applied to length $(length(zs)) vector")
144+
error("$(m)-dimensional model applied to length $(length(zs)) vector")
144145
end
145146

146147
adjacent_verts = traverse(model.kdtree, zs)
147148

148149
if m == 1
149-
@assert(length(adjacent_verts) == 2)
150-
z = zs[1]
151-
u = (z - adjacent_verts[1][1]) /
152-
(adjacent_verts[2][1] - adjacent_verts[1][1])
153-
154-
y1 = evalpoly(zs, model.bs[model.verts[[adjacent_verts[1][1]]],:])
155-
y2 = evalpoly(zs, model.bs[model.verts[[adjacent_verts[2][1]]],:])
156-
return (1.0 - u) * y1 + u * y2
150+
@assert(length(adjacent_verts) == 2)
151+
z = zs[1]
152+
u = (z - adjacent_verts[1][1]) /
153+
(adjacent_verts[2][1] - adjacent_verts[1][1])
154+
155+
y1 = evalpoly(zs, model.bs[model.verts[[adjacent_verts[1][1]]],:])
156+
y2 = evalpoly(zs, model.bs[model.verts[[adjacent_verts[2][1]]],:])
157+
return (1.0 - u) * y1 + u * y2
157158
else
158-
error("Multivariate blending not yet implemented")
159-
# TODO:
160-
# 1. Univariate linear interpolation between adjacent verticies.
161-
# 2. Blend these estimates. (I'm not sure how this is done.)
159+
error("Multivariate blending not yet implemented")
160+
# TODO:
161+
# 1. Univariate linear interpolation between adjacent verticies.
162+
# 2. Blend these estimates. (I'm not sure how this is done.)
162163
end
163164
end
164165

165166

166167
function predict(model::LoessModel{T}, zs::AbstractMatrix{T}) where T <: AbstractFloat
167-
ys = Array{T}(undef, size(zs, 1))
168-
for i in 1:size(zs, 1)
169-
# the vec() here is not necessary on 0.5 anymore
170-
ys[i] = predict(model, vec(zs[i,:]))
171-
end
172-
ys
168+
ys = Array{T}(undef, size(zs, 1))
169+
for i in 1:size(zs, 1)
170+
# the vec() here is not necessary on 0.5 anymore
171+
ys[i] = predict(model, vec(zs[i,:]))
172+
end
173+
ys
173174
end
174175

175176
"""
@@ -201,13 +202,13 @@ function evalpoly(xs, bs)
201202
degree = div(length(bs) - 1, m)
202203
y = bs[1]
203204
for i in 1:m
204-
x = xs[i]
205+
x = xs[i]
205206
xx = x
206207
y += xx * bs[1 + (i-1)*degree + 1]
207-
for l in 2:degree
208+
for l in 2:degree
208209
xx *= x
209-
y += xx * bs[1 + (i-1)*degree + l]
210-
end
210+
y += xx * bs[1 + (i-1)*degree + l]
211+
end
211212
end
212213
y
213214
end
@@ -230,8 +231,8 @@ function tnormalize!(xs::AbstractMatrix{T}, q::T=0.1) where T <: AbstractFloat
230231
n, m = size(xs)
231232
cut = ceil(Int, (q * n))
232233
for j in 1:m
233-
tmp = sort!(xs[:,j])
234-
xs[:,j] ./= mean(tmp[cut+1:n-cut])
234+
tmp = sort!(xs[:,j])
235+
xs[:,j] ./= mean(tmp[cut+1:n-cut])
235236
end
236237
xs
237238
end

0 commit comments

Comments (0)