Skip to content

Commit acf298f

Browse files
authored
Merge pull request #24 from evanfields/ef_update_07
Ef update 0.7
2 parents bcc8f1a + 60ae38e commit acf298f

File tree

6 files changed

+47
-52
lines changed

6 files changed

+47
-52
lines changed

.travis.yml

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@ os:
33
- linux
44
- osx
55
julia:
6-
- 0.5
7-
- 0.6
6+
- 0.7
87
- nightly
98
notifications:
109
email: false

README.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,8 @@
11
# Loess
22

33
[![Build Status](https://travis-ci.org/JuliaStats/Loess.jl.svg?branch=master)](https://travis-ci.org/JuliaStats/Loess.jl)
4-
[![Loess](http://pkg.julialang.org/badges/Loess_0.5.svg)](http://pkg.julialang.org/?pkg=Loess)
54
[![Loess](http://pkg.julialang.org/badges/Loess_0.6.svg)](http://pkg.julialang.org/?pkg=Loess)
5+
[![Loess](http://pkg.julialang.org/badges/Loess_0.7.svg)](http://pkg.julialang.org/?pkg=Loess)
66

77
This is a pure Julia loess implementation, based on the fast kd-tree based
88
approximation described in the original Cleveland, et al papers, implemented

REQUIRE

Lines changed: 1 addition & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,2 @@
1-
julia 0.5
2-
Compat 0.17
1+
julia 0.7.0-beta2
32
Distances
4-
IterTools

src/Loess.jl

Lines changed: 20 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,18 @@
1-
isdefined(Base, :__precompile__) && __precompile__()
1+
__precompile__()
22

3-
module Loess
43

5-
using Compat
4+
module Loess
65

7-
import IterTools.product
86
import Distances.euclidean
97

8+
using Statistics
9+
1010
export loess, predict
1111

1212
include("kd.jl")
1313

1414

15-
type LoessModel{T <: AbstractFloat}
15+
mutable struct LoessModel{T <: AbstractFloat}
1616
xs::AbstractMatrix{T} # An n by m predictor matrix containing n observations from m predictors
1717
ys::AbstractVector{T} # A length n response vector
1818
bs::Matrix{T} # Least squares coefficients
@@ -37,8 +37,8 @@ Returns:
3737
A fit `LoessModel`.
3838
3939
"""
40-
function loess{T <: AbstractFloat}(xs::AbstractMatrix{T}, ys::AbstractVector{T};
41-
normalize::Bool=true, span::T=0.75, degree::Int=2)
40+
function loess(xs::AbstractMatrix{T}, ys::AbstractVector{T};
41+
normalize::Bool=true, span::T=0.75, degree::Int=2) where T <: AbstractFloat
4242
if size(xs, 1) != size(ys, 1)
4343
error("Predictor and response arrays must of the same length")
4444
end
@@ -54,7 +54,7 @@ function loess{T <: AbstractFloat}(xs::AbstractMatrix{T}, ys::AbstractVector{T};
5454
end
5555

5656
kdtree = KDTree(xs, 0.05 * span)
57-
verts = Array{T}(length(kdtree.verts), m)
57+
verts = Array{T}(undef, length(kdtree.verts), m)
5858

5959
# map vertices to their index in the bs coefficient matrix
6060
verts = Dict{Vector{T}, Int}()
@@ -63,13 +63,13 @@ function loess{T <: AbstractFloat}(xs::AbstractMatrix{T}, ys::AbstractVector{T};
6363
end
6464

6565
# Fit each vertex
66-
ds = Array{T}(n) # distances
66+
ds = Array{T}(undef, n) # distances
6767
perm = collect(1:n)
68-
bs = Array{T}(length(kdtree.verts), 1 + degree * m)
68+
bs = Array{T}(undef, length(kdtree.verts), 1 + degree * m)
6969

7070
# TODO: higher degree fitting
71-
us = Array{T}(q, 1 + degree * m)
72-
vs = Array{T}(q)
71+
us = Array{T}(undef, q, 1 + degree * m)
72+
vs = Array{T}(undef, q)
7373
for (vert, k) in verts
7474
# reset perm
7575
for i in 1:n
@@ -82,7 +82,7 @@ function loess{T <: AbstractFloat}(xs::AbstractMatrix{T}, ys::AbstractVector{T};
8282
end
8383

8484
# copy the q nearest points to vert into X
85-
select!(perm, q, by=i -> ds[i])
85+
partialsort!(perm, q, by=i -> ds[i])
8686
dmax = maximum([ds[perm[i]] for i = 1:q])
8787

8888
for i in 1:q
@@ -105,8 +105,8 @@ function loess{T <: AbstractFloat}(xs::AbstractMatrix{T}, ys::AbstractVector{T};
105105
LoessModel{T}(xs, ys, bs, verts, kdtree)
106106
end
107107

108-
function loess{T <: AbstractFloat}(xs::AbstractVector{T}, ys::AbstractVector{T};
109-
normalize::Bool=true, span::T=0.75, degree::Int=2)
108+
function loess(xs::AbstractVector{T}, ys::AbstractVector{T};
109+
normalize::Bool=true, span::T=0.75, degree::Int=2) where T <: AbstractFloat
110110
loess(reshape(xs, (length(xs), 1)), ys, normalize=normalize, span=span, degree=degree)
111111
end
112112

@@ -125,12 +125,12 @@ end
125125
# Returns:
126126
# A length n' vector of predicted response values.
127127
#
128-
function predict{T <: AbstractFloat}(model::LoessModel{T}, z::T)
128+
function predict(model::LoessModel{T}, z::T) where T <: AbstractFloat
129129
predict(model, T[z])
130130
end
131131

132132

133-
function predict{T <: AbstractFloat}(model::LoessModel{T}, zs::AbstractVector{T})
133+
function predict(model::LoessModel{T}, zs::AbstractVector{T}) where T <: AbstractFloat
134134
m = size(model.xs, 2)
135135

136136
# in the univariate case, interpret a non-singleton zs as vector of
@@ -163,8 +163,8 @@ function predict{T <: AbstractFloat}(model::LoessModel{T}, zs::AbstractVector{T}
163163
end
164164

165165

166-
function predict{T <: AbstractFloat}(model::LoessModel{T}, zs::AbstractMatrix{T})
167-
ys = Array{T}(size(zs, 1))
166+
function predict(model::LoessModel{T}, zs::AbstractMatrix{T}) where T <: AbstractFloat
167+
ys = Array{T}(undef, size(zs, 1))
168168
for i in 1:size(zs, 1)
169169
# the vec() here is not necessary on 0.5 anymore
170170
ys[i] = predict(model, vec(zs[i,:]))
@@ -226,7 +226,7 @@ Args:
226226
Modifies:
227227
`xs`
228228
"""
229-
function tnormalize!{T <: AbstractFloat}(xs::AbstractMatrix{T}, q::T=0.1)
229+
function tnormalize!(xs::AbstractMatrix{T}, q::T=0.1) where T <: AbstractFloat
230230
n, m = size(xs)
231231
cut = ceil(Int, (q * n))
232232
for j in 1:m

src/kd.jl

Lines changed: 21 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,22 +1,19 @@
1-
using Compat
2-
import Compat.view
3-
41
# Simple static kd-trees.
52

6-
@compat abstract type KDNode end
3+
abstract type KDNode end
74

8-
immutable KDLeafNode <: KDNode
5+
struct KDLeafNode <: KDNode
96
end
107

11-
immutable KDInternalNode{T <: AbstractFloat} <: KDNode
8+
struct KDInternalNode{T <: AbstractFloat} <: KDNode
129
j::Int # dimension on which the data is split
1310
med::T # median value where the split occurs
1411
leftnode::KDNode
1512
rightnode::KDNode
1613
end
1714

1815

19-
immutable KDTree{T <: AbstractFloat}
16+
struct KDTree{T <: AbstractFloat}
2017
xs::AbstractMatrix{T} # A matrix of n, m-dimensional observations
2118
perm::Vector{Int} # permutation of data to avoid modifying xs
2219
root::KDNode # root node
@@ -42,14 +39,14 @@ Returns:
4239
A `KDTree` object
4340
4441
"""
45-
function KDTree{T <: AbstractFloat}(xs::AbstractMatrix{T},
46-
leaf_size_factor=0.05,
47-
leaf_diameter_factor=0.0)
42+
function KDTree(xs::AbstractMatrix{T},
43+
leaf_size_factor=0.05,
44+
leaf_diameter_factor=0.0) where T <: AbstractFloat
4845

4946
n, m = size(xs)
5047
perm = collect(1:n)
5148

52-
bounds = Array{T}(2, m)
49+
bounds = Array{T}(undef, 2, m)
5350
for j in 1:m
5451
col = xs[:,j]
5552
bounds[1, j] = minimum(col)
@@ -63,7 +60,7 @@ function KDTree{T <: AbstractFloat}(xs::AbstractMatrix{T},
6360
verts = Set{Vector{T}}()
6461

6562
# Add a vertex for each corner of the hypercube
66-
for vert in product([bounds[:,j] for j in 1:m]...)
63+
for vert in Iterators.product([bounds[:,j] for j in 1:m]...)
6764
push!(verts, T[vert...])
6865
end
6966

@@ -116,12 +113,12 @@ Modifies:
116113
Returns:
117114
Either a `KDLeafNode` or a `KDInternalNode`
118115
"""
119-
function build_kdtree{T}(xs::AbstractMatrix{T},
120-
perm::AbstractArray,
121-
bounds::Matrix{T},
122-
leaf_size_cutoff::Int,
123-
leaf_diameter_cutoff::T,
124-
verts::Set{Vector{T}})
116+
function build_kdtree(xs::AbstractMatrix{T},
117+
perm::AbstractArray,
118+
bounds::Matrix{T},
119+
leaf_size_cutoff::Int,
120+
leaf_diameter_cutoff::T,
121+
verts::Set{Vector{T}}) where T
125122
n, m = size(xs)
126123

127124
if length(perm) <= leaf_size_cutoff || diameter(bounds) <= leaf_diameter_cutoff
@@ -148,14 +145,14 @@ function build_kdtree{T}(xs::AbstractMatrix{T},
148145
# find the median and partition
149146
if isodd(length(perm))
150147
mid = length(perm) ÷ 2
151-
select!(perm, mid, by=i -> xs[i, j])
148+
partialsort!(perm, mid, by=i -> xs[i, j])
152149
med = xs[perm[mid], j]
153150
mid1 = mid
154151
mid2 = mid + 1
155152
else
156153
mid1 = length(perm) ÷ 2
157154
mid2 = mid1 + 1
158-
select!(perm, mid1:mid2, by=i -> xs[i, j])
155+
partialsort!(perm, mid1:mid2, by=i -> xs[i, j])
159156
med = (xs[perm[mid1], j] + xs[perm[mid2], j]) / 2
160157
end
161158

@@ -169,7 +166,7 @@ function build_kdtree{T}(xs::AbstractMatrix{T},
169166
rightnode = build_kdtree(xs, view(perm,mid2:length(perm)), rightbounds,
170167
leaf_size_cutoff, leaf_diameter_cutoff, verts)
171168

172-
coords = Array{Array}(m)
169+
coords = Array{Array}(undef, m)
173170
for i in 1:m
174171
if i == j
175172
coords[i] = [med]
@@ -178,7 +175,7 @@ function build_kdtree{T}(xs::AbstractMatrix{T},
178175
end
179176
end
180177

181-
for vert in product(coords...)
178+
for vert in Iterators.product(coords...)
182179
push!(verts, T[vert...])
183180
end
184181

@@ -192,7 +189,7 @@ end
192189
Given a bounding hypercube `bounds`, return its vertices
193190
"""
194191
function bounds_verts(bounds::Matrix)
195-
collect(product([bounds[:, i] for i in 1:size(bounds, 2)]...))
192+
collect(Iterators.product([bounds[:, i] for i in 1:size(bounds, 2)]...))
196193
end
197194

198195

@@ -202,7 +199,7 @@ end
202199
Traverse the tree `kdtree` to the bottom and return the vertices of
203200
the bounding hypercube of the leaf node containing the point `x`.
204201
"""
205-
function traverse{T}(kdtree::KDTree{T}, x::AbstractVector{T})
202+
function traverse(kdtree::KDTree{T}, x::AbstractVector{T}) where T
206203
m = size(kdtree.bounds, 2)
207204

208205
if length(x) != m

test/runtests.jl

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
11
# These tests don't do much except ensure that the package loads,
22
# and does something sensible if it does.
33
using Loess
4-
using Base.Test
5-
using Compat
4+
using Test
5+
using Random
6+
using Statistics
67

78
srand(100)
89
xs = 10 .* rand(100)

0 commit comments

Comments
 (0)