Commit f9445b7: fix 0.7 deprecation

panlanfeng committed Oct 2, 2018
1 parent f7821fd commit f9445b7
Showing 8 changed files with 41 additions and 44 deletions.
2 changes: 1 addition & 1 deletion .travis.yml
@@ -3,7 +3,7 @@ os:
- osx
- linux
julia:
- - 0.5
+ - 1.0
- nightly
matrix:
allow_failures:
5 changes: 0 additions & 5 deletions README.md
@@ -1,12 +1,7 @@
# KernelEstimator

- [![KernelEstimator](http://pkg.julialang.org/badges/KernelEstimator_0.5.svg)](http://pkg.julialang.org/?pkg=KernelEstimator)
- [![KernelEstimator](http://pkg.julialang.org/badges/KernelEstimator_0.6.svg)](http://pkg.julialang.org/?pkg=KernelEstimator)

Linux: [![Build Status](https://travis-ci.org/panlanfeng/KernelEstimator.jl.svg?branch=master)](https://travis-ci.org/panlanfeng/KernelEstimator.jl)

[![Coverage Status](https://coveralls.io/repos/panlanfeng/KernelEstimator.jl/badge.svg?branch=master)](https://coveralls.io/r/panlanfeng/KernelEstimator.jl?branch=master)


The Julia package for nonparametric kernel density estimation and regression. This package currently includes univariate kernel density estimation, local constant regression (Nadaraya-Watson regression) and local linear regression. It can also compute the bootstrap confidence band [4].

9 changes: 4 additions & 5 deletions REQUIRE
@@ -1,7 +1,6 @@
- julia 0.5
+ julia 0.7
Distributions
- Optim 0.5.0
+ Optim
StatsBase
- Cubature 1.2.0
- StatsFuns
- Yeppp
+ HCubature
+ SpecialFunctions
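The dependency swap from Cubature (a wrapper around a C library) to the pure-Julia HCubature keeps the same call shape. A minimal sketch of the call the package relies on, assuming only the documented `hquadrature(f, a, b; maxevals)` keyword form; the example integrand is made up:

```julia
using HCubature

# hquadrature returns an (integral, error_estimate) tuple, just like
# Cubature's pquadrature did, so indexing with [1] still extracts the value.
val, err = hquadrature(x -> exp(-x^2), 0.0, 1.0, maxevals=200)
```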
11 changes: 5 additions & 6 deletions src/KernelEstimator.jl
@@ -1,15 +1,14 @@
- VERSION >= v"0.4" && __precompile__()
-
module KernelEstimator
using Distributions
using Optim
- using Yeppp
using StatsBase
- using Cubature
+ using HCubature
import StatsBase: RealVector, RealMatrix
- using StatsFuns
- import StatsFuns: invsqrt2π, log2π, sqrt2, invsqrt2
# Compat.@irrational invsqrt2π 0.398942280401432677939946 big(1.)/sqrt(big(2.)*π)
+ using Distributions: invsqrt2π, log2π, sqrt2, invsqrt2
+ using SpecialFunctions

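With StatsFuns no longer a direct dependency, the four irrational constants are imported through Distributions instead (the diff itself confirms that import path). A quick sanity sketch of what the imported names denote:

```julia
using Distributions: invsqrt2π, log2π, sqrt2, invsqrt2

# invsqrt2π is the standard normal density at zero, 1/√(2π) ≈ 0.39894...
@assert invsqrt2π ≈ 1 / sqrt(2π)
@assert log2π    ≈ log(2π)
@assert sqrt2    ≈ sqrt(2)
@assert invsqrt2 ≈ 1 / sqrt(2)
```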
14 changes: 7 additions & 7 deletions src/bandwidth.jl
@@ -44,7 +44,7 @@ end

#for general kernel
function Jh(xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int, xlb::Real, xub::Real)
- pquadrature(x->begin kernel(x, xdata,h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, kernel, h, w, n)
+ hquadrature(x->begin kernel(x, xdata,h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, kernel, h, w, n)
end
function leaveoneout(xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int)

@@ -58,7 +58,7 @@ function leaveoneout(xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int)
end
#For betakernel
function Jh(xdata::RealVector, logxdata::RealVector,log1_xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int, xlb::Real, xub::Real)
- pquadrature(x->begin kernel(x, logxdata, log1_xdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, log1_xdata, kernel, h, w, n)
+ hquadrature(x->begin kernel(x, logxdata, log1_xdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, log1_xdata, kernel, h, w, n)
end
function leaveoneout(xdata::RealVector, logxdata::RealVector, log1_xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int)

@@ -72,7 +72,7 @@ function leaveoneout(xdata::RealVector, logxdata::RealVector, log1_xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int)
end
#For gammakernel
function Jh(xdata::RealVector, logxdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int, xlb::Real, xub::Real)
- pquadrature(x->begin kernel(x, xdata, logxdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, kernel, h, w, n)
+ hquadrature(x->begin kernel(x, xdata, logxdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, kernel, h, w, n)
end
function leaveoneout(xdata::RealVector, logxdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int)

@@ -104,12 +104,12 @@ function bwlscv(xdata::RealVector, kernel::Function)
xlb = 0.0
xub = 1.0
hub = 0.25
- logxdata = Yeppp.log(xdata)
- log1_xdata = Yeppp.log(1.0 .- xdata)
+ logxdata = log.(xdata)
+ log1_xdata = log.(1.0 .- xdata)
return Optim.minimizer(Optim.optimize(h -> Jh(xdata, logxdata, log1_xdata, kernel, h, w, n, xlb,xub), hlb, hub, iterations=200,abs_tol=h0/n^2))
elseif kernel == gammakernel
xlb = 0.0
- logxdata = Yeppp.log(xdata)
+ logxdata = log.(xdata)
return Optim.minimizer(Optim.optimize(h -> Jh(xdata, logxdata, kernel, h, w, n, xlb,xub), hlb, hub, iterations=200,abs_tol=h0/n^2))
end
return Optim.minimizer(Optim.optimize(h -> Jh(xdata, kernel, h, w, n, xlb,xub), hlb, hub, iterations=200,abs_tol=h0/n^2))
@@ -342,7 +342,7 @@ function bwlocalconstant(xdata::RealMatrix, ydata::RealVector, kernel::Array{Fun
if any(h_output .<= 0.0)
for j in 1:p
if h_output[j] .<= 0.0
- h_output[j] = 2.* h0[j]
+ h_output[j] = 2 .* h0[j]
end
end
h_output = Optim.minimizer(Optim.optimize(h->lscvlocalconstant(xdata, ydata, kernel, h, w, n), h_output))
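The `2.* h0[j]` fix addresses a 0.7 parser deprecation: `2.*x` is ambiguous between `2. * x` (a Float64 literal times `x`) and `2 .* x` (a broadcast), so 0.7 warns about the unspaced form and 1.0 rejects it outright. A minimal illustration, with `h0` as stand-in data:

```julia
h0 = [0.5, 1.2]

# The space disambiguates the broadcasting intent; `2.*h0` no longer parses.
scaled = 2 .* h0      # [1.0, 2.4]

# On a scalar element, as in the bandwidth code, plain `*` is equivalent:
doubled = 2 * h0[1]   # 1.0
```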
28 changes: 14 additions & 14 deletions src/kernel.jl
@@ -41,20 +41,20 @@ function betakernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
elseif x>1-2*h
b = rhoxb(1-x, h) - 1
end

minus!(w, 1.0, xdata, n)
- Yeppp.log!(w, w)
- wtmp = Yeppp.log(xdata)
+ w .= log.(w)
+ wtmp = log.(xdata)
multiply!(w, b)
multiply!(wtmp, a)
- Yeppp.add!(w, w, wtmp)
+ w .= w .+ wtmp

# for ind in 1:n
# @inbounds w[ind] = a * log(xdata[ind]) + b * log(1 - xdata[ind])
# end

add!(w, -lbeta(a+1, b+1))
- Yeppp.exp!(w, w)
+ w .= exp.(w)
nothing
end
function betakernel(x::Real, logxdata::RealVector, log1_xdata::RealVector, h::Real, w::Vector, n::Int)
@@ -68,12 +68,12 @@ function betakernel(x::Real, logxdata::RealVector, log1_xdata::RealVector, h::Real, w::Vector, n::Int)
elseif x>1-2*h
b = rhoxb(1-x, h) - 1
end

for ind in 1:n
@inbounds w[ind] = a * logxdata[ind] + b * log1_xdata[ind]
end
add!(w, -lbeta(a+1, b+1))
- Yeppp.exp!(w, w)
+ w .= exp.(w)
nothing
end
# f̂(x) = 1/n ∑ᵢ K(xᵢ; x/b+1, b)
@@ -87,15 +87,15 @@ function gammakernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
rhob = 0.25 * rhob * rhob + 1.0
end

- Yeppp.log!(w, xdata)
+ w .= log.(xdata)
multiply!(w, rhob-1.0)
tmp = -rhob*log(h)-lgamma(rhob)
add!(w, tmp)
h1 = 1/h
for ind in 1:n
@inbounds w[ind] -= xdata[ind] * h1
end
- Yeppp.exp!(w, w)
+ w .= exp.(w)
nothing
end
function gammakernel(x::Real, xdata::RealVector, logxdata::RealVector, h::Real, w::Vector, n::Int)
@@ -115,7 +115,7 @@ function gammakernel(x::Real, xdata::RealVector, logxdata::RealVector, h::Real, w::Vector, n::Int)
for ind in 1:n
@inbounds w[ind] -= xdata[ind] * h1
end
- Yeppp.exp!(w, w)
+ w .= exp.(w)
nothing
end

@@ -134,8 +134,8 @@ function gaussiankernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
@inbounds w[ind]=-0.5*abs2((x - xdata[ind])*h1) - tmp
end
# add!(w, tmp, n)
- Yeppp.exp!(w, w)
+ w .= exp.(w)

nothing
end
function ekernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
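All the kernel updates follow one pattern: each vectorized Yeppp routine (`Yeppp.log!`, `Yeppp.exp!`, `Yeppp.add!`) becomes a fused dot-broadcast, which keeps the in-place, allocation-free behavior without the binary dependency. A sketch under the assumption that `w` is a preallocated work vector, as in the kernel functions:

```julia
w = rand(5)
xdata = rand(5)

# `.=` with dotted calls fuses into a single in-place loop over w,
# equivalent to: for i in eachindex(w); w[i] = exp(w[i]); end
w .= exp.(w)

# Out-of-place form, matching the old `wtmp = Yeppp.log(xdata)`:
wtmp = log.(xdata)   # allocates a fresh vector
```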
5 changes: 3 additions & 2 deletions test/runtests.jl
@@ -1,5 +1,6 @@
using KernelEstimator
- using Base.Test
+ using Test
+ using Random
using StatsBase
# write your own tests here
include("testreg.jl")
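Both test-suite imports track the 0.7 stdlib split: `Base.Test` became the standalone stdlib `Test`, and seeding moved from `srand` in Base to `Random.seed!`. A minimal sketch of the updated preamble:

```julia
using Test
using Random

Random.seed!(2017)    # replaces the 0.6-era srand(2017)
@test rand() >= 0.0   # @test now comes from the Test stdlib, not Base.Test
```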
11 changes: 7 additions & 4 deletions test/testreg.jl
@@ -1,9 +1,12 @@

## Univariate kernel density and regression
using Distributions
- srand(2017);
+ Random.seed!(2017);
+
+ linreg(x, y) = hcat(fill!(similar(x), 1), x) \ y
+
x=rand(Normal(10), 500)
- xeval=linspace(minimum(x), maximum(x), 100)
+ xeval=range(minimum(x), stop=maximum(x), length=100)
h = bwlscv(x, gaussiankernel)
@test h>0
denvalues=kerneldensity(x, xeval=xeval)
@@ -44,7 +47,7 @@ regfit = x*inv(x'*x)*x'*y

###Bounded gamma kernel density and regression
x = rand(Gamma(4,2), 500)
- xeval = linspace(0.01,20, 100)
+ xeval = range(0.01, stop=20, length=100)
h = bwlscv(x, gammakernel)
@test h>0
denvalues = kerneldensity(x, xeval=xeval, kernel=gammakernel, lb=0.0)
@@ -61,7 +64,7 @@ yfit1=npr(x, y, xeval=x, reg=locallinear, kernel=gammakernel, lb=0.0)

#bounded beta kernel density and regression
x = rand(Beta(4,2), 500) * 10
- xeval = linspace(0, 10, 100)
+ xeval = range(0, stop=10, length=100)
h = bwlscv(x./10, betakernel)
@test h>0
denvalues=kerneldensity(x, xeval=xeval, kernel=betakernel,h=h, lb=0.0,ub=10.0)
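Two Base removals drive the test changes: `linspace(a, b, n)` becomes `range(a, stop=b, length=n)`, and `linreg` is gone entirely, so the test file defines a one-line least-squares substitute. A sketch of both; the `x`/`y` data here are illustrative only:

```julia
using Random
Random.seed!(1)

# Lazy range replacing linspace; collect() yields a plain Vector when needed.
xeval = range(0.0, stop=10.0, length=100)

# The test file's shim: prepend an intercept column, then solve least squares.
linreg(x, y) = hcat(fill!(similar(x), 1), x) \ y

x = rand(100)
y = 2.0 .+ 3.0 .* x .+ 0.1 .* randn(100)
intercept, slope = linreg(x, y)   # ≈ 2.0 and ≈ 3.0
```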
