From f9445b7f0c3972c77a9db5725e5f8aed536c83f3 Mon Sep 17 00:00:00 2001
From: Lanfeng Pan
Date: Mon, 1 Oct 2018 22:46:50 -0400
Subject: [PATCH] fix 0.7 deprecation

---
 .travis.yml            |  2 +-
 README.md              |  5 -----
 REQUIRE                |  9 ++++-----
 src/KernelEstimator.jl | 11 +++++------
 src/bandwidth.jl       | 14 +++++++-------
 src/kernel.jl          | 28 ++++++++++++++--------------
 test/runtests.jl       |  5 +++--
 test/testreg.jl        | 11 +++++++----
 8 files changed, 41 insertions(+), 44 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index f5d68c6..47bb23b 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -3,7 +3,7 @@ os:
   - osx
   - linux
 julia:
-  - 0.5
+  - 1.0
   - nightly
 matrix:
   allow_failures:
diff --git a/README.md b/README.md
index 25e1e46..2da6174 100644
--- a/README.md
+++ b/README.md
@@ -1,12 +1,7 @@
 # KernelEstimator
-[![KernelEstimator](http://pkg.julialang.org/badges/KernelEstimator_0.5.svg)](http://pkg.julialang.org/?pkg=KernelEstimator)
-[![KernelEstimator](http://pkg.julialang.org/badges/KernelEstimator_0.6.svg)](http://pkg.julialang.org/?pkg=KernelEstimator)
-
 Linux: [![Build Status](https://travis-ci.org/panlanfeng/KernelEstimator.jl.svg?branch=master)](https://travis-ci.org/panlanfeng/KernelEstimator.jl)
 
-[![Coverage Status](https://coveralls.io/repos/panlanfeng/KernelEstimator.jl/badge.svg?branch=master)](https://coveralls.io/r/panlanfeng/KernelEstimator.jl?branch=master)
-
 The Julia package for nonparametric kernel density estimate and regression. This package currently includes univariate kernel density estimate, local constant regression (Nadaraya-Watson regression) and local linear regression. It can also compute the Bootstrap confidence band [4].
diff --git a/REQUIRE b/REQUIRE
index b9a0780..6e0eb74 100644
--- a/REQUIRE
+++ b/REQUIRE
@@ -1,7 +1,6 @@
-julia 0.5
+julia 0.7
 Distributions
-Optim 0.5.0
+Optim
 StatsBase
-Cubature 1.2.0
-StatsFuns
-Yeppp
+HCubature
+SpecialFunctions
diff --git a/src/KernelEstimator.jl b/src/KernelEstimator.jl
index d17fc8d..09e1e08 100644
--- a/src/KernelEstimator.jl
+++ b/src/KernelEstimator.jl
@@ -1,15 +1,14 @@
 VERSION >= v"0.4" && __precompile__()
-module KernelEstimator 
+module KernelEstimator
 using Distributions
 using Optim
-using Yeppp
 using StatsBase
-using Cubature
+using HCubature
 import StatsBase: RealVector, RealMatrix
-using StatsFuns
-import StatsFuns: invsqrt2π, log2π, sqrt2, invsqrt2
-# Compat.@irrational invsqrt2π 0.398942280401432677939946 big(1.)/sqrt(big(2.)*π)
+
+using Distributions: invsqrt2π, log2π, sqrt2, invsqrt2
+using SpecialFunctions
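
A note on the src/KernelEstimator.jl hunk above: the constants formerly imported from StatsFuns are now pulled out of Distributions' namespace. A minimal sanity check, assuming a 2018-era Distributions that still carries these StatsFuns names (the names are exactly those in the + line):

    using Distributions: invsqrt2π, log2π, sqrt2, invsqrt2

    @assert invsqrt2π ≈ 1 / sqrt(2π)   # 0.39894228040143267...
    @assert log2π ≈ log(2π)
    @assert sqrt2 * invsqrt2 ≈ 1.0

If that import ever stops resolving, the comment line deleted in the hunk records the fallback; on Julia 1.0 the same definition can be spelled Base.@irrational invsqrt2π 0.398942280401432677939946 big(1.)/sqrt(big(2.)*π).
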
diff --git a/src/bandwidth.jl b/src/bandwidth.jl
index 6b09ee3..0ca113d 100644
--- a/src/bandwidth.jl
+++ b/src/bandwidth.jl
@@ -44,7 +44,7 @@ end
 
 #for general kernel
 function Jh(xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int, xlb::Real, xub::Real)
-    pquadrature(x->begin kernel(x, xdata,h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, kernel, h, w, n)
+    hquadrature(x->begin kernel(x, xdata,h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, kernel, h, w, n)
 end
 
@@ -58,7 +58,7 @@ function leaveoneout(xdata::RealVector, kernel::Function, h::Real, w::Vector, n:
 end
 
 #For betakernel
 function Jh(xdata::RealVector, logxdata::RealVector,log1_xdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int, xlb::Real, xub::Real)
-    pquadrature(x->begin kernel(x, logxdata, log1_xdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, log1_xdata, kernel, h, w, n)
+    hquadrature(x->begin kernel(x, logxdata, log1_xdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, log1_xdata, kernel, h, w, n)
 end
 
@@ -72,7 +72,7 @@ function leaveoneout(xdata::RealVector, logxdata::RealVector, log1_xdata::RealVe
 end
 
 #For gammakernel
 function Jh(xdata::RealVector, logxdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int, xlb::Real, xub::Real)
-    pquadrature(x->begin kernel(x, xdata, logxdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, kernel, h, w, n)
+    hquadrature(x->begin kernel(x, xdata, logxdata, h,w,n); mean(w)^2; end, xlb, xub, maxevals=200)[1] - leaveoneout(xdata, logxdata, kernel, h, w, n)
 end
 
 function leaveoneout(xdata::RealVector, logxdata::RealVector, kernel::Function, h::Real, w::Vector, n::Int)
@@ -104,12 +104,12 @@ function bwlscv(xdata::RealVector, kernel::Function)
         xlb = 0.0
         xub = 1.0
         hub = 0.25
-        logxdata = Yeppp.log(xdata)
-        log1_xdata = Yeppp.log(1.0 .- xdata)
+        logxdata = log.(xdata)
+        log1_xdata = log.(1.0 .- xdata)
         return Optim.minimizer(Optim.optimize(h -> Jh(xdata, logxdata, log1_xdata, kernel, h, w, n, xlb,xub), hlb, hub, iterations=200,abs_tol=h0/n^2))
     elseif kernel == gammakernel
         xlb = 0.0
-        logxdata = Yeppp.log(xdata)
+        logxdata = log.(xdata)
         return Optim.minimizer(Optim.optimize(h -> Jh(xdata, logxdata, kernel, h, w, n, xlb,xub), hlb, hub, iterations=200,abs_tol=h0/n^2))
     end
     return Optim.minimizer(Optim.optimize(h -> Jh(xdata, kernel, h, w, n, xlb,xub), hlb, hub, iterations=200,abs_tol=h0/n^2))
@@ -342,7 +342,7 @@ function bwlocalconstant(xdata::RealMatrix, ydata::RealVector, kernel::Array{Fun
     if any(h_output .<= 0.0)
         for j in 1:p
             if h_output[j] .<= 0.0
-                h_output[j] = 2.* h0[j]
+                h_output[j] = 2 .* h0[j]
             end
         end
         h_output = Optim.minimizer(Optim.optimize(h->lscvlocalconstant(xdata, ydata, kernel, h, w, n), h_output))
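
The three Jh hunks above are a one-word migration: Cubature's pquadrature becomes HCubature's hquadrature. A minimal sketch of why this is a drop-in swap, with a made-up integrand: both functions return an (integral, error) tuple, so the [1] indexing in Jh carries over unchanged. HCubature is pure Julia and provides only the h-adaptive variant.

    using HCubature

    f(x) = exp(-x^2)                    # any smooth integrand
    val, err = hquadrature(f, 0.0, 1.0; maxevals=200)
    @assert isapprox(val, 0.7468241328124271; atol=1e-8)   # reference value of the integral on [0, 1]
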
diff --git a/src/kernel.jl b/src/kernel.jl
index 9d51a4e..70ad618 100644
--- a/src/kernel.jl
+++ b/src/kernel.jl
@@ -41,20 +41,20 @@ function betakernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
     elseif x>1-2*h
         b = rhoxb(1-x, h) - 1
     end
-    
+
     minus!(w, 1.0, xdata, n)
-    Yeppp.log!(w, w)
-    wtmp = Yeppp.log(xdata)
+    w .= log.(w)
+    wtmp = log.(xdata)
     multiply!(w, b)
     multiply!(wtmp, a)
-    Yeppp.add!(w, w, wtmp)
-    
+    w .= w .+ wtmp
+
     # for ind in 1:n
     #     @inbounds w[ind] = a * log(xdata[ind]) + b * log(1 - xdata[ind])
     # end
-    
+
     add!(w, -lbeta(a+1, b+1))
-    Yeppp.exp!(w, w)
+    w .= exp.(w)
     nothing
 end
@@ -68,12 +68,12 @@ function betakernel(x::Real, logxdata::RealVector, log1_xdata::RealVector, h::Re
     elseif x>1-2*h
         b = rhoxb(1-x, h) - 1
     end
-    
+
     for ind in 1:n
         @inbounds w[ind] = a * logxdata[ind] + b * log1_xdata[ind]
     end
     add!(w, -lbeta(a+1, b+1))
-    Yeppp.exp!(w, w)
+    w .= exp.(w)
     nothing
 end
 #f̂(x) = 1/n ∑ᵢ K(xᵢ;x /b+1, b )
@@ -87,7 +87,7 @@ function gammakernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
         rhob = 0.25 * rhob * rhob + 1.0
     end
 
-    Yeppp.log!(w, xdata)
+    w .= log.(xdata)
     multiply!(w, rhob-1.0)
     tmp = -rhob*log(h)-lgamma(rhob)
     add!(w, tmp)
@@ -95,7 +95,7 @@
     for ind in 1:n
         @inbounds w[ind] -= xdata[ind] * h1
     end
-    Yeppp.exp!(w, w)
+    w .= exp.(w)
     nothing
 end
 function gammakernel(x::Real, xdata::RealVector, logxdata::RealVector, h::Real, w::Vector, n::Int)
@@ -115,7 +115,7 @@
     for ind in 1:n
         @inbounds w[ind] -= xdata[ind] * h1
     end
-    Yeppp.exp!(w, w)
+    w .= exp.(w)
     nothing
 end
 
@@ -134,8 +134,8 @@ function gaussiankernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
         @inbounds w[ind]=-0.5*abs2((x - xdata[ind])*h1) - tmp
     end
     # add!(w, tmp, n)
-    Yeppp.exp!(w, w)
-    
+    w .= exp.(w)
+
     nothing
 end
 function ekernel(x::Real, xdata::RealVector, h::Real, w::Vector, n::Int)
diff --git a/test/runtests.jl b/test/runtests.jl
index 4cada91..b8ca254 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -1,5 +1,6 @@
 using KernelEstimator
-using Base.Test
-
+using Test
+using Random
+using StatsBase
 # write your own tests here
 include("testreg.jl")
diff --git a/test/testreg.jl b/test/testreg.jl
index f9ee5c5..c2aee61 100644
--- a/test/testreg.jl
+++ b/test/testreg.jl
@@ -1,9 +1,12 @@
 ##Univariate kerneldensity and regression
 using Distributions
 
-srand(2017);
+Random.seed!(2017);
+
+linreg(x, y) = hcat(fill!(similar(x), 1), x) \ y
+
 x=rand(Normal(10), 500)
-xeval=linspace(minimum(x), maximum(x), 100)
+xeval=range(minimum(x), stop=maximum(x), length=100)
 h = bwlscv(x, gaussiankernel)
 @test h>0
 denvalues=kerneldensity(x, xeval=xeval)
@@ -44,7 +47,7 @@ regfit = x*inv(x'*x)*x'*y
 
 ###Bounded gamma kernel density and regression
 x = rand(Gamma(4,2), 500)
-xeval = linspace(0.01,20, 100)
+xeval = range(0.01, stop=20, length=100)
 h = bwlscv(x, gammakernel)
 @test h>0
 denvalues = kerneldensity(x, xeval=xeval, kernel=gammakernel, lb=0.0)
@@ -61,7 +64,7 @@ yfit1=npr(x, y, xeval=x, reg=locallinear, kernel=gammakernel, lb=0.0)
 
 #bounded beta kernel density and regression
 x = rand(Beta(4,2), 500) * 10
-xeval = linspace(0, 10, 100)
+xeval = range(0, stop=10, length=100)
 h = bwlscv(x./10, betakernel)
 @test h>0
 denvalues=kerneldensity(x, xeval=xeval, kernel=betakernel,h=h, lb=0.0,ub=10.0)
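
A closing note on the linreg helper added in test/testreg.jl above: Base.linreg was removed in Julia 0.7, and the ordinary least-squares solve in the + line is the replacement suggested by the 0.7 deprecation notes. hcat(fill!(similar(x), 1), x) builds the [1 x] design matrix, and the backslash solve returns the coefficient vector [intercept, slope]. A small usage sketch with made-up data:

    linreg(x, y) = hcat(fill!(similar(x), 1), x) \ y

    x = collect(0.0:0.1:10.0)
    y = 2.0 .+ 3.0 .* x            # an exact line, so the fit recovers it
    b0, b1 = linreg(x, y)
    @assert b0 ≈ 2.0 && b1 ≈ 3.0
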