R version 3.5.2 RC (2018-12-13 r75856) -- "Eggshell Igloo"
Copyright (C) 2018 The R Foundation for Statistical Computing
Platform: x86_64-pc-linux-gnu (64-bit)

R is free software and comes with ABSOLUTELY NO WARRANTY.
You are welcome to redistribute it under certain conditions.
Type 'license()' or 'licence()' for distribution details.

R is a collaborative project with many contributors.
Type 'contributors()' for more information and
'citation()' on how to cite R or R packages in publications.

Type 'demo()' for some demos, 'help()' for on-line help, or
'help.start()' for an HTML browser interface to help.
Type 'q()' to quit R.

> #  File src/library/stats/tests/nls.R
> #  Part of the R package, https://www.R-project.org
> #
> #  This program is free software; you can redistribute it and/or modify
> #  it under the terms of the GNU General Public License as published by
> #  the Free Software Foundation; either version 2 of the License, or
> #  (at your option) any later version.
> #
> #  This program is distributed in the hope that it will be useful,
> #  but WITHOUT ANY WARRANTY; without even the implied warranty of
> #  MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
> #  GNU General Public License for more details.
> #
> #  A copy of the GNU General Public License is available at
> #  https://www.R-project.org/Licenses/
> 
> ## tests of nls, especially of weighted fits
> 
> library(stats)
> options(digits = 5) # to avoid trivial printed differences
> options(useFancyQuotes = FALSE) # avoid fancy quotes in o/p
> options(show.nls.convergence = FALSE) # avoid non-diffable output
> options(warn = 1)
> 
> have_MASS <- requireNamespace('MASS', quietly = TRUE)
> 
> pdf("nls-test.pdf")
> 
> ## utility for comparing nls() results:  [TODO: use more often below]
> .n <- function(r) r[names(r) != "call"]
> 
> ## selfStart.default() w/ no parameters:
> logist <- deriv( ~Asym/(1+exp(-(x-xmid)/scal)), c("Asym", "xmid", "scal"),
+ 		function(x, Asym, xmid, scal){} )
> logistInit <- function(mCall, LHS, data) {
+     xy <- sortedXyData(mCall[["x"]], LHS, data)
+     if(nrow(xy) < 3) stop("Too few distinct input values to fit a logistic")
+     Asym <- max(abs(xy[,"y"]))
+     if (Asym != max(xy[,"y"])) Asym <- -Asym  # negative asymptote
+     xmid <- NLSstClosestX(xy, 0.5 * Asym)
+     scal <- NLSstClosestX(xy, 0.75 * Asym) - xmid
+     setNames(c(Asym, xmid, scal),
+ 	     mCall[c("Asym", "xmid", "scal")])
+ }
> logist <- selfStart(logist, initial = logistInit) ##-> Error in R 1.5.0
> str(logist)
function (x, Asym, xmid, scal)  
 - attr(*, "initial")=function (mCall, LHS, data)  
 - attr(*, "class")= chr "selfStart"
> 
> ## lower and upper in algorithm="port"
> set.seed(123)
> x <- runif(200)
> a <- b <- 1; c <- -0.1
> y <- a+b*x+c*x^2+rnorm(200, sd=0.05)
> plot(x,y)
> curve(a+b*x+c*x^2, add = TRUE)
> nls(y ~ a+b*x+c*I(x^2), start = c(a=1, b=1, c=0.1), algorithm = "port")
Nonlinear regression model
  model: y ~ a + b * x + c * I(x^2)
   data: parent.frame()
      a       b       c 
 1.0058  0.9824 -0.0897 
 residual sum-of-squares: 0.46

Algorithm "port", convergence message: relative convergence (4)
> (fm <- nls(y ~ a+b*x+c*I(x^2), start = c(a=1, b=1, c=0.1),
+            algorithm = "port", lower = c(0, 0, 0)))
Nonlinear regression model
  model: y ~ a + b * x + c * I(x^2)
   data: parent.frame()
   a    b    c 
1.02 0.89 0.00 
 residual sum-of-squares: 0.468

Algorithm "port", convergence message: both X-convergence and relative convergence (5)
> if(have_MASS) print(confint(fm))
Waiting for profiling to be done...
     2.5%    97.5%
a 1.00875 1.037847
b 0.84138 0.914645
c      NA 0.042807
> 
> ## weighted nls fit: unsupported < 2.3.0
> set.seed(123)
> y <- x <- 1:10
> yeps <- y + rnorm(length(y), sd = 0.01)
> wts <- rep(c(1, 2), length = 10); wts[5] <- 0
> fit0 <- lm(yeps ~ x, weights = wts)
> summary(fit0, cor = TRUE)

Call:
lm(formula = yeps ~ x, weights = wts)

Weighted Residuals:
     Min       1Q   Median       3Q      Max 
-0.01562 -0.00723 -0.00158  0.00403  0.02413 

Coefficients:
            Estimate Std. Error t value Pr(>|t|)    
(Intercept)  0.00517    0.00764    0.68     0.52    
x            0.99915    0.00119  841.38   <2e-16 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0132 on 7 degrees of freedom
Multiple R-squared:     1,	Adjusted R-squared:     1 
F-statistic: 7.08e+05 on 1 and 7 DF,  p-value: <2e-16

Correlation of Coefficients:
  (Intercept)
x -0.89      

> cf0 <- coef(summary(fit0))[, 1:2]
> fit <- nls(yeps ~ a + b*x, start = list(a = 0.12345, b = 0.54321),
+            weights = wts, trace = TRUE)
112.14 :  0.12345 0.54321
0.0012128 :  0.0051705 0.9991529
> summary(fit, cor = TRUE)

Formula: yeps ~ a + b * x

Parameters:
  Estimate Std. Error t value Pr(>|t|)    
a  0.00517    0.00764    0.68     0.52    
b  0.99915    0.00119  841.37   <2e-16 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0132 on 7 degrees of freedom

Correlation of Parameter Estimates:
  a    
b -0.89

> stopifnot(all.equal(residuals(fit), residuals(fit0), tolerance = 1e-5,
+                     check.attributes = FALSE))
> stopifnot(df.residual(fit) == df.residual(fit0))
> cf1 <- coef(summary(fit))[, 1:2]
> fit2 <- nls(yeps ~ a + b*x, start = list(a = 0.12345, b = 0.54321),
+             weights = wts, trace = TRUE, algorithm = "port")
  0:     56.070572: 0.123450 0.543210
  1:     6.3964587:  1.34546 0.700840
  2: 0.00060639084: 0.00517053 0.999153
  3: 0.00060639084: 0.00517051 0.999153
> summary(fit2, cor = TRUE)

Formula: yeps ~ a + b * x

Parameters:
  Estimate Std. Error t value Pr(>|t|)    
a  0.00517    0.00764    0.68     0.52    
b  0.99915    0.00119  841.38   <2e-16 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0132 on 7 degrees of freedom

Correlation of Parameter Estimates:
  a    
b -0.89

Algorithm "port", convergence message: both X-convergence and relative convergence (5)

> cf2 <- coef(summary(fit2))[, 1:2]
> rownames(cf0) <- c("a", "b")
> # expect relative errors ca 2e-08
> stopifnot(all.equal(cf1, cf0, tolerance = 1e-6),
+           all.equal(cf1, cf0, tolerance = 1e-6))
> stopifnot(all.equal(residuals(fit2), residuals(fit0), tolerance = 1e5,
+                     check.attributes = FALSE))
> 
> 
> DNase1 <- subset(DNase, Run == 1)
> DNase1$wts <- rep(8:1, each = 2)
> fm1 <- nls(density ~ SSlogis(log(conc), Asym, xmid, scal),
+            data = DNase1, weights = wts)
> summary(fm1)

Formula: density ~ SSlogis(log(conc), Asym, xmid, scal)

Parameters:
     Estimate Std. Error t value Pr(>|t|)    
Asym   2.3350     0.0966    24.2  3.5e-12 ***
xmid   1.4731     0.0947    15.6  8.8e-10 ***
scal   1.0385     0.0304    34.1  4.2e-14 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

> 
> ## directly
> fm2 <- nls(density ~ Asym/(1 + exp((xmid - log(conc))/scal)),
+            data = DNase1, weights = wts,
+            start = list(Asym = 3, xmid = 0, scal = 1))
> summary(fm2)

Formula: density ~ Asym/(1 + exp((xmid - log(conc))/scal))

Parameters:
     Estimate Std. Error t value Pr(>|t|)    
Asym   2.3350     0.0966    24.2  3.5e-12 ***
xmid   1.4731     0.0947    15.6  8.8e-10 ***
scal   1.0385     0.0304    34.1  4.2e-14 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

> stopifnot(all.equal(coef(summary(fm2)), coef(summary(fm1)), tolerance = 1e-6))
> stopifnot(all.equal(residuals(fm2), residuals(fm1), tolerance = 1e-5))
> stopifnot(all.equal(fitted(fm2), fitted(fm1), tolerance = 1e-6))
> fm2a <- nls(density ~ Asym/(1 + exp((xmid - log(conc)))),
+             data = DNase1, weights = wts,
+             start = list(Asym = 3, xmid = 0))
> anova(fm2a, fm2)
Analysis of Variance Table

Model 1: density ~ Asym/(1 + exp((xmid - log(conc))))
Model 2: density ~ Asym/(1 + exp((xmid - log(conc))/scal))
  Res.Df Res.Sum Sq Df  Sum Sq F value Pr(>F)
1     14     0.0186                          
2     13     0.0164  1 0.00212    1.68   0.22
> 
> ## and without using weights
> fm3 <- nls(~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc))/scal))),
+            data = DNase1, start = list(Asym = 3, xmid = 0, scal = 1))
> summary(fm3)

Formula: 0 ~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc))/scal)))

Parameters:
     Estimate Std. Error t value Pr(>|t|)    
Asym   2.3350     0.0966    24.2  3.5e-12 ***
xmid   1.4731     0.0947    15.6  8.8e-10 ***
scal   1.0385     0.0304    34.1  4.2e-14 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

> stopifnot(all.equal(coef(summary(fm3)), coef(summary(fm1)), tolerance = 1e-6))
> ft <- with(DNase1, density - fitted(fm3)/sqrt(wts))
> stopifnot(all.equal(ft, fitted(fm1), tolerance = 1e-6))
> # sign of residuals is reversed
> r <- with(DNase1, -residuals(fm3)/sqrt(wts))
> all.equal(r, residuals(fm1), tolerance = 1e-5)
[1] TRUE
> fm3a <- nls(~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc))))),
+             data = DNase1, start = list(Asym = 3, xmid = 0))
> anova(fm3a, fm3)
Analysis of Variance Table

Model 1: 0 ~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc)))))
Model 2: 0 ~ sqrt(wts) * (density - Asym/(1 + exp((xmid - log(conc))/scal)))
  Res.Df Res.Sum Sq Df  Sum Sq F value Pr(>F)
1     14     0.0186                          
2     13     0.0164  1 0.00212    1.68   0.22
> 
> ## using conditional linearity
> fm4 <- nls(density ~ 1/(1 + exp((xmid - log(conc))/scal)),
+            data = DNase1, weights = wts,
+            start = list(xmid = 0, scal = 1), algorithm = "plinear")
> summary(fm4)

Formula: density ~ 1/(1 + exp((xmid - log(conc))/scal))

Parameters:
     Estimate Std. Error t value Pr(>|t|)    
xmid   1.4731     0.0947    15.6  8.8e-10 ***
scal   1.0385     0.0304    34.1  4.2e-14 ***
.lin   2.3350     0.0966    24.2  3.5e-12 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

> cf <- coef(summary(fm4))[c(3,1,2), ]
> rownames(cf)[2] <- "Asym"
> stopifnot(all.equal(cf, coef(summary(fm1)), tolerance = 1e-6,
+                     check.attributes = FALSE))
> stopifnot(all.equal(residuals(fm4), residuals(fm1), tolerance = 1e-5))
> stopifnot(all.equal(fitted(fm4), fitted(fm1), tolerance = 1e-6))
> fm4a <- nls(density ~ 1/(1 + exp((xmid - log(conc)))),
+             data = DNase1, weights = wts,
+             start = list(xmid = 0), algorithm = "plinear")
> anova(fm4a, fm4)
Analysis of Variance Table

Model 1: density ~ 1/(1 + exp((xmid - log(conc))))
Model 2: density ~ 1/(1 + exp((xmid - log(conc))/scal))
  Res.Df Res.Sum Sq Df  Sum Sq F value Pr(>F)
1     14     0.0186                          
2     13     0.0164  1 0.00212    1.68   0.22
> 
> ## using 'port'
> fm5 <- nls(density ~ Asym/(1 + exp((xmid - log(conc))/scal)),
+            data = DNase1, weights = wts,
+            start = list(Asym = 3, xmid = 0, scal = 1),
+            algorithm = "port")
> summary(fm5)

Formula: density ~ Asym/(1 + exp((xmid - log(conc))/scal))

Parameters:
     Estimate Std. Error t value Pr(>|t|)    
Asym   2.3350     0.0966    24.2  3.5e-12 ***
xmid   1.4731     0.0947    15.6  8.8e-10 ***
scal   1.0385     0.0304    34.1  4.2e-14 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.0355 on 13 degrees of freedom

Algorithm "port", convergence message: relative convergence (4)

> stopifnot(all.equal(coef(summary(fm5)), coef(summary(fm1)), tolerance = 1e-6))
> stopifnot(all.equal(residuals(fm5), residuals(fm1), tolerance = 1e-5))
> stopifnot(all.equal(fitted(fm5), fitted(fm1), tolerance = 1e-6))
> 
> ## check profiling
> pfm1 <- profile(fm1)
> pfm3 <- profile(fm3)
> for(m in names(pfm1))
+     stopifnot(all.equal(pfm1[[m]], pfm3[[m]], tolerance = 1e-5))
> pfm5 <- profile(fm5)
> for(m in names(pfm1))
+     stopifnot(all.equal(pfm1[[m]], pfm5[[m]], tolerance = 1e-5))
> if(have_MASS) {
+     print(c1 <- confint(fm1))
+     print(c4 <- confint(fm4, 1:2))
+     stopifnot(all.equal(c1[2:3, ], c4, tolerance = 1e-3))
+ }
Waiting for profiling to be done...
        2.5%  97.5%
Asym 2.14936 2.5724
xmid 1.28535 1.6966
scal 0.97526 1.1068
Waiting for profiling to be done...
       2.5%  97.5%
xmid 1.2866 1.6949
scal 0.9757 1.1063
> 
> ## some low-dimensional examples
> npts <- 1000
> set.seed(1001)
> x <- runif(npts)
> b <- 0.7
> y <- x^b+rnorm(npts, sd=0.05)
> a <- 0.5
> y2 <- a*x^b+rnorm(npts, sd=0.05)
> c <- 1.0
> y3 <- a*(x+c)^b+rnorm(npts, sd=0.05)
> d <- 0.5
> y4 <- a*(x^d+c)^b+rnorm(npts, sd=0.05)
> m1 <- c(y ~ x^b, y2 ~ a*x^b, y3 ~ a*(x+exp(logc))^b)
> s1 <- list(c(b=1), c(a=1,b=1), c(a=1,b=1,logc=0))
> for(p in 1:3) {
+     fm <- nls(m1[[p]], start = s1[[p]])
+     print(fm)
+     if(have_MASS) print(confint(fm))
+     fm <- nls(m1[[p]], start = s1[[p]], algorithm = "port")
+     print(fm)
+     if(have_MASS) print(confint(fm))
+ }
Nonlinear regression model
  model: y ~ x^b
   data: parent.frame()
    b 
0.695 
 residual sum-of-squares: 2.39
Waiting for profiling to be done...
   2.5%   97.5% 
0.68704 0.70281 
Nonlinear regression model
  model: y ~ x^b
   data: parent.frame()
    b 
0.695 
 residual sum-of-squares: 2.39

Algorithm "port", convergence message: relative convergence (4)
Waiting for profiling to be done...
   2.5%   97.5% 
0.68704 0.70281 
Nonlinear regression model
  model: y2 ~ a * x^b
   data: parent.frame()
    a     b 
0.502 0.724 
 residual sum-of-squares: 2.51
Waiting for profiling to be done...
     2.5%   97.5%
a 0.49494 0.50893
b 0.70019 0.74767
Nonlinear regression model
  model: y2 ~ a * x^b
   data: parent.frame()
    a     b 
0.502 0.724 
 residual sum-of-squares: 2.51

Algorithm "port", convergence message: relative convergence (4)
Waiting for profiling to be done...
     2.5%   97.5%
a 0.49494 0.50893
b 0.70019 0.74767
Nonlinear regression model
  model: y3 ~ a * (x + exp(logc))^b
   data: parent.frame()
     a      b   logc 
 0.558  0.603 -0.176 
 residual sum-of-squares: 2.44
Waiting for profiling to be done...
         2.5%   97.5%
a     0.35006 0.66057
b     0.45107 0.91473
logc -0.64627 0.40946
Nonlinear regression model
  model: y3 ~ a * (x + exp(logc))^b
   data: parent.frame()
     a      b   logc 
 0.558  0.603 -0.176 
 residual sum-of-squares: 2.44

Algorithm "port", convergence message: relative convergence (4)
Waiting for profiling to be done...
         2.5%   97.5%
a     0.35006 0.66057
b     0.45107 0.91473
logc -0.64627 0.40946
> 
> if(have_MASS) {
+     fm <- nls(y2~x^b, start=c(b=1), algorithm="plinear")
+     print(confint(profile(fm)))
+     fm <- nls(y3 ~ (x+exp(logc))^b, start=c(b=1, logc=0), algorithm="plinear")
+     print(confint(profile(fm)))
+ }
   2.5%   97.5% 
0.70019 0.74767 
         2.5%   97.5%
b     0.45105 0.91471
logc -0.64625 0.40933
> 
> 
> ## more profiling with bounds
> op <- options(digits=3)
> npts <- 10
> set.seed(1001)
> a <- 2
> b <- 0.5
> x <- runif(npts)
> y <- a*x/(1+a*b*x) + rnorm(npts, sd=0.2)
> gfun <- function(a,b,x) {
+     if(a < 0 || b < 0) stop("bounds violated")
+     a*x/(1+a*b*x)
+ }
> m1 <- nls(y ~ gfun(a,b,x), algorithm = "port",
+           lower = c(0,0), start = c(a=1, b=1))
> (pr1 <- profile(m1))
$a
      tau par.vals.a par.vals.b
1  -3.869      0.706      0.000
2  -3.114      0.802      0.000
3  -0.863      1.124      0.000
4   0.000      1.538      0.263
5   0.590      1.952      0.446
6   1.070      2.423      0.592
7   1.534      3.082      0.737
8   1.969      4.034      0.878
9   2.376      5.502      1.014
10  2.751      7.929      1.144
11  3.090     12.263      1.264
12  3.375     20.845      1.373

$b
     tau par.vals.a par.vals.b
1 -0.673     1.2087     0.0272
2  0.000     1.5381     0.2633
3  0.707     2.0026     0.4994
4  1.365     2.6295     0.7236
5  1.994     3.5762     0.9522
6  2.611     5.1820     1.1962
7  3.225     8.2162     1.4614
8  3.820    17.3946     1.7512

attr(,"original.fit")
Nonlinear regression model
  model: y ~ gfun(a, b, x)
   data: parent.frame()
    a     b 
1.538 0.263 
 residual sum-of-squares: 0.389

Algorithm "port", convergence message: relative convergence (4)
attr(,"summary")

Formula: y ~ gfun(a, b, x)

Parameters:
  Estimate Std. Error t value Pr(>|t|)  
a    1.538      0.617    2.49    0.037 *
b    0.263      0.352    0.75    0.476  
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.221 on 8 degrees of freedom

Algorithm "port", convergence message: relative convergence (4)

attr(,"class")
[1] "profile.nls" "profile"    
> if(have_MASS) print(confint(pr1))
  2.5% 97.5%
a 0.96  5.20
b   NA  1.07
> 
> gfun <- function(a,b,x) {
+     if(a < 0 || b < 0 || a > 1.5 || b > 1) stop("bounds violated")
+     a*x/(1+a*b*x)
+ }
> m2 <- nls(y ~ gfun(a,b,x), algorithm = "port",
+           lower = c(0, 0), upper=c(1.5, 1), start = c(a=1, b=1))
> profile(m2)
$a
     tau par.vals.a par.vals.b
1 -3.681      0.729      0.000
2 -2.945      0.823      0.000
3 -0.977      1.099      0.000
4  0.000      1.500      0.243

$b
     tau par.vals.a par.vals.b
1 -0.733    1.18200    0.00395
2  0.000    1.50000    0.24263
3  1.645    1.50000    0.48132
4  2.154    1.50000    0.57869
5  2.727    1.50000    0.70706
6  3.288    1.50000    0.85748

attr(,"original.fit")
Nonlinear regression model
  model: y ~ gfun(a, b, x)
   data: parent.frame()
    a     b 
1.500 0.243 
 residual sum-of-squares: 0.39

Algorithm "port", convergence message: relative convergence (4)
attr(,"summary")

Formula: y ~ gfun(a, b, x)

Parameters:
  Estimate Std. Error t value Pr(>|t|)  
a    1.500      0.598    2.51    0.036 *
b    0.243      0.356    0.68    0.514  
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.221 on 8 degrees of freedom

Algorithm "port", convergence message: relative convergence (4)

attr(,"class")
[1] "profile.nls" "profile"    
> if(have_MASS) print(confint(m2))
Waiting for profiling to be done...
   2.5% 97.5%
a 0.907    NA
b    NA 0.611
> options(op)
> 
> ## scoping problems
> test <- function(trace=TRUE)
+ {
+     x <- seq(0,5,len=20)
+     n <- 1
+     y <- 2*x^2 + n + rnorm(x)
+     xy <- data.frame(x=x,y=y)
+     myf <- function(x,a,b,c) a*x^b+c
+     list(with.start=
+          nls(y ~ myf(x,a,b,n), data=xy, start=c(a=1,b=1), trace=trace),
+          no.start= ## cheap auto-init to 1
+ 	 suppressWarnings(
+ 	     nls(y ~ myf(x,A,B,n), data=xy)))
+ }
> t1 <- test(); t1$with.start
8291.9 :  1 1
726.02 :  0.80544 2.42971
552.85 :  1.290 2.129
70.431 :  1.9565 1.9670
26.555 :  1.9788 2.0064
26.503 :  1.9798 2.0046
26.503 :  1.9799 2.0046
Nonlinear regression model
  model: y ~ myf(x, a, b, n)
   data: xy
   a    b 
1.98 2.00 
 residual sum-of-squares: 26.5
> ##__with.start:
> ## failed to find n in 2.2.x
> ## found wrong n in 2.3.x
> ## finally worked in 2.4.0
> ##__no.start: failed in 3.0.2
> stopifnot(all.equal(.n(t1[[1]]), .n(t1[[2]])))
> rm(a,b)
> t2 <- test(FALSE)
> stopifnot(all.equal(lapply(t1, .n),
+ 		    lapply(t2, .n), tolerance = 0.16))# different random error
> 
> 
> ## list 'start'
> set.seed(101)# (remain independent of above)
> getExpmat <- function(theta, t)
+ {
+         conc <- matrix(nrow = length(t), ncol = length(theta))
+         for(i in 1:length(theta)) conc[, i] <- exp(-theta[i] * t)
+         conc
+ }
> expsum <- as.vector(getExpmat(c(.05,.005), 1:100) %*% c(1,1))
> expsumNoisy <- expsum + max(expsum) *.001 * rnorm(100)
> expsum.df <-data.frame(expsumNoisy)
> 
> ## estimate decay rates, amplitudes with default Gauss-Newton
> summary (nls(expsumNoisy ~ getExpmat(k, 1:100) %*% sp, expsum.df,
+              start = list(k = c(.6,.02), sp = c(1,2))))

Formula: expsumNoisy ~ getExpmat(k, 1:100) %*% sp

Parameters:
    Estimate Std. Error t value Pr(>|t|)    
k1  5.00e-02   2.73e-04     183   <2e-16 ***
k2  4.97e-03   4.77e-05     104   <2e-16 ***
sp1 1.00e+00   3.96e-03     253   <2e-16 ***
sp2 9.98e-01   4.43e-03     225   <2e-16 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.00182 on 96 degrees of freedom

> 
> ## didn't work with port in 2.4.1
> summary (nls(expsumNoisy ~ getExpmat(k, 1:100) %*% sp, expsum.df,
+              start = list(k = c(.6,.02), sp = c(1,2)),
+              algorithm = "port"))

Formula: expsumNoisy ~ getExpmat(k, 1:100) %*% sp

Parameters:
    Estimate Std. Error t value Pr(>|t|)    
k1  5.00e-02   2.73e-04     183   <2e-16 ***
k2  4.97e-03   4.77e-05     104   <2e-16 ***
sp1 1.00e+00   3.96e-03     253   <2e-16 ***
sp2 9.98e-01   4.43e-03     225   <2e-16 ***
---
Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1

Residual standard error: 0.00182 on 96 degrees of freedom

Algorithm "port", convergence message: both X-convergence and relative convergence (5)

> 
> 
> ## PR13540
> 
> x <- runif(200)
> b0 <- c(rep(0,100),runif(100))
> b1 <- 1
> fac <- as.factor(rep(c(0,1), each = 100))
> y <- b0 + b1*x + rnorm(200, sd=0.05)
> # next failed in 2.8.1
> fit <- nls(y~b0[fac] + b1*x, start = list(b0=c(1,1), b1=1),
+            algorithm ="port", upper = c(100, 100, 100))
> # next did not "fail" in proposed fix:
> fiB <- nls(y~b0[fac] + b1*x, start = list(b0=c(1,1), b1=101),
+            algorithm ="port", upper = c(100, 100, 100),
+            control = list(warnOnly=TRUE))# warning ..
Warning in nls(y ~ b0[fac] + b1 * x, start = list(b0 = c(1, 1), b1 = 101),  :
  Convergence failure: initial par violates constraints
> with(fiB$convInfo, ## start par. violates constraints
+      stopifnot(isConv == FALSE, stopCode == 300))
> 
> 
> ## PR#17367 -- nls() quoting non-syntactical variable names
> ##
> op <- options(warn = 2)# no warnings allowed from here
> ##
> dN <- data.frame('NO [µmol/l]' = c(1,3,8,17), t = 1:4, check.names=FALSE)
> fnN <- `NO [µmol/l]` ~ a + k* exp(t)
> ## lm() works,  nls() should too
> lm.N  <- lm(`NO [µmol/l]` ~ exp(t) ,                          data = dN)
> summary(lm.N) -> slmN
> nm. <- nls(`NO [µmol/l]` ~ a + k*exp(t), start=list(a=0,k=1), data = dN)
> ## In R <= 3.4.x : Error in eval(predvars, data, env) : object 'NO' not found
> nmf <- nls(fnN,                          start=list(a=0,k=1), data = dN)
> ## (ditto; gave identical error)
> noC  <- function(L) L[-match("call", names(L))]
> stopifnot(all.equal(noC (nm.), noC (nmf)))
> ##
> ## with list for which  as.data.frame() does not work [-> different branch, not using model.frame!]
> ## list version (has been valid "forever", still doubtful, rather give error [FIXME] ?)
> lsN <- c(as.list(dN), list(foo="bar")); lsN[["t"]] <- 1:8
> nmL <- nls(`NO [µmol/l]` ~ a + k*exp(t), start=list(a=0,k=1), data = lsN)
> stopifnot(all.equal(coef(nmL), c(a = 5.069866, k = 0.003699669), tol = 4e-7))# seen 4.2e-8
> 
> ## trivial RHS -- should work even w/o 'start='
> fi1 <- nls(y ~ a, start = list(a=1))
> ## -> 2 deprecation warnings "length 1 in vector-arithmetic" from nlsModel()  in R 3.4.x ..
> options(op) # warnings about missing 'start' ok:
> f.1 <- nls(y ~ a) # failed in R 3.4.x
Warning in nls(y ~ a) :
  No starting values specified for some parameters.
Initializing 'a' to '1.'.
Consider specifying 'start' or using a selfStart model
> stopifnot(all.equal(noC(f.1), noC(fi1)),
+ 	  all.equal(coef(f.1), c(a = mean(y))))
> 
> proc.time()
   user  system elapsed 
  1.583   0.075   1.641