In [1]:
## Simulate noisy data from a known cubic, fit y ~ x^power by nonlinear
## least squares, and overlay the fitted curve on the true curve.
set.seed(1485)                        # reproducible simulation
n.obs <- 24
x <- runif(n.obs)
y <- x^3 + rnorm(n.obs, 0, 0.06)      # known cubic plus Gaussian noise (sd = 0.06)
ds <- data.frame(x = x, y = y)
str(ds)

plot(y ~ x, main = "Known cubic with noise")
s <- seq(0, 1, length.out = 100)      # grid for drawing smooth curves
                                      # (spell out length.out; bare `length =`
                                      # relies on partial argument matching)
lines(s, s^3, lty = 2, col = "green")

## Estimate the exponent, starting from power = 1.
## Use TRUE, not T: T is an ordinary variable that callers can reassign.
m <- nls(y ~ I(x^power), data = ds, start = list(power = 1), trace = TRUE)
class(m)
summary(m)

## Extract estimate and standard error by NAME, not bare position, so the
## code survives any change in the coefficient-table layout.
coefs <- summary(m)$coefficients
power <- round(coefs["power", "Estimate"], 3)
power.se <- round(coefs["power", "Std. Error"], 3)

plot(y ~ x, main = "Fitted power model", sub = "Blue: fit; green: known")
lines(s, s^3, lty = 2, col = "green")                      # reuse grid s from above
lines(s, predict(m, list(x = s)), lty = 1, col = "blue")   # fitted curve
text(0, 0.5, paste("y =x^ (", power, " +/- ", power.se, ")", sep = ""), pos = 4)


'data.frame':	24 obs. of  2 variables:
 $ x: num  0.838 0.5285 0.8344 0.0721 0.9242 ...
 $ y: num  0.5448 0.1412 0.6284 0.0106 0.7139 ...
1.814289 :  1
0.3012573 :  2.050361
0.08280959 :  2.881157
0.07643193 :  3.091093
0.07643191 :  3.091473
0.07643191 :  3.091458
'nls'
Formula: y ~ I(x^power)

Parameters:
      Estimate Std. Error t value Pr(>|t|)    
power    3.091      0.161    19.2 1.17e-15 ***
---
Signif. codes:  0 ‘***’ 0.001 ‘**’ 0.01 ‘*’ 0.05 ‘.’ 0.1 ‘ ’ 1

Residual standard error: 0.05765 on 23 degrees of freedom

Number of iterations to convergence: 5 
Achieved convergence tolerance: 8.046e-07

In [ ]: