
Get a `tof_model`'s processed predictor matrix (for glmnet)

Usage

tof_get_model_x(tof_model)

Arguments

tof_model

A tof_model object, as created by tof_train_model()

Value

The model's preprocessed predictor matrix (the x argument expected by glmnet), with one row per observation and one column per predictor

Examples

feature_tibble <-
    dplyr::tibble(
        sample = as.character(1:100),
        cd45 = runif(n = 100),
        pstat5 = runif(n = 100),
        cd34 = runif(n = 100),
        outcome = (3 * cd45) + (4 * pstat5) + rnorm(100),
        class =
            as.factor(
                dplyr::if_else(outcome > median(outcome), "class1", "class2")
            ),
        multiclass =
            as.factor(
                c(rep("class1", 30), rep("class2", 30), rep("class3", 40))
            ),
        event = c(rep(0, times = 30), rep(1, times = 70)),
        time_to_event = rnorm(n = 100, mean = 10, sd = 2)
    )

split_data <- tof_split_data(feature_tibble, split_method = "simple")

# train a regression model
regression_model <-
    tof_train_model(
        split_data = split_data,
        predictor_cols = c(cd45, pstat5, cd34),
        response_col = outcome,
        model_type = "linear"
    )

tof_get_model_x(regression_model)
#>                 cd45       pstat5         cd34
#>   [1,]  0.0877035262  0.829793783 -0.968683447
#>   [2,]  0.2918501460  0.319452371  0.800569439
#>   [3,] -1.2269611704 -0.928646978  0.807730170
#>   [4,]  0.3266083909 -0.607621377 -0.901594550
#>   [5,] -0.1231041972 -0.444289623 -0.494594781
#>   [6,]  1.5059998407 -0.900442264  1.020763855
#>   [7,]  0.1174391067 -0.906308128  1.288819999
#>   [8,]  0.6059688310 -0.054174815 -0.643953097
#>   [9,]  0.1536267282 -0.027273746 -1.449196567
#>  [10,] -1.6076978094 -0.480038641  0.152308688
#>  [11,] -0.6057635688  1.149837807  1.559579738
#>  [12,] -1.2133761908 -0.320409039 -1.520753810
#>  [13,] -1.1736510272 -1.321028232 -1.635396484
#>  [14,] -0.5110341672  0.440724908  0.364573728
#>  [15,] -1.4754129765 -0.198923773 -1.035149436
#>  [16,]  0.3246659766  1.272483299 -0.655049154
#>  [17,]  1.3003916085  1.545365366  0.593805445
#>  [18,] -0.1257498216 -1.131472650  0.922662129
#>  [19,] -1.5982641004  1.108000792 -1.316715983
#>  [20,]  0.1582729808 -1.492907432  0.370311344
#>  [21,]  1.1089634539  0.943408190 -0.648307102
#>  [22,] -1.2383270084  1.374553316  0.001903408
#>  [23,] -1.0415443938  1.092551902  0.511497514
#>  [24,] -0.5144447422  0.807921061 -1.762456106
#>  [25,]  0.6025849687  1.380902083  0.585598230
#>  [26,] -0.1234833745  0.389286831 -1.453363542
#>  [27,] -1.0696533575 -0.009721015 -0.233546515
#>  [28,] -0.8800708859  1.531075744  1.503963564
#>  [29,]  0.9416140877  0.384510889  1.663362701
#>  [30,] -0.7275012830  1.532845645 -0.037682001
#>  [31,]  1.3178970986 -1.548102531 -1.212087715
#>  [32,] -1.4915239562 -0.212303725 -0.195494079
#>  [33,] -0.1161436057  1.182686680  0.268296391
#>  [34,] -0.7647825642  1.286841294 -1.808555887
#>  [35,] -0.5200457880  0.831032273  0.645547868
#>  [36,] -0.9817538172  0.395569966  0.473801688
#>  [37,]  0.7972317256  0.544322467 -0.670152761
#>  [38,]  1.1053709483 -0.306839661  1.480771002
#>  [39,] -0.1886989489 -0.046366861  0.462121536
#>  [40,]  0.6288941216 -0.982222320 -1.424423188
#>  [41,] -1.6273743429 -0.635951656 -0.473005223
#>  [42,]  0.9809659311 -0.425944473  1.700537277
#>  [43,] -1.5523262820 -0.856909935 -0.839871795
#>  [44,] -0.6576770757 -0.977890262 -0.785642128
#>  [45,] -0.9958291894 -1.594200427  1.676041390
#>  [46,]  0.2828183780  1.539237636 -1.168691796
#>  [47,]  0.0008241368  0.107879703 -0.031884374
#>  [48,]  0.8467101981  1.564892225  1.204252566
#>  [49,] -0.8175202372  1.156048099 -1.322301425
#>  [50,] -1.4530059713  0.799186164 -0.938520711
#>  [51,]  1.7121255067  1.386763854 -1.094021634
#>  [52,] -0.7507502536  1.061895897  1.442621826
#>  [53,]  1.3378727571 -0.777024917 -0.612565242
#>  [54,]  1.5508399521  0.648075118 -0.394862653
#>  [55,]  1.6910142455  0.196987192 -1.814924127
#>  [56,]  0.8097644324 -1.837699125 -0.324791846
#>  [57,]  1.2046067475 -1.434562840  0.633540750
#>  [58,]  0.3373400699  1.538177325 -0.590777651
#>  [59,]  1.4076883817  0.357989206  0.216105111
#>  [60,]  0.7676643664  1.033249521  1.742619876
#>  [61,]  1.0588637833 -0.172356097 -0.360171281
#>  [62,]  1.4316085003 -0.768483954  0.748025427
#>  [63,]  1.1211474035 -0.794173026  0.734620103
#>  [64,]  1.4746509859 -1.767649605  1.312726068
#>  [65,]  0.8382666407  1.040543856  1.534130853
#>  [66,] -1.6959048626 -1.347244584  0.670641955
#>  [67,] -1.1139537995 -0.541029219  0.644128079
#>  [68,] -0.4834780130 -0.809639190  0.637378507
#>  [69,] -1.2833087135 -1.345892974  0.635578574
#>  [70,]  1.4278697403  0.157837180  0.334544325
#>  [71,]  0.8179944057 -0.861551862  0.429984418
#>  [72,] -0.3572864259 -1.737504943  0.309427107
#>  [73,]  0.1073285464  1.509181482  1.004547019
#>  [74,] -0.3276563590  0.351424101  0.573437637
#>  [75,] -0.2599908109  0.086931422 -0.821823365
#>  [76,]  0.1755280279  0.230063248  1.067548665
#>  [77,] -0.3734789132 -0.205327829 -0.098932151
#>  [78,]  0.7537409485 -0.735763209  1.084537004
#>  [79,]  0.3607494988 -1.597975379  1.427041573
#>  [80,] -0.4521013676  1.148874155  1.113521539
#>  [81,] -0.1396013375 -0.042671954 -1.104942414
#>  [82,]  0.7150689340 -0.774626941 -1.434992466
#>  [83,]  1.6054448746  1.423435202 -0.533968844
#>  [84,]  0.3902314924 -1.244460786 -0.380625796
#>  [85,] -0.2840894171  0.249574019  0.500213366
#>  [86,]  0.3580295343  1.090010312 -0.733583127
#>  [87,] -1.3019738319 -0.118892177 -0.982532835
#>  [88,] -1.4729152951 -0.939569685 -0.583579366
#>  [89,] -1.1682440613  0.318443541  0.347178575
#>  [90,]  0.1282435264 -1.514481669 -0.368335955
#>  [91,]  1.2907191832 -0.703374750 -0.171244219
#>  [92,]  0.5985112338 -1.734542668  1.295446904
#>  [93,]  1.4654890208  1.176278968  1.627644913
#>  [94,] -0.2046741225  0.174062751 -1.427151447
#>  [95,]  0.7689103424 -1.187481662 -1.645146346
#>  [96,] -1.2414118259 -0.881406684 -1.310608886
#>  [97,] -1.1787887576  1.267665202  0.125200033
#>  [98,]  1.5715367494 -0.523919150  0.748005728
#>  [99,] -1.7782543970  0.398499084 -0.672255047
#> [100,] -0.4746675978  0.452923311  0.089694752
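
# The matrix returned above can be supplied directly to glmnet. The sketch
# below is illustrative only: it assumes the glmnet package is installed and
# that a companion accessor, tof_get_model_y(), is available for the processed
# response (an assumption here; otherwise, substitute the outcome vector used
# to train the model).

x <- tof_get_model_x(regression_model)
y <- tof_get_model_y(regression_model)

# refit a lasso regression on the same preprocessed predictors
refit <- glmnet::glmnet(x = x, y = y, family = "gaussian", alpha = 1)

# inspect the coefficients at a single penalty value
coef(refit, s = 0.1)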