
Table 2 The model parameters

From: Learning to predict in-hospital mortality risk in the intensive care unit with attention-based temporal convolution network

Model

The parameter settings

Decision Tree (DT)

criterion = "gini"  # The function to measure the quality of a split; "gini" selects the Gini impurity
splitter = "best"  # The strategy used to choose the split at each node
max_depth = None  # The maximum depth of the tree
min_samples_split = 2  # The minimum number of samples required to split an internal node
min_samples_leaf = 1  # The minimum number of samples required to be at a leaf node
min_weight_fraction_leaf = 0.0  # The minimum weighted fraction of the sum total of weights required to be at a leaf node
max_features = None  # The number of features to consider when looking for the best split
random_state = None  # The seed used by the random number generator
max_leaf_nodes = None  # Grow trees with max_leaf_nodes in best-first fashion; if None, the number of leaf nodes is unlimited
class_weight = None  # Weights associated with classes; if not given, all classes are assumed to have weight one
presort = False  # The data is not presorted

Support vector machine (SVM)

kernel = "rbf"  # Specifies the kernel type to be used in the algorithm; "rbf" is the Gaussian kernel function
gamma = "auto"  # Kernel coefficient for "rbf"
probability = True  # Whether to enable probability estimates

Logistic regression (LR)

solver = "lbfgs"  # The optimization algorithm is "lbfgs"
multi_class = "auto"  # Determines the multi-class strategy if y contains more than two classes
penalty = "l2"  # Specifies the norm used in the penalization; the "l2" penalty is the standard used in SVC

Random forest (RF)

n_estimators = 100 # The number of trees in the forest
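
The parameter names above match the scikit-learn API, so a minimal sketch of how the four baseline classifiers could be instantiated with these settings is given below. This is an illustrative assumption, not code from the paper; the data arrays X_train, y_train, and X_test are hypothetical, and the presort argument listed in the table is omitted from the call because False was its default value and the argument was removed in scikit-learn 0.24.

# Minimal sketch (assumed, not from the paper): instantiating the four
# baselines with the parameter settings listed in Table 2, using scikit-learn.
from sklearn.tree import DecisionTreeClassifier
from sklearn.svm import SVC
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier

# Decision Tree (DT); presort = False was the default and the argument was
# removed in scikit-learn 0.24, so it is not passed here.
dt = DecisionTreeClassifier(
    criterion="gini", splitter="best", max_depth=None,
    min_samples_split=2, min_samples_leaf=1, min_weight_fraction_leaf=0.0,
    max_features=None, random_state=None, max_leaf_nodes=None,
    class_weight=None,
)

# Support vector machine (SVM) with the Gaussian (RBF) kernel and
# probability estimates enabled.
svm = SVC(kernel="rbf", gamma="auto", probability=True)

# Logistic regression (LR) with the L-BFGS solver and L2 penalty.
lr = LogisticRegression(solver="lbfgs", multi_class="auto", penalty="l2")

# Random forest (RF) with 100 trees.
rf = RandomForestClassifier(n_estimators=100)

# Hypothetical usage: fit each baseline and take the positive-class
# probability as the predicted in-hospital mortality risk.
# for model in (dt, svm, lr, rf):
#     model.fit(X_train, y_train)
#     risk = model.predict_proba(X_test)[:, 1]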