Univariate Linear Regression
An introduction to linear regression models.
Scalable - Built on top of NumPyro, TensorFlow Probability and JAX, for CPU, GPU and TPU vectorization and parallelization.
Flexibility trade-offs - low-level abstraction coding is available, but so are pre-built functions offering a high level of abstraction.
Unified - One framework for Julia, Python, and R.
Accessibility - 30 documented models.
Intuitive - model-building syntax.
BI provides a unified experience across Julia, Python, and R. Whether you work in R’s formula syntax, Python’s object-oriented approach, or Julia’s mathematical elegance, the model logic remains consistent.
from BI import bi

# Setup device ------------------------------------------------
m = bi(platform='cpu')  # 'cpu', 'gpu' or 'tpu'

# Import data ------------------------------------------------
# NOTE(review): `data_path` must be defined by the user (path to the
# dataset file) before this line runs.
m.data(data_path)

# Define model ------------------------------------------------
def model(arg1, arg2):
    # Placeholder: declare priors and the likelihood here.
    pass

# Run MCMC ------------------------------------------------
m.fit(model)

# Summary ------------------------------------------------
m.summary()

# Diagnostics ------------------------------------------------
m.diag()
# Milk-energy model with a separate intercept and slope per clade
# (index coding: `alpha` and `beta` have shape (4,), selected by
# `index_clade`).
def model(kcal_per_g, index_clade, mass):
    # Priors: one intercept and one slope per clade (shape based on the
    # number of clades).
    alpha = m.dist.normal(0, 0.5, shape=(4,), name='a')
    beta = m.dist.normal(0, 0.5, shape=(4,), name='b')
    # Prior on the residual standard deviation.
    sigma = m.dist.exponential(1, name='s')
    # Linear predictor: clade-specific intercept + clade-specific
    # slope times body mass.
    mu = alpha[index_clade] + beta[index_clade] * mass
    # Likelihood of the observed kcal per gram.
    m.dist.normal(mu, sigma, obs=kcal_per_g)
# Varying-effects model: per-group (per-cafe) intercepts and slopes
# drawn around global parameters.
# NOTE(review): `N_cafes` and `cafe` are read from the enclosing scope
# (presumably loaded via m.data earlier) — confirm they are defined
# before fitting.
def model(kcal_per_g, index_clade):
# Global (population-level) intercept prior.
a = m.dist.normal(5, 2, name = 'a')
# Global (population-level) slope prior.
b = m.dist.normal(-1, 0.5, name = 'b')
# Residual standard deviation prior.
sigma = m.dist.exponential( 1, name = 'sigma')
# Group-level intercept/slope draws centred on the global parameters;
# the helper registers one pair per group under `group_name`.
varying_intercept, varying_slope = m.effects.varying_effects(
N_group = N_cafes,
group = cafe,
global_intercept= a,
global_slope= b,
group_name = 'cafe'
)
#| label: model-to-latex
#| results: hold
#| echo: true
from BI import bi

m = bi(platform='cpu')

# Define model: height ~ Normal(a + b * weight, s) ------------------
def model(weight, height):
    intercept = m.dist.normal(178, 20, name='a')
    slope = m.dist.log_normal(0, 1, name='b')
    noise_sd = m.dist.uniform(0, 50, name='s')
    m.dist.normal(intercept + slope * weight, noise_sd, obs=height)

# Attach the model and render it as LaTeX ---------------------------
m.model = model
m.latex()
\begin{aligned} height &\sim \text{Normal}(\alpha + \beta * weight, \sigma) \\ \sigma &\sim \text{Uniform}(0, 50) \\ \beta &\sim \text{LogNormal}(0, 1) \\ \alpha &\sim \text{Normal}(178, 20) \end{aligned}