- Use the following helper to add the NeuroDebian repository to your apt sources list: https://neuro.debian.net/install_pkg.html?p=fsl
I'm using Debian GNU/Linux 11.0 (bullseye) and a server in Munich.
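For reference, the helper generates commands along these lines; the exact sources list name depends on the release and mirror selected on that page (bullseye and a German mirror are assumed below, so double-check against the helper's output):

wget -O- http://neuro.debian.net/lists/bullseye.de-m.full | sudo tee /etc/apt/sources.list.d/neurodebian.sources.list
sudo apt-key adv --recv-keys --keyserver hkps://keyserver.ubuntu.com 0xA5D32F012649A5A9
sudo apt-get update
sudo apt-get install fsl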
import numpy as np
import pandas as pd

def bootstrap(data: pd.DataFrame,
              n_resamples: int,
              id_column: str,
              seed: int = 42) -> pd.DataFrame:
    """
    Draw n_resamples resamples with replacement from the given data,
    keeping rows grouped by id_column together (cluster bootstrap).
    """
    rng = np.random.default_rng(seed)
    ids = data[id_column].unique()
    resamples = []
    for i in range(n_resamples):
        sampled_ids = rng.choice(ids, size=len(ids), replace=True)
        resamples.append(pd.concat([data[data[id_column] == s] for s in sampled_ids]).assign(resample=i))
    return pd.concat(resamples, ignore_index=True)
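A quick usage sketch (toy data; the column names are illustrative):

df = pd.DataFrame({'subject': [1, 1, 2, 2, 3, 3], 'value': np.arange(6)})
boot = bootstrap(df, n_resamples=100, id_column='subject')
print(boot.groupby('resample')['value'].mean().std())  # bootstrap SE of the mean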
# simulation of models based on gene expression data:
# we create two datasets drawn from normal distributions with different
# mean and standard deviation, normalize the data using quantile
# normalization, and fit a logistic regression model with glmnet

# load libraries
library(dplyr)
library(glmnet)
library(preprocessCore)

# simulate two groups of expression values (genes x samples);
# the sizes, means and standard deviations here are illustrative
x <- cbind(matrix(rnorm(1000 * 20, mean = 5, sd = 1), ncol = 20),
           matrix(rnorm(1000 * 20, mean = 7, sd = 2), ncol = 20))
y <- factor(rep(c(0, 1), each = 20))  # group labels
x_norm <- normalize.quantiles(x)  # quantile normalization across samples
# fit a cross-validated logistic regression model (samples as rows)
fit <- cv.glmnet(t(x_norm), y, family = "binomial")
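To inspect which simulated genes the model selects (standard glmnet accessor):

coef(fit, s = "lambda.min")  # non-zero coefficients at the best cross-validated lambda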
import pandas as pd

def sample_and_shuffle(file, frac=0.1, save_to_file=False, **kwargs):
    """
    Shuffle each column independently and draw a fraction of the rows.
    This is useful for developing methods off the server for sensitive
    data: the output keeps the same structure as the original file
    without preserving any real record.
    :param file: path to csv file
    :param frac: fraction of data to sample
    :param save_to_file: save the sampled data to a csv file
    :param kwargs: keyword arguments for pandas.read_csv
    """
    df = pd.read_csv(file, **kwargs)
    for col in df.columns:
        df[col] = df[col].sample(frac=1).values  # break links between columns
    df = df.sample(frac=frac).reset_index(drop=True)
    if save_to_file:
        df.to_csv(file.replace('.csv', '_sampled.csv'), index=False)  # output name is an assumption
    return df
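Typical call (file name and read_csv options are illustrative):

fake = sample_and_shuffle('patients.csv', frac=0.05, save_to_file=True, sep=';')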
import numpy as np

def brownian_particle(n_steps, n_samples, dx, y0):
    """
    Brownian particle simulator: unbiased random walk with uniform
    steps of at most dx.
    :param n_steps: total steps
    :param n_samples: number of trajectories
    :param dx: maximum step length
    :param y0: starting position
    :return: array of shape (n_samples, n_steps) with the positions
    """
    steps = np.random.uniform(-dx, dx, size=(n_samples, n_steps))
    steps[:, 0] = 0.0  # all trajectories start exactly at y0
    return y0 + np.cumsum(steps, axis=1)
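A quick visual check (the numbers are illustrative):

import matplotlib.pyplot as plt
paths = brownian_particle(n_steps=1000, n_samples=5, dx=0.1, y0=0.0)
plt.plot(paths.T)  # one line per trajectory
plt.xlabel('step')
plt.ylabel('position')
plt.show()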
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from fenics import *  # fenics re-exports the dolfin interface, so one import suffices
from matplotlib import cm

# model parameters
γ_GFR = 1.  # rate of GFR reaching its steady state
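Read literally, the comment suggests γ_GFR sets the speed of a first-order relaxation; a hedged sketch of such a term (GFR_ss and the ODE form are assumptions, not from the source):

def dGFR_dt(GFR, GFR_ss=1.0):
    # first-order relaxation toward an assumed steady state GFR_ss
    return γ_GFR * (GFR_ss - GFR)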
""" | |
In this script we integrate the diffusion equation by applying the stencil matrix through | |
the convolve function from scipy.ndimage. This function applies a convolution, which means | |
that for each element in matrix phi it will multiply the weights in the intencil to the | |
element itself and its 8 nearest neighbors, summing the result subsequently. | |
""" | |
import numpy as np | |
import matplotlib.pyplot as plt | |
from scipy.ndimage import convolve |
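A minimal sketch of the update loop; the 3x3 kernel below encodes the standard 5-point Laplacian, and the grid size, D, dt and dx are illustrative choices (dt is kept below the stability limit dx**2 / (4 * D)):

D, dt, dx = 1.0, 0.001, 0.1
phi = np.zeros((100, 100))
phi[45:55, 45:55] = 1.0  # initial concentration blob
stencil = (D * dt / dx**2) * np.array([[0., 1., 0.],
                                       [1., -4., 1.],
                                       [0., 1., 0.]])
for step in range(1000):
    phi += convolve(phi, stencil, mode='constant')  # explicit Euler step
plt.imshow(phi)
plt.show()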
import numpy as np
import matplotlib.pyplot as plt

def xdot(x, y, alpha, beta, gamma, delta):
    """
    rate of change of the prey population
    """
    return alpha * x - beta * x * y

def ydot(x, y, alpha, beta, gamma, delta):
    """
    rate of change of the predator population
    """
    return delta * x * y - gamma * y
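A minimal forward-Euler integration of the system (parameter values, initial populations and step size are illustrative):

alpha, beta, gamma, delta = 1.0, 0.1, 1.5, 0.075
x, y, dt = 10.0, 5.0, 0.001
xs, ys = [x], [y]
for _ in range(20000):
    x, y = x + xdot(x, y, alpha, beta, gamma, delta) * dt, y + ydot(x, y, alpha, beta, gamma, delta) * dt
    xs.append(x)
    ys.append(y)
plt.plot(xs, label='prey')
plt.plot(ys, label='predators')
plt.legend()
plt.show()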
library(tidyverse)
library(googleLanguageR)

# STEP 1: generate an access key for Google Cloud
# read more: https://cloud.google.com/translate/docs/basic/setup-basic
gl_auth('auth_key.json')  # access the Google Cloud Translation API with your credentials

df <- read.csv('german_data_dictionary.csv')  # read data

# STEP 2: translate the German text to English
# ('description' is an assumed name for the column holding the German text)
translated <- gl_translate(df$description, source = 'de', target = 'en')
df$description_en <- translated$translatedText
""" | |
LASSO regression (L1 regularization) with gradient descent | |
TODO: estimate intercept | |
phydev.github.io | |
""" | |
def predict(X, beta): | |
""" | |
predict the regression | |
""" |