using DimensionalData
using GeoData
# Functions to be extended
"""
gridcoordinates(::Gridtype,x)
using Dagger

# Maps each Dagger.Processor to a pair of Dagger.Chunks.
struct PartialShard
    chunks::Dict{Dagger.Processor,Tuple{Dagger.Chunk,Dagger.Chunk}}
end

function partialshard(@nospecialize(f); procs=nothing, workers=nothing, per_thread=false)
    if procs === nothing
import Interpolations
import Dierckx

function interpolate_data(tsteps_old, tsteps_new, y, options)
    # Apply interpolation according to the options
    y_interp = if options.interpolation_method == "Dierckx"
        if options.boundary_condition == "flat"
            # Cubic spline with flat ("nearest") extrapolation at the boundaries
            s = Dierckx.Spline1D(tsteps_old, y, bc="nearest")
            s(tsteps_new)
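# Hedged usage sketch (not part of the original fragment): drives the branch
# selected above. The `options` container is assumed here to be a NamedTuple
# carrying the two fields the code reads (`interpolation_method`,
# `boundary_condition`); the project's real options type is not shown.
import Dierckx

tsteps_old = collect(0.0:1.0:10.0)
y          = sin.(tsteps_old)
tsteps_new = collect(0.0:0.25:12.0)   # deliberately extends past the last sample

options = (interpolation_method = "Dierckx", boundary_condition = "flat")

# With bc = "nearest", points outside [0, 10] are held at the boundary value
# rather than extrapolated, matching the "flat" boundary condition above.
s = Dierckx.Spline1D(tsteps_old, y, bc = "nearest")
y_interp = s(tsteps_new)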
module SphericalHarmonics

using FastTransforms
import Geodesy: LLA, distance

# Spherical-harmonic coefficients stored densely, indexed by degree l and order m.
struct PLM{T}
    coefs::Matrix{T}
end

# Map the degree/order pair (l, m) to indices into the coefficient matrix.
function lm2ij(l, m)
    l < 0 && throw(ArgumentError("l must be non-negative"))
import Tokenize.Tokens: IDENTIFIER, LSQUARE, COMMA, RSQUARE, STRING, ENDMARKER

function parsetokens(t, ar)
    if t[1].kind == IDENTIFIER
        # An identifier must be followed by an opening bracket; scan forward,
        # tracking bracket depth, until the matching closing bracket is found.
        @assert t[2].kind == LSQUARE
        brackcount = 1
        i = 3
        while true
            t[i].kind == LSQUARE && (brackcount += 1)
            t[i].kind == RSQUARE && (brackcount -= 1)
            brackcount == 0 && break
FROM jupyter/datascience-notebook:eb149a8c333a

# MAINTAINER is deprecated; record it as a label instead.
LABEL maintainer="[email protected]"
LABEL version="0.6.1"

USER root

# Already running as root here, so sudo is not needed inside RUN.
RUN apt-get update && apt-get upgrade -y
RUN apt-get install -y git unzip apt-utils vim

ADD start.sh /usr/local/bin/start.sh
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import numpy as np\n",
    "import numpy.fft as fft\n",
""" | |
broadcast_reduce(f,op,v0,A,Bs...) | |
Should behave like mapreduce, with the only difference that singleton dimension are expanded like in broadcast. | |
""" | |
function broadcast_reduce(f,op,v0,A,Bs...) | |
shape = Base.Broadcast.broadcast_indices(A,Bs...) | |
iter = CartesianRange(shape) | |
keeps, Idefaults = Base.Broadcast.map_newindexer(shape, A, Bs) | |
_broadcast_reduce(f,op,v0,keeps,Idefaults,A, Bs,Val{length(Bs)}(),iter) |
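# Hedged reference for the contract described in the docstring (not the
# package's `_broadcast_reduce` kernel, which is not shown here): on current
# Julia the same result can be obtained by materialising the broadcast and
# then reducing; `broadcast_reduce` is meant to return this value without
# allocating the intermediate array.
A = rand(3, 1)
B = rand(1, 4)

# Singleton dimensions expand to a 3×4 broadcast shape, then everything is
# combined with `op` (here +) starting from `v0` (here 0.0):
reference = 0.0 + sum(broadcast(*, A, B))
# broadcast_reduce(*, +, 0.0, A, B) should equal `reference`.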