automatic naming to make deploying hubs easier.

This commit is contained in:
mathematicalmichael 2019-01-11 10:42:54 -07:00
parent ec9ecbe628
commit 389d934f40
7 changed files with 58 additions and 297 deletions

30
.env
View File

@@ -6,43 +6,41 @@
# values for environment variables in the configuration file IF the variables
# are not set in the shell environment.
EMAIL=consistentbayes@gmail.com
DOMAIN_NAME=consistentbayes.com
# To override these values, set the shell environment variables.
JUPYTERHUB_VERSION=0.9.4
# Name of Docker machine
DOCKER_MACHINE_NAME=jupyterhub
# Assign a port for the hub to be hosted on.
# To check the ports that are in use, run `docker ps`.
# Generally, picking a random number between 8000-9999 won't be an issue.
PORT_NUM=8001
HUB_NAME=vcarey
# Name of Docker network
DOCKER_NETWORK_NAME=jupyterhub-network
#DOCKER_NETWORK_NAME=stathub-network
# Single-user Jupyter Notebook server container image
DOCKER_NOTEBOOK_IMAGE=jupyter/datascience-notebook
DOCKER_NOTEBOOK_IMAGE=jupyter/scipy-notebook
# the local image we use, after pinning jupyterhub version
LOCAL_NOTEBOOK_IMAGE=jupyterhub-user
#LOCAL_NOTEBOOK_IMAGE=jupyterhub-user
# Notebook directory in the container.
# This will be /home/jovyan/work if the default
# This directory is stored as a docker volume for each user
# This directory is mapped to a docker volume for each user
DOCKER_NOTEBOOK_DIR=/home/jovyan/work
# Docker run command to use when spawning single-user containers
# DO NOT CHANGE THIS. It is how the docker-stacks notebooks launch the single-user servers.
DOCKER_SPAWN_CMD=start-singleuser.sh
# Name of JupyterHub container data volume
DATA_VOLUME_HOST=jupyterhub-data
#DATA_VOLUME_HOST=stathub-data
# Data volume container mount point
DATA_VOLUME_CONTAINER=/data
#DATA_VOLUME_CONTAINER=/data
# Name of JupyterHub postgres database data volume
DB_VOLUME_HOST=jupyterhub-db-data
# Postgres volume container mount point
DB_VOLUME_CONTAINER=/var/lib/postgresql/data
#DB_VOLUME_HOST=stathub-db-data
# The name of the postgres database containing JupyterHub state
POSTGRES_DB=jupyterhub
#POSTGRES_DB=stathub

View File

@@ -6,13 +6,11 @@ include .env
.DEFAULT_GOAL=build
network:
@docker network inspect $(DOCKER_NETWORK_NAME) >/dev/null 2>&1 || docker network create $(DOCKER_NETWORK_NAME)
@docker network inspect $(HUB_NAME)-network >/dev/null 2>&1 || docker network create $(HUB_NAME)-network
volumes:
@docker volume inspect $(DATA_VOLUME_HOST) >/dev/null 2>&1 || docker volume create --name $(DATA_VOLUME_HOST)
@docker volume inspect $(DB_VOLUME_HOST) >/dev/null 2>&1 || docker volume create --name $(DB_VOLUME_HOST)
@docker volume inspect ro_shared_volume >/dev/null 2>&1 || docker volume create --name ro_shared_volume
@docker volume inspect rw_shared_volume >/dev/null 2>&1 || docker volume create --name rw_shared_volume
@docker volume inspect $(HUB_NAME)-data >/dev/null 2>&1 || docker volume create --name $(HUB_NAME)-data
@docker volume inspect $(HUB_NAME)-db-data >/dev/null 2>&1 || docker volume create --name $(HUB_NAME)-db-data
secrets/postgres.env:
@echo "Generating postgres password in $@"
@@ -30,10 +28,6 @@ secrets/jupyterhub.key:
@echo "Need an SSL key in secrets/jupyterhub.key"
@exit 1
secrets/acme.json:
@chmod 600 secrets/acme.json
@exit 1
userlist:
@echo "Add usernames, one per line, to ./userlist, such as:"
@echo " zoe admin"
@@ -48,13 +42,13 @@ userlist:
# cert_files=
#endif
check-files: userlist secrets/acme.json secrets/oauth.env secrets/postgres.env
check-files: userlist secrets/postgres.env
pull:
docker pull $(DOCKER_NOTEBOOK_IMAGE)
notebook_image: pull singleuser/Dockerfile
docker build -t $(LOCAL_NOTEBOOK_IMAGE) \
docker build -t $(HUB_NAME)-user:latest \
--build-arg JUPYTERHUB_VERSION=$(JUPYTERHUB_VERSION) \
--build-arg DOCKER_NOTEBOOK_IMAGE=$(DOCKER_NOTEBOOK_IMAGE) \
singleuser

View File

@@ -7,15 +7,15 @@ version: "3"
services:
hub-db:
image: postgres:9.5
container_name: jupyterhub-db
container_name: ${HUB_NAME}-db
restart: always
environment:
POSTGRES_DB: ${POSTGRES_DB}
PGDATA: ${DB_VOLUME_CONTAINER}
POSTGRES_DB: ${HUB_NAME}
PGDATA: "/var/lib/postgresql/data"
env_file:
- secrets/postgres.env
volumes:
- "db:${DB_VOLUME_CONTAINER}"
- "db:/var/lib/postgresql/data"
hub:
depends_on:
@@ -26,31 +26,28 @@ services:
args:
JUPYTERHUB_VERSION: ${JUPYTERHUB_VERSION}
restart: always
image: jupyterhub
container_name: jupyterhub
image: ${HUB_NAME}
container_name: ${HUB_NAME}
volumes:
# Bind Docker socket on the host so we can connect to the daemon from
# within the container
- "/var/run/docker.sock:/var/run/docker.sock:rw"
# Bind Docker volume on host for JupyterHub database and cookie secrets
- "data:${DATA_VOLUME_CONTAINER}"
- "data:/data"
- "./jupyterhub_config.py:/srv/jupyterhub/jupyterhub_config.py"
- "/home/math/:/home/"
ports:
- "8000:8000"
- "${PORT_NUM}:8000"
links:
- hub-db
environment:
# All containers will join this network
DOCKER_NETWORK_NAME: ${DOCKER_NETWORK_NAME}
# JupyterHub will spawn this Notebook image for users
DOCKER_NOTEBOOK_IMAGE: ${LOCAL_NOTEBOOK_IMAGE}
HUB_NAME: ${HUB_NAME}
# Notebook directory inside user image
DOCKER_NOTEBOOK_DIR: ${DOCKER_NOTEBOOK_DIR}
# Using this run command (optional)
DOCKER_SPAWN_CMD: ${DOCKER_SPAWN_CMD}
# Postgres db info
POSTGRES_DB: ${POSTGRES_DB}
POSTGRES_DB: ${HUB_NAME}
POSTGRES_HOST: hub-db
env_file:
- secrets/postgres.env
@@ -61,12 +58,12 @@ services:
volumes:
data:
external:
name: ${DATA_VOLUME_HOST}
name: ${HUB_NAME}-data
db:
external:
name: ${DB_VOLUME_HOST}
name: ${HUB_NAME}-db-data
networks:
default:
external:
name: ${DOCKER_NETWORK_NAME}
name: ${HUB_NAME}-network

View File

@@ -8,7 +8,7 @@ from subprocess import check_call
c = get_config()
# Spawner dropdown menu?
enable_options=True
enable_options=False
# We rely on environment variables to configure JupyterHub so that we
# avoid having to rebuild the JupyterHub container every time we change a
# configuration parameter.
@@ -20,10 +20,10 @@ c.JupyterHub.spawner_class = spawner = 'dockerspawner.DockerSpawner'
# Spawn containers from this image (or a whitelist)
#c.DockerSpawner.image = "jupyter/datascience-notebook:7254cdcfa22b"
c.DockerSpawner.image = os.environ['DOCKER_NOTEBOOK_IMAGE']
c.DockerSpawner.image = '%s-user'%os.environ['HUB_NAME']
if enable_options:
# if whitelist enabled, the .container_image will be ignored in favor of the options below:
c.DockerSpawner.image_whitelist = {'fenics': "jupyterhub-user",
c.DockerSpawner.image_whitelist = {'default': c.DockerSpawner.image ,
'scipy-notebook': "jupyter/scipy-notebook",
'datascience-notebook': "jupyter/datascience-notebook",
'r-notebook': 'jupyter/r-notebook',
@@ -39,10 +39,11 @@ spawn_cmd = os.environ.get('DOCKER_SPAWN_CMD', "start-singleuser.sh")
c.DockerSpawner.extra_create_kwargs.update({ 'command': spawn_cmd })
# Memory limit
c.Spawner.mem_limit = '1G' # RAM limit
c.Spawner.mem_limit = '2G' # RAM limit
#c.Spawner.cpu_limit = 0.1
# Connect containers to this Docker network
network_name = os.environ['DOCKER_NETWORK_NAME']
network_name = '%s-network'%os.environ['HUB_NAME']
c.DockerSpawner.use_internal_ip = True
c.DockerSpawner.network_name = network_name
# Pass the network name as argument to spawned containers
@@ -71,7 +72,9 @@ c.DockerSpawner.remove_containers = True
c.DockerSpawner.debug = True
# User containers will access hub by container name on the Docker network
c.JupyterHub.hub_ip = 'jupyterhub'
c.JupyterHub.hub_ip = os.environ['HUB_NAME']
# The hub will be hosted at example.com/HUB_NAME/
c.JupyterHub.base_url = u'/%s/'%os.environ['HUB_NAME']
#c.JupyterHub.hub_port = 8001
# TLS config
@@ -98,7 +101,7 @@ c.HashAuthenticator.show_logins = True # Optional, defaults to False
### Database Interaction - cookies, db for jupyterhub
# Persist hub data on volume mounted inside container
data_dir = os.environ.get('DATA_VOLUME_CONTAINER', '/data')
data_dir = '/data' # DATA_VOLUME_CONTAINER
c.JupyterHub.cookie_secret_file = os.path.join(data_dir,
'jupyterhub_cookie_secret')
@@ -106,7 +109,7 @@ c.JupyterHub.cookie_secret_file = os.path.join(data_dir,
c.JupyterHub.db_url = 'postgresql://postgres:{password}@{host}/{db}'.format(
host=os.environ['POSTGRES_HOST'],
password=os.environ['POSTGRES_PASSWORD'],
db=os.environ['POSTGRES_DB'],
db=os.environ['HUB_NAME'],
)
# Whitelist users and admins

View File

@@ -15,8 +15,17 @@ USER jovyan
RUN python3 -m pip install --no-cache jupyterhub==$JUPYTERHUB_VERSION nbresuse jupyter-rsession-proxy
RUN conda install -c conda-forge fenics
RUN conda create --quiet --yes -p $CONDA_DIR/envs/python2 python=2.7 ipython ipykernel kernda numpy pandas matplotlib ipywidgets yaml && \
conda clean -tipsy
USER root
# Create a global kernelspec in the image and modify it so that it properly activates
# the python2 conda environment.
RUN $CONDA_DIR/envs/python2/bin/python -m ipykernel install && \
$CONDA_DIR/envs/python2/bin/kernda -o -y /usr/local/share/jupyter/kernels/python2/kernel.json
# R pre-requisites
RUN apt-get update && \
apt-get install -y --no-install-recommends \
@@ -77,6 +86,7 @@ RUN apt-get clean && \
RUN python3 -m pip install jupyterhub jupyter-rsession-proxy \
&& jupyter labextension install jupyterlab-server-proxy
USER jovyan
RUN python3 -m pip install git+https://github.com/jupyterhub/jupyter-rsession-proxy

View File

@@ -1,244 +0,0 @@
ARG DOCKER_NOTEBOOK_IMAGE
FROM $DOCKER_NOTEBOOK_IMAGE
ARG JUPYTERHUB_VERSION
# Install vim in case someone wants to use the terminal
USER root
RUN apt-get update && \
apt-get install -y vim && \
apt-get install -y gcc && \
rm -rf /var/lib/apt/lists/*
RUN python3 -m pip install --no-cache jupyterhub==$JUPYTERHUB_VERSION nbresuse
#any additional installations go here.
LABEL maintainer="Jupyter Project <jupyter@googlegroups.com>"
ARG NB_USER="student"
ARG NB_UID="1001"
ARG NB_GID="100"
USER root
# Install all OS dependencies for notebook server that starts but lacks all
# features (e.g., download as all possible file formats)
ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && apt-get -yq dist-upgrade \
&& apt-get install -yq --no-install-recommends \
wget \
bzip2 \
ca-certificates \
sudo \
locales \
fonts-liberation \
&& rm -rf /var/lib/apt/lists/*
RUN echo "en_US.UTF-8 UTF-8" > /etc/locale.gen && \
locale-gen
# Configure environment
ENV CONDA_DIR=/opt/conda \
SHELL=/bin/bash \
NB_USER=$NB_USER \
NB_UID=$NB_UID \
NB_GID=$NB_GID \
LC_ALL=en_US.UTF-8 \
LANG=en_US.UTF-8 \
LANGUAGE=en_US.UTF-8
ENV PATH=$CONDA_DIR/bin:$PATH \
HOME=/home/$NB_USER
ADD fix-permissions /usr/local/bin/fix-permissions
# Create jovyan user with UID=1000 and in the 'users' group
# and make sure these dirs are writable by the `users` group.
RUN groupadd wheel -g 11 && \
echo "auth required pam_wheel.so use_uid" >> /etc/pam.d/su && \
useradd -m -s /bin/bash -N -u $NB_UID $NB_USER && \
mkdir -p $CONDA_DIR && \
chown $NB_USER:$NB_GID $CONDA_DIR && \
chmod g+w /etc/passwd && \
fix-permissions $HOME && \
fix-permissions $CONDA_DIR
USER $NB_UID
# Setup work directory for backward-compatibility
RUN mkdir /home/$NB_USER/work && \
fix-permissions /home/$NB_USER
# Install conda as jovyan and check the md5 sum provided on the download site
ENV MINICONDA_VERSION 4.5.11
RUN cd /tmp && \
wget --quiet https://repo.continuum.io/miniconda/Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh && \
echo "e1045ee415162f944b6aebfe560b8fee *Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh" | md5sum -c - && \
/bin/bash Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh -f -b -p $CONDA_DIR && \
rm Miniconda3-${MINICONDA_VERSION}-Linux-x86_64.sh && \
$CONDA_DIR/bin/conda config --system --prepend channels conda-forge && \
$CONDA_DIR/bin/conda config --system --set auto_update_conda false && \
$CONDA_DIR/bin/conda config --system --set show_channel_urls true && \
$CONDA_DIR/bin/conda install --quiet --yes conda="${MINICONDA_VERSION%.*}.*" && \
$CONDA_DIR/bin/conda update --all --quiet --yes && \
conda clean -tipsy && \
rm -rf /home/$NB_USER/.cache/yarn && \
fix-permissions $CONDA_DIR && \
fix-permissions /home/$NB_USER
USER root
RUN ln -s /usr/bin/python3 /usr/bin/python && \
mkdir /.symlinks && \
cd /.symlinks &&
ln -s $CONDA_DIR/conda conda && \
ln -s $CONDA_DIR/activate activate && \
ln -s $CONDA_DIR/deactivate deactivate && \
export PATH=/.symlinks:$PATH
USER $NB_UID
# Install Tini
RUN conda install --quiet --yes 'tini=0.18.0' && \
conda list tini | grep tini | tr -s ' ' | cut -d ' ' -f 1,2 >> $CONDA_DIR/conda-meta/pinned && \
conda clean -tipsy && \
fix-permissions $CONDA_DIR && \
fix-permissions /home/$NB_USER
# Install Jupyter Notebook, Lab, and Hub
# Generate a notebook server config
# Cleanup temporary files
# Correct permissions
# Do all this in a single RUN command to avoid duplicating all of the
# files across image layers when the permissions change
RUN conda install --quiet --yes \
'notebook=5.7.2' \
'jupyterhub=0.9.4' \
'jupyterlab=0.35.4' && \
conda clean -tipsy && \
jupyter labextension install @jupyterlab/hub-extension@^0.12.0 && \
npm cache clean --force && \
jupyter notebook --generate-config && \
rm -rf $CONDA_DIR/share/jupyter/lab/staging && \
rm -rf /home/$NB_USER/.cache/yarn && \
fix-permissions $CONDA_DIR && \
fix-permissions /home/$NB_USER
USER root
EXPOSE 8888
WORKDIR $HOME
# Configure container startup
ENTRYPOINT ["tini", "-g", "--"]
CMD ["start-notebook.sh"]
### Minimal Notebook:
USER root
# Install all OS dependencies for fully functional notebook server
RUN apt-get update && apt-get install -yq --no-install-recommends \
build-essential \
emacs \
git \
inkscape \
jed \
libsm6 \
libxext-dev \
libxrender1 \
lmodern \
netcat \
pandoc \
python-dev \
texlive-fonts-extra \
texlive-fonts-recommended \
texlive-generic-recommended \
texlive-latex-base \
texlive-latex-extra \
texlive-xetex \
unzip \
nano \
&& rm -rf /var/lib/apt/lists/*
### Now onto scipy-notebook
# ffmpeg for matplotlib anim
RUN apt-get update && \
apt-get install -y --no-install-recommends ffmpeg && \
rm -rf /var/lib/apt/lists/*
USER $NB_UID
# Install Python 3 packages
# Remove pyqt and qt pulled in for matplotlib since we're only ever going to
# use notebook-friendly backends in these images
RUN conda install --quiet --yes \
'conda-forge::blas=*=openblas' \
'ipywidgets=7.2*' \
'pandas=0.23*' \
'numexpr=2.6*' \
'matplotlib=2.2*' \
'scipy=1.1*' \
'seaborn=0.9*' \
'scikit-learn=0.20*' \
'scikit-image=0.14*' \
'sympy=1.1*' \
'cython=0.28*' \
'patsy=0.5*' \
'statsmodels=0.9*' \
'cloudpickle=0.5*' \
'dill=0.2*' \
'numba=0.38*' \
'bokeh=0.13*' \
'sqlalchemy=1.2*' \
'hdf5=1.10*' \
'h5py=2.7*' \
'vincent=0.4.*' \
'beautifulsoup4=4.6.*' \
'protobuf=3.*' \
'xlrd' && \
conda remove --quiet --yes --force qt pyqt && \
conda clean -tipsy && \
# Activate ipywidgets extension in the environment that runs the notebook server
jupyter nbextension enable --py widgetsnbextension --sys-prefix && \
# Also activate ipywidgets extension for JupyterLab
# Check this URL for most recent compatibilities
# https://github.com/jupyter-widgets/ipywidgets/tree/master/packages/jupyterlab-manager
jupyter labextension install @jupyter-widgets/jupyterlab-manager@^0.38.1 && \
jupyter labextension install jupyterlab_bokeh@0.6.3 && \
npm cache clean --force && \
rm -rf $CONDA_DIR/share/jupyter/lab/staging && \
rm -rf /home/$NB_USER/.cache/yarn && \
rm -rf /home/$NB_USER/.node-gyp && \
fix-permissions $CONDA_DIR && \
fix-permissions /home/$NB_USER
# Install facets which does not have a pip or conda package at the moment
RUN cd /tmp && \
git clone https://github.com/PAIR-code/facets.git && \
cd facets && \
jupyter nbextension install facets-dist/ --sys-prefix && \
cd && \
rm -rf /tmp/facets && \
fix-permissions $CONDA_DIR && \
fix-permissions /home/$NB_USER
# Import matplotlib the first time to build the font cache.
ENV XDG_CACHE_HOME /home/$NB_USER/.cache/
RUN MPLBACKEND=Agg python -c "import matplotlib.pyplot" && \
fix-permissions /home/$NB_USER
USER $NB_UID
# Add local files as late as possible to avoid cache busting
COPY start.sh /usr/local/bin/
COPY start-notebook.sh /usr/local/bin/
COPY start-singleuser.sh /usr/local/bin/
COPY jupyter_notebook_config.py /etc/jupyter/
RUN fix-permissions /etc/jupyter/

View File

@@ -1,4 +1,6 @@
FROM jupyter/r-notebook
ARG DOCKER_NOTEBOOK_IMAGE
FROM $DOCKER_NOTEBOOK_IMAGE
ARG JUPYTERHUB_VERSION
USER root
@@ -23,7 +25,8 @@ RUN apt-get clean && \
USER $NB_USER
RUN pip install git+https://github.com/jupyterhub/jupyter-rsession-proxy
RUN pip install git+https://github.com/jupyterhub/jupyter-rsession-proxy \
&& jupyter labextension install jupyterlab-server-proxy
# The desktop package uses /usr/lib/rstudio/bin
ENV PATH="${PATH}:/usr/lib/rstudio-server/bin"