Merge branch 'master' into graphviz
commit 238a78aafc

.github/PULL_REQUEST_TEMPLATE.md (vendored) | 6
@@ -2,6 +2,6 @@
  ##### I completed the 3 steps below:

- - [] I've read the [Contribution Guide](http://laradock.io/contributing).
+ - [ ] I've read the [Contribution Guide](http://laradock.io/contributing).
- - [] I've updated the **documentation**. (refer to [this](http://laradock.io/contributing/#update-the-documentation-site) for how to do so).
+ - [ ] I've updated the **documentation**. (refer to [this](http://laradock.io/contributing/#update-the-documentation-site) for how to do so).
- - [] I enjoyed my time contributing and making developer's life easier :)
+ - [ ] I enjoyed my time contributing and making developer's life easier :)

@@ -739,6 +739,25 @@ docker-compose up -d mariadb phpmyadmin



+
+ <br>
+ <a name="Use-Gitlab"></a>
+ ## Use Gitlab
+
+ 1 - Run the Gitlab Container (`gitlab`) with the `docker-compose up` command. Example:
+
+ ```bash
+ docker-compose up -d gitlab
+ ```
+
+ 2 - Open your browser and visit the localhost on port **8989**: `http://localhost:8989`
+ <br>
+ *Note: You may change GITLAB_DOMAIN_NAME to your own domain name like `http://gitlab.example.com` default is `http://localhost`*
+
+
  <br>
  <a name="Use-Adminer"></a>
  ## Use Adminer
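The documentation added above leaves the domain override to the reader. A minimal sketch of how that could look, assuming the usual Laradock workflow where `env-example` has already been copied to `.env` (the variable names come from the `env-example` hunk later in this diff):

```bash
# Point Gitlab at your own domain instead of the default http://localhost.
# GITLAB_DOMAIN_NAME is defined in env-example / .env.
sed -i 's|^GITLAB_DOMAIN_NAME=.*|GITLAB_DOMAIN_NAME=http://gitlab.example.com|' .env

# Recreate the container so the new external_url is picked up.
docker-compose up -d gitlab
```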
@@ -1456,6 +1475,19 @@ e) set it to `true`



+ <a name="Install php calendar extension"></a>
+ ## Install php calendar extension
+
+ 1 - Open the `.env` file
+ <br>
+ 2 - Search for the `PHP_FPM_INSTALL_CALENDAR` argument under the PHP-FPM container
+ <br>
+ 3 - Set it to `true`
+ <br>
+ 4 - Re-build the containers `docker-compose build php-fpm`
+
+
+
  <br>
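The four steps above map directly onto a couple of shell commands. A sketch, assuming a standard Laradock checkout where `.env` already exists:

```bash
# Steps 2-3: enable the build argument for the php-fpm image.
sed -i 's/^PHP_FPM_INSTALL_CALENDAR=.*/PHP_FPM_INSTALL_CALENDAR=true/' .env

# Step 4: rebuild and restart php-fpm with the new argument.
docker-compose build php-fpm
docker-compose up -d php-fpm

# Optional check: the calendar extension should now be listed.
docker-compose exec php-fpm php -m | grep -i calendar
```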
@@ -97,7 +97,7 @@ Beanstalkd - RabbitMQ - PHP Worker
  - **Queueing Management:**
  Beanstalkd Console - RabbitMQ Console
  - **Random Tools:**
- HAProxy - Certbot - Blackfire - Selenium - Jenkins - ElasticSearch - Kibana - Grafana - Mailhog - MailDev - Minio - Varnish - Swoole - NetData - Portainer - Laravel Echo...
+ HAProxy - Certbot - Blackfire - Selenium - Jenkins - ElasticSearch - Kibana - Grafana - Gitlab - Mailhog - MailDev - Minio - Varnish - Swoole - NetData - Portainer - Laravel Echo...

  Laradock introduces the **Workspace** Image, as a development environment.
  It contains a rich set of helpful tools, all pre-configured to work and integrate with almost any combination of Containers and tools you may choose.
@@ -144,6 +144,7 @@ services:
          - INSTALL_SWOOLE=${PHP_FPM_INSTALL_SWOOLE}
          - INSTALL_IMAGE_OPTIMIZERS=${PHP_FPM_INSTALL_IMAGE_OPTIMIZERS}
          - INSTALL_IMAGEMAGICK=${PHP_FPM_INSTALL_IMAGEMAGICK}
+         - INSTALL_CALENDAR=${PHP_FPM_INSTALL_CALENDAR}
      volumes:
        - ./php-fpm/php${PHP_VERSION}.ini:/usr/local/etc/php/php.ini
        - ${APP_CODE_PATH_HOST}:${APP_CODE_PATH_CONTAINER}
@@ -522,7 +523,6 @@ services:
        - frontend
        - backend

-
  ### ElasticSearch ########################################
    elasticsearch:
      build: ./elasticsearch
@@ -545,7 +545,6 @@ services:
        - frontend
        - backend

-
  ### Kibana ##############################################
    kibana:
      build: ./kibana
@@ -718,8 +717,109 @@ services:
      volumes:
        - ${DATA_PATH_HOST}/portainer_data:/data
        - /var/run/docker.sock:/var/run/docker.sock
+     extra_hosts:
+       - "dockerhost:${DOCKER_HOST_IP}"
      ports:
        - 9010:9000
+     networks:
+       - backend
+
+ ### Gitlab ################################################
+   gitlab:
+     build:
+       context: ./gitlab
+     environment:
+       GITLAB_OMNIBUS_CONFIG: |
+         external_url '${GITLAB_DOMAIN_NAME}'
+         redis['enable'] = false
+         nginx['listen_https'] = false
+         nginx['listen_port'] = 80
+         postgresql['enable'] = false
+         gitlab_rails['trusted_proxies'] = ['caddy','nginx','apache2']
+         gitlab_rails['redis_host'] = 'redis'
+         gitlab_rails['redis_database'] = 8
+         gitlab_rails['db_host'] = 'postgres'
+         gitlab_rails['db_username'] = 'laradock_gitlab'
+         gitlab_rails['db_password'] = 'laradock_gitlab'
+         gitlab_rails['db_database'] = 'laradock_gitlab'
+         gitlab_rails['initial_root_password'] = '${GITLAB_ROOT_PASSWORD}'
+         gitlab_rails['gitlab_shell_ssh_port'] = ${GITLAB_HOST_SSH_PORT}
+     volumes:
+       - ${DATA_PATH_HOST}/gitlab/config:/etc/gitlab
+       - ${DATA_PATH_HOST}/gitlab/data:/var/opt/gitlab
+       - ${GITLAB_HOST_LOG_PATH}:/var/log/gitlab
+     ports:
+       - "${GITLAB_HOST_HTTP_PORT}:80"
+       - "${GITLAB_HOST_HTTPS_PORT}:443"
+       - "${GITLAB_HOST_SSH_PORT}:22"
+     networks:
+       - backend
+     depends_on:
+       - redis
+       - postgres
+   gitlab-runner:
+     image: gitlab/gitlab-runner:latest
+     environment:
+       - CI_SERVER_URL=${GITLAB_DOMAIN_NAME}
+     volumes:
+       - ${DATA_PATH_HOST}/gitlab/runner:/etc/gitlab-runner
+       - /var/run/docker.sock:/var/run/docker.sock:rw
+     restart: always
+
+ ### JupyterHub #########################################
+   jupyterhub:
+     build:
+       context: ./jupyterhub
+     depends_on:
+       - postgres
+       - jupyterhub-user
+     restart: always
+     volumes:
+       - /var/run/docker.sock:/var/run/docker.sock:rw
+       - ${DATA_PATH_HOST}/jupyterhub/:/data
+       - ${JUPYTERHUB_CUSTOM_CONFIG}:/jupyterhub_config.py
+       - ${JUPYTERHUB_USER_DATA}:/user-data
+       - ${JUPYTERHUB_USER_LIST}:/userlist
+     networks:
+       - backend
+     ports:
+       - "${JUPYTERHUB_PORT}:80"
+     environment:
+       - TERM=xterm
+       - JUPYTERHUB_USER_DATA=${JUPYTERHUB_USER_DATA}
+       - JUPYTERHUB_POSTGRES_DB=${JUPYTERHUB_POSTGRES_DB}
+       - JUPYTERHUB_POSTGRES_USER=${JUPYTERHUB_POSTGRES_USER}
+       - JUPYTERHUB_POSTGRES_HOST=${JUPYTERHUB_POSTGRES_HOST}
+       - JUPYTERHUB_POSTGRES_PASSWORD=${JUPYTERHUB_POSTGRES_PASSWORD}
+       - JUPYTERHUB_OAUTH_CALLBACK_URL=${JUPYTERHUB_OAUTH_CALLBACK_URL}
+       - JUPYTERHUB_OAUTH_CLIENT_ID=${JUPYTERHUB_OAUTH_CLIENT_ID}
+       - JUPYTERHUB_OAUTH_CLIENT_SECRET=${JUPYTERHUB_OAUTH_CLIENT_SECRET}
+       - JUPYTERHUB_LOCAL_NOTEBOOK_IMAGE=${JUPYTERHUB_LOCAL_NOTEBOOK_IMAGE}
+   jupyterhub-user:
+     build:
+       context: ./jupyterhub
+       dockerfile: Dockerfile.user
+     command: ["sh", "-c", "echo \"build only\""]
+
+ ### IPython #########################################
+   ipython-controller:
+     build:
+       context: ./ipython
+       dockerfile: Dockerfile.controller
+     networks:
+       - backend
+     extra_hosts:
+       - "laradock-ipython:${LARADOCK_IPYTHON_CONTROLLER_IP}"
+     ports:
+       - "33327-33338:33327-33338"
+   ipython-engine:
+     build:
+       context: ./ipython
+       dockerfile: Dockerfile.engine
+     networks:
+       - backend
+     extra_hosts:
+       - "laradock-ipython:${LARADOCK_IPYTHON_CONTROLLER_IP}"
+
  ### Docker-in-Docker ################################################
    docker-in-docker:
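Since the `gitlab` service above disables the bundled Redis and PostgreSQL and points `gitlab_rails` at the `redis` and `postgres` containers (see its `depends_on`), it only works when those services are running as well. A minimal bring-up sketch:

```bash
# Start Gitlab together with the containers it depends on.
docker-compose up -d postgres redis gitlab

# Omnibus reconfiguration takes a few minutes on first boot;
# watch the logs until the web UI answers on the mapped HTTP port.
docker-compose logs -f gitlab
```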
@@ -763,6 +863,7 @@ services:
    mongo-webui:
      build:
        context: ./mongo-webui
+     restart: always
      environment:
        - ROOT_URL=${MONGO_WEBUI_ROOT_URL}
        - MONGO_URL=${MONGO_WEBUI_MONGO_URL}
env-example | 26
@@ -149,6 +149,7 @@ PHP_FPM_INSTALL_LDAP=false
  PHP_FPM_INSTALL_SWOOLE=false
  PHP_FPM_INSTALL_PG_CLIENT=false
  PHP_FPM_INSTALL_PCNTL=false
+ PHP_FPM_INSTALL_CALENDAR=false

  ### PHP_WORKER ############################################

@@ -361,6 +362,31 @@ SOLR_VERSION=5.5
  SOLR_PORT=8983
  SOLR_DATAIMPORTHANDLER_MYSQL=false

+ ### GITLAB ###############################################
+ GITLAB_HOST_HTTP_PORT=8989
+ GITLAB_HOST_HTTPS_PORT=9898
+ GITLAB_HOST_SSH_PORT=2289
+ GITLAB_DOMAIN_NAME=http://localhost
+ GITLAB_ROOT_PASSWORD=laradock
+ GITLAB_HOST_LOG_PATH=./logs/gitlab
+
+ ### JUPYTERHUB ###############################################
+ JUPYTERHUB_POSTGRES_HOST=postgres
+ JUPYTERHUB_POSTGRES_USER=laradock_jupyterhub
+ JUPYTERHUB_POSTGRES_PASSWORD=laradock_jupyterhub
+ JUPYTERHUB_POSTGRES_DB=laradock_jupyterhub
+ JUPYTERHUB_PORT=9991
+ JUPYTERHUB_OAUTH_CALLBACK_URL=http://laradock:9991/hub/oauth_callback
+ JUPYTERHUB_OAUTH_CLIENT_ID={GITHUB_CLIENT_ID}
+ JUPYTERHUB_OAUTH_CLIENT_SECRET={GITHUB_CLIENT_SECRET}
+ JUPYTERHUB_LOCAL_NOTEBOOK_IMAGE=laradock_jupyterhub-user
+ JUPYTERHUB_CUSTOM_CONFIG=./jupyterhub/jupyterhub_config.py
+ JUPYTERHUB_USER_DATA=/jupyterhub
+ JUPYTERHUB_USER_LIST=./jupyterhub/userlist
+
+ ### IPYTHON ##################################################
+ LARADOCK_IPYTHON_CONTROLLER_IP=127.0.0.1
+
  ### NETDATA ###############################################
  NETDATA_PORT=19999

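These defaults live in `env-example` and, as with the rest of Laradock, only take effect once copied into `.env`. A sketch of picking them up and overriding the Gitlab root password (the replacement value here is a placeholder):

```bash
cp env-example .env   # skip if you already have a .env

# Adjust the Gitlab defaults added above, e.g. the initial root password:
sed -i 's/^GITLAB_ROOT_PASSWORD=.*/GITLAB_ROOT_PASSWORD=change-me-please/' .env
grep '^GITLAB_' .env
```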
gitlab/Dockerfile | 3 (new file)
@@ -0,0 +1,3 @@
+ FROM gitlab/gitlab-ce:latest
+
+ LABEL maintainer="ahkui <ahkui@outlook.com>"
ipython/Dockerfile.controller | 17 (new file)
@@ -0,0 +1,17 @@
+ FROM python:3.5-alpine
+
+ LABEL maintainer="ahkui <ahkui@outlook.com>"
+
+ USER root
+
+ RUN apk add --no-cache build-base
+
+ RUN python -m pip --quiet --no-cache-dir install \
+     ipyparallel
+
+ RUN ipython profile create --parallel --profile=default
+
+ COPY ipcontroller-client.json /root/.ipython/profile_default/security/ipcontroller-client.json
+ COPY ipcontroller-engine.json /root/.ipython/profile_default/security/ipcontroller-engine.json
+
+ CMD ["sh","-c","ipcontroller --ip=* --reuse"]
ipython/Dockerfile.engine | 23 (new file)
@@ -0,0 +1,23 @@
+ FROM python:3.5-alpine
+
+ LABEL maintainer="ahkui <ahkui@outlook.com>"
+
+ USER root
+
+ RUN apk add --no-cache build-base
+
+ RUN python -m pip --quiet --no-cache-dir install \
+     ipyparallel \
+     numpy \
+     pandas \
+     pymongo \
+     redis \
+     requests \
+     bs4
+
+ RUN ipython profile create --parallel --profile=default
+
+ COPY ipcontroller-client.json /root/.ipython/profile_default/security/ipcontroller-client.json
+ COPY ipcontroller-engine.json /root/.ipython/profile_default/security/ipcontroller-engine.json
+
+ CMD ["sh","-c","ipcluster engines"]
ipython/ipcontroller-client.json | 16 (new file)
@@ -0,0 +1,16 @@
+ {
+   "key": "868074dd-060311910ab3d6991611bccf",
+   "signature_scheme": "hmac-sha256",
+   "unpack": "json",
+   "pack": "json",
+   "ssh": "",
+   "task_scheme": "leastload",
+   "interface": "tcp://*",
+   "location": "laradock-ipython",
+   "notification": 33338,
+   "iopub": 33337,
+   "control": 33336,
+   "mux": 33335,
+   "task": 33334,
+   "registration": 33333
+ }
ipython/ipcontroller-engine.json | 16 (new file)
@@ -0,0 +1,16 @@
+ {
+   "key": "868074dd-060311910ab3d6991611bccf",
+   "signature_scheme": "hmac-sha256",
+   "unpack": "json",
+   "pack": "json",
+   "ssh": "",
+   "interface": "tcp://*",
+   "location": "laradock-ipython",
+   "iopub": 33327,
+   "hb_ping": 33328,
+   "hb_pong": 33329,
+   "control": 33330,
+   "mux": 33331,
+   "task": 33332,
+   "registration": 33333
+ }
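The two JSON files above pre-share the controller's connection information (same `key`, host alias `laradock-ipython`, ports 33327-33338) so that engines can attach without exchanging security files at runtime; the port range matches the `ports:` mapping of `ipython-controller` in the docker-compose hunk. A usage sketch, assuming the compose services defined earlier in this diff:

```bash
# Start the controller, then as many engines as you want.
docker-compose up -d ipython-controller
docker-compose up -d --scale ipython-engine=4 ipython-engine

# Engines register against the controller through the shared
# ipcontroller-engine.json (registration port 33333).
docker-compose logs ipython-controller | tail
```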
jupyterhub/Dockerfile | 26 (new file)
@@ -0,0 +1,26 @@
+ FROM python
+ LABEL maintainer="ahkui <ahkui@outlook.com>"
+
+ ENV JUPYTERHUB_USER_DATA ${JUPYTERHUB_USER_DATA}
+ ENV JUPYTERHUB_POSTGRES_DB ${JUPYTERHUB_POSTGRES_DB}
+ ENV JUPYTERHUB_POSTGRES_USER ${JUPYTERHUB_POSTGRES_USER}
+ ENV JUPYTERHUB_POSTGRES_HOST ${JUPYTERHUB_POSTGRES_HOST}
+ ENV JUPYTERHUB_POSTGRES_PASSWORD ${JUPYTERHUB_POSTGRES_PASSWORD}
+ ENV JUPYTERHUB_OAUTH_CALLBACK_URL ${JUPYTERHUB_OAUTH_CALLBACK_URL}
+ ENV JUPYTERHUB_OAUTH_CLIENT_ID ${JUPYTERHUB_OAUTH_CLIENT_ID}
+ ENV JUPYTERHUB_OAUTH_CLIENT_SECRET ${JUPYTERHUB_OAUTH_CLIENT_SECRET}
+ ENV JUPYTERHUB_LOCAL_NOTEBOOK_IMAGE ${JUPYTERHUB_LOCAL_NOTEBOOK_IMAGE}
+
+ RUN curl -sL https://deb.nodesource.com/setup_10.x | bash -
+
+ RUN apt update -yqq && \
+     apt-get install -y nodejs
+
+ RUN npm install -g configurable-http-proxy
+
+ RUN pip install jupyterhub
+ RUN pip install oauthenticator
+ RUN pip install dockerspawner
+ RUN pip install psycopg2 psycopg2-binary
+
+ CMD ["sh", "-c", "jupyterhub upgrade-db && jupyterhub -f /jupyterhub_config.py"]
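The hub image above only installs JupyterHub, the configurable HTTP proxy, and the spawner/authenticator libraries; the actual notebook servers come from the separate `Dockerfile.user` image that DockerSpawner launches. A minimal sketch for bringing the hub up once the OAuth values are filled in `.env`:

```bash
# Build both images (the user image is only built, never run directly by compose).
docker-compose build jupyterhub jupyterhub-user

# Start the hub and its database.
docker-compose up -d postgres jupyterhub

# The hub is published on JUPYTERHUB_PORT (9991 by default in env-example).
```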
jupyterhub/Dockerfile.user | 72 (new file)
@@ -0,0 +1,72 @@
+ FROM tensorflow/tensorflow:latest-gpu
+
+ MAINTAINER ahkui <ahkui@outlook.com>
+
+ RUN apt-get update && apt-get install -y --no-install-recommends \
+     python \
+     python-dev \
+     && \
+     apt-get autoremove -y && \
+     apt-get autoclean && \
+     apt-get clean && \
+     rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+ RUN apt-get update && apt-get install -y --no-install-recommends \
+     wget \
+     git \
+     && \
+     apt-get autoremove -y && \
+     apt-get autoclean && \
+     apt-get clean && \
+     rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+ RUN curl -O https://bootstrap.pypa.io/get-pip.py && \
+     python3 get-pip.py && \
+     rm get-pip.py
+
+ RUN python3 -m pip --quiet --no-cache-dir install \
+     Pillow \
+     h5py \
+     ipykernel \
+     jupyter \
+     notebook \
+     jupyterhub \
+     matplotlib \
+     numpy \
+     pandas \
+     scipy \
+     sklearn \
+     Flask \
+     gunicorn \
+     pymongo \
+     redis \
+     requests \
+     ipyparallel \
+     bs4 \
+     && \
+     python3 -m ipykernel.kernelspec
+
+ RUN pip --no-cache-dir install \
+     https://storage.googleapis.com/tensorflow/linux/gpu/tensorflow_gpu-1.8.0-cp35-cp35m-linux_x86_64.whl
+
+ RUN ln -s -f /usr/bin/python3 /usr/bin/python
+
+ COPY start.sh /usr/local/bin/
+ COPY start-notebook.sh /usr/local/bin/
+ COPY start-singleuser.sh /usr/local/bin/
+ RUN chmod +x /usr/local/bin/start.sh
+ RUN chmod +x /usr/local/bin/start-notebook.sh
+ RUN chmod +x /usr/local/bin/start-singleuser.sh
+
+ RUN wget --quiet https://github.com/krallin/tini/releases/download/v0.10.0/tini && \
+     mv tini /usr/local/bin/tini && \
+     chmod +x /usr/local/bin/tini
+
+ # cleanup
+ RUN rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+ ENTRYPOINT ["tini", "--"]
+
+ CMD ["start-notebook.sh"]
+
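The user image is GPU-oriented: it starts from `tensorflow/tensorflow:latest-gpu`, and the spawner config later in this diff passes `'runtime': 'nvidia'`, so the Docker host needs the NVIDIA container runtime installed. A quick sanity-check sketch, assuming the image was built by compose under the `laradock_jupyterhub-user` tag that `JUPYTERHUB_LOCAL_NOTEBOOK_IMAGE` defaults to in env-example:

```bash
# Confirm the nvidia runtime is registered with the Docker daemon.
docker info | grep -i runtimes

# Run the user image once by hand to confirm the GPU is visible inside it.
docker run --rm --runtime=nvidia laradock_jupyterhub-user nvidia-smi
```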
jupyterhub/jupyterhub_config.py | 121 (new file)
@@ -0,0 +1,121 @@
+ # Copyright (c) Jupyter Development Team.
+ # Distributed under the terms of the Modified BSD License.
+
+ # Configuration file for JupyterHub
+ import os
+
+ c = get_config()
+
+ def create_dir_hook(spawner):
+     username = spawner.user.name # get the username
+     volume_path = os.path.join('/user-data', username)
+     if not os.path.exists(volume_path):
+         # create a directory with umask 0755
+         # hub and container user must have the same UID to be writeable
+         # still readable by other users on the system
+         os.mkdir(volume_path, 0o755)
+         os.chown(volume_path, 1000,100)
+         # now do whatever you think your user needs
+         # ...
+     pass
+
+ # attach the hook function to the spawner
+ c.Spawner.pre_spawn_hook = create_dir_hook
+
+ # We rely on environment variables to configure JupyterHub so that we
+ # avoid having to rebuild the JupyterHub container every time we change a
+ # configuration parameter.
+
+ # Spawn single-user servers as Docker containers
+ c.JupyterHub.spawner_class = 'dockerspawner.DockerSpawner'
+
+ # Spawn containers from this image
+ c.DockerSpawner.image = os.environ['JUPYTERHUB_LOCAL_NOTEBOOK_IMAGE']
+
+ # JupyterHub requires a single-user instance of the Notebook server, so we
+ # default to using the `start-singleuser.sh` script included in the
+ # jupyter/docker-stacks *-notebook images as the Docker run command when
+ # spawning containers. Optionally, you can override the Docker run command
+ # using the DOCKER_SPAWN_CMD environment variable.
+ spawn_cmd = os.environ.get('JUPYTERHUB_DOCKER_SPAWN_CMD', "start-singleuser.sh")
+ c.DockerSpawner.extra_create_kwargs.update({ 'command': spawn_cmd })
+
+ # Connect containers to this Docker network
+ network_name = os.environ.get('JUPYTERHUB_NETWORK_NAME','laradock_backend')
+ c.DockerSpawner.use_internal_ip = True
+ c.DockerSpawner.network_name = network_name
+
+ # Pass the network name as argument to spawned containers
+ c.DockerSpawner.extra_host_config = { 'network_mode': network_name, 'runtime': 'nvidia' }
+ # c.DockerSpawner.extra_host_config = { 'network_mode': network_name, "devices":["/dev/nvidiactl","/dev/nvidia-uvm","/dev/nvidia0"] }
+ # Explicitly set notebook directory because we'll be mounting a host volume to
+ # it. Most jupyter/docker-stacks *-notebook images run the Notebook server as
+ # user `jovyan`, and set the notebook directory to `/home/jovyan/work`.
+ # We follow the same convention.
+ # notebook_dir = os.environ.get('JUPYTERHUB_DOCKER_NOTEBOOK_DIR') or '/home/jovyan/work'
+ notebook_dir = '/notebooks'
+ c.DockerSpawner.notebook_dir = notebook_dir
+
+ # Mount the real user's Docker volume on the host to the notebook user's
+ # notebook directory in the container
+ user_data = os.environ.get('JUPYTERHUB_USER_DATA','/jupyterhub')
+ c.DockerSpawner.volumes = {
+     user_data+'/{username}': notebook_dir
+ }
+
+ c.DockerSpawner.extra_create_kwargs.update({ 'user': 'root'})
+
+ # volume_driver is no longer a keyword argument to create_container()
+ # c.DockerSpawner.extra_create_kwargs.update({ 'volume_driver': 'local' })
+ # Remove containers once they are stopped
+ c.DockerSpawner.remove_containers = True
+
+ # For debugging arguments passed to spawned containers
+ c.DockerSpawner.debug = True
+
+ # User containers will access hub by container name on the Docker network
+ c.JupyterHub.hub_ip = 'jupyterhub'
+ c.JupyterHub.hub_port = 8000
+
+ # TLS config
+ c.JupyterHub.port = 80
+ # c.JupyterHub.ssl_key = os.environ['SSL_KEY']
+ # c.JupyterHub.ssl_cert = os.environ['SSL_CERT']
+
+ # Authenticate users with GitHub OAuth
+ c.JupyterHub.authenticator_class = 'oauthenticator.GitHubOAuthenticator'
+ c.GitHubOAuthenticator.oauth_callback_url = os.environ['JUPYTERHUB_OAUTH_CALLBACK_URL']
+ c.GitHubOAuthenticator.client_id = os.environ['JUPYTERHUB_OAUTH_CLIENT_ID']
+ c.GitHubOAuthenticator.client_secret = os.environ['JUPYTERHUB_OAUTH_CLIENT_SECRET']
+
+ # Persist hub data on volume mounted inside container
+ data_dir = '/data'
+
+ c.JupyterHub.cookie_secret_file = os.path.join(data_dir,
+     'jupyterhub_cookie_secret')
+
+ print(os.environ)
+
+ c.JupyterHub.db_url = 'postgresql://{user}:{password}@{host}/{db}'.format(
+     user=os.environ['JUPYTERHUB_POSTGRES_USER'],
+     host=os.environ['JUPYTERHUB_POSTGRES_HOST'],
+     password=os.environ['JUPYTERHUB_POSTGRES_PASSWORD'],
+     db=os.environ['JUPYTERHUB_POSTGRES_DB'],
+ )
+
+ # Whitelist users and admins
+ c.Authenticator.whitelist = whitelist = set()
+ c.Authenticator.admin_users = admin = set()
+ c.JupyterHub.admin_access = True
+ pwd = os.path.dirname(__file__)
+ with open(os.path.join(pwd, 'userlist')) as f:
+     for line in f:
+         if not line:
+             continue
+         parts = line.split()
+         name = parts[0]
+         print(name)
+         whitelist.add(name)
+         if len(parts) > 1 and parts[1] == 'admin':
+             admin.add(name)
+ admin.add('laradock')
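The whitelist/admin block at the end of this config reads `/userlist` line by line: the first token on each line is the username and an optional second token `admin` grants admin rights (with `laradock` always added as an admin). A sketch of a slightly larger userlist; the GitHub usernames are placeholders:

```bash
# jupyterhub/userlist -- one GitHub username per line,
# optionally followed by the word "admin".
cat > jupyterhub/userlist <<'EOF'
laradock admin
alice
bob admin
EOF
```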
jupyterhub/start-notebook.sh | 12 (new file)
@@ -0,0 +1,12 @@
+ #!/bin/bash
+ # Copyright (c) Jupyter Development Team.
+ # Distributed under the terms of the Modified BSD License.
+
+ set -e
+
+ if [[ ! -z "${JUPYTERHUB_API_TOKEN}" ]]; then
+     # launched by JupyterHub, use single-user entrypoint
+     exec /usr/local/bin/start-singleuser.sh $*
+ else
+     . /usr/local/bin/start.sh jupyter notebook $*
+ fi
jupyterhub/start-singleuser.sh | 40 (new file)
@@ -0,0 +1,40 @@
+ #!/bin/bash
+ # Copyright (c) Jupyter Development Team.
+ # Distributed under the terms of the Modified BSD License.
+
+ set -e
+
+ # set default ip to 0.0.0.0
+ if [[ "$NOTEBOOK_ARGS $@" != *"--ip="* ]]; then
+     NOTEBOOK_ARGS="--ip=0.0.0.0 $NOTEBOOK_ARGS"
+ fi
+
+ # handle some deprecated environment variables
+ # from DockerSpawner < 0.8.
+ # These won't be passed from DockerSpawner 0.9,
+ # so avoid specifying --arg=empty-string
+ # if [ ! -z "$NOTEBOOK_DIR" ]; then
+ #     NOTEBOOK_ARGS="--notebook-dir='$NOTEBOOK_DIR' $NOTEBOOK_ARGS"
+ # fi
+ if [ ! -z "$JPY_PORT" ]; then
+     NOTEBOOK_ARGS="--port=$JPY_PORT $NOTEBOOK_ARGS"
+ fi
+ if [ ! -z "$JPY_USER" ]; then
+     NOTEBOOK_ARGS="--user=$JPY_USER $NOTEBOOK_ARGS"
+ fi
+ if [ ! -z "$JPY_COOKIE_NAME" ]; then
+     NOTEBOOK_ARGS="--cookie-name=$JPY_COOKIE_NAME $NOTEBOOK_ARGS"
+ fi
+ if [ ! -z "$JPY_BASE_URL" ]; then
+     NOTEBOOK_ARGS="--base-url=$JPY_BASE_URL $NOTEBOOK_ARGS"
+ fi
+ if [ ! -z "$JPY_HUB_PREFIX" ]; then
+     NOTEBOOK_ARGS="--hub-prefix=$JPY_HUB_PREFIX $NOTEBOOK_ARGS"
+ fi
+ if [ ! -z "$JPY_HUB_API_URL" ]; then
+     NOTEBOOK_ARGS="--hub-api-url=$JPY_HUB_API_URL $NOTEBOOK_ARGS"
+ fi
+
+ NOTEBOOK_ARGS=" --allow-root --notebook-dir='/notebooks' $NOTEBOOK_ARGS"
+
+ . /usr/local/bin/start.sh jupyterhub-singleuser $NOTEBOOK_ARGS $@
jupyterhub/start.sh | 7 (new file)
@@ -0,0 +1,7 @@
+ #!/bin/bash
+ # Copyright (c) Jupyter Development Team.
+ # Distributed under the terms of the Modified BSD License.
+
+ set -e
+
+ exec sh -c "env PATH=$PATH $*"
jupyterhub/userlist | 1 (new file)
@@ -0,0 +1 @@
+ laradock
@@ -9,9 +9,7 @@ LABEL maintainer="Mahmoud Zalt <mahmoud@zalt.me>"

  ARG TZ=UTC
  ENV TZ ${TZ}
- RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone
+ RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone && chown -R mysql:root /var/lib/mysql/
-
- RUN chown -R mysql:root /var/lib/mysql/

  COPY my.cnf /etc/mysql/conf.d/my.cnf

@@ -469,6 +469,19 @@ RUN if [ ${INSTALL_IMAP} = true ]; then \
      docker-php-ext-install imap \
  ;fi

+ ###########################################################################
+ # Calendar:
+ ###########################################################################
+
+ USER root
+
+ ARG INSTALL_CALENDAR=false
+
+ RUN if [ ${INSTALL_CALENDAR} = true ]; then \
+     docker-php-ext-configure calendar && \
+     docker-php-ext-install calendar \
+ ;fi
+
  ###########################################################################
  # Check PHP version:
  ###########################################################################

@@ -1 +1,3 @@
  *.sh
+ !init_gitlab_db.sh
+ !init_jupyterhub_db.sh
postgres/docker-entrypoint-initdb.d/init_gitlab_db.sh | 41 (new file)
@@ -0,0 +1,41 @@
+ #!/bin/bash
+ #
+ # Copy createdb.sh.example to createdb.sh
+ # then uncomment then set database name and username to create you need databases
+ #
+ # example: .env POSTGRES_USER=appuser and need db name is myshop_db
+ #
+ # psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+ #     CREATE USER myuser WITH PASSWORD 'mypassword';
+ #     CREATE DATABASE myshop_db;
+ #     GRANT ALL PRIVILEGES ON DATABASE myshop_db TO myuser;
+ # EOSQL
+ #
+ # this sh script will auto run when the postgres container starts and the $DATA_PATH_HOST/postgres not found.
+ #
+ #
+ # psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+ #     CREATE USER db1 WITH PASSWORD 'db1';
+ #     CREATE DATABASE db1;
+ #     GRANT ALL PRIVILEGES ON DATABASE db1 TO db1;
+ # EOSQL
+ #
+ # psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+ #     CREATE USER db2 WITH PASSWORD 'db2';
+ #     CREATE DATABASE db2;
+ #     GRANT ALL PRIVILEGES ON DATABASE db2 TO db2;
+ # EOSQL
+ #
+ # psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+ #     CREATE USER db3 WITH PASSWORD 'db3';
+ #     CREATE DATABASE db3;
+ #     GRANT ALL PRIVILEGES ON DATABASE db3 TO db3;
+ # EOSQL
+ #
+ ### default database and user for gitlab ##############################################
+ psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+     CREATE USER laradock_gitlab WITH PASSWORD 'laradock_gitlab';
+     CREATE DATABASE laradock_gitlab;
+     GRANT ALL PRIVILEGES ON DATABASE laradock_gitlab TO laradock_gitlab;
+     ALTER ROLE laradock_gitlab CREATEROLE SUPERUSER;
+ EOSQL
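As the comment in the script notes, these init scripts only run when the postgres container starts with an empty `${DATA_PATH_HOST}/postgres` directory. A sketch of re-triggering them on an existing installation (this wipes all Postgres data, so only do it on a disposable environment):

```bash
# DANGER: removes every database stored by the postgres container.
docker-compose stop postgres
rm -rf ${DATA_PATH_HOST}/postgres   # expand DATA_PATH_HOST from your .env
docker-compose up -d postgres       # the entrypoint re-runs the init scripts
```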
postgres/docker-entrypoint-initdb.d/init_jupyterhub_db.sh | 41 (new file)
@@ -0,0 +1,41 @@
+ #!/bin/bash
+ #
+ # Copy createdb.sh.example to createdb.sh
+ # then uncomment then set database name and username to create you need databases
+ #
+ # example: .env POSTGRES_USER=appuser and need db name is myshop_db
+ #
+ # psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+ #     CREATE USER myuser WITH PASSWORD 'mypassword';
+ #     CREATE DATABASE myshop_db;
+ #     GRANT ALL PRIVILEGES ON DATABASE myshop_db TO myuser;
+ # EOSQL
+ #
+ # this sh script will auto run when the postgres container starts and the $DATA_PATH_HOST/postgres not found.
+ #
+ #
+ # psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+ #     CREATE USER db1 WITH PASSWORD 'db1';
+ #     CREATE DATABASE db1;
+ #     GRANT ALL PRIVILEGES ON DATABASE db1 TO db1;
+ # EOSQL
+ #
+ # psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+ #     CREATE USER db2 WITH PASSWORD 'db2';
+ #     CREATE DATABASE db2;
+ #     GRANT ALL PRIVILEGES ON DATABASE db2 TO db2;
+ # EOSQL
+ #
+ # psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+ #     CREATE USER db3 WITH PASSWORD 'db3';
+ #     CREATE DATABASE db3;
+ #     GRANT ALL PRIVILEGES ON DATABASE db3 TO db3;
+ # EOSQL
+ #
+ ### default database and user for jupyterhub ##############################################
+ psql -v ON_ERROR_STOP=1 --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" <<-EOSQL
+     CREATE USER laradock_jupyterhub WITH PASSWORD 'laradock_jupyterhub';
+     CREATE DATABASE laradock_jupyterhub;
+     GRANT ALL PRIVILEGES ON DATABASE laradock_jupyterhub TO laradock_jupyterhub;
+     ALTER ROLE laradock_jupyterhub CREATEROLE SUPERUSER;
+ EOSQL
@@ -1 +1 @@
- * * * * * laradock php /var/www/artisan schedule:run >> /dev/null 2>&1
+ * * * * * laradock /usr/bin/php /var/www/artisan schedule:run >> /dev/null 2>&1