Skip to content

Commit

Permalink
implement pre-commit (#55)
Browse files Browse the repository at this point in the history
* implement pre-commit

lint files with pre-commit hooks

* align github workflows with IMG_PYTHON_VERSION variable
Changes to be committed:
	modified:   .github/workflows/docker-base-image.yml
	modified:   .github/workflows/docker-build-n-test.yml
	modified:   .github/workflows/docker-publish.yml

* setup .hadolint.yaml

Changes to be committed:
	new file:   .hadolint.yaml
	modified:   .pre-commit-config.yaml

* run markdown lint in README.md
Changes to be committed:
	modified:   README.md
  • Loading branch information
gnzsnz authored Nov 19, 2023
1 parent 0d8191e commit ce28235
Show file tree
Hide file tree
Showing 11 changed files with 173 additions and 100 deletions.
2 changes: 1 addition & 1 deletion .env-dist
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
# There is no special handling of quotation marks. This means that they are
# part of the VAL. don't do VAR=''
# build
PYTHON_VERSION=3.11
IMG_PYTHON_VERSION=3.11
USER=gordon
USER_ID=1000
USER_GID=1000
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/docker-base-image.yml
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@ jobs:
USER=${{ env.USER }}
USER_ID=${{ env.USER_ID }}
USER_GID=${{ env.USER_GID }}
PYTHON_VERSION=${{ env.PYTHON_VERSION}}
IMG_PYTHON_VERSION=${{ env.IMG_PYTHON_VERSION}}
tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_VERSION }}
labels: ${{ steps.meta.outputs.labels }}

Expand Down Expand Up @@ -136,6 +136,6 @@ jobs:
USER=${{ env.USER }}
USER_ID=${{ env.USER_ID }}
USER_GID=${{ env.USER_GID }}
PYTHON_VERSION=${{ env.PYTHON_VERSION}}
IMG_PYTHON_VERSION=${{ env.IMG_PYTHON_VERSION}}
tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_VERSION }}
labels: ${{ steps.meta.outputs.labels }}
6 changes: 3 additions & 3 deletions .github/workflows/docker-build-n-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ jobs:
uses: docker/setup-qemu-action@v3
with:
platforms: ${{ env.PLATFORMS }}

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v3

Expand All @@ -45,7 +45,7 @@ jobs:
ghcr.io/${{ env.IMAGE_NAME }}
flavor: |
latest=true
- name: Build Docker image
uses: docker/build-push-action@v5
with:
Expand All @@ -59,6 +59,6 @@ jobs:
USER=${{ env.USER }}
USER_ID=${{ env.USER_ID }}
USER_GID=${{ env.USER_GID }}
PYTHON_VERSION=${{ env.PYTHON_VERSION}}
IMG_PYTHON_VERSION=${{ env.IMG_PYTHON_VERSION}}
tags: ${{ env.IMAGE_NAME }}:${{ env.IMAGE_VERSION }}
labels: ${{ steps.meta.outputs.labels }}
2 changes: 1 addition & 1 deletion .github/workflows/docker-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -67,7 +67,7 @@ jobs:
USER=${{ env.USER }}
USER_ID=${{ env.USER_ID }}
USER_GID=${{ env.USER_GID }}
PYTHON_VERSION=${{ env.PYTHON_VERSION}}
IMG_PYTHON_VERSION=${{ env.IMG_PYTHON_VERSION}}
platforms: ${{ env.PLATFORMS }}
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -4,3 +4,4 @@ docker-compose.yml
.python-version
.ipynb_checkpoints/
.virtual_documents/
.DS_Store
4 changes: 4 additions & 0 deletions .hadolint.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# hadolint configuration — rule codes listed here are skipped when linting
# the Dockerfile (hooked up via pre-commit in .pre-commit-config.yaml).
ignored:
  # DL3008: "Pin versions in apt-get install" — NOTE(review): presumably
  # accepted here so packages track the base image; confirm intent.
  - DL3008
  # SC2028 (ShellCheck): "echo may not expand escape sequences as you expect".
  - SC2028
  # DL3003: "Use WORKDIR to switch to a directory" instead of `cd` in RUN.
  - DL3003
31 changes: 31 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
# pre-commit configuration — these hooks run against staged files on every
# commit and enforce the repo's lint/format rules.
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
  # General hygiene checks from the pre-commit project itself.
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    hooks:
      - id: trailing-whitespace              # strip trailing whitespace
      - id: end-of-file-fixer                # ensure a single trailing newline
      - id: check-yaml                       # validate YAML syntax
      - id: check-added-large-files          # block accidental large files
      - id: check-shebang-scripts-are-executable
      - id: check-executables-have-shebangs
      - id: requirements-txt-fixer           # sort/normalize requirements.txt
  # Shell script linting and formatting.
  - repo: https://github.com/jumanjihouse/pre-commit-hooks
    rev: 3.0.0
    hooks:
      - id: shellcheck                       # lint shell scripts
      - id: shfmt                            # enforce shell formatting
  # Dockerfile linting; rule exclusions live in .hadolint.yaml.
  - repo: https://github.com/hadolint/hadolint
    rev: v2.12.0
    hooks:
      - id: hadolint
  # Markdown linting (README.md etc.).
  - repo: https://github.com/igorshubovych/markdownlint-cli
    rev: v0.37.0
    hooks:
      - id: markdownlint
  # Validate GitHub workflow/action YAML against their published schemas.
  - repo: https://github.com/python-jsonschema/check-jsonschema
    rev: 0.27.1
    hooks:
      - id: check-github-workflows
      - id: check-github-actions
40 changes: 22 additions & 18 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,14 +1,16 @@
#
###############################################################################
# Builder stage
#
ARG PYTHON_VERSION
FROM python:"${PYTHON_VERSION:-3.11}" AS builder
###############################################################################
ARG IMG_PYTHON_VERSION
FROM python:"${IMG_PYTHON_VERSION}" AS builder

ENV APT_PROXY_FILE=/etc/apt/apt.conf.d/01proxy

COPY requirements.txt .
COPY requirements.txt /.

SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN if [ -n "$APT_PROXY" ]; then \
echo 'Acquire::http { Proxy "'$APT_PROXY'"; }' \
echo "Acquire::http { Proxy \"${APT_PROXY}\"; }" \
| tee "${APT_PROXY_FILE}" \
;fi && \
echo "deb http://deb.debian.org/debian bookworm contrib" | tee /etc/apt/sources.list.d/contrib.list && \
Expand All @@ -24,16 +26,16 @@ RUN if [ -n "$APT_PROXY" ]; then \
sha256sum -c ta-lib-0.4.0-linux_"$(uname -m)".tgz.sha256 && \
cd / && tar xzf /tmp/ta-lib-0.4.0-linux_"$(uname -m)".tgz && \
export PREFIX=/usr/local/ta-lib && \
export TA_LIBRARY_PATH=$PREFIX/lib && \
export TA_INCLUDE_PATH=$PREFIX/include && \
export TA_LIBRARY_PATH="$PREFIX/lib" && \
export TA_INCLUDE_PATH="$PREFIX/include" && \
# end TA-Lib
pip wheel --no-cache-dir --wheel-dir /wheels -r requirements.txt
pip wheel --no-cache-dir --wheel-dir /wheels -r /requirements.txt

#
###############################################################################
# Final stage
#
ARG PYTHON_VERSION
FROM python:"${PYTHON_VERSION:-3.11}"-slim
###############################################################################
ARG IMG_PYTHON_VERSION
FROM python:"${IMG_PYTHON_VERSION}"-slim

ENV APT_PROXY_FILE=/etc/apt/apt.conf.d/01proxy

Expand Down Expand Up @@ -75,8 +77,9 @@ ENV SHELL="/bin/bash"
COPY --from=builder /usr/share/fonts/truetype /usr/share/fonts/truetype
COPY --from=builder /usr/local/ta-lib/ /usr/local/ta-lib/

SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN if [ -n "$APT_PROXY" ]; then \
echo 'Acquire::http { Proxy "'$APT_PROXY'"; }' \
echo "Acquire::http { Proxy \"${APT_PROXY}\"; }" \
| tee "${APT_PROXY_FILE}" \
;fi && \
apt-get update && \
Expand All @@ -87,19 +90,20 @@ RUN if [ -n "$APT_PROXY" ]; then \
if [ -f "${APT_PROXY_FILE}" ]; then \
rm "${APT_PROXY_FILE}" \
;fi && \
groupadd --gid ${USER_GID} ${USER} && \
useradd -ms /bin/bash --uid ${USER_ID} --gid ${USER_GID} ${USER} && \
groupadd --gid "${USER_GID}" "${USER}" && \
useradd -ms /bin/bash --uid "${USER_ID}" --gid "${USER_GID}" "${USER}" && \
echo "${USER} ALL=(ALL) NOPASSWD:ALL" | tee -a /etc/sudoers && \
python -c "import compileall; compileall.compile_path(maxlevels=10)"

USER $USER_ID:$USER_GID

SHELL ["/bin/bash", "-o", "pipefail", "-c"]
RUN --mount=type=bind,from=builder,source=/wheels,target=/wheels \
pip install --user --no-cache-dir /wheels/* && \
# Import matplotlib the first time to build the font cache.
MPLBACKEND=Agg python -c "import matplotlib.pyplot" && \
mkdir ${JUPYTER_SERVER_ROOT} && \
python -c "import compileall; compileall.compile_dir('${BASE_DATA}/lib/python$(echo $PYTHON_VERSION | cut -d '.' -f1,2)/site-packages', force=True)"
mkdir "${JUPYTER_SERVER_ROOT}" && \
python -c "import compileall; compileall.compile_dir('${BASE_DATA}/lib/python$(echo "$PYTHON_VERSION" | cut -d '.' -f1,2)/site-packages', force=True)"

COPY entrypoint.sh /
WORKDIR ${JUPYTER_SERVER_ROOT}
Expand Down
82 changes: 59 additions & 23 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -4,20 +4,37 @@ A dockerized Jupyter quant research environment.

## Highlights

- Includes tools for quant analysis, statsmodels, pymc, arch, py_vollib, zipline-reloaded, PyPortfolioOpt, etc.
- The usual suspects are included, numpy, pandas, sci-py, scikit-learn, yellowbricks, shap, optuna.
- ib_insync for Interactive Broker connectivity. Works well with [IB Gateway](https://github.com/gnzsnz/ib-gateway-docker) docker image.
- Includes all major Python packages for statistical and time series analysis, see [requirements](https://github.com/gnzsnz/jupyter-quant/blob/master/requirements.txt). For an extensive list check [list installed packages](#list-installed-packages) section.
- [Zipline-reloaded](https://github.com/stefan-jansen/zipline-reloaded/), [pyfolio-reloaded](https://github.com/stefan-jansen/pyfolio-reloaded) and [alphalens-reloaded](https://github.com/stefan-jansen/alphalens-reloaded).
- Designed for [ephemeral](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#create-ephemeral-containers) containers. Relevant data for your environment will survive your container.
- Includes tools for quant analysis, statsmodels, pymc, arch, py_vollib,
zipline-reloaded, PyPortfolioOpt, etc.
- The usual suspects are included, numpy, pandas, sci-py, scikit-learn,
yellowbricks, shap, optuna.
- ib_insync for Interactive Broker connectivity. Works well with
[IB Gateway](https://github.com/gnzsnz/ib-gateway-docker) docker image.
- Includes all major Python packages for statistical and time series analysis,
see [requirements](https://github.com/gnzsnz/jupyter-quant/blob/master/requirements.txt).
For an extensive list check
[list installed packages](#list-installed-packages) section.
- [Zipline-reloaded](https://github.com/stefan-jansen/zipline-reloaded/),
[pyfolio-reloaded](https://github.com/stefan-jansen/pyfolio-reloaded)
and [alphalens-reloaded](https://github.com/stefan-jansen/alphalens-reloaded).
- Designed for [ephemeral](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/#create-ephemeral-containers)
containers. Relevant data for your environment will survive your container.
- Optimized for size, it's a 2GB image vs 4GB for jupyter/scipy-notebook
- Includes jedi language server and jupyterlab-lsp, black and isort.
- It does NOT include conda/mamba. All packages are installed with pip under ~/.local/lib/python. Which should be mounted in a dedicated volume to preserver your environment.
- It does NOT include conda/mamba. All packages are installed with pip under
  `~/.local/lib/python`, which should be mounted in a dedicated volume to
  preserve your environment.
- Includes Cython, Numba, bottleneck and numexpr to speed up things
- sudo, so you can install new packages if needed.
- bash and stow, so you can [BYODF](#install-your-dotfiles) (bring your own dot files). Plus common command line utilities like git, less, nano (tiny), jq, [ssh](#install-your-ssh-keys), curl, bash completion and others.
- Support for [apt cache](https://github.com/gnzsnz/apt-cacher-ng). If you have other Linux boxes using you can leverage your cache. apt cache support major Linux distributions not only Debian/Ubuntu.
- It does not include a build environment. If you need to install a package that does not provide wheels you can build your wheels, as explained in [common tasks](#build-wheels-outside-the-container)
- bash and stow, so you can [BYODF](#install-your-dotfiles) (bring your own dot
files). Plus common command line utilities like git, less, nano (tiny), jq,
[ssh](#install-your-ssh-keys), curl, bash completion and others.
- Support for [apt cache](https://github.com/gnzsnz/apt-cacher-ng). If you have
  other Linux boxes using it, they can leverage your cache. apt cache supports
  major Linux distributions, not only Debian/Ubuntu.
- It does not include a build environment. If you need to install a package
  that does not provide wheels you can build your wheels, as explained
  in [common tasks](#build-wheels-outside-the-container)

## Quick Start

Expand Down Expand Up @@ -59,20 +76,29 @@ docker compose up

The image is designed to work with 3 volumes:

1. `quant_data` - volume for ~/.local folder. It contains caches and all python packages. This enables to add additional packages through pip.
1. `quant_conf` - volume for ~/.config, all config goes here. This includes jupyter, ipython, matplotlib, etc
1. Bind mount (but you could use a named volume) - volume for all notebooks, under `~/Notebooks`.
1. `quant_data` - volume for ~/.local folder. It contains caches and all
   python packages. This enables adding additional packages through pip.
2. `quant_conf` - volume for ~/.config, all config goes here. This includes
jupyter, ipython, matplotlib, etc
3. Bind mount (but you could use a named volume) - volume for all notebooks,
under `~/Notebooks`.

This allows to have ephemeral containers and to keep your notebooks (3), your config (2) and your additional packages (1). Eventually you would need to update the image, in this case your notebooks (3) can move without issues, your config (2) should still work but no warranty, and your packages in `quant_data` could still be used but you should refresh it with new image. Eventually you would need to refresh (1) and less frequently (2)
This allows you to have ephemeral containers and to keep your notebooks (3),
your config (2) and your additional packages (1). Eventually you will need to
update the image; in this case your notebooks (3) can move without issues,
your config (2) should still work but with no warranty, and your packages in
`quant_data` could still be used, but you should refresh them with a new image.
Eventually, you would need to refresh (1) and, less frequently, (2).

## Common tasks

### Get running server URL

```bash
docker exec -it jupyterquant jupyter-server list
Currently running servers:
http://40798f7a604a:8888/?token=ebf9e870d2aa0ed877590eb83b4d3bbbdfbd55467422a167 :: /home/gordon/Notebooks
http://40798f7a604a:8888/?token=
ebf9e870d2aa0ed877590eb83b4d3bbbdfbd55467422a167 :: /home/gordon/Notebooks
```

or
Expand All @@ -81,7 +107,8 @@ or
docker logs -t jupyter-quant 2>&1 | grep '127.0.0.1:8888/lab?token='
```

You will need to change hostname (40798f7a604a in this case) or 127.0.0.1 by your docker host ip.
You will need to change hostname (40798f7a604a in this case) or 127.0.0.1 by
your docker host ip.

### Show jupyter config

Expand Down Expand Up @@ -133,19 +160,28 @@ docker run -it --rm -v $PWD/wheels:/wheels python:3.11 bash
pip wheel --no-cache-dir --wheel-dir /wheels numpy
```

This will build wheels for numpy (or any other package that you need) and save the file in `$PWD/wheels`. Then you can copy the wheels in your notebooks mount (3 above) and install it within the container. You can even drag and drop into jupyter.
This will build wheels for numpy (or any other package that you need) and save
the file in `$PWD/wheels`. Then you can copy the wheels in your notebook mount
(3 above) and install it within the container. You can even drag and drop into
Jupyter.

### Install your dotfiles.
### Install your dotfiles

`git clone` your dotfiles to `Notebook/etc/dotfiles`, set environment variable `BYODF=/home/gordon/Notebook/etc/dotfiles` in your docker compose. When the container starts up stow will create links like `/home/gordon/.bashrc`
`git clone` your dotfiles to `Notebook/etc/dotfiles`, set environment variable
`BYODF=/home/gordon/Notebook/etc/dotfiles` in your `docker-compose.yml`. When
the container starts up, stow will create links like `/home/gordon/.bashrc`

### Install your SSH keys

You need to define environment variable `SSH_KEY_DIR` which should point to a location with your keys. Suggested place is `SSH_KEYDIR=/home/gordon/Notebooks/etc/ssh`, make sure the director has the right permissions. Something like `chmod 700 Notebooks/etc/ssh` should work.
You need to define environment variable `SSH_KEYDIR` which should point to a
location with your keys. The suggested place is
`SSH_KEYDIR=/home/gordon/Notebooks/etc/ssh`; make sure the directory has the
right permissions. Something like `chmod 700 Notebooks/etc/ssh` should work.

The `entrypoint.sh` script will create a symbolic link pointing to `$SSH_KEYDIR` on `/home/gordon/.ssh`.
The `entrypoint.sh` script will create a symbolic link pointing to
`$SSH_KEYDIR` on `/home/gordon/.ssh`.

Within Jupyter's terminal you can then:
Within Jupyter's terminal, you can then:

```shell
# start agent
Expand Down
Loading

0 comments on commit ce28235

Please sign in to comment.