4 changes: 4 additions & 0 deletions .dockerignore
@@ -0,0 +1,4 @@
*
!src
!transformers
!requirements.txt
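
The leading "*" excludes the whole build context; the "!" entries then re-include only the paths the Dockerfile actually copies (src, transformers, requirements.txt). A quick sanity check, assuming BuildKit is enabled (the image tag is a placeholder, not part of this PR):

# Plain progress output prints the context transfer, so stray files are easy to spot
docker build --progress=plain -t mqa-image .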
3 changes: 3 additions & 0 deletions .gitmodules
@@ -0,0 +1,3 @@
[submodule "transformers"]
path = transformers
url = https://github.com/bigcode-project/transformers.git
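
With transformers tracked as a submodule pointing at the bigcode-project fork, a plain clone leaves the transformers directory empty. A sketch of the expected setup using standard git commands (the repository URL is a placeholder):

# Clone with the submodule in one step...
git clone --recurse-submodules <repo-url>
# ...or initialize it in an existing checkout
git submodule update --init transformers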
16 changes: 16 additions & 0 deletions Dockerfile
@@ -0,0 +1,16 @@
FROM nvcr.io/nvidia/pytorch:22.11-py3

ARG USER=1000
ARG USERNAME=user

WORKDIR /app
ENV PYTHONPATH=/app

RUN useradd -m -u $USER -s /bin/bash $USERNAME \
&& chown $USERNAME /app

COPY --chown=$USERNAME ./requirements.txt ./
COPY --chown=$USERNAME transformers/ ./transformers
RUN pip install -r requirements.txt

COPY --chown=$USERNAME src/ ./src
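
The image builds on NVIDIA's PyTorch 22.11 container, runs as a non-root user, and copies requirements.txt plus the transformers submodule before src, so the dependency layers stay cached across source-only changes. A hedged build-and-run sketch (the tag and GPU flag are assumptions, not part of this PR):

docker build -t mqa-image .
# --gpus all assumes the NVIDIA Container Toolkit on the host; drop it for CPU-only
docker run --rm --gpus all -it mqa-image bash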
6 changes: 6 additions & 0 deletions Makefile
@@ -1,3 +1,9 @@
check_dirs := src scripts

style:
black --preview $(check_dirs)
isort $(check_dirs)

batch_size := 1

install-mqa-transformers:
…
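
With the style target in place, formatting both directories is a single command; black reads the line-length and target-version settings from the pyproject.toml added below:

make style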
3 changes: 3 additions & 0 deletions pyproject.toml
@@ -0,0 +1,3 @@
[tool.black]
line-length = 119
target-version = ['py35']
8 changes: 8 additions & 0 deletions requirements.txt
@@ -0,0 +1,8 @@
accelerate==0.15.0
bitsandbytes
deepspeed==0.7.7
./transformers

# TODO: Dev only
isort>=5.5.4
black~=22.0
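
The ./transformers line installs the submodule from its local checkout, so it must be initialized before installation; accelerate and deepspeed are pinned, while bitsandbytes floats. A sketch of the expected install order:

# The submodule must exist on disk before pip can install it as a path dependency
git submodule update --init transformers
pip install -r requirements.txt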
1 change: 1 addition & 0 deletions src/pipelines/pipeline.py
@@ -3,6 +3,7 @@
from typing import List, Tuple, Union

import torch

from transformers import AutoTokenizer, BloomConfig, BloomForCausalLM, GPT2Config, GPT2LMHeadModel


1 change: 1 addition & 0 deletions transformers
Submodule transformers added at b7e212