feat: cache huggingface gpt2 tokenizer files (#1138)

This commit is contained in:
takatost
2023-09-10 12:16:21 +08:00
committed by GitHub
parent 6637629045
commit 877da82b06

View File

@@ -34,6 +34,8 @@ RUN apt-get update \
COPY --from=base /pkg /usr/local
COPY . /app/api/
RUN python -c "from transformers import GPT2TokenizerFast; GPT2TokenizerFast.from_pretrained('gpt2')"
COPY docker/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh