diff --git a/PythonRpcServer/requirements.txt b/PythonRpcServer/requirements.txt
index 8e8d725..96176cd 100644
--- a/PythonRpcServer/requirements.txt
+++ b/PythonRpcServer/requirements.txt
@@ -1,21 +1,14 @@
-# 2024-1-17 Removed Pygments,python-genutils and ipython
-# Also removed jedi. why autocomplete?? jedi==0.19.1
-#Why? prompt-toolkit==3.0.43
-
-#Did not try updating (maybe nexttime)
-#protobuf==3.15.0 #4.25.2
-protobuf==4.25.2
-
-#Floowing Updated to latest 2024-1-17:
-certifi==2024.7.4
+# 2025-4-30
+protobuf
+certifi==2025.4.26
 backcall==0.2.0
 chardet==5.2.0
-click==8.1.7
-decorator==5.1.1
+click==8.1.8
+decorator==5.2.1
 ffmpy==0.3.1
-grpcio==1.60.0
-grpcio-tools==1.60.0
-idna==3.7
+grpcio
+grpcio-tools
+idna==3.10
 KalturaApiClient==19.3.0
 lxml==5.1.0
 parso==0.8.3
@@ -29,11 +22,45 @@ tqdm==4.66.3
 traitlets==4.3.3
 urllib3==2.2.2
 wcwidth==0.2.13
-
-# Not versioned
 numpy
-# No longer maintained
 pytube # if not available, use the tar.gz package (see Dockerfile)
 yt-dlp
+
+# 2024-1-17 Removed Pygments,python-genutils and ipython
+# Also removed jedi. why autocomplete?? jedi==0.19.1
+#Why? prompt-toolkit==3.0.43
+
+#Did not try updating (maybe nexttime)
+#protobuf==3.15.0 #4.25.2
+# protobuf==4.25.2
+
+# #Floowing Updated to latest 2024-1-17:
+# certifi==2024.7.4
+# backcall==0.2.0
+# chardet==5.2.0
+# click==8.1.7
+# decorator==5.1.1
+# ffmpy==0.3.1
+# grpcio==1.60.0
+# grpcio-tools==1.60.0
+# idna==3.7
+# KalturaApiClient==19.3.0
+# lxml==5.1.0
+# parso==0.8.3
+# pexpect==4.9.0
+# pickleshare==0.7.5
+# ptyprocess==0.7.0
+# requests==2.32.2
+# requests-toolbelt==1.0.0
+# six==1.16.0
+# tqdm==4.66.3
+# traitlets==4.3.3
+# urllib3==2.2.2
+# wcwidth==0.2.13
+
+# # Not versioned
+# numpy
+# # No longer maintained
+# pytube # if not available, use the tar.gz package (see Dockerfile)
+# yt-dlp
 #Always get latest
 # protobuf version 3.18.3 causes NotImplementedError("To be implemented") in PythonRpcServer/mediaprovider.py
diff --git a/pythonrpcserver.Dockerfile b/pythonrpcserver.Dockerfile
index d956874..7952738 100644
--- a/pythonrpcserver.Dockerfile
+++ b/pythonrpcserver.Dockerfile
@@ -1,10 +1,9 @@
 # ------------------------------
 # Stage 1: Build Whisper.cpp
 # ------------------------------
-FROM --platform=linux/amd64 python:3.8.15-slim-buster AS whisperbuild
+FROM --platform=linux/amd64 python:3.13.3-bookworm AS whisperbuild
 RUN apt-get update && \
-    apt-get install -y curl gcc g++ make libglib2.0-0 libsm6 libxext6 libxrender-dev ffmpeg git && \
-    apt-get install -y wget && \
+    apt-get install -y curl gcc g++ make libglib2.0-0 libsm6 libxext6 libxrender-dev ffmpeg git wget && \
     wget https://github.com/Kitware/CMake/releases/download/v3.27.7/cmake-3.27.7-linux-x86_64.sh -O /tmp/cmake-install.sh && \
     chmod +x /tmp/cmake-install.sh && \
     /tmp/cmake-install.sh --skip-license --prefix=/usr/local && \
@@ -19,27 +18,38 @@ RUN bash ./models/download-ggml-model.sh tiny.en
 RUN bash ./models/download-ggml-model.sh large-v3

 # ------------------------------
-# Stage 2: Setup Python RPC Server
+# Stage 2: Build Python Dependencies
 # ------------------------------
-FROM --platform=linux/amd64 python:3.8.15-slim-buster AS rpcserver
+FROM --platform=linux/amd64 python:3.13.3-bookworm AS builder
+RUN apt-get update && apt-get install -y build-essential libxml2-dev libxslt1-dev zlib1g-dev libssl-dev libffi-dev python3-dev
+
+COPY ./PythonRpcServer/requirements.txt .
+RUN pip install --no-cache-dir --upgrade pip && \
+    pip wheel --wheel-dir=/wheels -r requirements.txt
+
+# ------------------------------
+# Stage 3: Setup Python RPC Server
+# ------------------------------
+FROM --platform=linux/amd64 python:3.13.3-slim-bookworm AS rpcserver
 RUN apt-get update && \
-    apt-get install -y curl gcc g++ make libglib2.0-0 libsm6 libxext6 libxrender-dev ffmpeg
+    apt-get install -y curl gcc g++ make libglib2.0-0 libsm6 libxext6 libxrender-dev ffmpeg libxml2-dev libxslt1-dev zlib1g-dev libssl-dev libffi-dev python3-dev

 ENV OMP_THREAD_LIMIT=1
 COPY --from=whisperbuild /whisper.cpp/build/bin/whisper-cli /usr/local/bin/whisper
 COPY --from=whisperbuild /whisper.cpp/models /PythonRpcServer/models
-WORKDIR /PythonRpcServer
+# copy pre-built wheels from builder stage
+COPY --from=builder /wheels /wheels
+COPY ./PythonRpcServer/requirements.txt .
+RUN pip install --no-cache-dir --upgrade pip && \
+    pip install --no-cache-dir --find-links=/wheels -r requirements.txt
+
+WORKDIR /PythonRpcServer

 COPY ./PythonRpcServer/transcribe_hellohellohello.wav .
 RUN whisper -ojf -f transcribe_hellohellohello.wav

-COPY ./PythonRpcServer/requirements.txt requirements.txt
-RUN pip install --no-cache-dir --upgrade pip && \
-    pip install --no-cache-dir -r requirements.txt
-
 COPY ct.proto ct.proto
 RUN python -m grpc_tools.protoc -I . --python_out=./ --grpc_python_out=./ ct.proto
 COPY ./PythonRpcServer .
-
-CMD [ "nice", "-n", "18", "ionice", "-c", "2", "-n", "6", "python3", "-u", "/PythonRpcServer/server.py" ]
+CMD ["nice", "-n", "18", "ionice", "-c", "2", "-n", "6", "python3", "-u", "/PythonRpcServer/server.py"]
diff --git a/pythonrpcserver_legacy.Dockerfile b/pythonrpcserver_legacy.Dockerfile
deleted file mode 100644
index 96a317a..0000000
--- a/pythonrpcserver_legacy.Dockerfile
+++ /dev/null
@@ -1,47 +0,0 @@
-# # ------------------------------
-# # Stage 1: Build Whisper.cpp
-# # ------------------------------
- FROM --platform=linux/amd64 python:3.8.15-slim-buster AS whisperbuild
- RUN apt-get update && \
-     apt-get install -y curl gcc g++ make libglib2.0-0 libsm6 libxext6 libxrender-dev ffmpeg git
-
- WORKDIR /whisper.cpp
- # RUN git clone https://github.com/ggerganov/whisper.cpp . && make
- RUN git clone https://github.com/ggerganov/whisper.cpp . && \
-     git checkout 021eef1 && \
-     make
- RUN bash ./models/download-ggml-model.sh base.en
- RUN bash ./models/download-ggml-model.sh tiny.en
- RUN bash ./models/download-ggml-model.sh large-v3
-
-# ------------------------------
-# Stage 2: Setup Python RPC Server
-# ------------------------------
- FROM --platform=linux/amd64 python:3.8.15-slim-buster AS rpcserver
- RUN apt-get update && \
-     apt-get install -y curl gcc g++ make libglib2.0-0 libsm6 libxext6 libxrender-dev ffmpeg
-
- ENV OMP_THREAD_LIMIT=1
- COPY --from=whisperbuild /whisper.cpp/main /usr/local/bin/whisper
- COPY --from=whisperbuild /whisper.cpp/models /PythonRpcServer/models
- WORKDIR /PythonRpcServer
-
- # Don't copy any py files here, so that we don't need to re-run whisper
- COPY ./PythonRpcServer/transcribe_hellohellohello.wav .
- # The output of tis whisper run is used when we set MOCK_RECOGNITION=MOCK for quick testing
- RUN whisper -ojf -f transcribe_hellohellohello.wav
-
- COPY ./PythonRpcServer/requirements.txt requirements.txt
- RUN pip install --no-cache-dir --upgrade pip && \
-     pip install --no-cache-dir -r requirements.txt
-
- COPY ct.proto ct.proto
- RUN python -m grpc_tools.protoc -I . --python_out=./ --grpc_python_out=./ ct.proto
-
- COPY ./PythonRpcServer .
-
-
- CMD [ "nice", "-n", "18", "ionice", "-c", "2", "-n", "6", "python3", "-u", "/PythonRpcServer/server.py" ]
-
-
-
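The rewritten pythonrpcserver.Dockerfile builds all Python dependencies as wheels in a throwaway builder stage and installs them into a slim runtime image. A minimal local sanity check might look like the sketch below; the image tag pythonrpcserver is an assumption, while MOCK_RECOGNITION=MOCK comes from the comment in the removed legacy Dockerfile.

# build the multi-stage image from the repository root (tag is hypothetical)
docker build -f pythonrpcserver.Dockerfile -t pythonrpcserver .
# run the RPC server with mocked recognition for a quick smoke test
docker run --rm -e MOCK_RECOGNITION=MOCK pythonrpcserver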