Merge pull request #256 from roboflow/release/0.9.9
Release 0.9.9
paulguerrie authored Feb 7, 2024
2 parents c14ea3f + 51a4cf8 commit 7900225
Showing 31 changed files with 1,164 additions and 74 deletions.
8 changes: 4 additions & 4 deletions .github/workflows/test.jetson_4.5.0.yml
@@ -22,13 +22,13 @@ jobs:
uses: actions/checkout@v3
with:
ref: ${{ github.head_ref }}

- name: 🦾 Install dependencies
run: |
python3 -m pip install --upgrade pip
python3 -m pip install -r requirements/requirements.test.integration.txt
- name: 🔨 Build and Push Test Docker - Jetson 4.5.0
run : |
run: |
docker pull roboflow/roboflow-inference-server-jetson-4.5.0:test
docker build -t roboflow/roboflow-inference-server-jetson-4.5.0:test -f docker/dockerfiles/Dockerfile.onnx.jetson.4.5.0 .
docker push roboflow/roboflow-inference-server-jetson-4.5.0:test
@@ -37,7 +37,7 @@
PORT=9101 INFERENCE_SERVER_REPO=roboflow-inference-server-jetson-4.5.0 make start_test_docker_jetson
- name: 🧪 Regression Tests - Jetson 4.5.0
run: |
SKIP_YOLOV8_TEST=true SKIP_GAZE_TEST=true FUNCTIONAL=true PORT=9101 API_KEY=${{ secrets.API_KEY }} asl_instance_segmentation_API_KEY=${{ secrets.ASL_INSTANCE_SEGMENTATION_API_KEY }} asl_poly_instance_seg_API_KEY=${{ secrets.ASL_POLY_INSTANCE_SEG_API_KEY }} bccd_favz3_API_KEY=${{ secrets.BCCD_FAVZ3_API_KEY }} bccd_i4nym_API_KEY=${{ secrets.BCCD_I4NYM_API_KEY }} cats_and_dogs_smnpl_API_KEY=${{ secrets.CATS_AND_DOGS_SMNPL_API_KEY }} coins_xaz9i_API_KEY=${{ secrets.COINS_XAZ9I_API_KEY }} melee_API_KEY=${{ secrets.MELEE_API_KEY }} yolonas_test_API_KEY=${{ secrets.YOLONAS_TEST_API_KEY }} python3 -m pytest tests/inference/integration_tests/
SKIP_VISUALISATION_TESTS=true MAX_WAIT=300 SKIP_GROUNDING_DINO_TEST=true SKIP_YOLOV8_TEST=true SKIP_GAZE_TEST=true FUNCTIONAL=true PORT=9101 API_KEY=${{ secrets.API_KEY }} asl_instance_segmentation_API_KEY=${{ secrets.ASL_INSTANCE_SEGMENTATION_API_KEY }} asl_poly_instance_seg_API_KEY=${{ secrets.ASL_POLY_INSTANCE_SEG_API_KEY }} bccd_favz3_API_KEY=${{ secrets.BCCD_FAVZ3_API_KEY }} bccd_i4nym_API_KEY=${{ secrets.BCCD_I4NYM_API_KEY }} cats_and_dogs_smnpl_API_KEY=${{ secrets.CATS_AND_DOGS_SMNPL_API_KEY }} coins_xaz9i_API_KEY=${{ secrets.COINS_XAZ9I_API_KEY }} melee_API_KEY=${{ secrets.MELEE_API_KEY }} yolonas_test_API_KEY=${{ secrets.YOLONAS_TEST_API_KEY }} python3 -m pytest tests/inference/integration_tests/
- name: 🧹 Cleanup Test Docker - Jetson 4.5.0
run: make stop_test_docker
if: success() || failure()
if: success() || failure()
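The regression-test step now exports SKIP_VISUALISATION_TESTS, MAX_WAIT and SKIP_GROUNDING_DINO_TEST alongside the existing SKIP_* flags. The integration suite presumably reads these as environment variables; the sketch below shows that pattern in Python, with a helper and test name chosen for illustration rather than taken from the repository.

    import os
    import pytest

    def env_flag(name: str) -> bool:
        # Hypothetical helper: the workflow passes SKIP_*=true, so treat common
        # truthy strings as "skip this group of tests".
        return os.getenv(name, "").strip().lower() in {"1", "true", "yes"}

    # MAX_WAIT is assumed to cap how long the suite waits for the server, in seconds.
    MAX_WAIT = int(os.getenv("MAX_WAIT", "120"))

    @pytest.mark.skipif(
        env_flag("SKIP_VISUALISATION_TESTS"),
        reason="visualisation tests disabled on this platform",
    )
    def test_visualisation_endpoint():
        ...  # placeholder body; the real tests live in tests/inference/integration_tests/
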
8 changes: 4 additions & 4 deletions .github/workflows/test.jetson_4.6.1.yml
@@ -22,13 +22,13 @@ jobs:
uses: actions/checkout@v3
with:
ref: ${{ github.head_ref }}

- name: 🦾 Install dependencies
run: |
python3 -m pip install --upgrade pip
python3 -m pip install -r requirements/requirements.test.integration.txt
- name: 🔨 Build and Push Test Docker - Jetson 4.6.1
run : |
run: |
docker pull roboflow/roboflow-inference-server-jetson-4.6.1:test
docker build -t roboflow/roboflow-inference-server-jetson-4.6.1:test -f docker/dockerfiles/Dockerfile.onnx.jetson.4.6.1 .
docker push roboflow/roboflow-inference-server-jetson-4.6.1:test
@@ -37,7 +37,7 @@
PORT=9101 INFERENCE_SERVER_REPO=roboflow-inference-server-jetson-4.6.1 make start_test_docker_jetson
- name: 🧪 Regression Tests - Jetson 4.6.1
run: |
SKIP_YOLOV8_TEST=true SKIP_GAZE_TEST=true FUNCTIONAL=true PORT=9101 API_KEY=${{ secrets.API_KEY }} asl_instance_segmentation_API_KEY=${{ secrets.ASL_INSTANCE_SEGMENTATION_API_KEY }} asl_poly_instance_seg_API_KEY=${{ secrets.ASL_POLY_INSTANCE_SEG_API_KEY }} bccd_favz3_API_KEY=${{ secrets.BCCD_FAVZ3_API_KEY }} bccd_i4nym_API_KEY=${{ secrets.BCCD_I4NYM_API_KEY }} cats_and_dogs_smnpl_API_KEY=${{ secrets.CATS_AND_DOGS_SMNPL_API_KEY }} coins_xaz9i_API_KEY=${{ secrets.COINS_XAZ9I_API_KEY }} melee_API_KEY=${{ secrets.MELEE_API_KEY }} yolonas_test_API_KEY=${{ secrets.YOLONAS_TEST_API_KEY }} python3 -m pytest tests/inference/integration_tests/
SKIP_SPEED_TEST=true SKIP_DOCTR_TEST=true SKIP_CLIP_TEST=true SKIP_VISUALISATION_TESTS=true MAX_WAIT=300 SKIP_GROUNDING_DINO_TEST=true SKIP_YOLOV8_TEST=true SKIP_GAZE_TEST=true FUNCTIONAL=true PORT=9101 API_KEY=${{ secrets.API_KEY }} asl_instance_segmentation_API_KEY=${{ secrets.ASL_INSTANCE_SEGMENTATION_API_KEY }} asl_poly_instance_seg_API_KEY=${{ secrets.ASL_POLY_INSTANCE_SEG_API_KEY }} bccd_favz3_API_KEY=${{ secrets.BCCD_FAVZ3_API_KEY }} bccd_i4nym_API_KEY=${{ secrets.BCCD_I4NYM_API_KEY }} cats_and_dogs_smnpl_API_KEY=${{ secrets.CATS_AND_DOGS_SMNPL_API_KEY }} coins_xaz9i_API_KEY=${{ secrets.COINS_XAZ9I_API_KEY }} melee_API_KEY=${{ secrets.MELEE_API_KEY }} yolonas_test_API_KEY=${{ secrets.YOLONAS_TEST_API_KEY }} python3 -m pytest tests/inference/integration_tests/
- name: 🧹 Cleanup Test Docker - Jetson 4.6.1
run: make stop_test_docker
if: success() || failure()
if: success() || failure()
8 changes: 4 additions & 4 deletions .github/workflows/test.jetson_5.1.1.yml
@@ -22,13 +22,13 @@ jobs:
uses: actions/checkout@v3
with:
ref: ${{ github.head_ref }}

- name: 🦾 Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install -r requirements/requirements.test.integration.txt
- name: 🔨 Build and Push Test Docker - Jetson 5.1.1
run : |
run: |
docker pull roboflow/roboflow-inference-server-jetson-5.1.1:test
docker build -t roboflow/roboflow-inference-server-jetson-5.1.1:test -f docker/dockerfiles/Dockerfile.onnx.jetson.5.1.1 .
docker push roboflow/roboflow-inference-server-jetson-5.1.1:test
@@ -37,7 +37,7 @@
PORT=9101 INFERENCE_SERVER_REPO=roboflow-inference-server-jetson-5.1.1 make start_test_docker_jetson
- name: 🧪 Regression Tests - Jetson 5.1.1
run: |
SKIP_GAZE_TEST=true FUNCTIONAL=true PORT=9101 API_KEY=${{ secrets.API_KEY }} asl_instance_segmentation_API_KEY=${{ secrets.ASL_INSTANCE_SEGMENTATION_API_KEY }} asl_poly_instance_seg_API_KEY=${{ secrets.ASL_POLY_INSTANCE_SEG_API_KEY }} bccd_favz3_API_KEY=${{ secrets.BCCD_FAVZ3_API_KEY }} bccd_i4nym_API_KEY=${{ secrets.BCCD_I4NYM_API_KEY }} cats_and_dogs_smnpl_API_KEY=${{ secrets.CATS_AND_DOGS_SMNPL_API_KEY }} coins_xaz9i_API_KEY=${{ secrets.COINS_XAZ9I_API_KEY }} melee_API_KEY=${{ secrets.MELEE_API_KEY }} yolonas_test_API_KEY=${{ secrets.YOLONAS_TEST_API_KEY }} python -m pytest tests/inference/integration_tests/
SKIP_VISUALISATION_TESTS=true MAX_WAIT=300 SKIP_GROUNDING_DINO_TEST=true SKIP_GAZE_TEST=true FUNCTIONAL=true PORT=9101 API_KEY=${{ secrets.API_KEY }} asl_instance_segmentation_API_KEY=${{ secrets.ASL_INSTANCE_SEGMENTATION_API_KEY }} asl_poly_instance_seg_API_KEY=${{ secrets.ASL_POLY_INSTANCE_SEG_API_KEY }} bccd_favz3_API_KEY=${{ secrets.BCCD_FAVZ3_API_KEY }} bccd_i4nym_API_KEY=${{ secrets.BCCD_I4NYM_API_KEY }} cats_and_dogs_smnpl_API_KEY=${{ secrets.CATS_AND_DOGS_SMNPL_API_KEY }} coins_xaz9i_API_KEY=${{ secrets.COINS_XAZ9I_API_KEY }} melee_API_KEY=${{ secrets.MELEE_API_KEY }} yolonas_test_API_KEY=${{ secrets.YOLONAS_TEST_API_KEY }} python -m pytest tests/inference/integration_tests/
- name: 🧹 Cleanup Test Docker - Jetson 5.1.1
run: make stop_test_docker
if: success() || failure()
if: success() || failure()
2 changes: 1 addition & 1 deletion .github/workflows/test.nvidia_t4.yml
@@ -51,7 +51,7 @@ jobs:
PORT=9101 INFERENCE_SERVER_REPO=roboflow-inference-server-gpu-parallel make start_test_docker_gpu
- name: 🧪 Regression Tests - Parallel GPU
run: |
FUNCTIONAL=true PORT=9101 API_KEY=${{ secrets.API_KEY }} asl_instance_segmentation_API_KEY=${{ secrets.ASL_INSTANCE_SEGMENTATION_API_KEY }} asl_poly_instance_seg_API_KEY=${{ secrets.ASL_POLY_INSTANCE_SEG_API_KEY }} bccd_favz3_API_KEY=${{ secrets.BCCD_FAVZ3_API_KEY }} bccd_i4nym_API_KEY=${{ secrets.BCCD_I4NYM_API_KEY }} cats_and_dogs_smnpl_API_KEY=${{ secrets.CATS_AND_DOGS_SMNPL_API_KEY }} coins_xaz9i_API_KEY=${{ secrets.COINS_XAZ9I_API_KEY }} melee_API_KEY=${{ secrets.MELEE_API_KEY }} yolonas_test_API_KEY=${{ secrets.YOLONAS_TEST_API_KEY }} python -m pytest tests/inference/integration_tests/regression_test.py tests/inference/integration_tests/batch_regression_test.py
SKIP_VISUALISATION_TESTS=true FUNCTIONAL=true PORT=9101 API_KEY=${{ secrets.API_KEY }} asl_instance_segmentation_API_KEY=${{ secrets.ASL_INSTANCE_SEGMENTATION_API_KEY }} asl_poly_instance_seg_API_KEY=${{ secrets.ASL_POLY_INSTANCE_SEG_API_KEY }} bccd_favz3_API_KEY=${{ secrets.BCCD_FAVZ3_API_KEY }} bccd_i4nym_API_KEY=${{ secrets.BCCD_I4NYM_API_KEY }} cats_and_dogs_smnpl_API_KEY=${{ secrets.CATS_AND_DOGS_SMNPL_API_KEY }} coins_xaz9i_API_KEY=${{ secrets.COINS_XAZ9I_API_KEY }} melee_API_KEY=${{ secrets.MELEE_API_KEY }} yolonas_test_API_KEY=${{ secrets.YOLONAS_TEST_API_KEY }} python -m pytest tests/inference/integration_tests/regression_test.py tests/inference/integration_tests/batch_regression_test.py
- name: 🧹 Cleanup Test Docker - Parallel GPU
run: make stop_test_docker
if: success() || failure()
2 changes: 1 addition & 1 deletion .release/pypi/inference.core.setup.py
@@ -70,5 +70,5 @@ def read_requirements(path):
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
python_requires=">=3.7",
python_requires=">=3.9",
)
5 changes: 3 additions & 2 deletions .release/pypi/inference.cpu.setup.py
@@ -54,7 +54,8 @@ def read_requirements(path):
"requirements/_requirements.txt",
"requirements/requirements.cpu.txt",
]
).extend([f'inference-cli=={__version__}']),
)
+ [f"inference-cli=={__version__}"],
extras_require={
"clip": read_requirements("requirements/requirements.clip.txt"),
"gaze": read_requirements("requirements/requirements.gaze.txt"),
@@ -68,5 +69,5 @@ def read_requirements(path):
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
python_requires=">=3.7",
python_requires=">=3.9",
)
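The change above is a bug fix, not a formatting tweak: list.extend() mutates its receiver and returns None, so the old expression assigned None to install_requires and the inference-cli pin was silently lost. Concatenating with + builds and returns a new list. A standalone illustration (the package names are placeholders, not the real requirements):

    reqs = ["requests>=2.0"]                    # stand-in for read_requirements(...) output
    broken = reqs.extend(["inference-cli==0.9.9"])
    print(broken)                               # None -- extend() returns nothing useful
    reqs = ["requests>=2.0"]
    fixed = reqs + ["inference-cli==0.9.9"]     # concatenation returns a new list
    print(fixed)                                # ['requests>=2.0', 'inference-cli==0.9.9']
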
5 changes: 3 additions & 2 deletions .release/pypi/inference.gpu.setup.py
@@ -35,7 +35,8 @@ def read_requirements(path):
"requirements/_requirements.txt",
"requirements/requirements.gpu.txt",
]
).extend([f'inference-cli=={__version__}']),
)
+ [f"inference-cli=={__version__}"],
packages=find_packages(
where=root,
exclude=(
@@ -68,5 +69,5 @@ def read_requirements(path):
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
python_requires=">=3.7",
python_requires=">=3.9",
)
2 changes: 1 addition & 1 deletion .release/pypi/inference.sdk.setup.py
@@ -60,5 +60,5 @@ def read_requirements(path):
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
python_requires=">=3.7",
python_requires=">=3.9",
)
5 changes: 3 additions & 2 deletions .release/pypi/inference.setup.py
@@ -54,7 +54,8 @@ def read_requirements(path):
"requirements/_requirements.txt",
"requirements/requirements.cpu.txt",
]
).extend([f'inference-cli=={__version__}']),
)
+ [f"inference-cli=={__version__}"],
extras_require={
"clip": read_requirements("requirements/requirements.clip.txt"),
"gaze": read_requirements("requirements/requirements.gaze.txt"),
@@ -68,5 +69,5 @@ def read_requirements(path):
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
python_requires=">=3.7",
python_requires=">=3.9",
)
2 changes: 1 addition & 1 deletion docker/dockerfiles/Dockerfile.onnx.cpu
@@ -46,7 +46,7 @@ COPY --from=base / /
WORKDIR /build
COPY . .
RUN make create_wheels
RUN pip3 install dist/inference_core*.whl dist/inference_cpu*.whl dist/inference_sdk*.whl
RUN pip3 install dist/inference_cli*.whl dist/inference_core*.whl dist/inference_cpu*.whl dist/inference_sdk*.whl

WORKDIR /notebooks
COPY examples/notebooks .
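The notebook stage now installs the inference_cli wheel alongside the core, cpu, and sdk wheels. From inside the built image, a one-liner is enough to confirm the distribution landed (the name inference-cli matches the pin used in the setup files above):

    from importlib.metadata import version  # Python 3.8+

    print(version("inference-cli"))  # expected to report the release version, e.g. 0.9.9
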
2 changes: 1 addition & 1 deletion docker/dockerfiles/Dockerfile.onnx.gpu
@@ -46,7 +46,7 @@ WORKDIR /build
COPY . .
RUN ln -s /usr/bin/python3 /usr/bin/python
RUN /bin/make create_wheels_for_gpu_notebook
RUN pip3 install dist/inference_core*.whl dist/inference_gpu*.whl dist/inference_sdk*.whl
RUN pip3 install dist/inference_cli*.whl dist/inference_core*.whl dist/inference_gpu*.whl dist/inference_sdk*.whl

WORKDIR /notebooks
COPY examples/notebooks .
10 changes: 10 additions & 0 deletions docker/dockerfiles/Dockerfile.onnx.gpu.parallel
@@ -42,6 +42,15 @@ RUN echo net.core.somaxconn=1024 > /etc/sysctl.conf
FROM scratch
COPY --from=base / /

WORKDIR /build
COPY . .
RUN ln -s /usr/bin/python3 /usr/bin/python
RUN /bin/make create_wheels_for_gpu_notebook
RUN pip3 install dist/inference_core*.whl dist/inference_gpu*.whl dist/inference_sdk*.whl

WORKDIR /notebooks
COPY examples/notebooks .

WORKDIR /app/
COPY inference inference
COPY inference/enterprise/parallel/parallel_http_config.py parallel_http.py
@@ -51,5 +60,6 @@ ENV PROJECT=roboflow-platform
ENV CORE_MODELS_ENABLED=false
ENV WORKFLOWS_STEP_EXECUTION_MODE=local
ENV WORKFLOWS_MAX_CONCURRENT_STEPS=1
ENV REDIS_HOST=localhost

ENTRYPOINT python3 entrypoint.py
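This Dockerfile now builds the notebook wheels like the standard GPU image and sets a REDIS_HOST default. Presumably the parallel server resolves its Redis connection from the environment at startup; a minimal sketch of that pattern, with variable names assumed rather than taken from entrypoint.py:

    import os
    from redis import Redis  # requires the redis package

    # Assumed pattern: the Dockerfile default of REDIS_HOST=localhost works for a
    # bundled Redis, while deployments can override the variable to point the
    # server at an external instance.
    redis_client = Redis(
        host=os.getenv("REDIS_HOST", "localhost"),
        port=int(os.getenv("REDIS_PORT", "6379")),
    )
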
34 changes: 26 additions & 8 deletions docker/dockerfiles/Dockerfile.onnx.jetson.4.6.1
@@ -4,8 +4,23 @@ ARG DEBIAN_FRONTEND=noninteractive
ENV LANG en_US.UTF-8

RUN apt-get update -y && apt-get upgrade -y && apt-get install -y \
python3.8 \
python3.8-dev \
build-essential \
zlib1g-dev \
libncurses5-dev \
libgdbm-dev \
libnss3-dev \
libssl-dev \
libreadline-dev \
libffi-dev \
curl \
libbz2-dev \
software-properties-common

RUN wget https://www.python.org/ftp/python/3.9.0/Python-3.9.0.tar.xz && tar -xf Python-3.9.0.tar.xz && rm Python-3.9.0.tar.xz
WORKDIR ./Python-3.9.0
RUN ./configure && make altinstall

RUN apt-get update -y && apt-get upgrade -y && apt-get install -y \
lshw \
git \
python3-pip \
@@ -27,29 +42,32 @@ COPY requirements/requirements.clip.txt \
requirements/requirements.http.txt \
requirements/requirements.doctr.txt \
requirements/requirements.groundingdino.txt \
requirements/requirements.sdk.http.txt \
requirements/_requirements.txt \
./

RUN python3.8 -m pip install --ignore-installed PyYAML && rm -rf ~/.cache/pip
RUN python3.9 -m pip install --ignore-installed PyYAML && rm -rf ~/.cache/pip

RUN python3.8 -m pip install --upgrade pip && python3.8 -m pip install \
RUN python3.9 -m pip install --upgrade pip && python3.9 -m pip install \
-r _requirements.txt \
-r requirements.clip.txt \
-r requirements.http.txt \
-r requirements.doctr.txt \
-r requirements.groundingdino.txt \
-r requirements.sdk.http.txt \
jupyterlab \
--upgrade \
&& rm -rf ~/.cache/pip

RUN python3.8 -m pip uninstall --yes onnxruntime
RUN wget https://nvidia.box.com/shared/static/2sv2fv1wseihaw8ym0d4srz41dzljwxh.whl -O onnxruntime_gpu-1.11.0-cp38-cp38-linux_aarch64.whl
RUN python3.8 -m pip install onnxruntime_gpu-1.11.0-cp38-cp38-linux_aarch64.whl \
RUN python3.9 -m pip uninstall --yes onnxruntime
RUN wget https://nvidia.box.com/shared/static/jmomlpcctmjojz14zbwa12lxmeh2h6o5.whl -O onnxruntime_gpu-1.11.0-cp39-cp39-linux_aarch64.whl
RUN python3.9 -m pip install onnxruntime_gpu-1.11.0-cp39-cp39-linux_aarch64.whl \
&& rm -rf ~/.cache/pip \
&& rm onnxruntime_gpu-1.11.0-cp38-cp38-linux_aarch64.whl
&& rm onnxruntime_gpu-1.11.0-cp39-cp39-linux_aarch64.whl

WORKDIR /app/
COPY inference inference
COPY inference_sdk inference_sdk
COPY docker/config/gpu_http.py gpu_http.py

ENV VERSION_CHECK_MODE=continuous
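This image now builds CPython 3.9.0 from source (make altinstall leaves the distribution's default python3 untouched) and swaps the NVIDIA-hosted onnxruntime-gpu wheel for its cp39 build. A quick sanity check from inside the container, assuming the install went as intended, is to list the execution providers:

    import onnxruntime as ort

    print(ort.__version__)                 # expected to be 1.11.0 for this image
    # On a healthy Jetson build the CUDA provider should appear before the CPU
    # fallback, e.g. ['CUDAExecutionProvider', 'CPUExecutionProvider'].
    print(ort.get_available_providers())
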
19 changes: 12 additions & 7 deletions docker/dockerfiles/Dockerfile.onnx.jetson.5.1.1
@@ -4,6 +4,8 @@ ARG DEBIAN_FRONTEND=noninteractive
ENV LANG en_US.UTF-8

RUN apt-get update -y && apt-get install -y \
python3.9 \
python3.9-dev \
lshw \
git \
python3-pip \
@@ -24,30 +26,33 @@ COPY requirements/requirements.clip.txt \
requirements/requirements.http.txt \
requirements/requirements.doctr.txt \
requirements/requirements.groundingdino.txt \
requirements/requirements.sdk.http.txt \
requirements/_requirements.txt \
./

RUN pip3 install --ignore-installed PyYAML && rm -rf ~/.cache/pip
RUN python3.9 -m pip install --ignore-installed PyYAML && rm -rf ~/.cache/pip

RUN pip3 install --upgrade pip && pip3 install \
RUN python3.9 -m pip install --upgrade pip && python3.9 -m pip install \
git+https://github.com/pypdfium2-team/pypdfium2 \
-r _requirements.txt \
-r requirements.clip.txt \
-r requirements.http.txt \
-r requirements.doctr.txt \
-r requirements.groundingdino.txt \
-r requirements.sdk.http.txt \
jupyterlab \
--upgrade \
&& rm -rf ~/.cache/pip

RUN pip3 uninstall --yes onnxruntime
RUN wget https://nvidia.box.com/shared/static/v59xkrnvederwewo2f1jtv6yurl92xso.whl -O onnxruntime_gpu-1.12.1-cp38-cp38-linux_aarch64.whl
RUN pip3 install onnxruntime_gpu-1.12.1-cp38-cp38-linux_aarch64.whl "opencv-python-headless<4.3" \
RUN python3.9 -m pip uninstall --yes onnxruntime
RUN wget https://nvidia.box.com/shared/static/5dei4auhjh5ij7rmuvljmdy5q1en3bhf.whl -O onnxruntime_gpu-1.12.1-cp39-cp39-linux_aarch64.whl
RUN python3.9 -m pip install onnxruntime_gpu-1.12.1-cp39-cp39-linux_aarch64.whl "opencv-python-headless>4" \
&& rm -rf ~/.cache/pip \
&& rm onnxruntime_gpu-1.12.1-cp38-cp38-linux_aarch64.whl
&& rm onnxruntime_gpu-1.12.1-cp39-cp39-linux_aarch64.whl

WORKDIR /app/
COPY inference inference
COPY inference_sdk inference_sdk
COPY docker/config/gpu_http.py gpu_http.py

ENV VERSION_CHECK_MODE=continuous
@@ -60,7 +65,7 @@ ENV NUM_WORKERS=1
ENV HOST=0.0.0.0
ENV PORT=9001
ENV OPENBLAS_CORETYPE=ARMV8
ENV LD_PRELOAD=/usr/lib/aarch64-linux-gnu/libgomp.so.1:/usr/local/lib/python3.8/dist-packages/torch.libs/libgomp-d22c30c5.so.1.0.0
ENV LD_PRELOAD=/usr/lib/aarch64-linux-gnu/libgomp.so.1
ENV WORKFLOWS_STEP_EXECUTION_MODE=local
ENV WORKFLOWS_MAX_CONCURRENT_STEPS=1
