
Commit 0edf9ca

Fix hub-dependent tests for PRs (#1119)
* Remove the hub token
* replace repos
* style
1 parent c39a511 commit 0edf9ca

2 files changed: +8 −14 lines changed


.github/workflows/pr_tests.yml

Lines changed: 0 additions & 8 deletions
@@ -67,8 +67,6 @@ jobs:
 
       - name: Run fast PyTorch CPU tests
         if: ${{ matrix.config.framework == 'pytorch' }}
-        env:
-          HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
         run: |
           python -m pytest -n 2 --max-worker-restart=0 --dist=loadfile \
             -s -v -k "not Flax and not Onnx" \
@@ -77,8 +75,6 @@ jobs:
 
       - name: Run fast Flax TPU tests
         if: ${{ matrix.config.framework == 'flax' }}
-        env:
-          HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
         run: |
           python -m pytest -n 2 --max-worker-restart=0 --dist=loadfile \
             -s -v -k "Flax" \
@@ -87,8 +83,6 @@ jobs:
 
       - name: Run fast ONNXRuntime CPU tests
         if: ${{ matrix.config.framework == 'onnxruntime' }}
-        env:
-          HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
         run: |
           python -m pytest -n 2 --max-worker-restart=0 --dist=loadfile \
             -s -v -k "Onnx" \
@@ -141,8 +135,6 @@ jobs:
 
       - name: Run fast PyTorch tests on M1 (MPS)
         shell: arch -arch arm64 bash {0}
-        env:
-          HUGGING_FACE_HUB_TOKEN: ${{ secrets.HUGGING_FACE_HUB_TOKEN }}
         run: |
           ${CONDA_RUN} python -m pytest -n 1 -s -v --make-reports=tests_torch_mps tests/
 
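
Note: the env blocks above can be dropped because the PR tests now only pull from public hf-internal-testing repositories, which are downloadable anonymously. A minimal sketch of that idea with huggingface_hub follows; the exact file path scheduler/scheduler_config.json is an assumption about the tiny repo's layout, not something shown in this diff.

```python
# Minimal sketch: public Hub repos need no HUGGING_FACE_HUB_TOKEN.
# Assumption: the tiny testing repo stores its scheduler config as
# scheduler/scheduler_config.json (the usual diffusers layout).
from huggingface_hub import hf_hub_download

config_path = hf_hub_download(
    repo_id="hf-internal-testing/tiny-stable-diffusion-torch",
    filename="scheduler_config.json",
    subfolder="scheduler",
)
print(config_path)  # cached local path, fetched without any auth token
```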

tests/test_config.py

Lines changed: 8 additions & 6 deletions
@@ -244,28 +244,30 @@ def test_load_ddim_from_pndm(self):
         logger = logging.get_logger("diffusers.configuration_utils")
 
         with CaptureLogger(logger) as cap_logger:
-            ddim = DDIMScheduler.from_config("runwayml/stable-diffusion-v1-5", subfolder="scheduler")
+            ddim = DDIMScheduler.from_config("hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler")
 
         assert ddim.__class__ == DDIMScheduler
         # no warning should be thrown
         assert cap_logger.out == ""
 
-    def test_load_ddim_from_euler(self):
+    def test_load_euler_from_pndm(self):
         logger = logging.get_logger("diffusers.configuration_utils")
 
         with CaptureLogger(logger) as cap_logger:
-            euler = EulerDiscreteScheduler.from_config("runwayml/stable-diffusion-v1-5", subfolder="scheduler")
+            euler = EulerDiscreteScheduler.from_config(
+                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
+            )
 
         assert euler.__class__ == EulerDiscreteScheduler
         # no warning should be thrown
         assert cap_logger.out == ""
 
-    def test_load_ddim_from_euler_ancestral(self):
+    def test_load_euler_ancestral_from_pndm(self):
         logger = logging.get_logger("diffusers.configuration_utils")
 
         with CaptureLogger(logger) as cap_logger:
             euler = EulerAncestralDiscreteScheduler.from_config(
-                "runwayml/stable-diffusion-v1-5", subfolder="scheduler"
+                "hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler"
             )
 
         assert euler.__class__ == EulerAncestralDiscreteScheduler
@@ -276,7 +278,7 @@ def test_load_pndm(self):
         logger = logging.get_logger("diffusers.configuration_utils")
 
         with CaptureLogger(logger) as cap_logger:
-            pndm = PNDMScheduler.from_config("runwayml/stable-diffusion-v1-5", subfolder="scheduler")
+            pndm = PNDMScheduler.from_config("hf-internal-testing/tiny-stable-diffusion-torch", subfolder="scheduler")
 
         assert pndm.__class__ == PNDMScheduler
         # no warning should be thrown
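
Note: the renamed tests load one scheduler class from another scheduler's saved config (e.g. Euler from a PNDM config) without triggering a warning. A minimal sketch of the same pattern, assuming the diffusers version this commit targets, where `from_config` accepts a Hub repo id and `subfolder` exactly as in the calls above:

```python
# Minimal sketch of the cross-loading pattern exercised above, assuming a
# diffusers version where schedulers load configs directly from a Hub repo id.
from diffusers import DDIMScheduler, EulerDiscreteScheduler

repo = "hf-internal-testing/tiny-stable-diffusion-torch"

# The tiny public repo ships a scheduler config; any compatible scheduler
# class can instantiate itself from it, and no auth token is required.
ddim = DDIMScheduler.from_config(repo, subfolder="scheduler")
euler = EulerDiscreteScheduler.from_config(repo, subfolder="scheduler")

assert ddim.__class__ == DDIMScheduler
assert euler.__class__ == EulerDiscreteScheduler
```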
