You signed in with another tab or window. Reload to refresh your session.You signed out in another tab or window. Reload to refresh your session.You switched accounts on another tab or window. Reload to refresh your session.Dismiss alert
Traceback (most recent call last):
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\queueing.py", line 495, in call_prediction
output = await route_utils.call_process_api(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\route_utils.py", line 235, in call_process_api
output = await app.get_blocks().process_api(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\blocks.py", line 1627, in process_api
result = await self.call_function(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\blocks.py", line 1173, in call_function
prediction = await anyio.to_thread.run_sync(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\anyio\to_thread.py", line 56, in run_sync
return await get_async_backend().run_sync_in_worker_thread(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\anyio\_backends\_asyncio.py", line 2144, in run_sync_in_worker_thread
return await future
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\anyio\_backends\_asyncio.py", line 851, in run
result = context.run(func, *args)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\utils.py", line 690, in wrapper
response = f(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "E:\StableCascade\StableCascade\gradio_app\app.py", line 79, in generate
prior_embeds = generate_prior(
File "E:\StableCascade\StableCascade\gradio_app\app.py", line 32, in generate_prior
prior_output = prior(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\diffusers\pipelines\stable_cascade\pipeline_stable_cascade_prior.py", line 490, in __call__
) = self.encode_prompt(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\diffusers\pipelines\stable_cascade\pipeline_stable_cascade_prior.py", line 156, in encode_prompt
text_encoder_output = self.text_encoder(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 1216, in forward
text_outputs = self.text_model(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 711, in forward
encoder_outputs = self.encoder(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 638, in forward
layer_outputs = encoder_layer(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 380, in forward
hidden_states, attn_weights = self.self_attn(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 269, in forward
query_states = self.q_proj(hidden_states) * self.scale
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\linear.py", line 114, in forward
return F.linear(input, self.weight, self.bias)
RuntimeError: CUDA error: CUBLAS_STATUS_NOT_INITIALIZED when calling cublasCreate(handle)
The text was updated successfully, but these errors were encountered:
Traceback (most recent call last):
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\queueing.py", line 495, in call_prediction
output = await route_utils.call_process_api(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\route_utils.py", line 235, in call_process_api
output = await app.get_blocks().process_api(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\blocks.py", line 1627, in process_api
result = await self.call_function(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\blocks.py", line 1173, in call_function
prediction = await anyio.to_thread.run_sync(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\anyio\to_thread.py", line 56, in run_sync
return await get_async_backend().run_sync_in_worker_thread(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\anyio\_backends\_asyncio.py", line 2144, in run_sync_in_worker_thread
return await future
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\anyio\_backends\_asyncio.py", line 851, in run
result = context.run(func, *args)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\gradio\utils.py", line 690, in wrapper
response = f(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "E:\StableCascade\StableCascade\gradio_app\app.py", line 79, in generate
prior_embeds = generate_prior(
File "E:\StableCascade\StableCascade\gradio_app\app.py", line 32, in generate_prior
prior_output = prior(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\utils\_contextlib.py", line 115, in decorate_context
return func(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\diffusers\pipelines\stable_cascade\pipeline_stable_cascade_prior.py", line 490, in __call__
) = self.encode_prompt(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\diffusers\pipelines\stable_cascade\pipeline_stable_cascade_prior.py", line 156, in encode_prompt
text_encoder_output = self.text_encoder(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 1216, in forward
text_outputs = self.text_model(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 711, in forward
encoder_outputs = self.encoder(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 638, in forward
layer_outputs = encoder_layer(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 380, in forward
hidden_states, attn_weights = self.self_attn(
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\transformers\models\clip\modeling_clip.py", line 269, in forward
query_states = self.q_proj(hidden_states) * self.scale
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1518, in _wrapped_call_impl
return self._call_impl(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\module.py", line 1527, in _call_impl
return forward_call(*args, **kwargs)
File "C:\Users\Lan\AppData\Local\Programs\Python\Python310\lib\site-packages\torch\nn\modules\linear.py", line 114, in forward
return F.linear(input, self.weight, self.bias)
RuntimeError: CUDA error: CUBLAS_STATUS_NOT_INITIALIZED when calling cublasCreate(handle)
The text was updated successfully, but these errors were encountered: