Commit ee8a3f2

rls2.4: fix coverity issue (#3191)
1 parent 5880201 commit ee8a3f2

4 files changed, +8 -8 lines changed

4 files changed

+8
-8
lines changed
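
The change removes the `verify=False` argument from the `requests.get(...)` calls in the example scripts, so HTTPS image downloads fall back to requests' default certificate verification (`verify=False` disables TLS certificate checking, which Coverity reports as a security finding). Below is a minimal standalone sketch of the corrected `load_image` pattern, not taken verbatim from the repository; it assumes only `requests` and `Pillow` are installed and uses a placeholder URL.

# Standalone sketch: fetch an image over HTTPS with requests' default
# certificate verification (verify defaults to True, so it is simply omitted).
from io import BytesIO

import requests
from PIL import Image


def load_image(image_file):
    if image_file.startswith("http://") or image_file.startswith("https://"):
        # Remote URL: download with certificate verification enabled (the default).
        response = requests.get(image_file)
        response.raise_for_status()
        image = Image.open(BytesIO(response.content)).convert("RGB")
    else:
        # Local path: open the file directly.
        image = Image.open(image_file).convert("RGB")
    return image


# Example usage with a placeholder URL:
# img = load_image("https://example.com/cat.png")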

examples/cpu/llm/inference/distributed/run_generation_tp.py

Lines changed: 2 additions & 2 deletions

@@ -292,7 +292,7 @@
 
 def load_image(image_file):
     if image_file.startswith("http://") or image_file.startswith("https://"):
-        response = requests.get(image_file, verify=False)
+        response = requests.get(image_file)
         image = Image.open(BytesIO(response.content)).convert("RGB")
     else:
         image = Image.open(image_file).convert("RGB")

@@ -400,7 +400,7 @@ def trace_handler(prof):
 if not hasattr(model.config, "token_latency"):
     model.config.token_latency = True
 if model_type == "git":
-    prompt = Image.open(requests.get(args.image_url, stream=True, verify=False).raw)
+    prompt = Image.open(requests.get(args.image_url, stream=True).raw)
     generate_kwargs.pop("min_new_tokens", None)
 elif model_type == "llava":
     if args.prompt is not None:

examples/cpu/llm/inference/distributed/run_generation_with_deepspeed.py

Lines changed: 2 additions & 2 deletions

@@ -557,7 +557,7 @@ def write_checkpoints_json():
     from PIL import Image
     import requests
 
-    prompt = Image.open(requests.get(args.image_url, stream=True, verify=False).raw)
+    prompt = Image.open(requests.get(args.image_url, stream=True).raw)
     inputs = [prompt] * args.batch_size
     generate_kwargs.pop("min_new_tokens", None)
 elif model_type == "llava":

@@ -567,7 +567,7 @@ def write_checkpoints_json():
 
 def load_image(image_file):
     if image_file.startswith("http://") or image_file.startswith("https://"):
-        response = requests.get(image_file, verify=False)
+        response = requests.get(image_file)
         image = Image.open(BytesIO(response.content)).convert("RGB")
     else:
         image = Image.open(image_file).convert("RGB")

examples/cpu/llm/inference/single_instance/run_generation.py

Lines changed: 2 additions & 2 deletions

@@ -239,7 +239,7 @@
 
 def load_image(image_file):
     if image_file.startswith("http://") or image_file.startswith("https://"):
-        response = requests.get(image_file, verify=False)
+        response = requests.get(image_file)
         image = Image.open(BytesIO(response.content)).convert("RGB")
     else:
         image = Image.open(image_file).convert("RGB")

@@ -299,7 +299,7 @@ def trace_handler(prof):
 if not hasattr(model.config, "token_latency"):
     model.config.token_latency = True
 if model_type == "git":
-    prompt = Image.open(requests.get(args.image_url, stream=True, verify=False).raw)
+    prompt = Image.open(requests.get(args.image_url, stream=True).raw)
     generate_kwargs.pop("min_new_tokens", None)
 elif model_type == "llava":
     if args.prompt is not None:

examples/cpu/llm/inference/single_instance/run_quantization.py

Lines changed: 2 additions & 2 deletions

@@ -362,7 +362,7 @@
 
 def load_image(image_file):
     if image_file.startswith("http://") or image_file.startswith("https://"):
-        response = requests.get(image_file, verify=False)
+        response = requests.get(image_file)
         image = Image.open(BytesIO(response.content)).convert("RGB")
     else:
         image = Image.open(image_file).convert("RGB")

@@ -1154,7 +1154,7 @@ def calib_func(prepared_model):
 )
 
 if model.name == "git":
-    prompt = Image.open(requests.get(args.image_url, stream=True, verify=False).raw)
+    prompt = Image.open(requests.get(args.image_url, stream=True).raw)
 elif model.name == "llava":
     if args.prompt is not None:
         prompt = args.prompt

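If certificate verification fails in a restricted environment (for example, behind a TLS-intercepting proxy), the usual alternative to `verify=False` is pointing requests at a trusted CA bundle, either per call or via the `REQUESTS_CA_BUNDLE` environment variable. A hedged sketch follows; the bundle path and URL are hypothetical placeholders, not part of this commit.

import requests

# Hypothetical CA bundle path; substitute the certificate file used in your environment.
CA_BUNDLE = "/etc/ssl/certs/corp-ca.pem"

# Per-call: keep verification on, but trust the custom CA bundle.
response = requests.get("https://example.com/cat.png", verify=CA_BUNDLE)
response.raise_for_status()

# Alternatively, set it process-wide before running the script:
#   export REQUESTS_CA_BUNDLE=/etc/ssl/certs/corp-ca.pem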