@@ -312,7 +312,6 @@ def setup_package():
                         "onednn-devel==2024.1.1;platform_system=='Windows'",
                         "onednn==2024.1.1;platform_system=='Windows'"]

-    # Add for testing purposes for now
     xpu_26_requires = copy.deepcopy(all_requires)
     for exclude_require in cpu_torch_version:
         xpu_26_requires.remove(exclude_require)
@@ -323,6 +322,24 @@ def setup_package():
                         "onednn-devel==2025.0.1",
                         "onednn==2025.0.1",
                         "dpcpp-cpp-rt==2025.0.2"]
+
+    # Add for testing purposes for now, for ARL-H with AOT
+    xpu_26_arl_requires = copy.deepcopy(all_requires)
+    for exclude_require in cpu_torch_version:
+        xpu_26_arl_requires.remove(exclude_require)
+    xpu_26_arl_requires += ["torch==2.6.0.post0+xpu;platform_system=='Windows'",
+                            "torchvision==0.21.0.post0+xpu;platform_system=='Windows'",
+                            "torchaudio==2.6.0.post0+xpu;platform_system=='Windows'",
+                            "intel-extension-for-pytorch==2.6.10+xpu;platform_system=='Windows'",
+                            "torch==2.6.0+xpu;platform_system=='Linux'",
+                            "torchvision==0.21.0+xpu;platform_system=='Linux'",
+                            "torchaudio==2.6.0+xpu;platform_system=='Linux'",
+                            "intel-extension-for-pytorch==2.6.10+xpu;platform_system=='Linux'",
+                            "oneccl_bind_pt==2.6.0+xpu;platform_system=='Linux'",
+                            "bigdl-core-xe-all==" + CORE_XE_VERSION,
+                            "onednn-devel==2025.0.1",
+                            "onednn==2025.0.1",
+                            "dpcpp-cpp-rt==2025.0.2"]

     cpp_requires = ["bigdl-core-cpp==" + CORE_XE_VERSION,
                     "onednn-devel==2025.0.1;platform_system=='Windows'",
@@ -370,6 +387,7 @@ def setup_package():
                         "xpu-arl": xpu_lnl_requires,
                         "xpu-arc": xpu_lnl_requires,
                         "xpu-2-6": xpu_26_requires,
+                        "xpu-2-6-arl": xpu_26_arl_requires,
                         "serving": serving_requires,
                         "cpp": cpp_requires,
                         "llama-index": llama_index_requires},  # for internal usage when upstreaming for llama-index
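For context, the new `xpu-2-6-arl` extra is built with the same pattern the file already uses for the other XPU extras: copy the full requirement list, drop the CPU-only torch pins, then append the XPU wheel pins. The snippet below is a minimal, self-contained sketch of that pattern; the `all_requires` and `cpu_torch_version` values are illustrative placeholders, not the actual lists from setup.py.

```python
# Minimal sketch of the extras-construction pattern used in the diff above.
# The two input lists are placeholders for illustration only.
import copy

all_requires = ["transformers==4.37.0", "torch==2.1.2+cpu"]   # placeholder
cpu_torch_version = ["torch==2.1.2+cpu"]                      # placeholder

# Copy the full list, strip the CPU-only torch pins, then add the XPU pins.
xpu_26_arl_requires = copy.deepcopy(all_requires)
for exclude_require in cpu_torch_version:
    xpu_26_arl_requires.remove(exclude_require)
xpu_26_arl_requires += ["torch==2.6.0.post0+xpu;platform_system=='Windows'",
                        "torch==2.6.0+xpu;platform_system=='Linux'"]

print(xpu_26_arl_requires)
```

Once published, the new extra would presumably be consumed with the standard extras syntax, e.g. `pip install ipex-llm[xpu-2-6-arl]` (distribution name assumed from the repository; any extra index URL required for the +xpu wheels is not shown here).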