-
Notifications
You must be signed in to change notification settings - Fork 13
Expand file tree
/
Copy pathbuild.py
More file actions
1067 lines (875 loc) · 36.2 KB
/
build.py
File metadata and controls
1067 lines (875 loc) · 36.2 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
737
738
739
740
741
742
743
744
745
746
747
748
749
750
751
752
753
754
755
756
757
758
759
760
761
762
763
764
765
766
767
768
769
770
771
772
773
774
775
776
777
778
779
780
781
782
783
784
785
786
787
788
789
790
791
792
793
794
795
796
797
798
799
800
801
802
803
804
805
806
807
808
809
810
811
812
813
814
815
816
817
818
819
820
821
822
823
824
825
826
827
828
829
830
831
832
833
834
835
836
837
838
839
840
841
842
843
844
845
846
847
848
849
850
851
852
853
854
855
856
857
858
859
860
861
862
863
864
865
866
867
868
869
870
871
872
873
874
875
876
877
878
879
880
881
882
883
884
885
886
887
888
889
890
891
892
893
894
895
896
897
898
899
900
901
902
903
904
905
906
907
908
909
910
911
912
913
914
915
916
917
918
919
920
921
922
923
924
925
926
927
928
929
930
931
932
933
934
935
936
937
938
939
940
941
942
943
944
945
946
947
948
949
950
951
952
953
954
955
956
957
958
959
960
961
962
963
964
965
966
967
968
969
970
971
972
973
974
975
976
977
978
979
980
981
982
983
984
985
986
987
988
989
990
991
992
993
994
995
996
997
998
999
1000
"""Flash build command - Package Flash applications for deployment."""
import ast
import importlib.util
import json
import logging
import re
import shutil
import subprocess
import sys
import tarfile
from pathlib import Path
from typing import Optional
import typer
from rich.console import Console
try:
import tomllib # Python 3.11+
except ImportError:
import tomli as tomllib # Python 3.9-3.10
from runpod_flash.core.resources.constants import (
MAX_TARBALL_SIZE_MB,
SUPPORTED_PYTHON_VERSIONS,
validate_python_version,
)
from ..utils.ignore import get_file_tree, load_ignore_patterns
from .build_utils.handler_generator import HandlerGenerator
from .build_utils.lb_handler_generator import LBHandlerGenerator
from .build_utils.manifest import ManifestBuilder
from .build_utils.resource_config_generator import generate_all_resource_configs
from .build_utils.scanner import RemoteDecoratorScanner
logger = logging.getLogger(__name__)
console = Console()
# Constants

# Timeout for pip install operations (large packages like torch can take 5-10 minutes)
PIP_INSTALL_TIMEOUT_SECONDS: int = 600
# Timeout for ensurepip (lightweight operation, typically completes in <10 seconds)
ENSUREPIP_TIMEOUT_SECONDS: int = 30
# Timeout for version checks (should be instant)
VERSION_CHECK_TIMEOUT_SECONDS: int = 5

# RunPod Serverless platform specifications
# RunPod serverless runs on x86_64 Linux, regardless of build platform
# Support multiple manylinux versions (newer versions are backward compatible)
RUNPOD_PLATFORMS: list[str] = [
    "manylinux_2_28_x86_64",  # glibc 2.28+ (newest, for Python 3.13+)
    "manylinux_2_17_x86_64",  # glibc 2.17+ (covers most modern packages)
    "manylinux2014_x86_64",  # glibc 2.17 (legacy compatibility)
]
RUNPOD_PYTHON_IMPL: str = "cp"  # CPython implementation

# Pip command identifiers
UV_COMMAND: str = "uv"
PIP_MODULE: str = "pip"

# Packages pre-installed in base Docker images (runpod/pytorch:*).
# Always excluded from build artifacts to avoid:
# 1. Exceeding the 500 MB tarball limit (torch alone is ~500 MB)
# 2. Redundant copies — these are already in the base Docker image
# NOTE: numpy is excluded because the base Docker image provides it, and
# keeping it out of the tarball saves ~30 MB toward the 500 MB limit.
BASE_IMAGE_PACKAGES: frozenset[str] = frozenset(
    {
        "torch",
        "torchvision",
        "torchaudio",
        "numpy",
        "triton",
    }
)
def _find_runpod_flash(project_dir: Optional[Path] = None) -> Optional[Path]:
"""Find installed runpod_flash package directory.
Tries two strategies:
1. importlib.util.find_spec -- works for any installed runpod_flash
(dev-installed or site-packages)
2. Relative path search -- walks upward from project_dir looking for a sibling
flash repo (worktree or standard layout)
Args:
project_dir: Flash project directory, used for relative path search fallback
Returns:
Path to runpod_flash package directory, or None if not found
"""
# Strategy 1: importlib (any installed runpod_flash -- dev or site-packages)
try:
spec = importlib.util.find_spec("runpod_flash")
if spec and spec.origin:
return Path(spec.origin).parent
except Exception:
pass
# Strategy 2: search upward from project_dir for flash repo
if project_dir is None:
return None
current = project_dir.resolve()
for _ in range(6):
# Worktree layout: flash-project/flash/main/src/runpod_flash/
# Standard layout: flash-project/flash/src/runpod_flash/
for sub in ("flash/main/src/runpod_flash", "flash/src/runpod_flash"):
candidate = current / sub
if (candidate / "__init__.py").is_file():
return candidate
parent = current.parent
if parent == current:
break
current = parent
return None
def _bundle_runpod_flash(build_dir: Path, flash_pkg: Path) -> None:
    """Vendor the runpod_flash sources into the build directory.

    Args:
        build_dir: Target build directory receiving the copy
        flash_pkg: Source runpod_flash package directory to bundle
    """
    target = build_dir / "runpod_flash"
    # Start from a clean slate so stale files never survive a rebuild
    if target.exists():
        shutil.rmtree(target)
    skip = shutil.ignore_patterns("__pycache__", "*.pyc", ".pytest_cache")
    shutil.copytree(flash_pkg, target, ignore=skip)
    console.print(f"[cyan]Bundled runpod_flash from {flash_pkg}[/cyan]")
def _extract_runpod_flash_dependencies(flash_pkg_dir: Path) -> list[str]:
    """Read runpod_flash's runtime dependencies from its pyproject.toml.

    When bundling local runpod_flash source, its own dependencies must also
    be installed so they're available in the build environment.

    Args:
        flash_pkg_dir: Path to runpod_flash package directory (src/runpod_flash)

    Returns:
        List of dependency strings, empty list if parsing fails
    """
    try:
        # flash_pkg_dir is src/runpod_flash; the project root (holding
        # pyproject.toml) is two levels up
        pyproject_path = flash_pkg_dir.parent.parent / "pyproject.toml"
        if not pyproject_path.exists():
            console.print(
                "[yellow]⚠ runpod_flash pyproject.toml not found, "
                "dependencies may be missing[/yellow]"
            )
            return []

        # Parse TOML and pull [project.dependencies]
        with open(pyproject_path, "rb") as fh:
            parsed = tomllib.load(fh)
        dependencies = parsed.get("project", {}).get("dependencies", [])
        if dependencies:
            console.print(
                f"[dim]Found {len(dependencies)} runpod_flash dependencies to install[/dim]"
            )
        return dependencies
    except Exception as e:
        console.print(
            f"[yellow]⚠ Failed to parse runpod_flash dependencies: {e}[/yellow]"
        )
        return []
def _normalize_package_name(name: str) -> str:
"""Normalize a package name for comparison (PEP 503: lowercase, hyphens to underscores)."""
return name.lower().replace("-", "_")
def _remove_runpod_flash_from_requirements(build_dir: Path) -> None:
"""Remove runpod_flash from requirements.txt and clean up dist-info since we bundled source."""
req_file = build_dir / "requirements.txt"
if not req_file.exists():
return
lines = req_file.read_text().splitlines()
filtered = [
line
for line in lines
if not line.strip().lower().startswith("runpod_flash")
and not line.strip().lower().startswith("runpod-flash")
]
req_file.write_text("\n".join(filtered) + "\n")
# Remove runpod_flash dist-info directory to avoid conflicts with bundled source
# dist-info is created by pip install and can confuse Python's import system
for dist_info in build_dir.glob("runpod_flash-*.dist-info"):
if dist_info.is_dir():
shutil.rmtree(dist_info)
def _resolve_pip_python_version(manifest: dict) -> str | None:
"""Determine the target Python version for pip from the manifest.
One tarball serves all resources, so all must share the same ABI.
Returns the highest version found (GPU base image dictates the floor).
Returns:
The target Python version string, or None if not available.
"""
versions = set()
for resource in manifest.get("resources", {}).values():
version = resource.get("target_python_version")
if version:
versions.add(version)
if not versions:
return None
# All resources should agree, but if they differ, use the highest
# (GPU base image pins the minimum, and one tarball must work everywhere)
return max(versions)
def run_build(
    project_dir: Path,
    app_name: str,
    no_deps: bool = False,
    output_name: str | None = None,
    exclude: str | None = None,
    verbose: bool = False,
) -> Path:
    """Run the build process and return the artifact path.

    Contains all build steps: validate, collect files, manifest, deps, tarball.
    Always bundles the runpod_flash installed in the current environment.
    Always keeps the build directory — caller decides cleanup.

    Args:
        project_dir: Flash project directory
        app_name: Application name
        no_deps: Skip transitive dependencies during pip install
        output_name: Custom archive name (default: artifact.tar.gz)
        exclude: Comma-separated packages to exclude
        verbose: Show archive and build directory paths in summary

    Returns:
        Path to the created artifact archive

    Raises:
        typer.Exit: On build failure (including when archive exceeds 500 MB)
    """
    if not validate_project_structure(project_dir):
        console.print("[red]Error:[/red] Not a valid Flash project")
        console.print("Run [bold]flash init[/bold] to create a Flash project")
        raise typer.Exit(1)

    # Create build directory first to ensure clean state before collecting files
    build_dir = create_build_directory(project_dir, app_name)

    # Parse exclusions: merge user-specified with always-excluded base image packages
    user_excluded = []
    if exclude:
        user_excluded = [pkg.strip().lower() for pkg in exclude.split(",")]
    excluded_packages = list(set(user_excluded) | BASE_IMAGE_PACKAGES)

    spec = load_ignore_patterns(project_dir)
    files = get_file_tree(project_dir, spec)

    # Validate Python version unconditionally — even projects with no dependencies
    # must build on a supported Python to avoid runtime ABI mismatches.
    python_version = f"{sys.version_info.major}.{sys.version_info.minor}"
    try:
        validate_python_version(python_version)
    except ValueError:
        console.print(
            f"\n[red]Python {python_version} is not supported for Flash deployment.[/red]"
        )
        console.print(
            f"[yellow]Supported versions: {', '.join(SUPPORTED_PYTHON_VERSIONS)}[/yellow]"
        )
        console.print(
            "[yellow]Please switch your local Python interpreter to a supported "
            "version, or build inside a virtual environment that uses one.[/yellow]"
        )
        raise typer.Exit(1)

    try:
        copy_project_files(files, project_dir, build_dir)
        try:
            # Static analysis: discover @remote functions and build the manifest
            scanner = RemoteDecoratorScanner(build_dir)
            remote_functions = scanner.discover_remote_functions()
            manifest_builder = ManifestBuilder(
                app_name,
                remote_functions,
                scanner,
                build_dir=build_dir,
                python_version=python_version,
            )
            manifest = manifest_builder.build()
            manifest_path = build_dir / "flash_manifest.json"
            manifest_path.write_text(json.dumps(manifest, indent=2))
            # Generate load-balancer (LB) and queue-based (QB) handlers
            lb_generator = LBHandlerGenerator(manifest, build_dir)
            lb_generator.generate_handlers()
            qb_generator = HandlerGenerator(manifest, build_dir)
            qb_generator.generate_handlers()
            # Keep a copy of the manifest under .flash/ for deployment tooling
            flash_dir = project_dir / ".flash"
            deployment_manifest_path = flash_dir / "flash_manifest.json"
            shutil.copy2(manifest_path, deployment_manifest_path)
        except (ImportError, SyntaxError) as e:
            console.print(f"[red]Error:[/red] Code analysis failed: {e}")
            logger.exception("Code analysis failed")
            raise typer.Exit(1)
        except ValueError as e:
            console.print(f"[red]Error:[/red] {e}")
            logger.exception("Handler generation validation failed")
            raise typer.Exit(1)
        except Exception as e:
            # Non-fatal: the build continues without generated handlers
            logger.exception("Handler generation failed")
            console.print(f"[yellow]Warning:[/yellow] Handler generation failed: {e}")
    except typer.Exit:
        # Remove the partial build before propagating the failure
        if build_dir.exists():
            shutil.rmtree(build_dir)
        raise
    except Exception as e:
        if build_dir.exists():
            shutil.rmtree(build_dir)
        console.print(f"[red]Error:[/red] Build failed: {e}")
        logger.exception("Build failed")
        raise typer.Exit(1)

    # Resolve target Python version from manifest for pip wheel selection
    target_python_version = None
    manifest_json_path = build_dir / "flash_manifest.json"
    if manifest_json_path.exists():
        target_python_version = _resolve_pip_python_version(
            json.loads(manifest_json_path.read_text())
        )

    # install dependencies
    requirements = collect_requirements(project_dir, build_dir)

    # filter out excluded packages (auto + user-specified)
    if excluded_packages:
        auto_matched = set()
        user_matched = set()
        filtered_requirements = []
        for req in requirements:
            if should_exclude_package(req, excluded_packages):
                pkg_name = extract_package_name(req)
                if pkg_name in BASE_IMAGE_PACKAGES:
                    auto_matched.add(pkg_name)
                if pkg_name in user_excluded:
                    user_matched.add(pkg_name)
            else:
                filtered_requirements.append(req)
        requirements = filtered_requirements
        if auto_matched:
            console.print(
                f"[dim]Auto-excluded base image packages: "
                f"{', '.join(sorted(auto_matched))}[/dim]"
            )
        # Only warn about unmatched user-specified packages (not auto-excludes)
        user_unmatched = set(user_excluded) - user_matched - BASE_IMAGE_PACKAGES
        if user_unmatched:
            console.print(
                f"[yellow]Warning:[/yellow] No packages matched exclusions: "
                f"{', '.join(sorted(user_unmatched))}"
            )

    if requirements:
        with console.status(f"Installing {len(requirements)} packages..."):
            success = install_dependencies(
                build_dir,
                requirements,
                no_deps,
                target_python_version=target_python_version,
            )
        if not success:
            console.print("[red]Error:[/red] Failed to install dependencies")
            raise typer.Exit(1)

    # Always bundle the installed runpod_flash
    flash_pkg = _find_runpod_flash(project_dir)
    if not flash_pkg:
        console.print(
            "[red]Error:[/red] Could not find runpod_flash.\n"
            " Ensure runpod-flash is installed: pip install runpod-flash"
        )
        raise typer.Exit(1)
    _bundle_runpod_flash(build_dir, flash_pkg)
    _remove_runpod_flash_from_requirements(build_dir)

    # Generate _flash_resource_config.py for @remote local-vs-stub dispatch.
    # Must happen AFTER _bundle_runpod_flash which replaces build_dir/runpod_flash/.
    manifest_json_path = build_dir / "flash_manifest.json"
    if manifest_json_path.exists():
        manifest_data = json.loads(manifest_json_path.read_text())
        generate_all_resource_configs(manifest_data, build_dir)

    # clean up and create archive
    cleanup_python_bytecode(build_dir)
    archive_name = output_name or "artifact.tar.gz"
    archive_path = project_dir / ".flash" / archive_name
    with console.status("Creating archive..."):
        create_tarball(
            build_dir, archive_path, app_name, excluded_packages=excluded_packages
        )
    size_mb = archive_path.stat().st_size / (1024 * 1024)

    # fail build if archive exceeds size limit
    if size_mb > MAX_TARBALL_SIZE_MB:
        console.print()
        console.print(
            f"[red]Error:[/red] Archive exceeds RunPod limit "
            f"({size_mb:.1f} MB / {MAX_TARBALL_SIZE_MB} MB)"
        )
        console.print(
            " Torch packages are auto-excluded. Use --exclude for other large packages: "
            "[dim]flash deploy --exclude transformers,scipy[/dim]"
        )
        # Delete both the oversized archive and the build directory
        if archive_path.exists():
            archive_path.unlink()
        if build_dir.exists():
            shutil.rmtree(build_dir)
        raise typer.Exit(1)

    # Success summary
    _display_build_summary(
        archive_path, app_name, len(files), len(requirements), size_mb, verbose=verbose
    )
    return archive_path
def build_command(
    no_deps: bool = typer.Option(
        False, "--no-deps", help="Skip transitive dependencies during pip install"
    ),
    output_name: str | None = typer.Option(
        None, "--output", "-o", help="Custom archive name (default: artifact.tar.gz)"
    ),
    exclude: str | None = typer.Option(
        None,
        "--exclude",
        help="Comma-separated additional packages to exclude (torch packages are auto-excluded)",
    ),
):
    """
    Build Flash application for debugging (build only, no deploy).

    Creates the build artifact and keeps the .build directory for inspection.
    For build + deploy, use 'flash deploy' instead.

    Examples:
        flash build # Build with all dependencies
        flash build --no-deps # Skip transitive dependencies
        flash build -o my-app.tar.gz # Custom archive name
        flash build --exclude transformers # Exclude additional large packages
    """
    try:
        target_dir, name = discover_flash_project()
        run_build(
            project_dir=target_dir,
            app_name=name,
            no_deps=no_deps,
            output_name=output_name,
            exclude=exclude,
            verbose=True,
        )
    except KeyboardInterrupt:
        console.print("\n[yellow]Build cancelled by user[/yellow]")
        raise typer.Exit(1)
    except typer.Exit:
        raise
    except Exception as err:
        # Surface the full traceback — this is the debugging entry point
        console.print(f"\n[red]Build failed:[/red] {err}")
        import traceback

        console.print(traceback.format_exc())
        raise typer.Exit(1)
def discover_flash_project() -> tuple[Path, str]:
    """
    Discover Flash project directory and app name.

    The current working directory is the project; its basename is the app name.

    Returns:
        Tuple of (project_dir, app_name)

    Raises:
        typer.Exit: If not in a Flash project directory
    """
    cwd = Path.cwd()
    return cwd, cwd.name
def validate_project_structure(project_dir: Path) -> bool:
    """
    Validate that directory is a Flash project.

    Any directory containing at least one Python file counts; the
    RemoteDecoratorScanner later validates that @remote functions exist.

    Args:
        project_dir: Directory to validate

    Returns:
        True if valid Flash project
    """
    if any(project_dir.rglob("*.py")):
        return True
    console.print(f"[red]Error:[/red] No Python files found in {project_dir}")
    return False
def create_build_directory(project_dir: Path, app_name: str) -> Path:
    """
    Create a fresh .flash/.build/ directory inside the project.

    Args:
        project_dir: Flash project directory
        app_name: Application name (used for archive naming, not directory structure)

    Returns:
        Path to build directory
    """
    workspace = project_dir / ".flash"
    workspace.mkdir(exist_ok=True)

    target = workspace / ".build"
    # Wipe any previous build so the new one starts from a clean slate
    if target.exists():
        shutil.rmtree(target)
    target.mkdir(parents=True, exist_ok=True)
    return target
def copy_project_files(files: list[Path], source_dir: Path, dest_dir: Path) -> None:
    """
    Mirror the selected project files into the build directory.

    Args:
        files: List of files to copy
        source_dir: Source directory
        dest_dir: Destination directory
    """
    for src in files:
        # Preserve each file's position relative to the project root
        target = dest_dir / src.relative_to(source_dir)
        # Ensure intermediate directories exist before copying
        target.parent.mkdir(parents=True, exist_ok=True)
        shutil.copy2(src, target)
def cleanup_python_bytecode(build_dir: Path) -> None:
    """
    Strip Python bytecode artifacts from the build directory.

    Bytecode is generated while the build imports modules for validation; it
    is platform-specific and regenerated on the deployment platform, so
    shipping it only bloats the archive.

    Args:
        build_dir: Build directory to clean up
    """
    # Drop every __pycache__ directory wholesale
    for cache_dir in build_dir.rglob("__pycache__"):
        if cache_dir.is_dir():
            shutil.rmtree(cache_dir)

    # Then remove any loose compiled files
    for pattern in ("*.pyc", "*.pyo", "*.pyd"):
        for stray in build_dir.rglob(pattern):
            if stray.is_file():
                stray.unlink()
def collect_requirements(project_dir: Path, build_dir: Path) -> list[str]:
    """
    Gather requirements from requirements.txt and @remote decorators.

    Args:
        project_dir: Flash project directory
        build_dir: Build directory to scan for packaged Python files

    Returns:
        De-duplicated list of requirement strings, original order preserved
    """
    collected: list[str] = []

    # requirements.txt entries, skipping blanks and comment lines
    req_file = project_dir / "requirements.txt"
    if req_file.exists():
        try:
            for raw in req_file.read_text(encoding="utf-8").splitlines():
                entry = raw.strip()
                if entry and not entry.startswith("#"):
                    collected.append(entry)
        except Exception as e:
            console.print(
                f"[yellow]Warning:[/yellow] Failed to read requirements.txt: {e}"
            )

    # Dependencies declared on @remote / Endpoint in the packaged sources
    collected.extend(extract_remote_dependencies(build_dir))

    # dict.fromkeys de-duplicates while keeping first-seen order
    return list(dict.fromkeys(collected))
def extract_package_name(requirement: str) -> str:
    """
    Extract the package name from a requirement specification.

    Handles version specifiers (including `~=`), extras, direct-URL (`@`)
    references, environment markers, and surrounding whitespace.

    Args:
        requirement: Requirement string (e.g., "torch>=2.0.0", "numpy[extra]")

    Returns:
        Package name in lowercase (e.g., "torch", "numpy")

    Examples:
        >>> extract_package_name("torch>=2.0.0")
        'torch'
        >>> extract_package_name("numpy[extra]")
        'numpy'
        >>> extract_package_name("torch~=2.1")
        'torch'
    """
    # Split on the first character that can terminate a name in pip syntax:
    # whitespace, comparison operators, compatible-release tilde, extras
    # bracket, marker semicolon, or direct-URL '@'.
    # BUG FIX: the previous pattern [<>=!\[;] lacked `~`, whitespace, and `@`,
    # so "torch~=2.1" yielded "torch~" and never matched exclusions, and
    # "pkg @ https://..." yielded the whole URL spec.
    package_name = re.split(r"[\s<>=!~\[;@]", requirement)[0].strip().lower()
    return package_name
def should_exclude_package(requirement: str, exclusions: list[str]) -> bool:
    """
    Decide whether a requirement matches the exclusion list.

    Matching is by exact package name (lowercased), never by prefix, so
    "torch-vision" is not excluded by "torch".

    Args:
        requirement: Requirement string (e.g., "torch>=2.0.0")
        exclusions: List of package names to exclude (lowercase)

    Returns:
        True if package should be excluded, False otherwise

    Examples:
        >>> should_exclude_package("torch>=2.0.0", ["torch", "numpy"])
        True
        >>> should_exclude_package("torch-vision==0.15.0", ["torch"])
        False
    """
    return extract_package_name(requirement) in exclusions
def _extract_deps_from_call(call_node: ast.Call) -> list[str]:
"""Extract the dependencies=[...] keyword value from an ast.Call node."""
deps = []
for keyword in call_node.keywords:
if keyword.arg == "dependencies" and isinstance(keyword.value, ast.List):
for elt in keyword.value.elts:
if isinstance(elt, ast.Constant) and isinstance(elt.value, str):
deps.append(elt.value)
return deps
def extract_remote_dependencies(source_dir: Path) -> list[str]:
    """Extract dependencies from @remote and Endpoint(...) in Python source files.

    Recognized patterns:
    - @remote(dependencies=[...]) on functions/classes
    - @Endpoint(dependencies=[...]) on functions/classes (QB decorator)
    - ep = Endpoint(dependencies=[...]) variable assignments (LB pattern)

    Args:
        source_dir: Path to directory containing Python source files

    Returns:
        List of dependency strings
    """

    def _callable_name(func: ast.expr):
        # Handles both bare names (remote) and attribute access (flash.remote)
        if isinstance(func, ast.Name):
            return func.id
        if isinstance(func, ast.Attribute):
            return func.attr
        return None

    found: list[str] = []
    for py_file in source_dir.glob("**/*.py"):
        if py_file.name == "__init__.py":
            continue
        try:
            module = ast.parse(py_file.read_text(encoding="utf-8"))
            for node in ast.walk(module):
                # Decorator form on function/class definitions
                if isinstance(
                    node, (ast.ClassDef, ast.FunctionDef, ast.AsyncFunctionDef)
                ):
                    for dec in node.decorator_list:
                        if isinstance(dec, ast.Call) and _callable_name(
                            dec.func
                        ) in ("remote", "Endpoint"):
                            found.extend(_extract_deps_from_call(dec))
                # Assignment form: ep = Endpoint(dependencies=[...])
                if isinstance(node, ast.Assign) and isinstance(node.value, ast.Call):
                    if _callable_name(node.value.func) == "Endpoint":
                        found.extend(_extract_deps_from_call(node.value))
        except Exception as e:
            console.print(
                f"[yellow]Warning:[/yellow] Failed to parse {py_file.name}: {e}"
            )
    return found
def install_dependencies(
    build_dir: Path,
    requirements: list[str],
    no_deps: bool,
    target_python_version: str | None = None,
) -> bool:
    """
    Install dependencies to build directory using pip or uv pip.

    Installs packages for Linux x86_64 platform to ensure compatibility with
    RunPod serverless, regardless of the build platform (macOS, Windows, Linux).

    Auto-installation behavior:
    - If standard pip is not available, it will be automatically installed via ensurepip
    - This modifies the current virtual environment (persists after build completes)
    - Standard pip is strongly preferred for cross-platform builds due to better
      manylinux compatibility (uv pip has known issues with manylinux_2_27+)

    Args:
        build_dir: Build directory (pip --target)
        requirements: List of requirements to install
        no_deps: If True, skip transitive dependencies
        target_python_version: Python version for wheel ABI selection (e.g. "3.12").
            When set, pip downloads wheels for this version instead of the build
            machine's Python. Used to match the container runtime Python.

    Returns:
        True if successful
    """
    # Nothing to do — vacuous success
    if not requirements:
        return True

    # Prefer standard pip over uv pip for cross-platform builds
    # Standard pip's --platform flag works correctly with manylinux tags
    # uv pip has known issues with manylinux_2_27/2_28 detection (uv issue #5106)
    pip_cmd = [sys.executable, "-m", PIP_MODULE]
    pip_available = False
    try:
        result = subprocess.run(
            pip_cmd + ["--version"],
            capture_output=True,
            text=True,
            timeout=VERSION_CHECK_TIMEOUT_SECONDS,
        )
        if result.returncode == 0:
            pip_available = True
    except (subprocess.SubprocessError, FileNotFoundError):
        pass

    # If pip not available, install it using ensurepip
    # This modifies the current virtual environment
    if not pip_available:
        console.print(
            "[yellow]Standard pip not found. Installing pip for reliable cross-platform builds...[/yellow]"
        )
        try:
            result = subprocess.run(
                [sys.executable, "-m", "ensurepip", "--upgrade"],
                capture_output=True,
                text=True,
                timeout=ENSUREPIP_TIMEOUT_SECONDS,
            )
            if result.returncode == 0:
                # Verify pip is now available
                result = subprocess.run(
                    pip_cmd + ["--version"],
                    capture_output=True,
                    text=True,
                    timeout=VERSION_CHECK_TIMEOUT_SECONDS,
                )
                if result.returncode == 0:
                    pip_available = True
                    console.print(
                        "[green]✓[/green] Standard pip installed successfully"
                    )
        except (subprocess.SubprocessError, FileNotFoundError) as e:
            console.print(f"[yellow]Warning:[/yellow] Failed to install pip: {e}")

    # If pip still not available, try uv pip (less reliable for cross-platform)
    if not pip_available:
        try:
            result = subprocess.run(
                [UV_COMMAND, PIP_MODULE, "--version"],
                capture_output=True,
                text=True,
                timeout=VERSION_CHECK_TIMEOUT_SECONDS,
            )
            if result.returncode == 0:
                pip_cmd = [UV_COMMAND, PIP_MODULE]
                pip_available = True
                console.print(
                    f"[yellow]Warning:[/yellow] Using '{UV_COMMAND} {PIP_MODULE}' which has known issues "
                    f"with newer manylinux tags (manylinux_2_27+)"
                )
                console.print(
                    "[yellow]This may fail for Python 3.13+ with newer packages (e.g., numpy 2.4+)[/yellow]"
                )
        except (subprocess.SubprocessError, FileNotFoundError):
            pass

    # If neither available, error out
    if not pip_available:
        console.print(
            f"[red]Error:[/red] Neither {PIP_MODULE} nor {UV_COMMAND} {PIP_MODULE} found"
        )
        console.print(f"\n[yellow]Install {PIP_MODULE} with one of:[/yellow]")
        console.print(" • python -m ensurepip --upgrade")
        console.print(f" • {UV_COMMAND} {PIP_MODULE} install {PIP_MODULE}")
        return False

    # Determine if using uv pip or standard pip (different flag formats)
    is_uv_pip = pip_cmd[0] == UV_COMMAND

    # Use container Python version for wheel selection, not build machine's
    local_version = f"{sys.version_info.major}.{sys.version_info.minor}"
    pip_python_version = target_python_version or local_version
    if target_python_version and target_python_version != local_version:
        console.print(
            f"[dim]Downloading wheels for Python {target_python_version} (container runtime)[/dim]"
        )

    # Build pip command with platform-specific flags for RunPod serverless
    cmd = pip_cmd + [
        "install",
        "--target",
        str(build_dir),
        "--python-version",
        pip_python_version,
        "--upgrade",
    ]

    # Add platform-specific flags based on pip variant
    if is_uv_pip:
        # uv pip uses --python-platform with simpler values
        # Note: uv has known issues with manylinux_2_27+ detection (issue #5106)
        cmd.extend(
            [
                "--python-platform",
                "x86_64-unknown-linux-gnu",
                "--no-build",  # Don't build from source, use binary wheels only
            ]
        )
    else:
        # Standard pip uses --platform with manylinux tags
        # Specify multiple platforms for broader compatibility
        for platform in RUNPOD_PLATFORMS:
            cmd.extend(["--platform", platform])
        cmd.extend(
            [
                "--implementation",
                RUNPOD_PYTHON_IMPL,
                "--only-binary=:all:",
            ]
        )

    if no_deps:
        cmd.append("--no-deps")

    cmd.extend(requirements)

    # Log platform targeting info
    if is_uv_pip:
        platform_str = "x86_64-unknown-linux-gnu"
    else:
        platform_str = f"{len(RUNPOD_PLATFORMS)} manylinux variants"
    logger.debug(f"Installing for: {platform_str}, Python {pip_python_version}")

    try:
        result = subprocess.run(
            cmd,
            capture_output=True,
            text=True,
            timeout=PIP_INSTALL_TIMEOUT_SECONDS,
        )
        if result.returncode != 0:
            console.print(f"[red]pip install failed:[/red]\n{result.stderr}")
            return False
        return True
    except subprocess.TimeoutExpired:
        console.print(
            f"[red]pip install timed out ({PIP_INSTALL_TIMEOUT_SECONDS} seconds)[/red]"
        )
        return False
    except Exception as e:
        console.print(f"[red]pip install error:[/red] {e}")
        return False
def create_tarball(
build_dir: Path,
output_path: Path,
app_name: str,
excluded_packages: list[str] | None = None,
) -> None:
"""
Create gzipped tarball of build directory, excluding base image packages.
Filters at tarball creation time rather than constraining pip resolution,
because pip constraints (`<0.0.0a0`) break resolution for any package that
transitively depends on excluded packages (ResolutionImpossible).
Args:
build_dir: Build directory to archive
output_path: Output archive path
app_name: Application name (unused, for compatibility)
excluded_packages: Package names to exclude from the archive
"""
# Build set of normalized names for fast lookup
excluded_normalized: set[str] = set()
if excluded_packages:
excluded_normalized = {_normalize_package_name(p) for p in excluded_packages}