From 12655eaf6a67c357f4c953199493c65834699534 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 10 Jul 2025 18:25:30 +0000 Subject: [PATCH 1/2] Initial plan From 6c95f176314a485b16bbcf3286885460c922ed76 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Thu, 10 Jul 2025 18:36:30 +0000 Subject: [PATCH 2/2] Replace pkg_resources with modern packaging alternatives Co-authored-by: scbedd <45376673+scbedd@users.noreply.github.com> --- common/smoketest/dependencies.py | 78 +++++++++++++++++++++++----- scripts/devops_tasks/common_tasks.py | 49 ++++++++++++++--- 2 files changed, 108 insertions(+), 19 deletions(-) diff --git a/common/smoketest/dependencies.py b/common/smoketest/dependencies.py index eabc25e77cf5..0328de2c43cd 100644 --- a/common/smoketest/dependencies.py +++ b/common/smoketest/dependencies.py @@ -1,5 +1,13 @@ import argparse -import pkg_resources + +# Modern packaging imports +try: + import importlib.metadata as importlib_metadata +except ImportError: + # Python < 3.8 fallback + import importlib_metadata + +from packaging.requirements import Requirement try: # pip < 20 @@ -11,23 +19,40 @@ from pip._internal.network.session import PipSession def combine_requirements(requirements): - name = requirements[0].project_name + name = requirements[0].name # packaging.requirements.Requirement uses 'name' instead of 'project_name' specs = [] for req in requirements: - if len(req.specs) == 0: + if len(req.specifier) == 0: # packaging.requirements.Requirement uses 'specifier' instead of 'specs' continue - specs.extend([s[0] + s[1] for s in req.specs]) + # Convert specifier to the expected format + specs.extend([str(spec) for spec in req.specifier]) return name + ",".join(specs) def get_dependencies(packages): requirements = [] for package in packages: - package_info = pkg_resources.working_set.by_key[package] - - applicable_requirements = [r for r in package_info.requires() if r.marker is None or r.marker.evaluate()] - requirements.extend(applicable_requirements) + try: + # Get the distribution for this package + package_info = importlib_metadata.distribution(package) + + # Get requirements and process them like pkg_resources did + if package_info.requires: + for req_str in package_info.requires: + # Parse the requirement string + req = Requirement(req_str) + + # Apply the same filtering as the original code: + # include requirements where marker is None or evaluates to True + if req.marker is None or req.marker.evaluate(): + requirements.append(req) + except importlib_metadata.PackageNotFoundError: + # Package not found, skip it (similar to how pkg_resources would handle missing packages) + continue + except Exception: + # Skip packages that can't be processed + continue return requirements @@ -47,7 +72,34 @@ def get_dependencies(packages): args = parser.parse_args() # Get package names from requirements.txt requirements = parse_requirements(args.requirements_file, session=PipSession()) - package_names = [item.req.name for item in requirements] + + # Handle different pip versions - extract package names + package_names = [] + for item in requirements: + if hasattr(item, 'requirement'): + # Parse the requirement string to get the name + req_str = item.requirement + if isinstance(req_str, str): + # Parse the requirement string using packaging.requirements + try: + req = Requirement(req_str) + package_names.append(req.name) + except Exception: + # If parsing fails, try to extract name directly + name = req_str.split('==')[0].split('>=')[0].split('<=')[0].split('>')[0].split('<')[0].strip() + package_names.append(name) + else: + package_names.append(req_str.name) + elif hasattr(item, 'req'): + # Older pip versions + package_names.append(item.req.name) + else: + # Try to get name from the object directly + try: + package_names.append(item.name) + except AttributeError: + # Skip items we can't parse + continue dependencies = get_dependencies(package_names) @@ -61,10 +113,12 @@ def get_dependencies(packages): # According to https://packaging.python.org/glossary/#term-requirement-specifier grouped_dependencies = {} for dep in dependencies: - if dep.key in grouped_dependencies: - grouped_dependencies[dep.key].append(dep) + # Use 'name' instead of 'key' for packaging.requirements.Requirement + dep_name = dep.name + if dep_name in grouped_dependencies: + grouped_dependencies[dep_name].append(dep) else: - grouped_dependencies[dep.key] = [dep] + grouped_dependencies[dep_name] = [dep] final_dependencies = [combine_requirements(r) for r in grouped_dependencies.values()] diff --git a/scripts/devops_tasks/common_tasks.py b/scripts/devops_tasks/common_tasks.py index f92b0aaa324d..d65ca1647608 100644 --- a/scripts/devops_tasks/common_tasks.py +++ b/scripts/devops_tasks/common_tasks.py @@ -18,8 +18,15 @@ from argparse import Namespace from typing import Iterable -# Assumes the presence of setuptools -from pkg_resources import parse_version, parse_requirements, Requirement, WorkingSet, working_set +# Modern packaging imports +try: + import importlib.metadata as importlib_metadata +except ImportError: + # Python < 3.8 fallback + import importlib_metadata + +from packaging.version import parse as parse_version +from packaging.requirements import Requirement # this assumes the presence of "packaging" from packaging.specifiers import SpecifierSet @@ -247,9 +254,37 @@ def find_tools_packages(root_path): def get_installed_packages(paths=None): """Find packages in default or given lib paths""" - # WorkingSet returns installed packages in given path - # working_set returns installed packages in default path - # if paths is set then find installed packages from given paths - ws = WorkingSet(paths) if paths else working_set - return ["{0}=={1}".format(p.project_name, p.version) for p in ws] + # Use importlib.metadata to get installed packages + if paths: + import sys + # For path-specific search, we need to create a new metadata finder + # that searches in the specified paths + packages = [] + for path in paths: + if os.path.exists(path): + # Add the path temporarily to find distributions there + original_path = sys.path[:] + try: + sys.path.insert(0, path) + # Get distributions and filter by location + for dist in importlib_metadata.distributions(): + try: + # Check if the distribution is actually from this path + dist_path = str(dist._path) if hasattr(dist, '_path') else '' + if path in dist_path: + package_name = dist.metadata['Name'] + package_version = dist.version + package_str = "{0}=={1}".format(package_name, package_version) + if package_str not in packages: + packages.append(package_str) + except Exception: + # Skip packages that can't be processed + continue + finally: + sys.path[:] = original_path + return packages + else: + # Get all distributions from default paths + distributions = importlib_metadata.distributions() + return ["{0}=={1}".format(dist.metadata['Name'], dist.version) for dist in distributions]