commit 393988cebd

@@ -0,0 +1,9 @@
from swarms import Agent

Agent(
    agent_name="Stock-Analysis-Agent",
    model_name="deepseek/deepseek-reasoner",
    max_loops="auto",
    interactive=True,
    streaming_on=True,
).run("What are 5 hft algorithms")
@@ -1,238 +0,0 @@
import concurrent.futures
from typing import Callable, Any, Dict, List
from swarms.utils.loguru_logger import initialize_logger

logger = initialize_logger(log_folder="func_calling_executor")

# def openai_tool_executor(
#     tools: List[Dict[str, Any]],
#     function_map: Dict[str, Callable],
#     verbose: bool = True,
#     return_as_string: bool = False,
#     *args,
#     **kwargs,
# ) -> Callable:
#     """
#     Creates a function that dynamically and concurrently executes multiple functions based on parameters specified
#     in a list of tool dictionaries, with extensive error handling and validation.
#
#     Args:
#         tools (List[Dict[str, Any]]): A list of dictionaries, each containing configuration for a tool, including parameters.
#         function_map (Dict[str, Callable]): A dictionary mapping function names to their corresponding callable functions.
#         verbose (bool): If True, enables verbose logging.
#         return_as_string (bool): If True, returns the results as a concatenated string.
#
#     Returns:
#         Callable: A function that, when called, executes the specified functions concurrently with the parameters given.
#
#     Examples:
#     >>> def test_function(param1: int, param2: str) -> str:
#     ...     return f"Test function called with parameters: {param1}, {param2}"
#
#     >>> tool_executor = openai_tool_executor(
#     ...     tools=[
#     ...         {
#     ...             "type": "function",
#     ...             "function": {
#     ...                 "name": "test_function",
#     ...                 "parameters": {
#     ...                     "param1": 1,
#     ...                     "param2": "example"
#     ...                 }
#     ...             }
#     ...         }
#     ...     ],
#     ...     function_map={
#     ...         "test_function": test_function
#     ...     },
#     ...     return_as_string=True
#     ... )
#     >>> results = tool_executor()
#     >>> print(results)
#     """
#
#     def tool_executor():
#         # Prepare tasks for concurrent execution
#         results = []
#         logger.info(f"Executing {len(tools)} tools concurrently.")
#         with concurrent.futures.ThreadPoolExecutor() as executor:
#             futures = []
#             for tool in tools:
#                 if tool.get("type") != "function":
#                     continue  # Skip non-function tool entries
#
#                 function_info = tool.get("function", {})
#                 func_name = function_info.get("name")
#                 logger.info(f"Executing function: {func_name}")
#
#                 # Check if the function name is mapped to an actual function
#                 if func_name not in function_map:
#                     error_message = f"Function '{func_name}' not found in function map."
#                     logger.error(error_message)
#                     results.append(error_message)
#                     continue
#
#                 # Validate parameters
#                 params = function_info.get("parameters", {})
#                 if not params:
#                     error_message = f"No parameters specified for function '{func_name}'."
#                     logger.error(error_message)
#                     results.append(error_message)
#                     continue
#
#                 # Submit the function for execution
#                 try:
#                     future = executor.submit(
#                         function_map[func_name], **params
#                     )
#                     futures.append((func_name, future))
#                 except Exception as e:
#                     error_message = f"Failed to submit the function '{func_name}' for execution: {e}"
#                     logger.error(error_message)
#                     results.append(error_message)
#
#             # Gather results from all futures
#             for func_name, future in futures:
#                 try:
#                     result = future.result()  # Collect result from future
#                     results.append(f"{func_name}: {result}")
#                 except Exception as e:
#                     error_message = f"Error during execution of function '{func_name}': {e}"
#                     logger.error(error_message)
#                     results.append(error_message)
#
#         if return_as_string:
#             return "\n".join(results)
#
#         logger.info(f"Results: {results}")
#
#         return results
#
#     return tool_executor


def openai_tool_executor(
    tools: List[Dict[str, Any]],
    function_map: Dict[str, Callable],
    verbose: bool = True,
    return_as_string: bool = False,
    *args,
    **kwargs,
) -> Callable:
    def tool_executor():
        results = []
        logger.info(f"Executing {len(tools)} tools concurrently.")
        with concurrent.futures.ThreadPoolExecutor() as executor:
            futures = []
            for tool in tools:
                if tool.get("type") != "function":
                    continue

                function_info = tool.get("function", {})
                func_name = function_info.get("name")
                logger.info(f"Executing function: {func_name}")

                if func_name not in function_map:
                    error_message = f"Function '{func_name}' not found in function map."
                    logger.error(error_message)
                    results.append(error_message)
                    continue

                params = function_info.get("parameters", {})
                if not params:
                    error_message = f"No parameters specified for function '{func_name}'."
                    logger.error(error_message)
                    results.append(error_message)
                    continue

                if (
                    "name" in params
                    and params["name"] in function_map
                ):
                    try:
                        result = function_map[params["name"]](
                            **params
                        )
                        results.append(f"{params['name']}: {result}")
                    except Exception as e:
                        error_message = f"Failed to execute the function '{params['name']}': {e}"
                        logger.error(error_message)
                        results.append(error_message)
                    continue

                try:
                    future = executor.submit(
                        function_map[func_name], **params
                    )
                    futures.append((func_name, future))
                except Exception as e:
                    error_message = f"Failed to submit the function '{func_name}' for execution: {e}"
                    logger.error(error_message)
                    results.append(error_message)

            for func_name, future in futures:
                try:
                    result = future.result()
                    results.append(f"{func_name}: {result}")
                except Exception as e:
                    error_message = f"Error during execution of function '{func_name}': {e}"
                    logger.error(error_message)
                    results.append(error_message)

        if return_as_string:
            return "\n".join(results)

        logger.info(f"Results: {results}")

        return results

    return tool_executor


# function_schema = {
#     "name": "execute",
#     "description": "Executes code on the user's machine **in the users local environment** and returns the output",
#     "parameters": {
#         "type": "object",
#         "properties": {
#             "language": {
#                 "type": "string",
#                 "description": "The programming language (required parameter to the `execute` function)",
#                 "enum": [
#                     # This will be filled dynamically with the languages OI has access to.
#                 ],
#             },
#             "code": {
#                 "type": "string",
#                 "description": "The code to execute (required)",
#             },
#         },
#         "required": ["language", "code"],
#     },
# }


# def execute(language: str, code: str):
#     """
#     Executes code on the user's machine **in the user's local environment** and returns the output
#
#     Args:
#         language (str): The programming language (required parameter to the `execute` function)
#         code (str): The code to execute (required)
#
#     Returns:
#         str: The output of the code execution
#     """
#     # This function will be implemented by the user
#     return "Code execution not implemented yet"


# # Example execution
# out = openai_tool_executor(
#     tools=[function_schema],
#     function_map={
#         "execute": execute,
#     },
#     return_as_string=True,
# )
# print(out)
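For orientation, the removed executor was driven exactly the way its own docstring example shows. Here is that example as runnable code, a minimal sketch adapted from the docstring above; the tool schema and test_function are the docstring's, not new API:

# Minimal sketch: mirrors the docstring example of the removed openai_tool_executor.
def test_function(param1: int, param2: str) -> str:
    return f"Test function called with parameters: {param1}, {param2}"

executor = openai_tool_executor(
    tools=[
        {
            "type": "function",
            "function": {
                "name": "test_function",
                "parameters": {"param1": 1, "param2": "example"},
            },
        }
    ],
    function_map={"test_function": test_function},
    return_as_string=True,
)
print(executor())  # "test_function: Test function called with parameters: 1, example"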
@@ -1,263 +0,0 @@
"""
Lazy Package Loader

This module provides utilities for lazy loading Python packages to improve startup time
and reduce memory usage by only importing packages when they are actually used.

Features:
- Type-safe lazy loading of packages
- Support for nested module imports
- Auto-completion support in IDEs
- Thread-safe implementation
- Comprehensive test coverage
"""

from types import ModuleType
from typing import (
    Optional,
    Dict,
    Any,
    Callable,
    Type,
    TypeVar,
    Union,
    cast,
)
import importlib
import functools
import threading
from importlib.util import find_spec
from swarms.utils.auto_download_check_packages import (
    auto_check_and_download_package,
)


T = TypeVar("T")
C = TypeVar("C")


class ImportError(Exception):
    """Raised when a lazy import fails."""

    pass


class LazyLoader:
    """
    A thread-safe lazy loader for Python packages that only imports them when accessed.

    Attributes:
        _module_name (str): The name of the module to be lazily loaded
        _module (Optional[ModuleType]): The cached module instance once loaded
        _lock (threading.Lock): Thread lock for safe concurrent access

    Examples:
        >>> np = LazyLoader('numpy')
        >>> # numpy is not imported yet
        >>> result = np.array([1, 2, 3])
        >>> # numpy is imported only when first used
    """

    def __init__(self, module_name: str) -> None:
        """
        Initialize the lazy loader with a module name.

        Args:
            module_name: The fully qualified name of the module to lazily load

        Raises:
            ImportError: If the module cannot be found in sys.path
        """
        self._module_name = module_name
        self._module: Optional[ModuleType] = None
        self._lock = threading.Lock()

        auto_check_and_download_package(
            module_name, package_manager="pip"
        )

        # Verify module exists without importing it
        if find_spec(module_name) is None:
            raise ImportError(
                f"Module '{module_name}' not found in sys.path"
            )

    def _load_module(self) -> ModuleType:
        """
        Thread-safe module loading.

        Returns:
            ModuleType: The loaded module

        Raises:
            ImportError: If module import fails
        """
        if self._module is None:
            with self._lock:
                # Double-check pattern
                if self._module is None:
                    try:
                        self._module = importlib.import_module(
                            self._module_name
                        )
                    except Exception as e:
                        raise ImportError(
                            f"Failed to import '{self._module_name}': {str(e)}"
                        )
        return cast(ModuleType, self._module)

    def __getattr__(self, name: str) -> Any:
        """
        Intercepts attribute access to load the module if needed.

        Args:
            name: The attribute name being accessed

        Returns:
            Any: The requested attribute from the loaded module

        Raises:
            AttributeError: If the attribute doesn't exist in the module
        """
        module = self._load_module()
        try:
            return getattr(module, name)
        except AttributeError:
            raise AttributeError(
                f"Module '{self._module_name}' has no attribute '{name}'"
            )

    def __dir__(self) -> list[str]:
        """
        Returns list of attributes for autocomplete support.

        Returns:
            List[str]: Available attributes in the module
        """
        return dir(self._load_module())

    def is_loaded(self) -> bool:
        """
        Check if the module has been loaded.

        Returns:
            bool: True if module is loaded, False otherwise
        """
        return self._module is not None


class LazyLoaderMetaclass(type):
    """Metaclass to handle lazy loading behavior"""

    def __call__(cls, *args, **kwargs):
        if hasattr(cls, "_lazy_loader"):
            return super().__call__(*args, **kwargs)
        return super().__call__(*args, **kwargs)


class LazyClassLoader:
    """
    A descriptor that creates the actual class only when accessed,
    with proper inheritance support.
    """

    def __init__(
        self, class_name: str, bases: tuple, namespace: Dict[str, Any]
    ):
        self.class_name = class_name
        self.bases = bases
        self.namespace = namespace
        self._real_class: Optional[Type] = None
        self._lock = threading.Lock()

    def _create_class(self) -> Type:
        """Creates the actual class if it hasn't been created yet."""
        if self._real_class is None:
            with self._lock:
                if self._real_class is None:
                    # Update namespace to include metaclass
                    namespace = dict(self.namespace)
                    namespace["__metaclass__"] = LazyLoaderMetaclass

                    # Create the class with metaclass
                    new_class = LazyLoaderMetaclass(
                        self.class_name, self.bases, namespace
                    )

                    # Store reference to this loader
                    new_class._lazy_loader = self
                    self._real_class = new_class

        return cast(Type, self._real_class)

    def __call__(self, *args: Any, **kwargs: Any) -> Any:
        """Creates an instance of the lazy loaded class."""
        real_class = self._create_class()
        # Use the metaclass __call__ method
        return real_class(*args, **kwargs)

    def __instancecheck__(self, instance: Any) -> bool:
        """Support for isinstance() checks"""
        real_class = self._create_class()
        return isinstance(instance, real_class)

    def __subclasscheck__(self, subclass: Type) -> bool:
        """Support for issubclass() checks"""
        real_class = self._create_class()
        return issubclass(subclass, real_class)


def lazy_import(*names: str) -> Dict[str, LazyLoader]:
    """
    Create multiple lazy loaders at once.

    Args:
        *names: Module names to create lazy loaders for

    Returns:
        Dict[str, LazyLoader]: Dictionary mapping module names to their lazy loaders

    Examples:
        >>> modules = lazy_import('numpy', 'pandas', 'matplotlib.pyplot')
        >>> np = modules['numpy']
        >>> pd = modules['pandas']
        >>> plt = modules['matplotlib.pyplot']
    """
    return {name.split(".")[-1]: LazyLoader(name) for name in names}


def lazy_import_decorator(
    target: Union[Callable[..., T], Type[C]]
) -> Union[Callable[..., T], Type[C], LazyClassLoader]:
    """
    Enhanced decorator that supports both lazy imports and lazy class loading.
    """
    if isinstance(target, type):
        # Store the original class details
        namespace = {
            name: value
            for name, value in target.__dict__.items()
            if not name.startswith("__")
            or name in ("__init__", "__new__")
        }

        # Create lazy loader
        loader = LazyClassLoader(
            target.__name__, target.__bases__, namespace
        )

        # Preserve class metadata
        loader.__module__ = target.__module__
        loader.__doc__ = target.__doc__

        # Add reference to original class
        loader._original_class = target

        return loader
    else:
        # Handle function decoration
        @functools.wraps(target)
        def wrapper(*args: Any, **kwargs: Any) -> T:
            return target(*args, **kwargs)

        return wrapper
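As a reminder of how this removed module was meant to be used, the pattern from its own docstrings looks like this (a minimal sketch; numpy and pandas are simply the examples the docstrings use):

# Minimal sketch based on the docstring examples of LazyLoader and lazy_import.
modules = lazy_import("numpy", "pandas")
np = modules["numpy"]        # nothing has been imported yet
arr = np.array([1, 2, 3])    # numpy is imported on first attribute access
print(np.is_loaded())        # True once the module has been loaded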
@@ -1,6 +1,16 @@
-from litellm import encode
+import subprocess
 
 
 def count_tokens(text: str, model: str = "gpt-4o") -> int:
     """Count the number of tokens in the given text."""
+    try:
+        from litellm import encode
+    except ImportError:
+        subprocess.run(["pip", "install", "litellm"])
+        from litellm import encode
+
     return len(encode(model=model, text=text))
+
+
+# if __name__ == "__main__":
+#     print(count_tokens("Hello, how are you?"))
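The commented-out lines at the bottom of the new version double as a usage example; run directly, it would look like this (a minimal sketch, nothing beyond what the file itself shows):

# Minimal sketch: the file's commented example, uncommented.
print(count_tokens("Hello, how are you?"))  # token count for the default model, gpt-4o

Note that the ImportError fallback shells out to pip at runtime via subprocess.run(["pip", "install", "litellm"]), so the first call can be slow in environments where litellm is not yet installed.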
@@ -0,0 +1,104 @@
from swarms.utils.auto_download_check_packages import (
    auto_check_and_download_package,
    check_and_install_package,
)


def test_check_and_install_package_pip():
    result = check_and_install_package("numpy", package_manager="pip")
    print(f"Test result for 'numpy' installation using pip: {result}")
    assert result, "Failed to install or verify 'numpy' using pip"


def test_check_and_install_package_conda():
    result = check_and_install_package(
        "numpy", package_manager="conda"
    )
    print(
        f"Test result for 'numpy' installation using conda: {result}"
    )
    assert result, "Failed to install or verify 'numpy' using conda"


def test_check_and_install_specific_version():
    result = check_and_install_package(
        "numpy", package_manager="pip", version="1.21.0"
    )
    print(
        f"Test result for specific version of 'numpy' installation using pip: {result}"
    )
    assert (
        result
    ), "Failed to install or verify specific version of 'numpy' using pip"


def test_check_and_install_with_upgrade():
    result = check_and_install_package(
        "numpy", package_manager="pip", upgrade=True
    )
    print(f"Test result for 'numpy' upgrade using pip: {result}")
    assert result, "Failed to upgrade 'numpy' using pip"


def test_auto_check_and_download_single_package():
    result = auto_check_and_download_package(
        "scipy", package_manager="pip"
    )
    print(f"Test result for 'scipy' installation using pip: {result}")
    assert result, "Failed to install or verify 'scipy' using pip"


def test_auto_check_and_download_multiple_packages():
    packages = ["scipy", "pandas"]
    result = auto_check_and_download_package(
        packages, package_manager="pip"
    )
    print(
        f"Test result for multiple packages installation using pip: {result}"
    )
    assert (
        result
    ), f"Failed to install or verify one or more packages in {packages} using pip"


def test_auto_check_and_download_multiple_packages_with_versions():
    packages = ["numpy:1.21.0", "pandas:1.3.0"]
    result = auto_check_and_download_package(
        packages, package_manager="pip"
    )
    print(
        f"Test result for multiple packages with versions installation using pip: {result}"
    )
    assert (
        result
    ), f"Failed to install or verify one or more packages in {packages} with specific versions using pip"


# Example of running tests
if __name__ == "__main__":
    try:
        test_check_and_install_package_pip()
        print("test_check_and_install_package_pip passed")

        test_check_and_install_package_conda()
        print("test_check_and_install_package_conda passed")

        test_check_and_install_specific_version()
        print("test_check_and_install_specific_version passed")

        test_check_and_install_with_upgrade()
        print("test_check_and_install_with_upgrade passed")

        test_auto_check_and_download_single_package()
        print("test_auto_check_and_download_single_package passed")

        test_auto_check_and_download_multiple_packages()
        print("test_auto_check_and_download_multiple_packages passed")

        test_auto_check_and_download_multiple_packages_with_versions()
        print(
            "test_auto_check_and_download_multiple_packages_with_versions passed"
        )

    except AssertionError as e:
        print(f"Test failed: {str(e)}")
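The helper exercised here is the same one the removed lazy loader above relied on; outside the tests, a minimal standalone call looks like this (a sketch, with numpy used purely as an example package):

# Minimal sketch: standalone use of the helper exercised by these tests.
from swarms.utils.auto_download_check_packages import auto_check_and_download_package

ok = auto_check_and_download_package("numpy", package_manager="pip")
print(ok)  # truthy when the package is present or was installed successfully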