Initial commit.
216
.gitignore
vendored
Normal file
@@ -0,0 +1,216 @@
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[codz]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py.cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
# Pipfile.lock
|
||||
|
||||
# UV
|
||||
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# uv.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
# poetry.lock
|
||||
# poetry.toml
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
|
||||
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
|
||||
# pdm.lock
|
||||
# pdm.toml
|
||||
.pdm-python
|
||||
.pdm-build/
|
||||
|
||||
# pixi
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
|
||||
# pixi.lock
|
||||
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
|
||||
# in the .venv directory. It is recommended not to include this directory in version control.
|
||||
.pixi
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# Redis
|
||||
*.rdb
|
||||
*.aof
|
||||
*.pid
|
||||
|
||||
# RabbitMQ
|
||||
mnesia/
|
||||
rabbitmq/
|
||||
rabbitmq-data/
|
||||
|
||||
# ActiveMQ
|
||||
activemq-data/
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.envrc
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
# .idea/
|
||||
|
||||
# Abstra
|
||||
# Abstra is an AI-powered process automation framework.
|
||||
# Ignore directories containing user credentials, local state, and settings.
|
||||
# Learn more at https://abstra.io/docs
|
||||
.abstra/
|
||||
|
||||
# Visual Studio Code
|
||||
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
|
||||
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. However, if you prefer,
|
||||
# you could uncomment the following to ignore the entire vscode folder
|
||||
# .vscode/
|
||||
|
||||
# Ruff stuff:
|
||||
.ruff_cache/
|
||||
|
||||
# PyPI configuration file
|
||||
.pypirc
|
||||
|
||||
# Marimo
|
||||
marimo/_static/
|
||||
marimo/_lsp/
|
||||
__marimo__/
|
||||
|
||||
# Streamlit
|
||||
.streamlit/secrets.toml
|
||||
1
.python-version
Normal file
@@ -0,0 +1 @@
3.13
18
pyproject.toml
Normal file
@@ -0,0 +1,18 @@
[project]
name = "mpsp"
version = "0.1.0"
description = "Multiprocess shared data pool built on multiprocessing.Manager"
readme = "README.md"
requires-python = ">=3.10"
dependencies = []

[build-system]
requires = ["uv_build>=0.9.17,<0.10.0"]
build-backend = "uv_build"

[dependency-groups]
dev = [
    "cloudpickle>=3.1.2",
    "numpy>=2.2.6",
    "pytest>=9.0.2",
]
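# Note (assumption, not verified against this project's tooling): with uv, a plain
# `uv sync` typically installs the project together with the `dev` dependency group.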
0
src/mpsp/__init__.py
Normal file
374
src/mpsp/mpsp.py
Normal file
@@ -0,0 +1,374 @@
|
||||
"""
|
||||
Multiprocess Data Sharing Module
|
||||
|
||||
This module provides a robust and efficient way to share data between
|
||||
processes using Python's multiprocessing.Manager. It implements a singleton
|
||||
pattern to ensure consistent data access across different modules and processes.
|
||||
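
Usage sketch (illustrative only; cross-process sharing relies on the child
inheriting the already-started manager, e.g. the "fork" start method that the
test suite configures):

    import multiprocessing
    from mpsp.mpsp import MultiProcessingSharedPool

    def child():
        print(MultiProcessingSharedPool().get("greeting"))

    if __name__ == "__main__":
        MultiProcessingSharedPool().put("greeting", "hello from the parent")
        p = multiprocessing.Process(target=child)
        p.start()
        p.join()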
"""
|
||||
|
||||
import multiprocessing
|
||||
import multiprocessing.managers
|
||||
import atexit
|
||||
import threading
|
||||
import os
|
||||
import logging
|
||||
import pickle
|
||||
from typing import Any, Optional, List, Dict
|
||||
|
||||
# Try to import cloudpickle for advanced serialization (e.g., lambdas, local functions)
|
||||
try:
|
||||
import cloudpickle
|
||||
except ImportError:
|
||||
cloudpickle = None
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class _CloudPickleWrapper:
|
||||
"""
|
||||
A wrapper for data serialized with cloudpickle.
|
||||
|
||||
This is used to distinguish between normal data and data that requires
|
||||
cloudpickle for deserialization.
|
||||
"""
|
||||
|
||||
def __init__(self, blob: bytes):
|
||||
self.blob = blob
|
||||
|
||||
|
||||
class MultiProcessingSharedPool:
|
||||
"""
|
||||
A singleton class for sharing data between processes.
|
||||
|
||||
This class manages a multiprocessing.Manager and its shared dictionary.
|
||||
It provides a dictionary-like interface for data sharing across processes.
|
||||
It automatically handles complex objects like lambdas if cloudpickle is installed.
|
||||
|
||||
Attributes
|
||||
----------
|
||||
_instance : MultiProcessingSharedPool
|
||||
The singleton instance.
|
||||
_lock : threading.Lock
|
||||
Lock for thread-safe singleton instantiation.
|
||||
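
Examples
--------
Illustrative sketch of the dict-style interface (a lambda is only shareable
when cloudpickle is installed):

>>> pool = MultiProcessingSharedPool()
>>> pool["answer"] = 42
>>> pool.get("answer")
42
>>> "answer" in pool
True
>>> ok = pool.put("double", lambda x: x * 2)  # succeeds only with cloudpickle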
"""
|
||||
|
||||
_instance: Optional["MultiProcessingSharedPool"] = None
|
||||
_lock = threading.Lock()
|
||||
|
||||
def __new__(cls) -> "MultiProcessingSharedPool":
|
||||
"""
|
||||
Ensure singleton instance creation.
|
||||
|
||||
Returns
|
||||
-------
|
||||
MultiProcessingSharedPool
|
||||
The singleton instance.
|
||||
"""
|
||||
if cls._instance is None:
|
||||
with cls._lock:
|
||||
if cls._instance is None:
|
||||
cls._instance = super(MultiProcessingSharedPool, cls).__new__(cls)
|
||||
cls._instance._initialized = False
|
||||
return cls._instance
|
||||
|
||||
def __init__(self):
|
||||
"""
|
||||
Initialize the MultiProcessingSharedPool instance.
|
||||
|
||||
Uses a flag to ensure initialization only happens once.
|
||||
"""
|
||||
if getattr(self, "_initialized", False):
|
||||
return
|
||||
|
||||
self._manager: Optional[multiprocessing.managers.SyncManager] = None
|
||||
self._shared_dict: Optional[Dict[str, Any]] = None
|
||||
self._owner_pid = os.getpid()
|
||||
self._init_lock = threading.Lock()
|
||||
self._initialized = True
|
||||
|
||||
# Register cleanup to run at exit
|
||||
atexit.register(self._cleanup)
|
||||
|
||||
@classmethod
|
||||
def get_instance(cls) -> "MultiProcessingSharedPool":
|
||||
"""
|
||||
Get the singleton instance of MultiProcessingSharedPool.
|
||||
|
||||
Returns
|
||||
-------
|
||||
MultiProcessingSharedPool
|
||||
The singleton instance.
|
||||
"""
|
||||
return cls()
|
||||
|
||||
def _ensure_initialized(self):
|
||||
"""
|
||||
Lazy initialization of the multiprocessing Manager and shared dictionary.
|
||||
|
||||
Raises
|
||||
------
|
||||
RuntimeError
|
||||
If the multiprocessing Manager fails to start.
|
||||
"""
|
||||
if self._shared_dict is None:
|
||||
with self._init_lock:
|
||||
if self._shared_dict is None:
|
||||
try:
|
||||
# Use the default context for manager
|
||||
self._manager = multiprocessing.Manager()
|
||||
self._shared_dict = self._manager.dict()
|
||||
logger.debug(
|
||||
f"MultiProcessingSharedPool initialized in process {os.getpid()}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to initialize multiprocessing.Manager: {e}"
|
||||
)
|
||||
raise RuntimeError(f"IPC Initialization failed: {e}")
|
||||
|
||||
def _cleanup(self):
|
||||
"""
|
||||
Clean up resources. Only the owner process can shutdown the manager.
|
||||
"""
|
||||
if os.getpid() == self._owner_pid and self._manager is not None:
|
||||
try:
|
||||
self._manager.shutdown()
|
||||
logger.debug(
|
||||
f"MultiProcessingSharedPool manager shutdown in process {os.getpid()}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.debug(f"Error during manager shutdown: {e}")
|
||||
finally:
|
||||
self._manager = None
|
||||
self._shared_dict = None
|
||||
|
||||
def put(self, label: str, data: Any) -> bool:
|
||||
"""
|
||||
Store data with a label.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
label : str
|
||||
The key for the data.
|
||||
data : Any
|
||||
The data to store.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
True if the data was stored, False otherwise.
|
||||
"""
|
||||
if not isinstance(label, str):
|
||||
raise TypeError("Label must be a string")
|
||||
|
||||
self._ensure_initialized()
|
||||
try:
|
||||
# Try standard storage first
|
||||
self._shared_dict[label] = data
|
||||
return True
|
||||
except (AttributeError, TypeError, pickle.PicklingError) as e:
|
||||
# If standard pickle fails, try cloudpickle if available
|
||||
if cloudpickle:
|
||||
try:
|
||||
logger.debug(
|
||||
f"Standard pickle failed for '{label}', trying cloudpickle."
|
||||
)
|
||||
blob = cloudpickle.dumps(data)
|
||||
self._shared_dict[label] = _CloudPickleWrapper(blob)
|
||||
return True
|
||||
except Exception as ce:
|
||||
logger.error(f"Cloudpickle also failed for '{label}': {ce}")
|
||||
return False
|
||||
else:
|
||||
logger.error(
|
||||
f"Failed to put data for '{label}': {e}. "
|
||||
"This often happens with lambdas or local functions. "
|
||||
"Please install 'cloudpickle' to support these objects."
|
||||
)
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"Unexpected error putting data for '{label}': {e}")
|
||||
return False
|
||||
|
||||
def get(self, label: str, default: Any = None) -> Any:
|
||||
"""
|
||||
Retrieve data by label.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
label : str
|
||||
The key to look up.
|
||||
default : Any, optional
|
||||
Value to return if label is not found.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Any
|
||||
The stored data or default.
|
||||
"""
|
||||
if not isinstance(label, str):
|
||||
raise TypeError("Label must be a string")
|
||||
|
||||
self._ensure_initialized()
|
||||
try:
|
||||
val = self._shared_dict.get(label, default)
|
||||
|
||||
# Check if it's a cloudpickle-wrapped object
|
||||
if isinstance(val, _CloudPickleWrapper):
|
||||
if cloudpickle:
|
||||
return cloudpickle.loads(val.blob)
|
||||
else:
|
||||
logger.error(
|
||||
f"Data for '{label}' requires 'cloudpickle' to be deserialized. "
|
||||
"Please install it."
|
||||
)
|
||||
return default
|
||||
return val
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get data for '{label}': {e}")
|
||||
return default
|
||||
|
||||
def exists(self, label: str) -> bool:
|
||||
"""
|
||||
Check if label exists.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
label : str
|
||||
The key to check.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
True if the label exists.
|
||||
"""
|
||||
self._ensure_initialized()
|
||||
return label in self._shared_dict
|
||||
|
||||
def remove(self, label: str) -> bool:
|
||||
"""
|
||||
Remove a label and its data.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
label : str
|
||||
The key to remove.
|
||||
|
||||
Returns
|
||||
-------
|
||||
bool
|
||||
True if removed, False if not found.
|
||||
"""
|
||||
self._ensure_initialized()
|
||||
try:
|
||||
if label in self._shared_dict:
|
||||
del self._shared_dict[label]
|
||||
return True
|
||||
return False
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to remove '{label}': {e}")
|
||||
return False
|
||||
|
||||
def pop(self, label: str, default: Any = None) -> Any:
|
||||
"""
|
||||
Remove and return the data associated with the label.
|
||||
|
||||
Parameters
|
||||
----------
|
||||
label : str
|
||||
The key to pop.
|
||||
default : Any, optional
|
||||
Value to return if label is not found.
|
||||
|
||||
Returns
|
||||
-------
|
||||
Any
|
||||
The popped data or default.
|
||||
"""
|
||||
if not isinstance(label, str):
|
||||
raise TypeError("Label must be a string")
|
||||
|
||||
self._ensure_initialized()
|
||||
try:
|
||||
val = self._shared_dict.pop(label, default)
|
||||
if isinstance(val, _CloudPickleWrapper):
|
||||
if cloudpickle:
|
||||
return cloudpickle.loads(val.blob)
|
||||
else:
|
||||
logger.error(f"Popped data for '{label}' requires 'cloudpickle'.")
|
||||
return default
|
||||
return val
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to pop data for '{label}': {e}")
|
||||
return default
|
||||
|
||||
def size(self) -> int:
|
||||
"""
|
||||
Get the number of items stored.
|
||||
|
||||
Returns
|
||||
-------
|
||||
int
|
||||
The number of items.
|
||||
"""
|
||||
self._ensure_initialized()
|
||||
try:
|
||||
return len(self._shared_dict)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get size: {e}")
|
||||
return 0
|
||||
|
||||
def clear(self):
|
||||
"""Clear all shared data."""
|
||||
self._ensure_initialized()
|
||||
try:
|
||||
self._shared_dict.clear()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to clear data: {e}")
|
||||
|
||||
def keys(self) -> List[str]:
|
||||
"""
|
||||
Get all labels.
|
||||
|
||||
Returns
|
||||
-------
|
||||
List[str]
|
||||
List of keys.
|
||||
"""
|
||||
self._ensure_initialized()
|
||||
try:
|
||||
return list(self._shared_dict.keys())
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get keys: {e}")
|
||||
return []
|
||||
|
||||
def __getitem__(self, key: str) -> Any:
|
||||
"""Dictionary-like access."""
|
||||
val = self.get(key)
|
||||
if val is None and not self.exists(key):
|
||||
raise KeyError(key)
|
||||
return val
|
||||
|
||||
def __setitem__(self, key: str, value: Any):
|
||||
"""Dictionary-like assignment."""
|
||||
self.put(key, value)
|
||||
|
||||
def __delitem__(self, key: str):
|
||||
"""Dictionary-like deletion."""
|
||||
if not self.remove(key):
|
||||
raise KeyError(key)
|
||||
|
||||
def __contains__(self, key: str) -> bool:
|
||||
"""Support for 'in' operator."""
|
||||
return self.exists(key)
|
||||
|
||||
def __enter__(self):
|
||||
"""Context manager entry."""
|
||||
self._ensure_initialized()
|
||||
return self
|
||||
|
||||
def __exit__(self, exc_type, exc_val, exc_tb):
|
||||
"""Context manager exit."""
|
||||
pass
|
||||
0
test/__init__.py
Normal file
46
test/conftest.py
Normal file
@@ -0,0 +1,46 @@
"""
Pytest configuration file.
"""

import pytest
import multiprocessing


def pytest_configure(config):
    """Configure pytest."""
    # Set the multiprocessing start method (fork is faster on Linux; spawn is
    # more stable on Windows/macOS).
    try:
        multiprocessing.set_start_method("fork", force=True)
    except RuntimeError:
        # The start method was already set; ignore the error.
        pass


@pytest.fixture(scope="function", autouse=True)
def reset_shared_pool():
    """
    Clear the shared pool before each test function runs.

    This autouse fixture ensures every test starts from a clean environment.
    """
    from mpsp.mpsp import MultiProcessingSharedPool

    pool = MultiProcessingSharedPool()
    pool.clear()

    yield

    # Clean up after the test as well.
    pool.clear()


@pytest.fixture(scope="session")
def shared_pool():
    """
    Fixture providing the shared pool instance.

    The same instance is reused for the whole test session (singleton pattern).
    """
    from mpsp.mpsp import MultiProcessingSharedPool

    return MultiProcessingSharedPool()
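# Illustrative (hypothetical, not part of the committed suite) use of the
# session-scoped fixture above:
#
#     def test_roundtrip(shared_pool):
#         shared_pool.put("k", 1)
#         assert shared_pool.get("k") == 1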
324
test/test_basic.py
Normal file
@@ -0,0 +1,324 @@
|
||||
"""
|
||||
Basic functionality tests - verify core API correctness.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from mpsp.mpsp import MultiProcessingSharedPool
|
||||
|
||||
|
||||
class TestBasicOperations:
|
||||
"""测试基本操作"""
|
||||
|
||||
def test_singleton_pattern(self):
|
||||
"""测试单例模式 - 多次获取应为同一实例"""
|
||||
pool1 = MultiProcessingSharedPool()
|
||||
pool2 = MultiProcessingSharedPool.get_instance()
|
||||
pool3 = MultiProcessingSharedPool()
|
||||
|
||||
assert pool1 is pool2
|
||||
assert pool2 is pool3
|
||||
assert pool1 is pool3
|
||||
|
||||
def test_put_and_get_basic_types(self):
|
||||
"""测试基础类型数据的存取"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# Integer
|
||||
pool.put("int_key", 42)
|
||||
assert pool.get("int_key") == 42
|
||||
|
||||
# Float
|
||||
pool.put("float_key", 3.14159)
|
||||
assert abs(pool.get("float_key") - 3.14159) < 1e-10
|
||||
|
||||
# String
|
||||
pool.put("str_key", "hello mpsp")
|
||||
assert pool.get("str_key") == "hello mpsp"
|
||||
|
||||
# Boolean
|
||||
pool.put("bool_key", True)
|
||||
assert pool.get("bool_key") is True
|
||||
|
||||
# None
|
||||
pool.put("none_key", None)
|
||||
assert pool.get("none_key") is None
|
||||
|
||||
def test_put_and_get_collections(self):
|
||||
"""测试集合类型数据的存取"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# List
|
||||
test_list = [1, 2, 3, "a", "b", "c"]
|
||||
pool.put("list_key", test_list)
|
||||
assert pool.get("list_key") == test_list
|
||||
|
||||
# Dict
|
||||
test_dict = {"name": "test", "value": 100, "nested": {"a": 1}}
|
||||
pool.put("dict_key", test_dict)
|
||||
assert pool.get("dict_key") == test_dict
|
||||
|
||||
# Tuple
|
||||
test_tuple = (1, 2, 3)
|
||||
pool.put("tuple_key", test_tuple)
|
||||
assert pool.get("tuple_key") == test_tuple
|
||||
|
||||
# Set
|
||||
test_set = {1, 2, 3}
|
||||
pool.put("set_key", test_set)
|
||||
assert pool.get("set_key") == test_set
|
||||
|
||||
def test_exists(self):
|
||||
"""测试 exists 方法"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
assert not pool.exists("nonexistent_key")
|
||||
|
||||
pool.put("existing_key", "value")
|
||||
assert pool.exists("existing_key")
|
||||
|
||||
pool.remove("existing_key")
|
||||
assert not pool.exists("existing_key")
|
||||
|
||||
def test_remove(self):
|
||||
"""测试 remove 方法"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# Remove an existing key
|
||||
pool.put("key_to_remove", "value")
|
||||
assert pool.remove("key_to_remove") is True
|
||||
assert not pool.exists("key_to_remove")
|
||||
|
||||
# Remove a nonexistent key
|
||||
assert pool.remove("nonexistent_key") is False
|
||||
|
||||
def test_pop(self):
|
||||
"""测试 pop 方法"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# Pop an existing key
|
||||
pool.put("key_to_pop", "popped_value")
|
||||
value = pool.pop("key_to_pop")
|
||||
assert value == "popped_value"
|
||||
assert not pool.exists("key_to_pop")
|
||||
|
||||
# Pop a nonexistent key (with a default)
|
||||
default_value = pool.pop("nonexistent_key", "default")
|
||||
assert default_value == "default"
|
||||
|
||||
# Pop a nonexistent key (without a default)
|
||||
assert pool.pop("nonexistent_key") is None
|
||||
|
||||
def test_size(self):
|
||||
"""测试 size 方法"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
assert pool.size() == 0
|
||||
|
||||
pool.put("key1", "value1")
|
||||
assert pool.size() == 1
|
||||
|
||||
pool.put("key2", "value2")
|
||||
pool.put("key3", "value3")
|
||||
assert pool.size() == 3
|
||||
|
||||
pool.remove("key1")
|
||||
assert pool.size() == 2
|
||||
|
||||
pool.clear()
|
||||
assert pool.size() == 0
|
||||
|
||||
def test_keys(self):
|
||||
"""测试 keys 方法"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
assert pool.keys() == []
|
||||
|
||||
pool.put("key1", "value1")
|
||||
pool.put("key2", "value2")
|
||||
pool.put("key3", "value3")
|
||||
|
||||
keys = pool.keys()
|
||||
assert len(keys) == 3
|
||||
assert set(keys) == {"key1", "key2", "key3"}
|
||||
|
||||
def test_clear(self):
|
||||
"""测试 clear 方法"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
|
||||
pool.put("key1", "value1")
|
||||
pool.put("key2", "value2")
|
||||
|
||||
pool.clear()
|
||||
|
||||
assert pool.size() == 0
|
||||
assert pool.keys() == []
|
||||
assert not pool.exists("key1")
|
||||
|
||||
|
||||
class TestDictInterface:
|
||||
"""测试字典风格接口"""
|
||||
|
||||
def test_getitem(self):
|
||||
"""测试 __getitem__"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("test_key", "test_value")
|
||||
assert pool["test_key"] == "test_value"
|
||||
|
||||
# Accessing a nonexistent key should raise KeyError
|
||||
with pytest.raises(KeyError):
|
||||
_ = pool["nonexistent_key"]
|
||||
|
||||
def test_setitem(self):
|
||||
"""测试 __setitem__"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool["new_key"] = "new_value"
|
||||
assert pool.get("new_key") == "new_value"
|
||||
|
||||
def test_delitem(self):
|
||||
"""测试 __delitem__"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("key_to_delete", "value")
|
||||
del pool["key_to_delete"]
|
||||
assert not pool.exists("key_to_delete")
|
||||
|
||||
# Deleting a nonexistent key should raise KeyError
|
||||
with pytest.raises(KeyError):
|
||||
del pool["nonexistent_key"]
|
||||
|
||||
def test_contains(self):
|
||||
"""测试 __contains__ (in 操作符)"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("existing_key", "value")
|
||||
|
||||
assert "existing_key" in pool
|
||||
assert "nonexistent_key" not in pool
|
||||
|
||||
|
||||
class TestContextManager:
|
||||
"""测试上下文管理器"""
|
||||
|
||||
def test_context_manager(self):
|
||||
"""测试 with 语句支持"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
with MultiProcessingSharedPool() as p:
|
||||
p.put("ctx_key", "ctx_value")
|
||||
assert p.get("ctx_key") == "ctx_value"
|
||||
|
||||
# Data should still exist after the context exits (the manager is not shut down)
|
||||
assert pool.get("ctx_key") == "ctx_value"
|
||||
|
||||
|
||||
class TestErrorHandling:
|
||||
"""测试错误处理"""
|
||||
|
||||
def test_invalid_label_type_put(self):
|
||||
"""测试 put 时传入非法 label 类型"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
|
||||
with pytest.raises(TypeError, match="Label must be a string"):
|
||||
pool.put(123, "value")
|
||||
|
||||
with pytest.raises(TypeError, match="Label must be a string"):
|
||||
pool.put(None, "value")
|
||||
|
||||
def test_invalid_label_type_get(self):
|
||||
"""测试 get 时传入非法 label 类型"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
|
||||
with pytest.raises(TypeError, match="Label must be a string"):
|
||||
pool.get(123)
|
||||
|
||||
def test_invalid_label_type_pop(self):
|
||||
"""测试 pop 时传入非法 label 类型"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
|
||||
with pytest.raises(TypeError, match="Label must be a string"):
|
||||
pool.pop(123)
|
||||
|
||||
def test_get_with_default(self):
|
||||
"""测试 get 带默认值的场景"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# Return the default when the key does not exist
|
||||
assert pool.get("nonexistent", "default") == "default"
|
||||
assert pool.get("nonexistent", None) is None
|
||||
assert pool.get("nonexistent") is None
|
||||
|
||||
# Return the actual value when the key exists
|
||||
pool.put("existing", "real_value")
|
||||
assert pool.get("existing", "default") == "real_value"
|
||||
|
||||
def test_special_label_names(self):
|
||||
"""测试特殊 label 名称"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# Empty string
|
||||
pool.put("", "empty_string_key")
|
||||
assert pool.get("") == "empty_string_key"
|
||||
|
||||
# Special characters
|
||||
special_keys = [
|
||||
"key with spaces",
|
||||
"key\twith\ttabs",
|
||||
"key\nwith\nnewlines",
|
||||
"key/with/slashes",
|
||||
"key.with.dots",
|
||||
"key:with:colons",
|
||||
"UPPERCASE_KEY",
|
||||
"mixedCase_Key",
|
||||
"unicode_中文_key",
|
||||
"emoji_😀_key",
|
||||
]
|
||||
|
||||
for key in special_keys:
|
||||
pool.put(key, f"value_for_{key}")
|
||||
assert pool.get(key) == f"value_for_{key}"
|
||||
|
||||
|
||||
class TestOverwrite:
|
||||
"""测试覆盖写入"""
|
||||
|
||||
def test_overwrite_value(self):
|
||||
"""测试覆盖相同 key 的值"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("key", "original_value")
|
||||
assert pool.get("key") == "original_value"
|
||||
|
||||
pool.put("key", "new_value")
|
||||
assert pool.get("key") == "new_value"
|
||||
|
||||
# Overwrite with a different type
|
||||
pool.put("key", 12345)
|
||||
assert pool.get("key") == 12345
|
||||
|
||||
def test_overwrite_with_none(self):
|
||||
"""测试用 None 覆盖有值的 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("key", "value")
|
||||
pool.put("key", None)
|
||||
|
||||
assert pool.get("key") is None
|
||||
assert pool.exists("key") # key 应该仍然存在
|
||||
652
test/test_edge_cases.py
Normal file
@@ -0,0 +1,652 @@
|
||||
"""
|
||||
Edge-case and error tests - verify robustness.
|
||||
"""
|
||||
|
||||
import multiprocessing
|
||||
import time
|
||||
import pickle
|
||||
import pytest
|
||||
from mpsp.mpsp import MultiProcessingSharedPool
|
||||
|
||||
|
||||
# ==================== Helper functions ====================
|
||||
|
||||
|
||||
def worker_put_empty_key(result_queue):
|
||||
"""子进程:测试空字符串 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
try:
|
||||
pool.put("", "empty_value")
|
||||
result_queue.put(("success", pool.get("")))
|
||||
except Exception as e:
|
||||
result_queue.put(("error", str(e)))
|
||||
|
||||
|
||||
def worker_get_nonexistent(result_queue):
|
||||
"""子进程:获取不存在的 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
result = pool.get("definitely_nonexistent_key_12345")
|
||||
result_queue.put(result)
|
||||
|
||||
|
||||
def worker_put_large_object(key, data, result_queue):
|
||||
"""子进程:存储大对象"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
try:
|
||||
success = pool.put(key, data)
|
||||
result_queue.put(("success", success))
|
||||
except Exception as e:
|
||||
result_queue.put(("error", str(e)))
|
||||
|
||||
|
||||
# ==================== Test classes ====================
|
||||
|
||||
|
||||
class TestEmptyAndNoneValues:
|
||||
"""测试空值和 None 处理"""
|
||||
|
||||
def test_put_empty_string_value(self):
|
||||
"""测试存储空字符串值"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("empty_value_key", "")
|
||||
assert pool.get("empty_value_key") == ""
|
||||
|
||||
def test_put_none_value(self):
|
||||
"""测试存储 None 值"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("none_value_key", None)
|
||||
assert pool.get("none_value_key") is None
|
||||
|
||||
def test_put_empty_list(self):
|
||||
"""测试存储空列表"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("empty_list", [])
|
||||
assert pool.get("empty_list") == []
|
||||
|
||||
def test_put_empty_dict(self):
|
||||
"""测试存储空字典"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("empty_dict", {})
|
||||
assert pool.get("empty_dict") == {}
|
||||
|
||||
def test_put_empty_tuple(self):
|
||||
"""测试存储空元组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("empty_tuple", ())
|
||||
assert pool.get("empty_tuple") == ()
|
||||
|
||||
def test_put_empty_set(self):
|
||||
"""测试存储空集合"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("empty_set", set())
|
||||
assert pool.get("empty_set") == set()
|
||||
|
||||
|
||||
class TestSpecialLabelNames:
|
||||
"""测试特殊 label 名称"""
|
||||
|
||||
def test_empty_string_label(self):
|
||||
"""测试空字符串作为 label"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("", "empty_key_value")
|
||||
assert pool.get("") == "empty_key_value"
|
||||
assert pool.exists("")
|
||||
|
||||
def test_empty_string_label_cross_process(self):
|
||||
"""测试空字符串 label 跨进程"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("", "parent_empty_value")
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(target=worker_put_empty_key, args=(result_queue,))
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
status, result = result_queue.get()
|
||||
# The child process should be able to overwrite the empty-string key
|
||||
assert status == "success"
|
||||
assert result == "empty_value"
|
||||
|
||||
def test_unicode_label(self):
|
||||
"""测试 Unicode 字符作为 label"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
unicode_keys = [
|
||||
"中文键",
|
||||
"日本語キー",
|
||||
"한국어키",
|
||||
"emoji_😀",
|
||||
"special_©_®_™",
|
||||
"math_∑_∏_√",
|
||||
"arrows_→_←_↑_↓",
|
||||
]
|
||||
|
||||
for key in unicode_keys:
|
||||
pool.put(key, f"value_for_{key}")
|
||||
|
||||
for key in unicode_keys:
|
||||
assert pool.get(key) == f"value_for_{key}"
|
||||
|
||||
def test_whitespace_label(self):
|
||||
"""测试空白字符作为 label"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
whitespace_keys = [
|
||||
" ", # 单个空格
|
||||
" ", # 两个空格
|
||||
"\t", # Tab
|
||||
"\n", # 换行
|
||||
"\r\n", # Windows 换行
|
||||
" key_with_leading_space",
|
||||
"key_with_trailing_space ",
|
||||
" key_with_both_spaces ",
|
||||
"key\twith\ttabs",
|
||||
]
|
||||
|
||||
for key in whitespace_keys:
|
||||
pool.put(key, f"value_for_repr_{repr(key)}")
|
||||
|
||||
for key in whitespace_keys:
|
||||
assert pool.get(key) == f"value_for_repr_{repr(key)}"
|
||||
|
||||
def test_special_chars_label(self):
|
||||
"""测试特殊字符作为 label"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
special_keys = [
|
||||
"key.with.dots",
|
||||
"key/with/slashes",
|
||||
"key:with:colons",
|
||||
"key|with|pipes",
|
||||
"key*with*asterisks",
|
||||
"key?with?question",
|
||||
"key<with>brackets",
|
||||
"key[with]square",
|
||||
"key{with}curly",
|
||||
"key+with+plus",
|
||||
"key=with=equals",
|
||||
"key!with!exclamation",
|
||||
"key@with@at",
|
||||
"key#with#hash",
|
||||
"key$with$dollar",
|
||||
"key%with%percent",
|
||||
"key^with^caret",
|
||||
"key&with&ersand",
|
||||
"key'with'quotes",
|
||||
'key"with"double',
|
||||
"key`with`backtick",
|
||||
"key~with~tilde",
|
||||
"key-with-hyphens",
|
||||
"key_with_underscores",
|
||||
]
|
||||
|
||||
for key in special_keys:
|
||||
pool.put(key, f"value_for_{key}")
|
||||
|
||||
for key in special_keys:
|
||||
assert pool.get(key) == f"value_for_{key}"
|
||||
|
||||
def test_very_long_label(self):
|
||||
"""测试超长 label"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 1000 字符的 label
|
||||
long_key = "a" * 1000
|
||||
pool.put(long_key, "long_key_value")
|
||||
assert pool.get(long_key) == "long_key_value"
|
||||
|
||||
|
||||
class TestNonExistentKeys:
|
||||
"""测试不存在的 key 处理"""
|
||||
|
||||
def test_get_nonexistent(self):
|
||||
"""测试获取不存在的 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
result = pool.get("nonexistent_key_12345")
|
||||
assert result is None
|
||||
|
||||
def test_get_nonexistent_with_default(self):
|
||||
"""测试获取不存在的 key 带默认值"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
assert pool.get("nonexistent", "default") == "default"
|
||||
assert pool.get("nonexistent", 0) == 0
|
||||
assert pool.get("nonexistent", []) == []
|
||||
assert pool.get("nonexistent", {}) == {}
|
||||
|
||||
def test_get_nonexistent_cross_process(self):
|
||||
"""测试跨进程获取不存在的 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(target=worker_get_nonexistent, args=(result_queue,))
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result is None
|
||||
|
||||
def test_remove_nonexistent(self):
|
||||
"""测试删除不存在的 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
assert pool.remove("nonexistent_key") is False
|
||||
|
||||
def test_pop_nonexistent(self):
|
||||
"""测试弹出不存在的 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
assert pool.pop("nonexistent_key") is None
|
||||
assert pool.pop("nonexistent_key", "default") == "default"
|
||||
|
||||
def test_exists_nonexistent(self):
|
||||
"""测试检查不存在的 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
assert pool.exists("nonexistent_key") is False
|
||||
|
||||
|
||||
class TestLargeObjects:
|
||||
"""测试大对象序列化"""
|
||||
|
||||
def test_large_list(self):
|
||||
"""测试大型列表"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
large_list = list(range(100000))
|
||||
pool.put("large_list", large_list)
|
||||
|
||||
retrieved = pool.get("large_list")
|
||||
assert len(retrieved) == 100000
|
||||
assert retrieved[0] == 0
|
||||
assert retrieved[99999] == 99999
|
||||
|
||||
def test_large_dict(self):
|
||||
"""测试大型字典"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
large_dict = {f"key_{i}": f"value_{i}" for i in range(10000)}
|
||||
pool.put("large_dict", large_dict)
|
||||
|
||||
retrieved = pool.get("large_dict")
|
||||
assert len(retrieved) == 10000
|
||||
assert retrieved["key_0"] == "value_0"
|
||||
assert retrieved["key_9999"] == "value_9999"
|
||||
|
||||
def test_large_string(self):
|
||||
"""测试大型字符串"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
large_string = "x" * 1000000 # 1MB 字符串
|
||||
pool.put("large_string", large_string)
|
||||
|
||||
retrieved = pool.get("large_string")
|
||||
assert len(retrieved) == 1000000
|
||||
assert retrieved[0] == "x"
|
||||
assert retrieved[-1] == "x"
|
||||
|
||||
def test_deeply_nested_structure(self):
|
||||
"""测试深度嵌套结构"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 创建深度嵌套的字典
|
||||
depth = 50
|
||||
nested = "bottom"
|
||||
for i in range(depth):
|
||||
nested = {"level": i, "nested": nested}
|
||||
|
||||
pool.put("deep_nested", nested)
|
||||
|
||||
retrieved = pool.get("deep_nested")
|
||||
# 验证嵌套深度
|
||||
current = retrieved
|
||||
for i in range(depth):
|
||||
assert current["level"] == depth - 1 - i
|
||||
current = current["nested"]
|
||||
assert current == "bottom"
|
||||
|
||||
def test_large_object_cross_process(self):
|
||||
"""测试跨进程传递大对象"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
large_data = {"items": list(range(10000)), "name": "large_test"}
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_put_large_object,
|
||||
args=("large_cross", large_data, result_queue),
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
status, result = result_queue.get()
|
||||
assert status == "success"
|
||||
assert result is True
|
||||
|
||||
|
||||
class TestCircularReferences:
|
||||
"""测试循环引用"""
|
||||
|
||||
def test_circular_list(self):
|
||||
"""测试列表中的循环引用"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 创建循环引用列表
|
||||
circular = [1, 2, 3]
|
||||
circular.append(circular) # 循环引用
|
||||
|
||||
pool.put("circular_list", circular)
|
||||
|
||||
retrieved = pool.get("circular_list")
|
||||
assert retrieved[0] == 1
|
||||
assert retrieved[1] == 2
|
||||
assert retrieved[2] == 3
|
||||
# The circular reference should be handled correctly
|
||||
assert retrieved[3] is not None
|
||||
|
||||
def test_circular_dict(self):
|
||||
"""测试字典中的循环引用"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 创建循环引用字典
|
||||
circular = {"a": 1, "b": 2}
|
||||
circular["self"] = circular # 循环引用
|
||||
|
||||
pool.put("circular_dict", circular)
|
||||
|
||||
retrieved = pool.get("circular_dict")
|
||||
assert retrieved["a"] == 1
|
||||
assert retrieved["b"] == 2
|
||||
# The circular reference should be handled correctly
|
||||
assert "self" in retrieved
|
||||
|
||||
|
||||
class TestBinaryData:
|
||||
"""测试二进制数据"""
|
||||
|
||||
def test_bytes_data(self):
|
||||
"""测试字节数据"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
binary_data = b"\x00\x01\x02\x03\xff\xfe\xfd\xfc"
|
||||
pool.put("binary_data", binary_data)
|
||||
|
||||
retrieved = pool.get("binary_data")
|
||||
assert retrieved == binary_data
|
||||
|
||||
def test_large_binary_data(self):
|
||||
"""测试大型二进制数据"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
binary_data = bytes(range(256)) * 1000 # 256KB
|
||||
pool.put("large_binary", binary_data)
|
||||
|
||||
retrieved = pool.get("large_binary")
|
||||
assert retrieved == binary_data
|
||||
|
||||
def test_bytearray_data(self):
|
||||
"""测试 bytearray 数据"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
ba = bytearray(b"\x00\x01\x02\x03")
|
||||
pool.put("bytearray_data", ba)
|
||||
|
||||
retrieved = pool.get("bytearray_data")
|
||||
assert retrieved == ba
|
||||
|
||||
|
||||
class TestMixedTypes:
|
||||
"""测试混合类型数据"""
|
||||
|
||||
def test_heterogeneous_list(self):
|
||||
"""测试异构列表"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
mixed_list = [
|
||||
1, # int
|
||||
3.14, # float
|
||||
"string", # str
|
||||
True, # bool
|
||||
None, # NoneType
|
||||
[1, 2, 3], # list
|
||||
{"a": 1}, # dict
|
||||
(1, 2), # tuple
|
||||
{1, 2, 3}, # set
|
||||
b"binary", # bytes
|
||||
]
|
||||
|
||||
pool.put("mixed_list", mixed_list)
|
||||
|
||||
retrieved = pool.get("mixed_list")
|
||||
assert retrieved[0] == 1
|
||||
assert abs(retrieved[1] - 3.14) < 1e-10
|
||||
assert retrieved[2] == "string"
|
||||
assert retrieved[3] is True
|
||||
assert retrieved[4] is None
|
||||
assert retrieved[5] == [1, 2, 3]
|
||||
assert retrieved[6] == {"a": 1}
|
||||
assert retrieved[7] == (1, 2)
|
||||
assert retrieved[8] == {1, 2, 3}
|
||||
assert retrieved[9] == b"binary"
|
||||
|
||||
def test_heterogeneous_dict(self):
|
||||
"""测试异构字典"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
mixed_dict = {
|
||||
"int_key": 42,
|
||||
"float_key": 3.14,
|
||||
"str_key": "hello",
|
||||
"bool_key": True,
|
||||
"none_key": None,
|
||||
"list_key": [1, 2, 3],
|
||||
"dict_key": {"nested": "value"},
|
||||
"tuple_key": (1, 2, 3),
|
||||
}
|
||||
|
||||
pool.put("mixed_dict", mixed_dict)
|
||||
|
||||
retrieved = pool.get("mixed_dict")
|
||||
for key, value in mixed_dict.items():
|
||||
if isinstance(value, float):
|
||||
assert abs(retrieved[key] - value) < 1e-10
|
||||
else:
|
||||
assert retrieved[key] == value
|
||||
|
||||
|
||||
# Module-level worker so it can be referenced as a multiprocessing.Process target.
def worker_stress_test(key_prefix, iterations, result_queue):
|
||||
"""子进程:压力测试"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
errors = []
|
||||
|
||||
for i in range(iterations):
|
||||
try:
|
||||
key = f"{key_prefix}_{i}"
|
||||
pool.put(key, f"value_{i}")
|
||||
value = pool.get(key)
|
||||
if value != f"value_{i}":
|
||||
errors.append(f"Value mismatch at {key}")
|
||||
pool.remove(key)
|
||||
except Exception as e:
|
||||
errors.append(str(e))
|
||||
|
||||
result_queue.put(errors)
|
||||
|
||||
class TestConcurrentAccess:
|
||||
"""测试并发访问稳定性"""
|
||||
|
||||
def test_stress_concurrent_writes(self):
|
||||
"""压力测试:并发写入"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
num_processes = 4
|
||||
iterations = 100
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
processes = []
|
||||
|
||||
for i in range(num_processes):
|
||||
p = multiprocessing.Process(
|
||||
target=worker_stress_test,
|
||||
args=(f"stress_{i}", iterations, result_queue),
|
||||
)
|
||||
processes.append(p)
|
||||
p.start()
|
||||
|
||||
for p in processes:
|
||||
p.join()
|
||||
|
||||
# 收集所有错误
|
||||
all_errors = []
|
||||
for _ in range(num_processes):
|
||||
all_errors.extend(result_queue.get())
|
||||
|
||||
# There should be no errors
|
||||
assert len(all_errors) == 0, f"Errors occurred: {all_errors}"
|
||||
|
||||
def test_rapid_put_get_cycle(self):
|
||||
"""测试快速 put-get 循环"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
for i in range(1000):
|
||||
pool.put("rapid_key", f"value_{i}")
|
||||
value = pool.get("rapid_key")
|
||||
assert value == f"value_{i}"
|
||||
|
||||
def test_rapid_key_creation_deletion(self):
|
||||
"""测试快速创建和删除 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
for i in range(100):
|
||||
key = f"temp_key_{i}"
|
||||
pool.put(key, f"temp_value_{i}")
|
||||
assert pool.exists(key)
|
||||
pool.remove(key)
|
||||
assert not pool.exists(key)
|
||||
|
||||
|
||||
class TestErrorRecovery:
|
||||
"""测试错误恢复能力"""
|
||||
|
||||
def test_put_after_error(self):
|
||||
"""测试错误后可以继续 put"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 尝试使用非法 key 类型
|
||||
try:
|
||||
pool.put(123, "value") # 应该抛出 TypeError
|
||||
except TypeError:
|
||||
pass
|
||||
|
||||
# 应该可以继续正常使用
|
||||
pool.put("valid_key", "valid_value")
|
||||
assert pool.get("valid_key") == "valid_value"
|
||||
|
||||
def test_get_after_nonexistent(self):
|
||||
"""测试获取不存在的 key 后可以继续使用"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 获取不存在的 key
|
||||
result = pool.get("nonexistent")
|
||||
assert result is None
|
||||
|
||||
# 应该可以继续正常使用
|
||||
pool.put("new_key", "new_value")
|
||||
assert pool.get("new_key") == "new_value"
|
||||
|
||||
def test_multiple_singleton_access(self):
|
||||
"""测试多次获取单例后访问"""
|
||||
pool1 = MultiProcessingSharedPool()
|
||||
pool1.put("key1", "value1")
|
||||
|
||||
pool2 = MultiProcessingSharedPool()
|
||||
pool2.put("key2", "value2")
|
||||
|
||||
pool3 = MultiProcessingSharedPool.get_instance()
|
||||
pool3.put("key3", "value3")
|
||||
|
||||
# All instances should see the same data
|
||||
assert pool1.get("key1") == "value1"
|
||||
assert pool1.get("key2") == "value2"
|
||||
assert pool1.get("key3") == "value3"
|
||||
|
||||
|
||||
class TestCleanup:
|
||||
"""测试清理功能"""
|
||||
|
||||
def test_clear_after_multiple_puts(self):
|
||||
"""测试多次 put 后 clear"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
for i in range(100):
|
||||
pool.put(f"key_{i}", f"value_{i}")
|
||||
|
||||
assert pool.size() == 100
|
||||
|
||||
pool.clear()
|
||||
|
||||
assert pool.size() == 0
|
||||
assert pool.keys() == []
|
||||
|
||||
def test_remove_all_keys_one_by_one(self):
|
||||
"""测试逐个删除所有 key"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
keys = [f"key_{i}" for i in range(50)]
|
||||
for key in keys:
|
||||
pool.put(key, f"value_for_{key}")
|
||||
|
||||
for key in keys:
|
||||
assert pool.remove(key) is True
|
||||
|
||||
assert pool.size() == 0
|
||||
523
test/test_functions.py
Normal file
@@ -0,0 +1,523 @@
|
||||
"""
|
||||
Function serialization tests - verify the cloudpickle integration.
|
||||
"""
|
||||
|
||||
import multiprocessing
|
||||
import time
|
||||
import math
|
||||
import pytest
|
||||
from mpsp.mpsp import MultiProcessingSharedPool
|
||||
|
||||
|
||||
# ==================== Module-level plain functions ====================
|
||||
|
||||
|
||||
def simple_function(x):
|
||||
"""简单的加法函数"""
|
||||
return x + 1
|
||||
|
||||
|
||||
def multiply_function(a, b):
|
||||
"""乘法函数"""
|
||||
return a * b
|
||||
|
||||
|
||||
def function_with_default(x, y=10):
|
||||
"""带默认参数的函数"""
|
||||
return x + y
|
||||
|
||||
|
||||
def function_with_kwargs(*args, **kwargs):
|
||||
"""带可变参数的函数"""
|
||||
return sum(args) + sum(kwargs.values())
|
||||
|
||||
|
||||
def recursive_function(n):
|
||||
"""递归函数"""
|
||||
if n <= 1:
|
||||
return 1
|
||||
return n * recursive_function(n - 1)
|
||||
|
||||
|
||||
def closure_factory(base):
|
||||
"""闭包工厂函数"""
|
||||
|
||||
def inner(x):
|
||||
return x + base
|
||||
|
||||
return inner
|
||||
|
||||
|
||||
# ==================== Helper functions (defined at module level) ====================
|
||||
|
||||
|
||||
def worker_execute_function(key, result_queue):
|
||||
"""子进程:获取函数并执行"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
func = pool.get(key)
|
||||
if func is None:
|
||||
result_queue.put(None)
|
||||
return
|
||||
|
||||
# Execute the function (arguments depend on which test function was stored)
|
||||
try:
|
||||
if key == "simple_func":
|
||||
result = func(5)
|
||||
elif key == "multiply_func":
|
||||
result = func(3, 4)
|
||||
elif key == "default_func":
|
||||
result = func(5)
|
||||
elif key == "kwargs_func":
|
||||
result = func(1, 2, 3, a=4, b=5)
|
||||
elif key == "recursive_func":
|
||||
result = func(5)
|
||||
elif key == "closure_func":
|
||||
result = func(10)
|
||||
elif key == "lambda_func":
|
||||
result = func(7)
|
||||
elif key == "lambda_with_capture":
|
||||
result = func()
|
||||
elif key == "nested_func":
|
||||
result = func(3)
|
||||
else:
|
||||
result = func()
|
||||
result_queue.put(result)
|
||||
except Exception as e:
|
||||
result_queue.put(f"ERROR: {e}")
|
||||
|
||||
|
||||
def worker_execute_lambda_with_arg(key, arg, result_queue):
|
||||
"""子进程:获取 lambda 并执行,传入参数"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
func = pool.get(key)
|
||||
if func is None:
|
||||
result_queue.put(None)
|
||||
return
|
||||
result_queue.put(func(arg))
|
||||
|
||||
|
||||
def get_lambda_description(func):
|
||||
"""获取 lambda 函数的描述字符串"""
|
||||
try:
|
||||
return func.__name__
|
||||
except AttributeError:
|
||||
return str(func)
|
||||
|
||||
|
||||
# ==================== Test classes ====================
|
||||
|
||||
|
||||
class TestNormalFunctions:
|
||||
"""测试普通函数的序列化和反序列化"""
|
||||
|
||||
def test_simple_function(self):
|
||||
"""测试简单函数的传递"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 存储函数
|
||||
result = pool.put("simple_func", simple_function)
|
||||
assert result is True
|
||||
|
||||
# 当前进程验证
|
||||
retrieved = pool.get("simple_func")
|
||||
assert retrieved is not None
|
||||
assert retrieved(5) == 6
|
||||
assert retrieved(10) == 11
|
||||
|
||||
def test_simple_function_cross_process(self):
|
||||
"""测试简单函数的跨进程传递"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 父进程存储函数
|
||||
pool.put("simple_func", simple_function)
|
||||
|
||||
# 子进程获取并执行
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_function, args=("simple_func", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 6 # simple_function(5) = 5 + 1
|
||||
|
||||
def test_function_with_multiple_args(self):
|
||||
"""测试多参数函数"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("multiply_func", multiply_function)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_function, args=("multiply_func", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 12 # multiply_function(3, 4) = 12
|
||||
|
||||
def test_function_with_default_args(self):
|
||||
"""测试带默认参数的函数"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("default_func", function_with_default)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_function, args=("default_func", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 15 # function_with_default(5) = 5 + 10
|
||||
|
||||
def test_function_with_kwargs(self):
|
||||
"""测试带 **kwargs 的函数"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("kwargs_func", function_with_kwargs)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_function, args=("kwargs_func", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 15 # sum(1,2,3) + sum(4,5) = 6 + 9 = 15
|
||||
|
||||
def test_recursive_function(self):
|
||||
"""测试递归函数"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
pool.put("recursive_func", recursive_function)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_function, args=("recursive_func", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 120 # 5! = 120
|
||||
|
||||
|
||||
class TestLambdaFunctions:
|
||||
"""测试 Lambda 函数的序列化和反序列化"""
|
||||
|
||||
def test_simple_lambda(self):
|
||||
"""测试简单 lambda 函数"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
simple_lambda = lambda x: x * 2
|
||||
result = pool.put("lambda_func", simple_lambda)
|
||||
assert result is True
|
||||
|
||||
# 当前进程验证
|
||||
retrieved = pool.get("lambda_func")
|
||||
assert retrieved(5) == 10
|
||||
assert retrieved(7) == 14
|
||||
|
||||
def test_simple_lambda_cross_process(self):
|
||||
"""测试简单 lambda 的跨进程传递"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
simple_lambda = lambda x: x * 3
|
||||
pool.put("lambda_func", simple_lambda)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_function, args=("lambda_func", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 21 # lambda(7) = 7 * 3 = 21
|
||||
|
||||
def test_lambda_with_capture(self):
|
||||
"""测试捕获外部变量的 lambda"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
captured_value = 100
|
||||
capturing_lambda = lambda: captured_value + 1
|
||||
|
||||
pool.put("lambda_with_capture", capturing_lambda)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_function, args=("lambda_with_capture", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 101 # captured_value + 1 = 101
|
||||
|
||||
def test_lambda_in_list_comprehension(self):
|
||||
"""测试在列表推导式中创建的 lambda"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 创建多个 lambda
|
||||
lambdas = [(lambda x, i=i: x + i) for i in range(5)]
|
||||
|
||||
for i, lam in enumerate(lambdas):
|
||||
pool.put(f"lambda_{i}", lam)
|
||||
|
||||
# 验证每个 lambda 都能正确捕获各自的 i
|
||||
for i in range(5):
|
||||
retrieved = pool.get(f"lambda_{i}")
|
||||
assert retrieved(10) == 10 + i
|
||||
|
||||
def test_complex_lambda(self):
|
||||
"""测试复杂的 lambda 表达式"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
complex_lambda = lambda x, y: (x**2 + y**2) ** 0.5
|
||||
pool.put("complex_lambda", complex_lambda)
|
||||
|
||||
# 子进程验证
|
||||
def worker_execute_complex_lambda(key, x, y, result_queue):
|
||||
pool = MultiProcessingSharedPool()
|
||||
func = pool.get(key)
|
||||
result_queue.put(func(x, y))
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_complex_lambda,
|
||||
args=("complex_lambda", 3, 4, result_queue),
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert abs(result - 5.0) < 1e-10 # sqrt(3^2 + 4^2) = 5
|
||||
|
||||
|
||||
class TestNestedFunctions:
|
||||
"""测试嵌套函数(在函数内部定义的函数)"""
|
||||
|
||||
def test_nested_function(self):
|
||||
"""测试嵌套函数"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
def outer_function(x):
|
||||
def inner_function(y):
|
||||
return y * y
|
||||
|
||||
return inner_function(x) + x
|
||||
|
||||
pool.put("nested_func", outer_function)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_function, args=("nested_func", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 12 # outer_function(3) = 3*3 + 3 = 12
|
||||
|
||||
def test_closure_function(self):
|
||||
"""测试闭包函数"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
closure_func = closure_factory(100)
|
||||
pool.put("closure_func", closure_func)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_execute_function, args=("closure_func", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 110 # closure_func(10) = 10 + 100
|
||||
|
||||
def test_multiple_closures(self):
|
||||
"""测试多个闭包"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
closures = [closure_factory(i) for i in range(5)]
|
||||
for i, closure in enumerate(closures):
|
||||
pool.put(f"closure_{i}", closure)
|
||||
|
||||
# 验证每个闭包捕获的值不同
|
||||
for i in range(5):
|
||||
retrieved = pool.get(f"closure_{i}")
|
||||
assert retrieved(10) == 10 + i
|
||||
|
||||
|
||||
class TestClassMethods:
|
||||
"""测试类方法的序列化"""
|
||||
|
||||
def test_static_method(self):
|
||||
"""测试静态方法"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
class Calculator:
|
||||
@staticmethod
|
||||
def add(x, y):
|
||||
return x + y
|
||||
|
||||
@staticmethod
|
||||
def multiply(x, y):
|
||||
return x * y
|
||||
|
||||
pool.put("static_add", Calculator.add)
|
||||
pool.put("static_multiply", Calculator.multiply)
|
||||
|
||||
# 验证静态方法
|
||||
add_func = pool.get("static_add")
|
||||
multiply_func = pool.get("static_multiply")
|
||||
|
||||
assert add_func(2, 3) == 5
|
||||
assert multiply_func(2, 3) == 6
|
||||
|
||||
def test_class_method(self):
|
||||
"""测试类方法"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
class Counter:
|
||||
count = 0
|
||||
|
||||
@classmethod
|
||||
def increment(cls):
|
||||
cls.count += 1
|
||||
return cls.count
|
||||
|
||||
# Note: classmethods often cannot be serialized correctly by cloudpickle
|
||||
# because they depend on the class definition
|
||||
result = pool.put("class_method", Counter.increment)
|
||||
# If it was stored successfully, try to execute it
|
||||
if result:
|
||||
try:
|
||||
method = pool.get("class_method")
|
||||
# The classmethod may not work correctly after deserialization
|
||||
# depending on the cloudpickle implementation
|
||||
except Exception:
|
||||
pass  # failure is acceptable here
|
||||
|
||||
|
||||
class TestBuiltInFunctions:
    """Test serialization of built-in functions."""

    def test_builtin_functions(self):
        """Test Python built-in functions."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Most built-in functions can be serialized with standard pickle
        pool.put("builtin_sum", sum)
        pool.put("builtin_max", max)
        pool.put("builtin_min", min)
        pool.put("builtin_len", len)

        # Verify
        assert pool.get("builtin_sum")([1, 2, 3]) == 6
        assert pool.get("builtin_max")([1, 2, 3]) == 3
        assert pool.get("builtin_min")([1, 2, 3]) == 1
        assert pool.get("builtin_len")([1, 2, 3]) == 3

    def test_math_functions(self):
        """Test math module functions."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        pool.put("math_sqrt", math.sqrt)
        pool.put("math_sin", math.sin)
        pool.put("math_cos", math.cos)

        # Verify
        assert abs(pool.get("math_sqrt")(16) - 4.0) < 1e-10
        assert abs(pool.get("math_sin")(0) - 0.0) < 1e-10
        assert abs(pool.get("math_cos")(0) - 1.0) < 1e-10


class TestFunctionReturnValues:
    """Test functions used as return values."""

    def test_function_returned_from_function(self):
        """Test a function that returns a function."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        def create_multiplier(factor):
            return lambda x: x * factor

        pool.put("create_multiplier", create_multiplier)

        # Fetch and use it in a child process
        def worker_get_multiplier(result_queue):
            pool = MultiProcessingSharedPool()
            factory = pool.get("create_multiplier")
            multiplier_func = factory(5)  # build a multiply-by-5 function
            result_queue.put(multiplier_func(10))

        result_queue = multiprocessing.Queue()
        p = multiprocessing.Process(target=worker_get_multiplier, args=(result_queue,))
        p.start()
        p.join()

        result = result_queue.get()
        assert result == 50  # 10 * 5 = 50


class TestErrorHandling:
    """Test error handling for function serialization."""

    def test_unpicklable_function_fallback(self):
        """Functions the standard pickler rejects fall back to cloudpickle."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Define a local function (cannot be serialized by standard pickle)
        def local_function(x):
            return x**2

        # Should be stored successfully via cloudpickle
        # (see the fallback sketch after this class)
        result = pool.put("local_func", local_function)
        assert result is True

        # Verify it executes correctly
        retrieved = pool.get("local_func")
        assert retrieved(5) == 25

    def test_function_with_unpicklable_capture(self):
        """Test a function that captures an unpicklable object."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Capture a file object (not serializable)
        try:
            with open(__file__, "r") as f:
                file_capturing_lambda = lambda: f.read()

                # Attempting to store it should fail
                result = pool.put("file_lambda", file_capturing_lambda)
                # If cloudpickle supports it, check that the failure is handled
        except Exception:
            pass  # Expected to possibly fail
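The fallback these error-handling tests exercise is not shown in this commit. As a rough sketch only, assuming MultiProcessingSharedPool.put() tries the standard pickler first and falls back to cloudpickle for lambdas, closures, and locally defined functions (the helper name below is hypothetical, not part of the library's API):

import pickle

import cloudpickle


def _serialize_value(value):
    # Hypothetical helper, not part of mpsp: try standard pickle first and
    # fall back to cloudpickle for objects the standard pickler rejects.
    try:
        return pickle.dumps(value)
    except (pickle.PicklingError, AttributeError, TypeError):
        return cloudpickle.dumps(value)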
446
test/test_multiprocess.py
Normal file
@ -0,0 +1,446 @@
"""
Multiprocess data-sharing tests - verify cross-process data synchronization.
"""

import multiprocessing
import time
import pytest
from mpsp.mpsp import MultiProcessingSharedPool


# ==================== Helper functions (defined at module level so multiprocessing can pickle them) ====================


def worker_put_data(key, value):
    """Child process: write data into the shared pool."""
    pool = MultiProcessingSharedPool()
    pool.put(key, value)
    return True


def worker_get_data(key, result_queue):
    """Child process: read data from the shared pool and put it on the result queue."""
    pool = MultiProcessingSharedPool()
    value = pool.get(key)
    result_queue.put(value)


def worker_check_exists(key, result_queue):
    """Child process: check whether a key exists."""
    pool = MultiProcessingSharedPool()
    result_queue.put(pool.exists(key))


def worker_modify_data(key, new_value, result_queue):
    """Child process: modify data and return the old value."""
    pool = MultiProcessingSharedPool()
    old_value = pool.get(key)
    pool.put(key, new_value)
    result_queue.put(old_value)


def worker_wait_and_get(key, wait_time, result_queue):
    """Child process: wait for a while, then read the data."""
    time.sleep(wait_time)
    pool = MultiProcessingSharedPool()
    result_queue.put(pool.get(key))


def worker_increment_counter(key, iterations):
    """Child process: increment a counter."""
    pool = MultiProcessingSharedPool()
    for _ in range(iterations):
        # Note: this is not an atomic operation; it only exercises concurrent
        # access (a locked variant is sketched after these helpers)
        current = pool.get(key, 0)
        pool.put(key, current + 1)


def worker_pop_data(key, result_queue):
    """Child process: pop data."""
    pool = MultiProcessingSharedPool()
    value = pool.pop(key)
    result_queue.put(value)


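The read-modify-write in worker_increment_counter above is intentionally racy. A minimal sketch of a locked variant, assuming the parent creates a multiprocessing.Lock and passes it to each child through the Process args (the lock is an assumption of this sketch, not something MultiProcessingSharedPool provides):

def worker_increment_counter_locked(key, iterations, lock):
    # Hypothetical locked variant: `lock` is a multiprocessing.Lock created in
    # the parent; it serializes each get/put pair so no increment is lost.
    pool = MultiProcessingSharedPool()
    for _ in range(iterations):
        with lock:
            current = pool.get(key, 0)
            pool.put(key, current + 1)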
# ==================== Test classes ====================


class TestParentChildProcess:
    """Test data passing between parent and child processes."""

    def test_parent_write_child_read(self):
        """Parent process writes, child process reads."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Parent process writes the data
        pool.put("shared_key", "shared_value")

        # Child process reads the data
        result_queue = multiprocessing.Queue()
        p = multiprocessing.Process(
            target=worker_get_data, args=("shared_key", result_queue)
        )
        p.start()
        p.join()

        result = result_queue.get()
        assert result == "shared_value"

    def test_child_write_parent_read(self):
        """Child process writes, parent process reads."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Child process writes the data
        p = multiprocessing.Process(
            target=worker_put_data, args=("child_key", "child_value")
        )
        p.start()
        p.join()

        # Parent reads the data (a short wait ensures it has synchronized)
        time.sleep(0.1)
        result = pool.get("child_key")
        assert result == "child_value"

    def test_parent_child_data_isolation(self):
        """Parent/child data isolation - verifies the manager.dict synchronization mechanism."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Parent writes the initial data
        pool.put("isolation_test", "parent_value")

        # Child reads and modifies it
        result_queue = multiprocessing.Queue()
        p = multiprocessing.Process(
            target=worker_modify_data,
            args=("isolation_test", "child_value", result_queue),
        )
        p.start()
        p.join()

        # The child should have read the parent's value
        child_read = result_queue.get()
        assert child_read == "parent_value"

        # The parent should see the child's modification
        time.sleep(0.1)
        parent_read = pool.get("isolation_test")
        assert parent_read == "child_value"


class TestMultipleChildProcesses:
    """Test data sharing among multiple child processes."""

    def test_multiple_children_write_same_key(self):
        """Multiple children write the same key (later writes overwrite earlier ones)."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        processes = []
        for i in range(5):
            p = multiprocessing.Process(
                target=worker_put_data, args=("shared_key", f"value_{i}")
            )
            processes.append(p)
            p.start()

        for p in processes:
            p.join()

        # Wait briefly to make sure all writes have completed
        time.sleep(0.1)

        # Verify that one of the values was written
        result = pool.get("shared_key")
        assert result.startswith("value_")

    def test_multiple_children_write_different_keys(self):
        """Multiple children write different keys."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        num_processes = 5
        processes = []

        for i in range(num_processes):
            p = multiprocessing.Process(
                target=worker_put_data, args=(f"key_{i}", f"value_{i}")
            )
            processes.append(p)
            p.start()

        for p in processes:
            p.join()

        # Wait briefly to make sure all writes have completed
        time.sleep(0.1)

        # Verify that all values were written
        for i in range(num_processes):
            assert pool.get(f"key_{i}") == f"value_{i}"

    def test_multiple_children_read_same_key(self):
        """Multiple children read the same key."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Parent writes the data
        pool.put("shared_key", "shared_value")

        # Several children read it
        num_processes = 5
        result_queue = multiprocessing.Queue()
        processes = []

        for _ in range(num_processes):
            p = multiprocessing.Process(
                target=worker_get_data, args=("shared_key", result_queue)
            )
            processes.append(p)
            p.start()

        for p in processes:
            p.join()

        # Collect all results
        results = []
        for _ in range(num_processes):
            results.append(result_queue.get())

        # Every child should have read the same value
        assert all(r == "shared_value" for r in results)

    def test_concurrent_exists_check(self):
        """Concurrent checks of whether keys exist."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Write some data
        for i in range(5):
            pool.put(f"key_{i}", f"value_{i}")

        # Several children check concurrently
        result_queue = multiprocessing.Queue()
        processes = []

        for i in range(10):
            key = f"key_{i % 7}"  # some keys exist, some do not
            p = multiprocessing.Process(
                target=worker_check_exists, args=(key, result_queue)
            )
            processes.append(p)
            p.start()

        for p in processes:
            p.join()

        # Collect the results
        results = []
        for _ in range(10):
            results.append(result_queue.get())

        # key_0 through key_4 exist, key_5 and key_6 do not; the index wraps,
        # so 8 of the 10 checks should hit an existing key
        assert sum(results) >= 5  # at least 5 exist


# Process-pool tests need their worker functions defined at module level
# (see the pickling illustration after these two workers)
def _pool_worker_map(args):
    """Worker for the process-pool map operation."""
    idx, key = args
    shared_pool = MultiProcessingSharedPool()
    value = shared_pool.get(key)
    return idx, value


def _pool_worker_apply(key):
    """Worker for the process-pool apply_async call."""
    shared_pool = MultiProcessingSharedPool()
    return shared_pool.get(key)


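Why module level matters here: multiprocessing.Pool pickles the target callable with the standard pickler before dispatching it to a worker, and the standard pickler cannot serialize a function defined inside another function. A small standard-library illustration, separate from mpsp itself:

import pickle


def _illustrate_why_pool_workers_are_module_level():
    # Illustration only: nested functions are rejected by the standard pickler,
    # which is why _pool_worker_map and _pool_worker_apply live at module level.
    def local_worker(args):
        return args

    pickle.dumps(_pool_worker_map)  # module-level function: picklable by reference
    try:
        pickle.dumps(local_worker)  # locally defined function: rejected
    except (pickle.PicklingError, AttributeError):
        pass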
class TestProcessPool:
    """Test usage inside a process pool."""

    def test_pool_map_with_shared_data(self):
        """Use shared data in a process-pool map operation."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Write the test data
        for i in range(5):
            pool.put(f"input_{i}", i * 10)

        # Use a process pool
        with multiprocessing.Pool(processes=3) as process_pool:
            results = process_pool.map(
                _pool_worker_map, [(i, f"input_{i}") for i in range(5)]
            )

        # Verify the results
        results_dict = {idx: val for idx, val in results}
        for i in range(5):
            assert results_dict[i] == i * 10

    def test_pool_apply_async_with_shared_data(self):
        """Use shared data with the process pool's apply_async."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        pool.put("async_key", "async_value")

        with multiprocessing.Pool(processes=2) as process_pool:
            result = process_pool.apply_async(_pool_worker_apply, ("async_key",))
            assert result.get(timeout=5) == "async_value"


class TestDataVisibility:
    """Test data visibility and synchronization timing."""

    def test_immediate_visibility_after_put(self):
        """Data is visible immediately after put()."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        pool.put("immediate_key", "immediate_value")
        # Within the same process it should be visible immediately
        assert pool.exists("immediate_key")
        assert pool.get("immediate_key") == "immediate_value"

    def test_cross_process_visibility_with_delay(self):
        """Cross-process visibility (with a delay)."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Parent writes
        pool.put("delayed_key", "delayed_value")

        # Child reads after a delay
        result_queue = multiprocessing.Queue()
        p = multiprocessing.Process(
            target=worker_wait_and_get, args=("delayed_key", 0.2, result_queue)
        )
        p.start()
        p.join()

        result = result_queue.get()
        assert result == "delayed_value"


class TestConcurrentModifications:
    """Test concurrent modification scenarios."""

    def test_concurrent_counter_increments(self):
        """Concurrent counter increments (non-atomic; race conditions are expected)."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Initialize the counter
        pool.put("counter", 0)

        num_processes = 4
        iterations_per_process = 10

        processes = []
        for _ in range(num_processes):
            p = multiprocessing.Process(
                target=worker_increment_counter,
                args=("counter", iterations_per_process),
            )
            processes.append(p)
            p.start()

        for p in processes:
            p.join()

        time.sleep(0.1)

        # Because of the race condition the final value may be less than expected;
        # this test mainly verifies that concurrent access does not crash
        final_count = pool.get("counter")
        assert isinstance(final_count, int)
        assert 0 <= final_count <= num_processes * iterations_per_process

    def test_concurrent_pop_operations(self):
        """Concurrent pop operations."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        # Initialize several keys
        num_keys = 5
        for i in range(num_keys):
            pool.put(f"pop_key_{i}", f"pop_value_{i}")

        result_queue = multiprocessing.Queue()
        processes = []

        for i in range(num_keys):
            p = multiprocessing.Process(
                target=worker_pop_data, args=(f"pop_key_{i}", result_queue)
            )
            processes.append(p)
            p.start()

        for p in processes:
            p.join()

        # Collect everything that was popped
        popped_values = []
        for _ in range(num_keys):
            popped_values.append(result_queue.get())

        # Verify that every value was popped
        assert len(popped_values) == num_keys
        for i in range(num_keys):
            assert f"pop_value_{i}" in popped_values

        # Verify that every key was removed
        time.sleep(0.1)
        for i in range(num_keys):
            assert not pool.exists(f"pop_key_{i}")


class TestComplexDataTypes:
    """Test multiprocess sharing of complex data types."""

    def test_share_nested_dict(self):
        """Share a nested dictionary."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        nested_data = {
            "level1": {"level2": {"level3": [1, 2, 3]}},
            "list_of_dicts": [{"a": 1}, {"b": 2}],
        }

        pool.put("nested_key", nested_data)

        result_queue = multiprocessing.Queue()
        p = multiprocessing.Process(
            target=worker_get_data, args=("nested_key", result_queue)
        )
        p.start()
        p.join()

        result = result_queue.get()
        assert result == nested_data

    def test_share_large_list(self):
        """Share a large list."""
        pool = MultiProcessingSharedPool()
        pool.clear()

        large_list = list(range(10000))
        pool.put("large_list_key", large_list)

        result_queue = multiprocessing.Queue()
        p = multiprocessing.Process(
            target=worker_get_data, args=("large_list_key", result_queue)
        )
        p.start()
        p.join()

        result = result_queue.get()
        assert result == large_list
569
test/test_numpy.py
Normal file
@ -0,0 +1,569 @@
"""
NumPy ndarray support tests - verify sharing of array data.
"""

import multiprocessing
import time
import pytest
import numpy as np
from mpsp.mpsp import MultiProcessingSharedPool


# ==================== Helper functions ====================


def worker_get_array(key, result_queue):
    """Child process: fetch the array and put it on the result queue."""
    pool = MultiProcessingSharedPool()
    arr = pool.get(key)
    result_queue.put(arr)


def worker_modify_array(key, index, value, result_queue):
    """Child process: read the array, modify a specific index, return the original value."""
    pool = MultiProcessingSharedPool()
    arr = pool.get(key)
    old_value = arr[index].copy() if isinstance(index, tuple) else arr[index]
    # Note: a modification here would only touch a copy, because get() returns a
    # copy of the array (a write-back variant is sketched after these helpers)
    result_queue.put(old_value)


def worker_sum_array(key, result_queue):
    """Child process: compute the sum of the array elements."""
    pool = MultiProcessingSharedPool()
    arr = pool.get(key)
    result_queue.put(np.sum(arr))


def worker_check_array_properties(key, expected_shape, expected_dtype, result_queue):
    """Child process: check the array's properties."""
    pool = MultiProcessingSharedPool()
    arr = pool.get(key)
    result_queue.put(
        (arr.shape == expected_shape, arr.dtype == expected_dtype, arr.ndim)
    )


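worker_modify_array above only ever observes a copy, so changes made in a child are not seen elsewhere. A minimal sketch of the write-back pattern this implies, assuming put() with the same key simply replaces the stored array (the helper name is hypothetical and not used by the tests below):

def worker_modify_array_and_write_back(key, index, value, result_queue):
    # Hypothetical variant: modify the local copy, then put() it back under the
    # same key so the change becomes visible to other processes.
    pool = MultiProcessingSharedPool()
    arr = pool.get(key)  # a copy of the stored array
    old_value = arr[index]
    arr[index] = value   # only changes the local copy...
    pool.put(key, arr)   # ...until it is written back
    result_queue.put(old_value)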
# ==================== Test classes ====================
|
||||
|
||||
|
||||
class TestNDBasicOperations:
|
||||
"""测试 NumPy 数组基本操作"""
|
||||
|
||||
def test_1d_array(self):
|
||||
"""测试一维数组存取"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([1, 2, 3, 4, 5])
|
||||
pool.put("1d_array", arr)
|
||||
|
||||
retrieved = pool.get("1d_array")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
assert retrieved.dtype == arr.dtype
|
||||
|
||||
def test_2d_array(self):
|
||||
"""测试二维数组存取"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
|
||||
pool.put("2d_array", arr)
|
||||
|
||||
retrieved = pool.get("2d_array")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
assert retrieved.shape == (3, 3)
|
||||
|
||||
def test_3d_array(self):
|
||||
"""测试三维数组存取"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.arange(24).reshape(2, 3, 4)
|
||||
pool.put("3d_array", arr)
|
||||
|
||||
retrieved = pool.get("3d_array")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
assert retrieved.shape == (2, 3, 4)
|
||||
|
||||
def test_multidimensional_array(self):
|
||||
"""测试高维数组存取"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 4维数组
|
||||
arr = np.random.rand(2, 3, 4, 5)
|
||||
pool.put("4d_array", arr)
|
||||
|
||||
retrieved = pool.get("4d_array")
|
||||
np.testing.assert_array_almost_equal(retrieved, arr)
|
||||
assert retrieved.shape == (2, 3, 4, 5)
|
||||
|
||||
|
||||
class TestNDDataTypes:
|
||||
"""测试不同数据类型的 NumPy 数组"""
|
||||
|
||||
def test_integer_dtypes(self):
|
||||
"""测试整数类型数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
dtypes = [
|
||||
np.int8,
|
||||
np.int16,
|
||||
np.int32,
|
||||
np.int64,
|
||||
np.uint8,
|
||||
np.uint16,
|
||||
np.uint32,
|
||||
np.uint64,
|
||||
]
|
||||
|
||||
for dtype in dtypes:
|
||||
arr = np.array([1, 2, 3], dtype=dtype)
|
||||
key = f"int_array_{dtype.__name__}"
|
||||
pool.put(key, arr)
|
||||
|
||||
retrieved = pool.get(key)
|
||||
assert retrieved.dtype == dtype
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
|
||||
def test_float_dtypes(self):
|
||||
"""测试浮点类型数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
dtypes = [np.float32, np.float64]
|
||||
|
||||
for dtype in dtypes:
|
||||
arr = np.array([1.1, 2.2, 3.3], dtype=dtype)
|
||||
key = f"float_array_{dtype.__name__}"
|
||||
pool.put(key, arr)
|
||||
|
||||
retrieved = pool.get(key)
|
||||
assert retrieved.dtype == dtype
|
||||
np.testing.assert_array_almost_equal(retrieved, arr)
|
||||
|
||||
def test_complex_dtypes(self):
|
||||
"""测试复数类型数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
dtypes = [np.complex64, np.complex128]
|
||||
|
||||
for dtype in dtypes:
|
||||
arr = np.array([1 + 2j, 3 + 4j, 5 + 6j], dtype=dtype)
|
||||
key = f"complex_array_{dtype.__name__}"
|
||||
pool.put(key, arr)
|
||||
|
||||
retrieved = pool.get(key)
|
||||
assert retrieved.dtype == dtype
|
||||
np.testing.assert_array_almost_equal(retrieved, arr)
|
||||
|
||||
def test_bool_dtype(self):
|
||||
"""测试布尔类型数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([True, False, True, True, False], dtype=np.bool_)
|
||||
pool.put("bool_array", arr)
|
||||
|
||||
retrieved = pool.get("bool_array")
|
||||
assert retrieved.dtype == np.bool_
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
|
||||
def test_string_dtype(self):
|
||||
"""测试字符串类型数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# Unicode 字符串
|
||||
arr = np.array(["hello", "world", "mpsp"])
|
||||
pool.put("string_array", arr)
|
||||
|
||||
retrieved = pool.get("string_array")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
|
||||
def test_object_dtype(self):
|
||||
"""测试对象类型数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 对象数组可以存储不同类型的数据
|
||||
arr = np.array([1, "string", 3.14, [1, 2, 3]], dtype=object)
|
||||
pool.put("object_array", arr)
|
||||
|
||||
retrieved = pool.get("object_array")
|
||||
assert retrieved.dtype == object
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
|
||||
|
||||
class TestNDCrossProcess:
|
||||
"""测试 NumPy 数组跨进程共享"""
|
||||
|
||||
def test_1d_array_cross_process(self):
|
||||
"""测试一维数组跨进程传递"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([10, 20, 30, 40, 50])
|
||||
pool.put("shared_1d", arr)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_get_array, args=("shared_1d", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
np.testing.assert_array_equal(result, arr)
|
||||
|
||||
def test_2d_array_cross_process(self):
|
||||
"""测试二维数组跨进程传递"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([[1, 2], [3, 4], [5, 6]])
|
||||
pool.put("shared_2d", arr)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_get_array, args=("shared_2d", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
np.testing.assert_array_equal(result, arr)
|
||||
|
||||
def test_array_properties_cross_process(self):
|
||||
"""测试数组属性跨进程保持"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.arange(12).reshape(3, 4).astype(np.float32)
|
||||
pool.put("property_test", arr)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_check_array_properties,
|
||||
args=("property_test", (3, 4), np.float32, result_queue),
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
shape_match, dtype_match, ndim = result_queue.get()
|
||||
assert shape_match
|
||||
assert dtype_match
|
||||
assert ndim == 2
|
||||
|
||||
def test_array_operations_cross_process(self):
|
||||
"""测试在子进程中执行数组操作"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([1, 2, 3, 4, 5, 6, 7, 8, 9, 10])
|
||||
pool.put("sum_test", arr)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_sum_array, args=("sum_test", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
assert result == 55 # sum of 1..10
|
||||
|
||||
|
||||
class TestNDSpecialArrays:
|
||||
"""测试特殊类型的 NumPy 数组"""
|
||||
|
||||
def test_empty_array(self):
|
||||
"""测试空数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([])
|
||||
pool.put("empty_array", arr)
|
||||
|
||||
retrieved = pool.get("empty_array")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
assert len(retrieved) == 0
|
||||
|
||||
def test_single_element_array(self):
|
||||
"""测试单元素数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([42])
|
||||
pool.put("single_element", arr)
|
||||
|
||||
retrieved = pool.get("single_element")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
assert retrieved[0] == 42
|
||||
|
||||
def test_zeros_array(self):
|
||||
"""测试零数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.zeros((5, 5))
|
||||
pool.put("zeros_array", arr)
|
||||
|
||||
retrieved = pool.get("zeros_array")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
assert np.all(retrieved == 0)
|
||||
|
||||
def test_ones_array(self):
|
||||
"""测试全一数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.ones((3, 4))
|
||||
pool.put("ones_array", arr)
|
||||
|
||||
retrieved = pool.get("ones_array")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
assert np.all(retrieved == 1)
|
||||
|
||||
def test_eye_array(self):
|
||||
"""测试单位矩阵"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.eye(5)
|
||||
pool.put("eye_array", arr)
|
||||
|
||||
retrieved = pool.get("eye_array")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
assert np.all(np.diag(retrieved) == 1)
|
||||
|
||||
def test_nan_and_inf_array(self):
|
||||
"""测试包含 NaN 和 Inf 的数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([1.0, np.nan, np.inf, -np.inf, 2.0])
|
||||
pool.put("special_values", arr)
|
||||
|
||||
retrieved = pool.get("special_values")
|
||||
assert np.isnan(retrieved[1])
|
||||
assert np.isinf(retrieved[2]) and retrieved[2] > 0
|
||||
assert np.isinf(retrieved[3]) and retrieved[3] < 0
|
||||
|
||||
def test_masked_array(self):
|
||||
"""测试掩码数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
data = np.array([1, 2, 3, 4, 5])
|
||||
mask = np.array([False, True, False, True, False])
|
||||
arr = np.ma.array(data, mask=mask)
|
||||
|
||||
pool.put("masked_array", arr)
|
||||
|
||||
retrieved = pool.get("masked_array")
|
||||
np.testing.assert_array_equal(retrieved.data, data)
|
||||
np.testing.assert_array_equal(retrieved.mask, mask)
|
||||
|
||||
|
||||
class TestNDLargeArrays:
|
||||
"""测试大型 NumPy 数组"""
|
||||
|
||||
def test_large_1d_array(self):
|
||||
"""测试大型一维数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 10000 个元素的数组
|
||||
arr = np.arange(10000)
|
||||
pool.put("large_1d", arr)
|
||||
|
||||
retrieved = pool.get("large_1d")
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
|
||||
def test_large_2d_array(self):
|
||||
"""测试大型二维数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 1000x100 的数组
|
||||
arr = np.random.rand(1000, 100)
|
||||
pool.put("large_2d", arr)
|
||||
|
||||
retrieved = pool.get("large_2d")
|
||||
np.testing.assert_array_almost_equal(retrieved, arr)
|
||||
|
||||
def test_large_array_cross_process(self):
|
||||
"""测试大型数组跨进程传递"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.arange(100000).reshape(1000, 100)
|
||||
pool.put("large_cross", arr)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_sum_array, args=("large_cross", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
expected_sum = np.sum(arr)
|
||||
assert result == expected_sum
|
||||
|
||||
|
||||
class TestNDStructuredArrays:
|
||||
"""测试结构化数组"""
|
||||
|
||||
def test_structured_array(self):
|
||||
"""测试结构化数组存取"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
dt = np.dtype([("name", "U10"), ("age", "i4"), ("weight", "f4")])
|
||||
arr = np.array(
|
||||
[("Alice", 25, 55.5), ("Bob", 30, 85.3), ("Charlie", 35, 75.0)], dtype=dt
|
||||
)
|
||||
|
||||
pool.put("structured_array", arr)
|
||||
|
||||
retrieved = pool.get("structured_array")
|
||||
assert retrieved.dtype == dt
|
||||
np.testing.assert_array_equal(retrieved, arr)
|
||||
|
||||
def test_structured_array_cross_process(self):
|
||||
"""测试结构化数组跨进程传递"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
dt = np.dtype([("x", "f4"), ("y", "f4"), ("z", "f4")])
|
||||
arr = np.array([(1.0, 2.0, 3.0), (4.0, 5.0, 6.0)], dtype=dt)
|
||||
|
||||
pool.put("structured_cross", arr)
|
||||
|
||||
result_queue = multiprocessing.Queue()
|
||||
p = multiprocessing.Process(
|
||||
target=worker_get_array, args=("structured_cross", result_queue)
|
||||
)
|
||||
p.start()
|
||||
p.join()
|
||||
|
||||
result = result_queue.get()
|
||||
np.testing.assert_array_equal(result, arr)
|
||||
|
||||
|
||||
class TestNDMatrixOperations:
|
||||
"""测试矩阵操作相关的数组"""
|
||||
|
||||
def test_matrix_multiplication(self):
|
||||
"""测试矩阵乘法用的数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
A = np.array([[1, 2], [3, 4]])
|
||||
B = np.array([[5, 6], [7, 8]])
|
||||
|
||||
pool.put("matrix_A", A)
|
||||
pool.put("matrix_B", B)
|
||||
|
||||
retrieved_A = pool.get("matrix_A")
|
||||
retrieved_B = pool.get("matrix_B")
|
||||
|
||||
result = np.dot(retrieved_A, retrieved_B)
|
||||
expected = np.array([[19, 22], [43, 50]])
|
||||
np.testing.assert_array_equal(result, expected)
|
||||
|
||||
def test_eigenvalue_computation(self):
|
||||
"""测试特征值计算"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
# 对称矩阵
|
||||
arr = np.array([[4, 2], [2, 4]])
|
||||
pool.put("eigen_matrix", arr)
|
||||
|
||||
retrieved = pool.get("eigen_matrix")
|
||||
eigenvalues, eigenvectors = np.linalg.eig(retrieved)
|
||||
|
||||
# 特征值应该是 6 和 2
|
||||
assert np.allclose(sorted(eigenvalues), [2, 6])
|
||||
|
||||
def test_svd_decomposition(self):
|
||||
"""测试 SVD 分解"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
|
||||
pool.put("svd_matrix", arr)
|
||||
|
||||
retrieved = pool.get("svd_matrix")
|
||||
U, S, Vh = np.linalg.svd(retrieved)
|
||||
|
||||
# 验证分解结果
|
||||
reconstructed = U @ np.diag(S) @ Vh
|
||||
np.testing.assert_array_almost_equal(reconstructed, arr)
|
||||
|
||||
|
||||
class TestNDBroadcasting:
|
||||
"""测试广播机制"""
|
||||
|
||||
def test_broadcasting_operation(self):
|
||||
"""测试广播操作"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
arr_2d = np.array([[1, 2, 3], [4, 5, 6]])
|
||||
arr_1d = np.array([10, 20, 30])
|
||||
|
||||
pool.put("array_2d", arr_2d)
|
||||
pool.put("array_1d", arr_1d)
|
||||
|
||||
retrieved_2d = pool.get("array_2d")
|
||||
retrieved_1d = pool.get("array_1d")
|
||||
|
||||
result = retrieved_2d + retrieved_1d
|
||||
expected = np.array([[11, 22, 33], [14, 25, 36]])
|
||||
np.testing.assert_array_equal(result, expected)
|
||||
|
||||
|
||||
class TestNDMixedTypes:
|
||||
"""测试混合数据类型的数组相关操作"""
|
||||
|
||||
def test_array_in_dict(self):
|
||||
"""测试字典中包含数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
data = {
|
||||
"matrix": np.array([[1, 2], [3, 4]]),
|
||||
"vector": np.array([1, 2, 3]),
|
||||
"scalar": 42,
|
||||
"name": "test",
|
||||
}
|
||||
|
||||
pool.put("dict_with_arrays", data)
|
||||
|
||||
retrieved = pool.get("dict_with_arrays")
|
||||
np.testing.assert_array_equal(retrieved["matrix"], data["matrix"])
|
||||
np.testing.assert_array_equal(retrieved["vector"], data["vector"])
|
||||
assert retrieved["scalar"] == 42
|
||||
assert retrieved["name"] == "test"
|
||||
|
||||
def test_array_in_list(self):
|
||||
"""测试列表中包含数组"""
|
||||
pool = MultiProcessingSharedPool()
|
||||
pool.clear()
|
||||
|
||||
data = [np.array([1, 2, 3]), np.array([[4, 5], [6, 7]]), "string", 42]
|
||||
|
||||
pool.put("list_with_arrays", data)
|
||||
|
||||
retrieved = pool.get("list_with_arrays")
|
||||
np.testing.assert_array_equal(retrieved[0], data[0])
|
||||
np.testing.assert_array_equal(retrieved[1], data[1])
|
||||
assert retrieved[2] == "string"
|
||||
assert retrieved[3] == 42
|
||||
323
uv.lock
generated
Normal file
@ -0,0 +1,323 @@
|
||||
version = 1
|
||||
revision = 3
|
||||
requires-python = ">=3.10"
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.11'",
|
||||
"python_full_version < '3.11'",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cloudpickle"
|
||||
version = "3.1.2"
|
||||
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
|
||||
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/27/fb/576f067976d320f5f0114a8d9fa1215425441bb35627b1993e5afd8111e5/cloudpickle-3.1.2.tar.gz", hash = "sha256:7fda9eb655c9c230dab534f1983763de5835249750e85fbcef43aaa30a9a2414" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/88/39/799be3f2f0f38cc727ee3b4f1445fe6d5e4133064ec2e4115069418a5bb6/cloudpickle-3.1.2-py3-none-any.whl", hash = "sha256:9acb47f6afd73f60dc1df93bb801b472f05ff42fa6c84167d25cb206be1fbf4a" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "colorama"
|
||||
version = "0.4.6"
|
||||
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
|
||||
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "exceptiongroup"
|
||||
version = "1.3.1"
|
||||
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
|
||||
dependencies = [
|
||||
{ name = "typing-extensions", marker = "python_full_version < '3.11'" },
|
||||
]
|
||||
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "iniconfig"
|
||||
version = "2.3.0"
|
||||
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
|
||||
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "mpsp"
|
||||
version = "0.1.0"
|
||||
source = { editable = "." }
|
||||
|
||||
[package.dev-dependencies]
|
||||
dev = [
|
||||
{ name = "cloudpickle" },
|
||||
{ name = "numpy", version = "2.2.6", source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }, marker = "python_full_version < '3.11'" },
|
||||
{ name = "numpy", version = "2.4.2", source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }, marker = "python_full_version >= '3.11'" },
|
||||
{ name = "pytest" },
|
||||
]
|
||||
|
||||
[package.metadata]
|
||||
|
||||
[package.metadata.requires-dev]
|
||||
dev = [
|
||||
{ name = "cloudpickle", specifier = ">=3.1.2" },
|
||||
{ name = "numpy", specifier = ">=2.2.6" },
|
||||
{ name = "pytest", specifier = ">=9.0.2" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "2.2.6"
|
||||
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version < '3.11'",
|
||||
]
|
||||
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/76/21/7d2a95e4bba9dc13d043ee156a356c0a8f0c6309dff6b21b4d71a073b8a8/numpy-2.2.6.tar.gz", hash = "sha256:e29554e2bef54a90aa5cc07da6ce955accb83f21ab5de01a62c8478897b264fd" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/9a/3e/ed6db5be21ce87955c0cbd3009f2803f59fa08df21b5df06862e2d8e2bdd/numpy-2.2.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b412caa66f72040e6d268491a59f2c43bf03eb6c96dd8f0307829feb7fa2b6fb" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/22/c2/4b9221495b2a132cc9d2eb862e21d42a009f5a60e45fc44b00118c174bff/numpy-2.2.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8e41fd67c52b86603a91c1a505ebaef50b3314de0213461c7a6e99c9a3beff90" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/fd/77/dc2fcfc66943c6410e2bf598062f5959372735ffda175b39906d54f02349/numpy-2.2.6-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:37e990a01ae6ec7fe7fa1c26c55ecb672dd98b19c3d0e1d1f326fa13cb38d163" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/7a/4f/1cb5fdc353a5f5cc7feb692db9b8ec2c3d6405453f982435efc52561df58/numpy-2.2.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:5a6429d4be8ca66d889b7cf70f536a397dc45ba6faeb5f8c5427935d9592e9cf" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/eb/17/96a3acd228cec142fcb8723bd3cc39c2a474f7dcf0a5d16731980bcafa95/numpy-2.2.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efd28d4e9cd7d7a8d39074a4d44c63eda73401580c5c76acda2ce969e0a38e83" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b4/63/3de6a34ad7ad6646ac7d2f55ebc6ad439dbbf9c4370017c50cf403fb19b5/numpy-2.2.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc7b73d02efb0e18c000e9ad8b83480dfcd5dfd11065997ed4c6747470ae8915" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/07/b6/89d837eddef52b3d0cec5c6ba0456c1bf1b9ef6a6672fc2b7873c3ec4e2e/numpy-2.2.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:74d4531beb257d2c3f4b261bfb0fc09e0f9ebb8842d82a7b4209415896adc680" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/01/c8/dc6ae86e3c61cfec1f178e5c9f7858584049b6093f843bca541f94120920/numpy-2.2.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8fc377d995680230e83241d8a96def29f204b5782f371c532579b4f20607a289" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/5b/c5/0064b1b7e7c89137b471ccec1fd2282fceaae0ab3a9550f2568782d80357/numpy-2.2.6-cp310-cp310-win32.whl", hash = "sha256:b093dd74e50a8cba3e873868d9e93a85b78e0daf2e98c6797566ad8044e8363d" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/a3/dd/4b822569d6b96c39d1215dbae0582fd99954dcbcf0c1a13c61783feaca3f/numpy-2.2.6-cp310-cp310-win_amd64.whl", hash = "sha256:f0fd6321b839904e15c46e0d257fdd101dd7f530fe03fd6359c1ea63738703f3" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/da/a8/4f83e2aa666a9fbf56d6118faaaf5f1974d456b1823fda0a176eff722839/numpy-2.2.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f9f1adb22318e121c5c69a09142811a201ef17ab257a1e66ca3025065b7f53ae" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b3/2b/64e1affc7972decb74c9e29e5649fac940514910960ba25cd9af4488b66c/numpy-2.2.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c820a93b0255bc360f53eca31a0e676fd1101f673dda8da93454a12e23fc5f7a" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/4a/9f/0121e375000b5e50ffdd8b25bf78d8e1a5aa4cca3f185d41265198c7b834/numpy-2.2.6-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3d70692235e759f260c3d837193090014aebdf026dfd167834bcba43e30c2a42" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/31/0d/b48c405c91693635fbe2dcd7bc84a33a602add5f63286e024d3b6741411c/numpy-2.2.6-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:481b49095335f8eed42e39e8041327c05b0f6f4780488f61286ed3c01368d491" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/52/b8/7f0554d49b565d0171eab6e99001846882000883998e7b7d9f0d98b1f934/numpy-2.2.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b64d8d4d17135e00c8e346e0a738deb17e754230d7e0810ac5012750bbd85a5a" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b3/dd/2238b898e51bd6d389b7389ffb20d7f4c10066d80351187ec8e303a5a475/numpy-2.2.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba10f8411898fc418a521833e014a77d3ca01c15b0c6cdcce6a0d2897e6dbbdf" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/83/6c/44d0325722cf644f191042bf47eedad61c1e6df2432ed65cbe28509d404e/numpy-2.2.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bd48227a919f1bafbdda0583705e547892342c26fb127219d60a5c36882609d1" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/ae/9d/81e8216030ce66be25279098789b665d49ff19eef08bfa8cb96d4957f422/numpy-2.2.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:9551a499bf125c1d4f9e250377c1ee2eddd02e01eac6644c080162c0c51778ab" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/6a/fd/e19617b9530b031db51b0926eed5345ce8ddc669bb3bc0044b23e275ebe8/numpy-2.2.6-cp311-cp311-win32.whl", hash = "sha256:0678000bb9ac1475cd454c6b8c799206af8107e310843532b04d49649c717a47" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/31/0a/f354fb7176b81747d870f7991dc763e157a934c717b67b58456bc63da3df/numpy-2.2.6-cp311-cp311-win_amd64.whl", hash = "sha256:e8213002e427c69c45a52bbd94163084025f533a55a59d6f9c5b820774ef3303" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/82/5d/c00588b6cf18e1da539b45d3598d3557084990dcc4331960c15ee776ee41/numpy-2.2.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:41c5a21f4a04fa86436124d388f6ed60a9343a6f767fced1a8a71c3fbca038ff" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/66/ee/560deadcdde6c2f90200450d5938f63a34b37e27ebff162810f716f6a230/numpy-2.2.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de749064336d37e340f640b05f24e9e3dd678c57318c7289d222a8a2f543e90c" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/3c/65/4baa99f1c53b30adf0acd9a5519078871ddde8d2339dc5a7fde80d9d87da/numpy-2.2.6-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:894b3a42502226a1cac872f840030665f33326fc3dac8e57c607905773cdcde3" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/cc/89/e5a34c071a0570cc40c9a54eb472d113eea6d002e9ae12bb3a8407fb912e/numpy-2.2.6-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:71594f7c51a18e728451bb50cc60a3ce4e6538822731b2933209a1f3614e9282" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f8/35/8c80729f1ff76b3921d5c9487c7ac3de9b2a103b1cd05e905b3090513510/numpy-2.2.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2618db89be1b4e05f7a1a847a9c1c0abd63e63a1607d892dd54668dd92faf87" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/8c/3d/1e1db36cfd41f895d266b103df00ca5b3cbe965184df824dec5c08c6b803/numpy-2.2.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd83c01228a688733f1ded5201c678f0c53ecc1006ffbc404db9f7a899ac6249" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/61/c6/03ed30992602c85aa3cd95b9070a514f8b3c33e31124694438d88809ae36/numpy-2.2.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:37c0ca431f82cd5fa716eca9506aefcabc247fb27ba69c5062a6d3ade8cf8f49" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b7/25/5761d832a81df431e260719ec45de696414266613c9ee268394dd5ad8236/numpy-2.2.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fe27749d33bb772c80dcd84ae7e8df2adc920ae8297400dabec45f0dedb3f6de" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/57/0a/72d5a3527c5ebffcd47bde9162c39fae1f90138c961e5296491ce778e682/numpy-2.2.6-cp312-cp312-win32.whl", hash = "sha256:4eeaae00d789f66c7a25ac5f34b71a7035bb474e679f410e5e1a94deb24cf2d4" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/36/fa/8c9210162ca1b88529ab76b41ba02d433fd54fecaf6feb70ef9f124683f1/numpy-2.2.6-cp312-cp312-win_amd64.whl", hash = "sha256:c1f9540be57940698ed329904db803cf7a402f3fc200bfe599334c9bd84a40b2" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f9/5c/6657823f4f594f72b5471f1db1ab12e26e890bb2e41897522d134d2a3e81/numpy-2.2.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0811bb762109d9708cca4d0b13c4f67146e3c3b7cf8d34018c722adb2d957c84" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/dc/9e/14520dc3dadf3c803473bd07e9b2bd1b69bc583cb2497b47000fed2fa92f/numpy-2.2.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:287cc3162b6f01463ccd86be154f284d0893d2b3ed7292439ea97eafa8170e0b" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/4f/06/7e96c57d90bebdce9918412087fc22ca9851cceaf5567a45c1f404480e9e/numpy-2.2.6-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:f1372f041402e37e5e633e586f62aa53de2eac8d98cbfb822806ce4bbefcb74d" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/73/ed/63d920c23b4289fdac96ddbdd6132e9427790977d5457cd132f18e76eae0/numpy-2.2.6-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:55a4d33fa519660d69614a9fad433be87e5252f4b03850642f88993f7b2ca566" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/85/c5/e19c8f99d83fd377ec8c7e0cf627a8049746da54afc24ef0a0cb73d5dfb5/numpy-2.2.6-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f92729c95468a2f4f15e9bb94c432a9229d0d50de67304399627a943201baa2f" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/19/49/4df9123aafa7b539317bf6d342cb6d227e49f7a35b99c287a6109b13dd93/numpy-2.2.6-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1bc23a79bfabc5d056d106f9befb8d50c31ced2fbc70eedb8155aec74a45798f" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b2/6c/04b5f47f4f32f7c2b0e7260442a8cbcf8168b0e1a41ff1495da42f42a14f/numpy-2.2.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e3143e4451880bed956e706a3220b4e5cf6172ef05fcc397f6f36a550b1dd868" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/17/0a/5cd92e352c1307640d5b6fec1b2ffb06cd0dabe7d7b8227f97933d378422/numpy-2.2.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4f13750ce79751586ae2eb824ba7e1e8dba64784086c98cdbbcc6a42112ce0d" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f0/3b/5cba2b1d88760ef86596ad0f3d484b1cbff7c115ae2429678465057c5155/numpy-2.2.6-cp313-cp313-win32.whl", hash = "sha256:5beb72339d9d4fa36522fc63802f469b13cdbe4fdab4a288f0c441b74272ebfd" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/cb/3b/d58c12eafcb298d4e6d0d40216866ab15f59e55d148a5658bb3132311fcf/numpy-2.2.6-cp313-cp313-win_amd64.whl", hash = "sha256:b0544343a702fa80c95ad5d3d608ea3599dd54d4632df855e4c8d24eb6ecfa1c" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/6b/9e/4bf918b818e516322db999ac25d00c75788ddfd2d2ade4fa66f1f38097e1/numpy-2.2.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0bca768cd85ae743b2affdc762d617eddf3bcf8724435498a1e80132d04879e6" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/61/66/d2de6b291507517ff2e438e13ff7b1e2cdbdb7cb40b3ed475377aece69f9/numpy-2.2.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:fc0c5673685c508a142ca65209b4e79ed6740a4ed6b2267dbba90f34b0b3cfda" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/e4/25/480387655407ead912e28ba3a820bc69af9adf13bcbe40b299d454ec011f/numpy-2.2.6-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:5bd4fc3ac8926b3819797a7c0e2631eb889b4118a9898c84f585a54d475b7e40" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/aa/4a/6e313b5108f53dcbf3aca0c0f3e9c92f4c10ce57a0a721851f9785872895/numpy-2.2.6-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:fee4236c876c4e8369388054d02d0e9bb84821feb1a64dd59e137e6511a551f8" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b7/30/172c2d5c4be71fdf476e9de553443cf8e25feddbe185e0bd88b096915bcc/numpy-2.2.6-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1dda9c7e08dc141e0247a5b8f49cf05984955246a327d4c48bda16821947b2f" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/12/fb/9e743f8d4e4d3c710902cf87af3512082ae3d43b945d5d16563f26ec251d/numpy-2.2.6-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f447e6acb680fd307f40d3da4852208af94afdfab89cf850986c3ca00562f4fa" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/12/75/ee20da0e58d3a66f204f38916757e01e33a9737d0b22373b3eb5a27358f9/numpy-2.2.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:389d771b1623ec92636b0786bc4ae56abafad4a4c513d36a55dce14bd9ce8571" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/76/95/bef5b37f29fc5e739947e9ce5179ad402875633308504a52d188302319c8/numpy-2.2.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8e9ace4a37db23421249ed236fdcdd457d671e25146786dfc96835cd951aa7c1" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/09/04/f2f83279d287407cf36a7a8053a5abe7be3622a4363337338f2585e4afda/numpy-2.2.6-cp313-cp313t-win32.whl", hash = "sha256:038613e9fb8c72b0a41f025a7e4c3f0b7a1b5d768ece4796b674c8f3fe13efff" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/67/0e/35082d13c09c02c011cf21570543d202ad929d961c02a147493cb0c2bdf5/numpy-2.2.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6031dd6dfecc0cf9f668681a37648373bddd6421fff6c66ec1624eed0180ee06" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/9e/3b/d94a75f4dbf1ef5d321523ecac21ef23a3cd2ac8b78ae2aac40873590229/numpy-2.2.6-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0b605b275d7bd0c640cad4e5d30fa701a8d59302e127e5f79138ad62762c3e3d" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/17/f4/09b2fa1b58f0fb4f7c7963a1649c64c4d315752240377ed74d9cd878f7b5/numpy-2.2.6-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:7befc596a7dc9da8a337f79802ee8adb30a552a94f792b9c9d18c840055907db" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/af/30/feba75f143bdc868a1cc3f44ccfa6c4b9ec522b36458e738cd00f67b573f/numpy-2.2.6-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce47521a4754c8f4593837384bd3424880629f718d87c5d44f8ed763edd63543" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/37/48/ac2a9584402fb6c0cd5b5d1a91dcf176b15760130dd386bbafdbfe3640bf/numpy-2.2.6-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d042d24c90c41b54fd506da306759e06e568864df8ec17ccc17e9e884634fd00" },
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "numpy"
|
||||
version = "2.4.2"
|
||||
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
|
||||
resolution-markers = [
|
||||
"python_full_version >= '3.11'",
|
||||
]
|
||||
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/57/fd/0005efbd0af48e55eb3c7208af93f2862d4b1a56cd78e84309a2d959208d/numpy-2.4.2.tar.gz", hash = "sha256:659a6107e31a83c4e33f763942275fd278b21d095094044eb35569e86a21ddae" }
|
||||
wheels = [
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/d3/44/71852273146957899753e69986246d6a176061ea183407e95418c2aa4d9a/numpy-2.4.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e7e88598032542bd49af7c4747541422884219056c268823ef6e5e89851c8825" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/74/41/5d17d4058bd0cd96bcbd4d9ff0fb2e21f52702aab9a72e4a594efa18692f/numpy-2.4.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7edc794af8b36ca37ef5fcb5e0d128c7e0595c7b96a2318d1badb6fcd8ee86b1" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/49/48/fb1ce8136c19452ed15f033f8aee91d5defe515094e330ce368a0647846f/numpy-2.4.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6e9f61981ace1360e42737e2bae58b27bf28a1b27e781721047d84bd754d32e7" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/40/a9/3feb49f17bbd1300dd2570432961f5c8a4ffeff1db6f02c7273bd020a4c9/numpy-2.4.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:cb7bbb88aa74908950d979eeaa24dbdf1a865e3c7e45ff0121d8f70387b55f73" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/3f/39/fdf35cbd6d6e2fcad42fcf85ac04a85a0d0fbfbf34b30721c98d602fd70a/numpy-2.4.2-cp311-cp311-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f069069931240b3fc703f1e23df63443dbd6390614c8c44a87d96cd0ec81eb1" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/1b/46/6fa4ea94f1ddf969b2ee941290cca6f1bfac92b53c76ae5f44afe17ceb69/numpy-2.4.2-cp311-cp311-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c02ef4401a506fb60b411467ad501e1429a3487abca4664871d9ae0b46c8ba32" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/09/a1/2a424e162b1a14a5bd860a464ab4e07513916a64ab1683fae262f735ccd2/numpy-2.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2653de5c24910e49c2b106499803124dde62a5a1fe0eedeaecf4309a5f639390" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/ce/a2/73014149ff250628df72c58204822ac01d768697913881aacf839ff78680/numpy-2.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1ae241bbfc6ae276f94a170b14785e561cb5e7f626b6688cf076af4110887413" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/6c/0c/73e8be2f1accd56df74abc1c5e18527822067dced5ec0861b5bb882c2ce0/numpy-2.4.2-cp311-cp311-win32.whl", hash = "sha256:df1b10187212b198dd45fa943d8985a3c8cf854aed4923796e0e019e113a1bda" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/76/ae/e0265e0163cf127c24c3969d29f1c4c64551a1e375d95a13d32eab25d364/numpy-2.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:b9c618d56a29c9cb1c4da979e9899be7578d2e0b3c24d52079c166324c9e8695" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/29/a5/c43029af9b8014d6ea157f192652c50042e8911f4300f8f6ed3336bf437f/numpy-2.4.2-cp311-cp311-win_arm64.whl", hash = "sha256:47c5a6ed21d9452b10227e5e8a0e1c22979811cad7dcc19d8e3e2fb8fa03f1a3" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/51/6e/6f394c9c77668153e14d4da83bcc247beb5952f6ead7699a1a2992613bea/numpy-2.4.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:21982668592194c609de53ba4933a7471880ccbaadcc52352694a59ecc860b3a" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/1f/f8/55483431f2b2fd015ae6ed4fe62288823ce908437ed49db5a03d15151678/numpy-2.4.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40397bda92382fcec844066efb11f13e1c9a3e2a8e8f318fb72ed8b6db9f60f1" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/2f/20/18026832b1845cdc82248208dd929ca14c9d8f2bac391f67440707fff27c/numpy-2.4.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:b3a24467af63c67829bfaa61eecf18d5432d4f11992688537be59ecd6ad32f5e" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/7d/33/2eb97c8a77daaba34eaa3fa7241a14ac5f51c46a6bd5911361b644c4a1e2/numpy-2.4.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:805cc8de9fd6e7a22da5aed858e0ab16be5a4db6c873dde1d7451c541553aa27" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b1/91/b97fdfd12dc75b02c44e26c6638241cc004d4079a0321a69c62f51470c4c/numpy-2.4.2-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d82351358ffbcdcd7b686b90742a9b86632d6c1c051016484fa0b326a0a1548" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f5/c6/a18e59f3f0b8071cc85cbc8d80cd02d68aa9710170b2553a117203d46936/numpy-2.4.2-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e35d3e0144137d9fdae62912e869136164534d64a169f86438bc9561b6ad49f" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b7/83/9751502164601a79e18847309f5ceec0b1446d7b6aa12305759b72cf98b2/numpy-2.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:adb6ed2ad29b9e15321d167d152ee909ec73395901b70936f029c3bc6d7f4460" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/61/c4/c4066322256ec740acc1c8923a10047818691d2f8aec254798f3dd90f5f2/numpy-2.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8906e71fd8afcb76580404e2a950caef2685df3d2a57fe82a86ac8d33cc007ba" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/ab/af/6157aa6da728fa4525a755bfad486ae7e3f76d4c1864138003eb84328497/numpy-2.4.2-cp312-cp312-win32.whl", hash = "sha256:ec055f6dae239a6299cace477b479cca2fc125c5675482daf1dd886933a1076f" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/92/0f/7ceaaeaacb40567071e94dbf2c9480c0ae453d5bb4f52bea3892c39dc83c/numpy-2.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:209fae046e62d0ce6435fcfe3b1a10537e858249b3d9b05829e2a05218296a85" },
|
||||
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/2f/a3/56c5c604fae6dd40fa2ed3040d005fca97e91bd320d232ac9931d77ba13c/numpy-2.4.2-cp312-cp312-win_arm64.whl", hash = "sha256:fbde1b0c6e81d56f5dccd95dd4a711d9b95df1ae4009a60887e56b27e8d903fa" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/a1/22/815b9fe25d1d7ae7d492152adbc7226d3eff731dffc38fe970589fcaaa38/numpy-2.4.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:25f2059807faea4b077a2b6837391b5d830864b3543627f381821c646f31a63c" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/09/f0/817d03a03f93ba9c6c8993de509277d84e69f9453601915e4a69554102a1/numpy-2.4.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bd3a7a9f5847d2fb8c2c6d1c862fa109c31a9abeca1a3c2bd5a64572955b2979" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/da/b4/f805ab79293c728b9a99438775ce51885fd4f31b76178767cfc718701a39/numpy-2.4.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:8e4549f8a3c6d13d55041925e912bfd834285ef1dd64d6bc7d542583355e2e98" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/74/09/826e4289844eccdcd64aac27d13b0fd3f32039915dd5b9ba01baae1f436c/numpy-2.4.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:aea4f66ff44dfddf8c2cffd66ba6538c5ec67d389285292fe428cb2c738c8aef" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/19/fb/cbfdbfa3057a10aea5422c558ac57538e6acc87ec1669e666d32ac198da7/numpy-2.4.2-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c3cd545784805de05aafe1dde61752ea49a359ccba9760c1e5d1c88a93bbf2b7" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/04/dc/46066ce18d01645541f0186877377b9371b8fa8017fa8262002b4ef22612/numpy-2.4.2-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0d9b7c93578baafcbc5f0b83eaf17b79d345c6f36917ba0c67f45226911d499" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/14/d9/4b5adfc39a43fa6bf918c6d544bc60c05236cc2f6339847fc5b35e6cb5b0/numpy-2.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f74f0f7779cc7ae07d1810aab8ac6b1464c3eafb9e283a40da7309d5e6e48fbb" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b7/20/adb6e6adde6d0130046e6fdfb7675cc62bc2f6b7b02239a09eb58435753d/numpy-2.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:c7ac672d699bf36275c035e16b65539931347d68b70667d28984c9fb34e07fa7" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/78/0e/0a73b3dff26803a8c02baa76398015ea2a5434d9b8265a7898a6028c1591/numpy-2.4.2-cp313-cp313-win32.whl", hash = "sha256:8e9afaeb0beff068b4d9cd20d322ba0ee1cecfb0b08db145e4ab4dd44a6b5110" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/43/bc/6352f343522fcb2c04dbaf94cb30cca6fd32c1a750c06ad6231b4293708c/numpy-2.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:7df2de1e4fba69a51c06c28f5a3de36731eb9639feb8e1cf7e4a7b0daf4cf622" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/6e/8d/6da186483e308da5da1cc6918ce913dcfe14ffde98e710bfeff2a6158d4e/numpy-2.4.2-cp313-cp313-win_arm64.whl", hash = "sha256:0fece1d1f0a89c16b03442eae5c56dc0be0c7883b5d388e0c03f53019a4bfd71" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/25/a1/9510aa43555b44781968935c7548a8926274f815de42ad3997e9e83680dd/numpy-2.4.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5633c0da313330fd20c484c78cdd3f9b175b55e1a766c4a174230c6b70ad8262" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/36/30/6bbb5e76631a5ae46e7923dd16ca9d3f1c93cfa8d4ed79a129814a9d8db3/numpy-2.4.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:d9f64d786b3b1dd742c946c42d15b07497ed14af1a1f3ce840cce27daa0ce913" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/46/00/3a490938800c1923b567b3a15cd17896e68052e2145d8662aaf3e1ffc58f/numpy-2.4.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:b21041e8cb6a1eb5312dd1d2f80a94d91efffb7a06b70597d44f1bd2dfc315ab" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/d3/e9/fac0890149898a9b609caa5af7455a948b544746e4b8fe7c212c8edd71f8/numpy-2.4.2-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:00ab83c56211a1d7c07c25e3217ea6695e50a3e2f255053686b081dc0b091a82" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/ea/5c/08887c54e68e1e28df53709f1893ce92932cc6f01f7c3d4dc952f61ffd4e/numpy-2.4.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2fb882da679409066b4603579619341c6d6898fc83a8995199d5249f986e8e8f" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/4d/89/253db0fa0e66e9129c745e4ef25631dc37d5f1314dad2b53e907b8538e6d/numpy-2.4.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:66cb9422236317f9d44b67b4d18f44efe6e9c7f8794ac0462978513359461554" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/2a/d5/cbade46ce97c59c6c3da525e8d95b7abe8a42974a1dc5c1d489c10433e88/numpy-2.4.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:0f01dcf33e73d80bd8dc0f20a71303abbafa26a19e23f6b68d1aa9990af90257" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/40/62/48f99ae172a4b63d981babe683685030e8a3df4f246c893ea5c6ef99f018/numpy-2.4.2-cp313-cp313t-win32.whl", hash = "sha256:52b913ec40ff7ae845687b0b34d8d93b60cb66dcee06996dd5c99f2fc9328657" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/07/38/e054a61cfe48ad9f1ed0d188e78b7e26859d0b60ef21cd9de4897cdb5326/numpy-2.4.2-cp313-cp313t-win_amd64.whl", hash = "sha256:5eea80d908b2c1f91486eb95b3fb6fab187e569ec9752ab7d9333d2e66bf2d6b" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/6e/a4/a05c3a6418575e185dd84d0b9680b6bb2e2dc3e4202f036b7b4e22d6e9dc/numpy-2.4.2-cp313-cp313t-win_arm64.whl", hash = "sha256:fd49860271d52127d61197bb50b64f58454e9f578cb4b2c001a6de8b1f50b0b1" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/18/88/b7df6050bf18fdcfb7046286c6535cabbdd2064a3440fca3f069d319c16e/numpy-2.4.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:444be170853f1f9d528428eceb55f12918e4fda5d8805480f36a002f1415e09b" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/25/7a/1fee4329abc705a469a4afe6e69b1ef7e915117747886327104a8493a955/numpy-2.4.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:d1240d50adff70c2a88217698ca844723068533f3f5c5fa6ee2e3220e3bdb000" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/fb/0b/f9e49ba6c923678ad5bc38181c08ac5e53b7a5754dbca8e581aa1a56b1ff/numpy-2.4.2-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:7cdde6de52fb6664b00b056341265441192d1291c130e99183ec0d4b110ff8b1" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/7d/12/d7de8f6f53f9bb76997e5e4c069eda2051e3fe134e9181671c4391677bb2/numpy-2.4.2-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:cda077c2e5b780200b6b3e09d0b42205a3d1c68f30c6dceb90401c13bff8fe74" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/09/63/c66418c2e0268a31a4cf8a8b512685748200f8e8e8ec6c507ce14e773529/numpy-2.4.2-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d30291931c915b2ab5717c2974bb95ee891a1cf22ebc16a8006bd59cd210d40a" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/5d/6c/7f237821c9642fb2a04d2f1e88b4295677144ca93285fd76eff3bcba858d/numpy-2.4.2-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bba37bc29d4d85761deed3954a1bc62be7cf462b9510b51d367b769a8c8df325" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/c2/a7/39c4cdda9f019b609b5c473899d87abff092fc908cfe4d1ecb2fcff453b0/numpy-2.4.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:b2f0073ed0868db1dcd86e052d37279eef185b9c8db5bf61f30f46adac63c909" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/da/b3/e84bb64bdfea967cc10950d71090ec2d84b49bc691df0025dddb7c26e8e3/numpy-2.4.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7f54844851cdb630ceb623dcec4db3240d1ac13d4990532446761baede94996a" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/88/f5/954a291bc1192a27081706862ac62bb5920fbecfbaa302f64682aa90beed/numpy-2.4.2-cp314-cp314-win32.whl", hash = "sha256:12e26134a0331d8dbd9351620f037ec470b7c75929cb8a1537f6bfe411152a1a" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/05/cb/eff72a91b2efdd1bc98b3b8759f6a1654aa87612fc86e3d87d6fe4f948c4/numpy-2.4.2-cp314-cp314-win_amd64.whl", hash = "sha256:068cdb2d0d644cdb45670810894f6a0600797a69c05f1ac478e8d31670b8ee75" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/37/75/62726948db36a56428fce4ba80a115716dc4fad6a3a4352487f8bb950966/numpy-2.4.2-cp314-cp314-win_arm64.whl", hash = "sha256:6ed0be1ee58eef41231a5c943d7d1375f093142702d5723ca2eb07db9b934b05" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/36/2f/ee93744f1e0661dc267e4b21940870cabfae187c092e1433b77b09b50ac4/numpy-2.4.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:98f16a80e917003a12c0580f97b5f875853ebc33e2eaa4bccfc8201ac6869308" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/a7/24/6535212add7d76ff938d8bdc654f53f88d35cddedf807a599e180dcb8e66/numpy-2.4.2-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:20abd069b9cda45874498b245c8015b18ace6de8546bf50dfa8cea1696ed06ef" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/5e/9d/c48f0a035725f925634bf6b8994253b43f2047f6778a54147d7e213bc5a7/numpy-2.4.2-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:e98c97502435b53741540a5717a6749ac2ada901056c7db951d33e11c885cc7d" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/81/05/7c73a9574cd4a53a25907bad38b59ac83919c0ddc8234ec157f344d57d9a/numpy-2.4.2-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:da6cad4e82cb893db4b69105c604d805e0c3ce11501a55b5e9f9083b47d2ffe8" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/35/fa/4de10089f21fc7d18442c4a767ab156b25c2a6eaf187c0db6d9ecdaeb43f/numpy-2.4.2-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e4424677ce4b47fe73c8b5556d876571f7c6945d264201180db2dc34f676ab5" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b8/f9/d33e4ffc857f3763a57aa85650f2e82486832d7492280ac21ba9efda80da/numpy-2.4.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2b8f157c8a6f20eb657e240f8985cc135598b2b46985c5bccbde7616dc9c6b1e" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/c8/b8/54bdb43b6225badbea6389fa038c4ef868c44f5890f95dd530a218706da3/numpy-2.4.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5daf6f3914a733336dab21a05cdec343144600e964d2fcdabaac0c0269874b2a" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/a5/55/6e1a61ded7af8df04016d81b5b02daa59f2ea9252ee0397cb9f631efe9e5/numpy-2.4.2-cp314-cp314t-win32.whl", hash = "sha256:8c50dd1fc8826f5b26a5ee4d77ca55d88a895f4e4819c7ecc2a9f5905047a443" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/45/aa/fa6118d1ed6d776b0983f3ceac9b1a5558e80df9365b1c3aa6d42bf9eee4/numpy-2.4.2-cp314-cp314t-win_amd64.whl", hash = "sha256:fcf92bee92742edd401ba41135185866f7026c502617f422eb432cfeca4fe236" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/32/0a/2ec5deea6dcd158f254a7b372fb09cfba5719419c8d66343bab35237b3fb/numpy-2.4.2-cp314-cp314t-win_arm64.whl", hash = "sha256:1f92f53998a17265194018d1cc321b2e96e900ca52d54c7c77837b71b9465181" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f4/f8/50e14d36d915ef64d8f8bc4a087fc8264d82c785eda6711f80ab7e620335/numpy-2.4.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:89f7268c009bc492f506abd6f5265defa7cb3f7487dc21d357c3d290add45082" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/17/17/809b5cad63812058a8189e91a1e2d55a5a18fd04611dbad244e8aeae465c/numpy-2.4.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:e6dee3bb76aa4009d5a912180bf5b2de012532998d094acee25d9cb8dee3e44a" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/3e/ea/181b9bcf7627fc8371720316c24db888dcb9829b1c0270abf3d288b2e29b/numpy-2.4.2-pp311-pypy311_pp73-macosx_14_0_arm64.whl", hash = "sha256:cd2bd2bbed13e213d6b55dc1d035a4f91748a7d3edc9480c13898b0353708920" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/33/9f/413adf3fc955541ff5536b78fcf0754680b3c6d95103230252a2c9408d23/numpy-2.4.2-pp311-pypy311_pp73-macosx_14_0_x86_64.whl", hash = "sha256:cf28c0c1d4c4bf00f509fa7eb02c58d7caf221b50b467bcb0d9bbf1584d5c821" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/91/da/643aad274e29ccbdf42ecd94dafe524b81c87bcb56b83872d54827f10543/numpy-2.4.2-pp311-pypy311_pp73-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e04ae107ac591763a47398bb45b568fc38f02dbc4aa44c063f67a131f99346cb" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/66/27/965b8525e9cb5dc16481b30a1b3c21e50c7ebf6e9dbd48d0c4d0d5089c7e/numpy-2.4.2-pp311-pypy311_pp73-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:602f65afdef699cda27ec0b9224ae5dc43e328f4c24c689deaf77133dbee74d0" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/de/e5/b7d20451657664b07986c2f6e3be564433f5dcaf3482d68eaecd79afaf03/numpy-2.4.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:be71bf1edb48ebbbf7f6337b5bfd2f895d1902f6335a5830b20141fc126ffba0" },
]

[[package]]
name = "packaging"
version = "26.0"
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4" }
wheels = [
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529" },
]

[[package]]
name = "pluggy"
version = "1.6.0"
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3" }
wheels = [
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746" },
]

[[package]]
name = "pygments"
version = "2.19.2"
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887" }
wheels = [
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b" },
]

[[package]]
name = "pytest"
version = "9.0.2"
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
dependencies = [
{ name = "colorama", marker = "sys_platform == 'win32'" },
{ name = "exceptiongroup", marker = "python_full_version < '3.11'" },
{ name = "iniconfig" },
{ name = "packaging" },
{ name = "pluggy" },
{ name = "pygments" },
{ name = "tomli", marker = "python_full_version < '3.11'" },
]
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11" }
wheels = [
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b" },
]

[[package]]
name = "tomli"
version = "2.4.0"
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c" }
wheels = [
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4" },
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a" },
]

[[package]]
name = "typing-extensions"
version = "4.15.0"
source = { registry = "https://mirrors.sustech.edu.cn/pypi/web/simple" }
sdist = { url = "https://mirrors.sustech.edu.cn/pypi/web/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466" }
wheels = [
{ url = "https://mirrors.sustech.edu.cn/pypi/web/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548" },
]