Initial commit.

2025-12-22 17:29:37 +08:00
commit b90f6269e7
7 changed files with 1020 additions and 0 deletions

.gitignore (vendored, new file, 216 lines)

@@ -0,0 +1,216 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[codz]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py.cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
# Pipfile.lock
# UV
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# uv.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
# poetry.lock
# poetry.toml
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python.
# https://pdm-project.org/en/latest/usage/project/#working-with-version-control
# pdm.lock
# pdm.toml
.pdm-python
.pdm-build/
# pixi
# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control.
# pixi.lock
# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one
# in the .venv directory. It is recommended not to include this directory in version control.
.pixi
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# Redis
*.rdb
*.aof
*.pid
# RabbitMQ
mnesia/
rabbitmq/
rabbitmq-data/
# ActiveMQ
activemq-data/
# SageMath parsed files
*.sage.py
# Environments
.env
.envrc
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
# .idea/
# Abstra
# Abstra is an AI-powered process automation framework.
# Ignore directories containing user credentials, local state, and settings.
# Learn more at https://abstra.io/docs
.abstra/
# Visual Studio Code
# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore
# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore
# and can be added to the global gitignore or merged into this file. However, if you prefer,
# you could uncomment the following to ignore the entire vscode folder
# .vscode/
# Ruff stuff:
.ruff_cache/
# PyPI configuration file
.pypirc
# Marimo
marimo/_static/
marimo/_lsp/
__marimo__/
# Streamlit
.streamlit/secrets.toml

LICENSE (new file, 9 lines)

@@ -0,0 +1,9 @@
MIT License

Copyright (c) 2024 Easy MP Share Contributors

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

easy_mp_share/__init__.py (new file, 34 lines)

@@ -0,0 +1,34 @@
"""
Easy MP Share - Multiprocess Data Sharing Module
A robust and efficient way to share data between processes using Python's
multiprocessing.Manager. This module implements a singleton pattern to ensure
consistent data access across different modules and processes.
Example:
>>> from easy_mp_share import MultiProcessingSharedPool
>>> pool = MultiProcessingSharedPool()
>>> pool.put("test_key", "test_value")
>>> pool.get("test_key")
'test_value'
>>> # Or use the convenience instance
>>> from easy_mp_share import shared_pool
>>> shared_pool.put("key", "value")
>>> shared_pool.get("key")
'value'
"""
from .core import MultiProcessingSharedPool
# Provide a convenient singleton instance for direct use
shared_pool = MultiProcessingSharedPool.get_instance()
__version__ = "0.1.0"
__author__ = "Easy MP Share Contributors"
__email__ = "contributors@easy-mp-share.org"
__all__ = [
"MultiProcessingSharedPool",
"shared_pool",
"__version__",
]

easy_mp_share/core.py (new file, 369 lines)

@@ -0,0 +1,369 @@
"""
Multiprocess Data Sharing Module
This module provides a robust and efficient way to share data between
processes using Python's multiprocessing.Manager. It implements a singleton
pattern to ensure consistent data access across different modules and processes.
"""
import multiprocessing
import multiprocessing.managers
import atexit
import threading
import os
import logging
import pickle
from typing import Any, Optional, List, Dict
# Try to import cloudpickle for advanced serialization (e.g., lambdas, local functions)
try:
import cloudpickle
except ImportError:
cloudpickle = None
# Configure logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
class _CloudPickleWrapper:
"""
A wrapper for data serialized with cloudpickle.
This is used to distinguish between normal data and data that requires
cloudpickle for deserialization.
"""
def __init__(self, blob: bytes):
self.blob = blob
class MultiProcessingSharedPool:
"""
A singleton class for sharing data between processes.
This class manages a multiprocessing.Manager and its shared dictionary.
It provides a dictionary-like interface for data sharing across processes.
It automatically handles complex objects like lambdas if cloudpickle is installed.
Attributes
----------
_instance : MultiProcessingSharedPool
The singleton instance.
_lock : threading.Lock
Lock for thread-safe singleton instantiation.
"""
_instance: Optional["MultiProcessingSharedPool"] = None
_lock = threading.Lock()
def __new__(cls) -> "MultiProcessingSharedPool":
"""
Ensure singleton instance creation.
Returns
-------
MultiProcessingSharedPool
The singleton instance.
"""
if cls._instance is None:
with cls._lock:
if cls._instance is None:
cls._instance = super(MultiProcessingSharedPool, cls).__new__(cls)
cls._instance._initialized = False
return cls._instance
def __init__(self):
"""
Initialize the MultiProcessingSharedPool instance.
Uses a flag to ensure initialization only happens once.
"""
if getattr(self, "_initialized", False):
return
self._manager: Optional[multiprocessing.managers.SyncManager] = None
self._shared_dict: Optional[Dict[str, Any]] = None
self._owner_pid = os.getpid()
self._init_lock = threading.Lock()
self._initialized = True
# Register cleanup to run at exit
atexit.register(self._cleanup)
@classmethod
def get_instance(cls) -> "MultiProcessingSharedPool":
"""
Get the singleton instance of MultiProcessingSharedPool.
Returns
-------
MultiProcessingSharedPool
The singleton instance.
"""
return cls()
def _ensure_initialized(self):
"""
Lazy initialization of the multiprocessing Manager and shared dictionary.
Raises
------
RuntimeError
If the multiprocessing Manager fails to start.
"""
if self._shared_dict is None:
with self._init_lock:
if self._shared_dict is None:
try:
# Use the default context for manager
self._manager = multiprocessing.Manager()
self._shared_dict = self._manager.dict()
logger.debug(
f"MultiProcessingSharedPool initialized in process {os.getpid()}"
)
except Exception as e:
logger.error(
f"Failed to initialize multiprocessing.Manager: {e}"
)
raise RuntimeError(f"IPC Initialization failed: {e}")
def _cleanup(self):
"""
Clean up resources. Only the owner process can shutdown the manager.
"""
if os.getpid() == self._owner_pid and self._manager is not None:
try:
self._manager.shutdown()
logger.debug(f"MultiProcessingSharedPool manager shutdown in process {os.getpid()}")
except Exception as e:
logger.debug(f"Error during manager shutdown: {e}")
finally:
self._manager = None
self._shared_dict = None
def put(self, label: str, data: Any) -> bool:
"""
Store data with a label.
Parameters
----------
label : str
The key for the data.
data : Any
The data to store.
Returns
-------
bool
True if successful.
"""
if not isinstance(label, str):
raise TypeError("Label must be a string")
self._ensure_initialized()
try:
# Try standard storage first
self._shared_dict[label] = data
return True
except (AttributeError, TypeError, pickle.PicklingError) as e:
# If standard pickle fails, try cloudpickle if available
if cloudpickle:
try:
logger.debug(f"Standard pickle failed for '{label}', trying cloudpickle.")
blob = cloudpickle.dumps(data)
self._shared_dict[label] = _CloudPickleWrapper(blob)
return True
except Exception as ce:
logger.error(f"Cloudpickle also failed for '{label}': {ce}")
return False
else:
logger.error(
f"Failed to put data for '{label}': {e}. "
"This often happens with lambdas or local functions. "
"Please install 'cloudpickle' to support these objects."
)
return False
except Exception as e:
logger.error(f"Unexpected error putting data for '{label}': {e}")
return False
def get(self, label: str, default: Any = None) -> Any:
"""
Retrieve data by label.
Parameters
----------
label : str
The key to look up.
default : Any, optional
Value to return if label is not found.
Returns
-------
Any
The stored data or default.
"""
if not isinstance(label, str):
raise TypeError("Label must be a string")
self._ensure_initialized()
try:
val = self._shared_dict.get(label, default)
# Check if it's a cloudpickle-wrapped object
if isinstance(val, _CloudPickleWrapper):
if cloudpickle:
return cloudpickle.loads(val.blob)
else:
logger.error(
f"Data for '{label}' requires 'cloudpickle' to be deserialized. "
"Please install it."
)
return default
return val
except Exception as e:
logger.error(f"Failed to get data for '{label}': {e}")
return default
def exists(self, label: str) -> bool:
"""
Check if label exists.
Parameters
----------
label : str
The key to check.
Returns
-------
bool
True if exists.
"""
self._ensure_initialized()
return label in self._shared_dict
def remove(self, label: str) -> bool:
"""
Remove a label and its data.
Parameters
----------
label : str
The key to remove.
Returns
-------
bool
True if removed, False if not found.
"""
self._ensure_initialized()
try:
if label in self._shared_dict:
del self._shared_dict[label]
return True
return False
except Exception as e:
logger.error(f"Failed to remove '{label}': {e}")
return False
def pop(self, label: str, default: Any = None) -> Any:
"""
Remove and return the data associated with the label.
Parameters
----------
label : str
The key to pop.
default : Any, optional
Value to return if label is not found.
Returns
-------
Any
The popped data or default.
"""
if not isinstance(label, str):
raise TypeError("Label must be a string")
self._ensure_initialized()
try:
val = self._shared_dict.pop(label, default)
if isinstance(val, _CloudPickleWrapper):
if cloudpickle:
return cloudpickle.loads(val.blob)
else:
logger.error(f"Popped data for '{label}' requires 'cloudpickle'.")
return default
return val
except Exception as e:
logger.error(f"Failed to pop data for '{label}': {e}")
return default
def size(self) -> int:
"""
Get the number of items stored.
Returns
-------
int
The number of items.
"""
self._ensure_initialized()
try:
return len(self._shared_dict)
except Exception as e:
logger.error(f"Failed to get size: {e}")
return 0
def clear(self):
"""Clear all shared data."""
self._ensure_initialized()
try:
self._shared_dict.clear()
except Exception as e:
logger.error(f"Failed to clear data: {e}")
def keys(self) -> List[str]:
"""
Get all labels.
Returns
-------
List[str]
List of keys.
"""
self._ensure_initialized()
try:
return list(self._shared_dict.keys())
except Exception as e:
logger.error(f"Failed to get keys: {e}")
return []
def __getitem__(self, key: str) -> Any:
"""Dictionary-like access."""
val = self.get(key)
if val is None and not self.exists(key):
raise KeyError(key)
return val
def __setitem__(self, key: str, value: Any):
"""Dictionary-like assignment."""
self.put(key, value)
def __delitem__(self, key: str):
"""Dictionary-like deletion."""
if not self.remove(key):
raise KeyError(key)
def __contains__(self, key: str) -> bool:
"""Support for 'in' operator."""
return self.exists(key)
def __enter__(self):
"""Context manager entry."""
self._ensure_initialized()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
"""Context manager exit."""
pass
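
For orientation, a minimal usage sketch of the dictionary-style interface and the cloudpickle fallback implemented above; the key names are illustrative, and the lambda branch assumes the optional cloudpickle dependency is installed:

from easy_mp_share import shared_pool

# Picklable data takes the standard path into the manager's shared dict.
shared_pool.put("config", {"retries": 3, "timeout": 1.5})
assert shared_pool["config"]["retries"] == 3
assert "config" in shared_pool

# A lambda cannot be serialized by the standard pickle module, so put()
# falls back to cloudpickle and stores a _CloudPickleWrapper blob;
# get() transparently deserializes it back into a callable.
if shared_pool.put("square", lambda x: x * x):
    square = shared_pool.get("square")
    print(square(4))  # prints: 16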

pyproject.toml (new file, 104 lines)

@@ -0,0 +1,104 @@
[project]
name = "easy-mp-share"
version = "0.1.0"
description = "A robust and efficient way to share data between processes using Python's multiprocessing.Manager"
readme = "README.md"
license = {text = "MIT"}
authors = [
    {name = "Easy MP Share Contributors"}
]
maintainers = [
    {name = "Easy MP Share Contributors"}
]
keywords = ["multiprocessing", "data-sharing", "ipc", "python", "singleton"]
classifiers = [
    "Development Status :: 4 - Beta",
    "Intended Audience :: Developers",
    "License :: OSI Approved :: MIT License",
    "Operating System :: OS Independent",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.8",
    "Programming Language :: Python :: 3.9",
    "Programming Language :: Python :: 3.10",
    "Programming Language :: Python :: 3.11",
    "Programming Language :: Python :: 3.12",
    "Topic :: Software Development :: Libraries :: Python Modules",
    "Topic :: System :: Distributed Computing",
]
requires-python = ">=3.8"
dependencies = [
    "cloudpickle>=3.1.2",
]

[project.urls]
Homepage = "https://github.com/your-username/easy-mp-share"
Repository = "https://github.com/your-username/easy-mp-share"
Documentation = "https://github.com/your-username/easy-mp-share#readme"
"Bug Tracker" = "https://github.com/your-username/easy-mp-share/issues"

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[dependency-groups]
dev = [
    "pytest>=7.0.0",
    "pytest-cov>=4.0.0",
    "black>=23.0.0",
    "ruff>=0.1.0",
    "mypy>=1.0.0",
    "numpy>=1.24.4",
    "pandas>=2.0.3",
]

[tool.black]
line-length = 88
target-version = ['py38']
include = '\.pyi?$'
extend-exclude = '''
/(
    # directories
    \.eggs
    | \.git
    | \.hg
    | \.mypy_cache
    | \.tox
    | \.venv
    | build
    | dist
)/
'''
[tool.ruff]
line-length = 88
target-version = "py38"

[tool.ruff.lint]
select = [
    "E",   # pycodestyle errors
    "W",   # pycodestyle warnings
    "F",   # pyflakes
    "I",   # isort
    "B",   # flake8-bugbear
    "C4",  # flake8-comprehensions
    "UP",  # pyupgrade
]
ignore = [
    "E501",  # line too long, handled by black
    "B008",  # do not perform function calls in argument defaults
]

[tool.ruff.lint.per-file-ignores]
"__init__.py" = ["F401"]

[tool.mypy]
python_version = "3.8"
warn_return_any = true
warn_unused_configs = true
disallow_untyped_defs = true

[tool.pytest.ini_options]
minversion = "7.0"
addopts = "-ra -q --strict-markers --strict-config"
testpaths = ["tests"]
python_files = ["test_*.py"]
python_classes = ["Test*"]
python_functions = ["test_*"]

tests/__init__.py (new file, 5 lines)

@@ -0,0 +1,5 @@
"""
Test package for easy_mp_share.
This package contains unit tests and integration tests for the easy_mp_share module.
"""

tests/test_core.py (new file, 283 lines)

@@ -0,0 +1,283 @@
"""
Unit tests for the MultiProcessingSharedPool core functionality.
These tests verify the singleton pattern, basic CRUD operations,
error handling, and dictionary-like interface.
"""
import pytest
import multiprocessing as mp
import time
from typing import Any, Dict
from easy_mp_share import MultiProcessingSharedPool, shared_pool
class TestMultiProcessingSharedPool:
"""Test cases for MultiProcessingSharedPool class."""
def setup_method(self):
"""Set up test fixtures before each test method."""
# Get a fresh instance for each test
self.pool = MultiProcessingSharedPool()
self.pool.clear() # Clear any existing data
def test_singleton_pattern(self):
"""Test that MultiProcessingSharedPool follows singleton pattern."""
pool1 = MultiProcessingSharedPool()
pool2 = MultiProcessingSharedPool()
# Should be the same instance
assert pool1 is pool2
# get_instance should also return the same instance
pool3 = MultiProcessingSharedPool.get_instance()
assert pool1 is pool3
def test_basic_put_and_get(self):
"""Test basic put and get operations."""
# Test string data
assert self.pool.put("test_key", "test_value")
assert self.pool.get("test_key") == "test_value"
# Test numeric data
assert self.pool.put("number", 42)
assert self.pool.get("number") == 42
# Test list data
test_list = [1, 2, 3, "four"]
assert self.pool.put("my_list", test_list)
assert self.pool.get("my_list") == test_list
# Test dict data
test_dict = {"key1": "value1", "key2": 123}
assert self.pool.put("my_dict", test_dict)
assert self.pool.get("my_dict") == test_dict
def test_get_with_default(self):
"""Test get operation with default values."""
# Non-existent key should return None by default
assert self.pool.get("non_existent") is None
# Non-existent key should return provided default
assert self.pool.get("non_existent", "default_value") == "default_value"
assert self.pool.get("non_existent", 42) == 42
# Existent key should return actual value, not default
self.pool.put("existing", "actual_value")
assert self.pool.get("existing", "default") == "actual_value"
def test_exists_method(self):
"""Test the exists method."""
assert not self.pool.exists("non_existent")
self.pool.put("existing_key", "value")
assert self.pool.exists("existing_key")
# Test with empty string key
self.pool.put("", "empty_key_value")
assert self.pool.exists("")
# Test with special characters
self.pool.put("key!@#$%", "special_chars")
assert self.pool.exists("key!@#$%")
def test_remove_method(self):
"""Test the remove method."""
# Remove non-existent key should return False
assert not self.pool.remove("non_existent")
# Remove existing key should return True
self.pool.put("to_remove", "value")
assert self.pool.remove("to_remove")
assert not self.pool.exists("to_remove")
# Should not be able to get removed key
assert self.pool.get("to_remove") is None
def test_pop_method(self):
"""Test the pop method."""
# Pop non-existent key should return default
assert self.pool.pop("non_existent") is None
assert self.pool.pop("non_existent", "default") == "default"
# Pop existing key should return value and remove it
self.pool.put("to_pop", "popped_value")
result = self.pool.pop("to_pop")
assert result == "popped_value"
assert not self.pool.exists("to_pop")
# Second pop should return default
assert self.pool.pop("to_pop", "gone") == "gone"
def test_size_method(self):
"""Test the size method."""
assert self.pool.size() == 0
self.pool.put("key1", "value1")
assert self.pool.size() == 1
self.pool.put("key2", "value2")
assert self.pool.size() == 2
self.pool.remove("key1")
assert self.pool.size() == 1
self.pool.clear()
assert self.pool.size() == 0
def test_keys_method(self):
"""Test the keys method."""
assert self.pool.keys() == []
# Add some keys
self.pool.put("first", "value1")
self.pool.put("second", "value2")
self.pool.put("third", "value3")
keys = self.pool.keys()
assert len(keys) == 3
assert "first" in keys
assert "second" in keys
assert "third" in keys
# Remove a key
self.pool.remove("second")
keys = self.pool.keys()
assert len(keys) == 2
assert "second" not in keys
def test_clear_method(self):
"""Test the clear method."""
self.pool.put("key1", "value1")
self.pool.put("key2", "value2")
assert self.pool.size() == 2
self.pool.clear()
assert self.pool.size() == 0
assert self.pool.keys() == []
assert self.pool.get("key1") is None
assert self.pool.get("key2") is None
def test_dictionary_interface(self):
"""Test dictionary-like interface."""
# Test __setitem__ and __getitem__
self.pool["dict_key"] = "dict_value"
assert self.pool["dict_key"] == "dict_value"
# Test __contains__
assert "dict_key" in self.pool
assert "non_existent" not in self.pool
# Test __delitem__
del self.pool["dict_key"]
assert "dict_key" not in self.pool
# Test KeyError for non-existent key
with pytest.raises(KeyError):
_ = self.pool["non_existent"]
with pytest.raises(KeyError):
del self.pool["non_existent"]
def test_type_validation(self):
"""Test type validation for keys."""
# Non-string keys should raise TypeError for put, get, and pop
with pytest.raises(TypeError):
self.pool.put(123, "value")
with pytest.raises(TypeError):
self.pool.get(123)
with pytest.raises(TypeError):
self.pool.pop(123)
# Note: __contains__ doesn't raise TypeError as it delegates to exists()
# which doesn't have type validation for performance reasons
# This is the current behavior of the implementation
result = 123 in self.pool # Should return False, not raise
assert result is False
def test_context_manager(self):
"""Test context manager interface."""
with MultiProcessingSharedPool() as pool:
pool.put("context_key", "context_value")
assert pool.get("context_key") == "context_value"
# Pool should still be usable after context
pool = MultiProcessingSharedPool()
assert pool.get("context_key") == "context_value"
def test_shared_pool_convenience_instance(self):
"""Test the shared_pool convenience instance."""
# Should be the same as MultiProcessingSharedPool.get_instance()
pool = MultiProcessingSharedPool.get_instance()
assert shared_pool is pool
# Should work like any other instance
shared_pool.put("convenience_key", "convenience_value")
assert shared_pool.get("convenience_key") == "convenience_value"
# Should be accessible from other instances
pool = MultiProcessingSharedPool()
assert pool.get("convenience_key") == "convenience_value"
    @pytest.mark.skipif(
        mp.get_start_method() != "fork",
        reason="Local worker function and inherited manager proxy require the fork start method",
    )
    def test_multiprocess_access(self):
        """Test that the pool works across multiple processes."""

        def worker_function(results_queue: mp.Queue, worker_id: int):
            """Worker function for multiprocess testing."""
            pool = MultiProcessingSharedPool()
            # Put some data
            pool.put(f"worker_{worker_id}_data", f"value_from_worker_{worker_id}")
            # Get data from main process
            main_data = pool.get("main_data")
            # Get data from previous workers (may be None; workers run concurrently)
            if worker_id > 1:
                prev_data = pool.get(f"worker_{worker_id - 1}_data")
                results_queue.put(prev_data)
            results_queue.put(main_data)
            results_queue.put(f"worker_{worker_id}_data")

        # Put initial data from main process
        self.pool.put("main_data", "value_from_main")
        results_queue = mp.Queue()
        processes = []
        # Start multiple worker processes
        for i in range(1, 4):
            p = mp.Process(target=worker_function, args=(results_queue, i))
            processes.append(p)
            p.start()
        # Wait for all processes to complete
        for p in processes:
            p.join()
        # Collect results
        results = []
        while not results_queue.empty():
            results.append(results_queue.get())
        # Verify data sharing worked
        assert "value_from_main" in results
        assert "worker_1_data" in results
        assert "worker_2_data" in results
        assert "worker_3_data" in results
        # Verify data is accessible from main process
        assert self.pool.get("worker_1_data") == "value_from_worker_1"
        assert self.pool.get("worker_2_data") == "value_from_worker_2"
        assert self.pool.get("worker_3_data") == "value_from_worker_3"


if __name__ == "__main__":
    pytest.main([__file__])
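
A note on the multiprocess test above: cross-process visibility depends on the fork start method, because a forked child inherits the parent's singleton and its manager proxy, so reads and writes land in the same manager process. A minimal standalone sketch of that behavior under the fork assumption (the script and key names below are illustrative, not part of this commit):

import multiprocessing as mp

from easy_mp_share import shared_pool

def child():
    # The forked child inherits the parent's singleton, so this proxy
    # talks to the parent's manager process rather than starting a new one.
    shared_pool.put("from_child", "hello")

if __name__ == "__main__":
    mp.set_start_method("fork", force=True)  # fork is the default on Linux
    shared_pool.put("from_parent", "hi")  # start the manager before forking
    p = mp.Process(target=child)
    p.start()
    p.join()
    print(shared_pool.get("from_child"))  # prints: hello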