test_sequence_manager.py

import threading
import time

import pytest
import torch
from hivemind import DHT, get_logger, use_hivemind_log_handler

from test_utils import *

from petals.client import RemoteSequenceManager, RemoteSequential
from petals.client.remote_model import DistributedBloomConfig
from petals.data_structures import UID_DELIMITER

use_hivemind_log_handler("in_root_logger")
logger = get_logger(__file__)


@pytest.mark.forked
def test_sequence_manager_shutdown():
    config = DistributedBloomConfig.from_pretrained(MODEL_NAME, initial_peers=INITIAL_PEERS)
    dht = DHT(initial_peers=config.initial_peers, client_mode=True, start=True)
    sequential = RemoteSequential(config, dht)
    shutdown_evt = threading.Event()

    # rebuild `sequential` with an instrumented sequence manager that reports when it is shut down
    block_uids = [f"{config.dht_prefix}{UID_DELIMITER}{i}" for i in range(config.n_layer)]
    sequential = RemoteSequential(
        config,
        dht,
        sequence_manager=TestSequenceManager(dht, block_uids, sequential.p2p, _was_shut_down=shutdown_evt, start=True),
    )

    assert sequential.sequence_manager.is_alive()
    assert sequential.sequence_manager._thread.ready.is_set()
    assert not shutdown_evt.is_set()

    # the manager must stay alive through a forward pass...
    sequential(torch.randn(1, 2, config.hidden_size))

    # ...and signal the event shortly after an explicit shutdown
    sequential.sequence_manager.shutdown()
    del sequential
    time.sleep(1)  # give the manager's background thread time to terminate

    assert shutdown_evt.is_set()


class TestSequenceManager(RemoteSequenceManager):
    """A sequence manager that signals if it was shut down"""

    def __init__(self, *args, _was_shut_down: threading.Event, **kwargs):
        super().__init__(*args, **kwargs)
        self._was_shut_down = _was_shut_down

    def shutdown(self):
        super().shutdown()
        assert not self.is_alive()
        self._was_shut_down.set()
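

# --- Reference sketch (illustration only, not petals API) -------------------
# The test above relies on a generic pattern: subclass a threaded component,
# override its shutdown hook to call the parent implementation, then set a
# threading.Event the test can wait on. `StoppableWorker` below is a
# hypothetical, standard-library-only stand-in for the sequence manager's
# background thread, included purely to show that pattern in isolation.
class StoppableWorker(threading.Thread):
    def __init__(self, _was_shut_down: threading.Event):
        super().__init__(daemon=True)
        self._stop_requested = threading.Event()
        self._was_shut_down = _was_shut_down

    def run(self):
        # stand-in for real background work: block until shutdown is requested
        self._stop_requested.wait()

    def shutdown(self):
        self._stop_requested.set()
        self.join()
        assert not self.is_alive()
        self._was_shut_down.set()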