Remove unused

Michael Diskin, 3 years ago
commit 4c957e1f4c

+ 7 - 3
.gitignore

@@ -20,19 +20,23 @@ __pycache__/
 
 # Distribution / packaging
 .Python
-env/
-bin/
 build/
 develop-eggs/
 dist/
+downloads/
 eggs/
+.eggs/
+lib/
 lib64/
 parts/
 sdist/
 var/
+wheels/
+share/python-wheels/
 *.egg-info/
 .installed.cfg
-*.egg/
+*.egg
+MANIFEST
 
 # Installer logs
 pip-log.txt

+ 0 - 1
benchmarks/benchmark_averaging.py

@@ -9,7 +9,6 @@ import hivemind
 from hivemind.proto import runtime_pb2
 from hivemind.utils.limits import increase_file_limit
 from hivemind.utils.logging import get_logger, use_hivemind_log_handler
-from hivemind.utils.networking import LOCALHOST
 
 use_hivemind_log_handler("in_root_logger")
 logger = get_logger(__name__)

+ 0 - 1
benchmarks/benchmark_dht.py

@@ -3,7 +3,6 @@ import asyncio
 import random
 import time
 import uuid
-from logging import shutdown
 from typing import Tuple
 
 import numpy as np

+ 2 - 4
hivemind/averaging/averager.py

@@ -26,8 +26,7 @@ from hivemind.compression import (
     CompressionBase,
     CompressionInfo,
     NoCompression,
-    deserialize_torch_tensor,
-    serialize_torch_tensor,
+    deserialize_torch_tensor
 )
 from hivemind.dht import DHT, DHTID
 from hivemind.p2p import P2P, P2PContext, P2PHandlerError, PeerID, ServicerBase
@@ -36,13 +35,12 @@ from hivemind.proto import averaging_pb2
 from hivemind.utils import MPFuture, TensorDescriptor, get_logger
 from hivemind.utils.asyncio import (
     achain,
-    afirst,
     aiter_with_timeout,
     anext,
     as_aiter,
     azip,
     enter_asynchronously,
-    switch_to_uvloop,
+    switch_to_uvloop
 )
 from hivemind.utils.grpc import combine_from_streaming, split_for_streaming
 from hivemind.utils.serializer import MSGPackSerializer, SerializerBase

+ 1 - 1
hivemind/averaging/key_manager.py

@@ -7,7 +7,7 @@ import numpy as np
 from hivemind.averaging.group_info import GroupInfo
 from hivemind.dht import DHT
 from hivemind.p2p import PeerID
-from hivemind.utils import DHTExpiration, ValueWithExpiration, get_dht_time, get_logger
+from hivemind.utils import DHTExpiration, get_logger
 
 GroupKey = str
 GROUP_PATTERN = re.compile("^(([^.])+)[.]0b[01]*$")  # e.g. bert_exp4_averaging.0b01001101

+ 2 - 1
hivemind/dht/__init__.py

@@ -15,5 +15,6 @@ The code is organized as follows:
 
 from hivemind.dht.dht import DHT
 from hivemind.dht.node import DEFAULT_NUM_WORKERS, DHTNode
-from hivemind.dht.routing import DHTID, DHTExpiration, DHTKey, DHTValue, Subkey
+from hivemind.dht.routing import DHTID, DHTKey, DHTValue, Subkey
+from hivemind.utils import DHTExpiration
 from hivemind.dht.validation import CompositeValidator, RecordValidatorBase

+ 1 - 1
hivemind/dht/routing.py

@@ -10,7 +10,7 @@ from itertools import chain
 from typing import Any, Dict, List, Optional, Sequence, Set, Tuple, Union
 
 from hivemind.p2p import PeerID
-from hivemind.utils import DHTExpiration, MSGPackSerializer, get_dht_time
+from hivemind.utils import MSGPackSerializer, get_dht_time
 
 DHTKey = Subkey = DHTValue = Any
 BinaryDHTID = BinaryDHTValue = bytes

+ 0 - 1
hivemind/moe/server/runtime.py

@@ -1,5 +1,4 @@
 import multiprocessing as mp
-import multiprocessing.pool
 import threading
 from collections import defaultdict
 from itertools import chain

+ 1 - 1
hivemind/optim/grad_averager.py

@@ -6,7 +6,7 @@ import torch
 import hivemind
 from hivemind.averaging import DecentralizedAverager
 from hivemind.averaging.control import StepControl
-from hivemind.utils import DHTExpiration, get_dht_time, get_logger
+from hivemind.utils import DHTExpiration, get_logger
 
 logger = get_logger(__name__)
 

+ 1 - 1
hivemind/optim/state_averager.py

@@ -14,7 +14,7 @@ from hivemind.averaging import DecentralizedAverager
 from hivemind.averaging.control import StepControl
 from hivemind.compression import CompressionInfo, TensorRole
 from hivemind.optim.grad_scaler import GradScaler
-from hivemind.utils import DHTExpiration, PerformanceEMA, get_dht_time, get_logger, nested_flatten, nested_pack
+from hivemind.utils import DHTExpiration, PerformanceEMA, get_logger, nested_flatten, nested_pack
 
 logger = get_logger(__name__)
 

+ 1 - 1
hivemind/p2p/p2p_daemon.py

@@ -8,7 +8,7 @@ from contextlib import closing, suppress
 from dataclasses import dataclass
 from datetime import datetime
 from importlib.resources import path
-from typing import Any, AsyncIterator, Awaitable, Callable, Dict, List, Optional, Sequence, Tuple, Type, TypeVar, Union
+from typing import Any, AsyncIterator, Awaitable, Callable, List, Optional, Sequence, Tuple, Type, TypeVar, Union
 
 from google.protobuf.message import Message
 from multiaddr import Multiaddr

+ 1 - 1
hivemind/utils/asyncio.py

@@ -2,7 +2,7 @@ import asyncio
 import concurrent.futures
 from concurrent.futures import ThreadPoolExecutor
 from contextlib import AbstractAsyncContextManager, AbstractContextManager, asynccontextmanager
-from typing import AsyncIterable, AsyncIterator, Awaitable, Callable, ContextManager, Optional, Tuple, TypeVar, Union
+from typing import AsyncIterable, AsyncIterator, Awaitable, Callable, Optional, Tuple, TypeVar, Union
 
 import uvloop
 

+ 1 - 1
hivemind/utils/grpc.py

@@ -6,7 +6,7 @@ from __future__ import annotations
 
 import os
 import threading
-from typing import Any, Dict, Iterable, Iterator, NamedTuple, Optional, Tuple, Type, TypeVar, Union
+from typing import Any, Dict, Iterable, Iterator, NamedTuple, Optional, Tuple, TypeVar, Union
 
 import grpc
 

+ 1 - 1
hivemind/utils/mpfuture.py

@@ -8,7 +8,7 @@ import threading
 import uuid
 from contextlib import nullcontext
 from enum import Enum, auto
-from typing import Any, Callable, Dict, Generic, Optional, Type, TypeVar
+from typing import Any, Callable, Dict, Generic, Optional, TypeVar
 from weakref import ref
 
 import torch  # used for py3.7-compatible shared memory

+ 1 - 2
tests/test_allreduce.py

@@ -10,8 +10,7 @@ from hivemind import Quantile8BitQuantization, aenumerate
 from hivemind.averaging.allreduce import AllReduceRunner, AveragingMode
 from hivemind.averaging.partition import TensorPartContainer, TensorPartReducer
 from hivemind.compression import deserialize_torch_tensor
-from hivemind.p2p import P2P, StubBase
-from hivemind.proto.runtime_pb2 import CompressionType
+from hivemind.p2p import P2P
 
 
 @pytest.mark.forked

+ 0 - 1
tests/test_averaging.py

@@ -6,7 +6,6 @@ import pytest
 import torch
 
 import hivemind
-import hivemind.averaging.averager
 from hivemind.averaging.allreduce import AveragingMode
 from hivemind.averaging.control import AveragingStage
 from hivemind.averaging.key_manager import GroupKeyManager