run_server.py
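"""Launch a hivemind Server that hosts one or more experts, configured from the command line or config.yml."""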

from functools import partial

import configargparse
import torch
from hivemind.proto.runtime_pb2 import CompressionType
from hivemind.server import Server
from hivemind.utils.threading import increase_file_limit


def main():
    # fmt:off
    parser = configargparse.ArgParser(default_config_files=["config.yml"])
    parser.add('-c', '--config', required=False, is_config_file=True, help='config file path')
    parser.add_argument('--listen_on', type=str, default='0.0.0.0:*', required=False,
                        help="'localhost' for local connections only, '0.0.0.0' for ipv4, '[::]' for ipv6")
    parser.add_argument('--num_experts', type=int, default=None, required=False,
                        help="The number of experts to serve")
    parser.add_argument('--expert_pattern', type=str, default=None, required=False,
                        help='all expert uids will follow this pattern, e.g. "myexpert.[0:256].[0:1024]" will sample random expert uids'
                             ' between myexpert.0.0 and myexpert.255.1023. Use either num_experts and this or expert_uids')
    parser.add_argument('--expert_uids', type=str, nargs="*", default=None, required=False,
                        help="specify the exact list of expert uids to create. Use either this or num_experts"
                             " and expert_pattern, not both")
    parser.add_argument('--expert_cls', type=str, default='ffn', required=False,
                        help="expert type from test_utils.layers, e.g. 'ffn', 'transformer', 'det_dropout' or 'nop'.")
    parser.add_argument('--hidden_dim', type=int, default=1024, required=False,
                        help='main dimension for expert_cls')
    parser.add_argument('--num_handlers', type=int, default=None, required=False,
                        help='server will use this many processes to handle incoming requests')
    parser.add_argument('--max_batch_size', type=int, default=16384, required=False,
                        help='The total number of examples in the same batch will not exceed this value')
    parser.add_argument('--device', type=str, default=None, required=False,
                        help='all experts will use this device in torch notation; default: cuda if available, else cpu')
    parser.add_argument('--optimizer', type=str, default='adam', required=False, help='adam, sgd or none')
    parser.add_argument('--no_dht', action='store_true', help='if specified, the server will not be attached to a dht')
    parser.add_argument('--initial_peers', type=str, nargs='*', required=False, default=[],
                        help='one or more peers that can welcome you to the dht, e.g. 1.2.3.4:1337 192.132.231.4:4321')
    parser.add_argument('--dht_port', type=int, default=None, required=False,
                        help='DHT node will listen on this port')
    parser.add_argument('--increase_file_limit', action='store_true',
                        help='On *nix, this will increase the max number of processes '
                             'a server can spawn before hitting "Too many open files"; use at your own risk.')
    parser.add_argument('--compression', type=str, default='NONE', required=False,
                        help='Tensor compression parameter for grpc. Can be NONE, MEANSTD or FLOAT16')
    # fmt:on
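
    # All options can be given on the command line or in config.yml (read by
    # configargparse). For example (illustrative values, not part of this script):
    #   python run_server.py --num_experts 16 --expert_cls ffn --hidden_dim 512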
    args = vars(parser.parse_args())
    args.pop('config', None)

    # map the --optimizer choice onto a torch optimizer constructor
    optimizer = args.pop('optimizer')
    if optimizer == 'adam':
        optim_cls = torch.optim.Adam
    elif optimizer == 'sgd':
        optim_cls = partial(torch.optim.SGD, lr=0.01)
    elif optimizer == 'none':
        optim_cls = None
    else:
        raise ValueError("--optimizer must be adam, sgd or none")
    if args.pop('increase_file_limit'):
        increase_file_limit()

    # translate the --compression option into a grpc CompressionType value
    compression_name = args.pop("compression")
    if compression_name == "MEANSTD":
        compression = CompressionType.MEANSTD_LAST_AXIS_FLOAT16
    elif compression_name == "FLOAT16":
        compression = CompressionType.FLOAT16
    else:
        compression = CompressionType.NONE
    # create and start the server, then keep it running until interrupted
    server = Server.create(**args, optim_cls=optim_cls, start=True, verbose=True, compression=compression)
    try:
        server.join()
    finally:
        server.shutdown()


if __name__ == '__main__':
    main()
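
For reference, here is a rough sketch of the programmatic equivalent of running this script: it calls Server.create directly with a few of the keyword arguments that the script forwards through **args. The concrete values below (16 experts, the "expert.[0:256].[0:1024]" pattern) are illustrative placeholders, and the omitted options are assumed to fall back to the same defaults the parser defines.

from functools import partial

import torch
from hivemind.proto.runtime_pb2 import CompressionType
from hivemind.server import Server

# Illustrative values only; each keyword mirrors one of the command-line options above.
server = Server.create(
    listen_on='0.0.0.0:*',
    num_experts=16,
    expert_pattern='expert.[0:256].[0:1024]',
    expert_cls='ffn',
    hidden_dim=1024,
    max_batch_size=16384,
    optim_cls=partial(torch.optim.SGD, lr=0.01),
    compression=CompressionType.NONE,
    start=True,
    verbose=True,
)
try:
    server.join()  # block until the server stops
finally:
    server.shutdown()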