custom_trainer.py

# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import inspect

import paddle
import paddlers
from paddlers.tasks.change_detector import BaseChangeDetector

from attach_tools import Attach

attach = Attach.to(paddlers.tasks.change_detector)


def make_trainer(net_type, attach_trainer=True):
    def _init_func(self,
                   num_classes=2,
                   use_mixed_loss=False,
                   losses=None,
                   **_params_):
        # Collect the default values of the network's keyword arguments.
        sig = inspect.signature(net_type.__init__)
        net_params = {
            k: p.default
            for k, p in sig.parameters.items() if p.default is not p.empty
        }
        net_params.pop('self', None)
        net_params.pop('num_classes', None)
        # Special rule to parse arguments from `_params_`.
        # When using pdrs.tasks.load_model, `_params_` is a dict with the key
        # '_params_'. This bypasses the dynamic modification/creation of the
        # function signature.
        if '_params_' not in _params_:
            net_params.update(_params_)
        else:
            net_params.update(_params_['_params_'])

        super(trainer_type, self).__init__(
            model_name=net_type.__name__,
            num_classes=num_classes,
            use_mixed_loss=use_mixed_loss,
            losses=losses,
            **net_params)

    if not issubclass(net_type, paddle.nn.Layer):
        raise TypeError("net_type must be a subclass of paddle.nn.Layer")

    trainer_name = net_type.__name__
    # Dynamically create the trainer class with `_init_func` as its constructor.
    trainer_type = type(trainer_name, (BaseChangeDetector, ),
                        {'__init__': _init_func})
    if attach_trainer:
        # Attach the generated trainer to paddlers.tasks.change_detector.
        trainer_type = attach(trainer_type)
    return trainer_type
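

# Example (sketch): given a change detection network `MyNet` that subclasses
# paddle.nn.Layer and is assumed to be resolvable by BaseChangeDetector
# (typically by being attached to paddlers.rs_models.cd, e.g. in a companion
# custom_model.py), `make_trainer` builds a trainer class, named after the
# network, whose constructor forwards extra keyword arguments to it:
#
#   MyNetTrainer = make_trainer(MyNet, attach_trainer=True)
#   trainer = MyNetTrainer(num_classes=2, in_channels=3)
#
# `MyNet` and `in_channels` are illustrative placeholders, not defined here.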


def make_trainer_and_build(net_type, *args, **kwargs):
    trainer_type = make_trainer(net_type, attach_trainer=True)
    return trainer_type(*args, **kwargs)
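

# Example (sketch): `make_trainer_and_build` performs both steps at once and
# returns a trainer instance rather than a trainer class:
#
#   trainer = make_trainer_and_build(MyNet, num_classes=2, in_channels=3)
#
# As above, `MyNet` and `in_channels` are illustrative placeholders.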


@attach
class CustomTrainer(BaseChangeDetector):
    def __init__(self,
                 num_classes=2,
                 use_mixed_loss=False,
                 losses=None,
                 in_channels=3,
                 att_types='ct',
                 use_dropout=False,
                 **params):
        # Forward the model-specific arguments to the underlying network.
        params.update({
            'in_channels': in_channels,
            'att_types': att_types,
            'use_dropout': use_dropout
        })
        super().__init__(
            model_name='CustomModel',
            num_classes=num_classes,
            use_mixed_loss=use_mixed_loss,
            losses=losses,
            **params)
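

# Example (sketch): `CustomTrainer` assumes a network named 'CustomModel' is
# resolvable by BaseChangeDetector (e.g. defined and attached to
# paddlers.rs_models.cd elsewhere in this example project). If that holds, it
# can be constructed and used like the built-in PaddleRS trainers:
#
#   trainer = CustomTrainer(att_types='ct', use_dropout=True)
#   trainer.train(...)  # same training interface as the built-in trainers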