From 59cfea9f98153eeda1bf440f21ce31ab37eaf07c Mon Sep 17 00:00:00 2001 From: Taylor Mordan <48797707+taylormordan@users.noreply.github.com> Date: Fri, 3 Nov 2023 12:35:11 +0100 Subject: [PATCH] Add HRFormer backbones (#18) --- docs/LICENSE.HRFORMER | 203 ++++++++++++++++++++++++ setup.py | 3 + src/openpifpaf/encoder/caf.py | 4 +- src/openpifpaf/encoder/cif.py | 2 +- src/openpifpaf/network/basenetworks.py | 31 ++++ src/openpifpaf/network/factory.py | 12 +- src/openpifpaf/network/hrformer.py | 206 +++++++++++++++++++++++++ src/openpifpaf/train.py | 5 +- 8 files changed, 461 insertions(+), 5 deletions(-) create mode 100644 docs/LICENSE.HRFORMER create mode 100644 src/openpifpaf/network/hrformer.py diff --git a/docs/LICENSE.HRFORMER b/docs/LICENSE.HRFORMER new file mode 100644 index 000000000..6d915977b --- /dev/null +++ b/docs/LICENSE.HRFORMER @@ -0,0 +1,203 @@ +Copyright 2018-2020 Open-MMLab. All rights reserved. + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. 
The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright 2018-2020 Open-MMLab.
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 5df8d2437..576d3f138 100644
--- a/setup.py
+++ b/setup.py
@@ -92,6 +92,9 @@ def add_cpp_extension():
         'backbones': [
             'timm>=0.4.9',  # For Swin Transformer and XCiT
             'einops>=0.3',  # required for BotNet
+            'mmengine',
+            'mmcv>=2.0',
+            'mmpose>=1.0',  # for HRFormer
         ],
         'dev': [
             'flameprof',
diff --git a/src/openpifpaf/encoder/caf.py b/src/openpifpaf/encoder/caf.py
index 9c8c37549..755f112e1 100644
--- a/src/openpifpaf/encoder/caf.py
+++ b/src/openpifpaf/encoder/caf.py
@@ -275,9 +275,9 @@ def fill_field_values(self, field_i, fij, fill_values):
         else:
             scale1 = scale * self.config.meta.sigmas[joint1i]
             scale2 = scale * self.config.meta.sigmas[joint2i]
-        assert np.isnan(scale1) or 0.0 < scale1 < 100.0
+        assert np.isnan(scale1) or scale1 > 0.0
         self.fields_scale1[field_i, fij[1], fij[0]] = scale1
-        assert np.isnan(scale2) or 0.0 < scale2 < 100.0
+        assert np.isnan(scale2) or scale2 > 0.0
         self.fields_scale2[field_i, fij[1], fij[0]] = scale2
 
     def fields_as_tensor(self, valid_area):
diff --git a/src/openpifpaf/encoder/cif.py b/src/openpifpaf/encoder/cif.py
index 3ee684d52..6d8d165db 100644
--- a/src/openpifpaf/encoder/cif.py
+++ b/src/openpifpaf/encoder/cif.py
@@ -127,7 +127,7 @@ def fill_coordinate(self, f, xyv, scale):
         self.fields_bmin[f, miny:maxy, minx:maxx][mask] = bmin
 
         # update scale
-        assert np.isnan(scale) or 0.0 < scale < 100.0
+        assert np.isnan(scale) or scale > 0.0
         self.fields_scale[f, miny:maxy, minx:maxx][mask] = scale
 
     def fields(self, valid_area):
diff --git a/src/openpifpaf/network/basenetworks.py b/src/openpifpaf/network/basenetworks.py
index a10cdf14c..4e03043cd 100644
--- a/src/openpifpaf/network/basenetworks.py
+++ b/src/openpifpaf/network/basenetworks.py
@@ -765,3 +765,34 @@ def cli(cls, parser: argparse.ArgumentParser):
     @classmethod
     def configure(cls, args: argparse.Namespace):
         cls.input_image_size = args.botnet_input_image_size
+
+
+class HRFormer(BaseNetwork):
+    scale_level = 0
+    pretrained = True
+    unused_parameters = True  # For DDP initialization
+
+    def __init__(self, name, hrformer_net):
+        stride = 4 * (2 ** self.scale_level)
+        hrformer_backbone, out_features = hrformer_net(self.scale_level, self.pretrained)
+        super().__init__(name, stride=stride, out_features=out_features)
+        self.backbone = hrformer_backbone
+
+    def forward(self, x):
+        return self.backbone(x)[0]
+
+    @classmethod
+    def cli(cls, parser: argparse.ArgumentParser):
+        group = parser.add_argument_group('HRFormer')
+        group.add_argument('--hrformer-scale-level',
+                           type=int, default=cls.scale_level,
+                           help='level of the HRFormer pyramid')
+        assert cls.pretrained
+        group.add_argument('--hrformer-no-pretrain', dest='hrformer_pretrained',
+                           default=True, action='store_false',
+                           help='use randomly initialized models')
+
+    @classmethod
+    def configure(cls, args: argparse.Namespace):
+        cls.scale_level = args.hrformer_scale_level
+        cls.pretrained = args.hrformer_pretrained
diff --git a/src/openpifpaf/network/factory.py b/src/openpifpaf/network/factory.py
index 78d81b5b9..0d49d4db0 100644
--- a/src/openpifpaf/network/factory.py
+++ b/src/openpifpaf/network/factory.py
@@ -10,7 +10,7 @@
 from .. import headmeta
 from ..configurable import Configurable
 from . import basenetworks, heads, model_migration, nets, tracking_heads
-from . import swin_transformer, xcit
+from . import hrformer, swin_transformer, xcit
 from .tracking_base import TrackingBase
 
 
@@ -77,6 +77,7 @@
 CHECKPOINT_URLS = {}
 
 BASE_TYPES = set([
+    basenetworks.HRFormer,
     basenetworks.MobileNetV2,
     basenetworks.MobileNetV3,
     basenetworks.Resnet,
@@ -234,6 +235,15 @@
     ], stride=16),
     'botnet': lambda: basenetworks.BotNet('botnet'),
+    # HRFormer architectures
+    'hrformersmall': lambda: basenetworks.HRFormer(
+        'hrformersmall', hrformer.hrformersmall),
+    'hrformersmallcat': lambda: basenetworks.HRFormer(
+        'hrformersmallcat', hrformer.hrformersmallcat),
+    'hrformerbase': lambda: basenetworks.HRFormer(
+        'hrformerbase', hrformer.hrformerbase),
+    'hrformerbasecat': lambda: basenetworks.HRFormer(
+        'hrformerbasecat', hrformer.hrformerbasecat),
 }
 # base factories that wrap other base factories:
 BASE_FACTORIES['tshufflenetv2k16'] = lambda: TrackingBase(BASE_FACTORIES['shufflenetv2k16']())
diff --git a/src/openpifpaf/network/hrformer.py b/src/openpifpaf/network/hrformer.py
new file mode 100644
index 000000000..655114782
--- /dev/null
+++ b/src/openpifpaf/network/hrformer.py
@@ -0,0 +1,206 @@
+import copy
+
+import torch
+
+try:
+    import mmpose.models
+except ImportError:
+    pass
+
+
+def hrformer_small_config(multiscale_output=False):
+    return dict(
+        type='HRFormer',
+        in_channels=3,
+        extra=dict(
+            drop_path_rate=0.1,
+            with_rpe=True,
+            stage1=dict(
+                num_modules=1,
+                num_branches=1,
+                block='BOTTLENECK',
+                num_blocks=(2, ),
+                num_channels=(64, ),
+                num_heads=[2],
+                num_mlp_ratios=[4]),
+            stage2=dict(
+                num_modules=1,
+                num_branches=2,
+                block='HRFORMERBLOCK',
+                num_blocks=(2, 2),
+                num_channels=(32, 64),
+                num_heads=[1, 2],
+                mlp_ratios=[4, 4],
+                window_sizes=[7, 7]),
+            stage3=dict(
+                num_modules=4,
+                num_branches=3,
+                block='HRFORMERBLOCK',
+                num_blocks=(2, 2, 2),
+                num_channels=(32, 64, 128),
+                num_heads=[1, 2, 4],
+                mlp_ratios=[4, 4, 4],
+                window_sizes=[7, 7, 7]),
+            stage4=dict(
+                num_modules=2,
+                num_branches=4,
+                block='HRFORMERBLOCK',
+                num_blocks=(2, 2, 2, 2),
+                num_channels=(32, 64, 128, 256),
+                num_heads=[1, 2, 4, 8],
+                mlp_ratios=[4, 4, 4, 4],
+                window_sizes=[7, 7, 7, 7],
+                multiscale_output=multiscale_output)),
+        init_cfg=dict(
+            type='Pretrained',
+            checkpoint='https://download.openmmlab.com/mmpose/'
+            'pretrain_models/hrformer_small-09516375_20220226.pth'),
+    )
+
+
+def hrformer_base_config(multiscale_output=False):
+    return dict(
+        type='HRFormer',
+        in_channels=3,
+        extra=dict(
+            drop_path_rate=0.2,
+            with_rpe=True,
+            stage1=dict(
+                num_modules=1,
+                num_branches=1,
+                block='BOTTLENECK',
+                num_blocks=(2, ),
+                num_channels=(64, ),
+                num_heads=[2],
+ mlp_ratios=[4]), + stage2=dict( + num_modules=1, + num_branches=2, + block='HRFORMERBLOCK', + num_blocks=(2, 2), + num_channels=(78, 156), + num_heads=[2, 4], + mlp_ratios=[4, 4], + window_sizes=[7, 7]), + stage3=dict( + num_modules=4, + num_branches=3, + block='HRFORMERBLOCK', + num_blocks=(2, 2, 2), + num_channels=(78, 156, 312), + num_heads=[2, 4, 8], + mlp_ratios=[4, 4, 4], + window_sizes=[7, 7, 7]), + stage4=dict( + num_modules=2, + num_branches=4, + block='HRFORMERBLOCK', + num_blocks=(2, 2, 2, 2), + num_channels=(78, 156, 312, 624), + num_heads=[2, 4, 8, 16], + mlp_ratios=[4, 4, 4, 4], + window_sizes=[7, 7, 7, 7], + multiscale_output=multiscale_output)), + init_cfg=dict( + type='Pretrained', + checkpoint='https://download.openmmlab.com/mmpose/' + 'pretrain_models/hrformer_base-32815020_20220226.pth'), + ) + + +class HRFormerModuleWithUpsample(torch.nn.Module): + """HRFormer module with forward modified to integrate upsampling.""" + def __init__(self, hrformermodule): + super().__init__() + self.hrformermodule = copy.deepcopy(hrformermodule) + + def forward(self, x): + # pylint: disable=all + """Adapted from https://github.com/open-mmlab/mmpose/blob/main/mmpose/models/backbones/hrnet.py#L194""" + if self.hrformermodule.num_branches == 1: + return [self.hrformermodule.branches[0](x[0])] + for i in range(self.hrformermodule.num_branches): + x[i] = self.hrformermodule.branches[i](x[i]) + x_fuse = [] + for i in range(len(self.hrformermodule.fuse_layers)): + y = 0 + for j in range(self.hrformermodule.num_branches): + if i == j: + y += x[j] + elif j < i: + y += self.hrformermodule.fuse_layers[i][j](x[j]) + elif i < j: # upsampling added here + y += torch.nn.functional.interpolate( + self.hrformermodule.fuse_layers[i][j](x[j]), + size=y.shape[2:], + mode='bilinear', + align_corners=False, + ) + x_fuse.append(self.hrformermodule.relu(y)) + return x_fuse + + +def adapt_hrformer(module): + """Adapt HRFormer's upsampling to work with custom image size in OpenPifPaf.""" + module_output = module + if isinstance(module, torch.nn.modules.Upsample): + module_output = torch.nn.Sequential() # remove Upsample module + elif isinstance(module, mmpose.models.backbones.hrformer.HRFomerModule): + module_output = HRFormerModuleWithUpsample(module) # add upsampling to HRFormerModule + for name, child in module.named_children(): + module_output.add_module(name, adapt_hrformer(child)) + del module + return module_output + + +def hrformer(hrformer_config_fn=None, + scale_level=0, + concat_feature_maps=False, + pretrained=True): + multiscale_output = (scale_level != 0) or concat_feature_maps + hrformer_config_dict = hrformer_config_fn(multiscale_output=multiscale_output) + hrformer_backbone = mmpose.models.build_backbone(hrformer_config_dict) + if pretrained: + hrformer_backbone.init_weights() + hrformer_backbone = adapt_hrformer(hrformer_backbone) + if concat_feature_maps: + fmp_index = [scale_level, 1, 2, 3] + fmp_index[scale_level] = 0 + fmp = mmpose.models.FeatureMapProcessor(select_index=fmp_index, concat=True) + out_features = sum(hrformer_config_dict['extra']['stage4']['num_channels']) + else: + fmp = mmpose.models.FeatureMapProcessor(select_index=scale_level) + out_features = hrformer_config_dict['extra']['stage4']['num_channels'][scale_level] + return torch.nn.Sequential(hrformer_backbone, fmp), out_features + + +def hrformersmall(scale_level=0, pretrained=True): + hrformer_backbone, out_features = hrformer(hrformer_small_config, + scale_level=scale_level, + concat_feature_maps=False, + 
pretrained=pretrained) + return hrformer_backbone, out_features + + +def hrformersmallcat(scale_level=0, pretrained=True): + hrformer_backbone, out_features = hrformer(hrformer_small_config, + scale_level=scale_level, + concat_feature_maps=True, + pretrained=pretrained) + return hrformer_backbone, out_features + + +def hrformerbase(scale_level=0, pretrained=True): + hrformer_backbone, out_features = hrformer(hrformer_base_config, + scale_level=scale_level, + concat_feature_maps=False, + pretrained=pretrained) + return hrformer_backbone, out_features + + +def hrformerbasecat(scale_level=0, pretrained=True): + hrformer_backbone, out_features = hrformer(hrformer_base_config, + scale_level=scale_level, + concat_feature_maps=True, + pretrained=pretrained) + return hrformer_backbone, out_features diff --git a/src/openpifpaf/train.py b/src/openpifpaf/train.py index b96d57290..d074731ff 100644 --- a/src/openpifpaf/train.py +++ b/src/openpifpaf/train.py @@ -198,10 +198,13 @@ def main(): net_cpu = torch.nn.SyncBatchNorm.convert_sync_batchnorm(net_cpu) else: LOG.info('not converting batchnorms to syncbatchnorms') + unused_parameters = (isinstance(datamodule, datasets.MultiDataModule) + or (hasattr(net_cpu.base_net, 'unused_parameters') + and net_cpu.base_net.unused_parameters)) net = torch.nn.parallel.DistributedDataParallel( net_cpu.to(device=args.device), device_ids=[args.local_rank], output_device=args.local_rank, - find_unused_parameters=isinstance(datamodule, datasets.MultiDataModule), + find_unused_parameters=unused_parameters, ) loss = loss.to(device=args.device) else:
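
Usage notes (illustrative, not part of the patch):

The four variants differ only in their config and feature selection: 'hrformersmall' and 'hrformerbase' return the single pyramid level selected by --hrformer-scale-level (32 or 78 channels at level 0), while the 'cat' variants upsample all four stage-4 maps to that level's resolution and concatenate them (480 or 1170 channels). The relaxed asserts in the encoders presumably follow from the smaller stride: scales measured in feature-map cells are four times larger at stride 4 than at stride 16 and can exceed the previous bound of 100. A minimal smoke test of the new factory entries, assuming the optional 'backbones' extras (mmengine, mmcv, mmpose) are installed; the input size and printed shape are illustrative:

    import torch
    from openpifpaf.network import basenetworks
    from openpifpaf.network.factory import BASE_FACTORIES

    basenetworks.HRFormer.pretrained = False  # skip the checkpoint download for a quick check
    backbone = BASE_FACTORIES['hrformersmall']()
    print(backbone.stride, backbone.out_features)  # 4, 32 at the default scale_level=0

    x = torch.randn(1, 3, 129, 129)
    y = backbone(x)  # forward() returns only the selected pyramid level
    print(y.shape)   # about (1, 32, 33, 33): the HRFormer stem downsamples by 4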
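adapt_hrformer() uses the same recursive module-rewriting pattern as torch.nn.SyncBatchNorm.convert_sync_batchnorm: build a replacement for the current node when it matches, then recurse so the rewritten children are re-attached to the new parent. A self-contained toy version of just the Upsample removal (an empty torch.nn.Sequential acts as an identity; the HRFomerModule wrapping works the same way):

    import torch

    def strip_upsample(module: torch.nn.Module) -> torch.nn.Module:
        # Replace every Upsample leaf with an empty Sequential (an identity),
        # then recurse so rewritten children replace the originals by name.
        out = torch.nn.Sequential() if isinstance(module, torch.nn.Upsample) else module
        for name, child in module.named_children():
            out.add_module(name, strip_upsample(child))
        return out

    m = torch.nn.Sequential(torch.nn.Conv2d(3, 8, 3), torch.nn.Upsample(scale_factor=2))
    print(strip_upsample(m))  # the Upsample leaf is gone; the Conv2d is kept as-is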
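Training should then only require the new command line options; the dataset and optimizer flags below are illustrative, not a tuned recipe (--hrformer-no-pretrain switches to random initialization):

    python3 -m openpifpaf.train \
        --dataset=cocokp \
        --basenet=hrformersmallcat \
        --hrformer-scale-level=0 \
        --lr=0.0001 --momentum=0.95 --batch-size=8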