From 85c82ab569835706b5d687f4ecbec303fa888f76 Mon Sep 17 00:00:00 2001 From: wangwl Date: Tue, 23 Dec 2025 01:45:54 +0000 Subject: [PATCH 1/3] add SWL --- .../SWL/WSL-Images/CODE_OF_CONDUCT.md | 5 + .../SWL/WSL-Images/CONTRIBUTING.md | 35 + .../Classification/SWL/WSL-Images/LICENSE | 399 ++++++++++ .../Classification/SWL/WSL-Images/README.md | 38 + .../Classification/SWL/WSL-Images/hubconf.py | 78 ++ .../build-in/Classification/SWL/coverage.txt | 3 + PyTorch/build-in/Classification/SWL/run | 1 + .../Classification/SWL/weloTrainStep.py | 692 ++++++++++++++++++ PyTorch/build-in/Classification/SWL/wsl.py | 253 +++++++ .../build-in/Classification/SWL/wsl_loss.jpg | Bin 0 -> 35863 bytes .../build-in/Classification/SWL/wsl_loss.txt | 29 + 11 files changed, 1533 insertions(+) create mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/CODE_OF_CONDUCT.md create mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/CONTRIBUTING.md create mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/LICENSE create mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/README.md create mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/hubconf.py create mode 100644 PyTorch/build-in/Classification/SWL/coverage.txt create mode 100644 PyTorch/build-in/Classification/SWL/run create mode 100644 PyTorch/build-in/Classification/SWL/weloTrainStep.py create mode 100644 PyTorch/build-in/Classification/SWL/wsl.py create mode 100644 PyTorch/build-in/Classification/SWL/wsl_loss.jpg create mode 100644 PyTorch/build-in/Classification/SWL/wsl_loss.txt diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/CODE_OF_CONDUCT.md b/PyTorch/build-in/Classification/SWL/WSL-Images/CODE_OF_CONDUCT.md new file mode 100644 index 000000000..0f7ad8bfc --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/WSL-Images/CODE_OF_CONDUCT.md @@ -0,0 +1,5 @@ +# Code of Conduct + +Facebook has adopted a Code of Conduct that we expect project participants to adhere to. 
+Please read the [full text](https://code.fb.com/codeofconduct/) +so that you can understand what actions will and will not be tolerated. diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/CONTRIBUTING.md b/PyTorch/build-in/Classification/SWL/WSL-Images/CONTRIBUTING.md new file mode 100644 index 000000000..47c825fbb --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/WSL-Images/CONTRIBUTING.md @@ -0,0 +1,35 @@ +# Contributing to WSL-Images +We want to make contributing to this project as easy and transparent as possible. + +## Our Development Process +Minor changes and improvements will be released on an ongoing basis. Larger changes (e.g., changesets implementing a new benchmark) will be released on a more periodic basis. + +## Pull Requests +We actively welcome your pull requests. + +1. Fork the repo and create your branch from `master`. +2. If you've added code that should be tested, add tests. +3. If you've changed APIs, update the documentation. +4. Ensure the test suite passes. +5. Make sure your code lints. +6. If you haven't already, complete the Contributor License Agreement ("CLA"). + +## Contributor License Agreement ("CLA") +In order to accept your pull request, we need you to submit a CLA. You only need +to do this once to work on any of Facebook's open source projects. + +Complete your CLA here: + +## Issues +We use GitHub issues to track public bugs. Please ensure your description is +clear and has sufficient instructions to be able to reproduce the issue. Follow +the template provided [here](.github/issue_template.md) when opening issues. + +## Coding Style +* 4 spaces for indentation rather than tabs +* 80 character line length +* 80 character line length + +## License +By contributing to WSL-Images, you agree that your contributions will be licensed +under the LICENSE file in the root directory of this source tree. 
diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/LICENSE b/PyTorch/build-in/Classification/SWL/WSL-Images/LICENSE new file mode 100644 index 000000000..f2915d90a --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/WSL-Images/LICENSE @@ -0,0 +1,399 @@ +Attribution-NonCommercial 4.0 International + +======================================================================= + +Creative Commons Corporation ("Creative Commons") is not a law firm and +does not provide legal services or legal advice. Distribution of +Creative Commons public licenses does not create a lawyer-client or +other relationship. Creative Commons makes its licenses and related +information available on an "as-is" basis. Creative Commons gives no +warranties regarding its licenses, any material licensed under their +terms and conditions, or any related information. Creative Commons +disclaims all liability for damages resulting from their use to the +fullest extent possible. + +Using Creative Commons Public Licenses + +Creative Commons public licenses provide a standard set of terms and +conditions that creators and other rights holders may use to share +original works of authorship and other material subject to copyright +and certain other rights specified in the public license below. The +following considerations are for informational purposes only, are not +exhaustive, and do not form part of our licenses. + + Considerations for licensors: Our public licenses are + intended for use by those authorized to give the public + permission to use material in ways otherwise restricted by + copyright and certain other rights. Our licenses are + irrevocable. Licensors should read and understand the terms + and conditions of the license they choose before applying it. + Licensors should also secure all rights necessary before + applying our licenses so that the public can reuse the + material as expected. Licensors should clearly mark any + material not subject to the license. 
This includes other CC- + licensed material, or material used under an exception or + limitation to copyright. More considerations for licensors: + wiki.creativecommons.org/Considerations_for_licensors + + Considerations for the public: By using one of our public + licenses, a licensor grants the public permission to use the + licensed material under specified terms and conditions. If + the licensor's permission is not necessary for any reason--for + example, because of any applicable exception or limitation to + copyright--then that use is not regulated by the license. Our + licenses grant only permissions under copyright and certain + other rights that a licensor has authority to grant. Use of + the licensed material may still be restricted for other + reasons, including because others have copyright or other + rights in the material. A licensor may make special requests, + such as asking that all changes be marked or described. + Although not required by our licenses, you are encouraged to + respect those requests where reasonable. More_considerations + for the public: + wiki.creativecommons.org/Considerations_for_licensees + +======================================================================= + +Creative Commons Attribution-NonCommercial 4.0 International Public +License + +By exercising the Licensed Rights (defined below), You accept and agree +to be bound by the terms and conditions of this Creative Commons +Attribution-NonCommercial 4.0 International Public License ("Public +License"). To the extent this Public License may be interpreted as a +contract, You are granted the Licensed Rights in consideration of Your +acceptance of these terms and conditions, and the Licensor grants You +such rights in consideration of benefits the Licensor receives from +making the Licensed Material available under these terms and +conditions. + +Section 1 -- Definitions. + + a. 
Adapted Material means material subject to Copyright and Similar + Rights that is derived from or based upon the Licensed Material + and in which the Licensed Material is translated, altered, + arranged, transformed, or otherwise modified in a manner requiring + permission under the Copyright and Similar Rights held by the + Licensor. For purposes of this Public License, where the Licensed + Material is a musical work, performance, or sound recording, + Adapted Material is always produced where the Licensed Material is + synched in timed relation with a moving image. + + b. Adapter's License means the license You apply to Your Copyright + and Similar Rights in Your contributions to Adapted Material in + accordance with the terms and conditions of this Public License. + + c. Copyright and Similar Rights means copyright and/or similar rights + closely related to copyright including, without limitation, + performance, broadcast, sound recording, and Sui Generis Database + Rights, without regard to how the rights are labeled or + categorized. For purposes of this Public License, the rights + specified in Section 2(b)(1)-(2) are not Copyright and Similar + Rights. + d. Effective Technological Measures means those measures that, in the + absence of proper authority, may not be circumvented under laws + fulfilling obligations under Article 11 of the WIPO Copyright + Treaty adopted on December 20, 1996, and/or similar international + agreements. + + e. Exceptions and Limitations means fair use, fair dealing, and/or + any other exception or limitation to Copyright and Similar Rights + that applies to Your use of the Licensed Material. + + f. Licensed Material means the artistic or literary work, database, + or other material to which the Licensor applied this Public + License. + + g. 
Licensed Rights means the rights granted to You subject to the + terms and conditions of this Public License, which are limited to + all Copyright and Similar Rights that apply to Your use of the + Licensed Material and that the Licensor has authority to license. + + h. Licensor means the individual(s) or entity(ies) granting rights + under this Public License. + + i. NonCommercial means not primarily intended for or directed towards + commercial advantage or monetary compensation. For purposes of + this Public License, the exchange of the Licensed Material for + other material subject to Copyright and Similar Rights by digital + file-sharing or similar means is NonCommercial provided there is + no payment of monetary compensation in connection with the + exchange. + + j. Share means to provide material to the public by any means or + process that requires permission under the Licensed Rights, such + as reproduction, public display, public performance, distribution, + dissemination, communication, or importation, and to make material + available to the public including in ways that members of the + public may access the material from a place and at a time + individually chosen by them. + + k. Sui Generis Database Rights means rights other than copyright + resulting from Directive 96/9/EC of the European Parliament and of + the Council of 11 March 1996 on the legal protection of databases, + as amended and/or succeeded, as well as other essentially + equivalent rights anywhere in the world. + + l. You means the individual or entity exercising the Licensed Rights + under this Public License. Your has a corresponding meaning. + +Section 2 -- Scope. + + a. License grant. + + 1. Subject to the terms and conditions of this Public License, + the Licensor hereby grants You a worldwide, royalty-free, + non-sublicensable, non-exclusive, irrevocable license to + exercise the Licensed Rights in the Licensed Material to: + + a. 
reproduce and Share the Licensed Material, in whole or + in part, for NonCommercial purposes only; and + + b. produce, reproduce, and Share Adapted Material for + NonCommercial purposes only. + + 2. Exceptions and Limitations. For the avoidance of doubt, where + Exceptions and Limitations apply to Your use, this Public + License does not apply, and You do not need to comply with + its terms and conditions. + + 3. Term. The term of this Public License is specified in Section + 6(a). + + 4. Media and formats; technical modifications allowed. The + Licensor authorizes You to exercise the Licensed Rights in + all media and formats whether now known or hereafter created, + and to make technical modifications necessary to do so. The + Licensor waives and/or agrees not to assert any right or + authority to forbid You from making technical modifications + necessary to exercise the Licensed Rights, including + technical modifications necessary to circumvent Effective + Technological Measures. For purposes of this Public License, + simply making modifications authorized by this Section 2(a) + (4) never produces Adapted Material. + + 5. Downstream recipients. + + a. Offer from the Licensor -- Licensed Material. Every + recipient of the Licensed Material automatically + receives an offer from the Licensor to exercise the + Licensed Rights under the terms and conditions of this + Public License. + + b. No downstream restrictions. You may not offer or impose + any additional or different terms or conditions on, or + apply any Effective Technological Measures to, the + Licensed Material if doing so restricts exercise of the + Licensed Rights by any recipient of the Licensed + Material. + + 6. No endorsement. 
Nothing in this Public License constitutes or + may be construed as permission to assert or imply that You + are, or that Your use of the Licensed Material is, connected + with, or sponsored, endorsed, or granted official status by, + the Licensor or others designated to receive attribution as + provided in Section 3(a)(1)(A)(i). + + b. Other rights. + + 1. Moral rights, such as the right of integrity, are not + licensed under this Public License, nor are publicity, + privacy, and/or other similar personality rights; however, to + the extent possible, the Licensor waives and/or agrees not to + assert any such rights held by the Licensor to the limited + extent necessary to allow You to exercise the Licensed + Rights, but not otherwise. + + 2. Patent and trademark rights are not licensed under this + Public License. + + 3. To the extent possible, the Licensor waives any right to + collect royalties from You for the exercise of the Licensed + Rights, whether directly or through a collecting society + under any voluntary or waivable statutory or compulsory + licensing scheme. In all other cases the Licensor expressly + reserves any right to collect such royalties, including when + the Licensed Material is used other than for NonCommercial + purposes. + +Section 3 -- License Conditions. + +Your exercise of the Licensed Rights is expressly made subject to the +following conditions. + + a. Attribution. + + 1. If You Share the Licensed Material (including in modified + form), You must: + + a. retain the following if it is supplied by the Licensor + with the Licensed Material: + + i. identification of the creator(s) of the Licensed + Material and any others designated to receive + attribution, in any reasonable manner requested by + the Licensor (including by pseudonym if + designated); + + ii. a copyright notice; + + iii. a notice that refers to this Public License; + + iv. a notice that refers to the disclaimer of + warranties; + + v. 
a URI or hyperlink to the Licensed Material to the + extent reasonably practicable; + + b. indicate if You modified the Licensed Material and + retain an indication of any previous modifications; and + + c. indicate the Licensed Material is licensed under this + Public License, and include the text of, or the URI or + hyperlink to, this Public License. + + 2. You may satisfy the conditions in Section 3(a)(1) in any + reasonable manner based on the medium, means, and context in + which You Share the Licensed Material. For example, it may be + reasonable to satisfy the conditions by providing a URI or + hyperlink to a resource that includes the required + information. + + 3. If requested by the Licensor, You must remove any of the + information required by Section 3(a)(1)(A) to the extent + reasonably practicable. + + 4. If You Share Adapted Material You produce, the Adapter's + License You apply must not prevent recipients of the Adapted + Material from complying with this Public License. + +Section 4 -- Sui Generis Database Rights. + +Where the Licensed Rights include Sui Generis Database Rights that +apply to Your use of the Licensed Material: + + a. for the avoidance of doubt, Section 2(a)(1) grants You the right + to extract, reuse, reproduce, and Share all or a substantial + portion of the contents of the database for NonCommercial purposes + only; + + b. if You include all or a substantial portion of the database + contents in a database in which You have Sui Generis Database + Rights, then the database in which You have Sui Generis Database + Rights (but not its individual contents) is Adapted Material; and + + c. You must comply with the conditions in Section 3(a) if You Share + all or a substantial portion of the contents of the database. + +For the avoidance of doubt, this Section 4 supplements and does not +replace Your obligations under this Public License where the Licensed +Rights include other Copyright and Similar Rights. 
+ +Section 5 -- Disclaimer of Warranties and Limitation of Liability. + + a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE + EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS + AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF + ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, + IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, + WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR + PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, + ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT + KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT + ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. + + b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE + TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, + NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, + INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, + COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR + USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN + ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR + DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR + IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. + + c. The disclaimer of warranties and limitation of liability provided + above shall be interpreted in a manner that, to the extent + possible, most closely approximates an absolute disclaimer and + waiver of all liability. + +Section 6 -- Term and Termination. + + a. This Public License applies for the term of the Copyright and + Similar Rights licensed here. However, if You fail to comply with + this Public License, then Your rights under this Public License + terminate automatically. + + b. Where Your right to use the Licensed Material has terminated under + Section 6(a), it reinstates: + + 1. 
automatically as of the date the violation is cured, provided + it is cured within 30 days of Your discovery of the + violation; or + + 2. upon express reinstatement by the Licensor. + + For the avoidance of doubt, this Section 6(b) does not affect any + right the Licensor may have to seek remedies for Your violations + of this Public License. + + c. For the avoidance of doubt, the Licensor may also offer the + Licensed Material under separate terms or conditions or stop + distributing the Licensed Material at any time; however, doing so + will not terminate this Public License. + + d. Sections 1, 5, 6, 7, and 8 survive termination of this Public + License. + +Section 7 -- Other Terms and Conditions. + + a. The Licensor shall not be bound by any additional or different + terms or conditions communicated by You unless expressly agreed. + + b. Any arrangements, understandings, or agreements regarding the + Licensed Material not stated herein are separate from and + independent of the terms and conditions of this Public License. + +Section 8 -- Interpretation. + + a. For the avoidance of doubt, this Public License does not, and + shall not be interpreted to, reduce, limit, restrict, or impose + conditions on any use of the Licensed Material that could lawfully + be made without permission under this Public License. + + b. To the extent possible, if any provision of this Public License is + deemed unenforceable, it shall be automatically reformed to the + minimum extent necessary to make it enforceable. If the provision + cannot be reformed, it shall be severed from this Public License + without affecting the enforceability of the remaining terms and + conditions. + + c. No term or condition of this Public License will be waived and no + failure to comply consented to unless expressly agreed to by the + Licensor. + + d. 
Nothing in this Public License constitutes or may be interpreted + as a limitation upon, or waiver of, any privileges and immunities + that apply to the Licensor or You, including from the legal + processes of any jurisdiction or authority. + +======================================================================= + +Creative Commons is not a party to its public +licenses. Notwithstanding, Creative Commons may elect to apply one of +its public licenses to material it publishes and in those instances +will be considered the “Licensor.” The text of the Creative Commons +public licenses is dedicated to the public domain under the CC0 Public +Domain Dedication. Except for the limited purpose of indicating that +material is shared under a Creative Commons public license or as +otherwise permitted by the Creative Commons policies published at +creativecommons.org/policies, Creative Commons does not authorize the +use of the trademark "Creative Commons" or any other trademark or logo +of Creative Commons without its prior written consent including, +without limitation, in connection with any unauthorized modifications +to any of its public licenses or any other arrangements, +understandings, or agreements concerning use of licensed material. For +the avoidance of doubt, this paragraph does not form part of the +public licenses. + +Creative Commons may be contacted at creativecommons.org. diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/README.md b/PyTorch/build-in/Classification/SWL/WSL-Images/README.md new file mode 100644 index 000000000..a43144e08 --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/WSL-Images/README.md @@ -0,0 +1,38 @@ +## WSL-Images + +This project provides models pre-trained in weakly-supervised fashion on **940 million** public images with 1.5K hashtags matching with 1000 ImageNet1K synsets, followed by fine-tuning on ImageNet1K dataset. 
Please refer to "Exploring the Limits of Weakly Supervised Pretraining" (https://arxiv.org/abs/1805.00932) presented at ECCV 2018 for the details of model training. + +We are providing 4 models with different capacities. + +| Model | #Parameters | FLOPS | Top-1 Acc. | Top-5 Acc. | +| ------------------ | :---------: | :---: | :--------: | :--------: | +| ResNeXt-101 32x8d | 88M | 16B | 82.2 | 96.4 | +| ResNeXt-101 32x16d | 193M | 36B | 84.2 | 97.2 | +| ResNeXt-101 32x32d | 466M | 87B | 85.1 | 97.5 | +| ResNeXt-101 32x48d | 829M | 153B | 85.4 | 97.6 | + +Our models significantly improve the training accuracy on ImageNet compared to training from scratch. **We achieve state-of-the-art accuracy of 85.4% on ImageNet with our ResNext-101 32x48d model.** + +## Loading models with torch.hub +The models are available with [torch.hub](https://pytorch.org/docs/stable/hub.html). +As an example, to load the ResNext-101 32x16d model, simply run: + +``` +model = torch.hub.load('facebookresearch/WSL-Images', 'resnext101_32x16d_wsl') +``` +Please refer to [torch.hub](https://pytorch.org/docs/stable/hub.html) to see a full example of using the model to classify an image. + +## Citing WSL-Images + +If you use the WSL-Images models, please cite the following publication. +``` +@inproceedings{wslimageseccv2018, + title={Exploring the Limits of Weakly Supervised Pretraining}, + author={Dhruv Kumar Mahajan and Ross B. Girshick and Vignesh Ramanathan and Kaiming He and Manohar Paluri and Yixuan Li and Ashwin Bharambe and Laurens van der Maaten}, + booktitle={ECCV}, + year={2018} +} +``` + +## License +WSL-Images models are released under the CC-BY-NC 4.0 license. See [LICENSE](LICENSE) for additional details. 
diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/hubconf.py b/PyTorch/build-in/Classification/SWL/WSL-Images/hubconf.py new file mode 100644 index 000000000..d28f8adaa --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/WSL-Images/hubconf.py @@ -0,0 +1,78 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. + +# Optional list of dependencies required by the package +dependencies = ['torch', 'torchvision'] + +from torch.hub import load_state_dict_from_url +from torchvision.models.resnet import ResNet, Bottleneck + + +model_urls = { + 'resnext101_32x8d': 'https://download.pytorch.org/models/ig_resnext101_32x8-c38310e5.pth', + 'resnext101_32x16d': 'https://download.pytorch.org/models/ig_resnext101_32x16-c6f796b0.pth', + 'resnext101_32x32d': 'https://download.pytorch.org/models/ig_resnext101_32x32-e4b90b00.pth', + 'resnext101_32x48d': 'https://download.pytorch.org/models/ig_resnext101_32x48-3e41cc8a.pth', +} + + +def _resnext(arch, block, layers, pretrained, progress, **kwargs): + model = ResNet(block, layers, **kwargs) + state_dict = load_state_dict_from_url(model_urls[arch], progress=progress) + model.load_state_dict(state_dict) + return model + + +def resnext101_32x8d_wsl(progress=True, **kwargs): + """Constructs a ResNeXt-101 32x8 model pre-trained on weakly-supervised data + and finetuned on ImageNet from Figure 5 in + `"Exploring the Limits of Weakly Supervised Pretraining" `_ + + Args: + progress (bool): If True, displays a progress bar of the download to stderr. 
+ """ + kwargs['groups'] = 32 + kwargs['width_per_group'] = 8 + return _resnext('resnext101_32x8d', Bottleneck, [3, 4, 23, 3], True, progress, **kwargs) + + +def resnext101_32x16d_wsl(progress=True, **kwargs): + """Constructs a ResNeXt-101 32x16 model pre-trained on weakly-supervised data + and finetuned on ImageNet from Figure 5 in + `"Exploring the Limits of Weakly Supervised Pretraining" `_ + + Args: + progress (bool): If True, displays a progress bar of the download to stderr. + """ + kwargs['groups'] = 32 + kwargs['width_per_group'] = 16 + return _resnext('resnext101_32x16d', Bottleneck, [3, 4, 23, 3], True, progress, **kwargs) + + +def resnext101_32x32d_wsl(progress=True, **kwargs): + """Constructs a ResNeXt-101 32x32 model pre-trained on weakly-supervised data + and finetuned on ImageNet from Figure 5 in + `"Exploring the Limits of Weakly Supervised Pretraining" `_ + + Args: + progress (bool): If True, displays a progress bar of the download to stderr. + """ + kwargs['groups'] = 32 + kwargs['width_per_group'] = 32 + return _resnext('resnext101_32x32d', Bottleneck, [3, 4, 23, 3], True, progress, **kwargs) + + +def resnext101_32x48d_wsl(progress=True, **kwargs): + """Constructs a ResNeXt-101 32x48 model pre-trained on weakly-supervised data + and finetuned on ImageNet from Figure 5 in + `"Exploring the Limits of Weakly Supervised Pretraining" `_ + + Args: + progress (bool): If True, displays a progress bar of the download to stderr. 
+ """ + kwargs['groups'] = 32 + kwargs['width_per_group'] = 48 + return _resnext('resnext101_32x48d', Bottleneck, [3, 4, 23, 3], True, progress, **kwargs) diff --git a/PyTorch/build-in/Classification/SWL/coverage.txt b/PyTorch/build-in/Classification/SWL/coverage.txt new file mode 100644 index 000000000..3aa0a123b --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/coverage.txt @@ -0,0 +1,3 @@ +all api: ['_amp_foreach_non_finite_check_and_unscale_', '_amp_update_scale_', '_copy_from', '_has_compatible_shallow_copy_type', '_local_scalar_dense', '_log_softmax', '_log_softmax_backward_data', '_pin_memory', '_reshape_alias', 'add_', 'addmm', 'as_strided', 'as_strided_', 'convolution', 'convolution_backward', 'copy_stride', 'div', 'eq', 'fill_', 'fused_sgd', 'is_pinned', 'linear', 'max_pool2d', 'maxpool2d_backward', 'maxpool2d_forward', 'mean', 'mm', 'mul', 'mul_', 'native_batch_norm', 'native_batch_norm_backward', 'nll_loss_backward', 'nll_loss_forward', 'reciprocal', 'relu_', 'set_', 'sum', 'threshold_backward', 'topk_out', 'view', 'zero_'], total: 41 +fallback op: [], total: 0 +coverage rate: 100.00% diff --git a/PyTorch/build-in/Classification/SWL/run b/PyTorch/build-in/Classification/SWL/run new file mode 100644 index 000000000..fbac492b0 --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/run @@ -0,0 +1 @@ +bash ../sdaaTest.sh wsl 8 0 diff --git a/PyTorch/build-in/Classification/SWL/weloTrainStep.py b/PyTorch/build-in/Classification/SWL/weloTrainStep.py new file mode 100644 index 000000000..13297c11b --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/weloTrainStep.py @@ -0,0 +1,692 @@ +#!/usr/bin/env python3 +# coding: utf-8 + +import os +import random +import sys +import time +import json +import argparse +from collections import OrderedDict +from pathlib import Path +import numpy as np +import pandas as pd +from tqdm import tqdm +import importlib + +os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":4096:8" # 强烈推荐在 shell/最顶端设置 +os.environ["PYTHONHASHSEED"] = 
"12345" +os.environ["OMP_NUM_THREADS"] = "1" +os.environ["MKL_NUM_THREADS"] = "1" + +def ensure_cublas_workspace(config=":4096:8"): + """ + 尝试为 cuBLAS 设置可复现 workspace。强烈建议在主脚本入口处(import torch 之前) + 通过 export 设置该 env。此函数会在运行时设置,但如果 torch 已经被 import, + 则可能为时已晚——函数会打印提醒。 + """ + already = os.environ.get("CUBLAS_WORKSPACE_CONFIG") + if already: + print(f"[seed_utils] CUBLAS_WORKSPACE_CONFIG 已存在:{already}") + else: + os.environ["CUBLAS_WORKSPACE_CONFIG"] = config + print(f"[seed_utils] 已设置 CUBLAS_WORKSPACE_CONFIG={config} (注意:请在 import torch 前设置以保证生效)") + +def set_global_seed(seed: int = 42, set_threads: bool = True): + """ + 统一随机性设置。注意:若希望完全发挥效果,请在主脚本入口(import torch 之前) + 先调用 ensure_cublas_workspace(...) 或在 shell 中 export CUBLAS_WORKSPACE_CONFIG。 + """ + ensure_cublas_workspace() # 会设置 env 并提醒 + os.environ["PYTHONHASHSEED"] = str(seed) + + if set_threads: + os.environ["OMP_NUM_THREADS"] = "1" + os.environ["MKL_NUM_THREADS"] = "1" + + random.seed(seed) + np.random.seed(seed) + + # 现在导入 torch(晚导入以便前面 env 生效) + import torch + torch.manual_seed(seed) + if torch.cuda.is_available(): + torch.cuda.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + # 强制确定性(如果存在不确定性算子,PyTorch 会报错并提示) + try: + torch.use_deterministic_algorithms(True) + except Exception as e: + print("[seed_utils] 设置 deterministic 模式时出错:", e) + print("[seed_utils] 请确认 CUBLAS_WORKSPACE_CONFIG 已在 import torch 之前设置。") + + torch.backends.cudnn.deterministic = True + torch.backends.cudnn.benchmark = False + + if set_threads: + torch.set_num_threads(1) + torch.set_num_interop_threads(1) + + print(f"[seed_utils] 全局 seed 已设置为 {seed}") + +set_global_seed(2025) + +""" +通用训练模版(优先从本地导入 Model -> 支持 DDP / 单卡,AMP,resume,日志,checkpoint) +保存为 train_template_localmodel.py +""" +import torch +import torch.nn as nn +import torch.optim as optim +import torch.backends.cudnn as cudnn +import torchvision.transforms as transforms +import torchvision.datasets as datasets +import torchvision.models as tv_models + +import 
torch.distributed as dist +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.utils.data import DataLoader +from torch.utils.data.distributed import DistributedSampler + +from torch.sdaa import amp +# from torch.cuda import amp + + +# ---------------------------- +# Helper utilities (self-contained) +# ---------------------------- +class AverageMeter(object): + def __init__(self, name='Meter', fmt=':.4f'): + self.name = name + self.fmt = fmt + self.reset() + def reset(self): + self.val = 0 + self.avg = 0 + self.sum = 0 + self.count = 0 + def update(self, val, n=1): + self.val = val + self.sum += val * n + self.count += n + self.avg = self.sum / max(1, self.count) + def __str__(self): + fmtstr = '{name} {val' + self.fmt + '} (avg {avg' + self.fmt + '})' + return fmtstr.format(name=self.name, val=self.val, avg=self.avg) + +def accuracy(output, target, topk=(1,)): + """Computes the precision@k for the specified values of k + 返回一个 list,每个元素是 tensor(百分比形式) + """ + with torch.no_grad(): + maxk = max(topk) + batch_size = target.size(0) + + # output: (N, C) -> pred: (maxk, N) + _, pred = output.topk(maxk, 1, True, True) + pred = pred.t() # (maxk, N) + correct = pred.eq(target.view(1, -1).expand_as(pred)) # (maxk, N) bool + + res = [] + for k in topk: + # 把前 k 行展平后求和(返回 0-dim tensor),随后换算为百分比 + correct_k = correct[:k].reshape(-1).float().sum() # 注意:不传 keepdim + # 乘以 100.0 / batch_size,保持返回 tensor(和之前代码兼容) + res.append(correct_k.mul_(100.0 / batch_size)) + return res + +def save_checkpoint(state, is_best, save_dir, filename='checkpoint.pth'): + save_path = os.path.join(save_dir, filename) + torch.save(state, save_path) + if is_best: + best_path = os.path.join(save_dir, 'model_best.pth') + torch.save(state, best_path) + +def set_seed(seed, deterministic=False): + random.seed(seed) + np.random.seed(seed) + torch.manual_seed(seed) + torch.cuda.manual_seed_all(seed) + if deterministic: + cudnn.deterministic = True + cudnn.benchmark = False + else: + 
cudnn.deterministic = False + cudnn.benchmark = True + +# ---------------------------- +# Argument parser +# ---------------------------- +def parse_args(): + parser = argparse.ArgumentParser(description='Generic PyTorch training template (DDP/AMP) with LocalModel priority') + parser.add_argument('--name', default='run', type=str, help='experiment name (log/checkpoints dir)') + parser.add_argument('--seed', default=42, type=int, help='random seed') + parser.add_argument('--arch', default='None', type=str, help='model name') + parser.add_argument('--deterministic', action='store_true', help='set cudnn deterministic (may be slower)') + parser.add_argument('--dataset', default='cifar10', choices=['cifar10','cifar100','imagenet','custom'], help='which dataset') + parser.add_argument('--datapath', default='./data', type=str, help='dataset root / imagenet root / custom root') + parser.add_argument('--imagenet_dir', default='./imagenet', type=str, help='if dataset=imagenet, path to imagenet root') + parser.add_argument('--custom_eval_dir', default=None, help='if dataset=custom, provide val dir') + parser.add_argument('--num_workers', default=4, type=int, help='dataloader workers per process') + parser.add_argument('--epochs', default=200, type=int) + parser.add_argument('--steps', default=0, type=int, help='max steps to run (if >0, training will stop when global_step reaches this).') + parser.add_argument('--batch_size', default=128, type=int) + parser.add_argument('--model_name', default='resnet18', help='torchvision model name or python path e.g. 
mypkg.mymodule.Model (used if no local Model)') + parser.add_argument('--num_classes', default=None, type=int, help='override num classes (auto-detect for common sets)') + parser.add_argument('--pretrained', action='store_true', help='use torchvision pretrained weights when available') + parser.add_argument('--optimizer', default='sgd', choices=['sgd','adam','adamw'], help='optimizer') + parser.add_argument('--lr', '--learning_rate', default=0.1, type=float) + parser.add_argument('--momentum', default=0.9, type=float) + parser.add_argument('--weight_decay', default=5e-4, type=float) + parser.add_argument('--nesterov', action='store_true') + parser.add_argument('--scheduler', default='multistep', choices=['multistep','step','cosine','none'], help='lr scheduler') + parser.add_argument('--milestones', default='100,150', type=str, help='milestones for multistep (comma sep)') + parser.add_argument('--step_size', default=30, type=int, help='step size for StepLR or cosine max epochs') + parser.add_argument('--gamma', default=0.1, type=float) + parser.add_argument('--scheduler_step_per_batch', action='store_true', help='call scheduler.step() per batch (for some schedulers)') + parser.add_argument('--resume', default='', type=str, help='path to checkpoint to resume from') + parser.add_argument('--start_epoch', default=0, type=int) + parser.add_argument('--print_freq', default=100, type=int) + parser.add_argument('--save_freq', default=10, type=int, help='save checkpoint every N epochs (rank0 only)') + parser.add_argument('--amp', action='store_true', default = True,help='use automatic mixed precision (AMP)') + parser.add_argument('--grad_accum_steps', default=1, type=int, help='gradient accumulation steps') + parser.add_argument('--local_rank', default=None, type=int, help='local rank passed by torchrun (if any). 
Use -1 or None for non-distributed') + parser.add_argument('--cutmix_prob', default=0.0, type=float) + parser.add_argument('--beta', default=1.0, type=float) + parser.add_argument('--seed_sampler', default=False, action='store_true', help='set sampler epoch seeds to make deterministic distributed shuffling') + args = parser.parse_args() + args.milestones = [int(x) for x in args.milestones.split(',')] if args.milestones else [] + return args + +# ---------------------------- +# build model (优先 LocalModel) +# ---------------------------- +def build_model_with_local_priority(args, device=None): + """ + 用参数 args.arch 作为模块名导入 Model() + 如果模块不存在或没有 Model 类,则报错停止。 + """ + try: + # 动态导入模块,比如 args.arch = "rexnet" + mod = importlib.import_module(args.arch) + Model = getattr(mod, "Model") # 从模块中获取 Model 类 + except Exception as e: + raise RuntimeError( + f"无法导入模型模块 '{args.arch}' 或未找到类 Model。" + f"\n错误信息:{e}" + ) + + # 解析数据集类别数 + if args.dataset == 'cifar10': + num_classes = 10 + elif args.dataset == 'cifar100': + num_classes = 100 + else: + print(f"[ERROR] 不支持的数据集类型:{args.dataset},无法确定类别数。程序终止。") + sys.exit(1) + + + # 实例化 + try: + model = Model(num_classes) + except Exception as e: + raise RuntimeError( + f"Model() 实例化失败,请检查模型构造函数。\n错误信息:{e}" + ) + + return model + +# ---------------------------- +# Data loader factory +# ---------------------------- +def build_dataloaders(args, rank, world_size): + if args.dataset == 'cifar10' or args.dataset == 'cifar100': + mean = (0.4914, 0.4822, 0.4465) + std = (0.2470, 0.2435, 0.2616) if args.dataset == 'cifar10' else (0.2023, 0.1994, 0.2010) + # train_transform = transforms.Compose([ + # transforms.RandomCrop(32, padding=4), + # transforms.RandomHorizontalFlip(), + # transforms.ToTensor(), + # transforms.Normalize(mean, std), + # ]) + # test_transform = transforms.Compose([ + # transforms.ToTensor(), + # transforms.Normalize(mean, std), + # ]) + + train_transform = transforms.Compose([ # 2025/12/3 从visformer模型开始 + transforms.Resize(256), 
# 先放大到 256 + transforms.RandomCrop(224), # 再随机裁剪为 224(更符合 ImageNet 风格增强) + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + transforms.Normalize(mean, std), + ]) + test_transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize(mean, std), + ]) + root = args.datapath + if args.dataset == 'cifar10': + train_set = datasets.CIFAR10(root=root, train=True, download=False, transform=train_transform) + val_set = datasets.CIFAR10(root=root, train=False, download=False, transform=test_transform) + num_classes = 10 + else: + train_set = datasets.CIFAR100(root=root, train=True, download=False, transform=train_transform) + val_set = datasets.CIFAR100(root=root, train=False, download=False, transform=test_transform) + num_classes = 100 + + elif args.dataset == 'imagenet': + train_dir = os.path.join(args.imagenet_dir, 'train') + val_dir = os.path.join(args.imagenet_dir, 'val') + train_transform = transforms.Compose([ + transforms.RandomResizedCrop(224), + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + transforms.Normalize((0.485,0.456,0.406), (0.229,0.224,0.225)), + ]) + test_transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + transforms.Normalize((0.485,0.456,0.406), (0.229,0.224,0.225)), + ]) + train_set = datasets.ImageFolder(train_dir, train_transform) + val_set = datasets.ImageFolder(val_dir, test_transform) + num_classes = args.num_classes or 1000 + + elif args.dataset == 'custom': + train_dir = os.path.join(args.datapath, 'train') + val_dir = args.custom_eval_dir or os.path.join(args.datapath, 'val') + train_transform = transforms.Compose([ + transforms.RandomResizedCrop(224), + transforms.RandomHorizontalFlip(), + transforms.ToTensor(), + ]) + test_transform = transforms.Compose([ + transforms.Resize(256), + transforms.CenterCrop(224), + transforms.ToTensor(), + ]) + train_set = 
datasets.ImageFolder(train_dir, train_transform) + val_set = datasets.ImageFolder(val_dir, test_transform) + num_classes = len(train_set.classes) + else: + raise ValueError("Unknown dataset") + + if dist.is_initialized() and world_size > 1: + train_sampler = DistributedSampler(train_set, num_replicas=world_size, rank=rank, shuffle=True) + else: + train_sampler = None + + train_loader = DataLoader(train_set, + batch_size=args.batch_size, + shuffle=(train_sampler is None), + num_workers=args.num_workers, + pin_memory=True, + sampler=train_sampler, + drop_last=False) + val_loader = DataLoader(val_set, + batch_size=args.batch_size, + shuffle=False, + num_workers=args.num_workers, + pin_memory=True) + + return train_loader, val_loader, num_classes, train_sampler + +# ---------------------------- +# Train & validate +# ---------------------------- +def train_one_epoch(args, epoch, model, criterion, optimizer, train_loader, device, scaler, scheduler=None, train_sampler=None, global_step_start=0, max_global_steps=None): + """ + 现在支持:若 max_global_steps 非 None,则当 global_step 达到该值时提前退出 + 返回: epoch_summary_dict, step_logs_list, global_step_end + step_logs_list: list of dicts with per-step info (for logging to CSV if需要) + """ + batch_time = AverageMeter('Time') + data_time = AverageMeter('Data') + losses = AverageMeter('Loss') + top1 = AverageMeter('Acc@1') + top5 = AverageMeter('Acc@5') + + model.train() + end = time.time() + optimizer.zero_grad() + + iters = len(train_loader) + step_logs = [] + global_step = global_step_start + + for i, (images, targets) in enumerate(train_loader): + # check global steps limit + if (max_global_steps is not None) and (global_step >= max_global_steps): + break + + data_time.update(time.time() - end) + images = images.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + + if args.amp: + with amp.autocast(): + outputs = model(images) + loss = criterion(outputs, targets) / args.grad_accum_steps + else: + outputs = 
model(images) + loss = criterion(outputs, targets) / args.grad_accum_steps + + if args.amp: + scaler.scale(loss).backward() + else: + loss.backward() + + # 每当累积步满足 grad_accum_steps 就 step + if (i + 1) % args.grad_accum_steps == 0: + if args.amp: + scaler.step(optimizer) + scaler.update() + else: + optimizer.step() + optimizer.zero_grad() + if scheduler is not None and args.scheduler_step_per_batch: + scheduler.step() + + with torch.no_grad(): + acc1, acc5 = accuracy(outputs, targets, topk=(1,5)) + losses.update(loss.item() * args.grad_accum_steps, images.size(0)) + top1.update(acc1.item(), images.size(0)) + top5.update(acc5.item(), images.size(0)) + + batch_time.update(time.time() - end) + end = time.time() + + # increment global step AFTER processing this batch + global_step += 1 + + # per-step print (controlled by print_freq) + if ((global_step % args.print_freq == 0) or (i == iters - 1)) and ((dist.get_rank() if dist.is_initialized() else 0) == 0): + lr = optimizer.param_groups[0]['lr'] + print(f"Epoch[{epoch}]:step[{i+1}/{iters}] step_train_loss {losses.val:.4f} acc1 {top1.val:.2f} acc5 {top5.val:.2f}") + + # collect per-step log + step_logs.append({ + 'epoch': epoch, + 'batch_idx': i, + 'global_step': global_step, + 'lr': optimizer.param_groups[0]['lr'], + 'loss': losses.val, + 'loss_avg': losses.avg, + 'acc1': top1.val, + 'acc1_avg': top1.avg, + 'acc5': top5.val, + 'acc5_avg': top5.avg, + 'time': batch_time.val + }) + + # if reached max_global_steps inside epoch, break (handled at loop start next iter) + if (max_global_steps is not None) and (global_step >= max_global_steps): + if (dist.get_rank() if dist.is_initialized() else 0) == 0: + print(f"[Info] 达到 max_global_steps={max_global_steps},将在 epoch 内提前停止。") + break + + # --- flush remaining grads if needed (handle gradient accumulation leftovers) --- + processed_batches = global_step - global_step_start # 实际处理的 batch 数 + if args.grad_accum_steps > 1 and (processed_batches % args.grad_accum_steps) != 0: + # 
only step if there are gradients + grads_present = any((p.grad is not None and p.requires_grad) for p in model.parameters()) + if grads_present: + if args.amp: + try: + scaler.step(optimizer) + scaler.update() + except Exception as e: + # 防御性:若 scaler.step 因某些原因失败,尝试普通 step(只在极端情况下) + print("[Warning] scaler.step 失败,尝试普通 optimizer.step():", e) + optimizer.step() + else: + optimizer.step() + optimizer.zero_grad() + if scheduler is not None and args.scheduler_step_per_batch: + scheduler.step() + if (dist.get_rank() if dist.is_initialized() else 0) == 0: + print(f"[Info] flushed remaining gradients after early stop (processed_batches={processed_batches}, grad_accum={args.grad_accum_steps}).") + + if scheduler is not None and not args.scheduler_step_per_batch: + scheduler.step() + + return OrderedDict([('loss', losses.avg), ('acc1', top1.avg), ('acc5', top5.avg)]), step_logs, global_step + +def validate(args, model, val_loader, criterion, device, max_batches=None): + """ + Validate on the val_loader. + If max_batches is not None, only process up to that many batches (useful for quick checks). + Returns an OrderedDict with loss/acc1/acc5 (averaged over processed samples). 
+ """ + losses = AverageMeter('Loss') + top1 = AverageMeter('Acc@1') + top5 = AverageMeter('Acc@5') + + model.eval() + processed_batches = 0 + processed_samples = 0 + with torch.no_grad(): + for i, (images, targets) in enumerate(tqdm(val_loader)): + images = images.to(device, non_blocking=True) + targets = targets.to(device, non_blocking=True) + outputs = model(images) + loss = criterion(outputs, targets) + acc1, acc5 = accuracy(outputs, targets, topk=(1,5)) + batch_n = images.size(0) + losses.update(loss.item(), batch_n) + top1.update(acc1.item(), batch_n) + top5.update(acc5.item(), batch_n) + + processed_batches += 1 + processed_samples += batch_n + + if (max_batches is not None) and (processed_batches >= max_batches): + break + + # 如果没处理任何样本,避免除0(不太可能,但防御性) + if processed_samples == 0: + return OrderedDict([('loss', 0.0), ('acc1', 0.0), ('acc5', 0.0)]) + return OrderedDict([('loss', losses.avg), ('acc1', top1.avg), ('acc5', top5.avg)]) + +# ---------------------------- +# Main +# ---------------------------- +def main(): + args = parse_args() + + # handle local_rank from env if not provided + local_rank_env = os.environ.get('LOCAL_RANK', None) + if args.local_rank is None and local_rank_env is not None: + args.local_rank = int(local_rank_env) + + distributed = (args.local_rank is not None and args.local_rank != -1) + if distributed: + dist.init_process_group(backend='nccl', init_method='env://') + rank = dist.get_rank() + world_size = dist.get_world_size() + else: + rank = 0 + world_size = 1 + + if distributed: + torch.cuda.set_device(args.local_rank) + device = torch.device('cuda', args.local_rank) + else: + device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') + + set_seed(args.seed + (rank if distributed else 0), deterministic=args.deterministic) + + save_dir = os.path.join('models', args.name) + if rank == 0: + os.makedirs(save_dir, exist_ok=True) + with open(os.path.join(save_dir, 'args.json'), 'w') as f: + json.dump(vars(args), f, indent=2) 
+ if distributed: + dist.barrier() + + train_loader, val_loader, auto_num_classes, train_sampler = build_dataloaders(args, rank, world_size) + if args.num_classes is None: + args.num_classes = auto_num_classes + + # 使用本地 Model 优先(LocalModel 已在文件顶部尝试导入) + model = build_model_with_local_priority(args, device) + model.to(device) + + if distributed: + model = DDP(model, device_ids=[args.local_rank], output_device=args.local_rank, find_unused_parameters=True) + + criterion = nn.CrossEntropyLoss().to(device) + params = [p for p in model.parameters() if p.requires_grad] + if args.optimizer == 'sgd': + optimizer = optim.SGD(params, lr=args.lr, momentum=args.momentum, + weight_decay=args.weight_decay, nesterov=args.nesterov) + elif args.optimizer == 'adam': + optimizer = optim.Adam(params, lr=args.lr, weight_decay=args.weight_decay) + elif args.optimizer == 'adamw': + optimizer = optim.AdamW(params, lr=args.lr, weight_decay=args.weight_decay) + else: + raise ValueError('Unknown optimizer') + + scheduler = None + if args.scheduler == 'multistep': + scheduler = optim.lr_scheduler.MultiStepLR(optimizer, milestones=args.milestones, gamma=args.gamma) + elif args.scheduler == 'step': + scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=args.step_size, gamma=args.gamma) + elif args.scheduler == 'cosine': + scheduler = optim.lr_scheduler.CosineAnnealingLR(optimizer, T_max=args.epochs) + elif args.scheduler == 'none': + scheduler = None + + scaler = amp.GradScaler() if args.amp else None + + start_epoch = args.start_epoch + best_acc = 0.0 + if args.resume: + if os.path.isfile(args.resume): + ckpt = torch.load(args.resume, map_location='cpu') + model_state = ckpt.get('state_dict', ckpt) + if isinstance(model, DDP): + model.module.load_state_dict(model_state) + else: + model.load_state_dict(model_state) + if 'optimizer' in ckpt: + optimizer.load_state_dict(ckpt['optimizer']) + start_epoch = ckpt.get('epoch', start_epoch) + best_acc = ckpt.get('best_acc', best_acc) + print(f"=> 
resumed from {args.resume}, start_epoch={start_epoch}") + else: + print(f"=> resume path {args.resume} not found") + + log_columns = ['epoch', 'lr', 'loss', 'acc1', 'acc5', 'val_loss', 'val_acc1', 'val_acc5'] + log_df = pd.DataFrame(columns=log_columns) + # step-level log + step_log_columns = ['epoch', 'batch_idx', 'global_step', 'lr', 'loss', 'loss_avg', 'acc1', 'acc1_avg', 'acc5', 'acc5_avg', 'time'] + step_log_df = pd.DataFrame(columns=step_log_columns) + + total_epochs = args.epochs + # global_step计数器(训练过程中跨epoch持续) + global_step = 0 + + epoch = start_epoch + # loop until either epoch criteria or step criteria met + while True: + if train_sampler is not None: + if args.seed_sampler: + train_sampler.set_epoch(epoch + args.seed) + else: + train_sampler.set_epoch(epoch) + + if rank == 0: + print(f"==== Epoch {epoch}/{total_epochs - 1} ====") + + # 如果传入了 args.steps (>0),则把剩余允许的 step 数传给 train_one_epoch, + # 否则 max_global_steps=None(按整 epoch 执行完) + if args.steps and args.steps > 0: + max_global_steps = args.steps + else: + max_global_steps = None + + train_log, step_logs, global_step = train_one_epoch( + args, epoch, model, criterion, optimizer, train_loader, device, scaler, + scheduler, train_sampler, global_step_start=global_step, max_global_steps=max_global_steps + ) + + # 如果启用了按 steps 的模式且已经达到上限,标记需要在做一次验证后退出 + if max_global_steps is not None and global_step >= max_global_steps: + if rank == 0: + print(f"[Main] 达到 max_global_steps={max_global_steps}(global_step={global_step}),将在完成验证后退出训练。") + # 我们不 return 立刻退出;后面的 validate / 保存逻辑会执行一次,然后 main 返回/结束 + end_due_to_steps = True + else: + end_due_to_steps = False + + # 验证并记录 epoch 级别日志(如果在 step 模式下很可能在中间某个 epoch 提前结束,但我们仍做一次 validate) + val_log = validate(args, model, val_loader, criterion, device, args.batch_size) + current_lr = optimizer.param_groups[0]['lr'] + + if rank == 0: + # epoch summary print, 格式与示例对齐 + print(f"Epoch[{epoch}]: epoch_train_loss {train_log['loss']:.4f} acc1 {train_log['acc1']:.2f} acc5 
{train_log['acc5']:.2f} | " + f"val_loss {val_log['loss']:.4f} acc1 {val_log['acc1']:.2f} acc5 {val_log['acc5']:.2f} lr {current_lr:.6f}") + row = { + 'epoch': epoch, + 'lr': current_lr, + 'loss': train_log['loss'], + 'acc1': train_log['acc1'], + 'acc5': train_log['acc5'], + 'val_loss': val_log['loss'], + 'val_acc1': val_log['acc1'], + 'val_acc5': val_log['acc5'], + } + new_row_df = pd.DataFrame([row]) + log_df = pd.concat([log_df, new_row_df], ignore_index=True) + log_df.to_csv(os.path.join(save_dir, 'log.csv'), index=False) + + is_best = val_log['acc1'] > best_acc + if is_best: + best_acc = val_log['acc1'] + if (epoch % args.save_freq == 0) or is_best or ( (max_global_steps is None) and (epoch == total_epochs - 1) ) : + state = { + 'epoch': epoch, + 'state_dict': model.module.state_dict() if isinstance(model, DDP) else model.state_dict(), + 'best_acc': best_acc, + 'optimizer': optimizer.state_dict(), + 'args': vars(args) + } + save_checkpoint(state, is_best, save_dir, filename=f'checkpoint_epoch_{epoch}.pth') + + # 如果是因为 steps 模式达到上限,则在完成 validation / 保存后退出训练 + if end_due_to_steps: + if rank == 0: + print(f"[Main] 已在 steps 模式下完成最后一次验证并保存,训练结束(global_step={global_step})。") + break + + # increment epoch + epoch += 1 + + # stopping conditions: + # 1) if steps mode enabled and reached steps -> stop + if args.steps and args.steps > 0: + if global_step >= args.steps: + if rank == 0: + print(f"[Main] 已达到指定 steps={args.steps}(global_step={global_step}),训练结束。") + break + + # 2) if steps not used, stop when epoch >= epochs + else: + if epoch >= total_epochs: + if rank == 0: + print(f"[Main] 已达到指定 epochs={total_epochs}(epoch={epoch}),训练结束。") + break + + if dist.is_initialized(): + dist.barrier() + if rank == 0: + print("Training finished. 
Best val acc1: {:.2f}".format(best_acc)) + +if __name__ == '__main__': + main() \ No newline at end of file diff --git a/PyTorch/build-in/Classification/SWL/wsl.py b/PyTorch/build-in/Classification/SWL/wsl.py new file mode 100644 index 000000000..895f35531 --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/wsl.py @@ -0,0 +1,253 @@ +# resnext_wsl_single_file.py +import torch +import torch.nn as nn +from torch.hub import load_state_dict_from_url + +# ============================ +# 1. WSL 预训练权重 URL +# ============================ +model_urls = { + 'resnext101_32x8d': 'https://download.pytorch.org/models/ig_resnext101_32x8-c38310e5.pth', + 'resnext101_32x16d': 'https://download.pytorch.org/models/ig_resnext101_32x16-c6f796b0.pth', + 'resnext101_32x32d': 'https://download.pytorch.org/models/ig_resnext101_32x32-e4b90b00.pth', + 'resnext101_32x48d': 'https://download.pytorch.org/models/ig_resnext101_32x48-3e41cc8a.pth', +} + +# ============================ +# 2. 基础工具 +# ============================ +def conv3x3(in_planes, out_planes, stride=1, groups=1): + return nn.Conv2d( + in_planes, out_planes, + kernel_size=3, stride=stride, + padding=1, groups=groups, bias=False + ) + + +def conv1x1(in_planes, out_planes, stride=1): + return nn.Conv2d( + in_planes, out_planes, + kernel_size=1, stride=stride, bias=False + ) + +# ============================ +# 3. 
Bottleneck(ResNeXt 核心) +# ============================ +class Bottleneck(nn.Module): + expansion = 4 + + def __init__( + self, + inplanes, + planes, + stride=1, + downsample=None, + groups=1, + width_per_group=64 + ): + super().__init__() + + width = int(planes * (width_per_group / 64.)) * groups + + self.conv1 = conv1x1(inplanes, width) + self.bn1 = nn.BatchNorm2d(width) + + self.conv2 = conv3x3( + width, width, + stride=stride, + groups=groups + ) + self.bn2 = nn.BatchNorm2d(width) + + self.conv3 = conv1x1(width, planes * self.expansion) + self.bn3 = nn.BatchNorm2d(planes * self.expansion) + + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + identity = x + + out = self.relu(self.bn1(self.conv1(x))) + out = self.relu(self.bn2(self.conv2(out))) + out = self.bn3(self.conv3(out)) + + if self.downsample is not None: + identity = self.downsample(x) + + out += identity + out = self.relu(out) + return out + +# ============================ +# 4. 
ResNet / ResNeXt 主体 +# ============================ +class ResNet(nn.Module): + def __init__( + self, + block, + layers, + num_classes=1000, + groups=1, + width_per_group=64 + ): + super().__init__() + + self.inplanes = 64 + self.groups = groups + self.width_per_group = width_per_group + + self.conv1 = nn.Conv2d( + 3, 64, + kernel_size=7, + stride=2, + padding=3, + bias=False + ) + self.bn1 = nn.BatchNorm2d(64) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d( + kernel_size=3, stride=2, padding=1 + ) + + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2) + + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.fc = nn.Linear(512 * block.expansion, num_classes) + + self._init_weights() + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + conv1x1(self.inplanes, planes * block.expansion, stride), + nn.BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append( + block( + self.inplanes, planes, + stride=stride, + downsample=downsample, + groups=self.groups, + width_per_group=self.width_per_group, + ) + ) + self.inplanes = planes * block.expansion + + for _ in range(1, blocks): + layers.append( + block( + self.inplanes, planes, + groups=self.groups, + width_per_group=self.width_per_group, + ) + ) + + return nn.Sequential(*layers) + + def _init_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, mode='fan_out') + elif isinstance(m, nn.BatchNorm2d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + + def forward(self, x): + x = self.relu(self.bn1(self.conv1(x))) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x 
= self.layer4(x) + + x = self.avgpool(x) + x = torch.flatten(x, 1) + x = self.fc(x) + return x + +# ============================ +# 5. 构造 ResNeXt-101 WSL +# ============================ +def _resnext(arch, groups, width_per_group, progress=True, **kwargs): + model = ResNet( + Bottleneck, + [3, 4, 23, 3], + groups=groups, + width_per_group=width_per_group, + **kwargs + ) + state_dict = load_state_dict_from_url( + model_urls[arch], + progress=progress + ) + model.load_state_dict(state_dict) + return model + +def resnext101_32x8d(num_classes=1000): + return ResNet( + Bottleneck, + layers=[3, 4, 23, 3], + num_classes=num_classes, + groups=32, + width_per_group=8, + ) + + +def resnext101_32x16d(num_classes=1000): + return ResNet( + Bottleneck, + layers=[3, 4, 23, 3], + num_classes=num_classes, + groups=32, + width_per_group=16, + ) + + +def resnext101_32x32d(num_classes=1000): + return ResNet( + Bottleneck, + layers=[3, 4, 23, 3], + num_classes=num_classes, + groups=32, + width_per_group=32, + ) + + +def resnext101_32x48d(num_classes=1000): + return ResNet( + Bottleneck, + layers=[3, 4, 23, 3], + num_classes=num_classes, + groups=32, + width_per_group=48, + ) + + +def Model(num_classes=1000, variant="32x8d"): + """ + variant: + - "32x8d" + - "32x16d" + - "32x32d" + - "32x48d" + """ + if variant == "32x8d": + return resnext101_32x8d(num_classes=num_classes) + elif variant == "32x16d": + return resnext101_32x16d(num_classes=num_classes) + elif variant == "32x32d": + return resnext101_32x32d(num_classes=num_classes) + elif variant == "32x48d": + return resnext101_32x48d(num_classes=num_classes) + else: + raise ValueError(f"Unknown variant: {variant}") diff --git a/PyTorch/build-in/Classification/SWL/wsl_loss.jpg b/PyTorch/build-in/Classification/SWL/wsl_loss.jpg new file mode 100644 index 0000000000000000000000000000000000000000..6d478cfdf564af63db6cdd1b82ea09e96185632e GIT binary patch literal 35863 zcmeFa1zcQPmOWfJL4yW|011Qwg1ZKXK;bS44u!h}3P^wi4G?X}lA+JRzJ3!H8}}|g 
zA@Ti(q^#_m+`RmP!lJ6`n%X*OeM4hgdq-zicTaEM=-BuKZ1U^W^tYwumDRQNjm@ot z!=vMq)3fu7%OCAR0-*e*kPIN*^HH_!Z zMD$#Nn8Z(FGAmo}FmS8ylbE`UV39KNe0y;4qiH|2>_679puekSe`(miwQCxHgMx&( zc_;(`alqLnb5;P>zskR^!N2;z|Cv4@kpBA9;|UQ4^mz6UHjb{RZ)Ob{i#je8DCMs> z#Qk7aVmr5XDgVbdXoXUYb`2%>Edmz(^#kWBaxeT@|zI`!e}D zG15jF&BSBJW~HTMr4P_nGjC$;%^KXWVTPEuT>h3!DJLa+RJQZ1`@8Xl+xl*oK%3LI z7sY6uC8du(lcOa_2(3dE6Wbt%0_O|xw!4r`9?iWllf$PXQnvtO`|D1Cn49n>DSS~`b&7)y+f7Ix%%4?-1;kpAFXs+>j(7JpSu}oz}ZKZ;F zi*^K|ln~$dN?>po=K#Rt>@or}BSx$19vlMfewW+Wx`sV`-x46cj}&>a7vWjNPfO6u z1a{^jvfYYC-ErjKHK%9N{_2@puU1b`zewez>^x6t>5Km=_zlyS6xR(lM&$glXW?*? z$qv`mGV^iXpliP+ex&psRSZwIT@e!-OJ1WaC2|Y+(3RMWSs{G>{vD{m%z}rnwtIdD z+k zjQ(|zzaAIHfnN#LjjIGre93vchW5%kTvNeR4`?a7I)-M4R!JER}x@2Mit6;8ETx+Zg?i5d%aVAJn-@Dc<7g z@~O<{yBnU%LE$8;Wj#1|PK!TQa*v}j#2<=-U1WU01xjaOj9iU57=I;UfYQ(fj1+q`e9e8J)6uOz zzF2jS)I&@s$+^WkQQ*?t-)-_hEZph+)6+YRM1`jy zKoYYEAu!o$?8-EC*FBlznkQQc*NU3yy;&wjXNZpP0ZWPMCJ`C*+Zj21xZ(2b?>nV?ev5rNkgj6!&O67)iJOg zTqdPNtA>wn0eLk@8$QtyHPjEW1sxxU`Bic95R?##l)cCBOeoUDlT+mcC3jAvQs7X` zY&oS0eG8J}LWY}B1r%kFvb8Sa?E3*<9njp|Tar1=Ebt0BlB{g53=%fzDP4}GSN*i{ zA(is^7j%?}m6J}Uug#CGz5#vn4z#Z2r<)UDjZFr@Q%;&87cV|0;~5kHRu+dbvNoo7 zQ_H7&o%8w+yD2XmKizv1Ux9~JkLo1naFSZI1-0s;U=KEXoVdmag_aN#tvn=t%n&)a z-LsBmn;uZRBV|xG9z56Yc?yFf1C>>Pu2^$uXCuaqa)70h{dZo*Z@-+D=o~cH%v3tCxMtVC?Z{fib zWWbPyRA$u<8-}!b9^hOkBqod)L}h<3a=wVWn0%BmCzrFafLnV5@;p^8OBW%WwPURt z6)KRmb3+BP`&!Khh{}Nwwxh^~Xsa$Mg3ZFQ9t2z!iOpuE;y{z9S`5^3i>5sHG)j_p z+mRbc5@=qglw!0SxdnJjF|&(K@~#V&HqSp8d*NHWQCDranHc@-`5kgv`$ICGRixEG zlSJ`OD9NYGwGYeqG+x>!`0@^k%KSveDlt_h1T`|KcJgGFreek|?7pffCH@a&%Fe|N z8)}Y9Yn7D1Q_pV!(ut<8p{VwQ$=q$RIqL-sFZJQ%2lL^@Qn|Wf1+iO`q&@v2d0Md& z0rlF`cPj+@3YRgWfN`U>24rG1=k4vIvpGJ^*hRe$vJdE9T;IrkH4V|a2J9`i3nycd`!qJWc4H{u+cL0MwK|`B2g!rT%#}9f1J?+(dQ~Ih;!x9 z<3$&7tQ8TYLOrD-uVsxl6!Se>*kYS^^K^Qim|(o6OjxfhjSkTsoxT^h?a zX%Py`W303V2=)De0*1S9vl#Bv8wWK#SVfQkRIWkYL3d>{ES{9*LD#0%~5!w}1cw$N>-1 z@K48;{c#HSp<6&3%HrN7i5ut|5Pl1Ill`x%|Ldy%tHbJN=JQ^Fr_EW)QXRZ&?<4D^ymk$35dwbTO80mv1tIDB-~$F%Zxz>(9`O`}4vv 
z+lKf#+Ml2o-!Guo(DnT^bOiEpM39P~nSVlFIQ#rJ*gp7hq(31q4X){e--vL~s$vYyKqyd8z$CUSq#NUi}EM-d;)f!tq2B_jikp0s$ZKQF(d6~aboS?Ha6Q_ zcaW{G&>(HbSrj3_?vdvIbe*`Tj5qah7ln2rd*dZ^dO1O<-n=eSY>1+8hW;I(;Bjy} z>7WWedVnwsFy$a6Y1T)p{Pf;*$Brji%2YQ|-~Dy}5$371R9ny;TSS{w>g7I-vpbaL za~eL(#!&-dKf9Z2re*jn2y;+!%Nkjn@)pp8J_B!S^n@o!8Bsfn@f?u8VI{qM>eKmh zOG;;Ke44F|T=JeXA)l290HnB60LThsMwBSeSYdBt=IbevJGw`a-BdWI0{{gX%MGCp zpWSHOL#Bj*_*gC%wQe-9VoKK1_fGOhz>KW0M>xz`VSxW>g{3)w@@pV@8z)@LGy`&e z6Af>G9uXt)O&rt4Q|b)+YxP5(pBKYJ@vrmc;5@&0&0Ag_eN^6k9pRVL5(B4QbGt%( zThrb|@V7m#t=11?)tKo3L-VzXSI{mG;y3m}(pQ3BjJ_8^MH~ryILLA{+3dvYWxyG0 z6k(-_(psw)d4?F}$QgZwHD+>mEyf6cErgBTTJ0-QIQ!aI6FKf6yz*aaXm721L(R0;ujbRYxlV zPCoud{Q&rG=Rk#rnStx;&qjq8P*3|bscRqqI<5s5@|At&79hiB zhj{L;N#7Gc;*DISyD{VIf!0yx#q!R-`l z!M?0#`m2$*fN2Y^c||vv!n(kHL{38>82dAT9Y6-K;)RV!kR9oQ^9Dv23#r;H{Pc zxcc*}-2~e?ao-f@yGCEW2x(MY7je7=&`~;GVQns4>fyS%0n}uOe-xx%1mWE&y6kO_ z|K)iJ3XgHPz*YMJ6O(iK{TSdpgooy6SKFYHU745w16(EJ1G{tt>hND))>P3)&jc+@O&rjql{c_NMkAX zsq6PI#9ChEJ4{gu_Zvf`BP^yEQJK#~3!vkyyE%^>Jy8eI6}#!oqK%og=yuA!eP1ij z>kMNq!EPau!Yda`w#l;BDk=)( zfYe+`kr)Z;ooBNf`Zbg4U+#swD|yX*Vdb2ej+X$;$+S>j;a7la7Iuk_RX?>yixO>w-o`8CXX-7}2IzIRm^|IbZ5u zx^FK`-&v__tn%w1b9fR;b_;NSPjLJ&fEv-^X+|NEs-CMwi5=X!-pjUW*@+N4f*J73sfFFa(z&7l0YaR~?mGz-SLRyUexg z7I27EhcH5AskeaKx07=JHYQy3A=eu7+zn%(FJ?t+YcnGjw0;npmU#EtmrFbBMsl*; z`sOB60u6P~CtZ%BJN)ZQZFv24s<}#18xl6=vkI&mbMyzJ?F*y=svjyM8s127P8HUo zS2I@4qpVxoxs>`t9^x9D&QI%%?*tL5V|gaKGH^$!qJQm0Li%4TSd=KoM<$ z>!zQ$!!-AXBshnlmx`~VD;7s6=NUT9@<-B3ztoc8ogKR1o>uE+Ea%P6$LUPhS4tP< z=OgRAw*awGI2g{u;-o$H>@?1E(#2=BrOJH{XfnQg?3@p>DeQ~uOLS>=#0ezj(y^`E zfAXkxX3)DV{P(I+g-7a$>GfKlJqC5!>eNubZ%YB#oaV|j{o*vDY!>;BfL+QW4$ca{ z(Z1*FUAO4F8%c(j&nhPJVplC2yiqH;7aQ%dB+&xft>|b%N#?x|AwxASNw==EazueX`oba&#K~CFeo8VF(s}^5S3_wtSn@v!PphVkq^q$@yBxCPsA zYZOe88hP^J%6>H=H?Ob6{K<3qgBEJ7ur$pyJij81Z#$N!_68c`J2f_Yj%=CLQLU*f zb%$zG$-eJU6WABp0|Q}=ncM+@+P6r<$SuoC|8|9uI`q$_v|h@6s!iyYYY19f5@Z-H zW6z77o+EMcknhpuwByGFbko!hpDW3I8sT}vR$fLb=RL2xq+qNGm5Pupy9IC~Mdefb 
zAnDKld_ClT+(`w*og79%L=?-@ptJOg-vS2vj!5ci3;^eB2g(>@Qpc~+1i(#GO$I>x zUns6=2<-G07kU%yws*rC(kS#dUGbmXv5oiv=)7@H)-hp7bzm+%tK}dA?sZIa0-F?F z#uWm8eBfwz-ZD08_*qT^y7zcV*zn4mUPfFQ2oQ?hW~=bW>^tcQEx(Mxia`ml!Jx z(UCA4Y0jDu0oA5Y=uSm~jLU(u!|1ZfORvjoCG%!C8$*&k_G=F0d|r!;tD}%G_Y1H# zpuX%`Dz^E=KUOO|BX3soDP;a4)y~oKH7mc2B;$P?`5txI#RilJ7=I|cL5EN7V0MxI zRzhQHXUSUwNxoEPVr|tF!cS%y+A*)yyF~-2aFTqU3I(r*zI-8eseDq~70Lth6RF^8 z%Oca~tdctXbSTRg*NSkboLcrl&qrBmMTsLEK6FEma{TTgT zZ3=f{dA63>$;@J#!z#b&ma)5+-vztq8|u$23}DT4yOKxA$}eLXX0Ey@d=?Nu2?Hem zl}j{~i#Wy>_y(OIwU?NsHUSA!s;0CL%u3y;`K6!lQTt3f{+rYM0V-Ye?_JA_sQG3< zzkuD7n`kA|KfTU!LZzb7(JZb3H$Z$VBy^M;tt+vUxv`wyO(IGJb}@eh6=$zKf4d*fez!d?s{Bng~$ za`K_j@2Y_&ed+MDt!$`ml;m3!(|YS9w+ySZ-BcbBF6No0FCnI=ZnWXZ3Ij?Zo_=sZ z|6>YL`R{eJNkpjiJALvW?Q(x|0uh<-iUE>jbZ2E6?Ko~mmbZX*^|;mh{CtdOZ=!+c zwA)4&t~6Y=37Bl|SG)tT1h^T}wKEMhgdHLTOMt%IzBuMADYYp}Pt?fI%B*P&I~}Dg zpmfuqb8}RcwYBZtuuDDpi#6#zpsMc9maprC*%+B=KV30D8k^xQ)q_G0TZ?T@onDDs zvzyNfu|yo`nzDUQa+n<38=k?3SSRV!wRq$P=FgDPh3O_ZBUs-6Q_+h(?Et_tC+4i$ z2s7%K@4j>I1f=;@H zHz_4I*=u)!!GM@$R9d`DzM5!bHT2jj)aQjm93cW3tlMHATsN-g3%G zQAMvMk<)-A@k27@)|fKUx7CiB`#LA-ZvRkG88vXE!pW)q zWeQWnio8bF*F9V0WOqLw)2GS+T>ObWM%@ui zG){jv>A?C_g18U5s5r9o`J|p9G=eT+=(|6C?US+1@(-HsUX&rUp(YOCTxc4w*t+b# z(}7~(NT`#nWNFbAtZ)HhvaLa3yzO>nzj*1+(bGTJXPKe z>P=RgsGpw%UEczDz$5?az`vJKk<~J{aKefS!dwi~m(_A-`AouqO3+G-)P4R`zk8{L ztkvPZE_}wZN?;#dtQTM6(KHr&<+454084Dh6g|#bVliGg-?6i={HD4z86VrrSFy>U zR9JHK`>y8QhtTRMe!vO2Dz3GGzQL5#%JfVps^o_{=|CM;1tydR#<*Q;l=MlSZSlLx zIVRKFZG%c?taW1s?{f%|o5&)+y$it4_n%-2ss*Q-N!XO-gn0&S`C2pv^X2w+-&doL zdMU8e9u(;dnKY*O-7O1-C&G`%1u&845ssJC4!|M?pDgv=Bgd_ROIiV;bTKtc

+?#qkGjAVePeJYh<~ z_uiCC@x-Fp%L}6PZr<9*KKp(U@og!JPdi2I>4_#sLPd{a6yw_Bq+6HWaY}^ldZVZR ziBKgNVh)DTGQH&k#b{P|`5wvX?&@iIN=;gV#I;A4<(1 z@z|2xMOvhRJcUK_$&czzBXns3EPXzAZCPXn@g7ZYG`grI#pzU2I&#e#$Ozp`-4(bb zNQy-Wrk&<>(Sv9oFqqv-yqRk@Emu=fbW&G2!-{cE4%bEu;*x-WA(vl)(O-}Y|2eYY zFX5}EYb*6Y^V_F^OYw?AX^RmA>OT?_FV|yx=Dl1tzku?^cl8>Zar@HS^Pwc;laH2a z?3q+64tTIYQwF_F*w?C@+vh~-U5)SDV|io9xH!;s-RNKw?lqGpL**CmN6#FUkFOgc zZ$;jaq0MO$0(!d?Hh?rsh$v38#l7L}jIQ(oJwj8+(>x`Ds*LYge1y66^vjf* zvav?U>NwFdUvHb%Q~6apC-mN2`pdZ_Rw}B^l{cC6zbVbI>Da-s=ODw=Bt;qHWh2&k zqG0!I#y6N)?WyFS*-Lc)RLcZ@{sJi0{EIy2yNh`~XQf!(X?IKyv`WXFn`C%!iA%EY zJGtC}YgOv1R;k?4AA;j=*&FW^^81#b(`NjS!!7>+%<^v&DFuIP=l-e}{>_YW$ksG) zMxoToXyj=ywVxyz@8CJ#n(qC^;m4Hx`eqllpo2DV+3Wi2;({_qlTpV+^=RYKhe5!e z`_^j;ty4UHPVyUa=RVpMv8&E;V<%jN_xkfmJTLDZZ#V3oL_Q_wwnt%gF2k7b9vFhv zg2QVLQd1+8PY39CVKzhW)1+lazeS&csIF)Sv&K(aNF&9R^?{gntoC6&!-}2*Eap}s z&t;eJNVNTe z8^ihNzq{0}e5KW7f0!dh_gV1iVl=F#6$c4}?hseb6-S43REEdj+>WDnTXoa(?ZW2p ztU;mt>-$=bWq6#I+ALeuO>_2}Eo+gfsShlw7bYEh6E?EKzIGrG`ro{uf3(lQzel9i z4vYUD>n1@>@^>ZtG|WLgwM^z8CKuz3lY7Ppk5{E z^NPuDIZz4B2>()Sp!D9sp^O4%8Kxjo&dP#e$Uz>*gz7h<;l;kY1;$kz+*(u0&bQ^( z>lVppKPbc6745+xPP?*3n*R-fyCh;&KGlo+DWPHudC~ULYkvC05)J#3__m@QBK6#` z32XBCoA;L`FZXW!SRV8UM#S~L9s1Fr#Lx1M!(L63d*n2Mcq$O-FfyBtXis<2Bk%a* z@zpVJM!6ao3_i29(Kje9P2t-fWyHiULw_`RSI*Uut+^P{bF-sWWpNc`z$AUKMmQ@M z$#!cZ1R^n;i4>ykmdi@1rM;qKf>Oubhh=FEN-gJc*Algjn~ue8&*H`gR5a5IKGR0J zyVu%V^fzw9A#Z}I8G0^l;3feGlNnR-bJ(WsA7^%xl1K`7skceeQkx)YMA0XS82;k8 zRKFg@&oO84E+%LN_?RwkCJKlJi^vu>G4MmAzxZ>#WmP}cw9MUbTZS>0_9OpdaQ)F= z{{x6+bq>~CDH29AhJxIz06`<+RymcAzIuC?{FXjyPRwy*|6&>m}S|4dGT?1K+q&lqFDPr?Ad zO;7xPN0V89BBGG0Vn(X!+gdg=-W@P+GB``ym3GQX^u>HS~Mo zHNEbl=%8wIWBvLDC_~xtI?5K8I7L&IZ46kSw=9zs@PVsOJh+kL8V~%WfsFyqNb|UP zWM;K`oZHi8gZ6wFBA!v2z1P}ZG ztW)*6kowkR^xb+hqn7T4XyH;8YHQ3=43-#XEZ?*IlKX(XP{HWF8ccp-cvTk}zaU?W zn&l9?N&MJrnlU&+%)s5K+3Z4V%PBTRGf_>2ozVHY46#=1-@<)=wEc*b$pmzGQvBBf znhn|;13z3>IMOX3*7IBW71|7BBR2yt>_0oKt4>m%*>Kxf_XInZqZNAkWpS=H?IKJk 
z2Sy=Dq2XlYV>`R=JjW9m6>*9UA9Wq|;T#7tG~Lg-MetMABt?+NkxbhCSwF(u<(KNK zf`G!?HjX?O4DYII9?|ESqdp_Wi{JpEUg{Va(!Qz%&J6gEDmD2Mhui|5#;y(AuRC0+ zf$FKsQBoa}8yRG@YIJ(O`x!?g{f0Q%PC00~4zO?e4$Oyan)8kf|87=WzMn*i6Mz&8 zvenNlHW=`I&X(HTOBEYIIOk!NJ{I<-w~H;;Lz6z2F}aHTS%N1NLpPvjhsR$(kqm(H z)eo$g4Gj_odo8{;6&PoUey*;bOQXBpVq%?ThEHOLa0ciLtc`-AS1IN>Z&Eaumim&6Gx3V^3``^-o8tI% z&>>~0i+AC@CwO9Jd-}XFW%`2yHV_-~*fp4{N4btvJO0t0{z)*cdtZ{al4Dv+>Oj#| zA1`GSB>x&4AwMv9k3jLi&fNS#-{bxs9oUGF_x?3?XpNHfXN&h&yxQN|Uk2`)PPueQ zw4`qiYXdfQ)TLeOPQYM%+QRz_k6DW{pn&=Tx7yG$aAE$ysh}I1j#`v1lv{uLjae(B zy&yxHkPm?b**w*_B@J7mBIxeJ7W-2M;aTGF{yL{E67no0`DeNTva;=8C&7k%!Ayur zbM_CSL(7jNg>d;+8HuJb8ElMou@+H+oFa_|{z78>70Umu<^PLD?C0_E(^i`=-JZ9r z*1KR%x)*vzy49YAS9+el4Vo-}7&HwuknNnCdA-DN?q{%1J6KDg*j3c8i_is)UrmZM z*iNj%jb4=B0#G3J8;0YXM-aX=k6XY?=)BMm1|sEW@VoZH%7*2uWT`&>@c;`*6fzxs zKDimaYJ`?L?g>*hYEuaFf=Lf$k`qHY9!!+L4*hynv;8nTUEwIHEBhsPmEd)wa!`nA ziIX@z&@n7A`?}ws;#2VEQMy~J*7Trd2#y=M>DxP|xU++BPl2wA|C4~tKLFHd zm|N;j#qd|41`l%iajWE*YAImp2EN7c0j%>l5^M>*pS)i`qK$pSE)!uO=fQZSBwfWk ztMw$73UAS8?0DhsLT`ClWbvljl;IbpLI!BleNb;ad z2AeM)ya>91t<4$l&Ll+5-DMzoyI9SHk$|OcW342VcpIl*a!YA<^XeA<=YB|4E{` z^up%b93eB?iS@BB>UdOner1z^8)2-&66zjeqJC8E%S{U`vg|57<$3dw>fkrcuMVHR>qh;k6tH7mt_>idGD8ssH56{y z$SPu|`{!!CGhBE$hm7dWIBh@s^9B>dw1Z5q{0<+RZ%;Qp$xeKxi&h;W!~P7L z)yGO#f^0*X=EB{_B&BSc*{c_}W;$}@9+0$9Erc_XZT(q%rsYnFSqo&UdtV_-w`-j7 zG9|25=4n>$J#xFgh!p*Io-)3|^^fhf!QLW0gA%43DgwNw)jUmiO4!RMSFc#BE-C`N zQKR3Rjq&Dd$g001N~&flF(a?Va6`Z*Ys)_xcKW^g{P*IipYPY7w|IXoIW$~H$c|Tj zXrP`O3Eu*g?w3XV_4+~mSNPMl^(Uks7=y_Ao`es=#Jz3-iiovw*4VgQt%wwybZpVK z?ktOk>Kn0ly3GkuYOQ2lcTYrXT)>FVwwQUdCfCQV;mcmW?`Fp)HV?ONd=e2BefIOu zX8p$&7=O@m$y=9L6eZT~M~M&~JWJs}E-R`M(XhD5{N|_l6}NwwoB71zB6JJQMrPWk znx(hVp20Y*e}89x{DpA--0rM-peOBVK)ZhiELA!YXp_s1p-3$`vpPc74qg7F{Srw8qE zZm?Jhw6UvCkzfk;$_iJNmX4pn=^8kD+WJObWMRm?J-IG6%??!Tz?OZ(02oExY@)ic zRSZ*gjw;*PGSVv)C5atIbf)p1U-qmYuUAFx-vS7?cg!Rl&~m$z5u%nnbr--TM1O1h z9js+ZYgw6Ai%QA^_tpCBVS4#tSy^3avBusFH-!)r-a?|pufaih@*Ka^C(94|q|INN 
z-#<`LyT14iG=6^|cVB&@v}VPDywggXF!s#ey>{XjP>TDgSLSN6dVU(B|IS1YN@2Q1 zAjdOI+Il#=g0n0jT3`6R7APt!dYX8LZWn3APfks{Cz!fwz^lgJ)pZD4x%=F&+rXM&n7l!RBO$p0vNUPVn*qjq^wT zg82scgPs*v2}&Mq|MD`MX(~>o0u{5P^e=A+`QumYh=Qfjk^?}KvGLid!|--7VH zUH5;d2cXl2AFqu!b9z(h9W9xN5ekKZ-kZ%XMt7BMG=^_@(}cYh85f4MDz5LtKYk5Vuve`yYW z1*?j}l5dXkyVUL^QHu*#CSopRA~ipEM93gUvK!o|oL+T0({NVFT(w`qc}opb#1HNt zth~LM5w#HMIM$oo!|!lCp>OOfs#KV>JE4D<9GTnr!)5i4|I%f#B;@CH7tUaQnC6$m zrz)Nx^Ca;I`_RT69+41HprRW4&QsRDtJ=}3fGu~l7ptC<$@!4;c*u4XgMlGe#?>Bo zwW-yaqL(s)#I`hd&R}+i$D`*Ux9L5WxE8Rxuw-eK{IIuJv?)nJR&_1R)8Ol4_rc8EyqVA3|E>H%db%l|7Q35b5}%3KZr_YJAE~#f8cAr zv^5uM^kq<-AbqsvI8v6)M4`4NbQeTs2XQ;24Mb3TJI>R-&{yt9IYV7>1@h)qi8bmp zmFZ?ba`2Jad+muu6aIOGXG{Yl8@B+^;Z;=Ek0}6czX=zRt4mq^JB#L@J_NCUFJ?7r zN9w%>%U0I*JRq_;gB>)-&J3ck;KW9@Ym@Ad6(zI)5lXsS0J`IoLx+2{h&4GTm+XGX zs{XJr`H$JLV%}iMqrMfwJ4Q+_oXRWOpYh8?b?U1EVXjns$ImDg>K!i04|%k^M`XV` z9=t;B5!hC~2-L60Xh^KF9W+SjoR0KWCv5m z918SRY(-9m))yaZDCfI9d>8+r|J+eO<>@qD#SE{(^I*$OSF(*Q-xT*-z{zr6Nu;mD zOT#(|ql~6kd(Z|InuoUld^Jrs5P|Y(%XcGp?FB6rlEw#pr{-dx7H*t)-)T;Y_=7Ju zFUY!7l2_7u;n71!Liz}7!2g#V%MWZ|-nWBY0;+nF10+bH=}MQuH6*Hx$+6W?Vz4>e zFlz~R>8523zgYwuXD9K+GHJR}MdWHraOuofvqm+bbvVtH)^y*sJ}N#Nc-AHJ3Zf4s7J}#7=Eo(G}{*ux)R1!w(?@2q0udRK0tU2 zm~fDgSYqp#3$gg1PiE!W7$T~u#K7|YEAtV;iFpsNrDgw`Wik&0#Fvt(c?3 z2swe>G9lM(xGXeS`4|#L8sK9W&+EE4GO7*UKp8H0D*tW|LFR4H*M#_Ex+rY1K$tYOahd&#rklhyvtKCTlG zo(;u#_`^&ys!`@?+6OA4T$m+qKGAL*lz+&*xO87i=@#J@WbuEM61KHb%AlX46=&&^ zt)jBavd#ov5}u=26UHa%sN55NK5jk9uR~OC7l&KX5HD_VXSQmei$Sf zB1=WFlAwf0w)7C1vF(MvY3q#~)%T^F`!53R zm&?BVRW9yt3W$DbLH^e3N90nEv;l==bWyl$#7I4J*8KCd^>J$ZgE&t;L3x4~j3tkbAqQSx?-z$)Pj~Z`>IShY)ToV zu(xpCPem>I9O`%|gau5v0!5aJ!)z|K_GypWEF;pdw!^A(z3Tcd*T3^S_vYI&yac-* z#zLkUKR=cxfwtl-?^212Jsue1; ze1;hk|4K7u%dV5p1oP8MoX_;1&^%yTh`^ULJYmBM%T3q^ZOrLnhwFS*O6S~THI zknY36%!9$CDpsMKdp?L11yuEI&K1Wp&9x5bX%bl(1Lpvt0{K>1#>;bx$!S>=imMu~ zQp>hsMRNcQKg4tNuyZ}7P4$mYRavs0$8kql4c{f7%)|a=fbj~O1576 z;(qW^z4PqZ_JyX0{tCaRlI`gv-y|v_p>@xo&>}3C_+!eLc0zdL!Xdm7e_Ar%YkTGN 
zx-G2Jo^T1aIFM)3i+4J^|K+G_O5|ik^K81`$8RW}w{+~js&r*{gTis)>2+aaapHl= zWNGMk?ocEV(!MS} zJub`cFFR%Ws#%=~tePFpddu-Vo-SHVN&mw?n@RicXUh-{47n~_B%#q_u7ia_H`7yw zf$nWiV5iF`tOMTD2A1wZTRvJ_zGCjL50gbpM&cq;zTBVBh;2}dSWen5dvOb>8aOq1 zyhBOZcjQ{_FEAJ7oK|0^Ya_<+3HA-($ zOy}Ol0*RgQoybAFiGDfYgKh20-48cOXNS6MsVnygg{0+8w|ZI?zI`}ncDxr|r!z_>={;E#iRhdS{7P!p2~ay|!v79@9+SRfBph)I7+c{T)gf92;cdi7 zIu^@ie|?za?k5^KJ(KY+&hsvlKX(tKd)f3~X9j-<$owjv`}aQo3~S~j&2(xEju?1e zEO*9kM(aD(eSm+oV{oh`i@@jzzDql%w{yu)zf~mFq#b(rG!&ON!r#sjKD-_lFXERO zddF>2`>79hvWLkEuG{gXbPSeI@uMv=bBeLF@2iD+P6l^DW;Zwjl)8P(>V} z+Bhp{N(Nkdtz5+FA8)qG6TfL8hE4E&Zoyh>kwjk7{B$`8`n-+hlA{!IR@qEo2wv5{PPns#&Lax)A$w* z$8zPGm1J=VJInr80?!T2j9lidcIv&Z53M#i)}oJW@Y$_eGBAO*q+0ugVxlx>_aQ~PPg zsqNgi0v0oXtb_|@w80~~Z&5^u*VUUF^@exN@jq z;m!FC%g0)NM{jUl)1~LVs>1Td;|1l9zLah{BJ=#i$PP-q52%})yeY+_X0=?GZ^$5D zgbzh!inZSu)y}v+991~`invsoFC<1Ob0`8Y4Je!rPNN{Jse3o)JbYPY)6gP{=G0iv zW1%qMdlMQPrV{=y@RJ7G!FOJJvh#Ou3KdQQD=)&7bD1cMOD9?%F6G+O40bEaxRx*b zp)>c#&dG-f#UWPgkOYA!HVEn5Kf2_Y(^ex^-su>>(semH0^0j-y4<8(G_dY{Z2Y>A z)o^^!5^C-tJ&w(_|5=iW_I!uXYo>jKgwPgE$*Cbo1#&Q5T;`zivGI)I`_b{Q*3Qvc{w0qgEy|&ap3~ z>q4HyS~nI62(TP5Gwjc)L!)<=q+^$*Vf0aw?_3@DDo%wPa=&~k2Jwiw=S7=5bRXAY zZn?{oKz_xSB?=jN)4gIVeXtuc#XhOVOwM{b6nKH6{yo-n7OXvKmY-FzbP)Cb+Pl)A zrm}6F7L-v2WtM;<$`DXMK$#Mu4Ph|MVIBo!YCzC{5H&5xAfteY1R-DuL?8%AK*FR9 z5s1nZ8Nwuj0x}4;1e)=kzW3FAZNGl+cHMic->X~xI8~=soxN+Hb@pES?6ubSt(&?R zj8;Kin0Wk_0&G&PM|T3U3BUHb{#tGKXA-NzDuha%pbu6SKgmhR%*`9Wk^71X*}vtH zPdjZ)Qsm3_9pz_O0epcj58VYZcYf(bQA#EuAqV$l9(^H`yWEWX#5^&X3K6*4tRGxQ zqV3cR=SVwq8?MCuST0Br;vEwrL}7VRoA#Hf;m-78x2Jd?DBP+Wh;Xju+_gQr-fWO; zs<(~;8{b~pdQ>_E=IYUQU9#?pAK~~y+Gv+DyWt)97#+ZZ454PG--nM642%Ke!%Jx{ zZ)(AtM(5Y=8V*YnIcU^9*s>9-UY}8*FKhUB2(@>JYrhxrXgNwYCfT zlviDOwf_4|R?h6ZU68+98unDXT}?4s$~(LkpWk?Q;eDXUUsn9Bag|=-uEfTnGouMc zFazy`d%;4!%St~4duCjHEu+)1F;zTey_#SfobA_qXFJA;*tI(Ha_C`|`^xc5UA3-S z6LwEoSeJQ^l=f|_}DgeDeT=^zeVdXer*XX3}~W2gb-ADhgdR&qMkwunw*g?z1o%wpdqOotfU$j=}Kg z*qsaxU{^BaL5Nu)CXLkR1wxg`&sn#zEoHrkCoyC%slM>Wj&V-n;m!kElHPYEEO||0 
z*o_2MEeQMYa685Ewx85AqJ-S(LjJ7<+TxE5VimAQOX^YC6z>p+M)+*|9$)TivCE;iui2Ji?!2{FMUHD_k zh22d~_F$?vft~TiiWNdBgGRWn9obZFPCv4#>uRCJRS~%LXr*29dqG+}Y}i8K3re0? zbui<)K|$WY_*_T`yHzUU?8NJ$hp`#9?m1?&rBXTG!lg`NbpU&uvY6z-owG&yg?_9j zN<9v(l8L>Um*6fB3B%UCpUJw3EQ*G6x0~7noLR0CS?QkUr=yZj)ZNql?W(Hp$tC~} z`7<;@2^E8F4*$G_8?U)Hl;MkVMUM?Q%NE`Fj?~1eOz{eWM)g((2*{J>$ODd*? zWAvqoP7hOk_&W3oH#70b%Cxn{%=*)%=K!8VMN5y1`}P)sA|pJv6x)Tp>`S+EJOS^I z?Jj6-4%>2PE;3@?1Hk#+umi9HLjAtTGJHw7rH9h#vi;9u* z^*l0_{bf$%%({)X=1I%kqvwOD=z@)B&69S%gzPm@#RZ>S9z+lob$er^c-@*mPF-`J z;5V_(jh}S9Vm*^}X*$(YcHQZ$-jx99mc7z_1gPNh$~<9sNARYTjWsJ{sePefSPbzz zPcn!KuYJq8Q{DO&yI+cW9u}>#L+Pg)0`nwN9t+y_$_~`iGf;SWCG}e`wHteIvCvE z;P|q~q+lvtPh=&Nkrc(5$Sybz%&c1z0O=(DZ_Oy`NM&^-hj||%jc5w-U3@d-UIFLk z+A7)p2L~qo_1pa8Z}@NA(YYpA5A*%vfU?)?`0|zgB0q(e9uP`<@3jY2fqA=OXK8b! zFCBd2lkEZ3^bgEKo>FHl4;ek%f%-ees2)L|?enco;8Eo! z+^43WWvi0`cG%x8A&q1OjYRpy!Zd0~z`GPQH?I%MZMx_e7- z9J^8in>^juJTvGFP@RE++9(p`DFAr$w?e|#ZDwb`b@Mlc{f=&2`K4#WRkIH#I%Kj{ ziKN0C+{KM;CE*MKeQ>Jeeim@sS{D48tN_;O9Qg2r3baO=iiATQ^e!TWEU z%)iV8e=~T$ve^Is%B2D$YoC5xNTKe#3COTC3i(P(%}? zpd_*;D}d}5E`k0iTALeIVXKJHVi)WI0`--^mgzeOO`N)2t=`;NUzvjxO*nZ%B0K?} z;ps7y5z8*=1A0U|<#OHAcjr)?al_Ll%*dF*#?^u2b&Y5r!E%#)W(lZMuE}32Z=YkZ{+9K7WlDE=dP(47E7v$c0v4wA+;i%Uh)dW!V^V3;&+Invq z-?xr-dSEYmosix9To@`qObr|P(L5AVSRw&U3n-XN8*+$KEx72DR;!!x(jhlB3}6f! zfBgF>>|dYbQ5t$CgNVK*3S?NfKENL5;3VF1Go#(9(3_VCVD&A0*=SxX(=~$|ABsie z$tQw{J$WC;X@?j%NJO+I1H~STJ}VwSz4f9x;Lj9uHTlEy%O8;YziDe?JFO#=v{d1 zqN&l|5Sj^2ls~f_aL9HU6-&m1>x{dNAg(2XR~xGfJnK#B>jv=?_X&h zY~1C=Ko)y?|2}}~JKFk(4|6B4=om@u*cap6wCTwEt(Ub+E}=d@Mahc%oJE?cd|t_* zKwcd(?sKzhx-~xk#NlPPf~alJLu2`v>G@_(!X>%b$yCQB^ke?#W!*lPS@rdh5w#uo z8v+T<2l8%(C~&Yv@$1_(PjdHTtDvo{#0i~6Y|v&n4%*8~{dull>^0QcQ*25spR)m? 
z++IU@xBq}?ePStws~{lagqNDpBUz6GQQaEOGjek5mtBzFvpC_V+rlrh^x9vwGn&xt zZ|M8deV$pEfk2>q;Nr`C^!FhOKouB)e4Mfr*lNO}xdu1B)GMsKiAIt4ZGOUVJSUrY*kchMx)qr7bi{cr?cXbw+^nyy%5En$7OEk z=BDKGcGTGI=h3Zdh>n1Cavv4rNXobqaLABB;TJs}r@pypu*5lGid*_NykR zIG1K`jxI7TI_FNwd&ST;Jj)vwS>Yb`NQ>hRzauC(DmbY8i^9W(rx>8lLirC(J-eVt zRDv!1;u>?`_2=a&q}dH*c^t8|csTB}=V-XmNcQmUix>5VDrNE3cBDTHuK&;cr2q5h z%VF(#x~VO->Ccbjk3BOzSGQmMg4x_H%q7y$N3-jdqjnz7Kw%Dz(f|qa0!x!Fqj@*Y z&=KzPncPKxl96_&F%kompE#XZjlt*yWD}|SSDZR;4Ry~n^~XYOyHByDgiWP=RH&6Y zwY*KH#I)gl^j!sg=K|fF+0rtslR)*~R1+&6dFY@Y-t5$1(kfKTIrPuk9O$`jI z%hw8925Le-Tth2x^1l6UQ0qk`4l6b*!m<@jC6(%vf<(xY!E(5>ofbj|>!VZ$b71vu5Zfzwo{u$yJK*}dnk4ne znC;a**#z5>ZU^rCx%$1&I>48iKk6t$>?#KqVm{zuIeaVw6E(G}2g2U&+;=bn`(4Vh ztrCIbxgqcNQ4{G-WLwjCsC&4fXA;} Date: Wed, 7 Jan 2026 05:55:48 +0000 Subject: [PATCH 2/3] fix: cleanup code and update --- .../SWL/WSL-Images/CODE_OF_CONDUCT.md | 5 - .../SWL/WSL-Images/CONTRIBUTING.md | 35 -- .../Classification/SWL/WSL-Images/LICENSE | 399 ------------------ .../Classification/SWL/WSL-Images/README.md | 38 -- .../Classification/SWL/WSL-Images/hubconf.py | 78 ---- .../build-in/Classification/SWL/coverage.txt | 3 - PyTorch/build-in/Classification/SWL/readme | 65 +++ .../Classification/SWL/requirements_exact.txt | 89 ++++ PyTorch/build-in/Classification/SWL/run | 1 - .../build-in/Classification/SWL/wsl_loss.jpg | Bin 35863 -> 0 bytes .../build-in/Classification/SWL/wsl_loss.txt | 29 -- 11 files changed, 154 insertions(+), 588 deletions(-) delete mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/CODE_OF_CONDUCT.md delete mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/CONTRIBUTING.md delete mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/LICENSE delete mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/README.md delete mode 100644 PyTorch/build-in/Classification/SWL/WSL-Images/hubconf.py delete mode 100644 PyTorch/build-in/Classification/SWL/coverage.txt create mode 100644 PyTorch/build-in/Classification/SWL/readme 
create mode 100644 PyTorch/build-in/Classification/SWL/requirements_exact.txt delete mode 100644 PyTorch/build-in/Classification/SWL/run delete mode 100644 PyTorch/build-in/Classification/SWL/wsl_loss.jpg delete mode 100644 PyTorch/build-in/Classification/SWL/wsl_loss.txt diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/CODE_OF_CONDUCT.md b/PyTorch/build-in/Classification/SWL/WSL-Images/CODE_OF_CONDUCT.md deleted file mode 100644 index 0f7ad8bfc..000000000 --- a/PyTorch/build-in/Classification/SWL/WSL-Images/CODE_OF_CONDUCT.md +++ /dev/null @@ -1,5 +0,0 @@ -# Code of Conduct - -Facebook has adopted a Code of Conduct that we expect project participants to adhere to. -Please read the [full text](https://code.fb.com/codeofconduct/) -so that you can understand what actions will and will not be tolerated. diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/CONTRIBUTING.md b/PyTorch/build-in/Classification/SWL/WSL-Images/CONTRIBUTING.md deleted file mode 100644 index 47c825fbb..000000000 --- a/PyTorch/build-in/Classification/SWL/WSL-Images/CONTRIBUTING.md +++ /dev/null @@ -1,35 +0,0 @@ -# Contributing to WSL-Images -We want to make contributing to this project as easy and transparent as possible. - -## Our Development Process -Minor changes and improvements will be released on an ongoing basis. Larger changes (e.g., changesets implementing a new benchmark) will be released on a more periodic basis. - -## Pull Requests -We actively welcome your pull requests. - -1. Fork the repo and create your branch from `master`. -2. If you've added code that should be tested, add tests. -3. If you've changed APIs, update the documentation. -4. Ensure the test suite passes. -5. Make sure your code lints. -6. If you haven't already, complete the Contributor License Agreement ("CLA"). - -## Contributor License Agreement ("CLA") -In order to accept your pull request, we need you to submit a CLA. 
You only need -to do this once to work on any of Facebook's open source projects. - -Complete your CLA here: - -## Issues -We use GitHub issues to track public bugs. Please ensure your description is -clear and has sufficient instructions to be able to reproduce the issue. Follow -the template provided [here](.github/issue_template.md) when opening issues. - -## Coding Style -* 4 spaces for indentation rather than tabs -* 80 character line length -* 80 character line length - -## License -By contributing to WSL-Images, you agree that your contributions will be licensed -under the LICENSE file in the root directory of this source tree. diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/LICENSE b/PyTorch/build-in/Classification/SWL/WSL-Images/LICENSE deleted file mode 100644 index f2915d90a..000000000 --- a/PyTorch/build-in/Classification/SWL/WSL-Images/LICENSE +++ /dev/null @@ -1,399 +0,0 @@ -Attribution-NonCommercial 4.0 International - -======================================================================= - -Creative Commons Corporation ("Creative Commons") is not a law firm and -does not provide legal services or legal advice. Distribution of -Creative Commons public licenses does not create a lawyer-client or -other relationship. Creative Commons makes its licenses and related -information available on an "as-is" basis. Creative Commons gives no -warranties regarding its licenses, any material licensed under their -terms and conditions, or any related information. Creative Commons -disclaims all liability for damages resulting from their use to the -fullest extent possible. - -Using Creative Commons Public Licenses - -Creative Commons public licenses provide a standard set of terms and -conditions that creators and other rights holders may use to share -original works of authorship and other material subject to copyright -and certain other rights specified in the public license below. 
The -following considerations are for informational purposes only, are not -exhaustive, and do not form part of our licenses. - - Considerations for licensors: Our public licenses are - intended for use by those authorized to give the public - permission to use material in ways otherwise restricted by - copyright and certain other rights. Our licenses are - irrevocable. Licensors should read and understand the terms - and conditions of the license they choose before applying it. - Licensors should also secure all rights necessary before - applying our licenses so that the public can reuse the - material as expected. Licensors should clearly mark any - material not subject to the license. This includes other CC- - licensed material, or material used under an exception or - limitation to copyright. More considerations for licensors: - wiki.creativecommons.org/Considerations_for_licensors - - Considerations for the public: By using one of our public - licenses, a licensor grants the public permission to use the - licensed material under specified terms and conditions. If - the licensor's permission is not necessary for any reason--for - example, because of any applicable exception or limitation to - copyright--then that use is not regulated by the license. Our - licenses grant only permissions under copyright and certain - other rights that a licensor has authority to grant. Use of - the licensed material may still be restricted for other - reasons, including because others have copyright or other - rights in the material. A licensor may make special requests, - such as asking that all changes be marked or described. - Although not required by our licenses, you are encouraged to - respect those requests where reasonable. 
More_considerations - for the public: - wiki.creativecommons.org/Considerations_for_licensees - -======================================================================= - -Creative Commons Attribution-NonCommercial 4.0 International Public -License - -By exercising the Licensed Rights (defined below), You accept and agree -to be bound by the terms and conditions of this Creative Commons -Attribution-NonCommercial 4.0 International Public License ("Public -License"). To the extent this Public License may be interpreted as a -contract, You are granted the Licensed Rights in consideration of Your -acceptance of these terms and conditions, and the Licensor grants You -such rights in consideration of benefits the Licensor receives from -making the Licensed Material available under these terms and -conditions. - -Section 1 -- Definitions. - - a. Adapted Material means material subject to Copyright and Similar - Rights that is derived from or based upon the Licensed Material - and in which the Licensed Material is translated, altered, - arranged, transformed, or otherwise modified in a manner requiring - permission under the Copyright and Similar Rights held by the - Licensor. For purposes of this Public License, where the Licensed - Material is a musical work, performance, or sound recording, - Adapted Material is always produced where the Licensed Material is - synched in timed relation with a moving image. - - b. Adapter's License means the license You apply to Your Copyright - and Similar Rights in Your contributions to Adapted Material in - accordance with the terms and conditions of this Public License. - - c. Copyright and Similar Rights means copyright and/or similar rights - closely related to copyright including, without limitation, - performance, broadcast, sound recording, and Sui Generis Database - Rights, without regard to how the rights are labeled or - categorized. 
For purposes of this Public License, the rights - specified in Section 2(b)(1)-(2) are not Copyright and Similar - Rights. - d. Effective Technological Measures means those measures that, in the - absence of proper authority, may not be circumvented under laws - fulfilling obligations under Article 11 of the WIPO Copyright - Treaty adopted on December 20, 1996, and/or similar international - agreements. - - e. Exceptions and Limitations means fair use, fair dealing, and/or - any other exception or limitation to Copyright and Similar Rights - that applies to Your use of the Licensed Material. - - f. Licensed Material means the artistic or literary work, database, - or other material to which the Licensor applied this Public - License. - - g. Licensed Rights means the rights granted to You subject to the - terms and conditions of this Public License, which are limited to - all Copyright and Similar Rights that apply to Your use of the - Licensed Material and that the Licensor has authority to license. - - h. Licensor means the individual(s) or entity(ies) granting rights - under this Public License. - - i. NonCommercial means not primarily intended for or directed towards - commercial advantage or monetary compensation. For purposes of - this Public License, the exchange of the Licensed Material for - other material subject to Copyright and Similar Rights by digital - file-sharing or similar means is NonCommercial provided there is - no payment of monetary compensation in connection with the - exchange. - - j. Share means to provide material to the public by any means or - process that requires permission under the Licensed Rights, such - as reproduction, public display, public performance, distribution, - dissemination, communication, or importation, and to make material - available to the public including in ways that members of the - public may access the material from a place and at a time - individually chosen by them. - - k. 
Sui Generis Database Rights means rights other than copyright - resulting from Directive 96/9/EC of the European Parliament and of - the Council of 11 March 1996 on the legal protection of databases, - as amended and/or succeeded, as well as other essentially - equivalent rights anywhere in the world. - - l. You means the individual or entity exercising the Licensed Rights - under this Public License. Your has a corresponding meaning. - -Section 2 -- Scope. - - a. License grant. - - 1. Subject to the terms and conditions of this Public License, - the Licensor hereby grants You a worldwide, royalty-free, - non-sublicensable, non-exclusive, irrevocable license to - exercise the Licensed Rights in the Licensed Material to: - - a. reproduce and Share the Licensed Material, in whole or - in part, for NonCommercial purposes only; and - - b. produce, reproduce, and Share Adapted Material for - NonCommercial purposes only. - - 2. Exceptions and Limitations. For the avoidance of doubt, where - Exceptions and Limitations apply to Your use, this Public - License does not apply, and You do not need to comply with - its terms and conditions. - - 3. Term. The term of this Public License is specified in Section - 6(a). - - 4. Media and formats; technical modifications allowed. The - Licensor authorizes You to exercise the Licensed Rights in - all media and formats whether now known or hereafter created, - and to make technical modifications necessary to do so. The - Licensor waives and/or agrees not to assert any right or - authority to forbid You from making technical modifications - necessary to exercise the Licensed Rights, including - technical modifications necessary to circumvent Effective - Technological Measures. For purposes of this Public License, - simply making modifications authorized by this Section 2(a) - (4) never produces Adapted Material. - - 5. Downstream recipients. - - a. Offer from the Licensor -- Licensed Material. 
Every - recipient of the Licensed Material automatically - receives an offer from the Licensor to exercise the - Licensed Rights under the terms and conditions of this - Public License. - - b. No downstream restrictions. You may not offer or impose - any additional or different terms or conditions on, or - apply any Effective Technological Measures to, the - Licensed Material if doing so restricts exercise of the - Licensed Rights by any recipient of the Licensed - Material. - - 6. No endorsement. Nothing in this Public License constitutes or - may be construed as permission to assert or imply that You - are, or that Your use of the Licensed Material is, connected - with, or sponsored, endorsed, or granted official status by, - the Licensor or others designated to receive attribution as - provided in Section 3(a)(1)(A)(i). - - b. Other rights. - - 1. Moral rights, such as the right of integrity, are not - licensed under this Public License, nor are publicity, - privacy, and/or other similar personality rights; however, to - the extent possible, the Licensor waives and/or agrees not to - assert any such rights held by the Licensor to the limited - extent necessary to allow You to exercise the Licensed - Rights, but not otherwise. - - 2. Patent and trademark rights are not licensed under this - Public License. - - 3. To the extent possible, the Licensor waives any right to - collect royalties from You for the exercise of the Licensed - Rights, whether directly or through a collecting society - under any voluntary or waivable statutory or compulsory - licensing scheme. In all other cases the Licensor expressly - reserves any right to collect such royalties, including when - the Licensed Material is used other than for NonCommercial - purposes. - -Section 3 -- License Conditions. - -Your exercise of the Licensed Rights is expressly made subject to the -following conditions. - - a. Attribution. - - 1. 
If You Share the Licensed Material (including in modified - form), You must: - - a. retain the following if it is supplied by the Licensor - with the Licensed Material: - - i. identification of the creator(s) of the Licensed - Material and any others designated to receive - attribution, in any reasonable manner requested by - the Licensor (including by pseudonym if - designated); - - ii. a copyright notice; - - iii. a notice that refers to this Public License; - - iv. a notice that refers to the disclaimer of - warranties; - - v. a URI or hyperlink to the Licensed Material to the - extent reasonably practicable; - - b. indicate if You modified the Licensed Material and - retain an indication of any previous modifications; and - - c. indicate the Licensed Material is licensed under this - Public License, and include the text of, or the URI or - hyperlink to, this Public License. - - 2. You may satisfy the conditions in Section 3(a)(1) in any - reasonable manner based on the medium, means, and context in - which You Share the Licensed Material. For example, it may be - reasonable to satisfy the conditions by providing a URI or - hyperlink to a resource that includes the required - information. - - 3. If requested by the Licensor, You must remove any of the - information required by Section 3(a)(1)(A) to the extent - reasonably practicable. - - 4. If You Share Adapted Material You produce, the Adapter's - License You apply must not prevent recipients of the Adapted - Material from complying with this Public License. - -Section 4 -- Sui Generis Database Rights. - -Where the Licensed Rights include Sui Generis Database Rights that -apply to Your use of the Licensed Material: - - a. for the avoidance of doubt, Section 2(a)(1) grants You the right - to extract, reuse, reproduce, and Share all or a substantial - portion of the contents of the database for NonCommercial purposes - only; - - b. 
if You include all or a substantial portion of the database - contents in a database in which You have Sui Generis Database - Rights, then the database in which You have Sui Generis Database - Rights (but not its individual contents) is Adapted Material; and - - c. You must comply with the conditions in Section 3(a) if You Share - all or a substantial portion of the contents of the database. - -For the avoidance of doubt, this Section 4 supplements and does not -replace Your obligations under this Public License where the Licensed -Rights include other Copyright and Similar Rights. - -Section 5 -- Disclaimer of Warranties and Limitation of Liability. - - a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE - EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS - AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF - ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS, - IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION, - WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR - PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS, - ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT - KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT - ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU. - - b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE - TO YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION, - NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT, - INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES, - COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR - USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN - ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR - DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL OR - IN PART, THIS LIMITATION MAY NOT APPLY TO YOU. - - c. 
The disclaimer of warranties and limitation of liability provided - above shall be interpreted in a manner that, to the extent - possible, most closely approximates an absolute disclaimer and - waiver of all liability. - -Section 6 -- Term and Termination. - - a. This Public License applies for the term of the Copyright and - Similar Rights licensed here. However, if You fail to comply with - this Public License, then Your rights under this Public License - terminate automatically. - - b. Where Your right to use the Licensed Material has terminated under - Section 6(a), it reinstates: - - 1. automatically as of the date the violation is cured, provided - it is cured within 30 days of Your discovery of the - violation; or - - 2. upon express reinstatement by the Licensor. - - For the avoidance of doubt, this Section 6(b) does not affect any - right the Licensor may have to seek remedies for Your violations - of this Public License. - - c. For the avoidance of doubt, the Licensor may also offer the - Licensed Material under separate terms or conditions or stop - distributing the Licensed Material at any time; however, doing so - will not terminate this Public License. - - d. Sections 1, 5, 6, 7, and 8 survive termination of this Public - License. - -Section 7 -- Other Terms and Conditions. - - a. The Licensor shall not be bound by any additional or different - terms or conditions communicated by You unless expressly agreed. - - b. Any arrangements, understandings, or agreements regarding the - Licensed Material not stated herein are separate from and - independent of the terms and conditions of this Public License. - -Section 8 -- Interpretation. - - a. For the avoidance of doubt, this Public License does not, and - shall not be interpreted to, reduce, limit, restrict, or impose - conditions on any use of the Licensed Material that could lawfully - be made without permission under this Public License. - - b. 
To the extent possible, if any provision of this Public License is - deemed unenforceable, it shall be automatically reformed to the - minimum extent necessary to make it enforceable. If the provision - cannot be reformed, it shall be severed from this Public License - without affecting the enforceability of the remaining terms and - conditions. - - c. No term or condition of this Public License will be waived and no - failure to comply consented to unless expressly agreed to by the - Licensor. - - d. Nothing in this Public License constitutes or may be interpreted - as a limitation upon, or waiver of, any privileges and immunities - that apply to the Licensor or You, including from the legal - processes of any jurisdiction or authority. - -======================================================================= - -Creative Commons is not a party to its public -licenses. Notwithstanding, Creative Commons may elect to apply one of -its public licenses to material it publishes and in those instances -will be considered the “Licensor.” The text of the Creative Commons -public licenses is dedicated to the public domain under the CC0 Public -Domain Dedication. Except for the limited purpose of indicating that -material is shared under a Creative Commons public license or as -otherwise permitted by the Creative Commons policies published at -creativecommons.org/policies, Creative Commons does not authorize the -use of the trademark "Creative Commons" or any other trademark or logo -of Creative Commons without its prior written consent including, -without limitation, in connection with any unauthorized modifications -to any of its public licenses or any other arrangements, -understandings, or agreements concerning use of licensed material. For -the avoidance of doubt, this paragraph does not form part of the -public licenses. - -Creative Commons may be contacted at creativecommons.org. 
diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/README.md b/PyTorch/build-in/Classification/SWL/WSL-Images/README.md deleted file mode 100644 index a43144e08..000000000 --- a/PyTorch/build-in/Classification/SWL/WSL-Images/README.md +++ /dev/null @@ -1,38 +0,0 @@ -## WSL-Images - -This project provides models pre-trained in weakly-supervised fashion on **940 million** public images with 1.5K hashtags matching with 1000 ImageNet1K synsets, followed by fine-tuning on ImageNet1K dataset. Please refer to "Exploring the Limits of Weakly Supervised Pretraining" (https://arxiv.org/abs/1805.00932) presented at ECCV 2018 for the details of model training. - -We are providing 4 models with different capacities. - -| Model | #Parameters | FLOPS | Top-1 Acc. | Top-5 Acc. | -| ------------------ | :---------: | :---: | :--------: | :--------: | -| ResNeXt-101 32x8d | 88M | 16B | 82.2 | 96.4 | -| ResNeXt-101 32x16d | 193M | 36B | 84.2 | 97.2 | -| ResNeXt-101 32x32d | 466M | 87B | 85.1 | 97.5 | -| ResNeXt-101 32x48d | 829M | 153B | 85.4 | 97.6 | - -Our models significantly improve the training accuracy on ImageNet compared to training from scratch. **We achieve state-of-the-art accuracy of 85.4% on ImageNet with our ResNext-101 32x48d model.** - -## Loading models with torch.hub -The models are available with [torch.hub](https://pytorch.org/docs/stable/hub.html). -As an example, to load the ResNext-101 32x16d model, simply run: - -``` -model = torch.hub.load('facebookresearch/WSL-Images', 'resnext101_32x16d_wsl') -``` -Please refer to [torch.hub](https://pytorch.org/docs/stable/hub.html) to see a full example of using the model to classify an image. - -## Citing WSL-Images - -If you use the WSL-Images models, please cite the following publication. -``` -@inproceedings{wslimageseccv2018, - title={Exploring the Limits of Weakly Supervised Pretraining}, - author={Dhruv Kumar Mahajan and Ross B. 
Girshick and Vignesh Ramanathan and Kaiming He and Manohar Paluri and Yixuan Li and Ashwin Bharambe and Laurens van der Maaten}, - booktitle={ECCV}, - year={2018} -} -``` - -## License -WSL-Images models are released under the CC-BY-NC 4.0 license. See [LICENSE](LICENSE) for additional details. diff --git a/PyTorch/build-in/Classification/SWL/WSL-Images/hubconf.py b/PyTorch/build-in/Classification/SWL/WSL-Images/hubconf.py deleted file mode 100644 index d28f8adaa..000000000 --- a/PyTorch/build-in/Classification/SWL/WSL-Images/hubconf.py +++ /dev/null @@ -1,78 +0,0 @@ -# Copyright (c) Facebook, Inc. and its affiliates. -# All rights reserved. -# -# This source code is licensed under the license found in the -# LICENSE file in the root directory of this source tree. - -# Optional list of dependencies required by the package -dependencies = ['torch', 'torchvision'] - -from torch.hub import load_state_dict_from_url -from torchvision.models.resnet import ResNet, Bottleneck - - -model_urls = { - 'resnext101_32x8d': 'https://download.pytorch.org/models/ig_resnext101_32x8-c38310e5.pth', - 'resnext101_32x16d': 'https://download.pytorch.org/models/ig_resnext101_32x16-c6f796b0.pth', - 'resnext101_32x32d': 'https://download.pytorch.org/models/ig_resnext101_32x32-e4b90b00.pth', - 'resnext101_32x48d': 'https://download.pytorch.org/models/ig_resnext101_32x48-3e41cc8a.pth', -} - - -def _resnext(arch, block, layers, pretrained, progress, **kwargs): - model = ResNet(block, layers, **kwargs) - state_dict = load_state_dict_from_url(model_urls[arch], progress=progress) - model.load_state_dict(state_dict) - return model - - -def resnext101_32x8d_wsl(progress=True, **kwargs): - """Constructs a ResNeXt-101 32x8 model pre-trained on weakly-supervised data - and finetuned on ImageNet from Figure 5 in - `"Exploring the Limits of Weakly Supervised Pretraining" `_ - - Args: - progress (bool): If True, displays a progress bar of the download to stderr. 
- """ - kwargs['groups'] = 32 - kwargs['width_per_group'] = 8 - return _resnext('resnext101_32x8d', Bottleneck, [3, 4, 23, 3], True, progress, **kwargs) - - -def resnext101_32x16d_wsl(progress=True, **kwargs): - """Constructs a ResNeXt-101 32x16 model pre-trained on weakly-supervised data - and finetuned on ImageNet from Figure 5 in - `"Exploring the Limits of Weakly Supervised Pretraining" `_ - - Args: - progress (bool): If True, displays a progress bar of the download to stderr. - """ - kwargs['groups'] = 32 - kwargs['width_per_group'] = 16 - return _resnext('resnext101_32x16d', Bottleneck, [3, 4, 23, 3], True, progress, **kwargs) - - -def resnext101_32x32d_wsl(progress=True, **kwargs): - """Constructs a ResNeXt-101 32x32 model pre-trained on weakly-supervised data - and finetuned on ImageNet from Figure 5 in - `"Exploring the Limits of Weakly Supervised Pretraining" `_ - - Args: - progress (bool): If True, displays a progress bar of the download to stderr. - """ - kwargs['groups'] = 32 - kwargs['width_per_group'] = 32 - return _resnext('resnext101_32x32d', Bottleneck, [3, 4, 23, 3], True, progress, **kwargs) - - -def resnext101_32x48d_wsl(progress=True, **kwargs): - """Constructs a ResNeXt-101 32x48 model pre-trained on weakly-supervised data - and finetuned on ImageNet from Figure 5 in - `"Exploring the Limits of Weakly Supervised Pretraining" `_ - - Args: - progress (bool): If True, displays a progress bar of the download to stderr. 
- """ - kwargs['groups'] = 32 - kwargs['width_per_group'] = 48 - return _resnext('resnext101_32x48d', Bottleneck, [3, 4, 23, 3], True, progress, **kwargs) diff --git a/PyTorch/build-in/Classification/SWL/coverage.txt b/PyTorch/build-in/Classification/SWL/coverage.txt deleted file mode 100644 index 3aa0a123b..000000000 --- a/PyTorch/build-in/Classification/SWL/coverage.txt +++ /dev/null @@ -1,3 +0,0 @@ -all api: ['_amp_foreach_non_finite_check_and_unscale_', '_amp_update_scale_', '_copy_from', '_has_compatible_shallow_copy_type', '_local_scalar_dense', '_log_softmax', '_log_softmax_backward_data', '_pin_memory', '_reshape_alias', 'add_', 'addmm', 'as_strided', 'as_strided_', 'convolution', 'convolution_backward', 'copy_stride', 'div', 'eq', 'fill_', 'fused_sgd', 'is_pinned', 'linear', 'max_pool2d', 'maxpool2d_backward', 'maxpool2d_forward', 'mean', 'mm', 'mul', 'mul_', 'native_batch_norm', 'native_batch_norm_backward', 'nll_loss_backward', 'nll_loss_forward', 'reciprocal', 'relu_', 'set_', 'sum', 'threshold_backward', 'topk_out', 'view', 'zero_'], total: 41 -fallback op: [], total: 0 -coverage rate: 100.00% diff --git a/PyTorch/build-in/Classification/SWL/readme b/PyTorch/build-in/Classification/SWL/readme new file mode 100644 index 000000000..10da84230 --- /dev/null +++ b/PyTorch/build-in/Classification/SWL/readme @@ -0,0 +1,65 @@ +```markdown +## 1. 模型链接 +- 原始仓库链接: +https://github.com/huggingface/pytorch-image-models?tab=readme-ov-file#models + +## 2. 快速开始 + +使用本模型执行训练的主要流程如下: + +1. **基础环境安装**:介绍训练前需要完成的基础环境检查和安装。 +2. **获取数据集**:介绍如何获取训练所需的数据集。 +3. **构建环境**:介绍如何构建模型运行所需要的环境。 +4. **启动训练**:介绍如何运行训练。 + +### 2.1 基础环境安装 + +请参考主仓库的基础环境安装章节,完成训练前的基础环境检查和安装(如驱动、固件等)。 + +### 2.2 准备数据集 + +#### 2.2.1 获取数据集 + +训练使用 **CIFAR-100** 数据集。该数据集为开源数据集,包含 100 个类别的 60000 张彩色图像。 + +#### 2.2.2 处理数据集 + +请确保数据集已下载并解压。根据训练脚本的默认配置,建议将数据集存放在模型目录的上级 `data` 目录中(即 `../data`),或者根据实际路径修改训练命令中的 `--datapath` 参数。 + +### 2.3 构建环境 + +所使用的环境下需包含 PyTorch 框架虚拟环境。 + +1. 
执行以下命令,启动虚拟环境(根据实际环境名称修改):
+
+ ```bash
+ conda activate torch_env_py310
+
+```
+
+2. 安装 Python 依赖。确保已安装项目所需的依赖包:
+```bash
+pip install -r requirements_exact.txt
+
+```
+
+
+
+### 2.4 启动训练
+
+1. 在构建好的环境中,进入模型训练脚本所在目录。
+
+2. 运行训练。该模型支持单机单卡训练。
+执行以下命令启动训练(使用 CIFAR-100 数据集,Batch Size 为 16):
+```bash
+python weloTrainStep.py \
+ --name train \
+ --arch swl \
+ --print_freq 1 \
+ --steps 100 \
+ --dataset cifar100 \
+ --datapath ../data \
+ --batch_size 16 \
+ --epochs 100
+
+```
diff --git a/PyTorch/build-in/Classification/SWL/requirements_exact.txt b/PyTorch/build-in/Classification/SWL/requirements_exact.txt
new file mode 100644
index 000000000..7394b3319
--- /dev/null
+++ b/PyTorch/build-in/Classification/SWL/requirements_exact.txt
@@ -0,0 +1,89 @@
+addict==2.4.0
+aliyun-python-sdk-core==2.16.0
+aliyun-python-sdk-kms==2.16.5
+anyio==4.11.0
+astunparse==1.6.3
+certifi==2024.12.14
+cffi==2.0.0
+charset-normalizer==3.4.1
+click==8.3.1
+colorama==0.4.6
+contourpy==1.3.2
+crcmod==1.7
+cryptography==46.0.3
+cycler==0.12.1
+einops==0.8.1
+exceptiongroup==1.3.1
+filelock==3.14.0
+fonttools==4.60.1
+fsspec==2024.12.0
+future @ file:///croot/future_1730902796226/work
+git-filter-repo==2.47.0
+h11==0.16.0
+hf-xet==1.2.0
+httpcore==1.0.9
+httpx==0.28.1
+huggingface_hub==1.1.5
+idna==3.10
+inplace-abn @ git+https://github.com/mapillary/inplace_abn.git@b50bfe9c7cd7116a3ab091a352b48d6ba5ee701c
+Jinja2==3.1.5
+jmespath==0.10.0
+joblib==1.5.2
+kiwisolver==1.4.9
+Markdown==3.10
+markdown-it-py==4.0.0
+MarkupSafe==3.0.2
+matplotlib==3.10.7
+mdurl==0.1.2
+mmdet==3.3.0
+mmengine==0.10.7
+model-index==0.1.11
+mpmath==1.3.0
+networkx==3.4.2
+numpy==1.23.5
+opencv-python==4.12.0.88
+opendatalab==0.0.10
+openmim==0.3.9
+openxlab==0.1.3
+ordered-set==4.1.0
+oss2==2.17.0
+packaging @ file:///croot/packaging_1734472117206/work
+pandas==2.3.3
+pillow==11.1.0
+platformdirs==4.5.1
+pycocotools==2.0.11
+pycparser @ file:///tmp/build/80754af9/pycparser_1636541352034/work
+pycryptodome==3.23.0 
+Pygments==2.19.2 +pyparsing==3.2.5 +python-dateutil==2.9.0.post0 +pytz==2023.4 +PyYAML @ file:///croot/pyyaml_1728657952215/work +requests==2.28.2 +rich==13.4.2 +safetensors==0.7.0 +scikit-learn==1.7.2 +scipy==1.15.3 +shapely==2.1.2 +shellingham==1.5.4 +six @ file:///tmp/build/80754af9/six_1644875935023/work +sniffio==1.3.1 +sympy==1.13.3 +tabulate==0.9.0 +termcolor==3.2.0 +terminaltables==3.1.10 +threadpoolctl==3.6.0 +timm==1.0.22 +tomli==2.3.0 +torch @ file:///apps/torch-2.4.0a0%2Bgit4451b0e-cp310-cp310-linux_x86_64.whl#sha256=2e472c916044cac5a1a0e0d8b0e12bb943d8522b24ff826c8014dd444dccd378 +torch_sdaa @ file:///apps/torch_sdaa-2.0.0-cp310-cp310-linux_x86_64.whl#sha256=5aa57889b002e1231fbf806642e1353bfa016297bc25178396e89adc2b1f92e7 +torchaudio @ file:///apps/torchaudio-2.0.2%2Bda3eb8d-cp310-cp310-linux_x86_64.whl#sha256=46525c02fb7eaa8dafea860428de3d01e437ba8d6ff2cc228d7c71975ac4054b +torchdata @ file:///apps/torchdata-0.6.1%2Be1feeb2-py3-none-any.whl#sha256=aa2dc1a7732ea68adfad186978049bf68cc1afdbbdd1e17a8024227ab770e433 +torchtext @ file:///apps/torchtext-0.15.2a0%2B4571036-cp310-cp310-linux_x86_64.whl#sha256=7e42c684ba366f97b59ec37488bf95e416cce3892b6589200d2b3ad159ee5788 +torchvision @ file:///apps/torchvision-0.15.1a0%2B42759b1-cp310-cp310-linux_x86_64.whl#sha256=4b904db2d50102415536bc764bbc31c669b90b1b014f90964e9eccaadb2fd9eb +tqdm==4.65.2 +typer-slim==0.20.0 +typing_extensions==4.15.0 +tzdata==2025.2 +urllib3==1.26.20 +yapf==0.43.0 diff --git a/PyTorch/build-in/Classification/SWL/run b/PyTorch/build-in/Classification/SWL/run deleted file mode 100644 index fbac492b0..000000000 --- a/PyTorch/build-in/Classification/SWL/run +++ /dev/null @@ -1 +0,0 @@ -bash ../sdaaTest.sh wsl 8 0 diff --git a/PyTorch/build-in/Classification/SWL/wsl_loss.jpg b/PyTorch/build-in/Classification/SWL/wsl_loss.jpg deleted file mode 100644 index 6d478cfdf564af63db6cdd1b82ea09e96185632e..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 35863 
zcmeFa1zcQPmOWfJL4yW|011Qwg1ZKXK;bS44u!h}3P^wi4G?X}lA+JRzJ3!H8}}|g zA@Ti(q^#_m+`RmP!lJ6`n%X*OeM4hgdq-zicTaEM=-BuKZ1U^W^tYwumDRQNjm@ot z!=vMq)3fu7%OCAR0-*e*kPIN*^HH_!Z zMD$#Nn8Z(FGAmo}FmS8ylbE`UV39KNe0y;4qiH|2>_679puekSe`(miwQCxHgMx&( zc_;(`alqLnb5;P>zskR^!N2;z|Cv4@kpBA9;|UQ4^mz6UHjb{RZ)Ob{i#je8DCMs> z#Qk7aVmr5XDgVbdXoXUYb`2%>Edmz(^#kWBaxeT@|zI`!e}D zG15jF&BSBJW~HTMr4P_nGjC$;%^KXWVTPEuT>h3!DJLa+RJQZ1`@8Xl+xl*oK%3LI z7sY6uC8du(lcOa_2(3dE6Wbt%0_O|xw!4r`9?iWllf$PXQnvtO`|D1Cn49n>DSS~`b&7)y+f7Ix%%4?-1;kpAFXs+>j(7JpSu}oz}ZKZ;F zi*^K|ln~$dN?>po=K#Rt>@or}BSx$19vlMfewW+Wx`sV`-x46cj}&>a7vWjNPfO6u z1a{^jvfYYC-ErjKHK%9N{_2@puU1b`zewez>^x6t>5Km=_zlyS6xR(lM&$glXW?*? z$qv`mGV^iXpliP+ex&psRSZwIT@e!-OJ1WaC2|Y+(3RMWSs{G>{vD{m%z}rnwtIdD z+k zjQ(|zzaAIHfnN#LjjIGre93vchW5%kTvNeR4`?a7I)-M4R!JER}x@2Mit6;8ETx+Zg?i5d%aVAJn-@Dc<7g z@~O<{yBnU%LE$8;Wj#1|PK!TQa*v}j#2<=-U1WU01xjaOj9iU57=I;UfYQ(fj1+q`e9e8J)6uOz zzF2jS)I&@s$+^WkQQ*?t-)-_hEZph+)6+YRM1`jy zKoYYEAu!o$?8-EC*FBlznkQQc*NU3yy;&wjXNZpP0ZWPMCJ`C*+Zj21xZ(2b?>nV?ev5rNkgj6!&O67)iJOg zTqdPNtA>wn0eLk@8$QtyHPjEW1sxxU`Bic95R?##l)cCBOeoUDlT+mcC3jAvQs7X` zY&oS0eG8J}LWY}B1r%kFvb8Sa?E3*<9njp|Tar1=Ebt0BlB{g53=%fzDP4}GSN*i{ zA(is^7j%?}m6J}Uug#CGz5#vn4z#Z2r<)UDjZFr@Q%;&87cV|0;~5kHRu+dbvNoo7 zQ_H7&o%8w+yD2XmKizv1Ux9~JkLo1naFSZI1-0s;U=KEXoVdmag_aN#tvn=t%n&)a z-LsBmn;uZRBV|xG9z56Yc?yFf1C>>Pu2^$uXCuaqa)70h{dZo*Z@-+D=o~cH%v3tCxMtVC?Z{fib zWWbPyRA$u<8-}!b9^hOkBqod)L}h<3a=wVWn0%BmCzrFafLnV5@;p^8OBW%WwPURt z6)KRmb3+BP`&!Khh{}Nwwxh^~Xsa$Mg3ZFQ9t2z!iOpuE;y{z9S`5^3i>5sHG)j_p z+mRbc5@=qglw!0SxdnJjF|&(K@~#V&HqSp8d*NHWQCDranHc@-`5kgv`$ICGRixEG zlSJ`OD9NYGwGYeqG+x>!`0@^k%KSveDlt_h1T`|KcJgGFreek|?7pffCH@a&%Fe|N z8)}Y9Yn7D1Q_pV!(ut<8p{VwQ$=q$RIqL-sFZJQ%2lL^@Qn|Wf1+iO`q&@v2d0Md& z0rlF`cPj+@3YRgWfN`U>24rG1=k4vIvpGJ^*hRe$vJdE9T;IrkH4V|a2J9`i3nycd`!qJWc4H{u+cL0MwK|`B2g!rT%#}9f1J?+(dQ~Ih;!x9 z<3$&7tQ8TYLOrD-uVsxl6!Se>*kYS^^K^Qim|(o6OjxfhjSkTsoxT^h?a zX%Py`W303V2=)De0*1S9vl#Bv8wWK#SVfQkRIWkYL3d>{ES{9*LD#0%~5!w}1cw$N>-1 
z@K48;{c#HSp<6&3%HrN7i5ut|5Pl1Ill`x%|Ldy%tHbJN=JQ^Fr_EW)QXRZ&?<4D^ymk$35dwbTO80mv1tIDB-~$F%Zxz>(9`O`}4vv z+lKf#+Ml2o-!Guo(DnT^bOiEpM39P~nSVlFIQ#rJ*gp7hq(31q4X){e--vL~s$vYyKqyd8z$CUSq#NUi}EM-d;)f!tq2B_jikp0s$ZKQF(d6~aboS?Ha6Q_ zcaW{G&>(HbSrj3_?vdvIbe*`Tj5qah7ln2rd*dZ^dO1O<-n=eSY>1+8hW;I(;Bjy} z>7WWedVnwsFy$a6Y1T)p{Pf;*$Brji%2YQ|-~Dy}5$371R9ny;TSS{w>g7I-vpbaL za~eL(#!&-dKf9Z2re*jn2y;+!%Nkjn@)pp8J_B!S^n@o!8Bsfn@f?u8VI{qM>eKmh zOG;;Ke44F|T=JeXA)l290HnB60LThsMwBSeSYdBt=IbevJGw`a-BdWI0{{gX%MGCp zpWSHOL#Bj*_*gC%wQe-9VoKK1_fGOhz>KW0M>xz`VSxW>g{3)w@@pV@8z)@LGy`&e z6Af>G9uXt)O&rt4Q|b)+YxP5(pBKYJ@vrmc;5@&0&0Ag_eN^6k9pRVL5(B4QbGt%( zThrb|@V7m#t=11?)tKo3L-VzXSI{mG;y3m}(pQ3BjJ_8^MH~ryILLA{+3dvYWxyG0 z6k(-_(psw)d4?F}$QgZwHD+>mEyf6cErgBTTJ0-QIQ!aI6FKf6yz*aaXm721L(R0;ujbRYxlV zPCoud{Q&rG=Rk#rnStx;&qjq8P*3|bscRqqI<5s5@|At&79hiB zhj{L;N#7Gc;*DISyD{VIf!0yx#q!R-`l z!M?0#`m2$*fN2Y^c||vv!n(kHL{38>82dAT9Y6-K;)RV!kR9oQ^9Dv23#r;H{Pc zxcc*}-2~e?ao-f@yGCEW2x(MY7je7=&`~;GVQns4>fyS%0n}uOe-xx%1mWE&y6kO_ z|K)iJ3XgHPz*YMJ6O(iK{TSdpgooy6SKFYHU745w16(EJ1G{tt>hND))>P3)&jc+@O&rjql{c_NMkAX zsq6PI#9ChEJ4{gu_Zvf`BP^yEQJK#~3!vkyyE%^>Jy8eI6}#!oqK%og=yuA!eP1ij z>kMNq!EPau!Yda`w#l;BDk=)( zfYe+`kr)Z;ooBNf`Zbg4U+#swD|yX*Vdb2ej+X$;$+S>j;a7la7Iuk_RX?>yixO>w-o`8CXX-7}2IzIRm^|IbZ5u zx^FK`-&v__tn%w1b9fR;b_;NSPjLJ&fEv-^X+|NEs-CMwi5=X!-pjUW*@+N4f*J73sfFFa(z&7l0YaR~?mGz-SLRyUexg z7I27EhcH5AskeaKx07=JHYQy3A=eu7+zn%(FJ?t+YcnGjw0;npmU#EtmrFbBMsl*; z`sOB60u6P~CtZ%BJN)ZQZFv24s<}#18xl6=vkI&mbMyzJ?F*y=svjyM8s127P8HUo zS2I@4qpVxoxs>`t9^x9D&QI%%?*tL5V|gaKGH^$!qJQm0Li%4TSd=KoM<$ z>!zQ$!!-AXBshnlmx`~VD;7s6=NUT9@<-B3ztoc8ogKR1o>uE+Ea%P6$LUPhS4tP< z=OgRAw*awGI2g{u;-o$H>@?1E(#2=BrOJH{XfnQg?3@p>DeQ~uOLS>=#0ezj(y^`E zfAXkxX3)DV{P(I+g-7a$>GfKlJqC5!>eNubZ%YB#oaV|j{o*vDY!>;BfL+QW4$ca{ z(Z1*FUAO4F8%c(j&nhPJVplC2yiqH;7aQ%dB+&xft>|b%N#?x|AwxASNw==EazueX`oba&#K~CFeo8VF(s}^5S3_wtSn@v!PphVkq^q$@yBxCPsA zYZOe88hP^J%6>H=H?Ob6{K<3qgBEJ7ur$pyJij81Z#$N!_68c`J2f_Yj%=CLQLU*f zb%$zG$-eJU6WABp0|Q}=ncM+@+P6r<$SuoC|8|9uI`q$_v|h@6s!iyYYY19f5@Z-H 
zW6z77o+EMcknhpuwByGFbko!hpDW3I8sT}vR$fLb=RL2xq+qNGm5Pupy9IC~Mdefb zAnDKld_ClT+(`w*og79%L=?-@ptJOg-vS2vj!5ci3;^eB2g(>@Qpc~+1i(#GO$I>x zUns6=2<-G07kU%yws*rC(kS#dUGbmXv5oiv=)7@H)-hp7bzm+%tK}dA?sZIa0-F?F z#uWm8eBfwz-ZD08_*qT^y7zcV*zn4mUPfFQ2oQ?hW~=bW>^tcQEx(Mxia`ml!Jx z(UCA4Y0jDu0oA5Y=uSm~jLU(u!|1ZfORvjoCG%!C8$*&k_G=F0d|r!;tD}%G_Y1H# zpuX%`Dz^E=KUOO|BX3soDP;a4)y~oKH7mc2B;$P?`5txI#RilJ7=I|cL5EN7V0MxI zRzhQHXUSUwNxoEPVr|tF!cS%y+A*)yyF~-2aFTqU3I(r*zI-8eseDq~70Lth6RF^8 z%Oca~tdctXbSTRg*NSkboLcrl&qrBmMTsLEK6FEma{TTgT zZ3=f{dA63>$;@J#!z#b&ma)5+-vztq8|u$23}DT4yOKxA$}eLXX0Ey@d=?Nu2?Hem zl}j{~i#Wy>_y(OIwU?NsHUSA!s;0CL%u3y;`K6!lQTt3f{+rYM0V-Ye?_JA_sQG3< zzkuD7n`kA|KfTU!LZzb7(JZb3H$Z$VBy^M;tt+vUxv`wyO(IGJb}@eh6=$zKf4d*fez!d?s{Bng~$ za`K_j@2Y_&ed+MDt!$`ml;m3!(|YS9w+ySZ-BcbBF6No0FCnI=ZnWXZ3Ij?Zo_=sZ z|6>YL`R{eJNkpjiJALvW?Q(x|0uh<-iUE>jbZ2E6?Ko~mmbZX*^|;mh{CtdOZ=!+c zwA)4&t~6Y=37Bl|SG)tT1h^T}wKEMhgdHLTOMt%IzBuMADYYp}Pt?fI%B*P&I~}Dg zpmfuqb8}RcwYBZtuuDDpi#6#zpsMc9maprC*%+B=KV30D8k^xQ)q_G0TZ?T@onDDs zvzyNfu|yo`nzDUQa+n<38=k?3SSRV!wRq$P=FgDPh3O_ZBUs-6Q_+h(?Et_tC+4i$ z2s7%K@4j>I1f=;@H zHz_4I*=u)!!GM@$R9d`DzM5!bHT2jj)aQjm93cW3tlMHATsN-g3%G zQAMvMk<)-A@k27@)|fKUx7CiB`#LA-ZvRkG88vXE!pW)q zWeQWnio8bF*F9V0WOqLw)2GS+T>ObWM%@ui zG){jv>A?C_g18U5s5r9o`J|p9G=eT+=(|6C?US+1@(-HsUX&rUp(YOCTxc4w*t+b# z(}7~(NT`#nWNFbAtZ)HhvaLa3yzO>nzj*1+(bGTJXPKe z>P=RgsGpw%UEczDz$5?az`vJKk<~J{aKefS!dwi~m(_A-`AouqO3+G-)P4R`zk8{L ztkvPZE_}wZN?;#dtQTM6(KHr&<+454084Dh6g|#bVliGg-?6i={HD4z86VrrSFy>U zR9JHK`>y8QhtTRMe!vO2Dz3GGzQL5#%JfVps^o_{=|CM;1tydR#<*Q;l=MlSZSlLx zIVRKFZG%c?taW1s?{f%|o5&)+y$it4_n%-2ss*Q-N!XO-gn0&S`C2pv^X2w+-&doL zdMU8e9u(;dnKY*O-7O1-C&G`%1u&845ssJC4!|M?pDgv=Bgd_ROIiV;bTKtc

+?#qkGjAVePeJYh<~ z_uiCC@x-Fp%L}6PZr<9*KKp(U@og!JPdi2I>4_#sLPd{a6yw_Bq+6HWaY}^ldZVZR ziBKgNVh)DTGQH&k#b{P|`5wvX?&@iIN=;gV#I;A4<(1 z@z|2xMOvhRJcUK_$&czzBXns3EPXzAZCPXn@g7ZYG`grI#pzU2I&#e#$Ozp`-4(bb zNQy-Wrk&<>(Sv9oFqqv-yqRk@Emu=fbW&G2!-{cE4%bEu;*x-WA(vl)(O-}Y|2eYY zFX5}EYb*6Y^V_F^OYw?AX^RmA>OT?_FV|yx=Dl1tzku?^cl8>Zar@HS^Pwc;laH2a z?3q+64tTIYQwF_F*w?C@+vh~-U5)SDV|io9xH!;s-RNKw?lqGpL**CmN6#FUkFOgc zZ$;jaq0MO$0(!d?Hh?rsh$v38#l7L}jIQ(oJwj8+(>x`Ds*LYge1y66^vjf* zvav?U>NwFdUvHb%Q~6apC-mN2`pdZ_Rw}B^l{cC6zbVbI>Da-s=ODw=Bt;qHWh2&k zqG0!I#y6N)?WyFS*-Lc)RLcZ@{sJi0{EIy2yNh`~XQf!(X?IKyv`WXFn`C%!iA%EY zJGtC}YgOv1R;k?4AA;j=*&FW^^81#b(`NjS!!7>+%<^v&DFuIP=l-e}{>_YW$ksG) zMxoToXyj=ywVxyz@8CJ#n(qC^;m4Hx`eqllpo2DV+3Wi2;({_qlTpV+^=RYKhe5!e z`_^j;ty4UHPVyUa=RVpMv8&E;V<%jN_xkfmJTLDZZ#V3oL_Q_wwnt%gF2k7b9vFhv zg2QVLQd1+8PY39CVKzhW)1+lazeS&csIF)Sv&K(aNF&9R^?{gntoC6&!-}2*Eap}s z&t;eJNVNTe z8^ihNzq{0}e5KW7f0!dh_gV1iVl=F#6$c4}?hseb6-S43REEdj+>WDnTXoa(?ZW2p ztU;mt>-$=bWq6#I+ALeuO>_2}Eo+gfsShlw7bYEh6E?EKzIGrG`ro{uf3(lQzel9i z4vYUD>n1@>@^>ZtG|WLgwM^z8CKuz3lY7Ppk5{E z^NPuDIZz4B2>()Sp!D9sp^O4%8Kxjo&dP#e$Uz>*gz7h<;l;kY1;$kz+*(u0&bQ^( z>lVppKPbc6745+xPP?*3n*R-fyCh;&KGlo+DWPHudC~ULYkvC05)J#3__m@QBK6#` z32XBCoA;L`FZXW!SRV8UM#S~L9s1Fr#Lx1M!(L63d*n2Mcq$O-FfyBtXis<2Bk%a* z@zpVJM!6ao3_i29(Kje9P2t-fWyHiULw_`RSI*Uut+^P{bF-sWWpNc`z$AUKMmQ@M z$#!cZ1R^n;i4>ykmdi@1rM;qKf>Oubhh=FEN-gJc*Algjn~ue8&*H`gR5a5IKGR0J zyVu%V^fzw9A#Z}I8G0^l;3feGlNnR-bJ(WsA7^%xl1K`7skceeQkx)YMA0XS82;k8 zRKFg@&oO84E+%LN_?RwkCJKlJi^vu>G4MmAzxZ>#WmP}cw9MUbTZS>0_9OpdaQ)F= z{{x6+bq>~CDH29AhJxIz06`<+RymcAzIuC?{FXjyPRwy*|6&>m}S|4dGT?1K+q&lqFDPr?Ad zO;7xPN0V89BBGG0Vn(X!+gdg=-W@P+GB``ym3GQX^u>HS~Mo zHNEbl=%8wIWBvLDC_~xtI?5K8I7L&IZ46kSw=9zs@PVsOJh+kL8V~%WfsFyqNb|UP zWM;K`oZHi8gZ6wFBA!v2z1P}ZG ztW)*6kowkR^xb+hqn7T4XyH;8YHQ3=43-#XEZ?*IlKX(XP{HWF8ccp-cvTk}zaU?W zn&l9?N&MJrnlU&+%)s5K+3Z4V%PBTRGf_>2ozVHY46#=1-@<)=wEc*b$pmzGQvBBf znhn|;13z3>IMOX3*7IBW71|7BBR2yt>_0oKt4>m%*>Kxf_XInZqZNAkWpS=H?IKJk 
z2Sy=Dq2XlYV>`R=JjW9m6>*9UA9Wq|;T#7tG~Lg-MetMABt?+NkxbhCSwF(u<(KNK zf`G!?HjX?O4DYII9?|ESqdp_Wi{JpEUg{Va(!Qz%&J6gEDmD2Mhui|5#;y(AuRC0+ zf$FKsQBoa}8yRG@YIJ(O`x!?g{f0Q%PC00~4zO?e4$Oyan)8kf|87=WzMn*i6Mz&8 zvenNlHW=`I&X(HTOBEYIIOk!NJ{I<-w~H;;Lz6z2F}aHTS%N1NLpPvjhsR$(kqm(H z)eo$g4Gj_odo8{;6&PoUey*;bOQXBpVq%?ThEHOLa0ciLtc`-AS1IN>Z&Eaumim&6Gx3V^3``^-o8tI% z&>>~0i+AC@CwO9Jd-}XFW%`2yHV_-~*fp4{N4btvJO0t0{z)*cdtZ{al4Dv+>Oj#| zA1`GSB>x&4AwMv9k3jLi&fNS#-{bxs9oUGF_x?3?XpNHfXN&h&yxQN|Uk2`)PPueQ zw4`qiYXdfQ)TLeOPQYM%+QRz_k6DW{pn&=Tx7yG$aAE$ysh}I1j#`v1lv{uLjae(B zy&yxHkPm?b**w*_B@J7mBIxeJ7W-2M;aTGF{yL{E67no0`DeNTva;=8C&7k%!Ayur zbM_CSL(7jNg>d;+8HuJb8ElMou@+H+oFa_|{z78>70Umu<^PLD?C0_E(^i`=-JZ9r z*1KR%x)*vzy49YAS9+el4Vo-}7&HwuknNnCdA-DN?q{%1J6KDg*j3c8i_is)UrmZM z*iNj%jb4=B0#G3J8;0YXM-aX=k6XY?=)BMm1|sEW@VoZH%7*2uWT`&>@c;`*6fzxs zKDimaYJ`?L?g>*hYEuaFf=Lf$k`qHY9!!+L4*hynv;8nTUEwIHEBhsPmEd)wa!`nA ziIX@z&@n7A`?}ws;#2VEQMy~J*7Trd2#y=M>DxP|xU++BPl2wA|C4~tKLFHd zm|N;j#qd|41`l%iajWE*YAImp2EN7c0j%>l5^M>*pS)i`qK$pSE)!uO=fQZSBwfWk ztMw$73UAS8?0DhsLT`ClWbvljl;IbpLI!BleNb;ad z2AeM)ya>91t<4$l&Ll+5-DMzoyI9SHk$|OcW342VcpIl*a!YA<^XeA<=YB|4E{` z^up%b93eB?iS@BB>UdOner1z^8)2-&66zjeqJC8E%S{U`vg|57<$3dw>fkrcuMVHR>qh;k6tH7mt_>idGD8ssH56{y z$SPu|`{!!CGhBE$hm7dWIBh@s^9B>dw1Z5q{0<+RZ%;Qp$xeKxi&h;W!~P7L z)yGO#f^0*X=EB{_B&BSc*{c_}W;$}@9+0$9Erc_XZT(q%rsYnFSqo&UdtV_-w`-j7 zG9|25=4n>$J#xFgh!p*Io-)3|^^fhf!QLW0gA%43DgwNw)jUmiO4!RMSFc#BE-C`N zQKR3Rjq&Dd$g001N~&flF(a?Va6`Z*Ys)_xcKW^g{P*IipYPY7w|IXoIW$~H$c|Tj zXrP`O3Eu*g?w3XV_4+~mSNPMl^(Uks7=y_Ao`es=#Jz3-iiovw*4VgQt%wwybZpVK z?ktOk>Kn0ly3GkuYOQ2lcTYrXT)>FVwwQUdCfCQV;mcmW?`Fp)HV?ONd=e2BefIOu zX8p$&7=O@m$y=9L6eZT~M~M&~JWJs}E-R`M(XhD5{N|_l6}NwwoB71zB6JJQMrPWk znx(hVp20Y*e}89x{DpA--0rM-peOBVK)ZhiELA!YXp_s1p-3$`vpPc74qg7F{Srw8qE zZm?Jhw6UvCkzfk;$_iJNmX4pn=^8kD+WJObWMRm?J-IG6%??!Tz?OZ(02oExY@)ic zRSZ*gjw;*PGSVv)C5atIbf)p1U-qmYuUAFx-vS7?cg!Rl&~m$z5u%nnbr--TM1O1h z9js+ZYgw6Ai%QA^_tpCBVS4#tSy^3avBusFH-!)r-a?|pufaih@*Ka^C(94|q|INN 
z-#<`LyT14iG=6^|cVB&@v}VPDywggXF!s#ey>{XjP>TDgSLSN6dVU(B|IS1YN@2Q1 zAjdOI+Il#=g0n0jT3`6R7APt!dYX8LZWn3APfks{Cz!fwz^lgJ)pZD4x%=F&+rXM&n7l!RBO$p0vNUPVn*qjq^wT zg82scgPs*v2}&Mq|MD`MX(~>o0u{5P^e=A+`QumYh=Qfjk^?}KvGLid!|--7VH zUH5;d2cXl2AFqu!b9z(h9W9xN5ekKZ-kZ%XMt7BMG=^_@(}cYh85f4MDz5LtKYk5Vuve`yYW z1*?j}l5dXkyVUL^QHu*#CSopRA~ipEM93gUvK!o|oL+T0({NVFT(w`qc}opb#1HNt zth~LM5w#HMIM$oo!|!lCp>OOfs#KV>JE4D<9GTnr!)5i4|I%f#B;@CH7tUaQnC6$m zrz)Nx^Ca;I`_RT69+41HprRW4&QsRDtJ=}3fGu~l7ptC<$@!4;c*u4XgMlGe#?>Bo zwW-yaqL(s)#I`hd&R}+i$D`*Ux9L5WxE8Rxuw-eK{IIuJv?)nJR&_1R)8Ol4_rc8EyqVA3|E>H%db%l|7Q35b5}%3KZr_YJAE~#f8cAr zv^5uM^kq<-AbqsvI8v6)M4`4NbQeTs2XQ;24Mb3TJI>R-&{yt9IYV7>1@h)qi8bmp zmFZ?ba`2Jad+muu6aIOGXG{Yl8@B+^;Z;=Ek0}6czX=zRt4mq^JB#L@J_NCUFJ?7r zN9w%>%U0I*JRq_;gB>)-&J3ck;KW9@Ym@Ad6(zI)5lXsS0J`IoLx+2{h&4GTm+XGX zs{XJr`H$JLV%}iMqrMfwJ4Q+_oXRWOpYh8?b?U1EVXjns$ImDg>K!i04|%k^M`XV` z9=t;B5!hC~2-L60Xh^KF9W+SjoR0KWCv5m z918SRY(-9m))yaZDCfI9d>8+r|J+eO<>@qD#SE{(^I*$OSF(*Q-xT*-z{zr6Nu;mD zOT#(|ql~6kd(Z|InuoUld^Jrs5P|Y(%XcGp?FB6rlEw#pr{-dx7H*t)-)T;Y_=7Ju zFUY!7l2_7u;n71!Liz}7!2g#V%MWZ|-nWBY0;+nF10+bH=}MQuH6*Hx$+6W?Vz4>e zFlz~R>8523zgYwuXD9K+GHJR}MdWHraOuofvqm+bbvVtH)^y*sJ}N#Nc-AHJ3Zf4s7J}#7=Eo(G}{*ux)R1!w(?@2q0udRK0tU2 zm~fDgSYqp#3$gg1PiE!W7$T~u#K7|YEAtV;iFpsNrDgw`Wik&0#Fvt(c?3 z2swe>G9lM(xGXeS`4|#L8sK9W&+EE4GO7*UKp8H0D*tW|LFR4H*M#_Ex+rY1K$tYOahd&#rklhyvtKCTlG zo(;u#_`^&ys!`@?+6OA4T$m+qKGAL*lz+&*xO87i=@#J@WbuEM61KHb%AlX46=&&^ zt)jBavd#ov5}u=26UHa%sN55NK5jk9uR~OC7l&KX5HD_VXSQmei$Sf zB1=WFlAwf0w)7C1vF(MvY3q#~)%T^F`!53R zm&?BVRW9yt3W$DbLH^e3N90nEv;l==bWyl$#7I4J*8KCd^>J$ZgE&t;L3x4~j3tkbAqQSx?-z$)Pj~Z`>IShY)ToV zu(xpCPem>I9O`%|gau5v0!5aJ!)z|K_GypWEF;pdw!^A(z3Tcd*T3^S_vYI&yac-* z#zLkUKR=cxfwtl-?^212Jsue1; ze1;hk|4K7u%dV5p1oP8MoX_;1&^%yTh`^ULJYmBM%T3q^ZOrLnhwFS*O6S~THI zknY36%!9$CDpsMKdp?L11yuEI&K1Wp&9x5bX%bl(1Lpvt0{K>1#>;bx$!S>=imMu~ zQp>hsMRNcQKg4tNuyZ}7P4$mYRavs0$8kql4c{f7%)|a=fbj~O1576 z;(qW^z4PqZ_JyX0{tCaRlI`gv-y|v_p>@xo&>}3C_+!eLc0zdL!Xdm7e_Ar%YkTGN 
zx-G2Jo^T1aIFM)3i+4J^|K+G_O5|ik^K81`$8RW}w{+~js&r*{gTis)>2+aaapHl= zWNGMk?ocEV(!MS} zJub`cFFR%Ws#%=~tePFpddu-Vo-SHVN&mw?n@RicXUh-{47n~_B%#q_u7ia_H`7yw zf$nWiV5iF`tOMTD2A1wZTRvJ_zGCjL50gbpM&cq;zTBVBh;2}dSWen5dvOb>8aOq1 zyhBOZcjQ{_FEAJ7oK|0^Ya_<+3HA-($ zOy}Ol0*RgQoybAFiGDfYgKh20-48cOXNS6MsVnygg{0+8w|ZI?zI`}ncDxr|r!z_>={;E#iRhdS{7P!p2~ay|!v79@9+SRfBph)I7+c{T)gf92;cdi7 zIu^@ie|?za?k5^KJ(KY+&hsvlKX(tKd)f3~X9j-<$owjv`}aQo3~S~j&2(xEju?1e zEO*9kM(aD(eSm+oV{oh`i@@jzzDql%w{yu)zf~mFq#b(rG!&ON!r#sjKD-_lFXERO zddF>2`>79hvWLkEuG{gXbPSeI@uMv=bBeLF@2iD+P6l^DW;Zwjl)8P(>V} z+Bhp{N(Nkdtz5+FA8)qG6TfL8hE4E&Zoyh>kwjk7{B$`8`n-+hlA{!IR@qEo2wv5{PPns#&Lax)A$w* z$8zPGm1J=VJInr80?!T2j9lidcIv&Z53M#i)}oJW@Y$_eGBAO*q+0ugVxlx>_aQ~PPg zsqNgi0v0oXtb_|@w80~~Z&5^u*VUUF^@exN@jq z;m!FC%g0)NM{jUl)1~LVs>1Td;|1l9zLah{BJ=#i$PP-q52%})yeY+_X0=?GZ^$5D zgbzh!inZSu)y}v+991~`invsoFC<1Ob0`8Y4Je!rPNN{Jse3o)JbYPY)6gP{=G0iv zW1%qMdlMQPrV{=y@RJ7G!FOJJvh#Ou3KdQQD=)&7bD1cMOD9?%F6G+O40bEaxRx*b zp)>c#&dG-f#UWPgkOYA!HVEn5Kf2_Y(^ex^-su>>(semH0^0j-y4<8(G_dY{Z2Y>A z)o^^!5^C-tJ&w(_|5=iW_I!uXYo>jKgwPgE$*Cbo1#&Q5T;`zivGI)I`_b{Q*3Qvc{w0qgEy|&ap3~ z>q4HyS~nI62(TP5Gwjc)L!)<=q+^$*Vf0aw?_3@DDo%wPa=&~k2Jwiw=S7=5bRXAY zZn?{oKz_xSB?=jN)4gIVeXtuc#XhOVOwM{b6nKH6{yo-n7OXvKmY-FzbP)Cb+Pl)A zrm}6F7L-v2WtM;<$`DXMK$#Mu4Ph|MVIBo!YCzC{5H&5xAfteY1R-DuL?8%AK*FR9 z5s1nZ8Nwuj0x}4;1e)=kzW3FAZNGl+cHMic->X~xI8~=soxN+Hb@pES?6ubSt(&?R zj8;Kin0Wk_0&G&PM|T3U3BUHb{#tGKXA-NzDuha%pbu6SKgmhR%*`9Wk^71X*}vtH zPdjZ)Qsm3_9pz_O0epcj58VYZcYf(bQA#EuAqV$l9(^H`yWEWX#5^&X3K6*4tRGxQ zqV3cR=SVwq8?MCuST0Br;vEwrL}7VRoA#Hf;m-78x2Jd?DBP+Wh;Xju+_gQr-fWO; zs<(~;8{b~pdQ>_E=IYUQU9#?pAK~~y+Gv+DyWt)97#+ZZ454PG--nM642%Ke!%Jx{ zZ)(AtM(5Y=8V*YnIcU^9*s>9-UY}8*FKhUB2(@>JYrhxrXgNwYCfT zlviDOwf_4|R?h6ZU68+98unDXT}?4s$~(LkpWk?Q;eDXUUsn9Bag|=-uEfTnGouMc zFazy`d%;4!%St~4duCjHEu+)1F;zTey_#SfobA_qXFJA;*tI(Ha_C`|`^xc5UA3-S z6LwEoSeJQ^l=f|_}DgeDeT=^zeVdXer*XX3}~W2gb-ADhgdR&qMkwunw*g?z1o%wpdqOotfU$j=}Kg z*qsaxU{^BaL5Nu)CXLkR1wxg`&sn#zEoHrkCoyC%slM>Wj&V-n;m!kElHPYEEO||0 
z*o_2MEeQMYa685Ewx85AqJ-S(LjJ7<+TxE5VimAQOX^YC6z>p+M)+*|9$)TivCE;iui2Ji?!2{FMUHD_k zh22d~_F$?vft~TiiWNdBgGRWn9obZFPCv4#>uRCJRS~%LXr*29dqG+}Y}i8K3re0? zbui<)K|$WY_*_T`yHzUU?8NJ$hp`#9?m1?&rBXTG!lg`NbpU&uvY6z-owG&yg?_9j zN<9v(l8L>Um*6fB3B%UCpUJw3EQ*G6x0~7noLR0CS?QkUr=yZj)ZNql?W(Hp$tC~} z`7<;@2^E8F4*$G_8?U)Hl;MkVMUM?Q%NE`Fj?~1eOz{eWM)g((2*{J>$ODd*? zWAvqoP7hOk_&W3oH#70b%Cxn{%=*)%=K!8VMN5y1`}P)sA|pJv6x)Tp>`S+EJOS^I z?Jj6-4%>2PE;3@?1Hk#+umi9HLjAtTGJHw7rH9h#vi;9u* z^*l0_{bf$%%({)X=1I%kqvwOD=z@)B&69S%gzPm@#RZ>S9z+lob$er^c-@*mPF-`J z;5V_(jh}S9Vm*^}X*$(YcHQZ$-jx99mc7z_1gPNh$~<9sNARYTjWsJ{sePefSPbzz zPcn!KuYJq8Q{DO&yI+cW9u}>#L+Pg)0`nwN9t+y_$_~`iGf;SWCG}e`wHteIvCvE z;P|q~q+lvtPh=&Nkrc(5$Sybz%&c1z0O=(DZ_Oy`NM&^-hj||%jc5w-U3@d-UIFLk z+A7)p2L~qo_1pa8Z}@NA(YYpA5A*%vfU?)?`0|zgB0q(e9uP`<@3jY2fqA=OXK8b! zFCBd2lkEZ3^bgEKo>FHl4;ek%f%-ees2)L|?enco;8Eo! z+^43WWvi0`cG%x8A&q1OjYRpy!Zd0~z`GPQH?I%MZMx_e7- z9J^8in>^juJTvGFP@RE++9(p`DFAr$w?e|#ZDwb`b@Mlc{f=&2`K4#WRkIH#I%Kj{ ziKN0C+{KM;CE*MKeQ>Jeeim@sS{D48tN_;O9Qg2r3baO=iiATQ^e!TWEU z%)iV8e=~T$ve^Is%B2D$YoC5xNTKe#3COTC3i(P(%}? zpd_*;D}d}5E`k0iTALeIVXKJHVi)WI0`--^mgzeOO`N)2t=`;NUzvjxO*nZ%B0K?} z;ps7y5z8*=1A0U|<#OHAcjr)?al_Ll%*dF*#?^u2b&Y5r!E%#)W(lZMuE}32Z=YkZ{+9K7WlDE=dP(47E7v$c0v4wA+;i%Uh)dW!V^V3;&+Invq z-?xr-dSEYmosix9To@`qObr|P(L5AVSRw&U3n-XN8*+$KEx72DR;!!x(jhlB3}6f! zfBgF>>|dYbQ5t$CgNVK*3S?NfKENL5;3VF1Go#(9(3_VCVD&A0*=SxX(=~$|ABsie z$tQw{J$WC;X@?j%NJO+I1H~STJ}VwSz4f9x;Lj9uHTlEy%O8;YziDe?JFO#=v{d1 zqN&l|5Sj^2ls~f_aL9HU6-&m1>x{dNAg(2XR~xGfJnK#B>jv=?_X&h zY~1C=Ko)y?|2}}~JKFk(4|6B4=om@u*cap6wCTwEt(Ub+E}=d@Mahc%oJE?cd|t_* zKwcd(?sKzhx-~xk#NlPPf~alJLu2`v>G@_(!X>%b$yCQB^ke?#W!*lPS@rdh5w#uo z8v+T<2l8%(C~&Yv@$1_(PjdHTtDvo{#0i~6Y|v&n4%*8~{dull>^0QcQ*25spR)m? 
z++IU@xBq}?ePStws~{lagqNDpBUz6GQQaEOGjek5mtBzFvpC_V+rlrh^x9vwGn&xt zZ|M8deV$pEfk2>q;Nr`C^!FhOKouB)e4Mfr*lNO}xdu1B)GMsKiAIt4ZGOUVJSUrY*kchMx)qr7bi{cr?cXbw+^nyy%5En$7OEk z=BDKGcGTGI=h3Zdh>n1Cavv4rNXobqaLABB;TJs}r@pypu*5lGid*_NykR zIG1K`jxI7TI_FNwd&ST;Jj)vwS>Yb`NQ>hRzauC(DmbY8i^9W(rx>8lLirC(J-eVt zRDv!1;u>?`_2=a&q}dH*c^t8|csTB}=V-XmNcQmUix>5VDrNE3cBDTHuK&;cr2q5h z%VF(#x~VO->Ccbjk3BOzSGQmMg4x_H%q7y$N3-jdqjnz7Kw%Dz(f|qa0!x!Fqj@*Y z&=KzPncPKxl96_&F%kompE#XZjlt*yWD}|SSDZR;4Ry~n^~XYOyHByDgiWP=RH&6Y zwY*KH#I)gl^j!sg=K|fF+0rtslR)*~R1+&6dFY@Y-t5$1(kfKTIrPuk9O$`jI z%hw8925Le-Tth2x^1l6UQ0qk`4l6b*!m<@jC6(%vf<(xY!E(5>ofbj|>!VZ$b71vu5Zfzwo{u$yJK*}dnk4ne znC;a**#z5>ZU^rCx%$1&I>48iKk6t$>?#KqVm{zuIeaVw6E(G}2g2U&+;=bn`(4Vh ztrCIbxgqcNQ4{G-WLwjCsC&4fXA;} Date: Thu, 8 Jan 2026 10:33:26 +0000 Subject: [PATCH 3/3] fix: rename files and update code --- PyTorch/build-in/Classification/SWL/{readme => readme.md} | 0 .../SWL/{requirements_exact.txt => requirements.txt} | 0 2 files changed, 0 insertions(+), 0 deletions(-) rename PyTorch/build-in/Classification/SWL/{readme => readme.md} (100%) rename PyTorch/build-in/Classification/SWL/{requirements_exact.txt => requirements.txt} (100%) diff --git a/PyTorch/build-in/Classification/SWL/readme b/PyTorch/build-in/Classification/SWL/readme.md similarity index 100% rename from PyTorch/build-in/Classification/SWL/readme rename to PyTorch/build-in/Classification/SWL/readme.md diff --git a/PyTorch/build-in/Classification/SWL/requirements_exact.txt b/PyTorch/build-in/Classification/SWL/requirements.txt similarity index 100% rename from PyTorch/build-in/Classification/SWL/requirements_exact.txt rename to PyTorch/build-in/Classification/SWL/requirements.txt